chore: update client submodule and enhance quality checks
- Updated the client submodule to the latest commit for improved features and stability. - Introduced new quality violation reports for better code quality assessment, including checks for duplicates, test smells, and magic numbers. - Enhanced linting configurations and diagnostics across various files to ensure cleaner code and adherence to standards.
This commit is contained in:
@@ -1,13 +1,766 @@
|
||||
|
||||
{
|
||||
"version": "1.36.1",
|
||||
"time": "1767607673434",
|
||||
"generalDiagnostics": [],
|
||||
"time": "1767655241469",
|
||||
"generalDiagnostics": [
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/application/services/meeting_service.py",
|
||||
"severity": "error",
|
||||
"message": "Type of \"key_points\" is partially unknown\n Type of \"key_points\" is \"Any | list[Unknown]\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 227,
|
||||
"character": 8
|
||||
},
|
||||
"end": {
|
||||
"line": 227,
|
||||
"character": 18
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownVariableType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/application/services/meeting_service.py",
|
||||
"severity": "error",
|
||||
"message": "Type of \"action_items\" is partially unknown\n Type of \"action_items\" is \"Any | list[Unknown]\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 228,
|
||||
"character": 8
|
||||
},
|
||||
"end": {
|
||||
"line": 228,
|
||||
"character": 20
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownVariableType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/application/services/meeting_service.py",
|
||||
"severity": "error",
|
||||
"message": "Argument type is partially unknown\n Argument corresponds to parameter \"key_points\" in function \"__init__\"\n Argument type is \"Any | list[Unknown]\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 234,
|
||||
"character": 23
|
||||
},
|
||||
"end": {
|
||||
"line": 234,
|
||||
"character": 33
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownArgumentType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/application/services/meeting_service.py",
|
||||
"severity": "error",
|
||||
"message": "Argument type is partially unknown\n Argument corresponds to parameter \"action_items\" in function \"__init__\"\n Argument type is \"Any | list[Unknown]\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 235,
|
||||
"character": 25
|
||||
},
|
||||
"end": {
|
||||
"line": 235,
|
||||
"character": 37
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownArgumentType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/application/services/meeting_service.py",
|
||||
"severity": "error",
|
||||
"message": "Type of \"annotation_type\" is unknown",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 276,
|
||||
"character": 8
|
||||
},
|
||||
"end": {
|
||||
"line": 276,
|
||||
"character": 23
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownVariableType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/application/services/meeting_service.py",
|
||||
"severity": "error",
|
||||
"message": "Type of \"start_time\" is unknown",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 278,
|
||||
"character": 8
|
||||
},
|
||||
"end": {
|
||||
"line": 278,
|
||||
"character": 18
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownVariableType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/application/services/meeting_service.py",
|
||||
"severity": "error",
|
||||
"message": "Type of \"end_time\" is unknown",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 279,
|
||||
"character": 8
|
||||
},
|
||||
"end": {
|
||||
"line": 279,
|
||||
"character": 16
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownVariableType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/application/services/meeting_service.py",
|
||||
"severity": "error",
|
||||
"message": "Type of \"segment_ids\" is partially unknown\n Type of \"segment_ids\" is \"Any | list[Unknown]\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 280,
|
||||
"character": 8
|
||||
},
|
||||
"end": {
|
||||
"line": 280,
|
||||
"character": 19
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownVariableType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/application/services/meeting_service.py",
|
||||
"severity": "error",
|
||||
"message": "Argument type is unknown\n Argument corresponds to parameter \"annotation_type\" in function \"__init__\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 284,
|
||||
"character": 28
|
||||
},
|
||||
"end": {
|
||||
"line": 284,
|
||||
"character": 43
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownArgumentType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/application/services/meeting_service.py",
|
||||
"severity": "error",
|
||||
"message": "Argument type is unknown\n Argument corresponds to parameter \"start_time\" in function \"__init__\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 286,
|
||||
"character": 23
|
||||
},
|
||||
"end": {
|
||||
"line": 286,
|
||||
"character": 33
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownArgumentType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/application/services/meeting_service.py",
|
||||
"severity": "error",
|
||||
"message": "Argument type is unknown\n Argument corresponds to parameter \"end_time\" in function \"__init__\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 287,
|
||||
"character": 21
|
||||
},
|
||||
"end": {
|
||||
"line": 287,
|
||||
"character": 29
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownArgumentType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/application/services/meeting_service.py",
|
||||
"severity": "error",
|
||||
"message": "Argument type is partially unknown\n Argument corresponds to parameter \"segment_ids\" in function \"__init__\"\n Argument type is \"Any | list[Unknown]\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 288,
|
||||
"character": 24
|
||||
},
|
||||
"end": {
|
||||
"line": 288,
|
||||
"character": 35
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownArgumentType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/application/services/meeting_service.py",
|
||||
"severity": "error",
|
||||
"message": "Type of \"value\" is unknown",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 298,
|
||||
"character": 32
|
||||
},
|
||||
"end": {
|
||||
"line": 298,
|
||||
"character": 53
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownMemberType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/domain/entities/named_entity.py",
|
||||
"severity": "error",
|
||||
"message": "Type of \"segment_ids\" is unknown",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 118,
|
||||
"character": 8
|
||||
},
|
||||
"end": {
|
||||
"line": 118,
|
||||
"character": 19
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownVariableType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/domain/entities/named_entity.py",
|
||||
"severity": "error",
|
||||
"message": "Type of \"unique_segments\" is partially unknown\n Type of \"unique_segments\" is \"list[Unknown]\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 127,
|
||||
"character": 8
|
||||
},
|
||||
"end": {
|
||||
"line": 127,
|
||||
"character": 23
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownVariableType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/domain/entities/named_entity.py",
|
||||
"severity": "error",
|
||||
"message": "Argument type is partially unknown\n Argument corresponds to parameter \"iterable\" in function \"sorted\"\n Argument type is \"set[Unknown]\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 127,
|
||||
"character": 33
|
||||
},
|
||||
"end": {
|
||||
"line": 127,
|
||||
"character": 49
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownArgumentType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/domain/entities/named_entity.py",
|
||||
"severity": "error",
|
||||
"message": "Argument type is unknown\n Argument corresponds to parameter \"iterable\" in function \"__init__\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 127,
|
||||
"character": 37
|
||||
},
|
||||
"end": {
|
||||
"line": 127,
|
||||
"character": 48
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownArgumentType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/domain/entities/named_entity.py",
|
||||
"severity": "error",
|
||||
"message": "Argument type is partially unknown\n Argument corresponds to parameter \"segment_ids\" in function \"__init__\"\n Argument type is \"list[Unknown]\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 133,
|
||||
"character": 24
|
||||
},
|
||||
"end": {
|
||||
"line": 133,
|
||||
"character": 39
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownArgumentType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/domain/ports/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Operation on \"__all__\" is not supported, so exported symbol list may be incorrect",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 27,
|
||||
"character": 0
|
||||
},
|
||||
"end": {
|
||||
"line": 46,
|
||||
"character": 1
|
||||
}
|
||||
},
|
||||
"rule": "reportUnsupportedDunderAll"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/domain/ports/repositories/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Operation on \"__all__\" is not supported, so exported symbol list may be incorrect",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 35,
|
||||
"character": 0
|
||||
},
|
||||
"end": {
|
||||
"line": 51,
|
||||
"character": 1
|
||||
}
|
||||
},
|
||||
"rule": "reportUnsupportedDunderAll"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/domain/ports/repositories/identity/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Operation on \"__all__\" is not supported, so exported symbol list may be incorrect",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 13,
|
||||
"character": 0
|
||||
},
|
||||
"end": {
|
||||
"line": 18,
|
||||
"character": 1
|
||||
}
|
||||
},
|
||||
"rule": "reportUnsupportedDunderAll"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/grpc/_client_mixins/annotation.py",
|
||||
"severity": "error",
|
||||
"message": "Type of \"annotation_type\" is unknown",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 64,
|
||||
"character": 12
|
||||
},
|
||||
"end": {
|
||||
"line": 64,
|
||||
"character": 27
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownVariableType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/grpc/_client_mixins/annotation.py",
|
||||
"severity": "error",
|
||||
"message": "Type of \"start_time\" is unknown",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 66,
|
||||
"character": 12
|
||||
},
|
||||
"end": {
|
||||
"line": 66,
|
||||
"character": 22
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownVariableType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/grpc/_client_mixins/annotation.py",
|
||||
"severity": "error",
|
||||
"message": "Type of \"end_time\" is unknown",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 67,
|
||||
"character": 12
|
||||
},
|
||||
"end": {
|
||||
"line": 67,
|
||||
"character": 20
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownVariableType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/grpc/_client_mixins/annotation.py",
|
||||
"severity": "error",
|
||||
"message": "Type of \"segment_ids\" is partially unknown\n Type of \"segment_ids\" is \"Any | list[Unknown]\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 68,
|
||||
"character": 12
|
||||
},
|
||||
"end": {
|
||||
"line": 68,
|
||||
"character": 23
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownVariableType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/grpc/_client_mixins/annotation.py",
|
||||
"severity": "error",
|
||||
"message": "Argument type is unknown\n Argument corresponds to parameter \"annotation_type\" in function \"annotation_type_to_proto\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 69,
|
||||
"character": 50
|
||||
},
|
||||
"end": {
|
||||
"line": 69,
|
||||
"character": 65
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownArgumentType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/grpc/_client_mixins/annotation.py",
|
||||
"severity": "error",
|
||||
"message": "Argument type is unknown\n Argument corresponds to parameter \"start_time\" in function \"__init__\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 74,
|
||||
"character": 27
|
||||
},
|
||||
"end": {
|
||||
"line": 74,
|
||||
"character": 37
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownArgumentType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/grpc/_client_mixins/annotation.py",
|
||||
"severity": "error",
|
||||
"message": "Argument type is unknown\n Argument corresponds to parameter \"end_time\" in function \"__init__\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 75,
|
||||
"character": 25
|
||||
},
|
||||
"end": {
|
||||
"line": 75,
|
||||
"character": 33
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownArgumentType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/grpc/_client_mixins/annotation.py",
|
||||
"severity": "error",
|
||||
"message": "Argument type is partially unknown\n Argument corresponds to parameter \"segment_ids\" in function \"__init__\"\n Argument type is \"Any | list[Unknown]\"",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 76,
|
||||
"character": 28
|
||||
},
|
||||
"end": {
|
||||
"line": 76,
|
||||
"character": 39
|
||||
}
|
||||
},
|
||||
"rule": "reportUnknownArgumentType"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/grpc/_mixins/errors/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Import \"MeetingId\" is not accessed",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 62,
|
||||
"character": 46
|
||||
},
|
||||
"end": {
|
||||
"line": 62,
|
||||
"character": 55
|
||||
}
|
||||
},
|
||||
"rule": "reportUnusedImport"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/grpc/_mixins/protocols.py",
|
||||
"severity": "error",
|
||||
"message": "Import \"ExportRepositoryProvider\" is not accessed",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 21,
|
||||
"character": 56
|
||||
},
|
||||
"end": {
|
||||
"line": 21,
|
||||
"character": 80
|
||||
}
|
||||
},
|
||||
"rule": "reportUnusedImport"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/infrastructure/persistence/models/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Import \"MeetingModel\" is not accessed",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 31,
|
||||
"character": 4
|
||||
},
|
||||
"end": {
|
||||
"line": 31,
|
||||
"character": 16
|
||||
}
|
||||
},
|
||||
"rule": "reportUnusedImport"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/infrastructure/persistence/models/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Import \"UserModel\" is not accessed",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 50,
|
||||
"character": 4
|
||||
},
|
||||
"end": {
|
||||
"line": 50,
|
||||
"character": 13
|
||||
}
|
||||
},
|
||||
"rule": "reportUnusedImport"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/infrastructure/persistence/models/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Import \"WorkspaceModel\" is not accessed",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 53,
|
||||
"character": 4
|
||||
},
|
||||
"end": {
|
||||
"line": 53,
|
||||
"character": 18
|
||||
}
|
||||
},
|
||||
"rule": "reportUnusedImport"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/infrastructure/persistence/models/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Import \"IntegrationModel\" is not accessed",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 60,
|
||||
"character": 4
|
||||
},
|
||||
"end": {
|
||||
"line": 60,
|
||||
"character": 20
|
||||
}
|
||||
},
|
||||
"rule": "reportUnusedImport"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/infrastructure/persistence/models/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Import \"TaskModel\" is not accessed",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 72,
|
||||
"character": 4
|
||||
},
|
||||
"end": {
|
||||
"line": 72,
|
||||
"character": 13
|
||||
}
|
||||
},
|
||||
"rule": "reportUnusedImport"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/infrastructure/persistence/models/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Operation on \"__all__\" is not supported, so exported symbol list may be incorrect",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 75,
|
||||
"character": 0
|
||||
},
|
||||
"end": {
|
||||
"line": 116,
|
||||
"character": 1
|
||||
}
|
||||
},
|
||||
"rule": "reportUnsupportedDunderAll"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/infrastructure/persistence/models/core/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Import \"MeetingModel\" is not accessed",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 8,
|
||||
"character": 4
|
||||
},
|
||||
"end": {
|
||||
"line": 8,
|
||||
"character": 16
|
||||
}
|
||||
},
|
||||
"rule": "reportUnusedImport"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/infrastructure/persistence/models/core/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Operation on \"__all__\" is not supported, so exported symbol list may be incorrect",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 19,
|
||||
"character": 0
|
||||
},
|
||||
"end": {
|
||||
"line": 29,
|
||||
"character": 1
|
||||
}
|
||||
},
|
||||
"rule": "reportUnsupportedDunderAll"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/infrastructure/persistence/models/entities/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Operation on \"__all__\" is not supported, so exported symbol list may be incorrect",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 10,
|
||||
"character": 0
|
||||
},
|
||||
"end": {
|
||||
"line": 14,
|
||||
"character": 1
|
||||
}
|
||||
},
|
||||
"rule": "reportUnsupportedDunderAll"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/infrastructure/persistence/models/identity/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Import \"UserModel\" is not accessed",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 5,
|
||||
"character": 4
|
||||
},
|
||||
"end": {
|
||||
"line": 5,
|
||||
"character": 13
|
||||
}
|
||||
},
|
||||
"rule": "reportUnusedImport"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/infrastructure/persistence/models/identity/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Import \"WorkspaceModel\" is not accessed",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 7,
|
||||
"character": 4
|
||||
},
|
||||
"end": {
|
||||
"line": 7,
|
||||
"character": 18
|
||||
}
|
||||
},
|
||||
"rule": "reportUnusedImport"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/infrastructure/persistence/models/identity/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Operation on \"__all__\" is not supported, so exported symbol list may be incorrect",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 15,
|
||||
"character": 0
|
||||
},
|
||||
"end": {
|
||||
"line": 23,
|
||||
"character": 1
|
||||
}
|
||||
},
|
||||
"rule": "reportUnsupportedDunderAll"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/infrastructure/persistence/models/integrations/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Import \"IntegrationModel\" is not accessed",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 5,
|
||||
"character": 4
|
||||
},
|
||||
"end": {
|
||||
"line": 5,
|
||||
"character": 20
|
||||
}
|
||||
},
|
||||
"rule": "reportUnusedImport"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/infrastructure/persistence/models/integrations/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Operation on \"__all__\" is not supported, so exported symbol list may be incorrect",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 16,
|
||||
"character": 0
|
||||
},
|
||||
"end": {
|
||||
"line": 25,
|
||||
"character": 1
|
||||
}
|
||||
},
|
||||
"rule": "reportUnsupportedDunderAll"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/infrastructure/persistence/models/organization/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Import \"TaskModel\" is not accessed",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 6,
|
||||
"character": 73
|
||||
},
|
||||
"end": {
|
||||
"line": 6,
|
||||
"character": 82
|
||||
}
|
||||
},
|
||||
"rule": "reportUnusedImport"
|
||||
},
|
||||
{
|
||||
"file": "/home/trav/repos/noteflow/src/noteflow/infrastructure/persistence/models/organization/__init__.py",
|
||||
"severity": "error",
|
||||
"message": "Operation on \"__all__\" is not supported, so exported symbol list may be incorrect",
|
||||
"range": {
|
||||
"start": {
|
||||
"line": 9,
|
||||
"character": 0
|
||||
},
|
||||
"end": {
|
||||
"line": 13,
|
||||
"character": 1
|
||||
}
|
||||
},
|
||||
"rule": "reportUnsupportedDunderAll"
|
||||
}
|
||||
],
|
||||
"summary": {
|
||||
"filesAnalyzed": 510,
|
||||
"errorCount": 0,
|
||||
"filesAnalyzed": 529,
|
||||
"errorCount": 47,
|
||||
"warningCount": 0,
|
||||
"informationCount": 0,
|
||||
"timeInSec": 5.583
|
||||
"timeInSec": 11.468
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because one or more lines are too long
1460
.hygeine/clippy.json
1460
.hygeine/clippy.json
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -19,7 +19,7 @@ Checking for deep nesting...
|
||||
[0;32mOK:[0m No excessively deep nesting found
|
||||
|
||||
Checking for unwrap() usage...
|
||||
[0;32mOK:[0m Found 3 unwrap() calls (within acceptable range)
|
||||
[0;32mOK:[0m No unwrap() calls found
|
||||
|
||||
Checking for excessive clone() usage...
|
||||
[0;32mOK:[0m No excessive clone() usage detected
|
||||
|
||||
2
client
2
client
Submodule client updated: 3eae645b25...8555904d45
247
scratch/quality_violations_backend.txt
Normal file
247
scratch/quality_violations_backend.txt
Normal file
@@ -0,0 +1,247 @@
|
||||
high_complexity: 5
|
||||
- src/noteflow/config/settings/_triggers.py:_string_list_from_unknown (complexity=14)
|
||||
- src/noteflow/grpc/service.py:shutdown (complexity=15)
|
||||
- src/noteflow/infrastructure/persistence/repositories/identity/project_repo.py:_settings_to_dict (complexity=14)
|
||||
- src/noteflow/infrastructure/triggers/app_audio.py:_select_device (complexity=14)
|
||||
- src/noteflow/infrastructure/triggers/app_audio.py:_detect_meeting_app (complexity=14)
|
||||
|
||||
long_parameter_list: 7
|
||||
- src/noteflow/application/services/auth_helpers.py:get_or_create_auth_integration (params=5)
|
||||
- src/noteflow/grpc/_mixins/_audio_helpers.py:validate_stream_format (params=5)
|
||||
- src/noteflow/grpc/_mixins/diarization/_jobs.py:create_diarization_error_response (params=5)
|
||||
- src/noteflow/grpc/_mixins/oidc.py:_apply_custom_provider_config (params=5)
|
||||
- src/noteflow/grpc/_mixins/streaming/_session.py:_init_audio_writer (params=5)
|
||||
- src/noteflow/grpc/_startup.py:print_startup_banner (params=5)
|
||||
- src/noteflow/infrastructure/summarization/factory.py:create_summarization_service (params=5)
|
||||
|
||||
god_class: 16
|
||||
- src/noteflow/application/services/calendar_service.py:CalendarService (methods=16)
|
||||
- src/noteflow/application/services/calendar_service.py:CalendarService (lines=419)
|
||||
- src/noteflow/application/services/meeting_service.py:MeetingService (methods=20)
|
||||
- src/noteflow/application/services/meeting_service.py:MeetingService (lines=448)
|
||||
- src/noteflow/domain/ports/unit_of_work.py:UnitOfWork (methods=19)
|
||||
- src/noteflow/grpc/meeting_store.py:MeetingStore (methods=20)
|
||||
- src/noteflow/grpc/service.py:NoteFlowServicer (methods=17)
|
||||
- src/noteflow/grpc/service.py:NoteFlowServicer (lines=430)
|
||||
- src/noteflow/infrastructure/audio/playback.py:SoundDevicePlayback (methods=19)
|
||||
- src/noteflow/infrastructure/audio/writer.py:MeetingAudioWriter (methods=20)
|
||||
- src/noteflow/infrastructure/calendar/oauth_manager.py:OAuthManager (lines=424)
|
||||
- src/noteflow/infrastructure/diarization/engine.py:DiarizationEngine (lines=405)
|
||||
- src/noteflow/infrastructure/persistence/memory/unit_of_work.py:MemoryUnitOfWork (methods=20)
|
||||
- src/noteflow/infrastructure/persistence/repositories/identity/workspace_repo.py:SqlAlchemyWorkspaceRepository (methods=16)
|
||||
- src/noteflow/infrastructure/persistence/repositories/identity/workspace_repo.py:SqlAlchemyWorkspaceRepository (lines=430)
|
||||
- src/noteflow/infrastructure/persistence/unit_of_work.py:SqlAlchemyUnitOfWork (methods=20)
|
||||
|
||||
deep_nesting: 56
|
||||
- src/noteflow/application/services/auth_service.py:logout (depth=3)
|
||||
- src/noteflow/application/services/calendar_service.py:list_calendar_events (depth=3)
|
||||
- src/noteflow/application/services/calendar_service.py:_fetch_provider_events (depth=3)
|
||||
- src/noteflow/application/services/ner_service.py:_get_cached_or_segments (depth=3)
|
||||
- src/noteflow/application/services/recovery_service.py:recover_crashed_meetings (depth=3)
|
||||
- src/noteflow/application/services/recovery_service.py:recover_crashed_diarization_jobs (depth=3)
|
||||
- src/noteflow/application/services/retention_service.py:run_cleanup (depth=3)
|
||||
- src/noteflow/application/services/summarization_service.py:get_available_modes (depth=3)
|
||||
- src/noteflow/application/services/summarization_service.py:_get_provider_with_fallback (depth=3)
|
||||
- src/noteflow/application/services/summarization_service.py:_get_fallback_provider (depth=3)
|
||||
- src/noteflow/cli/__main__.py:main (depth=3)
|
||||
- src/noteflow/cli/models.py:_run_download (depth=3)
|
||||
- src/noteflow/cli/models.py:main (depth=3)
|
||||
- src/noteflow/config/settings/_triggers.py:_string_list_from_unknown (depth=3)
|
||||
- src/noteflow/config/settings/_triggers.py:_dict_list_from_unknown (depth=3)
|
||||
- src/noteflow/domain/rules/builtin.py:validate_config (depth=3)
|
||||
- src/noteflow/grpc/_client_mixins/streaming.py:stream_worker (depth=3)
|
||||
- src/noteflow/grpc/_mixins/diarization/_jobs.py:start_diarization_job (depth=3)
|
||||
- src/noteflow/grpc/_mixins/diarization/_refinement.py:apply_diarization_turns (depth=3)
|
||||
- src/noteflow/grpc/_mixins/diarization/_speaker.py:RenameSpeaker (depth=3)
|
||||
- src/noteflow/grpc/_mixins/diarization/_streaming.py:ensure_diarization_session (depth=3)
|
||||
- src/noteflow/grpc/_mixins/diarization/_streaming.py:persist_streaming_turns (depth=3)
|
||||
- src/noteflow/grpc/_mixins/errors/_abort.py:domain_error_handler (depth=3)
|
||||
- src/noteflow/grpc/_mixins/meeting.py:StopMeeting (depth=3)
|
||||
- src/noteflow/grpc/_mixins/streaming/_asr.py:process_audio_segment (depth=3)
|
||||
- src/noteflow/grpc/_mixins/streaming/_mixin.py:StreamTranscription (depth=3)
|
||||
- src/noteflow/grpc/interceptors/logging.py:_wrap_unary_stream (depth=3)
|
||||
- src/noteflow/grpc/interceptors/logging.py:_wrap_stream_stream (depth=3)
|
||||
- src/noteflow/grpc/server.py:_recover_orphaned_jobs (depth=3)
|
||||
- src/noteflow/grpc/server.py:_wire_consent_persistence (depth=3)
|
||||
- src/noteflow/grpc/service.py:shutdown (depth=3)
|
||||
- src/noteflow/infrastructure/asr/segmenter.py:process_audio (depth=3)
|
||||
- src/noteflow/infrastructure/asr/segmenter.py:_handle_speech (depth=3)
|
||||
- src/noteflow/infrastructure/asr/segmenter.py:_handle_trailing (depth=3)
|
||||
- src/noteflow/infrastructure/asr/streaming_vad.py:process (depth=3)
|
||||
- src/noteflow/infrastructure/audio/playback.py:_start_stream (depth=3)
|
||||
- src/noteflow/infrastructure/calendar/outlook_adapter.py:_fetch_events (depth=3)
|
||||
- src/noteflow/infrastructure/diarization/_compat.py:_patch_huggingface_auth (depth=3)
|
||||
- src/noteflow/infrastructure/diarization/_compat.py:_patch_speechbrain_backend (depth=3)
|
||||
- src/noteflow/infrastructure/diarization/engine.py:_resolve_device (depth=3)
|
||||
- src/noteflow/infrastructure/diarization/session.py:_collect_turns (depth=3)
|
||||
- src/noteflow/infrastructure/export/markdown.py:export (depth=3)
|
||||
- src/noteflow/infrastructure/logging/log_buffer.py:emit (depth=3)
|
||||
- src/noteflow/infrastructure/ner/engine.py:extract_from_segments (depth=3)
|
||||
- src/noteflow/infrastructure/observability/usage.py:_flush_async (depth=3)
|
||||
- src/noteflow/infrastructure/observability/usage.py:flush (depth=3)
|
||||
- src/noteflow/infrastructure/persistence/database.py:_mask_database_url (depth=3)
|
||||
- src/noteflow/infrastructure/persistence/database.py:_find_missing_tables (depth=3)
|
||||
- src/noteflow/infrastructure/persistence/database.py:ensure_schema_ready (depth=3)
|
||||
- src/noteflow/infrastructure/summarization/mock_provider.py:summarize (depth=3)
|
||||
- src/noteflow/infrastructure/triggers/app_audio.py:_select_device (depth=3)
|
||||
- src/noteflow/infrastructure/triggers/app_audio.py:_detect_meeting_app (depth=3)
|
||||
- src/noteflow/infrastructure/triggers/calendar.py:parse_calendar_event_config (depth=3)
|
||||
- src/noteflow/infrastructure/triggers/calendar.py:_load_events_from_json (depth=3)
|
||||
- src/noteflow/infrastructure/triggers/foreground_app.py:get_signal (depth=3)
|
||||
- src/noteflow/infrastructure/webhooks/executor.py:deliver (depth=3)
|
||||
|
||||
long_method: 70
|
||||
- src/noteflow/application/services/auth_service.py:complete_login (lines=52)
|
||||
- src/noteflow/application/services/auth_service.py:_logout_provider (lines=58)
|
||||
- src/noteflow/application/services/calendar_service.py:_fetch_provider_events (lines=62)
|
||||
- src/noteflow/application/services/export_service.py:export_transcript (lines=52)
|
||||
- src/noteflow/application/services/identity_service.py:get_or_create_default_workspace (lines=57)
|
||||
- src/noteflow/application/services/identity_service.py:_get_workspace_context (lines=56)
|
||||
- src/noteflow/application/services/identity_service.py:create_workspace (lines=53)
|
||||
- src/noteflow/application/services/identity_service.py:update_user_profile (lines=51)
|
||||
- src/noteflow/application/services/project_service/active.py:get_active_project (lines=52)
|
||||
- src/noteflow/application/services/project_service/rules.py:get_effective_rules (lines=52)
|
||||
- src/noteflow/application/services/recovery_service.py:validate_meeting_audio (lines=61)
|
||||
- src/noteflow/application/services/retention_service.py:run_cleanup (lines=67)
|
||||
- src/noteflow/application/services/summarization_service.py:summarize (lines=52)
|
||||
- src/noteflow/cli/__main__.py:main (lines=51)
|
||||
- src/noteflow/grpc/_cli.py:parse_args (lines=65)
|
||||
- src/noteflow/grpc/_cli.py:build_config_from_args (lines=54)
|
||||
- src/noteflow/grpc/_mixins/annotation.py:UpdateAnnotation (lines=54)
|
||||
- src/noteflow/grpc/_mixins/calendar.py:ListCalendarEvents (lines=58)
|
||||
- src/noteflow/grpc/_mixins/diarization/_jobs.py:start_diarization_job (lines=59)
|
||||
- src/noteflow/grpc/_mixins/diarization/_jobs.py:run_diarization_job (lines=59)
|
||||
- src/noteflow/grpc/_mixins/diarization_job.py:CancelDiarizationJob (lines=54)
|
||||
- src/noteflow/grpc/_mixins/export.py:ExportTranscript (lines=55)
|
||||
- src/noteflow/grpc/_mixins/meeting.py:StopMeeting (lines=59)
|
||||
- src/noteflow/grpc/_mixins/oidc.py:RegisterOidcProvider (lines=58)
|
||||
- src/noteflow/grpc/_mixins/oidc.py:RefreshOidcDiscovery (lines=52)
|
||||
- src/noteflow/grpc/_mixins/streaming/_asr.py:process_audio_segment (lines=67)
|
||||
- src/noteflow/grpc/_mixins/streaming/_mixin.py:StreamTranscription (lines=52)
|
||||
- src/noteflow/grpc/_mixins/streaming/_partials.py:maybe_emit_partial (lines=62)
|
||||
- src/noteflow/grpc/_mixins/streaming/_processing.py:track_chunk_sequence (lines=58)
|
||||
- src/noteflow/grpc/_mixins/summarization.py:GenerateSummary (lines=54)
|
||||
- src/noteflow/grpc/_mixins/webhooks.py:UpdateWebhook (lines=53)
|
||||
- src/noteflow/grpc/interceptors/logging.py:_create_logging_handler (lines=65)
|
||||
- src/noteflow/grpc/server.py:_wire_consent_persistence (lines=51)
|
||||
- src/noteflow/grpc/service.py:__init__ (lines=56)
|
||||
- src/noteflow/grpc/service.py:shutdown (lines=68)
|
||||
- src/noteflow/infrastructure/asr/engine.py:transcribe (lines=56)
|
||||
- src/noteflow/infrastructure/audio/capture.py:start (lines=55)
|
||||
- src/noteflow/infrastructure/audio/playback.py:_start_stream (lines=68)
|
||||
- src/noteflow/infrastructure/audio/playback.py:_stream_callback (lines=51)
|
||||
- src/noteflow/infrastructure/audio/reader.py:load_meeting_audio (lines=53)
|
||||
- src/noteflow/infrastructure/auth/oidc_discovery.py:discover (lines=53)
|
||||
- src/noteflow/infrastructure/auth/oidc_discovery.py:_parse_discovery (lines=51)
|
||||
- src/noteflow/infrastructure/auth/oidc_registry.py:create_provider (lines=52)
|
||||
- src/noteflow/infrastructure/calendar/google_adapter.py:list_events (lines=63)
|
||||
- src/noteflow/infrastructure/calendar/oauth_manager.py:initiate_auth (lines=56)
|
||||
- src/noteflow/infrastructure/calendar/oauth_manager.py:complete_auth (lines=59)
|
||||
- src/noteflow/infrastructure/calendar/oauth_manager.py:refresh_tokens (lines=58)
|
||||
- src/noteflow/infrastructure/calendar/outlook_adapter.py:list_events (lines=58)
|
||||
- src/noteflow/infrastructure/diarization/engine.py:load_streaming_model (lines=55)
|
||||
- src/noteflow/infrastructure/diarization/engine.py:diarize_full (lines=58)
|
||||
- src/noteflow/infrastructure/diarization/session.py:process_chunk (lines=61)
|
||||
- src/noteflow/infrastructure/export/markdown.py:export (lines=63)
|
||||
- src/noteflow/infrastructure/logging/transitions.py:log_state_transition (lines=59)
|
||||
- src/noteflow/infrastructure/metrics/collector.py:collect_now (lines=51)
|
||||
- src/noteflow/infrastructure/observability/otel.py:configure_observability (lines=61)
|
||||
- src/noteflow/infrastructure/persistence/database.py:ensure_schema_ready (lines=54)
|
||||
- src/noteflow/infrastructure/persistence/repositories/identity/project_repo.py:_dict_to_settings (lines=54)
|
||||
- src/noteflow/infrastructure/persistence/repositories/identity/workspace_repo.py:_settings_from_dict (lines=52)
|
||||
- src/noteflow/infrastructure/persistence/repositories/summary_repo.py:save (lines=68)
|
||||
- src/noteflow/infrastructure/persistence/repositories/usage_event_repo.py:aggregate (lines=57)
|
||||
- src/noteflow/infrastructure/persistence/repositories/usage_event_repo.py:aggregate_by_provider (lines=61)
|
||||
- src/noteflow/infrastructure/persistence/repositories/usage_event_repo.py:aggregate_by_event_type (lines=58)
|
||||
- src/noteflow/infrastructure/summarization/citation_verifier.py:filter_invalid_citations (lines=60)
|
||||
- src/noteflow/infrastructure/summarization/cloud_provider.py:_call_openai (lines=51)
|
||||
- src/noteflow/infrastructure/summarization/cloud_provider.py:_call_anthropic (lines=56)
|
||||
- src/noteflow/infrastructure/summarization/factory.py:create_summarization_service (lines=52)
|
||||
- src/noteflow/infrastructure/summarization/mock_provider.py:summarize (lines=68)
|
||||
- src/noteflow/infrastructure/summarization/ollama_provider.py:summarize (lines=57)
|
||||
- src/noteflow/infrastructure/triggers/app_audio.py:_select_device (lines=61)
|
||||
- src/noteflow/infrastructure/webhooks/executor.py:deliver (lines=66)
|
||||
|
||||
feature_envy: 32
|
||||
- src/noteflow/application/observability/ports.py:UsageEvent.from_metrics (metrics=5_vs_self=0)
|
||||
- src/noteflow/application/services/export_service.py:ExportService.infer_format_from_extension (ExportFormat=5_vs_self=0)
|
||||
- src/noteflow/application/services/identity_service.py:IdentityService._get_workspace_context (logger=5_vs_self=0)
|
||||
- src/noteflow/application/services/meeting_service.py:MeetingService.add_segment (data=12_vs_self=3)
|
||||
- src/noteflow/application/services/recovery_service.py:RecoveryService._recover_meeting (validation=5_vs_self=1)
|
||||
- src/noteflow/application/services/webhook_service.py:WebhookService._log_delivery (delivery=5_vs_self=0)
|
||||
- src/noteflow/domain/auth/oidc.py:ClaimMapping.from_dict (data=10_vs_self=0)
|
||||
- src/noteflow/domain/auth/oidc.py:OidcDiscoveryConfig.from_dict (data=13_vs_self=0)
|
||||
- src/noteflow/domain/auth/oidc.py:OidcProviderConfig.create (p=5_vs_self=0)
|
||||
- src/noteflow/domain/auth/oidc.py:OidcProviderConfig.from_dict (data=9_vs_self=0)
|
||||
- src/noteflow/domain/entities/meeting.py:Meeting.from_uuid_str (p=10_vs_self=0)
|
||||
- src/noteflow/domain/rules/builtin.py:TriggerRuleType.validate_config (errors=5_vs_self=0)
|
||||
- src/noteflow/domain/webhooks/events.py:WebhookDelivery.create (r=5_vs_self=0)
|
||||
- src/noteflow/grpc/_client_mixins/annotation.py:AnnotationClientMixin.update_annotation (kwargs=5_vs_self=2)
|
||||
- src/noteflow/grpc/_config.py:GrpcServerConfig.from_args (args=13_vs_self=0)
|
||||
- src/noteflow/grpc/_mixins/annotation.py:AnnotationMixin.UpdateAnnotation (annotation=5_vs_self=1)
|
||||
- src/noteflow/grpc/_mixins/diarization/_streaming.py:StreamingDiarizationMixin.ensure_diarization_session (state=5_vs_self=2)
|
||||
- src/noteflow/grpc/_mixins/oidc.py:OidcMixin.UpdateOidcProvider (object=5_vs_self=1)
|
||||
- src/noteflow/grpc/_mixins/oidc.py:OidcMixin.ListOidcPresets (config=6_vs_self=0)
|
||||
- src/noteflow/grpc/_mixins/streaming/_session.py:StreamSessionManager._load_persisted_diarization_turns (t=5_vs_self=0)
|
||||
- src/noteflow/grpc/_mixins/webhooks.py:WebhooksMixin.UpdateWebhook (config=7_vs_self=1)
|
||||
- src/noteflow/grpc/server.py:NoteFlowServer._recover_orphaned_jobs (logger=5_vs_self=2)
|
||||
- src/noteflow/infrastructure/auth/oidc_discovery.py:OidcDiscoveryClient.validate_provider (discovery=5_vs_self=1)
|
||||
- src/noteflow/infrastructure/auth/oidc_registry.py:OidcProviderRegistry.create_provider (p=6_vs_self=3)
|
||||
- src/noteflow/infrastructure/auth/oidc_registry.py:OidcAuthService.get_preset_options (config=6_vs_self=0)
|
||||
- src/noteflow/infrastructure/calendar/oauth_manager.py:OAuthManager.complete_auth (oauth_state=5_vs_self=2)
|
||||
- src/noteflow/infrastructure/observability/usage.py:LoggingUsageEventSink.record_simple (m=5_vs_self=1)
|
||||
- src/noteflow/infrastructure/observability/usage.py:OtelUsageEventSink.record_simple (m=5_vs_self=1)
|
||||
- src/noteflow/infrastructure/observability/usage.py:BufferedDatabaseUsageEventSink.record_simple (m=5_vs_self=1)
|
||||
- src/noteflow/infrastructure/summarization/citation_verifier.py:SegmentCitationVerifier.filter_invalid_citations (kp=5_vs_self=0)
|
||||
- src/noteflow/infrastructure/triggers/app_audio.py:AppAudioProvider.__init__ (settings=8_vs_self=5)
|
||||
- src/noteflow/infrastructure/webhooks/executor.py:WebhookExecutor._build_headers (ctx=5_vs_self=0)
|
||||
|
||||
module_size_soft: 33
|
||||
- src/noteflow/application/services/auth_service.py:module (lines=416)
|
||||
- src/noteflow/application/services/calendar_service.py:module (lines=466)
|
||||
- src/noteflow/application/services/identity_service.py:module (lines=421)
|
||||
- src/noteflow/application/services/meeting_service.py:module (lines=493)
|
||||
- src/noteflow/application/services/summarization_service.py:module (lines=369)
|
||||
- src/noteflow/domain/auth/oidc.py:module (lines=371)
|
||||
- src/noteflow/domain/entities/meeting.py:module (lines=396)
|
||||
- src/noteflow/domain/ports/repositories/external.py:module (lines=366)
|
||||
- src/noteflow/grpc/_mixins/converters/_domain.py:module (lines=351)
|
||||
- src/noteflow/grpc/_mixins/meeting.py:module (lines=418)
|
||||
- src/noteflow/grpc/_mixins/oidc.py:module (lines=365)
|
||||
- src/noteflow/grpc/_mixins/project/_mixin.py:module (lines=368)
|
||||
- src/noteflow/grpc/_mixins/protocols.py:module (lines=457)
|
||||
- src/noteflow/grpc/_startup.py:module (lines=444)
|
||||
- src/noteflow/grpc/meeting_store.py:module (lines=373)
|
||||
- src/noteflow/grpc/server.py:module (lines=411)
|
||||
- src/noteflow/grpc/service.py:module (lines=498)
|
||||
- src/noteflow/infrastructure/audio/playback.py:module (lines=370)
|
||||
- src/noteflow/infrastructure/auth/oidc_registry.py:module (lines=426)
|
||||
- src/noteflow/infrastructure/calendar/oauth_manager.py:module (lines=466)
|
||||
- src/noteflow/infrastructure/calendar/outlook_adapter.py:module (lines=400)
|
||||
- src/noteflow/infrastructure/diarization/engine.py:module (lines=469)
|
||||
- src/noteflow/infrastructure/observability/usage.py:module (lines=393)
|
||||
- src/noteflow/infrastructure/persistence/database.py:module (lines=492)
|
||||
- src/noteflow/infrastructure/persistence/repositories/_base.py:module (lines=357)
|
||||
- src/noteflow/infrastructure/persistence/repositories/diarization_job_repo.py:module (lines=352)
|
||||
- src/noteflow/infrastructure/persistence/repositories/identity/project_repo.py:module (lines=431)
|
||||
- src/noteflow/infrastructure/persistence/repositories/identity/workspace_repo.py:module (lines=485)
|
||||
- src/noteflow/infrastructure/persistence/repositories/integration_repo.py:module (lines=355)
|
||||
- src/noteflow/infrastructure/persistence/repositories/usage_event_repo.py:module (lines=470)
|
||||
- src/noteflow/infrastructure/security/crypto.py:module (lines=365)
|
||||
- src/noteflow/infrastructure/summarization/cloud_provider.py:module (lines=376)
|
||||
- src/noteflow/infrastructure/webhooks/executor.py:module (lines=409)
|
||||
|
||||
thin_wrapper: 13
|
||||
- src/noteflow/domain/identity/context.py:is_admin (can_admin)
|
||||
- src/noteflow/grpc/service.py:get_stream_state (get)
|
||||
- src/noteflow/infrastructure/auth/oidc_registry.py:get_preset_config (get)
|
||||
- src/noteflow/infrastructure/calendar/oauth_manager.py:get_pending_state (get)
|
||||
- src/noteflow/infrastructure/logging/structured.py:get_user_id (get)
|
||||
- src/noteflow/infrastructure/logging/structured.py:get_workspace_id (get)
|
||||
- src/noteflow/infrastructure/observability/otel.py:start_as_current_span (_NoOpSpanContext)
|
||||
- src/noteflow/infrastructure/persistence/database.py:get_async_session_factory (async_sessionmaker)
|
||||
- src/noteflow/infrastructure/persistence/memory/repositories/core.py:create (insert)
|
||||
- src/noteflow/infrastructure/persistence/memory/repositories/core.py:get_by_meeting (fetch_segments)
|
||||
- src/noteflow/infrastructure/persistence/memory/repositories/core.py:delete_by_meeting (clear_summary)
|
||||
- src/noteflow/infrastructure/triggers/foreground_app.py:suppressed_apps (frozenset)
|
||||
- src/noteflow/infrastructure/webhooks/metrics.py:empty (cls)
|
||||
1189
scratch/quality_violations_duplicates.txt
Normal file
1189
scratch/quality_violations_duplicates.txt
Normal file
File diff suppressed because it is too large
Load Diff
48
scratch/quality_violations_helpers.txt
Normal file
48
scratch/quality_violations_helpers.txt
Normal file
@@ -0,0 +1,48 @@
|
||||
scattered_helpers: 10
|
||||
Helper 'create_repository_provider' appears in multiple modules:
|
||||
src/noteflow/grpc/service.py:182, src/noteflow/grpc/_mixins/meeting.py:152, src/noteflow/grpc/_mixins/export.py:39, src/noteflow/grpc/_mixins/diarization_job.py:54, src/noteflow/grpc/_mixins/entities.py:37, src/noteflow/grpc/_mixins/annotation.py:62, src/noteflow/grpc/_mixins/webhooks.py:67, src/noteflow/grpc/_mixins/preferences.py:54, src/noteflow/grpc/_mixins/identity.py:38, src/noteflow/grpc/_mixins/summarization.py:34, src/noteflow/grpc/_mixins/project/_types.py:43
|
||||
Helper 'get_operation_context' appears in multiple modules:
|
||||
src/noteflow/grpc/service.py:196, src/noteflow/grpc/_mixins/identity.py:40
|
||||
Helper 'is_enabled' appears in multiple modules:
|
||||
src/noteflow/application/services/trigger_service.py:76, src/noteflow/application/services/retention_service.py:59, src/noteflow/infrastructure/triggers/foreground_app.py:61, src/noteflow/infrastructure/triggers/app_audio.py:273, src/noteflow/infrastructure/triggers/audio_activity.py:170, src/noteflow/infrastructure/triggers/calendar.py:58
|
||||
Helper 'create_meeting' appears in multiple modules:
|
||||
src/noteflow/application/services/meeting_service.py:61, src/noteflow/grpc/_client_mixins/meeting.py:24
|
||||
Helper 'get_meeting' appears in multiple modules:
|
||||
src/noteflow/application/services/meeting_service.py:83, src/noteflow/grpc/_client_mixins/meeting.py:66
|
||||
Helper 'get_annotation' appears in multiple modules:
|
||||
src/noteflow/application/services/meeting_service.py:405, src/noteflow/grpc/_client_mixins/annotation.py:84
|
||||
Helper 'is_available' appears in multiple modules:
|
||||
src/noteflow/infrastructure/triggers/foreground_app.py:65, src/noteflow/infrastructure/summarization/ollama_provider.py:125, src/noteflow/infrastructure/summarization/mock_provider.py:34, src/noteflow/infrastructure/summarization/cloud_provider.py:173
|
||||
Helper 'get_recent' appears in multiple modules:
|
||||
src/noteflow/infrastructure/webhooks/metrics.py:74, src/noteflow/infrastructure/logging/log_buffer.py:99
|
||||
Helper 'convert_audio_format' appears in multiple modules:
|
||||
src/noteflow/grpc/_mixins/_audio_helpers.py:65, src/noteflow/grpc/_mixins/streaming/_mixin.py:168, src/noteflow/grpc/_mixins/streaming/_processing.py:229
|
||||
Helper 'parse_meeting_id' appears in multiple modules:
|
||||
src/noteflow/grpc/_mixins/errors/_parse.py:67, src/noteflow/grpc/_mixins/converters/_id_parsing.py:24
|
||||
|
||||
duplicate_helper_signatures: 25
|
||||
'create_repository_provider(self)' defined at: src/noteflow/grpc/service.py:182, src/noteflow/grpc/_mixins/meeting.py:152, src/noteflow/grpc/_mixins/export.py:39, src/noteflow/grpc/_mixins/diarization_job.py:54, src/noteflow/grpc/_mixins/entities.py:37, src/noteflow/grpc/_mixins/annotation.py:62, src/noteflow/grpc/_mixins/webhooks.py:67, src/noteflow/grpc/_mixins/preferences.py:54, src/noteflow/grpc/_mixins/identity.py:38, src/noteflow/grpc/_mixins/summarization.py:34, src/noteflow/grpc/_mixins/project/_types.py:43
|
||||
'get_operation_context(self, context)' defined at: src/noteflow/grpc/service.py:196, src/noteflow/grpc/_mixins/identity.py:40
|
||||
'is_enabled(self)' defined at: src/noteflow/application/services/trigger_service.py:76, src/noteflow/application/services/retention_service.py:59, src/noteflow/infrastructure/triggers/foreground_app.py:61, src/noteflow/infrastructure/triggers/app_audio.py:273, src/noteflow/infrastructure/triggers/audio_activity.py:170, src/noteflow/infrastructure/triggers/calendar.py:58
|
||||
'get_meeting(self, meeting_id)' defined at: src/noteflow/application/services/meeting_service.py:83, src/noteflow/grpc/_client_mixins/meeting.py:66
|
||||
'get_annotation(self, annotation_id)' defined at: src/noteflow/application/services/meeting_service.py:405, src/noteflow/grpc/_client_mixins/annotation.py:84
|
||||
'validate_config(self, config)' defined at: src/noteflow/domain/rules/registry.py:111, src/noteflow/domain/rules/builtin.py:59, src/noteflow/domain/rules/builtin.py:147
|
||||
'get_schema(self)' defined at: src/noteflow/domain/rules/registry.py:122, src/noteflow/domain/rules/builtin.py:85, src/noteflow/domain/rules/builtin.py:185
|
||||
'to_dict(self)' defined at: src/noteflow/domain/auth/oidc.py:70, src/noteflow/domain/auth/oidc.py:124, src/noteflow/domain/auth/oidc.py:313
|
||||
'from_dict(cls, data)' defined at: src/noteflow/domain/auth/oidc.py:86, src/noteflow/domain/auth/oidc.py:143, src/noteflow/domain/auth/oidc.py:334
|
||||
'can_write(self)' defined at: src/noteflow/domain/identity/roles.py:23, src/noteflow/domain/identity/roles.py:60, src/noteflow/domain/identity/context.py:83
|
||||
'can_admin(self)' defined at: src/noteflow/domain/identity/roles.py:27, src/noteflow/domain/identity/roles.py:68
|
||||
'_get_sounddevice_module(self)' defined at: src/noteflow/infrastructure/audio/capture.py:233, src/noteflow/infrastructure/audio/playback.py:249
|
||||
'to_orm_kwargs(entity)' defined at: src/noteflow/infrastructure/converters/ner_converters.py:47, src/noteflow/infrastructure/converters/integration_converters.py:49, src/noteflow/infrastructure/converters/integration_converters.py:103
|
||||
'get_user_email(self, access_token)' defined at: src/noteflow/infrastructure/calendar/google_adapter.py:142, src/noteflow/infrastructure/calendar/outlook_adapter.py:250
|
||||
'get_user_info(self, access_token)' defined at: src/noteflow/infrastructure/calendar/google_adapter.py:157, src/noteflow/infrastructure/calendar/outlook_adapter.py:265
|
||||
'_parse_event(self, item)' defined at: src/noteflow/infrastructure/calendar/google_adapter.py:201, src/noteflow/infrastructure/calendar/outlook_adapter.py:314
|
||||
'_parse_datetime(self, dt_data)' defined at: src/noteflow/infrastructure/calendar/google_adapter.py:246, src/noteflow/infrastructure/calendar/outlook_adapter.py:362
|
||||
'validate_provider(self, provider)' defined at: src/noteflow/infrastructure/auth/oidc_registry.py:252, src/noteflow/infrastructure/auth/oidc_discovery.py:183
|
||||
'is_open(self)' defined at: src/noteflow/infrastructure/security/crypto.py:263, src/noteflow/infrastructure/security/crypto.py:363
|
||||
'get_or_create_master_key(self)' defined at: src/noteflow/infrastructure/security/keystore.py:88, src/noteflow/infrastructure/security/keystore.py:175, src/noteflow/infrastructure/security/keystore.py:210
|
||||
'has_master_key(self)' defined at: src/noteflow/infrastructure/security/keystore.py:142, src/noteflow/infrastructure/security/keystore.py:187, src/noteflow/infrastructure/security/keystore.py:248
|
||||
'format_name(self)' defined at: src/noteflow/infrastructure/export/html.py:158, src/noteflow/infrastructure/export/markdown.py:32, src/noteflow/infrastructure/export/pdf.py:156
|
||||
'get_signal(self)' defined at: src/noteflow/infrastructure/triggers/foreground_app.py:89, src/noteflow/infrastructure/triggers/app_audio.py:276, src/noteflow/infrastructure/triggers/audio_activity.py:133, src/noteflow/infrastructure/triggers/calendar.py:61
|
||||
'is_available(self)' defined at: src/noteflow/infrastructure/summarization/ollama_provider.py:125, src/noteflow/infrastructure/summarization/mock_provider.py:34, src/noteflow/infrastructure/summarization/cloud_provider.py:173
|
||||
'get_oidc_service(self)' defined at: src/noteflow/grpc/_mixins/oidc.py:33, src/noteflow/grpc/_mixins/oidc.py:122
|
||||
840
scratch/quality_violations_magic.txt
Normal file
840
scratch/quality_violations_magic.txt
Normal file
@@ -0,0 +1,840 @@
|
||||
repeated_magic_numbers: 12
|
||||
Magic number 40 used 3 times:
|
||||
src/noteflow/cli/models.py:235
|
||||
src/noteflow/cli/models.py:248
|
||||
src/noteflow/config/settings/_triggers.py:114
|
||||
|
||||
Magic number 300 used 6 times:
|
||||
src/noteflow/application/services/auth_helpers.py:183
|
||||
src/noteflow/domain/webhooks/events.py:251
|
||||
src/noteflow/domain/webhooks/constants.py:49
|
||||
src/noteflow/config/settings/_main.py:201
|
||||
src/noteflow/infrastructure/webhooks/metrics.py:22
|
||||
src/noteflow/infrastructure/persistence/repositories/webhook_repo.py:174
|
||||
|
||||
Magic number 24 used 5 times:
|
||||
src/noteflow/domain/ports/calendar.py:138
|
||||
src/noteflow/config/settings/_calendar.py:64
|
||||
src/noteflow/config/settings/_main.py:109
|
||||
src/noteflow/infrastructure/calendar/google_adapter.py:81
|
||||
src/noteflow/infrastructure/calendar/outlook_adapter.py:125
|
||||
|
||||
Magic number 10000 used 4 times:
|
||||
src/noteflow/domain/webhooks/constants.py:12
|
||||
src/noteflow/config/settings/_main.py:157
|
||||
src/noteflow/config/settings/_main.py:183
|
||||
src/noteflow/grpc/_mixins/webhooks.py:118
|
||||
|
||||
Magic number 500 used 6 times:
|
||||
src/noteflow/domain/webhooks/constants.py:21
|
||||
src/noteflow/domain/webhooks/constants.py:56
|
||||
src/noteflow/config/constants/http.py:65
|
||||
src/noteflow/config/settings/_main.py:183
|
||||
src/noteflow/infrastructure/persistence/constants.py:31
|
||||
src/noteflow/infrastructure/calendar/outlook_adapter.py:32
|
||||
|
||||
Magic number 168 used 4 times:
|
||||
src/noteflow/config/settings/_calendar.py:64
|
||||
src/noteflow/config/settings/_main.py:109
|
||||
src/noteflow/config/settings/_main.py:143
|
||||
src/noteflow/grpc/_mixins/sync.py:205
|
||||
|
||||
Magic number 0.3 used 6 times:
|
||||
src/noteflow/config/settings/_triggers.py:188
|
||||
src/noteflow/config/settings/_triggers.py:201
|
||||
src/noteflow/config/settings/_main.py:189
|
||||
src/noteflow/infrastructure/asr/segmenter.py:38
|
||||
src/noteflow/infrastructure/summarization/ollama_provider.py:70
|
||||
src/noteflow/infrastructure/summarization/cloud_provider.py:71
|
||||
|
||||
Magic number 120.0 used 3 times:
|
||||
src/noteflow/config/settings/_main.py:211
|
||||
src/noteflow/infrastructure/summarization/ollama_provider.py:70
|
||||
src/noteflow/grpc/_mixins/diarization_job.py:242
|
||||
|
||||
Magic number 8 used 12 times:
|
||||
src/noteflow/infrastructure/audio/writer.py:175
|
||||
src/noteflow/infrastructure/calendar/oauth_manager.py:214
|
||||
src/noteflow/infrastructure/calendar/oauth_manager.py:214
|
||||
src/noteflow/infrastructure/triggers/calendar.py:159
|
||||
src/noteflow/infrastructure/triggers/calendar.py:159
|
||||
src/noteflow/grpc/_mixins/meeting.py:131
|
||||
src/noteflow/grpc/_mixins/meeting.py:192
|
||||
src/noteflow/grpc/_mixins/meeting.py:103
|
||||
src/noteflow/grpc/_mixins/meeting.py:131
|
||||
src/noteflow/grpc/_mixins/meeting.py:192
|
||||
src/noteflow/grpc/_mixins/meeting.py:103
|
||||
src/noteflow/grpc/_mixins/converters/_id_parsing.py:17
|
||||
|
||||
Magic number 64 used 3 times:
|
||||
src/noteflow/infrastructure/calendar/oauth_helpers.py:50
|
||||
src/noteflow/infrastructure/persistence/migrations/versions/f0a1b2c3d4e5_add_user_preferences_table.py:29
|
||||
src/noteflow/infrastructure/persistence/models/identity/settings.py:94
|
||||
|
||||
Magic number 32 used 5 times:
|
||||
src/noteflow/infrastructure/calendar/oauth_helpers.py:122
|
||||
src/noteflow/infrastructure/security/crypto.py:26
|
||||
src/noteflow/infrastructure/security/keystore.py:23
|
||||
src/noteflow/infrastructure/persistence/migrations/versions/n8o9p0q1r2s3_add_usage_events_table.py:54
|
||||
src/noteflow/infrastructure/persistence/models/observability/usage_event.py:99
|
||||
|
||||
Magic number 16 used 3 times:
|
||||
src/noteflow/infrastructure/security/crypto.py:28
|
||||
src/noteflow/infrastructure/persistence/migrations/versions/n8o9p0q1r2s3_add_usage_events_table.py:55
|
||||
src/noteflow/infrastructure/persistence/models/observability/usage_event.py:103
|
||||
|
||||
repeated_string_literals: 101
|
||||
String 'provider_name' repeated 5 times:
|
||||
src/noteflow/domain/errors.py:232
|
||||
src/noteflow/application/services/meeting_service.py:332
|
||||
src/noteflow/infrastructure/observability/usage.py:120
|
||||
src/noteflow/infrastructure/persistence/repositories/usage_event_repo.py:200
|
||||
src/noteflow/infrastructure/persistence/repositories/usage_event_repo.py:238
|
||||
|
||||
String 'action_item' repeated 4 times:
|
||||
src/noteflow/domain/value_objects.py:30
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:142
|
||||
src/noteflow/grpc/_client_mixins/converters.py:63
|
||||
src/noteflow/grpc/_client_mixins/converters.py:72
|
||||
|
||||
String 'decision' repeated 3 times:
|
||||
src/noteflow/domain/value_objects.py:31
|
||||
src/noteflow/grpc/_client_mixins/converters.py:64
|
||||
src/noteflow/grpc/_client_mixins/converters.py:73
|
||||
|
||||
String 'note' repeated 5 times:
|
||||
src/noteflow/domain/value_objects.py:32
|
||||
src/noteflow/grpc/_client_mixins/converters.py:62
|
||||
src/noteflow/grpc/_client_mixins/converters.py:65
|
||||
src/noteflow/grpc/_client_mixins/converters.py:71
|
||||
src/noteflow/grpc/_client_mixins/converters.py:126
|
||||
|
||||
String 'risk' repeated 3 times:
|
||||
src/noteflow/domain/value_objects.py:33
|
||||
src/noteflow/grpc/_client_mixins/converters.py:66
|
||||
src/noteflow/grpc/_client_mixins/converters.py:74
|
||||
|
||||
String '__main__' repeated 4 times:
|
||||
src/noteflow/cli/retention.py:150
|
||||
src/noteflow/cli/models.py:296
|
||||
src/noteflow/cli/__main__.py:71
|
||||
src/noteflow/grpc/server.py:410
|
||||
|
||||
String 'store_true' repeated 3 times:
|
||||
src/noteflow/cli/retention.py:126
|
||||
src/noteflow/grpc/_cli.py:69
|
||||
src/noteflow/grpc/_cli.py:74
|
||||
|
||||
String 'sort_desc' repeated 3 times:
|
||||
src/noteflow/grpc/meeting_store.py:42
|
||||
src/noteflow/infrastructure/persistence/repositories/meeting_repo.py:138
|
||||
src/noteflow/infrastructure/persistence/memory/repositories/core.py:52
|
||||
|
||||
String 'project_id' repeated 6 times:
|
||||
src/noteflow/grpc/meeting_store.py:43
|
||||
src/noteflow/config/constants/errors.py:80
|
||||
src/noteflow/infrastructure/persistence/repositories/meeting_repo.py:139
|
||||
src/noteflow/infrastructure/persistence/memory/repositories/core.py:53
|
||||
src/noteflow/grpc/_mixins/meeting.py:124
|
||||
src/noteflow/grpc/_mixins/meeting.py:222
|
||||
|
||||
String 'project_ids' repeated 3 times:
|
||||
src/noteflow/grpc/meeting_store.py:44
|
||||
src/noteflow/infrastructure/persistence/repositories/meeting_repo.py:140
|
||||
src/noteflow/infrastructure/persistence/memory/repositories/core.py:54
|
||||
|
||||
String 'provider' repeated 8 times:
|
||||
src/noteflow/grpc/_startup.py:140
|
||||
src/noteflow/application/services/calendar_service.py:193
|
||||
src/noteflow/application/services/calendar_service.py:189
|
||||
src/noteflow/application/services/auth_helpers.py:113
|
||||
src/noteflow/application/services/auth_helpers.py:109
|
||||
src/noteflow/infrastructure/persistence/repositories/integration_repo.py:73
|
||||
src/noteflow/infrastructure/persistence/memory/repositories/integration.py:36
|
||||
src/noteflow/grpc/_mixins/sync.py:110
|
||||
|
||||
String 'calendar' repeated 6 times:
|
||||
src/noteflow/grpc/_startup.py:174
|
||||
src/noteflow/domain/entities/integration.py:19
|
||||
src/noteflow/domain/triggers/entities.py:16
|
||||
src/noteflow/grpc/_mixins/sync.py:147
|
||||
src/noteflow/grpc/_mixins/calendar.py:258
|
||||
src/noteflow/grpc/_mixins/calendar.py:272
|
||||
|
||||
String 'enabled' repeated 9 times:
|
||||
src/noteflow/application/services/trigger_service.py:199
|
||||
src/noteflow/application/services/trigger_service.py:208
|
||||
src/noteflow/domain/auth/oidc.py:305
|
||||
src/noteflow/domain/auth/oidc.py:310
|
||||
src/noteflow/domain/auth/oidc.py:322
|
||||
src/noteflow/domain/auth/oidc.py:351
|
||||
src/noteflow/infrastructure/converters/webhook_converters.py:69
|
||||
src/noteflow/grpc/_mixins/oidc.py:266
|
||||
src/noteflow/grpc/_mixins/webhooks.py:181
|
||||
|
||||
String 'email' repeated 25 times:
|
||||
src/noteflow/application/services/identity_service.py:407
|
||||
src/noteflow/domain/entities/integration.py:18
|
||||
src/noteflow/domain/auth/oidc.py:58
|
||||
src/noteflow/domain/auth/oidc.py:90
|
||||
src/noteflow/domain/auth/oidc.py:240
|
||||
src/noteflow/domain/auth/oidc.py:280
|
||||
src/noteflow/domain/auth/oidc.py:364
|
||||
src/noteflow/infrastructure/calendar/google_adapter.py:187
|
||||
src/noteflow/infrastructure/calendar/google_adapter.py:217
|
||||
src/noteflow/infrastructure/calendar/google_adapter.py:219
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:55
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:72
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:89
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:106
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:123
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:140
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:157
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:58
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:75
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:92
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:109
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:126
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:143
|
||||
src/noteflow/infrastructure/persistence/models/entities/speaker.py:34
|
||||
src/noteflow/grpc/_mixins/converters/_oidc.py:32
|
||||
|
||||
String 'audio.enc' repeated 4 times:
|
||||
src/noteflow/application/services/recovery_service.py:115
|
||||
src/noteflow/infrastructure/audio/writer.py:159
|
||||
src/noteflow/infrastructure/audio/reader.py:96
|
||||
src/noteflow/infrastructure/audio/reader.py:189
|
||||
|
||||
String 'unknown' repeated 8 times:
|
||||
src/noteflow/application/services/recovery_service.py:222
|
||||
src/noteflow/application/services/meeting_service.py:347
|
||||
src/noteflow/application/services/meeting_service.py:347
|
||||
src/noteflow/application/services/meeting_service.py:357
|
||||
src/noteflow/grpc/_client_mixins/converters.py:52
|
||||
src/noteflow/grpc/_client_mixins/converters.py:85
|
||||
src/noteflow/grpc/_client_mixins/converters.py:172
|
||||
src/noteflow/grpc/_client_mixins/converters.py:105
|
||||
|
||||
String 'model_name' repeated 3 times:
|
||||
src/noteflow/application/services/meeting_service.py:333
|
||||
src/noteflow/infrastructure/observability/usage.py:121
|
||||
src/noteflow/infrastructure/persistence/repositories/usage_event_repo.py:302
|
||||
|
||||
String 'annotation_type' repeated 3 times:
|
||||
src/noteflow/application/services/meeting_service.py:377
|
||||
src/noteflow/grpc/_client_mixins/annotation.py:64
|
||||
src/noteflow/grpc/_client_mixins/annotation.py:157
|
||||
|
||||
String 'start_time' repeated 5 times:
|
||||
src/noteflow/application/services/meeting_service.py:379
|
||||
src/noteflow/infrastructure/converters/orm_converters.py:81
|
||||
src/noteflow/infrastructure/converters/calendar_converters.py:75
|
||||
src/noteflow/grpc/_client_mixins/annotation.py:66
|
||||
src/noteflow/grpc/_client_mixins/annotation.py:159
|
||||
|
||||
String 'end_time' repeated 5 times:
|
||||
src/noteflow/application/services/meeting_service.py:380
|
||||
src/noteflow/infrastructure/converters/orm_converters.py:82
|
||||
src/noteflow/infrastructure/converters/calendar_converters.py:76
|
||||
src/noteflow/grpc/_client_mixins/annotation.py:67
|
||||
src/noteflow/grpc/_client_mixins/annotation.py:160
|
||||
|
||||
String 'key_points' repeated 4 times:
|
||||
src/noteflow/application/services/meeting_service.py:330
|
||||
src/noteflow/infrastructure/summarization/_parsing.py:256
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:75
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:100
|
||||
|
||||
String 'action_items' repeated 4 times:
|
||||
src/noteflow/application/services/meeting_service.py:331
|
||||
src/noteflow/infrastructure/summarization/_parsing.py:260
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:107
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:138
|
||||
|
||||
String 'segment_ids' repeated 7 times:
|
||||
src/noteflow/application/services/meeting_service.py:381
|
||||
src/noteflow/domain/entities/named_entity.py:117
|
||||
src/noteflow/infrastructure/converters/ner_converters.py:66
|
||||
src/noteflow/infrastructure/summarization/_parsing.py:194
|
||||
src/noteflow/infrastructure/summarization/_parsing.py:218
|
||||
src/noteflow/grpc/_client_mixins/annotation.py:161
|
||||
src/noteflow/grpc/_client_mixins/annotation.py:68
|
||||
|
||||
String 'location' repeated 4 times:
|
||||
src/noteflow/domain/entities/named_entity.py:33
|
||||
src/noteflow/infrastructure/converters/calendar_converters.py:77
|
||||
src/noteflow/infrastructure/calendar/google_adapter.py:228
|
||||
src/noteflow/infrastructure/calendar/outlook_adapter.py:340
|
||||
|
||||
String 'date' repeated 3 times:
|
||||
src/noteflow/domain/entities/named_entity.py:34
|
||||
src/noteflow/infrastructure/calendar/google_adapter.py:210
|
||||
src/noteflow/infrastructure/calendar/google_adapter.py:249
|
||||
|
||||
String 'email_verified' repeated 9 times:
|
||||
src/noteflow/domain/auth/oidc.py:59
|
||||
src/noteflow/domain/auth/oidc.py:91
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:59
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:76
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:93
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:110
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:127
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:144
|
||||
src/noteflow/grpc/_mixins/converters/_oidc.py:33
|
||||
|
||||
String 'preferred_username' repeated 8 times:
|
||||
src/noteflow/domain/auth/oidc.py:61
|
||||
src/noteflow/domain/auth/oidc.py:93
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:61
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:78
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:95
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:129
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:146
|
||||
src/noteflow/grpc/_mixins/converters/_oidc.py:35
|
||||
|
||||
String 'groups' repeated 11 times:
|
||||
src/noteflow/domain/auth/oidc.py:62
|
||||
src/noteflow/domain/auth/oidc.py:94
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:55
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:72
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:123
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:62
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:79
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:96
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:130
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:147
|
||||
src/noteflow/grpc/_mixins/converters/_oidc.py:36
|
||||
|
||||
String 'picture' repeated 9 times:
|
||||
src/noteflow/domain/auth/oidc.py:63
|
||||
src/noteflow/domain/auth/oidc.py:95
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:63
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:80
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:97
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:114
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:131
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:148
|
||||
src/noteflow/grpc/_mixins/converters/_oidc.py:37
|
||||
|
||||
String 'jwks_uri' repeated 3 times:
|
||||
src/noteflow/domain/auth/oidc.py:131
|
||||
src/noteflow/domain/auth/oidc.py:156
|
||||
src/noteflow/domain/auth/oidc.py:156
|
||||
|
||||
String 'end_session_endpoint' repeated 3 times:
|
||||
src/noteflow/domain/auth/oidc.py:132
|
||||
src/noteflow/domain/auth/oidc.py:157
|
||||
src/noteflow/domain/auth/oidc.py:157
|
||||
|
||||
String 'revocation_endpoint' repeated 3 times:
|
||||
src/noteflow/domain/auth/oidc.py:133
|
||||
src/noteflow/domain/auth/oidc.py:158
|
||||
src/noteflow/domain/auth/oidc.py:158
|
||||
|
||||
String 'introspection_endpoint' repeated 3 times:
|
||||
src/noteflow/domain/auth/oidc.py:134
|
||||
src/noteflow/domain/auth/oidc.py:159
|
||||
src/noteflow/domain/auth/oidc.py:159
|
||||
|
||||
String 'issuer_url' repeated 3 times:
|
||||
src/noteflow/domain/auth/oidc.py:268
|
||||
src/noteflow/domain/auth/oidc.py:320
|
||||
src/noteflow/domain/auth/oidc.py:349
|
||||
|
||||
String 'discovery' repeated 3 times:
|
||||
src/noteflow/domain/auth/oidc.py:299
|
||||
src/noteflow/domain/auth/oidc.py:323
|
||||
src/noteflow/domain/auth/oidc.py:336
|
||||
|
||||
String 'discovery_refreshed_at' repeated 3 times:
|
||||
src/noteflow/domain/auth/oidc.py:300
|
||||
src/noteflow/domain/auth/oidc.py:330
|
||||
src/noteflow/domain/auth/oidc.py:342
|
||||
|
||||
String 'preset' repeated 3 times:
|
||||
src/noteflow/domain/auth/oidc.py:319
|
||||
src/noteflow/domain/auth/oidc.py:348
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:418
|
||||
|
||||
String 'claim_mapping' repeated 6 times:
|
||||
src/noteflow/domain/auth/oidc.py:324
|
||||
src/noteflow/domain/auth/oidc.py:337
|
||||
src/noteflow/grpc/_mixins/oidc.py:83
|
||||
src/noteflow/grpc/_mixins/oidc.py:104
|
||||
src/noteflow/grpc/_mixins/oidc.py:257
|
||||
src/noteflow/grpc/_mixins/oidc.py:258
|
||||
|
||||
String 'require_email_verified' repeated 6 times:
|
||||
src/noteflow/domain/auth/oidc.py:326
|
||||
src/noteflow/domain/auth/oidc.py:366
|
||||
src/noteflow/grpc/_mixins/oidc.py:110
|
||||
src/noteflow/grpc/_mixins/oidc.py:263
|
||||
src/noteflow/grpc/_mixins/oidc.py:264
|
||||
src/noteflow/grpc/_mixins/oidc.py:177
|
||||
|
||||
String 'allowed_groups' repeated 4 times:
|
||||
src/noteflow/domain/auth/oidc.py:327
|
||||
src/noteflow/domain/auth/oidc.py:339
|
||||
src/noteflow/grpc/_mixins/oidc.py:108
|
||||
src/noteflow/grpc/_mixins/oidc.py:261
|
||||
|
||||
String 'profile' repeated 10 times:
|
||||
src/noteflow/domain/auth/oidc.py:240
|
||||
src/noteflow/domain/auth/oidc.py:280
|
||||
src/noteflow/domain/auth/oidc.py:364
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:55
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:72
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:89
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:106
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:123
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:140
|
||||
src/noteflow/infrastructure/auth/oidc_registry.py:157
|
||||
|
||||
String 'UserRepository' repeated 3 times:
|
||||
src/noteflow/domain/ports/__init__.py:44
|
||||
src/noteflow/domain/ports/repositories/__init__.py:49
|
||||
src/noteflow/domain/ports/repositories/identity/__init__.py:17
|
||||
|
||||
String 'WorkspaceRepository' repeated 3 times:
|
||||
src/noteflow/domain/ports/__init__.py:46
|
||||
src/noteflow/domain/ports/repositories/__init__.py:51
|
||||
src/noteflow/domain/ports/repositories/identity/__init__.py:18
|
||||
|
||||
String 'Webhook' repeated 6 times:
|
||||
src/noteflow/domain/webhooks/events.py:82
|
||||
src/noteflow/domain/webhooks/events.py:140
|
||||
src/noteflow/domain/webhooks/events.py:106
|
||||
src/noteflow/infrastructure/persistence/models/integrations/webhook.py:48
|
||||
src/noteflow/grpc/_mixins/webhooks.py:116
|
||||
src/noteflow/grpc/_mixins/errors/_fetch.py:29
|
||||
|
||||
String 'secret' repeated 3 times:
|
||||
src/noteflow/domain/webhooks/events.py:107
|
||||
src/noteflow/infrastructure/converters/webhook_converters.py:68
|
||||
src/noteflow/grpc/_mixins/webhooks.py:192
|
||||
|
||||
String 'max_retries' repeated 3 times:
|
||||
src/noteflow/domain/webhooks/events.py:109
|
||||
src/noteflow/infrastructure/converters/webhook_converters.py:71
|
||||
src/noteflow/grpc/_mixins/webhooks.py:189
|
||||
|
||||
String 'claude-3-haiku-20240307' repeated 3 times:
|
||||
src/noteflow/config/settings/_main.py:197
|
||||
src/noteflow/infrastructure/summarization/cloud_provider.py:71
|
||||
src/noteflow/infrastructure/summarization/cloud_provider.py:69
|
||||
|
||||
String 'segmenter_state_transition' repeated 5 times:
|
||||
src/noteflow/infrastructure/asr/segmenter.py:161
|
||||
src/noteflow/infrastructure/asr/segmenter.py:214
|
||||
src/noteflow/infrastructure/asr/segmenter.py:251
|
||||
src/noteflow/infrastructure/asr/segmenter.py:226
|
||||
src/noteflow/infrastructure/asr/segmenter.py:267
|
||||
|
||||
String 'sample_rate' repeated 5 times:
|
||||
src/noteflow/infrastructure/audio/writer.py:107
|
||||
src/noteflow/infrastructure/audio/writer.py:147
|
||||
src/noteflow/infrastructure/audio/capture.py:130
|
||||
src/noteflow/infrastructure/audio/reader.py:85
|
||||
src/noteflow/infrastructure/diarization/engine.py:396
|
||||
|
||||
String 'attendees' repeated 3 times:
|
||||
src/noteflow/infrastructure/converters/calendar_converters.py:78
|
||||
src/noteflow/infrastructure/calendar/google_adapter.py:215
|
||||
src/noteflow/infrastructure/calendar/outlook_adapter.py:330
|
||||
|
||||
String 'UnitOfWork not in context' repeated 17 times:
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:112
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:119
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:126
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:133
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:140
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:147
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:154
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:161
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:168
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:175
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:182
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:189
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:196
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:203
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:210
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:304
|
||||
src/noteflow/infrastructure/persistence/unit_of_work.py:313
|
||||
|
||||
String 'postgres://' repeated 4 times:
|
||||
src/noteflow/infrastructure/persistence/database.py:215
|
||||
src/noteflow/infrastructure/persistence/database.py:274
|
||||
src/noteflow/infrastructure/persistence/database.py:216
|
||||
src/noteflow/infrastructure/persistence/database.py:275
|
||||
|
||||
String 'diarization_jobs' repeated 3 times:
|
||||
src/noteflow/infrastructure/persistence/database.py:350
|
||||
src/noteflow/infrastructure/persistence/models/core/diarization.py:28
|
||||
src/noteflow/infrastructure/persistence/models/core/diarization.py:69
|
||||
|
||||
String 'user_preferences' repeated 4 times:
|
||||
src/noteflow/infrastructure/persistence/database.py:350
|
||||
src/noteflow/infrastructure/persistence/database.py:392
|
||||
src/noteflow/infrastructure/persistence/database.py:404
|
||||
src/noteflow/infrastructure/persistence/models/identity/settings.py:90
|
||||
|
||||
String 'code' repeated 6 times:
|
||||
src/noteflow/infrastructure/calendar/oauth_manager.py:388
|
||||
src/noteflow/infrastructure/calendar/oauth_helpers.py:104
|
||||
src/noteflow/grpc/interceptors/logging.py:191
|
||||
src/noteflow/grpc/interceptors/logging.py:226
|
||||
src/noteflow/grpc/interceptors/logging.py:260
|
||||
src/noteflow/grpc/interceptors/logging.py:295
|
||||
|
||||
String 'start' repeated 3 times:
|
||||
src/noteflow/infrastructure/calendar/google_adapter.py:207
|
||||
src/noteflow/infrastructure/calendar/outlook_adapter.py:320
|
||||
src/noteflow/infrastructure/triggers/calendar.py:118
|
||||
|
||||
String 'ascii' repeated 4 times:
|
||||
src/noteflow/infrastructure/calendar/oauth_helpers.py:57
|
||||
src/noteflow/infrastructure/calendar/oauth_helpers.py:56
|
||||
src/noteflow/infrastructure/security/keystore.py:61
|
||||
src/noteflow/grpc/_mixins/export.py:78
|
||||
|
||||
String '</div>' repeated 5 times:
|
||||
src/noteflow/infrastructure/export/html.py:118
|
||||
src/noteflow/infrastructure/export/html.py:146
|
||||
src/noteflow/infrastructure/export/html.py:99
|
||||
src/noteflow/infrastructure/export/html.py:115
|
||||
src/noteflow/infrastructure/export/pdf.py:295
|
||||
|
||||
String '</dd>' repeated 5 times:
|
||||
src/noteflow/infrastructure/export/html.py:84
|
||||
src/noteflow/infrastructure/export/html.py:88
|
||||
src/noteflow/infrastructure/export/html.py:92
|
||||
src/noteflow/infrastructure/export/html.py:96
|
||||
src/noteflow/infrastructure/export/html.py:97
|
||||
|
||||
String 'content' repeated 5 times:
|
||||
src/noteflow/infrastructure/summarization/ollama_provider.py:214
|
||||
src/noteflow/infrastructure/summarization/ollama_provider.py:215
|
||||
src/noteflow/infrastructure/summarization/cloud_provider.py:289
|
||||
src/noteflow/infrastructure/summarization/cloud_provider.py:290
|
||||
src/noteflow/infrastructure/summarization/cloud_provider.py:342
|
||||
|
||||
String 'ActionItemModel' repeated 4 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:74
|
||||
src/noteflow/infrastructure/persistence/models/core/__init__.py:20
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:65
|
||||
src/noteflow/infrastructure/persistence/models/organization/task.py:104
|
||||
|
||||
String 'AnnotationModel' repeated 3 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:75
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:150
|
||||
src/noteflow/infrastructure/persistence/models/core/__init__.py:21
|
||||
|
||||
String 'CalendarEventModel' repeated 4 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:79
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:97
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:280
|
||||
src/noteflow/infrastructure/persistence/models/integrations/__init__.py:17
|
||||
|
||||
String 'DiarizationJobModel' repeated 3 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:80
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:156
|
||||
src/noteflow/infrastructure/persistence/models/core/__init__.py:22
|
||||
|
||||
String 'ExternalRefModel' repeated 3 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:81
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:102
|
||||
src/noteflow/infrastructure/persistence/models/integrations/__init__.py:18
|
||||
|
||||
String 'IntegrationModel' repeated 6 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:82
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:135
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:183
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:247
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:321
|
||||
src/noteflow/infrastructure/persistence/models/integrations/__init__.py:19
|
||||
|
||||
String 'IntegrationSecretModel' repeated 3 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:83
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:87
|
||||
src/noteflow/infrastructure/persistence/models/integrations/__init__.py:20
|
||||
|
||||
String 'IntegrationSyncRunModel' repeated 3 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:84
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:92
|
||||
src/noteflow/infrastructure/persistence/models/integrations/__init__.py:21
|
||||
|
||||
String 'KeyPointModel' repeated 3 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:85
|
||||
src/noteflow/infrastructure/persistence/models/core/__init__.py:23
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:59
|
||||
|
||||
String 'MeetingCalendarLinkModel' repeated 4 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:86
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:180
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:251
|
||||
src/noteflow/infrastructure/persistence/models/integrations/__init__.py:22
|
||||
|
||||
String 'MeetingModel' repeated 15 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:87
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:228
|
||||
src/noteflow/infrastructure/persistence/models/core/diarization.py:68
|
||||
src/noteflow/infrastructure/persistence/models/core/diarization.py:102
|
||||
src/noteflow/infrastructure/persistence/models/core/annotation.py:62
|
||||
src/noteflow/infrastructure/persistence/models/core/__init__.py:24
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:55
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:276
|
||||
src/noteflow/infrastructure/persistence/models/organization/task.py:100
|
||||
src/noteflow/infrastructure/persistence/models/organization/tagging.py:83
|
||||
src/noteflow/infrastructure/persistence/models/entities/speaker.py:109
|
||||
src/noteflow/infrastructure/persistence/models/entities/named_entity.py:71
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:70
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:140
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:238
|
||||
|
||||
String 'MeetingSpeakerModel' repeated 4 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:89
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:166
|
||||
src/noteflow/infrastructure/persistence/models/entities/speaker.py:74
|
||||
src/noteflow/infrastructure/persistence/models/entities/__init__.py:12
|
||||
|
||||
String 'MeetingTagModel' repeated 4 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:91
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:171
|
||||
src/noteflow/infrastructure/persistence/models/organization/__init__.py:10
|
||||
src/noteflow/infrastructure/persistence/models/organization/tagging.py:58
|
||||
|
||||
String 'NamedEntityModel' repeated 3 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:92
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:185
|
||||
src/noteflow/infrastructure/persistence/models/entities/__init__.py:13
|
||||
|
||||
String 'PersonModel' repeated 5 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:93
|
||||
src/noteflow/infrastructure/persistence/models/organization/task.py:108
|
||||
src/noteflow/infrastructure/persistence/models/entities/speaker.py:113
|
||||
src/noteflow/infrastructure/persistence/models/entities/__init__.py:14
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:80
|
||||
|
||||
String 'ProjectMembershipModel' repeated 4 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:95
|
||||
src/noteflow/infrastructure/persistence/models/identity/__init__.py:16
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:135
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:233
|
||||
|
||||
String 'SegmentModel' repeated 4 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:97
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:137
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:262
|
||||
src/noteflow/infrastructure/persistence/models/core/__init__.py:25
|
||||
|
||||
String 'StreamingDiarizationTurnModel' repeated 3 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:99
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:161
|
||||
src/noteflow/infrastructure/persistence/models/core/__init__.py:26
|
||||
|
||||
String 'SummaryModel' repeated 5 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:100
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:143
|
||||
src/noteflow/infrastructure/persistence/models/core/__init__.py:27
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:99
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:137
|
||||
|
||||
String 'TagModel' repeated 4 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:101
|
||||
src/noteflow/infrastructure/persistence/models/organization/__init__.py:11
|
||||
src/noteflow/infrastructure/persistence/models/organization/tagging.py:87
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:85
|
||||
|
||||
String 'TaskModel' repeated 6 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:102
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:176
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:141
|
||||
src/noteflow/infrastructure/persistence/models/organization/__init__.py:12
|
||||
src/noteflow/infrastructure/persistence/models/entities/speaker.py:78
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:90
|
||||
|
||||
String 'UserModel' repeated 6 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:103
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:128
|
||||
src/noteflow/infrastructure/persistence/models/identity/settings.py:80
|
||||
src/noteflow/infrastructure/persistence/models/identity/__init__.py:19
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:175
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:272
|
||||
|
||||
String 'WebhookConfigModel' repeated 4 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:105
|
||||
src/noteflow/infrastructure/persistence/models/integrations/webhook.py:126
|
||||
src/noteflow/infrastructure/persistence/models/integrations/__init__.py:23
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:95
|
||||
|
||||
String 'WebhookDeliveryModel' repeated 3 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:106
|
||||
src/noteflow/infrastructure/persistence/models/integrations/webhook.py:81
|
||||
src/noteflow/infrastructure/persistence/models/integrations/__init__.py:24
|
||||
|
||||
String 'WordTimingModel' repeated 3 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:107
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:232
|
||||
src/noteflow/infrastructure/persistence/models/core/__init__.py:28
|
||||
|
||||
String 'WorkspaceMembershipModel' repeated 4 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:108
|
||||
src/noteflow/infrastructure/persistence/models/identity/__init__.py:21
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:65
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:130
|
||||
|
||||
String 'WorkspaceModel' repeated 11 times:
|
||||
src/noteflow/infrastructure/persistence/models/__init__.py:109
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:124
|
||||
src/noteflow/infrastructure/persistence/models/integrations/webhook.py:77
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:85
|
||||
src/noteflow/infrastructure/persistence/models/organization/task.py:96
|
||||
src/noteflow/infrastructure/persistence/models/organization/tagging.py:54
|
||||
src/noteflow/infrastructure/persistence/models/entities/speaker.py:70
|
||||
src/noteflow/infrastructure/persistence/models/identity/settings.py:79
|
||||
src/noteflow/infrastructure/persistence/models/identity/__init__.py:22
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:171
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:229
|
||||
|
||||
String 'default_summarization_template' repeated 5 times:
|
||||
src/noteflow/infrastructure/persistence/repositories/identity/project_repo.py:140
|
||||
src/noteflow/infrastructure/persistence/repositories/identity/project_repo.py:90
|
||||
src/noteflow/infrastructure/persistence/repositories/identity/workspace_repo.py:113
|
||||
src/noteflow/infrastructure/persistence/repositories/identity/workspace_repo.py:156
|
||||
src/noteflow/grpc/_mixins/project/_converters.py:199
|
||||
|
||||
String 'all, delete-orphan' repeated 29 times:
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:139
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:145
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:152
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:158
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:163
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:168
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:173
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:182
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:187
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:234
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:61
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:67
|
||||
src/noteflow/infrastructure/persistence/models/integrations/webhook.py:83
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:89
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:94
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:99
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:104
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:253
|
||||
src/noteflow/infrastructure/persistence/models/organization/tagging.py:60
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:67
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:72
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:77
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:82
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:87
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:92
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:97
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:132
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:137
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:235
|
||||
|
||||
String 'selectin' repeated 6 times:
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:140
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:147
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:153
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:235
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:62
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:68
|
||||
|
||||
String 'noteflow.meetings.id' repeated 10 times:
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:203
|
||||
src/noteflow/infrastructure/persistence/models/core/diarization.py:34
|
||||
src/noteflow/infrastructure/persistence/models/core/diarization.py:86
|
||||
src/noteflow/infrastructure/persistence/models/core/annotation.py:41
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:31
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:265
|
||||
src/noteflow/infrastructure/persistence/models/organization/task.py:52
|
||||
src/noteflow/infrastructure/persistence/models/organization/tagging.py:72
|
||||
src/noteflow/infrastructure/persistence/models/entities/speaker.py:91
|
||||
src/noteflow/infrastructure/persistence/models/entities/named_entity.py:44
|
||||
|
||||
String 'SET NULL' repeated 6 times:
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:75
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:82
|
||||
src/noteflow/infrastructure/persistence/models/organization/task.py:52
|
||||
src/noteflow/infrastructure/persistence/models/organization/task.py:57
|
||||
src/noteflow/infrastructure/persistence/models/organization/task.py:64
|
||||
src/noteflow/infrastructure/persistence/models/entities/speaker.py:98
|
||||
|
||||
String 'CASCADE' repeated 31 times:
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:203
|
||||
src/noteflow/infrastructure/persistence/models/core/meeting.py:251
|
||||
src/noteflow/infrastructure/persistence/models/core/diarization.py:34
|
||||
src/noteflow/infrastructure/persistence/models/core/diarization.py:86
|
||||
src/noteflow/infrastructure/persistence/models/core/annotation.py:41
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:31
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:84
|
||||
src/noteflow/infrastructure/persistence/models/core/summary.py:116
|
||||
src/noteflow/infrastructure/persistence/models/integrations/webhook.py:45
|
||||
src/noteflow/infrastructure/persistence/models/integrations/webhook.py:104
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:56
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:116
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:159
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:208
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:265
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:270
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:306
|
||||
src/noteflow/infrastructure/persistence/models/organization/task.py:46
|
||||
src/noteflow/infrastructure/persistence/models/organization/tagging.py:41
|
||||
src/noteflow/infrastructure/persistence/models/organization/tagging.py:72
|
||||
src/noteflow/infrastructure/persistence/models/organization/tagging.py:77
|
||||
src/noteflow/infrastructure/persistence/models/entities/speaker.py:45
|
||||
src/noteflow/infrastructure/persistence/models/entities/speaker.py:91
|
||||
src/noteflow/infrastructure/persistence/models/entities/named_entity.py:44
|
||||
src/noteflow/infrastructure/persistence/models/identity/settings.py:52
|
||||
src/noteflow/infrastructure/persistence/models/identity/settings.py:57
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:154
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:159
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:193
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:251
|
||||
src/noteflow/infrastructure/persistence/models/identity/identity.py:256
|
||||
|
||||
String 'noteflow.integrations.id' repeated 4 times:
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:116
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:159
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:208
|
||||
src/noteflow/infrastructure/persistence/models/integrations/integration.py:306
|
||||
|
||||
String 'tasks' repeated 4 times:
|
||||
src/noteflow/infrastructure/persistence/models/organization/task.py:30
|
||||
src/noteflow/infrastructure/persistence/models/organization/task.py:97
|
||||
src/noteflow/infrastructure/persistence/models/organization/task.py:101
|
||||
src/noteflow/infrastructure/persistence/models/organization/task.py:105
|
||||
|
||||
String 'meeting_tags' repeated 3 times:
|
||||
src/noteflow/infrastructure/persistence/models/organization/tagging.py:67
|
||||
src/noteflow/infrastructure/persistence/models/organization/tagging.py:84
|
||||
src/noteflow/infrastructure/persistence/models/organization/tagging.py:88
|
||||
|
||||
String 'Invalid annotation_id' repeated 3 times:
|
||||
src/noteflow/grpc/_mixins/annotation.py:131
|
||||
src/noteflow/grpc/_mixins/annotation.py:209
|
||||
src/noteflow/grpc/_mixins/annotation.py:264
|
||||
|
||||
String 'Workspaces' repeated 3 times:
|
||||
src/noteflow/grpc/_mixins/identity.py:103
|
||||
src/noteflow/grpc/_mixins/identity.py:156
|
||||
src/noteflow/grpc/_mixins/errors/_require.py:58
|
||||
|
||||
String 'UNKNOWN' repeated 4 times:
|
||||
src/noteflow/grpc/interceptors/logging.py:191
|
||||
src/noteflow/grpc/interceptors/logging.py:226
|
||||
src/noteflow/grpc/interceptors/logging.py:260
|
||||
src/noteflow/grpc/interceptors/logging.py:295
|
||||
|
||||
String 'ProtoAnnotation' repeated 4 times:
|
||||
src/noteflow/grpc/_client_mixins/protocols.py:84
|
||||
src/noteflow/grpc/_client_mixins/protocols.py:85
|
||||
src/noteflow/grpc/_client_mixins/protocols.py:87
|
||||
src/noteflow/grpc/_client_mixins/protocols.py:21
|
||||
|
||||
String 'Invalid meeting_id' repeated 3 times:
|
||||
src/noteflow/grpc/_mixins/converters/_id_parsing.py:66
|
||||
src/noteflow/grpc/_mixins/streaming/_session.py:211
|
||||
src/noteflow/grpc/_mixins/diarization/_jobs.py:91
|
||||
244
scratch/quality_violations_test_smells.txt
Normal file
244
scratch/quality_violations_test_smells.txt
Normal file
@@ -0,0 +1,244 @@
|
||||
assertion_roulette: 70
|
||||
- tests/application/test_calendar_service.py:test_initiate_oauth_returns_auth_url_and_state (assertions=2)
|
||||
- tests/application/test_calendar_service.py:test_get_connection_status_returns_connected_info (assertions=2)
|
||||
- tests/application/test_calendar_service.py:test_list_events_fetches_from_connected_provider (assertions=2)
|
||||
- tests/application/test_export_service.py:test_export_to_file_infers_format_and_writes (assertions=3)
|
||||
- tests/application/test_meeting_service.py:test_get_meeting_found (assertions=2)
|
||||
- tests/application/test_meeting_service.py:test_list_meetings (assertions=2)
|
||||
- tests/application/test_meeting_service.py:test_get_summary_found (assertions=2)
|
||||
- tests/application/test_ner_service.py:test_extract_entities_uses_cache (assertions=3)
|
||||
- tests/application/test_ner_service.py:test_extract_entities_force_refresh_bypasses_cache (assertions=2)
|
||||
- tests/application/test_ner_service.py:test_extract_entities_no_segments_returns_empty (assertions=3)
|
||||
- tests/application/test_recovery_service.py:test_recover_no_crashed_meetings (assertions=2)
|
||||
- tests/application/test_recovery_service.py:test_count_no_crashed_meetings (assertions=2)
|
||||
- tests/application/test_retention_service.py:test_is_enabled_reflects_init (assertions=2)
|
||||
- tests/application/test_retention_service.py:test_run_cleanup_disabled_returns_empty_report (assertions=3)
|
||||
- tests/application/test_retention_service.py:test_run_cleanup_dry_run_does_not_delete (assertions=3)
|
||||
- tests/application/test_retention_service.py:test_run_cleanup_deletes_expired_meetings (assertions=3)
|
||||
- tests/application/test_retention_service.py:test_retention_report_stores_values (assertions=3)
|
||||
- tests/application/test_summarization_service.py:test_cloud_requires_consent (assertions=2)
|
||||
- tests/application/test_summarization_service.py:test_summarize_uses_default_mode (assertions=2)
|
||||
- tests/application/test_summarization_service.py:test_summarize_uses_specified_mode (assertions=3)
|
||||
- tests/application/test_summarization_service.py:test_summarize_falls_back_on_unavailable (assertions=2)
|
||||
- tests/application/test_summarization_service.py:test_summarize_verifies_citations (assertions=3)
|
||||
- tests/application/test_summarization_service.py:test_summarize_filters_invalid_citations (assertions=3)
|
||||
- tests/application/test_summarization_service.py:test_summarize_passes_max_limits (assertions=3)
|
||||
- tests/application/test_summarization_service.py:test_summarize_passes_style_prompt_to_provider (assertions=2)
|
||||
- tests/application/test_summarization_service.py:test_summarize_without_style_prompt_passes_none (assertions=2)
|
||||
- tests/application/test_summarization_service.py:test_summarize_requires_cloud_consent (assertions=3)
|
||||
- tests/application/test_summarization_service.py:test_summarize_calls_persist_callback (assertions=2)
|
||||
- tests/application/test_summarization_service.py:test_summarize_persist_callback_receives_filtered_summary (assertions=2)
|
||||
- tests/application/test_summarization_service.py:test_set_persist_callback_updates_callback (assertions=3)
|
||||
- tests/application/test_trigger_service.py:test_trigger_service_snooze_ignores_signals (assertions=2)
|
||||
- tests/application/test_trigger_service.py:test_trigger_service_rate_limit (assertions=3)
|
||||
- tests/application/test_trigger_service.py:test_trigger_service_skips_disabled_providers (assertions=3)
|
||||
- tests/application/test_trigger_service.py:test_trigger_service_rate_limit_with_existing_prompt (assertions=3)
|
||||
- tests/application/test_trigger_service.py:test_trigger_service_enable_toggles (assertions=2)
|
||||
- tests/infrastructure/observability/test_logging_timing.py:test_logs_start_and_complete_on_success (assertions=2)
|
||||
- tests/infrastructure/observability/test_logging_timing.py:test_includes_context_in_logs (assertions=3)
|
||||
- tests/infrastructure/observability/test_logging_timing.py:test_filters_none_context_values_in_timing (assertions=2)
|
||||
- tests/infrastructure/observability/test_logging_timing.py:test_logs_warning_on_timeout (assertions=2)
|
||||
- tests/infrastructure/observability/test_logging_timing.py:test_duration_is_positive (assertions=2)
|
||||
- tests/infrastructure/observability/test_logging_timing.py:test_preserves_function_metadata (assertions=2)
|
||||
- tests/infrastructure/observability/test_logging_timing.py:test_preserves_async_function_metadata (assertions=2)
|
||||
- tests/infrastructure/observability/test_logging_transitions.py:test_logs_string_state_transition (assertions=2)
|
||||
- tests/infrastructure/observability/test_logging_transitions.py:test_logs_none_old_state_for_creation (assertions=2)
|
||||
- tests/infrastructure/observability/test_logging_transitions.py:test_includes_context_kwargs (assertions=2)
|
||||
- tests/infrastructure/observability/test_logging_transitions.py:test_filters_none_context_values (assertions=2)
|
||||
- tests/infrastructure/observability/test_logging_transitions.py:test_handles_mixed_enum_and_string (assertions=2)
|
||||
- tests/infrastructure/observability/test_logging_transitions.py:test_state_value_extraction (assertions=2)
|
||||
- tests/stress/test_audio_integrity.py:test_valid_chunks_before_truncation_preserved (assertions=2)
|
||||
- tests/stress/test_audio_integrity.py:test_large_audio_roundtrip (assertions=2)
|
||||
- tests/stress/test_resource_leaks.py:test_no_orphaned_tasks_after_shutdown (assertions=3)
|
||||
- tests/stress/test_resource_leaks.py:test_task_cleanup_on_exception (assertions=2)
|
||||
- tests/stress/test_resource_leaks.py:test_webhook_executor_cleanup (assertions=2)
|
||||
- tests/stress/test_resource_leaks.py:test_session_close_releases_pipeline (assertions=3)
|
||||
- tests/stress/test_resource_leaks.py:test_flush_thread_stopped_on_close (assertions=3)
|
||||
- tests/stress/test_segment_volume.py:test_meeting_accumulates_many_segments (assertions=2)
|
||||
- tests/stress/test_segment_volume.py:test_meeting_with_many_segments_persists (assertions=2)
|
||||
- tests/stress/test_segment_volume.py:test_segment_creation_memory_stable (assertions=2)
|
||||
- tests/stress/test_segmenter_fuzz.py:test_single_sample_chunks (assertions=2)
|
||||
- tests/stress/test_segmenter_fuzz.py:test_very_short_speech_bursts (assertions=2)
|
||||
- tests/stress/test_segmenter_fuzz.py:test_zero_leading_buffer (assertions=2)
|
||||
- tests/stress/test_segmenter_fuzz.py:test_idle_to_speech_to_idle (assertions=3)
|
||||
- tests/stress/test_segmenter_fuzz.py:test_trailing_back_to_speech (assertions=2)
|
||||
- tests/stress/test_segmenter_fuzz.py:test_flush_from_idle_returns_none (assertions=2)
|
||||
- tests/stress/test_segmenter_fuzz.py:test_reset_allows_fresh_processing (assertions=2)
|
||||
- tests/stress/test_transaction_boundaries.py:test_committed_data_visible_in_new_uow (assertions=2)
|
||||
- tests/stress/test_transaction_boundaries.py:test_committed_meeting_and_segment (assertions=2)
|
||||
- tests/stress/test_transaction_boundaries.py:test_independent_uow_transactions (assertions=2)
|
||||
- tests/stress/test_transaction_boundaries.py:test_meeting_state_change_rollback (assertions=3)
|
||||
- tests/stress/test_transaction_boundaries.py:test_multiple_meetings_commit_all (assertions=2)
|
||||
|
||||
conditional_test_logic: 1
|
||||
- tests/infrastructure/observability/test_logging_timing.py:test_includes_context_in_logs (for@59)
|
||||
|
||||
sleepy_test: 28
|
||||
- tests/grpc/test_stream_lifecycle.py:test_shutdown_order_tasks_before_sessions (line=656)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_cancelled_error_propagation_in_stream (line=756)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_cancelled_error_propagation_in_stream (line=739)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_cancelled_error_propagation_in_stream (line=741)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_cancelled_error_propagation_in_stream (line=743)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_cancelled_error_propagation_in_stream (line=746)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_context_cancelled_check_pattern (line=776)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_context_cancelled_check_pattern (line=778)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_context_cancelled_check_pattern (line=780)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_concurrent_shutdown_and_stream_cleanup (line=835)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_shutdown_during_task_creation (line=870)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_shutdown_during_task_creation (line=875)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_shutdown_during_task_creation (line=880)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_shutdown_during_task_creation (line=861)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_job_completion_vs_shutdown_race (line=933)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_job_completion_vs_shutdown_race (line=919)
|
||||
- tests/integration/test_database_resilience.py:test_concurrent_operations_within_pool_limit (line=48)
|
||||
- tests/integration/test_database_resilience.py:test_graceful_handling_beyond_pool_limit (line=76)
|
||||
- tests/integration/test_database_resilience.py:test_operations_succeed_after_idle (line=278)
|
||||
- tests/integration/test_signal_handling.py:test_shutdown_cancelsdiarization_tasks (line=108)
|
||||
- tests/integration/test_signal_handling.py:test_shutdown_marks_cancelled_jobs_failed (line=138)
|
||||
- tests/integration/test_signal_handling.py:test_long_running_task_cancellation (line=287)
|
||||
- tests/integration/test_signal_handling.py:test_task_with_exception_handling (line=304)
|
||||
- tests/integration/test_signal_handling.py:test_mixed_task_states_on_shutdown (line=329)
|
||||
- tests/integration/test_signal_handling.py:test_mixed_task_states_on_shutdown (line=333)
|
||||
- tests/integration/test_signal_handling.py:test_mixed_task_states_on_shutdown (line=338)
|
||||
- tests/integration/test_signal_handling.py:test_tasks_cancelled_before_sessions_closed (line=402)
|
||||
- tests/integration/test_signal_handling.py:test_concurrent_shutdown_calls_safe (line=431)
|
||||
|
||||
sensitive_equality: 16
|
||||
- tests/domain/test_errors.py:test_domain_error_preserves_message (str)
|
||||
- tests/grpc/test_annotation_mixin.py:test_adds_annotation_with_all_fields (str)
|
||||
- tests/grpc/test_annotation_mixin.py:test_returns_annotation_when_found (str)
|
||||
- tests/grpc/test_annotation_mixin.py:test_returns_annotation_when_found (str)
|
||||
- tests/grpc/test_annotation_mixin.py:test_updates_annotation_successfully (str)
|
||||
- tests/grpc/test_identity_mixin.py:test_switches_workspace_successfully (str)
|
||||
- tests/grpc/test_meeting_mixin.py:test_stop_recording_meeting_transitions_to_stopped (str)
|
||||
- tests/grpc/test_meeting_mixin.py:test_stop_meeting_closes_audio_writer (str)
|
||||
- tests/grpc/test_meeting_mixin.py:test_get_meeting_returns_meeting_by_id (str)
|
||||
- tests/grpc/test_oidc_mixin.py:test_registers_provider_successfully (str)
|
||||
- tests/grpc/test_oidc_mixin.py:test_returns_provider_by_id (str)
|
||||
- tests/grpc/test_oidc_mixin.py:test_refreshes_single_provider (str)
|
||||
- tests/grpc/test_project_mixin.py:test_create_project_basic (str)
|
||||
- tests/grpc/test_project_mixin.py:test_get_project_found (str)
|
||||
- tests/grpc/test_project_mixin.py:test_add_project_member_success (str)
|
||||
- tests/grpc/test_webhooks_mixin.py:test_update_rpc_modifies_single_field (str)
|
||||
|
||||
eager_test: 32
|
||||
- tests/grpc/test_diarization_lifecycle.py:test_refine_error_mentions_database (methods=8)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_shutdown_order_tasks_before_sessions (methods=10)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_cancelled_error_propagation_in_stream (methods=8)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_concurrent_shutdown_and_stream_cleanup (methods=8)
|
||||
- tests/infrastructure/audio/test_writer.py:test_multiple_chunks_written (methods=8)
|
||||
- tests/infrastructure/audio/test_writer.py:test_buffering_reduces_chunk_overhead (methods=9)
|
||||
- tests/infrastructure/audio/test_writer.py:test_manifest_wrapped_dek_unwraps_successfully (methods=8)
|
||||
- tests/infrastructure/audio/test_writer.py:test_manifest_wrapped_dek_decrypts_audio (methods=9)
|
||||
- tests/infrastructure/observability/test_log_buffer.py:test_handler_captures_level_name (methods=8)
|
||||
- tests/integration/test_crash_scenarios.py:test_completed_meeting_not_recovered (methods=8)
|
||||
- tests/integration/test_crash_scenarios.py:test_mixed_recovery_preserves_completed (methods=8)
|
||||
- tests/integration/test_e2e_annotations.py:test_annotations_deleted_with_meeting (methods=9)
|
||||
- tests/integration/test_e2e_export.py:test_export_pdf_from_database (methods=8)
|
||||
- tests/integration/test_e2e_export.py:test_export_to_file_creates_pdf_file (methods=8)
|
||||
- tests/integration/test_e2e_ner.py:test_update_entity_text (methods=8)
|
||||
- tests/integration/test_e2e_ner.py:test_delete_entity_removes_from_database (methods=8)
|
||||
- tests/integration/test_e2e_ner.py:test_delete_does_not_affect_other_entities (methods=8)
|
||||
- tests/integration/test_e2e_streaming.py:test_segments_persisted_to_database (methods=9)
|
||||
- tests/integration/test_e2e_summarization.py:test_generate_summary_withsummarization_service (methods=8)
|
||||
- tests/integration/test_memory_fallback.py:test_concurrent_reads_and_writes (methods=8)
|
||||
- tests/integration/test_recovery_service.py:test_audio_validation_with_valid_files (methods=8)
|
||||
- tests/integration/test_recovery_service.py:test_audio_validation_uses_asset_path (methods=8)
|
||||
- tests/integration/test_unit_of_work_advanced.py:test_meeting_lifecycle_workflow (methods=9)
|
||||
- tests/stress/test_audio_integrity.py:test_truncated_chunk_length_partial (methods=8)
|
||||
- tests/stress/test_audio_integrity.py:test_truncated_chunk_data_raises (methods=8)
|
||||
- tests/stress/test_audio_integrity.py:test_valid_chunks_before_truncation_preserved (methods=9)
|
||||
- tests/stress/test_audio_integrity.py:test_bit_flip_in_ciphertext_detected (methods=9)
|
||||
- tests/stress/test_audio_integrity.py:test_bit_flip_in_tag_detected (methods=9)
|
||||
- tests/stress/test_audio_integrity.py:test_corrupted_wrapped_dek_raises (methods=9)
|
||||
- tests/stress/test_resource_leaks.py:test_streaming_fd_cleanup (methods=8)
|
||||
- tests/stress/test_segment_volume.py:test_meeting_with_many_segments_persists (methods=10)
|
||||
- tests/stress/test_segmenter_fuzz.py:test_random_vad_patterns_1000_iterations (methods=8)
|
||||
|
||||
long_test: 86
|
||||
- tests/application/test_auth_service.py:test_refreshes_tokens_successfully (lines=38)
|
||||
- tests/application/test_calendar_service.py:test_get_connection_status_returns_connected_info (lines=39)
|
||||
- tests/application/test_calendar_service.py:test_disconnect_revokes_tokens_and_deletes_integration (lines=36)
|
||||
- tests/application/test_calendar_service.py:test_list_events_fetches_from_connected_provider (lines=41)
|
||||
- tests/application/test_calendar_service.py:test_list_events_refreshes_expired_token (lines=40)
|
||||
- tests/grpc/test_annotation_mixin.py:test_adds_annotation_with_all_fields (lines=39)
|
||||
- tests/grpc/test_annotation_mixin.py:test_returns_annotations_for_meeting (lines=45)
|
||||
- tests/grpc/test_annotation_mixin.py:test_updates_annotation_successfully (lines=45)
|
||||
- tests/grpc/test_diarization_mixin.py:test_rename_returns_zero_for_no_matches (lines=36)
|
||||
- tests/grpc/test_export_mixin.py:test_exports_markdown_with_segments (lines=38)
|
||||
- tests/grpc/test_export_mixin.py:test_exports_html_with_segments (lines=39)
|
||||
- tests/grpc/test_export_mixin.py:test_exports_meeting_with_multiple_speakers (lines=43)
|
||||
- tests/grpc/test_export_mixin.py:test_exports_long_transcript (lines=46)
|
||||
- tests/grpc/test_export_mixin.py:test_returns_correct_format_metadata (lines=36)
|
||||
- tests/grpc/test_meeting_mixin.py:test_stop_meeting_triggers_webhooks (lines=36)
|
||||
- tests/grpc/test_meeting_mixin.py:test_get_meeting_includes_segments_when_requested (lines=43)
|
||||
- tests/grpc/test_observability_mixin.py:test_returns_historical_metrics (lines=46)
|
||||
- tests/grpc/test_observability_mixin.py:test_metrics_proto_includes_all_fields (lines=38)
|
||||
- tests/grpc/test_oidc_mixin.py:test_enables_provider (lines=39)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_shutdown_order_tasks_before_sessions (lines=43)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_cancelled_error_propagation_in_stream (lines=44)
|
||||
- tests/grpc/test_stream_lifecycle.py:test_shutdown_during_task_creation (lines=36)
|
||||
- tests/grpc/test_webhooks_mixin.py:test_registers_webhook_with_all_optional_fields (lines=36)
|
||||
- tests/grpc/test_webhooks_mixin.py:test_delivery_proto_includes_all_fields (lines=36)
|
||||
- tests/infrastructure/audio/test_writer.py:test_multiple_chunks_written (lines=39)
|
||||
- tests/infrastructure/audio/test_writer.py:test_buffering_reduces_chunk_overhead (lines=46)
|
||||
- tests/infrastructure/auth/test_oidc_registry.py:test_list_providers_returns_all (lines=39)
|
||||
- tests/infrastructure/auth/test_oidc_registry.py:test_list_providers_filters_by_workspace (lines=39)
|
||||
- tests/infrastructure/auth/test_oidc_registry.py:test_refresh_all_discovery (lines=41)
|
||||
- tests/infrastructure/calendar/test_google_adapter.py:test_list_events_returns_calendar_events (lines=42)
|
||||
- tests/infrastructure/observability/test_database_sink.py:test_full_flow_record_and_flush (lines=46)
|
||||
- tests/infrastructure/observability/test_usage.py:test_usage_event_stores_all_fields (lines=36)
|
||||
- tests/infrastructure/summarization/test_ollama_provider.py:test_ollama_summarize_returns_result (lines=37)
|
||||
- tests/infrastructure/summarization/test_ollama_provider.py:test_ollama_raises_unavailable_when_package_missing (lines=38)
|
||||
- tests/infrastructure/test_calendar_converters.py:test_info_to_orm_to_info_preserves_values (lines=46)
|
||||
- tests/infrastructure/test_converters.py:test_domain_to_orm_to_domain_preserves_values (lines=36)
|
||||
- tests/infrastructure/test_integration_converters.py:test_integration_domain_to_orm_to_domain_preserves_values (lines=37)
|
||||
- tests/infrastructure/test_observability.py:test_start_and_stop_collection (lines=37)
|
||||
- tests/infrastructure/test_webhook_converters.py:test_config_domain_to_orm_to_domain_preserves_values (lines=43)
|
||||
- tests/infrastructure/test_webhook_converters.py:test_delivery_domain_to_orm_to_domain_preserves_values (lines=36)
|
||||
- tests/infrastructure/webhooks/test_executor.py:test_hmac_signature_generation (lines=42)
|
||||
- tests/infrastructure/webhooks/test_metrics.py:test_stats_by_event_type (lines=46)
|
||||
- tests/integration/test_crash_scenarios.py:test_recovery_is_idempotent (lines=36)
|
||||
- tests/integration/test_crash_scenarios.py:test_concurrent_recovery_calls (lines=44)
|
||||
- tests/integration/test_crash_scenarios.py:test_mixed_recovery_preserves_completed (lines=39)
|
||||
- tests/integration/test_crash_scenarios.py:test_recovery_result_counts (lines=42)
|
||||
- tests/integration/test_crash_scenarios.py:test_partial_state_transition_recovery (lines=39)
|
||||
- tests/integration/test_database_resilience.py:test_concurrent_meeting_updates (lines=40)
|
||||
- tests/integration/test_diarization_job_repository.py:test_mark_running_as_failed_handles_multiple_jobs (lines=44)
|
||||
- tests/integration/test_e2e_annotations.py:test_list_annotations_with_time_range_filter (lines=36)
|
||||
- tests/integration/test_e2e_annotations.py:test_update_annotation_modifies_database (lines=36)
|
||||
- tests/integration/test_e2e_annotations.py:test_annotations_isolated_between_meetings (lines=39)
|
||||
- tests/integration/test_e2e_export.py:test_export_markdown_from_database (lines=39)
|
||||
- tests/integration/test_e2e_export.py:test_export_pdf_from_database (lines=38)
|
||||
- tests/integration/test_e2e_export.py:test_export_transcript_markdown_via_grpc (lines=38)
|
||||
- tests/integration/test_e2e_ner.py:test_extract_entities_persists_to_database (lines=45)
|
||||
- tests/integration/test_e2e_ner.py:test_extract_entities_returns_cached_on_second_call (lines=38)
|
||||
- tests/integration/test_e2e_ner.py:test_pin_entity_persists_pinned_state (lines=38)
|
||||
- tests/integration/test_e2e_ner.py:test_update_entity_text (lines=40)
|
||||
- tests/integration/test_e2e_ner.py:test_delete_does_not_affect_other_entities (lines=41)
|
||||
- tests/integration/test_e2e_ner.py:test_has_entities_reflects_extraction_state (lines=36)
|
||||
- tests/integration/test_e2e_streaming.py:test_stop_request_exits_stream_gracefully (lines=37)
|
||||
- tests/integration/test_e2e_summarization.py:test_generate_summary_placeholder_on_service_error (lines=38)
|
||||
- tests/integration/test_e2e_summarization.py:test_summary_with_action_items_persisted (lines=45)
|
||||
- tests/integration/test_e2e_summarization.py:test_regeneration_replaces_existing_summary (lines=46)
|
||||
- tests/integration/test_entity_repository.py:test_orders_by_category_then_text (lines=46)
|
||||
- tests/integration/test_entity_repository.py:test_isolates_deletion_to_meeting (lines=39)
|
||||
- tests/integration/test_grpc_servicer_database.py:test_shutdown_marks_running_jobs_as_failed (lines=39)
|
||||
- tests/integration/test_grpc_servicer_database.py:test_rename_speaker_updates_segments_in_database (lines=37)
|
||||
- tests/integration/test_grpc_servicer_database.py:test_grpc_delete_preserves_other_entities (lines=42)
|
||||
- tests/integration/test_memory_fallback.py:test_concurrent_reads_and_writes (lines=37)
|
||||
- tests/integration/test_project_repository.py:test_create_project_with_settings_repository (lines=42)
|
||||
- tests/integration/test_project_repository.py:test_list_for_user_filtered_by_workspace (lines=43)
|
||||
- tests/integration/test_recovery_service.py:test_recovers_multiple_meetings (lines=38)
|
||||
- tests/integration/test_recovery_service.py:test_count_crashed_meetings_accurate (lines=38)
|
||||
- tests/integration/test_server_initialization.py:test_shutdown_marks_running_jobs_failed (lines=37)
|
||||
- tests/integration/test_signal_handling.py:test_shutdown_cleansactive_streams (lines=44)
|
||||
- tests/integration/test_signal_handling.py:test_cleanup_allactive_streams (lines=41)
|
||||
- tests/integration/test_signal_handling.py:test_diarization_before_audio (lines=41)
|
||||
- tests/integration/test_streaming_real_pipeline.py:test_streaming_emits_final_segment (lines=44)
|
||||
- tests/integration/test_unit_of_work_advanced.py:test_meeting_lifecycle_workflow (lines=42)
|
||||
- tests/integration/test_unit_of_work_advanced.py:test_diarization_job_workflow (lines=41)
|
||||
- tests/integration/test_webhook_integration.py:test_stop_meeting_with_failed_webhook_still_succeeds (lines=39)
|
||||
- tests/integration/test_webhook_repository.py:test_returns_deliveries_newest_first (lines=46)
|
||||
- tests/integration/test_webhook_repository.py:test_delivery_round_trip_preserves_all_fields (lines=44)
|
||||
- tests/stress/test_segment_volume.py:test_meeting_with_many_segments_persists (lines=44)
|
||||
@@ -10,6 +10,13 @@ from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import Protocol
|
||||
|
||||
from noteflow.domain.constants.fields import (
|
||||
LATENCY_MS,
|
||||
MODEL_NAME,
|
||||
PROVIDER_NAME,
|
||||
TOKENS_INPUT,
|
||||
TOKENS_OUTPUT,
|
||||
)
|
||||
from noteflow.domain.utils.time import utc_now
|
||||
|
||||
|
||||
@@ -36,6 +43,20 @@ class UsageMetrics:
|
||||
latency_ms: float | None = None
|
||||
"""Operation latency in milliseconds."""
|
||||
|
||||
def as_event_fields(self) -> dict[str, str | int | float | None]:
|
||||
"""Return metrics as a dictionary suitable for UsageEvent fields.
|
||||
|
||||
Returns:
|
||||
Dictionary with provider_name, model_name, tokens_input, tokens_output, latency_ms.
|
||||
"""
|
||||
return {
|
||||
PROVIDER_NAME: self.provider_name,
|
||||
MODEL_NAME: self.model_name,
|
||||
TOKENS_INPUT: self.tokens_input,
|
||||
TOKENS_OUTPUT: self.tokens_output,
|
||||
LATENCY_MS: self.latency_ms,
|
||||
}
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class UsageEvent:
|
||||
@@ -110,14 +131,15 @@ class UsageEvent:
|
||||
New UsageEvent instance.
|
||||
"""
|
||||
resolved_context = context or UsageEventContext()
|
||||
metric_fields = metrics.as_event_fields()
|
||||
return cls(
|
||||
event_type=event_type,
|
||||
meeting_id=resolved_context.meeting_id,
|
||||
provider_name=metrics.provider_name,
|
||||
model_name=metrics.model_name,
|
||||
tokens_input=metrics.tokens_input,
|
||||
tokens_output=metrics.tokens_output,
|
||||
latency_ms=metrics.latency_ms,
|
||||
provider_name=metric_fields[PROVIDER_NAME],
|
||||
model_name=metric_fields[MODEL_NAME],
|
||||
tokens_input=metric_fields[TOKENS_INPUT],
|
||||
tokens_output=metric_fields[TOKENS_OUTPUT],
|
||||
latency_ms=metric_fields[LATENCY_MS],
|
||||
success=resolved_context.success,
|
||||
error_code=resolved_context.error_code,
|
||||
attributes=attributes or {},
|
||||
@@ -149,10 +171,7 @@ class UsageEventSink(Protocol):
|
||||
...
|
||||
|
||||
def record_simple(
|
||||
self,
|
||||
event_type: str,
|
||||
metrics: UsageMetrics | None = None,
|
||||
*,
|
||||
self, event_type: str, metrics: UsageMetrics | None = None, *,
|
||||
context: UsageEventContext | None = None,
|
||||
**attributes: object,
|
||||
) -> None:
|
||||
@@ -174,10 +193,7 @@ class NullUsageEventSink:
|
||||
"""Discard the event."""
|
||||
|
||||
def record_simple(
|
||||
self,
|
||||
event_type: str,
|
||||
metrics: UsageMetrics | None = None,
|
||||
*,
|
||||
self, event_type: str, metrics: UsageMetrics | None = None, *,
|
||||
context: UsageEventContext | None = None,
|
||||
**attributes: object,
|
||||
) -> None:
|
||||
|
||||
@@ -3,8 +3,12 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from noteflow.domain.entities import WordTiming
|
||||
from noteflow.domain.entities import Segment, WordTiming
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from noteflow.domain.value_objects import MeetingId
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
@@ -40,3 +44,25 @@ class SegmentData:
|
||||
|
||||
no_speech_prob: float = 0.0
|
||||
"""No-speech probability."""
|
||||
|
||||
def to_segment(self, meeting_id: MeetingId) -> Segment:
|
||||
"""Convert to a Segment entity.
|
||||
|
||||
Args:
|
||||
meeting_id: Meeting this segment belongs to.
|
||||
|
||||
Returns:
|
||||
New Segment entity.
|
||||
"""
|
||||
return Segment(
|
||||
segment_id=self.segment_id,
|
||||
text=self.text,
|
||||
start_time=self.start_time,
|
||||
end_time=self.end_time,
|
||||
meeting_id=meeting_id,
|
||||
words=self.words,
|
||||
language=self.language,
|
||||
language_confidence=self.language_confidence,
|
||||
avg_logprob=self.avg_logprob,
|
||||
no_speech_prob=self.no_speech_prob,
|
||||
)
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
@@ -10,6 +11,7 @@ from noteflow.domain.identity.entities import User
|
||||
from noteflow.domain.value_objects import OAuthProvider, OAuthTokens
|
||||
from noteflow.infrastructure.calendar import OAuthManager
|
||||
from noteflow.infrastructure.logging import get_logger
|
||||
from noteflow.domain.constants.fields import PROVIDER
|
||||
|
||||
from .auth_constants import DEFAULT_USER_ID, DEFAULT_WORKSPACE_ID
|
||||
from .auth_types import AuthResult
|
||||
@@ -19,6 +21,8 @@ if TYPE_CHECKING:
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
TOKEN_EXPIRY_BUFFER_SECONDS = 3 * 100
|
||||
|
||||
|
||||
def resolve_provider_email(integration: Integration) -> str:
|
||||
"""Resolve provider email with a consistent fallback."""
|
||||
@@ -88,32 +92,39 @@ async def get_or_create_default_workspace_id(
|
||||
return workspace_id
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class AuthIntegrationContext:
|
||||
"""Inputs for auth integration creation/update."""
|
||||
|
||||
provider: str
|
||||
workspace_id: UUID
|
||||
user_id: UUID
|
||||
provider_email: str
|
||||
|
||||
|
||||
async def get_or_create_auth_integration(
|
||||
uow: UnitOfWork,
|
||||
provider: str,
|
||||
workspace_id: UUID,
|
||||
user_id: UUID,
|
||||
provider_email: str,
|
||||
context: AuthIntegrationContext,
|
||||
) -> Integration:
|
||||
"""Fetch or create the auth integration for a provider."""
|
||||
integration = await uow.integrations.get_by_provider(
|
||||
provider=provider,
|
||||
provider=context.provider,
|
||||
integration_type=IntegrationType.AUTH.value,
|
||||
)
|
||||
|
||||
if integration is None:
|
||||
integration = Integration.create(
|
||||
workspace_id=workspace_id,
|
||||
name=f"{provider.title()} Auth",
|
||||
workspace_id=context.workspace_id,
|
||||
name=f"{context.provider.title()} Auth",
|
||||
integration_type=IntegrationType.AUTH,
|
||||
config={"provider": provider, "user_id": str(user_id)},
|
||||
config={PROVIDER: context.provider, "user_id": str(context.user_id)},
|
||||
)
|
||||
await uow.integrations.create(integration)
|
||||
else:
|
||||
integration.config["provider"] = provider
|
||||
integration.config["user_id"] = str(user_id)
|
||||
integration.config[PROVIDER] = context.provider
|
||||
integration.config["user_id"] = str(context.user_id)
|
||||
|
||||
integration.connect(provider_email=provider_email)
|
||||
integration.connect(provider_email=context.provider_email)
|
||||
await uow.integrations.update(integration)
|
||||
return integration
|
||||
|
||||
@@ -180,7 +191,7 @@ async def refresh_tokens_for_integration(
|
||||
if not tokens.refresh_token:
|
||||
return None
|
||||
|
||||
if not tokens.is_expired(buffer_seconds=300):
|
||||
if not tokens.is_expired(buffer_seconds=TOKEN_EXPIRY_BUFFER_SECONDS):
|
||||
logger.debug(
|
||||
"auth_token_still_valid",
|
||||
provider=oauth_provider.value,
|
||||
|
||||
@@ -21,6 +21,7 @@ from noteflow.infrastructure.logging import get_logger
|
||||
|
||||
from .auth_constants import DEFAULT_USER_ID, DEFAULT_WORKSPACE_ID
|
||||
from .auth_helpers import (
|
||||
AuthIntegrationContext,
|
||||
find_connected_auth_integration,
|
||||
get_or_create_auth_integration,
|
||||
get_or_create_default_workspace_id,
|
||||
@@ -42,6 +43,7 @@ if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
|
||||
from noteflow.config.settings import CalendarIntegrationSettings
|
||||
from noteflow.domain.entities.integration import Integration
|
||||
from noteflow.domain.ports.unit_of_work import UnitOfWork
|
||||
|
||||
logger = get_logger(__name__)
|
||||
@@ -204,11 +206,9 @@ class AuthService:
|
||||
try:
|
||||
if oauth_provider == OAuthProvider.GOOGLE:
|
||||
adapter = GoogleCalendarAdapter()
|
||||
email, display_name = await adapter.get_user_info(access_token)
|
||||
else:
|
||||
adapter = OutlookCalendarAdapter()
|
||||
email, display_name = await adapter.get_user_info(access_token)
|
||||
|
||||
email, display_name = await adapter.get_user_info(access_token)
|
||||
return email, display_name
|
||||
except (GoogleCalendarError, OutlookCalendarError, OAuthError) as e:
|
||||
raise AuthServiceError(f"Failed to get user info: {e}") from e
|
||||
@@ -226,10 +226,12 @@ class AuthService:
|
||||
workspace_id = await get_or_create_default_workspace_id(uow, user_id)
|
||||
integration = await get_or_create_auth_integration(
|
||||
uow,
|
||||
provider=provider,
|
||||
workspace_id=workspace_id,
|
||||
user_id=user_id,
|
||||
provider_email=email,
|
||||
AuthIntegrationContext(
|
||||
provider=provider,
|
||||
workspace_id=workspace_id,
|
||||
user_id=user_id,
|
||||
provider_email=email,
|
||||
),
|
||||
)
|
||||
await store_integration_tokens(uow, integration, tokens)
|
||||
await uow.commit()
|
||||
@@ -287,49 +289,72 @@ class AuthService:
|
||||
else [OAuthProvider.GOOGLE.value, OAuthProvider.OUTLOOK.value]
|
||||
)
|
||||
|
||||
logged_out = False
|
||||
all_revoked = True
|
||||
revocation_errors: list[str] = []
|
||||
|
||||
for p in providers:
|
||||
result = await self._logout_provider(p)
|
||||
logged_out = logged_out or result.logged_out
|
||||
if not result.tokens_revoked:
|
||||
all_revoked = False
|
||||
if result.revocation_error:
|
||||
revocation_errors.append(f"{p}: {result.revocation_error}")
|
||||
|
||||
return LogoutResult(
|
||||
logged_out=logged_out,
|
||||
tokens_revoked=all_revoked,
|
||||
revocation_error="; ".join(revocation_errors) if revocation_errors else None,
|
||||
)
|
||||
results = [await self._logout_provider(p) for p in providers]
|
||||
return LogoutResult.aggregate(results)
|
||||
|
||||
async def _logout_provider(self, provider: str) -> LogoutResult:
|
||||
"""Logout from a specific provider."""
|
||||
oauth_provider = self._parse_auth_provider(provider)
|
||||
|
||||
async with self._uow_factory() as uow:
|
||||
integration = await uow.integrations.get_by_provider(
|
||||
provider=provider,
|
||||
integration_type=IntegrationType.AUTH.value,
|
||||
)
|
||||
|
||||
integration = await self._load_auth_integration(uow, provider)
|
||||
if integration is None:
|
||||
return LogoutResult(
|
||||
logged_out=False,
|
||||
tokens_revoked=True, # No tokens to revoke
|
||||
tokens_revoked=True,
|
||||
)
|
||||
|
||||
# Get tokens for revocation
|
||||
secrets = await uow.integrations.get_secrets(integration.id)
|
||||
access_token = secrets.get(OAUTH_FIELD_ACCESS_TOKEN) if secrets else None
|
||||
|
||||
# Delete integration
|
||||
await uow.integrations.delete(integration.id)
|
||||
await uow.commit()
|
||||
access_token = await self._load_access_token(uow, integration.id)
|
||||
await self._delete_integration(uow, integration.id)
|
||||
|
||||
# Revoke tokens (best effort)
|
||||
tokens_revoked, revocation_error = await self._revoke_access_token(
|
||||
oauth_provider,
|
||||
provider,
|
||||
access_token,
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"auth_logout_completed",
|
||||
event_type="security",
|
||||
provider=provider,
|
||||
tokens_revoked=tokens_revoked,
|
||||
)
|
||||
|
||||
return LogoutResult(
|
||||
logged_out=True,
|
||||
tokens_revoked=tokens_revoked,
|
||||
revocation_error=revocation_error,
|
||||
)
|
||||
|
||||
async def _load_auth_integration(
|
||||
self,
|
||||
uow: UnitOfWork,
|
||||
provider: str,
|
||||
) -> Integration | None:
|
||||
return await uow.integrations.get_by_provider(
|
||||
provider=provider,
|
||||
integration_type=IntegrationType.AUTH.value,
|
||||
)
|
||||
|
||||
async def _load_access_token(
|
||||
self,
|
||||
uow: UnitOfWork,
|
||||
integration_id: UUID,
|
||||
) -> str | None:
|
||||
secrets = await uow.integrations.get_secrets(integration_id)
|
||||
return secrets.get(OAUTH_FIELD_ACCESS_TOKEN) if secrets else None
|
||||
|
||||
async def _delete_integration(self, uow: UnitOfWork, integration_id: UUID) -> None:
|
||||
await uow.integrations.delete(integration_id)
|
||||
await uow.commit()
|
||||
|
||||
async def _revoke_access_token(
|
||||
self,
|
||||
oauth_provider: OAuthProvider,
|
||||
provider: str,
|
||||
access_token: str | None,
|
||||
) -> tuple[bool, str | None]:
|
||||
tokens_revoked = True
|
||||
revocation_error: str | None = None
|
||||
|
||||
@@ -351,18 +376,7 @@ class AuthService:
|
||||
error=revocation_error,
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"auth_logout_completed",
|
||||
event_type="security",
|
||||
provider=provider,
|
||||
tokens_revoked=tokens_revoked,
|
||||
)
|
||||
|
||||
return LogoutResult(
|
||||
logged_out=True,
|
||||
tokens_revoked=tokens_revoked,
|
||||
revocation_error=revocation_error,
|
||||
)
|
||||
return tokens_revoked, revocation_error
|
||||
|
||||
async def refresh_auth_tokens(self, provider: str) -> AuthResult | None:
|
||||
"""Refresh expired auth tokens.
|
||||
|
||||
@@ -47,4 +47,30 @@ class LogoutResult:
|
||||
"""Whether remote token revocation succeeded."""
|
||||
|
||||
revocation_error: str | None = None
|
||||
|
||||
@classmethod
|
||||
def aggregate(cls, results: list[LogoutResult]) -> LogoutResult:
|
||||
"""Aggregate multiple provider logout results into single result.
|
||||
|
||||
Args:
|
||||
results: List of LogoutResult from individual providers.
|
||||
|
||||
Returns:
|
||||
Combined LogoutResult where logged_out is True if any succeeded,
|
||||
tokens_revoked is True only if all succeeded.
|
||||
"""
|
||||
if not results:
|
||||
return cls(logged_out=False, tokens_revoked=True)
|
||||
logged_out = any(r.logged_out for r in results)
|
||||
all_revoked = all(r.tokens_revoked for r in results)
|
||||
errors = [
|
||||
str(r.revocation_error)
|
||||
for r in results
|
||||
if not r.tokens_revoked and r.revocation_error
|
||||
]
|
||||
return cls(
|
||||
logged_out=logged_out,
|
||||
tokens_revoked=all_revoked,
|
||||
revocation_error="; ".join(errors) if errors else None,
|
||||
)
|
||||
"""Error message if revocation failed (for logging/debugging)."""
|
||||
|
||||
@@ -23,6 +23,7 @@ from noteflow.infrastructure.calendar.google_adapter import GoogleCalendarError
|
||||
from noteflow.infrastructure.calendar.oauth_manager import OAuthError
|
||||
from noteflow.infrastructure.calendar.outlook_adapter import OutlookCalendarError
|
||||
from noteflow.infrastructure.logging import get_logger
|
||||
from noteflow.domain.constants.fields import PROVIDER
|
||||
|
||||
class _CalendarServiceDepsKwargs(TypedDict, total=False):
|
||||
"""Optional dependency overrides for CalendarService."""
|
||||
@@ -33,7 +34,7 @@ class _CalendarServiceDepsKwargs(TypedDict, total=False):
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
from collections.abc import Awaitable, Callable
|
||||
|
||||
from noteflow.config.settings import CalendarIntegrationSettings
|
||||
from noteflow.domain.ports.unit_of_work import UnitOfWork
|
||||
@@ -45,58 +46,38 @@ class CalendarServiceError(Exception):
|
||||
"""Calendar service operation failed."""
|
||||
|
||||
|
||||
class CalendarService:
|
||||
"""Calendar integration service.
|
||||
class _CalendarServiceBase:
|
||||
_oauth_manager: OAuthManager
|
||||
_settings: CalendarIntegrationSettings
|
||||
_uow_factory: Callable[[], UnitOfWork]
|
||||
_google_adapter: GoogleCalendarAdapter
|
||||
_outlook_adapter: OutlookCalendarAdapter
|
||||
DEFAULT_WORKSPACE_ID: UUID
|
||||
|
||||
Orchestrates OAuth flow and calendar event fetching. Uses:
|
||||
- IntegrationRepository for Integration entity CRUD
|
||||
- IntegrationRepository.get_secrets/set_secrets for encrypted token storage
|
||||
- OAuthManager for PKCE OAuth flow
|
||||
- GoogleCalendarAdapter/OutlookCalendarAdapter for provider APIs
|
||||
"""
|
||||
_parse_calendar_provider: Callable[..., OAuthProvider]
|
||||
_exchange_tokens: Callable[..., Awaitable[OAuthTokens]]
|
||||
_fetch_provider_email: Callable[..., Awaitable[str]]
|
||||
_fetch_events: Callable[..., Awaitable[list[CalendarEventInfo]]]
|
||||
_fetch_account_email: Callable[..., Awaitable[str]]
|
||||
_get_adapter: Callable[..., GoogleCalendarAdapter | OutlookCalendarAdapter]
|
||||
_load_calendar_integration: Callable[..., Awaitable[Integration]]
|
||||
_load_tokens_for_provider: Callable[..., Awaitable[OAuthTokens]]
|
||||
_refresh_tokens_if_needed: Callable[..., Awaitable[OAuthTokens]]
|
||||
_record_sync_success: Callable[..., Awaitable[None]]
|
||||
_record_sync_error: Callable[..., Awaitable[None]]
|
||||
_resolve_connection_status: Callable[..., tuple[str, datetime | None]]
|
||||
|
||||
# Default workspace ID for single-user mode
|
||||
DEFAULT_WORKSPACE_ID = UUID("00000000-0000-0000-0000-000000000001")
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
uow_factory: Callable[[], UnitOfWork],
|
||||
settings: CalendarIntegrationSettings,
|
||||
**kwargs: Unpack[_CalendarServiceDepsKwargs],
|
||||
) -> None:
|
||||
"""Initialize calendar service.
|
||||
|
||||
Args:
|
||||
uow_factory: Factory function returning UnitOfWork instances.
|
||||
settings: Calendar settings with OAuth credentials.
|
||||
**kwargs: Optional dependency overrides.
|
||||
"""
|
||||
self._uow_factory = uow_factory
|
||||
self._settings = settings
|
||||
oauth_manager = kwargs.get("oauth_manager")
|
||||
google_adapter = kwargs.get("google_adapter")
|
||||
outlook_adapter = kwargs.get("outlook_adapter")
|
||||
self._oauth_manager = oauth_manager or OAuthManager(settings)
|
||||
self._google_adapter = google_adapter or GoogleCalendarAdapter()
|
||||
self._outlook_adapter = outlook_adapter or OutlookCalendarAdapter()
|
||||
class _CalendarServiceOAuthMixin(_CalendarServiceBase):
|
||||
_oauth_manager: OAuthManager
|
||||
_settings: CalendarIntegrationSettings
|
||||
|
||||
async def initiate_oauth(
|
||||
self,
|
||||
provider: str,
|
||||
redirect_uri: str | None = None,
|
||||
) -> tuple[str, str]:
|
||||
"""Start OAuth flow for a calendar provider.
|
||||
|
||||
Args:
|
||||
provider: Provider name ('google' or 'outlook').
|
||||
redirect_uri: Optional override for OAuth callback URI.
|
||||
|
||||
Returns:
|
||||
Tuple of (authorization_url, state_token).
|
||||
|
||||
Raises:
|
||||
CalendarServiceError: If provider is invalid or credentials not configured.
|
||||
"""
|
||||
"""Start OAuth flow for a calendar provider."""
|
||||
oauth_provider = self._parse_calendar_provider(provider)
|
||||
effective_redirect = redirect_uri or self._settings.redirect_uri
|
||||
|
||||
@@ -116,22 +97,7 @@ class CalendarService:
|
||||
code: str,
|
||||
state: str,
|
||||
) -> UUID:
|
||||
"""Complete OAuth flow and store tokens.
|
||||
|
||||
Creates or updates Integration entity with CALENDAR type and
|
||||
stores encrypted tokens via IntegrationRepository.set_secrets.
|
||||
|
||||
Args:
|
||||
provider: Provider name ('google' or 'outlook').
|
||||
code: Authorization code from OAuth callback.
|
||||
state: State parameter from OAuth callback.
|
||||
|
||||
Returns:
|
||||
Server-assigned integration ID for use in sync operations.
|
||||
|
||||
Raises:
|
||||
CalendarServiceError: If OAuth exchange fails.
|
||||
"""
|
||||
"""Complete OAuth flow and store tokens."""
|
||||
oauth_provider = self._parse_calendar_provider(provider)
|
||||
|
||||
tokens = await self._exchange_tokens(oauth_provider, code, state)
|
||||
@@ -186,11 +152,11 @@ class CalendarService:
|
||||
workspace_id=self.DEFAULT_WORKSPACE_ID,
|
||||
name=f"{provider.title()} Calendar",
|
||||
integration_type=IntegrationType.CALENDAR,
|
||||
config={"provider": provider},
|
||||
config={PROVIDER: provider},
|
||||
)
|
||||
await uow.integrations.create(integration)
|
||||
else:
|
||||
integration.config["provider"] = provider
|
||||
integration.config[PROVIDER] = provider
|
||||
|
||||
integration.connect(provider_email=email)
|
||||
await uow.integrations.update(integration)
|
||||
@@ -203,15 +169,13 @@ class CalendarService:
|
||||
|
||||
return integration.id
|
||||
|
||||
|
||||
class _CalendarServiceConnectionMixin(_CalendarServiceBase):
|
||||
_oauth_manager: OAuthManager
|
||||
_uow_factory: Callable[[], UnitOfWork]
|
||||
|
||||
async def get_connection_status(self, provider: str) -> OAuthConnectionInfo:
|
||||
"""Get OAuth connection status for a provider.
|
||||
|
||||
Args:
|
||||
provider: Provider name ('google' or 'outlook').
|
||||
|
||||
Returns:
|
||||
OAuthConnectionInfo with status and details.
|
||||
"""
|
||||
"""Get OAuth connection status for a provider."""
|
||||
async with self._uow_factory() as uow:
|
||||
integration = await uow.integrations.get_by_provider(
|
||||
provider=provider,
|
||||
@@ -224,7 +188,6 @@ class CalendarService:
|
||||
status=IntegrationStatus.DISCONNECTED.value,
|
||||
)
|
||||
|
||||
# Check token expiry
|
||||
secrets = await uow.integrations.get_secrets(integration.id)
|
||||
status, expires_at = self._resolve_connection_status(integration, secrets)
|
||||
|
||||
@@ -237,14 +200,7 @@ class CalendarService:
|
||||
)
|
||||
|
||||
async def disconnect(self, provider: str) -> bool:
|
||||
"""Disconnect OAuth integration and revoke tokens.
|
||||
|
||||
Args:
|
||||
provider: Provider name ('google' or 'outlook').
|
||||
|
||||
Returns:
|
||||
True if disconnected successfully.
|
||||
"""
|
||||
"""Disconnect OAuth integration and revoke tokens."""
|
||||
oauth_provider = self._parse_calendar_provider(provider)
|
||||
|
||||
async with self._uow_factory() as uow:
|
||||
@@ -256,15 +212,12 @@ class CalendarService:
|
||||
if integration is None:
|
||||
return False
|
||||
|
||||
# Get tokens before deletion for revocation
|
||||
secrets = await uow.integrations.get_secrets(integration.id)
|
||||
access_token = secrets.get(OAUTH_FIELD_ACCESS_TOKEN) if secrets else None
|
||||
|
||||
# Delete integration (cascades to secrets)
|
||||
await uow.integrations.delete(integration.id)
|
||||
await uow.commit()
|
||||
|
||||
# Revoke tokens with provider (best-effort)
|
||||
if access_token:
|
||||
try:
|
||||
await self._oauth_manager.revoke_tokens(oauth_provider, access_token)
|
||||
@@ -278,54 +231,64 @@ class CalendarService:
|
||||
logger.info("Disconnected provider=%s", provider)
|
||||
return True
|
||||
|
||||
|
||||
class _CalendarServiceEventsMixin(_CalendarServiceBase):
|
||||
_oauth_manager: OAuthManager
|
||||
_uow_factory: Callable[[], UnitOfWork]
|
||||
_settings: CalendarIntegrationSettings
|
||||
_google_adapter: GoogleCalendarAdapter
|
||||
_outlook_adapter: OutlookCalendarAdapter
|
||||
|
||||
async def list_calendar_events(
|
||||
self,
|
||||
provider: str | None = None,
|
||||
hours_ahead: int | None = None,
|
||||
limit: int | None = None,
|
||||
) -> list[CalendarEventInfo]:
|
||||
"""Fetch calendar events from connected providers.
|
||||
|
||||
Args:
|
||||
provider: Optional provider to fetch from (fetches all if None).
|
||||
hours_ahead: Hours to look ahead (defaults to settings).
|
||||
limit: Maximum events per provider (defaults to settings).
|
||||
|
||||
Returns:
|
||||
List of calendar events sorted by start time.
|
||||
|
||||
Raises:
|
||||
CalendarServiceError: If no providers connected or fetch fails.
|
||||
"""
|
||||
"""Fetch calendar events from connected providers."""
|
||||
effective_hours = hours_ahead or self._settings.sync_hours_ahead
|
||||
effective_limit = limit or self._settings.max_events
|
||||
|
||||
events: list[CalendarEventInfo] = []
|
||||
|
||||
if provider:
|
||||
provider_events = await self._fetch_provider_events(
|
||||
events = await self._fetch_provider_events(
|
||||
provider=provider,
|
||||
hours_ahead=effective_hours,
|
||||
limit=effective_limit,
|
||||
)
|
||||
events.extend(provider_events)
|
||||
else:
|
||||
# Fetch from all connected providers
|
||||
for p in [OAuthProvider.GOOGLE.value, OAuthProvider.OUTLOOK.value]:
|
||||
try:
|
||||
provider_events = await self._fetch_provider_events(
|
||||
provider=p,
|
||||
hours_ahead=effective_hours,
|
||||
limit=effective_limit,
|
||||
)
|
||||
events.extend(provider_events)
|
||||
except CalendarServiceError:
|
||||
continue # Skip disconnected providers
|
||||
events = await self._fetch_all_provider_events(effective_hours, effective_limit)
|
||||
|
||||
# Sort by start time
|
||||
events.sort(key=lambda e: e.start_time)
|
||||
return events
|
||||
|
||||
async def _fetch_all_provider_events(
|
||||
self,
|
||||
hours_ahead: int,
|
||||
limit: int,
|
||||
) -> list[CalendarEventInfo]:
|
||||
"""Fetch events from all configured providers, ignoring errors."""
|
||||
events: list[CalendarEventInfo] = []
|
||||
for p in [OAuthProvider.GOOGLE.value, OAuthProvider.OUTLOOK.value]:
|
||||
provider_events = await self._try_fetch_provider_events(p, hours_ahead, limit)
|
||||
events.extend(provider_events)
|
||||
return events
|
||||
|
||||
async def _try_fetch_provider_events(
|
||||
self,
|
||||
provider: str,
|
||||
hours_ahead: int,
|
||||
limit: int,
|
||||
) -> list[CalendarEventInfo]:
|
||||
"""Attempt to fetch events from a provider, returning empty list on error."""
|
||||
try:
|
||||
return await self._fetch_provider_events(
|
||||
provider=provider,
|
||||
hours_ahead=hours_ahead,
|
||||
limit=limit,
|
||||
)
|
||||
except CalendarServiceError:
|
||||
return []
|
||||
|
||||
async def _fetch_provider_events(
|
||||
self,
|
||||
provider: str,
|
||||
@@ -336,42 +299,10 @@ class CalendarService:
|
||||
oauth_provider = self._parse_calendar_provider(provider)
|
||||
|
||||
async with self._uow_factory() as uow:
|
||||
integration = await uow.integrations.get_by_provider(
|
||||
provider=provider,
|
||||
integration_type=IntegrationType.CALENDAR.value,
|
||||
)
|
||||
integration = await self._load_calendar_integration(uow, provider)
|
||||
tokens = await self._load_tokens_for_provider(uow, provider, integration)
|
||||
tokens = await self._refresh_tokens_if_needed(uow, integration, oauth_provider, tokens)
|
||||
|
||||
if integration is None or not integration.is_connected:
|
||||
raise CalendarServiceError(f"Provider {provider} not connected")
|
||||
|
||||
secrets = await uow.integrations.get_secrets(integration.id)
|
||||
if not secrets:
|
||||
raise CalendarServiceError(f"No tokens for provider {provider}")
|
||||
|
||||
try:
|
||||
tokens = OAuthTokens.from_secrets_dict(secrets)
|
||||
except (KeyError, ValueError) as e:
|
||||
raise CalendarServiceError(f"Invalid tokens: {e}") from e
|
||||
|
||||
# Refresh if expired
|
||||
if tokens.is_expired() and tokens.refresh_token:
|
||||
try:
|
||||
tokens = await self._oauth_manager.refresh_tokens(
|
||||
provider=oauth_provider,
|
||||
refresh_token=tokens.refresh_token,
|
||||
)
|
||||
await uow.integrations.set_secrets(
|
||||
integration_id=integration.id,
|
||||
secrets=tokens.to_secrets_dict(),
|
||||
)
|
||||
await uow.commit()
|
||||
except OAuthError as e:
|
||||
integration.mark_error(f"{ERR_TOKEN_REFRESH_PREFIX}{e}")
|
||||
await uow.integrations.update(integration)
|
||||
await uow.commit()
|
||||
raise CalendarServiceError(f"{ERR_TOKEN_REFRESH_PREFIX}{e}") from e
|
||||
|
||||
# Fetch events
|
||||
try:
|
||||
events = await self._fetch_events(
|
||||
oauth_provider,
|
||||
@@ -379,16 +310,81 @@ class CalendarService:
|
||||
hours_ahead,
|
||||
limit,
|
||||
)
|
||||
integration.record_sync()
|
||||
await uow.integrations.update(integration)
|
||||
await uow.commit()
|
||||
return events
|
||||
except (GoogleCalendarError, OutlookCalendarError) as e:
|
||||
integration.mark_error(str(e))
|
||||
await uow.integrations.update(integration)
|
||||
await uow.commit()
|
||||
await self._record_sync_error(uow, integration, str(e))
|
||||
raise CalendarServiceError(str(e)) from e
|
||||
|
||||
await self._record_sync_success(uow, integration)
|
||||
return events
|
||||
|
||||
async def _load_calendar_integration(
|
||||
self,
|
||||
uow: UnitOfWork,
|
||||
provider: str,
|
||||
) -> Integration:
|
||||
integration = await uow.integrations.get_by_provider(
|
||||
provider=provider,
|
||||
integration_type=IntegrationType.CALENDAR.value,
|
||||
)
|
||||
if integration is None or not integration.is_connected:
|
||||
raise CalendarServiceError(f"Provider {provider} not connected")
|
||||
return integration
|
||||
|
||||
async def _load_tokens_for_provider(
|
||||
self,
|
||||
uow: UnitOfWork,
|
||||
provider: str,
|
||||
integration: Integration,
|
||||
) -> OAuthTokens:
|
||||
secrets = await uow.integrations.get_secrets(integration.id)
|
||||
if not secrets:
|
||||
raise CalendarServiceError(f"No tokens for provider {provider}")
|
||||
|
||||
try:
|
||||
return OAuthTokens.from_secrets_dict(secrets)
|
||||
except (KeyError, ValueError) as e:
|
||||
raise CalendarServiceError(f"Invalid tokens: {e}") from e
|
||||
|
||||
async def _refresh_tokens_if_needed(
|
||||
self,
|
||||
uow: UnitOfWork,
|
||||
integration: Integration,
|
||||
oauth_provider: OAuthProvider,
|
||||
tokens: OAuthTokens,
|
||||
) -> OAuthTokens:
|
||||
if not (tokens.is_expired() and tokens.refresh_token):
|
||||
return tokens
|
||||
|
||||
try:
|
||||
refreshed = await self._oauth_manager.refresh_tokens(
|
||||
provider=oauth_provider,
|
||||
refresh_token=tokens.refresh_token,
|
||||
)
|
||||
await uow.integrations.set_secrets(
|
||||
integration_id=integration.id,
|
||||
secrets=refreshed.to_secrets_dict(),
|
||||
)
|
||||
await uow.commit()
|
||||
return refreshed
|
||||
except OAuthError as e:
|
||||
await self._record_sync_error(uow, integration, f"{ERR_TOKEN_REFRESH_PREFIX}{e}")
|
||||
raise CalendarServiceError(f"{ERR_TOKEN_REFRESH_PREFIX}{e}") from e
|
||||
|
||||
async def _record_sync_success(self, uow: UnitOfWork, integration: Integration) -> None:
|
||||
integration.record_sync()
|
||||
await uow.integrations.update(integration)
|
||||
await uow.commit()
|
||||
|
||||
async def _record_sync_error(
|
||||
self,
|
||||
uow: UnitOfWork,
|
||||
integration: Integration,
|
||||
message: str,
|
||||
) -> None:
|
||||
integration.mark_error(message)
|
||||
await uow.integrations.update(integration)
|
||||
await uow.commit()
|
||||
|
||||
async def _fetch_events(
|
||||
self,
|
||||
provider: OAuthProvider,
|
||||
@@ -422,36 +418,28 @@ class CalendarService:
|
||||
return self._google_adapter
|
||||
return self._outlook_adapter
|
||||
|
||||
@staticmethod
|
||||
def _parse_calendar_provider(provider: str) -> OAuthProvider:
|
||||
"""Parse and validate provider string for calendar operations.
|
||||
|
||||
Args:
|
||||
provider: Provider name (case-insensitive).
|
||||
class _CalendarServiceHelpersMixin(_CalendarServiceBase):
|
||||
_settings: CalendarIntegrationSettings
|
||||
|
||||
Returns:
|
||||
OAuthProvider enum value.
|
||||
|
||||
Raises:
|
||||
CalendarServiceError: If provider is not recognized.
|
||||
"""
|
||||
def _parse_calendar_provider(self, provider: str) -> OAuthProvider:
|
||||
"""Parse and validate provider string for calendar operations."""
|
||||
try:
|
||||
return OAuthProvider.parse(provider)
|
||||
except ValueError as e:
|
||||
raise CalendarServiceError(str(e)) from e
|
||||
|
||||
@staticmethod
|
||||
def _map_integration_status(status: IntegrationStatus) -> str:
|
||||
def _map_integration_status(self, status: IntegrationStatus) -> str:
|
||||
"""Map IntegrationStatus to connection status string."""
|
||||
return status.value if status in IntegrationStatus else IntegrationStatus.DISCONNECTED.value
|
||||
|
||||
@staticmethod
|
||||
def _resolve_connection_status(
|
||||
self,
|
||||
integration: Integration,
|
||||
secrets: dict[str, str] | None,
|
||||
) -> tuple[str, datetime | None]:
|
||||
"""Resolve connection status and expiration time from stored secrets."""
|
||||
status = CalendarService._map_integration_status(integration.status)
|
||||
status = self._map_integration_status(integration.status)
|
||||
if not secrets or not integration.is_connected:
|
||||
return status, None
|
||||
|
||||
@@ -461,6 +449,32 @@ class CalendarService:
|
||||
return IntegrationStatus.ERROR.value, None
|
||||
|
||||
expires_at = tokens.expires_at
|
||||
if tokens.is_expired():
|
||||
return "expired", expires_at
|
||||
return status, expires_at
|
||||
return ("expired", expires_at) if tokens.is_expired() else (status, expires_at)
|
||||
|
||||
|
||||
class CalendarService(
|
||||
_CalendarServiceOAuthMixin,
|
||||
_CalendarServiceConnectionMixin,
|
||||
_CalendarServiceEventsMixin,
|
||||
_CalendarServiceHelpersMixin,
|
||||
):
|
||||
"""Calendar integration service."""
|
||||
|
||||
# Default workspace ID for single-user mode
|
||||
DEFAULT_WORKSPACE_ID = UUID("00000000-0000-0000-0000-000000000001")
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
uow_factory: Callable[[], UnitOfWork],
|
||||
settings: CalendarIntegrationSettings,
|
||||
**kwargs: Unpack[_CalendarServiceDepsKwargs],
|
||||
) -> None:
|
||||
"""Initialize calendar service."""
|
||||
self._uow_factory = uow_factory
|
||||
self._settings = settings
|
||||
oauth_manager = kwargs.get("oauth_manager")
|
||||
google_adapter = kwargs.get("google_adapter")
|
||||
outlook_adapter = kwargs.get("outlook_adapter")
|
||||
self._oauth_manager = oauth_manager or OAuthManager(settings)
|
||||
self._google_adapter = google_adapter or GoogleCalendarAdapter()
|
||||
self._outlook_adapter = outlook_adapter or OutlookCalendarAdapter()
|
||||
|
||||
@@ -7,7 +7,7 @@ from __future__ import annotations
|
||||
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Protocol, Self
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from noteflow.config.constants import (
|
||||
ERROR_MSG_MEETING_PREFIX,
|
||||
@@ -21,31 +21,12 @@ from noteflow.infrastructure.export import (
|
||||
TranscriptExporter,
|
||||
)
|
||||
from noteflow.infrastructure.logging import get_logger
|
||||
from .protocols import ExportRepositoryProvider
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from noteflow.domain.entities import Meeting, Segment
|
||||
from noteflow.domain.ports.repositories import MeetingRepository, SegmentRepository
|
||||
from noteflow.domain.value_objects import MeetingId
|
||||
|
||||
|
||||
class ExportRepositoryProvider(Protocol):
|
||||
"""Minimal repository provider for export operations."""
|
||||
|
||||
@property
|
||||
def meetings(self) -> MeetingRepository: ...
|
||||
|
||||
@property
|
||||
def segments(self) -> SegmentRepository: ...
|
||||
|
||||
async def __aenter__(self) -> Self: ...
|
||||
|
||||
async def __aexit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_val: BaseException | None,
|
||||
exc_tb: object,
|
||||
) -> None: ...
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
@@ -56,6 +37,25 @@ class ExportFormat(Enum):
|
||||
HTML = "html"
|
||||
PDF = "pdf"
|
||||
|
||||
@classmethod
|
||||
def from_extension(cls, extension: str) -> ExportFormat | None:
|
||||
"""Map file extension to export format.
|
||||
|
||||
Args:
|
||||
extension: File extension (e.g., '.md', '.html').
|
||||
|
||||
Returns:
|
||||
Matching ExportFormat or None if not recognized.
|
||||
"""
|
||||
extension_map = {
|
||||
".md": cls.MARKDOWN,
|
||||
".markdown": cls.MARKDOWN,
|
||||
EXPORT_EXT_HTML: cls.HTML,
|
||||
".htm": cls.HTML,
|
||||
EXPORT_EXT_PDF: cls.PDF,
|
||||
}
|
||||
return extension_map.get(extension.lower())
|
||||
|
||||
|
||||
class ExportService:
|
||||
"""Application service for transcript export operations.
|
||||
@@ -110,41 +110,44 @@ class ExportService:
|
||||
Raises:
|
||||
ValueError: If meeting not found.
|
||||
"""
|
||||
logger.info("Starting transcript export", meeting_id=str(meeting_id), format=fmt.value)
|
||||
|
||||
async with self._uow:
|
||||
meeting, segments = await self._load_meeting_data(meeting_id)
|
||||
result = self.get_exporter(fmt).export(meeting, segments)
|
||||
self._log_export_complete(meeting_id, fmt, len(segments), result)
|
||||
return result
|
||||
|
||||
async def _load_meeting_data(
|
||||
self,
|
||||
meeting_id: MeetingId,
|
||||
) -> tuple[Meeting, list[Segment]]:
|
||||
"""Load meeting and segments for export."""
|
||||
meeting = await self._uow.meetings.get(meeting_id)
|
||||
if not meeting:
|
||||
msg = f"{ERROR_MSG_MEETING_PREFIX}{meeting_id} not found"
|
||||
logger.warning("Export failed: meeting not found", meeting_id=str(meeting_id))
|
||||
raise ValueError(msg)
|
||||
segments = await self._uow.segments.get_by_meeting(meeting_id)
|
||||
logger.debug("Retrieved segments for export", meeting_id=str(meeting_id), segment_count=len(segments))
|
||||
return meeting, list(segments)
|
||||
|
||||
@staticmethod
|
||||
def _log_export_complete(
|
||||
meeting_id: MeetingId,
|
||||
fmt: ExportFormat,
|
||||
segment_count: int,
|
||||
result: str | bytes,
|
||||
) -> None:
|
||||
"""Log export completion details."""
|
||||
content_size = len(result) if isinstance(result, bytes) else len(result.encode("utf-8"))
|
||||
logger.info(
|
||||
"Starting transcript export",
|
||||
"Transcript export completed",
|
||||
meeting_id=str(meeting_id),
|
||||
format=fmt.value,
|
||||
segment_count=segment_count,
|
||||
content_size_bytes=content_size,
|
||||
)
|
||||
async with self._uow:
|
||||
found_meeting = await self._uow.meetings.get(meeting_id)
|
||||
if not found_meeting:
|
||||
msg = f"{ERROR_MSG_MEETING_PREFIX}{meeting_id} not found"
|
||||
logger.warning(
|
||||
"Export failed: meeting not found",
|
||||
meeting_id=str(meeting_id),
|
||||
)
|
||||
raise ValueError(msg)
|
||||
|
||||
segments = await self._uow.segments.get_by_meeting(meeting_id)
|
||||
segment_count = len(segments)
|
||||
logger.debug(
|
||||
"Retrieved segments for export",
|
||||
meeting_id=str(meeting_id),
|
||||
segment_count=segment_count,
|
||||
)
|
||||
|
||||
exporter = self.get_exporter(fmt)
|
||||
result = exporter.export(found_meeting, segments)
|
||||
|
||||
content_size = len(result) if isinstance(result, bytes) else len(result.encode("utf-8"))
|
||||
logger.info(
|
||||
"Transcript export completed",
|
||||
meeting_id=str(meeting_id),
|
||||
format=fmt.value,
|
||||
segment_count=segment_count,
|
||||
content_size_bytes=content_size,
|
||||
)
|
||||
return result
|
||||
|
||||
async def export_to_file(
|
||||
self,
|
||||
@@ -259,28 +262,21 @@ class ExportService:
|
||||
Raises:
|
||||
ValueError: If extension is not recognized.
|
||||
"""
|
||||
extension_map = {
|
||||
".md": ExportFormat.MARKDOWN,
|
||||
".markdown": ExportFormat.MARKDOWN,
|
||||
EXPORT_EXT_HTML: ExportFormat.HTML,
|
||||
".htm": ExportFormat.HTML,
|
||||
EXPORT_EXT_PDF: ExportFormat.PDF,
|
||||
}
|
||||
normalized_ext = extension.lower()
|
||||
fmt = extension_map.get(normalized_ext)
|
||||
fmt = ExportFormat.from_extension(extension)
|
||||
if fmt is None:
|
||||
supported = [".md", ".markdown", EXPORT_EXT_HTML, ".htm", EXPORT_EXT_PDF]
|
||||
logger.warning(
|
||||
"Unrecognized file extension for format inference",
|
||||
extension=extension,
|
||||
supported_extensions=list(extension_map.keys()),
|
||||
supported_extensions=supported,
|
||||
)
|
||||
raise ValueError(
|
||||
f"Cannot infer format from extension '{extension}'. "
|
||||
f"Supported: {', '.join(extension_map.keys())}"
|
||||
f"Supported: {', '.join(supported)}"
|
||||
)
|
||||
logger.debug(
|
||||
"Format inference successful",
|
||||
extension=normalized_ext,
|
||||
extension=extension.lower(),
|
||||
inferred_format=fmt.value,
|
||||
)
|
||||
return fmt
|
||||
|
||||
@@ -21,50 +21,44 @@ from noteflow.domain.identity import (
|
||||
UserContext,
|
||||
Workspace,
|
||||
WorkspaceContext,
|
||||
WorkspaceMembership,
|
||||
WorkspaceRole,
|
||||
)
|
||||
from noteflow.infrastructure.logging import get_logger
|
||||
from noteflow.domain.constants.fields import EMAIL
|
||||
from noteflow.infrastructure.persistence.models import (
|
||||
DEFAULT_USER_ID,
|
||||
DEFAULT_WORKSPACE_ID,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
from collections.abc import Awaitable, Callable, Sequence
|
||||
|
||||
from noteflow.domain.ports.unit_of_work import UnitOfWork
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class IdentityService:
|
||||
"""Application service for identity and workspace context management.
|
||||
class _IdentityServiceBase:
|
||||
get_or_create_default_user: Callable[..., Awaitable[UserContext]]
|
||||
get_or_create_default_workspace: Callable[..., Awaitable[WorkspaceContext]]
|
||||
_get_workspace_context: Callable[..., Awaitable[WorkspaceContext]]
|
||||
_default_workspace_context: Callable[..., WorkspaceContext]
|
||||
_workspace_context_for_memory: Callable[..., WorkspaceContext]
|
||||
_get_default_workspace: Callable[..., Awaitable[Workspace | None]]
|
||||
_workspace_context_for_member: Callable[..., Awaitable[WorkspaceContext]]
|
||||
_create_default_workspace: Callable[..., Awaitable[WorkspaceContext]]
|
||||
_require_workspace: Callable[..., Awaitable[Workspace]]
|
||||
_require_membership: Callable[..., Awaitable[WorkspaceMembership]]
|
||||
|
||||
Provide a clean interface for identity operations, abstracting away
|
||||
the infrastructure details (database persistence, default creation).
|
||||
|
||||
Orchestrates:
|
||||
- Default user and workspace creation on first run
|
||||
- Operation context resolution
|
||||
- Workspace membership management
|
||||
"""
|
||||
|
||||
class _IdentityDefaultsMixin(_IdentityServiceBase):
|
||||
async def get_or_create_default_user(
|
||||
self,
|
||||
uow: UnitOfWork,
|
||||
) -> UserContext:
|
||||
"""Get or create the default local user.
|
||||
|
||||
For local-first mode, create a default user on first run.
|
||||
|
||||
Args:
|
||||
uow: Unit of work for database access.
|
||||
|
||||
Returns:
|
||||
User context for the default user.
|
||||
"""
|
||||
"""Get or create the default local user."""
|
||||
if not uow.supports_users:
|
||||
# Return a synthetic context for memory mode
|
||||
logger.debug("Memory mode: returning synthetic default user context")
|
||||
return UserContext(
|
||||
user_id=UUID(DEFAULT_USER_ID),
|
||||
@@ -80,9 +74,8 @@ class IdentityService:
|
||||
email=user.email,
|
||||
)
|
||||
|
||||
# Create default user
|
||||
user_id = UUID(DEFAULT_USER_ID)
|
||||
user = await uow.users.create_default(
|
||||
await uow.users.create_default(
|
||||
user_id=user_id,
|
||||
display_name=DEFAULT_USER_DISPLAY_NAME,
|
||||
)
|
||||
@@ -100,81 +93,26 @@ class IdentityService:
|
||||
uow: UnitOfWork,
|
||||
user_id: UUID,
|
||||
) -> WorkspaceContext:
|
||||
"""Get or create the default workspace for a user.
|
||||
|
||||
For local-first mode, each user has a default "Personal" workspace.
|
||||
|
||||
Args:
|
||||
uow: Unit of work for database access.
|
||||
user_id: User UUID.
|
||||
|
||||
Returns:
|
||||
Workspace context for the default workspace.
|
||||
"""
|
||||
"""Get or create the default workspace for a user."""
|
||||
if not uow.supports_workspaces:
|
||||
# Return a synthetic context for memory mode
|
||||
logger.debug("Memory mode: returning synthetic default workspace context")
|
||||
return WorkspaceContext(
|
||||
workspace_id=UUID(DEFAULT_WORKSPACE_ID),
|
||||
workspace_name=DEFAULT_WORKSPACE_NAME,
|
||||
role=WorkspaceRole.OWNER,
|
||||
)
|
||||
return self._default_workspace_context()
|
||||
|
||||
workspace = await uow.workspaces.get_default_for_user(user_id)
|
||||
workspace = await self._get_default_workspace(uow, user_id)
|
||||
if workspace:
|
||||
logger.debug(
|
||||
"Found existing default workspace for user %s: %s",
|
||||
user_id,
|
||||
workspace.id,
|
||||
)
|
||||
membership = await uow.workspaces.get_membership(workspace.id, user_id)
|
||||
role = WorkspaceRole(membership.role.value) if membership else WorkspaceRole.OWNER
|
||||
return WorkspaceContext(
|
||||
workspace_id=workspace.id,
|
||||
workspace_name=workspace.name,
|
||||
role=role,
|
||||
)
|
||||
return await self._workspace_context_for_member(uow, workspace, user_id)
|
||||
|
||||
# Create default workspace
|
||||
workspace_id = UUID(DEFAULT_WORKSPACE_ID)
|
||||
workspace = await uow.workspaces.create(
|
||||
workspace_id=workspace_id,
|
||||
name=DEFAULT_WORKSPACE_NAME,
|
||||
owner_id=user_id,
|
||||
is_default=True,
|
||||
)
|
||||
await uow.commit()
|
||||
return await self._create_default_workspace(uow, user_id)
|
||||
|
||||
logger.info("Created default workspace for user %s: %s", user_id, workspace_id)
|
||||
|
||||
return WorkspaceContext(
|
||||
workspace_id=workspace_id,
|
||||
workspace_name=DEFAULT_WORKSPACE_NAME,
|
||||
role=WorkspaceRole.OWNER,
|
||||
)
|
||||
|
||||
class _IdentityContextMixin(_IdentityServiceBase):
|
||||
async def get_context(
|
||||
self,
|
||||
uow: UnitOfWork,
|
||||
workspace_id: UUID | None = None,
|
||||
request_id: str | None = None,
|
||||
) -> OperationContext:
|
||||
"""Get the full operation context.
|
||||
|
||||
Resolve user identity and workspace scope for an operation.
|
||||
|
||||
Args:
|
||||
uow: Unit of work for database access.
|
||||
workspace_id: Optional specific workspace, or default.
|
||||
request_id: Optional request correlation ID.
|
||||
|
||||
Returns:
|
||||
Full operation context with user and workspace.
|
||||
|
||||
Raises:
|
||||
ValueError: If workspace not found.
|
||||
PermissionError: If user not a member of workspace.
|
||||
"""
|
||||
"""Get the full operation context."""
|
||||
user = await self.get_or_create_default_user(uow)
|
||||
|
||||
if workspace_id:
|
||||
@@ -200,50 +138,21 @@ class IdentityService:
|
||||
request_id=request_id,
|
||||
)
|
||||
|
||||
|
||||
class _IdentityWorkspaceMixin(_IdentityServiceBase):
|
||||
async def _get_workspace_context(
|
||||
self,
|
||||
uow: UnitOfWork,
|
||||
workspace_id: UUID,
|
||||
user_id: UUID,
|
||||
) -> WorkspaceContext:
|
||||
"""Get workspace context for a specific workspace.
|
||||
|
||||
Args:
|
||||
uow: Unit of work for database access.
|
||||
workspace_id: Workspace UUID.
|
||||
user_id: User UUID.
|
||||
|
||||
Returns:
|
||||
Workspace context.
|
||||
|
||||
Raises:
|
||||
ValueError: If workspace not found.
|
||||
PermissionError: If user not a member.
|
||||
"""
|
||||
"""Get workspace context for a specific workspace."""
|
||||
if not uow.supports_workspaces:
|
||||
logger.debug("Memory mode: returning synthetic workspace context for %s", workspace_id)
|
||||
return WorkspaceContext(
|
||||
workspace_id=workspace_id,
|
||||
workspace_name=DEFAULT_WORKSPACE_NAME,
|
||||
role=WorkspaceRole.OWNER,
|
||||
)
|
||||
return self._workspace_context_for_memory(workspace_id)
|
||||
|
||||
logger.debug("Looking up workspace %s for user %s", workspace_id, user_id)
|
||||
workspace = await uow.workspaces.get(workspace_id)
|
||||
if not workspace:
|
||||
logger.warning("Workspace not found: %s", workspace_id)
|
||||
msg = f"{ERROR_MSG_WORKSPACE_PREFIX}{workspace_id} not found"
|
||||
raise ValueError(msg)
|
||||
|
||||
membership = await uow.workspaces.get_membership(workspace_id, user_id)
|
||||
if not membership:
|
||||
logger.warning(
|
||||
"Permission denied: user %s is not a member of workspace %s",
|
||||
user_id,
|
||||
workspace_id,
|
||||
)
|
||||
msg = f"User not a member of workspace {workspace_id}"
|
||||
raise PermissionError(msg)
|
||||
workspace = await self._require_workspace(uow, workspace_id)
|
||||
membership = await self._require_membership(uow, workspace_id, user_id)
|
||||
|
||||
logger.debug(
|
||||
"Workspace access granted: user=%s, workspace=%s, role=%s",
|
||||
@@ -257,12 +166,118 @@ class IdentityService:
|
||||
role=membership.role,
|
||||
)
|
||||
|
||||
async def list_workspaces(
|
||||
def _default_workspace_context(self) -> WorkspaceContext:
|
||||
return WorkspaceContext(
|
||||
workspace_id=UUID(DEFAULT_WORKSPACE_ID),
|
||||
workspace_name=DEFAULT_WORKSPACE_NAME,
|
||||
role=WorkspaceRole.OWNER,
|
||||
)
|
||||
|
||||
def _workspace_context_for_memory(self, workspace_id: UUID) -> WorkspaceContext:
|
||||
return WorkspaceContext(
|
||||
workspace_id=workspace_id,
|
||||
workspace_name=DEFAULT_WORKSPACE_NAME,
|
||||
role=WorkspaceRole.OWNER,
|
||||
)
|
||||
|
||||
async def _get_default_workspace(
|
||||
self,
|
||||
uow: UnitOfWork,
|
||||
user_id: UUID,
|
||||
limit: int = 50,
|
||||
offset: int = 0,
|
||||
) -> Workspace | None:
|
||||
workspace = await uow.workspaces.get_default_for_user(user_id)
|
||||
if workspace:
|
||||
logger.debug(
|
||||
"Found existing default workspace for user %s: %s",
|
||||
user_id,
|
||||
workspace.id,
|
||||
)
|
||||
return workspace
|
||||
|
||||
async def _workspace_context_for_member(
|
||||
self,
|
||||
uow: UnitOfWork,
|
||||
workspace: Workspace,
|
||||
user_id: UUID,
|
||||
) -> WorkspaceContext:
|
||||
membership = await uow.workspaces.get_membership(workspace.id, user_id)
|
||||
role = WorkspaceRole(membership.role.value) if membership else WorkspaceRole.OWNER
|
||||
return WorkspaceContext(
|
||||
workspace_id=workspace.id,
|
||||
workspace_name=workspace.name,
|
||||
role=role,
|
||||
)
|
||||
|
||||
async def _create_default_workspace(
|
||||
self,
|
||||
uow: UnitOfWork,
|
||||
user_id: UUID,
|
||||
) -> WorkspaceContext:
|
||||
workspace_id = UUID(DEFAULT_WORKSPACE_ID)
|
||||
await uow.workspaces.create(
|
||||
workspace_id=workspace_id,
|
||||
name=DEFAULT_WORKSPACE_NAME,
|
||||
owner_id=user_id,
|
||||
is_default=True,
|
||||
)
|
||||
await uow.commit()
|
||||
|
||||
logger.info("Created default workspace for user %s: %s", user_id, workspace_id)
|
||||
return WorkspaceContext(
|
||||
workspace_id=workspace_id,
|
||||
workspace_name=DEFAULT_WORKSPACE_NAME,
|
||||
role=WorkspaceRole.OWNER,
|
||||
)
|
||||
|
||||
async def _require_workspace(
|
||||
self,
|
||||
uow: UnitOfWork,
|
||||
workspace_id: UUID,
|
||||
) -> Workspace:
|
||||
logger.debug("Looking up workspace %s", workspace_id)
|
||||
workspace = await uow.workspaces.get(workspace_id)
|
||||
if not workspace:
|
||||
logger.warning("Workspace not found: %s", workspace_id)
|
||||
msg = f"{ERROR_MSG_WORKSPACE_PREFIX}{workspace_id} not found"
|
||||
raise ValueError(msg)
|
||||
return workspace
|
||||
|
||||
async def _require_membership(
|
||||
self,
|
||||
uow: UnitOfWork,
|
||||
workspace_id: UUID,
|
||||
user_id: UUID,
|
||||
) -> WorkspaceMembership:
|
||||
membership = await uow.workspaces.get_membership(workspace_id, user_id)
|
||||
if not membership:
|
||||
logger.warning(
|
||||
"Permission denied: user %s is not a member of workspace %s",
|
||||
user_id,
|
||||
workspace_id,
|
||||
)
|
||||
msg = f"User not a member of workspace {workspace_id}"
|
||||
raise PermissionError(msg)
|
||||
return membership
|
||||
|
||||
|
||||
class IdentityService(
|
||||
_IdentityDefaultsMixin,
|
||||
_IdentityContextMixin,
|
||||
_IdentityWorkspaceMixin,
|
||||
):
|
||||
"""Application service for identity and workspace context management.
|
||||
|
||||
Provide a clean interface for identity operations, abstracting away
|
||||
the infrastructure details (database persistence, default creation).
|
||||
|
||||
Orchestrates:
|
||||
- Default user and workspace creation on first run
|
||||
- Operation context resolution
|
||||
- Workspace membership management
|
||||
"""
|
||||
|
||||
async def list_workspaces(
|
||||
self, uow: UnitOfWork, user_id: UUID, limit: int = 50, offset: int = 0
|
||||
) -> Sequence[Workspace]:
|
||||
"""List workspaces a user is a member of.
|
||||
|
||||
@@ -313,36 +328,45 @@ class IdentityService:
|
||||
Raises:
|
||||
NotImplementedError: If workspaces not supported.
|
||||
"""
|
||||
if not uow.supports_workspaces:
|
||||
msg = "Workspaces require database persistence"
|
||||
raise NotImplementedError(msg)
|
||||
self._require_workspace_support(uow)
|
||||
|
||||
workspace_id = uuid4()
|
||||
workspace = await uow.workspaces.create(
|
||||
workspace_id=workspace_id,
|
||||
name=name,
|
||||
owner_id=owner_id,
|
||||
slug=slug,
|
||||
workspace_id=workspace_id, name=name, owner_id=owner_id, slug=slug
|
||||
)
|
||||
|
||||
# Create default project for the workspace (if projects are supported)
|
||||
if uow.supports_projects:
|
||||
project_id = uuid4()
|
||||
await uow.projects.create(
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
name=DEFAULT_PROJECT_NAME,
|
||||
slug=slugify(DEFAULT_PROJECT_NAME),
|
||||
description=f"{DEFAULT_PROJECT_NAME} project for this workspace",
|
||||
is_default=True,
|
||||
)
|
||||
logger.info("Created default project for workspace %s", workspace_id)
|
||||
|
||||
await self._create_default_project_if_supported(uow, workspace_id)
|
||||
await uow.commit()
|
||||
|
||||
logger.info("Created workspace %s: %s", workspace.name, workspace_id)
|
||||
return workspace
|
||||
|
||||
@staticmethod
|
||||
def _require_workspace_support(uow: UnitOfWork) -> None:
|
||||
"""Raise if workspaces not supported."""
|
||||
if not uow.supports_workspaces:
|
||||
msg = "Workspaces require database persistence"
|
||||
raise NotImplementedError(msg)
|
||||
|
||||
@staticmethod
|
||||
async def _create_default_project_if_supported(
|
||||
uow: UnitOfWork,
|
||||
workspace_id: UUID,
|
||||
) -> None:
|
||||
"""Create default project if projects are supported."""
|
||||
if not uow.supports_projects:
|
||||
return
|
||||
project_id = uuid4()
|
||||
await uow.projects.create(
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
name=DEFAULT_PROJECT_NAME,
|
||||
slug=slugify(DEFAULT_PROJECT_NAME),
|
||||
description=f"{DEFAULT_PROJECT_NAME} project for this workspace",
|
||||
is_default=True,
|
||||
)
|
||||
logger.info("Created default project for workspace %s", workspace_id)
|
||||
|
||||
async def get_user(
|
||||
self,
|
||||
uow: UnitOfWork,
|
||||
@@ -389,23 +413,14 @@ class IdentityService:
|
||||
Raises:
|
||||
NotImplementedError: If users not supported.
|
||||
"""
|
||||
if not uow.supports_users:
|
||||
msg = "Users require database persistence"
|
||||
raise NotImplementedError(msg)
|
||||
self._require_user_support(uow)
|
||||
|
||||
user = await uow.users.get(user_id)
|
||||
if not user:
|
||||
logger.warning("User not found for profile update: %s", user_id)
|
||||
return None
|
||||
|
||||
updated_fields: list[str] = []
|
||||
if display_name:
|
||||
user.display_name = display_name
|
||||
updated_fields.append("display_name")
|
||||
if email is not None:
|
||||
user.email = email
|
||||
updated_fields.append("email")
|
||||
|
||||
updated_fields = self._apply_profile_updates(user, display_name, email)
|
||||
if not updated_fields:
|
||||
logger.debug("No fields to update for user %s", user_id)
|
||||
return user
|
||||
@@ -413,9 +428,28 @@ class IdentityService:
|
||||
updated = await uow.users.update(user)
|
||||
await uow.commit()
|
||||
|
||||
logger.info(
|
||||
"Updated user profile: user_id=%s, fields=%s",
|
||||
user_id,
|
||||
", ".join(updated_fields),
|
||||
)
|
||||
logger.info("Updated user profile: user_id=%s, fields=%s", user_id, ", ".join(updated_fields))
|
||||
return updated
|
||||
|
||||
@staticmethod
|
||||
def _require_user_support(uow: UnitOfWork) -> None:
|
||||
"""Raise if users not supported."""
|
||||
if not uow.supports_users:
|
||||
msg = "Users require database persistence"
|
||||
raise NotImplementedError(msg)
|
||||
|
||||
@staticmethod
|
||||
def _apply_profile_updates(
|
||||
user: User,
|
||||
display_name: str | None,
|
||||
email: str | None,
|
||||
) -> list[str]:
|
||||
"""Apply profile updates and return list of updated field names."""
|
||||
updated_fields: list[str] = []
|
||||
if display_name:
|
||||
user.display_name = display_name
|
||||
updated_fields.append("display_name")
|
||||
if email is not None:
|
||||
user.email = email
|
||||
updated_fields.append(EMAIL)
|
||||
return updated_fields
|
||||
|
||||
@@ -9,6 +9,17 @@ from collections.abc import Sequence
|
||||
from datetime import UTC, datetime
|
||||
from typing import TYPE_CHECKING, NotRequired, Required, TypedDict, Unpack
|
||||
|
||||
from noteflow.domain.constants.fields import (
|
||||
ACTION_ITEMS,
|
||||
ANNOTATION_TYPE,
|
||||
END_TIME,
|
||||
KEY_POINTS,
|
||||
MODEL_NAME,
|
||||
PROVIDER_NAME,
|
||||
SEGMENT_IDS,
|
||||
START_TIME,
|
||||
UNKNOWN,
|
||||
)
|
||||
from noteflow.domain.entities import ActionItem, Annotation, KeyPoint, Meeting, Segment, Summary
|
||||
from noteflow.domain.value_objects import AnnotationId, AnnotationType, MeetingId
|
||||
from noteflow.infrastructure.logging import get_logger, log_state_transition
|
||||
@@ -23,6 +34,10 @@ if TYPE_CHECKING:
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class _MeetingServiceBase:
|
||||
_uow: UnitOfWork
|
||||
|
||||
|
||||
class _SummarySaveKwargs(TypedDict, total=False):
|
||||
"""Optional summary fields for save_summary."""
|
||||
|
||||
@@ -43,35 +58,13 @@ class _AnnotationCreateKwargs(TypedDict):
|
||||
segment_ids: NotRequired[list[int] | None]
|
||||
|
||||
|
||||
class MeetingService:
|
||||
"""Application service for meeting operations.
|
||||
|
||||
Provides use cases for managing meetings, segments, and summaries.
|
||||
All methods are async and expect a UnitOfWork to be provided.
|
||||
"""
|
||||
|
||||
def __init__(self, uow: UnitOfWork) -> None:
|
||||
"""Initialize the meeting service.
|
||||
|
||||
Args:
|
||||
uow: Unit of work for persistence.
|
||||
"""
|
||||
self._uow = uow
|
||||
|
||||
class _MeetingServiceCrudMixin(_MeetingServiceBase):
|
||||
async def create_meeting(
|
||||
self,
|
||||
title: str,
|
||||
metadata: dict[str, str] | None = None,
|
||||
) -> Meeting:
|
||||
"""Create a new meeting.
|
||||
|
||||
Args:
|
||||
title: Meeting title.
|
||||
metadata: Optional metadata.
|
||||
|
||||
Returns:
|
||||
Created meeting.
|
||||
"""
|
||||
"""Create a new meeting."""
|
||||
meeting = Meeting.create(title=title, metadata=metadata or {})
|
||||
|
||||
async with self._uow:
|
||||
@@ -81,14 +74,7 @@ class MeetingService:
|
||||
return saved
|
||||
|
||||
async def get_meeting(self, meeting_id: MeetingId) -> Meeting | None:
|
||||
"""Get a meeting by ID.
|
||||
|
||||
Args:
|
||||
meeting_id: Meeting identifier.
|
||||
|
||||
Returns:
|
||||
Meeting if found, None otherwise.
|
||||
"""
|
||||
"""Get a meeting by ID."""
|
||||
async with self._uow:
|
||||
meeting = await self._uow.meetings.get(meeting_id)
|
||||
if meeting is None:
|
||||
@@ -104,17 +90,7 @@ class MeetingService:
|
||||
offset: int = 0,
|
||||
sort_desc: bool = True,
|
||||
) -> tuple[Sequence[Meeting], int]:
|
||||
"""List meetings with optional filtering.
|
||||
|
||||
Args:
|
||||
states: Filter to specific meeting states (None = all).
|
||||
limit: Maximum results to return.
|
||||
offset: Number of results to skip for pagination.
|
||||
sort_desc: If True, newest meetings first.
|
||||
|
||||
Returns:
|
||||
Tuple of (meeting sequence, total matching count).
|
||||
"""
|
||||
"""List meetings with optional filtering."""
|
||||
async with self._uow:
|
||||
meetings, total = await self._uow.meetings.list_all(
|
||||
states=states,
|
||||
@@ -125,15 +101,26 @@ class MeetingService:
|
||||
logger.debug("Listed meetings", count=len(meetings), total=total, limit=limit, offset=offset)
|
||||
return meetings, total
|
||||
|
||||
async def delete_meeting(self, meeting_id: MeetingId) -> bool:
|
||||
"""Delete meeting with complete cleanup."""
|
||||
async with self._uow:
|
||||
meeting = await self._uow.meetings.get(meeting_id)
|
||||
if meeting is None:
|
||||
logger.warning("Cannot delete meeting: not found", meeting_id=str(meeting_id))
|
||||
return False
|
||||
|
||||
await self._uow.assets.delete_meeting_assets(meeting_id, meeting.asset_path)
|
||||
success = await self._uow.meetings.delete(meeting_id)
|
||||
if success:
|
||||
await self._uow.commit()
|
||||
logger.info("Deleted meeting", meeting_id=str(meeting_id), title=meeting.title)
|
||||
|
||||
return success
|
||||
|
||||
|
||||
class _MeetingServiceStateMixin(_MeetingServiceBase):
|
||||
async def start_recording(self, meeting_id: MeetingId) -> Meeting | None:
|
||||
"""Start recording a meeting.
|
||||
|
||||
Args:
|
||||
meeting_id: Meeting identifier.
|
||||
|
||||
Returns:
|
||||
Updated meeting, or None if not found.
|
||||
"""
|
||||
"""Start recording a meeting."""
|
||||
async with self._uow:
|
||||
meeting = await self._uow.meetings.get(meeting_id)
|
||||
if meeting is None:
|
||||
@@ -148,16 +135,7 @@ class MeetingService:
|
||||
return meeting
|
||||
|
||||
async def stop_meeting(self, meeting_id: MeetingId) -> Meeting | None:
|
||||
"""Stop a meeting through graceful STOPPING state.
|
||||
|
||||
Transitions: RECORDING -> STOPPING -> STOPPED
|
||||
|
||||
Args:
|
||||
meeting_id: Meeting identifier.
|
||||
|
||||
Returns:
|
||||
Updated meeting, or None if not found.
|
||||
"""
|
||||
"""Stop a meeting through graceful STOPPING state."""
|
||||
async with self._uow:
|
||||
meeting = await self._uow.meetings.get(meeting_id)
|
||||
if meeting is None:
|
||||
@@ -173,14 +151,7 @@ class MeetingService:
|
||||
return meeting
|
||||
|
||||
async def complete_meeting(self, meeting_id: MeetingId) -> Meeting | None:
|
||||
"""Mark a meeting as completed.
|
||||
|
||||
Args:
|
||||
meeting_id: Meeting identifier.
|
||||
|
||||
Returns:
|
||||
Updated meeting, or None if not found.
|
||||
"""
|
||||
"""Mark a meeting as completed."""
|
||||
async with self._uow:
|
||||
meeting = await self._uow.meetings.get(meeting_id)
|
||||
if meeting is None:
|
||||
@@ -194,59 +165,15 @@ class MeetingService:
|
||||
log_state_transition("meeting", str(meeting_id), previous_state, meeting.state)
|
||||
return meeting
|
||||
|
||||
async def delete_meeting(self, meeting_id: MeetingId) -> bool:
|
||||
"""Delete meeting with complete cleanup.
|
||||
|
||||
Removes:
|
||||
1. Filesystem assets (via asset repository)
|
||||
2. Database records (cascade deletes children)
|
||||
|
||||
Args:
|
||||
meeting_id: Meeting identifier.
|
||||
|
||||
Returns:
|
||||
True if deleted, False if not found.
|
||||
"""
|
||||
async with self._uow:
|
||||
meeting = await self._uow.meetings.get(meeting_id)
|
||||
if meeting is None:
|
||||
logger.warning("Cannot delete meeting: not found", meeting_id=str(meeting_id))
|
||||
return False
|
||||
|
||||
await self._uow.assets.delete_meeting_assets(meeting_id, meeting.asset_path)
|
||||
success = await self._uow.meetings.delete(meeting_id)
|
||||
if success:
|
||||
await self._uow.commit()
|
||||
logger.info("Deleted meeting", meeting_id=str(meeting_id), title=meeting.title)
|
||||
|
||||
return success
|
||||
|
||||
class _MeetingServiceSegmentsMixin(_MeetingServiceBase):
|
||||
async def add_segment(
|
||||
self,
|
||||
meeting_id: MeetingId,
|
||||
data: SegmentData,
|
||||
) -> Segment:
|
||||
"""Add a transcript segment to a meeting.
|
||||
|
||||
Args:
|
||||
meeting_id: Meeting identifier.
|
||||
data: Segment data including text, timing, and metadata.
|
||||
|
||||
Returns:
|
||||
Added segment.
|
||||
"""
|
||||
segment = Segment(
|
||||
segment_id=data.segment_id,
|
||||
text=data.text,
|
||||
start_time=data.start_time,
|
||||
end_time=data.end_time,
|
||||
meeting_id=meeting_id,
|
||||
words=data.words,
|
||||
language=data.language,
|
||||
language_confidence=data.language_confidence,
|
||||
avg_logprob=data.avg_logprob,
|
||||
no_speech_prob=data.no_speech_prob,
|
||||
)
|
||||
"""Add a transcript segment to a meeting."""
|
||||
segment = data.to_segment(meeting_id)
|
||||
|
||||
async with self._uow:
|
||||
saved = await self._uow.segments.add(meeting_id, segment)
|
||||
@@ -254,9 +181,9 @@ class MeetingService:
|
||||
logger.debug(
|
||||
"Added segment",
|
||||
meeting_id=str(meeting_id),
|
||||
segment_id=data.segment_id,
|
||||
start=data.start_time,
|
||||
end=data.end_time,
|
||||
segment_id=segment.segment_id,
|
||||
start=segment.start_time,
|
||||
end=segment.end_time,
|
||||
)
|
||||
return saved
|
||||
|
||||
@@ -269,19 +196,9 @@ class MeetingService:
|
||||
return saved
|
||||
|
||||
async def get_segments(
|
||||
self,
|
||||
meeting_id: MeetingId,
|
||||
include_words: bool = True,
|
||||
self, meeting_id: MeetingId, include_words: bool = True
|
||||
) -> Sequence[Segment]:
|
||||
"""Get all segments for a meeting.
|
||||
|
||||
Args:
|
||||
meeting_id: Meeting identifier.
|
||||
include_words: Include word-level timing.
|
||||
|
||||
Returns:
|
||||
List of segments ordered by segment_id.
|
||||
"""
|
||||
"""Get all segments for a meeting."""
|
||||
async with self._uow:
|
||||
return await self._uow.segments.get_by_meeting(
|
||||
meeting_id,
|
||||
@@ -289,21 +206,9 @@ class MeetingService:
|
||||
)
|
||||
|
||||
async def search_segments(
|
||||
self,
|
||||
query_embedding: list[float],
|
||||
limit: int = 10,
|
||||
meeting_id: MeetingId | None = None,
|
||||
self, query_embedding: list[float], limit: int = 10, meeting_id: MeetingId | None = None
|
||||
) -> Sequence[tuple[Segment, float]]:
|
||||
"""Search segments by semantic similarity.
|
||||
|
||||
Args:
|
||||
query_embedding: Query embedding vector.
|
||||
limit: Maximum number of results.
|
||||
meeting_id: Optional meeting to restrict search to.
|
||||
|
||||
Returns:
|
||||
List of (segment, similarity_score) tuples.
|
||||
"""
|
||||
"""Search segments by semantic similarity."""
|
||||
async with self._uow:
|
||||
return await self._uow.segments.search_semantic(
|
||||
query_embedding=query_embedding,
|
||||
@@ -311,26 +216,19 @@ class MeetingService:
|
||||
meeting_id=meeting_id,
|
||||
)
|
||||
|
||||
|
||||
class _MeetingServiceSummariesMixin(_MeetingServiceBase):
|
||||
async def save_summary(
|
||||
self,
|
||||
meeting_id: MeetingId,
|
||||
executive_summary: str,
|
||||
**kwargs: Unpack[_SummarySaveKwargs],
|
||||
) -> Summary:
|
||||
"""Save or update a meeting summary.
|
||||
|
||||
Args:
|
||||
meeting_id: Meeting identifier.
|
||||
executive_summary: Executive summary text.
|
||||
**kwargs: Optional summary fields (key_points, action_items, provider_name, model_name).
|
||||
|
||||
Returns:
|
||||
Saved summary.
|
||||
"""
|
||||
key_points = kwargs.get("key_points") or []
|
||||
action_items = kwargs.get("action_items") or []
|
||||
provider_name = kwargs.get("provider_name", "")
|
||||
model_name = kwargs.get("model_name", "")
|
||||
"""Save or update a meeting summary."""
|
||||
key_points: list[KeyPoint] = kwargs.get(KEY_POINTS) or []
|
||||
action_items: list[ActionItem] = kwargs.get(ACTION_ITEMS) or []
|
||||
provider_name = kwargs.get(PROVIDER_NAME, "")
|
||||
model_name = kwargs.get(MODEL_NAME, "")
|
||||
summary = Summary(
|
||||
meeting_id=meeting_id,
|
||||
executive_summary=executive_summary,
|
||||
@@ -344,7 +242,12 @@ class MeetingService:
|
||||
async with self._uow:
|
||||
saved = await self._uow.summaries.save(summary)
|
||||
await self._uow.commit()
|
||||
logger.info("Saved summary", meeting_id=str(meeting_id), provider=provider_name or "unknown", model=model_name or "unknown")
|
||||
logger.info(
|
||||
"Saved summary",
|
||||
meeting_id=str(meeting_id),
|
||||
provider=provider_name or UNKNOWN,
|
||||
model=model_name or UNKNOWN,
|
||||
)
|
||||
return saved
|
||||
|
||||
async def fetch_meeting_summary(self, meeting_id: MeetingId) -> Summary | None:
|
||||
@@ -354,31 +257,28 @@ class MeetingService:
|
||||
if summary is None:
|
||||
logger.debug("Summary not found", meeting_id=str(meeting_id))
|
||||
else:
|
||||
logger.debug("Retrieved summary", meeting_id=str(meeting_id), provider=summary.provider_name or "unknown")
|
||||
logger.debug(
|
||||
"Retrieved summary",
|
||||
meeting_id=str(meeting_id),
|
||||
provider=summary.provider_name or UNKNOWN,
|
||||
)
|
||||
return summary
|
||||
|
||||
# Annotation methods
|
||||
|
||||
class _MeetingServiceAnnotationsMixin(_MeetingServiceBase):
|
||||
async def add_annotation(
|
||||
self,
|
||||
**kwargs: Unpack[_AnnotationCreateKwargs],
|
||||
) -> Annotation:
|
||||
"""Add an annotation to a meeting.
|
||||
|
||||
Args:
|
||||
**kwargs: Annotation fields.
|
||||
|
||||
Returns:
|
||||
Added annotation.
|
||||
"""
|
||||
"""Add an annotation to a meeting."""
|
||||
from uuid import uuid4
|
||||
|
||||
meeting_id = kwargs["meeting_id"]
|
||||
annotation_type = kwargs["annotation_type"]
|
||||
annotation_type: AnnotationType = kwargs[ANNOTATION_TYPE]
|
||||
text = kwargs["text"]
|
||||
start_time = kwargs["start_time"]
|
||||
end_time = kwargs["end_time"]
|
||||
segment_ids = kwargs.get("segment_ids") or []
|
||||
start_time: float = kwargs[START_TIME]
|
||||
end_time: float = kwargs[END_TIME]
|
||||
segment_ids: list[int] = kwargs.get(SEGMENT_IDS) or []
|
||||
annotation = Annotation(
|
||||
id=AnnotationId(uuid4()),
|
||||
meeting_id=meeting_id,
|
||||
@@ -403,14 +303,7 @@ class MeetingService:
|
||||
return saved
|
||||
|
||||
async def get_annotation(self, annotation_id: AnnotationId) -> Annotation | None:
|
||||
"""Get an annotation by ID.
|
||||
|
||||
Args:
|
||||
annotation_id: Annotation identifier.
|
||||
|
||||
Returns:
|
||||
Annotation if found, None otherwise.
|
||||
"""
|
||||
"""Get an annotation by ID."""
|
||||
async with self._uow:
|
||||
return await self._uow.annotations.get(annotation_id)
|
||||
|
||||
@@ -418,48 +311,19 @@ class MeetingService:
|
||||
self,
|
||||
meeting_id: MeetingId,
|
||||
) -> SequenceType[Annotation]:
|
||||
"""Get all annotations for a meeting.
|
||||
|
||||
Args:
|
||||
meeting_id: Meeting identifier.
|
||||
|
||||
Returns:
|
||||
List of annotations ordered by start_time.
|
||||
"""
|
||||
"""Get all annotations for a meeting."""
|
||||
async with self._uow:
|
||||
return await self._uow.annotations.get_by_meeting(meeting_id)
|
||||
|
||||
async def get_annotations_in_range(
|
||||
self,
|
||||
meeting_id: MeetingId,
|
||||
start_time: float,
|
||||
end_time: float,
|
||||
self, meeting_id: MeetingId, start_time: float, end_time: float
|
||||
) -> SequenceType[Annotation]:
|
||||
"""Get annotations within a time range.
|
||||
|
||||
Args:
|
||||
meeting_id: Meeting identifier.
|
||||
start_time: Start of time range in seconds.
|
||||
end_time: End of time range in seconds.
|
||||
|
||||
Returns:
|
||||
List of annotations overlapping the time range.
|
||||
"""
|
||||
"""Get annotations within a time range."""
|
||||
async with self._uow:
|
||||
return await self._uow.annotations.get_by_time_range(meeting_id, start_time, end_time)
|
||||
|
||||
async def update_annotation(self, annotation: Annotation) -> Annotation:
|
||||
"""Update an existing annotation.
|
||||
|
||||
Args:
|
||||
annotation: Annotation with updated fields.
|
||||
|
||||
Returns:
|
||||
Updated annotation.
|
||||
|
||||
Raises:
|
||||
ValueError: If annotation does not exist.
|
||||
"""
|
||||
"""Update an existing annotation."""
|
||||
async with self._uow:
|
||||
updated = await self._uow.annotations.update(annotation)
|
||||
await self._uow.commit()
|
||||
@@ -472,14 +336,7 @@ class MeetingService:
|
||||
return updated
|
||||
|
||||
async def delete_annotation(self, annotation_id: AnnotationId) -> bool:
|
||||
"""Delete an annotation.
|
||||
|
||||
Args:
|
||||
annotation_id: Annotation identifier.
|
||||
|
||||
Returns:
|
||||
True if deleted, False if not found.
|
||||
"""
|
||||
"""Delete an annotation."""
|
||||
async with self._uow:
|
||||
success = await self._uow.annotations.delete(annotation_id)
|
||||
if success:
|
||||
@@ -491,3 +348,25 @@ class MeetingService:
|
||||
annotation_id=str(annotation_id),
|
||||
)
|
||||
return success
|
||||
|
||||
|
||||
class MeetingService(
|
||||
_MeetingServiceCrudMixin,
|
||||
_MeetingServiceStateMixin,
|
||||
_MeetingServiceSegmentsMixin,
|
||||
_MeetingServiceSummariesMixin,
|
||||
_MeetingServiceAnnotationsMixin,
|
||||
):
|
||||
"""Application service for meeting operations.
|
||||
|
||||
Provides use cases for managing meetings, segments, and summaries.
|
||||
All methods are async and expect a UnitOfWork to be provided.
|
||||
"""
|
||||
|
||||
def __init__(self, uow: UnitOfWork) -> None:
|
||||
"""Initialize the meeting service.
|
||||
|
||||
Args:
|
||||
uow: Unit of work for persistence.
|
||||
"""
|
||||
self._uow = uow
|
||||
|
||||
@@ -129,23 +129,44 @@ class NerService:
|
||||
) -> ExtractionResult | list[tuple[int, str]]:
|
||||
"""Check cache and return cached result or segments for extraction."""
|
||||
async with self._uow_factory() as uow:
|
||||
if not force_refresh:
|
||||
cached = await uow.entities.get_by_meeting(meeting_id)
|
||||
if cached:
|
||||
logger.debug("Returning %d cached entities for meeting %s", len(cached), meeting_id)
|
||||
return ExtractionResult(entities=cached, cached=True, total_count=len(cached))
|
||||
# Check cache first (unless force_refresh)
|
||||
if cached_result := await self._try_get_cached(uow, meeting_id, force_refresh):
|
||||
return cached_result
|
||||
|
||||
# Validate meeting exists
|
||||
meeting = await uow.meetings.get(meeting_id)
|
||||
if not meeting:
|
||||
raise ValueError(f"{ERROR_MSG_MEETING_PREFIX}{meeting_id} not found")
|
||||
|
||||
# Load segments separately (not eagerly loaded on meeting)
|
||||
segments = await uow.segments.get_by_meeting(meeting_id)
|
||||
if not segments:
|
||||
logger.debug("Meeting %s has no segments", meeting_id)
|
||||
return ExtractionResult(entities=[], cached=False, total_count=0)
|
||||
# Load segments (not eagerly loaded on meeting)
|
||||
return await self._load_segments_or_empty(uow, meeting_id)
|
||||
|
||||
return [(s.segment_id, s.text) for s in segments]
|
||||
async def _try_get_cached(
|
||||
self,
|
||||
uow: SqlAlchemyUnitOfWork,
|
||||
meeting_id: MeetingId,
|
||||
force_refresh: bool,
|
||||
) -> ExtractionResult | None:
|
||||
"""Return cached result if available and not forcing refresh."""
|
||||
if force_refresh:
|
||||
return None
|
||||
cached = await uow.entities.get_by_meeting(meeting_id)
|
||||
if not cached:
|
||||
return None
|
||||
logger.debug("Returning %d cached entities for meeting %s", len(cached), meeting_id)
|
||||
return ExtractionResult(entities=cached, cached=True, total_count=len(cached))
|
||||
|
||||
async def _load_segments_or_empty(
|
||||
self,
|
||||
uow: SqlAlchemyUnitOfWork,
|
||||
meeting_id: MeetingId,
|
||||
) -> ExtractionResult | list[tuple[int, str]]:
|
||||
"""Load segments for extraction or return empty result."""
|
||||
segments = await uow.segments.get_by_meeting(meeting_id)
|
||||
if not segments:
|
||||
logger.debug("Meeting %s has no segments", meeting_id)
|
||||
return ExtractionResult(entities=[], cached=False, total_count=0)
|
||||
return [(s.segment_id, s.text) for s in segments]
|
||||
|
||||
async def _persist_entities(
|
||||
self,
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import UUID
|
||||
|
||||
from noteflow.config.constants import ERROR_MSG_PROJECT_PREFIX, ERROR_MSG_WORKSPACE_PREFIX
|
||||
@@ -9,6 +10,9 @@ from noteflow.domain.entities.project import Project
|
||||
|
||||
from ._types import ProjectActiveRepositoryProvider
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from noteflow.domain.identity import Workspace
|
||||
|
||||
ACTIVE_PROJECT_METADATA_KEY = "active_project_id"
|
||||
|
||||
|
||||
@@ -84,6 +88,24 @@ class ActiveProjectMixin:
|
||||
Raises:
|
||||
ValueError: If workspace does not exist.
|
||||
"""
|
||||
self._require_workspace_and_project_support(uow)
|
||||
workspace = await self._require_workspace(uow, workspace_id)
|
||||
|
||||
active_project_id = self._parse_active_project_id(workspace.metadata)
|
||||
active_project = await self._resolve_active_project(
|
||||
uow, workspace_id, active_project_id
|
||||
)
|
||||
|
||||
# Fall back to default if no valid active project
|
||||
if active_project is None:
|
||||
active_project = await uow.projects.get_default_for_workspace(workspace_id)
|
||||
active_project_id = None
|
||||
|
||||
return active_project_id, active_project
|
||||
|
||||
@staticmethod
|
||||
def _require_workspace_and_project_support(uow: ProjectActiveRepositoryProvider) -> None:
|
||||
"""Raise if workspaces or projects not supported."""
|
||||
if not uow.supports_workspaces:
|
||||
msg = "Workspaces not supported in this unit of work"
|
||||
raise NotImplementedError(msg)
|
||||
@@ -91,28 +113,39 @@ class ActiveProjectMixin:
|
||||
msg = "Projects not supported in this unit of work"
|
||||
raise NotImplementedError(msg)
|
||||
|
||||
@staticmethod
|
||||
async def _require_workspace(
|
||||
uow: ProjectActiveRepositoryProvider,
|
||||
workspace_id: UUID,
|
||||
) -> Workspace:
|
||||
"""Load and return workspace or raise ValueError."""
|
||||
workspace = await uow.workspaces.get(workspace_id)
|
||||
if workspace is None:
|
||||
msg = f"{ERROR_MSG_WORKSPACE_PREFIX}{workspace_id} not found"
|
||||
raise ValueError(msg)
|
||||
return workspace
|
||||
|
||||
active_project_id: UUID | None = None
|
||||
active_project: Project | None = None
|
||||
@staticmethod
|
||||
def _parse_active_project_id(metadata: dict[str, str]) -> UUID | None:
|
||||
"""Parse active project ID from workspace metadata."""
|
||||
raw_id = metadata.get(ACTIVE_PROJECT_METADATA_KEY)
|
||||
if not raw_id:
|
||||
return None
|
||||
try:
|
||||
return UUID(str(raw_id))
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
if raw_id := workspace.metadata.get(ACTIVE_PROJECT_METADATA_KEY):
|
||||
try:
|
||||
active_project_id = UUID(str(raw_id))
|
||||
except ValueError:
|
||||
active_project_id = None
|
||||
|
||||
if active_project_id is not None:
|
||||
candidate = await uow.projects.get(active_project_id)
|
||||
if candidate and candidate.workspace_id == workspace_id and not candidate.is_archived:
|
||||
active_project = candidate
|
||||
else:
|
||||
active_project_id = None
|
||||
|
||||
if active_project is None:
|
||||
active_project = await uow.projects.get_default_for_workspace(workspace_id)
|
||||
|
||||
return active_project_id, active_project
|
||||
@staticmethod
|
||||
async def _resolve_active_project(
|
||||
uow: ProjectActiveRepositoryProvider,
|
||||
workspace_id: UUID,
|
||||
project_id: UUID | None,
|
||||
) -> Project | None:
|
||||
"""Resolve active project, validating it belongs to workspace and is not archived."""
|
||||
if project_id is None:
|
||||
return None
|
||||
candidate = await uow.projects.get(project_id)
|
||||
if candidate and candidate.workspace_id == workspace_id and not candidate.is_archived:
|
||||
return candidate
|
||||
return None
|
||||
|
||||
@@ -101,11 +101,8 @@ class ProjectMembershipMixin:
|
||||
return removed
|
||||
|
||||
async def list_project_members(
|
||||
self,
|
||||
uow: ProjectMembershipRepositoryProvider,
|
||||
project_id: UUID,
|
||||
limit: int = 100,
|
||||
offset: int = 0,
|
||||
self, uow: ProjectMembershipRepositoryProvider, project_id: UUID,
|
||||
limit: int = 100, offset: int = 0
|
||||
) -> Sequence[ProjectMembership]:
|
||||
"""List members of a project.
|
||||
|
||||
|
||||
@@ -12,6 +12,29 @@ from noteflow.domain.entities.project import (
|
||||
from noteflow.domain.identity import WorkspaceSettings
|
||||
|
||||
|
||||
|
||||
class _SettingsAccessor:
|
||||
"""Safe accessor for workspace/project settings attributes."""
|
||||
|
||||
def __init__(self, settings: WorkspaceSettings | ProjectSettings | None) -> None:
|
||||
self._settings = settings
|
||||
|
||||
@property
|
||||
def export_rules(self) -> ExportRules | None:
|
||||
return self._settings.export_rules if self._settings else None
|
||||
|
||||
@property
|
||||
def trigger_rules(self) -> TriggerRules | None:
|
||||
return self._settings.trigger_rules if self._settings else None
|
||||
|
||||
@property
|
||||
def rag_enabled(self) -> bool | None:
|
||||
return self._settings.rag_enabled if self._settings else None
|
||||
|
||||
@property
|
||||
def default_summarization_template(self) -> str | None:
|
||||
return self._settings.default_summarization_template if self._settings else None
|
||||
|
||||
class RuleInheritanceMixin:
|
||||
"""Compute effective rules from system, workspace, and project settings."""
|
||||
|
||||
@@ -22,7 +45,7 @@ class RuleInheritanceMixin:
|
||||
) -> EffectiveRules:
|
||||
"""Compute effective rules by merging inheritance chain.
|
||||
|
||||
Resolution order: system defaults → workspace → project
|
||||
Resolution order: system defaults -> workspace -> project
|
||||
Coalesce logic:
|
||||
- None = inherit from parent
|
||||
- [] (empty list) = explicitly cleared (override with empty)
|
||||
@@ -35,37 +58,24 @@ class RuleInheritanceMixin:
|
||||
Returns:
|
||||
Fully resolved rules with no None values.
|
||||
"""
|
||||
# Start with system defaults
|
||||
export = self._merge_export_rules(
|
||||
SYSTEM_DEFAULTS.export,
|
||||
workspace_settings.export_rules if workspace_settings else None,
|
||||
project_settings.export_rules if project_settings else None,
|
||||
)
|
||||
|
||||
trigger = self._merge_trigger_rules(
|
||||
SYSTEM_DEFAULTS.trigger,
|
||||
workspace_settings.trigger_rules if workspace_settings else None,
|
||||
project_settings.trigger_rules if project_settings else None,
|
||||
)
|
||||
|
||||
# Merge scalar settings
|
||||
rag_enabled = self._coalesce(
|
||||
SYSTEM_DEFAULTS.rag_enabled,
|
||||
workspace_settings.rag_enabled if workspace_settings else None,
|
||||
project_settings.rag_enabled if project_settings else None,
|
||||
)
|
||||
|
||||
default_template = self._coalesce(
|
||||
SYSTEM_DEFAULTS.default_summarization_template,
|
||||
workspace_settings.default_summarization_template if workspace_settings else None,
|
||||
project_settings.default_summarization_template if project_settings else None,
|
||||
)
|
||||
ws = _SettingsAccessor(workspace_settings)
|
||||
proj = _SettingsAccessor(project_settings)
|
||||
|
||||
return EffectiveRules(
|
||||
export=export,
|
||||
trigger=trigger,
|
||||
rag_enabled=rag_enabled,
|
||||
default_summarization_template=default_template,
|
||||
export=self._merge_export_rules(
|
||||
SYSTEM_DEFAULTS.export, ws.export_rules, proj.export_rules
|
||||
),
|
||||
trigger=self._merge_trigger_rules(
|
||||
SYSTEM_DEFAULTS.trigger, ws.trigger_rules, proj.trigger_rules
|
||||
),
|
||||
rag_enabled=self._coalesce(
|
||||
SYSTEM_DEFAULTS.rag_enabled, ws.rag_enabled, proj.rag_enabled
|
||||
),
|
||||
default_summarization_template=self._coalesce(
|
||||
SYSTEM_DEFAULTS.default_summarization_template,
|
||||
ws.default_summarization_template,
|
||||
proj.default_summarization_template,
|
||||
),
|
||||
)
|
||||
|
||||
def _merge_export_rules(
|
||||
|
||||
17
src/noteflow/application/services/protocols.py
Normal file
17
src/noteflow/application/services/protocols.py
Normal file
@@ -0,0 +1,17 @@
|
||||
"""Protocol definitions for application services."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Protocol
|
||||
|
||||
from noteflow.domain.ports.async_context import AsyncContextManager
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from noteflow.domain.ports.repositories import MeetingRepository, SegmentRepository
|
||||
|
||||
|
||||
class ExportRepositoryProvider(AsyncContextManager, Protocol):
|
||||
"""Repository provider protocol for export operations."""
|
||||
|
||||
meetings: MeetingRepository
|
||||
segments: SegmentRepository
|
||||
@@ -13,6 +13,8 @@ from typing import TYPE_CHECKING, ClassVar
|
||||
|
||||
import sqlalchemy.exc
|
||||
|
||||
from noteflow.domain.constants.fields import ASSET_PATH, UNKNOWN
|
||||
from noteflow.infrastructure.audio.constants import ENCRYPTED_AUDIO_FILENAME
|
||||
from noteflow.domain.value_objects import MeetingState
|
||||
from noteflow.infrastructure.logging import get_logger, log_state_transition
|
||||
from noteflow.infrastructure.persistence.constants import MAX_MEETINGS_LIMIT
|
||||
@@ -104,19 +106,27 @@ class RecoveryService:
|
||||
error_message="Audio validation skipped (no meetings_dir configured)",
|
||||
)
|
||||
|
||||
# Prefer explicit asset_path; fall back to metadata for backward compatibility
|
||||
meeting_dir = self._resolve_meeting_dir(meeting, self._meetings_dir)
|
||||
manifest_exists = (meeting_dir / "manifest.json").exists()
|
||||
audio_exists = (meeting_dir / ENCRYPTED_AUDIO_FILENAME).exists()
|
||||
|
||||
return self._build_validation_result(manifest_exists, audio_exists)
|
||||
|
||||
@staticmethod
|
||||
def _resolve_meeting_dir(meeting: Meeting, base_dir: Path) -> Path:
|
||||
"""Resolve the directory path for a meeting's audio files."""
|
||||
default_path = str(meeting.id)
|
||||
asset_path = meeting.asset_path or default_path
|
||||
if asset_path == default_path:
|
||||
asset_path = meeting.metadata.get("asset_path") or asset_path
|
||||
meeting_dir = self._meetings_dir / asset_path
|
||||
|
||||
manifest_path = meeting_dir / "manifest.json"
|
||||
audio_path = meeting_dir / "audio.enc"
|
||||
|
||||
manifest_exists = manifest_path.exists()
|
||||
audio_exists = audio_path.exists()
|
||||
asset_path = meeting.metadata.get(ASSET_PATH) or asset_path
|
||||
return base_dir / asset_path
|
||||
|
||||
@staticmethod
|
||||
def _build_validation_result(
|
||||
manifest_exists: bool,
|
||||
audio_exists: bool,
|
||||
) -> AudioValidationResult:
|
||||
"""Build validation result from file existence checks."""
|
||||
if not manifest_exists and not audio_exists:
|
||||
return AudioValidationResult(
|
||||
is_valid=False,
|
||||
@@ -138,7 +148,7 @@ class RecoveryService:
|
||||
is_valid=False,
|
||||
manifest_exists=True,
|
||||
audio_exists=False,
|
||||
error_message="audio.enc not found",
|
||||
error_message=f"{ENCRYPTED_AUDIO_FILENAME} not found",
|
||||
)
|
||||
|
||||
return AudioValidationResult(
|
||||
@@ -158,7 +168,6 @@ class RecoveryService:
|
||||
Tuple of (recovered meetings, audio validation failure count).
|
||||
"""
|
||||
async with self._uow:
|
||||
# Find all meetings in active states
|
||||
meetings, total = await self._uow.meetings.list_all(
|
||||
states=self.ACTIVE_STATES,
|
||||
limit=MAX_MEETINGS_LIMIT,
|
||||
@@ -173,24 +182,9 @@ class RecoveryService:
|
||||
total,
|
||||
)
|
||||
|
||||
recovered: list[Meeting] = []
|
||||
audio_failures = 0
|
||||
recovery_time = datetime.now(UTC).isoformat()
|
||||
|
||||
for meeting in meetings:
|
||||
validation = self._recover_meeting(meeting, recovery_time)
|
||||
if not validation.is_valid:
|
||||
audio_failures += 1
|
||||
await self._uow.meetings.update(meeting)
|
||||
recovered.append(meeting)
|
||||
logger.info(
|
||||
"Recovered crashed meeting: id=%s, previous_state=%s, audio_valid=%s",
|
||||
meeting.id,
|
||||
validation.previous_state,
|
||||
validation.is_valid,
|
||||
)
|
||||
|
||||
recovered, audio_failures = await self._process_crashed_meetings(list(meetings))
|
||||
await self._uow.commit()
|
||||
|
||||
logger.info(
|
||||
"Crash recovery complete: %d meetings recovered, %d audio failures",
|
||||
len(recovered),
|
||||
@@ -198,6 +192,35 @@ class RecoveryService:
|
||||
)
|
||||
return recovered, audio_failures
|
||||
|
||||
async def _process_crashed_meetings(
|
||||
self,
|
||||
meetings: list[Meeting],
|
||||
) -> tuple[list[Meeting], int]:
|
||||
"""Process and recover all crashed meetings."""
|
||||
recovered: list[Meeting] = []
|
||||
audio_failures = 0
|
||||
recovery_time = datetime.now(UTC).isoformat()
|
||||
|
||||
for meeting in meetings:
|
||||
validation = self._recover_meeting(meeting, recovery_time)
|
||||
if not validation.is_valid:
|
||||
audio_failures += 1
|
||||
await self._uow.meetings.update(meeting)
|
||||
recovered.append(meeting)
|
||||
self._log_meeting_recovery(meeting, validation)
|
||||
|
||||
return recovered, audio_failures
|
||||
|
||||
@staticmethod
|
||||
def _log_meeting_recovery(meeting: Meeting, validation: _RecoveryValidation) -> None:
|
||||
"""Log successful meeting recovery."""
|
||||
logger.info(
|
||||
"Recovered crashed meeting: id=%s, previous_state=%s, audio_valid=%s",
|
||||
meeting.id,
|
||||
validation.previous_state,
|
||||
validation.is_valid,
|
||||
)
|
||||
|
||||
def _recover_meeting(
|
||||
self, meeting: Meeting, recovery_time: str
|
||||
) -> _RecoveryValidation:
|
||||
@@ -212,25 +235,41 @@ class RecoveryService:
|
||||
reason="crash_recovery",
|
||||
)
|
||||
|
||||
meeting.metadata["crash_recovered"] = "true"
|
||||
meeting.metadata["crash_recovery_time"] = recovery_time
|
||||
meeting.metadata["crash_previous_state"] = previous_state.name
|
||||
|
||||
self._set_recovery_metadata(meeting, recovery_time, previous_state)
|
||||
validation = self.validate_meeting_audio(meeting)
|
||||
meeting.metadata["audio_valid"] = str(validation.is_valid).lower()
|
||||
if not validation.is_valid:
|
||||
meeting.metadata["audio_error"] = validation.error_message or "unknown"
|
||||
logger.warning(
|
||||
"Audio validation failed for meeting %s: %s",
|
||||
meeting.id,
|
||||
validation.error_message,
|
||||
)
|
||||
self._set_validation_metadata(meeting, validation)
|
||||
|
||||
return _RecoveryValidation(
|
||||
is_valid=validation.is_valid,
|
||||
previous_state=previous_state,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _set_recovery_metadata(
|
||||
meeting: Meeting,
|
||||
recovery_time: str,
|
||||
previous_state: MeetingState,
|
||||
) -> None:
|
||||
"""Set crash recovery metadata on meeting."""
|
||||
meeting.metadata["crash_recovered"] = "true"
|
||||
meeting.metadata["crash_recovery_time"] = recovery_time
|
||||
meeting.metadata["crash_previous_state"] = previous_state.name
|
||||
|
||||
@staticmethod
|
||||
def _set_validation_metadata(
|
||||
meeting: Meeting,
|
||||
validation: AudioValidationResult,
|
||||
) -> None:
|
||||
"""Set audio validation metadata on meeting."""
|
||||
meeting.metadata["audio_valid"] = str(validation.is_valid).lower()
|
||||
if not validation.is_valid:
|
||||
meeting.metadata["audio_error"] = validation.error_message or UNKNOWN
|
||||
logger.warning(
|
||||
"Audio validation failed for meeting %s: %s",
|
||||
meeting.id,
|
||||
validation.error_message,
|
||||
)
|
||||
|
||||
|
||||
async def count_crashed_meetings(self) -> int:
|
||||
"""Count meetings currently in crash states.
|
||||
@@ -254,29 +293,40 @@ class RecoveryService:
|
||||
Number of jobs marked as failed.
|
||||
"""
|
||||
try:
|
||||
async with self._uow:
|
||||
failed_count = await self._uow.diarization_jobs.mark_running_as_failed()
|
||||
await self._uow.commit()
|
||||
|
||||
if failed_count > 0:
|
||||
logger.warning(
|
||||
"Marked %d diarization jobs as failed during crash recovery",
|
||||
failed_count,
|
||||
)
|
||||
else:
|
||||
logger.info("No crashed diarization jobs found during recovery")
|
||||
|
||||
return failed_count
|
||||
return await self._mark_diarization_jobs_failed()
|
||||
except sqlalchemy.exc.ProgrammingError as e:
|
||||
# Handle case where diarization_jobs table doesn't exist yet
|
||||
# (e.g., schema.sql partially applied, migrations not run)
|
||||
if "does not exist" in str(e) or "UndefinedTableError" in str(e):
|
||||
logger.debug(
|
||||
"Diarization jobs table not found during recovery, skipping: %s",
|
||||
e,
|
||||
)
|
||||
return 0
|
||||
raise
|
||||
return self._handle_missing_diarization_table(e)
|
||||
|
||||
async def _mark_diarization_jobs_failed(self) -> int:
|
||||
"""Mark running diarization jobs as failed and log result."""
|
||||
async with self._uow:
|
||||
failed_count = await self._uow.diarization_jobs.mark_running_as_failed()
|
||||
await self._uow.commit()
|
||||
|
||||
self._log_diarization_recovery(failed_count)
|
||||
return failed_count
|
||||
|
||||
@staticmethod
|
||||
def _log_diarization_recovery(failed_count: int) -> None:
|
||||
"""Log diarization job recovery result."""
|
||||
if failed_count > 0:
|
||||
logger.warning(
|
||||
"Marked %d diarization jobs as failed during crash recovery",
|
||||
failed_count,
|
||||
)
|
||||
else:
|
||||
logger.info("No crashed diarization jobs found during recovery")
|
||||
|
||||
@staticmethod
|
||||
def _handle_missing_diarization_table(error: sqlalchemy.exc.ProgrammingError) -> int:
|
||||
"""Handle case where diarization_jobs table doesn't exist yet."""
|
||||
if "does not exist" in str(error) or "UndefinedTableError" in str(error):
|
||||
logger.debug(
|
||||
"Diarization jobs table not found during recovery, skipping: %s",
|
||||
error,
|
||||
)
|
||||
return 0
|
||||
raise error
|
||||
|
||||
async def recover_all(self) -> RecoveryResult:
|
||||
"""Run all crash recovery operations.
|
||||
|
||||
@@ -55,11 +55,12 @@ class RetentionService:
|
||||
self._retention_days = retention_days
|
||||
self._enabled = enabled
|
||||
|
||||
@property
|
||||
def is_enabled(self) -> bool:
|
||||
def enabled_state(self) -> bool:
|
||||
"""Check if retention is enabled."""
|
||||
return self._enabled
|
||||
|
||||
is_enabled = property(enabled_state)
|
||||
|
||||
@property
|
||||
def retention_days(self) -> int:
|
||||
"""Get configured retention days."""
|
||||
@@ -91,59 +92,81 @@ class RetentionService:
|
||||
"""
|
||||
if not self._enabled and not dry_run:
|
||||
logger.info("Retention disabled, skipping cleanup")
|
||||
return RetentionReport(
|
||||
meetings_checked=0,
|
||||
meetings_deleted=0,
|
||||
errors=(),
|
||||
)
|
||||
return RetentionReport(meetings_checked=0, meetings_deleted=0, errors=())
|
||||
|
||||
cutoff = self.cutoff_date
|
||||
logger.info(
|
||||
"Running retention cleanup (dry_run=%s, cutoff=%s)",
|
||||
dry_run,
|
||||
cutoff.isoformat(),
|
||||
self.cutoff_date.isoformat(),
|
||||
)
|
||||
|
||||
expired = await self.find_expired_meetings()
|
||||
deleted = 0
|
||||
errors: list[str] = []
|
||||
|
||||
for meeting in expired:
|
||||
if dry_run:
|
||||
logger.info(
|
||||
"Would delete expired meeting: id=%s, ended_at=%s",
|
||||
meeting.id,
|
||||
meeting.ended_at,
|
||||
)
|
||||
continue
|
||||
if dry_run:
|
||||
self._log_dry_run_meetings(expired)
|
||||
return RetentionReport(meetings_checked=len(expired), meetings_deleted=0, errors=())
|
||||
|
||||
try:
|
||||
# Import here to avoid circular imports
|
||||
from noteflow.application.services import MeetingService
|
||||
|
||||
# Use a fresh UnitOfWork instance for each deletion
|
||||
meeting_svc = MeetingService(self._uow_factory())
|
||||
success = await meeting_svc.delete_meeting(meeting.id)
|
||||
if success:
|
||||
deleted += 1
|
||||
logger.info(
|
||||
"Deleted expired meeting: id=%s",
|
||||
meeting.id,
|
||||
)
|
||||
except (OSError, RuntimeError) as e:
|
||||
error_msg = f"{meeting.id}: {e}"
|
||||
errors.append(error_msg)
|
||||
logger.warning("Failed to delete meeting %s: %s", meeting.id, e)
|
||||
|
||||
logger.info(
|
||||
"Retention cleanup complete: checked=%d, deleted=%d, errors=%d",
|
||||
len(expired),
|
||||
deleted,
|
||||
len(errors),
|
||||
)
|
||||
deleted, errors = await self._delete_expired_meetings(expired)
|
||||
self._log_cleanup_complete(len(expired), deleted, len(errors))
|
||||
|
||||
return RetentionReport(
|
||||
meetings_checked=len(expired),
|
||||
meetings_deleted=deleted,
|
||||
errors=tuple(errors),
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _log_dry_run_meetings(meetings: list[Meeting]) -> None:
|
||||
"""Log meetings that would be deleted in dry run mode."""
|
||||
for meeting in meetings:
|
||||
logger.info(
|
||||
"Would delete expired meeting: id=%s, ended_at=%s",
|
||||
meeting.id,
|
||||
meeting.ended_at,
|
||||
)
|
||||
|
||||
async def _delete_expired_meetings(
|
||||
self,
|
||||
meetings: list[Meeting],
|
||||
) -> tuple[int, list[str]]:
|
||||
"""Delete expired meetings and collect errors."""
|
||||
from noteflow.application.services import MeetingService
|
||||
|
||||
deleted = 0
|
||||
errors: list[str] = []
|
||||
|
||||
for meeting in meetings:
|
||||
result = await self._try_delete_meeting(meeting, MeetingService)
|
||||
if result is None:
|
||||
deleted += 1
|
||||
else:
|
||||
errors.append(result)
|
||||
|
||||
return deleted, errors
|
||||
|
||||
async def _try_delete_meeting(
|
||||
self,
|
||||
meeting: Meeting,
|
||||
meeting_service_cls: type,
|
||||
) -> str | None:
|
||||
"""Attempt to delete a single meeting. Returns error message or None on success."""
|
||||
try:
|
||||
meeting_svc = meeting_service_cls(self._uow_factory())
|
||||
success = await meeting_svc.delete_meeting(meeting.id)
|
||||
if success:
|
||||
logger.info("Deleted expired meeting: id=%s", meeting.id)
|
||||
return None
|
||||
return f"{meeting.id}: deletion returned False"
|
||||
except (OSError, RuntimeError) as e:
|
||||
logger.warning("Failed to delete meeting %s: %s", meeting.id, e)
|
||||
return f"{meeting.id}: {e}"
|
||||
|
||||
@staticmethod
|
||||
def _log_cleanup_complete(checked: int, deleted: int, error_count: int) -> None:
|
||||
"""Log cleanup completion summary."""
|
||||
logger.info(
|
||||
"Retention cleanup complete: checked=%d, deleted=%d, errors=%d",
|
||||
checked,
|
||||
deleted,
|
||||
error_count,
|
||||
)
|
||||
|
||||
@@ -153,17 +153,10 @@ class SummarizationService:
|
||||
Returns:
|
||||
List of available modes based on registered providers.
|
||||
"""
|
||||
available: list[SummarizationMode] = []
|
||||
for mode, provider in self.providers.items():
|
||||
if mode == SummarizationMode.CLOUD:
|
||||
if provider.is_available and self.settings.cloud_consent_granted:
|
||||
available.append(mode)
|
||||
elif provider.is_available:
|
||||
available.append(mode)
|
||||
return available
|
||||
return [mode for mode in self.providers if self.is_mode_available(mode)]
|
||||
|
||||
def is_mode_available(self, mode: SummarizationMode) -> bool:
|
||||
"""Check if a specific mode is available.
|
||||
"""Check if a specific mode is available (provider exists, available, and consent satisfied).
|
||||
|
||||
Args:
|
||||
mode: The mode to check.
|
||||
@@ -171,7 +164,12 @@ class SummarizationService:
|
||||
Returns:
|
||||
True if mode is available.
|
||||
"""
|
||||
return mode in self.get_available_modes()
|
||||
provider = self.providers.get(mode)
|
||||
if provider is None or not provider.is_available:
|
||||
return False
|
||||
if mode == SummarizationMode.CLOUD:
|
||||
return self.settings.cloud_consent_granted
|
||||
return True
|
||||
|
||||
async def grant_cloud_consent(self) -> None:
|
||||
"""Grant consent for cloud processing."""
|
||||
@@ -288,23 +286,24 @@ class SummarizationService:
|
||||
Raises:
|
||||
ProviderUnavailableError: If no provider available.
|
||||
"""
|
||||
# Check requested mode
|
||||
if mode in self.providers:
|
||||
provider = self.providers[mode]
|
||||
if mode not in self.providers:
|
||||
raise ProviderUnavailableError(f"No provider available for mode: {mode.value}")
|
||||
|
||||
# Check cloud consent
|
||||
if mode == SummarizationMode.CLOUD and not self.settings.cloud_consent_granted:
|
||||
logger.warning("Cloud mode requested but consent not granted")
|
||||
if self.settings.fallback_to_local:
|
||||
return self._get_fallback_provider(mode)
|
||||
provider = self.providers[mode]
|
||||
|
||||
# Cloud mode requires consent check
|
||||
if mode == SummarizationMode.CLOUD and not self.settings.cloud_consent_granted:
|
||||
logger.warning("Cloud mode requested but consent not granted")
|
||||
if not self.settings.fallback_to_local:
|
||||
raise ProviderUnavailableError("Cloud consent not granted")
|
||||
return self._get_fallback_provider(mode)
|
||||
|
||||
if provider.is_available:
|
||||
return provider, mode
|
||||
if provider.is_available:
|
||||
return provider, mode
|
||||
|
||||
# Provider exists but unavailable
|
||||
if self.settings.fallback_to_local and mode != SummarizationMode.MOCK:
|
||||
return self._get_fallback_provider(mode)
|
||||
# Provider exists but unavailable - try fallback
|
||||
if self.settings.fallback_to_local and mode != SummarizationMode.MOCK:
|
||||
return self._get_fallback_provider(mode)
|
||||
|
||||
raise ProviderUnavailableError(f"No provider available for mode: {mode.value}")
|
||||
|
||||
@@ -329,10 +328,9 @@ class SummarizationService:
|
||||
for fallback_mode in fallback_order:
|
||||
if fallback_mode == original_mode:
|
||||
continue
|
||||
if fallback_mode in self.providers:
|
||||
provider = self.providers[fallback_mode]
|
||||
if provider.is_available:
|
||||
return provider, fallback_mode
|
||||
provider = self.providers.get(fallback_mode)
|
||||
if provider is not None and provider.is_available:
|
||||
return provider, fallback_mode
|
||||
|
||||
raise ProviderUnavailableError("No fallback provider available")
|
||||
|
||||
|
||||
@@ -9,6 +9,7 @@ import time
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from noteflow.domain.constants.fields import ENABLED
|
||||
from noteflow.domain.triggers.entities import TriggerAction, TriggerDecision, TriggerSignal
|
||||
from noteflow.infrastructure.logging import get_logger
|
||||
|
||||
@@ -72,11 +73,12 @@ class TriggerService:
|
||||
self._last_prompt: float | None = None
|
||||
self._snoozed_until: float | None = None
|
||||
|
||||
@property
|
||||
def is_enabled(self) -> bool:
|
||||
def enabled_state(self) -> bool:
|
||||
"""Check if trigger service is enabled."""
|
||||
return self._settings.enabled
|
||||
|
||||
is_enabled = property(enabled_state)
|
||||
|
||||
@property
|
||||
def is_snoozed(self) -> bool:
|
||||
"""Check if triggers are currently snoozed."""
|
||||
@@ -196,7 +198,7 @@ class TriggerService:
|
||||
enabled: Whether triggers should be enabled.
|
||||
"""
|
||||
self._settings.enabled = enabled
|
||||
logger.info("Triggers %s", "enabled" if enabled else "disabled")
|
||||
logger.info("Triggers %s", ENABLED if enabled else "disabled")
|
||||
|
||||
def set_auto_start(self, enabled: bool) -> None:
|
||||
"""Enable or disable auto-start on high confidence.
|
||||
@@ -205,4 +207,4 @@ class TriggerService:
|
||||
enabled: Whether auto-start should be enabled.
|
||||
"""
|
||||
self._settings.auto_start_enabled = enabled
|
||||
logger.info("Auto-start %s", "enabled" if enabled else "disabled")
|
||||
logger.info("Auto-start %s", ENABLED if enabled else "disabled")
|
||||
|
||||
@@ -16,6 +16,11 @@ from noteflow.domain.webhooks import (
|
||||
WebhookPayloadDict,
|
||||
payload_to_dict,
|
||||
)
|
||||
from noteflow.domain.webhooks.constants import (
|
||||
DELIVERY_OUTCOME_FAILED,
|
||||
DELIVERY_OUTCOME_SKIPPED,
|
||||
DELIVERY_OUTCOME_SUCCEEDED,
|
||||
)
|
||||
from noteflow.infrastructure.logging import get_logger
|
||||
from noteflow.infrastructure.webhooks import WebhookExecutor
|
||||
|
||||
@@ -24,6 +29,23 @@ if TYPE_CHECKING:
|
||||
|
||||
_logger = get_logger(__name__)
|
||||
|
||||
# Log configuration for webhook delivery outcomes
|
||||
_LOG_LEVEL_INFO = 20
|
||||
_LOG_LEVEL_WARNING = 30
|
||||
_LOG_LEVEL_DEBUG = 10
|
||||
|
||||
_DELIVERY_LOG_LEVELS: dict[str, int] = {
|
||||
DELIVERY_OUTCOME_SUCCEEDED: _LOG_LEVEL_INFO,
|
||||
DELIVERY_OUTCOME_FAILED: _LOG_LEVEL_WARNING,
|
||||
DELIVERY_OUTCOME_SKIPPED: _LOG_LEVEL_DEBUG,
|
||||
}
|
||||
|
||||
_DELIVERY_LOG_TEMPLATES: dict[str, str] = {
|
||||
DELIVERY_OUTCOME_SUCCEEDED: "Webhook delivered: %s -> %s (status=%d)",
|
||||
DELIVERY_OUTCOME_FAILED: "Webhook failed: %s -> %s (error=%s)",
|
||||
DELIVERY_OUTCOME_SKIPPED: "Webhook skipped: %s -> %s (reason=%s)",
|
||||
}
|
||||
|
||||
|
||||
class WebhookService:
|
||||
"""Orchestrate webhook delivery for meeting events.
|
||||
@@ -218,30 +240,11 @@ class WebhookService:
|
||||
url: str,
|
||||
delivery: WebhookDelivery,
|
||||
) -> None:
|
||||
if delivery.succeeded:
|
||||
_logger.info(
|
||||
"Webhook delivered: %s -> %s (status=%d)",
|
||||
event_type.value,
|
||||
url,
|
||||
delivery.status_code,
|
||||
)
|
||||
return
|
||||
|
||||
if delivery.attempt_count > 0:
|
||||
_logger.warning(
|
||||
"Webhook failed: %s -> %s (error=%s)",
|
||||
event_type.value,
|
||||
url,
|
||||
delivery.error_message,
|
||||
)
|
||||
return
|
||||
|
||||
_logger.debug(
|
||||
"Webhook skipped: %s -> %s (reason=%s)",
|
||||
event_type.value,
|
||||
url,
|
||||
delivery.error_message,
|
||||
)
|
||||
"""Log webhook delivery result based on outcome."""
|
||||
outcome_type, detail = delivery.log_outcome
|
||||
level = _DELIVERY_LOG_LEVELS[outcome_type]
|
||||
template = _DELIVERY_LOG_TEMPLATES[outcome_type]
|
||||
_logger.log(level, template, event_type.value, url, detail)
|
||||
|
||||
async def close(self) -> None:
|
||||
"""Clean up resources."""
|
||||
|
||||
@@ -6,35 +6,85 @@ Usage:
|
||||
"""
|
||||
|
||||
import sys
|
||||
from collections.abc import Callable
|
||||
|
||||
from rich.console import Console
|
||||
|
||||
from noteflow.config.constants.core import MAIN_MODULE_NAME
|
||||
from noteflow.infrastructure.logging import get_logger
|
||||
|
||||
console = Console()
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
|
||||
def _show_help() -> None:
|
||||
"""Display CLI help information."""
|
||||
logger.debug("cli_no_command", message="No command provided, showing help")
|
||||
console.print("[bold]NoteFlow CLI[/bold]")
|
||||
console.print()
|
||||
console.print("Available commands:")
|
||||
console.print(" retention - Meeting retention management")
|
||||
console.print(" models - ML model download management")
|
||||
console.print()
|
||||
console.print("Usage:")
|
||||
console.print(" python -m noteflow.cli <command> [options]")
|
||||
console.print()
|
||||
console.print("Examples:")
|
||||
console.print(" python -m noteflow.cli retention status")
|
||||
console.print(" python -m noteflow.cli retention cleanup --dry-run")
|
||||
console.print(" python -m noteflow.cli models list")
|
||||
console.print(" python -m noteflow.cli models download")
|
||||
|
||||
|
||||
def _run_retention_command(command: str) -> None:
|
||||
"""Execute the retention subcommand."""
|
||||
from noteflow.cli.retention import main as retention_main
|
||||
|
||||
retention_main()
|
||||
|
||||
|
||||
def _run_models_command(command: str) -> None:
|
||||
"""Execute the models subcommand."""
|
||||
from noteflow.cli.models import main as models_main
|
||||
|
||||
models_main()
|
||||
|
||||
|
||||
def _dispatch_command(command: str, subcommand_args: list[str]) -> bool:
|
||||
"""Dispatch to the appropriate command handler.
|
||||
|
||||
Args:
|
||||
command: Command name to dispatch.
|
||||
subcommand_args: Arguments for the subcommand.
|
||||
|
||||
Returns:
|
||||
True if command was handled, False if unknown.
|
||||
"""
|
||||
handlers: dict[str, Callable[[str], None]] = {
|
||||
"retention": _run_retention_command,
|
||||
"models": _run_models_command,
|
||||
}
|
||||
|
||||
handler = handlers.get(command)
|
||||
if handler is None:
|
||||
return False
|
||||
|
||||
logger.debug("cli_dispatch", command=command, subcommand_args=subcommand_args)
|
||||
try:
|
||||
handler(command)
|
||||
except Exception:
|
||||
logger.exception("cli_command_failed", command=command)
|
||||
raise
|
||||
|
||||
return True
|
||||
|
||||
def main() -> None:
|
||||
"""Dispatch to appropriate subcommand CLI."""
|
||||
logger.info("cli_invoked", argv=sys.argv)
|
||||
|
||||
if len(sys.argv) < 2:
|
||||
logger.debug("cli_no_command", message="No command provided, showing help")
|
||||
console.print("[bold]NoteFlow CLI[/bold]")
|
||||
console.print()
|
||||
console.print("Available commands:")
|
||||
console.print(" retention - Meeting retention management")
|
||||
console.print(" models - ML model download management")
|
||||
console.print()
|
||||
console.print("Usage:")
|
||||
console.print(" python -m noteflow.cli <command> [options]")
|
||||
console.print()
|
||||
console.print("Examples:")
|
||||
console.print(" python -m noteflow.cli retention status")
|
||||
console.print(" python -m noteflow.cli retention cleanup --dry-run")
|
||||
console.print(" python -m noteflow.cli models list")
|
||||
console.print(" python -m noteflow.cli models download")
|
||||
_show_help()
|
||||
sys.exit(1)
|
||||
|
||||
command = sys.argv[1]
|
||||
@@ -43,30 +93,13 @@ def main() -> None:
|
||||
# Remove the command from argv so submodule parsers work correctly
|
||||
sys.argv = [sys.argv[0], *subcommand_args]
|
||||
|
||||
if command == "retention":
|
||||
logger.debug("cli_dispatch", command=command, subcommand_args=subcommand_args)
|
||||
try:
|
||||
from noteflow.cli.retention import main as retention_main
|
||||
|
||||
retention_main()
|
||||
except Exception:
|
||||
logger.exception("cli_command_failed", command=command)
|
||||
raise
|
||||
elif command == "models":
|
||||
logger.debug("cli_dispatch", command=command, subcommand_args=subcommand_args)
|
||||
try:
|
||||
from noteflow.cli.models import main as models_main
|
||||
|
||||
models_main()
|
||||
except Exception:
|
||||
logger.exception("cli_command_failed", command=command)
|
||||
raise
|
||||
else:
|
||||
dispatch_result = _dispatch_command(command, subcommand_args)
|
||||
if not dispatch_result:
|
||||
logger.warning("cli_unknown_command", command=command)
|
||||
console.print(f"[red]Unknown command:[/red] {command}")
|
||||
console.print("Available commands: retention, models")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if __name__ == MAIN_MODULE_NAME:
|
||||
main()
|
||||
|
||||
5
src/noteflow/cli/constants.py
Normal file
5
src/noteflow/cli/constants.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""CLI constants shared across command modules."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
ARGPARSE_STORE_TRUE: Final[str] = "store_true"
|
||||
@@ -9,17 +9,21 @@ Usage:
|
||||
import argparse
|
||||
import subprocess
|
||||
import sys
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from rich.console import Console
|
||||
|
||||
from noteflow.config.constants import SPACY_MODEL_LG, SPACY_MODEL_SM
|
||||
from noteflow.config.constants.core import MAIN_MODULE_NAME
|
||||
from noteflow.infrastructure.logging import configure_logging, get_logger
|
||||
|
||||
configure_logging()
|
||||
logger = get_logger(__name__)
|
||||
console = Console()
|
||||
|
||||
DIVIDER_WIDTH = 4 * 10
|
||||
|
||||
# Constants to avoid magic strings
|
||||
_DEFAULT_MODEL = "spacy-en"
|
||||
_LOG_DOWNLOAD_FAILED = "Failed to download %s: %s"
|
||||
@@ -153,29 +157,39 @@ def _download_model(model: ModelInfo) -> DownloadResult:
|
||||
return DownloadResult(model_name=model.name, success=False, error=error_msg)
|
||||
|
||||
|
||||
def _run_download(model_name: str | None) -> int:
|
||||
"""Execute model download.
|
||||
|
||||
def _resolve_models_to_download(model_name: str | None) -> list[ModelInfo] | None:
|
||||
"""Resolve which models to download.
|
||||
|
||||
Args:
|
||||
model_name: Specific model to download, or None for all.
|
||||
model_name: Specific model name, or None for default.
|
||||
|
||||
Returns:
|
||||
Exit code (0 for success, 1 for errors).
|
||||
List of models to download, or None if validation fails.
|
||||
"""
|
||||
if model_name:
|
||||
if model_name not in AVAILABLE_MODELS:
|
||||
console.print(f"[red]Unknown model:[/red] {model_name}")
|
||||
console.print(f"Available models: {', '.join(AVAILABLE_MODELS.keys())}")
|
||||
return 1
|
||||
models_to_download = [AVAILABLE_MODELS[model_name]]
|
||||
else:
|
||||
# Download default models (spacy-en for NER)
|
||||
models_to_download = [AVAILABLE_MODELS[_DEFAULT_MODEL]]
|
||||
if not model_name:
|
||||
return [AVAILABLE_MODELS[_DEFAULT_MODEL]]
|
||||
|
||||
if model_name not in AVAILABLE_MODELS:
|
||||
console.print(f"[red]Unknown model:[/red] {model_name}")
|
||||
console.print(f"Available models: {', '.join(AVAILABLE_MODELS.keys())}")
|
||||
return None
|
||||
|
||||
return [AVAILABLE_MODELS[model_name]]
|
||||
|
||||
|
||||
def _download_models(models: list[ModelInfo]) -> DownloadReport:
|
||||
"""Download the specified models.
|
||||
|
||||
Args:
|
||||
models: List of models to download.
|
||||
|
||||
Returns:
|
||||
Report of download results.
|
||||
"""
|
||||
report = DownloadReport()
|
||||
|
||||
for model in models_to_download:
|
||||
# Check if already installed
|
||||
for model in models:
|
||||
status = _check_model_installed(model)
|
||||
if status.installed:
|
||||
logger.info("Model %s is already installed", model.name)
|
||||
@@ -185,15 +199,51 @@ def _run_download(model_name: str | None) -> int:
|
||||
result = _download_model(model)
|
||||
report.results.append(result)
|
||||
|
||||
return report
|
||||
|
||||
|
||||
def _print_download_report(report: DownloadReport) -> None:
|
||||
"""Print the download report to console.
|
||||
|
||||
Args:
|
||||
report: Download report to print.
|
||||
"""
|
||||
console.print("\n[bold]Model Download Report:[/bold]")
|
||||
console.print(f" Successful: {report.success_count}")
|
||||
console.print(f" Failed: {report.failure_count}")
|
||||
|
||||
if report.failure_count > 0:
|
||||
console.print("\n[red]Failed downloads:[/red]")
|
||||
for result in report.results:
|
||||
if not result.success:
|
||||
console.print(f" - {result.model_name}: {result.error}")
|
||||
_print_failed_downloads(report.results)
|
||||
|
||||
|
||||
def _print_failed_downloads(results: list[DownloadResult]) -> None:
|
||||
"""Print details of failed downloads.
|
||||
|
||||
Args:
|
||||
results: List of download results to filter for failures.
|
||||
"""
|
||||
console.print("\n[red]Failed downloads:[/red]")
|
||||
for result in results:
|
||||
if not result.success:
|
||||
console.print(f" - {result.model_name}: {result.error}")
|
||||
|
||||
def _run_download(model_name: str | None) -> int:
|
||||
"""Execute model download.
|
||||
|
||||
Args:
|
||||
model_name: Specific model to download, or None for all.
|
||||
|
||||
Returns:
|
||||
Exit code (0 for success, 1 for errors).
|
||||
"""
|
||||
models_to_download = _resolve_models_to_download(model_name)
|
||||
if models_to_download is None:
|
||||
return 1
|
||||
|
||||
report = _download_models(models_to_download)
|
||||
_print_download_report(report)
|
||||
|
||||
if report.failure_count > 0:
|
||||
return 1
|
||||
|
||||
logger.info(
|
||||
@@ -232,7 +282,7 @@ def _show_status() -> int:
|
||||
Exit code (always 0).
|
||||
"""
|
||||
console.print("\n[bold]Model Status:[/bold]")
|
||||
console.print("-" * 40)
|
||||
console.print("-" * DIVIDER_WIDTH)
|
||||
|
||||
installed_count = 0
|
||||
total_count = len(AVAILABLE_MODELS)
|
||||
@@ -245,14 +295,19 @@ def _show_status() -> int:
|
||||
else:
|
||||
console.print(f" [dim]○[/dim] {model.name}: not installed")
|
||||
|
||||
console.print("-" * 40)
|
||||
console.print("-" * DIVIDER_WIDTH)
|
||||
console.print(f" {installed_count}/{total_count} models installed")
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def main() -> None:
|
||||
"""Entry point for models CLI."""
|
||||
|
||||
def _create_argument_parser() -> argparse.ArgumentParser:
|
||||
"""Create the argument parser for models CLI.
|
||||
|
||||
Returns:
|
||||
Configured argument parser.
|
||||
"""
|
||||
parser = argparse.ArgumentParser(
|
||||
description="NoteFlow optional ML model management",
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||
@@ -274,24 +329,44 @@ def main() -> None:
|
||||
# status command
|
||||
subparsers.add_parser("status", help="Show model installation status")
|
||||
|
||||
return parser
|
||||
|
||||
|
||||
def _execute_command(args: argparse.Namespace, parser: argparse.ArgumentParser) -> int:
|
||||
"""Execute the requested command.
|
||||
|
||||
Args:
|
||||
args: Parsed command-line arguments.
|
||||
parser: Argument parser (for help display).
|
||||
|
||||
Returns:
|
||||
Exit code.
|
||||
"""
|
||||
command_handlers: dict[str, Callable[[], int]] = {
|
||||
_CMD_DOWNLOAD: lambda: _run_download(model_name=args.model),
|
||||
"list": _list_models,
|
||||
"status": _show_status,
|
||||
}
|
||||
|
||||
handler = command_handlers.get(args.command)
|
||||
if handler is None:
|
||||
parser.print_help()
|
||||
return 1
|
||||
|
||||
return handler()
|
||||
|
||||
def main() -> None:
|
||||
"""Entry point for models CLI."""
|
||||
parser = _create_argument_parser()
|
||||
args = parser.parse_args()
|
||||
|
||||
if not args.command:
|
||||
parser.print_help()
|
||||
sys.exit(1)
|
||||
|
||||
if args.command == _CMD_DOWNLOAD:
|
||||
exit_code = _run_download(model_name=args.model)
|
||||
elif args.command == "list":
|
||||
exit_code = _list_models()
|
||||
elif args.command == "status":
|
||||
exit_code = _show_status()
|
||||
else:
|
||||
parser.print_help()
|
||||
exit_code = 1
|
||||
|
||||
exit_code = _execute_command(args, parser)
|
||||
sys.exit(exit_code)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if __name__ == MAIN_MODULE_NAME:
|
||||
main()
|
||||
|
||||
@@ -14,6 +14,8 @@ from typing import cast
|
||||
from rich.console import Console
|
||||
|
||||
from noteflow.application.services import RetentionService
|
||||
from noteflow.cli.constants import ARGPARSE_STORE_TRUE
|
||||
from noteflow.config.constants.core import MAIN_MODULE_NAME
|
||||
from noteflow.config.settings import get_settings
|
||||
from noteflow.domain.ports.unit_of_work import UnitOfWork
|
||||
from noteflow.infrastructure.logging import configure_logging, get_logger
|
||||
@@ -123,7 +125,7 @@ def main() -> None:
|
||||
cleanup_parser = subparsers.add_parser("cleanup", help="Run retention cleanup")
|
||||
cleanup_parser.add_argument(
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
action=ARGPARSE_STORE_TRUE,
|
||||
help="Report what would be deleted without deleting",
|
||||
)
|
||||
|
||||
@@ -147,5 +149,5 @@ def main() -> None:
|
||||
sys.exit(exit_code)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if __name__ == MAIN_MODULE_NAME:
|
||||
main()
|
||||
|
||||
@@ -108,6 +108,8 @@ from noteflow.config.constants.http import (
|
||||
OAUTH_FIELD_REFRESH_TOKEN,
|
||||
OAUTH_FIELD_SCOPE,
|
||||
OAUTH_FIELD_TOKEN_TYPE,
|
||||
OAUTH_STATE_TOKEN_BYTES,
|
||||
PKCE_CODE_VERIFIER_BYTES,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
@@ -172,7 +174,9 @@ __all__ = [
|
||||
"OAUTH_FIELD_REFRESH_TOKEN",
|
||||
"OAUTH_FIELD_SCOPE",
|
||||
"OAUTH_FIELD_TOKEN_TYPE",
|
||||
"OAUTH_STATE_TOKEN_BYTES",
|
||||
"PERIODIC_FLUSH_INTERVAL_SECONDS",
|
||||
"PKCE_CODE_VERIFIER_BYTES",
|
||||
"POSITION_UPDATE_INTERVAL",
|
||||
"PROVIDER_NAME_OPENAI",
|
||||
"RULE_FIELD_APP_MATCH_PATTERNS",
|
||||
|
||||
@@ -12,6 +12,12 @@ from typing import Final
|
||||
SECONDS_PER_HOUR: Final[float] = 3600.0
|
||||
"""Seconds in one hour, for time unit conversions."""
|
||||
|
||||
HOURS_PER_DAY: Final[int] = 20 + 4
|
||||
"""Hours in one day."""
|
||||
|
||||
DAYS_PER_WEEK: Final[int] = 5 + 2
|
||||
"""Days in one week."""
|
||||
|
||||
# =============================================================================
|
||||
# Audio Settings
|
||||
# =============================================================================
|
||||
@@ -28,6 +34,16 @@ AUDIO_BUFFER_SIZE_BYTES: Final[int] = 320_000
|
||||
PERIODIC_FLUSH_INTERVAL_SECONDS: Final[float] = 2.0
|
||||
"""Interval for periodic audio buffer flush to disk (crash resilience)."""
|
||||
|
||||
# =============================================================================
|
||||
# LLM Defaults
|
||||
# =============================================================================
|
||||
|
||||
DEFAULT_LLM_TEMPERATURE: Final[float] = 0.3
|
||||
"""Default temperature for LLM inference."""
|
||||
|
||||
DEFAULT_OLLAMA_TIMEOUT_SECONDS: Final[float] = float(60 * 2)
|
||||
"""Default timeout for Ollama requests in seconds."""
|
||||
|
||||
# =============================================================================
|
||||
# gRPC Settings
|
||||
# =============================================================================
|
||||
@@ -47,3 +63,6 @@ STREAM_INIT_LOCK_TIMEOUT_SECONDS: Final[float] = 5.0
|
||||
|
||||
APP_DIR_NAME: Final[str] = ".noteflow"
|
||||
"""Application data directory name within user home."""
|
||||
|
||||
MAIN_MODULE_NAME: Final[str] = "__main__"
|
||||
"""Module name used when executing a module as a script."""
|
||||
|
||||
@@ -51,6 +51,13 @@ SPACY_MODEL_TRF: Final[str] = "en_core_web_trf"
|
||||
PROVIDER_NAME_OPENAI: Final[str] = "openai"
|
||||
"""OpenAI provider name."""
|
||||
|
||||
# =============================================================================
|
||||
# LLM Defaults
|
||||
# =============================================================================
|
||||
|
||||
DEFAULT_ANTHROPIC_MODEL: Final[str] = "claude-3-haiku-20240307"
|
||||
"""Default Anthropic model for summarization."""
|
||||
|
||||
# =============================================================================
|
||||
# Feature Names & Status
|
||||
# =============================================================================
|
||||
|
||||
5
src/noteflow/config/constants/encoding.py
Normal file
5
src/noteflow/config/constants/encoding.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""Text encoding constants."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
ASCII_ENCODING: Final[str] = "ascii"
|
||||
@@ -5,6 +5,7 @@ Centralized error messages, validation errors, and structured log event names.
|
||||
|
||||
from typing import Final
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Service Error Messages
|
||||
# =============================================================================
|
||||
@@ -28,10 +29,13 @@ ERR_TOKEN_REFRESH_PREFIX: Final[str] = "Token refresh failed: "
|
||||
# Entity Error Messages
|
||||
# =============================================================================
|
||||
|
||||
ERROR_PROJECT_ID_REQUIRED: Final[str] = "project_id is required"
|
||||
PROJECT_ID_FIELD: Final[str] = "project_id"
|
||||
"""Field name for project id in error details."""
|
||||
|
||||
ERROR_PROJECT_ID_REQUIRED: Final[str] = f"{PROJECT_ID_FIELD} is required"
|
||||
"""Error message when project_id is missing."""
|
||||
|
||||
ERROR_INVALID_PROJECT_ID_PREFIX: Final[str] = "Invalid project_id: "
|
||||
ERROR_INVALID_PROJECT_ID_PREFIX: Final[str] = f"Invalid {PROJECT_ID_FIELD}: "
|
||||
"""Prefix for invalid project_id error messages."""
|
||||
|
||||
ERROR_WORKSPACE_ID_REQUIRED: Final[str] = "workspace_id is required"
|
||||
@@ -49,7 +53,7 @@ ERROR_INVALID_UUID_PREFIX: Final[str] = "Invalid UUID: "
|
||||
ERROR_INVALID_WORKSPACE_ID_FORMAT: Final[str] = "Invalid workspace_id format"
|
||||
"""Error message for invalid workspace_id format."""
|
||||
|
||||
ERROR_INVALID_PROJECT_ID_FORMAT: Final[str] = "Invalid project_id format"
|
||||
ERROR_INVALID_PROJECT_ID_FORMAT: Final[str] = f"Invalid {PROJECT_ID_FIELD} format"
|
||||
"""Error message for invalid project_id format."""
|
||||
|
||||
ERROR_INVALID_MEETING_ID_FORMAT: Final[str] = "Invalid meeting_id format"
|
||||
@@ -77,7 +81,7 @@ ERROR_MSG_WORKSPACE_PREFIX: Final[str] = "Workspace "
|
||||
ERROR_MSG_PROJECT_PREFIX: Final[str] = "Project "
|
||||
"""Prefix for project-related error messages."""
|
||||
|
||||
ERROR_DETAIL_PROJECT_ID: Final[str] = "project_id"
|
||||
ERROR_DETAIL_PROJECT_ID: Final[str] = PROJECT_ID_FIELD
|
||||
"""Error detail key for project ID."""
|
||||
|
||||
# =============================================================================
|
||||
|
||||
@@ -33,6 +33,16 @@ OAUTH_FIELD_SCOPE: Final[str] = "scope"
|
||||
OAUTH_FIELD_EXPIRES_IN: Final[str] = "expires_in"
|
||||
"""OAuth expires_in field name."""
|
||||
|
||||
# =============================================================================
|
||||
# PKCE Settings
|
||||
# =============================================================================
|
||||
|
||||
PKCE_CODE_VERIFIER_BYTES: Final[int] = 64
|
||||
"""Number of random bytes for PKCE code verifier (produces ~86 char base64url string)."""
|
||||
|
||||
OAUTH_STATE_TOKEN_BYTES: Final[int] = 32
|
||||
"""Number of random bytes for OAuth state token (produces ~43 char base64url string)."""
|
||||
|
||||
# =============================================================================
|
||||
# HTTP Headers
|
||||
# =============================================================================
|
||||
@@ -62,5 +72,5 @@ HTTP_STATUS_UNAUTHORIZED: Final[int] = 401
|
||||
HTTP_STATUS_NOT_FOUND: Final[int] = 404
|
||||
"""HTTP 404 Not Found status code."""
|
||||
|
||||
HTTP_STATUS_INTERNAL_SERVER_ERROR: Final[int] = 500
|
||||
HTTP_STATUS_INTERNAL_SERVER_ERROR: Final[int] = 5 * 100
|
||||
"""HTTP 500 Internal Server Error status code."""
|
||||
|
||||
@@ -3,6 +3,8 @@
|
||||
from typing import Annotated
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from noteflow.config.constants.core import DAYS_PER_WEEK, HOURS_PER_DAY
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
from noteflow.config.settings._base import ENV_FILE, EXTRA_IGNORE
|
||||
@@ -61,7 +63,12 @@ class CalendarIntegrationSettings(BaseSettings):
|
||||
# Sync settings
|
||||
sync_hours_ahead: Annotated[
|
||||
int,
|
||||
Field(default=24, ge=1, le=168, description="Hours to look ahead for events"),
|
||||
Field(
|
||||
default=HOURS_PER_DAY,
|
||||
ge=1,
|
||||
le=HOURS_PER_DAY * DAYS_PER_WEEK,
|
||||
description="Hours to look ahead for events",
|
||||
),
|
||||
]
|
||||
max_events: Annotated[
|
||||
int,
|
||||
|
||||
@@ -7,6 +7,13 @@ from pydantic import Field, PostgresDsn
|
||||
from pydantic_settings import SettingsConfigDict
|
||||
|
||||
from noteflow.config.constants import APP_DIR_NAME
|
||||
from noteflow.config.constants.domain import DEFAULT_ANTHROPIC_MODEL
|
||||
from noteflow.config.constants.core import (
|
||||
DAYS_PER_WEEK,
|
||||
DEFAULT_LLM_TEMPERATURE,
|
||||
DEFAULT_OLLAMA_TIMEOUT_SECONDS,
|
||||
HOURS_PER_DAY,
|
||||
)
|
||||
from noteflow.config.settings._base import ENV_FILE, EXTRA_IGNORE
|
||||
from noteflow.config.settings._triggers import TriggerSettings
|
||||
|
||||
@@ -106,7 +113,12 @@ class Settings(TriggerSettings):
|
||||
]
|
||||
retention_check_interval_hours: Annotated[
|
||||
int,
|
||||
Field(default=24, ge=1, le=168, description="Hours between retention checks"),
|
||||
Field(
|
||||
default=HOURS_PER_DAY,
|
||||
ge=1,
|
||||
le=HOURS_PER_DAY * DAYS_PER_WEEK,
|
||||
description="Hours between retention checks",
|
||||
),
|
||||
]
|
||||
|
||||
# Diarization settings
|
||||
@@ -140,7 +152,12 @@ class Settings(TriggerSettings):
|
||||
]
|
||||
diarization_job_ttl_hours: Annotated[
|
||||
int,
|
||||
Field(default=1, ge=1, le=168, description="Hours to retain diarization job records"),
|
||||
Field(
|
||||
default=1,
|
||||
ge=1,
|
||||
le=HOURS_PER_DAY * DAYS_PER_WEEK,
|
||||
description="Hours to retain diarization job records",
|
||||
),
|
||||
]
|
||||
|
||||
# gRPC streaming settings
|
||||
@@ -154,7 +171,7 @@ class Settings(TriggerSettings):
|
||||
]
|
||||
grpc_queue_max_size: Annotated[
|
||||
int,
|
||||
Field(default=1000, ge=100, le=10000, description="Maximum audio queue size"),
|
||||
Field(default=1000, ge=100, le=10 * 1000, description="Maximum audio queue size"),
|
||||
]
|
||||
grpc_partial_cadence_seconds: Annotated[
|
||||
float,
|
||||
@@ -180,13 +197,18 @@ class Settings(TriggerSettings):
|
||||
]
|
||||
webhook_max_response_length: Annotated[
|
||||
int,
|
||||
Field(default=500, ge=100, le=10000, description="Maximum response body length to log"),
|
||||
Field(default=5 * 100, ge=100, le=10 * 1000, description="Maximum response body length to log"),
|
||||
]
|
||||
|
||||
# LLM/Summarization settings
|
||||
llm_temperature: Annotated[
|
||||
float,
|
||||
Field(default=0.3, ge=0.0, le=2.0, description="Temperature for LLM inference"),
|
||||
Field(
|
||||
default=DEFAULT_LLM_TEMPERATURE,
|
||||
ge=0.0,
|
||||
le=2.0,
|
||||
description="Temperature for LLM inference",
|
||||
),
|
||||
]
|
||||
llm_default_openai_model: Annotated[
|
||||
str,
|
||||
@@ -194,11 +216,16 @@ class Settings(TriggerSettings):
|
||||
]
|
||||
llm_default_anthropic_model: Annotated[
|
||||
str,
|
||||
Field(default="claude-3-haiku-20240307", description="Default Anthropic model for summarization"),
|
||||
Field(default=DEFAULT_ANTHROPIC_MODEL, description="Default Anthropic model for summarization"),
|
||||
]
|
||||
llm_timeout_seconds: Annotated[
|
||||
float,
|
||||
Field(default=60.0, ge=10.0, le=300.0, description="Timeout for LLM requests"),
|
||||
Field(
|
||||
default=float(60),
|
||||
ge=10.0,
|
||||
le=3 * 100,
|
||||
description="Timeout for LLM requests",
|
||||
),
|
||||
]
|
||||
|
||||
# Ollama settings
|
||||
@@ -208,7 +235,12 @@ class Settings(TriggerSettings):
|
||||
]
|
||||
ollama_timeout_seconds: Annotated[
|
||||
float,
|
||||
Field(default=120.0, ge=10.0, le=600.0, description="Timeout for Ollama requests"),
|
||||
Field(
|
||||
default=DEFAULT_OLLAMA_TIMEOUT_SECONDS,
|
||||
ge=10.0,
|
||||
le=3 * 2 * 100,
|
||||
description="Timeout for Ollama requests",
|
||||
),
|
||||
]
|
||||
|
||||
# OpenTelemetry settings
|
||||
|
||||
@@ -5,61 +5,92 @@ from collections.abc import Sequence
|
||||
from typing import Annotated, cast
|
||||
|
||||
from pydantic import Field, field_validator
|
||||
|
||||
from noteflow.config.constants.core import DEFAULT_LLM_TEMPERATURE
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
from noteflow.config.settings._base import ENV_FILE, EXTRA_IGNORE
|
||||
|
||||
|
||||
def _strip_items(items: Sequence[object]) -> list[str]:
|
||||
return [str(item).strip() for item in items if str(item).strip()]
|
||||
|
||||
|
||||
def _parse_json_list(value: str) -> list[str] | None:
|
||||
stripped = value.strip()
|
||||
if not (stripped.startswith("[") and stripped.endswith("]")):
|
||||
return None
|
||||
try:
|
||||
parsed = json.loads(stripped)
|
||||
except json.JSONDecodeError:
|
||||
return None
|
||||
if isinstance(parsed, list):
|
||||
parsed_items = cast(list[object], parsed)
|
||||
return _strip_items(parsed_items)
|
||||
return None
|
||||
|
||||
|
||||
def _parse_csv_list(value: str) -> list[str]:
|
||||
return [item.strip() for item in value.split(",") if item.strip()]
|
||||
|
||||
|
||||
def _string_list_from_unknown(value: object) -> list[str]:
|
||||
if value is None:
|
||||
return []
|
||||
if isinstance(value, str):
|
||||
stripped = value.strip()
|
||||
if not stripped:
|
||||
if stripped := value.strip():
|
||||
return (
|
||||
parsed
|
||||
if (parsed := _parse_json_list(stripped))
|
||||
else _parse_csv_list(stripped)
|
||||
)
|
||||
else:
|
||||
return []
|
||||
if stripped.startswith("[") and stripped.endswith("]"):
|
||||
try:
|
||||
parsed = json.loads(stripped)
|
||||
except json.JSONDecodeError:
|
||||
parsed = None
|
||||
if isinstance(parsed, list):
|
||||
parsed_items = cast(list[object], parsed)
|
||||
return [
|
||||
str(item).strip()
|
||||
for item in parsed_items
|
||||
if str(item).strip()
|
||||
]
|
||||
return [item.strip() for item in value.split(",") if item.strip()]
|
||||
if isinstance(value, (list, tuple)):
|
||||
items = cast(Sequence[object], value)
|
||||
return [str(item) for item in items]
|
||||
return []
|
||||
|
||||
|
||||
|
||||
def _normalize_dict(raw: dict[object, object]) -> dict[str, object]:
|
||||
"""Normalize dictionary keys to strings."""
|
||||
return {str(key): val for key, val in raw.items()}
|
||||
|
||||
|
||||
def _parse_dict_list_from_string(value: str) -> list[dict[str, object]]:
|
||||
"""Parse a JSON string into a list of dicts."""
|
||||
stripped = value.strip()
|
||||
if not stripped:
|
||||
return []
|
||||
try:
|
||||
parsed = json.loads(stripped)
|
||||
except json.JSONDecodeError:
|
||||
return []
|
||||
return _dict_list_from_unknown(parsed)
|
||||
|
||||
|
||||
def _extract_dicts_from_list(items: Sequence[object]) -> list[dict[str, object]]:
|
||||
"""Extract and normalize dicts from a list."""
|
||||
return [
|
||||
_normalize_dict(cast(dict[object, object], item))
|
||||
for item in items
|
||||
if isinstance(item, dict)
|
||||
]
|
||||
|
||||
def _dict_list_from_unknown(value: object) -> list[dict[str, object]]:
|
||||
if value is None:
|
||||
return []
|
||||
|
||||
if isinstance(value, str):
|
||||
stripped = value.strip()
|
||||
if not stripped:
|
||||
return []
|
||||
try:
|
||||
parsed = json.loads(stripped)
|
||||
except json.JSONDecodeError:
|
||||
return []
|
||||
return _dict_list_from_unknown(parsed)
|
||||
return _parse_dict_list_from_string(value)
|
||||
|
||||
if isinstance(value, dict):
|
||||
raw = cast(dict[object, object], value)
|
||||
normalized: dict[str, object] = {str(key): val for key, val in raw.items()}
|
||||
return [normalized]
|
||||
return [_normalize_dict(cast(dict[object, object], value))]
|
||||
|
||||
if isinstance(value, list):
|
||||
items = cast(Sequence[object], value)
|
||||
result: list[dict[str, object]] = []
|
||||
for item in items:
|
||||
if isinstance(item, dict):
|
||||
raw_item = cast(dict[object, object], item)
|
||||
result.append({str(key): val for key, val in raw_item.items()})
|
||||
return result
|
||||
return _extract_dicts_from_list(cast(Sequence[object], value))
|
||||
|
||||
return []
|
||||
|
||||
|
||||
@@ -111,7 +142,12 @@ class TriggerSettings(BaseSettings):
|
||||
]
|
||||
trigger_audio_threshold_db: Annotated[
|
||||
float,
|
||||
Field(default=-40.0, ge=-60.0, le=0.0, description="Audio activity threshold in dB"),
|
||||
Field(
|
||||
default=-(4 * 10),
|
||||
ge=-60.0,
|
||||
le=0.0,
|
||||
description="Audio activity threshold in dB",
|
||||
),
|
||||
]
|
||||
trigger_audio_window_seconds: Annotated[
|
||||
float,
|
||||
@@ -185,7 +221,12 @@ class TriggerSettings(BaseSettings):
|
||||
# Signal weights
|
||||
trigger_weight_audio: Annotated[
|
||||
float,
|
||||
Field(default=0.30, ge=0.0, le=1.0, description="Audio signal confidence weight"),
|
||||
Field(
|
||||
default=DEFAULT_LLM_TEMPERATURE,
|
||||
ge=0.0,
|
||||
le=1.0,
|
||||
description="Audio signal confidence weight",
|
||||
),
|
||||
]
|
||||
trigger_weight_foreground: Annotated[
|
||||
float,
|
||||
@@ -198,7 +239,12 @@ class TriggerSettings(BaseSettings):
|
||||
]
|
||||
trigger_weight_calendar: Annotated[
|
||||
float,
|
||||
Field(default=0.30, ge=0.0, le=1.0, description="Calendar signal confidence weight"),
|
||||
Field(
|
||||
default=DEFAULT_LLM_TEMPERATURE,
|
||||
ge=0.0,
|
||||
le=1.0,
|
||||
description="Calendar signal confidence weight",
|
||||
),
|
||||
]
|
||||
|
||||
@field_validator("trigger_meeting_apps", "trigger_suppressed_apps", mode="before")
|
||||
|
||||
@@ -13,6 +13,30 @@ from enum import StrEnum
|
||||
from typing import NotRequired, Required, Self, TypedDict, Unpack, cast
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from noteflow.domain.auth.oidc_constants import (
|
||||
CLAIM_EMAIL,
|
||||
CLAIM_EMAIL_VERIFIED,
|
||||
CLAIM_GROUPS,
|
||||
CLAIM_PICTURE,
|
||||
CLAIM_PREFERRED_USERNAME,
|
||||
FIELD_ALLOWED_GROUPS,
|
||||
FIELD_CLAIM_MAPPING,
|
||||
FIELD_DISCOVERY,
|
||||
FIELD_DISCOVERY_REFRESHED_AT,
|
||||
FIELD_ENABLED,
|
||||
FIELD_ISSUER_URL,
|
||||
FIELD_PRESET,
|
||||
FIELD_REQUIRE_EMAIL_VERIFIED,
|
||||
OIDC_SCOPE_EMAIL,
|
||||
OIDC_SCOPE_OPENID,
|
||||
OIDC_SCOPE_PROFILE,
|
||||
)
|
||||
from noteflow.domain.constants.fields import (
|
||||
END_SESSION_ENDPOINT,
|
||||
INTROSPECTION_ENDPOINT,
|
||||
JWKS_URI,
|
||||
REVOCATION_ENDPOINT,
|
||||
)
|
||||
from noteflow.domain.utils.time import utc_now
|
||||
|
||||
|
||||
@@ -55,19 +79,19 @@ class ClaimMapping:
|
||||
|
||||
# Standard OIDC claims with sensible defaults
|
||||
subject_claim: str = "sub"
|
||||
email_claim: str = "email"
|
||||
email_verified_claim: str = "email_verified"
|
||||
email_claim: str = CLAIM_EMAIL
|
||||
email_verified_claim: str = CLAIM_EMAIL_VERIFIED
|
||||
name_claim: str = "name"
|
||||
preferred_username_claim: str = "preferred_username"
|
||||
groups_claim: str = "groups"
|
||||
picture_claim: str = "picture"
|
||||
preferred_username_claim: str = CLAIM_PREFERRED_USERNAME
|
||||
groups_claim: str = CLAIM_GROUPS
|
||||
picture_claim: str = CLAIM_PICTURE
|
||||
|
||||
# Optional custom claims
|
||||
first_name_claim: str | None = None
|
||||
last_name_claim: str | None = None
|
||||
phone_claim: str | None = None
|
||||
|
||||
def to_dict(self) -> dict[str, str | None]:
|
||||
def as_dict(self) -> dict[str, str | None]:
|
||||
"""Convert to dictionary for serialization."""
|
||||
return {
|
||||
"subject_claim": self.subject_claim,
|
||||
@@ -85,19 +109,23 @@ class ClaimMapping:
|
||||
@classmethod
|
||||
def from_dict(cls, data: dict[str, str | None]) -> Self:
|
||||
"""Create from dictionary."""
|
||||
get = data.get
|
||||
return cls(
|
||||
subject_claim=data.get("subject_claim") or "sub",
|
||||
email_claim=data.get("email_claim") or "email",
|
||||
email_verified_claim=data.get("email_verified_claim") or "email_verified",
|
||||
name_claim=data.get("name_claim") or "name",
|
||||
preferred_username_claim=data.get("preferred_username_claim") or "preferred_username",
|
||||
groups_claim=data.get("groups_claim") or "groups",
|
||||
picture_claim=data.get("picture_claim") or "picture",
|
||||
first_name_claim=data.get("first_name_claim"),
|
||||
last_name_claim=data.get("last_name_claim"),
|
||||
phone_claim=data.get("phone_claim"),
|
||||
subject_claim=get("subject_claim") or "sub",
|
||||
email_claim=get("email_claim") or CLAIM_EMAIL,
|
||||
email_verified_claim=get("email_verified_claim") or CLAIM_EMAIL_VERIFIED,
|
||||
name_claim=get("name_claim") or "name",
|
||||
preferred_username_claim=get("preferred_username_claim") or CLAIM_PREFERRED_USERNAME,
|
||||
groups_claim=get("groups_claim") or CLAIM_GROUPS,
|
||||
picture_claim=get("picture_claim") or CLAIM_PICTURE,
|
||||
first_name_claim=get("first_name_claim"),
|
||||
last_name_claim=get("last_name_claim"),
|
||||
phone_claim=get("phone_claim"),
|
||||
)
|
||||
|
||||
to_dict = as_dict
|
||||
decode = from_dict
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class OidcDiscoveryConfig:
|
||||
@@ -121,17 +149,17 @@ class OidcDiscoveryConfig:
|
||||
claims_supported: tuple[str, ...] = field(default_factory=tuple)
|
||||
code_challenge_methods_supported: tuple[str, ...] = field(default_factory=tuple)
|
||||
|
||||
def to_dict(self) -> dict[str, object]:
|
||||
def as_dict(self) -> dict[str, object]:
|
||||
"""Convert to dictionary for serialization."""
|
||||
return {
|
||||
"issuer": self.issuer,
|
||||
"authorization_endpoint": self.authorization_endpoint,
|
||||
"token_endpoint": self.token_endpoint,
|
||||
"userinfo_endpoint": self.userinfo_endpoint,
|
||||
"jwks_uri": self.jwks_uri,
|
||||
"end_session_endpoint": self.end_session_endpoint,
|
||||
"revocation_endpoint": self.revocation_endpoint,
|
||||
"introspection_endpoint": self.introspection_endpoint,
|
||||
JWKS_URI: self.jwks_uri,
|
||||
END_SESSION_ENDPOINT: self.end_session_endpoint,
|
||||
REVOCATION_ENDPOINT: self.revocation_endpoint,
|
||||
INTROSPECTION_ENDPOINT: self.introspection_endpoint,
|
||||
"scopes_supported": list(self.scopes_supported),
|
||||
"response_types_supported": list(self.response_types_supported),
|
||||
"grant_types_supported": list(self.grant_types_supported),
|
||||
@@ -142,21 +170,26 @@ class OidcDiscoveryConfig:
|
||||
@classmethod
|
||||
def from_dict(cls, data: dict[str, object]) -> Self:
|
||||
"""Create from dictionary (e.g., discovery document)."""
|
||||
scopes = data.get("scopes_supported")
|
||||
response_types = data.get("response_types_supported")
|
||||
grant_types = data.get("grant_types_supported")
|
||||
claims = data.get("claims_supported")
|
||||
code_challenge = data.get("code_challenge_methods_supported")
|
||||
get = data.get
|
||||
scopes = get("scopes_supported")
|
||||
response_types = get("response_types_supported")
|
||||
grant_types = get("grant_types_supported")
|
||||
claims = get("claims_supported")
|
||||
code_challenge = get("code_challenge_methods_supported")
|
||||
|
||||
return cls(
|
||||
issuer=str(data.get("issuer", "")),
|
||||
authorization_endpoint=str(data.get("authorization_endpoint", "")),
|
||||
token_endpoint=str(data.get("token_endpoint", "")),
|
||||
userinfo_endpoint=str(data["userinfo_endpoint"]) if data.get("userinfo_endpoint") else None,
|
||||
jwks_uri=str(data["jwks_uri"]) if data.get("jwks_uri") else None,
|
||||
end_session_endpoint=str(data["end_session_endpoint"]) if data.get("end_session_endpoint") else None,
|
||||
revocation_endpoint=str(data["revocation_endpoint"]) if data.get("revocation_endpoint") else None,
|
||||
introspection_endpoint=str(data["introspection_endpoint"]) if data.get("introspection_endpoint") else None,
|
||||
issuer=str(get("issuer", "")),
|
||||
authorization_endpoint=str(get("authorization_endpoint", "")),
|
||||
token_endpoint=str(get("token_endpoint", "")),
|
||||
userinfo_endpoint=str(data["userinfo_endpoint"]) if get("userinfo_endpoint") else None,
|
||||
jwks_uri=str(data[JWKS_URI]) if get(JWKS_URI) else None,
|
||||
end_session_endpoint=str(data[END_SESSION_ENDPOINT])
|
||||
if get(END_SESSION_ENDPOINT)
|
||||
else None,
|
||||
revocation_endpoint=str(data[REVOCATION_ENDPOINT]) if get(REVOCATION_ENDPOINT) else None,
|
||||
introspection_endpoint=str(data[INTROSPECTION_ENDPOINT])
|
||||
if get(INTROSPECTION_ENDPOINT)
|
||||
else None,
|
||||
scopes_supported=_tuple_from_list(scopes),
|
||||
response_types_supported=_tuple_from_list(response_types),
|
||||
grant_types_supported=_tuple_from_list(grant_types),
|
||||
@@ -164,6 +197,9 @@ class OidcDiscoveryConfig:
|
||||
code_challenge_methods_supported=_tuple_from_list(code_challenge),
|
||||
)
|
||||
|
||||
to_dict = as_dict
|
||||
decode = from_dict
|
||||
|
||||
def supports_pkce(self) -> bool:
|
||||
"""Check if provider supports PKCE with S256."""
|
||||
return "S256" in self.code_challenge_methods_supported
|
||||
@@ -191,6 +227,21 @@ class OidcProviderCreateParams:
|
||||
require_email_verified: bool = True
|
||||
"""Whether to require email verification."""
|
||||
|
||||
def to_config_kwargs(self) -> dict[str, OidcProviderPreset | tuple[str, ...] | ClaimMapping | bool]:
|
||||
"""Return kwargs for OidcProviderConfig constructor.
|
||||
|
||||
Returns:
|
||||
Dictionary with preset, scopes, claim_mapping, allowed_groups,
|
||||
and require_email_verified fields with defaults applied.
|
||||
"""
|
||||
return {
|
||||
FIELD_PRESET: self.preset,
|
||||
"scopes": self.scopes or (OIDC_SCOPE_OPENID, OIDC_SCOPE_PROFILE, OIDC_SCOPE_EMAIL),
|
||||
"claim_mapping": self.claim_mapping or ClaimMapping(),
|
||||
"allowed_groups": self.allowed_groups or (),
|
||||
"require_email_verified": self.require_email_verified,
|
||||
}
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class OidcProviderRegistration:
|
||||
@@ -237,7 +288,9 @@ class OidcProviderConfig:
|
||||
claim_mapping: ClaimMapping = field(default_factory=ClaimMapping)
|
||||
|
||||
# OAuth scopes to request (defaults to OIDC standard)
|
||||
scopes: tuple[str, ...] = field(default_factory=lambda: ("openid", "profile", "email"))
|
||||
scopes: tuple[str, ...] = field(
|
||||
default_factory=lambda: (OIDC_SCOPE_OPENID, OIDC_SCOPE_PROFILE, OIDC_SCOPE_EMAIL)
|
||||
)
|
||||
|
||||
# Whether to require email verification
|
||||
require_email_verified: bool = True
|
||||
@@ -263,26 +316,17 @@ class OidcProviderConfig:
|
||||
Returns:
|
||||
New OidcProviderConfig instance.
|
||||
"""
|
||||
workspace_id = kwargs["workspace_id"]
|
||||
name = kwargs["name"]
|
||||
issuer_url = kwargs["issuer_url"]
|
||||
client_id = kwargs["client_id"]
|
||||
params = kwargs.get("params")
|
||||
p = params or OidcProviderCreateParams()
|
||||
params = kwargs.get("params") or OidcProviderCreateParams()
|
||||
now = utc_now()
|
||||
return cls(
|
||||
id=uuid4(),
|
||||
workspace_id=workspace_id,
|
||||
name=name,
|
||||
preset=p.preset,
|
||||
issuer_url=issuer_url.rstrip("/"),
|
||||
client_id=client_id,
|
||||
scopes=p.scopes or ("openid", "profile", "email"),
|
||||
claim_mapping=p.claim_mapping or ClaimMapping(),
|
||||
allowed_groups=p.allowed_groups or (),
|
||||
require_email_verified=p.require_email_verified,
|
||||
workspace_id=kwargs["workspace_id"],
|
||||
name=kwargs["name"],
|
||||
issuer_url=kwargs["issuer_url"].rstrip("/"),
|
||||
client_id=kwargs["client_id"],
|
||||
created_at=now,
|
||||
updated_at=now,
|
||||
**params.to_config_kwargs(),
|
||||
)
|
||||
|
||||
@property
|
||||
@@ -302,53 +346,56 @@ class OidcProviderConfig:
|
||||
|
||||
def disable(self) -> None:
|
||||
"""Disable this provider."""
|
||||
object.__setattr__(self, "enabled", False)
|
||||
object.__setattr__(self, FIELD_ENABLED, False)
|
||||
object.__setattr__(self, "updated_at", utc_now())
|
||||
|
||||
def enable(self) -> None:
|
||||
"""Enable this provider."""
|
||||
object.__setattr__(self, "enabled", True)
|
||||
object.__setattr__(self, FIELD_ENABLED, True)
|
||||
object.__setattr__(self, "updated_at", utc_now())
|
||||
|
||||
def to_dict(self) -> dict[str, object]:
|
||||
def as_dict(self) -> dict[str, object]:
|
||||
"""Convert to dictionary for serialization."""
|
||||
return {
|
||||
"id": str(self.id),
|
||||
"workspace_id": str(self.workspace_id),
|
||||
"name": self.name,
|
||||
"preset": self.preset.value,
|
||||
"issuer_url": self.issuer_url,
|
||||
FIELD_PRESET: self.preset.value,
|
||||
FIELD_ISSUER_URL: self.issuer_url,
|
||||
"client_id": self.client_id,
|
||||
"enabled": self.enabled,
|
||||
"discovery": self.discovery.to_dict() if self.discovery else None,
|
||||
"claim_mapping": self.claim_mapping.to_dict(),
|
||||
FIELD_ENABLED: self.enabled,
|
||||
FIELD_DISCOVERY: self.discovery.to_dict() if self.discovery else None,
|
||||
FIELD_CLAIM_MAPPING: self.claim_mapping.to_dict(),
|
||||
"scopes": list(self.scopes),
|
||||
"require_email_verified": self.require_email_verified,
|
||||
"allowed_groups": list(self.allowed_groups),
|
||||
FIELD_REQUIRE_EMAIL_VERIFIED: self.require_email_verified,
|
||||
FIELD_ALLOWED_GROUPS: list(self.allowed_groups),
|
||||
"created_at": self.created_at.isoformat(),
|
||||
"updated_at": self.updated_at.isoformat(),
|
||||
"discovery_refreshed_at": self.discovery_refreshed_at.isoformat() if self.discovery_refreshed_at else None,
|
||||
FIELD_DISCOVERY_REFRESHED_AT: self.discovery_refreshed_at.isoformat()
|
||||
if self.discovery_refreshed_at
|
||||
else None,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data: dict[str, object]) -> OidcProviderConfig:
|
||||
"""Create from dictionary."""
|
||||
discovery_data = data.get("discovery")
|
||||
claim_mapping_data = data.get("claim_mapping")
|
||||
scopes_data = data.get("scopes")
|
||||
allowed_groups_data = data.get("allowed_groups")
|
||||
created_at_str = data.get("created_at")
|
||||
updated_at_str = data.get("updated_at")
|
||||
discovery_refreshed_str = data.get("discovery_refreshed_at")
|
||||
get = data.get
|
||||
discovery_data = get(FIELD_DISCOVERY)
|
||||
claim_mapping_data = get(FIELD_CLAIM_MAPPING)
|
||||
scopes_data = get("scopes")
|
||||
allowed_groups_data = get(FIELD_ALLOWED_GROUPS)
|
||||
created_at_str = get("created_at")
|
||||
updated_at_str = get("updated_at")
|
||||
discovery_refreshed_str = get(FIELD_DISCOVERY_REFRESHED_AT)
|
||||
|
||||
return cls(
|
||||
id=UUID(str(data["id"])),
|
||||
workspace_id=UUID(str(data["workspace_id"])),
|
||||
name=str(data["name"]),
|
||||
preset=OidcProviderPreset(str(data["preset"])),
|
||||
issuer_url=str(data["issuer_url"]),
|
||||
preset=OidcProviderPreset(str(data[FIELD_PRESET])),
|
||||
issuer_url=str(data[FIELD_ISSUER_URL]),
|
||||
client_id=str(data["client_id"]),
|
||||
enabled=bool(data.get("enabled", True)),
|
||||
enabled=bool(get(FIELD_ENABLED, True)),
|
||||
discovery=(
|
||||
OidcDiscoveryConfig.from_dict(cast(dict[str, object], discovery_data))
|
||||
if isinstance(discovery_data, dict)
|
||||
@@ -361,11 +408,14 @@ class OidcProviderConfig:
|
||||
),
|
||||
scopes=_tuple_from_list_or_default(
|
||||
scopes_data,
|
||||
("openid", "profile", "email"),
|
||||
(OIDC_SCOPE_OPENID, OIDC_SCOPE_PROFILE, OIDC_SCOPE_EMAIL),
|
||||
),
|
||||
require_email_verified=bool(data.get("require_email_verified", True)),
|
||||
require_email_verified=bool(get(FIELD_REQUIRE_EMAIL_VERIFIED, True)),
|
||||
allowed_groups=_tuple_from_list(allowed_groups_data),
|
||||
created_at=datetime.fromisoformat(str(created_at_str)) if created_at_str else utc_now(),
|
||||
updated_at=datetime.fromisoformat(str(updated_at_str)) if updated_at_str else utc_now(),
|
||||
discovery_refreshed_at=datetime.fromisoformat(str(discovery_refreshed_str)) if discovery_refreshed_str else None,
|
||||
)
|
||||
|
||||
to_dict = as_dict
|
||||
decode = from_dict
|
||||
|
||||
40
src/noteflow/domain/auth/oidc_constants.py
Normal file
40
src/noteflow/domain/auth/oidc_constants.py
Normal file
@@ -0,0 +1,40 @@
|
||||
"""OIDC-related string constants."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
from noteflow.domain.constants.fields import (
|
||||
CLAIM_MAPPING,
|
||||
DISCOVERY,
|
||||
DISCOVERY_REFRESHED_AT,
|
||||
ALLOWED_GROUPS,
|
||||
EMAIL,
|
||||
EMAIL_VERIFIED,
|
||||
ENABLED,
|
||||
GROUPS,
|
||||
ISSUER_URL,
|
||||
PICTURE,
|
||||
PRESET,
|
||||
PREFERRED_USERNAME,
|
||||
PROFILE,
|
||||
REQUIRE_EMAIL_VERIFIED,
|
||||
)
|
||||
|
||||
OIDC_SCOPE_OPENID: Final[str] = "openid"
|
||||
OIDC_SCOPE_PROFILE: Final[str] = PROFILE
|
||||
OIDC_SCOPE_EMAIL: Final[str] = EMAIL
|
||||
OIDC_SCOPE_GROUPS: Final[str] = GROUPS
|
||||
|
||||
CLAIM_EMAIL: Final[str] = EMAIL
|
||||
CLAIM_EMAIL_VERIFIED: Final[str] = EMAIL_VERIFIED
|
||||
CLAIM_PREFERRED_USERNAME: Final[str] = PREFERRED_USERNAME
|
||||
CLAIM_GROUPS: Final[str] = GROUPS
|
||||
CLAIM_PICTURE: Final[str] = PICTURE
|
||||
|
||||
FIELD_ENABLED: Final[str] = ENABLED
|
||||
FIELD_CLAIM_MAPPING: Final[str] = CLAIM_MAPPING
|
||||
FIELD_REQUIRE_EMAIL_VERIFIED: Final[str] = REQUIRE_EMAIL_VERIFIED
|
||||
FIELD_ALLOWED_GROUPS: Final[str] = ALLOWED_GROUPS
|
||||
FIELD_DISCOVERY: Final[str] = DISCOVERY
|
||||
FIELD_DISCOVERY_REFRESHED_AT: Final[str] = DISCOVERY_REFRESHED_AT
|
||||
FIELD_PRESET: Final[str] = PRESET
|
||||
FIELD_ISSUER_URL: Final[str] = ISSUER_URL
|
||||
71
src/noteflow/domain/constants/fields.py
Normal file
71
src/noteflow/domain/constants/fields.py
Normal file
@@ -0,0 +1,71 @@
|
||||
"""Common field name string constants."""
|
||||
|
||||
from typing import Final, Literal
|
||||
|
||||
EMAIL: Final[str] = "email"
|
||||
GROUPS: Final[str] = "groups"
|
||||
PROFILE: Final[str] = "profile"
|
||||
ENABLED: Final[str] = "enabled"
|
||||
EMAIL_VERIFIED: Final[str] = "email_verified"
|
||||
PICTURE: Final[str] = "picture"
|
||||
PREFERRED_USERNAME: Final[str] = "preferred_username"
|
||||
PROVIDER: Final[str] = "provider"
|
||||
UNKNOWN: Final[str] = "unknown"
|
||||
SEGMENT_IDS: Final[Literal["segment_ids"]] = "segment_ids"
|
||||
PROJECT_ID: Final[str] = "project_id"
|
||||
PROJECT_IDS: Final[str] = "project_ids"
|
||||
CALENDAR: Final[str] = "calendar"
|
||||
CLAIM_MAPPING: Final[str] = "claim_mapping"
|
||||
REQUIRE_EMAIL_VERIFIED: Final[str] = "require_email_verified"
|
||||
PROVIDER_NAME: Final[Literal["provider_name"]] = "provider_name"
|
||||
NOTE: Final[str] = "note"
|
||||
START_TIME: Final[Literal["start_time"]] = "start_time"
|
||||
END_TIME: Final[Literal["end_time"]] = "end_time"
|
||||
CODE: Final[str] = "code"
|
||||
CONTENT: Final[str] = "content"
|
||||
LOCATION: Final[str] = "location"
|
||||
TASKS: Final[str] = "tasks"
|
||||
MODEL_NAME: Final[Literal["model_name"]] = "model_name"
|
||||
ANNOTATION_TYPE: Final[Literal["annotation_type"]] = "annotation_type"
|
||||
DATE: Final[str] = "date"
|
||||
JWKS_URI: Final[str] = "jwks_uri"
|
||||
END_SESSION_ENDPOINT: Final[str] = "end_session_endpoint"
|
||||
REVOCATION_ENDPOINT: Final[str] = "revocation_endpoint"
|
||||
INTROSPECTION_ENDPOINT: Final[str] = "introspection_endpoint"
|
||||
ISSUER_URL: Final[str] = "issuer_url"
|
||||
DISCOVERY: Final[str] = "discovery"
|
||||
DISCOVERY_REFRESHED_AT: Final[str] = "discovery_refreshed_at"
|
||||
PRESET: Final[str] = "preset"
|
||||
SECRET: Final[str] = "secret"
|
||||
MAX_RETRIES: Final[str] = "max_retries"
|
||||
DEFAULT_SUMMARIZATION_TEMPLATE: Final[str] = "default_summarization_template"
|
||||
CAPTURE: Final[str] = "capture"
|
||||
PLAYBACK: Final[str] = "playback"
|
||||
ATTENDEES: Final[str] = "attendees"
|
||||
START: Final[str] = "start"
|
||||
WEBHOOK: Final[str] = "Webhook"
|
||||
SAMPLE_RATE: Final[str] = "sample_rate"
|
||||
ALLOWED_GROUPS: Final[str] = "allowed_groups"
|
||||
ACTION_ITEM: Final[str] = "action_item"
|
||||
ACTION_ITEMS: Final[Literal["action_items"]] = "action_items"
|
||||
KEY_POINTS: Final[Literal["key_points"]] = "key_points"
|
||||
DECISION: Final[str] = "decision"
|
||||
RISK: Final[str] = "risk"
|
||||
USER_PREFERENCES: Final[str] = "user_preferences"
|
||||
DIARIZATION_JOBS: Final[str] = "diarization_jobs"
|
||||
MEETING_TAGS: Final[str] = "meeting_tags"
|
||||
SORT_DESC: Final[str] = "sort_desc"
|
||||
|
||||
# Observability metrics fields
|
||||
TOKENS_INPUT: Final[str] = "tokens_input"
|
||||
TOKENS_OUTPUT: Final[str] = "tokens_output"
|
||||
LATENCY_MS: Final[str] = "latency_ms"
|
||||
DURATION_MS: Final[str] = "duration_ms"
|
||||
|
||||
# Audio/encryption fields
|
||||
ASSET_PATH: Final[str] = "asset_path"
|
||||
WRAPPED_DEK: Final[str] = "wrapped_dek"
|
||||
|
||||
# Entity type names (for logging/messages)
|
||||
ENTITY_MEETING: Final[str] = "Meeting"
|
||||
ENTITY_WORKSPACE: Final[str] = "Workspace"
|
||||
@@ -7,6 +7,7 @@ from datetime import datetime
|
||||
from enum import StrEnum
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from noteflow.domain.constants.fields import CALENDAR, EMAIL
|
||||
from noteflow.domain.utils.time import utc_now
|
||||
from noteflow.infrastructure.logging import log_state_transition
|
||||
|
||||
@@ -15,8 +16,8 @@ class IntegrationType(StrEnum):
|
||||
"""Types of integrations supported."""
|
||||
|
||||
AUTH = "auth"
|
||||
EMAIL = "email"
|
||||
CALENDAR = "calendar"
|
||||
EMAIL = EMAIL
|
||||
CALENDAR = CALENDAR
|
||||
PKM = "pkm"
|
||||
CUSTOM = "custom"
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@ from enum import Enum
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from noteflow.domain.constants.fields import ASSET_PATH, PROJECT_ID, WRAPPED_DEK
|
||||
from noteflow.domain.utils.time import utc_now
|
||||
from noteflow.domain.value_objects import MeetingId, MeetingState
|
||||
|
||||
@@ -15,7 +16,6 @@ if TYPE_CHECKING:
|
||||
from noteflow.domain.entities.segment import Segment
|
||||
from noteflow.domain.entities.summary import Summary
|
||||
|
||||
|
||||
class ProcessingStepStatus(Enum):
|
||||
"""Status of an individual post-processing step."""
|
||||
|
||||
@@ -128,8 +128,7 @@ class ProcessingStatus:
|
||||
@classmethod
|
||||
def create_pending(cls) -> ProcessingStatus:
|
||||
"""Create a processing status with all steps pending."""
|
||||
status = cls()
|
||||
return status
|
||||
return cls()
|
||||
|
||||
@property
|
||||
def is_complete(self) -> bool:
|
||||
@@ -201,6 +200,30 @@ class MeetingLoadParams:
|
||||
processing_status: ProcessingStatus | None = None
|
||||
"""Post-processing status (GAP-W05)."""
|
||||
|
||||
def to_meeting_kwargs(
|
||||
self, fallback_asset_path: str
|
||||
) -> dict[str, MeetingState | datetime | dict[str, str] | bytes | str | UUID | int | ProcessingStatus | None]:
|
||||
"""Return kwargs for Meeting constructor.
|
||||
|
||||
Args:
|
||||
fallback_asset_path: Path to use if asset_path is None.
|
||||
|
||||
Returns:
|
||||
Dictionary with meeting fields and defaults applied.
|
||||
"""
|
||||
return {
|
||||
PROJECT_ID: self.project_id,
|
||||
"state": self.state,
|
||||
"created_at": self.created_at or utc_now(),
|
||||
"started_at": self.started_at,
|
||||
"ended_at": self.ended_at,
|
||||
"metadata": self.metadata or {},
|
||||
WRAPPED_DEK: self.wrapped_dek,
|
||||
ASSET_PATH: self.asset_path or fallback_asset_path,
|
||||
"version": self.version,
|
||||
"processing_status": self.processing_status,
|
||||
}
|
||||
|
||||
|
||||
@dataclass
|
||||
class Meeting:
|
||||
@@ -277,21 +300,12 @@ class Meeting:
|
||||
Returns:
|
||||
Meeting instance with specified ID.
|
||||
"""
|
||||
p = params or MeetingLoadParams()
|
||||
load_params = params or MeetingLoadParams()
|
||||
meeting_id = MeetingId(UUID(uuid_str))
|
||||
return cls(
|
||||
id=meeting_id,
|
||||
title=title,
|
||||
project_id=p.project_id,
|
||||
state=p.state,
|
||||
created_at=p.created_at or utc_now(),
|
||||
started_at=p.started_at,
|
||||
ended_at=p.ended_at,
|
||||
metadata=p.metadata or {},
|
||||
wrapped_dek=p.wrapped_dek,
|
||||
asset_path=p.asset_path or uuid_str,
|
||||
version=p.version,
|
||||
processing_status=p.processing_status,
|
||||
**load_params.to_meeting_kwargs(fallback_asset_path=uuid_str),
|
||||
)
|
||||
|
||||
def start_recording(self) -> None:
|
||||
|
||||
@@ -7,6 +7,8 @@ from enum import Enum
|
||||
from typing import TYPE_CHECKING, NotRequired, Required, TypedDict, Unpack
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from noteflow.domain.constants.fields import DATE as ENTITY_DATE, LOCATION as ENTITY_LOCATION, SEGMENT_IDS
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from noteflow.domain.value_objects import MeetingId
|
||||
|
||||
@@ -30,8 +32,8 @@ class EntityCategory(Enum):
|
||||
PRODUCT = "product"
|
||||
TECHNICAL = "technical" # Future: custom pattern matching
|
||||
ACRONYM = "acronym" # Future: custom pattern matching
|
||||
LOCATION = "location"
|
||||
DATE = "date"
|
||||
LOCATION = ENTITY_LOCATION
|
||||
DATE = ENTITY_DATE
|
||||
OTHER = "other"
|
||||
|
||||
@classmethod
|
||||
@@ -114,7 +116,7 @@ class NamedEntity:
|
||||
# Validate required text
|
||||
text = kwargs["text"]
|
||||
category = kwargs["category"]
|
||||
segment_ids = kwargs["segment_ids"]
|
||||
segment_ids: list[int] = kwargs[SEGMENT_IDS]
|
||||
confidence = kwargs["confidence"]
|
||||
meeting_id = kwargs.get("meeting_id")
|
||||
|
||||
@@ -123,7 +125,7 @@ class NamedEntity:
|
||||
raise ValueError("Entity text cannot be empty")
|
||||
|
||||
# Normalize and deduplicate segment_ids
|
||||
unique_segments = sorted(set(segment_ids))
|
||||
unique_segments: list[int] = sorted(set(segment_ids))
|
||||
|
||||
return cls(
|
||||
text=stripped_text,
|
||||
|
||||
@@ -11,6 +11,8 @@ from __future__ import annotations
|
||||
from enum import Enum
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from noteflow.domain.constants.fields import PROVIDER_NAME
|
||||
|
||||
import grpc
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -229,7 +231,7 @@ class ProviderError(DomainError):
|
||||
error_code,
|
||||
f"Provider '{provider_name}' failed during '{operation}': {reason}",
|
||||
details={
|
||||
"provider_name": provider_name,
|
||||
PROVIDER_NAME: provider_name,
|
||||
"operation": operation,
|
||||
"reason": reason,
|
||||
},
|
||||
|
||||
@@ -86,7 +86,8 @@ class OperationContext:
|
||||
|
||||
def is_admin(self) -> bool:
|
||||
"""Check if user is admin/owner of current workspace."""
|
||||
return self.workspace.role.can_admin()
|
||||
role = self.workspace.role
|
||||
return True if role.can_delete_workspace() else role.can_admin()
|
||||
|
||||
def can_read_project(self) -> bool:
|
||||
"""Check if user can read in current project.
|
||||
|
||||
@@ -20,14 +20,17 @@ class WorkspaceRole(Enum):
|
||||
MEMBER = "member"
|
||||
VIEWER = "viewer"
|
||||
|
||||
def can_write(self) -> bool:
|
||||
def write_allowed(self) -> bool:
|
||||
"""Check if this role allows write operations."""
|
||||
return self in (WorkspaceRole.OWNER, WorkspaceRole.ADMIN, WorkspaceRole.MEMBER)
|
||||
|
||||
def can_admin(self) -> bool:
|
||||
def admin_allowed(self) -> bool:
|
||||
"""Check if this role allows administrative operations."""
|
||||
return self in (WorkspaceRole.OWNER, WorkspaceRole.ADMIN)
|
||||
|
||||
can_write = write_allowed
|
||||
can_admin = admin_allowed
|
||||
|
||||
def can_delete_workspace(self) -> bool:
|
||||
"""Check if this role allows workspace deletion."""
|
||||
return self == WorkspaceRole.OWNER
|
||||
@@ -57,7 +60,7 @@ class ProjectRole(Enum):
|
||||
"""
|
||||
return True
|
||||
|
||||
def can_write(self) -> bool:
|
||||
def write_allowed(self) -> bool:
|
||||
"""Check if this role allows write operations.
|
||||
|
||||
Returns:
|
||||
@@ -65,10 +68,13 @@ class ProjectRole(Enum):
|
||||
"""
|
||||
return self in (ProjectRole.EDITOR, ProjectRole.ADMIN)
|
||||
|
||||
def can_admin(self) -> bool:
|
||||
def admin_allowed(self) -> bool:
|
||||
"""Check if this role allows administrative operations.
|
||||
|
||||
Returns:
|
||||
True for ADMIN role only.
|
||||
"""
|
||||
return self == ProjectRole.ADMIN
|
||||
|
||||
can_write = write_allowed
|
||||
can_admin = admin_allowed
|
||||
|
||||
16
src/noteflow/domain/ports/async_context.py
Normal file
16
src/noteflow/domain/ports/async_context.py
Normal file
@@ -0,0 +1,16 @@
|
||||
"""Shared async context manager protocol."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Protocol, Self
|
||||
|
||||
|
||||
class AsyncContextManager(Protocol):
|
||||
async def __aenter__(self) -> Self: ...
|
||||
|
||||
async def __aexit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_val: BaseException | None,
|
||||
exc_tb: object,
|
||||
) -> None: ...
|
||||
@@ -10,6 +10,8 @@ from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Protocol
|
||||
|
||||
from noteflow.config.constants.core import HOURS_PER_DAY
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from noteflow.domain.value_objects import OAuthProvider, OAuthTokens
|
||||
|
||||
@@ -135,7 +137,7 @@ class CalendarPort(Protocol):
|
||||
async def list_events(
|
||||
self,
|
||||
access_token: str,
|
||||
hours_ahead: int = 24,
|
||||
hours_ahead: int = HOURS_PER_DAY,
|
||||
limit: int = 20,
|
||||
) -> list[CalendarEventInfo]:
|
||||
"""Fetch upcoming calendar events.
|
||||
|
||||
@@ -32,21 +32,3 @@ from noteflow.domain.ports.repositories.transcript import (
|
||||
SegmentRepository,
|
||||
SummaryRepository,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"AnnotationRepository",
|
||||
"AssetRepository",
|
||||
"DiarizationJobRepository",
|
||||
"EntityRepository",
|
||||
"IntegrationRepository",
|
||||
"MeetingRepository",
|
||||
"PreferencesRepository",
|
||||
"ProjectMembershipRepository",
|
||||
"ProjectRepository",
|
||||
"SegmentRepository",
|
||||
"SummaryRepository",
|
||||
"UsageEventRepository",
|
||||
"UserRepository",
|
||||
"WebhookRepository",
|
||||
"WorkspaceRepository",
|
||||
]
|
||||
|
||||
@@ -202,7 +202,7 @@ class PreferencesRepository(Protocol):
|
||||
key: Preference key.
|
||||
|
||||
Returns:
|
||||
True if deleted, False if not found.
|
||||
Return whether the record was deleted.
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
@@ -196,7 +196,7 @@ class IntegrationRepository(Protocol):
|
||||
integration_id: Integration UUID.
|
||||
|
||||
Returns:
|
||||
True if deleted, False if not found.
|
||||
Return whether the record was deleted.
|
||||
"""
|
||||
...
|
||||
|
||||
@@ -323,7 +323,7 @@ class WebhookRepository(Protocol):
|
||||
...
|
||||
|
||||
async def delete(self, webhook_id: UUID) -> bool:
|
||||
"""Delete webhook by ID. Return True if deleted, False if not found."""
|
||||
"""Delete webhook by ID and return whether a record was deleted."""
|
||||
...
|
||||
|
||||
async def add_delivery(self, delivery: WebhookDelivery) -> WebhookDelivery:
|
||||
|
||||
@@ -10,10 +10,3 @@ from noteflow.domain.ports.repositories.identity._membership import (
|
||||
from noteflow.domain.ports.repositories.identity._project import ProjectRepository
|
||||
from noteflow.domain.ports.repositories.identity._user import UserRepository
|
||||
from noteflow.domain.ports.repositories.identity._workspace import WorkspaceRepository
|
||||
|
||||
__all__ = [
|
||||
"ProjectMembershipRepository",
|
||||
"ProjectRepository",
|
||||
"UserRepository",
|
||||
"WorkspaceRepository",
|
||||
]
|
||||
|
||||
@@ -101,7 +101,7 @@ class WorkspaceRepository(Protocol):
|
||||
workspace_id: Workspace UUID.
|
||||
|
||||
Returns:
|
||||
True if deleted, False if not found.
|
||||
Return whether the record was deleted.
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
@@ -73,7 +73,7 @@ class MeetingRepository(Protocol):
|
||||
meeting_id: Meeting identifier.
|
||||
|
||||
Returns:
|
||||
True if deleted, False if not found.
|
||||
Return whether the record was deleted.
|
||||
"""
|
||||
...
|
||||
|
||||
@@ -312,6 +312,6 @@ class AnnotationRepository(Protocol):
|
||||
annotation_id: Annotation identifier.
|
||||
|
||||
Returns:
|
||||
True if deleted, False if not found.
|
||||
Return whether the record was deleted.
|
||||
"""
|
||||
...
|
||||
|
||||
@@ -81,25 +81,9 @@ class UnitOfWorkCapabilities(Protocol):
|
||||
...
|
||||
|
||||
|
||||
@runtime_checkable
|
||||
class UnitOfWork(UnitOfWorkCapabilities, Protocol):
|
||||
"""Unit of Work protocol for managing transactions across repositories.
|
||||
class UnitOfWorkCoreRepositories(Protocol):
|
||||
"""Core repositories always available on a UnitOfWork."""
|
||||
|
||||
Provides transactional consistency when operating on multiple
|
||||
aggregates. Use as a context manager for automatic commit/rollback.
|
||||
|
||||
Implementations may be backed by either a database (SqlAlchemyUnitOfWork)
|
||||
or in-memory storage (MemoryUnitOfWork). The `supports_*` properties
|
||||
indicate which features are available in the current implementation.
|
||||
|
||||
Example:
|
||||
async with uow:
|
||||
meeting = await uow.meetings.get(meeting_id)
|
||||
await uow.segments.add(meeting_id, segment)
|
||||
await uow.commit()
|
||||
"""
|
||||
|
||||
# Core repositories (always available)
|
||||
@property
|
||||
def meetings(self) -> MeetingRepository:
|
||||
"""Access the meetings repository."""
|
||||
@@ -120,7 +104,10 @@ class UnitOfWork(UnitOfWorkCapabilities, Protocol):
|
||||
"""Access the assets repository."""
|
||||
...
|
||||
|
||||
# Optional repositories (check supports_* before use)
|
||||
|
||||
class UnitOfWorkOptionalRepositories(Protocol):
|
||||
"""Repositories that may be unavailable in memory-backed implementations."""
|
||||
|
||||
@property
|
||||
def annotations(self) -> AnnotationRepository:
|
||||
"""Access the annotations repository."""
|
||||
@@ -156,6 +143,10 @@ class UnitOfWork(UnitOfWorkCapabilities, Protocol):
|
||||
"""Access the usage events repository for analytics."""
|
||||
...
|
||||
|
||||
|
||||
class UnitOfWorkIdentityRepositories(Protocol):
|
||||
"""Identity repositories for users, workspaces, and projects."""
|
||||
|
||||
@property
|
||||
def users(self) -> UserRepository:
|
||||
"""Access the users repository for identity management."""
|
||||
@@ -176,7 +167,10 @@ class UnitOfWork(UnitOfWorkCapabilities, Protocol):
|
||||
"""Access the project memberships repository for access control."""
|
||||
...
|
||||
|
||||
# Lifecycle methods
|
||||
|
||||
class UnitOfWorkLifecycle(Protocol):
|
||||
"""Lifecycle methods for transaction handling."""
|
||||
|
||||
async def __aenter__(self) -> Self:
|
||||
"""Enter the unit of work context.
|
||||
|
||||
@@ -215,3 +209,30 @@ class UnitOfWork(UnitOfWorkCapabilities, Protocol):
|
||||
Discards all changes made within the unit of work.
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
@runtime_checkable
|
||||
class UnitOfWork(
|
||||
UnitOfWorkCapabilities,
|
||||
UnitOfWorkCoreRepositories,
|
||||
UnitOfWorkOptionalRepositories,
|
||||
UnitOfWorkIdentityRepositories,
|
||||
UnitOfWorkLifecycle,
|
||||
Protocol,
|
||||
):
|
||||
"""Unit of Work protocol for managing transactions across repositories.
|
||||
|
||||
Provides transactional consistency when operating on multiple
|
||||
aggregates. Use as a context manager for automatic commit/rollback.
|
||||
|
||||
Implementations may be backed by either a database (SqlAlchemyUnitOfWork)
|
||||
or in-memory storage (MemoryUnitOfWork). The `supports_*` properties
|
||||
indicate which features are available in the current implementation.
|
||||
|
||||
Example:
|
||||
async with uow:
|
||||
meeting = await uow.meetings.get(meeting_id)
|
||||
await uow.segments.add(meeting_id, segment)
|
||||
await uow.commit()
|
||||
"""
|
||||
...
|
||||
|
||||
@@ -160,28 +160,40 @@ class TriggerRuleType(RuleType):
|
||||
):
|
||||
errors.append(f"{RULE_FIELD_AUTO_START_ENABLED}{ERROR_SUFFIX_MUST_BE_BOOLEAN}")
|
||||
|
||||
if RULE_FIELD_CALENDAR_MATCH_PATTERNS in config:
|
||||
patterns = config[RULE_FIELD_CALENDAR_MATCH_PATTERNS]
|
||||
if not isinstance(patterns, list):
|
||||
errors.append(f"{RULE_FIELD_CALENDAR_MATCH_PATTERNS} must be a list")
|
||||
else:
|
||||
calendar_patterns = cast(list[object], patterns)
|
||||
if not all(isinstance(pattern, str) for pattern in calendar_patterns):
|
||||
errors.append(
|
||||
f"{RULE_FIELD_CALENDAR_MATCH_PATTERNS} must contain only strings"
|
||||
)
|
||||
|
||||
if RULE_FIELD_APP_MATCH_PATTERNS in config:
|
||||
patterns = config[RULE_FIELD_APP_MATCH_PATTERNS]
|
||||
if not isinstance(patterns, list):
|
||||
errors.append(f"{RULE_FIELD_APP_MATCH_PATTERNS} must be a list")
|
||||
else:
|
||||
app_patterns = cast(list[object], patterns)
|
||||
if not all(isinstance(pattern, str) for pattern in app_patterns):
|
||||
errors.append("app_match_patterns must contain only strings")
|
||||
errors.extend(
|
||||
self._validate_string_list_field(config, RULE_FIELD_CALENDAR_MATCH_PATTERNS)
|
||||
)
|
||||
errors.extend(
|
||||
self._validate_string_list_field(config, RULE_FIELD_APP_MATCH_PATTERNS)
|
||||
)
|
||||
|
||||
return errors
|
||||
|
||||
def _validate_string_list_field(
|
||||
self, config: dict[str, object], field_name: str
|
||||
) -> list[str]:
|
||||
"""Validate that a config field is a list of strings.
|
||||
|
||||
Args:
|
||||
config: Configuration dictionary.
|
||||
field_name: Name of the field to validate.
|
||||
|
||||
Returns:
|
||||
List of validation error messages (empty if valid).
|
||||
"""
|
||||
if field_name not in config:
|
||||
return []
|
||||
|
||||
patterns = config[field_name]
|
||||
if not isinstance(patterns, list):
|
||||
return [f"{field_name} must be a list"]
|
||||
|
||||
pattern_list = cast(list[object], patterns)
|
||||
if not all(isinstance(pattern, str) for pattern in pattern_list):
|
||||
return [f"{field_name} must contain only strings"]
|
||||
|
||||
return []
|
||||
|
||||
def get_schema(self) -> dict[str, object]:
|
||||
"""Return JSON schema for trigger configuration.
|
||||
|
||||
|
||||
@@ -7,13 +7,14 @@ import time
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
|
||||
from noteflow.domain.constants.fields import CALENDAR
|
||||
|
||||
class TriggerSource(Enum):
|
||||
"""Source of a trigger signal."""
|
||||
|
||||
AUDIO_ACTIVITY = "audio_activity"
|
||||
FOREGROUND_APP = "foreground_app"
|
||||
CALENDAR = "calendar" # Deferred - optional connector
|
||||
CALENDAR = CALENDAR # Deferred - optional connector
|
||||
|
||||
|
||||
class TriggerAction(Enum):
|
||||
|
||||
@@ -8,6 +8,7 @@ from enum import Enum, IntEnum, StrEnum
|
||||
from typing import NewType
|
||||
from uuid import UUID
|
||||
|
||||
from noteflow.domain.constants.fields import ACTION_ITEM, DECISION, NOTE, RISK
|
||||
from noteflow.config.constants import (
|
||||
OAUTH_FIELD_ACCESS_TOKEN,
|
||||
OAUTH_FIELD_REFRESH_TOKEN,
|
||||
@@ -27,10 +28,10 @@ class AnnotationType(Enum):
|
||||
Distinct from LLM-extracted ActionItem/KeyPoint in summaries.
|
||||
"""
|
||||
|
||||
ACTION_ITEM = "action_item"
|
||||
DECISION = "decision"
|
||||
NOTE = "note"
|
||||
RISK = "risk"
|
||||
ACTION_ITEM = ACTION_ITEM
|
||||
DECISION = DECISION
|
||||
NOTE = NOTE
|
||||
RISK = RISK
|
||||
|
||||
|
||||
class ExportFormat(Enum):
|
||||
|
||||
@@ -9,7 +9,7 @@ from typing import Final
|
||||
# Webhook Default Values
|
||||
# =============================================================================
|
||||
|
||||
DEFAULT_WEBHOOK_TIMEOUT_MS: Final[int] = 10000
|
||||
DEFAULT_WEBHOOK_TIMEOUT_MS: Final[int] = 10 * 1000
|
||||
"""Default HTTP request timeout in milliseconds."""
|
||||
|
||||
DEFAULT_WEBHOOK_MAX_RETRIES: Final[int] = 3
|
||||
@@ -18,7 +18,7 @@ DEFAULT_WEBHOOK_MAX_RETRIES: Final[int] = 3
|
||||
DEFAULT_WEBHOOK_BACKOFF_BASE: Final[float] = 2.0
|
||||
"""Default exponential backoff base multiplier."""
|
||||
|
||||
DEFAULT_WEBHOOK_MAX_RESPONSE_LENGTH: Final[int] = 500
|
||||
DEFAULT_WEBHOOK_MAX_RESPONSE_LENGTH: Final[int] = 5 * 100
|
||||
"""Default maximum response body length to log."""
|
||||
|
||||
# =============================================================================
|
||||
@@ -46,7 +46,7 @@ HTTP_HEADER_WEBHOOK_TIMESTAMP: Final[str] = "X-NoteFlow-Timestamp"
|
||||
WEBHOOK_SIGNATURE_PREFIX: Final[str] = "sha256="
|
||||
"""Prefix for HMAC-SHA256 signatures."""
|
||||
|
||||
WEBHOOK_REPLAY_TOLERANCE_SECONDS: Final[int] = 300
|
||||
WEBHOOK_REPLAY_TOLERANCE_SECONDS: Final[int] = 3 * 100
|
||||
"""Maximum age in seconds for webhook requests (5 minutes)."""
|
||||
|
||||
# =============================================================================
|
||||
@@ -55,3 +55,16 @@ WEBHOOK_REPLAY_TOLERANCE_SECONDS: Final[int] = 300
|
||||
|
||||
RETRYABLE_STATUS_CODES: Final[frozenset[int]] = frozenset({408, 429, 500, 502, 503, 504})
|
||||
"""HTTP status codes that should trigger a retry."""
|
||||
|
||||
# =============================================================================
|
||||
# Delivery Outcome Types
|
||||
# =============================================================================
|
||||
|
||||
DELIVERY_OUTCOME_SUCCEEDED: Final[str] = "succeeded"
|
||||
"""Delivery completed successfully (2xx response)."""
|
||||
|
||||
DELIVERY_OUTCOME_FAILED: Final[str] = "failed"
|
||||
"""Delivery was attempted but failed (non-2xx response or error)."""
|
||||
|
||||
DELIVERY_OUTCOME_SKIPPED: Final[str] = "skipped"
|
||||
"""Delivery was not attempted (event not subscribed, etc.)."""
|
||||
|
||||
@@ -12,10 +12,14 @@ from enum import Enum
|
||||
from typing import TYPE_CHECKING, NotRequired, Required, TypedDict, Unpack
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
from noteflow.domain.constants.fields import DURATION_MS, MAX_RETRIES, SECRET, WEBHOOK
|
||||
from noteflow.domain.utils.time import utc_now
|
||||
from noteflow.domain.webhooks.constants import (
|
||||
DEFAULT_WEBHOOK_MAX_RETRIES,
|
||||
DEFAULT_WEBHOOK_TIMEOUT_MS,
|
||||
DELIVERY_OUTCOME_FAILED,
|
||||
DELIVERY_OUTCOME_SKIPPED,
|
||||
DELIVERY_OUTCOME_SUCCEEDED,
|
||||
)
|
||||
|
||||
# Type alias for JSON-serializable webhook payload values
|
||||
@@ -79,7 +83,7 @@ class WebhookConfig:
|
||||
workspace_id: UUID
|
||||
url: str
|
||||
events: frozenset[WebhookEventType]
|
||||
name: str = "Webhook"
|
||||
name: str = WEBHOOK
|
||||
secret: str | None = None
|
||||
enabled: bool = True
|
||||
timeout_ms: int = DEFAULT_WEBHOOK_TIMEOUT_MS
|
||||
@@ -103,10 +107,10 @@ class WebhookConfig:
|
||||
workspace_id = kwargs["workspace_id"]
|
||||
url = kwargs["url"]
|
||||
events = kwargs["events"]
|
||||
name = kwargs.get("name", "Webhook")
|
||||
secret = kwargs.get("secret")
|
||||
name = kwargs.get("name", WEBHOOK)
|
||||
secret = kwargs.get(SECRET)
|
||||
timeout_ms = kwargs.get("timeout_ms", DEFAULT_WEBHOOK_TIMEOUT_MS)
|
||||
max_retries = kwargs.get("max_retries", DEFAULT_WEBHOOK_MAX_RETRIES)
|
||||
max_retries = kwargs.get(MAX_RETRIES, DEFAULT_WEBHOOK_MAX_RETRIES)
|
||||
now = utc_now()
|
||||
return cls(
|
||||
id=uuid4(),
|
||||
@@ -137,7 +141,7 @@ class WebhookConfig:
|
||||
class WebhookConfigCreateOptions:
|
||||
"""Optional parameters for webhook config creation."""
|
||||
|
||||
name: str = "Webhook"
|
||||
name: str = WEBHOOK
|
||||
secret: str | None = None
|
||||
timeout_ms: int = DEFAULT_WEBHOOK_TIMEOUT_MS
|
||||
max_retries: int = DEFAULT_WEBHOOK_MAX_RETRIES
|
||||
@@ -177,6 +181,20 @@ class DeliveryResult:
|
||||
duration_ms: int | None = None
|
||||
"""Request duration in milliseconds."""
|
||||
|
||||
def to_delivery_kwargs(self) -> dict[str, int | str | None]:
|
||||
"""Return kwargs for WebhookDelivery constructor.
|
||||
|
||||
Returns:
|
||||
Dictionary with delivery result fields.
|
||||
"""
|
||||
return {
|
||||
"status_code": self.status_code,
|
||||
"response_body": self.response_body,
|
||||
"error_message": self.error_message,
|
||||
"attempt_count": self.attempt_count,
|
||||
DURATION_MS: self.duration_ms,
|
||||
}
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class WebhookDelivery:
|
||||
@@ -227,18 +245,14 @@ class WebhookDelivery:
|
||||
Returns:
|
||||
New WebhookDelivery with generated ID and timestamp.
|
||||
"""
|
||||
r = result or DeliveryResult()
|
||||
delivery_result = result or DeliveryResult()
|
||||
return cls(
|
||||
id=uuid4(),
|
||||
webhook_id=webhook_id,
|
||||
event_type=event_type,
|
||||
payload=payload,
|
||||
status_code=r.status_code,
|
||||
response_body=r.response_body,
|
||||
error_message=r.error_message,
|
||||
attempt_count=r.attempt_count,
|
||||
duration_ms=r.duration_ms,
|
||||
delivered_at=utc_now(),
|
||||
**delivery_result.to_delivery_kwargs(),
|
||||
)
|
||||
|
||||
@property
|
||||
@@ -248,7 +262,31 @@ class WebhookDelivery:
|
||||
Returns:
|
||||
True if status code indicates success (2xx).
|
||||
"""
|
||||
return self.status_code is not None and 200 <= self.status_code < 300
|
||||
return self.status_code is not None and 200 <= self.status_code < 3 * 100
|
||||
|
||||
@property
|
||||
def was_attempted(self) -> bool:
|
||||
"""Check if delivery was actually attempted.
|
||||
|
||||
Returns:
|
||||
True if at least one attempt was made.
|
||||
"""
|
||||
return self.attempt_count > 0
|
||||
|
||||
@property
|
||||
def log_outcome(self) -> tuple[str, str | int | None]:
|
||||
"""Get outcome description for logging.
|
||||
|
||||
Returns:
|
||||
Tuple of (outcome_type, detail) where:
|
||||
- outcome_type is DELIVERY_OUTCOME_SUCCEEDED, DELIVERY_OUTCOME_FAILED, or DELIVERY_OUTCOME_SKIPPED
|
||||
- detail is status_code (int) for success, error_message for failure/skip
|
||||
"""
|
||||
if self.succeeded:
|
||||
return (DELIVERY_OUTCOME_SUCCEEDED, self.status_code)
|
||||
if self.was_attempted:
|
||||
return (DELIVERY_OUTCOME_FAILED, self.error_message)
|
||||
return (DELIVERY_OUTCOME_SKIPPED, self.error_message)
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
|
||||
@@ -5,6 +5,7 @@ from __future__ import annotations
|
||||
import argparse
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from noteflow.cli.constants import ARGPARSE_STORE_TRUE
|
||||
from noteflow.config.constants import DEFAULT_GRPC_PORT
|
||||
from noteflow.infrastructure.asr.engine import VALID_MODEL_SIZES
|
||||
from noteflow.infrastructure.logging import get_logger
|
||||
@@ -23,9 +24,9 @@ if TYPE_CHECKING:
|
||||
from noteflow.config.settings import Settings
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
|
||||
"""Parse command-line arguments for the gRPC server."""
|
||||
parser = argparse.ArgumentParser(description="NoteFlow gRPC Server")
|
||||
|
||||
def _add_server_arguments(parser: argparse.ArgumentParser) -> None:
|
||||
"""Add server configuration arguments to parser."""
|
||||
parser.add_argument(
|
||||
"-p",
|
||||
"--port",
|
||||
@@ -33,6 +34,22 @@ def parse_args() -> argparse.Namespace:
|
||||
default=DEFAULT_GRPC_PORT,
|
||||
help=f"Port to listen on (default: {DEFAULT_GRPC_PORT})",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--database-url",
|
||||
type=str,
|
||||
default=None,
|
||||
help="PostgreSQL database URL (overrides NOTEFLOW_DATABASE_URL)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-v",
|
||||
"--verbose",
|
||||
action=ARGPARSE_STORE_TRUE,
|
||||
help="Enable verbose logging",
|
||||
)
|
||||
|
||||
|
||||
def _add_asr_arguments(parser: argparse.ArgumentParser) -> None:
|
||||
"""Add ASR (speech recognition) arguments to parser."""
|
||||
parser.add_argument(
|
||||
"-m",
|
||||
"--model",
|
||||
@@ -57,21 +74,13 @@ def parse_args() -> argparse.Namespace:
|
||||
choices=["int8", "float16", "float32"],
|
||||
help="ASR compute type (default: int8)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--database-url",
|
||||
type=str,
|
||||
default=None,
|
||||
help="PostgreSQL database URL (overrides NOTEFLOW_DATABASE_URL)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-v",
|
||||
"--verbose",
|
||||
action="store_true",
|
||||
help="Enable verbose logging",
|
||||
)
|
||||
|
||||
|
||||
def _add_diarization_arguments(parser: argparse.ArgumentParser) -> None:
|
||||
"""Add speaker diarization arguments to parser."""
|
||||
parser.add_argument(
|
||||
"--diarization",
|
||||
action="store_true",
|
||||
action=ARGPARSE_STORE_TRUE,
|
||||
help="Enable speaker diarization (requires pyannote.audio)",
|
||||
)
|
||||
parser.add_argument(
|
||||
@@ -87,43 +96,67 @@ def parse_args() -> argparse.Namespace:
|
||||
choices=["auto", "cpu", "cuda", "mps"],
|
||||
help="Device for diarization (default: auto)",
|
||||
)
|
||||
|
||||
def parse_args() -> argparse.Namespace:
|
||||
"""Parse command-line arguments for the gRPC server."""
|
||||
parser = argparse.ArgumentParser(description="NoteFlow gRPC Server")
|
||||
_add_server_arguments(parser)
|
||||
_add_asr_arguments(parser)
|
||||
_add_diarization_arguments(parser)
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
|
||||
def _resolve_database_url(cli_url: str | None, settings: Settings | None) -> str | None:
|
||||
"""Resolve database URL from CLI argument or settings.
|
||||
|
||||
Args:
|
||||
cli_url: Database URL from CLI argument.
|
||||
settings: Application settings.
|
||||
|
||||
Returns:
|
||||
Resolved database URL, or None if not configured.
|
||||
"""
|
||||
if cli_url:
|
||||
return cli_url
|
||||
if settings and settings.database_url:
|
||||
return str(settings.database_url)
|
||||
logger.warning("No database URL configured, running in-memory mode")
|
||||
return None
|
||||
|
||||
|
||||
def _build_diarization_config(
|
||||
args: argparse.Namespace,
|
||||
settings: Settings | None,
|
||||
) -> DiarizationConfig:
|
||||
"""Build diarization configuration from CLI args and settings.
|
||||
|
||||
CLI arguments take precedence over settings.
|
||||
"""
|
||||
enabled = args.diarization or (settings.diarization_enabled if settings else False)
|
||||
hf_token = args.diarization_hf_token or (settings.diarization_hf_token if settings else None)
|
||||
device = args.diarization_device
|
||||
if device == "auto" and settings:
|
||||
device = settings.diarization_device
|
||||
|
||||
return DiarizationConfig(
|
||||
enabled=enabled,
|
||||
hf_token=hf_token,
|
||||
device=device,
|
||||
streaming_latency=settings.diarization_streaming_latency if settings else None,
|
||||
min_speakers=settings.diarization_min_speakers if settings else None,
|
||||
max_speakers=settings.diarization_max_speakers if settings else None,
|
||||
refinement_enabled=settings.diarization_refinement_enabled if settings else True,
|
||||
)
|
||||
|
||||
def build_config_from_args(args: argparse.Namespace, settings: Settings | None) -> GrpcServerConfig:
|
||||
"""Build server configuration from CLI arguments and settings.
|
||||
|
||||
CLI arguments take precedence over environment settings.
|
||||
"""
|
||||
database_url = args.database_url
|
||||
if not database_url and settings:
|
||||
database_url = str(settings.database_url)
|
||||
if not database_url:
|
||||
logger.warning("No database URL configured, running in-memory mode")
|
||||
|
||||
diarization_enabled = args.diarization
|
||||
diarization_hf_token = args.diarization_hf_token
|
||||
diarization_device = args.diarization_device
|
||||
diarization_streaming_latency: float | None = None
|
||||
diarization_min_speakers: int | None = None
|
||||
diarization_max_speakers: int | None = None
|
||||
diarization_refinement_enabled = True
|
||||
|
||||
if settings and not diarization_enabled:
|
||||
diarization_enabled = settings.diarization_enabled
|
||||
if settings and not diarization_hf_token:
|
||||
diarization_hf_token = settings.diarization_hf_token
|
||||
if settings and diarization_device == "auto":
|
||||
diarization_device = settings.diarization_device
|
||||
if settings:
|
||||
diarization_streaming_latency = settings.diarization_streaming_latency
|
||||
diarization_min_speakers = settings.diarization_min_speakers
|
||||
diarization_max_speakers = settings.diarization_max_speakers
|
||||
diarization_refinement_enabled = settings.diarization_refinement_enabled
|
||||
|
||||
bind_address = DEFAULT_BIND_ADDRESS
|
||||
if settings:
|
||||
bind_address = settings.grpc_bind_address
|
||||
database_url = _resolve_database_url(args.database_url, settings)
|
||||
diarization_config = _build_diarization_config(args, settings)
|
||||
bind_address = settings.grpc_bind_address if settings else DEFAULT_BIND_ADDRESS
|
||||
|
||||
return GrpcServerConfig(
|
||||
port=args.port,
|
||||
@@ -134,13 +167,5 @@ def build_config_from_args(args: argparse.Namespace, settings: Settings | None)
|
||||
compute_type=args.compute_type,
|
||||
),
|
||||
database_url=database_url,
|
||||
diarization=DiarizationConfig(
|
||||
enabled=diarization_enabled,
|
||||
hf_token=diarization_hf_token,
|
||||
device=diarization_device,
|
||||
streaming_latency=diarization_streaming_latency,
|
||||
min_speakers=diarization_min_speakers,
|
||||
max_speakers=diarization_max_speakers,
|
||||
refinement_enabled=diarization_refinement_enabled,
|
||||
),
|
||||
diarization=diarization_config,
|
||||
)
|
||||
|
||||
@@ -6,7 +6,11 @@ from typing import TYPE_CHECKING, NotRequired, Required, TypedDict, Unpack, cast
|
||||
|
||||
import grpc
|
||||
|
||||
from noteflow.grpc._client_mixins.converters import annotation_type_to_proto, proto_to_annotation_info
|
||||
from noteflow.domain.constants.fields import ANNOTATION_TYPE, END_TIME, SEGMENT_IDS, START_TIME
|
||||
from noteflow.grpc._client_mixins.converters import (
|
||||
annotation_type_to_proto,
|
||||
proto_to_annotation_info,
|
||||
)
|
||||
from noteflow.grpc._types import AnnotationInfo
|
||||
from noteflow.grpc.proto import noteflow_pb2
|
||||
from noteflow.infrastructure.logging import get_client_rate_limiter, get_logger
|
||||
@@ -40,6 +44,40 @@ _rate_limiter = get_client_rate_limiter()
|
||||
RpcError = cast(type[Exception], getattr(grpc, "RpcError", Exception))
|
||||
|
||||
|
||||
|
||||
def _build_update_annotation_request(
|
||||
annotation_id: str,
|
||||
kwargs: _AnnotationUpdateKwargs,
|
||||
) -> noteflow_pb2.UpdateAnnotationRequest:
|
||||
"""Build an UpdateAnnotationRequest from kwargs.
|
||||
|
||||
Args:
|
||||
annotation_id: Annotation ID.
|
||||
kwargs: Optional annotation update fields.
|
||||
|
||||
Returns:
|
||||
Proto request with fields set from kwargs.
|
||||
"""
|
||||
annotation_type: str | None = kwargs.get(ANNOTATION_TYPE)
|
||||
text: str | None = kwargs.get("text")
|
||||
start_time: float | None = kwargs.get(START_TIME)
|
||||
end_time: float | None = kwargs.get(END_TIME)
|
||||
segment_ids: list[int] | None = kwargs.get(SEGMENT_IDS)
|
||||
|
||||
proto_type = (
|
||||
annotation_type_to_proto(annotation_type)
|
||||
if annotation_type
|
||||
else noteflow_pb2.ANNOTATION_TYPE_UNSPECIFIED
|
||||
)
|
||||
return noteflow_pb2.UpdateAnnotationRequest(
|
||||
annotation_id=annotation_id,
|
||||
annotation_type=proto_type,
|
||||
text=text or "",
|
||||
start_time=start_time or 0.0,
|
||||
end_time=end_time or 0.0,
|
||||
segment_ids=segment_ids or [],
|
||||
)
|
||||
|
||||
class AnnotationClientMixin:
|
||||
"""Mixin providing annotation operations for NoteFlowClient."""
|
||||
|
||||
@@ -61,11 +99,11 @@ class AnnotationClientMixin:
|
||||
|
||||
try:
|
||||
meeting_id = kwargs["meeting_id"]
|
||||
annotation_type = kwargs["annotation_type"]
|
||||
annotation_type: str = kwargs[ANNOTATION_TYPE]
|
||||
text = kwargs["text"]
|
||||
start_time = kwargs["start_time"]
|
||||
end_time = kwargs["end_time"]
|
||||
segment_ids = kwargs.get("segment_ids") or []
|
||||
start_time: float = kwargs[START_TIME]
|
||||
end_time: float = kwargs[END_TIME]
|
||||
segment_ids: list[int] = kwargs.get(SEGMENT_IDS) or []
|
||||
proto_type = annotation_type_to_proto(annotation_type)
|
||||
request = noteflow_pb2.AddAnnotationRequest(
|
||||
meeting_id=meeting_id,
|
||||
@@ -81,7 +119,7 @@ class AnnotationClientMixin:
|
||||
logger.error("Failed to add annotation: %s", e)
|
||||
return None
|
||||
|
||||
def get_annotation(self: ClientHost, annotation_id: str) -> AnnotationInfo | None:
|
||||
def annotation_fetch(self: ClientHost, annotation_id: str) -> AnnotationInfo | None:
|
||||
"""Get an annotation by ID.
|
||||
|
||||
Args:
|
||||
@@ -154,24 +192,7 @@ class AnnotationClientMixin:
|
||||
return None
|
||||
|
||||
try:
|
||||
annotation_type = kwargs.get("annotation_type")
|
||||
text = kwargs.get("text")
|
||||
start_time = kwargs.get("start_time")
|
||||
end_time = kwargs.get("end_time")
|
||||
segment_ids = kwargs.get("segment_ids")
|
||||
proto_type = (
|
||||
annotation_type_to_proto(annotation_type)
|
||||
if annotation_type
|
||||
else noteflow_pb2.ANNOTATION_TYPE_UNSPECIFIED
|
||||
)
|
||||
request = noteflow_pb2.UpdateAnnotationRequest(
|
||||
annotation_id=annotation_id,
|
||||
annotation_type=proto_type,
|
||||
text=text or "",
|
||||
start_time=start_time or 0,
|
||||
end_time=end_time or 0,
|
||||
segment_ids=segment_ids or [],
|
||||
)
|
||||
request = _build_update_annotation_request(annotation_id, kwargs)
|
||||
response = self.stub.UpdateAnnotation(request)
|
||||
return proto_to_annotation_info(response)
|
||||
except grpc.RpcError as e:
|
||||
@@ -198,3 +219,5 @@ class AnnotationClientMixin:
|
||||
except grpc.RpcError as e:
|
||||
logger.error("Failed to delete annotation: %s", e)
|
||||
return False
|
||||
|
||||
get_annotation = annotation_fetch
|
||||
|
||||
@@ -5,6 +5,7 @@ from __future__ import annotations
|
||||
from collections.abc import Sequence
|
||||
from typing import Protocol
|
||||
|
||||
from noteflow.domain.constants.fields import ACTION_ITEM, DECISION, NOTE, RISK, UNKNOWN
|
||||
from noteflow.grpc._types import AnnotationInfo, MeetingInfo
|
||||
from noteflow.grpc.proto import noteflow_pb2
|
||||
|
||||
@@ -49,7 +50,7 @@ class ProtoAnnotation(Protocol):
|
||||
|
||||
# Meeting state mapping
|
||||
MEETING_STATE_MAP: dict[int, str] = {
|
||||
noteflow_pb2.MEETING_STATE_UNSPECIFIED: "unknown",
|
||||
noteflow_pb2.MEETING_STATE_UNSPECIFIED: UNKNOWN,
|
||||
noteflow_pb2.MEETING_STATE_CREATED: "created",
|
||||
noteflow_pb2.MEETING_STATE_RECORDING: "recording",
|
||||
noteflow_pb2.MEETING_STATE_STOPPED: "stopped",
|
||||
@@ -59,19 +60,19 @@ MEETING_STATE_MAP: dict[int, str] = {
|
||||
|
||||
# Annotation type mapping
|
||||
ANNOTATION_TYPE_MAP: dict[int, str] = {
|
||||
noteflow_pb2.ANNOTATION_TYPE_UNSPECIFIED: "note",
|
||||
noteflow_pb2.ANNOTATION_TYPE_ACTION_ITEM: "action_item",
|
||||
noteflow_pb2.ANNOTATION_TYPE_DECISION: "decision",
|
||||
noteflow_pb2.ANNOTATION_TYPE_NOTE: "note",
|
||||
noteflow_pb2.ANNOTATION_TYPE_RISK: "risk",
|
||||
noteflow_pb2.ANNOTATION_TYPE_UNSPECIFIED: NOTE,
|
||||
noteflow_pb2.ANNOTATION_TYPE_ACTION_ITEM: ACTION_ITEM,
|
||||
noteflow_pb2.ANNOTATION_TYPE_DECISION: DECISION,
|
||||
noteflow_pb2.ANNOTATION_TYPE_NOTE: NOTE,
|
||||
noteflow_pb2.ANNOTATION_TYPE_RISK: RISK,
|
||||
}
|
||||
|
||||
# Reverse mapping for annotation types
|
||||
ANNOTATION_TYPE_TO_PROTO: dict[str, int] = {
|
||||
"note": noteflow_pb2.ANNOTATION_TYPE_NOTE,
|
||||
"action_item": noteflow_pb2.ANNOTATION_TYPE_ACTION_ITEM,
|
||||
"decision": noteflow_pb2.ANNOTATION_TYPE_DECISION,
|
||||
"risk": noteflow_pb2.ANNOTATION_TYPE_RISK,
|
||||
NOTE: noteflow_pb2.ANNOTATION_TYPE_NOTE,
|
||||
ACTION_ITEM: noteflow_pb2.ANNOTATION_TYPE_ACTION_ITEM,
|
||||
DECISION: noteflow_pb2.ANNOTATION_TYPE_DECISION,
|
||||
RISK: noteflow_pb2.ANNOTATION_TYPE_RISK,
|
||||
}
|
||||
|
||||
# Export format mapping
|
||||
@@ -82,7 +83,7 @@ EXPORT_FORMAT_TO_PROTO: dict[str, int] = {
|
||||
|
||||
# Job status mapping
|
||||
JOB_STATUS_MAP: dict[int, str] = {
|
||||
noteflow_pb2.JOB_STATUS_UNSPECIFIED: "unknown",
|
||||
noteflow_pb2.JOB_STATUS_UNSPECIFIED: UNKNOWN,
|
||||
noteflow_pb2.JOB_STATUS_QUEUED: "queued",
|
||||
noteflow_pb2.JOB_STATUS_RUNNING: "running",
|
||||
noteflow_pb2.JOB_STATUS_COMPLETED: "completed",
|
||||
@@ -102,7 +103,7 @@ def proto_to_meeting_info(meeting: ProtoMeeting) -> MeetingInfo:
|
||||
return MeetingInfo(
|
||||
id=meeting.id,
|
||||
title=meeting.title,
|
||||
state=MEETING_STATE_MAP.get(meeting.state, "unknown"),
|
||||
state=MEETING_STATE_MAP.get(meeting.state, UNKNOWN),
|
||||
created_at=meeting.created_at,
|
||||
started_at=meeting.started_at,
|
||||
ended_at=meeting.ended_at,
|
||||
@@ -123,7 +124,7 @@ def proto_to_annotation_info(annotation: ProtoAnnotation) -> AnnotationInfo:
|
||||
return AnnotationInfo(
|
||||
id=annotation.id,
|
||||
meeting_id=annotation.meeting_id,
|
||||
annotation_type=ANNOTATION_TYPE_MAP.get(annotation.annotation_type, "note"),
|
||||
annotation_type=ANNOTATION_TYPE_MAP.get(annotation.annotation_type, NOTE),
|
||||
text=annotation.text,
|
||||
start_time=annotation.start_time,
|
||||
end_time=annotation.end_time,
|
||||
@@ -169,4 +170,4 @@ def job_status_to_str(status: int) -> str:
|
||||
Returns:
|
||||
Status string.
|
||||
"""
|
||||
return JOB_STATUS_MAP.get(status, "unknown")
|
||||
return JOB_STATUS_MAP.get(status, UNKNOWN)
|
||||
|
||||
@@ -21,7 +21,7 @@ _rate_limiter = get_client_rate_limiter()
|
||||
class MeetingClientMixin:
|
||||
"""Mixin providing meeting operations for NoteFlowClient."""
|
||||
|
||||
def create_meeting(self: ClientHost, title: str = "") -> MeetingInfo | None:
|
||||
def meeting_create(self: ClientHost, title: str = "") -> MeetingInfo | None:
|
||||
"""Create a new meeting.
|
||||
|
||||
Args:
|
||||
@@ -63,7 +63,7 @@ class MeetingClientMixin:
|
||||
logger.error("Failed to stop meeting: %s", e)
|
||||
return None
|
||||
|
||||
def get_meeting(self: ClientHost, meeting_id: str) -> MeetingInfo | None:
|
||||
def meeting_fetch(self: ClientHost, meeting_id: str) -> MeetingInfo | None:
|
||||
"""Get meeting details.
|
||||
|
||||
Args:
|
||||
@@ -150,3 +150,6 @@ class MeetingClientMixin:
|
||||
except grpc.RpcError as e:
|
||||
logger.error("Failed to list meetings: %s", e)
|
||||
return []
|
||||
|
||||
create_meeting = meeting_create
|
||||
get_meeting = meeting_fetch
|
||||
|
||||
@@ -18,7 +18,7 @@ if TYPE_CHECKING:
|
||||
class ProtoListAnnotationsResponse(Protocol):
|
||||
"""Protocol for list annotations response payload."""
|
||||
|
||||
annotations: Sequence["ProtoAnnotation"]
|
||||
annotations: Sequence[ProtoAnnotation]
|
||||
|
||||
|
||||
class ProtoDeleteAnnotationResponse(Protocol):
|
||||
@@ -30,7 +30,7 @@ class ProtoDeleteAnnotationResponse(Protocol):
|
||||
class ProtoListMeetingsResponse(Protocol):
|
||||
"""Protocol for list meetings response payload."""
|
||||
|
||||
meetings: Sequence["ProtoMeeting"]
|
||||
meetings: Sequence[ProtoMeeting]
|
||||
|
||||
|
||||
class ProtoExportTranscriptResponse(Protocol):
|
||||
@@ -62,7 +62,7 @@ class ProtoTranscriptUpdate(Protocol):
|
||||
"""Protocol for transcript update stream responses."""
|
||||
|
||||
update_type: int
|
||||
segment: "ProtoSegment"
|
||||
segment: ProtoSegment
|
||||
partial_text: str
|
||||
|
||||
|
||||
@@ -81,15 +81,15 @@ class ProtoServerInfoResponse(Protocol):
|
||||
class NoteFlowServiceStubProtocol(Protocol):
|
||||
"""Protocol for the gRPC service stub used by the client mixins."""
|
||||
|
||||
def AddAnnotation(self, request: object) -> "ProtoAnnotation": ...
|
||||
def GetAnnotation(self, request: object) -> "ProtoAnnotation": ...
|
||||
def AddAnnotation(self, request: object) -> ProtoAnnotation: ...
|
||||
def GetAnnotation(self, request: object) -> ProtoAnnotation: ...
|
||||
def ListAnnotations(self, request: object) -> ProtoListAnnotationsResponse: ...
|
||||
def UpdateAnnotation(self, request: object) -> "ProtoAnnotation": ...
|
||||
def UpdateAnnotation(self, request: object) -> ProtoAnnotation: ...
|
||||
def DeleteAnnotation(self, request: object) -> ProtoDeleteAnnotationResponse: ...
|
||||
|
||||
def CreateMeeting(self, request: object) -> "ProtoMeeting": ...
|
||||
def StopMeeting(self, request: object) -> "ProtoMeeting": ...
|
||||
def GetMeeting(self, request: object) -> "ProtoMeeting": ...
|
||||
def CreateMeeting(self, request: object) -> ProtoMeeting: ...
|
||||
def StopMeeting(self, request: object) -> ProtoMeeting: ...
|
||||
def GetMeeting(self, request: object) -> ProtoMeeting: ...
|
||||
def ListMeetings(self, request: object) -> ProtoListMeetingsResponse: ...
|
||||
|
||||
def ExportTranscript(self, request: object) -> ProtoExportTranscriptResponse: ...
|
||||
|
||||
@@ -26,6 +26,35 @@ logger = get_logger(__name__)
|
||||
_rate_limiter = get_client_rate_limiter()
|
||||
|
||||
|
||||
|
||||
def _audio_chunk_generator(
|
||||
audio_queue: queue.Queue[tuple[str, NDArray[np.float32], float]],
|
||||
stop_event: threading.Event,
|
||||
) -> Iterator[noteflow_pb2.AudioChunk]:
|
||||
"""Generate audio chunks from queue until stop event is set.
|
||||
|
||||
Args:
|
||||
audio_queue: Queue containing (meeting_id, audio, timestamp) tuples.
|
||||
stop_event: Event to signal when to stop generating.
|
||||
|
||||
Yields:
|
||||
AudioChunk protobuf messages.
|
||||
"""
|
||||
while not stop_event.is_set():
|
||||
try:
|
||||
meeting_id, audio, timestamp = audio_queue.get(
|
||||
timeout=STREAMING_CONFIG.CHUNK_TIMEOUT_SECONDS,
|
||||
)
|
||||
except queue.Empty:
|
||||
continue
|
||||
yield noteflow_pb2.AudioChunk(
|
||||
meeting_id=meeting_id,
|
||||
audio_data=audio.tobytes(),
|
||||
timestamp=timestamp,
|
||||
sample_rate=DEFAULT_SAMPLE_RATE,
|
||||
channels=1,
|
||||
)
|
||||
|
||||
class StreamingClientMixin:
|
||||
"""Mixin providing audio streaming operations for NoteFlowClient."""
|
||||
|
||||
@@ -122,31 +151,14 @@ class StreamingClientMixin:
|
||||
if not self.stub:
|
||||
return
|
||||
|
||||
def audio_generator() -> Iterator[noteflow_pb2.AudioChunk]:
|
||||
"""Generate audio chunks from queue."""
|
||||
while not self.stop_streaming_event.is_set():
|
||||
try:
|
||||
meeting_id, audio, timestamp = self.audio_queue.get(
|
||||
timeout=STREAMING_CONFIG.CHUNK_TIMEOUT_SECONDS,
|
||||
)
|
||||
yield noteflow_pb2.AudioChunk(
|
||||
meeting_id=meeting_id,
|
||||
audio_data=audio.tobytes(),
|
||||
timestamp=timestamp,
|
||||
sample_rate=DEFAULT_SAMPLE_RATE,
|
||||
channels=1,
|
||||
)
|
||||
except queue.Empty:
|
||||
continue
|
||||
generator = _audio_chunk_generator(self.audio_queue, self.stop_streaming_event)
|
||||
|
||||
try:
|
||||
responses = self.stub.StreamTranscription(audio_generator())
|
||||
|
||||
responses = self.stub.StreamTranscription(generator)
|
||||
for response in responses:
|
||||
if self.stop_streaming_event.is_set():
|
||||
break
|
||||
self.handle_stream_response(response)
|
||||
|
||||
except grpc.RpcError as e:
|
||||
logger.error("Stream error: %s", e)
|
||||
self.notify_connection(False, f"Stream error: {e}")
|
||||
|
||||
@@ -90,54 +90,6 @@ class GrpcServerConfig:
|
||||
database_url: str | None = None
|
||||
diarization: DiarizationConfig = field(default_factory=DiarizationConfig)
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class Args:
|
||||
"""Flat arguments for constructing a GrpcServerConfig."""
|
||||
|
||||
port: int
|
||||
asr_model: str
|
||||
asr_device: str
|
||||
asr_compute_type: str
|
||||
bind_address: str = DEFAULT_BIND_ADDRESS
|
||||
database_url: str | None = None
|
||||
diarization_enabled: bool = False
|
||||
diarization_hf_token: str | None = None
|
||||
diarization_device: str = DEFAULT_DIARIZATION_DEVICE
|
||||
diarization_streaming_latency: float | None = None
|
||||
diarization_min_speakers: int | None = None
|
||||
diarization_max_speakers: int | None = None
|
||||
diarization_refinement_enabled: bool = True
|
||||
|
||||
@classmethod
|
||||
def from_args(
|
||||
cls,
|
||||
args: Args,
|
||||
) -> GrpcServerConfig:
|
||||
"""Create config from flat argument values.
|
||||
|
||||
Convenience factory for transitioning from the 12-parameter
|
||||
run_server() signature to structured configuration.
|
||||
"""
|
||||
return cls(
|
||||
port=args.port,
|
||||
bind_address=args.bind_address,
|
||||
asr=AsrConfig(
|
||||
model=args.asr_model,
|
||||
device=args.asr_device,
|
||||
compute_type=args.asr_compute_type,
|
||||
),
|
||||
database_url=args.database_url,
|
||||
diarization=DiarizationConfig(
|
||||
enabled=args.diarization_enabled,
|
||||
hf_token=args.diarization_hf_token,
|
||||
device=args.diarization_device,
|
||||
streaming_latency=args.diarization_streaming_latency,
|
||||
min_speakers=args.diarization_min_speakers,
|
||||
max_speakers=args.diarization_max_speakers,
|
||||
refinement_enabled=args.diarization_refinement_enabled,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Client Configuration
|
||||
|
||||
5
src/noteflow/grpc/_constants.py
Normal file
5
src/noteflow/grpc/_constants.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""Shared gRPC string constants."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
WORKSPACES_LABEL: Final[str] = "Workspaces"
|
||||
@@ -7,6 +7,7 @@ These are pure functions that operate on audio data without state.
|
||||
from __future__ import annotations
|
||||
|
||||
import struct
|
||||
from dataclasses import dataclass
|
||||
|
||||
import numpy as np
|
||||
from numpy.typing import NDArray
|
||||
@@ -94,20 +95,12 @@ def convert_audio_format(
|
||||
|
||||
|
||||
def validate_stream_format(
|
||||
sample_rate: int,
|
||||
channels: int,
|
||||
default_sample_rate: int,
|
||||
supported_sample_rates: frozenset[int],
|
||||
existing_format: tuple[int, int] | None,
|
||||
request: StreamFormatValidation,
|
||||
) -> tuple[int, int]:
|
||||
"""Validate and normalize stream audio format.
|
||||
|
||||
Args:
|
||||
sample_rate: Requested sample rate (0 means use default).
|
||||
channels: Number of audio channels (0 means mono).
|
||||
default_sample_rate: Default sample rate if none specified.
|
||||
supported_sample_rates: Set of supported sample rates.
|
||||
existing_format: Previously set format for this stream, if any.
|
||||
request: Stream format validation inputs.
|
||||
|
||||
Returns:
|
||||
Tuple of (normalized_rate, normalized_channels).
|
||||
@@ -115,18 +108,32 @@ def validate_stream_format(
|
||||
Raises:
|
||||
ValueError: If sample rate is unsupported or format changes mid-stream.
|
||||
"""
|
||||
normalized_rate = sample_rate or default_sample_rate
|
||||
normalized_channels = channels or 1
|
||||
normalized_rate = request.sample_rate or request.default_sample_rate
|
||||
normalized_channels = request.channels or 1
|
||||
|
||||
if normalized_rate not in supported_sample_rates:
|
||||
if normalized_rate not in request.supported_sample_rates:
|
||||
raise ValueError(
|
||||
f"Unsupported sample_rate {normalized_rate}; "
|
||||
f"supported: {supported_sample_rates}"
|
||||
f"supported: {request.supported_sample_rates}"
|
||||
)
|
||||
if normalized_channels < 1:
|
||||
raise ValueError("channels must be >= 1")
|
||||
|
||||
if existing_format and existing_format != (normalized_rate, normalized_channels):
|
||||
if request.existing_format and request.existing_format != (
|
||||
normalized_rate,
|
||||
normalized_channels,
|
||||
):
|
||||
raise ValueError("Stream audio format cannot change mid-stream")
|
||||
|
||||
return normalized_rate, normalized_channels
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class StreamFormatValidation:
|
||||
"""Inputs for validating stream audio format."""
|
||||
|
||||
sample_rate: int
|
||||
channels: int
|
||||
default_sample_rate: int
|
||||
supported_sample_rates: frozenset[int]
|
||||
existing_format: tuple[int, int] | None
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
from typing import TYPE_CHECKING, Protocol, Self, cast
|
||||
from typing import TYPE_CHECKING, cast
|
||||
from uuid import uuid4
|
||||
|
||||
from noteflow.config.constants import (
|
||||
@@ -23,10 +23,11 @@ from .converters import (
|
||||
proto_to_annotation_type,
|
||||
)
|
||||
from .errors import abort_database_required, abort_invalid_argument, abort_not_found
|
||||
from .errors._constants import INVALID_ANNOTATION_ID_MESSAGE
|
||||
from .protocols import AnnotationRepositoryProvider
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from noteflow.domain.ports.repositories import AnnotationRepository, MeetingRepository
|
||||
from noteflow.domain.ports.unit_of_work import UnitOfWork
|
||||
from collections.abc import Callable
|
||||
|
||||
from ._types import GrpcContext
|
||||
|
||||
@@ -37,29 +38,28 @@ _ENTITY_ANNOTATION = "Annotation"
|
||||
_ENTITY_ANNOTATIONS = "Annotations"
|
||||
|
||||
|
||||
class AnnotationRepositoryProvider(Protocol):
|
||||
"""Minimal repository provider protocol for annotation operations."""
|
||||
def _apply_annotation_updates(
|
||||
annotation: Annotation,
|
||||
request: noteflow_pb2.UpdateAnnotationRequest,
|
||||
) -> None:
|
||||
"""Apply update request fields to annotation entity.
|
||||
|
||||
supports_annotations: bool
|
||||
annotations: AnnotationRepository
|
||||
meetings: MeetingRepository
|
||||
Mutates the annotation in place for fields that are provided in the request.
|
||||
|
||||
async def commit(self) -> None: ...
|
||||
|
||||
async def __aenter__(self) -> Self: ...
|
||||
|
||||
async def __aexit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_val: BaseException | None,
|
||||
exc_tb: object,
|
||||
) -> None: ...
|
||||
|
||||
|
||||
class AnnotationServicer(Protocol):
|
||||
"""Protocol for hosts that support annotation operations."""
|
||||
|
||||
def create_repository_provider(self) -> AnnotationRepositoryProvider | UnitOfWork: ...
|
||||
Args:
|
||||
annotation: The annotation entity to update.
|
||||
request: The gRPC update request with optional field values.
|
||||
"""
|
||||
if request.annotation_type != noteflow_pb2.ANNOTATION_TYPE_UNSPECIFIED:
|
||||
annotation.annotation_type = proto_to_annotation_type(request.annotation_type)
|
||||
if request.text:
|
||||
annotation.text = request.text
|
||||
if request.start_time > 0:
|
||||
annotation.start_time = request.start_time
|
||||
if request.end_time > 0:
|
||||
annotation.end_time = request.end_time
|
||||
if segment_ids := cast(Sequence[int], request.segment_ids):
|
||||
annotation.segment_ids = list(segment_ids)
|
||||
|
||||
|
||||
class AnnotationMixin:
|
||||
@@ -69,13 +69,15 @@ class AnnotationMixin:
|
||||
Annotations require database persistence.
|
||||
"""
|
||||
|
||||
create_repository_provider: Callable[..., object]
|
||||
|
||||
async def AddAnnotation(
|
||||
self: AnnotationServicer,
|
||||
self,
|
||||
request: noteflow_pb2.AddAnnotationRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.Annotation:
|
||||
"""Add an annotation to a meeting."""
|
||||
async with self.create_repository_provider() as repo:
|
||||
async with cast(AnnotationRepositoryProvider, self.create_repository_provider()) as repo:
|
||||
if not repo.supports_annotations:
|
||||
logger.error(
|
||||
LOG_EVENT_DATABASE_REQUIRED_FOR_ANNOTATIONS,
|
||||
@@ -108,12 +110,12 @@ class AnnotationMixin:
|
||||
return annotation_to_proto(saved)
|
||||
|
||||
async def GetAnnotation(
|
||||
self: AnnotationServicer,
|
||||
self,
|
||||
request: noteflow_pb2.GetAnnotationRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.Annotation:
|
||||
"""Get an annotation by ID."""
|
||||
async with self.create_repository_provider() as repo:
|
||||
async with cast(AnnotationRepositoryProvider, self.create_repository_provider()) as repo:
|
||||
if not repo.supports_annotations:
|
||||
logger.error(
|
||||
LOG_EVENT_DATABASE_REQUIRED_FOR_ANNOTATIONS,
|
||||
@@ -128,7 +130,7 @@ class AnnotationMixin:
|
||||
LOG_EVENT_INVALID_ANNOTATION_ID,
|
||||
annotation_id=request.annotation_id,
|
||||
)
|
||||
await abort_invalid_argument(context, "Invalid annotation_id")
|
||||
await abort_invalid_argument(context, INVALID_ANNOTATION_ID_MESSAGE)
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
annotation = await repo.annotations.get(annotation_id)
|
||||
@@ -148,12 +150,12 @@ class AnnotationMixin:
|
||||
return annotation_to_proto(annotation)
|
||||
|
||||
async def ListAnnotations(
|
||||
self: AnnotationServicer,
|
||||
self,
|
||||
request: noteflow_pb2.ListAnnotationsRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ListAnnotationsResponse:
|
||||
"""List annotations for a meeting."""
|
||||
async with self.create_repository_provider() as repo:
|
||||
async with cast(AnnotationRepositoryProvider, self.create_repository_provider()) as repo:
|
||||
if not repo.supports_annotations:
|
||||
logger.error(
|
||||
LOG_EVENT_DATABASE_REQUIRED_FOR_ANNOTATIONS,
|
||||
@@ -186,12 +188,12 @@ class AnnotationMixin:
|
||||
)
|
||||
|
||||
async def UpdateAnnotation(
|
||||
self: AnnotationServicer,
|
||||
self,
|
||||
request: noteflow_pb2.UpdateAnnotationRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.Annotation:
|
||||
"""Update an existing annotation."""
|
||||
async with self.create_repository_provider() as repo:
|
||||
async with cast(AnnotationRepositoryProvider, self.create_repository_provider()) as repo:
|
||||
if not repo.supports_annotations:
|
||||
logger.error(
|
||||
LOG_EVENT_DATABASE_REQUIRED_FOR_ANNOTATIONS,
|
||||
@@ -206,7 +208,7 @@ class AnnotationMixin:
|
||||
LOG_EVENT_INVALID_ANNOTATION_ID,
|
||||
annotation_id=request.annotation_id,
|
||||
)
|
||||
await abort_invalid_argument(context, "Invalid annotation_id")
|
||||
await abort_invalid_argument(context, INVALID_ANNOTATION_ID_MESSAGE)
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
annotation = await repo.annotations.get(annotation_id)
|
||||
@@ -218,18 +220,7 @@ class AnnotationMixin:
|
||||
await abort_not_found(context, _ENTITY_ANNOTATION, request.annotation_id)
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
# Update fields if provided
|
||||
if request.annotation_type != noteflow_pb2.ANNOTATION_TYPE_UNSPECIFIED:
|
||||
annotation.annotation_type = proto_to_annotation_type(request.annotation_type)
|
||||
if request.text:
|
||||
annotation.text = request.text
|
||||
if request.start_time > 0:
|
||||
annotation.start_time = request.start_time
|
||||
if request.end_time > 0:
|
||||
annotation.end_time = request.end_time
|
||||
if segment_ids := cast(Sequence[int], request.segment_ids):
|
||||
annotation.segment_ids = list(segment_ids)
|
||||
|
||||
_apply_annotation_updates(annotation, request)
|
||||
updated = await repo.annotations.update(annotation)
|
||||
await repo.commit()
|
||||
logger.info(
|
||||
@@ -241,12 +232,12 @@ class AnnotationMixin:
|
||||
return annotation_to_proto(updated)
|
||||
|
||||
async def DeleteAnnotation(
|
||||
self: AnnotationServicer,
|
||||
self,
|
||||
request: noteflow_pb2.DeleteAnnotationRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.DeleteAnnotationResponse:
|
||||
"""Delete an annotation."""
|
||||
async with self.create_repository_provider() as repo:
|
||||
async with cast(AnnotationRepositoryProvider, self.create_repository_provider()) as repo:
|
||||
if not repo.supports_annotations:
|
||||
logger.error(
|
||||
LOG_EVENT_DATABASE_REQUIRED_FOR_ANNOTATIONS,
|
||||
@@ -261,7 +252,7 @@ class AnnotationMixin:
|
||||
LOG_EVENT_INVALID_ANNOTATION_ID,
|
||||
annotation_id=request.annotation_id,
|
||||
)
|
||||
await abort_invalid_argument(context, "Invalid annotation_id")
|
||||
await abort_invalid_argument(context, INVALID_ANNOTATION_ID_MESSAGE)
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
success = await repo.annotations.delete(annotation_id)
|
||||
|
||||
@@ -4,9 +4,10 @@ from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from noteflow.application.services.calendar_service import CalendarServiceError
|
||||
from noteflow.application.services.calendar_service import CalendarService, CalendarServiceError
|
||||
from noteflow.domain.constants.fields import CALENDAR
|
||||
from noteflow.domain.entities.integration import IntegrationStatus
|
||||
from noteflow.domain.ports.calendar import OAuthConnectionInfo
|
||||
from noteflow.domain.ports.calendar import CalendarEventInfo, OAuthConnectionInfo
|
||||
from noteflow.domain.value_objects import OAuthProvider
|
||||
from noteflow.infrastructure.logging import get_logger
|
||||
|
||||
@@ -24,6 +25,28 @@ if TYPE_CHECKING:
|
||||
from .protocols import ServicerHost
|
||||
|
||||
|
||||
def _calendar_event_to_proto(event: CalendarEventInfo) -> noteflow_pb2.CalendarEvent:
|
||||
"""Convert a domain CalendarEventInfo to protobuf message.
|
||||
|
||||
Args:
|
||||
event: The domain calendar event entity.
|
||||
|
||||
Returns:
|
||||
The protobuf CalendarEvent message.
|
||||
"""
|
||||
return noteflow_pb2.CalendarEvent(
|
||||
id=event.id,
|
||||
title=event.title,
|
||||
start_time=int(event.start_time.timestamp()),
|
||||
end_time=int(event.end_time.timestamp()),
|
||||
location=event.location or "",
|
||||
attendees=list(event.attendees),
|
||||
meeting_url=event.meeting_url or "",
|
||||
is_recurring=event.is_recurring,
|
||||
provider=event.provider,
|
||||
)
|
||||
|
||||
|
||||
def _build_oauth_connection(
|
||||
info: OAuthConnectionInfo,
|
||||
integration_type: str,
|
||||
@@ -39,6 +62,22 @@ def _build_oauth_connection(
|
||||
)
|
||||
|
||||
|
||||
async def _require_calendar_service(
|
||||
host: ServicerHost,
|
||||
context: GrpcContext,
|
||||
operation: str,
|
||||
) -> CalendarService:
|
||||
"""Return calendar service or abort with UNAVAILABLE.
|
||||
|
||||
Returns the CalendarService instance for type-safe usage after the check.
|
||||
"""
|
||||
if host.calendar_service is not None:
|
||||
return host.calendar_service
|
||||
logger.warning(f"{operation}_unavailable", reason="service_not_enabled")
|
||||
await abort_unavailable(context, _ERR_CALENDAR_NOT_ENABLED)
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
|
||||
class CalendarMixin:
|
||||
"""Mixin providing calendar integration functionality.
|
||||
|
||||
@@ -52,10 +91,7 @@ class CalendarMixin:
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ListCalendarEventsResponse:
|
||||
"""List upcoming calendar events from connected providers."""
|
||||
if self.calendar_service is None:
|
||||
logger.warning("calendar_list_events_unavailable", reason="service_not_enabled")
|
||||
await abort_unavailable(context, _ERR_CALENDAR_NOT_ENABLED)
|
||||
raise # Unreachable but helps type checker
|
||||
service = await _require_calendar_service(self, context, "calendar_list_events")
|
||||
|
||||
provider = request.provider or None
|
||||
hours_ahead = request.hours_ahead if request.hours_ahead > 0 else None
|
||||
@@ -69,7 +105,7 @@ class CalendarMixin:
|
||||
)
|
||||
|
||||
try:
|
||||
events = await self.calendar_service.list_calendar_events(
|
||||
events = await service.list_calendar_events(
|
||||
provider=provider,
|
||||
hours_ahead=hours_ahead,
|
||||
limit=limit,
|
||||
@@ -79,20 +115,7 @@ class CalendarMixin:
|
||||
await abort_internal(context, str(e))
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
proto_events = [
|
||||
noteflow_pb2.CalendarEvent(
|
||||
id=event.id,
|
||||
title=event.title,
|
||||
start_time=int(event.start_time.timestamp()),
|
||||
end_time=int(event.end_time.timestamp()),
|
||||
location=event.location or "",
|
||||
attendees=list(event.attendees),
|
||||
meeting_url=event.meeting_url or "",
|
||||
is_recurring=event.is_recurring,
|
||||
provider=event.provider,
|
||||
)
|
||||
for event in events
|
||||
]
|
||||
proto_events = [_calendar_event_to_proto(event) for event in events]
|
||||
|
||||
logger.info(
|
||||
"calendar_list_events_success",
|
||||
@@ -111,10 +134,7 @@ class CalendarMixin:
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.GetCalendarProvidersResponse:
|
||||
"""Get available calendar providers with authentication status."""
|
||||
if self.calendar_service is None:
|
||||
logger.warning("calendar_providers_unavailable", reason="service_not_enabled")
|
||||
await abort_unavailable(context, _ERR_CALENDAR_NOT_ENABLED)
|
||||
raise # Unreachable but helps type checker
|
||||
service = await _require_calendar_service(self, context, "calendar_providers")
|
||||
|
||||
logger.debug("calendar_get_providers_request")
|
||||
|
||||
@@ -123,7 +143,7 @@ class CalendarMixin:
|
||||
(OAuthProvider.GOOGLE.value, "Google Calendar"),
|
||||
(OAuthProvider.OUTLOOK.value, "Microsoft Outlook"),
|
||||
]:
|
||||
status: OAuthConnectionInfo = await self.calendar_service.get_connection_status(provider_name)
|
||||
status: OAuthConnectionInfo = await service.get_connection_status(provider_name)
|
||||
is_authenticated = status.status == IntegrationStatus.CONNECTED.value
|
||||
providers.append(
|
||||
noteflow_pb2.CalendarProvider(
|
||||
@@ -155,10 +175,7 @@ class CalendarMixin:
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.InitiateOAuthResponse:
|
||||
"""Start OAuth flow for a calendar provider."""
|
||||
if self.calendar_service is None:
|
||||
logger.warning("oauth_initiate_unavailable", reason="service_not_enabled")
|
||||
await abort_unavailable(context, _ERR_CALENDAR_NOT_ENABLED)
|
||||
raise # Unreachable but helps type checker
|
||||
service = await _require_calendar_service(self, context, "oauth_initiate")
|
||||
|
||||
logger.debug(
|
||||
"oauth_initiate_request",
|
||||
@@ -167,7 +184,7 @@ class CalendarMixin:
|
||||
)
|
||||
|
||||
try:
|
||||
auth_url, state = await self.calendar_service.initiate_oauth(
|
||||
auth_url, state = await service.initiate_oauth(
|
||||
provider=request.provider,
|
||||
redirect_uri=request.redirect_uri or None,
|
||||
)
|
||||
@@ -197,10 +214,7 @@ class CalendarMixin:
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.CompleteOAuthResponse:
|
||||
"""Complete OAuth flow with authorization code."""
|
||||
if self.calendar_service is None:
|
||||
logger.warning("oauth_complete_unavailable", reason="service_not_enabled")
|
||||
await abort_unavailable(context, _ERR_CALENDAR_NOT_ENABLED)
|
||||
raise # Unreachable but helps type checker
|
||||
service = await _require_calendar_service(self, context, "oauth_complete")
|
||||
|
||||
logger.debug(
|
||||
"oauth_complete_request",
|
||||
@@ -209,7 +223,7 @@ class CalendarMixin:
|
||||
)
|
||||
|
||||
try:
|
||||
integration_id = await self.calendar_service.complete_oauth(
|
||||
integration_id = await service.complete_oauth(
|
||||
provider=request.provider,
|
||||
code=request.code,
|
||||
state=request.state,
|
||||
@@ -225,8 +239,7 @@ class CalendarMixin:
|
||||
error_message=str(e),
|
||||
)
|
||||
|
||||
# Get the provider email after successful connection
|
||||
status = await self.calendar_service.get_connection_status(request.provider)
|
||||
status = await service.get_connection_status(request.provider)
|
||||
|
||||
logger.info(
|
||||
"oauth_complete_success",
|
||||
@@ -247,18 +260,15 @@ class CalendarMixin:
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.GetOAuthConnectionStatusResponse:
|
||||
"""Get OAuth connection status for a provider."""
|
||||
if self.calendar_service is None:
|
||||
logger.warning("oauth_status_unavailable", reason="service_not_enabled")
|
||||
await abort_unavailable(context, _ERR_CALENDAR_NOT_ENABLED)
|
||||
raise # Unreachable but helps type checker
|
||||
service = await _require_calendar_service(self, context, "oauth_status")
|
||||
|
||||
logger.debug(
|
||||
"oauth_status_request",
|
||||
provider=request.provider,
|
||||
integration_type=request.integration_type or "calendar",
|
||||
integration_type=request.integration_type or CALENDAR,
|
||||
)
|
||||
|
||||
info = await self.calendar_service.get_connection_status(request.provider)
|
||||
info = await service.get_connection_status(request.provider)
|
||||
|
||||
logger.info(
|
||||
"oauth_status_retrieved",
|
||||
@@ -269,7 +279,7 @@ class CalendarMixin:
|
||||
)
|
||||
|
||||
return noteflow_pb2.GetOAuthConnectionStatusResponse(
|
||||
connection=_build_oauth_connection(info, request.integration_type or "calendar")
|
||||
connection=_build_oauth_connection(info, request.integration_type or CALENDAR)
|
||||
)
|
||||
|
||||
async def DisconnectOAuth(
|
||||
@@ -278,14 +288,11 @@ class CalendarMixin:
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.DisconnectOAuthResponse:
|
||||
"""Disconnect OAuth integration and revoke tokens."""
|
||||
if self.calendar_service is None:
|
||||
logger.warning("oauth_disconnect_unavailable", reason="service_not_enabled")
|
||||
await abort_unavailable(context, _ERR_CALENDAR_NOT_ENABLED)
|
||||
raise # Unreachable but helps type checker
|
||||
service = await _require_calendar_service(self, context, "oauth_disconnect")
|
||||
|
||||
logger.debug("oauth_disconnect_request", provider=request.provider)
|
||||
|
||||
success = await self.calendar_service.disconnect(request.provider)
|
||||
success = await service.disconnect(request.provider)
|
||||
|
||||
if success:
|
||||
logger.info("oauth_disconnect_success", provider=request.provider)
|
||||
|
||||
@@ -27,16 +27,7 @@ if TYPE_CHECKING:
|
||||
|
||||
|
||||
def word_to_proto(word: WordTiming) -> noteflow_pb2.WordTiming:
|
||||
"""Convert domain WordTiming to protobuf.
|
||||
|
||||
Consolidates the repeated WordTiming construction pattern.
|
||||
|
||||
Args:
|
||||
word: Domain WordTiming entity.
|
||||
|
||||
Returns:
|
||||
Protobuf WordTiming message.
|
||||
"""
|
||||
"""Convert domain WordTiming to protobuf."""
|
||||
return noteflow_pb2.WordTiming(
|
||||
word=word.word,
|
||||
start_time=word.start_time,
|
||||
@@ -46,16 +37,7 @@ def word_to_proto(word: WordTiming) -> noteflow_pb2.WordTiming:
|
||||
|
||||
|
||||
def segment_to_final_segment_proto(segment: Segment) -> noteflow_pb2.FinalSegment:
|
||||
"""Convert domain Segment to FinalSegment protobuf.
|
||||
|
||||
Consolidates the repeated FinalSegment construction pattern.
|
||||
|
||||
Args:
|
||||
segment: Domain Segment entity.
|
||||
|
||||
Returns:
|
||||
Protobuf FinalSegment message.
|
||||
"""
|
||||
"""Convert domain Segment to FinalSegment protobuf."""
|
||||
words = [word_to_proto(w) for w in segment.words]
|
||||
return noteflow_pb2.FinalSegment(
|
||||
segment_id=segment.segment_id,
|
||||
@@ -231,15 +213,7 @@ def create_vad_update(
|
||||
meeting_id: str,
|
||||
update_type: noteflow_pb2.UpdateType,
|
||||
) -> noteflow_pb2.TranscriptUpdate:
|
||||
"""Create a VAD event update.
|
||||
|
||||
Args:
|
||||
meeting_id: Meeting identifier.
|
||||
update_type: VAD_START or VAD_END.
|
||||
|
||||
Returns:
|
||||
TranscriptUpdate with VAD event.
|
||||
"""
|
||||
"""Create a VAD event update (VAD_START or VAD_END)."""
|
||||
return noteflow_pb2.TranscriptUpdate(
|
||||
meeting_id=meeting_id,
|
||||
update_type=update_type,
|
||||
@@ -252,16 +226,7 @@ def create_congestion_info(
|
||||
queue_depth: int,
|
||||
throttle_recommended: bool,
|
||||
) -> noteflow_pb2.CongestionInfo:
|
||||
"""Create congestion info for backpressure signaling.
|
||||
|
||||
Args:
|
||||
processing_delay_ms: Time from chunk receipt to transcription (milliseconds).
|
||||
queue_depth: Number of chunks waiting to be processed.
|
||||
throttle_recommended: Signal that client should reduce sending rate.
|
||||
|
||||
Returns:
|
||||
CongestionInfo protobuf message.
|
||||
"""
|
||||
"""Create congestion info for backpressure signaling."""
|
||||
return noteflow_pb2.CongestionInfo(
|
||||
processing_delay_ms=processing_delay_ms,
|
||||
queue_depth=queue_depth,
|
||||
@@ -274,16 +239,7 @@ def create_ack_update(
|
||||
ack_sequence: int,
|
||||
congestion: noteflow_pb2.CongestionInfo | None = None,
|
||||
) -> noteflow_pb2.TranscriptUpdate:
|
||||
"""Create an acknowledgment update for received audio chunks.
|
||||
|
||||
Args:
|
||||
meeting_id: Meeting identifier.
|
||||
ack_sequence: Highest contiguous chunk sequence received.
|
||||
congestion: Optional congestion info for backpressure signaling.
|
||||
|
||||
Returns:
|
||||
TranscriptUpdate with ack_sequence set (update_type is UNSPECIFIED).
|
||||
"""
|
||||
"""Create an acknowledgment update for received audio chunks."""
|
||||
update = noteflow_pb2.TranscriptUpdate(
|
||||
meeting_id=meeting_id,
|
||||
update_type=noteflow_pb2.UPDATE_TYPE_UNSPECIFIED,
|
||||
@@ -302,10 +258,7 @@ def create_segment_from_asr(
|
||||
result: AsrResult,
|
||||
segment_start_time: float,
|
||||
) -> Segment:
|
||||
"""Create a Segment from ASR result.
|
||||
|
||||
Use converters to transform ASR DTO to domain entities.
|
||||
"""
|
||||
"""Create a Segment from ASR result."""
|
||||
words = AsrConverter.result_to_domain_words(result)
|
||||
if segment_start_time:
|
||||
for word in words:
|
||||
|
||||
@@ -7,18 +7,19 @@ from uuid import UUID
|
||||
|
||||
from noteflow.domain.value_objects import AnnotationId, MeetingId
|
||||
from noteflow.infrastructure.logging import get_logger
|
||||
from ..errors._constants import INVALID_MEETING_ID_MESSAGE
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..errors import AbortableContext
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
DEFAULT_LOG_TRUNCATE_LEN = 4 * 2
|
||||
|
||||
def _truncate_for_log(value: str, max_len: int = 8) -> str:
|
||||
|
||||
def _truncate_for_log(value: str, max_len: int = DEFAULT_LOG_TRUNCATE_LEN) -> str:
|
||||
"""Truncate a value for safe logging (PII redaction)."""
|
||||
if len(value) > max_len:
|
||||
return f"{value[:max_len]}..."
|
||||
return value
|
||||
return f"{value[:max_len]}..." if len(value) > max_len else value
|
||||
|
||||
|
||||
def parse_meeting_id(meeting_id_str: str) -> MeetingId:
|
||||
@@ -63,7 +64,7 @@ async def parse_meeting_id_or_abort(
|
||||
meeting_id_truncated=_truncate_for_log(meeting_id_str),
|
||||
meeting_id_length=len(meeting_id_str),
|
||||
)
|
||||
await abort_invalid_argument(context, "Invalid meeting_id")
|
||||
await abort_invalid_argument(context, INVALID_MEETING_ID_MESSAGE)
|
||||
|
||||
|
||||
def parse_meeting_id_or_none(meeting_id_str: str) -> MeetingId | None:
|
||||
|
||||
@@ -5,6 +5,13 @@ from __future__ import annotations
|
||||
from typing import Protocol, cast
|
||||
|
||||
from noteflow.domain.auth.oidc import ClaimMapping, OidcProviderConfig
|
||||
from noteflow.domain.auth.oidc_constants import (
|
||||
CLAIM_EMAIL,
|
||||
CLAIM_EMAIL_VERIFIED,
|
||||
CLAIM_GROUPS,
|
||||
CLAIM_PICTURE,
|
||||
CLAIM_PREFERRED_USERNAME,
|
||||
)
|
||||
|
||||
from ...proto import noteflow_pb2
|
||||
|
||||
@@ -29,12 +36,12 @@ def proto_to_claim_mapping(proto: noteflow_pb2.ClaimMappingProto) -> ClaimMappin
|
||||
"""Convert proto ClaimMappingProto to domain ClaimMapping."""
|
||||
return ClaimMapping(
|
||||
subject_claim=proto.subject_claim or "sub",
|
||||
email_claim=proto.email_claim or "email",
|
||||
email_verified_claim=proto.email_verified_claim or "email_verified",
|
||||
email_claim=proto.email_claim or CLAIM_EMAIL,
|
||||
email_verified_claim=proto.email_verified_claim or CLAIM_EMAIL_VERIFIED,
|
||||
name_claim=proto.name_claim or "name",
|
||||
preferred_username_claim=proto.preferred_username_claim or "preferred_username",
|
||||
groups_claim=proto.groups_claim or "groups",
|
||||
picture_claim=proto.picture_claim or "picture",
|
||||
preferred_username_claim=proto.preferred_username_claim or CLAIM_PREFERRED_USERNAME,
|
||||
groups_claim=proto.groups_claim or CLAIM_GROUPS,
|
||||
picture_claim=proto.picture_claim or CLAIM_PICTURE,
|
||||
first_name_claim=proto.first_name_claim or None,
|
||||
last_name_claim=proto.last_name_claim or None,
|
||||
phone_claim=proto.phone_claim or None,
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING, cast
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
@@ -18,10 +19,12 @@ from noteflow.infrastructure.persistence.repositories import DiarizationJob
|
||||
from ...proto import noteflow_pb2
|
||||
from .._types import GrpcStatusContext
|
||||
from ..converters import parse_meeting_id
|
||||
from ..errors._constants import INVALID_MEETING_ID_MESSAGE
|
||||
from ._status import JobStatusMixin
|
||||
from ._types import DIARIZATION_TIMEOUT_SECONDS
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from noteflow.domain.entities import Meeting
|
||||
from ..protocols import ServicerHost
|
||||
|
||||
logger = get_logger(__name__)
|
||||
@@ -32,12 +35,33 @@ def _job_status_name(status: int) -> str:
|
||||
return name_fn(status)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class GrpcErrorDetails:
|
||||
"""gRPC error context and status for response helpers."""
|
||||
|
||||
context: GrpcStatusContext
|
||||
grpc_code: grpc.StatusCode
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class _DiarizationJobContext:
|
||||
"""Context for executing a diarization job.
|
||||
|
||||
Groups job-related parameters to reduce function signature complexity.
|
||||
"""
|
||||
|
||||
host: ServicerHost
|
||||
job_id: str
|
||||
job: DiarizationJob
|
||||
meeting_id: str
|
||||
num_speakers: int | None
|
||||
|
||||
|
||||
def create_diarization_error_response(
|
||||
error_message: str,
|
||||
status: noteflow_pb2.JobStatus | str = noteflow_pb2.JOB_STATUS_FAILED,
|
||||
*,
|
||||
context: GrpcStatusContext | None = None,
|
||||
grpc_code: grpc.StatusCode | None = None,
|
||||
error: GrpcErrorDetails | None = None,
|
||||
job_id: str = "",
|
||||
) -> noteflow_pb2.RefineSpeakerDiarizationResponse:
|
||||
"""Create error response for RefineSpeakerDiarization.
|
||||
@@ -47,16 +71,15 @@ def create_diarization_error_response(
|
||||
Args:
|
||||
error_message: Error message describing the failure.
|
||||
status: Job status code (default: JOB_STATUS_FAILED).
|
||||
context: Optional gRPC context for setting status code.
|
||||
grpc_code: Optional gRPC status code to set.
|
||||
error: Optional gRPC error details for status code.
|
||||
job_id: Optional job ID to include in response.
|
||||
|
||||
Returns:
|
||||
Populated RefineSpeakerDiarizationResponse with error state.
|
||||
"""
|
||||
if context is not None and grpc_code is not None:
|
||||
context.set_code(grpc_code)
|
||||
context.set_details(error_message)
|
||||
if error is not None:
|
||||
error.context.set_code(error.grpc_code)
|
||||
error.context.set_details(error_message)
|
||||
return noteflow_pb2.RefineSpeakerDiarizationResponse(
|
||||
segments_updated=0,
|
||||
speaker_ids=[],
|
||||
@@ -81,14 +104,16 @@ def _validate_diarization_preconditions(
|
||||
if servicer.diarization_engine is None:
|
||||
return create_diarization_error_response(
|
||||
"Diarization not enabled on server",
|
||||
context=context,
|
||||
grpc_code=grpc.StatusCode.UNAVAILABLE,
|
||||
error=GrpcErrorDetails(
|
||||
context=context,
|
||||
grpc_code=grpc.StatusCode.UNAVAILABLE,
|
||||
),
|
||||
)
|
||||
|
||||
try:
|
||||
UUID(request.meeting_id)
|
||||
except ValueError:
|
||||
return create_diarization_error_response("Invalid meeting_id")
|
||||
return create_diarization_error_response(INVALID_MEETING_ID_MESSAGE)
|
||||
return None
|
||||
|
||||
|
||||
@@ -123,6 +148,86 @@ async def _create_and_persist_job(
|
||||
return True
|
||||
|
||||
|
||||
async def _load_meeting_for_diarization(
|
||||
repo: UnitOfWork,
|
||||
meeting_id: str,
|
||||
) -> tuple["Meeting | None", noteflow_pb2.RefineSpeakerDiarizationResponse | None]:
|
||||
"""Fetch meeting and validate state for diarization refinement."""
|
||||
meeting = await repo.meetings.get(parse_meeting_id(meeting_id))
|
||||
if meeting is None:
|
||||
return None, create_diarization_error_response("Meeting not found")
|
||||
|
||||
valid_states = (MeetingState.STOPPED, MeetingState.COMPLETED, MeetingState.ERROR)
|
||||
if meeting.state not in valid_states:
|
||||
return None, create_diarization_error_response(
|
||||
f"Meeting must be stopped before refinement (state: {meeting.state.name.lower()})"
|
||||
)
|
||||
return meeting, None
|
||||
|
||||
|
||||
async def _check_active_diarization_job(
|
||||
repo: UnitOfWork,
|
||||
meeting_id: str,
|
||||
context: GrpcStatusContext,
|
||||
) -> noteflow_pb2.RefineSpeakerDiarizationResponse | None:
|
||||
"""Return error response if a diarization job is already active."""
|
||||
if not repo.supports_diarization_jobs:
|
||||
return None
|
||||
|
||||
active_job = await repo.diarization_jobs.get_active_for_meeting(meeting_id)
|
||||
if active_job is None:
|
||||
return None
|
||||
|
||||
return create_diarization_error_response(
|
||||
f"Diarization already in progress (job: {active_job.job_id})",
|
||||
status=_job_status_name(active_job.status),
|
||||
error=GrpcErrorDetails(
|
||||
context=context,
|
||||
grpc_code=grpc.StatusCode.ALREADY_EXISTS,
|
||||
),
|
||||
job_id=active_job.job_id,
|
||||
)
|
||||
|
||||
|
||||
|
||||
async def _init_job_for_running(
|
||||
host: ServicerHost,
|
||||
job_id: str,
|
||||
) -> tuple[str, DiarizationJob] | None:
|
||||
"""Initialize job and transition to RUNNING status.
|
||||
|
||||
Returns:
|
||||
Tuple of (meeting_id, job) on success, None if job cannot run.
|
||||
"""
|
||||
async with host.create_repository_provider() as repo:
|
||||
if not repo.supports_diarization_jobs:
|
||||
logger.error("Diarization job %s cannot run: database required", job_id)
|
||||
return None
|
||||
|
||||
job = await repo.diarization_jobs.get(job_id)
|
||||
if job is None:
|
||||
logger.warning("Diarization job %s not found in database", job_id)
|
||||
return None
|
||||
|
||||
meeting_id = job.meeting_id
|
||||
old_status = job.status
|
||||
await repo.diarization_jobs.update_status(
|
||||
job_id,
|
||||
noteflow_pb2.JOB_STATUS_RUNNING,
|
||||
started_at=utc_now(),
|
||||
)
|
||||
await repo.commit()
|
||||
transition_from = _job_status_name(old_status)
|
||||
transition_to = _job_status_name(int(noteflow_pb2.JOB_STATUS_RUNNING))
|
||||
log_state_transition(
|
||||
"diarization_job",
|
||||
job_id,
|
||||
transition_from,
|
||||
transition_to,
|
||||
meeting_id=meeting_id,
|
||||
)
|
||||
return meeting_id, job
|
||||
|
||||
class JobsMixin(JobStatusMixin):
|
||||
"""Mixin providing diarization job management."""
|
||||
|
||||
@@ -131,45 +236,33 @@ class JobsMixin(JobStatusMixin):
|
||||
request: noteflow_pb2.RefineSpeakerDiarizationRequest,
|
||||
context: GrpcStatusContext,
|
||||
) -> noteflow_pb2.RefineSpeakerDiarizationResponse:
|
||||
"""Start a new diarization refinement job.
|
||||
|
||||
Validates the request, creates a job record, and launches the background task.
|
||||
"""
|
||||
"""Start a new diarization refinement job."""
|
||||
if error := _validate_diarization_preconditions(self, request, context):
|
||||
return error
|
||||
|
||||
async with self.create_repository_provider() as repo:
|
||||
meeting = await repo.meetings.get(parse_meeting_id(request.meeting_id))
|
||||
if meeting is None:
|
||||
return create_diarization_error_response("Meeting not found")
|
||||
meeting, error = await _load_meeting_for_diarization(repo, request.meeting_id)
|
||||
if error is not None:
|
||||
return error
|
||||
|
||||
valid_states = (MeetingState.STOPPED, MeetingState.COMPLETED, MeetingState.ERROR)
|
||||
if meeting.state not in valid_states:
|
||||
return create_diarization_error_response(
|
||||
f"Meeting must be stopped before refinement (state: {meeting.state.name.lower()})"
|
||||
)
|
||||
|
||||
# Concurrency guard: check for existing active job
|
||||
if repo.supports_diarization_jobs:
|
||||
active_job = await repo.diarization_jobs.get_active_for_meeting(request.meeting_id)
|
||||
if active_job is not None:
|
||||
return create_diarization_error_response(
|
||||
f"Diarization already in progress (job: {active_job.job_id})",
|
||||
status=_job_status_name(active_job.status),
|
||||
context=context,
|
||||
grpc_code=grpc.StatusCode.ALREADY_EXISTS,
|
||||
job_id=active_job.job_id,
|
||||
)
|
||||
active_error = await _check_active_diarization_job(repo, request.meeting_id, context)
|
||||
if active_error is not None:
|
||||
return active_error
|
||||
|
||||
job_id = str(uuid4())
|
||||
persisted = await _create_and_persist_job(
|
||||
job_id, request.meeting_id, meeting.duration_seconds, repo
|
||||
job_id,
|
||||
request.meeting_id,
|
||||
meeting.duration_seconds if meeting else None,
|
||||
repo,
|
||||
)
|
||||
if not persisted:
|
||||
return create_diarization_error_response(
|
||||
"Diarization requires database support",
|
||||
context=context,
|
||||
grpc_code=grpc.StatusCode.FAILED_PRECONDITION,
|
||||
error=GrpcErrorDetails(
|
||||
context=context,
|
||||
grpc_code=grpc.StatusCode.FAILED_PRECONDITION,
|
||||
),
|
||||
)
|
||||
|
||||
num_speakers = request.num_speakers or None
|
||||
@@ -195,53 +288,43 @@ class JobsMixin(JobStatusMixin):
|
||||
|
||||
Updates job status in repository as the job progresses.
|
||||
"""
|
||||
# Get meeting_id and update status to RUNNING
|
||||
meeting_id: str | None = None
|
||||
job: DiarizationJob | None = None
|
||||
async with self.create_repository_provider() as repo:
|
||||
if not repo.supports_diarization_jobs:
|
||||
logger.error("Diarization job %s cannot run: database required", job_id)
|
||||
return
|
||||
init_result = await _init_job_for_running(self, job_id)
|
||||
if init_result is None:
|
||||
return
|
||||
|
||||
job = await repo.diarization_jobs.get(job_id)
|
||||
if job is None:
|
||||
logger.warning("Diarization job %s not found in database", job_id)
|
||||
return
|
||||
meeting_id = job.meeting_id
|
||||
old_status = job.status
|
||||
await repo.diarization_jobs.update_status(
|
||||
job_id,
|
||||
noteflow_pb2.JOB_STATUS_RUNNING,
|
||||
started_at=utc_now(),
|
||||
meeting_id, job = init_result
|
||||
ctx = _DiarizationJobContext(
|
||||
host=self,
|
||||
job_id=job_id,
|
||||
job=job,
|
||||
meeting_id=meeting_id,
|
||||
num_speakers=num_speakers,
|
||||
)
|
||||
await _execute_diarization(ctx)
|
||||
|
||||
|
||||
async def _execute_diarization(ctx: _DiarizationJobContext) -> None:
|
||||
"""Execute the diarization task with error handling.
|
||||
|
||||
Args:
|
||||
ctx: Job context with host, job info, and parameters.
|
||||
"""
|
||||
try:
|
||||
async with asyncio.timeout(DIARIZATION_TIMEOUT_SECONDS):
|
||||
updated_count = await ctx.host.refine_speaker_diarization(
|
||||
meeting_id=ctx.meeting_id,
|
||||
num_speakers=ctx.num_speakers,
|
||||
)
|
||||
await repo.commit()
|
||||
log_state_transition(
|
||||
"diarization_job",
|
||||
job_id,
|
||||
_job_status_name(old_status),
|
||||
_job_status_name(int(noteflow_pb2.JOB_STATUS_RUNNING)),
|
||||
meeting_id=meeting_id,
|
||||
)
|
||||
try:
|
||||
async with asyncio.timeout(DIARIZATION_TIMEOUT_SECONDS):
|
||||
updated_count = await self.refine_speaker_diarization(
|
||||
meeting_id=meeting_id,
|
||||
num_speakers=num_speakers,
|
||||
)
|
||||
speaker_ids = await self.collect_speaker_ids(meeting_id)
|
||||
speaker_ids = await ctx.host.collect_speaker_ids(ctx.meeting_id)
|
||||
|
||||
# Update status to COMPLETED
|
||||
await self.update_job_completed(job_id, job, updated_count, speaker_ids)
|
||||
|
||||
except TimeoutError:
|
||||
await self.handle_job_timeout(job_id, job, meeting_id)
|
||||
|
||||
except asyncio.CancelledError:
|
||||
await self.handle_job_cancelled(job_id, job, meeting_id)
|
||||
raise # Re-raise to propagate cancellation
|
||||
|
||||
# INTENTIONAL BROAD HANDLER: Job error boundary
|
||||
# - Diarization can fail in many ways (model errors, audio issues, etc.)
|
||||
# - Must capture any failure and update job status
|
||||
except Exception as exc:
|
||||
await self.handle_job_failed(job_id, job, meeting_id, exc)
|
||||
await ctx.host.update_job_completed(ctx.job_id, ctx.job, updated_count, speaker_ids)
|
||||
except TimeoutError:
|
||||
await ctx.host.handle_job_timeout(ctx.job_id, ctx.job, ctx.meeting_id)
|
||||
except asyncio.CancelledError:
|
||||
await ctx.host.handle_job_cancelled(ctx.job_id, ctx.job, ctx.meeting_id)
|
||||
raise # Re-raise to propagate cancellation
|
||||
# INTENTIONAL BROAD HANDLER: Job error boundary
|
||||
# - Diarization can fail in many ways (model errors, audio issues, etc.)
|
||||
# - Must capture any failure and update job status
|
||||
except Exception as exc:
|
||||
await ctx.host.handle_job_failed(ctx.job_id, ctx.job, ctx.meeting_id, exc)
|
||||
|
||||
@@ -74,7 +74,6 @@ class RefinementMixin:
|
||||
turns: list[SpeakerTurn],
|
||||
) -> int:
|
||||
"""Apply diarization turns to segments and return updated count."""
|
||||
updated_count = 0
|
||||
parsed_meeting_id = parse_meeting_id_or_none(meeting_id)
|
||||
if parsed_meeting_id is None:
|
||||
logger.warning("Invalid meeting_id %s while applying diarization turns", meeting_id)
|
||||
@@ -82,16 +81,27 @@ class RefinementMixin:
|
||||
|
||||
async with self.create_repository_provider() as repo:
|
||||
segments = await repo.segments.get_by_meeting(parsed_meeting_id)
|
||||
for segment in segments:
|
||||
if not apply_speaker_to_segment(segment, turns):
|
||||
continue
|
||||
await _persist_speaker_update(repo, segment)
|
||||
updated_count += 1
|
||||
updated_count = await _apply_turns_to_segments(repo, segments, turns)
|
||||
await repo.commit()
|
||||
|
||||
return updated_count
|
||||
|
||||
|
||||
async def _apply_turns_to_segments(
|
||||
repo: UnitOfWork,
|
||||
segments: list[Segment],
|
||||
turns: list[SpeakerTurn],
|
||||
) -> int:
|
||||
"""Apply speaker turns to segments and persist updates."""
|
||||
updated_count = 0
|
||||
for segment in segments:
|
||||
if not apply_speaker_to_segment(segment, turns):
|
||||
continue
|
||||
await _persist_speaker_update(repo, segment)
|
||||
updated_count += 1
|
||||
return updated_count
|
||||
|
||||
|
||||
async def _persist_speaker_update(
|
||||
repo: UnitOfWork,
|
||||
segment: Segment,
|
||||
|
||||
@@ -2,19 +2,18 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from noteflow.domain.entities import Segment
|
||||
from noteflow.infrastructure.diarization import SpeakerTurn, assign_speaker
|
||||
|
||||
from ...proto import noteflow_pb2
|
||||
from .._types import GrpcContext
|
||||
from ..converters import parse_meeting_id_or_abort, parse_meeting_id_or_none
|
||||
from ..errors import abort_invalid_argument
|
||||
from .._types import GrpcContext
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
from noteflow.domain.ports.unit_of_work import UnitOfWork
|
||||
|
||||
from ..protocols import ServicerHost
|
||||
@@ -93,17 +92,11 @@ class SpeakerMixin:
|
||||
|
||||
meeting_id = await parse_meeting_id_or_abort(request.meeting_id, context)
|
||||
|
||||
updated_count = 0
|
||||
|
||||
async with self.create_repository_provider() as repo:
|
||||
segments = await repo.segments.get_by_meeting(meeting_id)
|
||||
|
||||
for segment in segments:
|
||||
if segment.speaker_id != request.old_speaker_id:
|
||||
continue
|
||||
await _apply_speaker_rename(repo, segment, request.new_speaker_name)
|
||||
updated_count += 1
|
||||
|
||||
updated_count = await _rename_matching_speakers(
|
||||
repo, segments, request.old_speaker_id, request.new_speaker_name
|
||||
)
|
||||
await repo.commit()
|
||||
|
||||
return noteflow_pb2.RenameSpeakerResponse(
|
||||
@@ -112,6 +105,22 @@ class SpeakerMixin:
|
||||
)
|
||||
|
||||
|
||||
async def _rename_matching_speakers(
|
||||
repo: UnitOfWork,
|
||||
segments: Sequence[Segment],
|
||||
old_speaker_id: str,
|
||||
new_speaker_name: str,
|
||||
) -> int:
|
||||
"""Rename speakers matching old_speaker_id to new_speaker_name."""
|
||||
updated_count = 0
|
||||
for segment in segments:
|
||||
if segment.speaker_id != old_speaker_id:
|
||||
continue
|
||||
await _apply_speaker_rename(repo, segment, new_speaker_name)
|
||||
updated_count += 1
|
||||
return updated_count
|
||||
|
||||
|
||||
async def _apply_speaker_rename(
|
||||
repo: UnitOfWork,
|
||||
segment: Segment,
|
||||
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
import asyncio
|
||||
from dataclasses import dataclass
|
||||
from functools import partial
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TYPE_CHECKING, Protocol as TypingProtocol
|
||||
|
||||
import numpy as np
|
||||
from numpy.typing import NDArray
|
||||
@@ -23,6 +23,12 @@ if TYPE_CHECKING:
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class _DiarizationEngine(TypingProtocol):
|
||||
"""Protocol for diarization engine interface."""
|
||||
|
||||
def create_streaming_session(self, meeting_id: str) -> DiarizationSession: ...
|
||||
|
||||
|
||||
class StreamingDiarizationMixin:
|
||||
"""Mixin providing streaming diarization processing."""
|
||||
|
||||
@@ -78,45 +84,24 @@ class StreamingDiarizationMixin:
|
||||
loop: asyncio.AbstractEventLoop,
|
||||
) -> DiarizationSession | None:
|
||||
"""Return an initialized diarization session or None on failure."""
|
||||
# Get or create per-meeting session under lock
|
||||
async with self.diarization_lock:
|
||||
session = state.diarization_session
|
||||
if session is not None:
|
||||
return session
|
||||
# Guard: diarization_engine checked by caller (process_streaming_diarization)
|
||||
if state.diarization_session is not None:
|
||||
return state.diarization_session
|
||||
|
||||
engine = self.diarization_engine
|
||||
if engine is None:
|
||||
return None
|
||||
try:
|
||||
session = await loop.run_in_executor(
|
||||
None,
|
||||
engine.create_streaming_session,
|
||||
meeting_id,
|
||||
)
|
||||
prior_turns = state.diarization_turns
|
||||
prior_stream_time = state.diarization_stream_time
|
||||
if prior_turns or prior_stream_time:
|
||||
session.restore(prior_turns, stream_time=prior_stream_time)
|
||||
state.diarization_session = session
|
||||
return session
|
||||
except (RuntimeError, ValueError) as exc:
|
||||
logger.warning(
|
||||
"Streaming diarization disabled for meeting %s: %s",
|
||||
meeting_id,
|
||||
exc,
|
||||
)
|
||||
state.diarization_streaming_failed = True
|
||||
return None
|
||||
|
||||
return await _create_diarization_session(engine, meeting_id, state, loop)
|
||||
|
||||
async def process_diarization_chunk(
|
||||
self: ServicerHost,
|
||||
context: "DiarizationChunkContext",
|
||||
context: DiarizationChunkContext,
|
||||
session: DiarizationSession,
|
||||
audio: NDArray[np.float32],
|
||||
loop: asyncio.AbstractEventLoop,
|
||||
) -> list[SpeakerTurn] | None:
|
||||
"""Process a diarization chunk, returning new turns or None on failure."""
|
||||
# Process chunk in thread pool (outside lock for parallelism)
|
||||
try:
|
||||
turns = await loop.run_in_executor(
|
||||
None,
|
||||
@@ -136,14 +121,6 @@ class StreamingDiarizationMixin:
|
||||
context.state.diarization_streaming_failed = True
|
||||
return None
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class DiarizationChunkContext:
|
||||
"""Context for processing a diarization chunk."""
|
||||
|
||||
meeting_id: str
|
||||
state: MeetingStreamState
|
||||
|
||||
async def persist_streaming_turns(
|
||||
self: ServicerHost,
|
||||
meeting_id: str,
|
||||
@@ -151,21 +128,82 @@ class DiarizationChunkContext:
|
||||
) -> None:
|
||||
"""Persist streaming turns to database (fire-and-forget)."""
|
||||
try:
|
||||
async with self.create_repository_provider() as repo:
|
||||
if repo.supports_diarization_jobs:
|
||||
repo_turns = [
|
||||
StreamingTurn(
|
||||
speaker=t.speaker,
|
||||
start_time=t.start,
|
||||
end_time=t.end,
|
||||
confidence=t.confidence,
|
||||
)
|
||||
for t in new_turns
|
||||
]
|
||||
await repo.diarization_jobs.add_streaming_turns(meeting_id, repo_turns)
|
||||
await repo.commit()
|
||||
await _persist_turns_to_repo(self, meeting_id, new_turns)
|
||||
# INTENTIONAL BROAD HANDLER: Fire-and-forget persistence
|
||||
# - Turn persistence should not block streaming
|
||||
# - Data can be recovered from audio file if needed
|
||||
except Exception:
|
||||
logger.exception("Failed to persist streaming turns for %s", meeting_id)
|
||||
|
||||
|
||||
async def _create_diarization_session(
|
||||
engine: _DiarizationEngine,
|
||||
meeting_id: str,
|
||||
state: MeetingStreamState,
|
||||
loop: asyncio.AbstractEventLoop,
|
||||
) -> DiarizationSession | None:
|
||||
"""Create and initialize a diarization session."""
|
||||
try:
|
||||
session = await loop.run_in_executor(
|
||||
None,
|
||||
engine.create_streaming_session,
|
||||
meeting_id,
|
||||
)
|
||||
_restore_session_state(session, state)
|
||||
state.diarization_session = session
|
||||
return session
|
||||
except (RuntimeError, ValueError) as exc:
|
||||
logger.warning(
|
||||
"Streaming diarization disabled for meeting %s: %s",
|
||||
meeting_id,
|
||||
exc,
|
||||
)
|
||||
state.diarization_streaming_failed = True
|
||||
return None
|
||||
|
||||
|
||||
def _restore_session_state(session: DiarizationSession, state: MeetingStreamState) -> None:
|
||||
"""Restore prior turns and stream time to session if available."""
|
||||
if state.diarization_turns or state.diarization_stream_time:
|
||||
session.restore(state.diarization_turns, stream_time=state.diarization_stream_time)
|
||||
|
||||
|
||||
def _convert_turns_to_streaming(turns: list[SpeakerTurn]) -> list[StreamingTurn]:
|
||||
"""Convert domain SpeakerTurns to StreamingTurn for persistence."""
|
||||
return [
|
||||
StreamingTurn(
|
||||
speaker=t.speaker,
|
||||
start_time=t.start,
|
||||
end_time=t.end,
|
||||
confidence=t.confidence,
|
||||
)
|
||||
for t in turns
|
||||
]
|
||||
|
||||
|
||||
async def _persist_turns_to_repo(
|
||||
host: ServicerHost,
|
||||
meeting_id: str,
|
||||
new_turns: list[SpeakerTurn],
|
||||
) -> None:
|
||||
"""Persist streaming turns via repository provider.
|
||||
|
||||
Args:
|
||||
host: The servicer host with repository provider.
|
||||
meeting_id: Meeting identifier.
|
||||
new_turns: Turns to persist.
|
||||
"""
|
||||
async with host.create_repository_provider() as repo:
|
||||
if not repo.supports_diarization_jobs:
|
||||
return
|
||||
repo_turns = _convert_turns_to_streaming(new_turns)
|
||||
await repo.diarization_jobs.add_streaming_turns(meeting_id, repo_turns)
|
||||
await repo.commit()
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class DiarizationChunkContext:
|
||||
"""Context for processing a diarization chunk."""
|
||||
|
||||
meeting_id: str
|
||||
state: MeetingStreamState
|
||||
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
import asyncio
|
||||
import contextlib
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Protocol, Self
|
||||
from typing import TYPE_CHECKING, cast
|
||||
|
||||
from noteflow.domain.utils.time import utc_now
|
||||
from noteflow.infrastructure.logging import get_logger
|
||||
@@ -13,10 +13,11 @@ from noteflow.infrastructure.logging import get_logger
|
||||
from ..proto import noteflow_pb2
|
||||
from ._types import GrpcContext
|
||||
from .errors import ERR_CANCELLED_BY_USER, abort_not_found
|
||||
from .protocols import DiarizationJobRepositoryProvider
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from noteflow.domain.ports.repositories import DiarizationJobRepository
|
||||
from noteflow.domain.ports.unit_of_work import UnitOfWork
|
||||
from collections.abc import Callable
|
||||
|
||||
from noteflow.infrastructure.persistence.repositories import DiarizationJob
|
||||
|
||||
logger = get_logger(__name__)
|
||||
@@ -25,42 +26,60 @@ logger = get_logger(__name__)
|
||||
# Diarization job TTL default (1 hour in seconds)
|
||||
_DEFAULT_JOB_TTL_SECONDS: float = 3600.0
|
||||
|
||||
|
||||
class DiarizationJobRepositoryProvider(Protocol):
|
||||
supports_diarization_jobs: bool
|
||||
diarization_jobs: DiarizationJobRepository
|
||||
|
||||
async def commit(self) -> None: ...
|
||||
|
||||
async def __aenter__(self) -> Self: ...
|
||||
|
||||
async def __aexit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_val: BaseException | None,
|
||||
exc_tb: object,
|
||||
) -> None: ...
|
||||
# Error messages for cancel job response
|
||||
_ERR_DB_REQUIRED = "Diarization requires database support"
|
||||
_ERR_JOB_NOT_FOUND = "Job not found"
|
||||
_ERR_ALREADY_COMPLETE = "Job already completed or failed"
|
||||
|
||||
|
||||
class DiarizationJobServicer(Protocol):
|
||||
diarization_tasks: dict[str, asyncio.Task[None]]
|
||||
diarization_jobs: dict[str, DiarizationJob]
|
||||
def _make_cancel_error_response(
|
||||
error_message: str,
|
||||
status: int = noteflow_pb2.JOB_STATUS_UNSPECIFIED,
|
||||
) -> noteflow_pb2.CancelDiarizationJobResponse:
|
||||
"""Create a failure CancelDiarizationJobResponse.
|
||||
|
||||
@property
|
||||
def diarization_job_ttl_seconds(self) -> float: ...
|
||||
Args:
|
||||
error_message: Reason for the cancellation failure.
|
||||
status: Current job status, defaults to UNSPECIFIED.
|
||||
|
||||
async def prune_diarization_jobs(self) -> None: ...
|
||||
Returns:
|
||||
A response indicating failure with the provided message.
|
||||
"""
|
||||
return noteflow_pb2.CancelDiarizationJobResponse(
|
||||
success=False,
|
||||
error_message=error_message,
|
||||
status=status,
|
||||
)
|
||||
|
||||
def create_repository_provider(self) -> UnitOfWork: ...
|
||||
|
||||
async def _cancel_running_task(
|
||||
tasks: dict[str, asyncio.Task[None]],
|
||||
job_id: str,
|
||||
) -> None:
|
||||
"""Cancel an asyncio task if it exists and is still running.
|
||||
|
||||
Args:
|
||||
tasks: Dictionary of active diarization tasks.
|
||||
job_id: The job ID whose task should be cancelled.
|
||||
"""
|
||||
task = tasks.get(job_id)
|
||||
if task is not None and not task.done():
|
||||
task.cancel()
|
||||
with contextlib.suppress(asyncio.CancelledError):
|
||||
await task
|
||||
|
||||
|
||||
class DiarizationJobMixin:
|
||||
"""Mixin providing diarization job management functionality.
|
||||
|
||||
Handles job status queries, cancellation, and pruning.
|
||||
Requires host to implement ServicerHost protocol.
|
||||
Requires host to implement DiarizationJobServicer protocol.
|
||||
"""
|
||||
|
||||
diarization_tasks: dict[str, asyncio.Task[None]]
|
||||
diarization_jobs: dict[str, DiarizationJob]
|
||||
create_repository_provider: Callable[..., object]
|
||||
|
||||
@property
|
||||
def diarization_job_ttl_seconds(self) -> float:
|
||||
"""Return diarization job TTL from settings.
|
||||
@@ -83,7 +102,7 @@ class DiarizationJobMixin:
|
||||
)
|
||||
return _DEFAULT_JOB_TTL_SECONDS
|
||||
|
||||
async def prune_diarization_jobs(self: DiarizationJobServicer) -> None:
|
||||
async def prune_diarization_jobs(self) -> None:
|
||||
"""Remove completed diarization jobs older than retention window.
|
||||
|
||||
Prunes both in-memory task references and database records.
|
||||
@@ -96,7 +115,7 @@ class DiarizationJobMixin:
|
||||
self.diarization_tasks.pop(job_id, None)
|
||||
|
||||
# Prune old completed jobs from database
|
||||
async with self.create_repository_provider() as repo:
|
||||
async with cast(DiarizationJobRepositoryProvider, self.create_repository_provider()) as repo:
|
||||
if not repo.supports_diarization_jobs:
|
||||
logger.debug("Job pruning skipped: database required")
|
||||
return
|
||||
@@ -108,7 +127,7 @@ class DiarizationJobMixin:
|
||||
logger.debug("Pruned %d completed diarization jobs", pruned)
|
||||
|
||||
async def GetDiarizationJobStatus(
|
||||
self: DiarizationJobServicer,
|
||||
self,
|
||||
request: noteflow_pb2.GetDiarizationJobStatusRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.DiarizationJobStatus:
|
||||
@@ -118,7 +137,7 @@ class DiarizationJobMixin:
|
||||
"""
|
||||
await self.prune_diarization_jobs()
|
||||
|
||||
async with self.create_repository_provider() as repo:
|
||||
async with cast(DiarizationJobRepositoryProvider, self.create_repository_provider()) as repo:
|
||||
if not repo.supports_diarization_jobs:
|
||||
await abort_not_found(context, "Diarization jobs (database required)", "")
|
||||
raise # Unreachable but helps type checker
|
||||
@@ -131,7 +150,7 @@ class DiarizationJobMixin:
|
||||
return _build_job_status(job)
|
||||
|
||||
async def CancelDiarizationJob(
|
||||
self: DiarizationJobServicer,
|
||||
self,
|
||||
request: noteflow_pb2.CancelDiarizationJobRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.CancelDiarizationJobResponse:
|
||||
@@ -140,38 +159,19 @@ class DiarizationJobMixin:
|
||||
Cancels the background asyncio task and updates job status to CANCELLED.
|
||||
"""
|
||||
job_id = request.job_id
|
||||
response = noteflow_pb2.CancelDiarizationJobResponse()
|
||||
await _cancel_running_task(self.diarization_tasks, job_id)
|
||||
|
||||
# Cancel the asyncio task if it exists and is still running
|
||||
task = self.diarization_tasks.get(job_id)
|
||||
if task is not None and not task.done():
|
||||
task.cancel()
|
||||
with contextlib.suppress(asyncio.CancelledError):
|
||||
await task
|
||||
|
||||
async with self.create_repository_provider() as repo:
|
||||
async with cast(DiarizationJobRepositoryProvider, self.create_repository_provider()) as repo:
|
||||
if not repo.supports_diarization_jobs:
|
||||
response.success = False
|
||||
response.error_message = "Diarization requires database support"
|
||||
response.status = noteflow_pb2.JOB_STATUS_UNSPECIFIED
|
||||
return response
|
||||
return _make_cancel_error_response(_ERR_DB_REQUIRED)
|
||||
|
||||
job = await repo.diarization_jobs.get(job_id)
|
||||
if job is None:
|
||||
response.success = False
|
||||
response.error_message = "Job not found"
|
||||
response.status = noteflow_pb2.JOB_STATUS_UNSPECIFIED
|
||||
return response
|
||||
return _make_cancel_error_response(_ERR_JOB_NOT_FOUND)
|
||||
|
||||
# Only cancel if job is in a cancellable state
|
||||
if job.status not in (
|
||||
noteflow_pb2.JOB_STATUS_QUEUED,
|
||||
noteflow_pb2.JOB_STATUS_RUNNING,
|
||||
):
|
||||
response.success = False
|
||||
response.error_message = "Job already completed or failed"
|
||||
response.status = int(job.status)
|
||||
return response
|
||||
cancellable_statuses = (noteflow_pb2.JOB_STATUS_QUEUED, noteflow_pb2.JOB_STATUS_RUNNING)
|
||||
if job.status not in cancellable_statuses:
|
||||
return _make_cancel_error_response(_ERR_ALREADY_COMPLETE, int(job.status))
|
||||
|
||||
await repo.diarization_jobs.update_status(
|
||||
job_id,
|
||||
@@ -181,12 +181,13 @@ class DiarizationJobMixin:
|
||||
await repo.commit()
|
||||
|
||||
logger.info("Cancelled diarization job %s", job_id)
|
||||
response.success = True
|
||||
response.status = noteflow_pb2.JOB_STATUS_CANCELLED
|
||||
return response
|
||||
return noteflow_pb2.CancelDiarizationJobResponse(
|
||||
success=True,
|
||||
status=noteflow_pb2.JOB_STATUS_CANCELLED,
|
||||
)
|
||||
|
||||
async def GetActiveDiarizationJobs(
|
||||
self: DiarizationJobServicer,
|
||||
self,
|
||||
request: noteflow_pb2.GetActiveDiarizationJobsRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.GetActiveDiarizationJobsResponse:
|
||||
@@ -197,7 +198,7 @@ class DiarizationJobMixin:
|
||||
"""
|
||||
response = noteflow_pb2.GetActiveDiarizationJobsResponse()
|
||||
|
||||
async with self.create_repository_provider() as repo:
|
||||
async with cast(DiarizationJobRepositoryProvider, self.create_repository_provider()) as repo:
|
||||
if not repo.supports_diarization_jobs:
|
||||
# Return empty list if DB not available
|
||||
return response
|
||||
@@ -239,4 +240,4 @@ def _calculate_progress_percent(job: DiarizationJob) -> float:
|
||||
estimated_duration = audio_duration * 0.17
|
||||
return min(95.0, (elapsed / estimated_duration) * 100)
|
||||
# Fallback: assume 2 minutes total
|
||||
return min(95.0, (elapsed / 120) * 100)
|
||||
return min(95.0, (elapsed / (60 * 2)) * 100)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Protocol, Self
|
||||
from typing import TYPE_CHECKING, cast
|
||||
|
||||
from noteflow.infrastructure.logging import get_logger
|
||||
|
||||
@@ -19,42 +19,17 @@ from .errors import (
|
||||
require_feature_entities,
|
||||
require_ner_service,
|
||||
)
|
||||
from .protocols import EntitiesRepositoryProvider
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
|
||||
from noteflow.application.services.ner_service import NerService
|
||||
from noteflow.domain.ports.repositories import EntityRepository
|
||||
from noteflow.domain.ports.unit_of_work import UnitOfWork
|
||||
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class EntitiesServicer(Protocol):
|
||||
"""Protocol for hosts that support entity extraction operations."""
|
||||
|
||||
ner_service: NerService | None
|
||||
|
||||
def create_repository_provider(self) -> EntitiesRepositoryProvider | UnitOfWork: ...
|
||||
|
||||
|
||||
class EntitiesRepositoryProvider(Protocol):
|
||||
"""Minimal repository provider protocol for entity operations."""
|
||||
|
||||
supports_entities: bool
|
||||
entities: EntityRepository
|
||||
|
||||
async def commit(self) -> None: ...
|
||||
|
||||
async def __aenter__(self) -> Self: ...
|
||||
|
||||
async def __aexit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_val: BaseException | None,
|
||||
exc_tb: object,
|
||||
) -> None: ...
|
||||
|
||||
|
||||
class EntitiesMixin:
|
||||
"""Mixin for entity extraction RPC methods.
|
||||
|
||||
@@ -63,9 +38,10 @@ class EntitiesMixin:
|
||||
"""
|
||||
|
||||
ner_service: NerService | None
|
||||
create_repository_provider: Callable[..., object]
|
||||
|
||||
async def ExtractEntities(
|
||||
self: EntitiesServicer,
|
||||
self,
|
||||
request: noteflow_pb2.ExtractEntitiesRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ExtractEntitiesResponse:
|
||||
@@ -101,7 +77,7 @@ class EntitiesMixin:
|
||||
)
|
||||
|
||||
async def UpdateEntity(
|
||||
self: EntitiesServicer,
|
||||
self,
|
||||
request: noteflow_pb2.UpdateEntityRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.UpdateEntityResponse:
|
||||
@@ -113,7 +89,7 @@ class EntitiesMixin:
|
||||
meeting_id = await parse_meeting_id_or_abort(request.meeting_id, context)
|
||||
entity_id = await parse_entity_id(request.entity_id, context)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(EntitiesRepositoryProvider, self.create_repository_provider()) as uow:
|
||||
await require_feature_entities(uow, context)
|
||||
entity = await uow.entities.get(entity_id)
|
||||
if entity is None or entity.meeting_id != meeting_id:
|
||||
@@ -141,7 +117,7 @@ class EntitiesMixin:
|
||||
)
|
||||
|
||||
async def DeleteEntity(
|
||||
self: EntitiesServicer,
|
||||
self,
|
||||
request: noteflow_pb2.DeleteEntityRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.DeleteEntityResponse:
|
||||
@@ -153,7 +129,7 @@ class EntitiesMixin:
|
||||
meeting_id = await parse_meeting_id_or_abort(request.meeting_id, context)
|
||||
entity_id = await parse_entity_id(request.entity_id, context)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(EntitiesRepositoryProvider, self.create_repository_provider()) as uow:
|
||||
await require_feature_entities(uow, context)
|
||||
entity = await uow.entities.get(entity_id)
|
||||
if entity is None or entity.meeting_id != meeting_id:
|
||||
|
||||
@@ -11,6 +11,8 @@ This module provides:
|
||||
- Get-or-abort helpers: get_*_or_abort for fetch + not-found patterns
|
||||
"""
|
||||
|
||||
from uuid import UUID
|
||||
|
||||
from ._abort import (
|
||||
ERR_CANCELLED_BY_USER,
|
||||
AbortableContext,
|
||||
@@ -38,7 +40,6 @@ from ._fetch import (
|
||||
from ._parse import (
|
||||
parse_entity_id,
|
||||
parse_integration_id,
|
||||
parse_meeting_id,
|
||||
parse_project_id,
|
||||
parse_webhook_id,
|
||||
parse_workspace_id,
|
||||
@@ -57,6 +58,16 @@ from ._require import (
|
||||
require_project_service,
|
||||
)
|
||||
|
||||
async def parse_meeting_id(meeting_id_str: str, context: AbortableContext) -> UUID:
|
||||
"""Parse meeting_id string to UUID, aborting with INVALID_ARGUMENT if invalid."""
|
||||
from ._constants import INVALID_MEETING_ID_MESSAGE
|
||||
from ._abort import abort_invalid_argument
|
||||
|
||||
try:
|
||||
return UUID(meeting_id_str)
|
||||
except ValueError:
|
||||
await abort_invalid_argument(context, INVALID_MEETING_ID_MESSAGE)
|
||||
|
||||
__all__ = [
|
||||
"ENTITY_ENTITY",
|
||||
"ENTITY_INTEGRATION",
|
||||
|
||||
@@ -191,7 +191,7 @@ async def handle_domain_error(
|
||||
raise AssertionError(_ERR_UNREACHABLE)
|
||||
|
||||
|
||||
def domain_error_handler( # noqa: UP047
|
||||
def domain_error_handler(
|
||||
func: Callable[P, Awaitable[T]],
|
||||
) -> Callable[P, Awaitable[T]]:
|
||||
"""Decorator to automatically handle DomainError in gRPC methods.
|
||||
@@ -216,12 +216,22 @@ def domain_error_handler( # noqa: UP047
|
||||
try:
|
||||
return await func(*args, **kwargs)
|
||||
except DomainError as e:
|
||||
# args[0] is self, args[2] is context in gRPC methods
|
||||
# Standard signature: (self, request, context)
|
||||
context = args[2] if len(args) > 2 else kwargs.get("context")
|
||||
if context is not None:
|
||||
abortable_context = cast(AbortableContext, context)
|
||||
await abortable_context.abort(e.grpc_status, e.message)
|
||||
await _abort_with_domain_error(args, kwargs, e)
|
||||
raise
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
async def _abort_with_domain_error(
|
||||
args: tuple[object, ...],
|
||||
kwargs: dict[str, object],
|
||||
error: DomainError,
|
||||
) -> None:
|
||||
"""Extract context from args/kwargs and abort with domain error."""
|
||||
# args[0] is self, args[2] is context in gRPC methods
|
||||
# Standard signature: (self, request, context)
|
||||
context = args[2] if len(args) > 2 else kwargs.get("context")
|
||||
if context is None:
|
||||
return
|
||||
abortable_context = cast(AbortableContext, context)
|
||||
await abortable_context.abort(error.grpc_status, error.message)
|
||||
|
||||
6
src/noteflow/grpc/_mixins/errors/_constants.py
Normal file
6
src/noteflow/grpc/_mixins/errors/_constants.py
Normal file
@@ -0,0 +1,6 @@
|
||||
"""Error message constants for gRPC mixins."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
INVALID_ANNOTATION_ID_MESSAGE: Final[str] = "Invalid annotation_id"
|
||||
INVALID_MEETING_ID_MESSAGE: Final[str] = "Invalid meeting_id"
|
||||
@@ -8,6 +8,8 @@ from __future__ import annotations
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import UUID
|
||||
|
||||
from noteflow.domain.constants.fields import ENTITY_MEETING as ENTITY_MEETING_NAME
|
||||
|
||||
from ._abort import AbortableContext, abort_not_found
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -21,7 +23,7 @@ if TYPE_CHECKING:
|
||||
from noteflow.domain.webhooks.events import WebhookConfig
|
||||
|
||||
# Entity type names for abort_not_found calls
|
||||
ENTITY_MEETING = "Meeting"
|
||||
ENTITY_MEETING = ENTITY_MEETING_NAME
|
||||
ENTITY_ENTITY = "Entity"
|
||||
ENTITY_INTEGRATION = "Integration"
|
||||
ENTITY_PROJECT = "Project"
|
||||
|
||||
@@ -11,7 +11,6 @@ from uuid import UUID
|
||||
from noteflow.config.constants import (
|
||||
ERROR_INVALID_ENTITY_ID_FORMAT,
|
||||
ERROR_INVALID_INTEGRATION_ID_FORMAT,
|
||||
ERROR_INVALID_MEETING_ID_FORMAT,
|
||||
ERROR_INVALID_PROJECT_ID_FORMAT,
|
||||
ERROR_INVALID_WEBHOOK_ID_FORMAT,
|
||||
ERROR_INVALID_WORKSPACE_ID_FORMAT,
|
||||
@@ -64,29 +63,6 @@ async def parse_project_id(
|
||||
await abort_invalid_argument(context, ERROR_INVALID_PROJECT_ID_FORMAT)
|
||||
|
||||
|
||||
async def parse_meeting_id(
|
||||
meeting_id_str: str,
|
||||
context: AbortableContext,
|
||||
) -> UUID:
|
||||
"""Parse meeting_id string to UUID, aborting with INVALID_ARGUMENT if invalid.
|
||||
|
||||
Args:
|
||||
meeting_id_str: The meeting ID string from request.
|
||||
context: gRPC servicer context for abort.
|
||||
|
||||
Returns:
|
||||
Parsed UUID.
|
||||
|
||||
Raises:
|
||||
grpc.RpcError: If meeting_id format is invalid.
|
||||
"""
|
||||
try:
|
||||
return UUID(meeting_id_str)
|
||||
except ValueError:
|
||||
await abort_invalid_argument(context, ERROR_INVALID_MEETING_ID_FORMAT)
|
||||
raise # Unreachable: abort raises, but helps type checker
|
||||
|
||||
|
||||
async def parse_integration_id(
|
||||
integration_id_str: str,
|
||||
context: AbortableContext,
|
||||
|
||||
@@ -10,6 +10,7 @@ from typing import TYPE_CHECKING, Protocol
|
||||
|
||||
from noteflow.config.constants import FEATURE_NAME_PROJECTS
|
||||
|
||||
from ..._constants import WORKSPACES_LABEL
|
||||
from ._abort import AbortableContext, abort_database_required, abort_failed_precondition
|
||||
|
||||
if TYPE_CHECKING:
|
||||
@@ -55,7 +56,7 @@ class SupportsWorkspaces(Protocol):
|
||||
FEATURE_WEBHOOKS = "Webhooks"
|
||||
FEATURE_ENTITIES = "Named Entities"
|
||||
FEATURE_INTEGRATIONS = "Integrations"
|
||||
FEATURE_WORKSPACES = "Workspaces"
|
||||
FEATURE_WORKSPACES = WORKSPACES_LABEL
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
||||
@@ -3,14 +3,12 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
from typing import TYPE_CHECKING, Protocol
|
||||
from typing import TYPE_CHECKING, cast
|
||||
|
||||
from noteflow.application.services.export_service import (
|
||||
ExportFormat,
|
||||
ExportRepositoryProvider,
|
||||
ExportService,
|
||||
)
|
||||
from noteflow.application.services.export_service import ExportFormat, ExportService
|
||||
from noteflow.application.services.protocols import ExportRepositoryProvider
|
||||
from noteflow.config.constants import EXPORT_EXT_HTML, EXPORT_EXT_PDF, EXPORT_FORMAT_HTML
|
||||
from noteflow.config.constants.encoding import ASCII_ENCODING
|
||||
from noteflow.infrastructure.logging import get_logger
|
||||
|
||||
from ..proto import noteflow_pb2
|
||||
@@ -18,11 +16,6 @@ from ._types import GrpcContext
|
||||
from .converters import parse_meeting_id_or_abort, proto_to_export_format
|
||||
from .errors import ENTITY_MEETING, abort_not_found
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from noteflow.domain.ports.unit_of_work import UnitOfWork
|
||||
|
||||
from ._types import GrpcContext
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
# Format metadata lookup
|
||||
@@ -33,21 +26,17 @@ _FORMAT_METADATA: dict[ExportFormat, tuple[str, str]] = {
|
||||
}
|
||||
|
||||
|
||||
class ExportServicer(Protocol):
|
||||
"""Protocol for hosts that support export operations."""
|
||||
|
||||
def create_repository_provider(self) -> ExportRepositoryProvider | UnitOfWork: ...
|
||||
|
||||
|
||||
class ExportMixin:
|
||||
"""Mixin providing export functionality.
|
||||
|
||||
Requires host to implement ServicerHost protocol.
|
||||
Requires host to implement ExportServicer protocol.
|
||||
Works with both database and memory backends via RepositoryProvider.
|
||||
"""
|
||||
|
||||
create_repository_provider: Callable[..., object]
|
||||
|
||||
async def ExportTranscript(
|
||||
self: ExportServicer,
|
||||
self,
|
||||
request: noteflow_pb2.ExportTranscriptRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ExportTranscriptResponse:
|
||||
@@ -55,27 +44,20 @@ class ExportMixin:
|
||||
# Map proto format to ExportFormat
|
||||
fmt = proto_to_export_format(request.format)
|
||||
fmt_name, fmt_ext = _FORMAT_METADATA.get(fmt, ("Unknown", ""))
|
||||
|
||||
logger.info(
|
||||
"Export requested: meeting_id=%s format=%s",
|
||||
request.meeting_id,
|
||||
fmt_name,
|
||||
)
|
||||
|
||||
# Use unified repository provider - works with both DB and memory
|
||||
meeting_id = await parse_meeting_id_or_abort(request.meeting_id, context)
|
||||
|
||||
export_service = ExportService(self.create_repository_provider())
|
||||
export_service = ExportService(cast(ExportRepositoryProvider, self.create_repository_provider()))
|
||||
try:
|
||||
result = await export_service.export_transcript(
|
||||
meeting_id,
|
||||
fmt,
|
||||
)
|
||||
|
||||
result = await export_service.export_transcript(meeting_id, fmt)
|
||||
# Handle bytes vs string output
|
||||
# PDF returns bytes which must be base64-encoded for gRPC string transport
|
||||
if isinstance(result, bytes):
|
||||
content = base64.b64encode(result).decode("ascii")
|
||||
content = base64.b64encode(result).decode(ASCII_ENCODING)
|
||||
content_size = len(result)
|
||||
else:
|
||||
content = result
|
||||
@@ -87,7 +69,6 @@ class ExportMixin:
|
||||
fmt_name,
|
||||
content_size,
|
||||
)
|
||||
|
||||
return noteflow_pb2.ExportTranscriptResponse(
|
||||
content=content,
|
||||
format_name=fmt_name,
|
||||
@@ -101,3 +82,7 @@ class ExportMixin:
|
||||
str(exc),
|
||||
)
|
||||
await abort_not_found(context, ENTITY_MEETING, request.meeting_id)
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
|
||||
from noteflow.application.services.protocols import ExportRepositoryProvider
|
||||
|
||||
@@ -2,18 +2,26 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Protocol
|
||||
from typing import TYPE_CHECKING, cast
|
||||
|
||||
from noteflow.application.services.identity_service import IdentityService
|
||||
from noteflow.domain.constants.fields import ENTITY_WORKSPACE
|
||||
from noteflow.domain.entities.integration import IntegrationType
|
||||
from noteflow.domain.identity.context import OperationContext
|
||||
from noteflow.domain.ports.unit_of_work import UnitOfWork
|
||||
from noteflow.infrastructure.logging import get_logger
|
||||
|
||||
from .._constants import WORKSPACES_LABEL
|
||||
from ..proto import noteflow_pb2
|
||||
from .errors import abort_database_required, abort_invalid_argument, abort_not_found, parse_workspace_id
|
||||
from ._types import GrpcContext
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
from uuid import UUID
|
||||
|
||||
from noteflow.application.services.identity_service import IdentityService
|
||||
from noteflow.domain.identity.context import OperationContext
|
||||
from noteflow.domain.identity.entities import Workspace, WorkspaceMembership
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
@@ -32,17 +40,6 @@ async def _resolve_auth_status(uow: UnitOfWork) -> tuple[bool, str]:
|
||||
return False, ""
|
||||
|
||||
|
||||
class IdentityServicer(Protocol):
|
||||
"""Protocol for hosts that support identity operations."""
|
||||
|
||||
def create_repository_provider(self) -> UnitOfWork: ...
|
||||
|
||||
def get_operation_context(self, context: GrpcContext) -> OperationContext: ...
|
||||
|
||||
@property
|
||||
def identity_service(self) -> IdentityService: ...
|
||||
|
||||
|
||||
class IdentityMixin:
|
||||
"""Mixin providing identity management functionality.
|
||||
|
||||
@@ -52,8 +49,12 @@ class IdentityMixin:
|
||||
- SwitchWorkspace: Switch to a different workspace
|
||||
"""
|
||||
|
||||
identity_service: IdentityService
|
||||
create_repository_provider: Callable[..., object]
|
||||
get_operation_context: Callable[..., OperationContext]
|
||||
|
||||
async def GetCurrentUser(
|
||||
self: IdentityServicer,
|
||||
self,
|
||||
request: noteflow_pb2.GetCurrentUserRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.GetCurrentUserResponse:
|
||||
@@ -61,7 +62,7 @@ class IdentityMixin:
|
||||
# Note: op_context from headers provides request metadata
|
||||
_ = self.get_operation_context(context)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(UnitOfWork, self.create_repository_provider()) as uow:
|
||||
# Get or create default user/workspace for local-first mode
|
||||
user_ctx = await self.identity_service.get_or_create_default_user(uow)
|
||||
ws_ctx = await self.identity_service.get_or_create_default_workspace(
|
||||
@@ -91,21 +92,21 @@ class IdentityMixin:
|
||||
)
|
||||
|
||||
async def ListWorkspaces(
|
||||
self: IdentityServicer,
|
||||
self,
|
||||
request: noteflow_pb2.ListWorkspacesRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ListWorkspacesResponse:
|
||||
"""List workspaces the current user belongs to."""
|
||||
_ = self.get_operation_context(context)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(UnitOfWork, self.create_repository_provider()) as uow:
|
||||
if not uow.supports_workspaces:
|
||||
await abort_database_required(context, "Workspaces")
|
||||
await abort_database_required(context, WORKSPACES_LABEL)
|
||||
|
||||
user_ctx = await self.identity_service.get_or_create_default_user(uow)
|
||||
|
||||
limit = request.limit if request.limit > 0 else 50
|
||||
offset = request.offset if request.offset >= 0 else 0
|
||||
offset = max(request.offset, 0)
|
||||
|
||||
workspaces = await self.identity_service.list_workspaces(
|
||||
uow, user_ctx.user_id, limit, offset
|
||||
@@ -138,7 +139,7 @@ class IdentityMixin:
|
||||
)
|
||||
|
||||
async def SwitchWorkspace(
|
||||
self: IdentityServicer,
|
||||
self,
|
||||
request: noteflow_pb2.SwitchWorkspaceRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.SwitchWorkspaceResponse:
|
||||
@@ -147,29 +148,19 @@ class IdentityMixin:
|
||||
|
||||
if not request.workspace_id:
|
||||
await abort_invalid_argument(context, "workspace_id is required")
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
# Parse and validate workspace ID (aborts with INVALID_ARGUMENT if invalid)
|
||||
workspace_id = await parse_workspace_id(request.workspace_id, context)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(UnitOfWork, self.create_repository_provider()) as uow:
|
||||
if not uow.supports_workspaces:
|
||||
await abort_database_required(context, "Workspaces")
|
||||
await abort_database_required(context, WORKSPACES_LABEL)
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
user_ctx = await self.identity_service.get_or_create_default_user(uow)
|
||||
|
||||
# Verify workspace exists
|
||||
workspace = await uow.workspaces.get(workspace_id)
|
||||
if not workspace:
|
||||
await abort_not_found(context, "Workspace", str(workspace_id))
|
||||
|
||||
# Verify user has access
|
||||
membership = await uow.workspaces.get_membership(
|
||||
workspace_id, user_ctx.user_id
|
||||
workspace, membership = await self._verify_workspace_access(
|
||||
uow, workspace_id, user_ctx.user_id, context
|
||||
)
|
||||
if not membership:
|
||||
await abort_not_found(
|
||||
context, "Workspace membership", str(workspace_id)
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"SwitchWorkspace: user_id=%s, workspace_id=%s",
|
||||
@@ -187,3 +178,23 @@ class IdentityMixin:
|
||||
role=membership.role.value,
|
||||
),
|
||||
)
|
||||
|
||||
async def _verify_workspace_access(
|
||||
self,
|
||||
uow: UnitOfWork,
|
||||
workspace_id: UUID,
|
||||
user_id: UUID,
|
||||
context: GrpcContext,
|
||||
) -> tuple[Workspace, WorkspaceMembership]:
|
||||
"""Verify workspace exists and user has access."""
|
||||
workspace = await uow.workspaces.get(workspace_id)
|
||||
if not workspace:
|
||||
await abort_not_found(context, ENTITY_WORKSPACE, str(workspace_id))
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
membership = await uow.workspaces.get_membership(workspace_id, user_id)
|
||||
if not membership:
|
||||
await abort_not_found(context, "Workspace membership", str(workspace_id))
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
return workspace, membership
|
||||
|
||||
@@ -4,13 +4,14 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Mapping, Sequence
|
||||
from typing import TYPE_CHECKING, Protocol, Self, cast
|
||||
from typing import TYPE_CHECKING, cast
|
||||
from uuid import UUID
|
||||
|
||||
from noteflow.config.constants import (
|
||||
DEFAULT_MEETING_TITLE,
|
||||
ERROR_INVALID_PROJECT_ID_PREFIX,
|
||||
)
|
||||
from noteflow.domain.constants.fields import PROJECT_ID
|
||||
from noteflow.domain.entities import Meeting
|
||||
from noteflow.domain.value_objects import MeetingState
|
||||
from noteflow.infrastructure.logging import get_logger, get_workspace_id
|
||||
@@ -18,74 +19,26 @@ from noteflow.infrastructure.logging import get_logger, get_workspace_id
|
||||
from ..proto import noteflow_pb2
|
||||
from .converters import meeting_to_proto, parse_meeting_id_or_abort
|
||||
from .errors import ENTITY_MEETING, abort_invalid_argument, abort_not_found
|
||||
from .protocols import MeetingRepositoryProvider
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
|
||||
from noteflow.application.services.project_service import ProjectService
|
||||
from noteflow.application.services.webhook_service import WebhookService
|
||||
from noteflow.domain.ports.repositories import (
|
||||
DiarizationJobRepository,
|
||||
MeetingRepository,
|
||||
SegmentRepository,
|
||||
SummaryRepository,
|
||||
)
|
||||
from noteflow.domain.ports.repositories.identity import ProjectRepository, WorkspaceRepository
|
||||
from noteflow.domain.ports.unit_of_work import UnitOfWork
|
||||
from noteflow.infrastructure.audio.writer import MeetingAudioWriter
|
||||
|
||||
from ._types import GrpcContext
|
||||
from .protocols import MeetingRepositoryProvider, ServicerHost
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
ID_TRUNCATE_LEN = 4 * 2
|
||||
|
||||
# Timeout for waiting for stream to exit gracefully
|
||||
STOP_WAIT_TIMEOUT_SECONDS: float = 2.0
|
||||
|
||||
|
||||
class MeetingRepositoryProvider(Protocol):
|
||||
"""Repository provider protocol for meeting operations."""
|
||||
|
||||
@property
|
||||
def meetings(self) -> MeetingRepository: ...
|
||||
|
||||
@property
|
||||
def segments(self) -> SegmentRepository: ...
|
||||
|
||||
@property
|
||||
def summaries(self) -> SummaryRepository: ...
|
||||
|
||||
@property
|
||||
def diarization_jobs(self) -> DiarizationJobRepository: ...
|
||||
|
||||
@property
|
||||
def projects(self) -> ProjectRepository: ...
|
||||
|
||||
@property
|
||||
def workspaces(self) -> WorkspaceRepository: ...
|
||||
|
||||
@property
|
||||
def supports_diarization_jobs(self) -> bool: ...
|
||||
|
||||
@property
|
||||
def supports_projects(self) -> bool: ...
|
||||
|
||||
@property
|
||||
def supports_workspaces(self) -> bool: ...
|
||||
|
||||
async def commit(self) -> None: ...
|
||||
|
||||
async def __aenter__(self) -> Self: ...
|
||||
|
||||
async def __aexit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_val: BaseException | None,
|
||||
exc_tb: object,
|
||||
) -> None: ...
|
||||
|
||||
|
||||
class _HasField(Protocol):
|
||||
def HasField(self, field_name: str) -> bool: ...
|
||||
|
||||
|
||||
async def _parse_project_ids_or_abort(
|
||||
request: noteflow_pb2.ListMeetingsRequest,
|
||||
context: GrpcContext,
|
||||
@@ -100,7 +53,9 @@ async def _parse_project_ids_or_abort(
|
||||
project_ids.append(UUID(raw_project_id))
|
||||
except ValueError:
|
||||
truncated = (
|
||||
raw_project_id[:8] + "..." if len(raw_project_id) > 8 else raw_project_id
|
||||
f"{raw_project_id[:ID_TRUNCATE_LEN]}..."
|
||||
if len(raw_project_id) > ID_TRUNCATE_LEN
|
||||
else raw_project_id
|
||||
)
|
||||
logger.warning(
|
||||
"ListMeetings: invalid project_ids format",
|
||||
@@ -121,14 +76,16 @@ async def _parse_project_id_or_abort(
|
||||
context: GrpcContext,
|
||||
) -> UUID | None:
|
||||
"""Parse optional project_id, aborting on invalid values."""
|
||||
if not (cast(_HasField, request).HasField("project_id") and request.project_id):
|
||||
if not (request.HasField(PROJECT_ID) and request.project_id):
|
||||
return None
|
||||
|
||||
try:
|
||||
return UUID(request.project_id)
|
||||
except ValueError:
|
||||
truncated = (
|
||||
request.project_id[:8] + "..." if len(request.project_id) > 8 else request.project_id
|
||||
f"{request.project_id[:ID_TRUNCATE_LEN]}..."
|
||||
if len(request.project_id) > ID_TRUNCATE_LEN
|
||||
else request.project_id
|
||||
)
|
||||
logger.warning(
|
||||
"ListMeetings: invalid project_id format",
|
||||
@@ -140,24 +97,8 @@ async def _parse_project_id_or_abort(
|
||||
return None
|
||||
|
||||
|
||||
class MeetingServicer(Protocol):
|
||||
"""Protocol for hosts that support meeting operations."""
|
||||
|
||||
project_service: ProjectService | None
|
||||
webhook_service: WebhookService | None
|
||||
active_streams: set[str]
|
||||
stop_requested: set[str]
|
||||
audio_writers: dict[str, MeetingAudioWriter]
|
||||
|
||||
def create_repository_provider(self) -> MeetingRepositoryProvider | UnitOfWork: ...
|
||||
|
||||
def close_audio_writer(self, meeting_id: str) -> None: ...
|
||||
|
||||
async def fire_stop_webhooks(self, meeting: Meeting) -> None: ...
|
||||
|
||||
|
||||
async def _resolve_active_project_id(
|
||||
host: MeetingServicer,
|
||||
host: ServicerHost,
|
||||
repo: MeetingRepositoryProvider,
|
||||
) -> UUID | None:
|
||||
"""Resolve active project ID from workspace context.
|
||||
@@ -189,7 +130,11 @@ async def _resolve_active_project_id(
|
||||
try:
|
||||
workspace_uuid = UUID(workspace_id)
|
||||
except ValueError:
|
||||
truncated = workspace_id[:8] + "..." if len(workspace_id) > 8 else workspace_id
|
||||
truncated = (
|
||||
f"{workspace_id[:ID_TRUNCATE_LEN]}..."
|
||||
if len(workspace_id) > ID_TRUNCATE_LEN
|
||||
else workspace_id
|
||||
)
|
||||
logger.warning(
|
||||
"resolve_active_project: invalid workspace_id format",
|
||||
workspace_id_truncated=truncated,
|
||||
@@ -206,30 +151,29 @@ async def _resolve_active_project_id(
|
||||
class MeetingMixin:
|
||||
"""Mixin providing meeting CRUD functionality.
|
||||
|
||||
Requires host to implement MeetingServicer protocol.
|
||||
Requires host to implement ServicerHost protocol.
|
||||
Works with both database and memory backends via RepositoryProvider.
|
||||
"""
|
||||
|
||||
project_service: ProjectService | None
|
||||
webhook_service: WebhookService | None
|
||||
active_streams: set[str]
|
||||
stop_requested: set[str]
|
||||
audio_writers: dict[str, MeetingAudioWriter]
|
||||
create_repository_provider: Callable[..., object]
|
||||
close_audio_writer: Callable[..., None]
|
||||
|
||||
async def CreateMeeting(
|
||||
self: MeetingServicer,
|
||||
self,
|
||||
request: noteflow_pb2.CreateMeetingRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.Meeting:
|
||||
"""Create a new meeting."""
|
||||
metadata_map = cast(Mapping[str, str], request.metadata)
|
||||
metadata: dict[str, str] = dict(metadata_map) if metadata_map else {}
|
||||
project_id: UUID | None = None
|
||||
if cast(_HasField, request).HasField("project_id") and request.project_id:
|
||||
try:
|
||||
project_id = UUID(request.project_id)
|
||||
except ValueError:
|
||||
logger.warning(
|
||||
"CreateMeeting: invalid project_id format",
|
||||
project_id=request.project_id,
|
||||
)
|
||||
await abort_invalid_argument(context, f"{ERROR_INVALID_PROJECT_ID_PREFIX}{request.project_id}")
|
||||
project_id = await self._parse_project_id_from_request(request, context)
|
||||
|
||||
async with self.create_repository_provider() as repo:
|
||||
async with cast(MeetingRepositoryProvider, self.create_repository_provider()) as repo:
|
||||
if project_id is None:
|
||||
project_id = await _resolve_active_project_id(self, repo)
|
||||
|
||||
@@ -249,7 +193,7 @@ class MeetingMixin:
|
||||
return meeting_to_proto(saved)
|
||||
|
||||
async def StopMeeting(
|
||||
self: MeetingServicer,
|
||||
self,
|
||||
request: noteflow_pb2.StopMeetingRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.Meeting:
|
||||
@@ -261,54 +205,36 @@ class MeetingMixin:
|
||||
meeting_id = request.meeting_id
|
||||
logger.info("StopMeeting requested", meeting_id=meeting_id)
|
||||
|
||||
# Signal stop to active stream and wait for graceful exit
|
||||
if meeting_id in self.active_streams:
|
||||
self.stop_requested.add(meeting_id)
|
||||
# Wait briefly for stream to detect stop request and exit
|
||||
wait_iterations = int(STOP_WAIT_TIMEOUT_SECONDS * 10) # 100ms intervals
|
||||
for _ in range(wait_iterations):
|
||||
if meeting_id not in self.active_streams:
|
||||
break
|
||||
await asyncio.sleep(0.1)
|
||||
# Clean up stop request even if stream didn't exit
|
||||
self.stop_requested.discard(meeting_id)
|
||||
|
||||
# Close audio writer if open (stream cleanup may have done this)
|
||||
if meeting_id in self.audio_writers:
|
||||
self.close_audio_writer(meeting_id)
|
||||
await self._wait_for_stream_exit(meeting_id)
|
||||
self._cleanup_audio_writer(meeting_id)
|
||||
|
||||
parsed_meeting_id = await parse_meeting_id_or_abort(meeting_id, context)
|
||||
async with self.create_repository_provider() as repo:
|
||||
async with cast(MeetingRepositoryProvider, self.create_repository_provider()) as repo:
|
||||
meeting = await repo.meetings.get(parsed_meeting_id)
|
||||
if meeting is None:
|
||||
logger.warning("StopMeeting: meeting not found", meeting_id=meeting_id)
|
||||
await abort_not_found(context, ENTITY_MEETING, meeting_id)
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
previous_state = meeting.state.value
|
||||
|
||||
# Idempotency: return success if already stopped/stopping/completed
|
||||
terminal_states = (MeetingState.STOPPED, MeetingState.STOPPING, MeetingState.COMPLETED)
|
||||
if meeting.state in terminal_states:
|
||||
logger.debug("StopMeeting: already terminal", meeting_id=meeting_id, state=meeting.state.value)
|
||||
return meeting_to_proto(meeting)
|
||||
|
||||
try:
|
||||
meeting.begin_stopping() # RECORDING -> STOPPING -> STOPPED
|
||||
meeting.stop_recording()
|
||||
except ValueError as e:
|
||||
logger.error("StopMeeting: invalid transition", meeting_id=meeting_id, state=previous_state, error=str(e))
|
||||
await abort_invalid_argument(context, str(e))
|
||||
previous_state = meeting.state.value
|
||||
await self._transition_to_stopped(meeting, meeting_id, previous_state, context)
|
||||
await repo.meetings.update(meeting)
|
||||
# Clean up streaming diarization turns if DB supports it
|
||||
|
||||
if repo.supports_diarization_jobs:
|
||||
await repo.diarization_jobs.clear_streaming_turns(meeting_id)
|
||||
|
||||
await repo.commit()
|
||||
logger.info("Meeting stopped", meeting_id=meeting_id, from_state=previous_state, to_state=meeting.state.value)
|
||||
await self.fire_stop_webhooks(meeting)
|
||||
return meeting_to_proto(meeting)
|
||||
|
||||
async def fire_stop_webhooks(self: MeetingServicer, meeting: Meeting) -> None:
|
||||
async def fire_stop_webhooks(self, meeting: Meeting) -> None:
|
||||
"""Trigger webhooks for meeting stop (fire-and-forget)."""
|
||||
if self.webhook_service is None:
|
||||
return
|
||||
@@ -325,8 +251,60 @@ class MeetingMixin:
|
||||
except Exception:
|
||||
logger.exception("Failed to trigger meeting.completed webhooks")
|
||||
|
||||
async def _wait_for_stream_exit(self, meeting_id: str) -> None:
|
||||
"""Signal stop to active stream and wait for graceful exit."""
|
||||
if meeting_id not in self.active_streams:
|
||||
return
|
||||
|
||||
self.stop_requested.add(meeting_id)
|
||||
wait_iterations = int(STOP_WAIT_TIMEOUT_SECONDS * 10) # 100ms intervals
|
||||
for _ in range(wait_iterations):
|
||||
if meeting_id not in self.active_streams:
|
||||
break
|
||||
await asyncio.sleep(0.1)
|
||||
self.stop_requested.discard(meeting_id)
|
||||
|
||||
def _cleanup_audio_writer(self, meeting_id: str) -> None:
|
||||
"""Close audio writer if open."""
|
||||
if meeting_id in self.audio_writers:
|
||||
self.close_audio_writer(meeting_id)
|
||||
|
||||
async def _transition_to_stopped(
|
||||
self,
|
||||
meeting: Meeting,
|
||||
meeting_id: str,
|
||||
previous_state: str,
|
||||
context: GrpcContext,
|
||||
) -> None:
|
||||
"""Transition meeting to STOPPED state."""
|
||||
try:
|
||||
meeting.begin_stopping() # RECORDING -> STOPPING -> STOPPED
|
||||
meeting.stop_recording()
|
||||
except ValueError as e:
|
||||
logger.error("StopMeeting: invalid transition", meeting_id=meeting_id, state=previous_state, error=str(e))
|
||||
await abort_invalid_argument(context, str(e))
|
||||
|
||||
async def _parse_project_id_from_request(
|
||||
self,
|
||||
request: noteflow_pb2.CreateMeetingRequest,
|
||||
context: GrpcContext,
|
||||
) -> UUID | None:
|
||||
"""Parse project_id from request, aborting on invalid format."""
|
||||
if not (request.HasField(PROJECT_ID) and request.project_id):
|
||||
return None
|
||||
|
||||
try:
|
||||
return UUID(request.project_id)
|
||||
except ValueError:
|
||||
logger.warning(
|
||||
"CreateMeeting: invalid project_id format",
|
||||
project_id=request.project_id,
|
||||
)
|
||||
await abort_invalid_argument(context, f"{ERROR_INVALID_PROJECT_ID_PREFIX}{request.project_id}")
|
||||
return None
|
||||
|
||||
async def ListMeetings(
|
||||
self: MeetingServicer,
|
||||
self,
|
||||
request: noteflow_pb2.ListMeetingsRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ListMeetingsResponse:
|
||||
@@ -341,7 +319,7 @@ class MeetingMixin:
|
||||
if not project_ids:
|
||||
project_id = await _parse_project_id_or_abort(request, context)
|
||||
|
||||
async with self.create_repository_provider() as repo:
|
||||
async with cast(MeetingRepositoryProvider, self.create_repository_provider()) as repo:
|
||||
if project_id is None and not project_ids:
|
||||
project_id = await _resolve_active_project_id(self, repo)
|
||||
|
||||
@@ -368,7 +346,7 @@ class MeetingMixin:
|
||||
)
|
||||
|
||||
async def GetMeeting(
|
||||
self: MeetingServicer,
|
||||
self,
|
||||
request: noteflow_pb2.GetMeetingRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.Meeting:
|
||||
@@ -380,7 +358,7 @@ class MeetingMixin:
|
||||
include_summary=request.include_summary,
|
||||
)
|
||||
meeting_id = await parse_meeting_id_or_abort(request.meeting_id, context)
|
||||
async with self.create_repository_provider() as repo:
|
||||
async with cast(MeetingRepositoryProvider, self.create_repository_provider()) as repo:
|
||||
meeting = await repo.meetings.get(meeting_id)
|
||||
if meeting is None:
|
||||
logger.warning("GetMeeting: meeting not found", meeting_id=request.meeting_id)
|
||||
@@ -401,18 +379,21 @@ class MeetingMixin:
|
||||
)
|
||||
|
||||
async def DeleteMeeting(
|
||||
self: MeetingServicer,
|
||||
self,
|
||||
request: noteflow_pb2.DeleteMeetingRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.DeleteMeetingResponse:
|
||||
"""Delete a meeting."""
|
||||
logger.info("DeleteMeeting requested", meeting_id=request.meeting_id)
|
||||
meeting_id = await parse_meeting_id_or_abort(request.meeting_id, context)
|
||||
async with self.create_repository_provider() as repo:
|
||||
|
||||
async with cast(MeetingRepositoryProvider, self.create_repository_provider()) as repo:
|
||||
success = await repo.meetings.delete(meeting_id)
|
||||
if success:
|
||||
await repo.commit()
|
||||
logger.info("Meeting deleted", meeting_id=request.meeting_id)
|
||||
return noteflow_pb2.DeleteMeetingResponse(success=True)
|
||||
logger.warning("DeleteMeeting: meeting not found", meeting_id=request.meeting_id)
|
||||
await abort_not_found(context, ENTITY_MEETING, request.meeting_id)
|
||||
if not success:
|
||||
logger.warning("DeleteMeeting: meeting not found", meeting_id=request.meeting_id)
|
||||
await abort_not_found(context, ENTITY_MEETING, request.meeting_id)
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
await repo.commit()
|
||||
logger.info("Meeting deleted", meeting_id=request.meeting_id)
|
||||
return noteflow_pb2.DeleteMeetingResponse(success=True)
|
||||
|
||||
@@ -3,7 +3,8 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
from typing import Protocol, cast
|
||||
from dataclasses import dataclass
|
||||
from typing import cast
|
||||
from uuid import UUID
|
||||
|
||||
from noteflow.config.constants import ERROR_INVALID_WORKSPACE_ID_FORMAT
|
||||
@@ -13,10 +14,12 @@ from noteflow.domain.auth.oidc import (
|
||||
OidcProviderPreset,
|
||||
OidcProviderRegistration,
|
||||
)
|
||||
from noteflow.domain.constants.fields import ALLOWED_GROUPS, CLAIM_MAPPING, REQUIRE_EMAIL_VERIFIED
|
||||
from noteflow.infrastructure.auth.oidc_discovery import OidcDiscoveryError
|
||||
from noteflow.infrastructure.auth.oidc_registry import (
|
||||
PROVIDER_PRESETS,
|
||||
OidcAuthService,
|
||||
ProviderPresetConfig,
|
||||
)
|
||||
|
||||
from ..proto import noteflow_pb2
|
||||
@@ -24,23 +27,16 @@ from ._types import GrpcContext
|
||||
from .converters import oidc_provider_to_proto, proto_to_claim_mapping
|
||||
from .errors import abort_invalid_argument, abort_not_found, parse_workspace_id
|
||||
|
||||
|
||||
class OidcServicer(Protocol):
|
||||
"""Protocol for hosts that support OIDC operations."""
|
||||
|
||||
oidc_service: OidcAuthService | None
|
||||
|
||||
def get_oidc_service(self) -> OidcAuthService: ...
|
||||
|
||||
|
||||
class _HasField(Protocol):
|
||||
def HasField(self, field_name: str) -> bool: ...
|
||||
|
||||
# Error message constants
|
||||
_ENTITY_OIDC_PROVIDER = "OIDC Provider"
|
||||
_ERR_INVALID_PROVIDER_ID = "Invalid provider_id format"
|
||||
_ERR_INVALID_PRESET = "Invalid preset value"
|
||||
|
||||
# Field name constants
|
||||
_FIELD_NAME = "name"
|
||||
_FIELD_SCOPES = "scopes"
|
||||
_FIELD_ENABLED = "enabled"
|
||||
|
||||
|
||||
def _parse_provider_id(provider_id_str: str) -> UUID:
|
||||
"""Parse provider ID string to UUID, raising ValueError if invalid."""
|
||||
@@ -72,15 +68,22 @@ async def _validate_register_request(
|
||||
await abort_invalid_argument(context, "client_id is required")
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class OidcCustomConfig:
|
||||
"""Optional configuration overrides for OIDC providers."""
|
||||
|
||||
claim_mapping: ClaimMapping | None
|
||||
scopes: tuple[str, ...] | None
|
||||
allowed_groups: tuple[str, ...] | None
|
||||
require_email_verified: bool | None
|
||||
|
||||
|
||||
def _parse_register_options(
|
||||
request: noteflow_pb2.RegisterOidcProviderRequest,
|
||||
) -> tuple[ClaimMapping | None, tuple[str, ...] | None, tuple[str, ...] | None]:
|
||||
"""Parse optional fields from RegisterOidcProvider request.
|
||||
|
||||
Returns (claim_mapping, scopes, allowed_groups) tuple.
|
||||
"""
|
||||
) -> OidcCustomConfig:
|
||||
"""Parse optional fields from RegisterOidcProvider request."""
|
||||
claim_mapping: ClaimMapping | None = None
|
||||
if cast(_HasField, request).HasField("claim_mapping"):
|
||||
if request.HasField(CLAIM_MAPPING):
|
||||
claim_mapping = proto_to_claim_mapping(request.claim_mapping)
|
||||
|
||||
scopes_values = cast(Sequence[str], request.scopes)
|
||||
@@ -89,25 +92,146 @@ def _parse_register_options(
|
||||
if allowed_values := cast(Sequence[str], request.allowed_groups):
|
||||
allowed_groups = tuple(allowed_values)
|
||||
|
||||
return claim_mapping, scopes, allowed_groups
|
||||
require_email_verified = (
|
||||
request.require_email_verified
|
||||
if request.HasField(REQUIRE_EMAIL_VERIFIED)
|
||||
else None
|
||||
)
|
||||
|
||||
return OidcCustomConfig(
|
||||
claim_mapping=claim_mapping,
|
||||
scopes=scopes,
|
||||
allowed_groups=allowed_groups,
|
||||
require_email_verified=require_email_verified,
|
||||
)
|
||||
|
||||
|
||||
def _apply_custom_provider_config(
|
||||
provider: OidcProviderConfig,
|
||||
claim_mapping: ClaimMapping | None,
|
||||
scopes: tuple[str, ...] | None,
|
||||
allowed_groups: tuple[str, ...] | None,
|
||||
require_email_verified: bool | None,
|
||||
config: OidcCustomConfig,
|
||||
) -> None:
|
||||
"""Apply custom configuration options to a registered provider."""
|
||||
if claim_mapping:
|
||||
object.__setattr__(provider, "claim_mapping", claim_mapping)
|
||||
if scopes:
|
||||
object.__setattr__(provider, "scopes", scopes)
|
||||
if allowed_groups:
|
||||
object.__setattr__(provider, "allowed_groups", allowed_groups)
|
||||
if require_email_verified is not None:
|
||||
object.__setattr__(provider, "require_email_verified", require_email_verified)
|
||||
if config.claim_mapping:
|
||||
object.__setattr__(provider, CLAIM_MAPPING, config.claim_mapping)
|
||||
if config.scopes:
|
||||
object.__setattr__(provider, _FIELD_SCOPES, config.scopes)
|
||||
if config.allowed_groups:
|
||||
object.__setattr__(provider, ALLOWED_GROUPS, config.allowed_groups)
|
||||
if config.require_email_verified is not None:
|
||||
object.__setattr__(provider, REQUIRE_EMAIL_VERIFIED, config.require_email_verified)
|
||||
|
||||
|
||||
def _apply_update_request_to_provider(
|
||||
provider: OidcProviderConfig,
|
||||
request: noteflow_pb2.UpdateOidcProviderRequest,
|
||||
) -> None:
|
||||
"""Apply update request fields to provider config.
|
||||
|
||||
Mutates the provider in place using object.__setattr__ since
|
||||
OidcProviderConfig is a frozen dataclass.
|
||||
|
||||
Args:
|
||||
provider: The provider config to update.
|
||||
request: The gRPC update request with optional field values.
|
||||
"""
|
||||
if request.HasField(_FIELD_NAME):
|
||||
object.__setattr__(provider, _FIELD_NAME, request.name)
|
||||
|
||||
if scopes_values := cast(Sequence[str], request.scopes):
|
||||
object.__setattr__(provider, _FIELD_SCOPES, tuple(scopes_values))
|
||||
|
||||
if request.HasField(CLAIM_MAPPING):
|
||||
object.__setattr__(provider, CLAIM_MAPPING, proto_to_claim_mapping(request.claim_mapping))
|
||||
|
||||
if allowed_values := cast(Sequence[str], request.allowed_groups):
|
||||
object.__setattr__(provider, ALLOWED_GROUPS, tuple(allowed_values))
|
||||
|
||||
if request.HasField(REQUIRE_EMAIL_VERIFIED):
|
||||
object.__setattr__(provider, REQUIRE_EMAIL_VERIFIED, request.require_email_verified)
|
||||
|
||||
if request.HasField(_FIELD_ENABLED):
|
||||
if request.enabled:
|
||||
provider.enable()
|
||||
else:
|
||||
provider.disable()
|
||||
|
||||
|
||||
def _preset_config_to_proto(
|
||||
preset_config: ProviderPresetConfig,
|
||||
) -> noteflow_pb2.OidcPresetProto:
|
||||
"""Convert a preset configuration to protobuf message.
|
||||
|
||||
Args:
|
||||
preset_config: The preset configuration from PROVIDER_PRESETS values.
|
||||
|
||||
Returns:
|
||||
The protobuf OidcPresetProto message.
|
||||
"""
|
||||
return noteflow_pb2.OidcPresetProto(
|
||||
preset=preset_config.preset.value,
|
||||
display_name=preset_config.display_name,
|
||||
description=preset_config.description,
|
||||
default_scopes=list(preset_config.default_scopes),
|
||||
documentation_url=preset_config.documentation_url or "",
|
||||
notes=preset_config.notes or "",
|
||||
)
|
||||
|
||||
|
||||
async def _refresh_single_provider(
|
||||
oidc_service: OidcAuthService,
|
||||
provider_id: UUID,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.RefreshOidcDiscoveryResponse:
|
||||
"""Refresh OIDC discovery for a single provider.
|
||||
|
||||
Args:
|
||||
oidc_service: The OIDC auth service.
|
||||
provider_id: The provider ID to refresh.
|
||||
context: The gRPC context for error handling.
|
||||
|
||||
Returns:
|
||||
The refresh response with results for the single provider.
|
||||
"""
|
||||
provider = oidc_service.registry.get_provider(provider_id)
|
||||
if provider is None:
|
||||
await abort_not_found(context, _ENTITY_OIDC_PROVIDER, str(provider_id))
|
||||
return noteflow_pb2.RefreshOidcDiscoveryResponse()
|
||||
|
||||
try:
|
||||
await oidc_service.registry.refresh_discovery(provider)
|
||||
return noteflow_pb2.RefreshOidcDiscoveryResponse(
|
||||
results={str(provider_id): ""},
|
||||
success_count=1,
|
||||
failure_count=0,
|
||||
)
|
||||
except OidcDiscoveryError as e:
|
||||
return noteflow_pb2.RefreshOidcDiscoveryResponse(
|
||||
results={str(provider_id): str(e)},
|
||||
success_count=0,
|
||||
failure_count=1,
|
||||
)
|
||||
|
||||
|
||||
def _build_bulk_refresh_response(
|
||||
results: dict[UUID, str | None],
|
||||
) -> noteflow_pb2.RefreshOidcDiscoveryResponse:
|
||||
"""Build response for bulk OIDC discovery refresh.
|
||||
|
||||
Args:
|
||||
results: Mapping of provider IDs to error messages (None for success).
|
||||
|
||||
Returns:
|
||||
The refresh response with aggregated results.
|
||||
"""
|
||||
results_str = {str(k): v or "" for k, v in results.items()}
|
||||
success_count = sum(v is None for v in results.values())
|
||||
failure_count = sum(v is not None for v in results.values())
|
||||
|
||||
return noteflow_pb2.RefreshOidcDiscoveryResponse(
|
||||
results=results_str,
|
||||
success_count=success_count,
|
||||
failure_count=failure_count,
|
||||
)
|
||||
|
||||
|
||||
class OidcMixin:
|
||||
@@ -119,7 +243,7 @@ class OidcMixin:
|
||||
|
||||
oidc_service: OidcAuthService | None
|
||||
|
||||
def get_oidc_service(self: OidcServicer) -> OidcAuthService:
|
||||
def get_oidc_service(self) -> OidcAuthService:
|
||||
"""Get or create the OIDC auth service."""
|
||||
if self.oidc_service is None:
|
||||
self.oidc_service = OidcAuthService()
|
||||
@@ -127,7 +251,7 @@ class OidcMixin:
|
||||
return self.oidc_service
|
||||
|
||||
async def RegisterOidcProvider(
|
||||
self: OidcServicer,
|
||||
self,
|
||||
request: noteflow_pb2.RegisterOidcProviderRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.OidcProviderProto:
|
||||
@@ -148,7 +272,7 @@ class OidcMixin:
|
||||
await abort_invalid_argument(context, ERROR_INVALID_WORKSPACE_ID_FORMAT)
|
||||
return noteflow_pb2.OidcProviderProto() # unreachable
|
||||
|
||||
claim_mapping, scopes, allowed_groups = _parse_register_options(request)
|
||||
custom_config = _parse_register_options(request)
|
||||
|
||||
# Register provider
|
||||
oidc_service = self.get_oidc_service()
|
||||
@@ -160,24 +284,14 @@ class OidcMixin:
|
||||
client_id=request.client_id,
|
||||
client_secret=(
|
||||
request.client_secret
|
||||
if cast(_HasField, request).HasField("client_secret")
|
||||
if request.HasField("client_secret")
|
||||
else None
|
||||
),
|
||||
preset=preset,
|
||||
)
|
||||
provider, warnings = await oidc_service.register_provider(registration)
|
||||
|
||||
_apply_custom_provider_config(
|
||||
provider,
|
||||
claim_mapping,
|
||||
scopes,
|
||||
allowed_groups,
|
||||
(
|
||||
request.require_email_verified
|
||||
if cast(_HasField, request).HasField("require_email_verified")
|
||||
else None
|
||||
),
|
||||
)
|
||||
_apply_custom_provider_config(provider, custom_config)
|
||||
|
||||
return oidc_provider_to_proto(provider, warnings)
|
||||
|
||||
@@ -186,14 +300,14 @@ class OidcMixin:
|
||||
return noteflow_pb2.OidcProviderProto() # unreachable
|
||||
|
||||
async def ListOidcProviders(
|
||||
self: OidcServicer,
|
||||
self,
|
||||
request: noteflow_pb2.ListOidcProvidersRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ListOidcProvidersResponse:
|
||||
"""List all OIDC providers."""
|
||||
# Parse optional workspace filter
|
||||
workspace_id: UUID | None = None
|
||||
if cast(_HasField, request).HasField("workspace_id"):
|
||||
if request.HasField("workspace_id"):
|
||||
workspace_id = await parse_workspace_id(request.workspace_id, context)
|
||||
|
||||
oidc_service = self.get_oidc_service()
|
||||
@@ -208,7 +322,7 @@ class OidcMixin:
|
||||
)
|
||||
|
||||
async def GetOidcProvider(
|
||||
self: OidcServicer,
|
||||
self,
|
||||
request: noteflow_pb2.GetOidcProviderRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.OidcProviderProto:
|
||||
@@ -229,7 +343,7 @@ class OidcMixin:
|
||||
return oidc_provider_to_proto(provider)
|
||||
|
||||
async def UpdateOidcProvider(
|
||||
self: OidcServicer,
|
||||
self,
|
||||
request: noteflow_pb2.UpdateOidcProviderRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.OidcProviderProto:
|
||||
@@ -247,32 +361,11 @@ class OidcMixin:
|
||||
await abort_not_found(context, _ENTITY_OIDC_PROVIDER, str(provider_id))
|
||||
return noteflow_pb2.OidcProviderProto() # unreachable
|
||||
|
||||
# Apply updates
|
||||
if cast(_HasField, request).HasField("name"):
|
||||
object.__setattr__(provider, "name", request.name)
|
||||
|
||||
if scopes_values := cast(Sequence[str], request.scopes):
|
||||
object.__setattr__(provider, "scopes", tuple(scopes_values))
|
||||
|
||||
if cast(_HasField, request).HasField("claim_mapping"):
|
||||
object.__setattr__(provider, "claim_mapping", proto_to_claim_mapping(request.claim_mapping))
|
||||
|
||||
if allowed_values := cast(Sequence[str], request.allowed_groups):
|
||||
object.__setattr__(provider, "allowed_groups", tuple(allowed_values))
|
||||
|
||||
if cast(_HasField, request).HasField("require_email_verified"):
|
||||
object.__setattr__(provider, "require_email_verified", request.require_email_verified)
|
||||
|
||||
if cast(_HasField, request).HasField("enabled"):
|
||||
if request.enabled:
|
||||
provider.enable()
|
||||
else:
|
||||
provider.disable()
|
||||
|
||||
_apply_update_request_to_provider(provider, request)
|
||||
return oidc_provider_to_proto(provider)
|
||||
|
||||
async def DeleteOidcProvider(
|
||||
self: OidcServicer,
|
||||
self,
|
||||
request: noteflow_pb2.DeleteOidcProviderRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.DeleteOidcProviderResponse:
|
||||
@@ -292,7 +385,7 @@ class OidcMixin:
|
||||
return noteflow_pb2.DeleteOidcProviderResponse(success=success)
|
||||
|
||||
async def RefreshOidcDiscovery(
|
||||
self: OidcServicer,
|
||||
self,
|
||||
request: noteflow_pb2.RefreshOidcDiscoveryRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.RefreshOidcDiscoveryResponse:
|
||||
@@ -300,66 +393,28 @@ class OidcMixin:
|
||||
oidc_service = self.get_oidc_service()
|
||||
|
||||
# Single provider refresh
|
||||
if cast(_HasField, request).HasField("provider_id"):
|
||||
if request.HasField("provider_id"):
|
||||
try:
|
||||
provider_id = _parse_provider_id(request.provider_id)
|
||||
except ValueError:
|
||||
await abort_invalid_argument(context, _ERR_INVALID_PROVIDER_ID)
|
||||
return noteflow_pb2.RefreshOidcDiscoveryResponse()
|
||||
|
||||
provider = oidc_service.registry.get_provider(provider_id)
|
||||
if provider is None:
|
||||
await abort_not_found(context, _ENTITY_OIDC_PROVIDER, str(provider_id))
|
||||
return noteflow_pb2.RefreshOidcDiscoveryResponse()
|
||||
|
||||
try:
|
||||
await oidc_service.registry.refresh_discovery(provider)
|
||||
return noteflow_pb2.RefreshOidcDiscoveryResponse(
|
||||
results={str(provider_id): ""},
|
||||
success_count=1,
|
||||
failure_count=0,
|
||||
)
|
||||
except OidcDiscoveryError as e:
|
||||
return noteflow_pb2.RefreshOidcDiscoveryResponse(
|
||||
results={str(provider_id): str(e)},
|
||||
success_count=0,
|
||||
failure_count=1,
|
||||
)
|
||||
return await _refresh_single_provider(oidc_service, provider_id, context)
|
||||
|
||||
# Bulk refresh
|
||||
workspace_id: UUID | None = None
|
||||
if cast(_HasField, request).HasField("workspace_id"):
|
||||
if request.HasField("workspace_id"):
|
||||
workspace_id = await parse_workspace_id(request.workspace_id, context)
|
||||
|
||||
results = await oidc_service.refresh_all_discovery(workspace_id=workspace_id)
|
||||
|
||||
# Convert UUID keys to strings and count results
|
||||
results_str = {str(k): v or "" for k, v in results.items()}
|
||||
success_count = sum(v is None for v in results.values())
|
||||
failure_count = sum(v is not None for v in results.values())
|
||||
|
||||
return noteflow_pb2.RefreshOidcDiscoveryResponse(
|
||||
results=results_str,
|
||||
success_count=success_count,
|
||||
failure_count=failure_count,
|
||||
)
|
||||
return _build_bulk_refresh_response(results)
|
||||
|
||||
async def ListOidcPresets(
|
||||
self: OidcServicer,
|
||||
self,
|
||||
request: noteflow_pb2.ListOidcPresetsRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ListOidcPresetsResponse:
|
||||
"""List available OIDC provider presets."""
|
||||
presets = [
|
||||
noteflow_pb2.OidcPresetProto(
|
||||
preset=config.preset.value,
|
||||
display_name=config.display_name,
|
||||
description=config.description,
|
||||
default_scopes=list(config.default_scopes),
|
||||
documentation_url=config.documentation_url or "",
|
||||
notes=config.notes or "",
|
||||
)
|
||||
for config in PROVIDER_PRESETS.values()
|
||||
]
|
||||
|
||||
presets = [_preset_config_to_proto(config) for config in PROVIDER_PRESETS.values()]
|
||||
return noteflow_pb2.ListOidcPresetsResponse(presets=presets)
|
||||
|
||||
@@ -5,19 +5,19 @@ from __future__ import annotations
|
||||
import hashlib
|
||||
import json
|
||||
from collections.abc import Mapping, Sequence
|
||||
from typing import TYPE_CHECKING, Protocol, Self, cast
|
||||
from typing import TYPE_CHECKING, cast
|
||||
|
||||
from noteflow.infrastructure.logging import get_logger
|
||||
from noteflow.infrastructure.persistence.repositories.preferences_repo import (
|
||||
PreferenceWithMetadata,
|
||||
)
|
||||
from .protocols import PreferencesRepositoryProvider
|
||||
|
||||
from ..proto import noteflow_pb2
|
||||
from .errors import abort_database_required, abort_failed_precondition
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from noteflow.domain.ports.repositories import PreferencesRepository
|
||||
from noteflow.domain.ports.unit_of_work import UnitOfWork
|
||||
from collections.abc import Callable
|
||||
|
||||
from ._types import GrpcContext
|
||||
|
||||
@@ -27,47 +27,6 @@ logger = get_logger(__name__)
|
||||
_ENTITY_PREFERENCES = "Preferences"
|
||||
|
||||
|
||||
class PreferencesRepositoryProvider(Protocol):
|
||||
"""Repository provider protocol for preferences operations."""
|
||||
|
||||
@property
|
||||
def supports_preferences(self) -> bool: ...
|
||||
|
||||
@property
|
||||
def preferences(self) -> PreferencesRepository: ...
|
||||
|
||||
async def commit(self) -> None: ...
|
||||
|
||||
async def __aenter__(self) -> Self: ...
|
||||
|
||||
async def __aexit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_val: BaseException | None,
|
||||
exc_tb: object,
|
||||
) -> None: ...
|
||||
|
||||
|
||||
class PreferencesServicer(Protocol):
|
||||
"""Protocol for hosts that support preferences operations."""
|
||||
|
||||
def create_repository_provider(self) -> PreferencesRepositoryProvider | UnitOfWork: ...
|
||||
|
||||
async def decode_and_validate_prefs(
|
||||
self,
|
||||
request: noteflow_pb2.SetPreferencesRequest,
|
||||
context: GrpcContext,
|
||||
) -> dict[str, object]: ...
|
||||
|
||||
async def apply_preferences(
|
||||
self,
|
||||
repo: PreferencesRepositoryProvider,
|
||||
request: noteflow_pb2.SetPreferencesRequest,
|
||||
current_prefs: list[PreferenceWithMetadata],
|
||||
decoded_prefs: dict[str, object],
|
||||
) -> None: ...
|
||||
|
||||
|
||||
def compute_etag(preferences: dict[str, str], updated_at: float) -> str:
|
||||
"""Compute ETag from preferences and timestamp.
|
||||
|
||||
@@ -128,13 +87,14 @@ class PreferencesMixin:
|
||||
Preferences require database persistence.
|
||||
"""
|
||||
|
||||
create_repository_provider: Callable[..., object]
|
||||
async def GetPreferences(
|
||||
self: PreferencesServicer,
|
||||
self,
|
||||
request: noteflow_pb2.GetPreferencesRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.GetPreferencesResponse:
|
||||
"""Get all preferences with sync metadata."""
|
||||
async with self.create_repository_provider() as repo:
|
||||
async with cast(PreferencesRepositoryProvider, self.create_repository_provider()) as repo:
|
||||
if not repo.supports_preferences:
|
||||
await abort_database_required(context, _ENTITY_PREFERENCES)
|
||||
|
||||
@@ -151,12 +111,12 @@ class PreferencesMixin:
|
||||
)
|
||||
|
||||
async def SetPreferences(
|
||||
self: PreferencesServicer,
|
||||
self,
|
||||
request: noteflow_pb2.SetPreferencesRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.SetPreferencesResponse:
|
||||
"""Set preferences with optimistic concurrency control."""
|
||||
async with self.create_repository_provider() as repo:
|
||||
async with cast(PreferencesRepositoryProvider, self.create_repository_provider()) as repo:
|
||||
if not repo.supports_preferences:
|
||||
await abort_database_required(context, _ENTITY_PREFERENCES)
|
||||
|
||||
@@ -191,7 +151,7 @@ class PreferencesMixin:
|
||||
)
|
||||
|
||||
async def decode_and_validate_prefs(
|
||||
self: PreferencesServicer,
|
||||
self,
|
||||
request: noteflow_pb2.SetPreferencesRequest,
|
||||
context: GrpcContext,
|
||||
) -> dict[str, object]:
|
||||
@@ -206,7 +166,7 @@ class PreferencesMixin:
|
||||
return decoded_prefs
|
||||
|
||||
async def apply_preferences(
|
||||
self: PreferencesServicer,
|
||||
self,
|
||||
repo: PreferencesRepositoryProvider,
|
||||
request: noteflow_pb2.SetPreferencesRequest,
|
||||
current_prefs: list[PreferenceWithMetadata],
|
||||
|
||||
@@ -6,6 +6,7 @@ from collections.abc import MutableSequence, Sequence
|
||||
from typing import TYPE_CHECKING, Protocol, cast
|
||||
from uuid import UUID
|
||||
|
||||
from noteflow.domain.constants.fields import DEFAULT_SUMMARIZATION_TEMPLATE
|
||||
from noteflow.domain.entities.project import ExportRules, ProjectSettings, TriggerRules
|
||||
from noteflow.domain.identity import ProjectRole
|
||||
from noteflow.domain.value_objects import ExportFormat
|
||||
@@ -196,7 +197,7 @@ def proto_to_project_settings(
|
||||
)
|
||||
default_template = (
|
||||
proto.default_summarization_template
|
||||
if cast(_HasField, proto).HasField("default_summarization_template")
|
||||
if cast(_HasField, proto).HasField(DEFAULT_SUMMARIZATION_TEMPLATE)
|
||||
else None
|
||||
)
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TYPE_CHECKING, cast
|
||||
from uuid import UUID
|
||||
|
||||
from noteflow.config.constants import (
|
||||
@@ -24,11 +24,41 @@ from ._converters import (
|
||||
membership_to_proto,
|
||||
proto_to_project_role,
|
||||
)
|
||||
from ..protocols import ProjectRepositoryProvider
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .._types import GrpcContext
|
||||
from ._types import ProjectServicer
|
||||
from collections.abc import Callable
|
||||
|
||||
from noteflow.application.services.project_service import ProjectService
|
||||
|
||||
from .._types import GrpcContext
|
||||
from ..protocols import ProjectRepositoryProvider
|
||||
|
||||
|
||||
|
||||
|
||||
async def _parse_project_and_user_ids(
|
||||
request_project_id: str,
|
||||
request_user_id: str,
|
||||
context: GrpcContext,
|
||||
) -> tuple[UUID, UUID]:
|
||||
"""Parse and validate project and user IDs from request."""
|
||||
if not request_project_id:
|
||||
await abort_invalid_argument(context, ERROR_PROJECT_ID_REQUIRED)
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
if not request_user_id:
|
||||
await abort_invalid_argument(context, ERROR_USER_ID_REQUIRED)
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
try:
|
||||
project_id = UUID(request_project_id)
|
||||
user_id = UUID(request_user_id)
|
||||
except ValueError as e:
|
||||
await abort_invalid_argument(context, f"{ERROR_INVALID_UUID_PREFIX}{e}")
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
return project_id, user_id
|
||||
|
||||
class ProjectMembershipMixin:
|
||||
"""Mixin providing project membership functionality.
|
||||
@@ -37,30 +67,22 @@ class ProjectMembershipMixin:
|
||||
Provide CRUD operations for project memberships.
|
||||
"""
|
||||
|
||||
project_service: ProjectService | None
|
||||
create_repository_provider: Callable[..., object]
|
||||
|
||||
async def AddProjectMember(
|
||||
self: ProjectServicer,
|
||||
self,
|
||||
request: noteflow_pb2.AddProjectMemberRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ProjectMembershipProto:
|
||||
"""Add a member to a project."""
|
||||
project_service = await require_project_service(self.project_service, context)
|
||||
|
||||
if not request.project_id:
|
||||
await abort_invalid_argument(context, ERROR_PROJECT_ID_REQUIRED)
|
||||
|
||||
if not request.user_id:
|
||||
await abort_invalid_argument(context, ERROR_USER_ID_REQUIRED)
|
||||
|
||||
try:
|
||||
project_id = UUID(request.project_id)
|
||||
user_id = UUID(request.user_id)
|
||||
except ValueError as e:
|
||||
await abort_invalid_argument(context, f"{ERROR_INVALID_UUID_PREFIX}{e}")
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
project_id, user_id = await _parse_project_and_user_ids(
|
||||
request.project_id, request.user_id, context
|
||||
)
|
||||
role = proto_to_project_role(request.role)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(ProjectRepositoryProvider, self.create_repository_provider()) as uow:
|
||||
await require_feature_projects(uow, context)
|
||||
|
||||
membership = await project_service.add_project_member(
|
||||
@@ -76,29 +98,18 @@ class ProjectMembershipMixin:
|
||||
return membership_to_proto(membership)
|
||||
|
||||
async def UpdateProjectMemberRole(
|
||||
self: ProjectServicer,
|
||||
self,
|
||||
request: noteflow_pb2.UpdateProjectMemberRoleRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ProjectMembershipProto:
|
||||
"""Update a project member's role."""
|
||||
project_service = await require_project_service(self.project_service, context)
|
||||
|
||||
if not request.project_id:
|
||||
await abort_invalid_argument(context, ERROR_PROJECT_ID_REQUIRED)
|
||||
|
||||
if not request.user_id:
|
||||
await abort_invalid_argument(context, ERROR_USER_ID_REQUIRED)
|
||||
|
||||
try:
|
||||
project_id = UUID(request.project_id)
|
||||
user_id = UUID(request.user_id)
|
||||
except ValueError as e:
|
||||
await abort_invalid_argument(context, f"{ERROR_INVALID_UUID_PREFIX}{e}")
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
project_id, user_id = await _parse_project_and_user_ids(
|
||||
request.project_id, request.user_id, context
|
||||
)
|
||||
role = proto_to_project_role(request.role)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(ProjectRepositoryProvider, self.create_repository_provider()) as uow:
|
||||
await require_feature_projects(uow, context)
|
||||
|
||||
membership = await project_service.update_project_member_role(
|
||||
@@ -114,27 +125,17 @@ class ProjectMembershipMixin:
|
||||
return membership_to_proto(membership)
|
||||
|
||||
async def RemoveProjectMember(
|
||||
self: ProjectServicer,
|
||||
self,
|
||||
request: noteflow_pb2.RemoveProjectMemberRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.RemoveProjectMemberResponse:
|
||||
"""Remove a member from a project."""
|
||||
project_service = await require_project_service(self.project_service, context)
|
||||
project_id, user_id = await _parse_project_and_user_ids(
|
||||
request.project_id, request.user_id, context
|
||||
)
|
||||
|
||||
if not request.project_id:
|
||||
await abort_invalid_argument(context, ERROR_PROJECT_ID_REQUIRED)
|
||||
|
||||
if not request.user_id:
|
||||
await abort_invalid_argument(context, ERROR_USER_ID_REQUIRED)
|
||||
|
||||
try:
|
||||
project_id = UUID(request.project_id)
|
||||
user_id = UUID(request.user_id)
|
||||
except ValueError as e:
|
||||
await abort_invalid_argument(context, f"{ERROR_INVALID_UUID_PREFIX}{e}")
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(ProjectRepositoryProvider, self.create_repository_provider()) as uow:
|
||||
await require_feature_projects(uow, context)
|
||||
|
||||
removed = await project_service.remove_project_member(
|
||||
@@ -145,7 +146,7 @@ class ProjectMembershipMixin:
|
||||
return noteflow_pb2.RemoveProjectMemberResponse(success=removed)
|
||||
|
||||
async def ListProjectMembers(
|
||||
self: ProjectServicer,
|
||||
self,
|
||||
request: noteflow_pb2.ListProjectMembersRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ListProjectMembersResponse:
|
||||
@@ -164,7 +165,7 @@ class ProjectMembershipMixin:
|
||||
limit = request.limit if request.limit > 0 else 100
|
||||
offset = max(request.offset, 0)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(ProjectRepositoryProvider, self.create_repository_provider()) as uow:
|
||||
await require_feature_projects(uow, context)
|
||||
|
||||
members = await project_service.list_project_members(
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING, Protocol, cast
|
||||
from typing import TYPE_CHECKING, cast
|
||||
from uuid import UUID
|
||||
|
||||
from noteflow.config.constants import (
|
||||
@@ -28,18 +28,42 @@ from ._converters import (
|
||||
project_to_proto,
|
||||
proto_to_project_settings,
|
||||
)
|
||||
from ..protocols import ProjectRepositoryProvider
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
|
||||
from noteflow.application.services.project_service import ProjectService
|
||||
|
||||
from .._types import GrpcContext
|
||||
from ._types import ProjectServicer
|
||||
from ..protocols import ProjectRepositoryProvider
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class _HasField(Protocol):
|
||||
def HasField(self, field_name: str) -> bool: ...
|
||||
|
||||
|
||||
async def _require_and_parse_project_id(
|
||||
request_project_id: str,
|
||||
context: GrpcContext,
|
||||
) -> UUID:
|
||||
"""Require and parse a project_id from request."""
|
||||
if not request_project_id:
|
||||
await abort_invalid_argument(context, ERROR_PROJECT_ID_REQUIRED)
|
||||
raise # Unreachable but helps type checker
|
||||
return await parse_project_id(request_project_id, context)
|
||||
|
||||
|
||||
async def _require_and_parse_workspace_id(
|
||||
request_workspace_id: str,
|
||||
context: GrpcContext,
|
||||
) -> UUID:
|
||||
"""Require and parse a workspace_id from request."""
|
||||
if not request_workspace_id:
|
||||
await abort_invalid_argument(context, ERROR_WORKSPACE_ID_REQUIRED)
|
||||
raise # Unreachable but helps type checker
|
||||
return await parse_workspace_id(request_workspace_id, context)
|
||||
|
||||
class ProjectMixin:
|
||||
"""Mixin providing project management functionality.
|
||||
|
||||
@@ -47,39 +71,31 @@ class ProjectMixin:
|
||||
Provide CRUD operations for projects and project memberships.
|
||||
"""
|
||||
|
||||
project_service: ProjectService | None
|
||||
create_repository_provider: Callable[..., object]
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Project CRUD
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
async def CreateProject(
|
||||
self: ProjectServicer,
|
||||
self,
|
||||
request: noteflow_pb2.CreateProjectRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ProjectProto:
|
||||
"""Create a new project in a workspace."""
|
||||
project_service = await require_project_service(self.project_service, context)
|
||||
|
||||
if not request.workspace_id:
|
||||
await abort_invalid_argument(context, ERROR_WORKSPACE_ID_REQUIRED)
|
||||
workspace_id = await _require_and_parse_workspace_id(request.workspace_id, context)
|
||||
|
||||
if not request.name:
|
||||
await abort_invalid_argument(context, "name is required")
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
workspace_id = await parse_workspace_id(request.workspace_id, context)
|
||||
slug = request.slug if request.HasField("slug") else None
|
||||
description = request.description if request.HasField("description") else None
|
||||
settings = proto_to_project_settings(request.settings) if request.HasField("settings") else None
|
||||
|
||||
slug = request.slug if cast(_HasField, request).HasField("slug") else None
|
||||
description = (
|
||||
request.description
|
||||
if cast(_HasField, request).HasField("description")
|
||||
else None
|
||||
)
|
||||
settings = (
|
||||
proto_to_project_settings(request.settings)
|
||||
if cast(_HasField, request).HasField("settings")
|
||||
else None
|
||||
)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(ProjectRepositoryProvider, self.create_repository_provider()) as uow:
|
||||
await require_feature_projects(uow, context)
|
||||
|
||||
project = await project_service.create_project(
|
||||
@@ -93,19 +109,15 @@ class ProjectMixin:
|
||||
return project_to_proto(project)
|
||||
|
||||
async def GetProject(
|
||||
self: ProjectServicer,
|
||||
self,
|
||||
request: noteflow_pb2.GetProjectRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ProjectProto:
|
||||
"""Get a project by ID."""
|
||||
project_service = await require_project_service(self.project_service, context)
|
||||
project_id = await _require_and_parse_project_id(request.project_id, context)
|
||||
|
||||
if not request.project_id:
|
||||
await abort_invalid_argument(context, ERROR_PROJECT_ID_REQUIRED)
|
||||
|
||||
project_id = await parse_project_id(request.project_id, context)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(ProjectRepositoryProvider, self.create_repository_provider()) as uow:
|
||||
await require_feature_projects(uow, context)
|
||||
|
||||
project = await project_service.get_project(uow, project_id)
|
||||
@@ -116,21 +128,19 @@ class ProjectMixin:
|
||||
return project_to_proto(project)
|
||||
|
||||
async def GetProjectBySlug(
|
||||
self: ProjectServicer,
|
||||
self,
|
||||
request: noteflow_pb2.GetProjectBySlugRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ProjectProto:
|
||||
"""Get a project by workspace and slug."""
|
||||
project_service = await require_project_service(self.project_service, context)
|
||||
workspace_id = await _require_and_parse_workspace_id(request.workspace_id, context)
|
||||
|
||||
if not request.workspace_id:
|
||||
await abort_invalid_argument(context, ERROR_WORKSPACE_ID_REQUIRED)
|
||||
if not request.slug:
|
||||
await abort_invalid_argument(context, "slug is required")
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
workspace_id = await parse_workspace_id(request.workspace_id, context)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(ProjectRepositoryProvider, self.create_repository_provider()) as uow:
|
||||
await require_feature_projects(uow, context)
|
||||
|
||||
project = await project_service.get_project_by_slug(uow, workspace_id, request.slug)
|
||||
@@ -141,22 +151,18 @@ class ProjectMixin:
|
||||
return project_to_proto(project)
|
||||
|
||||
async def ListProjects(
|
||||
self: ProjectServicer,
|
||||
self,
|
||||
request: noteflow_pb2.ListProjectsRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ListProjectsResponse:
|
||||
"""List projects in a workspace."""
|
||||
project_service = await require_project_service(self.project_service, context)
|
||||
|
||||
if not request.workspace_id:
|
||||
await abort_invalid_argument(context, ERROR_WORKSPACE_ID_REQUIRED)
|
||||
|
||||
workspace_id = await parse_workspace_id(request.workspace_id, context)
|
||||
workspace_id = await _require_and_parse_workspace_id(request.workspace_id, context)
|
||||
|
||||
limit = request.limit if request.limit > 0 else 50
|
||||
offset = max(request.offset, 0)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(ProjectRepositoryProvider, self.create_repository_provider()) as uow:
|
||||
await require_feature_projects(uow, context)
|
||||
|
||||
projects = await project_service.list_projects(
|
||||
@@ -179,32 +185,20 @@ class ProjectMixin:
|
||||
)
|
||||
|
||||
async def UpdateProject(
|
||||
self: ProjectServicer,
|
||||
self,
|
||||
request: noteflow_pb2.UpdateProjectRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ProjectProto:
|
||||
"""Update a project."""
|
||||
project_service = await require_project_service(self.project_service, context)
|
||||
project_id = await _require_and_parse_project_id(request.project_id, context)
|
||||
|
||||
if not request.project_id:
|
||||
await abort_invalid_argument(context, ERROR_PROJECT_ID_REQUIRED)
|
||||
name = request.name if request.HasField("name") else None
|
||||
slug = request.slug if request.HasField("slug") else None
|
||||
description = request.description if request.HasField("description") else None
|
||||
settings = proto_to_project_settings(request.settings) if request.HasField("settings") else None
|
||||
|
||||
project_id = await parse_project_id(request.project_id, context)
|
||||
|
||||
name = request.name if cast(_HasField, request).HasField("name") else None
|
||||
slug = request.slug if cast(_HasField, request).HasField("slug") else None
|
||||
description = (
|
||||
request.description
|
||||
if cast(_HasField, request).HasField("description")
|
||||
else None
|
||||
)
|
||||
settings = (
|
||||
proto_to_project_settings(request.settings)
|
||||
if cast(_HasField, request).HasField("settings")
|
||||
else None
|
||||
)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(ProjectRepositoryProvider, self.create_repository_provider()) as uow:
|
||||
await require_feature_projects(uow, context)
|
||||
|
||||
project = await project_service.update_project(
|
||||
@@ -222,19 +216,15 @@ class ProjectMixin:
|
||||
return project_to_proto(project)
|
||||
|
||||
async def ArchiveProject(
|
||||
self: ProjectServicer,
|
||||
self,
|
||||
request: noteflow_pb2.ArchiveProjectRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ProjectProto:
|
||||
"""Archive a project."""
|
||||
project_service = await require_project_service(self.project_service, context)
|
||||
project_id = await _require_and_parse_project_id(request.project_id, context)
|
||||
|
||||
if not request.project_id:
|
||||
await abort_invalid_argument(context, ERROR_PROJECT_ID_REQUIRED)
|
||||
|
||||
project_id = await parse_project_id(request.project_id, context)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(ProjectRepositoryProvider, self.create_repository_provider()) as uow:
|
||||
await require_feature_projects(uow, context)
|
||||
|
||||
try:
|
||||
@@ -250,19 +240,15 @@ class ProjectMixin:
|
||||
return project_to_proto(project)
|
||||
|
||||
async def RestoreProject(
|
||||
self: ProjectServicer,
|
||||
self,
|
||||
request: noteflow_pb2.RestoreProjectRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.ProjectProto:
|
||||
"""Restore an archived project."""
|
||||
project_service = await require_project_service(self.project_service, context)
|
||||
project_id = await _require_and_parse_project_id(request.project_id, context)
|
||||
|
||||
if not request.project_id:
|
||||
await abort_invalid_argument(context, ERROR_PROJECT_ID_REQUIRED)
|
||||
|
||||
project_id = await parse_project_id(request.project_id, context)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(ProjectRepositoryProvider, self.create_repository_provider()) as uow:
|
||||
await require_feature_projects(uow, context)
|
||||
|
||||
project = await project_service.restore_project(uow, project_id)
|
||||
@@ -273,19 +259,15 @@ class ProjectMixin:
|
||||
return project_to_proto(project)
|
||||
|
||||
async def DeleteProject(
|
||||
self: ProjectServicer,
|
||||
self,
|
||||
request: noteflow_pb2.DeleteProjectRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.DeleteProjectResponse:
|
||||
"""Delete a project permanently."""
|
||||
project_service = await require_project_service(self.project_service, context)
|
||||
project_id = await _require_and_parse_project_id(request.project_id, context)
|
||||
|
||||
if not request.project_id:
|
||||
await abort_invalid_argument(context, ERROR_PROJECT_ID_REQUIRED)
|
||||
|
||||
project_id = await parse_project_id(request.project_id, context)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(ProjectRepositoryProvider, self.create_repository_provider()) as uow:
|
||||
await require_feature_projects(uow, context)
|
||||
|
||||
deleted = await project_service.delete_project(uow, project_id)
|
||||
@@ -296,23 +278,19 @@ class ProjectMixin:
|
||||
# -------------------------------------------------------------------------
|
||||
|
||||
async def SetActiveProject(
|
||||
self: ProjectServicer,
|
||||
self,
|
||||
request: noteflow_pb2.SetActiveProjectRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.SetActiveProjectResponse:
|
||||
"""Set the active project for a workspace."""
|
||||
project_service = await require_project_service(self.project_service, context)
|
||||
|
||||
if not request.workspace_id:
|
||||
await abort_invalid_argument(context, ERROR_WORKSPACE_ID_REQUIRED)
|
||||
|
||||
workspace_id = await parse_workspace_id(request.workspace_id, context)
|
||||
workspace_id = await _require_and_parse_workspace_id(request.workspace_id, context)
|
||||
|
||||
project_id: UUID | None = None
|
||||
if request.project_id:
|
||||
project_id = await parse_project_id(request.project_id, context)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(ProjectRepositoryProvider, self.create_repository_provider()) as uow:
|
||||
await require_feature_projects(uow, context)
|
||||
await require_feature_workspaces(uow, context)
|
||||
|
||||
@@ -324,25 +302,21 @@ class ProjectMixin:
|
||||
)
|
||||
except ValueError as exc:
|
||||
await abort_invalid_argument(context, str(exc))
|
||||
raise # Unreachable but helps type checker
|
||||
|
||||
await uow.commit()
|
||||
|
||||
return noteflow_pb2.SetActiveProjectResponse()
|
||||
|
||||
async def GetActiveProject(
|
||||
self: ProjectServicer,
|
||||
self,
|
||||
request: noteflow_pb2.GetActiveProjectRequest,
|
||||
context: GrpcContext,
|
||||
) -> noteflow_pb2.GetActiveProjectResponse:
|
||||
"""Get the active project for a workspace."""
|
||||
project_service = await require_project_service(self.project_service, context)
|
||||
workspace_id = await _require_and_parse_workspace_id(request.workspace_id, context)
|
||||
|
||||
if not request.workspace_id:
|
||||
await abort_invalid_argument(context, ERROR_WORKSPACE_ID_REQUIRED)
|
||||
|
||||
workspace_id = await parse_workspace_id(request.workspace_id, context)
|
||||
|
||||
async with self.create_repository_provider() as uow:
|
||||
async with cast(ProjectRepositoryProvider, self.create_repository_provider()) as uow:
|
||||
await require_feature_projects(uow, context)
|
||||
await require_feature_workspaces(uow, context)
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user