Merge langgraph changes into master
@@ -1,6 +1,25 @@
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest';
import type { NoteFlowAPI } from '@/api/interface';
import type { FinalSegment } from '@/api/types';
import type {
  FinalSegment,
  UpdateWorkspaceSettingsRequest,
  CreateProjectRequest,
  AddProjectMemberRequest,
  UpdateProjectMemberRoleRequest,
  RemoveProjectMemberRequest,
  CreateSummarizationTemplateRequest,
  UpdateSummarizationTemplateRequest,
  ArchiveSummarizationTemplateRequest,
  RestoreSummarizationTemplateVersionRequest,
  RegisterOidcProviderRequest,
  UpdateOidcProviderRequest,
  GetOAuthClientConfigRequest,
  SetOAuthClientConfigRequest,
  UpdateTaskRequest,
  UpdateASRConfigurationRequest,
  UpdateStreamingConfigurationRequest,
  SetHuggingFaceTokenRequest,
} from '@/api/types';

async function loadMockAPI(): Promise<NoteFlowAPI> {
  vi.resetModules();
@@ -12,6 +31,11 @@ async function flushTimers() {
  await vi.runAllTimersAsync();
}

async function run<T>(promise: Promise<T>): Promise<T> {
  await flushTimers();
  return promise;
}

describe('mockAPI', () => {
  beforeEach(() => {
    vi.useFakeTimers();
@@ -528,4 +552,387 @@ describe('mockAPI', () => {
    await flushTimers();
    await metricsDefaultPromise;
  });

  it('merges workspace settings and manages active projects', async () => {
    const mockAPI = await loadMockAPI();

    const workspaceList = await run(mockAPI.listWorkspaces());
    const primaryWorkspace = workspaceList.workspaces[0];
    const secondaryWorkspace = workspaceList.workspaces[1];
    if (!primaryWorkspace || !secondaryWorkspace) {
      throw new Error('Expected default workspaces');
    }

    const settings = await run(
      mockAPI.getWorkspaceSettings({ workspace_id: primaryWorkspace.id })
    );
    expect(settings.export_rules).toBeUndefined();

    const updateRequest: UpdateWorkspaceSettingsRequest = {
      workspace_id: primaryWorkspace.id,
      settings: {
        export_rules: { default_format: 'markdown', include_audio: true },
        trigger_rules: { auto_start_enabled: true },
        rag_enabled: true,
      },
    };
    const updated = await run(mockAPI.updateWorkspaceSettings(updateRequest));
    expect(updated.export_rules?.default_format).toBe('markdown');
    expect(updated.trigger_rules?.auto_start_enabled).toBe(true);
    expect(updated.rag_enabled).toBe(true);

    const mergeRequest: UpdateWorkspaceSettingsRequest = {
      workspace_id: primaryWorkspace.id,
      settings: {
        export_rules: { include_timestamps: true },
        default_summarization_template: 'template-1',
      },
    };
    const merged = await run(mockAPI.updateWorkspaceSettings(mergeRequest));
    expect(merged.export_rules?.default_format).toBe('markdown');
    expect(merged.export_rules?.include_timestamps).toBe(true);
    expect(merged.default_summarization_template).toBe('template-1');

    const createProjectRequest: CreateProjectRequest = {
      workspace_id: primaryWorkspace.id,
      name: ' New Project !! ',
      description: 'Testing',
    };
    const project = await run(mockAPI.createProject(createProjectRequest));
    expect(project.slug).toBe('new-project');

    await run(
      mockAPI.setActiveProject({ workspace_id: primaryWorkspace.id, project_id: project.id })
    );
    const active = await run(mockAPI.getActiveProject({ workspace_id: primaryWorkspace.id }));
    expect(active.project_id).toBe(project.id);

    const missingActive = mockAPI.setActiveProject({
      workspace_id: primaryWorkspace.id,
      project_id: 'missing-project',
    });
    await flushTimers();
    await expect(missingActive).rejects.toThrow('Project not found');

    const wrongWorkspace = mockAPI.setActiveProject({
      workspace_id: secondaryWorkspace.id,
      project_id: project.id,
    });
    await flushTimers();
    await expect(wrongWorkspace).rejects.toThrow('Project does not belong to workspace');

    const projects = await run(
      mockAPI.listProjects({ workspace_id: primaryWorkspace.id, include_archived: true })
    );
    const defaultProject = projects.projects.find((item) => item.is_default);
    if (!defaultProject) {
      throw new Error('Expected default project');
    }
    const archiveDefault = mockAPI.archiveProject(defaultProject.id);
    await flushTimers();
    await expect(archiveDefault).rejects.toThrow('Cannot archive default project');
  });

  it('manages project members and roles', async () => {
    const mockAPI = await loadMockAPI();
    const workspaceList = await run(mockAPI.listWorkspaces());
    const workspace = workspaceList.workspaces[0];
    if (!workspace) {
      throw new Error('Expected workspace');
    }

    const project = await run(
      mockAPI.createProject({ workspace_id: workspace.id, name: 'Members' })
    );

    const addRequest: AddProjectMemberRequest = {
      project_id: project.id,
      user_id: 'user-2',
      role: 'editor',
    };
    const added = await run(mockAPI.addProjectMember(addRequest));
    expect(added.user_id).toBe('user-2');

    const membersPage = await run(
      mockAPI.listProjectMembers({ project_id: project.id, limit: 1, offset: 0 })
    );
    expect(membersPage.total_count).toBeGreaterThan(0);

    const updateRoleRequest: UpdateProjectMemberRoleRequest = {
      project_id: project.id,
      user_id: 'user-2',
      role: 'admin',
    };
    const updated = await run(mockAPI.updateProjectMemberRole(updateRoleRequest));
    expect(updated.role).toBe('admin');

    const missingUpdate = mockAPI.updateProjectMemberRole({
      project_id: project.id,
      user_id: 'missing',
      role: 'viewer',
    });
    await flushTimers();
    await expect(missingUpdate).rejects.toThrow('Membership not found');

    const removeRequest: RemoveProjectMemberRequest = {
      project_id: project.id,
      user_id: 'user-2',
    };
    const removed = await run(mockAPI.removeProjectMember(removeRequest));
    expect(removed.success).toBe(true);

    const removedAgain = await run(mockAPI.removeProjectMember(removeRequest));
    expect(removedAgain.success).toBe(false);
  });

  it('manages summarization templates and versions', async () => {
    const mockAPI = await loadMockAPI();
    const workspaceList = await run(mockAPI.listWorkspaces());
    const workspace = workspaceList.workspaces[0];
    if (!workspace) {
      throw new Error('Expected workspace');
    }

    const templates = await run(
      mockAPI.listSummarizationTemplates({ workspace_id: workspace.id, include_system: true })
    );
    const systemTemplate = templates.templates.find((item) => item.is_system);
    if (!systemTemplate) {
      throw new Error('Expected system template');
    }

    const createRequest: CreateSummarizationTemplateRequest = {
      workspace_id: workspace.id,
      name: ' Custom Template ',
      description: ' Description ',
      content: 'Hello {{meeting.title}}',
      change_note: ' Initial ',
    };
    const created = await run(mockAPI.createSummarizationTemplate(createRequest));
    expect(created.template.name).toBe('Custom Template');
    expect(created.version.version_number).toBe(1);

    const fullTemplate = await run(
      mockAPI.getSummarizationTemplate({
        template_id: created.template.id,
        include_current_version: true,
      })
    );
    expect(fullTemplate.current_version?.id).toBe(created.version.id);

    const withoutVersion = await run(
      mockAPI.getSummarizationTemplate({
        template_id: created.template.id,
        include_current_version: false,
      })
    );
    expect(withoutVersion.current_version).toBeUndefined();

    const updateRequest: UpdateSummarizationTemplateRequest = {
      template_id: created.template.id,
      name: ' Updated ',
      content: 'Updated content',
      change_note: ' Revised ',
    };
    const updated = await run(mockAPI.updateSummarizationTemplate(updateRequest));
    expect(updated.template.name).toBe('Updated');
    expect(updated.version?.version_number).toBe(2);

    const versions = await run(
      mockAPI.listSummarizationTemplateVersions({ template_id: created.template.id })
    );
    expect(versions.total_count).toBe(2);

    const restoreRequest: RestoreSummarizationTemplateVersionRequest = {
      template_id: created.template.id,
      version_id: created.version.id,
    };
    const restored = await run(mockAPI.restoreSummarizationTemplateVersion(restoreRequest));
    expect(restored.current_version_id).toBe(created.version.id);

    const archiveRequest: ArchiveSummarizationTemplateRequest = {
      template_id: created.template.id,
    };
    const archived = await run(mockAPI.archiveSummarizationTemplate(archiveRequest));
    expect(archived.is_archived).toBe(true);

    const activeList = await run(
      mockAPI.listSummarizationTemplates({ workspace_id: workspace.id, include_archived: false })
    );
    expect(activeList.templates.some((item) => item.id === created.template.id)).toBe(false);

    const archivedList = await run(
      mockAPI.listSummarizationTemplates({ workspace_id: workspace.id, include_archived: true })
    );
    expect(archivedList.templates.some((item) => item.id === created.template.id)).toBe(true);

    const updateSystem = mockAPI.updateSummarizationTemplate({
      template_id: systemTemplate.id,
      name: 'Nope',
    });
    await flushTimers();
    await expect(updateSystem).rejects.toThrow('System templates are read-only');

    const archiveSystem = mockAPI.archiveSummarizationTemplate({
      template_id: systemTemplate.id,
    });
    await flushTimers();
    await expect(archiveSystem).rejects.toThrow('System templates are read-only');

    const systemVersions = await run(
      mockAPI.listSummarizationTemplateVersions({ template_id: systemTemplate.id })
    );
    const restoreSystem = mockAPI.restoreSummarizationTemplateVersion({
      template_id: systemTemplate.id,
      version_id: systemVersions.versions[0]?.id ?? 'missing',
    });
    await flushTimers();
    await expect(restoreSystem).rejects.toThrow('System templates are read-only');
  });

  it('handles auth, calendar, oidc, and config flows', async () => {
    const mockAPI = await loadMockAPI();

    const auth = await run(mockAPI.initiateAuthLogin('google'));
    expect(auth.auth_url).toContain('http');

    const completed = await run(mockAPI.completeAuthLogin('google', 'code', 'state'));
    expect(completed.display_name).toBe('Google User');

    const logout = await run(mockAPI.logout());
    expect(logout.tokens_revoked).toBe(true);

    const providers = await run(mockAPI.getCalendarProviders());
    expect(providers.providers.length).toBeGreaterThan(0);

    const calendarAuth = await run(mockAPI.initiateCalendarAuth('google'));
    expect(calendarAuth.auth_url).toContain('http');

    const calendarComplete = await run(mockAPI.completeCalendarAuth('google', 'code', 'state'));
    expect(calendarComplete.success).toBe(true);

    const oauthStatus = await run(mockAPI.getOAuthConnectionStatus('google'));
    expect(oauthStatus.connection.provider).toBe('google');

    const oauthConfigRequest: GetOAuthClientConfigRequest = { provider: 'google' };
    const oauthConfig = await run(mockAPI.getOAuthClientConfig(oauthConfigRequest));
    expect(oauthConfig.config.override_enabled).toBe(false);

    const setConfigRequest: SetOAuthClientConfigRequest = {
      provider: 'google',
      config: {
        client_id: 'id',
        redirect_uri: 'http://localhost',
        scopes: ['openid'],
        override_enabled: true,
      },
    };
    const setConfig = await run(mockAPI.setOAuthClientConfig(setConfigRequest));
    expect(setConfig.success).toBe(true);

    const diagnostics = await run(mockAPI.runConnectionDiagnostics());
    expect(diagnostics.clientConnected).toBe(false);

    const listEvents = await run(mockAPI.listCalendarEvents());
    expect(listEvents.events).toHaveLength(0);

    const oidcRegister: RegisterOidcProviderRequest = {
      workspace_id: 'workspace-1',
      name: 'Provider',
      issuer_url: 'https://issuer',
      client_id: 'client',
      preset: 'custom',
      scopes: [],
      allowed_groups: [],
      auto_discover: true,
    };
    const oidcProvider = await run(mockAPI.registerOidcProvider(oidcRegister));
    expect(oidcProvider.discovery).toBeDefined();

    const listProviders = await run(mockAPI.listOidcProviders(undefined, true));
    expect(listProviders.total_count).toBeGreaterThan(0);

    const getProvider = await run(mockAPI.getOidcProvider(oidcProvider.id));
    expect(getProvider.id).toBe(oidcProvider.id);

    const updateRequest: UpdateOidcProviderRequest = {
      provider_id: oidcProvider.id,
      scopes: [],
      allowed_groups: ['admins'],
      enabled: false,
    };
    const updated = await run(mockAPI.updateOidcProvider(updateRequest));
    expect(updated.enabled).toBe(false);

    const refreshed = await run(mockAPI.refreshOidcDiscovery(oidcProvider.id));
    expect(refreshed.success_count).toBe(1);

    const refreshMissing = await run(mockAPI.refreshOidcDiscovery('missing'));
    expect(refreshMissing.failure_count).toBe(1);

    const presets = await run(mockAPI.listOidcPresets());
    expect(presets.presets.length).toBeGreaterThan(0);

    const deleted = await run(mockAPI.deleteOidcProvider(oidcProvider.id));
    expect(deleted.success).toBe(true);
  });

  it('handles ASR, streaming, token, tasks, and analytics flows', async () => {
    const mockAPI = await loadMockAPI();

    const asrConfig = await run(mockAPI.getAsrConfiguration());
    expect(asrConfig.availableModelSizes.length).toBeGreaterThan(0);

    const updateAsrRequest: UpdateASRConfigurationRequest = {
      modelSize: 'base',
      device: 'cpu',
      computeType: 'int8',
    };
    const asrUpdate = await run(mockAPI.updateAsrConfiguration(updateAsrRequest));
    expect(asrUpdate.accepted).toBe(true);

    const asrStatus = await run(mockAPI.getAsrJobStatus(asrUpdate.jobId));
    expect(asrStatus.status).toBe('completed');

    const streaming = await run(mockAPI.getStreamingConfiguration());
    expect(streaming.maxSegmentDurationSeconds).toBeGreaterThan(0);

    const updateStreamingRequest: UpdateStreamingConfigurationRequest = {
      partialCadenceSeconds: 1.5,
    };
    const updatedStreaming = await run(
      mockAPI.updateStreamingConfiguration(updateStreamingRequest)
    );
    expect(updatedStreaming.partialCadenceSeconds).toBe(2.0);

    const tokenRequest: SetHuggingFaceTokenRequest = { token: 'hf_token', validate: true };
    const tokenResult = await run(mockAPI.setHuggingFaceToken(tokenRequest));
    expect(tokenResult.success).toBe(true);

    const tokenStatus = await run(mockAPI.getHuggingFaceTokenStatus());
    expect(tokenStatus.isConfigured).toBe(false);

    const tokenValidation = await run(mockAPI.validateHuggingFaceToken());
    expect(tokenValidation.valid).toBe(false);

    const tasks = await run(mockAPI.listTasks());
    expect(tasks.total_count).toBe(0);

    const updateTaskRequest: UpdateTaskRequest = {
      task_id: 'task-1',
      text: 'Follow up',
      status: 'done',
    };
    const updatedTask = await run(mockAPI.updateTask(updateTaskRequest));
    expect(updatedTask.status).toBe('done');

    const overview = await run(mockAPI.getAnalyticsOverview());
    expect(overview.total_meetings).toBe(0);

    const speakerStats = await run(mockAPI.listSpeakerStats());
    expect(speakerStats.speakers).toHaveLength(0);

    const entityAnalytics = await run(mockAPI.getEntityAnalytics());
    expect(entityAnalytics.total_entities).toBe(0);
  });
});

@@ -34,6 +34,10 @@ dependencies = [
    "sounddevice>=0.5.3",
    "spacy>=3.8.11",
    "openai-whisper>=20250625",
    "langgraph>=1.0.6",
    "langgraph-checkpoint-postgres>=3.0.3",
    "langgraph-checkpoint-redis>=0.3.2",
    "psycopg>=3.3.2",
]

[project.optional-dependencies]

@@ -2,6 +2,7 @@

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from typing import Final, Protocol, TypedDict, cast

@@ -34,7 +35,7 @@ class AsrPreferenceResolution:


class _TorchCudaModule(Protocol):
    def is_available(self) -> bool: ...
    is_available: Callable[[], bool]


class _TorchModule(Protocol):

@@ -12,6 +12,8 @@ from dataclasses import dataclass, field
from enum import StrEnum
from typing import Final

from noteflow.domain.constants.fields import ACTION, CONFIG, COUNT


class InterruptType(StrEnum):
    """Types of human-in-the-loop interrupts."""
@@ -29,9 +31,19 @@ class InterruptAction(StrEnum):
    MODIFY = "modify"


DEFAULT_WEB_SEARCH_OPTIONS: Final[tuple[str, ...]] = ("approve", "reject")
DEFAULT_ANNOTATION_OPTIONS: Final[tuple[str, ...]] = ("approve", "reject", "modify")
DEFAULT_SENSITIVE_OPTIONS: Final[tuple[str, ...]] = ("approve", "reject")
DEFAULT_WEB_SEARCH_OPTIONS: Final[tuple[str, ...]] = (
    InterruptAction.APPROVE.value,
    InterruptAction.REJECT.value,
)
DEFAULT_ANNOTATION_OPTIONS: Final[tuple[str, ...]] = (
    InterruptAction.APPROVE.value,
    InterruptAction.REJECT.value,
    InterruptAction.MODIFY.value,
)
DEFAULT_SENSITIVE_OPTIONS: Final[tuple[str, ...]] = (
    InterruptAction.APPROVE.value,
    InterruptAction.REJECT.value,
)


@dataclass(frozen=True)
@@ -61,18 +73,18 @@ class InterruptRequest:
    interrupt_type: InterruptType
    message: str
    context: dict[str, object] = field(default_factory=dict)
    options: tuple[str, ...] = field(default_factory=lambda: ("approve", "reject"))
    options: tuple[str, ...] = field(default_factory=lambda: DEFAULT_WEB_SEARCH_OPTIONS)
    config: InterruptConfig = field(default_factory=InterruptConfig)
    request_id: str = ""

    def to_dict(self) -> dict[str, object]:
    def to_request_payload(self) -> dict[str, object]:
        """Convert to dictionary for serialization."""
        return {
            "interrupt_type": self.interrupt_type,
            "message": self.message,
            "context": self.context,
            "options": list(self.options),
            "config": {
            CONFIG: {
                "allow_ignore": self.config.allow_ignore,
                "allow_modify": self.config.allow_modify,
                "timeout_seconds": self.config.timeout_seconds,
@@ -114,10 +126,10 @@ class InterruptResponse:
        """Check if the action was modified."""
        return self.action == InterruptAction.MODIFY

    def to_dict(self) -> dict[str, object]:
    def to_response_payload(self) -> dict[str, object]:
        """Convert to dictionary for serialization."""
        result: dict[str, object] = {
            "action": self.action,
            ACTION: self.action,
            "request_id": self.request_id,
        }
        if self.modified_value is not None:
@@ -143,11 +155,13 @@ def create_web_search_interrupt(
    Returns:
        InterruptRequest configured for web search approval.
    """
    options = DEFAULT_ANNOTATION_OPTIONS if allow_modify else DEFAULT_WEB_SEARCH_OPTIONS
    message = _build_web_search_message(query)
    return InterruptRequest(
        interrupt_type=InterruptType.WEB_SEARCH_APPROVAL,
        message=f"Allow web search for additional context? Query: {query[:100]}",
        message=message,
        context={"query": query},
        options=("approve", "reject", "modify") if allow_modify else DEFAULT_WEB_SEARCH_OPTIONS,
        options=options,
        config=InterruptConfig(allow_modify=allow_modify),
        request_id=request_id,
    )
@@ -170,7 +184,7 @@ def create_annotation_interrupt(
    return InterruptRequest(
        interrupt_type=InterruptType.ANNOTATION_APPROVAL,
        message=f"Apply {count} suggested annotation(s)?",
        context={"annotations": annotations, "count": count},
        context={"annotations": annotations, COUNT: count},
        options=DEFAULT_ANNOTATION_OPTIONS,
        config=InterruptConfig(allow_modify=True, allow_ignore=True),
        request_id=request_id,
@@ -192,11 +206,20 @@ def create_sensitive_action_interrupt(
    Returns:
        InterruptRequest configured for sensitive action confirmation.
    """
    message = _build_sensitive_action_message(action_name)
    return InterruptRequest(
        interrupt_type=InterruptType.SENSITIVE_ACTION,
        message=f"Confirm action: {action_name}",
        message=message,
        context={"action_name": action_name, "description": action_description},
        options=DEFAULT_SENSITIVE_OPTIONS,
        config=InterruptConfig(allow_ignore=False),
        request_id=request_id,
    )


def _build_web_search_message(query: str) -> str:
    return f"Allow web search for additional context? Query: {query[:100]}"


def _build_sensitive_action_message(action_name: str) -> str:
    return f"Confirm action: {action_name}"

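For illustration, a minimal sketch of the interrupt round-trip with the renamed payload methods; the names match this diff, while the request id and query are hypothetical:

    request = create_web_search_interrupt(query="quarterly revenue", allow_modify=True, request_id="req-1")
    payload = request.to_request_payload()  # serialized for the client, including options and config
    response = InterruptResponse(action=InterruptAction.APPROVE, request_id=request.request_id)
    result = response.to_response_payload()  # -> {"action": "approve", "request_id": "req-1"} (StrEnum serializes as its value)
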
@@ -2,6 +2,7 @@

from __future__ import annotations

from dataclasses import dataclass
from typing import TYPE_CHECKING, Protocol

if TYPE_CHECKING:
@@ -13,14 +14,16 @@ if TYPE_CHECKING:
class AssistantPort(Protocol):
    """Protocol for AI assistant operations."""

    async def ask(
        self,
        question: str,
        user_id: UUID,
        meeting_id: UUID | None = None,
        thread_id: str | None = None,
        allow_web: bool = False,
        top_k: int = 8,
    ) -> AssistantOutputState:
    async def ask(self, request: AssistantRequest) -> AssistantOutputState:
        """Ask a question about meeting transcript(s)."""
        ...


@dataclass(frozen=True)
class AssistantRequest:
    question: str
    user_id: UUID
    meeting_id: UUID | None = None
    thread_id: str | None = None
    allow_web: bool = False
    top_k: int = 8
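A sketch of the new single-argument call shape, assuming a hypothetical AssistantPort implementation named assistant and a made-up user id:

    from uuid import uuid4

    request = AssistantRequest(question="What did we decide about pricing?", user_id=uuid4())
    state = await assistant.ask(request)  # replaces the old multi-parameter ask(question, user_id, ...)
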
@@ -24,6 +24,9 @@ UPDATED_AT: Final[Literal["updated_at"]] = "updated_at"
PROJECT_ID: Final[str] = "project_id"
PROJECT_IDS: Final[str] = "project_ids"
OPTIONS: Final[str] = "options"
ACTION: Final[Literal["action"]] = "action"
COUNT: Final[Literal["count"]] = "count"
CONFIG: Final[Literal["config"]] = "config"
CALENDAR: Final[str] = "calendar"
CLAIM_MAPPING: Final[str] = "claim_mapping"
REQUIRE_EMAIL_VERIFIED: Final[str] = "require_email_verified"

@@ -7,7 +7,7 @@ from uuid import UUID

from noteflow.application.services.calendar import CalendarServiceError
from noteflow.config.constants.errors import ERROR_WORKSPACE_ADMIN_REQUIRED
from noteflow.domain.constants.fields import ENTITY_WORKSPACE
from noteflow.domain.constants.fields import CONFIG, ENTITY_WORKSPACE
from noteflow.domain.value_objects import OAuthClientConfig

from ..proto import noteflow_pb2
@@ -115,7 +115,7 @@ class CalendarOAuthConfigMixin:
    if not request.provider:
        await abort_invalid_argument(context, "Provider is required")
        raise AssertionError(UNREACHABLE_ERROR) from None
    if not request.HasField("config"):
    if not request.HasField(CONFIG):
        await abort_invalid_argument(context, "OAuth config is required")
        raise AssertionError(UNREACHABLE_ERROR) from None

56 src/noteflow/infrastructure/ai/_langgraph_compat.py Normal file
@@ -0,0 +1,56 @@
"""LangGraph compatibility layer for basedpyright strict mode.

This module provides typed wrappers for LangGraph types that have incomplete
or incorrect type annotations. All pyright: ignore comments are isolated here
to keep the rest of the codebase clean.

The LangGraph Command class is a frozen dataclass with Generic[N] inheritance
that basedpyright misinterprets as requiring a 'value' parameter. This module
wraps Command construction to provide proper typing.

Issue: basedpyright reports "Argument missing for parameter 'value'" on
Command() calls even though Command has no 'value' parameter. This appears
to be a basedpyright issue with Generic dataclasses.

Usage:
    from noteflow.infrastructure.ai._langgraph_compat import create_command
"""

from __future__ import annotations

from typing import TYPE_CHECKING, Any, cast

from langgraph.types import Command

if TYPE_CHECKING:
    pass


def create_command(
    *,
    graph: str | None = None,
    update: Any | None = None,
    resume: dict[str, Any] | Any | None = None,
) -> Command[Any]:
    """Create a LangGraph Command with proper typing.

    This wrapper isolates the pyright: ignore needed for Command construction
    due to basedpyright's incorrect handling of Generic dataclasses.

    Args:
        graph: Graph to send the command to (None = current graph).
        update: Update to apply to the graph's state.
        resume: Value to resume execution with (for interrupt handling).

    Returns:
        A properly typed Command instance.
    """
    command = cast(
        Command[Any],
        Command(  # pyright: ignore[reportCallIssue]
            graph=graph,
            update=update,
            resume=resume,
        ),
    )
    return command
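A hedged usage sketch for the wrapper when resuming an interrupted run; graph_app and the thread id are hypothetical, and passing a Command with a resume value as graph input is standard LangGraph practice:

    from noteflow.infrastructure.ai._langgraph_compat import create_command

    cmd = create_command(resume={"action": "approve", "request_id": "req-1"})
    result = await graph_app.ainvoke(cmd, config={"configurable": {"thread_id": "thread-1"}})
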
@@ -28,7 +28,7 @@ class CacheEntry:
    embedding: tuple[float, ...]
    created_at: float

    def is_expired(self, ttl_seconds: float, current_time: float) -> bool:
    def is_cache_expired(self, ttl_seconds: float, current_time: float) -> bool:
        """Check if entry has expired based on TTL."""
        return (current_time - self.created_at) > ttl_seconds

@@ -72,62 +72,23 @@ class EmbeddingCache:
        key = self._compute_key(text)
        current_time = time.monotonic()

        existing_future: asyncio.Future[list[float]] | None = None
        cached = await self._get_cached_embedding(key, current_time, count_hit=True)
        if cached is not None:
            return cached

        async with self._lock:
            if key in self._cache:
                entry = self._cache[key]
                if not entry.is_expired(self.ttl_seconds, current_time):
                    self._cache.move_to_end(key)
                    self._stats.hits += 1
                    logger.debug("cache_hit", key=key[:16])
                    return list(entry.embedding)
                del self._cache[key]
                self._stats.expirations += 1
                logger.debug("cache_expired", key=key[:16])

            if key in self._in_flight:
                logger.debug("cache_in_flight_join", key=key[:16])
                existing_future = self._in_flight[key]

        if existing_future is not None:
            return list(await existing_future)

        new_future: asyncio.Future[list[float]] = asyncio.get_running_loop().create_future()

        async with self._lock:
            if key in self._in_flight:
                existing_future = self._in_flight[key]
            else:
                self._stats.misses += 1
                self._in_flight[key] = new_future

        if existing_future is not None:
            return list(await existing_future)
        future, is_new = await self._get_or_register_in_flight(key)
        if not is_new:
            return list(await future)

        try:
            embedding = await embedder.embed(text)
        except Exception:
            async with self._lock:
                _ = self._in_flight.pop(key, None)
            new_future.set_exception(asyncio.CancelledError())
            await self._clear_in_flight(key)
            future.set_exception(asyncio.CancelledError())
            raise

        async with self._lock:
            _ = self._in_flight.pop(key, None)

            while len(self._cache) >= self.max_size:
                evicted_key, _ = self._cache.popitem(last=False)
                self._stats.evictions += 1
                logger.debug("cache_eviction", evicted_key=evicted_key[:16])

            self._cache[key] = CacheEntry(
                embedding=tuple(embedding),
                created_at=current_time,
            )
            logger.debug("cache_store", key=key[:16])

        new_future.set_result(embedding)
        await self._store_embedding(key, embedding, current_time)
        future.set_result(embedding)
        return embedding

    async def get(self, text: str) -> list[float] | None:
@@ -141,18 +102,7 @@ class EmbeddingCache:
        """
        key = self._compute_key(text)
        current_time = time.monotonic()

        async with self._lock:
            if key in self._cache:
                entry = self._cache[key]
                if not entry.is_expired(self.ttl_seconds, current_time):
                    self._cache.move_to_end(key)
                    return list(entry.embedding)
                else:
                    del self._cache[key]
                    self._stats.expirations += 1

        return None
        return await self._get_cached_embedding(key, current_time, count_hit=False)

    async def clear(self) -> int:
        """Clear all cached entries.
@@ -171,15 +121,75 @@ class EmbeddingCache:
        async with self._lock:
            return len(self._cache)

    def get_stats(self) -> EmbeddingCacheStats:
    def stats_snapshot(self) -> EmbeddingCacheStats:
        """Get cache statistics (not async - reads are atomic)."""
        stats = self._stats
        return EmbeddingCacheStats(
            hits=self._stats.hits,
            misses=self._stats.misses,
            evictions=self._stats.evictions,
            expirations=self._stats.expirations,
            hits=stats.hits,
            misses=stats.misses,
            evictions=stats.evictions,
            expirations=stats.expirations,
        )

    async def _get_cached_embedding(
        self,
        key: str,
        current_time: float,
        *,
        count_hit: bool,
    ) -> list[float] | None:
        async with self._lock:
            entry = self._cache.get(key)
            if entry is None:
                return None
            if entry.is_cache_expired(self.ttl_seconds, current_time):
                del self._cache[key]
                self._stats.expirations += 1
                logger.debug("cache_expired", key=key[:16])
                return None
            self._cache.move_to_end(key)
            if count_hit:
                self._stats.hits += 1
                logger.debug("cache_hit", key=key[:16])
            return list(entry.embedding)

    async def _get_or_register_in_flight(
        self,
        key: str,
    ) -> tuple[asyncio.Future[list[float]], bool]:
        async with self._lock:
            if key in self._in_flight:
                logger.debug("cache_in_flight_join", key=key[:16])
                return self._in_flight[key], False
            future: asyncio.Future[list[float]] = asyncio.get_running_loop().create_future()
            self._stats.misses += 1
            self._in_flight[key] = future
            return future, True

    async def _clear_in_flight(self, key: str) -> None:
        async with self._lock:
            _ = self._in_flight.pop(key, None)

    async def _store_embedding(
        self,
        key: str,
        embedding: list[float],
        current_time: float,
    ) -> None:
        async with self._lock:
            _ = self._in_flight.pop(key, None)

            while len(self._cache) >= self.max_size:
                evicted_key, _ = self._cache.popitem(last=False)
                self._stats.evictions += 1
                logger.debug("cache_eviction", evicted_key=evicted_key[:16])

            self._cache[key] = CacheEntry(
                embedding=tuple(embedding),
                created_at=current_time,
            )
            logger.debug("cache_store", key=key[:16])


class CachedEmbedder:
    """Wrapper that adds caching to any EmbedderProtocol implementation.

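A minimal sketch of the single-flight behavior the refactor preserves: concurrent lookups for the same text share one embed() call. The public method name is hypothetical here (the hunk starts mid-method), as are the cache and embedder instances:

    import asyncio

    async def demo(cache: EmbeddingCache, embedder) -> None:
        first, second = await asyncio.gather(
            cache.get_or_compute("hello world", embedder),  # hypothetical public name
            cache.get_or_compute("hello world", embedder),  # joins the first call's in-flight future
        )
        assert first == second
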
23 src/noteflow/infrastructure/ai/constants.py Normal file
@@ -0,0 +1,23 @@
"""Shared constants for AI graph state keys and node names."""

from __future__ import annotations

from typing import Final

# Graph node names
NODE_RETRIEVE: Final[str] = "retrieve"
NODE_VERIFY: Final[str] = "verify"
NODE_SYNTHESIZE: Final[str] = "synthesize"
NODE_WEB_SEARCH_APPROVAL: Final[str] = "web_search_approval"
NODE_WEB_SEARCH: Final[str] = "web_search"
NODE_SUMMARIZE: Final[str] = "summarize"

# Graph state keys
STATE_QUESTION: Final[str] = "question"
STATE_RETRIEVED_SEGMENTS: Final[str] = "retrieved_segments"
STATE_VERIFICATION_PASSED: Final[str] = "verification_passed"
STATE_WEB_SEARCH_APPROVED: Final[str] = "web_search_approved"
STATE_WEB_CONTEXT: Final[str] = "web_context"
STATE_ANSWER: Final[str] = "answer"
STATE_CITATIONS: Final[str] = "citations"
STATE_SUGGESTED_ANNOTATIONS: Final[str] = "suggested_annotations"
44 src/noteflow/infrastructure/ai/graphs/_shared.py Normal file
@@ -0,0 +1,44 @@
"""Shared helpers for AI graph wiring."""

from __future__ import annotations

from typing import Protocol

from noteflow.infrastructure.ai.constants import (
    NODE_RETRIEVE,
    NODE_SYNTHESIZE,
    NODE_VERIFY,
    NODE_WEB_SEARCH,
    NODE_WEB_SEARCH_APPROVAL,
    STATE_ANSWER,
    STATE_CITATIONS,
    STATE_SUGGESTED_ANNOTATIONS,
)


class GraphBuilder(Protocol):
    def add_edge(self, start: object, end: object) -> None: ...


def build_no_information_payload(answer: str) -> dict[str, object]:
    return {
        STATE_ANSWER: answer,
        STATE_CITATIONS: [],
        STATE_SUGGESTED_ANNOTATIONS: [],
    }


def connect_web_search_flow(builder: GraphBuilder, start: object, end: object) -> None:
    builder.add_edge(start, NODE_RETRIEVE)
    builder.add_edge(NODE_RETRIEVE, NODE_VERIFY)
    builder.add_edge(NODE_VERIFY, NODE_WEB_SEARCH_APPROVAL)
    builder.add_edge(NODE_WEB_SEARCH_APPROVAL, NODE_WEB_SEARCH)
    builder.add_edge(NODE_WEB_SEARCH, NODE_SYNTHESIZE)
    builder.add_edge(NODE_SYNTHESIZE, end)


def connect_base_flow(builder: GraphBuilder, start: object, end: object) -> None:
    builder.add_edge(start, NODE_RETRIEVE)
    builder.add_edge(NODE_RETRIEVE, NODE_VERIFY)
    builder.add_edge(NODE_VERIFY, NODE_SYNTHESIZE)
    builder.add_edge(NODE_SYNTHESIZE, end)
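Because GraphBuilder is a structural Protocol, LangGraph's StateGraph satisfies it through its add_edge method, so the helpers can wire a real graph directly. A sketch, where MyState and the node callables are hypothetical:

    from langgraph.graph import END, START, StateGraph

    builder = StateGraph(MyState)
    builder.add_node(NODE_RETRIEVE, retrieve_node)
    builder.add_node(NODE_VERIFY, verify_node)
    builder.add_node(NODE_SYNTHESIZE, synthesize_node)
    connect_base_flow(builder, START, END)
    app = builder.compile()
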
@@ -2,8 +2,9 @@

from __future__ import annotations

from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING, Final, TypedDict
from typing import TYPE_CHECKING, Final, TypedDict, cast

if TYPE_CHECKING:
    from langgraph.graph import CompiledStateGraph
@@ -12,6 +13,7 @@ if TYPE_CHECKING:
    from noteflow.domain.value_objects import MeetingId
    from noteflow.infrastructure.ai.nodes.annotation_suggester import SuggestedAnnotation
    from noteflow.infrastructure.ai.nodes.web_search import WebSearchProvider
    from noteflow.infrastructure.ai.tools.synthesis import SynthesisResult
    from noteflow.infrastructure.ai.tools.retrieval import (
        EmbedderProtocol,
        RetrievalResult,
@@ -19,6 +21,27 @@ if TYPE_CHECKING:
    )
    from noteflow.infrastructure.ai.tools.synthesis import LLMProtocol

from noteflow.infrastructure.ai.constants import (
    NODE_RETRIEVE,
    NODE_SYNTHESIZE,
    NODE_VERIFY,
    NODE_WEB_SEARCH,
    NODE_WEB_SEARCH_APPROVAL,
    STATE_ANSWER,
    STATE_CITATIONS,
    STATE_QUESTION,
    STATE_RETRIEVED_SEGMENTS,
    STATE_SUGGESTED_ANNOTATIONS,
    STATE_VERIFICATION_PASSED,
    STATE_WEB_CONTEXT,
    STATE_WEB_SEARCH_APPROVED,
)
from noteflow.infrastructure.ai.graphs._shared import (
    build_no_information_payload,
    connect_base_flow,
    connect_web_search_flow,
)

MEETING_QA_GRAPH_NAME: Final[str] = "meeting_qa"
MEETING_QA_GRAPH_VERSION: Final[int] = 2
NO_INFORMATION_ANSWER: Final[str] = "I couldn't find relevant information in this meeting."
@@ -31,6 +54,14 @@ class MeetingQAConfig:
    require_annotation_approval: bool = False


@dataclass(frozen=True)
class MeetingQADependencies:
    embedder: "EmbedderProtocol"
    segment_repo: "SegmentSearchProtocol"
    llm: "LLMProtocol"
    web_search_provider: "WebSearchProvider | None" = None


class MeetingQAInputState(TypedDict):
    question: str
    meeting_id: MeetingId
@@ -52,142 +83,197 @@ class MeetingQAInternalState(MeetingQAInputState, MeetingQAOutputState):


def build_meeting_qa_graph(
    embedder: EmbedderProtocol,
    segment_repo: SegmentSearchProtocol,
    llm: LLMProtocol,
    deps: MeetingQADependencies,
    *,
    web_search_provider: WebSearchProvider | None = None,
    config: MeetingQAConfig | None = None,
    checkpointer: object | None = None,
) -> CompiledStateGraph[MeetingQAInternalState]:
    """Build a Q&A graph for single-meeting questions with segment citations.

    Graph flow (with web search): retrieve -> verify -> [web_search_approval] -> [web_search] -> synthesize
    Graph flow (without): retrieve -> verify -> synthesize

    Args:
        embedder: Protocol for generating text embeddings.
        segment_repo: Protocol for semantic segment search.
        llm: Protocol for LLM text completion.
        web_search_provider: Optional web search provider for augmentation.
        config: Graph configuration for features/interrupts.
        checkpointer: Optional checkpointer for interrupt support.

    Returns:
        Compiled graph that accepts question/meeting_id and returns answer/citations.
    """
    """Build a Q&A graph for single-meeting questions with segment citations."""
    from langgraph.graph import END, START, StateGraph

    from noteflow.domain.ai.citations import SegmentCitation
    from noteflow.infrastructure.ai.interrupts import check_web_search_approval
    from noteflow.infrastructure.ai.nodes.annotation_suggester import (
        extract_annotations_from_answer,
    effective_config = config or MeetingQAConfig()
    nodes = _build_meeting_nodes(deps, effective_config)

    builder: StateGraph[MeetingQAInternalState] = StateGraph(MeetingQAInternalState)
    builder.add_node(NODE_RETRIEVE, nodes.retrieve)
    builder.add_node(NODE_VERIFY, nodes.verify)
    builder.add_node(NODE_SYNTHESIZE, nodes.synthesize)

    if nodes.web_search_approval is not None and nodes.web_search is not None:
        builder.add_node(NODE_WEB_SEARCH_APPROVAL, nodes.web_search_approval)
        builder.add_node(NODE_WEB_SEARCH, nodes.web_search)
        connect_web_search_flow(builder, START, END)
    else:
        connect_base_flow(builder, START, END)

    compile_method = getattr(builder, "compile")
    compiled: CompiledStateGraph[MeetingQAInternalState] = compile_method(checkpointer=checkpointer)
    return compiled


@dataclass(frozen=True)
class _MeetingQANodes:
    retrieve: Callable[[MeetingQAInternalState], Awaitable[dict[str, object]]]
    verify: Callable[[MeetingQAInternalState], Awaitable[dict[str, object]]]
    synthesize: Callable[[MeetingQAInternalState], Awaitable[dict[str, object]]]
    web_search_approval: Callable[[MeetingQAInternalState], Awaitable[dict[str, object]]] | None
    web_search: Callable[[MeetingQAInternalState], Awaitable[dict[str, object]]] | None


def _build_meeting_nodes(
    deps: MeetingQADependencies,
    config: MeetingQAConfig,
) -> _MeetingQANodes:
    allow_web_search = config.enable_web_search and deps.web_search_provider is not None
    return _MeetingQANodes(
        retrieve=_build_meeting_retrieve_node(deps),
        verify=_build_verify_node(),
        synthesize=_build_meeting_synthesize_node(deps),
        web_search_approval=_build_meeting_web_search_approval_node(deps, config)
        if allow_web_search
        else None,
        web_search=_build_meeting_web_search_node(deps, config) if allow_web_search else None,
    )


def _build_meeting_retrieve_node(
    deps: MeetingQADependencies,
) -> Callable[[MeetingQAInternalState], Awaitable[dict[str, object]]]:
    from noteflow.infrastructure.ai.tools.retrieval import (
        MeetingRetrievalDependencies,
        MeetingRetrievalRequest,
        retrieve_segments,
    )

    async def retrieve_node(state: MeetingQAInternalState) -> dict[str, object]:
        question = cast(str, state[STATE_QUESTION])
        request = MeetingRetrievalRequest(
            query=question,
            meeting_id=state["meeting_id"],
            top_k=state["top_k"],
        )
        results = await retrieve_segments(
            request,
            MeetingRetrievalDependencies(
                embedder=deps.embedder,
                segment_repo=deps.segment_repo,
            ),
        )
        return {STATE_RETRIEVED_SEGMENTS: results}

    return retrieve_node


def _build_verify_node() -> Callable[[MeetingQAInternalState], Awaitable[dict[str, object]]]:
    async def verify_node(state: MeetingQAInternalState) -> dict[str, object]:
        segments = cast(list["RetrievalResult"], state[STATE_RETRIEVED_SEGMENTS])
        has_segments = len(segments) > 0
        return {STATE_VERIFICATION_PASSED: has_segments}

    return verify_node


def _build_meeting_web_search_approval_node(
    deps: MeetingQADependencies,
    config: MeetingQAConfig,
) -> Callable[[MeetingQAInternalState], Awaitable[dict[str, object]]]:
    from noteflow.infrastructure.ai.interrupts import check_web_search_approval
    from noteflow.infrastructure.ai.nodes.web_search import derive_search_query

    async def web_search_approval_node(state: MeetingQAInternalState) -> dict[str, object]:
        if not config.require_web_approval:
            return {STATE_WEB_SEARCH_APPROVED: True}
        question = cast(str, state[STATE_QUESTION])
        query = derive_search_query(question)
        approved = check_web_search_approval(query, require_approval=True)
        return {STATE_WEB_SEARCH_APPROVED: approved}

    return web_search_approval_node


def _build_meeting_web_search_node(
    deps: MeetingQADependencies,
    config: MeetingQAConfig,
) -> Callable[[MeetingQAInternalState], Awaitable[dict[str, object]]]:
    from noteflow.infrastructure.ai.nodes.web_search import (
        WebSearchConfig,
        derive_search_query,
        execute_web_search,
        format_results_for_context,
    )
    from noteflow.infrastructure.ai.tools.retrieval import retrieve_segments
    from noteflow.infrastructure.ai.tools.synthesis import synthesize_answer

    effective_config = config or MeetingQAConfig()

    async def retrieve_node(state: MeetingQAInternalState) -> dict[str, object]:
        results = await retrieve_segments(
            query=state["question"],
            embedder=embedder,
            segment_repo=segment_repo,
            meeting_id=state["meeting_id"],
            top_k=state["top_k"],
        )
        return {"retrieved_segments": results}

    async def verify_node(state: MeetingQAInternalState) -> dict[str, object]:
        has_segments = len(state["retrieved_segments"]) > 0
        return {"verification_passed": has_segments}

    def web_search_approval_node(state: MeetingQAInternalState) -> dict[str, object]:
        if not effective_config.enable_web_search or web_search_provider is None:
            return {"web_search_approved": False}

        if not effective_config.require_web_approval:
            return {"web_search_approved": True}

        query = derive_search_query(state["question"])
        approved = check_web_search_approval(query, require_approval=True)
        return {"web_search_approved": approved}

    async def web_search_node(state: MeetingQAInternalState) -> dict[str, object]:
        if not state.get("web_search_approved", False) or web_search_provider is None:
            return {"web_context": ""}

        query = derive_search_query(state["question"])
        search_config = WebSearchConfig(enabled=True, require_approval=False)
        response = await execute_web_search(query, web_search_provider, search_config)
        approved = bool(state.get(STATE_WEB_SEARCH_APPROVED, False))
        if not approved or deps.web_search_provider is None:
            return {STATE_WEB_CONTEXT: ""}
        question = cast(str, state[STATE_QUESTION])
        query = derive_search_query(question)
        search_config = WebSearchConfig(enabled=config.enable_web_search, require_approval=False)
        response = await execute_web_search(query, deps.web_search_provider, search_config)
        context = format_results_for_context(response.results)
        return {"web_context": context}
        return {STATE_WEB_CONTEXT: context}

    return web_search_node


def _build_meeting_synthesize_node(
    deps: MeetingQADependencies,
) -> Callable[[MeetingQAInternalState], Awaitable[dict[str, object]]]:
    from noteflow.infrastructure.ai.nodes.annotation_suggester import (
        extract_annotations_from_answer,
    )
    from noteflow.infrastructure.ai.tools.synthesis import synthesize_answer

    async def synthesize_node(state: MeetingQAInternalState) -> dict[str, object]:
        if not state["verification_passed"]:
            return {
                "answer": NO_INFORMATION_ANSWER,
                "citations": [],
                "suggested_annotations": [],
            }

        if not state[STATE_VERIFICATION_PASSED]:
            return build_no_information_payload(NO_INFORMATION_ANSWER)
        question = cast(str, state[STATE_QUESTION])
        segments = cast(list["RetrievalResult"], state[STATE_RETRIEVED_SEGMENTS])
        result = await synthesize_answer(
            question=state["question"],
            segments=state["retrieved_segments"],
            llm=llm,
            question=question,
            segments=segments,
            llm=deps.llm,
        )
        return _build_meeting_answer_payload(
            meeting_id=state["meeting_id"],
            retrieved_segments=segments,
            result=result,
            extract_annotations=extract_annotations_from_answer,
        )

        citations = [
            SegmentCitation(
                meeting_id=state["meeting_id"],
                segment_id=seg.segment_id,
                start_time=seg.start_time,
                end_time=seg.end_time,
                text=seg.text,
                score=seg.score,
            )
            for seg in state["retrieved_segments"]
            if seg.segment_id in result.cited_segment_ids
        ]
    return synthesize_node

        suggested_annotations = extract_annotations_from_answer(
            answer=result.answer,
            cited_segment_ids=tuple(result.cited_segment_ids),

def _build_meeting_answer_payload(
    *,
    meeting_id: "MeetingId",
    retrieved_segments: list["RetrievalResult"],
    result: "SynthesisResult",
    extract_annotations: Callable[[str, tuple[int, ...]], list["SuggestedAnnotation"]],
) -> dict[str, object]:
    from noteflow.domain.ai.citations import SegmentCitation

    cited_ids = set(result.cited_segment_ids)
    citations = [
        SegmentCitation(
            meeting_id=meeting_id,
            segment_id=seg.segment_id,
            start_time=seg.start_time,
            end_time=seg.end_time,
            text=seg.text,
            score=seg.score,
        )
        for seg in retrieved_segments
        if seg.segment_id in cited_ids
    ]

    return {
        "answer": result.answer,
        "citations": citations,
        "suggested_annotations": suggested_annotations,
    }
    suggested_annotations = extract_annotations(
        result.answer,
        tuple(result.cited_segment_ids),
    )

    builder: StateGraph[MeetingQAInternalState] = StateGraph(MeetingQAInternalState)
    builder.add_node("retrieve", retrieve_node)
    builder.add_node("verify", verify_node)
    builder.add_node("synthesize", synthesize_node)

    if effective_config.enable_web_search and web_search_provider is not None:
        builder.add_node("web_search_approval", web_search_approval_node)
        builder.add_node("web_search", web_search_node)

        builder.add_edge(START, "retrieve")
        builder.add_edge("retrieve", "verify")
        builder.add_edge("verify", "web_search_approval")
        builder.add_edge("web_search_approval", "web_search")
        builder.add_edge("web_search", "synthesize")
        builder.add_edge("synthesize", END)
    else:
        builder.add_edge(START, "retrieve")
        builder.add_edge("retrieve", "verify")
        builder.add_edge("verify", "synthesize")
        builder.add_edge("synthesize", END)

    compile_method = getattr(builder, "compile")
    compiled: CompiledStateGraph[MeetingQAInternalState] = compile_method(checkpointer=checkpointer)
    return compiled
    return {
        STATE_ANSWER: result.answer,
        STATE_CITATIONS: citations,
        STATE_SUGGESTED_ANNOTATIONS: suggested_annotations,
    }

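A sketch of the new call shape after the dependency-object refactor; the embedder, repository, llm, and meeting id here are hypothetical instances:

    deps = MeetingQADependencies(embedder=embedder, segment_repo=segment_repo, llm=llm)
    graph = build_meeting_qa_graph(deps, config=MeetingQAConfig())
    result = await graph.ainvoke({"question": "What was decided?", "meeting_id": meeting_id, "top_k": 8})
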
@@ -14,10 +14,24 @@ if TYPE_CHECKING:

    from langgraph.graph import CompiledStateGraph

    from noteflow.application.services.summarization import SummarizationService
    from noteflow.application.services.summarization import (
        SummarizationService,
        SummarizationServiceResult,
    )
    from noteflow.domain.entities import ActionItem, KeyPoint
    from noteflow.domain.entities import Segment
    from noteflow.domain.value_objects import MeetingId

from noteflow.domain.constants.fields import (
    ACTION_ITEMS,
    END_TIME,
    KEY_POINTS,
    SEGMENT_IDS,
    START_TIME,
    TEXT,
)
from noteflow.infrastructure.ai.constants import NODE_SUMMARIZE

SUMMARIZATION_GRAPH_NAME: Final[str] = "summarization"
SUMMARIZATION_GRAPH_VERSION: Final[int] = 1

@@ -68,36 +82,44 @@ def build_summarization_graph(
            meeting_id=meeting_id,
            segments=state["segments"],
        )
        summary = result.summary
        return {
            "summary_text": summary.executive_summary,
            "key_points": [
                {
                    "text": kp.text,
                    "segment_ids": kp.segment_ids,
                    "start_time": kp.start_time,
                    "end_time": kp.end_time,
                }
                for kp in summary.key_points
            ],
            "action_items": [
                {
                    "text": ai.text,
                    "segment_ids": ai.segment_ids,
                    "assignee": ai.assignee,
                    "start_time": ai.start_time,
                    "end_time": ai.end_time,
                }
                for ai in summary.action_items
            ],
            "provider_used": result.provider_used,
            "tokens_used": summary.tokens_used,
            "latency_ms": summary.latency_ms,
        }
        return _summarization_result_to_payload(result)

    builder = StateGraph(SummarizationState)
    builder.add_node("summarize", summarize_node)
    builder.add_edge(START, "summarize")
    builder.add_edge("summarize", END)
    builder.add_node(NODE_SUMMARIZE, summarize_node)
    builder.add_edge(START, NODE_SUMMARIZE)
    builder.add_edge(NODE_SUMMARIZE, END)

    return builder.compile()


def _summarization_result_to_payload(
    result: "SummarizationServiceResult",
) -> dict[str, object]:
    summary = result.summary
    return {
        "summary_text": summary.executive_summary,
        KEY_POINTS: [_key_point_to_payload(kp) for kp in summary.key_points],
        ACTION_ITEMS: [_action_item_to_payload(ai) for ai in summary.action_items],
        "provider_used": result.provider_used,
        "tokens_used": summary.tokens_used,
        "latency_ms": summary.latency_ms,
    }


def _key_point_to_payload(key_point: "KeyPoint") -> dict[str, object]:
    return {
        TEXT: key_point.text,
        SEGMENT_IDS: key_point.segment_ids,
        START_TIME: key_point.start_time,
        END_TIME: key_point.end_time,
    }


def _action_item_to_payload(action_item: "ActionItem") -> dict[str, object]:
    return {
        TEXT: action_item.text,
        SEGMENT_IDS: action_item.segment_ids,
        "assignee": action_item.assignee,
        START_TIME: action_item.start_time,
        END_TIME: action_item.end_time,
    }

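For reference, the payload shape the extracted helpers emit; the field-name constants resolve to their literal strings, and key_point is a hypothetical KeyPoint instance:

    payload = _key_point_to_payload(key_point)
    # -> {"text": ..., "segment_ids": ..., "start_time": ..., "end_time": ...}
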
@@ -2,8 +2,30 @@

from __future__ import annotations

from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING, Final, TypedDict
from typing import TYPE_CHECKING, Final, TypedDict, cast

from noteflow.infrastructure.ai.constants import (
    NODE_RETRIEVE,
    NODE_SYNTHESIZE,
    NODE_VERIFY,
    NODE_WEB_SEARCH,
    NODE_WEB_SEARCH_APPROVAL,
    STATE_ANSWER,
    STATE_CITATIONS,
    STATE_QUESTION,
    STATE_RETRIEVED_SEGMENTS,
    STATE_SUGGESTED_ANNOTATIONS,
    STATE_VERIFICATION_PASSED,
    STATE_WEB_CONTEXT,
    STATE_WEB_SEARCH_APPROVED,
)
from noteflow.infrastructure.ai.graphs._shared import (
    build_no_information_payload,
    connect_base_flow,
    connect_web_search_flow,
)

if TYPE_CHECKING:
    from uuid import UUID

@@ -19,7 +41,7 @@ if TYPE_CHECKING:
    RetrievalResult,
    WorkspaceSegmentSearchProtocol,
)
from noteflow.infrastructure.ai.tools.synthesis import LLMProtocol
from noteflow.infrastructure.ai.tools.synthesis import LLMProtocol, SynthesisResult

WORKSPACE_QA_GRAPH_NAME: Final[str] = "workspace_qa"
WORKSPACE_QA_GRAPH_VERSION: Final[int] = 2

@@ -33,6 +55,14 @@ class WorkspaceQAConfig:
    require_annotation_approval: bool = False


@dataclass(frozen=True)
class WorkspaceQADependencies:
    embedder: "EmbedderProtocol"
    segment_repo: "WorkspaceSegmentSearchProtocol"
    llm: "LLMProtocol"
    web_search_provider: "WebSearchProvider | None" = None


class WorkspaceQAInputState(TypedDict):
    question: str
    workspace_id: UUID

@@ -54,145 +84,199 @@ class WorkspaceQAInternalState(WorkspaceQAInputState, WorkspaceQAOutputState):


def build_workspace_qa_graph(
    embedder: EmbedderProtocol,
    segment_repo: WorkspaceSegmentSearchProtocol,
    llm: LLMProtocol,
    deps: WorkspaceQADependencies,
    *,
    web_search_provider: WebSearchProvider | None = None,
    config: WorkspaceQAConfig | None = None,
    checkpointer: BaseCheckpointSaver[str] | None = None,
) -> CompiledStateGraph[WorkspaceQAInternalState]:
    """Build Q&A graph for cross-meeting questions with segment citations.

    Graph flow (with web search): retrieve -> verify -> [web_search_approval] -> [web_search] -> synthesize
    Graph flow (without): retrieve -> verify -> synthesize

    Args:
        embedder: Protocol for generating text embeddings.
        segment_repo: Protocol for workspace-scoped semantic segment search.
        llm: Protocol for LLM text completion.
        web_search_provider: Optional web search provider for augmentation.
        config: Graph configuration for features/interrupts.
        checkpointer: Optional checkpointer for interrupt support.

    Returns:
        Compiled graph that accepts question/workspace_id and returns answer/citations.
    """
    """Build Q&A graph for cross-meeting questions with segment citations."""
    from langgraph.graph import END, START, StateGraph

    from noteflow.domain.ai.citations import SegmentCitation
    from noteflow.infrastructure.ai.interrupts import check_web_search_approval
    from noteflow.infrastructure.ai.nodes.annotation_suggester import (
        extract_annotations_from_answer,
    )
    from noteflow.infrastructure.ai.nodes.web_search import (
        WebSearchConfig,
        derive_search_query,
        execute_web_search,
        format_results_for_context,
    )
    from noteflow.infrastructure.ai.tools.retrieval import retrieve_segments_workspace
    from noteflow.infrastructure.ai.tools.synthesis import synthesize_answer

    effective_config = config or WorkspaceQAConfig()

    async def retrieve_node(state: WorkspaceQAInternalState) -> dict[str, object]:
        results = await retrieve_segments_workspace(
            query=state["question"],
            embedder=embedder,
            segment_repo=segment_repo,
            workspace_id=state["workspace_id"],
            project_id=state["project_id"],
            top_k=state["top_k"],
        )
        return {"retrieved_segments": results}

    async def verify_node(state: WorkspaceQAInternalState) -> dict[str, object]:
        has_segments = len(state["retrieved_segments"]) > 0
        return {"verification_passed": has_segments}

    def web_search_approval_node(state: WorkspaceQAInternalState) -> dict[str, object]:
        if not effective_config.enable_web_search or web_search_provider is None:
            return {"web_search_approved": False}

        if not effective_config.require_web_approval:
            return {"web_search_approved": True}

        query = derive_search_query(state["question"])
        approved = check_web_search_approval(query, require_approval=True)
        return {"web_search_approved": approved}

    async def web_search_node(state: WorkspaceQAInternalState) -> dict[str, object]:
        if not state.get("web_search_approved", False) or web_search_provider is None:
            return {"web_context": ""}

        query = derive_search_query(state["question"])
        search_config = WebSearchConfig(enabled=True, require_approval=False)
        response = await execute_web_search(query, web_search_provider, search_config)
        context = format_results_for_context(response.results)
        return {"web_context": context}

    async def synthesize_node(state: WorkspaceQAInternalState) -> dict[str, object]:
        if not state["verification_passed"]:
            return {
                "answer": NO_INFORMATION_ANSWER,
                "citations": [],
                "suggested_annotations": [],
            }

        result = await synthesize_answer(
            question=state["question"],
            segments=state["retrieved_segments"],
            llm=llm,
        )

        citations = [
            SegmentCitation(
                meeting_id=seg.meeting_id,
                segment_id=seg.segment_id,
                start_time=seg.start_time,
                end_time=seg.end_time,
                text=seg.text,
                score=seg.score,
            )
            for seg in state["retrieved_segments"]
            if seg.segment_id in result.cited_segment_ids
        ]

        suggested_annotations = extract_annotations_from_answer(
            answer=result.answer,
            cited_segment_ids=tuple(result.cited_segment_ids),
        )

        return {
            "answer": result.answer,
            "citations": citations,
            "suggested_annotations": suggested_annotations,
        }
    nodes = _build_workspace_nodes(deps, effective_config)

    builder: StateGraph[WorkspaceQAInternalState] = StateGraph(WorkspaceQAInternalState)
    builder.add_node("retrieve", retrieve_node)
    builder.add_node("verify", verify_node)
    builder.add_node("synthesize", synthesize_node)
    builder.add_node(NODE_RETRIEVE, nodes.retrieve)
    builder.add_node(NODE_VERIFY, nodes.verify)
    builder.add_node(NODE_SYNTHESIZE, nodes.synthesize)

    if effective_config.enable_web_search and web_search_provider is not None:
        builder.add_node("web_search_approval", web_search_approval_node)
        builder.add_node("web_search", web_search_node)

        builder.add_edge(START, "retrieve")
        builder.add_edge("retrieve", "verify")
        builder.add_edge("verify", "web_search_approval")
        builder.add_edge("web_search_approval", "web_search")
        builder.add_edge("web_search", "synthesize")
        builder.add_edge("synthesize", END)
    if nodes.web_search_approval is not None and nodes.web_search is not None:
        builder.add_node(NODE_WEB_SEARCH_APPROVAL, nodes.web_search_approval)
        builder.add_node(NODE_WEB_SEARCH, nodes.web_search)
        connect_web_search_flow(builder, START, END)
    else:
        builder.add_edge(START, "retrieve")
        builder.add_edge("retrieve", "verify")
        builder.add_edge("verify", "synthesize")
        builder.add_edge("synthesize", END)
        connect_base_flow(builder, START, END)

    compile_method = getattr(builder, "compile")
    compiled: CompiledStateGraph[WorkspaceQAInternalState] = compile_method(
        checkpointer=checkpointer
    )
    return compiled

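A minimal usage sketch for the refactored builder, assuming LangGraph's standard ainvoke entry point; the stub objects (my_embedder, my_repo, my_llm, workspace_uuid) are illustrative, not part of this change:

    deps = WorkspaceQADependencies(
        embedder=my_embedder,      # any EmbedderProtocol implementation
        segment_repo=my_repo,      # any WorkspaceSegmentSearchProtocol implementation
        llm=my_llm,                # any LLMProtocol implementation
    )
    graph = build_workspace_qa_graph(deps, config=WorkspaceQAConfig())
    result = await graph.ainvoke({
        "question": "What did we decide about pricing?",
        "workspace_id": workspace_uuid,
        "project_id": None,
        "top_k": 20,
    })
    # result carries the "answer", "citations", and "suggested_annotations" keys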
@dataclass(frozen=True)
class _WorkspaceQANodes:
    retrieve: Callable[[WorkspaceQAInternalState], Awaitable[dict[str, object]]]
    verify: Callable[[WorkspaceQAInternalState], Awaitable[dict[str, object]]]
    synthesize: Callable[[WorkspaceQAInternalState], Awaitable[dict[str, object]]]
    web_search_approval: Callable[[WorkspaceQAInternalState], Awaitable[dict[str, object]]] | None
    web_search: Callable[[WorkspaceQAInternalState], Awaitable[dict[str, object]]] | None


def _build_workspace_nodes(
    deps: WorkspaceQADependencies,
    config: WorkspaceQAConfig,
) -> _WorkspaceQANodes:
    allow_web_search = config.enable_web_search and deps.web_search_provider is not None
    return _WorkspaceQANodes(
        retrieve=_build_workspace_retrieve_node(deps),
        verify=_build_workspace_verify_node(),
        synthesize=_build_workspace_synthesize_node(deps),
        web_search_approval=_build_workspace_web_search_approval_node(deps, config)
        if allow_web_search
        else None,
        web_search=_build_workspace_web_search_node(deps, config) if allow_web_search else None,
    )


def _build_workspace_retrieve_node(
    deps: WorkspaceQADependencies,
) -> Callable[[WorkspaceQAInternalState], Awaitable[dict[str, object]]]:
    from noteflow.infrastructure.ai.tools.retrieval import (
        WorkspaceRetrievalDependencies,
        WorkspaceRetrievalRequest,
        retrieve_segments_workspace,
    )

    async def retrieve_node(state: WorkspaceQAInternalState) -> dict[str, object]:
        question = cast(str, state[STATE_QUESTION])
        request = WorkspaceRetrievalRequest(
            query=question,
            workspace_id=state["workspace_id"],
            project_id=state["project_id"],
            top_k=state["top_k"],
        )
        results = await retrieve_segments_workspace(
            request,
            WorkspaceRetrievalDependencies(
                embedder=deps.embedder,
                segment_repo=deps.segment_repo,
            ),
        )
        return {STATE_RETRIEVED_SEGMENTS: results}

    return retrieve_node


def _build_workspace_verify_node() -> Callable[[WorkspaceQAInternalState], Awaitable[dict[str, object]]]:
    async def verify_node(state: WorkspaceQAInternalState) -> dict[str, object]:
        segments = cast(list["RetrievalResult"], state[STATE_RETRIEVED_SEGMENTS])
        has_segments = len(segments) > 0
        return {STATE_VERIFICATION_PASSED: has_segments}

    return verify_node


def _build_workspace_web_search_approval_node(
    deps: WorkspaceQADependencies,
    config: WorkspaceQAConfig,
) -> Callable[[WorkspaceQAInternalState], Awaitable[dict[str, object]]]:
    from noteflow.infrastructure.ai.interrupts import check_web_search_approval
    from noteflow.infrastructure.ai.nodes.web_search import derive_search_query

    async def web_search_approval_node(state: WorkspaceQAInternalState) -> dict[str, object]:
        if not config.require_web_approval:
            return {STATE_WEB_SEARCH_APPROVED: True}
        question = cast(str, state[STATE_QUESTION])
        query = derive_search_query(question)
        approved = check_web_search_approval(query, require_approval=True)
        return {STATE_WEB_SEARCH_APPROVED: approved}

    return web_search_approval_node


def _build_workspace_web_search_node(
    deps: WorkspaceQADependencies,
    config: WorkspaceQAConfig,
) -> Callable[[WorkspaceQAInternalState], Awaitable[dict[str, object]]]:
    from noteflow.infrastructure.ai.nodes.web_search import (
        WebSearchConfig,
        derive_search_query,
        execute_web_search,
        format_results_for_context,
    )

    async def web_search_node(state: WorkspaceQAInternalState) -> dict[str, object]:
        approved = bool(state.get(STATE_WEB_SEARCH_APPROVED, False))
        if not approved or deps.web_search_provider is None:
            return {STATE_WEB_CONTEXT: ""}
        question = cast(str, state[STATE_QUESTION])
        query = derive_search_query(question)
        search_config = WebSearchConfig(enabled=config.enable_web_search, require_approval=False)
        response = await execute_web_search(query, deps.web_search_provider, search_config)
        context = format_results_for_context(response.results)
        return {STATE_WEB_CONTEXT: context}

    return web_search_node


def _build_workspace_synthesize_node(
    deps: WorkspaceQADependencies,
) -> Callable[[WorkspaceQAInternalState], Awaitable[dict[str, object]]]:
    from noteflow.infrastructure.ai.nodes.annotation_suggester import (
        extract_annotations_from_answer,
    )
    from noteflow.infrastructure.ai.tools.synthesis import synthesize_answer

    async def synthesize_node(state: WorkspaceQAInternalState) -> dict[str, object]:
        if not state[STATE_VERIFICATION_PASSED]:
            return build_no_information_payload(NO_INFORMATION_ANSWER)

        question = cast(str, state[STATE_QUESTION])
        segments = cast(list["RetrievalResult"], state[STATE_RETRIEVED_SEGMENTS])
        result = await synthesize_answer(
            question=question,
            segments=segments,
            llm=deps.llm,
        )
        return _build_workspace_answer_payload(
            retrieved_segments=segments,
            result=result,
            extract_annotations=extract_annotations_from_answer,
        )

    return synthesize_node


def _build_workspace_answer_payload(
    *,
    retrieved_segments: list["RetrievalResult"],
    result: "SynthesisResult",
    extract_annotations: Callable[[str, tuple[int, ...]], list["SuggestedAnnotation"]],
) -> dict[str, object]:
    from noteflow.domain.ai.citations import SegmentCitation

    cited_ids = set(result.cited_segment_ids)
    citations = [
        SegmentCitation(
            meeting_id=seg.meeting_id,
            segment_id=seg.segment_id,
            start_time=seg.start_time,
            end_time=seg.end_time,
            text=seg.text,
            score=seg.score,
        )
        for seg in retrieved_segments
        if seg.segment_id in cited_ids
    ]

    suggested_annotations = extract_annotations(
        result.answer,
        tuple(result.cited_segment_ids),
    )

    return {
        STATE_ANSWER: result.answer,
        STATE_CITATIONS: citations,
        STATE_SUGGESTED_ANNOTATIONS: suggested_annotations,
    }

@@ -7,6 +7,7 @@ from dataclasses import dataclass, field
from enum import Enum
from typing import Final

from noteflow.domain.constants.fields import EMAIL
from noteflow.infrastructure.logging import get_logger

logger = get_logger(__name__)

@@ -27,7 +28,7 @@ SSN_PATTERN: Final[re.Pattern[str]] = re.compile(r"\b\d{3}-\d{2}-\d{4}\b")
CREDIT_CARD_PATTERN: Final[re.Pattern[str]] = re.compile(r"\b(?:\d{4}[-\s]?){3}\d{4}\b")

PII_PATTERNS: Final[tuple[tuple[str, re.Pattern[str]], ...]] = (
    ("email", EMAIL_PATTERN),
    (EMAIL, EMAIL_PATTERN),
    ("phone", PHONE_PATTERN),
    ("ssn", SSN_PATTERN),
    ("credit_card", CREDIT_CARD_PATTERN),

@@ -35,6 +36,7 @@ PII_PATTERNS: Final[tuple[tuple[str, re.Pattern[str]], ...]] = (

# Redaction placeholder
PII_REDACTION: Final[str] = "[REDACTED]"
CHARACTER_SUFFIX: Final[str] = " characters"


class GuardrailViolation(str, Enum):

@@ -57,33 +59,6 @@ class GuardrailResult:
    reason: str | None = None
    filtered_content: str | None = None

    @staticmethod
    def ok(content: str | None = None) -> GuardrailResult:
        """Create a passing result."""
        return GuardrailResult(allowed=True, filtered_content=content)

    @staticmethod
    def blocked(
        violation: GuardrailViolation,
        reason: str,
    ) -> GuardrailResult:
        """Create a blocking result."""
        return GuardrailResult(allowed=False, violation=violation, reason=reason)

    @staticmethod
    def filtered(
        content: str,
        violation: GuardrailViolation,
        reason: str,
    ) -> GuardrailResult:
        """Create a result with filtered content."""
        return GuardrailResult(
            allowed=True,
            violation=violation,
            reason=reason,
            filtered_content=content,
        )


@dataclass
class GuardrailRules:

@@ -126,22 +101,16 @@ def _check_length(
) -> GuardrailResult | None:
    """Check text length constraints."""
    if is_input and len(text) < min_length:
        return GuardrailResult.blocked(
            GuardrailViolation.INPUT_TOO_SHORT,
            f"Input must be at least {min_length} characters",
        )
        reason = f"Input must be at least {min_length}{CHARACTER_SUFFIX}"
        return _blocked_result(GuardrailViolation.INPUT_TOO_SHORT, reason)

    if is_input and len(text) > max_length:
        return GuardrailResult.blocked(
            GuardrailViolation.INPUT_TOO_LONG,
            f"Input must be at most {max_length} characters",
        )
        reason = f"Input must be at most {max_length}{CHARACTER_SUFFIX}"
        return _blocked_result(GuardrailViolation.INPUT_TOO_LONG, reason)

    if not is_input and len(text) > max_length:
        return GuardrailResult.blocked(
            GuardrailViolation.OUTPUT_TOO_LONG,
            f"Output exceeds {max_length} characters",
        )
        reason = f"Output exceeds {max_length}{CHARACTER_SUFFIX}"
        return _blocked_result(GuardrailViolation.OUTPUT_TOO_LONG, reason)

    return None

@@ -155,7 +124,7 @@ def _check_blocked_phrases(
    for phrase in blocked_phrases:
        if phrase.lower() in text_lower:
            logger.warning("blocked_phrase_detected", phrase=phrase[:20])
            return GuardrailResult.blocked(
            return _blocked_result(
                GuardrailViolation.BLOCKED_CONTENT,
                "Content contains blocked phrase",
            )

@@ -167,7 +136,7 @@ def _check_injection(text: str) -> GuardrailResult | None:
    for pattern in INJECTION_PATTERNS:
        if pattern.search(text):
            logger.warning("injection_attempt_detected")
            return GuardrailResult.blocked(
            return _blocked_result(
                GuardrailViolation.INJECTION_ATTEMPT,
                "Potential prompt injection detected",
            )

@@ -182,8 +151,7 @@ def _detect_pii(text: str) -> list[tuple[str, str]]:
    """
    findings: list[tuple[str, str]] = []
    for pii_type, pattern in PII_PATTERNS:
        for match in pattern.finditer(text):
            findings.append((pii_type, match.group()))
        findings.extend((pii_type, match.group()) for match in pattern.finditer(text))
    return findings


@@ -205,7 +173,6 @@ async def check_input(text: str, rules: GuardrailRules) -> GuardrailResult:
    Returns:
        GuardrailResult indicating if input is allowed.
    """
    # Length checks
    length_result = _check_length(
        text,
        rules.min_input_length,

@@ -215,39 +182,19 @@ async def check_input(text: str, rules: GuardrailRules) -> GuardrailResult:
    if length_result is not None:
        return length_result

    # Blocked phrases
    phrase_result = _check_blocked_phrases(text, rules.blocked_phrases)
    if phrase_result is not None:
        return phrase_result

    # Injection detection
    if rules.detect_injection:
        injection_result = _check_injection(text)
        if injection_result is not None:
            return injection_result
    injection_result = _check_injection_if_enabled(text, rules.detect_injection)
    if injection_result is not None:
        return injection_result

    # PII checks
    if rules.block_pii or rules.redact_pii:
        pii_findings = _detect_pii(text)
        if pii_findings:
            pii_types = [f[0] for f in pii_findings]
            logger.info("pii_detected_in_input", pii_types=pii_types)
    pii_result = _handle_pii_input(text, rules)
    if pii_result is not None:
        return pii_result

            if rules.block_pii:
                return GuardrailResult.blocked(
                    GuardrailViolation.CONTAINS_PII,
                    f"Input contains PII: {', '.join(pii_types)}",
                )

            # Redact instead of block
            redacted = _redact_pii(text)
            return GuardrailResult.filtered(
                redacted,
                GuardrailViolation.CONTAINS_PII,
                f"PII redacted: {', '.join(pii_types)}",
            )

    return GuardrailResult.ok(text)
    return _allowed_result(text)


async def filter_output(text: str, rules: GuardrailRules) -> GuardrailResult:

@@ -260,7 +207,6 @@ async def filter_output(text: str, rules: GuardrailRules) -> GuardrailResult:
    Returns:
        GuardrailResult with potentially filtered content.
    """
    # Length check
    length_result = _check_length(
        text,
        min_length=0,  # No minimum for output

@@ -268,45 +214,92 @@ async def filter_output(text: str, rules: GuardrailRules) -> GuardrailResult:
        is_input=False,
    )
    if length_result is not None:
        # Truncate instead of blocking for output
        truncated = text[: rules.max_output_length]
        return GuardrailResult.filtered(
        reason = f"Output truncated to {rules.max_output_length}{CHARACTER_SUFFIX}"
        return _filtered_result(
            truncated,
            GuardrailViolation.OUTPUT_TOO_LONG,
            f"Output truncated to {rules.max_output_length} characters",
            reason,
        )

    # Blocked phrases in output
    phrase_result = _check_blocked_phrases(text, rules.blocked_phrases)
    if phrase_result is not None:
        return phrase_result

    # PII redaction in output (always redact, never block output)
    if rules.redact_pii:
        pii_findings = _detect_pii(text)
        if pii_findings:
            pii_types = [f[0] for f in pii_findings]
            logger.info("pii_detected_in_output", pii_types=pii_types)
            redacted = _redact_pii(text)
            return GuardrailResult.filtered(
                redacted,
                GuardrailViolation.CONTAINS_PII,
                f"PII redacted: {', '.join(pii_types)}",
            )
    if rules.redact_pii and (pii_findings := _detect_pii(text)):
        pii_types = [f[0] for f in pii_findings]
        logger.info("pii_detected_in_output", pii_types=pii_types)
        redacted = _redact_pii(text)
        reason = f"PII redacted: {', '.join(pii_types)}"
        return _filtered_result(redacted, GuardrailViolation.CONTAINS_PII, reason)

    return GuardrailResult.ok(text)
    return _allowed_result(text)


def create_default_rules() -> GuardrailRules:
    """Create default guardrail rules."""
    return GuardrailRules()
    rules = GuardrailRules()
    return rules


def create_strict_rules() -> GuardrailRules:
    """Create strict guardrail rules with PII blocking."""
    return GuardrailRules(
    rules = GuardrailRules(
        block_pii=True,
        redact_pii=False,
        detect_injection=True,
        max_input_length=2000,
    )
    return rules


def _blocked_result(
    violation: GuardrailViolation,
    reason: str,
) -> GuardrailResult:
    return GuardrailResult(allowed=False, violation=violation, reason=reason)


def _filtered_result(
    content: str,
    violation: GuardrailViolation,
    reason: str,
) -> GuardrailResult:
    return GuardrailResult(
        allowed=True,
        violation=violation,
        reason=reason,
        filtered_content=content,
    )


def _allowed_result(content: str | None = None) -> GuardrailResult:
    return GuardrailResult(allowed=True, filtered_content=content)


def _check_injection_if_enabled(
    text: str,
    detect_injection: bool,
) -> GuardrailResult | None:
    if not detect_injection:
        return None
    return _check_injection(text)


def _handle_pii_input(text: str, rules: GuardrailRules) -> GuardrailResult | None:
    if not (rules.block_pii or rules.redact_pii):
        return None
    pii_findings = _detect_pii(text)
    if not pii_findings:
        return None

    pii_types = [f[0] for f in pii_findings]
    logger.info("pii_detected_in_input", pii_types=pii_types)

    if rules.block_pii:
        reason = f"Input contains PII: {', '.join(pii_types)}"
        return _blocked_result(GuardrailViolation.CONTAINS_PII, reason)

    redacted = _redact_pii(text)
    reason = f"PII redacted: {', '.join(pii_types)}"
    return _filtered_result(redacted, GuardrailViolation.CONTAINS_PII, reason)

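A small sketch of the refactored guardrail flow in an async context; the sample strings are illustrative, and the default-rules branch assumes redact_pii is enabled by default (the GuardrailRules defaults are not shown in this diff):

    strict = create_strict_rules()  # block_pii=True, detect_injection=True
    blocked = await check_input("My SSN is 123-45-6789", strict)
    # blocked.allowed is False with GuardrailViolation.CONTAINS_PII

    default = create_default_rules()
    redacted = await check_input("My SSN is 123-45-6789", default)
    # if redact_pii is on, redacted.filtered_content holds "[REDACTED]" in place of the match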
@@ -5,17 +5,20 @@ Wraps LangGraph's interrupt() and Command APIs for consistent usage across graph

from __future__ import annotations

from typing import TYPE_CHECKING, Final
from typing import TYPE_CHECKING, Any, Final, cast
from uuid import uuid4

from langgraph.types import Command, interrupt

from noteflow.infrastructure.ai._langgraph_compat import create_command

from noteflow.domain.ai.interrupts import (
    InterruptAction,
    InterruptResponse,
    create_annotation_interrupt,
    create_web_search_interrupt,
)
from noteflow.domain.constants.fields import ACTION
from noteflow.infrastructure.logging import get_logger

if TYPE_CHECKING:

@@ -54,7 +57,7 @@ def request_web_search_approval(
        query_preview=query[:50],
    )

    response_data = interrupt(interrupt_request.to_dict())
    response_data = interrupt(interrupt_request.to_request_payload())

    return _parse_interrupt_response(response_data, request_id)

@@ -71,7 +74,7 @@ def request_annotation_approval(
        InterruptResponse with user's decision.
    """
    request_id = str(uuid4())
    annotation_dicts = [ann.to_dict() for ann in annotations]
    annotation_dicts = [ann.to_annotation_payload() for ann in annotations]
    interrupt_request = create_annotation_interrupt(
        annotations=annotation_dicts,
        request_id=request_id,

@@ -83,7 +86,7 @@
        annotation_count=len(annotations),
    )

    response_data = interrupt(interrupt_request.to_dict())
    response_data = interrupt(interrupt_request.to_request_payload())

    return _parse_interrupt_response(response_data, request_id)

@@ -106,16 +109,20 @@ def _parse_interrupt_response(
        return InterruptResponse(action=action, request_id=request_id)

    if isinstance(response_data, dict):
        action_str = str(response_data.get("action", "reject"))
        # Cast to typed dict after isinstance check for type narrowing
        data: dict[str, object] = cast(dict[str, object], response_data)
        action_str = str(data.get(ACTION, InterruptAction.REJECT.value))
        action = _string_to_action(action_str)

        modified_value = response_data.get("modified_value")
        if modified_value is not None and not isinstance(modified_value, dict):
            modified_value = None
        modified_value_raw = data.get("modified_value")
        modified_value: dict[str, object] | None = None
        if isinstance(modified_value_raw, dict):
            modified_value = cast(dict[str, object], modified_value_raw)

        user_message = response_data.get("user_message")
        if user_message is not None:
            user_message = str(user_message)
        user_message: str | None = None
        user_message_raw = data.get("user_message")
        if user_message_raw is not None:
            user_message = str(user_message_raw)

        return InterruptResponse(
            action=action,

@@ -135,14 +142,20 @@ def _parse_interrupt_response(
def _string_to_action(value: str) -> InterruptAction:
    """Convert string response to InterruptAction."""
    normalized = value.lower().strip()
    if normalized in ("approve", "yes", "approved", "accept"):
    approve_values = {
        InterruptAction.APPROVE.value,
        "yes",
        "approved",
        "accept",
    }
    if normalized in approve_values:
        return InterruptAction.APPROVE
    if normalized in ("modify", "edit", "change"):
    if normalized in {InterruptAction.MODIFY.value, "edit", "change"}:
        return InterruptAction.MODIFY
    return InterruptAction.REJECT


def create_resume_command(response: InterruptResponse) -> Command[None]:
def create_resume_command(response: InterruptResponse) -> Command[Any]:
    """Create a LangGraph Command to resume execution with user response.

    Args:

@@ -151,7 +164,8 @@ def create_resume_command(response: InterruptResponse) -> Command[None]:
    Returns:
        Command to resume graph execution.
    """
    return Command(resume=response.to_dict())
    payload = response.to_response_payload()
    return create_command(resume=payload)


class InterruptHandler:

@@ -166,6 +180,8 @@ class InterruptHandler:
        return self._require_web_approval

    def request_web_search(self, query: str) -> InterruptResponse:
        if not self._require_web_approval:
            return InterruptResponse(action=InterruptAction.APPROVE)
        return request_web_search_approval(query)

    def request_annotation_approval(

@@ -214,18 +230,46 @@ def check_annotation_approval(
    if response.is_rejected:
        return False, []

    if response.is_modified and response.modified_value:
        modified_list_raw = response.modified_value.get("annotations", [])
        if isinstance(modified_list_raw, list):
            from noteflow.infrastructure.ai.nodes.annotation_suggester import (
                SuggestedAnnotation,
            )

            modified_annotations: list[SuggestedAnnotation] = []
            for item in modified_list_raw:
                if isinstance(item, dict):
                    item_dict: dict[str, object] = {str(k): v for k, v in item.items()}
                    modified_annotations.append(SuggestedAnnotation.from_dict(item_dict))
            return True, modified_annotations
    modified = _extract_modified_annotations(response)
    if modified is not None:
        return True, modified

    return response.is_approved, annotations


def _extract_modified_annotations(
    response: InterruptResponse,
) -> list[SuggestedAnnotation] | None:
    if not response.is_modified:
        return None
    modified_value = response.modified_value
    if not isinstance(modified_value, dict):
        return None
    raw_annotations = modified_value.get("annotations")
    if not isinstance(raw_annotations, list):
        return None
    return _coerce_annotation_payloads(cast(list[object], raw_annotations))


def _coerce_annotation_payloads(
    raw_annotations: list[object],
) -> list[SuggestedAnnotation]:
    from noteflow.infrastructure.ai.nodes.annotation_suggester import (
        SuggestedAnnotation,
    )

    modified_annotations: list[SuggestedAnnotation] = []
    for item in raw_annotations:
        item_dict = _normalize_annotation_payload(item)
        if item_dict is not None:
            modified_annotations.append(SuggestedAnnotation.from_payload(item_dict))
    return modified_annotations


def _normalize_annotation_payload(item: object) -> dict[str, object] | None:
    if not isinstance(item, dict):
        return None
    item_dict: dict[str, object] = {}
    for key, value in cast(dict[object, object], item).items():
        item_dict[str(key)] = value
    return item_dict

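A brief sketch of how the normalized parsing and resume command compose, assuming _parse_interrupt_response takes the raw payload and request id positionally as in the calls above; the payload values are illustrative:

    response = _parse_interrupt_response(
        {"action": "yes", "user_message": "looks good"},
        "req-123",
    )
    # "yes" normalizes to InterruptAction.APPROVE; user_message is coerced to str
    command = create_resume_command(response)  # wraps response.to_response_payload()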
@@ -5,13 +5,28 @@ from __future__ import annotations
import re
from dataclasses import dataclass
from enum import Enum
from typing import Final
from itertools import chain
from typing import Final, cast

from noteflow.domain.constants.fields import (
    ACTION_ITEM,
    CONFIDENCE,
    DECISION,
    NOTE,
    SEGMENT_IDS,
    TEXT,
)


DEFAULT_CONFIDENCE: Final[float] = 0.8
ACTION_ITEM_CONFIDENCE: Final[float] = 0.7
DECISION_CONFIDENCE: Final[float] = 0.75


class SuggestedAnnotationType(str, Enum):
    ACTION_ITEM = "action_item"
    DECISION = "decision"
    NOTE = "note"
    ACTION_ITEM = ACTION_ITEM
    DECISION = DECISION
    NOTE = NOTE


@dataclass(frozen=True)

@@ -19,35 +34,23 @@ class SuggestedAnnotation:
    text: str
    annotation_type: SuggestedAnnotationType
    segment_ids: tuple[int, ...]
    confidence: float = 0.8
    confidence: float = DEFAULT_CONFIDENCE

    def to_dict(self) -> dict[str, object]:
    def to_annotation_payload(self) -> dict[str, object]:
        return {
            "text": self.text,
            TEXT: self.text,
            "type": self.annotation_type.value,
            "segment_ids": list(self.segment_ids),
            "confidence": self.confidence,
            SEGMENT_IDS: list(self.segment_ids),
            CONFIDENCE: self.confidence,
        }

    @classmethod
    def from_dict(cls, data: dict[str, object]) -> SuggestedAnnotation:
        text = str(data.get("text", ""))
        type_str = str(data.get("type", "note"))
        segment_ids_raw = data.get("segment_ids", [])
        if isinstance(segment_ids_raw, list):
            segment_ids = tuple(
                int(sid) for sid in segment_ids_raw if isinstance(sid, (int, float))
            )
        else:
            segment_ids = ()
        confidence_raw = data.get("confidence", 0.8)
        confidence = float(confidence_raw) if isinstance(confidence_raw, (int, float)) else 0.8

        try:
            annotation_type = SuggestedAnnotationType(type_str)
        except ValueError:
            annotation_type = SuggestedAnnotationType.NOTE

    def from_payload(cls, data: dict[str, object]) -> SuggestedAnnotation:
        text = str(data.get(TEXT, ""))
        type_str = str(data.get("type", NOTE))
        segment_ids = _parse_segment_ids(data.get(SEGMENT_IDS))
        confidence = _parse_confidence(data.get(CONFIDENCE))
        annotation_type = _parse_annotation_type(type_str)
        return cls(
            text=text,
            annotation_type=annotation_type,

@@ -72,40 +75,92 @@ MIN_TEXT_LENGTH: Final[int] = 10
MAX_TEXT_LENGTH: Final[int] = 200


@dataclass(frozen=True)
class SuggestionCriteria:
    patterns: tuple[re.Pattern[str], ...]
    annotation_type: SuggestedAnnotationType
    confidence: float
    segment_ids: tuple[int, ...]


def extract_annotations_from_answer(
    answer: str,
    cited_segment_ids: tuple[int, ...],
) -> list[SuggestedAnnotation]:
    """Extract action items and decisions from synthesized answer."""
    action_criteria = SuggestionCriteria(
        patterns=ACTION_ITEM_PATTERNS,
        annotation_type=SuggestedAnnotationType.ACTION_ITEM,
        confidence=ACTION_ITEM_CONFIDENCE,
        segment_ids=cited_segment_ids,
    )
    decision_criteria = SuggestionCriteria(
        patterns=DECISION_PATTERNS,
        annotation_type=SuggestedAnnotationType.DECISION,
        confidence=DECISION_CONFIDENCE,
        segment_ids=cited_segment_ids,
    )
    action_suggestions = _collect_suggestions(answer, action_criteria)
    decision_suggestions = _collect_suggestions(answer, decision_criteria)

    return _dedupe_suggestions(action_suggestions + decision_suggestions)


def _collect_suggestions(
    answer: str,
    criteria: SuggestionCriteria,
) -> list[SuggestedAnnotation]:
    suggestions: list[SuggestedAnnotation] = []

    for pattern in ACTION_ITEM_PATTERNS:
        for match in pattern.finditer(answer):
            text = match.group(1).strip()
            if MIN_TEXT_LENGTH <= len(text) <= MAX_TEXT_LENGTH:
                suggestions.append(
                    SuggestedAnnotation(
                        text=text,
                        annotation_type=SuggestedAnnotationType.ACTION_ITEM,
                        segment_ids=cited_segment_ids,
                        confidence=0.7,
                    )
    for match in chain.from_iterable(
        pattern.finditer(answer) for pattern in criteria.patterns
    ):
        text = match.group(1).strip()
        if _is_valid_suggestion_text(text):
            suggestions.append(
                SuggestedAnnotation(
                    text=text,
                    annotation_type=criteria.annotation_type,
                    segment_ids=criteria.segment_ids,
                    confidence=criteria.confidence,
                )
            )
    return suggestions

    for pattern in DECISION_PATTERNS:
        for match in pattern.finditer(answer):
            text = match.group(1).strip()
            if MIN_TEXT_LENGTH <= len(text) <= MAX_TEXT_LENGTH:
                suggestions.append(
                    SuggestedAnnotation(
                        text=text,
                        annotation_type=SuggestedAnnotationType.DECISION,
                        segment_ids=cited_segment_ids,
                        confidence=0.75,
                    )
                )

    return _dedupe_suggestions(suggestions)
def _is_valid_suggestion_text(text: str) -> bool:
    return MIN_TEXT_LENGTH <= len(text) <= MAX_TEXT_LENGTH


def _parse_segment_ids(value: object) -> tuple[int, ...]:
    if not isinstance(value, list):
        return ()
    segment_ids_list = [
        converted
        for raw_sid in cast(list[object], value)
        if (converted := _coerce_segment_id(raw_sid)) is not None
    ]
    return tuple(segment_ids_list)


def _parse_confidence(value: object) -> float:
    if isinstance(value, (int, float)):
        return float(value)
    return DEFAULT_CONFIDENCE


def _coerce_segment_id(value: object) -> int | None:
    if isinstance(value, int):
        return value
    if isinstance(value, float):
        return int(value)
    return None


def _parse_annotation_type(type_str: str) -> SuggestedAnnotationType:
    try:
        return SuggestedAnnotationType(type_str)
    except ValueError:
        return SuggestedAnnotationType.NOTE


def _dedupe_suggestions(suggestions: list[SuggestedAnnotation]) -> list[SuggestedAnnotation]:

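A quick sketch of the criteria-driven extraction; the sample answer is illustrative and assumes ACTION_ITEM_PATTERNS captures imperative phrases in group 1 (the actual patterns are defined earlier in this file and not shown in the diff):

    answer = "We should schedule a follow-up with the design team next week [2]."
    suggestions = extract_annotations_from_answer(answer, cited_segment_ids=(2,))
    # each captured phrase between MIN_TEXT_LENGTH and MAX_TEXT_LENGTH characters becomes a
    # SuggestedAnnotation carrying the criteria's type, confidence, and cited segment ids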
@@ -22,7 +22,7 @@ class WebSearchResult:
    snippet: str
    score: float = 1.0

    def to_dict(self) -> dict[str, object]:
    def to_result_payload(self) -> dict[str, object]:
        """Convert to dictionary for serialization."""
        return {
            "title": self.title,

@@ -73,6 +73,48 @@ class RetrievalResult:
    score: float


@dataclass(frozen=True)
class MeetingRetrievalRequest:
    query: str
    meeting_id: MeetingId | None = None
    top_k: int = 8


@dataclass(frozen=True)
class WorkspaceRetrievalRequest:
    query: str
    workspace_id: UUID
    project_id: UUID | None = None
    top_k: int = 20


@dataclass(frozen=True)
class MeetingBatchRetrievalRequest:
    queries: Sequence[str]
    meeting_id: MeetingId | None = None
    top_k: int = 8


@dataclass(frozen=True)
class WorkspaceBatchRetrievalRequest:
    queries: Sequence[str]
    workspace_id: UUID
    project_id: UUID | None = None
    top_k: int = 20


@dataclass(frozen=True)
class MeetingRetrievalDependencies:
    embedder: EmbedderProtocol
    segment_repo: SegmentSearchProtocol


@dataclass(frozen=True)
class WorkspaceRetrievalDependencies:
    embedder: EmbedderProtocol
    segment_repo: WorkspaceSegmentSearchProtocol


def _meeting_id_to_uuid(mid: MeetingId | None) -> UUID:
    if mid is None:
        msg = "meeting_id is required for RetrievalResult"

@@ -80,20 +122,9 @@ def _meeting_id_to_uuid(mid: MeetingId | None) -> UUID:
    return UUID(str(mid))


async def retrieve_segments(
    query: str,
    embedder: EmbedderProtocol,
    segment_repo: SegmentSearchProtocol,
    meeting_id: MeetingId | None = None,
    top_k: int = 8,
def _build_retrieval_results(
    results: Sequence[tuple[SegmentLike, float]],
) -> list[RetrievalResult]:
    """Retrieve relevant transcript segments via semantic search."""
    query_embedding = await embedder.embed(query)
    results = await segment_repo.search_semantic(
        query_embedding=query_embedding,
        limit=top_k,
        meeting_id=meeting_id,
    )
    return [
        RetrievalResult(
            segment_id=segment.segment_id,

@@ -107,33 +138,33 @@ async def retrieve_segments(
    ]


async def retrieve_segments(
    request: MeetingRetrievalRequest,
    deps: MeetingRetrievalDependencies,
) -> list[RetrievalResult]:
    """Retrieve relevant transcript segments via semantic search."""
    query_embedding = await deps.embedder.embed(request.query)
    results = await deps.segment_repo.search_semantic(
        query_embedding=query_embedding,
        limit=request.top_k,
        meeting_id=request.meeting_id,
    )
    return _build_retrieval_results(results)


async def retrieve_segments_workspace(
    query: str,
    embedder: EmbedderProtocol,
    segment_repo: WorkspaceSegmentSearchProtocol,
    workspace_id: UUID,
    project_id: UUID | None = None,
    top_k: int = 20,
    request: WorkspaceRetrievalRequest,
    deps: WorkspaceRetrievalDependencies,
) -> list[RetrievalResult]:
    """Retrieve relevant transcript segments across workspace/project via semantic search."""
    query_embedding = await embedder.embed(query)
    results = await segment_repo.search_semantic_workspace(
    query_embedding = await deps.embedder.embed(request.query)
    results = await deps.segment_repo.search_semantic_workspace(
        query_embedding=query_embedding,
        workspace_id=workspace_id,
        project_id=project_id,
        limit=top_k,
        workspace_id=request.workspace_id,
        project_id=request.project_id,
        limit=request.top_k,
    )
    return [
        RetrievalResult(
            segment_id=segment.segment_id,
            meeting_id=_meeting_id_to_uuid(segment.meeting_id),
            text=segment.text,
            start_time=segment.start_time,
            end_time=segment.end_time,
            score=score,
        )
        for segment, score in results
    ]
    return _build_retrieval_results(results)


async def _embed_batch_fallback(

@@ -154,84 +185,57 @@ async def _embed_batch_fallback(


async def retrieve_segments_batch(
    queries: Sequence[str],
    embedder: EmbedderProtocol,
    segment_repo: SegmentSearchProtocol,
    meeting_id: MeetingId | None = None,
    top_k: int = 8,
    request: MeetingBatchRetrievalRequest,
    deps: MeetingRetrievalDependencies,
) -> list[list[RetrievalResult]]:
    """Retrieve segments for multiple queries in parallel.

    Uses batch embedding when available, then parallel search execution.
    Returns results in the same order as input queries.
    """
    if not queries:
    if not request.queries:
        return []
    embeddings = await _embed_batch_fallback(list(queries), embedder)
    embeddings = await _embed_batch_fallback(list(request.queries), deps.embedder)

    semaphore = asyncio.Semaphore(MAX_CONCURRENT_OPERATIONS)

    async def _search(emb: list[float]) -> list[RetrievalResult]:
        async with semaphore:
            results = await segment_repo.search_semantic(
            results = await deps.segment_repo.search_semantic(
                query_embedding=emb,
                limit=top_k,
                meeting_id=meeting_id,
                limit=request.top_k,
                meeting_id=request.meeting_id,
            )
            return [
                RetrievalResult(
                    segment_id=seg.segment_id,
                    meeting_id=_meeting_id_to_uuid(seg.meeting_id),
                    text=seg.text,
                    start_time=seg.start_time,
                    end_time=seg.end_time,
                    score=score,
                )
                for seg, score in results
            ]
            return _build_retrieval_results(results)

    search_results = await asyncio.gather(*(_search(emb) for emb in embeddings))
    return list(search_results)


async def retrieve_segments_workspace_batch(
    queries: Sequence[str],
    embedder: EmbedderProtocol,
    segment_repo: WorkspaceSegmentSearchProtocol,
    workspace_id: UUID,
    project_id: UUID | None = None,
    top_k: int = 20,
    request: WorkspaceBatchRetrievalRequest,
    deps: WorkspaceRetrievalDependencies,
) -> list[list[RetrievalResult]]:
    """Retrieve workspace segments for multiple queries in parallel.

    Uses batch embedding when available, then parallel search execution.
    Returns results in the same order as input queries.
    """
    if not queries:
    if not request.queries:
        return []
    embeddings = await _embed_batch_fallback(list(queries), embedder)
    embeddings = await _embed_batch_fallback(list(request.queries), deps.embedder)

    semaphore = asyncio.Semaphore(MAX_CONCURRENT_OPERATIONS)

    async def _search(emb: list[float]) -> list[RetrievalResult]:
        async with semaphore:
            results = await segment_repo.search_semantic_workspace(
            results = await deps.segment_repo.search_semantic_workspace(
                query_embedding=emb,
                workspace_id=workspace_id,
                project_id=project_id,
                limit=top_k,
                workspace_id=request.workspace_id,
                project_id=request.project_id,
                limit=request.top_k,
            )
            return [
                RetrievalResult(
                    segment_id=seg.segment_id,
                    meeting_id=_meeting_id_to_uuid(seg.meeting_id),
                    text=seg.text,
                    start_time=seg.start_time,
                    end_time=seg.end_time,
                    score=score,
                )
                for seg, score in results
            ]
            return _build_retrieval_results(results)

    search_results = await asyncio.gather(*(_search(emb) for emb in embeddings))
    return list(search_results)

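A minimal call-shape sketch for the new request/dependency signatures; the stub objects (my_embedder, my_repo, ws_id) are illustrative:

    deps = WorkspaceRetrievalDependencies(embedder=my_embedder, segment_repo=my_repo)
    request = WorkspaceRetrievalRequest(query="pricing decision", workspace_id=ws_id, top_k=5)
    results = await retrieve_segments_workspace(request, deps)

    batch = WorkspaceBatchRetrievalRequest(queries=["pricing", "hiring"], workspace_id=ws_id)
    per_query = await retrieve_segments_workspace_batch(batch, deps)
    # per_query[i] holds the RetrievalResult list for batch.queries[i], order preserved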
@@ -20,18 +20,6 @@ class SynthesisResult:
    cited_segment_ids: list[int]


SYNTHESIS_PROMPT_TEMPLATE: Final[
    str
] = """Answer the question based on the following transcript segments.
Cite specific segments by their ID when making claims.

Question: {question}

Segments:
{segments}

Answer (cite segment IDs in brackets like [1], [3]):"""

CITATION_PATTERN: Final[re.Pattern[str]] = re.compile(r"\[(\d+)\]")


@@ -44,10 +32,7 @@ async def synthesize_answer(
    segment_text = "\n".join(
        f"[{s.segment_id}] ({s.start_time:.1f}s-{s.end_time:.1f}s): {s.text}" for s in segments
    )
    prompt = SYNTHESIS_PROMPT_TEMPLATE.format(
        question=question,
        segments=segment_text,
    )
    prompt = _build_prompt(question, segment_text)
    answer = await llm.complete(prompt)
    valid_ids = {s.segment_id for s in segments}
    cited_ids = extract_cited_ids(answer, valid_ids)

@@ -58,3 +43,13 @@ def extract_cited_ids(answer: str, valid_ids: set[int]) -> list[int]:
    matches = CITATION_PATTERN.findall(answer)
    cited = [int(m) for m in matches if int(m) in valid_ids]
    return list(dict.fromkeys(cited))


def _build_prompt(question: str, segments: str) -> str:
    return (
        "Answer the question based on the following transcript segments.\n"
        "Cite specific segments by their ID when making claims.\n\n"
        f"Question: {question}\n\n"
        f"Segments:\n{segments}\n\n"
        "Answer (cite segment IDs in brackets like [1], [3]):"
    )

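A worked example of the citation extraction, including the order-preserving dedupe via dict.fromkeys:

    answer = "Pricing was finalized [3], revisited [1], and confirmed again [3]."
    extract_cited_ids(answer, valid_ids={1, 2, 3})  # -> [3, 1]
    extract_cited_ids("See [7].", valid_ids={1, 2, 3})  # -> [] (7 is not a valid id)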
@@ -4,7 +4,7 @@ from __future__ import annotations

from typing import TYPE_CHECKING

from noteflow.domain.constants.fields import DURATION_MS, ENDED_AT, ERROR_CODE
from noteflow.domain.constants.fields import CONFIG, DURATION_MS, ENDED_AT, ERROR_CODE
from noteflow.domain.entities.integration import (
    Integration,
    IntegrationStatus,

@@ -47,7 +47,7 @@ class IntegrationConverter:
    )

    @staticmethod
    def to_orm_kwargs(entity: Integration) -> dict[str, object]:
    def to_integration_orm_kwargs(entity: Integration) -> dict[str, object]:
        """Convert domain entity to ORM model kwargs.

        Returns a dict of kwargs rather than instantiating IntegrationModel

@@ -66,7 +66,7 @@ class IntegrationConverter:
            "name": entity.name,
            "type": entity.type.value,
            "status": entity.status.value,
            "config": entity.config,
            CONFIG: entity.config,
            "last_sync": entity.last_sync,
            "error_message": entity.error_message,
            "created_at": entity.created_at,

@@ -105,7 +105,7 @@ class SyncRunConverter:
    )

    @staticmethod
    def to_orm_kwargs(entity: SyncRun) -> dict[str, object]:
    def to_sync_run_orm_kwargs(entity: SyncRun) -> dict[str, object]:
        """Convert domain entity to ORM model kwargs.

        Args:

@@ -53,7 +53,7 @@ class NerConverter:
    )

    @staticmethod
    def to_orm_kwargs(entity: NamedEntity) -> dict[str, object]:
    def to_entity_orm_kwargs(entity: NamedEntity) -> dict[str, object]:
        """Convert domain entity to ORM model kwargs.

        Returns a dict of kwargs rather than instantiating NamedEntityModel

@@ -2,6 +2,7 @@

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from typing import Final, Protocol, cast

@@ -17,7 +18,7 @@ class _TorchCudaProperties(Protocol):


class _TorchCudaModule(Protocol):
    def is_available(self) -> bool: ...
    is_available: Callable[[], bool]
    def mem_get_info(self) -> tuple[int, int]: ...
    def get_device_properties(self, device: int) -> _TorchCudaProperties: ...
    def memory_reserved(self, device: int) -> int: ...

@@ -6,6 +6,7 @@ from typing import TYPE_CHECKING, Final, TypedDict, cast

from noteflow.domain.entities.named_entity import EntityCategory
from noteflow.infrastructure.logging import get_logger, log_timing
from noteflow.infrastructure.ner.constants import LABEL_TIME
from noteflow.infrastructure.ner.backends.types import RawEntity

if TYPE_CHECKING:

@@ -31,7 +32,7 @@ MEETING_LABELS: Final[tuple[str, ...]] = (
    EntityCategory.PRODUCT.value,
    "app",
    EntityCategory.LOCATION.value,
    "time",
    LABEL_TIME,
    EntityCategory.TIME_RELATIVE.value,
    EntityCategory.DURATION.value,
    EntityCategory.EVENT.value,

src/noteflow/infrastructure/ner/constants.py (new file, 7 lines)
@@ -0,0 +1,7 @@
"""Constants for NER label normalization."""

from __future__ import annotations

from typing import Final

LABEL_TIME: Final[str] = "time"

@@ -5,6 +5,7 @@ from __future__ import annotations
from typing import Final

from noteflow.domain.entities.named_entity import EntityCategory, NamedEntity
from noteflow.infrastructure.ner.constants import LABEL_TIME
from noteflow.infrastructure.ner.backends.types import RawEntity

DEFAULT_CONFIDENCE: Final[float] = 0.8

@@ -20,7 +21,7 @@ LABEL_TO_CATEGORY: Final[dict[str, EntityCategory]] = {
    "loc": EntityCategory.LOCATION,
    "fac": EntityCategory.LOCATION,
    EntityCategory.DATE.value: EntityCategory.DATE,
    "time": EntityCategory.DATE,
    LABEL_TIME: EntityCategory.DATE,
    EntityCategory.TIME_RELATIVE.value: EntityCategory.TIME_RELATIVE,
    EntityCategory.DURATION.value: EntityCategory.DURATION,
    EntityCategory.EVENT.value: EntityCategory.EVENT,

@@ -6,6 +6,7 @@ import re
from typing import Final

from noteflow.domain.constants.fields import DATE
from noteflow.infrastructure.ner.constants import LABEL_TIME
from noteflow.infrastructure.ner.backends.types import RawEntity

PROFANITY_WORDS: Final[frozenset[str]] = frozenset(

@@ -41,7 +42,7 @@ DURATION_UNITS: Final[frozenset[str]] = frozenset(

TIME_LABELS: Final[frozenset[str]] = frozenset(
    {
        "time",
        LABEL_TIME,
        "time_relative",
        DATE,
    }

@@ -42,19 +42,29 @@ MaterializedViewName = Literal[
    "mv_entity_totals",
]

# Canonical view name constants
MV_DAILY_MEETING_STATS: Final[MaterializedViewName] = "mv_daily_meeting_stats"
MV_SPEAKER_STATS: Final[MaterializedViewName] = "mv_speaker_stats"
MV_ENTITY_CATEGORY_STATS: Final[MaterializedViewName] = "mv_entity_category_stats"
MV_TOP_ENTITIES: Final[MaterializedViewName] = "mv_top_entities"
MV_MEETING_TOTALS: Final[MaterializedViewName] = "mv_meeting_totals"
MV_ENTITY_TOTALS: Final[MaterializedViewName] = "mv_entity_totals"

# Frozen set for runtime validation
VALID_VIEW_NAMES: Final[frozenset[str]] = frozenset(get_args(MaterializedViewName))

# Ordered list of all materialized views
ALL_MATERIALIZED_VIEWS: Final[tuple[MaterializedViewName, ...]] = (
    "mv_daily_meeting_stats",
    "mv_speaker_stats",
    "mv_entity_category_stats",
    "mv_top_entities",
    "mv_meeting_totals",
    "mv_entity_totals",
    MV_DAILY_MEETING_STATS,
    MV_SPEAKER_STATS,
    MV_ENTITY_CATEGORY_STATS,
    MV_TOP_ENTITIES,
    MV_MEETING_TOTALS,
    MV_ENTITY_TOTALS,
)

SQL_AND: Final[str] = " AND "


class DailyStatsMVRow(Protocol):
    meeting_date: date

@@ -174,7 +184,7 @@ async def fetch_daily_stats_mv(
    conditions, query_params = _build_daily_params(
        params.workspace_id, params.project_ids, params.start_time, params.end_time
    )
    where_clause = " AND ".join(conditions)
    where_clause = SQL_AND.join(conditions)
    query = text(f"""
        SELECT
            meeting_date,

@@ -199,7 +209,7 @@ async def fetch_meeting_totals_mv(
        return None

    conditions, query_params = _build_workspace_params(params.workspace_id, params.project_ids)
    where_clause = " AND ".join(conditions)
    where_clause = SQL_AND.join(conditions)
    query = text(f"""
        SELECT
            SUM(total_meetings)::integer as total_meetings,

@@ -226,7 +236,7 @@ async def fetch_speaker_stats_mv(
        return None

    conditions, query_params = _build_workspace_params(params.workspace_id, params.project_ids)
    where_clause = " AND ".join(conditions)
    where_clause = SQL_AND.join(conditions)
    query = text(f"""
        SELECT
            speaker_id,

@@ -252,7 +262,7 @@ async def fetch_entity_category_mv(
        return None

    conditions, query_params = _build_workspace_params(params.workspace_id, params.project_ids)
    where_clause = " AND ".join(conditions)
    where_clause = SQL_AND.join(conditions)
    query = text(f"""
        SELECT
            category,

@@ -279,7 +289,7 @@ async def fetch_top_entities_mv(

    conditions, query_params = _build_workspace_params(params.workspace_id, params.project_ids)
    query_params["limit"] = limit
    where_clause = " AND ".join(conditions)
    where_clause = SQL_AND.join(conditions)
    query = text(f"""
        SELECT
            text,

@@ -305,7 +315,7 @@ async def fetch_entity_totals_mv(
        return None

    conditions, query_params = _build_workspace_params(params.workspace_id, params.project_ids)
    where_clause = " AND ".join(conditions)
    where_clause = SQL_AND.join(conditions)
    query = text(f"""
        SELECT
            SUM(total_entities)::integer as total_entities,

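A small sketch of how the new constants compose, matching the fetch helpers above; the condition strings are illustrative:

    conditions = ["workspace_id = :workspace_id", "meeting_date >= :start_time"]
    where_clause = SQL_AND.join(conditions)
    # -> "workspace_id = :workspace_id AND meeting_date >= :start_time"
    assert MV_SPEAKER_STATS in VALID_VIEW_NAMES  # runtime validation set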
@@ -45,6 +45,12 @@ from noteflow.infrastructure.persistence.repositories._analytics_converters impo
|
||||
top_entity_from_row,
|
||||
)
|
||||
from noteflow.infrastructure.persistence.repositories._materialized_view_queries import (
|
||||
MV_DAILY_MEETING_STATS,
|
||||
MV_ENTITY_CATEGORY_STATS,
|
||||
MV_ENTITY_TOTALS,
|
||||
MV_MEETING_TOTALS,
|
||||
MV_SPEAKER_STATS,
|
||||
MV_TOP_ENTITIES,
|
||||
fetch_daily_stats_mv,
|
||||
fetch_entity_category_mv,
|
||||
fetch_entity_totals_mv,
|
||||
@@ -61,6 +67,8 @@ logger = get_logger(__name__)
|
||||
|
||||
class SqlAlchemyAnalyticsRepository(BaseRepository):
|
||||
_mv_available: bool | None = None
|
||||
_ANALYTICS_MV_HIT = "analytics_mv_hit"
|
||||
_ANALYTICS_MV_FALLBACK = "analytics_mv_fallback"
|
||||
|
||||
@classmethod
|
||||
def reset_mv_cache(cls) -> None:
|
||||
@@ -81,14 +89,14 @@ class SqlAlchemyAnalyticsRepository(BaseRepository):
        await refresh_all_materialized_views(self._session)

    async def refresh_meeting_views(self) -> None:
        await refresh_materialized_view(self._session, "mv_daily_meeting_stats")
        await refresh_materialized_view(self._session, "mv_meeting_totals")
        await refresh_materialized_view(self._session, "mv_speaker_stats")
        await refresh_materialized_view(self._session, MV_DAILY_MEETING_STATS)
        await refresh_materialized_view(self._session, MV_MEETING_TOTALS)
        await refresh_materialized_view(self._session, MV_SPEAKER_STATS)

    async def refresh_entity_views(self) -> None:
        await refresh_materialized_view(self._session, "mv_entity_category_stats")
        await refresh_materialized_view(self._session, "mv_top_entities")
        await refresh_materialized_view(self._session, "mv_entity_totals")
        await refresh_materialized_view(self._session, MV_ENTITY_CATEGORY_STATS)
        await refresh_materialized_view(self._session, MV_TOP_ENTITIES)
        await refresh_materialized_view(self._session, MV_ENTITY_TOTALS)
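The refresh methods swap raw view-name strings for the imported MV_* constants, so the names used by REFRESH statements and by the fetch_*_mv queries cannot drift apart. A sketch under assumptions: the constant values mirror the old literals, and refresh_materialized_view simply issues a REFRESH statement (the real helper may differ):

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

MV_DAILY_MEETING_STATS = "mv_daily_meeting_stats"
MV_MEETING_TOTALS = "mv_meeting_totals"
MV_SPEAKER_STATS = "mv_speaker_stats"

async def refresh_materialized_view(session: AsyncSession, view_name: str) -> None:
    # Assumed implementation for illustration only.
    await session.execute(text(f"REFRESH MATERIALIZED VIEW {view_name}"))

async def refresh_meeting_views(session: AsyncSession) -> None:
    # A typo in a constant name fails at import time; a typo in a string
    # literal would only surface when the REFRESH statement runs.
    for view in (MV_DAILY_MEETING_STATS, MV_MEETING_TOTALS, MV_SPEAKER_STATS):
        await refresh_materialized_view(session, view)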
    async def get_overview(
        self,
@@ -201,9 +209,9 @@ class SqlAlchemyAnalyticsRepository(BaseRepository):
        )
        mv_result = await self._get_overview_from_mv(params)
        if mv_result is not None:
            logger.debug("analytics_mv_hit", method="get_overview")
            logger.debug(self._ANALYTICS_MV_HIT, method="get_overview")
            return mv_result
        logger.debug("analytics_mv_fallback", method="get_overview")
        logger.debug(self._ANALYTICS_MV_FALLBACK, method="get_overview")

        return await self.get_overview(workspace_id, project_ids, start_time, end_time)

@@ -243,9 +251,9 @@ class SqlAlchemyAnalyticsRepository(BaseRepository):
        )
        mv_result = await fetch_speaker_stats_mv(self._session, params)
        if mv_result is not None:
            logger.debug("analytics_mv_hit", method="get_speaker_stats")
            logger.debug(self._ANALYTICS_MV_HIT, method="get_speaker_stats")
            return [speaker_stat_from_mv(r) for r in mv_result]
        logger.debug("analytics_mv_fallback", method="get_speaker_stats")
        logger.debug(self._ANALYTICS_MV_FALLBACK, method="get_speaker_stats")

        return await self.get_speaker_stats(workspace_id, project_ids, start_time, end_time)

@@ -264,9 +272,9 @@ class SqlAlchemyAnalyticsRepository(BaseRepository):
            base_params, top_limit=params.top_limit
        )
        if mv_result is not None:
            logger.debug("analytics_mv_hit", method="get_entity_analytics")
            logger.debug(self._ANALYTICS_MV_HIT, method="get_entity_analytics")
            return mv_result
        logger.debug("analytics_mv_fallback", method="get_entity_analytics")
        logger.debug(self._ANALYTICS_MV_FALLBACK, method="get_entity_analytics")

        return await self.get_entity_analytics(params)
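Each analytics method logs the same two structured events, now spelled once as class constants. A sketch of the hit/fallback shape these hunks follow, with placeholder method bodies (the real queries live elsewhere in the repository):

import structlog  # assumption: NoteFlow's get_logger wraps a structlog-style logger

logger = structlog.get_logger(__name__)

class AnalyticsRepositorySketch:
    _ANALYTICS_MV_HIT = "analytics_mv_hit"
    _ANALYTICS_MV_FALLBACK = "analytics_mv_fallback"

    async def get_overview(self, params: object) -> dict:
        mv_result = await self._try_materialized_view(params)
        if mv_result is not None:
            # Constant event names keep log queries and tests in sync.
            logger.debug(self._ANALYTICS_MV_HIT, method="get_overview")
            return mv_result
        logger.debug(self._ANALYTICS_MV_FALLBACK, method="get_overview")
        return await self._compute_from_base_tables(params)

    async def _try_materialized_view(self, params: object) -> dict | None:
        return None  # placeholder: real code queries the materialized view

    async def _compute_from_base_tables(self, params: object) -> dict:
        return {}  # placeholder: real code runs the slow aggregate query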
@@ -62,7 +62,7 @@ class SqlAlchemyEntityRepository(
        Returns:
            Saved entity with db_id populated.
        """
        kwargs = NerConverter.to_orm_kwargs(entity)
        kwargs = NerConverter.to_entity_orm_kwargs(entity)
        stmt = insert(NamedEntityModel).values(**kwargs)
        excluded = stmt.excluded
        stmt = stmt.on_conflict_do_update(
@@ -103,7 +103,7 @@ class SqlAlchemyEntityRepository(
        if not entities:
            return entities

        payload = [NerConverter.to_orm_kwargs(entity) for entity in entities]
        payload = [NerConverter.to_entity_orm_kwargs(entity) for entity in entities]
        stmt = insert(NamedEntityModel).values(payload)
        excluded = stmt.excluded
        stmt = stmt.on_conflict_do_update(
@@ -90,7 +90,7 @@ class SqlAlchemyIntegrationRepository(
        Returns:
            Created integration.
        """
        kwargs = IntegrationConverter.to_orm_kwargs(integration)
        kwargs = IntegrationConverter.to_integration_orm_kwargs(integration)
        model = IntegrationModel(**kwargs)
        await self._add_and_flush(model)
        logger.info(
@@ -207,7 +207,7 @@ class SqlAlchemyIntegrationRepository(
        Returns:
            Created sync run.
        """
        kwargs = SyncRunConverter.to_orm_kwargs(sync_run)
        kwargs = SyncRunConverter.to_sync_run_orm_kwargs(sync_run)
        model = IntegrationSyncRunModel(**kwargs)
        await self._add_and_flush(model)
        logger.info(
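The converter renames all follow one rule: each to_*_orm_kwargs method names the aggregate it serializes. A hedged sketch of the shape (the field list here is illustrative, not the real NoteFlow entity):

from dataclasses import asdict, dataclass
from typing import Any

@dataclass
class Integration:
    id: str
    workspace_id: str
    type: str
    status: str

class IntegrationConverter:
    @staticmethod
    def to_integration_orm_kwargs(integration: Integration) -> dict[str, Any]:
        # The method name states which aggregate it handles, so calling the
        # wrong converter reads as obviously wrong at the call site.
        return asdict(integration)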
@@ -12,7 +12,7 @@ from sqlalchemy import func, select
from sqlalchemy.engine import RowMapping
from sqlalchemy.ext.asyncio import AsyncSession

from noteflow.domain.constants.fields import MODEL_NAME
from noteflow.domain.constants.fields import COUNT, MODEL_NAME
from noteflow.infrastructure.persistence.models.observability.usage_event import (
    UsageEventModel,
)
@@ -182,7 +182,7 @@ async def count_by_event_type(
    stmt = (
        select(
            UsageEventModel.event_type,
            func.count(UsageEventModel.id).label("count"),
            func.count(UsageEventModel.id).label(COUNT),
        )
        .where(
            UsageEventModel.timestamp >= start_time,
@@ -193,4 +193,4 @@ async def count_by_event_type(

    result = await session.execute(stmt)
    rows: Sequence[RowMapping] = result.mappings().all()
    return {str(row["event_type"]): int(row["count"]) for row in rows}
    return {str(row["event_type"]): int(row[COUNT]) for row in rows}
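COUNT joins the existing field-name constants so the SQL label and the row lookup cannot diverge. A tiny sketch, assuming the constants module simply maps names to strings:

COUNT = "count"  # assumed value, mirroring the replaced literal

rows = [{"event_type": "asr", COUNT: 3}]  # stand-in for the RowMapping results
totals = {str(row["event_type"]): int(row[COUNT]) for row in rows}
assert totals == {"asr": 3}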
@@ -5,11 +5,12 @@ from __future__ import annotations
from collections.abc import Sequence
from typing import Final

from noteflow.domain.constants.fields import ACTION
from noteflow.domain.entities import ActionItem, Segment

_ACTION_KEYWORDS: Final[tuple[str, ...]] = (
    "todo",
    "action",
    ACTION,
    "will",
    "should",
    "must",
@@ -8,7 +8,7 @@ from dataclasses import dataclass, field
from datetime import datetime
from typing import TypeVar

from noteflow.domain.constants.fields import DISPLAY_NAME, ENDED_AT, STATE
from noteflow.domain.constants.fields import DISPLAY_NAME, EMAIL, ENDED_AT, STATE
from noteflow.domain.constants.placeholders import STYLE_INSTRUCTIONS_PLACEHOLDER


@@ -110,7 +110,7 @@ WORKSPACE_GETTERS: dict[str, Callable[[WorkspaceTemplateContext], str | None]] =

USER_GETTERS: dict[str, Callable[[UserTemplateContext], str | None]] = {
    DISPLAY_NAME: lambda ctx: ctx.display_name,
    "email": lambda ctx: ctx.email,
    EMAIL: lambda ctx: ctx.email,
}

SUMMARY_GETTERS: dict[str, Callable[[SummaryTemplateContext], str | None]] = {
@@ -4,6 +4,9 @@ import pytest

from noteflow.domain.ai.citations import SegmentCitation

EXPECTED_DURATION_SECONDS = 15.0
FROZEN_ASSIGNMENT_MESSAGE = "cannot assign to field"


class TestSegmentCitation:
    def test_creation_with_valid_values(self) -> None:
@@ -17,14 +20,14 @@ class TestSegmentCitation:
            score=0.95,
        )

        assert citation.meeting_id == meeting_id
        assert citation.segment_id == 1
        assert citation.start_time == 0.0
        assert citation.end_time == 5.0
        assert citation.text == "Test segment text"
        assert citation.score == 0.95
        assert citation.meeting_id == meeting_id, "Meeting ID should be preserved"
        assert citation.segment_id == 1, "Segment ID should be preserved"
        assert citation.start_time == 0.0, "Start time should be preserved"
        assert citation.end_time == 5.0, "End time should be preserved"
        assert citation.text == "Test segment text", "Text should be preserved"
        assert citation.score == 0.95, "Score should be preserved"

    def test_duration_property(self) -> None:
    def test_duration_property_returns_delta(self) -> None:
        citation = SegmentCitation(
            meeting_id=uuid4(),
            segment_id=1,
@@ -33,7 +36,9 @@ class TestSegmentCitation:
            text="Test",
        )

        assert citation.duration == 15.0
        assert citation.duration == EXPECTED_DURATION_SECONDS, (
            "Duration should equal end_time - start_time"
        )

    def test_default_score_is_zero(self) -> None:
        citation = SegmentCitation(
@@ -44,7 +49,7 @@ class TestSegmentCitation:
            text="Test",
        )

        assert citation.score == 0.0
        assert citation.score == 0.0, "Default score should be zero"

    def test_rejects_negative_segment_id(self) -> None:
        with pytest.raises(ValueError, match="segment_id must be non-negative"):
@@ -103,9 +108,9 @@ class TestSegmentCitation:
            text="Instant moment",
        )

        assert citation.duration == 0.0
        assert citation.duration == 0.0, "Zero-length segments should have zero duration"

    def test_is_frozen(self) -> None:
    def test_citation_is_frozen(self) -> None:
        citation = SegmentCitation(
            meeting_id=uuid4(),
            segment_id=1,
@@ -114,5 +119,5 @@ class TestSegmentCitation:
            text="Test",
        )

        with pytest.raises(AttributeError):
            citation.text = "Modified"  # type: ignore[misc]
        with pytest.raises(AttributeError, match=FROZEN_ASSIGNMENT_MESSAGE):
            setattr(citation, "text", "Modified")
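The new FROZEN_ASSIGNMENT_MESSAGE pattern works because frozen dataclasses raise dataclasses.FrozenInstanceError, a subclass of AttributeError whose message starts with "cannot assign to field". A self-contained illustration:

from dataclasses import dataclass, FrozenInstanceError

@dataclass(frozen=True)
class Point:
    x: float
    y: float

p = Point(1.0, 2.0)
try:
    setattr(p, "x", 3.0)  # setattr avoids the static type error a plain assignment triggers
except FrozenInstanceError as err:  # subclass of AttributeError
    assert "cannot assign to field" in str(err)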
@@ -1,12 +1,17 @@
from collections.abc import Sequence
from collections.abc import Awaitable, Callable, Sequence
from dataclasses import dataclass
from typing import cast
from unittest.mock import AsyncMock
from uuid import uuid4

import pytest

from noteflow.domain.value_objects import MeetingId
from noteflow.infrastructure.ai.tools.retrieval import (
    BatchEmbedderProtocol,
    MeetingBatchRetrievalRequest,
    MeetingRetrievalDependencies,
    MeetingRetrievalRequest,
    RetrievalResult,
    retrieve_segments,
    retrieve_segments_batch,
@@ -34,14 +39,24 @@ class TestRetrieveSegments:
        return AsyncMock()

    @pytest.fixture
    def sample_meeting_id(self) -> object:
        return uuid4()
    def sample_meeting_id(self) -> MeetingId:
        return MeetingId(uuid4())

    async def test_retrieve_segments_success(
    @pytest.fixture
    def deps(
        self,
        mock_embedder: AsyncMock,
        mock_segment_repo: AsyncMock,
        sample_meeting_id: object,
    ) -> MeetingRetrievalDependencies:
        return MeetingRetrievalDependencies(
            embedder=mock_embedder,
            segment_repo=mock_segment_repo,
        )

    async def test_retrieve_segments_success(
        self,
        deps: MeetingRetrievalDependencies,
        sample_meeting_id: MeetingId,
    ) -> None:
        segment = MockSegment(
            segment_id=1,
@@ -50,52 +65,50 @@ class TestRetrieveSegments:
            start_time=0.0,
            end_time=5.0,
        )
        mock_segment_repo.search_semantic.return_value = [(segment, 0.95)]
        deps.segment_repo.search_semantic.return_value = [(segment, 0.95)]

        results = await retrieve_segments(
            query="test query",
            embedder=mock_embedder,
            segment_repo=mock_segment_repo,
            meeting_id=sample_meeting_id,  # type: ignore[arg-type]
            top_k=5,
            MeetingRetrievalRequest(
                query="test query",
                meeting_id=sample_meeting_id,
                top_k=5,
            ),
            deps,
        )

        assert len(results) == 1
        assert results[0].segment_id == 1
        assert results[0].text == "Test segment"
        assert results[0].score == 0.95
        assert len(results) == 1, "Expected one retrieval result"
        assert results[0].segment_id == 1, "Segment ID should match input"
        assert results[0].text == "Test segment", "Segment text should match input"
        assert results[0].score == 0.95, "Score should preserve search score"

    async def test_retrieve_segments_calls_embedder_with_query(
        self,
        mock_embedder: AsyncMock,
        mock_segment_repo: AsyncMock,
        deps: MeetingRetrievalDependencies,
    ) -> None:
        mock_segment_repo.search_semantic.return_value = []
        deps.segment_repo.search_semantic.return_value = []

        await retrieve_segments(
            query="what happened in the meeting",
            embedder=mock_embedder,
            segment_repo=mock_segment_repo,
            MeetingRetrievalRequest(query="what happened in the meeting"),
            deps,
        )

        mock_embedder.embed.assert_called_once_with("what happened in the meeting")
        embed_call = cast(AsyncMock, deps.embedder.embed)
        embed_call.assert_called_once_with("what happened in the meeting")

    async def test_retrieve_segments_passes_embedding_to_repo(
        self,
        mock_embedder: AsyncMock,
        mock_segment_repo: AsyncMock,
        deps: MeetingRetrievalDependencies,
    ) -> None:
        mock_embedder.embed.return_value = [1.0, 2.0, 3.0]
        mock_segment_repo.search_semantic.return_value = []
        deps.embedder.embed.return_value = [1.0, 2.0, 3.0]
        deps.segment_repo.search_semantic.return_value = []

        await retrieve_segments(
            query="test",
            embedder=mock_embedder,
            segment_repo=mock_segment_repo,
            top_k=10,
            MeetingRetrievalRequest(query="test", top_k=10),
            deps,
        )

        mock_segment_repo.search_semantic.assert_called_once_with(
        search_call = cast(AsyncMock, deps.segment_repo.search_semantic)
        search_call.assert_called_once_with(
            query_embedding=[1.0, 2.0, 3.0],
            meeting_id=None,
            limit=10,
@@ -103,18 +116,16 @@ class TestRetrieveSegments:

    async def test_retrieve_segments_empty_result(
        self,
        mock_embedder: AsyncMock,
        mock_segment_repo: AsyncMock,
        deps: MeetingRetrievalDependencies,
    ) -> None:
        mock_segment_repo.search_semantic.return_value = []
        deps.segment_repo.search_semantic.return_value = []

        results = await retrieve_segments(
            query="test",
            embedder=mock_embedder,
            segment_repo=mock_segment_repo,
            MeetingRetrievalRequest(query="test"),
            deps,
        )

        assert results == []
        assert results == [], "Expected no results for empty search response"

    async def test_retrieval_result_is_frozen(self) -> None:
        result = RetrievalResult(
@@ -126,8 +137,8 @@ class TestRetrieveSegments:
            score=0.9,
        )

        with pytest.raises(AttributeError):
            result.text = "Modified"  # type: ignore[misc]
        with pytest.raises(AttributeError, match="cannot assign to field"):
            setattr(result, "text", "Modified")


class MockBatchEmbedder:
@@ -145,6 +156,24 @@ class MockBatchEmbedder:
        return [self._embedding for _ in texts]


def _ordered_search_side_effect(
    first: MockSegment,
    second: MockSegment,
) -> Callable[[list[float], int, object], Awaitable[list[tuple[MockSegment, float]]]]:
    call_count = 0

    async def side_effect(
        query_embedding: list[float],
        limit: int,
        meeting_id: object,
    ) -> list[tuple[MockSegment, float]]:
        nonlocal call_count
        call_count += 1
        return [(first, 0.9)] if call_count == 1 else [(second, 0.8)]

    return side_effect
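The batch tests below assert isinstance(batch_embedder, BatchEmbedderProtocol) against a plain class, which implies the protocol is declared @runtime_checkable. A sketch under that assumption (the method signature is inferred from MockBatchEmbedder):

from typing import Protocol, runtime_checkable

@runtime_checkable
class BatchEmbedderProtocol(Protocol):
    async def embed_batch(self, texts: list[str]) -> list[list[float]]: ...

class TinyEmbedder:
    async def embed_batch(self, texts: list[str]) -> list[list[float]]:
        return [[0.0] for _ in texts]

# runtime_checkable permits structural isinstance checks; only method
# presence is verified, not signatures or return types.
assert isinstance(TinyEmbedder(), BatchEmbedderProtocol)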
class TestRetrieveSegmentsBatch:
    @pytest.fixture
    def mock_embedder(self) -> AsyncMock:
@@ -161,8 +190,8 @@ class TestRetrieveSegmentsBatch:
        return AsyncMock()

    @pytest.fixture
    def sample_meeting_id(self) -> object:
        return uuid4()
    def sample_meeting_id(self) -> MeetingId:
        return MeetingId(uuid4())

    async def test_batch_returns_empty_for_no_queries(
        self,
@@ -170,19 +199,21 @@ class TestRetrieveSegmentsBatch:
        mock_segment_repo: AsyncMock,
    ) -> None:
        results = await retrieve_segments_batch(
            queries=[],
            embedder=mock_embedder,
            segment_repo=mock_segment_repo,
            MeetingBatchRetrievalRequest(queries=[]),
            MeetingRetrievalDependencies(
                embedder=mock_embedder,
                segment_repo=mock_segment_repo,
            ),
        )

        assert results == []
        assert results == [], "Expected empty results for empty query list"
        mock_embedder.embed.assert_not_called()

    async def test_batch_uses_embed_batch_when_available(
        self,
        batch_embedder: MockBatchEmbedder,
        mock_segment_repo: AsyncMock,
        sample_meeting_id: object,
        sample_meeting_id: MeetingId,
    ) -> None:
        segment = MockSegment(
            segment_id=1,
@@ -196,22 +227,28 @@ class TestRetrieveSegmentsBatch:
        assert isinstance(batch_embedder, BatchEmbedderProtocol)

        results = await retrieve_segments_batch(
            queries=["query1", "query2"],
            embedder=batch_embedder,
            segment_repo=mock_segment_repo,
            meeting_id=sample_meeting_id,  # type: ignore[arg-type]
            MeetingBatchRetrievalRequest(
                queries=["query1", "query2"],
                meeting_id=sample_meeting_id,
            ),
            MeetingRetrievalDependencies(
                embedder=batch_embedder,
                segment_repo=mock_segment_repo,
            ),
        )

        assert len(results) == 2
        assert len(batch_embedder.embed_batch_calls) == 1
        assert list(batch_embedder.embed_batch_calls[0]) == ["query1", "query2"]
        assert batch_embedder.embed_calls == []
        assert len(results) == 2, "Expected one result list per query"
        assert len(batch_embedder.embed_batch_calls) == 1, "Expected batch embedding call"
        assert list(batch_embedder.embed_batch_calls[0]) == ["query1", "query2"], (
            "Batch embedder should receive queries in order"
        )
        assert batch_embedder.embed_calls == [], "Single embed should not be used"

    async def test_batch_falls_back_to_parallel_embed(
        self,
        mock_embedder: AsyncMock,
        mock_segment_repo: AsyncMock,
        sample_meeting_id: object,
        sample_meeting_id: MeetingId,
    ) -> None:
        segment = MockSegment(
            segment_id=1,
@@ -223,46 +260,43 @@ class TestRetrieveSegmentsBatch:
        mock_segment_repo.search_semantic.return_value = [(segment, 0.9)]

        results = await retrieve_segments_batch(
            queries=["query1", "query2"],
            embedder=mock_embedder,
            segment_repo=mock_segment_repo,
            meeting_id=sample_meeting_id,  # type: ignore[arg-type]
            MeetingBatchRetrievalRequest(
                queries=["query1", "query2"],
                meeting_id=sample_meeting_id,
            ),
            MeetingRetrievalDependencies(
                embedder=mock_embedder,
                segment_repo=mock_segment_repo,
            ),
        )

        assert len(results) == 2
        assert mock_embedder.embed.call_count == 2
        assert len(results) == 2, "Expected one result list per query"
        assert mock_embedder.embed.call_count == 2, "Expected parallel embed fallback"

    async def test_batch_preserves_query_order(
        self,
        mock_segment_repo: AsyncMock,
        sample_meeting_id: object,
        sample_meeting_id: MeetingId,
    ) -> None:
        segment1 = MockSegment(1, sample_meeting_id, "First", 0.0, 5.0)
        segment2 = MockSegment(2, sample_meeting_id, "Second", 5.0, 10.0)

        call_count = 0

        async def side_effect(
            query_embedding: list[float],
            limit: int,
            meeting_id: object,
        ) -> list[tuple[MockSegment, float]]:
            nonlocal call_count
            call_count += 1
            if call_count == 1:
                return [(segment1, 0.9)]
            return [(segment2, 0.8)]

        mock_segment_repo.search_semantic.side_effect = side_effect
        mock_segment_repo.search_semantic.side_effect = _ordered_search_side_effect(
            segment1,
            segment2,
        )

        embedder = MockBatchEmbedder([0.1, 0.2])
        results = await retrieve_segments_batch(
            queries=["first", "second"],
            embedder=embedder,
            segment_repo=mock_segment_repo,
            meeting_id=sample_meeting_id,  # type: ignore[arg-type]
            MeetingBatchRetrievalRequest(
                queries=["first", "second"],
                meeting_id=sample_meeting_id,
            ),
            MeetingRetrievalDependencies(
                embedder=embedder,
                segment_repo=mock_segment_repo,
            ),
        )

        assert len(results) == 2
        assert results[0][0].text == "First"
        assert results[1][0].text == "Second"
        assert len(results) == 2, "Expected results for two queries"
        assert results[0][0].text == "First", "First query should map to first result"
        assert results[1][0].text == "Second", "Second query should map to second result"
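The retrieval refactor folds the old keyword arguments into two objects: a request (query or queries, meeting_id, top_k) and a dependencies bundle (embedder, segment_repo). A hedged sketch of that shape, with defaults and types assumed where the diff does not show them, and result conversion omitted:

from dataclasses import dataclass
from typing import Any

@dataclass(frozen=True)
class MeetingRetrievalRequest:
    query: str
    meeting_id: Any | None = None  # MeetingId in the real code
    top_k: int = 5  # assumed default

@dataclass(frozen=True)
class MeetingRetrievalDependencies:
    embedder: Any
    segment_repo: Any

async def retrieve_segments(request: MeetingRetrievalRequest,
                            deps: MeetingRetrievalDependencies) -> list[Any]:
    embedding = await deps.embedder.embed(request.query)
    # Conversion of (segment, score) pairs into RetrievalResult is omitted.
    return await deps.segment_repo.search_semantic(
        query_embedding=embedding,
        meeting_id=request.meeting_id,
        limit=request.top_k,
    )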
@@ -11,6 +11,10 @@ from noteflow.infrastructure.ai.tools.synthesis import (
)


INVALID_CITATION_ID = 99
FROZEN_ASSIGNMENT_MESSAGE = "cannot assign to field"


class TestSynthesizeAnswer:
    @pytest.fixture
    def mock_llm(self) -> AsyncMock:
@@ -51,8 +55,8 @@ class TestSynthesizeAnswer:
            llm=mock_llm,
        )

        assert isinstance(result, SynthesisResult)
        assert "deadline" in result.answer.lower()
        assert isinstance(result, SynthesisResult), "Result should be a SynthesisResult"
        assert "deadline" in result.answer.lower(), "Answer should mention the deadline"

    async def test_synthesize_answer_extracts_citations(
        self,
@@ -67,14 +71,14 @@ class TestSynthesizeAnswer:
            llm=mock_llm,
        )

        assert result.cited_segment_ids == [1, 3]
        assert result.cited_segment_ids == [1, 3], "Citations should match referenced segments"

    async def test_synthesize_answer_filters_invalid_citations(
        self,
        mock_llm: AsyncMock,
        sample_segments: list[RetrievalResult],
    ) -> None:
        mock_llm.complete.return_value = "Found [1], [99], and [3]."
        mock_llm.complete.return_value = f"Found [1], [{INVALID_CITATION_ID}], and [3]."

        result = await synthesize_answer(
            question="What happened?",
@@ -82,8 +86,10 @@ class TestSynthesizeAnswer:
            llm=mock_llm,
        )

        assert 99 not in result.cited_segment_ids
        assert result.cited_segment_ids == [1, 3]
        assert INVALID_CITATION_ID not in result.cited_segment_ids, (
            "Invalid citations should be filtered out"
        )
        assert result.cited_segment_ids == [1, 3], "Valid citations should be preserved"

    async def test_synthesize_answer_builds_prompt_with_segments(
        self,
@@ -100,42 +106,45 @@ class TestSynthesizeAnswer:

        call_args = mock_llm.complete.call_args
        prompt = call_args[0][0]
        assert "What is happening?" in prompt
        assert "[1]" in prompt
        assert "[3]" in prompt
        assert "John discussed" in prompt
        assert "What is happening?" in prompt, "Prompt should include the question"
        assert "[1]" in prompt, "Prompt should include citation marker for segment 1"
        assert "[3]" in prompt, "Prompt should include citation marker for segment 3"
        assert "John discussed" in prompt, "Prompt should include segment text"


class TestExtractCitedIds:
    def test_extracts_single_citation(self) -> None:
        result = extract_cited_ids("The answer is here [5].", {1, 3, 5})

        assert result == [5]
        assert result == [5], "Single citation should be extracted"

    def test_extracts_multiple_citations(self) -> None:
        result = extract_cited_ids("See [1] and [3] for details.", {1, 3, 5})

        assert result == [1, 3]
        assert result == [1, 3], "Multiple citations should be extracted"

    def test_filters_invalid_ids(self) -> None:
        result = extract_cited_ids("See [1] and [99].", {1, 3, 5})
        result = extract_cited_ids(
            f"See [1] and [{INVALID_CITATION_ID}].",
            {1, 3, 5},
        )

        assert result == [1]
        assert result == [1], "Invalid IDs should be filtered out"

    def test_deduplicates_citations(self) -> None:
        result = extract_cited_ids("See [1] and then [1] again.", {1, 3})

        assert result == [1]
        assert result == [1], "Duplicates should be removed while preserving order"

    def test_preserves_order(self) -> None:
        result = extract_cited_ids("[3] comes first, then [1].", {1, 3})

        assert result == [3, 1]
        assert result == [3, 1], "Order should be preserved"

    def test_empty_for_no_citations(self) -> None:
        result = extract_cited_ids("No citations here.", {1, 3})

        assert result == []
        assert result == [], "No citations should return an empty list"


class TestSynthesisResult:
@@ -145,5 +154,5 @@ class TestSynthesisResult:
            cited_segment_ids=[1, 2],
        )

        with pytest.raises(AttributeError):
            result.answer = "Modified"  # type: ignore[misc]
        with pytest.raises(AttributeError, match=FROZEN_ASSIGNMENT_MESSAGE):
            setattr(result, "answer", "Modified")
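The TestExtractCitedIds cases above pin the behavior completely: pull bracketed integers out of the answer, keep only ids that reference retrieved segments, and deduplicate while preserving first-seen order. A minimal implementation consistent with those tests (not necessarily the shipped one):

import re

_CITATION_RE = re.compile(r"\[(\d+)\]")

def extract_cited_ids(answer: str, valid_ids: set[int]) -> list[int]:
    cited: list[int] = []
    for match in _CITATION_RE.finditer(answer):
        segment_id = int(match.group(1))
        # Keep only ids of retrieved segments, once each, in citation order.
        if segment_id in valid_ids and segment_id not in cited:
            cited.append(segment_id)
    return cited

assert extract_cited_ids("See [3], [1], [3] and [99].", {1, 3}) == [3, 1]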
@@ -149,8 +149,8 @@ class TestOrmConverterToOrmKwargs:
        assert result["word_index"] == 5, "Word index preserved"


class TestNerConverterToOrmKwargs:
    """Tests for NerConverter.to_orm_kwargs."""
class TestNerConverterToEntityOrmKwargs:
    """Tests for NerConverter.to_entity_orm_kwargs."""

    def test_ner_entity_to_orm_kwargs(self) -> None:
        """Convert domain NamedEntity to ORM kwargs dict."""
@@ -166,7 +166,7 @@ class TestNerConverterToOrmKwargs:
            is_pinned=True,
        )

        result = NerConverter.to_orm_kwargs(entity)
        result = NerConverter.to_entity_orm_kwargs(entity)

        assert result["id"] == entity.id, "ID should be preserved"
        assert result["meeting_id"] == meeting_id, "Meeting ID should be preserved"
@@ -185,7 +185,7 @@ class TestNerConverterToOrmKwargs:
            confidence=0.8,
        )

        result = NerConverter.to_orm_kwargs(entity)
        result = NerConverter.to_entity_orm_kwargs(entity)

        assert result["category"] == "person", "category value should be 'person'"
        assert isinstance(result["category"], str), "category should be string type"
@@ -206,7 +206,7 @@ class TestNerConverterToOrmKwargs:
    ) -> None:
        """All category enum values convert to correct string."""
        entity = NamedEntity(text="Test", category=category, confidence=0.5)
        result = NerConverter.to_orm_kwargs(entity)
        result = NerConverter.to_entity_orm_kwargs(entity)
        assert result["category"] == expected_value, f"category {category} should convert to '{expected_value}'"


@@ -287,7 +287,7 @@ class TestNerConverterRoundTrip:
        self, round_trip_entity: NamedEntity
    ) -> None:
        """Round-trip conversion preserves all field values."""
        orm_kwargs = NerConverter.to_orm_kwargs(round_trip_entity)
        orm_kwargs = NerConverter.to_entity_orm_kwargs(round_trip_entity)
        mock_orm = _create_mock_ner_orm_from_kwargs(orm_kwargs)
        result = NerConverter.orm_to_domain(mock_orm)


@@ -177,7 +177,7 @@ class TestIntegrationConverterOrmToDomain:


class TestIntegrationConverterToOrmKwargs:
    """Tests for IntegrationConverter.to_orm_kwargs."""
    """Tests for IntegrationConverter.to_integration_orm_kwargs."""

    @pytest.fixture
    def integration_entity(self) -> Integration:
@@ -199,7 +199,7 @@ class TestIntegrationConverterToOrmKwargs:
        self, integration_entity: Integration
    ) -> None:
        """Convert domain Integration to ORM kwargs dict."""
        result = IntegrationConverter.to_orm_kwargs(integration_entity)
        result = IntegrationConverter.to_integration_orm_kwargs(integration_entity)

        assert result["id"] == integration_entity.id, "ID should be preserved"
        assert result["workspace_id"] == integration_entity.workspace_id, "Workspace ID should be preserved"
@@ -230,7 +230,7 @@ class TestIntegrationConverterToOrmKwargs:
            type=type_enum,
            status=IntegrationStatus.DISCONNECTED,
        )
        result = IntegrationConverter.to_orm_kwargs(integration)
        result = IntegrationConverter.to_integration_orm_kwargs(integration)
        assert result["type"] == expected_string, f"Type enum {type_enum} should convert to '{expected_string}'"

    @pytest.mark.parametrize(
@@ -252,7 +252,7 @@ class TestIntegrationConverterToOrmKwargs:
            type=IntegrationType.CALENDAR,
            status=status_enum,
        )
        result = IntegrationConverter.to_orm_kwargs(integration)
        result = IntegrationConverter.to_integration_orm_kwargs(integration)
        assert result["status"] == expected_string, f"Status enum {status_enum} should convert to '{expected_string}'"


@@ -330,7 +330,7 @@ class TestSyncRunConverterOrmToDomain:


class TestSyncRunConverterToOrmKwargs:
    """Tests for SyncRunConverter.to_orm_kwargs."""
    """Tests for SyncRunConverter.to_sync_run_orm_kwargs."""

    def test_sync_run_to_orm_kwargs(self) -> None:
        """Convert domain SyncRun to ORM kwargs dict."""
@@ -345,7 +345,7 @@ class TestSyncRunConverterToOrmKwargs:
            stats={"items_synced": SYNC_RUN_ITEMS_COMPLETE},
        )

        result = SyncRunConverter.to_orm_kwargs(sync_run)
        result = SyncRunConverter.to_sync_run_orm_kwargs(sync_run)

        assert result["id"] == sync_run.id, "ID should be preserved"
        assert result["integration_id"] == sync_run.integration_id, "Integration ID should be preserved"
@@ -371,7 +371,7 @@ class TestSyncRunConverterToOrmKwargs:
            status=status_enum,
            started_at=datetime(2024, 1, 15, 12, 0, 0, tzinfo=UTC),
        )
        result = SyncRunConverter.to_orm_kwargs(sync_run)
        result = SyncRunConverter.to_sync_run_orm_kwargs(sync_run)
        assert result["status"] == expected_string, f"Status enum {status_enum} should convert to '{expected_string}'"


@@ -412,7 +412,7 @@ class TestIntegrationConverterRoundTrip:
        self, round_trip_integration: Integration
    ) -> None:
        """Round-trip conversion preserves all Integration field values."""
        orm_kwargs = IntegrationConverter.to_orm_kwargs(round_trip_integration)
        orm_kwargs = IntegrationConverter.to_integration_orm_kwargs(round_trip_integration)
        mock_orm = _create_mock_integration_orm_from_kwargs(orm_kwargs)
        result = IntegrationConverter.orm_to_domain(mock_orm)

@@ -428,7 +428,7 @@ class TestIntegrationConverterRoundTrip:
        self, round_trip_sync_run: SyncRun
    ) -> None:
        """Round-trip conversion preserves all SyncRun field values."""
        orm_kwargs = SyncRunConverter.to_orm_kwargs(round_trip_sync_run)
        orm_kwargs = SyncRunConverter.to_sync_run_orm_kwargs(round_trip_sync_run)
        mock_orm = _create_mock_sync_run_orm_from_kwargs(orm_kwargs)
        result = SyncRunConverter.orm_to_domain(mock_orm)
uv.lock (generated, 526 lines changed)
@@ -1678,6 +1678,39 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/7b/91/984aca2ec129e2757d1e4e3c81c3fcda9d0f85b74670a094cc443d9ee949/joblib-1.5.3-py3-none-any.whl", hash = "sha256:5fc3c5039fc5ca8c0276333a188bbd59d6b7ab37fe6632daa76bc7f9ec18e713", size = 309071, upload-time = "2025-12-15T08:41:44.973Z" },
]

[[package]]
name = "jsonpatch"
version = "1.33"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "jsonpointer" },
]
sdist = { url = "https://files.pythonhosted.org/packages/42/78/18813351fe5d63acad16aec57f94ec2b70a09e53ca98145589e185423873/jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c", size = 21699, upload-time = "2023-06-26T12:07:29.144Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/73/07/02e16ed01e04a374e644b575638ec7987ae846d25ad97bcc9945a3ee4b0e/jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade", size = 12898, upload-time = "2023-06-16T21:01:28.466Z" },
]

[[package]]
name = "jsonpath-ng"
version = "1.7.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "ply" },
]
sdist = { url = "https://files.pythonhosted.org/packages/6d/86/08646239a313f895186ff0a4573452038eed8c86f54380b3ebac34d32fb2/jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c", size = 37838, upload-time = "2024-10-11T15:41:42.404Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/35/5a/73ecb3d82f8615f32ccdadeb9356726d6cae3a4bbc840b437ceb95708063/jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6", size = 30105, upload-time = "2024-11-20T17:58:30.418Z" },
]

[[package]]
name = "jsonpointer"
version = "3.0.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6a/0a/eebeb1fa92507ea94016a2a790b93c2ae41a7e18778f85471dc54475ed25/jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef", size = 9114, upload-time = "2024-06-10T19:24:42.462Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595, upload-time = "2024-06-10T19:24:40.698Z" },
]

[[package]]
name = "julius"
version = "0.2.7"
@@ -1776,6 +1809,130 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/80/be/3578e8afd18c88cdf9cb4cffde75a96d2be38c5a903f1ed0ceec061bd09e/kiwisolver-1.4.9-cp314-cp314t-win_arm64.whl", hash = "sha256:4a48a2ce79d65d363597ef7b567ce3d14d68783d2b2263d98db3d9477805ba32", size = 70260, upload-time = "2025-08-10T21:27:36.606Z" },
]

[[package]]
name = "langchain-core"
version = "1.2.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "jsonpatch" },
    { name = "langsmith" },
    { name = "packaging" },
    { name = "pydantic" },
    { name = "pyyaml" },
    { name = "tenacity" },
    { name = "typing-extensions" },
    { name = "uuid-utils" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a2/0e/664d8d81b3493e09cbab72448d2f9d693d1fa5aa2bcc488602203a9b6da0/langchain_core-1.2.7.tar.gz", hash = "sha256:e1460639f96c352b4a41c375f25aeb8d16ffc1769499fb1c20503aad59305ced", size = 837039, upload-time = "2026-01-09T17:44:25.505Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/6e/6f/34a9fba14d191a67f7e2ee3dbce3e9b86d2fa7310e2c7f2c713583481bd2/langchain_core-1.2.7-py3-none-any.whl", hash = "sha256:452f4fef7a3d883357b22600788d37e3d8854ef29da345b7ac7099f33c31828b", size = 490232, upload-time = "2026-01-09T17:44:24.236Z" },
]

[[package]]
name = "langgraph"
version = "1.0.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "langchain-core" },
    { name = "langgraph-checkpoint" },
    { name = "langgraph-prebuilt" },
    { name = "langgraph-sdk" },
    { name = "pydantic" },
    { name = "xxhash" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c2/9c/dac99ab1732e9fb2d3b673482ac28f02bee222c0319a3b8f8f73d90727e6/langgraph-1.0.6.tar.gz", hash = "sha256:dd8e754c76d34a07485308d7117221acf63990e7de8f46ddf5fe256b0a22e6c5", size = 495092, upload-time = "2026-01-12T20:33:30.778Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/10/45/9960747781416bed4e531ed0c6b2f2c739bc7b5397d8e92155463735a40e/langgraph-1.0.6-py3-none-any.whl", hash = "sha256:bcfce190974519c72e29f6e5b17f0023914fd6f936bfab8894083215b271eb89", size = 157356, upload-time = "2026-01-12T20:33:29.191Z" },
]

[[package]]
name = "langgraph-checkpoint"
version = "3.0.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "langchain-core" },
    { name = "ormsgpack" },
]
sdist = { url = "https://files.pythonhosted.org/packages/0f/07/2b1c042fa87d40cf2db5ca27dc4e8dd86f9a0436a10aa4361a8982718ae7/langgraph_checkpoint-3.0.1.tar.gz", hash = "sha256:59222f875f85186a22c494aedc65c4e985a3df27e696e5016ba0b98a5ed2cee0", size = 137785, upload-time = "2025-11-04T21:55:47.774Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/48/e3/616e3a7ff737d98c1bbb5700dd62278914e2a9ded09a79a1fa93cf24ce12/langgraph_checkpoint-3.0.1-py3-none-any.whl", hash = "sha256:9b04a8d0edc0474ce4eaf30c5d731cee38f11ddff50a6177eead95b5c4e4220b", size = 46249, upload-time = "2025-11-04T21:55:46.472Z" },
]

[[package]]
name = "langgraph-checkpoint-postgres"
version = "3.0.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "langgraph-checkpoint" },
    { name = "orjson" },
    { name = "psycopg" },
    { name = "psycopg-pool" },
]
sdist = { url = "https://files.pythonhosted.org/packages/36/86/5f3c01346be4b7f455aeeff915f78c878fe9eee5cc8b3782df0886b409bf/langgraph_checkpoint_postgres-3.0.3.tar.gz", hash = "sha256:f77852340198b9e71f2d52da5b591cfc55a4a2e537001868a83b9ab1865f9146", size = 127198, upload-time = "2026-01-12T20:36:40.205Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/84/f8/5a6f9dbc3a93e3b9a567ba0a8b6df5e89882be64dc1c6586a33ec15e93b8/langgraph_checkpoint_postgres-3.0.3-py3-none-any.whl", hash = "sha256:f603f4f81961e2740bd70679affeb540452d467990d4692132c07c20870bdbb1", size = 42715, upload-time = "2026-01-12T20:36:38.636Z" },
]

[[package]]
name = "langgraph-checkpoint-redis"
version = "0.3.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "langgraph-checkpoint" },
    { name = "orjson" },
    { name = "redis" },
    { name = "redisvl" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a7/f3/16ff43bcfa3efa85068c7f61e34578647c928a61a130cf26d8c408b66077/langgraph_checkpoint_redis-0.3.2.tar.gz", hash = "sha256:1fa28a3f06c1152ebcbfdddec865cdcac65e92ecccd39ecbca99a8be18a8d71b", size = 87565, upload-time = "2026-01-04T16:02:32.355Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/d7/5d/d7d6b4775443c595c2a40a2f01c72793575e800b302349fe35ba466ae77d/langgraph_checkpoint_redis-0.3.2-py3-none-any.whl", hash = "sha256:7641bf5d3f1b64b396e064856f6fd36e983548e353fe2a11c9b0357bceb304fd", size = 92811, upload-time = "2026-01-04T16:02:30.789Z" },
]

[[package]]
name = "langgraph-prebuilt"
version = "1.0.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "langchain-core" },
    { name = "langgraph-checkpoint" },
]
sdist = { url = "https://files.pythonhosted.org/packages/3c/f5/8c75dace0d729561dce2966e630c5e312193df7e5df41a7e10cd7378c3a7/langgraph_prebuilt-1.0.6.tar.gz", hash = "sha256:c5f6cf0f5a0ac47643d2e26ae6faa38cb28885ecde67911190df9e30c4f72361", size = 162623, upload-time = "2026-01-12T20:31:28.425Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/26/6c/4045822b0630cfc0f8624c4499ceaf90644142143c063a8dc385a7424fc3/langgraph_prebuilt-1.0.6-py3-none-any.whl", hash = "sha256:9fdc35048ff4ac985a55bd2a019a86d45b8184551504aff6780d096c678b39ae", size = 35322, upload-time = "2026-01-12T20:31:27.161Z" },
]

[[package]]
name = "langgraph-sdk"
version = "0.3.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "httpx" },
    { name = "orjson" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c3/0f/ed0634c222eed48a31ba48eab6881f94ad690d65e44fe7ca838240a260c1/langgraph_sdk-0.3.3.tar.gz", hash = "sha256:c34c3dce3b6848755eb61f0c94369d1ba04aceeb1b76015db1ea7362c544fb26", size = 130589, upload-time = "2026-01-13T00:30:43.894Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/6e/be/4ad511bacfdd854afb12974f407cb30010dceb982dc20c55491867b34526/langgraph_sdk-0.3.3-py3-none-any.whl", hash = "sha256:a52ebaf09d91143e55378bb2d0b033ed98f57f48c9ad35c8f81493b88705fc7b", size = 67021, upload-time = "2026-01-13T00:30:42.264Z" },
]

[[package]]
name = "langsmith"
version = "0.6.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "httpx" },
    { name = "orjson", marker = "platform_python_implementation != 'PyPy'" },
    { name = "packaging" },
    { name = "pydantic" },
    { name = "requests" },
    { name = "requests-toolbelt" },
    { name = "uuid-utils" },
    { name = "zstandard" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e7/85/9c7933052a997da1b85bc5c774f3865e9b1da1c8d71541ea133178b13229/langsmith-0.6.4.tar.gz", hash = "sha256:36f7223a01c218079fbb17da5e536ebbaf5c1468c028abe070aa3ae59bc99ec8", size = 919964, upload-time = "2026-01-15T20:02:28.873Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/66/0f/09a6637a7ba777eb307b7c80852d9ee26438e2bdafbad6fcc849ff9d9192/langsmith-0.6.4-py3-none-any.whl", hash = "sha256:ac4835860160be371042c7adbba3cb267bcf8d96a5ea976c33a8a4acad6c5486", size = 283503, upload-time = "2026-01-15T20:02:26.662Z" },
]

[[package]]
name = "librt"
version = "0.7.4"
@@ -2032,6 +2189,21 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
]

[[package]]
name = "ml-dtypes"
version = "0.4.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "numpy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fd/15/76f86faa0902836cc133939732f7611ace68cf54148487a99c539c272dc8/ml_dtypes-0.4.1.tar.gz", hash = "sha256:fad5f2de464fd09127e49b7fd1252b9006fb43d2edc1ff112d390c324af5ca7a", size = 692594, upload-time = "2024-09-13T19:07:11.624Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/ba/1a/99e924f12e4b62139fbac87419698c65f956d58de0dbfa7c028fa5b096aa/ml_dtypes-0.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:827d3ca2097085cf0355f8fdf092b888890bb1b1455f52801a2d7756f056f54b", size = 405077, upload-time = "2024-09-13T19:06:57.538Z" },
    { url = "https://files.pythonhosted.org/packages/8f/8c/7b610bd500617854c8cc6ed7c8cfb9d48d6a5c21a1437a36a4b9bc8a3598/ml_dtypes-0.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:772426b08a6172a891274d581ce58ea2789cc8abc1c002a27223f314aaf894e7", size = 2181554, upload-time = "2024-09-13T19:06:59.196Z" },
    { url = "https://files.pythonhosted.org/packages/c7/c6/f89620cecc0581dc1839e218c4315171312e46c62a62da6ace204bda91c0/ml_dtypes-0.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:126e7d679b8676d1a958f2651949fbfa182832c3cd08020d8facd94e4114f3e9", size = 2160488, upload-time = "2024-09-13T19:07:03.131Z" },
    { url = "https://files.pythonhosted.org/packages/ae/11/a742d3c31b2cc8557a48efdde53427fd5f9caa2fa3c9c27d826e78a66f51/ml_dtypes-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:df0fb650d5c582a9e72bb5bd96cfebb2cdb889d89daff621c8fbc60295eba66c", size = 127462, upload-time = "2024-09-13T19:07:04.916Z" },
]

[[package]]
name = "more-itertools"
version = "10.8.0"
@@ -2278,10 +2450,14 @@ dependencies = [
    { name = "grpcio-tools" },
    { name = "httpx" },
    { name = "keyring" },
    { name = "langgraph" },
    { name = "langgraph-checkpoint-postgres" },
    { name = "langgraph-checkpoint-redis" },
    { name = "openai-whisper" },
    { name = "pgvector" },
    { name = "protobuf" },
    { name = "psutil" },
    { name = "psycopg" },
    { name = "pydantic" },
    { name = "pydantic-settings" },
    { name = "rich" },
@@ -2444,6 +2620,9 @@ requires-dist = [
    { name = "grpcio-tools", specifier = ">=1.60" },
    { name = "httpx", specifier = ">=0.27" },
    { name = "keyring", specifier = ">=25.0" },
    { name = "langgraph", specifier = ">=1.0.6" },
    { name = "langgraph-checkpoint-postgres", specifier = ">=3.0.3" },
    { name = "langgraph-checkpoint-redis", specifier = ">=0.3.2" },
    { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.8" },
    { name = "noteflow", extras = ["audio", "dev", "triggers", "summarization", "diarization", "pdf", "ner", "ner-gliner", "calendar", "observability"], marker = "extra == 'all'" },
    { name = "numpy", marker = "extra == 'audio'", specifier = ">=1.26" },
@@ -2465,6 +2644,7 @@ requires-dist = [
    { name = "pgvector", specifier = ">=0.3" },
    { name = "protobuf", specifier = ">=4.25" },
    { name = "psutil", specifier = ">=7.1.3" },
    { name = "psycopg", specifier = ">=3.3.2" },
    { name = "pyannote-audio", marker = "extra == 'diarization'", specifier = ">=3.3" },
    { name = "pyannote-audio", marker = "extra == 'optional'", specifier = ">=3.3" },
    { name = "pydantic", specifier = ">=2.0" },
@@ -2943,6 +3123,98 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/58/de/3d8455b08cb6312f8cc46aacdf16c71d4d881a1db4a4140fc5ef31108422/optuna-4.6.0-py3-none-any.whl", hash = "sha256:4c3a9facdef2b2dd7e3e2a8ae3697effa70fae4056fcf3425cfc6f5a40feb069", size = 404708, upload-time = "2025-11-10T05:14:28.6Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "orjson"
|
||||
version = "3.11.5"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/04/b8/333fdb27840f3bf04022d21b654a35f58e15407183aeb16f3b41aa053446/orjson-3.11.5.tar.gz", hash = "sha256:82393ab47b4fe44ffd0a7659fa9cfaacc717eb617c93cde83795f14af5c2e9d5", size = 5972347, upload-time = "2025-12-06T15:55:39.458Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ef/a4/8052a029029b096a78955eadd68ab594ce2197e24ec50e6b6d2ab3f4e33b/orjson-3.11.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:334e5b4bff9ad101237c2d799d9fd45737752929753bf4faf4b207335a416b7d", size = 245347, upload-time = "2025-12-06T15:54:22.061Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/64/67/574a7732bd9d9d79ac620c8790b4cfe0717a3d5a6eb2b539e6e8995e24a0/orjson-3.11.5-cp312-cp312-macosx_15_0_arm64.whl", hash = "sha256:ff770589960a86eae279f5d8aa536196ebda8273a2a07db2a54e82b93bc86626", size = 129435, upload-time = "2025-12-06T15:54:23.615Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/52/8d/544e77d7a29d90cf4d9eecd0ae801c688e7f3d1adfa2ebae5e1e94d38ab9/orjson-3.11.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed24250e55efbcb0b35bed7caaec8cedf858ab2f9f2201f17b8938c618c8ca6f", size = 132074, upload-time = "2025-12-06T15:54:24.694Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/57/b9f5b5b6fbff9c26f77e785baf56ae8460ef74acdb3eae4931c25b8f5ba9/orjson-3.11.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a66d7769e98a08a12a139049aac2f0ca3adae989817f8c43337455fbc7669b85", size = 130520, upload-time = "2025-12-06T15:54:26.185Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f6/6d/d34970bf9eb33f9ec7c979a262cad86076814859e54eb9a059a52f6dc13d/orjson-3.11.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86cfc555bfd5794d24c6a1903e558b50644e5e68e6471d66502ce5cb5fdef3f9", size = 136209, upload-time = "2025-12-06T15:54:27.264Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/39/bc373b63cc0e117a105ea12e57280f83ae52fdee426890d57412432d63b3/orjson-3.11.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a230065027bc2a025e944f9d4714976a81e7ecfa940923283bca7bbc1f10f626", size = 139837, upload-time = "2025-12-06T15:54:28.75Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/aa/7c4818c8d7d324da220f4f1af55c343956003aa4d1ce1857bdc1d396ba69/orjson-3.11.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b29d36b60e606df01959c4b982729c8845c69d1963f88686608be9ced96dbfaa", size = 137307, upload-time = "2025-12-06T15:54:29.856Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/46/bf/0993b5a056759ba65145effe3a79dd5a939d4a070eaa5da2ee3180fbb13f/orjson-3.11.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c74099c6b230d4261fdc3169d50efc09abf38ace1a42ea2f9994b1d79153d477", size = 139020, upload-time = "2025-12-06T15:54:31.024Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/65/e8/83a6c95db3039e504eda60fc388f9faedbb4f6472f5aba7084e06552d9aa/orjson-3.11.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e697d06ad57dd0c7a737771d470eedc18e68dfdefcdd3b7de7f33dfda5b6212e", size = 141099, upload-time = "2025-12-06T15:54:32.196Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/b4/24fdc024abfce31c2f6812973b0a693688037ece5dc64b7a60c1ce69e2f2/orjson-3.11.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e08ca8a6c851e95aaecc32bc44a5aa75d0ad26af8cdac7c77e4ed93acf3d5b69", size = 413540, upload-time = "2025-12-06T15:54:33.361Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/37/01c0ec95d55ed0c11e4cae3e10427e479bba40c77312b63e1f9665e0737d/orjson-3.11.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e8b5f96c05fce7d0218df3fdfeb962d6b8cfff7e3e20264306b46dd8b217c0f3", size = 151530, upload-time = "2025-12-06T15:54:34.6Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/d4/f9ebc57182705bb4bbe63f5bbe14af43722a2533135e1d2fb7affa0c355d/orjson-3.11.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ddbfdb5099b3e6ba6d6ea818f61997bb66de14b411357d24c4612cf1ebad08ca", size = 141863, upload-time = "2025-12-06T15:54:35.801Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/04/02102b8d19fdcb009d72d622bb5781e8f3fae1646bf3e18c53d1bc8115b5/orjson-3.11.5-cp312-cp312-win32.whl", hash = "sha256:9172578c4eb09dbfcf1657d43198de59b6cef4054de385365060ed50c458ac98", size = 135255, upload-time = "2025-12-06T15:54:37.209Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/fb/f05646c43d5450492cb387de5549f6de90a71001682c17882d9f66476af5/orjson-3.11.5-cp312-cp312-win_amd64.whl", hash = "sha256:2b91126e7b470ff2e75746f6f6ee32b9ab67b7a93c8ba1d15d3a0caaf16ec875", size = 133252, upload-time = "2025-12-06T15:54:38.401Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/a6/7b8c0b26ba18c793533ac1cd145e131e46fcf43952aa94c109b5b913c1f0/orjson-3.11.5-cp312-cp312-win_arm64.whl", hash = "sha256:acbc5fac7e06777555b0722b8ad5f574739e99ffe99467ed63da98f97f9ca0fe", size = 126777, upload-time = "2025-12-06T15:54:39.515Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/43/61a77040ce59f1569edf38f0b9faadc90c8cf7e9bec2e0df51d0132c6bb7/orjson-3.11.5-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:3b01799262081a4c47c035dd77c1301d40f568f77cc7ec1bb7db5d63b0a01629", size = 245271, upload-time = "2025-12-06T15:54:40.878Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/f9/0f79be617388227866d50edd2fd320cb8fb94dc1501184bb1620981a0aba/orjson-3.11.5-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:61de247948108484779f57a9f406e4c84d636fa5a59e411e6352484985e8a7c3", size = 129422, upload-time = "2025-12-06T15:54:42.403Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/42/f1bf1549b432d4a78bfa95735b79b5dac75b65b5bb815bba86ad406ead0a/orjson-3.11.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:894aea2e63d4f24a7f04a1908307c738d0dce992e9249e744b8f4e8dd9197f39", size = 132060, upload-time = "2025-12-06T15:54:43.531Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/49/825aa6b929f1a6ed244c78acd7b22c1481fd7e5fda047dc8bf4c1a807eb6/orjson-3.11.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ddc21521598dbe369d83d4d40338e23d4101dad21dae0e79fa20465dbace019f", size = 130391, upload-time = "2025-12-06T15:54:45.059Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/ec/de55391858b49e16e1aa8f0bbbb7e5997b7345d8e984a2dec3746d13065b/orjson-3.11.5-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7cce16ae2f5fb2c53c3eafdd1706cb7b6530a67cc1c17abe8ec747f5cd7c0c51", size = 135964, upload-time = "2025-12-06T15:54:46.576Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1c/40/820bc63121d2d28818556a2d0a09384a9f0262407cf9fa305e091a8048df/orjson-3.11.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e46c762d9f0e1cfb4ccc8515de7f349abbc95b59cb5a2bd68df5973fdef913f8", size = 139817, upload-time = "2025-12-06T15:54:48.084Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/09/c7/3a445ca9a84a0d59d26365fd8898ff52bdfcdcb825bcc6519830371d2364/orjson-3.11.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d7345c759276b798ccd6d77a87136029e71e66a8bbf2d2755cbdde1d82e78706", size = 137336, upload-time = "2025-12-06T15:54:49.426Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9a/b3/dc0d3771f2e5d1f13368f56b339c6782f955c6a20b50465a91acb79fe961/orjson-3.11.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75bc2e59e6a2ac1dd28901d07115abdebc4563b5b07dd612bf64260a201b1c7f", size = 138993, upload-time = "2025-12-06T15:54:50.939Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/a2/65267e959de6abe23444659b6e19c888f242bf7725ff927e2292776f6b89/orjson-3.11.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:54aae9b654554c3b4edd61896b978568c6daa16af96fa4681c9b5babd469f863", size = 141070, upload-time = "2025-12-06T15:54:52.414Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/63/c9/da44a321b288727a322c6ab17e1754195708786a04f4f9d2220a5076a649/orjson-3.11.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:4bdd8d164a871c4ec773f9de0f6fe8769c2d6727879c37a9666ba4183b7f8228", size = 413505, upload-time = "2025-12-06T15:54:53.67Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7f/17/68dc14fa7000eefb3d4d6d7326a190c99bb65e319f02747ef3ebf2452f12/orjson-3.11.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a261fef929bcf98a60713bf5e95ad067cea16ae345d9a35034e73c3990e927d2", size = 151342, upload-time = "2025-12-06T15:54:55.113Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c4/c5/ccee774b67225bed630a57478529fc026eda33d94fe4c0eac8fe58d4aa52/orjson-3.11.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c028a394c766693c5c9909dec76b24f37e6a1b91999e8d0c0d5feecbe93c3e05", size = 141823, upload-time = "2025-12-06T15:54:56.331Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/80/5d00e4155d0cd7390ae2087130637671da713959bb558db9bac5e6f6b042/orjson-3.11.5-cp313-cp313-win32.whl", hash = "sha256:2cc79aaad1dfabe1bd2d50ee09814a1253164b3da4c00a78c458d82d04b3bdef", size = 135236, upload-time = "2025-12-06T15:54:57.507Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/95/fe/792cc06a84808dbdc20ac6eab6811c53091b42f8e51ecebf14b540e9cfe4/orjson-3.11.5-cp313-cp313-win_amd64.whl", hash = "sha256:ff7877d376add4e16b274e35a3f58b7f37b362abf4aa31863dadacdd20e3a583", size = 133167, upload-time = "2025-12-06T15:54:58.71Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/46/2c/d158bd8b50e3b1cfdcf406a7e463f6ffe3f0d167b99634717acdaf5e299f/orjson-3.11.5-cp313-cp313-win_arm64.whl", hash = "sha256:59ac72ea775c88b163ba8d21b0177628bd015c5dd060647bbab6e22da3aad287", size = 126712, upload-time = "2025-12-06T15:54:59.892Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/60/77d7b839e317ead7bb225d55bb50f7ea75f47afc489c81199befc5435b50/orjson-3.11.5-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:e446a8ea0a4c366ceafc7d97067bfd55292969143b57e3c846d87fc701e797a0", size = 245252, upload-time = "2025-12-06T15:55:01.127Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/aa/d4639163b400f8044cef0fb9aa51b0337be0da3a27187a20d1166e742370/orjson-3.11.5-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:53deb5addae9c22bbe3739298f5f2196afa881ea75944e7720681c7080909a81", size = 129419, upload-time = "2025-12-06T15:55:02.723Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/94/9eabf94f2e11c671111139edf5ec410d2f21e6feee717804f7e8872d883f/orjson-3.11.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82cd00d49d6063d2b8791da5d4f9d20539c5951f965e45ccf4e96d33505ce68f", size = 132050, upload-time = "2025-12-06T15:55:03.918Z" },
{ url = "https://files.pythonhosted.org/packages/3d/c8/ca10f5c5322f341ea9a9f1097e140be17a88f88d1cfdd29df522970d9744/orjson-3.11.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3fd15f9fc8c203aeceff4fda211157fad114dde66e92e24097b3647a08f4ee9e", size = 130370, upload-time = "2025-12-06T15:55:05.173Z" },
{ url = "https://files.pythonhosted.org/packages/25/d4/e96824476d361ee2edd5c6290ceb8d7edf88d81148a6ce172fc00278ca7f/orjson-3.11.5-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9df95000fbe6777bf9820ae82ab7578e8662051bb5f83d71a28992f539d2cda7", size = 136012, upload-time = "2025-12-06T15:55:06.402Z" },
{ url = "https://files.pythonhosted.org/packages/85/8e/9bc3423308c425c588903f2d103cfcfe2539e07a25d6522900645a6f257f/orjson-3.11.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92a8d676748fca47ade5bc3da7430ed7767afe51b2f8100e3cd65e151c0eaceb", size = 139809, upload-time = "2025-12-06T15:55:07.656Z" },
{ url = "https://files.pythonhosted.org/packages/e9/3c/b404e94e0b02a232b957c54643ce68d0268dacb67ac33ffdee24008c8b27/orjson-3.11.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa0f513be38b40234c77975e68805506cad5d57b3dfd8fe3baa7f4f4051e15b4", size = 137332, upload-time = "2025-12-06T15:55:08.961Z" },
{ url = "https://files.pythonhosted.org/packages/51/30/cc2d69d5ce0ad9b84811cdf4a0cd5362ac27205a921da524ff42f26d65e0/orjson-3.11.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1863e75b92891f553b7922ce4ee10ed06db061e104f2b7815de80cdcb135ad", size = 138983, upload-time = "2025-12-06T15:55:10.595Z" },
{ url = "https://files.pythonhosted.org/packages/0e/87/de3223944a3e297d4707d2fe3b1ffb71437550e165eaf0ca8bbe43ccbcb1/orjson-3.11.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d4be86b58e9ea262617b8ca6251a2f0d63cc132a6da4b5fcc8e0a4128782c829", size = 141069, upload-time = "2025-12-06T15:55:11.832Z" },
{ url = "https://files.pythonhosted.org/packages/65/30/81d5087ae74be33bcae3ff2d80f5ccaa4a8fedc6d39bf65a427a95b8977f/orjson-3.11.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:b923c1c13fa02084eb38c9c065afd860a5cff58026813319a06949c3af5732ac", size = 413491, upload-time = "2025-12-06T15:55:13.314Z" },
{ url = "https://files.pythonhosted.org/packages/d0/6f/f6058c21e2fc1efaf918986dbc2da5cd38044f1a2d4b7b91ad17c4acf786/orjson-3.11.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:1b6bd351202b2cd987f35a13b5e16471cf4d952b42a73c391cc537974c43ef6d", size = 151375, upload-time = "2025-12-06T15:55:14.715Z" },
{ url = "https://files.pythonhosted.org/packages/54/92/c6921f17d45e110892899a7a563a925b2273d929959ce2ad89e2525b885b/orjson-3.11.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bb150d529637d541e6af06bbe3d02f5498d628b7f98267ff87647584293ab439", size = 141850, upload-time = "2025-12-06T15:55:15.94Z" },
{ url = "https://files.pythonhosted.org/packages/88/86/cdecb0140a05e1a477b81f24739da93b25070ee01ce7f7242f44a6437594/orjson-3.11.5-cp314-cp314-win32.whl", hash = "sha256:9cc1e55c884921434a84a0c3dd2699eb9f92e7b441d7f53f3941079ec6ce7499", size = 135278, upload-time = "2025-12-06T15:55:17.202Z" },
{ url = "https://files.pythonhosted.org/packages/e4/97/b638d69b1e947d24f6109216997e38922d54dcdcdb1b11c18d7efd2d3c59/orjson-3.11.5-cp314-cp314-win_amd64.whl", hash = "sha256:a4f3cb2d874e03bc7767c8f88adaa1a9a05cecea3712649c3b58589ec7317310", size = 133170, upload-time = "2025-12-06T15:55:18.468Z" },
{ url = "https://files.pythonhosted.org/packages/8f/dd/f4fff4a6fe601b4f8f3ba3aa6da8ac33d17d124491a3b804c662a70e1636/orjson-3.11.5-cp314-cp314-win_arm64.whl", hash = "sha256:38b22f476c351f9a1c43e5b07d8b5a02eb24a6ab8e75f700f7d479d4568346a5", size = 126713, upload-time = "2025-12-06T15:55:19.738Z" },
]

[[package]]
name = "ormsgpack"
version = "1.12.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/12/0c/f1761e21486942ab9bb6feaebc610fa074f7c5e496e6962dea5873348077/ormsgpack-1.12.2.tar.gz", hash = "sha256:944a2233640273bee67521795a73cf1e959538e0dfb7ac635505010455e53b33", size = 39031, upload-time = "2026-01-18T20:55:28.023Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4c/36/16c4b1921c308a92cef3bf6663226ae283395aa0ff6e154f925c32e91ff5/ormsgpack-1.12.2-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7a29d09b64b9694b588ff2f80e9826bdceb3a2b91523c5beae1fab27d5c940e7", size = 378618, upload-time = "2026-01-18T20:55:50.835Z" },
{ url = "https://files.pythonhosted.org/packages/c0/68/468de634079615abf66ed13bb5c34ff71da237213f29294363beeeca5306/ormsgpack-1.12.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b39e629fd2e1c5b2f46f99778450b59454d1f901bc507963168985e79f09c5d", size = 203186, upload-time = "2026-01-18T20:56:11.163Z" },
{ url = "https://files.pythonhosted.org/packages/73/a9/d756e01961442688b7939bacd87ce13bfad7d26ce24f910f6028178b2cc8/ormsgpack-1.12.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:958dcb270d30a7cb633a45ee62b9444433fa571a752d2ca484efdac07480876e", size = 210738, upload-time = "2026-01-18T20:56:09.181Z" },
{ url = "https://files.pythonhosted.org/packages/7b/ba/795b1036888542c9113269a3f5690ab53dd2258c6fb17676ac4bd44fcf94/ormsgpack-1.12.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d379d72b6c5e964851c77cfedfb386e474adee4fd39791c2c5d9efb53505cc", size = 212569, upload-time = "2026-01-18T20:56:06.135Z" },
{ url = "https://files.pythonhosted.org/packages/6c/aa/bff73c57497b9e0cba8837c7e4bcab584b1a6dbc91a5dd5526784a5030c8/ormsgpack-1.12.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8463a3fc5f09832e67bdb0e2fda6d518dc4281b133166146a67f54c08496442e", size = 387166, upload-time = "2026-01-18T20:55:36.738Z" },
{ url = "https://files.pythonhosted.org/packages/d3/cf/f8283cba44bcb7b14f97b6274d449db276b3a86589bdb363169b51bc12de/ormsgpack-1.12.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:eddffb77eff0bad4e67547d67a130604e7e2dfbb7b0cde0796045be4090f35c6", size = 482498, upload-time = "2026-01-18T20:55:29.626Z" },
{ url = "https://files.pythonhosted.org/packages/05/be/71e37b852d723dfcbe952ad04178c030df60d6b78eba26bfd14c9a40575e/ormsgpack-1.12.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fcd55e5f6ba0dbce624942adf9f152062135f991a0126064889f68eb850de0dd", size = 425518, upload-time = "2026-01-18T20:55:49.556Z" },
{ url = "https://files.pythonhosted.org/packages/7a/0c/9803aa883d18c7ef197213cd2cbf73ba76472a11fe100fb7dab2884edf48/ormsgpack-1.12.2-cp312-cp312-win_amd64.whl", hash = "sha256:d024b40828f1dde5654faebd0d824f9cc29ad46891f626272dd5bfd7af2333a4", size = 117462, upload-time = "2026-01-18T20:55:47.726Z" },
{ url = "https://files.pythonhosted.org/packages/c8/9e/029e898298b2cc662f10d7a15652a53e3b525b1e7f07e21fef8536a09bb8/ormsgpack-1.12.2-cp312-cp312-win_arm64.whl", hash = "sha256:da538c542bac7d1c8f3f2a937863dba36f013108ce63e55745941dda4b75dbb6", size = 111559, upload-time = "2026-01-18T20:55:54.273Z" },
{ url = "https://files.pythonhosted.org/packages/eb/29/bb0eba3288c0449efbb013e9c6f58aea79cf5cb9ee1921f8865f04c1a9d7/ormsgpack-1.12.2-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5ea60cb5f210b1cfbad8c002948d73447508e629ec375acb82910e3efa8ff355", size = 378661, upload-time = "2026-01-18T20:55:57.765Z" },
{ url = "https://files.pythonhosted.org/packages/6e/31/5efa31346affdac489acade2926989e019e8ca98129658a183e3add7af5e/ormsgpack-1.12.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3601f19afdbea273ed70b06495e5794606a8b690a568d6c996a90d7255e51c1", size = 203194, upload-time = "2026-01-18T20:56:08.252Z" },
{ url = "https://files.pythonhosted.org/packages/eb/56/d0087278beef833187e0167f8527235ebe6f6ffc2a143e9de12a98b1ce87/ormsgpack-1.12.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:29a9f17a3dac6054c0dce7925e0f4995c727f7c41859adf9b5572180f640d172", size = 210778, upload-time = "2026-01-18T20:55:17.694Z" },
{ url = "https://files.pythonhosted.org/packages/1c/a2/072343e1413d9443e5a252a8eb591c2d5b1bffbe5e7bfc78c069361b92eb/ormsgpack-1.12.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39c1bd2092880e413902910388be8715f70b9f15f20779d44e673033a6146f2d", size = 212592, upload-time = "2026-01-18T20:55:32.747Z" },
{ url = "https://files.pythonhosted.org/packages/a2/8b/a0da3b98a91d41187a63b02dda14267eefc2a74fcb43cc2701066cf1510e/ormsgpack-1.12.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:50b7249244382209877deedeee838aef1542f3d0fc28b8fe71ca9d7e1896a0d7", size = 387164, upload-time = "2026-01-18T20:55:40.853Z" },
{ url = "https://files.pythonhosted.org/packages/19/bb/6d226bc4cf9fc20d8eb1d976d027a3f7c3491e8f08289a2e76abe96a65f3/ormsgpack-1.12.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:5af04800d844451cf102a59c74a841324868d3f1625c296a06cc655c542a6685", size = 482516, upload-time = "2026-01-18T20:55:42.033Z" },
{ url = "https://files.pythonhosted.org/packages/fb/f1/bb2c7223398543dedb3dbf8bb93aaa737b387de61c5feaad6f908841b782/ormsgpack-1.12.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cec70477d4371cd524534cd16472d8b9cc187e0e3043a8790545a9a9b296c258", size = 425539, upload-time = "2026-01-18T20:55:24.727Z" },
{ url = "https://files.pythonhosted.org/packages/7b/e8/0fb45f57a2ada1fed374f7494c8cd55e2f88ccd0ab0a669aa3468716bf5f/ormsgpack-1.12.2-cp313-cp313-win_amd64.whl", hash = "sha256:21f4276caca5c03a818041d637e4019bc84f9d6ca8baa5ea03e5cc8bf56140e9", size = 117459, upload-time = "2026-01-18T20:55:56.876Z" },
{ url = "https://files.pythonhosted.org/packages/7a/d4/0cfeea1e960d550a131001a7f38a5132c7ae3ebde4c82af1f364ccc5d904/ormsgpack-1.12.2-cp313-cp313-win_arm64.whl", hash = "sha256:baca4b6773d20a82e36d6fd25f341064244f9f86a13dead95dd7d7f996f51709", size = 111577, upload-time = "2026-01-18T20:55:43.605Z" },
{ url = "https://files.pythonhosted.org/packages/94/16/24d18851334be09c25e87f74307c84950f18c324a4d3c0b41dabdbf19c29/ormsgpack-1.12.2-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:bc68dd5915f4acf66ff2010ee47c8906dc1cf07399b16f4089f8c71733f6e36c", size = 378717, upload-time = "2026-01-18T20:55:26.164Z" },
{ url = "https://files.pythonhosted.org/packages/b5/a2/88b9b56f83adae8032ac6a6fa7f080c65b3baf9b6b64fd3d37bd202991d4/ormsgpack-1.12.2-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46d084427b4132553940070ad95107266656cb646ea9da4975f85cb1a6676553", size = 203183, upload-time = "2026-01-18T20:55:18.815Z" },
{ url = "https://files.pythonhosted.org/packages/a9/80/43e4555963bf602e5bdc79cbc8debd8b6d5456c00d2504df9775e74b450b/ormsgpack-1.12.2-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c010da16235806cf1d7bc4c96bf286bfa91c686853395a299b3ddb49499a3e13", size = 210814, upload-time = "2026-01-18T20:55:33.973Z" },
{ url = "https://files.pythonhosted.org/packages/78/e1/7cfbf28de8bca6efe7e525b329c31277d1b64ce08dcba723971c241a9d60/ormsgpack-1.12.2-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18867233df592c997154ff942a6503df274b5ac1765215bceba7a231bea2745d", size = 212634, upload-time = "2026-01-18T20:55:28.634Z" },
{ url = "https://files.pythonhosted.org/packages/95/f8/30ae5716e88d792a4e879debee195653c26ddd3964c968594ddef0a3cc7e/ormsgpack-1.12.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b009049086ddc6b8f80c76b3955df1aa22a5fbd7673c525cd63bf91f23122ede", size = 387139, upload-time = "2026-01-18T20:56:02.013Z" },
{ url = "https://files.pythonhosted.org/packages/dc/81/aee5b18a3e3a0e52f718b37ab4b8af6fae0d9d6a65103036a90c2a8ffb5d/ormsgpack-1.12.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:1dcc17d92b6390d4f18f937cf0b99054824a7815818012ddca925d6e01c2e49e", size = 482578, upload-time = "2026-01-18T20:55:35.117Z" },
{ url = "https://files.pythonhosted.org/packages/bd/17/71c9ba472d5d45f7546317f467a5fc941929cd68fb32796ca3d13dcbaec2/ormsgpack-1.12.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f04b5e896d510b07c0ad733d7fce2d44b260c5e6c402d272128f8941984e4285", size = 425539, upload-time = "2026-01-18T20:56:04.009Z" },
{ url = "https://files.pythonhosted.org/packages/2e/a6/ac99cd7fe77e822fed5250ff4b86fa66dd4238937dd178d2299f10b69816/ormsgpack-1.12.2-cp314-cp314-win_amd64.whl", hash = "sha256:ae3aba7eed4ca7cb79fd3436eddd29140f17ea254b91604aa1eb19bfcedb990f", size = 117493, upload-time = "2026-01-18T20:56:07.343Z" },
{ url = "https://files.pythonhosted.org/packages/3a/67/339872846a1ae4592535385a1c1f93614138566d7af094200c9c3b45d1e5/ormsgpack-1.12.2-cp314-cp314-win_arm64.whl", hash = "sha256:118576ea6006893aea811b17429bfc561b4778fad393f5f538c84af70b01260c", size = 111579, upload-time = "2026-01-18T20:55:21.161Z" },
{ url = "https://files.pythonhosted.org/packages/49/c2/6feb972dc87285ad381749d3882d8aecbde9f6ecf908dd717d33d66df095/ormsgpack-1.12.2-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7121b3d355d3858781dc40dafe25a32ff8a8242b9d80c692fd548a4b1f7fd3c8", size = 378721, upload-time = "2026-01-18T20:55:52.12Z" },
{ url = "https://files.pythonhosted.org/packages/a3/9a/900a6b9b413e0f8a471cf07830f9cf65939af039a362204b36bd5b581d8b/ormsgpack-1.12.2-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ee766d2e78251b7a63daf1cddfac36a73562d3ddef68cacfb41b2af64698033", size = 203170, upload-time = "2026-01-18T20:55:44.469Z" },
{ url = "https://files.pythonhosted.org/packages/87/4c/27a95466354606b256f24fad464d7c97ab62bce6cc529dd4673e1179b8fb/ormsgpack-1.12.2-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292410a7d23de9b40444636b9b8f1e4e4b814af7f1ef476e44887e52a123f09d", size = 212816, upload-time = "2026-01-18T20:55:23.501Z" },
{ url = "https://files.pythonhosted.org/packages/73/cd/29cee6007bddf7a834e6cd6f536754c0535fcb939d384f0f37a38b1cddb8/ormsgpack-1.12.2-cp314-cp314t-win_amd64.whl", hash = "sha256:837dd316584485b72ef451d08dd3e96c4a11d12e4963aedb40e08f89685d8ec2", size = 117232, upload-time = "2026-01-18T20:55:45.448Z" },
]

[[package]]
name = "packaging"
version = "25.0"
@@ -3098,6 +3370,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
]

[[package]]
name = "ply"
version = "3.11"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e5/69/882ee5c9d017149285cab114ebeab373308ef0f874fcdac9beb90e0ac4da/ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3", size = 159130, upload-time = "2018-02-15T19:01:31.097Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a3/58/35da89ee790598a0700ea49b2a66594140f44dec458c07e8e3d4979137fc/ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce", size = 49567, upload-time = "2018-02-15T19:01:27.172Z" },
]

[[package]]
name = "preshed"
version = "3.0.12"
@@ -3288,6 +3569,31 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608, upload-time = "2025-11-02T12:26:36.136Z" },
]

[[package]]
name = "psycopg"
version = "3.3.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions", marker = "python_full_version < '3.13'" },
{ name = "tzdata", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e0/1a/7d9ef4fdc13ef7f15b934c393edc97a35c281bb7d3c3329fbfcbe915a7c2/psycopg-3.3.2.tar.gz", hash = "sha256:707a67975ee214d200511177a6a80e56e654754c9afca06a7194ea6bbfde9ca7", size = 165630, upload-time = "2025-12-06T17:34:53.899Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8c/51/2779ccdf9305981a06b21a6b27e8547c948d85c41c76ff434192784a4c93/psycopg-3.3.2-py3-none-any.whl", hash = "sha256:3e94bc5f4690247d734599af56e51bae8e0db8e4311ea413f801fef82b14a99b", size = 212774, upload-time = "2025-12-06T17:31:41.414Z" },
]

[[package]]
name = "psycopg-pool"
version = "3.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/56/9a/9470d013d0d50af0da9c4251614aeb3c1823635cab3edc211e3839db0bcf/psycopg_pool-3.3.0.tar.gz", hash = "sha256:fa115eb2860bd88fce1717d75611f41490dec6135efb619611142b24da3f6db5", size = 31606, upload-time = "2025-12-01T11:34:33.11Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e7/c3/26b8a0908a9db249de3b4169692e1c7c19048a9bc41a4d3209cee7dbb758/psycopg_pool-3.3.0-py3-none-any.whl", hash = "sha256:2e44329155c410b5e8666372db44276a8b1ebd8c90f1c3026ebba40d4bc81063", size = 39995, upload-time = "2025-12-01T11:34:29.761Z" },
]

[[package]]
name = "py-cpuinfo"
version = "9.0.0"
@@ -6385,6 +6691,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" },
]

[[package]]
name = "python-ulid"
version = "3.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/40/7e/0d6c82b5ccc71e7c833aed43d9e8468e1f2ff0be1b3f657a6fcafbb8433d/python_ulid-3.1.0.tar.gz", hash = "sha256:ff0410a598bc5f6b01b602851a3296ede6f91389f913a5d5f8c496003836f636", size = 93175, upload-time = "2025-08-18T16:09:26.305Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6c/a0/4ed6632b70a52de845df056654162acdebaf97c20e3212c559ac43e7216e/python_ulid-3.1.0-py3-none-any.whl", hash = "sha256:e2cdc979c8c877029b4b7a38a6fba3bc4578e4f109a308419ff4d3ccf0a46619", size = 11577, upload-time = "2025-08-18T16:09:25.047Z" },
]

[[package]]
name = "python-xlib"
version = "0.33"
@@ -6543,6 +6858,34 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
]

[[package]]
name = "redis"
version = "7.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/43/c8/983d5c6579a411d8a99bc5823cc5712768859b5ce2c8afe1a65b37832c81/redis-7.1.0.tar.gz", hash = "sha256:b1cc3cfa5a2cb9c2ab3ba700864fb0ad75617b41f01352ce5779dabf6d5f9c3c", size = 4796669, upload-time = "2025-11-19T15:54:39.961Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/89/f0/8956f8a86b20d7bb9d6ac0187cf4cd54d8065bc9a1a09eb8011d4d326596/redis-7.1.0-py3-none-any.whl", hash = "sha256:23c52b208f92b56103e17c5d06bdc1a6c2c0b3106583985a76a18f83b265de2b", size = 354159, upload-time = "2025-11-19T15:54:38.064Z" },
]

[[package]]
name = "redisvl"
version = "0.13.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "jsonpath-ng" },
{ name = "ml-dtypes" },
{ name = "numpy" },
{ name = "pydantic" },
{ name = "python-ulid" },
{ name = "pyyaml" },
{ name = "redis" },
{ name = "tenacity" },
]
sdist = { url = "https://files.pythonhosted.org/packages/81/d6/8f3235b272e3a2370698d7524aad2dec15f53c5be5d6726ba41056844f69/redisvl-0.13.2.tar.gz", hash = "sha256:f34c4350922ac469c45d90b5db65c49950e6aa8706331931b000f631ff9a0f4a", size = 737736, upload-time = "2025-12-19T09:22:07.787Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b2/93/81ea5c45637ce7fe2fdaf214d5e1b91afe96a472edeb9b659e24d3710dfb/redisvl-0.13.2-py3-none-any.whl", hash = "sha256:dd998c6acc54f13526d464ad6b6e6f0c4cf6985fb2c7a1655bdf8ed8e57a4c01", size = 192760, upload-time = "2025-12-19T09:22:06.301Z" },
]

[[package]]
name = "regex"
version = "2026.1.15"
@@ -6659,6 +7002,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/3b/5d/63d4ae3b9daea098d5d6f5da83984853c1bbacd5dc826764b249fe119d24/requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36", size = 24179, upload-time = "2024-03-22T20:32:28.055Z" },
]

[[package]]
name = "requests-toolbelt"
version = "1.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f3/61/d7545dafb7ac2230c70d38d31cbfe4cc64f7144dc41f6e4e4b78ecd9f5bb/requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6", size = 206888, upload-time = "2023-05-01T04:11:33.229Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size = 54481, upload-time = "2023-05-01T04:11:28.427Z" },
]

[[package]]
name = "rich"
version = "14.2.0"
@@ -7265,6 +7620,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252, upload-time = "2022-10-06T17:21:44.262Z" },
]

[[package]]
name = "tenacity"
version = "9.1.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/0a/d4/2b0cd0fe285e14b36db076e78c93766ff1d529d70408bd1d2a5a84f1d929/tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", size = 48036, upload-time = "2025-04-02T08:25:09.966Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248, upload-time = "2025-04-02T08:25:07.678Z" },
]

[[package]]
name = "tensorboardx"
version = "2.6.4"
@@ -7744,6 +8108,28 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/6d/b9/4095b668ea3678bf6a0af005527f39de12fb026516fb3df17495a733b7f8/urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd", size = 131182, upload-time = "2025-12-11T15:56:38.584Z" },
]

[[package]]
name = "uuid-utils"
version = "0.14.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/57/7c/3a926e847516e67bc6838634f2e54e24381105b4e80f9338dc35cca0086b/uuid_utils-0.14.0.tar.gz", hash = "sha256:fc5bac21e9933ea6c590433c11aa54aaca599f690c08069e364eb13a12f670b4", size = 22072, upload-time = "2026-01-20T20:37:15.729Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/42/42d003f4a99ddc901eef2fd41acb3694163835e037fb6dde79ad68a72342/uuid_utils-0.14.0-cp39-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:f6695c0bed8b18a904321e115afe73b34444bc8451d0ce3244a1ec3b84deb0e5", size = 601786, upload-time = "2026-01-20T20:37:09.843Z" },
{ url = "https://files.pythonhosted.org/packages/96/e6/775dfb91f74b18f7207e3201eb31ee666d286579990dc69dd50db2d92813/uuid_utils-0.14.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:4f0a730bbf2d8bb2c11b93e1005e91769f2f533fa1125ed1f00fd15b6fcc732b", size = 303943, upload-time = "2026-01-20T20:37:18.767Z" },
{ url = "https://files.pythonhosted.org/packages/17/82/ea5f5e85560b08a1f30cdc65f75e76494dc7aba9773f679e7eaa27370229/uuid_utils-0.14.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40ce3fd1a4fdedae618fc3edc8faf91897012469169d600133470f49fd699ed3", size = 340467, upload-time = "2026-01-20T20:37:11.794Z" },
{ url = "https://files.pythonhosted.org/packages/ca/33/54b06415767f4569882e99b6470c6c8eeb97422686a6d432464f9967fd91/uuid_utils-0.14.0-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:09ae4a98416a440e78f7d9543d11b11cae4bab538b7ed94ec5da5221481748f2", size = 346333, upload-time = "2026-01-20T20:37:12.818Z" },
{ url = "https://files.pythonhosted.org/packages/cb/10/a6bce636b8f95e65dc84bf4a58ce8205b8e0a2a300a38cdbc83a3f763d27/uuid_utils-0.14.0-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:971e8c26b90d8ae727e7f2ac3ee23e265971d448b3672882f2eb44828b2b8c3e", size = 470859, upload-time = "2026-01-20T20:37:01.512Z" },
{ url = "https://files.pythonhosted.org/packages/8a/27/84121c51ea72f013f0e03d0886bcdfa96b31c9b83c98300a7bd5cc4fa191/uuid_utils-0.14.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5cde1fa82804a8f9d2907b7aec2009d440062c63f04abbdb825fce717a5e860", size = 341988, upload-time = "2026-01-20T20:37:22.881Z" },
{ url = "https://files.pythonhosted.org/packages/90/a4/01c1c7af5e6a44f20b40183e8dac37d6ed83e7dc9e8df85370a15959b804/uuid_utils-0.14.0-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c7343862a2359e0bd48a7f3dfb5105877a1728677818bb694d9f40703264a2db", size = 365784, upload-time = "2026-01-20T20:37:10.808Z" },
{ url = "https://files.pythonhosted.org/packages/04/f0/65ee43ec617b8b6b1bf2a5aecd56a069a08cca3d9340c1de86024331bde3/uuid_utils-0.14.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c51e4818fdb08ccec12dc7083a01f49507b4608770a0ab22368001685d59381b", size = 523750, upload-time = "2026-01-20T20:37:06.152Z" },
{ url = "https://files.pythonhosted.org/packages/95/d3/6bf503e3f135a5dfe705a65e6f89f19bccd55ac3fb16cb5d3ec5ba5388b8/uuid_utils-0.14.0-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:181bbcccb6f93d80a8504b5bd47b311a1c31395139596edbc47b154b0685b533", size = 615818, upload-time = "2026-01-20T20:37:21.816Z" },
{ url = "https://files.pythonhosted.org/packages/df/6c/99937dd78d07f73bba831c8dc9469dfe4696539eba2fc269ae1b92752f9e/uuid_utils-0.14.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:5c8ae96101c3524ba8dbf762b6f05e9e9d896544786c503a727c5bf5cb9af1a7", size = 580831, upload-time = "2026-01-20T20:37:19.691Z" },
{ url = "https://files.pythonhosted.org/packages/44/fa/bbc9e2c25abd09a293b9b097a0d8fc16acd6a92854f0ec080f1ea7ad8bb3/uuid_utils-0.14.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:00ac3c6edfdaff7e1eed041f4800ae09a3361287be780d7610a90fdcde9befdc", size = 546333, upload-time = "2026-01-20T20:37:03.117Z" },
{ url = "https://files.pythonhosted.org/packages/e7/9b/e5e99b324b1b5f0c62882230455786df0bc66f67eff3b452447e703f45d2/uuid_utils-0.14.0-cp39-abi3-win32.whl", hash = "sha256:ec2fd80adf8e0e6589d40699e6f6df94c93edcc16dd999be0438dd007c77b151", size = 177319, upload-time = "2026-01-20T20:37:04.208Z" },
{ url = "https://files.pythonhosted.org/packages/d3/28/2c7d417ea483b6ff7820c948678fdf2ac98899dc7e43bb15852faa95acaf/uuid_utils-0.14.0-cp39-abi3-win_amd64.whl", hash = "sha256:efe881eb43a5504fad922644cb93d725fd8a6a6d949bd5a4b4b7d1a1587c7fd1", size = 182566, upload-time = "2026-01-20T20:37:16.868Z" },
{ url = "https://files.pythonhosted.org/packages/b8/86/49e4bdda28e962fbd7266684171ee29b3d92019116971d58783e51770745/uuid_utils-0.14.0-cp39-abi3-win_arm64.whl", hash = "sha256:32b372b8fd4ebd44d3a219e093fe981af4afdeda2994ee7db208ab065cfcd080", size = 182809, upload-time = "2026-01-20T20:37:05.139Z" },
]

[[package]]
name = "wasabi"
version = "1.1.3"
@@ -7941,6 +8327,89 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" },
]

[[package]]
name = "xxhash"
version = "3.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, upload-time = "2025-10-02T14:34:34.622Z" },
{ url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, upload-time = "2025-10-02T14:34:36.043Z" },
{ url = "https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" },
{ url = "https://files.pythonhosted.org/packages/4c/ed/6224ba353690d73af7a3f1c7cdb1fc1b002e38f783cb991ae338e1eb3d79/xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2", size = 212914, upload-time = "2025-10-02T14:34:38.6Z" },
{ url = "https://files.pythonhosted.org/packages/38/86/fb6b6130d8dd6b8942cc17ab4d90e223653a89aa32ad2776f8af7064ed13/xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa", size = 212163, upload-time = "2025-10-02T14:34:39.872Z" },
{ url = "https://files.pythonhosted.org/packages/ee/dc/e84875682b0593e884ad73b2d40767b5790d417bde603cceb6878901d647/xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0", size = 445411, upload-time = "2025-10-02T14:34:41.569Z" },
{ url = "https://files.pythonhosted.org/packages/11/4f/426f91b96701ec2f37bb2b8cec664eff4f658a11f3fa9d94f0a887ea6d2b/xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2", size = 193883, upload-time = "2025-10-02T14:34:43.249Z" },
{ url = "https://files.pythonhosted.org/packages/53/5a/ddbb83eee8e28b778eacfc5a85c969673e4023cdeedcfcef61f36731610b/xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9", size = 210392, upload-time = "2025-10-02T14:34:45.042Z" },
{ url = "https://files.pythonhosted.org/packages/1e/c2/ff69efd07c8c074ccdf0a4f36fcdd3d27363665bcdf4ba399abebe643465/xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e", size = 197898, upload-time = "2025-10-02T14:34:46.302Z" },
{ url = "https://files.pythonhosted.org/packages/58/ca/faa05ac19b3b622c7c9317ac3e23954187516298a091eb02c976d0d3dd45/xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374", size = 210655, upload-time = "2025-10-02T14:34:47.571Z" },
{ url = "https://files.pythonhosted.org/packages/d4/7a/06aa7482345480cc0cb597f5c875b11a82c3953f534394f620b0be2f700c/xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d", size = 414001, upload-time = "2025-10-02T14:34:49.273Z" },
{ url = "https://files.pythonhosted.org/packages/23/07/63ffb386cd47029aa2916b3d2f454e6cc5b9f5c5ada3790377d5430084e7/xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae", size = 191431, upload-time = "2025-10-02T14:34:50.798Z" },
{ url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl", hash = "sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb", size = 30617, upload-time = "2025-10-02T14:34:51.954Z" },
{ url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c", size = 31534, upload-time = "2025-10-02T14:34:53.276Z" },
{ url = "https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829", size = 27876, upload-time = "2025-10-02T14:34:54.371Z" },
{ url = "https://files.pythonhosted.org/packages/33/76/35d05267ac82f53ae9b0e554da7c5e281ee61f3cad44c743f0fcd354f211/xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec", size = 32738, upload-time = "2025-10-02T14:34:55.839Z" },
{ url = "https://files.pythonhosted.org/packages/31/a8/3fbce1cd96534a95e35d5120637bf29b0d7f5d8fa2f6374e31b4156dd419/xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1", size = 30821, upload-time = "2025-10-02T14:34:57.219Z" },
{ url = "https://files.pythonhosted.org/packages/0c/ea/d387530ca7ecfa183cb358027f1833297c6ac6098223fd14f9782cd0015c/xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6", size = 194127, upload-time = "2025-10-02T14:34:59.21Z" },
{ url = "https://files.pythonhosted.org/packages/ba/0c/71435dcb99874b09a43b8d7c54071e600a7481e42b3e3ce1eb5226a5711a/xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263", size = 212975, upload-time = "2025-10-02T14:35:00.816Z" },
{ url = "https://files.pythonhosted.org/packages/84/7a/c2b3d071e4bb4a90b7057228a99b10d51744878f4a8a6dd643c8bd897620/xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546", size = 212241, upload-time = "2025-10-02T14:35:02.207Z" },
{ url = "https://files.pythonhosted.org/packages/81/5f/640b6eac0128e215f177df99eadcd0f1b7c42c274ab6a394a05059694c5a/xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89", size = 445471, upload-time = "2025-10-02T14:35:03.61Z" },
{ url = "https://files.pythonhosted.org/packages/5e/1e/3c3d3ef071b051cc3abbe3721ffb8365033a172613c04af2da89d5548a87/xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d", size = 193936, upload-time = "2025-10-02T14:35:05.013Z" },
{ url = "https://files.pythonhosted.org/packages/2c/bd/4a5f68381939219abfe1c22a9e3a5854a4f6f6f3c4983a87d255f21f2e5d/xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7", size = 210440, upload-time = "2025-10-02T14:35:06.239Z" },
{ url = "https://files.pythonhosted.org/packages/eb/37/b80fe3d5cfb9faff01a02121a0f4d565eb7237e9e5fc66e73017e74dcd36/xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db", size = 197990, upload-time = "2025-10-02T14:35:07.735Z" },
{ url = "https://files.pythonhosted.org/packages/d7/fd/2c0a00c97b9e18f72e1f240ad4e8f8a90fd9d408289ba9c7c495ed7dc05c/xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42", size = 210689, upload-time = "2025-10-02T14:35:09.438Z" },
{ url = "https://files.pythonhosted.org/packages/93/86/5dd8076a926b9a95db3206aba20d89a7fc14dd5aac16e5c4de4b56033140/xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11", size = 414068, upload-time = "2025-10-02T14:35:11.162Z" },
{ url = "https://files.pythonhosted.org/packages/af/3c/0bb129170ee8f3650f08e993baee550a09593462a5cddd8e44d0011102b1/xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd", size = 191495, upload-time = "2025-10-02T14:35:12.971Z" },
{ url = "https://files.pythonhosted.org/packages/e9/3a/6797e0114c21d1725e2577508e24006fd7ff1d8c0c502d3b52e45c1771d8/xxhash-3.6.0-cp313-cp313-win32.whl", hash = "sha256:2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799", size = 30620, upload-time = "2025-10-02T14:35:14.129Z" },
{ url = "https://files.pythonhosted.org/packages/86/15/9bc32671e9a38b413a76d24722a2bf8784a132c043063a8f5152d390b0f9/xxhash-3.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392", size = 31542, upload-time = "2025-10-02T14:35:15.21Z" },
{ url = "https://files.pythonhosted.org/packages/39/c5/cc01e4f6188656e56112d6a8e0dfe298a16934b8c47a247236549a3f7695/xxhash-3.6.0-cp313-cp313-win_arm64.whl", hash = "sha256:457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6", size = 27880, upload-time = "2025-10-02T14:35:16.315Z" },
{ url = "https://files.pythonhosted.org/packages/f3/30/25e5321c8732759e930c555176d37e24ab84365482d257c3b16362235212/xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702", size = 32956, upload-time = "2025-10-02T14:35:17.413Z" },
{ url = "https://files.pythonhosted.org/packages/9f/3c/0573299560d7d9f8ab1838f1efc021a280b5ae5ae2e849034ef3dee18810/xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db", size = 31072, upload-time = "2025-10-02T14:35:18.844Z" },
{ url = "https://files.pythonhosted.org/packages/7a/1c/52d83a06e417cd9d4137722693424885cc9878249beb3a7c829e74bf7ce9/xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54", size = 196409, upload-time = "2025-10-02T14:35:20.31Z" },
{ url = "https://files.pythonhosted.org/packages/e3/8e/c6d158d12a79bbd0b878f8355432075fc82759e356ab5a111463422a239b/xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f", size = 215736, upload-time = "2025-10-02T14:35:21.616Z" },
{ url = "https://files.pythonhosted.org/packages/bc/68/c4c80614716345d55071a396cf03d06e34b5f4917a467faf43083c995155/xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5", size = 214833, upload-time = "2025-10-02T14:35:23.32Z" },
{ url = "https://files.pythonhosted.org/packages/7e/e9/ae27c8ffec8b953efa84c7c4a6c6802c263d587b9fc0d6e7cea64e08c3af/xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1", size = 448348, upload-time = "2025-10-02T14:35:25.111Z" },
{ url = "https://files.pythonhosted.org/packages/d7/6b/33e21afb1b5b3f46b74b6bd1913639066af218d704cc0941404ca717fc57/xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee", size = 196070, upload-time = "2025-10-02T14:35:26.586Z" },
{ url = "https://files.pythonhosted.org/packages/96/b6/fcabd337bc5fa624e7203aa0fa7d0c49eed22f72e93229431752bddc83d9/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd", size = 212907, upload-time = "2025-10-02T14:35:28.087Z" },
{ url = "https://files.pythonhosted.org/packages/4b/d3/9ee6160e644d660fcf176c5825e61411c7f62648728f69c79ba237250143/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729", size = 200839, upload-time = "2025-10-02T14:35:29.857Z" },
{ url = "https://files.pythonhosted.org/packages/0d/98/e8de5baa5109394baf5118f5e72ab21a86387c4f89b0e77ef3e2f6b0327b/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292", size = 213304, upload-time = "2025-10-02T14:35:31.222Z" },
{ url = "https://files.pythonhosted.org/packages/7b/1d/71056535dec5c3177eeb53e38e3d367dd1d16e024e63b1cee208d572a033/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf", size = 416930, upload-time = "2025-10-02T14:35:32.517Z" },
{ url = "https://files.pythonhosted.org/packages/dc/6c/5cbde9de2cd967c322e651c65c543700b19e7ae3e0aae8ece3469bf9683d/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033", size = 193787, upload-time = "2025-10-02T14:35:33.827Z" },
{ url = "https://files.pythonhosted.org/packages/19/fa/0172e350361d61febcea941b0cc541d6e6c8d65d153e85f850a7b256ff8a/xxhash-3.6.0-cp313-cp313t-win32.whl", hash = "sha256:1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec", size = 30916, upload-time = "2025-10-02T14:35:35.107Z" },
{ url = "https://files.pythonhosted.org/packages/ad/e6/e8cf858a2b19d6d45820f072eff1bea413910592ff17157cabc5f1227a16/xxhash-3.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8", size = 31799, upload-time = "2025-10-02T14:35:36.165Z" },
{ url = "https://files.pythonhosted.org/packages/56/15/064b197e855bfb7b343210e82490ae672f8bc7cdf3ddb02e92f64304ee8a/xxhash-3.6.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746", size = 28044, upload-time = "2025-10-02T14:35:37.195Z" },
{ url = "https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e", size = 32754, upload-time = "2025-10-02T14:35:38.245Z" },
{ url = "https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405", size = 30846, upload-time = "2025-10-02T14:35:39.6Z" },
{ url = "https://files.pythonhosted.org/packages/fe/71/8bc5be2bb00deb5682e92e8da955ebe5fa982da13a69da5a40a4c8db12fb/xxhash-3.6.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:016e9190af8f0a4e3741343777710e3d5717427f175adfdc3e72508f59e2a7f3", size = 194343, upload-time = "2025-10-02T14:35:40.69Z" },
{ url = "https://files.pythonhosted.org/packages/e7/3b/52badfb2aecec2c377ddf1ae75f55db3ba2d321c5e164f14461c90837ef3/xxhash-3.6.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f6f72232f849eb9d0141e2ebe2677ece15adfd0fa599bc058aad83c714bb2c6", size = 213074, upload-time = "2025-10-02T14:35:42.29Z" },
{ url = "https://files.pythonhosted.org/packages/a2/2b/ae46b4e9b92e537fa30d03dbc19cdae57ed407e9c26d163895e968e3de85/xxhash-3.6.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:63275a8aba7865e44b1813d2177e0f5ea7eadad3dd063a21f7cf9afdc7054063", size = 212388, upload-time = "2025-10-02T14:35:43.929Z" },
{ url = "https://files.pythonhosted.org/packages/f5/80/49f88d3afc724b4ac7fbd664c8452d6db51b49915be48c6982659e0e7942/xxhash-3.6.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cd01fa2aa00d8b017c97eb46b9a794fbdca53fc14f845f5a328c71254b0abb7", size = 445614, upload-time = "2025-10-02T14:35:45.216Z" },
{ url = "https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b", size = 194024, upload-time = "2025-10-02T14:35:46.959Z" },
{ url = "https://files.pythonhosted.org/packages/78/d1/8e225ff7113bf81545cfdcd79eef124a7b7064a0bba53605ff39590b95c2/xxhash-3.6.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c6e193e9f56e4ca4923c61238cdaced324f0feac782544eb4c6d55ad5cc99ddd", size = 210541, upload-time = "2025-10-02T14:35:48.301Z" },
{ url = "https://files.pythonhosted.org/packages/6f/58/0f89d149f0bad89def1a8dd38feb50ccdeb643d9797ec84707091d4cb494/xxhash-3.6.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9176dcaddf4ca963d4deb93866d739a343c01c969231dbe21680e13a5d1a5bf0", size = 198305, upload-time = "2025-10-02T14:35:49.584Z" },
{ url = "https://files.pythonhosted.org/packages/11/38/5eab81580703c4df93feb5f32ff8fa7fe1e2c51c1f183ee4e48d4bb9d3d7/xxhash-3.6.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c1ce4009c97a752e682b897aa99aef84191077a9433eb237774689f14f8ec152", size = 210848, upload-time = "2025-10-02T14:35:50.877Z" },
{ url = "https://files.pythonhosted.org/packages/5e/6b/953dc4b05c3ce678abca756416e4c130d2382f877a9c30a20d08ee6a77c0/xxhash-3.6.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:8cb2f4f679b01513b7adbb9b1b2f0f9cdc31b70007eaf9d59d0878809f385b11", size = 414142, upload-time = "2025-10-02T14:35:52.15Z" },
{ url = "https://files.pythonhosted.org/packages/08/a9/238ec0d4e81a10eb5026d4a6972677cbc898ba6c8b9dbaec12ae001b1b35/xxhash-3.6.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:653a91d7c2ab54a92c19ccf43508b6a555440b9be1bc8be553376778be7f20b5", size = 191547, upload-time = "2025-10-02T14:35:53.547Z" },
{ url = "https://files.pythonhosted.org/packages/f1/ee/3cf8589e06c2164ac77c3bf0aa127012801128f1feebf2a079272da5737c/xxhash-3.6.0-cp314-cp314-win32.whl", hash = "sha256:a756fe893389483ee8c394d06b5ab765d96e68fbbfe6fde7aa17e11f5720559f", size = 31214, upload-time = "2025-10-02T14:35:54.746Z" },
{ url = "https://files.pythonhosted.org/packages/02/5d/a19552fbc6ad4cb54ff953c3908bbc095f4a921bc569433d791f755186f1/xxhash-3.6.0-cp314-cp314-win_amd64.whl", hash = "sha256:39be8e4e142550ef69629c9cd71b88c90e9a5db703fecbcf265546d9536ca4ad", size = 32290, upload-time = "2025-10-02T14:35:55.791Z" },
{ url = "https://files.pythonhosted.org/packages/b1/11/dafa0643bc30442c887b55baf8e73353a344ee89c1901b5a5c54a6c17d39/xxhash-3.6.0-cp314-cp314-win_arm64.whl", hash = "sha256:25915e6000338999236f1eb68a02a32c3275ac338628a7eaa5a269c401995679", size = 28795, upload-time = "2025-10-02T14:35:57.162Z" },
{ url = "https://files.pythonhosted.org/packages/2c/db/0e99732ed7f64182aef4a6fb145e1a295558deec2a746265dcdec12d191e/xxhash-3.6.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c5294f596a9017ca5a3e3f8884c00b91ab2ad2933cf288f4923c3fd4346cf3d4", size = 32955, upload-time = "2025-10-02T14:35:58.267Z" },
{ url = "https://files.pythonhosted.org/packages/55/f4/2a7c3c68e564a099becfa44bb3d398810cc0ff6749b0d3cb8ccb93f23c14/xxhash-3.6.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1cf9dcc4ab9cff01dfbba78544297a3a01dafd60f3bde4e2bfd016cf7e4ddc67", size = 31072, upload-time = "2025-10-02T14:35:59.382Z" },
{ url = "https://files.pythonhosted.org/packages/c6/d9/72a29cddc7250e8a5819dad5d466facb5dc4c802ce120645630149127e73/xxhash-3.6.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:01262da8798422d0685f7cef03b2bd3f4f46511b02830861df548d7def4402ad", size = 196579, upload-time = "2025-10-02T14:36:00.838Z" },
{ url = "https://files.pythonhosted.org/packages/63/93/b21590e1e381040e2ca305a884d89e1c345b347404f7780f07f2cdd47ef4/xxhash-3.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51a73fb7cb3a3ead9f7a8b583ffd9b8038e277cdb8cb87cf890e88b3456afa0b", size = 215854, upload-time = "2025-10-02T14:36:02.207Z" },
{ url = "https://files.pythonhosted.org/packages/ce/b8/edab8a7d4fa14e924b29be877d54155dcbd8b80be85ea00d2be3413a9ed4/xxhash-3.6.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b9c6df83594f7df8f7f708ce5ebeacfc69f72c9fbaaababf6cf4758eaada0c9b", size = 214965, upload-time = "2025-10-02T14:36:03.507Z" },
{ url = "https://files.pythonhosted.org/packages/27/67/dfa980ac7f0d509d54ea0d5a486d2bb4b80c3f1bb22b66e6a05d3efaf6c0/xxhash-3.6.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:627f0af069b0ea56f312fd5189001c24578868643203bca1abbc2c52d3a6f3ca", size = 448484, upload-time = "2025-10-02T14:36:04.828Z" },
{ url = "https://files.pythonhosted.org/packages/8c/63/8ffc2cc97e811c0ca5d00ab36604b3ea6f4254f20b7bc658ca825ce6c954/xxhash-3.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa912c62f842dfd013c5f21a642c9c10cd9f4c4e943e0af83618b4a404d9091a", size = 196162, upload-time = "2025-10-02T14:36:06.182Z" },
{ url = "https://files.pythonhosted.org/packages/4b/77/07f0e7a3edd11a6097e990f6e5b815b6592459cb16dae990d967693e6ea9/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b465afd7909db30168ab62afe40b2fcf79eedc0b89a6c0ab3123515dc0df8b99", size = 213007, upload-time = "2025-10-02T14:36:07.733Z" },
{ url = "https://files.pythonhosted.org/packages/ae/d8/bc5fa0d152837117eb0bef6f83f956c509332ce133c91c63ce07ee7c4873/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a881851cf38b0a70e7c4d3ce81fc7afd86fbc2a024f4cfb2a97cf49ce04b75d3", size = 200956, upload-time = "2025-10-02T14:36:09.106Z" },
{ url = "https://files.pythonhosted.org/packages/26/a5/d749334130de9411783873e9b98ecc46688dad5db64ca6e04b02acc8b473/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9b3222c686a919a0f3253cfc12bb118b8b103506612253b5baeaac10d8027cf6", size = 213401, upload-time = "2025-10-02T14:36:10.585Z" },
{ url = "https://files.pythonhosted.org/packages/89/72/abed959c956a4bfc72b58c0384bb7940663c678127538634d896b1195c10/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:c5aa639bc113e9286137cec8fadc20e9cd732b2cc385c0b7fa673b84fc1f2a93", size = 417083, upload-time = "2025-10-02T14:36:12.276Z" },
{ url = "https://files.pythonhosted.org/packages/0c/b3/62fd2b586283b7d7d665fb98e266decadf31f058f1cf6c478741f68af0cb/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5c1343d49ac102799905e115aee590183c3921d475356cb24b4de29a4bc56518", size = 193913, upload-time = "2025-10-02T14:36:14.025Z" },
{ url = "https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl", hash = "sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119", size = 31586, upload-time = "2025-10-02T14:36:15.603Z" },
{ url = "https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f", size = 32526, upload-time = "2025-10-02T14:36:16.708Z" },
{ url = "https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" },
]

[[package]]
name = "yarl"
version = "1.22.0"
@@ -8058,3 +8527,60 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/95/db/4f2eebf73c0e2df293a366a1d176cd315a74ce0b00f83826a7ba9ddd1ab3/zopfli-0.4.0-cp310-abi3-win32.whl", hash = "sha256:03181d48e719fcb6cf8340189c61e8f9883d8bbbdf76bf5212a74457f7d083c1", size = 83655, upload-time = "2025-11-07T17:00:51.797Z" },
{ url = "https://files.pythonhosted.org/packages/24/f6/bd80c5278b1185dc41155c77bc61bfe1d817254a7f2115f66aa69a270b89/zopfli-0.4.0-cp310-abi3-win_amd64.whl", hash = "sha256:f94e4dd7d76b4fe9f5d9229372be20d7f786164eea5152d1af1c34298c3d5975", size = 100824, upload-time = "2025-11-07T17:00:52.658Z" },
]

[[package]]
name = "zstandard"
version = "0.25.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513, upload-time = "2025-09-14T22:15:54.002Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/82/fc/f26eb6ef91ae723a03e16eddb198abcfce2bc5a42e224d44cc8b6765e57e/zstandard-0.25.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b3c3a3ab9daa3eed242d6ecceead93aebbb8f5f84318d82cee643e019c4b73b", size = 795738, upload-time = "2025-09-14T22:16:56.237Z" },
{ url = "https://files.pythonhosted.org/packages/aa/1c/d920d64b22f8dd028a8b90e2d756e431a5d86194caa78e3819c7bf53b4b3/zstandard-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:913cbd31a400febff93b564a23e17c3ed2d56c064006f54efec210d586171c00", size = 640436, upload-time = "2025-09-14T22:16:57.774Z" },
{ url = "https://files.pythonhosted.org/packages/53/6c/288c3f0bd9fcfe9ca41e2c2fbfd17b2097f6af57b62a81161941f09afa76/zstandard-0.25.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:011d388c76b11a0c165374ce660ce2c8efa8e5d87f34996aa80f9c0816698b64", size = 5343019, upload-time = "2025-09-14T22:16:59.302Z" },
{ url = "https://files.pythonhosted.org/packages/1e/15/efef5a2f204a64bdb5571e6161d49f7ef0fffdbca953a615efbec045f60f/zstandard-0.25.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6dffecc361d079bb48d7caef5d673c88c8988d3d33fb74ab95b7ee6da42652ea", size = 5063012, upload-time = "2025-09-14T22:17:01.156Z" },
{ url = "https://files.pythonhosted.org/packages/b7/37/a6ce629ffdb43959e92e87ebdaeebb5ac81c944b6a75c9c47e300f85abdf/zstandard-0.25.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7149623bba7fdf7e7f24312953bcf73cae103db8cae49f8154dd1eadc8a29ecb", size = 5394148, upload-time = "2025-09-14T22:17:03.091Z" },
{ url = "https://files.pythonhosted.org/packages/e3/79/2bf870b3abeb5c070fe2d670a5a8d1057a8270f125ef7676d29ea900f496/zstandard-0.25.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6a573a35693e03cf1d67799fd01b50ff578515a8aeadd4595d2a7fa9f3ec002a", size = 5451652, upload-time = "2025-09-14T22:17:04.979Z" },
{ url = "https://files.pythonhosted.org/packages/53/60/7be26e610767316c028a2cbedb9a3beabdbe33e2182c373f71a1c0b88f36/zstandard-0.25.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5a56ba0db2d244117ed744dfa8f6f5b366e14148e00de44723413b2f3938a902", size = 5546993, upload-time = "2025-09-14T22:17:06.781Z" },
{ url = "https://files.pythonhosted.org/packages/85/c7/3483ad9ff0662623f3648479b0380d2de5510abf00990468c286c6b04017/zstandard-0.25.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:10ef2a79ab8e2974e2075fb984e5b9806c64134810fac21576f0668e7ea19f8f", size = 5046806, upload-time = "2025-09-14T22:17:08.415Z" },
{ url = "https://files.pythonhosted.org/packages/08/b3/206883dd25b8d1591a1caa44b54c2aad84badccf2f1de9e2d60a446f9a25/zstandard-0.25.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aaf21ba8fb76d102b696781bddaa0954b782536446083ae3fdaa6f16b25a1c4b", size = 5576659, upload-time = "2025-09-14T22:17:10.164Z" },
{ url = "https://files.pythonhosted.org/packages/9d/31/76c0779101453e6c117b0ff22565865c54f48f8bd807df2b00c2c404b8e0/zstandard-0.25.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1869da9571d5e94a85a5e8d57e4e8807b175c9e4a6294e3b66fa4efb074d90f6", size = 4953933, upload-time = "2025-09-14T22:17:11.857Z" },
{ url = "https://files.pythonhosted.org/packages/18/e1/97680c664a1bf9a247a280a053d98e251424af51f1b196c6d52f117c9720/zstandard-0.25.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:809c5bcb2c67cd0ed81e9229d227d4ca28f82d0f778fc5fea624a9def3963f91", size = 5268008, upload-time = "2025-09-14T22:17:13.627Z" },
{ url = "https://files.pythonhosted.org/packages/1e/73/316e4010de585ac798e154e88fd81bb16afc5c5cb1a72eeb16dd37e8024a/zstandard-0.25.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f27662e4f7dbf9f9c12391cb37b4c4c3cb90ffbd3b1fb9284dadbbb8935fa708", size = 5433517, upload-time = "2025-09-14T22:17:16.103Z" },
{ url = "https://files.pythonhosted.org/packages/5b/60/dd0f8cfa8129c5a0ce3ea6b7f70be5b33d2618013a161e1ff26c2b39787c/zstandard-0.25.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99c0c846e6e61718715a3c9437ccc625de26593fea60189567f0118dc9db7512", size = 5814292, upload-time = "2025-09-14T22:17:17.827Z" },
{ url = "https://files.pythonhosted.org/packages/fc/5f/75aafd4b9d11b5407b641b8e41a57864097663699f23e9ad4dbb91dc6bfe/zstandard-0.25.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:474d2596a2dbc241a556e965fb76002c1ce655445e4e3bf38e5477d413165ffa", size = 5360237, upload-time = "2025-09-14T22:17:19.954Z" },
{ url = "https://files.pythonhosted.org/packages/ff/8d/0309daffea4fcac7981021dbf21cdb2e3427a9e76bafbcdbdf5392ff99a4/zstandard-0.25.0-cp312-cp312-win32.whl", hash = "sha256:23ebc8f17a03133b4426bcc04aabd68f8236eb78c3760f12783385171b0fd8bd", size = 436922, upload-time = "2025-09-14T22:17:24.398Z" },
{ url = "https://files.pythonhosted.org/packages/79/3b/fa54d9015f945330510cb5d0b0501e8253c127cca7ebe8ba46a965df18c5/zstandard-0.25.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffef5a74088f1e09947aecf91011136665152e0b4b359c42be3373897fb39b01", size = 506276, upload-time = "2025-09-14T22:17:21.429Z" },
{ url = "https://files.pythonhosted.org/packages/ea/6b/8b51697e5319b1f9ac71087b0af9a40d8a6288ff8025c36486e0c12abcc4/zstandard-0.25.0-cp312-cp312-win_arm64.whl", hash = "sha256:181eb40e0b6a29b3cd2849f825e0fa34397f649170673d385f3598ae17cca2e9", size = 462679, upload-time = "2025-09-14T22:17:23.147Z" },
{ url = "https://files.pythonhosted.org/packages/35/0b/8df9c4ad06af91d39e94fa96cc010a24ac4ef1378d3efab9223cc8593d40/zstandard-0.25.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec996f12524f88e151c339688c3897194821d7f03081ab35d31d1e12ec975e94", size = 795735, upload-time = "2025-09-14T22:17:26.042Z" },
{ url = "https://files.pythonhosted.org/packages/3f/06/9ae96a3e5dcfd119377ba33d4c42a7d89da1efabd5cb3e366b156c45ff4d/zstandard-0.25.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a1a4ae2dec3993a32247995bdfe367fc3266da832d82f8438c8570f989753de1", size = 640440, upload-time = "2025-09-14T22:17:27.366Z" },
{ url = "https://files.pythonhosted.org/packages/d9/14/933d27204c2bd404229c69f445862454dcc101cd69ef8c6068f15aaec12c/zstandard-0.25.0-cp313-cp313-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:e96594a5537722fdfb79951672a2a63aec5ebfb823e7560586f7484819f2a08f", size = 5343070, upload-time = "2025-09-14T22:17:28.896Z" },
{ url = "https://files.pythonhosted.org/packages/6d/db/ddb11011826ed7db9d0e485d13df79b58586bfdec56e5c84a928a9a78c1c/zstandard-0.25.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bfc4e20784722098822e3eee42b8e576b379ed72cca4a7cb856ae733e62192ea", size = 5063001, upload-time = "2025-09-14T22:17:31.044Z" },
{ url = "https://files.pythonhosted.org/packages/db/00/87466ea3f99599d02a5238498b87bf84a6348290c19571051839ca943777/zstandard-0.25.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:457ed498fc58cdc12fc48f7950e02740d4f7ae9493dd4ab2168a47c93c31298e", size = 5394120, upload-time = "2025-09-14T22:17:32.711Z" },
{ url = "https://files.pythonhosted.org/packages/2b/95/fc5531d9c618a679a20ff6c29e2b3ef1d1f4ad66c5e161ae6ff847d102a9/zstandard-0.25.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:fd7a5004eb1980d3cefe26b2685bcb0b17989901a70a1040d1ac86f1d898c551", size = 5451230, upload-time = "2025-09-14T22:17:34.41Z" },
{ url = "https://files.pythonhosted.org/packages/63/4b/e3678b4e776db00f9f7b2fe58e547e8928ef32727d7a1ff01dea010f3f13/zstandard-0.25.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8e735494da3db08694d26480f1493ad2cf86e99bdd53e8e9771b2752a5c0246a", size = 5547173, upload-time = "2025-09-14T22:17:36.084Z" },
{ url = "https://files.pythonhosted.org/packages/4e/d5/ba05ed95c6b8ec30bd468dfeab20589f2cf709b5c940483e31d991f2ca58/zstandard-0.25.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3a39c94ad7866160a4a46d772e43311a743c316942037671beb264e395bdd611", size = 5046736, upload-time = "2025-09-14T22:17:37.891Z" },
{ url = "https://files.pythonhosted.org/packages/50/d5/870aa06b3a76c73eced65c044b92286a3c4e00554005ff51962deef28e28/zstandard-0.25.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:172de1f06947577d3a3005416977cce6168f2261284c02080e7ad0185faeced3", size = 5576368, upload-time = "2025-09-14T22:17:40.206Z" },
{ url = "https://files.pythonhosted.org/packages/5d/35/398dc2ffc89d304d59bc12f0fdd931b4ce455bddf7038a0a67733a25f550/zstandard-0.25.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3c83b0188c852a47cd13ef3bf9209fb0a77fa5374958b8c53aaa699398c6bd7b", size = 4954022, upload-time = "2025-09-14T22:17:41.879Z" },
{ url = "https://files.pythonhosted.org/packages/9a/5c/36ba1e5507d56d2213202ec2b05e8541734af5f2ce378c5d1ceaf4d88dc4/zstandard-0.25.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1673b7199bbe763365b81a4f3252b8e80f44c9e323fc42940dc8843bfeaf9851", size = 5267889, upload-time = "2025-09-14T22:17:43.577Z" },
{ url = "https://files.pythonhosted.org/packages/70/e8/2ec6b6fb7358b2ec0113ae202647ca7c0e9d15b61c005ae5225ad0995df5/zstandard-0.25.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0be7622c37c183406f3dbf0cba104118eb16a4ea7359eeb5752f0794882fc250", size = 5433952, upload-time = "2025-09-14T22:17:45.271Z" },
{ url = "https://files.pythonhosted.org/packages/7b/01/b5f4d4dbc59ef193e870495c6f1275f5b2928e01ff5a81fecb22a06e22fb/zstandard-0.25.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5f5e4c2a23ca271c218ac025bd7d635597048b366d6f31f420aaeb715239fc98", size = 5814054, upload-time = "2025-09-14T22:17:47.08Z" },
{ url = "https://files.pythonhosted.org/packages/b2/e5/fbd822d5c6f427cf158316d012c5a12f233473c2f9c5fe5ab1ae5d21f3d8/zstandard-0.25.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f187a0bb61b35119d1926aee039524d1f93aaf38a9916b8c4b78ac8514a0aaf", size = 5360113, upload-time = "2025-09-14T22:17:48.893Z" },
{ url = "https://files.pythonhosted.org/packages/8e/e0/69a553d2047f9a2c7347caa225bb3a63b6d7704ad74610cb7823baa08ed7/zstandard-0.25.0-cp313-cp313-win32.whl", hash = "sha256:7030defa83eef3e51ff26f0b7bfb229f0204b66fe18e04359ce3474ac33cbc09", size = 436936, upload-time = "2025-09-14T22:17:52.658Z" },
{ url = "https://files.pythonhosted.org/packages/d9/82/b9c06c870f3bd8767c201f1edbdf9e8dc34be5b0fbc5682c4f80fe948475/zstandard-0.25.0-cp313-cp313-win_amd64.whl", hash = "sha256:1f830a0dac88719af0ae43b8b2d6aef487d437036468ef3c2ea59c51f9d55fd5", size = 506232, upload-time = "2025-09-14T22:17:50.402Z" },
{ url = "https://files.pythonhosted.org/packages/d4/57/60c3c01243bb81d381c9916e2a6d9e149ab8627c0c7d7abb2d73384b3c0c/zstandard-0.25.0-cp313-cp313-win_arm64.whl", hash = "sha256:85304a43f4d513f5464ceb938aa02c1e78c2943b29f44a750b48b25ac999a049", size = 462671, upload-time = "2025-09-14T22:17:51.533Z" },
{ url = "https://files.pythonhosted.org/packages/3d/5c/f8923b595b55fe49e30612987ad8bf053aef555c14f05bb659dd5dbe3e8a/zstandard-0.25.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e29f0cf06974c899b2c188ef7f783607dbef36da4c242eb6c82dcd8b512855e3", size = 795887, upload-time = "2025-09-14T22:17:54.198Z" },
{ url = "https://files.pythonhosted.org/packages/8d/09/d0a2a14fc3439c5f874042dca72a79c70a532090b7ba0003be73fee37ae2/zstandard-0.25.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:05df5136bc5a011f33cd25bc9f506e7426c0c9b3f9954f056831ce68f3b6689f", size = 640658, upload-time = "2025-09-14T22:17:55.423Z" },
{ url = "https://files.pythonhosted.org/packages/5d/7c/8b6b71b1ddd517f68ffb55e10834388d4f793c49c6b83effaaa05785b0b4/zstandard-0.25.0-cp314-cp314-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:f604efd28f239cc21b3adb53eb061e2a205dc164be408e553b41ba2ffe0ca15c", size = 5379849, upload-time = "2025-09-14T22:17:57.372Z" },
{ url = "https://files.pythonhosted.org/packages/a4/86/a48e56320d0a17189ab7a42645387334fba2200e904ee47fc5a26c1fd8ca/zstandard-0.25.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223415140608d0f0da010499eaa8ccdb9af210a543fac54bce15babbcfc78439", size = 5058095, upload-time = "2025-09-14T22:17:59.498Z" },
{ url = "https://files.pythonhosted.org/packages/f8/ad/eb659984ee2c0a779f9d06dbfe45e2dc39d99ff40a319895df2d3d9a48e5/zstandard-0.25.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e54296a283f3ab5a26fc9b8b5d4978ea0532f37b231644f367aa588930aa043", size = 5551751, upload-time = "2025-09-14T22:18:01.618Z" },
{ url = "https://files.pythonhosted.org/packages/61/b3/b637faea43677eb7bd42ab204dfb7053bd5c4582bfe6b1baefa80ac0c47b/zstandard-0.25.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ca54090275939dc8ec5dea2d2afb400e0f83444b2fc24e07df7fdef677110859", size = 6364818, upload-time = "2025-09-14T22:18:03.769Z" },
{ url = "https://files.pythonhosted.org/packages/31/dc/cc50210e11e465c975462439a492516a73300ab8caa8f5e0902544fd748b/zstandard-0.25.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e09bb6252b6476d8d56100e8147b803befa9a12cea144bbe629dd508800d1ad0", size = 5560402, upload-time = "2025-09-14T22:18:05.954Z" },
{ url = "https://files.pythonhosted.org/packages/c9/ae/56523ae9c142f0c08efd5e868a6da613ae76614eca1305259c3bf6a0ed43/zstandard-0.25.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a9ec8c642d1ec73287ae3e726792dd86c96f5681eb8df274a757bf62b750eae7", size = 4955108, upload-time = "2025-09-14T22:18:07.68Z" },
{ url = "https://files.pythonhosted.org/packages/98/cf/c899f2d6df0840d5e384cf4c4121458c72802e8bda19691f3b16619f51e9/zstandard-0.25.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a4089a10e598eae6393756b036e0f419e8c1d60f44a831520f9af41c14216cf2", size = 5269248, upload-time = "2025-09-14T22:18:09.753Z" },
{ url = "https://files.pythonhosted.org/packages/1b/c0/59e912a531d91e1c192d3085fc0f6fb2852753c301a812d856d857ea03c6/zstandard-0.25.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:f67e8f1a324a900e75b5e28ffb152bcac9fbed1cc7b43f99cd90f395c4375344", size = 5430330, upload-time = "2025-09-14T22:18:11.966Z" },
{ url = "https://files.pythonhosted.org/packages/a0/1d/7e31db1240de2df22a58e2ea9a93fc6e38cc29353e660c0272b6735d6669/zstandard-0.25.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:9654dbc012d8b06fc3d19cc825af3f7bf8ae242226df5f83936cb39f5fdc846c", size = 5811123, upload-time = "2025-09-14T22:18:13.907Z" },
{ url = "https://files.pythonhosted.org/packages/f6/49/fac46df5ad353d50535e118d6983069df68ca5908d4d65b8c466150a4ff1/zstandard-0.25.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4203ce3b31aec23012d3a4cf4a2ed64d12fea5269c49aed5e4c3611b938e4088", size = 5359591, upload-time = "2025-09-14T22:18:16.465Z" },
{ url = "https://files.pythonhosted.org/packages/c2/38/f249a2050ad1eea0bb364046153942e34abba95dd5520af199aed86fbb49/zstandard-0.25.0-cp314-cp314-win32.whl", hash = "sha256:da469dc041701583e34de852d8634703550348d5822e66a0c827d39b05365b12", size = 444513, upload-time = "2025-09-14T22:18:20.61Z" },
{ url = "https://files.pythonhosted.org/packages/3a/43/241f9615bcf8ba8903b3f0432da069e857fc4fd1783bd26183db53c4804b/zstandard-0.25.0-cp314-cp314-win_amd64.whl", hash = "sha256:c19bcdd826e95671065f8692b5a4aa95c52dc7a02a4c5a0cac46deb879a017a2", size = 516118, upload-time = "2025-09-14T22:18:17.849Z" },
{ url = "https://files.pythonhosted.org/packages/f0/ef/da163ce2450ed4febf6467d77ccb4cd52c4c30ab45624bad26ca0a27260c/zstandard-0.25.0-cp314-cp314-win_arm64.whl", hash = "sha256:d7541afd73985c630bafcd6338d2518ae96060075f9463d7dc14cfb33514383d", size = 476940, upload-time = "2025-09-14T22:18:19.088Z" },
]