diff --git a/.cupcake/policies/opencode/ban_stdlib_logger.rego b/.cupcake/policies/opencode/ban_stdlib_logger.rego new file mode 100644 index 0000000..2d287bf --- /dev/null +++ b/.cupcake/policies/opencode/ban_stdlib_logger.rego @@ -0,0 +1,168 @@ +# METADATA +# scope: package +# title: Ban Stdlib Logger +# description: Blocks use of stdlib logging in Python code +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.ban_stdlib_logger + +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} 
else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + +is_python_patch(patch_text) if { + contains(patch_text, ".py") +} + +is_python_patch(patch_text) if { + contains(patch_text, ".pyi") +} + +stdlib_logger_pattern := `import logging|from logging import|logging\.getLogger` +file_path_pattern := `\.pyi?$` + +# Block Write/Edit operations that introduce stdlib logging + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + file_path := resolved_file_path + regex.match(file_path_pattern, file_path) + + content := new_content + content != null + regex.match(stdlib_logger_pattern, content) + + decision := { + "rule_id": "PY-LOG-001", + "reason": "Stdlib logging usage is prohibited. 
Use the project logging utilities instead.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + file_path := edit_path(edit) + regex.match(file_path_pattern, file_path) + + content := edit_new_content(edit) + content != null + regex.match(stdlib_logger_pattern, content) + + decision := { + "rule_id": "PY-LOG-001", + "reason": "Stdlib logging usage is prohibited. Use the project logging utilities instead.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Patch", "ApplyPatch"} + + patch := patch_content + patch != null + + lower_patch := lower(patch) + is_python_patch(lower_patch) + regex.match(stdlib_logger_pattern, patch) + + decision := { + "rule_id": "PY-LOG-001", + "reason": "Stdlib logging usage is prohibited. Use the project logging utilities instead.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_assertion_roulette.rego b/.cupcake/policies/opencode/block_assertion_roulette.rego new file mode 100644 index 0000000..6e54c5e --- /dev/null +++ b/.cupcake/policies/opencode/block_assertion_roulette.rego @@ -0,0 +1,180 @@ +# METADATA +# scope: package +# title: Block Assertion Roulette +# description: Blocks multiple bare asserts in a single test without messages +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.block_assertion_roulette +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + 
tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + + +patch_targets_path(pattern) if 
{ + patch := patch_content + patch != null + lines := split(patch, "\n") + some line in lines + startswith(line, "+++ b/") + path := replace(line, "+++ b/", "") + regex.match(pattern, path) +} + +patch_targets_path(pattern) if { + patch := patch_content + patch != null + lines := split(patch, "\n") + some line in lines + startswith(line, "--- a/") + path := replace(line, "--- a/", "") + regex.match(pattern, path) +} + +file_path_pattern := `tests?/.*\.py$` +assertion_pattern := `(?m)^\s*assert\s+[^,\n]+\n\s*assert\s+[^,\n]+$` + +# Block Write/Edit operations that introduce assertion roulette + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + file_path := resolved_file_path + regex.match(file_path_pattern, file_path) + + content := new_content + content != null + regex.match(assertion_pattern, content) + + decision := { + "rule_id": "TEST-ASSERT-001", + "reason": "Multiple bare asserts detected. Use one assert per test or add assertion messages.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + file_path := edit_path(edit) + regex.match(file_path_pattern, file_path) + + content := edit_new_content(edit) + content != null + regex.match(assertion_pattern, content) + + decision := { + "rule_id": "TEST-ASSERT-001", + "reason": "Multiple bare asserts detected. Use one assert per test or add assertion messages.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Patch", "ApplyPatch"} + + patch := patch_content + patch != null + + patch_targets_path(file_path_pattern) + + regex.match(assertion_pattern, patch) + + decision := { + "rule_id": "TEST-ASSERT-001", + "reason": "Multiple bare asserts detected. 
Use one assert per test or add assertion messages.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_biome_ignore.rego b/.cupcake/policies/opencode/block_biome_ignore.rego new file mode 100644 index 0000000..d851d54 --- /dev/null +++ b/.cupcake/policies/opencode/block_biome_ignore.rego @@ -0,0 +1,180 @@ +# METADATA +# scope: package +# title: Block Biome Ignore +# description: Blocks ignore directives in JS/TS files +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.block_biome_ignore +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText 
!= null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + + +patch_targets_path(pattern) if { + patch := patch_content + patch != null + lines := split(patch, "\n") + some line in lines + startswith(line, "+++ b/") + path := replace(line, "+++ b/", "") + regex.match(pattern, path) +} + +patch_targets_path(pattern) if { + patch := patch_content + patch != null + lines := split(patch, "\n") + some line in lines + startswith(line, "--- a/") + path := replace(line, "--- a/", "") + regex.match(pattern, path) +} + +file_path_pattern := `\.(js|jsx|ts|tsx|mjs|cjs)$` +ignore_pattern := `//\s*biome-ignore|//\s*@ts-ignore|//\s*@ts-expect-error|//\s*@ts-nocheck|//\s*eslint-disable|/\*\s*eslint-disable` + +# Block Write/Edit operations that introduce ignore directives + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + file_path := resolved_file_path + regex.match(file_path_pattern, file_path) + + content := new_content + content != null + regex.match(ignore_pattern, content) + + 
decision := { + "rule_id": "TS-LINT-002", + "reason": "Ignore directives for Biome/TypeScript/ESLint are prohibited.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + file_path := edit_path(edit) + regex.match(file_path_pattern, file_path) + + content := edit_new_content(edit) + content != null + regex.match(ignore_pattern, content) + + decision := { + "rule_id": "TS-LINT-002", + "reason": "Ignore directives for Biome/TypeScript/ESLint are prohibited.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Patch", "ApplyPatch"} + + patch := patch_content + patch != null + + patch_targets_path(file_path_pattern) + + regex.match(ignore_pattern, patch) + + decision := { + "rule_id": "TS-LINT-002", + "reason": "Ignore directives for Biome/TypeScript/ESLint are prohibited.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_biome_ignore_bash.rego b/.cupcake/policies/opencode/block_biome_ignore_bash.rego new file mode 100644 index 0000000..bb45e9d --- /dev/null +++ b/.cupcake/policies/opencode/block_biome_ignore_bash.rego @@ -0,0 +1,26 @@ +# METADATA +# scope: package +# title: Block Biome Ignore (Bash) +# description: Blocks Bash commands that add ignore directives to JS/TS files +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["bash"] +package cupcake.policies.opencode.block_biome_ignore_bash +import rego.v1 + +ignore_pattern := `(biome-ignore|@ts-ignore|@ts-expect-error|@ts-nocheck|eslint-disable).*\.(js|jsx|ts|tsx|mjs|cjs)` + +deny contains decision if { + input.hook_event_name == "PreToolUse" + input.tool_name == "Bash" + + command := input.tool_input.command + regex.match(ignore_pattern, command) + + decision := { + "rule_id": "TS-LINT-001", + "reason": "Ignore directives for Biome/TypeScript/ESLint are prohibited.", + "severity": "HIGH" + 
} +} diff --git a/.cupcake/policies/opencode/block_broad_exception_handler.rego b/.cupcake/policies/opencode/block_broad_exception_handler.rego new file mode 100644 index 0000000..21670f2 --- /dev/null +++ b/.cupcake/policies/opencode/block_broad_exception_handler.rego @@ -0,0 +1,151 @@ +# METADATA +# scope: package +# title: Block Broad Exception Handler +# description: Blocks bare Exception handlers that only log +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.block_broad_exception_handler +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else 
:= tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + +handler_pattern := `except\s+Exception\s*(?:as\s+\w+)?:\s*\n\s+(?:logger\.|logging\.)` + +# Block Write/Edit operations that introduce broad exception handlers + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + content := new_content + content != null + regex.match(handler_pattern, content) + + decision := { + "rule_id": "PY-EXC-001", + "reason": "Broad Exception handlers that only log are prohibited.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + content := edit_new_content(edit) + content != null + regex.match(handler_pattern, content) + + decision := { + "rule_id": "PY-EXC-001", + "reason": "Broad Exception handlers that only log are prohibited.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Patch", "ApplyPatch"} + + patch := patch_content 
+ patch != null + + + regex.match(handler_pattern, patch) + + decision := { + "rule_id": "PY-EXC-001", + "reason": "Broad Exception handlers that only log are prohibited.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_code_quality_test_bash.rego b/.cupcake/policies/opencode/block_code_quality_test_bash.rego new file mode 100644 index 0000000..db0bbac --- /dev/null +++ b/.cupcake/policies/opencode/block_code_quality_test_bash.rego @@ -0,0 +1,26 @@ +# METADATA +# scope: package +# title: Block Code Quality Test (Bash) +# description: Blocks Bash edits to src/test/code-quality.test.ts +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["bash"] +package cupcake.policies.opencode.block_code_quality_test_bash +import rego.v1 + +pattern := `(sed|awk|cat\s*>|echo\s*>|tee|cp\s+.*code-quality\.test\.ts|mv\s+.*code-quality\.test\.ts|rm\s+.*code-quality\.test\.ts|>|>>).*code-quality\.test\.ts|code-quality\.test\.ts.*(>|>>|\|.*tee)` + +deny contains decision if { + input.hook_event_name == "PreToolUse" + input.tool_name == "Bash" + + command := input.tool_input.command + regex.match(pattern, command) + + decision := { + "rule_id": "TS-QUALITY-001", + "reason": "Direct edits to src/test/code-quality.test.ts are prohibited.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_code_quality_test_edits.rego b/.cupcake/policies/opencode/block_code_quality_test_edits.rego new file mode 100644 index 0000000..8e1c000 --- /dev/null +++ b/.cupcake/policies/opencode/block_code_quality_test_edits.rego @@ -0,0 +1,132 @@ +# METADATA +# scope: package +# title: Block Code Quality Test (Edits) +# description: Blocks file edits to src/test/code-quality.test.ts +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.block_code_quality_test_edits +import rego.v1 + +tool_name := input.tool_name if 
{ + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + 
content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + +file_path_pattern := `src/test/code-quality\.test\.ts$` + +# Block Write/Edit operations targeting code-quality test file + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + file_path := resolved_file_path + regex.match(file_path_pattern, file_path) + + decision := { + "rule_id": "TS-QUALITY-002", + "reason": "Direct edits to src/test/code-quality.test.ts are prohibited.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + file_path := edit_path(edit) + regex.match(file_path_pattern, file_path) + + decision := { + "rule_id": "TS-QUALITY-002", + "reason": "Direct edits to src/test/code-quality.test.ts are prohibited.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_code_quality_test_serena.rego b/.cupcake/policies/opencode/block_code_quality_test_serena.rego new file mode 100644 index 0000000..d4b9774 --- /dev/null +++ b/.cupcake/policies/opencode/block_code_quality_test_serena.rego @@ -0,0 +1,125 @@ +# METADATA +# scope: package +# title: Block Code Quality Test (Serena) +# description: Blocks Serena edits to src/test/code-quality.test.ts +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: [] +package cupcake.policies.opencode.block_code_quality_test_serena +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else 
:= tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + 
edit.old_text != null + content := edit.old_text +} else := "" + +file_path_pattern := `(^|/)src/test/code-quality\.test\.ts$` + +get_relative_path := path if { + path := tool_input.relative_path +} else := path if { + path := tool_input.path +} else := "" + +# Block Serena operations targeting code-quality test file + +deny contains decision if { + input.hook_event_name == "PreToolUse" + + tool_names := {"mcp__serena__replace_content", "mcp__serena__replace_symbol_body", "mcp__serena__create_text_file", "mcp__serena__insert_before_symbol", "mcp__serena__insert_after_symbol", "mcp__serena__rename_symbol"} + tool_name in tool_names + + file_path := get_relative_path + regex.match(file_path_pattern, file_path) + + decision := { + "rule_id": "TS-QUALITY-003", + "reason": "Direct edits to src/test/code-quality.test.ts are prohibited.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_code_quality_test_serena_plugin.rego b/.cupcake/policies/opencode/block_code_quality_test_serena_plugin.rego new file mode 100644 index 0000000..1ffc43a --- /dev/null +++ b/.cupcake/policies/opencode/block_code_quality_test_serena_plugin.rego @@ -0,0 +1,125 @@ +# METADATA +# scope: package +# title: Block Code Quality Test (Serena Plugin) +# description: Blocks Serena plugin edits to src/test/code-quality.test.ts +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: [] +package cupcake.policies.opencode.block_code_quality_test_serena_plugin +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { 
+ tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + +file_path_pattern := `(^|/)src/test/code-quality\.test\.ts$` + +get_relative_path := path if { + path := tool_input.relative_path +} else := path if { + path := 
tool_input.path +} else := "" + +# Block Serena plugin operations targeting code-quality test file + +deny contains decision if { + input.hook_event_name == "PreToolUse" + + tool_names := {"mcp__plugin_serena_serena__replace_content", "mcp__plugin_serena_serena__replace_symbol_body", "mcp__plugin_serena_serena__create_text_file", "mcp__plugin_serena_serena__insert_before_symbol", "mcp__plugin_serena_serena__insert_after_symbol", "mcp__plugin_serena_serena__rename_symbol"} + tool_name in tool_names + + file_path := get_relative_path + regex.match(file_path_pattern, file_path) + + decision := { + "rule_id": "TS-QUALITY-004", + "reason": "Direct edits to src/test/code-quality.test.ts are prohibited.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_datetime_now_fallback.rego b/.cupcake/policies/opencode/block_datetime_now_fallback.rego new file mode 100644 index 0000000..dac543f --- /dev/null +++ b/.cupcake/policies/opencode/block_datetime_now_fallback.rego @@ -0,0 +1,151 @@ +# METADATA +# scope: package +# title: Block datetime.now Fallback +# description: Blocks returning datetime.now() as a fallback +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.block_datetime_now_fallback +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + 
+new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + +pattern := `return\s+datetime\.now\s*\(` + +# Block Write/Edit operations that introduce datetime.now fallback + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + content := new_content + content != null + 
regex.match(pattern, content) + + decision := { + "rule_id": "PY-DT-001", + "reason": "Returning datetime.now() as a fallback is prohibited. Use a caller-provided timestamp.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + content := edit_new_content(edit) + content != null + regex.match(pattern, content) + + decision := { + "rule_id": "PY-DT-001", + "reason": "Returning datetime.now() as a fallback is prohibited. Use a caller-provided timestamp.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Patch", "ApplyPatch"} + + patch := patch_content + patch != null + + + regex.match(pattern, patch) + + decision := { + "rule_id": "PY-DT-001", + "reason": "Returning datetime.now() as a fallback is prohibited. Use a caller-provided timestamp.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_default_value_swallow.rego b/.cupcake/policies/opencode/block_default_value_swallow.rego new file mode 100644 index 0000000..aeee7fa --- /dev/null +++ b/.cupcake/policies/opencode/block_default_value_swallow.rego @@ -0,0 +1,151 @@ +# METADATA +# scope: package +# title: Block Default Value Swallow +# description: Blocks exception handlers that warn and return defaults +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.block_default_value_swallow +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null 
+} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + +pattern := 
`except\s+\w*(?:Error|Exception).*?:\s*\n\s+.*?(?:logger\.|logging\.).*?(?:warning|warn).*?\n\s+return\s+(?:\w+Settings|Defaults?\(|default_|\{[^}]*\}|[A-Z_]+_DEFAULT)` + +# Block Write/Edit operations that swallow exceptions with defaults + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + content := new_content + content != null + regex.match(pattern, content) + + decision := { + "rule_id": "PY-EXC-002", + "reason": "Swallowing exceptions and returning defaults is prohibited.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + content := edit_new_content(edit) + content != null + regex.match(pattern, content) + + decision := { + "rule_id": "PY-EXC-002", + "reason": "Swallowing exceptions and returning defaults is prohibited.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Patch", "ApplyPatch"} + + patch := patch_content + patch != null + + + regex.match(pattern, patch) + + decision := { + "rule_id": "PY-EXC-002", + "reason": "Swallowing exceptions and returning defaults is prohibited.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_duplicate_fixtures.rego b/.cupcake/policies/opencode/block_duplicate_fixtures.rego new file mode 100644 index 0000000..8f1bba8 --- /dev/null +++ b/.cupcake/policies/opencode/block_duplicate_fixtures.rego @@ -0,0 +1,184 @@ +# METADATA +# scope: package +# title: Block Duplicate Fixtures +# description: Blocks redefining global pytest fixtures outside conftest.py +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.block_duplicate_fixtures +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := 
input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" 
+ +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + + +patch_targets_path(pattern) if { + patch := patch_content + patch != null + lines := split(patch, "\n") + some line in lines + startswith(line, "+++ b/") + path := replace(line, "+++ b/", "") + regex.match(pattern, path) +} + +patch_targets_path(pattern) if { + patch := patch_content + patch != null + lines := split(patch, "\n") + some line in lines + startswith(line, "--- a/") + path := replace(line, "--- a/", "") + regex.match(pattern, path) +} + +file_path_pattern := `tests?/.*\.py$` +conftest_pattern := `tests?/conftest\.py$` +fixture_pattern := `@pytest\.fixture[^@]*\ndef\s+(mock_uow|crypto|meetings_dir|webhook_config|webhook_config_all_events|sample_datetime|calendar_settings|meeting_id|sample_meeting|recording_meeting|mock_grpc_context|mock_asr_engine|mock_optional_extras)\s*\(` + +# Block Write/Edit operations that introduce duplicate fixtures + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + file_path := resolved_file_path + regex.match(file_path_pattern, file_path) + not regex.match(conftest_pattern, file_path) + + content := new_content + content != null + regex.match(fixture_pattern, content) + + decision := { + "rule_id": "TEST-FIX-001", + "reason": "Duplicate global fixtures are prohibited. 
Use tests/conftest.py fixtures instead.",
+    "severity": "HIGH"
+  }
+}
+
+deny contains decision if {
+  input.hook_event_name == "PreToolUse"
+  tool_name == "MultiEdit"
+
+  some edit in tool_input.edits
+  file_path := edit_path(edit)
+  regex.match(file_path_pattern, file_path)
+  not regex.match(conftest_pattern, file_path)
+
+  content := edit_new_content(edit)
+  content != null
+  regex.match(fixture_pattern, content)
+
+  decision := {
+    "rule_id": "TEST-FIX-001",
+    "reason": "Duplicate global fixtures are prohibited. Use tests/conftest.py fixtures instead.",
+    "severity": "HIGH"
+  }
+}
+
+deny contains decision if {
+  input.hook_event_name == "PreToolUse"
+  tool_name in {"Patch", "ApplyPatch"}
+
+  patch := patch_content
+  patch != null
+
+  patch_targets_path(file_path_pattern)
+  not patch_targets_path(conftest_pattern)  # FIX: was `not regex.match(conftest_pattern, patch)` — a `$`-anchored PATH regex tested against the whole patch TEXT almost never matches, so patches editing tests/conftest.py were never exempted; exempt by target path, mirroring the Write/Edit rules above
+
+  regex.match(fixture_pattern, patch)  # NOTE(review): fixture_pattern spans a newline; raw patch lines carry "+"/"-" prefixes that may prevent a match — confirm against real Patch payloads
+
+  decision := {
+    "rule_id": "TEST-FIX-001",
+    "reason": "Duplicate global fixtures are prohibited. Use tests/conftest.py fixtures instead.",
+    "severity": "HIGH"
+  }
+}
diff --git a/.cupcake/policies/opencode/block_linter_config_frontend.rego b/.cupcake/policies/opencode/block_linter_config_frontend.rego
new file mode 100644
index 0000000..7f22594
--- /dev/null
+++ b/.cupcake/policies/opencode/block_linter_config_frontend.rego
@@ -0,0 +1,134 @@
+# METADATA
+# scope: package
+# title: Block Frontend Linter Config
+# description: Blocks edits to frontend linter config files
+# custom:
+#   routing:
+#     required_events: ["PreToolUse"]
+#     required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
+package cupcake.policies.opencode.block_linter_config_frontend
+import rego.v1
+
+tool_name := input.tool_name if {
+  input.tool_name != null
+} else := input.tool
+
+tool_input := input.tool_input if {
+  input.tool_input != null
+} else := input.args
+
+resolved_file_path := input.resolved_file_path if {
+  input.resolved_file_path != null
+} else := tool_input.file_path if {
tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content 
:= edit.old_text +} else := "" + +file_path_pattern := `(^|/)client/.*(?:\.?eslint(?:rc|\.config).*|\.?prettier(?:rc|\.config).*|biome\.json|tsconfig\.json|\.?rustfmt\.toml|\.?clippy\.toml)$` + +# Block Write/Edit operations targeting frontend linter configs + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + file_path := resolved_file_path + regex.match(file_path_pattern, file_path) + not contains(lower(file_path), "node_modules/") + + decision := { + "rule_id": "TS-CONFIG-002", + "reason": "Frontend linter/config file edits are prohibited.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + file_path := edit_path(edit) + regex.match(file_path_pattern, file_path) + not contains(lower(file_path), "node_modules/") + + decision := { + "rule_id": "TS-CONFIG-002", + "reason": "Frontend linter/config file edits are prohibited.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_linter_config_frontend_bash.rego b/.cupcake/policies/opencode/block_linter_config_frontend_bash.rego new file mode 100644 index 0000000..866259c --- /dev/null +++ b/.cupcake/policies/opencode/block_linter_config_frontend_bash.rego @@ -0,0 +1,26 @@ +# METADATA +# scope: package +# title: Block Frontend Linter Config (Bash) +# description: Blocks Bash edits to frontend linter config files +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["bash"] +package cupcake.policies.opencode.block_linter_config_frontend_bash +import rego.v1 + +pattern := `(rm|mv|cp|sed|awk|chmod|chown|touch|truncate|tee|>|>>)\s.*client/.*(?:biome\.json|tsconfig\.json|\.?eslint(?:rc|\.config)|\.?prettier(?:rc|\.config)|\.?rustfmt\.toml|\.?clippy\.toml)` + +deny contains decision if { + input.hook_event_name == "PreToolUse" + input.tool_name == "Bash" + + command := input.tool_input.command + 
regex.match(pattern, command) + + decision := { + "rule_id": "TS-CONFIG-001", + "reason": "Frontend linter/config file edits are prohibited.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_linter_config_python.rego b/.cupcake/policies/opencode/block_linter_config_python.rego new file mode 100644 index 0000000..9964c94 --- /dev/null +++ b/.cupcake/policies/opencode/block_linter_config_python.rego @@ -0,0 +1,134 @@ +# METADATA +# scope: package +# title: Block Python Linter Config +# description: Blocks edits to Python linter config files +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.block_linter_config_python +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := 
"" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + +file_path_pattern := `(?:pyproject\.toml|\.?ruff\.toml|\.?pyrightconfig\.json|\.?mypy\.ini|setup\.cfg|\.flake8|tox\.ini|\.?pylintrc)$` + +# Block Write/Edit operations targeting Python linter configs + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + file_path := resolved_file_path + regex.match(file_path_pattern, file_path) + not contains(lower(file_path), "/.venv/") + + decision := { + "rule_id": "PY-CONFIG-002", + "reason": "Python linter/config file edits are prohibited.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + file_path := edit_path(edit) + regex.match(file_path_pattern, file_path) + not contains(lower(file_path), "/.venv/") + + decision := { + "rule_id": 
"PY-CONFIG-002", + "reason": "Python linter/config file edits are prohibited.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_linter_config_python_bash.rego b/.cupcake/policies/opencode/block_linter_config_python_bash.rego new file mode 100644 index 0000000..cd61480 --- /dev/null +++ b/.cupcake/policies/opencode/block_linter_config_python_bash.rego @@ -0,0 +1,26 @@ +# METADATA +# scope: package +# title: Block Python Linter Config (Bash) +# description: Blocks Bash edits to Python linter config files +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["bash"] +package cupcake.policies.opencode.block_linter_config_python_bash +import rego.v1 + +pattern := `(rm|mv|cp|sed|awk|chmod|chown|touch|truncate|tee|>|>>)\s.*(?:pyproject\.toml|\.?ruff\.toml|\.?pyrightconfig\.json|\.?mypy\.ini|setup\.cfg|\.flake8|tox\.ini|\.?pylintrc)` + +deny contains decision if { + input.hook_event_name == "PreToolUse" + input.tool_name == "Bash" + + command := input.tool_input.command + regex.match(pattern, command) + + decision := { + "rule_id": "PY-CONFIG-001", + "reason": "Python linter/config file edits are prohibited.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_magic_numbers.rego b/.cupcake/policies/opencode/block_magic_numbers.rego new file mode 100644 index 0000000..49f4b9e --- /dev/null +++ b/.cupcake/policies/opencode/block_magic_numbers.rego @@ -0,0 +1,183 @@ +# METADATA +# scope: package +# title: Block Magic Numbers +# description: Blocks introduction of magic numbers outside constants modules +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.block_magic_numbers +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + 
+resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} 
else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + + +patch_targets_path(pattern) if { + patch := patch_content + patch != null + lines := split(patch, "\n") + some line in lines + startswith(line, "+++ b/") + path := replace(line, "+++ b/", "") + regex.match(pattern, path) +} + +patch_targets_path(pattern) if { + patch := patch_content + patch != null + lines := split(patch, "\n") + some line in lines + startswith(line, "--- a/") + path := replace(line, "--- a/", "") + regex.match(pattern, path) +} + +file_path_pattern := `\.(py|ts|tsx|js|jsx)$` +number_pattern := `(?:timeout|delay|interval|duration|limit|max|min|size|count|threshold|retry|retries|attempts|port|width|height|margin|padding|offset|index|length|capacity|buffer|batch|chunk|page|rate|fps|dpi|quality|level|priority|weight|score|factor|multiplier|divisor|percentage|ratio|scale)\s*[=:]\s*([2-9]|[1-9]\d+)|(?:if|while|for|elif|range|slice|sleep|wait|setTimeout|setInterval)\s*\([^)]*([2-9]|[1-9]\d+)` + +# Block Write/Edit operations that introduce magic numbers + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + file_path := resolved_file_path + regex.match(file_path_pattern, file_path) + not contains(lower(file_path), "constants") + + content := new_content + content != null + regex.match(number_pattern, content) + + decision := { + "rule_id": "STYLE-001", + "reason": "Magic numbers are prohibited. 
Use named constants.",
+    "severity": "HIGH"
+  }
+}
+
+deny contains decision if {
+  input.hook_event_name == "PreToolUse"
+  tool_name == "MultiEdit"
+
+  some edit in tool_input.edits
+  file_path := edit_path(edit)
+  regex.match(file_path_pattern, file_path)
+  not contains(lower(file_path), "constants")
+
+  content := edit_new_content(edit)
+  content != null
+  regex.match(number_pattern, content)
+
+  decision := {
+    "rule_id": "STYLE-001",
+    "reason": "Magic numbers are prohibited. Use named constants.",
+    "severity": "HIGH"
+  }
+}
+
+deny contains decision if {
+  input.hook_event_name == "PreToolUse"
+  tool_name in {"Patch", "ApplyPatch"}
+
+  patch := patch_content
+  patch != null
+
+  patch_targets_path(file_path_pattern)
+  not patch_targets_path(`(?i)constants`)  # FIX: was `not contains(lower(patch), "constants")` — scanning the whole patch TEXT exempts any patch that merely mentions "constants" (e.g. in a comment or reason string); exempt by target path instead, mirroring the Write/Edit rules above
+
+  regex.match(number_pattern, patch)  # NOTE(review): also matches context/removed ("-") diff lines, so a patch that only deletes a magic number can be blocked — consider restricting to added lines
+
+  decision := {
+    "rule_id": "STYLE-001",
+    "reason": "Magic numbers are prohibited. Use named constants.",
+    "severity": "HIGH"
+  }
+}
diff --git a/.cupcake/policies/opencode/block_makefile_bash.rego b/.cupcake/policies/opencode/block_makefile_bash.rego
new file mode 100644
index 0000000..133a503
--- /dev/null
+++ b/.cupcake/policies/opencode/block_makefile_bash.rego
@@ -0,0 +1,26 @@
+# METADATA
+# scope: package
+# title: Block Makefile Edit (Bash)
+# description: Blocks Bash edits to Makefile
+# custom:
+#   routing:
+#     required_events: ["PreToolUse"]
+#     required_tools: ["bash"]
+package cupcake.policies.opencode.block_makefile_bash
+import rego.v1
+
+pattern := `(>>?\s*Makefile|sed\s+.*-i.*Makefile|sed\s+-i.*Makefile|perl\s+-[pi].*Makefile|tee\s+.*Makefile|(mv|cp)\s+\S+\s+Makefile\b|>\s*Makefile)`
+
+deny contains decision if {
+  input.hook_event_name == "PreToolUse"
+  input.tool_name == "Bash"
+
+  command := input.tool_input.command
+  regex.match(pattern, command)
+
+  decision := {
+    "rule_id": "BUILD-001",
+    "reason": "Makefile edits are prohibited.",
+    "severity": "HIGH"
+  }
+}
diff --git a/.cupcake/policies/opencode/block_makefile_edit.rego
b/.cupcake/policies/opencode/block_makefile_edit.rego new file mode 100644 index 0000000..cfcb5cc --- /dev/null +++ b/.cupcake/policies/opencode/block_makefile_edit.rego @@ -0,0 +1,132 @@ +# METADATA +# scope: package +# title: Block Makefile Edit +# description: Blocks file edits to Makefile +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.block_makefile_edit +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != 
null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + +file_path_pattern := `(?:^|/)Makefile$` + +# Block Write/Edit operations targeting Makefile + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + file_path := resolved_file_path + regex.match(file_path_pattern, file_path) + + decision := { + "rule_id": "BUILD-002", + "reason": "Makefile edits are prohibited.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + file_path := edit_path(edit) + regex.match(file_path_pattern, file_path) + + decision := { + "rule_id": "BUILD-002", + "reason": "Makefile edits are prohibited.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_no_verify.rego b/.cupcake/policies/opencode/block_no_verify.rego new file mode 100644 index 0000000..0aa9dd0 --- /dev/null +++ b/.cupcake/policies/opencode/block_no_verify.rego @@ -0,0 +1,26 @@ +# METADATA +# scope: package +# title: Block Git --no-verify +# description: Blocks git commit --no-verify +# custom: +# routing: +# required_events: ["PreToolUse"] +# 
required_tools: ["bash"] +package cupcake.policies.opencode.block_no_verify +import rego.v1 + +pattern := `git\s+commit\b.*\s(--no-verify|-n)\b` + +deny contains decision if { + input.hook_event_name == "PreToolUse" + input.tool_name == "Bash" + + command := input.tool_input.command + regex.match(pattern, command) + + decision := { + "rule_id": "GIT-001", + "reason": "Git commit --no-verify is prohibited.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_silent_none_return.rego b/.cupcake/policies/opencode/block_silent_none_return.rego new file mode 100644 index 0000000..d585436 --- /dev/null +++ b/.cupcake/policies/opencode/block_silent_none_return.rego @@ -0,0 +1,151 @@ +# METADATA +# scope: package +# title: Block Silent None Return +# description: Blocks exception handlers that log and return empty values +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.block_silent_none_return +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + 
+old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + +pattern := `except\s+\w*Error.*?:\s*\n\s+.*?(?:logger\.|logging\.).*?\n\s+return\s+(?:None|\[\]|False|\{\}|0)` + +# Block Write/Edit operations that swallow exceptions with empty returns + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + content := new_content + content != null + regex.match(pattern, content) + + decision := { + "rule_id": "PY-EXC-003", + "reason": "Silent exception handlers returning empty values are prohibited.", + "severity": "HIGH" + } +} + +deny contains decision if { + 
input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + content := edit_new_content(edit) + content != null + regex.match(pattern, content) + + decision := { + "rule_id": "PY-EXC-003", + "reason": "Silent exception handlers returning empty values are prohibited.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Patch", "ApplyPatch"} + + patch := patch_content + patch != null + + + regex.match(pattern, patch) + + decision := { + "rule_id": "PY-EXC-003", + "reason": "Silent exception handlers returning empty values are prohibited.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_test_loops_conditionals.rego b/.cupcake/policies/opencode/block_test_loops_conditionals.rego new file mode 100644 index 0000000..cdbd015 --- /dev/null +++ b/.cupcake/policies/opencode/block_test_loops_conditionals.rego @@ -0,0 +1,180 @@ +# METADATA +# scope: package +# title: Block Test Loops/Conditionals +# description: Blocks loops or conditionals inside tests with asserts +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.block_test_loops_conditionals +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + 
tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + + +patch_targets_path(pattern) if { + patch := patch_content + patch != null + lines := split(patch, "\n") + some line in lines + startswith(line, "+++ b/") + path := replace(line, "+++ b/", "") + regex.match(pattern, path) +} + +patch_targets_path(pattern) if { + patch := patch_content + patch != null + lines := split(patch, 
"\n") + some line in lines + startswith(line, "--- a/") + path := replace(line, "--- a/", "") + regex.match(pattern, path) +} + +file_path_pattern := `(?:^|/)tests?/.*\.py$` +pattern := `def test_[^(]+\([^)]*\)[^:]*:[\s\S]*?\b(for|while|if)\s+[^:]+:[\s\S]*?assert` + +# Block Write/Edit operations that introduce loops/conditionals in tests + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + file_path := resolved_file_path + regex.match(file_path_pattern, file_path) + + content := new_content + content != null + regex.match(pattern, content) + + decision := { + "rule_id": "TEST-STRUCT-001", + "reason": "Loops or conditionals inside tests are prohibited. Use parametrization.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + file_path := edit_path(edit) + regex.match(file_path_pattern, file_path) + + content := edit_new_content(edit) + content != null + regex.match(pattern, content) + + decision := { + "rule_id": "TEST-STRUCT-001", + "reason": "Loops or conditionals inside tests are prohibited. Use parametrization.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Patch", "ApplyPatch"} + + patch := patch_content + patch != null + + patch_targets_path(file_path_pattern) + + regex.match(pattern, patch) + + decision := { + "rule_id": "TEST-STRUCT-001", + "reason": "Loops or conditionals inside tests are prohibited. 
Use parametrization.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/block_tests_quality.rego b/.cupcake/policies/opencode/block_tests_quality.rego new file mode 100644 index 0000000..c86e51c --- /dev/null +++ b/.cupcake/policies/opencode/block_tests_quality.rego @@ -0,0 +1,135 @@ +# METADATA +# scope: package +# title: Block Tests Quality +# description: Blocks edits to tests/quality (except baselines.json) +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.block_tests_quality +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} 
else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + +file_path_pattern := `tests/quality/` +exclude_pattern := `baselines\.json$` + +# Block Write/Edit operations targeting tests/quality + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + file_path := resolved_file_path + regex.match(file_path_pattern, file_path) + not regex.match(exclude_pattern, file_path) + + decision := { + "rule_id": "TEST-QUALITY-002", + "reason": "Direct edits to tests/quality are prohibited (except baselines.json).", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + file_path := edit_path(edit) + regex.match(file_path_pattern, file_path) + not regex.match(exclude_pattern, file_path) + + decision := { + "rule_id": "TEST-QUALITY-002", + "reason": "Direct edits to tests/quality are prohibited (except baselines.json).", + "severity": "HIGH" + } +} diff --git 
a/.cupcake/policies/opencode/block_tests_quality_bash.rego b/.cupcake/policies/opencode/block_tests_quality_bash.rego new file mode 100644 index 0000000..8e2ab9a --- /dev/null +++ b/.cupcake/policies/opencode/block_tests_quality_bash.rego @@ -0,0 +1,27 @@ +# METADATA +# scope: package +# title: Block Tests Quality (Bash) +# description: Blocks Bash edits to tests/quality (except baselines.json) +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["bash"] +package cupcake.policies.opencode.block_tests_quality_bash +import rego.v1 + +pattern := `(rm|mv|cp|sed|awk|chmod|chown|touch|mkdir|rmdir|truncate|tee|>|>>)\s.*tests/quality/` + +deny contains decision if { + input.hook_event_name == "PreToolUse" + input.tool_name == "Bash" + + command := input.tool_input.command + regex.match(pattern, command) + not contains(lower(command), "tests/quality/baselines.json") + + decision := { + "rule_id": "TEST-QUALITY-001", + "reason": "Direct edits to tests/quality are prohibited (except baselines.json).", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/prevent_any_type.rego b/.cupcake/policies/opencode/prevent_any_type.rego new file mode 100644 index 0000000..d6cd27c --- /dev/null +++ b/.cupcake/policies/opencode/prevent_any_type.rego @@ -0,0 +1,186 @@ +# METADATA +# scope: package +# title: Ban Python Any Type +# description: Blocks introduction of typing.Any in Python code +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.prevent_any_type + +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := 
tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + 
+is_python_file(path) if { + endswith(path, ".py") +} + +is_python_file(path) if { + endswith(path, ".pyi") +} + +# Regex patterns indicating use of Any in type annotations/imports +any_type_patterns := [ + `(?m)^\s*from\s+typing\s+import\s+[^#\n]*\bAny\b`, + `\btyping\.Any\b`, + `:\s*Any\b`, + `:\s*"Any"`, + `:\s*'Any'`, + `->\s*Any\b`, + `->\s*"Any"`, + `->\s*'Any'`, + `\[\s*Any\s*\]`, + `\[\s*Any\s*,`, + `,\s*Any\s*\]`, + `,\s*Any\s*,`, + `Union\[[^\]]*\bAny\b[^\]]*\]`, + `Optional\[Any\]`, +] + +# Block Write/Edit operations that introduce Any in Python files +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + # Only enforce for Python files + file_path := lower(resolved_file_path) + is_python_file(file_path) + + content := new_content + content != null + + some pattern in any_type_patterns + regex.match(pattern, content) + + decision := { + "rule_id": "PY-TYPE-001", + "reason": "Use of Any is prohibited in Python type annotations/imports. Replace with Protocol, TypeVar, TypedDict, or a concrete type.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Patch", "ApplyPatch"} + + content := patch_content + content != null + + some pattern in any_type_patterns + regex.match(pattern, content) + + decision := { + "rule_id": "PY-TYPE-001", + "reason": "Use of Any is prohibited in Python type annotations/imports. 
Replace with Protocol, TypeVar, TypedDict, or a concrete type.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + file_path := lower(edit_path(edit)) + is_python_file(file_path) + + content := edit_new_content(edit) + content != null + + some pattern in any_type_patterns + regex.match(pattern, content) + + decision := { + "rule_id": "PY-TYPE-001", + "reason": "Use of Any is prohibited in Python type annotations/imports. Replace with Protocol, TypeVar, TypedDict, or a concrete type.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/prevent_type_suppression.rego b/.cupcake/policies/opencode/prevent_type_suppression.rego new file mode 100644 index 0000000..9129014 --- /dev/null +++ b/.cupcake/policies/opencode/prevent_type_suppression.rego @@ -0,0 +1,181 @@ +# METADATA +# scope: package +# title: Ban Python Type Suppression +# description: Blocks type suppression directives in Python code +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.prevent_type_suppression + +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + 
tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + +is_python_file(path) if { + endswith(path, ".py") +} + +is_python_file(path) if { + endswith(path, ".pyi") +} + +# Regex patterns indicating type suppression directives +type_suppression_patterns := [ + `#\s*type:\s*ignore(\[[^\]]+\])?\b`, + `#\s*pyright:\s*ignore(\[[^\]]+\])?\b`, + `#\s*mypy:\s*ignore(\[[^\]]+\])?\b`, + `#\s*pyre-ignore\b`, + `#\s*pyre-fixme\b`, + 
`#\s*pyrefly:\s*ignore(\[[^\]]+\])?\b`, + `#\s*basedpyright:\s*ignore(\[[^\]]+\])?\b`, + `#\s*noqa\b`, + `#\s*noqa:\s*\w+`, +] + +# Block Write/Edit operations that introduce type suppression in Python files +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + # Only enforce for Python files + file_path := lower(resolved_file_path) + is_python_file(file_path) + + content := new_content + content != null + + some pattern in type_suppression_patterns + regex.match(pattern, content) + + decision := { + "rule_id": "PY-TYPE-002", + "reason": "Type suppression directives are prohibited in Python code. Fix the underlying type/lint issues instead.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Patch", "ApplyPatch"} + + content := patch_content + content != null + + some pattern in type_suppression_patterns + regex.match(pattern, content) + + decision := { + "rule_id": "PY-TYPE-002", + "reason": "Type suppression directives are prohibited in Python code. Fix the underlying type/lint issues instead.", + "severity": "HIGH" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + file_path := lower(edit_path(edit)) + is_python_file(file_path) + + content := edit_new_content(edit) + content != null + + some pattern in type_suppression_patterns + regex.match(pattern, content) + + decision := { + "rule_id": "PY-TYPE-002", + "reason": "Type suppression directives are prohibited in Python code. 
Fix the underlying type/lint issues instead.", + "severity": "HIGH" + } +} diff --git a/.cupcake/policies/opencode/warn_baselines_edit.rego b/.cupcake/policies/opencode/warn_baselines_edit.rego new file mode 100644 index 0000000..76193a4 --- /dev/null +++ b/.cupcake/policies/opencode/warn_baselines_edit.rego @@ -0,0 +1,132 @@ +# METADATA +# scope: package +# title: Warn on Baselines Edit +# description: Warns on edits to tests/quality/baselines.json +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.warn_baselines_edit +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + 
tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + +file_path_pattern := `tests/quality/baselines\.json$` + +# Warn on Write/Edit operations targeting baselines.json + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + file_path := resolved_file_path + regex.match(file_path_pattern, file_path) + + decision := { + "rule_id": "TEST-QUALITY-004", + "reason": "Warning: editing tests/quality/baselines.json should be avoided unless explicitly required.", + "severity": "LOW" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + file_path := edit_path(edit) + regex.match(file_path_pattern, file_path) + + decision := { + "rule_id": "TEST-QUALITY-004", + "reason": "Warning: editing tests/quality/baselines.json should be avoided unless explicitly required.", + "severity": "LOW" + } +} diff --git a/.cupcake/policies/opencode/warn_baselines_edit_bash.rego 
b/.cupcake/policies/opencode/warn_baselines_edit_bash.rego new file mode 100644 index 0000000..4647ab8 --- /dev/null +++ b/.cupcake/policies/opencode/warn_baselines_edit_bash.rego @@ -0,0 +1,26 @@ +# METADATA +# scope: package +# title: Warn on Baselines Edit (Bash) +# description: Warns on Bash edits to tests/quality/baselines.json +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["bash"] +package cupcake.policies.opencode.warn_baselines_edit_bash +import rego.v1 + +pattern := `(sed|awk|echo|cat|tee|>|>>|cp|mv).*tests/quality/baselines\.json` + +deny contains decision if { + input.hook_event_name == "PreToolUse" + input.tool_name == "Bash" + + command := input.tool_input.command + regex.match(pattern, command) + + decision := { + "rule_id": "TEST-QUALITY-003", + "reason": "Warning: editing tests/quality/baselines.json should be avoided unless explicitly required.", + "severity": "LOW" + } +} diff --git a/.cupcake/policies/opencode/warn_large_file.rego b/.cupcake/policies/opencode/warn_large_file.rego new file mode 100644 index 0000000..09f8aee --- /dev/null +++ b/.cupcake/policies/opencode/warn_large_file.rego @@ -0,0 +1,180 @@ +# METADATA +# scope: package +# title: Warn on Large File +# description: Warns when writing large files (>= 500 lines) +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.warn_large_file +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := 
tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + + +patch_targets_path(pattern) if { + patch := patch_content + patch != null + lines := split(patch, "\n") + some line in lines + 
startswith(line, "+++ b/") + path := replace(line, "+++ b/", "") + regex.match(pattern, path) +} + +patch_targets_path(pattern) if { + patch := patch_content + patch != null + lines := split(patch, "\n") + some line in lines + startswith(line, "--- a/") + path := replace(line, "--- a/", "") + regex.match(pattern, path) +} + +file_path_pattern := `\.(py|ts|tsx|js|jsx)$` +pattern := `(?:.*\n){500,}` + +# Warn on Write/Edit operations that introduce large file content + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + file_path := resolved_file_path + regex.match(file_path_pattern, file_path) + + content := new_content + content != null + regex.match(pattern, content) + + decision := { + "rule_id": "STYLE-002", + "reason": "Warning: file content exceeds 500 lines. Consider refactoring.", + "severity": "LOW" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + file_path := edit_path(edit) + regex.match(file_path_pattern, file_path) + + content := edit_new_content(edit) + content != null + regex.match(pattern, content) + + decision := { + "rule_id": "STYLE-002", + "reason": "Warning: file content exceeds 500 lines. Consider refactoring.", + "severity": "LOW" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Patch", "ApplyPatch"} + + patch := patch_content + patch != null + + patch_targets_path(file_path_pattern) + + regex.match(pattern, patch) + + decision := { + "rule_id": "STYLE-002", + "reason": "Warning: file content exceeds 500 lines. 
Consider refactoring.", + "severity": "LOW" + } +} diff --git a/.cupcake/policies/opencode/warn_new_file_search.rego b/.cupcake/policies/opencode/warn_new_file_search.rego new file mode 100644 index 0000000..8968702 --- /dev/null +++ b/.cupcake/policies/opencode/warn_new_file_search.rego @@ -0,0 +1,137 @@ +# METADATA +# scope: package +# title: Warn on New File Without Search +# description: Warns when creating new source files +# custom: +# routing: +# required_events: ["PreToolUse"] +# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"] +package cupcake.policies.opencode.warn_new_file_search +import rego.v1 + +tool_name := input.tool_name if { + input.tool_name != null +} else := input.tool + +tool_input := input.tool_input if { + input.tool_input != null +} else := input.args + +resolved_file_path := input.resolved_file_path if { + input.resolved_file_path != null +} else := tool_input.file_path if { + tool_input.file_path != null +} else := tool_input.filePath if { + tool_input.filePath != null +} else := tool_input.path if { + tool_input.path != null +} else := tool_input.notebook_path if { + tool_input.notebook_path != null +} else := tool_input.notebookPath if { + tool_input.notebookPath != null +} else := "" + +new_content := tool_input.new_string if { + tool_input.new_string != null +} else := tool_input.newText if { + tool_input.newText != null +} else := tool_input.new_text if { + tool_input.new_text != null +} else := tool_input.content if { + tool_input.content != null +} else := "" + +old_content := tool_input.old_string if { + tool_input.old_string != null +} else := tool_input.oldText if { + tool_input.oldText != null +} else := tool_input.old_text if { + tool_input.old_text != null +} else := tool_input.previousContent if { + tool_input.previousContent != null +} else := "" + +patch_content := tool_input.patch if { + tool_input.patch != null +} else := tool_input.patchText if { + tool_input.patchText != null +} 
else := tool_input.patch_text if { + tool_input.patch_text != null +} else := "" + +edit_path(edit) := path if { + edit.resolved_file_path != null + path := edit.resolved_file_path +} else := path if { + edit.file_path != null + path := edit.file_path +} else := path if { + edit.filePath != null + path := edit.filePath +} else := path if { + edit.path != null + path := edit.path +} else := "" + +edit_new_content(edit) := content if { + edit.new_string != null + content := edit.new_string +} else := content if { + edit.newText != null + content := edit.newText +} else := content if { + edit.new_text != null + content := edit.new_text +} else := content if { + edit.content != null + content := edit.content +} else := "" + +edit_old_content(edit) := content if { + edit.old_string != null + content := edit.old_string +} else := content if { + edit.oldText != null + content := edit.oldText +} else := content if { + edit.old_text != null + content := edit.old_text +} else := "" + +file_path_pattern := `(^|/)(src|client/src|tests)/.*\.(py|ts|tsx|js|jsx)$` + +# Warn on Write/Edit operations that create new files + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name in {"Write", "Edit", "NotebookEdit"} + + file_path := resolved_file_path + regex.match(file_path_pattern, file_path) + + old_content == "" + + decision := { + "rule_id": "PROCESS-001", + "reason": "Warning: creating a new source file. Ensure you searched for existing implementations.", + "severity": "LOW" + } +} + +deny contains decision if { + input.hook_event_name == "PreToolUse" + tool_name == "MultiEdit" + + some edit in tool_input.edits + file_path := edit_path(edit) + regex.match(file_path_pattern, file_path) + + old_content := edit_old_content(edit) + old_content == "" + + decision := { + "rule_id": "PROCESS-001", + "reason": "Warning: creating a new source file. 
Ensure you searched for existing implementations.", + "severity": "LOW" + } +} diff --git a/.cupcake/system/evaluate.rego b/.cupcake/system/evaluate.rego new file mode 100644 index 0000000..85b85fe --- /dev/null +++ b/.cupcake/system/evaluate.rego @@ -0,0 +1,40 @@ +# METADATA +# scope: package +# title: System Aggregation Entrypoint for Hybrid Model +# authors: ["Cupcake Engine"] +# custom: +# description: "Aggregates all decision verbs from policies into a DecisionSet" +# entrypoint: true +# routing: +# required_events: [] +# required_tools: [] +package cupcake.system + +import rego.v1 + +evaluate := decision_set if { + decision_set := { + "halts": collect_verbs("halt"), + "denials": collect_verbs("deny"), + "blocks": collect_verbs("block"), + "asks": collect_verbs("ask"), + "modifications": collect_verbs("modify"), + "add_context": collect_verbs("add_context") + } +} + +collect_verbs(verb_name) := result if { + verb_sets := [value | + walk(data.cupcake.policies, [path, value]) + path[count(path) - 1] == verb_name + ] + + all_decisions := [decision | + some verb_set in verb_sets + some decision in verb_set + ] + + result := all_decisions +} + +default collect_verbs(_) := [] diff --git a/client/src/api/interfaces/domains.ts b/client/src/api/interfaces/domains.ts new file mode 100644 index 0000000..919530b --- /dev/null +++ b/client/src/api/interfaces/domains.ts @@ -0,0 +1,371 @@ +import type { + ServerInfo, + EffectiveServerUrl, + GetCurrentUserResponse, + ListWorkspacesResponse, + SwitchWorkspaceResponse, + GetWorkspaceSettingsRequest, + GetWorkspaceSettingsResponse, + UpdateWorkspaceSettingsRequest, + UpdateWorkspaceSettingsResponse, + InitiateAuthLoginResponse, + CompleteAuthLoginRequest, + CompleteAuthLoginResponse, + LogoutResponse, + CreateProjectRequest, + Project, + GetProjectRequest, + GetProjectBySlugRequest, + ListProjectsRequest, + ListProjectsResponse, + UpdateProjectRequest, + SetActiveProjectRequest, + GetActiveProjectRequest, + 
GetActiveProjectResponse, + AddProjectMemberRequest, + ProjectMembership, + UpdateProjectMemberRoleRequest, + RemoveProjectMemberRequest, + RemoveProjectMemberResponse, + ListProjectMembersRequest, + ListProjectMembersResponse, + ListSummarizationTemplatesRequest, + ListSummarizationTemplatesResponse, + GetSummarizationTemplateRequest, + GetSummarizationTemplateResponse, + CreateSummarizationTemplateRequest, + SummarizationTemplate, + UpdateSummarizationTemplateRequest, + ArchiveSummarizationTemplateRequest, + ListSummarizationTemplateVersionsRequest, + ListSummarizationTemplateVersionsResponse, + RestoreSummarizationTemplateVersionRequest, + CreateMeetingRequest, + Meeting, + ListMeetingsRequest, + ListMeetingsResponse, + GetMeetingRequest, + TranscriptionStream, + StreamStateInfo, + Summary, + AskAssistantRequest, + AskAssistantResponse, + ASRConfiguration, + UpdateASRConfigurationRequest, + ASRConfigurationJobStatus, + StreamingConfiguration, + UpdateStreamingConfigurationRequest, + SetHuggingFaceTokenRequest, + SetHuggingFaceTokenResult, + HuggingFaceTokenStatus, + ValidateHuggingFaceTokenResult, + Annotation, + AddAnnotationRequest, + UpdateAnnotationRequest, + ExportFormat, + ExportResult, + PlaybackInfo, + DiarizationJobStatus, + CancelDiarizationResult, + UserPreferences, + AudioDeviceInfo, + DualCaptureConfigInfo, + TestEnvironmentInfo, + TestAudioConfig, + TestAudioResult, + InjectTestToneRequest, + InjectTestToneResponse, + ListInstalledAppsRequest, + ListInstalledAppsResponse, + TriggerStatus, + ExtractEntitiesResponse, + UpdateEntityRequest, + ExtractedEntity, + ListCalendarEventsRequest, + ListCalendarEventsResponse, + GetCalendarProvidersResponse, + InitiateCalendarAuthRequest, + InitiateCalendarAuthResponse, + CompleteCalendarAuthRequest, + CompleteCalendarAuthResponse, + GetOAuthConnectionStatusResponse, + GetOAuthClientConfigResponse, + SetOAuthClientConfigRequest, + SetOAuthClientConfigResponse, + DisconnectOAuthResponse, + 
RegisterWebhookRequest, + RegisteredWebhook, + ListWebhooksResponse, + UpdateWebhookRequest, + DeleteWebhookResponse, + GetWebhookDeliveriesResponse, + StartIntegrationSyncResponse, + GetSyncStatusResponse, + ListSyncHistoryRequest, + ListSyncHistoryResponse, + GetUserIntegrationsResponse, + GetRecentLogsRequest, + GetRecentLogsResponse, + GetPerformanceMetricsRequest, + GetPerformanceMetricsResponse, + ConnectionDiagnostics, + RegisterOidcProviderRequest, + OidcProviderApi, + ListOidcProvidersRequest, + ListOidcProvidersResponse, + UpdateOidcProviderRequest, + DeleteOidcProviderResponse, + RefreshOidcDiscoveryResponse, + ListOidcPresetsResponse, +} from '../types'; + +/** + * Connection API - Server health and connectivity management + */ +export interface ConnectionAPI { + getServerInfo(): Promise; + connect(serverUrl?: string): Promise; + disconnect(): Promise; + isConnected(): Promise; + getEffectiveServerUrl(): Promise; +} + +/** + * Identity API - User and workspace management + */ +export interface IdentityAPI { + getCurrentUser(): Promise; + listWorkspaces(): Promise; + switchWorkspace(workspaceId: string): Promise; + getWorkspaceSettings(request: GetWorkspaceSettingsRequest): Promise; + updateWorkspaceSettings(request: UpdateWorkspaceSettingsRequest): Promise; + initiateAuthLogin(provider: string, redirectUri?: string): Promise; + completeAuthLogin(request: CompleteAuthLoginRequest): Promise; + logout(provider?: string): Promise; +} + +/** + * Project API - Project lifecycle and membership management + */ +export interface ProjectAPI { + createProject(request: CreateProjectRequest): Promise; + getProject(request: GetProjectRequest): Promise; + getProjectBySlug(request: GetProjectBySlugRequest): Promise; + listProjects(request: ListProjectsRequest): Promise; + updateProject(request: UpdateProjectRequest): Promise; + archiveProject(projectId: string): Promise; + restoreProject(projectId: string): Promise; + deleteProject(projectId: string): Promise; + 
setActiveProject(request: SetActiveProjectRequest): Promise; + getActiveProject(request: GetActiveProjectRequest): Promise; + addProjectMember(request: AddProjectMemberRequest): Promise; + updateProjectMemberRole(request: UpdateProjectMemberRoleRequest): Promise; + removeProjectMember(request: RemoveProjectMemberRequest): Promise; + listProjectMembers(request: ListProjectMembersRequest): Promise; +} + +/** + * Template API - Summarization template management + */ +export interface TemplateAPI { + listSummarizationTemplates(request: ListSummarizationTemplatesRequest): Promise; + getSummarizationTemplate(request: GetSummarizationTemplateRequest): Promise; + createSummarizationTemplate(request: CreateSummarizationTemplateRequest): Promise; + updateSummarizationTemplate(request: UpdateSummarizationTemplateRequest): Promise; + archiveSummarizationTemplate(request: ArchiveSummarizationTemplateRequest): Promise; + listSummarizationTemplateVersions(request: ListSummarizationTemplateVersionsRequest): Promise; + restoreSummarizationTemplateVersion(request: RestoreSummarizationTemplateVersionRequest): Promise; +} + +/** + * Meeting API - Meeting lifecycle management + */ +export interface MeetingAPI { + createMeeting(request: CreateMeetingRequest): Promise; + listMeetings(request: ListMeetingsRequest): Promise; + getMeeting(request: GetMeetingRequest): Promise; + stopMeeting(meetingId: string): Promise; + deleteMeeting(meetingId: string): Promise; +} + +/** + * Transcription API - Real-time transcription and streaming + */ +export interface TranscriptionAPI { + startTranscription(meetingId: string): Promise; + getStreamState(): Promise; + resetStreamState(): Promise; +} + +/** + * Summary API - AI-powered summarization + */ +export interface SummaryAPI { + generateSummary(meetingId: string, forceRegenerate?: boolean): Promise; + askAssistant(request: AskAssistantRequest): Promise; + grantCloudConsent(): Promise; + revokeCloudConsent(): Promise; + getCloudConsentStatus(): 
Promise<{ consentGranted: boolean }>; +} + +/** + * ASR API - Automatic speech recognition configuration + */ +export interface ASRAP { + getAsrConfiguration(): Promise; + updateAsrConfiguration(request: UpdateASRConfigurationRequest): Promise; + getAsrJobStatus(jobId: string): Promise; + getStreamingConfiguration(): Promise; + updateStreamingConfiguration(request: UpdateStreamingConfigurationRequest): Promise; + setHuggingFaceToken(request: SetHuggingFaceTokenRequest): Promise; + getHuggingFaceTokenStatus(): Promise; + deleteHuggingFaceToken(): Promise; + validateHuggingFaceToken(): Promise; +} + +/** + * Annotation API - Transcript annotations + */ +export interface AnnotationAPI { + listAnnotations(meetingId: string, startTime?: number, endTime?: number): Promise; + addAnnotation(request: AddAnnotationRequest): Promise; + getAnnotation(annotationId: string): Promise; + updateAnnotation(request: UpdateAnnotationRequest): Promise; + deleteAnnotation(annotationId: string): Promise; +} + +/** + * Export API - Document export functionality + */ +export interface ExportAPI { + exportTranscript(meetingId: string, format: ExportFormat): Promise; + saveExportFile(content: string, defaultName: string, extension: string): Promise; +} + +/** + * Playback API - Audio playback controls + */ +export interface PlaybackAPI { + startPlayback(meetingId: string, startTime?: number): Promise; + pausePlayback(): Promise; + stopPlayback(): Promise; + seekPlayback(position: number): Promise; + getPlaybackState(): Promise; +} + +/** + * Diarization API - Speaker identification and labeling + */ +export interface DiarizationAPI { + refineSpeakers(meetingId: string, numSpeakers?: number): Promise; + getDiarizationJobStatus(jobId: string): Promise; + renameSpeaker(meetingId: string, oldSpeakerId: string, newName: string): Promise; + cancelDiarization(jobId: string): Promise; + getActiveDiarizationJobs(): Promise; +} + +/** + * Preferences API - User preferences management + */ +export 
interface PreferencesAPI { + getPreferences(): Promise; + savePreferences(preferences: UserPreferences): Promise; +} + +/** + * Audio API - Audio device and configuration management + */ +export interface AudioAPI { + listAudioDevices(): Promise; + getDefaultAudioDevice(isInput: boolean): Promise; + selectAudioDevice(deviceId: string, isInput: boolean): Promise; + listLoopbackDevices(): Promise; + setSystemAudioDevice(deviceId: string | null): Promise; + setDualCaptureEnabled(enabled: boolean): Promise; + setAudioMixLevels(micGain: number, systemGain: number): Promise; + getDualCaptureConfig(): Promise; + checkTestEnvironment(): Promise; + injectTestAudio(meetingId: string, config: TestAudioConfig): Promise; + injectTestTone(request: InjectTestToneRequest): Promise; +} + +/** + * Trigger API - Meeting trigger detection and management + */ +export interface TriggerAPI { + listInstalledApps(options?: ListInstalledAppsRequest): Promise; + invalidateAppCache(): Promise; + setTriggerEnabled(enabled: boolean): Promise; + snoozeTriggers(minutes?: number): Promise; + resetSnooze(): Promise; + getTriggerStatus(): Promise; + dismissTrigger(): Promise; + acceptTrigger(title?: string): Promise; +} + +/** + * Entity API - Named entity recognition and management + */ +export interface EntityAPI { + extractEntities(meetingId: string, forceRefresh?: boolean): Promise; + updateEntity(request: UpdateEntityRequest): Promise; + deleteEntity(meetingId: string, entityId: string): Promise; +} + +/** + * Calendar API - Calendar integration and OAuth management + */ +export interface CalendarAPI { + listCalendarEvents(request: ListCalendarEventsRequest): Promise; + getCalendarProviders(): Promise; + initiateCalendarAuth(request: InitiateCalendarAuthRequest): Promise; + completeCalendarAuth(request: CompleteCalendarAuthRequest): Promise; + getOAuthConnectionStatus(provider: string): Promise; + getOAuthClientConfig(provider: string): Promise; + setOAuthClientConfig(request: 
SetOAuthClientConfigRequest): Promise; + disconnectCalendar(provider: string): Promise; +} + +/** + * Webhook API - Webhook registration and management + */ +export interface WebhookAPI { + registerWebhook(request: RegisterWebhookRequest): Promise; + listWebhooks(enabledOnly?: boolean): Promise; + updateWebhook(request: UpdateWebhookRequest): Promise; + deleteWebhook(webhookId: string): Promise; + getWebhookDeliveries(webhookId: string, limit?: number): Promise; +} + +/** + * Integration API - External integration management + */ +export interface IntegrationAPI { + startIntegrationSync(integrationId: string): Promise; + getSyncStatus(syncRunId: string): Promise; + listSyncHistory(request: ListSyncHistoryRequest): Promise; + getUserIntegrations(): Promise; +} + +/** + * Observability API - Logging, metrics, and diagnostics + */ +export interface ObservabilityAPI { + getRecentLogs(request?: GetRecentLogsRequest): Promise; + getPerformanceMetrics(request: GetPerformanceMetricsRequest): Promise; + runConnectionDiagnostics(): Promise; +} + +/** + * OIDC API - OpenID Connect provider management + */ +export interface OIDCAP { + registerOidcProvider(request: RegisterOidcProviderRequest): Promise; + listOidcProviders(request: ListOidcProvidersRequest): Promise; + getOidcProvider(providerId: string): Promise; + updateOidcProvider(request: UpdateOidcProviderRequest): Promise; + deleteOidcProvider(providerId: string): Promise; + refreshOidcDiscovery(providerId: string): Promise; + testOidcConnection(providerId: string): Promise; + listOidcPresets(): Promise; +} \ No newline at end of file diff --git a/client/src/components/features/settings/integrations-section/use-calendar-integration.ts b/client/src/components/features/settings/integrations-section/use-calendar-integration.ts new file mode 100644 index 0000000..341b47e --- /dev/null +++ b/client/src/components/features/settings/integrations-section/use-calendar-integration.ts @@ -0,0 +1,231 @@ +/** + * Hook for OAuth and 
calendar integration operations. + */ + +import { useCallback, useEffect, useRef } from 'react'; + +import { getAPI, IdentityDefaults } from '@/api'; +import type { Integration } from '@/api/types'; +import { useWorkspace } from '@/contexts/workspace-state'; +import { useOAuthFlow } from '@/hooks'; +import { toast } from '@/hooks'; +import { debug } from '@/lib/observability/debug'; +import { toastError } from '@/lib/observability/errors'; +import { preferences } from '@/lib/preferences'; + +import { getCalendarProvider } from './helpers'; + +interface UseCalendarIntegrationProps { + integrations: Integration[]; + setIntegrations: (integrations: Integration[]) => void; +} + +export function useCalendarIntegration({ + integrations, + setIntegrations, +}: UseCalendarIntegrationProps) { + const log = debug('CalendarIntegration'); + const { currentWorkspace } = useWorkspace(); + const { + state: oauthState, + initiateAuth, + disconnect: disconnectOAuth, + reset: resetOAuth, + } = useOAuthFlow(); + const workspaceId = currentWorkspace?.id ?? IdentityDefaults.DEFAULT_WORKSPACE_ID; + const pendingOAuthIntegrationIdRef = useRef<string | null>(null); + + // Handle OAuth completion + useEffect(() => { + if ( + oauthState.status === 'connected' && + oauthState.integrationId && + pendingOAuthIntegrationIdRef.current + ) { + preferences.updateIntegration(pendingOAuthIntegrationIdRef.current, { + status: 'connected', + integration_id: oauthState.integrationId, + error_message: undefined, + }); + setIntegrations(preferences.getIntegrations()); + pendingOAuthIntegrationIdRef.current = null; + return; + } + + if (oauthState.status === 'error') { + pendingOAuthIntegrationIdRef.current = null; + } + }, [oauthState.integrationId, oauthState.status, setIntegrations]); + + useEffect(() => { + let cancelled = false; + + const syncCalendarOverrides = async () => { + const calendarIntegrations = preferences + .getIntegrations() + .filter((integration) => integration.type === 'calendar'); + + if (calendarIntegrations.length === 0) { + return; + } + + try { + const api = getAPI(); 
+ for (const integration of calendarIntegrations) { + const provider = getCalendarProvider(integration); + if (!provider) { + continue; + } + const response = await api.getOAuthClientConfig({ + provider, + workspace_id: workspaceId, + integration_type: 'calendar', + }); + const config = response.config; + const existing = preferences + .getIntegrations() + .find((item) => item.id === integration.id); + if (!existing) { + continue; + } + const mergedOAuthConfig = { + ...existing.oauth_config, + client_id: config.client_id || existing.oauth_config?.client_id || '', + redirect_uri: config.redirect_uri || existing.oauth_config?.redirect_uri || '', + scopes: + config.scopes?.length && config.scopes.length > 0 + ? config.scopes + : existing.oauth_config?.scopes || [], + client_secret: existing.oauth_config?.client_secret ?? '', + }; + + preferences.updateIntegration(existing.id, { + oauth_config: mergedOAuthConfig, + oauth_override_enabled: config.override_enabled, + oauth_override_has_secret: + config.has_client_secret ?? existing.oauth_override_has_secret, + }); + } + } catch (error) { + log('Failed to sync calendar OAuth overrides', { + error: error instanceof Error ? 
error.message : String(error), + }); + return; + } + + if (!cancelled) { + setIntegrations(preferences.getIntegrations()); + } + }; + + void syncCalendarOverrides(); + + return () => { + cancelled = true; + }; + }, [setIntegrations, workspaceId]); + + const syncCalendarOAuthConfig = useCallback( + async (integration: Integration) => { + const provider = getCalendarProvider(integration); + if (!provider) { + return; + } + const oauthConfig = integration.oauth_config || { + client_id: '', + client_secret: '', + redirect_uri: '', + scopes: [], + }; + + const clientSecret = oauthConfig.client_secret?.trim(); + + try { + const api = getAPI(); + const response = await api.setOAuthClientConfig({ + provider, + workspace_id: workspaceId, + integration_type: 'calendar', + config: { + client_id: oauthConfig.client_id, + client_secret: clientSecret || undefined, + redirect_uri: oauthConfig.redirect_uri, + scopes: oauthConfig.scopes, + override_enabled: Boolean(integration.oauth_override_enabled), + has_client_secret: integration.oauth_override_has_secret, + }, + }); + + if (response.success && clientSecret) { + preferences.updateIntegration(integration.id, { + oauth_override_has_secret: true, + }); + } + } catch (error) { + toastError({ + title: 'OAuth config update failed', + error, + fallback: 'Failed to update OAuth credentials', + }); + } + }, + [workspaceId] + ); + + const handleCalendarConnect = useCallback( + async (integration: Integration) => { + const provider = getCalendarProvider(integration); + if (!provider) { + toast({ + title: 'Unsupported calendar provider', + description: `No OAuth provider mapped for ${integration.name}`, + variant: 'destructive', + }); + return; + } + + if (integration.oauth_override_enabled) { + await syncCalendarOAuthConfig(integration); + } + + pendingOAuthIntegrationIdRef.current = integration.id; + await initiateAuth(provider); + }, + [initiateAuth, syncCalendarOAuthConfig] + ); + + const handleCalendarDisconnect = useCallback( + async 
(integration: Integration) => { + const provider = getCalendarProvider(integration); + if (!provider) { + toast({ + title: 'Unsupported calendar provider', + description: `No OAuth provider mapped for ${integration.name}`, + variant: 'destructive', + }); + return; + } + + const success = await disconnectOAuth(provider); + if (!success) { + return; + } + + preferences.updateIntegration(integration.id, { + status: 'disconnected', + integration_id: undefined, + error_message: undefined, + }); + setIntegrations(preferences.getIntegrations()); + }, + [disconnectOAuth, setIntegrations] + ); + + return { + oauthState, + resetOAuth, + pendingOAuthIntegrationIdRef, + syncCalendarOAuthConfig, + handleCalendarConnect, + handleCalendarDisconnect, + }; +} \ No newline at end of file diff --git a/client/src/components/features/settings/integrations-section/use-integration-crud.ts b/client/src/components/features/settings/integrations-section/use-integration-crud.ts new file mode 100644 index 0000000..474f407 --- /dev/null +++ b/client/src/components/features/settings/integrations-section/use-integration-crud.ts @@ -0,0 +1,138 @@ +/** + * Hook for integration CRUD operations. 
+ */ + +import { useCallback } from 'react'; + +import { IdentityDefaults } from '@/api'; +import type { Integration } from '@/api/types'; +import { useWorkspace } from '@/contexts/workspace-state'; +import { useOidcProviders } from '@/hooks'; +import { toast } from '@/hooks'; +import { useSecureIntegrationSecrets } from '@/hooks'; +import { isSecureStorageAvailable } from '@/lib/storage/crypto'; +import { preferences } from '@/lib/preferences'; + +import type { CustomIntegrationFormState } from './types'; + +interface UseIntegrationCrudProps { + integrations: Integration[]; + setIntegrations: (integrations: Integration[]) => void; +} + +export function useIntegrationCrud({ + integrations, + setIntegrations, +}: UseIntegrationCrudProps) { + const { currentWorkspace } = useWorkspace(); + const { saveSecrets } = useSecureIntegrationSecrets(); + const { createProvider: createOidcProvider, updateProvider: updateOidcProvider } = + useOidcProviders(); + const workspaceId = currentWorkspace?.id ?? IdentityDefaults.DEFAULT_WORKSPACE_ID; + const encryptionAvailable = isSecureStorageAvailable(); + + const handleAddCustomIntegration = useCallback( + (formState: CustomIntegrationFormState, onClose: () => void) => { + if (!formState.name.trim()) { + toast({ title: 'Error', description: 'Please enter a name', variant: 'destructive' }); + return; + } + if (!formState.url.trim()) { + toast({ + title: 'Error', + description: 'Please enter a webhook URL', + variant: 'destructive', + }); + return; + } + preferences.addCustomIntegration(formState.name, { + url: formState.url, + method: formState.method, + auth_type: formState.authType, + auth_value: formState.authValue, + }); + setIntegrations(preferences.getIntegrations()); + onClose(); + toast({ title: 'Integration added', description: formState.name }); + }, + [setIntegrations] + ); + + const handleUpdateIntegrationConfig = useCallback( + async (integrationId: string, config: Partial<Integration>) => { + preferences.updateIntegration(integrationId, config); + let updatedIntegrations = 
preferences.getIntegrations(); + const updatedIntegration = updatedIntegrations.find((i) => i.id === integrationId); + + if (updatedIntegration && encryptionAvailable) { + await saveSecrets(updatedIntegration); + } + + if ( + updatedIntegration?.type === 'calendar' && + (config.oauth_config !== undefined || config.oauth_override_enabled !== undefined) + ) { + updatedIntegrations = preferences.getIntegrations(); + } + + // For OIDC integrations with complete config, register with backend + if ( + updatedIntegration?.type === 'oidc' && + updatedIntegration.oidc_config?.issuer_url && + updatedIntegration.oidc_config?.client_id + ) { + const oidcConfig = updatedIntegration.oidc_config; + + if (updatedIntegration.integration_id) { + const updated = await updateOidcProvider(updatedIntegration.integration_id, { + name: updatedIntegration.name, + scopes: oidcConfig.scopes, + requireEmailVerified: oidcConfig.require_email_verified, + }); + if (updated) { + preferences.updateIntegration(integrationId, { + status: 'connected', + error_message: undefined, + }); + updatedIntegrations = preferences.getIntegrations(); + } + } else { + const created = await createOidcProvider({ + workspaceId, + name: updatedIntegration.name, + issuerUrl: oidcConfig.issuer_url, + clientId: oidcConfig.client_id, + clientSecret: oidcConfig.client_secret, + preset: oidcConfig.preset || 'custom', + scopes: oidcConfig.scopes, + requireEmailVerified: oidcConfig.require_email_verified, + autoDiscover: true, + }); + + if (created) { + preferences.updateIntegration(integrationId, { + integration_id: created.id, + status: 'connected', + error_message: undefined, + }); + updatedIntegrations = preferences.getIntegrations(); + } + } + } + + setIntegrations(updatedIntegrations); + }, + [ + createOidcProvider, + encryptionAvailable, + saveSecrets, + setIntegrations, + updateOidcProvider, + workspaceId, + ] + ); + + return { + handleAddCustomIntegration, + handleUpdateIntegrationConfig, + }; +} \ No newline at end 
of file diff --git a/client/src/components/features/settings/integrations-section/use-integration-testing.ts b/client/src/components/features/settings/integrations-section/use-integration-testing.ts new file mode 100644 index 0000000..4b59e65 --- /dev/null +++ b/client/src/components/features/settings/integrations-section/use-integration-testing.ts @@ -0,0 +1,155 @@ +/** + * Hook for integration testing and bulk operations. + */ + +import { useCallback } from 'react'; + +import { getAPI } from '@/api'; +import type { Integration } from '@/api/types'; +import { toast } from '@/hooks'; +import { useSecureIntegrationSecrets } from '@/hooks'; +import { isSecureStorageAvailable } from '@/lib/storage/crypto'; +import { toastError } from '@/lib/observability/errors'; +import { preferences } from '@/lib/preferences'; +import { hasRequiredIntegrationFields } from '@/lib/integrations/utils'; +import { Timing } from '@/api'; + +interface UseIntegrationTestingProps { + integrations: Integration[]; + setIntegrations: (integrations: Integration[]) => void; +} + +export function useIntegrationTesting({ + integrations, + setIntegrations, +}: UseIntegrationTestingProps) { + const { clearSecrets } = useSecureIntegrationSecrets(); + const encryptionAvailable = isSecureStorageAvailable(); + + const handleTestIntegration = useCallback( + async (integration: Integration, setTesting: (id: string | null) => void) => { + setTesting(integration.id); + + if (integration.type === 'oidc' && integration.integration_id) { + try { + const api = getAPI(); + const result = await api.testOidcConnection(integration.integration_id); + const errorMessage = result.results[integration.integration_id]; + + if (result.success_count > 0 && !errorMessage) { + toast({ + title: 'Connection test passed', + description: `${integration.name} OIDC discovery validated successfully`, + }); + preferences.updateIntegration(integration.id, { + status: 'connected', + last_sync: Date.now(), + error_message: undefined, + 
}); + } else { + toast({ + title: 'Connection test failed', + description: errorMessage || 'Failed to validate OIDC discovery', + variant: 'destructive', + }); + preferences.updateIntegration(integration.id, { + status: 'error', + error_message: errorMessage || 'OIDC discovery validation failed', + }); + } + } catch (error) { + const message = toastError({ + title: 'Connection test failed', + error, + fallback: 'Unknown error', + }); + preferences.updateIntegration(integration.id, { + status: 'error', + error_message: message, + }); + } + } else if (hasRequiredIntegrationFields(integration)) { + await new Promise((resolve) => setTimeout(resolve, Timing.MOCK_API_DELAY_MS)); + toast({ + title: 'Connection test passed', + description: `${integration.name} is configured correctly`, + }); + preferences.updateIntegration(integration.id, { + status: 'connected', + last_sync: Date.now(), + }); + } else { + toast({ + title: 'Configuration incomplete', + description: 'Please fill in all required fields', + variant: 'destructive', + }); + preferences.updateIntegration(integration.id, { + status: 'error', + error_message: 'Missing required fields', + }); + } + + setIntegrations(preferences.getIntegrations()); + setTesting(null); + }, + [setIntegrations] + ); + + const handleRemoveIntegration = useCallback( + async (id: string) => { + const integration = integrations.find((i) => i.id === id); + if (integration && encryptionAvailable) { + await clearSecrets(integration); + } + preferences.removeIntegration(id); + setIntegrations(preferences.getIntegrations()); + toast({ title: 'Integration removed' }); + }, + [clearSecrets, encryptionAvailable, integrations, setIntegrations] + ); + + const handleTestAllIntegrations = useCallback( + async (setTestingAll: (testing: boolean) => void) => { + setTestingAll(true); + const configuredIntegrations = integrations.filter( + (integration) => + integration.type !== 'calendar' && hasRequiredIntegrationFields(integration) + ); + + if 
(configuredIntegrations.length === 0) { + toast({ + title: 'No configured integrations', + description: 'Please configure at least one integration first', + variant: 'destructive', + }); + setTestingAll(false); + return; + } + + let successCount = 0; + for (const integration of configuredIntegrations) { + await new Promise((resolve) => setTimeout(resolve, 500)); + successCount++; + preferences.updateIntegration(integration.id, { + status: 'connected', + last_sync: Date.now(), + }); + } + + setIntegrations(preferences.getIntegrations()); + setTestingAll(false); + toast({ + title: 'Test Complete', + description: `${successCount} passed out of ${configuredIntegrations.length} integrations`, + }); + }, + [integrations, setIntegrations] + ); + + return { + handleTestIntegration, + handleRemoveIntegration, + handleTestAllIntegrations, + }; +} \ No newline at end of file diff --git a/client/src/pages/settings/use-settings-page.ts b/client/src/pages/settings/use-settings-page.ts new file mode 100644 index 0000000..b5d6c39 --- /dev/null +++ b/client/src/pages/settings/use-settings-page.ts @@ -0,0 +1,372 @@ +import { useCallback, useEffect, useState } from 'react'; +import { getAPI } from '@/api/interface'; +import { isTauriEnvironment } from '@/api'; +import type { EffectiveServerUrl, Integration, ServerInfo } from '@/api/types'; +import { useAudioDevices } from '@/hooks'; +import { useCalendarSync } from '@/hooks'; +import { useIntegrationSync } from '@/hooks'; +import { useSecureIntegrationSecrets } from '@/hooks'; +import { addClientLog } from '@/lib/observability/client'; +import { DevModeConfig } from '@/lib/config'; +import { buildServerUrl } from '@/lib/config/server'; +import { clearSecureStorage, isSecureStorageAvailable } from '@/lib/storage/crypto'; +import { toastError } from '@/lib/observability/errors'; +import { toast } from '@/hooks'; +import { preferences } from '@/lib/preferences'; +import { + loadStoredTab, + normalizeServerInput, + persistStoredTab, + 
type TabValue, +} from '@/pages/settings/settings-helpers'; +import { + useAITemplateState, + useDarkModeState, + useExportState, + useSimulationState, +} from '@/pages/settings/use-settings-state'; + +export function useSettingsState() { + const [activeTab, setActiveTab] = useState(loadStoredTab()); + + const [serverHost, setServerHost] = useState(''); + const [serverPort, setServerPort] = useState(''); + const [isConnecting, setIsConnecting] = useState(false); + const [isConnected, setIsConnected] = useState(false); + const [serverInfo, setServerInfo] = useState(null); + const [effectiveServerUrl, setEffectiveServerUrl] = useState(null); + + const audioDevices = useAudioDevices({ + autoLoad: isTauriEnvironment() || DevModeConfig.isDevMode(), + showToasts: true, + }); + + const { + simulateTranscription, + showSimulationConfirmation, + setShowSimulationConfirmation, + handleSimulateToggle, + handleSimulationConfirm, + } = useSimulationState(); + const isDevMode = DevModeConfig.isDevMode(); + + const [showServerSwitchWarning, setShowServerSwitchWarning] = useState(false); + const [pendingServerChange, setPendingServerChange] = useState<{ + host: string; + port: string; + } | null>(null); + + const { + defaultExportFormat, + defaultExportLocation, + handleExportFormatChange, + handleExportLocationChange, + } = useExportState(); + + const { + aiTone, + aiFormat, + aiVerbosity, + handleToneChange, + handleFormatChange, + handleVerbosityChange, + } = useAITemplateState(); + + const [integrations, setIntegrations] = useState(preferences.getIntegrations()); + const { darkMode, handleDarkModeToggle } = useDarkModeState(); + + const [loadingApiKeys, setLoadingApiKeys] = useState(true); + const encryptionAvailable = isSecureStorageAvailable(); + const { loadAllSecrets, checkHealthAndMigrate } = useSecureIntegrationSecrets(); + + const [showSecureStorageRecovery, setShowSecureStorageRecovery] = useState(false); + + const envVersion: unknown = 
import.meta.env.VITE_APP_VERSION; + const appVersion = typeof envVersion === 'string' ? envVersion : 'dev'; + const runtimeLabel = isTauriEnvironment() ? 'Desktop' : 'Web'; + + // Integration sync scheduler + const { + syncStates, + triggerSync, + triggerSyncAll, + pauseScheduler, + resumeScheduler, + isSchedulerRunning, + isPaused, + } = useIntegrationSync(); + + const { fetchProviders } = useCalendarSync(); + + const loadEncryptedApiKeys = useCallback(async () => { + if (!encryptionAvailable) { + setLoadingApiKeys(false); + return; + } + + try { + const { status } = await checkHealthAndMigrate(); + + if (status === 'key_mismatch') { + addClientLog({ + level: 'warning', + source: 'system', + message: 'Secure storage key mismatch detected', + details: 'Migration failed - showing recovery options', + metadata: { context: 'settings_secure_storage' }, + }); + setShowSecureStorageRecovery(true); + setLoadingApiKeys(false); + return; + } + + const integrationsWithSecrets = await loadAllSecrets(preferences.getIntegrations()); + setIntegrations(integrationsWithSecrets); + setLoadingApiKeys(false); + } catch (error) { + addClientLog({ + level: 'error', + source: 'system', + message: 'Failed to load encrypted API keys', + details: error instanceof Error ? 
error.message : String(error), + metadata: { context: 'settings_secure_storage' }, + }); + toastError({ + title: 'Failed to load secure settings', + error, + fallback: 'Failed to load secure settings', + }); + setLoadingApiKeys(false); + } + }, [encryptionAvailable, checkHealthAndMigrate, loadAllSecrets]); + + const checkConnection = useCallback(async () => { + try { + const api = getAPI(); + const [serverInfoResult, effectiveUrlResult] = await Promise.all([ + api.getServerInfo(), + api.getEffectiveServerUrl(), + ]); + + setServerInfo(serverInfoResult); + setEffectiveServerUrl(effectiveUrlResult); + setIsConnected(true); + } catch (error) { + addClientLog({ + level: 'info', + source: 'app', + message: 'Settings connection check failed', + details: error instanceof Error ? error.message : String(error), + metadata: { context: 'settings_connection_check' }, + }); + setServerInfo(null); + setEffectiveServerUrl(null); + setIsConnected(false); + } + }, []); + + useEffect(() => { + if (effectiveServerUrl?.url) { + try { + const url = new URL(effectiveServerUrl.url); + setServerHost(url.hostname); + setServerPort(url.port || '80'); + } catch (error) { + addClientLog({ + level: 'warning', + source: 'app', + message: 'Invalid effective server URL received', + details: error instanceof Error ? 
error.message : String(error), + metadata: { context: 'settings_effective_url' }, + }); + setServerHost(''); + setServerPort(''); + } + } + }, [effectiveServerUrl]); + + useEffect(() => { + loadEncryptedApiKeys(); + checkConnection(); + }, [loadEncryptedApiKeys, checkConnection]); + + useEffect(() => { + return preferences.subscribe((prefs) => { + setIntegrations(prefs.integrations); + }); + }, []); + const handleHostChange = useCallback((host: string) => { + setServerHost(host); + }, []); + + const handlePortChange = useCallback((port: string) => { + setServerPort(port); + }, []); + + const handleTabChange = useCallback((value: string) => { + setActiveTab(value as TabValue); + persistStoredTab(value as TabValue); + }, []); + + const performConnect = useCallback(async (host: string, port: string) => { + setIsConnecting(true); + try { + const api = getAPI(); + await api.connect(buildServerUrl(host, port)); + await checkConnection(); + } catch (error) { + addClientLog({ + level: 'warning', + source: 'app', + message: 'Failed to connect to server', + details: error instanceof Error ? 
error.message : String(error), + metadata: { context: 'settings_connect' }, + }); + toastError({ + title: 'Failed to connect to server', + error, + fallback: 'Failed to connect to server', + }); + } finally { + setIsConnecting(false); + } + }, [checkConnection]); + + const handleConnect = useCallback(async () => { + const { host: normalizedHost, port: normalizedPort } = normalizeServerInput( + serverHost, + serverPort + ); + + if (!normalizedHost || !normalizedPort) { + toast({ + title: 'Invalid server address', + description: 'Please enter both host and port', + variant: 'destructive', + }); + return; + } + + const serverUrl = buildServerUrl(normalizedHost, normalizedPort); + + // Check if this is a different server + if (effectiveServerUrl?.url && effectiveServerUrl.url !== serverUrl) { + setPendingServerChange({ host: normalizedHost, port: normalizedPort }); + setShowServerSwitchWarning(true); + return; + } + + await performConnect(normalizedHost, normalizedPort); + }, [serverHost, serverPort, effectiveServerUrl, performConnect]); + + const handleDisconnect = useCallback(async () => { + try { + const api = getAPI(); + await api.disconnect(); + setIsConnected(false); + setServerInfo(null); + setEffectiveServerUrl(null); + } catch (error) { + addClientLog({ + level: 'warning', + source: 'app', + message: 'Failed to disconnect from server', + details: error instanceof Error ? 
error.message : String(error), + metadata: { context: 'settings_disconnect' }, + }); + toastError({ + title: 'Failed to disconnect from server', + error, + fallback: 'Failed to disconnect from server', + }); + } + }, []); + + const handleServerSwitchConfirm = useCallback(async () => { + if (pendingServerChange) { + await performConnect(pendingServerChange.host, pendingServerChange.port); + setPendingServerChange(null); + } + setShowServerSwitchWarning(false); + }, [pendingServerChange, performConnect]); + + const handleServerSwitchCancel = useCallback(() => { + setShowServerSwitchWarning(false); + setPendingServerChange(null); + }, []); + + const handleSecureStorageRecovery = useCallback(async () => { + try { + await clearSecureStorage(); + await loadEncryptedApiKeys(); + setShowSecureStorageRecovery(false); + } catch (error) { + addClientLog({ + level: 'warning', + source: 'system', + message: 'Failed to reset secure storage', + details: error instanceof Error ? error.message : String(error), + metadata: { context: 'settings_secure_storage_reset' }, + }); + toastError({ + title: 'Failed to reset secure storage', + error, + fallback: 'Failed to reset secure storage', + }); + } + }, [loadEncryptedApiKeys]); + + return { + activeTab, + handleTabChange, + serverHost, + serverPort, + isConnecting, + isConnected, + serverInfo, + effectiveServerUrl, + handleHostChange, + handlePortChange, + handleConnect, + handleDisconnect, + checkConnection, + audioDevices, + simulateTranscription, + showSimulationConfirmation, + setShowSimulationConfirmation, + handleSimulateToggle, + handleSimulationConfirm, + isDevMode, + showServerSwitchWarning, + handleServerSwitchConfirm, + handleServerSwitchCancel, + defaultExportFormat, + defaultExportLocation, + handleExportFormatChange, + handleExportLocationChange, + aiTone, + aiFormat, + aiVerbosity, + handleToneChange, + handleFormatChange, + handleVerbosityChange, + integrations, + setIntegrations, + syncStates, + triggerSync, + 
triggerSyncAll, + isSchedulerRunning, + isPaused, + pauseScheduler, + resumeScheduler, + darkMode, + handleDarkModeToggle, + loadingApiKeys, + encryptionAvailable, + showSecureStorageRecovery, + handleSecureStorageRecovery, + appVersion, + runtimeLabel, + fetchProviders, + }; +} diff --git a/src/noteflow/grpc/_grpc_compat.py b/src/noteflow/grpc/_grpc_compat.py new file mode 100644 index 0000000..5f72e73 --- /dev/null +++ b/src/noteflow/grpc/_grpc_compat.py @@ -0,0 +1,104 @@ +"""gRPC compatibility layer for basedpyright strict mode. + +This module provides typed wrappers for gRPC functions that have Unknown types +in the bundled typeshed stubs. All pyright: ignore comments are isolated here +to keep the rest of the codebase clean. + +The grpc and grpc.aio modules have incomplete type stubs that cause +reportUnknownMemberType errors in strict mode. This module creates a typed +interface by wrapping the problematic functions. + +Usage: + from noteflow.grpc._grpc_compat import channel_ready_future, create_aio_server +""" + +from __future__ import annotations + +from concurrent.futures import Future +from typing import TYPE_CHECKING, Protocol + +if TYPE_CHECKING: + import grpc + + +class AsyncServerProtocol(Protocol): + """Protocol matching grpc.aio.Server interface for type checking.""" + + def add_insecure_port(self, address: str) -> int: + """Bind to address without credentials.""" + ... + + def add_generic_rpc_handlers( + self, + generic_rpc_handlers: object, + ) -> None: + """Add RPC handlers.""" + ... + + async def start(self) -> None: + """Start the server.""" + ... + + async def stop(self, grace: float | None) -> None: + """Stop the server.""" + ... + + async def wait_for_termination(self, timeout: float | None = None) -> bool: + """Wait for server termination.""" + ... + + +def channel_ready_future(channel: grpc.Channel) -> Future[None]: + """Create a Future that tracks when a Channel is ready. 
+ + This is a typed wrapper for grpc.channel_ready_future that avoids + reportUnknownMemberType errors from the incomplete type stubs. + + Args: + channel: A gRPC Channel object. + + Returns: + A Future that matures when the channel connectivity is READY. + """ + import grpc as _grpc + + # The grpc module's channel_ready_future returns Future[Unknown] in stubs + # but at runtime returns a grpc.Future compatible with concurrent.futures.Future + result: Future[None] = _grpc.channel_ready_future( # pyright: ignore[reportUnknownMemberType, reportUnknownVariableType, reportAssignmentType] + channel + ) + return result + + +def create_aio_server( + interceptors: list[object] | None = None, + options: list[tuple[str, int | str | bool]] | None = None, + maximum_concurrent_rpcs: int | None = None, +) -> AsyncServerProtocol: + """Create an async gRPC server. + + This is a typed wrapper for grpc.aio.server that avoids + reportUnknownMemberType errors from the incomplete type stubs. + + Args: + interceptors: Optional list of server interceptors. + options: Optional list of channel options as (name, value) tuples. + maximum_concurrent_rpcs: Optional maximum concurrent RPCs limit. + + Returns: + An async gRPC Server instance (as AsyncServerProtocol). 
+ """ + import grpc.aio as _grpc_aio + + # Build kwargs to avoid passing None values + kwargs: dict[str, object] = {} + if interceptors is not None: + kwargs["interceptors"] = interceptors + if options is not None: + kwargs["options"] = options + if maximum_concurrent_rpcs is not None: + kwargs["maximum_concurrent_rpcs"] = maximum_concurrent_rpcs + + # The grpc.aio.server function has Unknown type parameters in stubs + server: AsyncServerProtocol = _grpc_aio.server(**kwargs) # pyright: ignore[reportUnknownMemberType, reportAssignmentType] + return server diff --git a/src/noteflow/grpc/client.py b/src/noteflow/grpc/client.py index 036b136..4c4ff6b 100644 --- a/src/noteflow/grpc/client.py +++ b/src/noteflow/grpc/client.py @@ -9,6 +9,7 @@ from typing import TYPE_CHECKING, Final import grpc from noteflow.grpc import types as grpc_types +from noteflow.grpc._grpc_compat import channel_ready_future from noteflow.grpc.client_mixins import ( AnnotationClientMixin, DiarizationClientMixin, @@ -147,7 +148,7 @@ class NoteFlowClient( ) # Wait for channel to be ready - grpc.channel_ready_future(self._channel).result(timeout=timeout) + channel_ready_future(self._channel).result(timeout=timeout) self._stub = noteflow_pb2_grpc.NoteFlowServiceStub(self._channel) self._connected = True diff --git a/src/noteflow/grpc/interceptors/_types.py b/src/noteflow/grpc/interceptors/_types.py index e0d1b0d..f512af0 100644 --- a/src/noteflow/grpc/interceptors/_types.py +++ b/src/noteflow/grpc/interceptors/_types.py @@ -1,8 +1,163 @@ -"""Shared type variables for interceptor typing.""" +"""Shared type variables and Protocol definitions for interceptor typing. + +This module provides Protocol-based type definitions for gRPC async types that +are not fully typed in the bundled typeshed stubs. Using Protocols allows +proper type inference in strict basedpyright mode. + +These types mirror the runtime behavior of grpc.aio types but provide +complete type information that the bundled stubs lack. 
+""" from __future__ import annotations -from typing import TypeVar +import importlib +from collections.abc import AsyncIterator, Awaitable, Callable, Sequence +from typing import NoReturn, Protocol, TypeVar, cast + +import grpc TRequest = TypeVar("TRequest") TResponse = TypeVar("TResponse") + +# Metadata type alias +MetadataLike = Sequence[tuple[str, str | bytes]] + + +class ServicerContextProtocol(Protocol): + """Protocol for gRPC servicer context. + + This matches the grpc.aio.ServicerContext interface at runtime. + """ + + async def abort( + self, + code: grpc.StatusCode, + details: str = "", + trailing_metadata: MetadataLike = (), + ) -> NoReturn: + """Abort the RPC with given status code and details.""" + ... + + def peer(self) -> str | None: + """Get peer address.""" + ... + + def invocation_metadata(self) -> MetadataLike | None: + """Get metadata sent by the client.""" + ... + + def set_code(self, code: grpc.StatusCode) -> None: + """Set the response status code.""" + ... + + def set_details(self, details: str) -> None: + """Set the response details string.""" + ... + + +class RpcMethodHandlerProtocol(Protocol): + """Protocol for RPC method handlers. + + This matches the grpc.RpcMethodHandler interface at runtime. + Note: The handler properties return objects that are callable with + specific request/response types, but we use object here for Protocol + compatibility. Use casts at call sites for proper typing. + """ + + @property + def request_deserializer(self) -> Callable[[bytes], object] | None: + """Request deserializer function.""" + ... + + @property + def response_serializer(self) -> Callable[[object], bytes] | None: + """Response serializer function.""" + ... + + @property + def unary_unary(self) -> object | None: + """Unary-unary handler behavior.""" + ... + + @property + def unary_stream(self) -> object | None: + """Unary-stream handler behavior.""" + ... 
+ + @property + def stream_unary(self) -> object | None: + """Stream-unary handler behavior.""" + ... + + @property + def stream_stream(self) -> object | None: + """Stream-stream handler behavior.""" + ... + + +class GrpcFactoriesProtocol(Protocol): + """Protocol for typed grpc factory functions. + + This provides properly typed signatures for the grpc module's + RPC method handler factory functions. Use with cast() to get + typed access to the grpc module. + """ + + def unary_unary_rpc_method_handler( + self, + behavior: Callable[ + [TRequest, ServicerContextProtocol], + Awaitable[TResponse], + ], + *, + request_deserializer: Callable[[bytes], TRequest] | None = None, + response_serializer: Callable[[TResponse], bytes] | None = None, + ) -> RpcMethodHandlerProtocol: + """Create a unary-unary RPC method handler.""" + ... + + def unary_stream_rpc_method_handler( + self, + behavior: Callable[ + [TRequest, ServicerContextProtocol], + AsyncIterator[TResponse], + ], + *, + request_deserializer: Callable[[bytes], TRequest] | None = None, + response_serializer: Callable[[TResponse], bytes] | None = None, + ) -> RpcMethodHandlerProtocol: + """Create a unary-stream RPC method handler.""" + ... + + def stream_unary_rpc_method_handler( + self, + behavior: Callable[ + [AsyncIterator[TRequest], ServicerContextProtocol], + Awaitable[TResponse], + ], + *, + request_deserializer: Callable[[bytes], TRequest] | None = None, + response_serializer: Callable[[TResponse], bytes] | None = None, + ) -> RpcMethodHandlerProtocol: + """Create a stream-unary RPC method handler.""" + ... + + def stream_stream_rpc_method_handler( + self, + behavior: Callable[ + [AsyncIterator[TRequest], ServicerContextProtocol], + AsyncIterator[TResponse], + ], + *, + request_deserializer: Callable[[bytes], TRequest] | None = None, + response_serializer: Callable[[TResponse], bytes] | None = None, + ) -> RpcMethodHandlerProtocol: + """Create a stream-stream RPC method handler.""" + ... 
+ + +# Typed grpc module with proper factory function signatures +# Use this instead of importing grpc directly when you need typed factories +typed_grpc: GrpcFactoriesProtocol = cast( + GrpcFactoriesProtocol, importlib.import_module("grpc") +) diff --git a/src/noteflow/grpc/interceptors/identity.py b/src/noteflow/grpc/interceptors/identity.py index 5a98885..5663111 100644 --- a/src/noteflow/grpc/interceptors/identity.py +++ b/src/noteflow/grpc/interceptors/identity.py @@ -11,10 +11,9 @@ from __future__ import annotations from collections.abc import AsyncIterator, Awaitable, Callable from functools import partial -from typing import Protocol, cast +from typing import NoReturn import grpc -from grpc import aio from noteflow.infrastructure.logging import ( get_logger, @@ -23,7 +22,11 @@ from noteflow.infrastructure.logging import ( workspace_id_var, ) -from ._types import TRequest, TResponse +from ._types import ( + RpcMethodHandlerProtocol, + ServicerContextProtocol, + typed_grpc, +) logger = get_logger(__name__) @@ -35,25 +38,6 @@ METADATA_WORKSPACE_ID = "x-workspace-id" # Error messages _ERR_MISSING_REQUEST_ID = "Missing required x-request-id header" -class _TypedRpcMethodHandler(Protocol[TRequest, TResponse]): - unary_unary: Callable[ - [TRequest, aio.ServicerContext[TRequest, TResponse]], - Awaitable[TResponse], - ] | None - unary_stream: Callable[ - [TRequest, aio.ServicerContext[TRequest, TResponse]], - AsyncIterator[TResponse], - ] | None - stream_unary: Callable[ - [AsyncIterator[TRequest], aio.ServicerContext[TRequest, TResponse]], - Awaitable[TResponse], - ] | None - stream_stream: Callable[ - [AsyncIterator[TRequest], aio.ServicerContext[TRequest, TResponse]], - AsyncIterator[TResponse], - ] | None - request_deserializer: Callable[[bytes], TRequest] | None - response_serializer: Callable[[TResponse], bytes] | None def _coerce_metadata_value(value: str | bytes) -> str: """Normalize metadata values to string.""" @@ -86,39 +70,41 @@ def 
_apply_identity_context(metadata: dict[str, str | bytes], request_id: str) - async def _reject_unary_unary( message: str, - request: TRequest, - context: aio.ServicerContext[TRequest, TResponse], -) -> TResponse: + request: object, + context: ServicerContextProtocol, +) -> NoReturn: await context.abort(grpc.StatusCode.UNAUTHENTICATED, message) - raise AssertionError("Unreachable after abort") async def _reject_unary_stream( message: str, - request: TRequest, - context: aio.ServicerContext[TRequest, TResponse], -) -> AsyncIterator[TResponse]: + request: object, + context: ServicerContextProtocol, +) -> AsyncIterator[object]: + if False: # noqa: SIM223 - required to make this an async generator + yield # Makes this function an async generator await context.abort(grpc.StatusCode.UNAUTHENTICATED, message) async def _reject_stream_unary( message: str, - request_iterator: AsyncIterator[TRequest], - context: aio.ServicerContext[TRequest, TResponse], -) -> TResponse: + request_iterator: AsyncIterator[object], + context: ServicerContextProtocol, +) -> NoReturn: await context.abort(grpc.StatusCode.UNAUTHENTICATED, message) - raise AssertionError("Unreachable after abort") async def _reject_stream_stream( message: str, - request_iterator: AsyncIterator[TRequest], - context: aio.ServicerContext[TRequest, TResponse], -) -> AsyncIterator[TResponse]: + request_iterator: AsyncIterator[object], + context: ServicerContextProtocol, +) -> AsyncIterator[object]: + if False: # noqa: SIM223 - required to make this an async generator + yield # Makes this function an async generator await context.abort(grpc.StatusCode.UNAUTHENTICATED, message) -class IdentityInterceptor(aio.ServerInterceptor): +class IdentityInterceptor: """Interceptor that validates and populates identity context for RPC calls. 
Extract user and workspace identifiers from gRPC metadata and @@ -137,10 +123,10 @@ class IdentityInterceptor(aio.ServerInterceptor): self, continuation: Callable[ [grpc.HandlerCallDetails], - Awaitable[grpc.RpcMethodHandler[TRequest, TResponse]], + Awaitable[RpcMethodHandlerProtocol], ], handler_call_details: grpc.HandlerCallDetails, - ) -> grpc.RpcMethodHandler[TRequest, TResponse]: + ) -> RpcMethodHandlerProtocol: """Intercept incoming RPC calls to validate and set identity context.""" metadata = dict(handler_call_details.invocation_metadata or []) @@ -163,34 +149,33 @@ class IdentityInterceptor(aio.ServerInterceptor): def _create_unauthenticated_handler( - handler: grpc.RpcMethodHandler[TRequest, TResponse], + handler: RpcMethodHandlerProtocol, message: str, -) -> grpc.RpcMethodHandler[TRequest, TResponse]: +) -> RpcMethodHandlerProtocol: """Create a handler that rejects with UNAUTHENTICATED status.""" - typed_handler = cast(_TypedRpcMethodHandler[TRequest, TResponse], handler) - request_deserializer = typed_handler.request_deserializer - response_serializer = typed_handler.response_serializer + request_deserializer = handler.request_deserializer + response_serializer = handler.response_serializer - if typed_handler.unary_unary is not None: - return grpc.unary_unary_rpc_method_handler( + if handler.unary_unary is not None: + return typed_grpc.unary_unary_rpc_method_handler( partial(_reject_unary_unary, message), request_deserializer=request_deserializer, response_serializer=response_serializer, ) - if typed_handler.unary_stream is not None: - return grpc.unary_stream_rpc_method_handler( + if handler.unary_stream is not None: + return typed_grpc.unary_stream_rpc_method_handler( partial(_reject_unary_stream, message), request_deserializer=request_deserializer, response_serializer=response_serializer, ) - if typed_handler.stream_unary is not None: - return grpc.stream_unary_rpc_method_handler( + if handler.stream_unary is not None: + return 
typed_grpc.stream_unary_rpc_method_handler( partial(_reject_stream_unary, message), request_deserializer=request_deserializer, response_serializer=response_serializer, ) - if typed_handler.stream_stream is not None: - return grpc.stream_stream_rpc_method_handler( + if handler.stream_stream is not None: + return typed_grpc.stream_stream_rpc_method_handler( partial(_reject_stream_stream, message), request_deserializer=request_deserializer, response_serializer=response_serializer, diff --git a/src/noteflow/grpc/interceptors/logging/_handler_factory.py b/src/noteflow/grpc/interceptors/logging/_handler_factory.py index 19b70f5..8c71f26 100644 --- a/src/noteflow/grpc/interceptors/logging/_handler_factory.py +++ b/src/noteflow/grpc/interceptors/logging/_handler_factory.py @@ -2,14 +2,13 @@ from __future__ import annotations -import importlib from collections.abc import AsyncIterator, Awaitable, Callable -from typing import Protocol, cast -import grpc -from grpc import aio - -from .._types import TRequest, TResponse +from .._types import ( + RpcMethodHandlerProtocol, + ServicerContextProtocol, + typed_grpc, +) from ._wrappers import ( wrap_stream_stream, wrap_stream_unary, @@ -17,93 +16,21 @@ from ._wrappers import ( wrap_unary_unary, ) -RpcMethodHandler = grpc.RpcMethodHandler - - -class _TypedRpcMethodHandler(Protocol[TRequest, TResponse]): - unary_unary: Callable[ - [TRequest, aio.ServicerContext[TRequest, TResponse]], - Awaitable[TResponse], - ] | None - unary_stream: Callable[ - [TRequest, aio.ServicerContext[TRequest, TResponse]], - AsyncIterator[TResponse], - ] | None - stream_unary: Callable[ - [AsyncIterator[TRequest], aio.ServicerContext[TRequest, TResponse]], - Awaitable[TResponse], - ] | None - stream_stream: Callable[ - [AsyncIterator[TRequest], aio.ServicerContext[TRequest, TResponse]], - AsyncIterator[TResponse], - ] | None - request_deserializer: Callable[[bytes], TRequest] | None - response_serializer: Callable[[TResponse], bytes] | None - - -class 
_GrpcFactories(Protocol): - def unary_unary_rpc_method_handler( - self, - behavior: Callable[ - [TRequest, aio.ServicerContext[TRequest, TResponse]], - Awaitable[TResponse], - ], - *, - request_deserializer: Callable[[bytes], TRequest] | None = None, - response_serializer: Callable[[TResponse], bytes] | None = None, - ) -> RpcMethodHandler[TRequest, TResponse]: ... - - def unary_stream_rpc_method_handler( - self, - behavior: Callable[ - [TRequest, aio.ServicerContext[TRequest, TResponse]], - AsyncIterator[TResponse], - ], - *, - request_deserializer: Callable[[bytes], TRequest] | None = None, - response_serializer: Callable[[TResponse], bytes] | None = None, - ) -> RpcMethodHandler[TRequest, TResponse]: ... - - def stream_unary_rpc_method_handler( - self, - behavior: Callable[ - [AsyncIterator[TRequest], aio.ServicerContext[TRequest, TResponse]], - Awaitable[TResponse], - ], - *, - request_deserializer: Callable[[bytes], TRequest] | None = None, - response_serializer: Callable[[TResponse], bytes] | None = None, - ) -> RpcMethodHandler[TRequest, TResponse]: ... - - def stream_stream_rpc_method_handler( - self, - behavior: Callable[ - [AsyncIterator[TRequest], aio.ServicerContext[TRequest, TResponse]], - AsyncIterator[TResponse], - ], - *, - request_deserializer: Callable[[bytes], TRequest] | None = None, - response_serializer: Callable[[TResponse], bytes] | None = None, - ) -> RpcMethodHandler[TRequest, TResponse]: ... 
- - -_grpc = cast(_GrpcFactories, importlib.import_module("grpc")) - def wrap_unary_unary_handler( - handler: _TypedRpcMethodHandler[TRequest, TResponse], + handler: RpcMethodHandlerProtocol, method: str, -) -> RpcMethodHandler[TRequest, TResponse]: +) -> RpcMethodHandlerProtocol: """Create wrapped unary-unary handler with logging.""" if handler.unary_unary is None: raise TypeError("Unary-unary handler is missing") wrapped: Callable[ - [TRequest, aio.ServicerContext[TRequest, TResponse]], - Awaitable[TResponse], + [object, ServicerContextProtocol], + Awaitable[object], ] = wrap_unary_unary(handler.unary_unary, method) request_deserializer = handler.request_deserializer response_serializer = handler.response_serializer - return _grpc.unary_unary_rpc_method_handler( + return typed_grpc.unary_unary_rpc_method_handler( wrapped, request_deserializer=request_deserializer, response_serializer=response_serializer, @@ -111,19 +38,19 @@ def wrap_unary_unary_handler( def wrap_unary_stream_handler( - handler: _TypedRpcMethodHandler[TRequest, TResponse], + handler: RpcMethodHandlerProtocol, method: str, -) -> RpcMethodHandler[TRequest, TResponse]: +) -> RpcMethodHandlerProtocol: """Create wrapped unary-stream handler with logging.""" if handler.unary_stream is None: raise TypeError("Unary-stream handler is missing") wrapped: Callable[ - [TRequest, aio.ServicerContext[TRequest, TResponse]], - AsyncIterator[TResponse], + [object, ServicerContextProtocol], + AsyncIterator[object], ] = wrap_unary_stream(handler.unary_stream, method) request_deserializer = handler.request_deserializer response_serializer = handler.response_serializer - return _grpc.unary_stream_rpc_method_handler( + return typed_grpc.unary_stream_rpc_method_handler( wrapped, request_deserializer=request_deserializer, response_serializer=response_serializer, @@ -131,19 +58,19 @@ def wrap_unary_stream_handler( def wrap_stream_unary_handler( - handler: _TypedRpcMethodHandler[TRequest, TResponse], + handler: 
RpcMethodHandlerProtocol, method: str, -) -> RpcMethodHandler[TRequest, TResponse]: +) -> RpcMethodHandlerProtocol: """Create wrapped stream-unary handler with logging.""" if handler.stream_unary is None: raise TypeError("Stream-unary handler is missing") wrapped: Callable[ - [AsyncIterator[TRequest], aio.ServicerContext[TRequest, TResponse]], - Awaitable[TResponse], + [AsyncIterator[object], ServicerContextProtocol], + Awaitable[object], ] = wrap_stream_unary(handler.stream_unary, method) request_deserializer = handler.request_deserializer response_serializer = handler.response_serializer - return _grpc.stream_unary_rpc_method_handler( + return typed_grpc.stream_unary_rpc_method_handler( wrapped, request_deserializer=request_deserializer, response_serializer=response_serializer, @@ -151,29 +78,29 @@ def wrap_stream_unary_handler( def wrap_stream_stream_handler( - handler: _TypedRpcMethodHandler[TRequest, TResponse], + handler: RpcMethodHandlerProtocol, method: str, -) -> RpcMethodHandler[TRequest, TResponse]: +) -> RpcMethodHandlerProtocol: """Create wrapped stream-stream handler with logging.""" if handler.stream_stream is None: raise TypeError("Stream-stream handler is missing") wrapped: Callable[ - [AsyncIterator[TRequest], aio.ServicerContext[TRequest, TResponse]], - AsyncIterator[TResponse], + [AsyncIterator[object], ServicerContextProtocol], + AsyncIterator[object], ] = wrap_stream_stream(handler.stream_stream, method) request_deserializer = handler.request_deserializer response_serializer = handler.response_serializer - return _grpc.stream_stream_rpc_method_handler( + return typed_grpc.stream_stream_rpc_method_handler( wrapped, request_deserializer=request_deserializer, response_serializer=response_serializer, ) -def create_logging_handler[TRequest, TResponse]( - handler: RpcMethodHandler[TRequest, TResponse], +def create_logging_handler( + handler: RpcMethodHandlerProtocol, method: str, -) -> RpcMethodHandler[TRequest, TResponse]: +) -> 
RpcMethodHandlerProtocol: """Wrap an RPC handler to add request logging. Args: @@ -183,15 +110,13 @@ def create_logging_handler[TRequest, TResponse]( Returns: Wrapped handler with logging. """ - # cast required: grpc.RpcMethodHandler does not expose member signatures - typed_handler = cast(_TypedRpcMethodHandler[TRequest, TResponse], handler) - if typed_handler.unary_unary is not None: - return wrap_unary_unary_handler(typed_handler, method) - if typed_handler.unary_stream is not None: - return wrap_unary_stream_handler(typed_handler, method) - if typed_handler.stream_unary is not None: - return wrap_stream_unary_handler(typed_handler, method) - if typed_handler.stream_stream is not None: - return wrap_stream_stream_handler(typed_handler, method) + if handler.unary_unary is not None: + return wrap_unary_unary_handler(handler, method) + if handler.unary_stream is not None: + return wrap_unary_stream_handler(handler, method) + if handler.stream_unary is not None: + return wrap_stream_unary_handler(handler, method) + if handler.stream_stream is not None: + return wrap_stream_stream_handler(handler, method) # Fallback: return original handler if type unknown return handler diff --git a/src/noteflow/grpc/interceptors/logging/_logging_ops.py b/src/noteflow/grpc/interceptors/logging/_logging_ops.py index 56f8dee..ca50571 100644 --- a/src/noteflow/grpc/interceptors/logging/_logging_ops.py +++ b/src/noteflow/grpc/interceptors/logging/_logging_ops.py @@ -3,11 +3,11 @@ from __future__ import annotations import grpc -from grpc import aio from noteflow.domain.constants.fields import CODE from noteflow.infrastructure.logging import get_logger, get_request_id +from .._types import ServicerContextProtocol from ._constants import STATUS_UNKNOWN logger = get_logger(__name__) @@ -38,9 +38,7 @@ def log_request( ) -def get_peer[TRequest, TResponse]( - context: aio.ServicerContext[TRequest, TResponse], -) -> str | None: +def get_peer(context: ServicerContextProtocol) -> str | None: 
"""Extract peer address from context safely. Args: diff --git a/src/noteflow/grpc/interceptors/logging/_wrappers.py b/src/noteflow/grpc/interceptors/logging/_wrappers.py index 3c8bb88..337b1fe 100644 --- a/src/noteflow/grpc/interceptors/logging/_wrappers.py +++ b/src/noteflow/grpc/interceptors/logging/_wrappers.py @@ -4,29 +4,27 @@ from __future__ import annotations from collections.abc import AsyncIterator, Awaitable, Callable -from grpc import aio - -from .._types import TRequest, TResponse +from .._types import ServicerContextProtocol from ._logging_ops import get_peer from ._timer import RequestTimer def wrap_unary_unary( handler: Callable[ - [TRequest, aio.ServicerContext[TRequest, TResponse]], - Awaitable[TResponse], + [object, ServicerContextProtocol], + Awaitable[object], ], method: str, ) -> Callable[ - [TRequest, aio.ServicerContext[TRequest, TResponse]], - Awaitable[TResponse], + [object, ServicerContextProtocol], + Awaitable[object], ]: """Wrap unary-unary handler with logging.""" async def wrapper( - request: TRequest, - context: aio.ServicerContext[TRequest, TResponse], - ) -> TResponse: + request: object, + context: ServicerContextProtocol, + ) -> object: timer = RequestTimer(method, get_peer(context)) try: return await handler(request, context) @@ -41,20 +39,20 @@ def wrap_unary_unary( def wrap_unary_stream( handler: Callable[ - [TRequest, aio.ServicerContext[TRequest, TResponse]], - AsyncIterator[TResponse], + [object, ServicerContextProtocol], + AsyncIterator[object], ], method: str, ) -> Callable[ - [TRequest, aio.ServicerContext[TRequest, TResponse]], - AsyncIterator[TResponse], + [object, ServicerContextProtocol], + AsyncIterator[object], ]: """Wrap unary-stream handler with logging.""" async def wrapper( - request: TRequest, - context: aio.ServicerContext[TRequest, TResponse], - ) -> AsyncIterator[TResponse]: + request: object, + context: ServicerContextProtocol, + ) -> AsyncIterator[object]: timer = RequestTimer(method, get_peer(context)) 
async for response in iterate_with_logging( handler(request, context), timer @@ -66,20 +64,20 @@ def wrap_unary_stream( def wrap_stream_unary( handler: Callable[ - [AsyncIterator[TRequest], aio.ServicerContext[TRequest, TResponse]], - Awaitable[TResponse], + [AsyncIterator[object], ServicerContextProtocol], + Awaitable[object], ], method: str, ) -> Callable[ - [AsyncIterator[TRequest], aio.ServicerContext[TRequest, TResponse]], - Awaitable[TResponse], + [AsyncIterator[object], ServicerContextProtocol], + Awaitable[object], ]: """Wrap stream-unary handler with logging.""" async def wrapper( - request_iterator: AsyncIterator[TRequest], - context: aio.ServicerContext[TRequest, TResponse], - ) -> TResponse: + request_iterator: AsyncIterator[object], + context: ServicerContextProtocol, + ) -> object: timer = RequestTimer(method, get_peer(context)) try: return await handler(request_iterator, context) @@ -94,20 +92,20 @@ def wrap_stream_unary( def wrap_stream_stream( handler: Callable[ - [AsyncIterator[TRequest], aio.ServicerContext[TRequest, TResponse]], - AsyncIterator[TResponse], + [AsyncIterator[object], ServicerContextProtocol], + AsyncIterator[object], ], method: str, ) -> Callable[ - [AsyncIterator[TRequest], aio.ServicerContext[TRequest, TResponse]], - AsyncIterator[TResponse], + [AsyncIterator[object], ServicerContextProtocol], + AsyncIterator[object], ]: """Wrap stream-stream handler with logging.""" async def wrapper( - request_iterator: AsyncIterator[TRequest], - context: aio.ServicerContext[TRequest, TResponse], - ) -> AsyncIterator[TResponse]: + request_iterator: AsyncIterator[object], + context: ServicerContextProtocol, + ) -> AsyncIterator[object]: timer = RequestTimer(method, get_peer(context)) async for response in iterate_with_logging( handler(request_iterator, context), timer @@ -117,10 +115,10 @@ def wrap_stream_stream( return wrapper -async def iterate_with_logging[T]( - iterator: AsyncIterator[T], +async def iterate_with_logging( + iterator: 
AsyncIterator[object], timer: RequestTimer, -) -> AsyncIterator[T]: +) -> AsyncIterator[object]: """Iterate over async iterator with error tracking and final logging. Args: diff --git a/src/noteflow/grpc/interceptors/logging/logging.py b/src/noteflow/grpc/interceptors/logging/logging.py index ddbad15..5cb0f0d 100644 --- a/src/noteflow/grpc/interceptors/logging/logging.py +++ b/src/noteflow/grpc/interceptors/logging/logging.py @@ -9,13 +9,12 @@ from __future__ import annotations from collections.abc import Awaitable, Callable import grpc -from grpc import aio -from .._types import TRequest, TResponse +from .._types import RpcMethodHandlerProtocol from ._handler_factory import create_logging_handler -class RequestLoggingInterceptor(aio.ServerInterceptor): +class RequestLoggingInterceptor: """Interceptor that logs all RPC calls with timing and status. Logs at INFO level for every request with: @@ -30,10 +29,10 @@ class RequestLoggingInterceptor(aio.ServerInterceptor): self, continuation: Callable[ [grpc.HandlerCallDetails], - Awaitable[grpc.RpcMethodHandler[TRequest, TResponse]], + Awaitable[RpcMethodHandlerProtocol], ], handler_call_details: grpc.HandlerCallDetails, - ) -> grpc.RpcMethodHandler[TRequest, TResponse]: + ) -> RpcMethodHandlerProtocol: """Intercept incoming RPC calls to log request timing and status. 
Args: diff --git a/src/noteflow/grpc/mixins/calendar_oauth_config.py b/src/noteflow/grpc/mixins/calendar_oauth_config.py index ab69ce4..bfdce1e 100644 --- a/src/noteflow/grpc/mixins/calendar_oauth_config.py +++ b/src/noteflow/grpc/mixins/calendar_oauth_config.py @@ -22,6 +22,7 @@ from .errors import ( if TYPE_CHECKING: from ._types import GrpcContext from .protocols import ServicerHost +from .errors._constants import UNREACHABLE_ERROR def _build_oauth_client_config( config: noteflow_pb2.OAuthClientConfig, @@ -58,16 +59,16 @@ async def _require_admin_access( workspace = await uow.workspaces.get(workspace_id) if not workspace: await abort_not_found(context, ENTITY_WORKSPACE, str(workspace_id)) - raise AssertionError("unreachable") from None + raise AssertionError(UNREACHABLE_ERROR) from None membership = await uow.workspaces.get_membership(workspace_id, user_ctx.user_id) if not membership: await abort_not_found(context, "Workspace membership", str(workspace_id)) - raise AssertionError("unreachable") from None + raise AssertionError(UNREACHABLE_ERROR) from None if not membership.role.can_admin(): await abort_permission_denied(context, ERROR_WORKSPACE_ADMIN_REQUIRED) - raise AssertionError("unreachable") from None + raise AssertionError(UNREACHABLE_ERROR) from None class CalendarOAuthConfigMixin: @@ -89,7 +90,7 @@ class CalendarOAuthConfigMixin: ) except CalendarServiceError as e: await abort_invalid_argument(context, str(e)) - raise AssertionError("unreachable") from None + raise AssertionError(UNREACHABLE_ERROR) from None return noteflow_pb2.GetOAuthClientConfigResponse( config=noteflow_pb2.OAuthClientConfig( @@ -113,10 +114,10 @@ class CalendarOAuthConfigMixin: if not request.provider: await abort_invalid_argument(context, "Provider is required") - raise AssertionError("unreachable") from None + raise AssertionError(UNREACHABLE_ERROR) from None if not request.HasField("config"): await abort_invalid_argument(context, "OAuth config is required") - raise 
AssertionError("unreachable") from None + raise AssertionError(UNREACHABLE_ERROR) from None client_config = _build_oauth_client_config(request.config) @@ -129,6 +130,6 @@ class CalendarOAuthConfigMixin: ) except CalendarServiceError as e: await abort_invalid_argument(context, str(e)) - raise AssertionError("unreachable") from None + raise AssertionError(UNREACHABLE_ERROR) from None return noteflow_pb2.SetOAuthClientConfigResponse(success=True) diff --git a/src/noteflow/grpc/mixins/diarization_job.py b/src/noteflow/grpc/mixins/diarization_job.py index e69065b..655def0 100644 --- a/src/noteflow/grpc/mixins/diarization_job.py +++ b/src/noteflow/grpc/mixins/diarization_job.py @@ -19,6 +19,7 @@ from .errors import ( abort_not_found, ) from .protocols import DiarizationJobRepositoryProvider +from .errors._constants import UNREACHABLE_ERROR if TYPE_CHECKING: from collections.abc import Callable @@ -147,17 +148,17 @@ class DiarizationJobMixin: async with cast(DiarizationJobRepositoryProvider, self.create_repository_provider()) as repo: if not repo.supports_diarization_jobs: await abort_database_required(context, "Diarization job cancellation") - raise AssertionError("unreachable") # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) # abort is NoReturn job = await repo.diarization_jobs.get(job_id) if job is None: await abort_not_found(context, "Diarization job", job_id) - raise AssertionError("unreachable") # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) # abort is NoReturn cancellable_statuses = (noteflow_pb2.JOB_STATUS_QUEUED, noteflow_pb2.JOB_STATUS_RUNNING) if job.status not in cancellable_statuses: await abort_failed_precondition(context, _ERR_ALREADY_COMPLETE) - raise AssertionError("unreachable") # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) # abort is NoReturn await repo.diarization_jobs.update_status( job_id, diff --git a/src/noteflow/grpc/mixins/meeting/_project_scope.py b/src/noteflow/grpc/mixins/meeting/_project_scope.py 
index bcf656d..1239515 100644 --- a/src/noteflow/grpc/mixins/meeting/_project_scope.py +++ b/src/noteflow/grpc/mixins/meeting/_project_scope.py @@ -17,6 +17,7 @@ from ..protocols import MeetingRepositoryProvider if TYPE_CHECKING: from .._types import GrpcContext from ..protocols import ServicerHost +from ..errors._constants import UNREACHABLE_ERROR logger = get_logger(__name__) @@ -50,7 +51,7 @@ async def parse_project_ids_or_abort( context, f"{ERROR_INVALID_PROJECT_ID_PREFIX}{raw_project_id}", ) - raise AssertionError("unreachable") from e + raise AssertionError(UNREACHABLE_ERROR) from e return project_ids @@ -78,7 +79,7 @@ async def parse_project_id_or_abort( ) error_message = f"{ERROR_INVALID_PROJECT_ID_PREFIX}{request.project_id}" await abort_invalid_argument(context, error_message) - raise AssertionError("unreachable") from e + raise AssertionError(UNREACHABLE_ERROR) from e async def resolve_active_project_id( diff --git a/src/noteflow/grpc/mixins/meeting/meeting_mixin.py b/src/noteflow/grpc/mixins/meeting/meeting_mixin.py index d8d5a5e..e6c5941 100644 --- a/src/noteflow/grpc/mixins/meeting/meeting_mixin.py +++ b/src/noteflow/grpc/mixins/meeting/meeting_mixin.py @@ -42,6 +42,7 @@ if TYPE_CHECKING: from .._types import GrpcContext from ..protocols import ServicerHost +from ..errors._constants import UNREACHABLE_ERROR logger = get_logger(__name__) @@ -65,7 +66,7 @@ async def _load_meeting_for_stop( if meeting is None: logger.warning("StopMeeting: meeting not found", meeting_id=meeting_id_str) await abort_not_found(context, ENTITY_MEETING, meeting_id_str) - raise AssertionError("unreachable") + raise AssertionError(UNREACHABLE_ERROR) return meeting diff --git a/src/noteflow/grpc/mixins/oidc/oidc_mixin.py b/src/noteflow/grpc/mixins/oidc/oidc_mixin.py index fad46b6..fef282f 100644 --- a/src/noteflow/grpc/mixins/oidc/oidc_mixin.py +++ b/src/noteflow/grpc/mixins/oidc/oidc_mixin.py @@ -14,6 +14,7 @@ from ...proto import noteflow_pb2 from .._types import GrpcContext 
from ..converters import oidc_provider_to_proto from ..errors import abort_invalid_argument, parse_workspace_id +from ..errors._constants import UNREACHABLE_ERROR from ._support import ( ENTITY_OIDC_PROVIDER, ERR_INVALID_PRESET, @@ -59,14 +60,14 @@ class OidcMixin: preset = parse_preset(request.preset) if request.preset else OidcProviderPreset.CUSTOM except ValueError: await abort_invalid_argument(context, ERR_INVALID_PRESET) - raise AssertionError("unreachable") from None # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) from None # abort is NoReturn # Parse workspace ID try: workspace_id = UUID(request.workspace_id) if request.workspace_id else UUID(int=0) except ValueError: await abort_invalid_argument(context, ERROR_INVALID_WORKSPACE_ID_FORMAT) - raise AssertionError("unreachable") from None # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) from None # abort is NoReturn custom_config = parse_register_options(request) @@ -129,14 +130,14 @@ class OidcMixin: provider_id = parse_provider_id(request.provider_id) except ValueError: await abort_invalid_argument(context, ERR_INVALID_PROVIDER_ID) - raise AssertionError("unreachable") from None # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) from None # abort is NoReturn oidc_service = self.get_oidc_service() provider = oidc_service.registry.get_provider(provider_id) if provider is None: await abort_not_found(context, ENTITY_OIDC_PROVIDER, str(provider_id)) - raise AssertionError("unreachable") from None # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) from None # abort is NoReturn return oidc_provider_to_proto(provider) @@ -152,14 +153,14 @@ class OidcMixin: provider_id = parse_provider_id(request.provider_id) except ValueError: await abort_invalid_argument(context, ERR_INVALID_PROVIDER_ID) - raise AssertionError("unreachable") from None # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) from None # abort is NoReturn oidc_service = self.get_oidc_service() 
provider = oidc_service.registry.get_provider(provider_id) if provider is None: await abort_not_found(context, ENTITY_OIDC_PROVIDER, str(provider_id)) - raise AssertionError("unreachable") from None # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) from None # abort is NoReturn apply_update_request_to_provider(provider, request) return oidc_provider_to_proto(provider) @@ -176,14 +177,14 @@ class OidcMixin: provider_id = parse_provider_id(request.provider_id) except ValueError: await abort_invalid_argument(context, ERR_INVALID_PROVIDER_ID) - raise AssertionError("unreachable") from None # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) from None # abort is NoReturn oidc_service = self.get_oidc_service() success = oidc_service.registry.remove_provider(provider_id) if not success: await abort_not_found(context, ENTITY_OIDC_PROVIDER, str(provider_id)) - raise AssertionError("unreachable") from None # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) from None # abort is NoReturn return noteflow_pb2.DeleteOidcProviderResponse(success=success) @@ -201,7 +202,7 @@ class OidcMixin: provider_id = parse_provider_id(request.provider_id) except ValueError: await abort_invalid_argument(context, ERR_INVALID_PROVIDER_ID) - raise AssertionError("unreachable") from None # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) from None # abort is NoReturn return await refresh_single_provider(oidc_service, provider_id, context) diff --git a/src/noteflow/grpc/mixins/preferences.py b/src/noteflow/grpc/mixins/preferences.py index 6d07c8c..7f91bd5 100644 --- a/src/noteflow/grpc/mixins/preferences.py +++ b/src/noteflow/grpc/mixins/preferences.py @@ -20,6 +20,7 @@ if TYPE_CHECKING: from collections.abc import Callable from ._types import GrpcContext +from .errors._constants import UNREACHABLE_ERROR logger = get_logger(__name__) @@ -97,7 +98,7 @@ class PreferencesMixin: async with cast(PreferencesRepositoryProvider, self.create_repository_provider()) 
as repo: if not repo.supports_preferences: await abort_database_required(context, _ENTITY_PREFERENCES) - raise AssertionError("unreachable") # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) # abort is NoReturn keys_seq = cast(Sequence[str], request.keys) keys = list(keys_seq) if keys_seq else None @@ -120,7 +121,7 @@ class PreferencesMixin: async with cast(PreferencesRepositoryProvider, self.create_repository_provider()) as repo: if not repo.supports_preferences: await abort_database_required(context, _ENTITY_PREFERENCES) - raise AssertionError("unreachable") # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) # abort is NoReturn current_prefs = await repo.preferences.get_all_with_metadata() current_dict, server_max_updated = _prefs_to_dict_with_timestamp(current_prefs) diff --git a/src/noteflow/grpc/mixins/streaming/_mixin.py b/src/noteflow/grpc/mixins/streaming/_mixin.py index b8bbfcc..8b272bc 100644 --- a/src/noteflow/grpc/mixins/streaming/_mixin.py +++ b/src/noteflow/grpc/mixins/streaming/_mixin.py @@ -32,6 +32,7 @@ from ._types import StreamSessionInit if TYPE_CHECKING: from ..protocols import ServicerHost +from ..errors._constants import UNREACHABLE_ERROR logger = get_logger(__name__) @@ -224,7 +225,7 @@ class StreamingMixin: meeting_id = chunk.meeting_id if not meeting_id: await abort_invalid_argument(context, "meeting_id required") - raise AssertionError("unreachable") # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) # abort is NoReturn if current_meeting_id is None: # Track meeting_id BEFORE init to guarantee cleanup on any exception @@ -251,7 +252,7 @@ class StreamingMixin: return meeting_id, initialized_meeting_id if meeting_id != current_meeting_id: await abort_invalid_argument(context, "Stream may only contain a single meeting_id") - raise AssertionError("unreachable") # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) # abort is NoReturn return current_meeting_id, initialized_meeting_id diff --git 
a/src/noteflow/grpc/mixins/streaming/_session.py b/src/noteflow/grpc/mixins/streaming/_session.py index 8a74a9b..aeec5b2 100644 --- a/src/noteflow/grpc/mixins/streaming/_session.py +++ b/src/noteflow/grpc/mixins/streaming/_session.py @@ -34,6 +34,7 @@ if TYPE_CHECKING: from noteflow.domain.ports.unit_of_work import UnitOfWork from ..protocols import ServicerHost +from ..errors._constants import UNREACHABLE_ERROR logger = get_logger(__name__) @@ -216,7 +217,7 @@ class StreamSessionManager: await abort_failed_precondition( context, "Stream initialization timed out - server may be overloaded" ) - raise AssertionError("unreachable") from e + raise AssertionError(UNREACHABLE_ERROR) from e return reserved @staticmethod @@ -244,7 +245,7 @@ class StreamSessionManager: await abort_failed_precondition( context, f"{ERROR_MSG_MEETING_PREFIX}{meeting_id} already streaming" ) - raise AssertionError("unreachable") # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) # abort is NoReturn @staticmethod async def _init_stream_session( diff --git a/src/noteflow/grpc/mixins/streaming_config.py b/src/noteflow/grpc/mixins/streaming_config.py index 71de775..1806114 100644 --- a/src/noteflow/grpc/mixins/streaming_config.py +++ b/src/noteflow/grpc/mixins/streaming_config.py @@ -21,6 +21,7 @@ from noteflow.infrastructure.logging import get_logger from ..proto import noteflow_pb2 from ._types import GrpcContext from .errors import abort_invalid_argument +from .errors._constants import UNREACHABLE_ERROR if TYPE_CHECKING: from collections.abc import Callable @@ -89,7 +90,7 @@ class StreamingConfigMixin: if error := _validate_update(updates): await abort_invalid_argument(context, error) - raise AssertionError("unreachable") # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) # abort is NoReturn current = self.streaming_config updated = StreamingConfig( diff --git a/src/noteflow/grpc/mixins/sync.py b/src/noteflow/grpc/mixins/sync.py index 1ae4d21..851ae72 100644 --- 
a/src/noteflow/grpc/mixins/sync.py +++ b/src/noteflow/grpc/mixins/sync.py @@ -29,6 +29,7 @@ from .errors import ( if TYPE_CHECKING: from .protocols import ServicerHost +from .errors._constants import UNREACHABLE_ERROR logger = get_logger(__name__) @@ -150,7 +151,7 @@ class SyncMixin: ) -> noteflow_pb2.StartIntegrationSyncResponse: if self.calendar_service is None: await abort_unavailable(context, _ERR_CALENDAR_NOT_ENABLED) - raise AssertionError("unreachable") # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) # abort is NoReturn integration_id = await parse_integration_id(request.integration_id, context) @@ -163,7 +164,7 @@ class SyncMixin: provider = provider_value if isinstance(provider_value, str) else None if not provider: await abort_failed_precondition(context, "Integration provider not configured") - raise AssertionError("unreachable") # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) # abort is NoReturn sync_run = SyncRun.start(integration_id) sync_run = await uow.integrations.create_sync_run(sync_run) @@ -205,7 +206,7 @@ class SyncMixin: return candidate, candidate.id await abort_not_found(context, ENTITY_INTEGRATION, request.integration_id) - raise AssertionError("unreachable") # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) # abort is NoReturn async def perform_sync( self: ServicerHost, diff --git a/src/noteflow/grpc/mixins/webhooks.py b/src/noteflow/grpc/mixins/webhooks.py index 02e4d73..7966f9c 100644 --- a/src/noteflow/grpc/mixins/webhooks.py +++ b/src/noteflow/grpc/mixins/webhooks.py @@ -34,6 +34,7 @@ from .errors import ( require_url_field, ) from .protocols import WebhooksRepositoryProvider +from .errors._constants import UNREACHABLE_ERROR logger = get_logger(__name__) @@ -206,7 +207,7 @@ class WebhooksMixin: webhook_id=str(webhook_id), ) await abort_not_found(context, ENTITY_WEBHOOK, request.webhook_id) - raise AssertionError("unreachable") # abort is NoReturn + raise AssertionError(UNREACHABLE_ERROR) # abort 
is NoReturn await uow.commit() logger.info( diff --git a/src/noteflow/grpc/server/__init__.py b/src/noteflow/grpc/server/__init__.py index d415dcf..34a0e05 100644 --- a/src/noteflow/grpc/server/__init__.py +++ b/src/noteflow/grpc/server/__init__.py @@ -220,9 +220,10 @@ class NoteFlowServer: self._servicer = build_servicer(self._state, asr_engine, self._streaming_config) await recover_orphaned_jobs(self._state.session_factory) - self._server = create_server() - address = bind_server(self._server, self._servicer, self._bind_address, self._port) - await self._server.start() + server = create_server() + self._server = server + address = bind_server(server, self._servicer, self._bind_address, self._port) + await server.start() logger.info("Server listening on %s", address) async def stop(self, grace_period: float = 5.0) -> None: diff --git a/src/noteflow/grpc/server/internal/setup.py b/src/noteflow/grpc/server/internal/setup.py index 83e5d9d..7ace949 100644 --- a/src/noteflow/grpc/server/internal/setup.py +++ b/src/noteflow/grpc/server/internal/setup.py @@ -4,22 +4,22 @@ from __future__ import annotations from typing import TYPE_CHECKING, cast -import grpc.aio from ...interceptors import IdentityInterceptor, RequestLoggingInterceptor from ...proto import noteflow_pb2_grpc +from ..._grpc_compat import AsyncServerProtocol, create_aio_server if TYPE_CHECKING: from ...service import NoteFlowServicer -def create_server() -> grpc.aio.Server: +def create_server() -> AsyncServerProtocol: """Create async gRPC server with interceptors and limits.""" interceptors = [ RequestLoggingInterceptor(), IdentityInterceptor(), ] - options = [ + options: list[tuple[str, int | str | bool]] = [ # Message size limits ("grpc.max_send_message_length", 100 * 1024 * 1024), # 100MB ("grpc.max_receive_message_length", 100 * 1024 * 1024), @@ -31,14 +31,14 @@ def create_server() -> grpc.aio.Server: ("grpc.http2.min_recv_ping_interval_without_data_ms", 10_000), # Min 10s between pings 
("grpc.http2.max_pings_without_data", 0), # Unlimited pings without data ] - return grpc.aio.server( + return create_aio_server( interceptors=interceptors, options=options, ) def bind_server( - server: grpc.aio.Server, + server: AsyncServerProtocol, servicer: NoteFlowServicer, bind_address: str, port: int, diff --git a/src/noteflow/infrastructure/calendar/oauth/_errors.py b/src/noteflow/infrastructure/calendar/oauth/_errors.py new file mode 100644 index 0000000..6c01855 --- /dev/null +++ b/src/noteflow/infrastructure/calendar/oauth/_errors.py @@ -0,0 +1,7 @@ +"""OAuth error types.""" + +from __future__ import annotations + + +class OAuthError(Exception): + """OAuth operation failed.""" diff --git a/tests/grpc/test_interceptors.py b/tests/grpc/test_interceptors.py index 51b0c4e..d2c0968 100644 --- a/tests/grpc/test_interceptors.py +++ b/tests/grpc/test_interceptors.py @@ -11,7 +11,6 @@ from unittest.mock import AsyncMock, MagicMock, patch import grpc import pytest -from grpc import aio from noteflow.grpc.interceptors import ( METADATA_REQUEST_ID, @@ -20,6 +19,7 @@ from noteflow.grpc.interceptors import ( IdentityInterceptor, RequestLoggingInterceptor, ) +from noteflow.grpc.interceptors._types import ServicerContextProtocol from noteflow.infrastructure.logging import ( get_request_id, get_user_id, @@ -48,7 +48,7 @@ pytestmark = pytest.mark.usefixtures("reset_context_vars") def create_handler_call_details( method: str = TEST_METHOD, - metadata: list[tuple[str, str]] | None = None, + metadata: list[tuple[str, str | bytes]] | None = None, ) -> grpc.HandlerCallDetails: """Create mock HandlerCallDetails with metadata.""" details = MagicMock(spec=grpc.HandlerCallDetails) @@ -75,7 +75,7 @@ class _UnaryUnaryHandler(Protocol): """Protocol for unary-unary RPC method handlers.""" unary_unary: Callable[ - [_DummyRequest, aio.ServicerContext[_DummyRequest, _DummyResponse]], + [_DummyRequest, ServicerContextProtocol], Awaitable[_DummyResponse], ] | None unary_stream: object | None 
@@ -89,7 +89,7 @@ class _MockHandler: """Concrete handler for tests with typed unary_unary.""" unary_unary: Callable[ - [_DummyRequest, aio.ServicerContext[_DummyRequest, _DummyResponse]], + [_DummyRequest, ServicerContextProtocol], Awaitable[_DummyResponse], ] | None unary_stream: object | None @@ -101,7 +101,7 @@ class _MockHandler: def __init__(self) -> None: self.unary_unary = cast( Callable[ - [_DummyRequest, aio.ServicerContext[_DummyRequest, _DummyResponse]], + [_DummyRequest, ServicerContextProtocol], Awaitable[_DummyResponse], ], AsyncMock(return_value="response"), @@ -120,7 +120,7 @@ class TestIdentityInterceptor: async def test_sets_context_vars_from_metadata(self) -> None: """Interceptor sets context variables from metadata headers.""" interceptor = IdentityInterceptor() - metadata = [ + metadata: list[tuple[str, str | bytes]] = [ (METADATA_REQUEST_ID, TEST_REQUEST_ID), (METADATA_USER_ID, TEST_USER_ID), (METADATA_WORKSPACE_ID, TEST_WORKSPACE_ID), @@ -138,7 +138,7 @@ class TestIdentityInterceptor: async def test_sets_only_request_id_when_others_missing(self) -> None: """Interceptor sets request_id and leaves others None when not provided.""" interceptor = IdentityInterceptor() - metadata = [(METADATA_REQUEST_ID, TEST_REQUEST_ID)] + metadata: list[tuple[str, str | bytes]] = [(METADATA_REQUEST_ID, TEST_REQUEST_ID)] details = create_handler_call_details(metadata=metadata) continuation = create_mock_continuation() @@ -179,7 +179,7 @@ class TestIdentityInterceptor: typed_handler = cast(_UnaryUnaryHandler, handler) # Create mock context to verify abort behavior - context = AsyncMock(spec=aio.ServicerContext) + context: ServicerContextProtocol = AsyncMock(spec=ServicerContextProtocol) context.abort = AsyncMock(side_effect=grpc.RpcError("missing x-request-id")) with pytest.raises(grpc.RpcError, match="x-request-id"): @@ -196,7 +196,7 @@ class TestIdentityInterceptor: """Interceptor handles bytes metadata values correctly.""" interceptor = IdentityInterceptor() # 
Simulate bytes metadata (as might come from wire) - metadata = [ + metadata: list[tuple[str, str | bytes]] = [ (METADATA_REQUEST_ID, b"bytes-request-id"), (METADATA_USER_ID, b"bytes-user-id"), ] @@ -228,7 +228,7 @@ class TestRequestLoggingInterceptor: typed_handler = cast(_UnaryUnaryHandler, wrapped_handler) # Execute the wrapped handler - context = AsyncMock(spec=aio.ServicerContext) + context: ServicerContextProtocol = AsyncMock(spec=ServicerContextProtocol) context.peer = MagicMock(return_value="ipv4:127.0.0.1:12345") assert typed_handler.unary_unary is not None await typed_handler.unary_unary(MagicMock(), context) @@ -257,7 +257,7 @@ class TestRequestLoggingInterceptor: wrapped_handler = await interceptor.intercept_service(continuation, details) typed_handler = cast(_UnaryUnaryHandler, wrapped_handler) - context = AsyncMock(spec=aio.ServicerContext) + context: ServicerContextProtocol = AsyncMock(spec=ServicerContextProtocol) context.peer = MagicMock(return_value="ipv4:127.0.0.1:12345") with pytest.raises(Exception, match="Test error"): @@ -294,7 +294,7 @@ class TestRequestLoggingInterceptor: typed_handler = cast(_UnaryUnaryHandler, wrapped_handler) # Context without peer method - context = AsyncMock(spec=aio.ServicerContext) + context: ServicerContextProtocol = AsyncMock(spec=ServicerContextProtocol) context.peer = MagicMock(side_effect=RuntimeError("No peer")) assert typed_handler.unary_unary is not None diff --git a/typings/langgraph-stubs/langgraph/checkpoint/base.pyi b/typings/langgraph-stubs/langgraph/checkpoint/base.pyi new file mode 100644 index 0000000..dd109e6 --- /dev/null +++ b/typings/langgraph-stubs/langgraph/checkpoint/base.pyi @@ -0,0 +1,16 @@ +# Type stubs for langgraph.checkpoint.base +from abc import ABC, abstractmethod +from typing import Any, Dict, Optional + +class BaseCheckpointSaver(ABC): + """Base class for checkpoint savers.""" + + @abstractmethod + async def aget_tuple(self, config: Dict[str, Any]) -> Optional[Any]: + """Get a 
checkpoint tuple asynchronously.""" + ... + + @abstractmethod + async def aput_tuple(self, config: Dict[str, Any], checkpoint: Any, metadata: Any) -> None: + """Put a checkpoint tuple asynchronously.""" + ... diff --git a/typings/langgraph-stubs/langgraph/checkpoint/postgres/aio.pyi b/typings/langgraph-stubs/langgraph/checkpoint/postgres/aio.pyi new file mode 100644 index 0000000..ca52ff8 --- /dev/null +++ b/typings/langgraph-stubs/langgraph/checkpoint/postgres/aio.pyi @@ -0,0 +1,16 @@ +# Type stubs for langgraph.checkpoint.postgres.aio +from typing import Any, Dict, Optional + +from ..base import BaseCheckpointSaver + +class AsyncPostgresSaver(BaseCheckpointSaver): + """Async PostgreSQL checkpoint saver.""" + + def __init__(self, conn: Any) -> None: ... + async def aget_tuple(self, config: Dict[str, Any]) -> Optional[Any]: + """Get a checkpoint tuple asynchronously.""" + ... + + async def aput_tuple(self, config: Dict[str, Any], checkpoint: Any, metadata: Any) -> None: + """Put a checkpoint tuple asynchronously.""" + ... 
diff --git a/typings/langgraph-stubs/langgraph/graph/__init__.pyi b/typings/langgraph-stubs/langgraph/graph/__init__.pyi new file mode 100644 index 0000000..31bc1c7 --- /dev/null +++ b/typings/langgraph-stubs/langgraph/graph/__init__.pyi @@ -0,0 +1,73 @@ +# Type stubs for langgraph +from typing import Any, Dict, Generic, List, Optional, TypeVar, Union + +# Type variables for generic types +T = TypeVar("T") +S = TypeVar("S") + +# Constants +END: str = "__end__" +START: str = "__start__" + +class CompiledStateGraph(Generic[T]): + """Compiled state graph with async invocation support.""" + + async def ainvoke( + self, + input: T, + config: Optional[Dict[str, Any]] = None, + *, + context: Optional[Any] = None, + stream_mode: str = "values", + print_mode: Union[str, List[str]] = "values", + output_keys: Optional[Union[str, List[str]]] = None, + interrupt_before: Optional[Union[str, List[str]]] = None, + interrupt_after: Optional[Union[str, List[str]]] = None, + durability: Optional[str] = None, + **kwargs: Any, + ) -> Dict[str, Any]: + """Invoke the graph asynchronously.""" + ... + + def invoke( + self, + input: T, + config: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> Dict[str, Any]: + """Invoke the graph synchronously.""" + ... + +class StateGraph(Generic[T]): + """State graph builder.""" + + def __init__(self, state_schema: Any) -> None: ... + def add_node(self, name: str, node: Any) -> None: + """Add a node to the graph.""" + ... + + def add_edge(self, start: str, end: str) -> None: + """Add an edge between nodes.""" + ... + + def add_conditional_edges( + self, + source: str, + path: Any, + path_map: Optional[Dict[str, str]] = None, + **kwargs: Any, + ) -> None: + """Add conditional edges.""" + ... + + def set_entry_point(self, entry_point: str) -> None: + """Set the entry point of the graph.""" + ... + + def set_finish_point(self, finish_point: str) -> None: + """Set the finish point of the graph.""" + ... 
+ + def compile(self, checkpointer: Optional[Any] = None) -> CompiledStateGraph[T]: + """Compile the graph.""" + ... diff --git a/typings/langgraph-stubs/langgraph/graph/state.pyi b/typings/langgraph-stubs/langgraph/graph/state.pyi new file mode 100644 index 0000000..bac3fb4 --- /dev/null +++ b/typings/langgraph-stubs/langgraph/graph/state.pyi @@ -0,0 +1,9 @@ +# Type stubs for langgraph.graph.state +from typing import TypeVar + +from . import CompiledStateGraph + +T = TypeVar("T") + +# Re-export CompiledStateGraph for convenience +CompiledStateGraph = CompiledStateGraph diff --git a/typings/langgraph-stubs/langgraph/types.pyi b/typings/langgraph-stubs/langgraph/types.pyi new file mode 100644 index 0000000..f882baa --- /dev/null +++ b/typings/langgraph-stubs/langgraph/types.pyi @@ -0,0 +1,17 @@ +# Type stubs for langgraph.types +from typing import Any, Generic, TypeVar + +T = TypeVar("T") + +class Command(Generic[T]): + """Command type for interrupting graph execution.""" + + def __init__(self, value: T, **kwargs: Any) -> None: ... + @property + def value(self) -> T: + """Get the command value.""" + ... + +def interrupt(value: T, **kwargs: Any) -> Command[T]: + """Create an interrupt command.""" + ... 
diff --git a/typings/langgraph/graph/state.pyi b/typings/langgraph/graph/state.pyi new file mode 100644 index 0000000..61ebd7e --- /dev/null +++ b/typings/langgraph/graph/state.pyi @@ -0,0 +1,33 @@ +# Type stubs for langgraph.graph.state +from typing import Any, Dict, Generic, List, Optional, TypeVar, Union + +T = TypeVar("T") + +class CompiledStateGraph(Generic[T]): + """Compiled state graph with async invocation support.""" + + async def ainvoke( + self, + input: T, + config: Optional[Dict[str, Any]] = None, + *, + context: Optional[Any] = None, + stream_mode: str = "values", + print_mode: Union[str, List[str]] = "values", + output_keys: Optional[Union[str, List[str]]] = None, + interrupt_before: Optional[Union[str, List[str]]] = None, + interrupt_after: Optional[Union[str, List[str]]] = None, + durability: Optional[str] = None, + **kwargs: Any, + ) -> Dict[str, Any]: + """Invoke the graph asynchronously.""" + ... + + def invoke( + self, + input: T, + config: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> Dict[str, Any]: + """Invoke the graph synchronously.""" + ... diff --git a/typings/langgraph/types.pyi b/typings/langgraph/types.pyi new file mode 100644 index 0000000..f882baa --- /dev/null +++ b/typings/langgraph/types.pyi @@ -0,0 +1,17 @@ +# Type stubs for langgraph.types +from typing import Any, Generic, TypeVar + +T = TypeVar("T") + +class Command(Generic[T]): + """Command type for interrupting graph execution.""" + + def __init__(self, value: T, **kwargs: Any) -> None: ... + @property + def value(self) -> T: + """Get the command value.""" + ... + +def interrupt(value: T, **kwargs: Any) -> Command[T]: + """Create an interrupt command.""" + ...