This commit is contained in:
2026-01-22 04:40:05 +00:00
parent fc7bbd0ea2
commit 073b70cc39
197 changed files with 18236 additions and 27240 deletions

View File

@@ -0,0 +1,163 @@
# METADATA
# scope: package
# title: Ban Stdlib Logger
# description: Blocks use of stdlib logging in Python code
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.ban_stdlib_logger
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
is_python_patch(patch_text) if {
contains(patch_text, ".py")
}
is_python_patch(patch_text) if {
contains(patch_text, ".pyi")
}
stdlib_logger_pattern := `import logging|from logging import|logging\.getLogger`
file_path_pattern := `\.py$`
# Block Write/Edit operations that introduce stdlib logging
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
content := new_content
content != null
regex.match(stdlib_logger_pattern, content)
decision := {
"rule_id": "PY-LOG-001",
"reason": "Stdlib logging usage is prohibited. Use the project logging utilities instead.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
regex.match(file_path_pattern, file_path)
content := edit_new_content(edit)
content != null
regex.match(stdlib_logger_pattern, content)
decision := {
"rule_id": "PY-LOG-001",
"reason": "Stdlib logging usage is prohibited. Use the project logging utilities instead.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null
lower_patch := lower(patch)
is_python_patch(lower_patch)
regex.match(stdlib_logger_pattern, patch)
decision := {
"rule_id": "PY-LOG-001",
"reason": "Stdlib logging usage is prohibited. Use the project logging utilities instead.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,175 @@
# METADATA
# scope: package
# title: Block Assertion Roulette
# description: Blocks multiple bare asserts in a single test without messages
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_assertion_roulette
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
patch_targets_path(pattern) if {
patch := patch_content
patch != null
lines := split(patch, "\n")
some line in lines
startswith(line, "+++ b/")
path := replace(line, "+++ b/", "")
regex.match(pattern, path)
}
patch_targets_path(pattern) if {
patch := patch_content
patch != null
lines := split(patch, "\n")
some line in lines
startswith(line, "--- a/")
path := replace(line, "--- a/", "")
regex.match(pattern, path)
}
file_path_pattern := `tests?/.*\.py$`
assertion_pattern := `^\s*assert\s+[^,\n]+\n\s*assert\s+[^,\n]+$`
# Block Write/Edit operations that introduce assertion roulette
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
content := new_content
content != null
regex.match(assertion_pattern, content)
decision := {
"rule_id": "TEST-ASSERT-001",
"reason": "Multiple bare asserts detected. Use one assert per test or add assertion messages.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
regex.match(file_path_pattern, file_path)
content := edit_new_content(edit)
content != null
regex.match(assertion_pattern, content)
decision := {
"rule_id": "TEST-ASSERT-001",
"reason": "Multiple bare asserts detected. Use one assert per test or add assertion messages.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null
patch_targets_path(file_path_pattern)
regex.match(assertion_pattern, patch)
decision := {
"rule_id": "TEST-ASSERT-001",
"reason": "Multiple bare asserts detected. Use one assert per test or add assertion messages.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,175 @@
# METADATA
# scope: package
# title: Block Biome Ignore
# description: Blocks ignore directives in JS/TS files
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_biome_ignore
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
patch_targets_path(pattern) if {
patch := patch_content
patch != null
lines := split(patch, "\n")
some line in lines
startswith(line, "+++ b/")
path := replace(line, "+++ b/", "")
regex.match(pattern, path)
}
patch_targets_path(pattern) if {
patch := patch_content
patch != null
lines := split(patch, "\n")
some line in lines
startswith(line, "--- a/")
path := replace(line, "--- a/", "")
regex.match(pattern, path)
}
file_path_pattern := `\.(js|jsx|ts|tsx|mjs|cjs)$`
ignore_pattern := `//\s*biome-ignore|//\s*@ts-ignore|//\s*@ts-expect-error|//\s*@ts-nocheck|//\s*eslint-disable|/\*\s*eslint-disable`
# Block Write/Edit operations that introduce ignore directives
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
content := new_content
content != null
regex.match(ignore_pattern, content)
decision := {
"rule_id": "TS-LINT-002",
"reason": "Ignore directives for Biome/TypeScript/ESLint are prohibited.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
regex.match(file_path_pattern, file_path)
content := edit_new_content(edit)
content != null
regex.match(ignore_pattern, content)
decision := {
"rule_id": "TS-LINT-002",
"reason": "Ignore directives for Biome/TypeScript/ESLint are prohibited.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null
patch_targets_path(file_path_pattern)
regex.match(ignore_pattern, patch)
decision := {
"rule_id": "TS-LINT-002",
"reason": "Ignore directives for Biome/TypeScript/ESLint are prohibited.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,26 @@
# METADATA
# scope: package
# title: Block Biome Ignore (Bash)
# description: Blocks Bash commands that add ignore directives to JS/TS files
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["Bash"]
package cupcake.policies.opencode.block_biome_ignore_bash
import rego.v1
ignore_pattern := `(biome-ignore|@ts-ignore|@ts-expect-error|@ts-nocheck|eslint-disable).*\.(js|jsx|ts|tsx|mjs|cjs)`
deny contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "Bash"
command := input.tool_input.command
regex.match(ignore_pattern, command)
decision := {
"rule_id": "TS-LINT-001",
"reason": "Ignore directives for Biome/TypeScript/ESLint are prohibited.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,146 @@
# METADATA
# scope: package
# title: Block Broad Exception Handler
# description: Blocks bare Exception handlers that only log
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_broad_exception_handler
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
handler_pattern := `except\s+Exception\s*(?:as\s+\w+)?:\s*\n\s+(?:logger\.|logging\.)`
# Block Write/Edit operations that introduce broad exception handlers
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
content := new_content
content != null
regex.match(handler_pattern, content)
decision := {
"rule_id": "PY-EXC-001",
"reason": "Broad Exception handlers that only log are prohibited.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
content := edit_new_content(edit)
content != null
regex.match(handler_pattern, content)
decision := {
"rule_id": "PY-EXC-001",
"reason": "Broad Exception handlers that only log are prohibited.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null
regex.match(handler_pattern, patch)
decision := {
"rule_id": "PY-EXC-001",
"reason": "Broad Exception handlers that only log are prohibited.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,26 @@
# METADATA
# scope: package
# title: Block Code Quality Test (Bash)
# description: Blocks Bash edits to src/test/code-quality.test.ts
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["Bash"]
package cupcake.policies.opencode.block_code_quality_test_bash
import rego.v1
pattern := `(sed|awk|cat\s*>|echo\s*>|tee|cp\s+.*code-quality\.test\.ts|mv\s+.*code-quality\.test\.ts|rm\s+.*code-quality\.test\.ts|>|>>).*code-quality\.test\.ts|code-quality\.test\.ts.*(>|>>|\|.*tee)`
deny contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "Bash"
command := input.tool_input.command
regex.match(pattern, command)
decision := {
"rule_id": "TS-QUALITY-001",
"reason": "Direct edits to src/test/code-quality.test.ts are prohibited.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,127 @@
# METADATA
# scope: package
# title: Block Code Quality Test (Edits)
# description: Blocks file edits to src/test/code-quality.test.ts
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_code_quality_test_edits
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
file_path_pattern := `src/test/code-quality\.test\.ts$`
# Block Write/Edit operations targeting code-quality test file
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
decision := {
"rule_id": "TS-QUALITY-002",
"reason": "Direct edits to src/test/code-quality.test.ts are prohibited.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
regex.match(file_path_pattern, file_path)
decision := {
"rule_id": "TS-QUALITY-002",
"reason": "Direct edits to src/test/code-quality.test.ts are prohibited.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,126 @@
# METADATA
# scope: package
# title: Block Code Quality Test (Serena)
# description: Blocks Serena edits to src/test/code-quality.test.ts
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools:
# - McpSerenaReplaceContent
# - McpSerenaReplaceSymbolBody
# - McpSerenaCreateTextFile
# - McpSerenaInsertBeforeSymbol
# - McpSerenaInsertAfterSymbol
# - McpSerenaRenameSymbol
package cupcake.policies.opencode.block_code_quality_test_serena
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
file_path_pattern := `(^|/)src/test/code-quality\.test\.ts$`
get_relative_path := path if {
path := tool_input.relative_path
} else := path if {
path := tool_input.path
} else := ""
# Block Serena operations targeting code-quality test file
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_names := {"McpSerenaReplaceContent", "McpSerenaReplaceSymbolBody", "McpSerenaCreateTextFile", "McpSerenaInsertBeforeSymbol", "McpSerenaInsertAfterSymbol", "McpSerenaRenameSymbol"}
tool_name in tool_names
file_path := get_relative_path
regex.match(file_path_pattern, file_path)
decision := {
"rule_id": "TS-QUALITY-003",
"reason": "Direct edits to src/test/code-quality.test.ts are prohibited.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,126 @@
# METADATA
# scope: package
# title: Block Code Quality Test (Serena Plugin)
# description: Blocks Serena plugin edits to src/test/code-quality.test.ts
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools:
# - McpPluginSerenaSerenaReplaceContent
# - McpPluginSerenaSerenaReplaceSymbolBody
# - McpPluginSerenaSerenaCreateTextFile
# - McpPluginSerenaSerenaInsertBeforeSymbol
# - McpPluginSerenaSerenaInsertAfterSymbol
# - McpPluginSerenaSerenaRenameSymbol
package cupcake.policies.opencode.block_code_quality_test_serena_plugin
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
file_path_pattern := `(^|/)src/test/code-quality\.test\.ts$`
get_relative_path := path if {
path := tool_input.relative_path
} else := path if {
path := tool_input.path
} else := ""
# Block Serena plugin operations targeting code-quality test file
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_names := {"McpPluginSerenaSerenaReplaceContent", "McpPluginSerenaSerenaReplaceSymbolBody", "McpPluginSerenaSerenaCreateTextFile", "McpPluginSerenaSerenaInsertBeforeSymbol", "McpPluginSerenaSerenaInsertAfterSymbol", "McpPluginSerenaSerenaRenameSymbol"}
tool_name in tool_names
file_path := get_relative_path
regex.match(file_path_pattern, file_path)
decision := {
"rule_id": "TS-QUALITY-004",
"reason": "Direct edits to src/test/code-quality.test.ts are prohibited.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,146 @@
# METADATA
# scope: package
# title: Block datetime.now Fallback
# description: Blocks returning datetime.now() as a fallback
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_datetime_now_fallback
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
pattern := `return\s+datetime\.now\s*\(`
# Block Write/Edit operations that introduce datetime.now fallback
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
content := new_content
content != null
regex.match(pattern, content)
decision := {
"rule_id": "PY-DT-001",
"reason": "Returning datetime.now() as a fallback is prohibited. Use a caller-provided timestamp.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
content := edit_new_content(edit)
content != null
regex.match(pattern, content)
decision := {
"rule_id": "PY-DT-001",
"reason": "Returning datetime.now() as a fallback is prohibited. Use a caller-provided timestamp.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null
regex.match(pattern, patch)
decision := {
"rule_id": "PY-DT-001",
"reason": "Returning datetime.now() as a fallback is prohibited. Use a caller-provided timestamp.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,146 @@
# METADATA
# scope: package
# title: Block Default Value Swallow
# description: Blocks exception handlers that warn and return defaults
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_default_value_swallow
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
pattern := `except\s+\w*(?:Error|Exception).*?:\s*\n\s+.*?(?:logger\.|logging\.).*?(?:warning|warn).*?\n\s+return\s+(?:\w+Settings|Defaults?\(|default_|\{[^}]*\}|[A-Z_]+_DEFAULT)`
# Block Write/Edit operations that swallow exceptions with defaults
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
content := new_content
content != null
regex.match(pattern, content)
decision := {
"rule_id": "PY-EXC-002",
"reason": "Swallowing exceptions and returning defaults is prohibited.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
content := edit_new_content(edit)
content != null
regex.match(pattern, content)
decision := {
"rule_id": "PY-EXC-002",
"reason": "Swallowing exceptions and returning defaults is prohibited.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null
regex.match(pattern, patch)
decision := {
"rule_id": "PY-EXC-002",
"reason": "Swallowing exceptions and returning defaults is prohibited.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,179 @@
# METADATA
# scope: package
# title: Block Duplicate Fixtures
# description: Blocks redefining global pytest fixtures outside conftest.py
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_duplicate_fixtures
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
patch_targets_path(pattern) if {
patch := patch_content
patch != null
lines := split(patch, "\n")
some line in lines
startswith(line, "+++ b/")
path := replace(line, "+++ b/", "")
regex.match(pattern, path)
}
patch_targets_path(pattern) if {
patch := patch_content
patch != null
lines := split(patch, "\n")
some line in lines
startswith(line, "--- a/")
path := replace(line, "--- a/", "")
regex.match(pattern, path)
}
file_path_pattern := `tests?/.*\.py$`
conftest_pattern := `tests?/conftest\.py$`
fixture_pattern := `@pytest\.fixture[^@]*\ndef\s+(mock_uow|crypto|meetings_dir|webhook_config|webhook_config_all_events|sample_datetime|calendar_settings|meeting_id|sample_meeting|recording_meeting|mock_grpc_context|mock_asr_engine|mock_optional_extras)\s*\(`
# Block Write/Edit operations that introduce duplicate fixtures
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
not regex.match(conftest_pattern, file_path)
content := new_content
content != null
regex.match(fixture_pattern, content)
decision := {
"rule_id": "TEST-FIX-001",
"reason": "Duplicate global fixtures are prohibited. Use tests/conftest.py fixtures instead.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
regex.match(file_path_pattern, file_path)
not regex.match(conftest_pattern, file_path)
content := edit_new_content(edit)
content != null
regex.match(fixture_pattern, content)
decision := {
"rule_id": "TEST-FIX-001",
"reason": "Duplicate global fixtures are prohibited. Use tests/conftest.py fixtures instead.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null
patch_targets_path(file_path_pattern)
not regex.match(conftest_pattern, patch)
regex.match(fixture_pattern, patch)
decision := {
"rule_id": "TEST-FIX-001",
"reason": "Duplicate global fixtures are prohibited. Use tests/conftest.py fixtures instead.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,129 @@
# METADATA
# scope: package
# title: Block Frontend Linter Config
# description: Blocks edits to frontend linter config files
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_linter_config_frontend
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
file_path_pattern := `(^|/)client/.*(?:\.?eslint(?:rc|\.config).*|\.?prettier(?:rc|\.config).*|biome\.json|tsconfig\.json|\.?rustfmt\.toml|\.?clippy\.toml)$`
# Block Write/Edit operations targeting frontend linter configs
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
not contains(lower(file_path), "node_modules/")
decision := {
"rule_id": "TS-CONFIG-002",
"reason": "Frontend linter/config file edits are prohibited.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
regex.match(file_path_pattern, file_path)
not contains(lower(file_path), "node_modules/")
decision := {
"rule_id": "TS-CONFIG-002",
"reason": "Frontend linter/config file edits are prohibited.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,26 @@
# METADATA
# scope: package
# title: Block Frontend Linter Config (Bash)
# description: Blocks Bash edits to frontend linter config files
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["Bash"]
package cupcake.policies.opencode.block_linter_config_frontend_bash
import rego.v1
pattern := `(rm|mv|cp|sed|awk|chmod|chown|touch|truncate|tee|>|>>)\s.*client/.*(?:biome\.json|tsconfig\.json|\.?eslint(?:rc|\.config)|\.?prettier(?:rc|\.config)|\.?rustfmt\.toml|\.?clippy\.toml)`
deny contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "Bash"
command := input.tool_input.command
regex.match(pattern, command)
decision := {
"rule_id": "TS-CONFIG-001",
"reason": "Frontend linter/config file edits are prohibited.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,129 @@
# METADATA
# scope: package
# title: Block Python Linter Config
# description: Blocks edits to Python linter config files
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_linter_config_python
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
file_path_pattern := `(?:pyproject\.toml|\.?ruff\.toml|\.?pyrightconfig\.json|\.?mypy\.ini|setup\.cfg|\.flake8|tox\.ini|\.?pylintrc)$`
# Block Write/Edit operations targeting Python linter configs
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
not contains(lower(file_path), "/.venv/")
decision := {
"rule_id": "PY-CONFIG-002",
"reason": "Python linter/config file edits are prohibited.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
regex.match(file_path_pattern, file_path)
not contains(lower(file_path), "/.venv/")
decision := {
"rule_id": "PY-CONFIG-002",
"reason": "Python linter/config file edits are prohibited.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,26 @@
# METADATA
# scope: package
# title: Block Python Linter Config (Bash)
# description: Blocks Bash edits to Python linter config files
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["Bash"]
package cupcake.policies.opencode.block_linter_config_python_bash
import rego.v1
pattern := `(rm|mv|cp|sed|awk|chmod|chown|touch|truncate|tee|>|>>)\s.*(?:pyproject\.toml|\.?ruff\.toml|\.?pyrightconfig\.json|\.?mypy\.ini|setup\.cfg|\.flake8|tox\.ini|\.?pylintrc)`
deny contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "Bash"
command := input.tool_input.command
regex.match(pattern, command)
decision := {
"rule_id": "PY-CONFIG-001",
"reason": "Python linter/config file edits are prohibited.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,178 @@
# METADATA
# scope: package
# title: Block Magic Numbers
# description: Blocks introduction of magic numbers outside constants modules
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_magic_numbers
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
patch_targets_path(pattern) if {
patch := patch_content
patch != null
lines := split(patch, "\n")
some line in lines
startswith(line, "+++ b/")
path := replace(line, "+++ b/", "")
regex.match(pattern, path)
}
patch_targets_path(pattern) if {
patch := patch_content
patch != null
lines := split(patch, "\n")
some line in lines
startswith(line, "--- a/")
path := replace(line, "--- a/", "")
regex.match(pattern, path)
}
file_path_pattern := `\.(py|ts|tsx|js|jsx)$`
number_pattern := `(?:timeout|delay|interval|duration|limit|max|min|size|count|threshold|retry|retries|attempts|port|width|height|margin|padding|offset|index|length|capacity|buffer|batch|chunk|page|rate|fps|dpi|quality|level|priority|weight|score|factor|multiplier|divisor|percentage|ratio|scale)\s*[=:]\s*([2-9]|[1-9]\d+)|(?:if|while|for|elif|range|slice|sleep|wait|setTimeout|setInterval)\s*\([^)]*([2-9]|[1-9]\d+)`
# Block Write/Edit operations that introduce magic numbers
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
not contains(lower(file_path), "constants")
content := new_content
content != null
regex.match(number_pattern, content)
decision := {
"rule_id": "STYLE-001",
"reason": "Magic numbers are prohibited. Use named constants.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
regex.match(file_path_pattern, file_path)
not contains(lower(file_path), "constants")
content := edit_new_content(edit)
content != null
regex.match(number_pattern, content)
decision := {
"rule_id": "STYLE-001",
"reason": "Magic numbers are prohibited. Use named constants.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null
patch_targets_path(file_path_pattern)
not contains(lower(patch), "constants")
regex.match(number_pattern, patch)
decision := {
"rule_id": "STYLE-001",
"reason": "Magic numbers are prohibited. Use named constants.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,26 @@
# METADATA
# scope: package
# title: Block Makefile Edit (Bash)
# description: Blocks Bash edits to Makefile
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["Bash"]
package cupcake.policies.opencode.block_makefile_bash
import rego.v1
pattern := `(>>?\s*Makefile|sed\s+.*-i.*Makefile|sed\s+-i.*Makefile|perl\s+-[pi].*Makefile|tee\s+.*Makefile|(mv|cp)\s+\S+\s+Makefile\b|>\s*Makefile)`
deny contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "Bash"
command := input.tool_input.command
regex.match(pattern, command)
decision := {
"rule_id": "BUILD-001",
"reason": "Makefile edits are prohibited.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,127 @@
# METADATA
# scope: package
# title: Block Makefile Edit
# description: Blocks file edits to Makefile
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_makefile_edit
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
file_path_pattern := `(?:^|/)Makefile$`
# Block Write/Edit operations targeting Makefile
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
decision := {
"rule_id": "BUILD-002",
"reason": "Makefile edits are prohibited.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
regex.match(file_path_pattern, file_path)
decision := {
"rule_id": "BUILD-002",
"reason": "Makefile edits are prohibited.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,26 @@
# METADATA
# scope: package
# title: Block Git --no-verify
# description: Blocks git commit --no-verify
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["Bash"]
package cupcake.policies.opencode.block_no_verify
import rego.v1
pattern := `git\s+commit\s+.*--no-verify|git\s+commit\s+--no-verify`
deny contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "Bash"
command := input.tool_input.command
regex.match(pattern, command)
decision := {
"rule_id": "GIT-001",
"reason": "Git commit --no-verify is prohibited.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,146 @@
# METADATA
# scope: package
# title: Block Silent None Return
# description: Blocks exception handlers that log and return empty values
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_silent_none_return
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
pattern := `except\s+\w*Error.*?:\s*\n\s+.*?(?:logger\.|logging\.).*?\n\s+return\s+(?:None|\[\]|False|\{\}|0)`
# Block Write/Edit operations that swallow exceptions with empty returns
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
content := new_content
content != null
regex.match(pattern, content)
decision := {
"rule_id": "PY-EXC-003",
"reason": "Silent exception handlers returning empty values are prohibited.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
content := edit_new_content(edit)
content != null
regex.match(pattern, content)
decision := {
"rule_id": "PY-EXC-003",
"reason": "Silent exception handlers returning empty values are prohibited.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null
regex.match(pattern, patch)
decision := {
"rule_id": "PY-EXC-003",
"reason": "Silent exception handlers returning empty values are prohibited.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,175 @@
# METADATA
# scope: package
# title: Block Test Loops/Conditionals
# description: Blocks loops or conditionals inside tests with asserts
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_test_loops_conditionals
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
patch_targets_path(pattern) if {
patch := patch_content
patch != null
lines := split(patch, "\n")
some line in lines
startswith(line, "+++ b/")
path := replace(line, "+++ b/", "")
regex.match(pattern, path)
}
patch_targets_path(pattern) if {
patch := patch_content
patch != null
lines := split(patch, "\n")
some line in lines
startswith(line, "--- a/")
path := replace(line, "--- a/", "")
regex.match(pattern, path)
}
file_path_pattern := `tests?/.*\.py$`
pattern := `def test_[^(]+\([^)]*\)[^:]*:[\s\S]*?\b(for|while|if)\s+[^:]+:[\s\S]*?assert`
# Block Write/Edit operations that introduce loops/conditionals in tests
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
content := new_content
content != null
regex.match(pattern, content)
decision := {
"rule_id": "TEST-STRUCT-001",
"reason": "Loops or conditionals inside tests are prohibited. Use parametrization.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
regex.match(file_path_pattern, file_path)
content := edit_new_content(edit)
content != null
regex.match(pattern, content)
decision := {
"rule_id": "TEST-STRUCT-001",
"reason": "Loops or conditionals inside tests are prohibited. Use parametrization.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null
patch_targets_path(file_path_pattern)
regex.match(pattern, patch)
decision := {
"rule_id": "TEST-STRUCT-001",
"reason": "Loops or conditionals inside tests are prohibited. Use parametrization.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,130 @@
# METADATA
# scope: package
# title: Block Tests Quality
# description: Blocks edits to tests/quality (except baselines.json)
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_tests_quality
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
file_path_pattern := `tests/quality/`
exclude_pattern := `baselines\.json$`
# Block Write/Edit operations targeting tests/quality
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
not regex.match(exclude_pattern, file_path)
decision := {
"rule_id": "TEST-QUALITY-002",
"reason": "Direct edits to tests/quality are prohibited (except baselines.json).",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
regex.match(file_path_pattern, file_path)
not regex.match(exclude_pattern, file_path)
decision := {
"rule_id": "TEST-QUALITY-002",
"reason": "Direct edits to tests/quality are prohibited (except baselines.json).",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,27 @@
# METADATA
# scope: package
# title: Block Tests Quality (Bash)
# description: Blocks Bash edits to tests/quality (except baselines.json)
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["Bash"]
package cupcake.policies.opencode.block_tests_quality_bash
import rego.v1
pattern := `(rm|mv|cp|sed|awk|chmod|chown|touch|mkdir|rmdir|truncate|tee|>|>>)\s.*tests/quality/`
deny contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "Bash"
command := input.tool_input.command
regex.match(pattern, command)
not contains(lower(command), "tests/quality/baselines.json")
decision := {
"rule_id": "TEST-QUALITY-001",
"reason": "Direct edits to tests/quality are prohibited (except baselines.json).",
"severity": "HIGH"
}
}

33
.backup/example.rego Normal file
View File

@@ -0,0 +1,33 @@
# METADATA
# scope: package
# title: Example Policy
# description: A minimal example policy that never fires
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["Bash"]
package cupcake.policies.example
import rego.v1
# This rule will never fire - it's just here to prevent OPA compilation issues
# It checks for a command that nobody would ever type
deny contains decision if {
input.tool_input.command == "CUPCAKE_EXAMPLE_RULE_THAT_NEVER_FIRES_12345"
decision := {
"reason": "This will never happen",
"severity": "LOW",
"rule_id": "EXAMPLE-001"
}
}
# Replace the above with your actual policies
# Example of a real policy:
# deny contains decision if {
# contains(input.tool_input.command, "rm -rf /")
# decision := {
# "reason": "Dangerous command blocked",
# "severity": "HIGH",
# "rule_id": "SAFETY-001"
# }
# }

View File

@@ -0,0 +1,181 @@
# METADATA
# scope: package
# title: Ban Python Any Type
# description: Blocks introduction of typing.Any in Python code
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.prevent_any_type
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
is_python_file(path) if {
endswith(path, ".py")
}
is_python_file(path) if {
endswith(path, ".pyi")
}
# Regex patterns indicating use of Any in type annotations/imports
any_type_patterns := [
`(?m)^\s*from\s+typing\s+import\s+[^#\n]*\bAny\b`,
`\btyping\.Any\b`,
`:\s*Any\b`,
`:\s*"Any"`,
`:\s*'Any'`,
`->\s*Any\b`,
`->\s*"Any"`,
`->\s*'Any'`,
`\[\s*Any\s*\]`,
`\[\s*Any\s*,`,
`,\s*Any\s*\]`,
`,\s*Any\s*,`,
`Union\[[^\]]*\bAny\b[^\]]*\]`,
`Optional\[Any\]`,
]
# Block Write/Edit operations that introduce Any in Python files
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
# Only enforce for Python files
file_path := lower(resolved_file_path)
is_python_file(file_path)
content := new_content
content != null
some pattern in any_type_patterns
regex.match(pattern, content)
decision := {
"rule_id": "PY-TYPE-001",
"reason": "Use of Any is prohibited in Python type annotations/imports. Replace with Protocol, TypeVar, TypedDict, or a concrete type.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Patch", "ApplyPatch"}
content := patch_content
content != null
some pattern in any_type_patterns
regex.match(pattern, content)
decision := {
"rule_id": "PY-TYPE-001",
"reason": "Use of Any is prohibited in Python type annotations/imports. Replace with Protocol, TypeVar, TypedDict, or a concrete type.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := lower(edit_path(edit))
is_python_file(file_path)
content := edit_new_content(edit)
content != null
some pattern in any_type_patterns
regex.match(pattern, content)
decision := {
"rule_id": "PY-TYPE-001",
"reason": "Use of Any is prohibited in Python type annotations/imports. Replace with Protocol, TypeVar, TypedDict, or a concrete type.",
"severity": "HIGH"
}
}

View File

@@ -0,0 +1,176 @@
# METADATA
# scope: package
# title: Ban Python Type Suppression
# description: Blocks type suppression directives in Python code
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.prevent_type_suppression
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
is_python_file(path) if {
endswith(path, ".py")
}
is_python_file(path) if {
endswith(path, ".pyi")
}
# Regex patterns indicating type suppression directives
type_suppression_patterns := [
`#\s*type:\s*ignore(\[[^\]]+\])?\b`,
`#\s*pyright:\s*ignore(\[[^\]]+\])?\b`,
`#\s*mypy:\s*ignore(\[[^\]]+\])?\b`,
`#\s*pyre-ignore\b`,
`#\s*pyre-fixme\b`,
`#\s*pyrefly:\s*ignore(\[[^\]]+\])?\b`,
`#\s*basedpyright:\s*ignore(\[[^\]]+\])?\b`,
`#\s*noqa\b`,
`#\s*noqa:\s*\w+`,
]
# Block Write/Edit operations that introduce type suppression in Python files
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
# Only enforce for Python files
file_path := lower(resolved_file_path)
is_python_file(file_path)
content := new_content
content != null
some pattern in type_suppression_patterns
regex.match(pattern, content)
decision := {
"rule_id": "PY-TYPE-002",
"reason": "Type suppression directives are prohibited in Python code. Fix the underlying type/lint issues instead.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Patch", "ApplyPatch"}
content := patch_content
content != null
some pattern in type_suppression_patterns
regex.match(pattern, content)
decision := {
"rule_id": "PY-TYPE-002",
"reason": "Type suppression directives are prohibited in Python code. Fix the underlying type/lint issues instead.",
"severity": "HIGH"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := lower(edit_path(edit))
is_python_file(file_path)
content := edit_new_content(edit)
content != null
some pattern in type_suppression_patterns
regex.match(pattern, content)
decision := {
"rule_id": "PY-TYPE-002",
"reason": "Type suppression directives are prohibited in Python code. Fix the underlying type/lint issues instead.",
"severity": "HIGH"
}
}

View File

@@ -1,21 +1,16 @@
# METADATA
# scope: package
# title: Warn on New File Without Search
# description: Warns when creating new source files
# title: Warn on Baselines Edit
# description: Warns on edits to tests/quality/baselines.json
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
package cupcake.policies.opencode.warn_new_file_search
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.warn_baselines_edit
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -98,9 +93,9 @@ edit_old_content(edit) := content if {
content := edit.old_text
} else := ""
file_path_pattern := `(^|/)(src|client/src|tests)/.*\.(py|ts|tsx|js|jsx)$`
file_path_pattern := `tests/quality/baselines\.json$`
# Warn on Write/Edit operations that create new files
# Warn on Write/Edit operations targeting baselines.json
deny contains decision if {
input.hook_event_name == "PreToolUse"
@@ -109,11 +104,9 @@ deny contains decision if {
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
old_content == ""
decision := {
"rule_id": "PROCESS-001",
"reason": "Warning: creating a new source file. Ensure you searched for existing implementations.",
"rule_id": "TEST-QUALITY-004",
"reason": "Warning: editing tests/quality/baselines.json should be avoided unless explicitly required.",
"severity": "LOW"
}
}
@@ -126,12 +119,9 @@ deny contains decision if {
file_path := edit_path(edit)
regex.match(file_path_pattern, file_path)
old_content := edit_old_content(edit)
old_content == ""
decision := {
"rule_id": "PROCESS-001",
"reason": "Warning: creating a new source file. Ensure you searched for existing implementations.",
"rule_id": "TEST-QUALITY-004",
"reason": "Warning: editing tests/quality/baselines.json should be avoided unless explicitly required.",
"severity": "LOW"
}
}

View File

@@ -0,0 +1,26 @@
# METADATA
# scope: package
# title: Warn on Baselines Edit (Bash)
# description: Warns on Bash edits to tests/quality/baselines.json
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["Bash"]
package cupcake.policies.opencode.warn_baselines_edit_bash
import rego.v1
pattern := `(sed|awk|echo|cat|tee|>|>>|cp|mv).*tests/quality/baselines\.json`
deny contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "Bash"
command := input.tool_input.command
regex.match(pattern, command)
decision := {
"rule_id": "TEST-QUALITY-003",
"reason": "Warning: editing tests/quality/baselines.json should be avoided unless explicitly required.",
"severity": "LOW"
}
}

View File

@@ -0,0 +1,175 @@
# METADATA
# scope: package
# title: Warn on Large File
# description: Warns when writing large files (>= 500 lines)
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.warn_large_file
import rego.v1
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
} else := tool_input.file_path if {
tool_input.file_path != null
} else := tool_input.filePath if {
tool_input.filePath != null
} else := tool_input.path if {
tool_input.path != null
} else := tool_input.notebook_path if {
tool_input.notebook_path != null
} else := tool_input.notebookPath if {
tool_input.notebookPath != null
} else := ""
new_content := tool_input.new_string if {
tool_input.new_string != null
} else := tool_input.newText if {
tool_input.newText != null
} else := tool_input.new_text if {
tool_input.new_text != null
} else := tool_input.content if {
tool_input.content != null
} else := ""
old_content := tool_input.old_string if {
tool_input.old_string != null
} else := tool_input.oldText if {
tool_input.oldText != null
} else := tool_input.old_text if {
tool_input.old_text != null
} else := tool_input.previousContent if {
tool_input.previousContent != null
} else := ""
patch_content := tool_input.patch if {
tool_input.patch != null
} else := tool_input.patchText if {
tool_input.patchText != null
} else := tool_input.patch_text if {
tool_input.patch_text != null
} else := ""
edit_path(edit) := path if {
edit.resolved_file_path != null
path := edit.resolved_file_path
} else := path if {
edit.file_path != null
path := edit.file_path
} else := path if {
edit.filePath != null
path := edit.filePath
} else := path if {
edit.path != null
path := edit.path
} else := ""
edit_new_content(edit) := content if {
edit.new_string != null
content := edit.new_string
} else := content if {
edit.newText != null
content := edit.newText
} else := content if {
edit.new_text != null
content := edit.new_text
} else := content if {
edit.content != null
content := edit.content
} else := ""
edit_old_content(edit) := content if {
edit.old_string != null
content := edit.old_string
} else := content if {
edit.oldText != null
content := edit.oldText
} else := content if {
edit.old_text != null
content := edit.old_text
} else := ""
patch_targets_path(pattern) if {
patch := patch_content
patch != null
lines := split(patch, "\n")
some line in lines
startswith(line, "+++ b/")
path := replace(line, "+++ b/", "")
regex.match(pattern, path)
}
patch_targets_path(pattern) if {
patch := patch_content
patch != null
lines := split(patch, "\n")
some line in lines
startswith(line, "--- a/")
path := replace(line, "--- a/", "")
regex.match(pattern, path)
}
file_path_pattern := `\.(py|ts|tsx|js|jsx)$`
pattern := `(?:.*\n){500,}`
# Warn on Write/Edit operations that introduce large file content
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
content := new_content
content != null
regex.match(pattern, content)
decision := {
"rule_id": "STYLE-002",
"reason": "Warning: file content exceeds 500 lines. Consider refactoring.",
"severity": "LOW"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
regex.match(file_path_pattern, file_path)
content := edit_new_content(edit)
content != null
regex.match(pattern, content)
decision := {
"rule_id": "STYLE-002",
"reason": "Warning: file content exceeds 500 lines. Consider refactoring.",
"severity": "LOW"
}
}
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null
patch_targets_path(file_path_pattern)
regex.match(pattern, patch)
decision := {
"rule_id": "STYLE-002",
"reason": "Warning: file content exceeds 500 lines. Consider refactoring.",
"severity": "LOW"
}
}

View File

@@ -1,11 +0,0 @@
---
active: true
iteration: 1
max_iterations: 0
completion_promise: null
started_at: "2026-01-20T02:31:55Z"
started_at: "2026-01-20T02:31:55Z"
---
proceed with the plan, i have also documented a copy in @.claudectx/codefixes.md. please use your agents iteratively to manage context and speed, however you must review the accuracy and value of each doc before moving to the next
proceed with the plan, i have also documented a copy in @.claudectx/codefixes.md. please use your agents iteratively to manage context and speed, however you must review the accuracy and value of each doc before moving to the next

View File

@@ -1,377 +0,0 @@
╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌
Strategic CLAUDE.md Placement Analysis for NoteFlow
Executive Summary
This document analyzes optimal placement of CLAUDE.md files throughout the NoteFlow codebase to provide meaningful context for AI assistants. The analysis considers both
constrained (strategic) and unlimited scenarios.
---
Current State: Existing Documentation Files
10 CLAUDE.md/AGENTS.md Files Already Present
┌─────────────────────┬───────────┬──────────────────────────────────────────────────────────┐
│ Location │ File │ Focus │
├─────────────────────┼───────────┼──────────────────────────────────────────────────────────┤
│ / │ CLAUDE.md │ Root orchestration, parallel execution, project overview │
├─────────────────────┼───────────┼──────────────────────────────────────────────────────────┤
│ / │ AGENTS.md │ Architecture for non-Claude AI assistants │
├─────────────────────┼───────────┼──────────────────────────────────────────────────────────┤
│ /src/ │ CLAUDE.md │ Python backend entry point │
├─────────────────────┼───────────┼──────────────────────────────────────────────────────────┤
│ /src/ │ AGENTS.md │ Python backend for other AIs │
├─────────────────────┼───────────┼──────────────────────────────────────────────────────────┤
│ /src/noteflow/ │ CLAUDE.md │ Detailed Python standards (line limits, typing, modules) │
├─────────────────────┼───────────┼──────────────────────────────────────────────────────────┤
│ /src/noteflow/grpc/ │ CLAUDE.md │ gRPC security patterns │
├─────────────────────┼───────────┼──────────────────────────────────────────────────────────┤
│ /src/noteflow/grpc/ │ AGENTS.md │ gRPC security (duplicate) │
├─────────────────────┼───────────┼──────────────────────────────────────────────────────────┤
│ /client/ │ CLAUDE.md │ Tauri + React development │
├─────────────────────┼───────────┼──────────────────────────────────────────────────────────┤
│ /client/src/ │ CLAUDE.md │ TypeScript security rules │
├─────────────────────┼───────────┼──────────────────────────────────────────────────────────┤
│ /docker/ │ CLAUDE.md │ Docker security and build patterns │
└─────────────────────┴───────────┴──────────────────────────────────────────────────────────┘
---
Part 1: Strategic Placement (Constrained Resources)
If limited to 5-7 additional files, prioritize these high-impact locations:
Tier 1: Critical Gaps (Add These First)
1. /src/noteflow/infrastructure/CLAUDE.md
Why: Infrastructure layer has 15+ adapters with distinct patterns (ASR, diarization, NER, summarization, calendar, webhooks, persistence). No unified guidance exists.
2. /src/noteflow/domain/CLAUDE.md
Why: Domain layer defines entities, ports, rules, and value objects. Understanding DDD boundaries prevents architectural violations.
3. /src/noteflow/application/services/CLAUDE.md
Why: 12+ services with distinct responsibilities. Service-level guidance prevents duplication and clarifies orchestration patterns.
4. /client/src/hooks/CLAUDE.md
Why: 7 hook directories (audio, auth, data, processing, recording, sync, ui) with complex interdependencies. Prevents reinventing existing hooks.
5. /client/src-tauri/src/CLAUDE.md
Why: Rust backend has commands, gRPC client, audio processing, state management. No Rust-specific guidance currently exists.
Tier 2: High Value (Add Next)
6. /tests/CLAUDE.md
Why: Testing conventions (fixtures, markers, quality gates) are scattered. Centralized guidance improves test quality.
7. /src/noteflow/infrastructure/persistence/CLAUDE.md
Why: UnitOfWork pattern, repository hierarchy, capability flags, migrations are complex. Prevents incorrect persistence patterns.
---
Part 2: Unlimited Placement (Comprehensive Coverage)
With no constraints, here's the complete list of 25+ locations where CLAUDE.md would add value:
Python Backend (src/noteflow/)
┌────────────────────────────────────────┬─────────────────────────────────────────────────────────┐
│ Path │ Content Focus │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ domain/CLAUDE.md │ DDD entities, ports, value objects, rules engine │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ domain/entities/CLAUDE.md │ Entity relationships, state machines, invariants │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ domain/ports/CLAUDE.md │ Repository protocols, capability contracts │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ domain/rules/CLAUDE.md │ Rule modes (SIMPLE→EXPRESSION), registry, evaluation │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ application/CLAUDE.md │ Use case organization, service boundaries │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ application/services/CLAUDE.md │ Service catalog, dependency patterns │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ infrastructure/CLAUDE.md │ Adapter patterns, external integrations │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ infrastructure/asr/CLAUDE.md │ Whisper, VAD, segmentation, streaming │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ infrastructure/diarization/CLAUDE.md │ Job lifecycle, streaming vs offline, speaker assignment │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ infrastructure/ner/CLAUDE.md │ Backend abstraction, mapper, post-processing │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ infrastructure/summarization/CLAUDE.md │ Provider protocols, consent workflow, citation linking │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ infrastructure/persistence/CLAUDE.md │ UnitOfWork, repositories, migrations │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ infrastructure/calendar/CLAUDE.md │ OAuth flow, sync patterns, trigger detection │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ infrastructure/webhooks/CLAUDE.md │ Delivery, signing, retry logic │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ grpc/mixins/CLAUDE.md │ Mixin composition, streaming handlers │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ grpc/startup/CLAUDE.md │ Service initialization, dependency injection │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ config/CLAUDE.md │ Settings cascade, feature flags, environment loading │
├────────────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ cli/CLAUDE.md │ Command patterns, model management │
└────────────────────────────────────────┴─────────────────────────────────────────────────────────┘
Client (client/)
┌──────────────────────────────────┬─────────────────────────────────────────────────────────┐
│ Path │ Content Focus │
├──────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ src/api/CLAUDE.md │ Adapter pattern, transport abstraction, type generation │
├──────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ src/components/CLAUDE.md │ Component hierarchy, feature organization │
├──────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ src/hooks/CLAUDE.md │ Hook catalog, composition patterns, state management │
├──────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ src/lib/CLAUDE.md │ Utility catalog, AI providers, audio processing │
├──────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ src-tauri/src/CLAUDE.md │ Rust patterns, command handlers, state │
├──────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ src-tauri/src/commands/CLAUDE.md │ IPC contract, audio commands, recording session │
├──────────────────────────────────┼─────────────────────────────────────────────────────────┤
│ src-tauri/src/grpc/CLAUDE.md │ gRPC client wrapper, type conversions │
└──────────────────────────────────┴─────────────────────────────────────────────────────────┘
Testing (tests/)
┌─────────────────────────────┬────────────────────────────────────────────────────┐
│ Path │ Content Focus │
├─────────────────────────────┼────────────────────────────────────────────────────┤
│ tests/CLAUDE.md │ Test conventions, fixtures, markers, quality gates │
├─────────────────────────────┼────────────────────────────────────────────────────┤
│ tests/fixtures/CLAUDE.md │ Shared fixtures catalog, usage patterns │
├─────────────────────────────┼────────────────────────────────────────────────────┤
│ tests/integration/CLAUDE.md │ Integration test setup, testcontainers │
└─────────────────────────────┴────────────────────────────────────────────────────┘
Documentation (docs/)
┌────────────────────────┬───────────────────────────────────────────┐
│ Path │ Content Focus │
├────────────────────────┼───────────────────────────────────────────┤
│ docs/sprints/CLAUDE.md │ Sprint structure, documentation standards │
└────────────────────────┴───────────────────────────────────────────┘
---
Part 3: Mockup - /src/noteflow/infrastructure/CLAUDE.md
# Infrastructure Layer Development Guide
## Overview
The infrastructure layer (`src/noteflow/infrastructure/`) contains adapters that implement domain ports. These connect the application to external systems: databases, ML
models, cloud APIs, file systems.
---
## Architecture Principle: Hexagonal/Ports-and-Adapters
Domain Ports (interfaces) Infrastructure Adapters (implementations)
───────────────────────── ───────────────────────────────────────────
NerPort → SpacyBackend, GlinerBackend
SummarizationProvider → CloudProvider, OllamaProvider, MockProvider
DiarizationEngine → DiartSession, PyannoteOffline
AssetRepository → FileSystemAssetRepository
UnitOfWork → SqlAlchemyUnitOfWork, MemoryUnitOfWork
CalendarProvider → GoogleCalendar, OutlookCalendar
**Rule**: Infrastructure code imports domain; domain NEVER imports infrastructure.
---
## Adapter Catalog
| Directory | Responsibility | Key Protocols |
|-----------|----------------|---------------|
| `asr/` | Speech-to-text (Whisper) | `TranscriptionResult` |
| `diarization/` | Speaker identification | `DiarizationEngine`, `DiarizationJob` |
| `ner/` | Named entity extraction | `NerPort` |
| `summarization/` | LLM summarization | `SummarizationProvider` |
| `persistence/` | Database (SQLAlchemy) | `UnitOfWork`, `*Repository` |
| `calendar/` | OAuth + event sync | `CalendarProvider` |
| `webhooks/` | Event delivery | `WebhookDeliveryService` |
| `export/` | PDF/HTML/Markdown | `ExportAdapter` |
| `audio/` | Recording/playback | `AudioDevice` |
| `crypto/` | Encryption | `Keystore` |
| `logging/` | Structured logging | `LogEventType` |
| `metrics/` | Observability | `MetricsCollector` |
| `gpu/` | GPU detection | `GpuInfo` |
---
## Common Patterns
### 1. Async Wrappers for Sync Libraries
Many ML libraries (spaCy, faster-whisper) are synchronous. Wrap them:
```python
async def extract(self, text: str) -> list[NamedEntity]:
loop = asyncio.get_running_loop()
return await loop.run_in_executor(
None, # Default ThreadPoolExecutor
self._sync_extract,
text
)
2. Backend Selection via Factory
def create_ner_engine(config: NerConfig) -> NerPort:
match config.backend:
case "spacy":
return SpacyBackend(model=config.model_name)
case "gliner":
return GlinerBackend(model=config.model_name)
case _:
raise ValueError(f"Unknown NER backend: {config.backend}")
3. Capability Flags for Optional Features
class SqlAlchemyUnitOfWork(UnitOfWork):
@property
def supports_entities(self) -> bool:
return True # Has EntityRepository
@property
def supports_webhooks(self) -> bool:
return True # Has WebhookRepository
Always check capability before accessing optional repository:
if uow.supports_entities:
entities = await uow.entities.get_by_meeting(meeting_id)
4. Provider Protocol Pattern
class SummarizationProvider(Protocol):
async def summarize(
self,
segments: list[Segment],
template: SummarizationTemplate,
) -> SummaryResult: ...
@property
def requires_consent(self) -> bool: ...
---
Forbidden Patterns
❌ Direct database access outside persistence/
# WRONG: Raw SQL in service layer
async with engine.connect() as conn:
result = await conn.execute(text("SELECT * FROM meetings"))
❌ Hardcoded API keys
# WRONG: Secrets in code
client = anthropic.Anthropic(api_key="sk-ant-...")
❌ Synchronous I/O in async context
# WRONG: Blocking the event loop
def load_model(self):
self.model = whisper.load_model("base") # Blocks!
❌ Domain imports in infrastructure
# WRONG: Infrastructure should implement domain ports, not modify domain
from noteflow.domain.entities import Meeting
meeting.state = "COMPLETED" # Don't mutate domain objects here
---
Testing Infrastructure Adapters
Use Dependency Injection for Mocking
# tests/infrastructure/ner/test_engine.py
@pytest.fixture
def mock_backend() -> NerBackend:
backend = Mock(spec=NerBackend)
backend.extract.return_value = [
RawEntity(text="John", label="PERSON", start=0, end=4)
]
return backend
async def test_engine_uses_backend(mock_backend):
engine = NerEngine(backend=mock_backend)
result = await engine.extract("Hello John")
mock_backend.extract.assert_called_once()
Integration Tests with Real Services
# tests/integration/test_ner_integration.py
@pytest.mark.integration
@pytest.mark.requires_gpu
async def test_gliner_real_extraction():
backend = GlinerBackend(model="urchade/gliner_base")
result = await backend.extract("Microsoft CEO Satya Nadella announced...")
assert any(e.label == "ORG" and "Microsoft" in e.text for e in result)
---
Adding a New Adapter
1. Define port in domain (domain/ports/) if not exists
2. Create adapter directory (infrastructure/<adapter_name>/)
3. Implement the protocol with proper async handling
4. Add factory function for backend selection
5. Write unit tests with mocked dependencies
6. Write integration test with real external service
7. Update gRPC startup (grpc/startup/services.py) for dependency injection
8. Document in this file (update Adapter Catalog table)
---
Key Files
┌───────────────────────────────┬──────────────────────────────────┐
│ File │ Purpose │
├───────────────────────────────┼──────────────────────────────────┤
│ __init__.py │ Public exports for layer │
├───────────────────────────────┼──────────────────────────────────┤
│ */engine.py │ Main adapter implementation │
├───────────────────────────────┼──────────────────────────────────┤
│ */backends/ │ Multiple backend implementations │
├───────────────────────────────┼──────────────────────────────────┤
│ */mapper.py │ External→Domain type conversion │
├───────────────────────────────┼──────────────────────────────────┤
│ */post_processing.py │ Output normalization │
├───────────────────────────────┼──────────────────────────────────┤
│ persistence/unit_of_work/*.py │ Transaction management │
├───────────────────────────────┼──────────────────────────────────┤
│ persistence/repositories/*.py │ Data access │
├───────────────────────────────┼──────────────────────────────────┤
│ persistence/models/*.py │ ORM definitions │
└───────────────────────────────┴──────────────────────────────────┘
---
See Also
- /src/noteflow/domain/ports/ — Port definitions
- /src/noteflow/grpc/startup/services.py — Dependency injection
- /tests/infrastructure/ — Adapter tests
---
## Part 4: Answer to "Would Your Answer Change With No Limit?"
**Yes, significantly.**
### Constrained (5-7 files):
Focus on **layer boundaries** (domain, application, infrastructure) and **high-complexity areas** (hooks, Rust backend). Each file covers broad territory.
### Unlimited (25+ files):
Add **subsystem-specific documentation** for:
- Complex state machines (diarization jobs, recording lifecycle)
- Protocol patterns (summarization providers, NER backends)
- Cross-cutting concerns (rules engine, settings cascade)
- Test organization (fixtures, integration setup)
The key difference: with unlimited resources, document **WHY decisions were made** (design rationale), not just **WHAT exists** (API reference).
---
## Recommendation
### Immediate Action (Phase 1)
Add these 3 files for maximum impact:
1. `/src/noteflow/infrastructure/CLAUDE.md` — Adapter patterns (mockup above)
2. `/src/noteflow/domain/CLAUDE.md` — DDD boundaries, entity relationships
3. `/client/src-tauri/src/CLAUDE.md` — Rust patterns, IPC contracts
### Follow-up (Phase 2)
4. `/src/noteflow/application/services/CLAUDE.md` — Service catalog
5. `/client/src/hooks/CLAUDE.md` — Hook organization
6. `/tests/CLAUDE.md` — Testing conventions
### Future (Phase 3)
Remaining 19+ files as the codebase grows and patterns stabilize.

View File

@@ -0,0 +1,49 @@
# METADATA
# scope: package
# description: Helper functions for secure command analysis
package cupcake.helpers.commands
import rego.v1
# Check if command contains a specific verb with proper word boundary anchoring
# This prevents bypass via extra whitespace: "git commit" or " git commit"
has_verb(command, verb) if {
pattern := concat("", ["(^|\\s)", verb, "(\\s|$)"])
regex.match(pattern, command)
}
# Check if command contains ANY of the dangerous verbs from a set
# More efficient than checking each verb individually in policy code
has_dangerous_verb(command, verb_set) if {
some verb in verb_set
has_verb(command, verb)
}
# Detect symlink creation commands
# Matches: ln -s, ln -sf, ln -s -f, etc.
creates_symlink(command) if {
has_verb(command, "ln")
contains(command, "-s")
}
# Check if symlink command involves a protected path
# IMPORTANT: Checks BOTH source and target (addresses TOB-EQTY-LAB-CUPCAKE-4)
# Blocks: ln -s .cupcake foo AND ln -s foo .cupcake
symlink_involves_path(command, protected_path) if {
creates_symlink(command)
contains(command, protected_path)
}
# Detect output redirection operators that could bypass file protection
# Matches: >, >>, |, tee
has_output_redirect(command) if {
redirect_patterns := [
`\s>\s`, # stdout redirect
`\s>>\s`, # stdout append
`\s\|\s`, # pipe
`(^|\s)tee(\s|$)`, # tee command
]
some pattern in redirect_patterns
regex.match(pattern, command)
}

View File

@@ -5,18 +5,13 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.ban_stdlib_logger
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -114,7 +109,7 @@ file_path_pattern := `\.py$`
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
@@ -132,7 +127,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
@@ -151,7 +146,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"patch", "apply_patch"}
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null

View File

@@ -5,17 +5,12 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_assertion_roulette
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -126,7 +121,7 @@ assertion_pattern := `^\s*assert\s+[^,\n]+\n\s*assert\s+[^,\n]+$`
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
@@ -144,7 +139,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
@@ -163,7 +158,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"patch", "apply_patch"}
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null

View File

@@ -5,17 +5,12 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_biome_ignore
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -126,7 +121,7 @@ ignore_pattern := `//\s*biome-ignore|//\s*@ts-ignore|//\s*@ts-expect-error|//\s*
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
@@ -144,7 +139,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
@@ -163,7 +158,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"patch", "apply_patch"}
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null

View File

@@ -5,7 +5,7 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["bash"]
# required_tools: ["Bash"]
package cupcake.policies.opencode.block_biome_ignore_bash
import rego.v1

View File

@@ -5,17 +5,12 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_broad_exception_handler
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -104,7 +99,7 @@ handler_pattern := `except\s+Exception\s*(?:as\s+\w+)?:\s*\n\s+(?:logger\.|loggi
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
content := new_content
content != null
@@ -119,7 +114,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
content := edit_new_content(edit)
@@ -135,7 +130,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"patch", "apply_patch"}
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null

View File

@@ -5,7 +5,7 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["bash"]
# required_tools: ["Bash"]
package cupcake.policies.opencode.block_code_quality_test_bash
import rego.v1

View File

@@ -5,17 +5,12 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_code_quality_test_edits
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -104,7 +99,7 @@ file_path_pattern := `src/test/code-quality\.test\.ts$`
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
@@ -118,7 +113,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)

View File

@@ -5,17 +5,18 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: []
# required_tools:
# - McpSerenaReplaceContent
# - McpSerenaReplaceSymbolBody
# - McpSerenaCreateTextFile
# - McpSerenaInsertBeforeSymbol
# - McpSerenaInsertAfterSymbol
# - McpSerenaRenameSymbol
package cupcake.policies.opencode.block_code_quality_test_serena
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -111,7 +112,7 @@ get_relative_path := path if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_names := {"mcp__serena__replace_content", "mcp__serena__replace_symbol_body", "mcp__serena__create_text_file", "mcp__serena__insert_before_symbol", "mcp__serena__insert_after_symbol", "mcp__serena__rename_symbol"}
tool_names := {"McpSerenaReplaceContent", "McpSerenaReplaceSymbolBody", "McpSerenaCreateTextFile", "McpSerenaInsertBeforeSymbol", "McpSerenaInsertAfterSymbol", "McpSerenaRenameSymbol"}
tool_name in tool_names
file_path := get_relative_path

View File

@@ -5,17 +5,18 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: []
# required_tools:
# - McpPluginSerenaSerenaReplaceContent
# - McpPluginSerenaSerenaReplaceSymbolBody
# - McpPluginSerenaSerenaCreateTextFile
# - McpPluginSerenaSerenaInsertBeforeSymbol
# - McpPluginSerenaSerenaInsertAfterSymbol
# - McpPluginSerenaSerenaRenameSymbol
package cupcake.policies.opencode.block_code_quality_test_serena_plugin
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -111,7 +112,7 @@ get_relative_path := path if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_names := {"mcp__plugin_serena_serena__replace_content", "mcp__plugin_serena_serena__replace_symbol_body", "mcp__plugin_serena_serena__create_text_file", "mcp__plugin_serena_serena__insert_before_symbol", "mcp__plugin_serena_serena__insert_after_symbol", "mcp__plugin_serena_serena__rename_symbol"}
tool_names := {"McpPluginSerenaSerenaReplaceContent", "McpPluginSerenaSerenaReplaceSymbolBody", "McpPluginSerenaSerenaCreateTextFile", "McpPluginSerenaSerenaInsertBeforeSymbol", "McpPluginSerenaSerenaInsertAfterSymbol", "McpPluginSerenaSerenaRenameSymbol"}
tool_name in tool_names
file_path := get_relative_path

View File

@@ -5,17 +5,12 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_datetime_now_fallback
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -104,7 +99,7 @@ pattern := `return\s+datetime\.now\s*\(`
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
content := new_content
content != null
@@ -119,7 +114,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
content := edit_new_content(edit)
@@ -135,7 +130,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"patch", "apply_patch"}
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null

View File

@@ -5,17 +5,12 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_default_value_swallow
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -104,7 +99,7 @@ pattern := `except\s+\w*(?:Error|Exception).*?:\s*\n\s+.*?(?:logger\.|logging\.)
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
content := new_content
content != null
@@ -119,7 +114,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
content := edit_new_content(edit)
@@ -135,7 +130,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"patch", "apply_patch"}
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null

View File

@@ -5,17 +5,12 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_duplicate_fixtures
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -127,7 +122,7 @@ fixture_pattern := `@pytest\.fixture[^@]*\ndef\s+(mock_uow|crypto|meetings_dir|w
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
@@ -146,7 +141,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
@@ -166,7 +161,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"patch", "apply_patch"}
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null

View File

@@ -5,17 +5,12 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_linter_config_frontend
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -104,7 +99,7 @@ file_path_pattern := `(^|/)client/.*(?:\.?eslint(?:rc|\.config).*|\.?prettier(?:
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
@@ -119,7 +114,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)

View File

@@ -5,7 +5,7 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["bash"]
# required_tools: ["Bash"]
package cupcake.policies.opencode.block_linter_config_frontend_bash
import rego.v1

View File

@@ -5,17 +5,12 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_linter_config_python
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -104,7 +99,7 @@ file_path_pattern := `(?:pyproject\.toml|\.?ruff\.toml|\.?pyrightconfig\.json|\.
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
@@ -119,7 +114,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)

View File

@@ -5,7 +5,7 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["bash"]
# required_tools: ["Bash"]
package cupcake.policies.opencode.block_linter_config_python_bash
import rego.v1

View File

@@ -5,17 +5,12 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_magic_numbers
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -126,7 +121,7 @@ number_pattern := `(?:timeout|delay|interval|duration|limit|max|min|size|count|t
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
@@ -145,7 +140,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
@@ -165,7 +160,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"patch", "apply_patch"}
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null

View File

@@ -5,7 +5,7 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["bash"]
# required_tools: ["Bash"]
package cupcake.policies.opencode.block_makefile_bash
import rego.v1

View File

@@ -5,17 +5,12 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_makefile_edit
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -104,7 +99,7 @@ file_path_pattern := `(?:^|/)Makefile$`
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
@@ -118,7 +113,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)

View File

@@ -5,7 +5,7 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["bash"]
# required_tools: ["Bash"]
package cupcake.policies.opencode.block_no_verify
import rego.v1

View File

@@ -5,17 +5,12 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_silent_none_return
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -104,7 +99,7 @@ pattern := `except\s+\w*Error.*?:\s*\n\s+.*?(?:logger\.|logging\.).*?\n\s+return
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
content := new_content
content != null
@@ -119,7 +114,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
content := edit_new_content(edit)
@@ -135,7 +130,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"patch", "apply_patch"}
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null

View File

@@ -5,17 +5,12 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_test_loops_conditionals
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -126,7 +121,7 @@ pattern := `def test_[^(]+\([^)]*\)[^:]*:[\s\S]*?\b(for|while|if)\s+[^:]+:[\s\S]
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
@@ -144,7 +139,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
@@ -163,7 +158,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"patch", "apply_patch"}
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null

View File

@@ -5,17 +5,12 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.block_tests_quality
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -105,7 +100,7 @@ exclude_pattern := `baselines\.json$`
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
@@ -120,7 +115,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)

View File

@@ -5,7 +5,7 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["bash"]
# required_tools: ["Bash"]
package cupcake.policies.opencode.block_tests_quality_bash
import rego.v1

View File

@@ -0,0 +1,107 @@
# METADATA
# scope: package
# title: Git Block No-Verify - Builtin Policy
# authors: ["Cupcake Builtins"]
# custom:
# severity: HIGH
# id: BUILTIN-GIT-BLOCK-NO-VERIFY
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["Bash"]
package cupcake.policies.builtins.git_block_no_verify
import rego.v1
import data.cupcake.helpers.commands
# Block git commands that bypass verification hooks
deny contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "Bash"
# Get the command from tool input
command := lower(input.tool_input.command)
# Check if it's a git command with --no-verify flag
contains_git_no_verify(command)
decision := {
"rule_id": "BUILTIN-GIT-BLOCK-NO-VERIFY",
"reason": "Git operations with --no-verify are not permitted. Commit hooks must run for code quality and security checks.",
"severity": "HIGH",
}
}
# Check if command contains git with --no-verify flag
# Uses helper library to prevent spacing bypass (TOB-EQTY-LAB-CUPCAKE-3)
contains_git_no_verify(cmd) if {
# Check for git commit with --no-verify
commands.has_verb(cmd, "git")
commands.has_verb(cmd, "commit")
contains(cmd, "--no-verify")
}
contains_git_no_verify(cmd) if {
# Check for git commit with -n (shorthand for --no-verify)
commands.has_verb(cmd, "git")
commands.has_verb(cmd, "commit")
regex.match(`\s-[a-z]*n[a-z]*\s`, concat(" ", [cmd, " "])) # Matches -n, -an, -nm, etc.
}
contains_git_no_verify(cmd) if {
# Check for git push with --no-verify
commands.has_verb(cmd, "git")
commands.has_verb(cmd, "push")
contains(cmd, "--no-verify")
}
contains_git_no_verify(cmd) if {
# Check for git merge with --no-verify
commands.has_verb(cmd, "git")
commands.has_verb(cmd, "merge")
contains(cmd, "--no-verify")
}
# Also block attempts to disable hooks via config
deny contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "Bash"
command := lower(input.tool_input.command)
# Check if trying to disable hooks via git config
contains_hook_disable(command)
decision := {
"rule_id": "BUILTIN-GIT-BLOCK-NO-VERIFY",
"reason": "Disabling git hooks is not permitted. Hooks are required for code quality and security.",
"severity": "HIGH",
}
}
contains_hook_disable(cmd) if {
commands.has_verb(cmd, "git")
commands.has_verb(cmd, "config")
contains(cmd, "core.hooksPath")
contains(cmd, "/dev/null")
}
contains_hook_disable(cmd) if {
# Detect attempts to chmod hooks to non-executable
commands.has_verb(cmd, "chmod")
regex.match(`\.git/hooks`, cmd)
regex.match(`-x|-[0-9]*0[0-9]*`, cmd) # Removing execute permission
}
contains_hook_disable(cmd) if {
# Detect attempts to remove hook files
contains(cmd, ".git/hooks")
removal_cmds := {"rm", "unlink", "trash"}
commands.has_dangerous_verb(cmd, removal_cmds)
}
contains_hook_disable(cmd) if {
# Detect moving/renaming hooks to disable them
commands.has_verb(cmd, "mv")
contains(cmd, ".git/hooks")
}

View File

@@ -0,0 +1,121 @@
# METADATA
# scope: package
# title: Git Pre-Check - Builtin Policy
# authors: ["Cupcake Builtins"]
# custom:
# severity: HIGH
# id: BUILTIN-GIT-CHECK
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["Bash"]
package cupcake.policies.builtins.git_pre_check
import rego.v1
# Check git operations and run validation before allowing
halt contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "Bash"
# Check if this is a git operation that needs validation
command := lower(input.params.command)
is_git_operation(command)
# Run all configured checks
check_results := run_all_checks
# Find any failed checks
failed_checks := [check |
some check in check_results
not check.success
]
# If any checks failed, halt the operation
count(failed_checks) > 0
# Build failure message
failure_messages := [msg |
some check in failed_checks
msg := concat("", ["- ", check.message])
]
failure_list := concat("\n", failure_messages)
reason := concat("\n", ["Git pre-checks failed:", failure_list])
decision := {
"rule_id": "BUILTIN-GIT-CHECK",
"reason": reason,
"severity": "HIGH",
}
}
# Check if command is a git operation that needs validation
is_git_operation(cmd) if {
git_patterns := {
"git commit",
"git push",
"git merge",
}
some pattern in git_patterns
contains(cmd, pattern)
}
# Run all configured pre-checks
run_all_checks := results if {
# Collect all git check signals
check_signals := [name |
some name, _ in input.signals
startswith(name, "__builtin_git_check_")
]
# Evaluate each check
results := [result |
some signal_name in check_signals
signal_result := input.signals[signal_name]
result := evaluate_check(signal_name, signal_result)
]
# Return results if we have any
count(results) > 0
} else := []
# No checks configured
# Evaluate a check result
evaluate_check(name, result) := check if {
# Parse the signal result which should contain exit_code and output
is_object(result)
check := {
"name": clean_signal_name(name),
"success": result.exit_code == 0,
"message": default_message(result),
}
} else := check if {
# Handle string results (command output)
is_string(result)
check := {
"name": clean_signal_name(name),
"success": true, # Assume success if we got output
"message": result,
}
}
# Extract readable name from signal name
clean_signal_name(signal_name) := name if {
# Remove __builtin_git_check_ prefix and return the index
parts := split(signal_name, "__builtin_git_check_")
count(parts) > 1
name := concat("Check ", [parts[1]])
} else := signal_name
# Get appropriate message from result
default_message(result) := msg if {
result.output != ""
msg := result.output
} else := msg if {
result.exit_code == 0
msg := "Check passed"
} else := msg if {
msg := concat("", ["Check failed with exit code ", format_int(result.exit_code, 10)])
}

View File

@@ -0,0 +1,60 @@
# METADATA
# scope: package
# title: Always Inject On Prompt - Builtin Policy
# authors: ["Cupcake Builtins"]
# custom:
# severity: LOW
# id: BUILTIN-INJECT-PROMPT
# routing:
# required_events: ["UserPromptSubmit"]
package cupcake.policies.builtins.opencode_always_inject_on_prompt
import rego.v1
# Inject configured context on every user prompt
add_context contains decision if {
input.hook_event_name == "UserPromptSubmit"
# Get all configured context items
contexts := get_all_contexts
count(contexts) > 0
# Combine all contexts
combined_context := concat("\n\n", contexts)
decision := {
"rule_id": "BUILTIN-INJECT-PROMPT",
"context": combined_context,
"severity": "LOW",
}
}
# Get all configured contexts from signals
get_all_contexts := contexts if {
# Collect all builtin prompt context signals
signal_results := [value |
some key, value in input.signals
startswith(key, "__builtin_prompt_context_")
]
# Format each context appropriately
contexts := [ctx |
some result in signal_results
ctx := format_context(result)
]
# Ensure we have at least one context
count(contexts) > 0
} else := []
# No signals available or no contexts configured
# Format context based on its source
format_context(value) := formatted if {
# If it's a string, use it directly
is_string(value)
formatted := value
} else := formatted if {
# If it's an object/array, format as JSON
formatted := json.marshal(value)
}

View File

@@ -0,0 +1,63 @@
# METADATA
# scope: package
# title: Enforce Full File Read - Builtin Policy
# authors: ["Cupcake Builtins"]
# custom:
# severity: MEDIUM
# id: BUILTIN-ENFORCE-FULL-READ
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["Read"]
package cupcake.policies.builtins.opencode_enforce_full_file_read
import rego.v1
# Deny partial reads of files (MVP: enforce for all files)
deny contains decision if {
# Only apply to Read tool
input.hook_event_name == "PreToolUse"
input.tool_name == "Read"
# Check if offset or limit parameters are present
has_partial_read_params
# Get configured message from signal (with fallback)
message := get_configured_message
decision := {
"rule_id": "BUILTIN-ENFORCE-FULL-READ",
"reason": message,
"severity": "MEDIUM",
}
}
# Check if the Read tool has offset or limit parameters
has_partial_read_params if {
# Check for offset parameter
"offset" in object.keys(input.tool_input)
}
has_partial_read_params if {
# Check for limit parameter
"limit" in object.keys(input.tool_input)
}
# Get configured message from builtin config
get_configured_message := msg if {
# Direct access to builtin config (no signal execution needed)
msg := input.builtin_config.opencode_enforce_full_file_read.message
} else := msg if {
# Fallback to default message
msg := "Please read the entire file first (files under 2000 lines must be read completely)"
}
# Future enhancement: Get max lines threshold
# This would be used in a future version to check file size
# and only enforce full reads for files under the threshold
get_max_lines_threshold := lines if {
# Direct access to builtin config (no signal execution needed)
lines := input.builtin_config.opencode_enforce_full_file_read.max_lines
} else := lines if {
# Default to 2000 lines
lines := 2000
}

View File

@@ -0,0 +1,135 @@
# METADATA
# scope: package
# title: Post Edit Check - Builtin Policy
# authors: ["Cupcake Builtins"]
# custom:
# severity: MEDIUM
# id: BUILTIN-POST-EDIT
# routing:
# required_events: ["PostToolUse"]
# required_tools: ["Edit", "Write", "MultiEdit", "NotebookEdit"]
package cupcake.policies.builtins.post_edit_check
import rego.v1
# Run validation after file edits
ask contains decision if {
input.hook_event_name == "PostToolUse"
# Check if this was a file editing operation
editing_tools := {"Edit", "Write", "MultiEdit", "NotebookEdit"}
input.tool_name in editing_tools
# Get the file that was edited
file_path := get_edited_file_path
file_path != ""
# Get file extension
extension := get_file_extension(file_path)
extension != ""
# Run validation for this file type
validation_result := run_validation_for_extension(extension, file_path)
# If validation failed, ask for user confirmation
not validation_result.success
question := concat("\n", [
concat(" ", ["File validation failed for", file_path]),
validation_result.message,
"",
"Do you want to continue anyway?",
])
decision := {
"rule_id": "BUILTIN-POST-EDIT",
"reason": question,
"question": question,
"severity": "MEDIUM",
}
}
# Also provide feedback as context when validation succeeds
add_context contains context_msg if {
input.hook_event_name == "PostToolUse"
editing_tools := {"Edit", "Write", "MultiEdit", "NotebookEdit"}
input.tool_name in editing_tools
file_path := get_edited_file_path
file_path != ""
extension := get_file_extension(file_path)
extension != ""
validation_result := run_validation_for_extension(extension, file_path)
# If validation succeeded, provide positive feedback
validation_result.success
# add_context expects strings, not decision objects
context_msg := concat(" ", ["✓ Validation passed for", file_path])
}
# Extract file path from tool response/params
get_edited_file_path := path if {
path := input.params.file_path
} else := path if {
path := input.params.path
} else := ""
# Get file extension from path
get_file_extension(path) := ext if {
parts := split(path, ".")
count(parts) > 1
ext := parts[count(parts) - 1]
} else := ""
# Run validation for a specific file extension
run_validation_for_extension(ext, file_path) := result if {
# Check if there's a configured validation signal for this extension
signal_name := concat("", ["__builtin_post_edit_", ext])
signal_name in object.keys(input.signals)
# Get the validation result from the signal
signal_result := input.signals[signal_name]
# Parse the result based on its type
result := parse_validation_result(signal_result, file_path)
} else := result if {
# No validation configured for this extension
result := {
"success": true,
"message": "No validation configured - FALLBACK",
}
}
# Parse validation result from signal
parse_validation_result(signal_result, file_path) := result if {
# Handle object results with exit_code (standard format from signal execution)
is_object(signal_result)
"exit_code" in object.keys(signal_result)
result := {
"success": signal_result.exit_code == 0,
"message": default_validation_message(signal_result, file_path),
}
} else := result if {
# Handle string results (assume success if we got output)
is_string(signal_result)
result := {
"success": true,
"message": signal_result,
}
}
# Generate appropriate validation message
default_validation_message(signal_result, file_path) := msg if {
signal_result.output != ""
msg := signal_result.output
} else := msg if {
signal_result.exit_code == 0
msg := "Validation passed"
} else := msg if {
msg := concat("", ["Validation failed with exit code ", format_int(signal_result.exit_code, 10)])
}

View File

@@ -0,0 +1,380 @@
# METADATA
# scope: package
# title: Protected Paths - Builtin Policy
# authors: ["Cupcake Builtins"]
# custom:
# severity: HIGH
# id: BUILTIN-PROTECTED-PATHS
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["Edit", "Write", "MultiEdit", "NotebookEdit", "Bash"]
package cupcake.policies.builtins.protected_paths
import data.cupcake.helpers.commands
import data.cupcake.helpers.paths
import rego.v1
# Block WRITE operations on protected paths (but allow reads)
# For regular tools (Edit, Write, NotebookEdit)
halt contains decision if {
input.hook_event_name == "PreToolUse"
# Check for SINGLE-file writing tools only
single_file_tools := {"Edit", "Write", "NotebookEdit"}
input.tool_name in single_file_tools
# Get the file path from tool input
# TOB-4 fix: Use canonical path (always provided by Rust preprocessing)
file_path := input.resolved_file_path
file_path != null
# Check if path matches any protected path
is_protected_path(file_path)
# Get configured message from signals
message := get_configured_message
decision := {
"rule_id": "BUILTIN-PROTECTED-PATHS",
"reason": concat("", [message, " (", file_path, ")"]),
"severity": "HIGH",
}
}
# Block WRITE operations on protected paths - MultiEdit special handling
# MultiEdit has an array of edits, each with their own resolved_file_path
halt contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "MultiEdit"
# Check each edit in the edits array
some edit in input.tool_input.edits
file_path := edit.resolved_file_path
file_path != null
# Check if THIS edit's path matches any protected path
is_protected_path(file_path)
# Get configured message from signals
message := get_configured_message
decision := {
"rule_id": "BUILTIN-PROTECTED-PATHS",
"reason": concat("", [message, " (", file_path, ")"]),
"severity": "HIGH",
}
}
# Block ALL Bash commands that reference protected paths UNLESS whitelisted
halt contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "Bash"
# Get the command
command := input.tool_input.command
lower_cmd := lower(command)
# Check if any protected path is mentioned in the command
some protected_path in get_protected_paths
contains_protected_reference(lower_cmd, protected_path)
# ONLY allow if it's a whitelisted read operation
not is_whitelisted_read_command(lower_cmd)
message := get_configured_message
decision := {
"rule_id": "BUILTIN-PROTECTED-PATHS",
"reason": concat("", [message, " (only read operations allowed)"]),
"severity": "HIGH",
}
}
# Block destructive commands that would affect a parent directory containing protected paths
# This catches cases like `rm -rf /home/user/*` when `/home/user/.cupcake/` is protected
# The `affected_parent_directories` field is populated by Rust preprocessing for destructive commands
halt contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "Bash"
# Get affected parent directories from preprocessing
# This is populated for commands like rm -rf, chmod -R, etc.
affected_dirs := input.affected_parent_directories
count(affected_dirs) > 0
# Check if any protected path is a CHILD of an affected directory
some affected_dir in affected_dirs
some protected_path in get_protected_paths
protected_is_child_of_affected(protected_path, affected_dir)
message := get_configured_message
decision := {
"rule_id": "BUILTIN-PROTECTED-PATHS-PARENT",
"reason": concat("", [message, " (", protected_path, " would be affected by operation on ", affected_dir, ")"]),
"severity": "HIGH",
}
}
# Block interpreter inline scripts (-c/-e flags) that mention protected paths
# This catches attacks like: python -c 'pathlib.Path("../my-favorite-file.txt").delete()'
halt contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "Bash"
command := input.tool_input.command
lower_cmd := lower(command)
# Detect inline script execution with interpreters
interpreters := ["python", "python3", "python2", "ruby", "perl", "node", "php"]
some interp in interpreters
regex.match(concat("", ["(^|\\s)", interp, "\\s+(-c|-e)\\s"]), lower_cmd)
# Check if any protected path is mentioned anywhere in the command
some protected_path in get_protected_paths
contains(lower_cmd, lower(protected_path))
message := get_configured_message
decision := {
"rule_id": "BUILTIN-PROTECTED-PATHS-SCRIPT",
"reason": concat("", [message, " (inline script mentions '", protected_path, "')"]),
"severity": "HIGH",
}
}
# Extract file path from tool input
get_file_path_from_tool_input := path if {
path := input.tool_input.file_path
} else := path if {
path := input.tool_input.path
} else := path if {
path := input.tool_input.notebook_path
} else := path if {
# For MultiEdit, check if any edit targets a protected path
# Return the first protected path found
some edit in input.tool_input.edits
path := edit.file_path
} else := ""
# Check if a path is protected
is_protected_path(path) if {
protected_paths := get_protected_paths
some protected_path in protected_paths
path_matches(path, protected_path)
}
# Path matching logic (supports exact, directory prefix, filename, and glob patterns)
path_matches(path, pattern) if {
# Exact match (case-insensitive)
lower(path) == lower(pattern)
}
path_matches(path, pattern) if {
# Filename match - pattern is just a filename (no path separators)
# Matches if the canonical path ends with the filename
not contains(pattern, "/")
not contains(pattern, "\\")
endswith(lower(path), concat("/", [lower(pattern)]))
}
path_matches(path, pattern) if {
# Filename match for Windows paths
not contains(pattern, "/")
not contains(pattern, "\\")
endswith(lower(path), concat("\\", [lower(pattern)]))
}
path_matches(path, pattern) if {
# Directory prefix match - absolute pattern (starts with /)
# Pattern: "/absolute/path/" matches "/absolute/path/file.txt"
endswith(pattern, "/")
startswith(pattern, "/")
startswith(lower(path), lower(pattern))
}
path_matches(path, pattern) if {
# Directory prefix match - relative pattern
# Pattern: "src/legacy/" should match "/tmp/project/src/legacy/file.rs"
# This handles canonical absolute paths against relative pattern configs
endswith(pattern, "/")
not startswith(pattern, "/")
# Check if the pattern appears in the path as a directory component
# We need to match "/src/legacy/" not just any "src/legacy/" substring
contains(lower(path), concat("/", [lower(pattern)]))
}
path_matches(path, pattern) if {
# Directory match without trailing slash - absolute pattern
# If pattern is "/absolute/path/src/legacy", match "/absolute/path/src/legacy/file.js"
not endswith(pattern, "/")
startswith(pattern, "/")
prefix := concat("", [lower(pattern), "/"])
startswith(lower(path), prefix)
}
path_matches(path, pattern) if {
# Directory match without trailing slash - relative pattern
# If pattern is "src/legacy", match "/tmp/project/src/legacy/file.js"
not endswith(pattern, "/")
not startswith(pattern, "/")
prefix := concat("/", [lower(pattern), "/"])
contains(lower(path), prefix)
}
path_matches(path, pattern) if {
# Glob pattern matching (simplified - just * wildcard for now)
contains(pattern, "*")
glob_match(lower(path), lower(pattern))
}
# Simple glob matching (supports * wildcard)
glob_match(path, pattern) if {
# Convert glob pattern to regex: * becomes .*
regex_pattern := replace(replace(pattern, ".", "\\."), "*", ".*")
regex_pattern_anchored := concat("", ["^", regex_pattern, "$"])
regex.match(regex_pattern_anchored, path)
}
# WHITELIST approach: Only these read operations are allowed on protected paths
is_whitelisted_read_command(cmd) if {
# Exclude dangerous sed variants FIRST
startswith(cmd, "sed -i") # In-place edit
false # Explicitly reject
}
is_whitelisted_read_command(cmd) if {
# Check if command starts with a safe read-only command
safe_read_verbs := {
"cat", # Read file contents
"less", # Page through file
"more", # Page through file
"head", # Read first lines
"tail", # Read last lines
"grep", # Search in file
"egrep", # Extended grep
"fgrep", # Fixed string grep
"zgrep", # Grep compressed files
"wc", # Word/line count
"file", # Determine file type
"stat", # File statistics
"ls", # List files
"find", # Find files (read-only by default)
"awk", # Text processing (without output redirect)
"sed", # Stream editor (safe without -i flag)
"sort", # Sort lines
"uniq", # Filter unique lines
"diff", # Compare files
"cmp", # Compare files byte by byte
"md5sum", # Calculate checksum
"sha256sum", # Calculate checksum
"hexdump", # Display in hex
"strings", # Extract strings from binary
"od", # Octal dump
}
some verb in safe_read_verbs
commands.has_verb(cmd, verb)
# CRITICAL: Exclude sed -i specifically
# This check is NOT redundant with lines 188-192. OPA evaluates ALL rule bodies
# for is_whitelisted_read_command(). Body 1 (lines 188-192) explicitly rejects "sed -i",
# but OPA continues to evaluate Body 2 (this body). Without this check, "sed -i"
# would match the "sed" verb above and incorrectly be whitelisted.
# Whitespace variations (sed -i, sed\t-i) are normalized by preprocessing.
not startswith(cmd, "sed -i")
# Ensure no output redirection
not commands.has_output_redirect(cmd)
}
is_whitelisted_read_command(cmd) if {
# Also allow piped commands that start with safe reads
# e.g., "cat file.txt | grep pattern"
contains(cmd, "|")
parts := split(cmd, "|")
first_part := trim_space(parts[0])
# Check if first part starts with a safe command (avoid recursion)
safe_read_verbs := {
"cat", # Read file contents
"less", # Page through file
"more", # Page through file
"head", # Read first lines
"tail", # Read last lines
"grep", # Search in file
"wc", # Word/line count
"file", # Determine file type
"stat", # File statistics
"ls", # List files
}
some verb in safe_read_verbs
commands.has_verb(first_part, verb)
}
# Check if command references a protected path
contains_protected_reference(cmd, protected_path) if {
# Direct reference
contains(cmd, lower(protected_path))
}
contains_protected_reference(cmd, protected_path) if {
# Without trailing slash if it's a directory pattern
endswith(protected_path, "/")
path_without_slash := substring(lower(protected_path), 0, count(protected_path) - 1)
contains(cmd, path_without_slash)
}
# Get configured message from builtin config
get_configured_message := msg if {
# Direct access to builtin config (no signal execution needed)
msg := input.builtin_config.protected_paths.message
} else := msg if {
# Fallback to default if config not present
msg := "This path is read-only and cannot be modified"
}
# Get list of protected paths from builtin config
get_protected_paths := paths if {
# Direct access to builtin config (no signal execution needed)
paths := input.builtin_config.protected_paths.paths
} else := paths if {
# No paths configured - policy inactive
paths := []
}
# Check if a protected path is a child of an affected directory
# This is the "reverse" check for parent directory protection:
# protected_path: /home/user/.cupcake/config.yml
# affected_dir: /home/user/
# Returns true because the protected path is inside the affected directory
protected_is_child_of_affected(protected_path, affected_dir) if {
# Normalize: ensure affected_dir ends with /
affected_normalized := ensure_trailing_slash(affected_dir)
# Check if protected path starts with the affected directory
startswith(lower(protected_path), lower(affected_normalized))
}
protected_is_child_of_affected(protected_path, affected_dir) if {
# Also check exact match (rm -rf /home/user/.cupcake)
lower(protected_path) == lower(affected_dir)
}
protected_is_child_of_affected(protected_path, affected_dir) if {
# Handle case where affected_dir is specified without trailing slash
# but protected_path has it as a prefix
not endswith(affected_dir, "/")
prefix := concat("", [lower(affected_dir), "/"])
startswith(lower(protected_path), prefix)
}
# Helper to ensure path ends with /
ensure_trailing_slash(path) := result if {
endswith(path, "/")
result := path
} else := result if {
result := concat("", [path, "/"])
}

View File

@@ -0,0 +1,231 @@
# METADATA
# scope: package
# title: Rulebook Security Guardrails - Builtin Policy
# authors: ["Cupcake Builtins"]
# custom:
# severity: HIGH
# id: BUILTIN-RULEBOOK-SECURITY
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["Edit", "Write", "MultiEdit", "NotebookEdit", "Read", "Grep", "Glob", "Bash", "Task", "WebFetch"]
package cupcake.policies.builtins.rulebook_security_guardrails
import rego.v1
import data.cupcake.helpers.commands
# Block ANY tool operations targeting protected paths
halt contains decision if {
input.hook_event_name == "PreToolUse"
# Check for ANY file operation tools (read, write, search, etc.)
file_operation_tools := {
"Edit", "Write", "MultiEdit", "NotebookEdit", # Writing tools
"Read", # Reading tools
"Grep", "Glob", # Search/listing tools
"WebFetch", # Could use file:// URLs
"Task", # Could spawn agent to bypass
}
input.tool_name in file_operation_tools
# Check if any parameter contains a protected path (case-insensitive)
# TOB-4 fix: Prefer canonical path (input.resolved_file_path) when available,
# but fall back to raw tool_input fields for pattern-based tools (Glob/Grep)
# that don't have file paths that can be canonicalized
file_path := get_file_path_with_preprocessing_fallback
file_path != ""
is_protected_path(file_path)
# Get configured message from signals (fallback to default)
message := get_configured_message
decision := {
"rule_id": "BUILTIN-RULEBOOK-SECURITY",
"reason": concat("", [message, " (blocked file operation on ", file_path, ")"]),
"severity": "HIGH",
}
}
# Block Bash commands that reference any protected path
# Total lockdown - NO whitelist (unlike protected_paths builtin)
halt contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "Bash"
# Check if command references any protected path
# Bash tool uses tool_input.command, not params.command
command := lower(input.tool_input.command)
# Iterate over all protected paths
some protected_path in get_protected_paths
contains_protected_reference(command, protected_path)
message := get_configured_message
decision := {
"rule_id": "BUILTIN-RULEBOOK-SECURITY",
"reason": concat("", [message, " (detected protected path reference in bash command)"]),
"severity": "HIGH",
}
}
# Block symlink creation involving any protected path (TOB-EQTY-LAB-CUPCAKE-4)
halt contains decision if {
input.hook_event_name == "PreToolUse"
input.tool_name == "Bash"
command := lower(input.tool_input.command)
# Check if command creates symlink involving ANY protected path (source OR target)
some protected_path in get_protected_paths
commands.symlink_involves_path(command, protected_path)
message := get_configured_message
decision := {
"rule_id": "BUILTIN-RULEBOOK-SECURITY",
"reason": concat("", [message, " (symlink creation involving protected path is not permitted)"]),
"severity": "HIGH",
}
}
# Check if a file path matches any protected path
is_protected_path(path) if {
protected_paths := get_protected_paths
some protected_path in protected_paths
path_matches(path, protected_path)
}
# Path matching logic (supports substring and directory matching)
path_matches(path, pattern) if {
# Exact match (case-insensitive)
lower(path) == lower(pattern)
}
path_matches(path, pattern) if {
# Substring match - handles both file and directory references
# "/full/path/.cupcake/file" matches ".cupcake"
# "/full/path/secrets/api.key" matches "secrets/"
lower_path := lower(path)
lower_pattern := lower(pattern)
contains(lower_path, lower_pattern)
}
path_matches(path, pattern) if {
# Directory match without trailing slash
# Pattern "secrets" should match "/full/path/secrets/file"
not endswith(pattern, "/")
lower_path := lower(path)
lower_pattern := lower(pattern)
# Add slash to ensure directory boundary
pattern_with_slash := concat("", [lower_pattern, "/"])
contains(lower_path, pattern_with_slash)
}
path_matches(path, pattern) if {
# Canonical directory paths don't have trailing slashes
# Pattern ".cupcake/" should match canonical path "/tmp/xyz/.cupcake"
# This handles the case where preprocessing canonicalizes directory paths
endswith(pattern, "/")
pattern_without_slash := substring(pattern, 0, count(pattern) - 1)
lower_path := lower(path)
lower_pattern := lower(pattern_without_slash)
# Ensure directory boundary by checking for /{pattern} suffix
path_suffix := concat("", ["/", lower_pattern])
endswith(lower_path, path_suffix)
}
path_matches(path, pattern) if {
# Protected path with trailing slash should also match without the slash
# This handles Glob patterns like ".cupcake*" matching protected path ".cupcake/"
# Also handles paths/patterns that reference the directory without trailing slash
endswith(pattern, "/")
pattern_without_slash := substring(pattern, 0, count(pattern) - 1)
contains(lower(path), lower(pattern_without_slash))
}
# Check if command references a protected path
contains_protected_reference(cmd, protected_path) if {
# Direct reference (case-insensitive)
contains(cmd, lower(protected_path))
}
contains_protected_reference(cmd, protected_path) if {
# Without trailing slash if it's a directory pattern
# "secrets/" pattern should also match "secrets" in command
endswith(protected_path, "/")
path_without_slash := substring(lower(protected_path), 0, count(protected_path) - 1)
contains(cmd, path_without_slash)
}
# Get configured message from builtin config
get_configured_message := msg if {
# Direct access to builtin config (no signal execution needed)
msg := input.builtin_config.rulebook_security_guardrails.message
} else := msg if {
# Fallback to default if config not present
msg := "Cupcake configuration files are protected from modification"
}
# Extract file path from tool input based on tool type
get_file_path_from_tool_input := path if {
# Standard file_path parameter (Edit, Write, MultiEdit, NotebookEdit, Read)
path := input.tool_input.file_path
} else := path if {
# Path parameter (Grep, Glob)
path := input.tool_input.path
} else := path if {
# Pattern parameter might contain path (Glob)
path := input.tool_input.pattern
} else := path if {
# URL parameter for WebFetch (could be file:// URL)
path := input.tool_input.url
} else := path if {
# Task prompt might contain .cupcake references
path := input.tool_input.prompt
} else := path if {
# Notebook path for NotebookEdit
path := input.tool_input.notebook_path
} else := path if {
# Some tools use params instead of tool_input
path := input.params.file_path
} else := path if {
path := input.params.path
} else := path if {
path := input.params.pattern
} else := ""
# TOB-4 aware path extraction: Prefer canonical path from preprocessing,
# fall back to raw tool_input only for Glob (patterns can't be canonicalized)
#
# FIXED: GitHub Copilot review - Grep symlink bypass (TOB-4 defense)
# - Grep's 'path' field now uses canonical paths (closes symlink bypass)
# - Glob's 'pattern' field still uses raw patterns (can't be canonicalized)
#
# TODO: Known Glob limitations (complex pattern parsing required):
# - Glob(pattern="backup/**/*.rego") where "backup" is symlink to .cupcake
# - Glob(pattern="**/*.rego") searches symlinks without .cupcake in pattern
# - Requires pattern parsing before file expansion to fully address
get_file_path_with_preprocessing_fallback := path if {
# For Glob only, use raw pattern since it can't be canonicalized (e.g., "**/*.rs")
# Grep's 'path' field CAN be canonicalized, so it goes through TOB-4 defense
input.tool_name == "Glob"
path := get_file_path_from_tool_input
} else := input.resolved_file_path if {
# For other tools (including Grep), use canonical path from Rust preprocessing (TOB-4 defense)
input.resolved_file_path != ""
} else := path if {
# Final fallback
path := get_file_path_from_tool_input
}
# Helper: Get list of protected paths from builtin config
get_protected_paths := paths if {
# Direct access to builtin config (no signal execution needed)
paths := input.builtin_config.rulebook_security_guardrails.protected_paths
} else := paths if {
# Default protected paths
paths := [".cupcake/"]
}

View File

@@ -0,0 +1,33 @@
# METADATA
# scope: package
# title: Example Policy
# description: A minimal example policy that never fires
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["Bash"]
package cupcake.policies.example
import rego.v1
# This rule will never fire - it's just here to prevent OPA compilation issues
# It checks for a command that nobody would ever type
deny contains decision if {
input.tool_input.command == "CUPCAKE_EXAMPLE_RULE_THAT_NEVER_FIRES_12345"
decision := {
"reason": "This will never happen",
"severity": "LOW",
"rule_id": "EXAMPLE-001"
}
}
# Replace the above with your actual policies
# Example of a real policy:
# deny contains decision if {
# contains(input.tool_input.command, "rm -rf /")
# decision := {
# "reason": "Dangerous command blocked",
# "severity": "HIGH",
# "rule_id": "SAFETY-001"
# }
# }

View File

@@ -5,18 +5,13 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.prevent_any_type
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -128,7 +123,7 @@ any_type_patterns := [
# Block Write/Edit operations that introduce Any in Python files
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
# Only enforce for Python files
file_path := lower(resolved_file_path)
@@ -149,7 +144,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"patch", "apply_patch"}
tool_name in {"Patch", "ApplyPatch"}
content := patch_content
content != null
@@ -166,7 +161,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := lower(edit_path(edit))

View File

@@ -5,18 +5,13 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.prevent_type_suppression
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -123,7 +118,7 @@ type_suppression_patterns := [
# Block Write/Edit operations that introduce type suppression in Python files
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
# Only enforce for Python files
file_path := lower(resolved_file_path)
@@ -144,7 +139,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"patch", "apply_patch"}
tool_name in {"Patch", "ApplyPatch"}
content := patch_content
content != null
@@ -161,7 +156,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := lower(edit_path(edit))

View File

@@ -5,17 +5,12 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.warn_baselines_edit
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -104,7 +99,7 @@ file_path_pattern := `tests/quality/baselines\.json$`
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
@@ -118,7 +113,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)

View File

@@ -5,7 +5,7 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["bash"]
# required_tools: ["Bash"]
package cupcake.policies.opencode.warn_baselines_edit_bash
import rego.v1

View File

@@ -5,17 +5,12 @@
# custom:
# routing:
# required_events: ["PreToolUse"]
# required_tools: ["apply_patch", "edit", "multiedit", "notebookedit", "patch", "write"]
# required_tools: ["ApplyPatch", "Edit", "MultiEdit", "NotebookEdit", "Patch", "Write"]
package cupcake.policies.opencode.warn_large_file
import rego.v1
tool_name := input.tool_name if {
input.tool_name != null
} else := input.tool
tool_input := input.tool_input if {
input.tool_input != null
} else := input.args
tool_name := input.tool_name
tool_input := input.tool_input
resolved_file_path := input.resolved_file_path if {
input.resolved_file_path != null
@@ -126,7 +121,7 @@ pattern := `(?:.*\n){500,}`
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"write", "edit", "notebookedit"}
tool_name in {"Write", "Edit", "NotebookEdit"}
file_path := resolved_file_path
regex.match(file_path_pattern, file_path)
@@ -144,7 +139,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name == "multiedit"
tool_name == "MultiEdit"
some edit in tool_input.edits
file_path := edit_path(edit)
@@ -163,7 +158,7 @@ deny contains decision if {
deny contains decision if {
input.hook_event_name == "PreToolUse"
tool_name in {"patch", "apply_patch"}
tool_name in {"Patch", "ApplyPatch"}
patch := patch_content
patch != null

216
.cupcake/rulebook.yml Normal file
View File

@@ -0,0 +1,216 @@
# Cupcake Base Configuration Template
# This template demonstrates all available builtin abstractions and configuration options.
# Copy this file to .cupcake/rulebook.yml and uncomment/modify as needed.
# ============================================================================
# SIGNALS - External data providers
# ============================================================================
# Signals are commands that provide data to policies. They can return strings
# or JSON structures. Convention: place scripts in .cupcake/signals/ directory
# for auto-discovery, or define explicitly here.
signals:
# Example: Simple string signal
# current_branch:
# command: "git branch --show-current"
# timeout_seconds: 2
# Example: Structured JSON signal
# system_info:
# command: 'echo "{\"os\": \"$(uname)\", \"user\": \"$(whoami)\"}"'
# timeout_seconds: 5
# Note: Signals in .cupcake/signals/ directory are auto-discovered
# File signals/foo.sh becomes signal "foo" automatically
# ============================================================================
# ACTIONS - Response to policy violations
# ============================================================================
# Actions are commands executed when policies trigger. Convention: place scripts
# in .cupcake/actions/ directory named by rule_id for auto-discovery.
actions:
# Actions that run on ANY policy denial
# on_any_denial:
# - command: "echo 'Policy violation detected' >> audit.log"
# Rule-specific actions (by rule_id)
# by_rule_id:
# SECURITY-001:
# - command: "notify-team --severity high"
# LINT-001:
# - command: "echo 'Code style violation'"
# Note: Scripts in .cupcake/actions/ are auto-mapped by filename
# File actions/SECURITY-001.sh triggers for rule_id: SECURITY-001
# ============================================================================
# BUILTINS - Higher-level policy abstractions
# ============================================================================
# Builtins provide common security patterns without writing Rego policies.
# Each builtin can be enabled/disabled and configured independently.
#
# IMPORTANT: Builtins are ENABLED BY DEFAULT when configured.
# Simply configuring a builtin (even with just empty settings) enables it.
# To disable, either remove the configuration or set 'enabled: false'.
#
# FILE PROTECTION BUILTINS (Two-Tier System):
# 1. protected_paths: Makes specific paths read-only (read allowed, write blocked)
# 2. rulebook_security_guardrails: Total lockdown of paths (no read OR write)
# ============================================================================
# USAGE NOTES
# ============================================================================
# 1. Builtins are processed BEFORE custom policies, allowing you to set
# foundational rules that custom policies can build upon.
#
# 2. Signal commands are executed with 'sh -c' and should output valid JSON
# for structured data, or plain text for simple strings.
#
# 3. All paths are relative to the project root (parent of .cupcake/)
#
# 4. Builtin policies are located in .cupcake/policies/builtins/ and are
# only compiled when their corresponding builtin is enabled.
#
# 5. For debugging, use --log-level debug when running Cupcake to see detailed
# information about builtin activation and signal execution.
#
# 6. LIMITATION: Due to Claude Code hook limitations, context can only be
# injected on UserPromptSubmit and SessionStart events. PreToolUse events
# do not support context injection.
builtins:
# ---------------------------------------------------------------------------
# CLAUDE_CODE_ALWAYS_INJECT_ON_PROMPT - Add context to every user prompt (Claude Code only)
# ---------------------------------------------------------------------------
# Inject additional context with every user interaction. Useful for project
# guidelines, current state awareness, or team conventions.
# Note: Builtins are enabled by default when configured. Use 'enabled: false' to disable.
# Note: This builtin only works with Claude Code due to context injection support.
# claude_code_always_inject_on_prompt:
# # enabled: true # Optional - defaults to true when configured
# context:
# # Static text context
# - "Follow SOLID principles and write comprehensive tests"
# - "This is a production system - be careful with database changes"
#
# # Dynamic context from command
# - command: "git status --short"
# - command: "date '+Today is %A, %B %d'"
#
# # Context from file
# - file: ".cupcake/coding-standards.md"
# - file: "docs/current-sprint-goals.md"
# ---------------------------------------------------------------------------
# GIT_PRE_CHECK - Enforce checks before git operations
# ---------------------------------------------------------------------------
# Run validation before allowing git commits, pushes, or merges. Ensures
# code quality and prevents broken commits from entering the repository.
git_pre_check:
enabled: true
checks:
- command: "echo Validation passed"
message: "Basic validation check"
# Optional: only apply to certain operations
# operations: ["commit", "push"] # skip for merge
# ---------------------------------------------------------------------------
# POST_EDIT_CHECK - Validate files after modification
# ---------------------------------------------------------------------------
# Run language-specific validation after files are edited. Provides immediate
# feedback about syntax errors, type issues, or style violations.
# post_edit_check:
# # enabled: true # Optional - defaults to true when configured
# # Checks by file extension
# by_extension:
# "rs":
# command: "cargo check --message-format short"
# message: "Rust compilation check"
#
# "py":
# command: "python -m py_compile"
# message: "Python syntax validation"
#
# "tsx":
# command: "npx tsc --noEmit"
# message: "TypeScript type checking"
#
# "jsx":
# command: "npx eslint --quiet"
# message: "ESLint validation"
#
# "go":
# command: "go fmt && go vet"
# message: "Go format and vet check"
#
# # Checks by glob pattern (future enhancement)
# # by_pattern:
# # "src/**/*.test.ts":
# # command: "npm test -- --findRelatedTests"
# # message: "Running related tests"
# ---------------------------------------------------------------------------
# GIT_BLOCK_NO_VERIFY - Prevent bypassing git commit hooks
# ---------------------------------------------------------------------------
# Blocks git commands that use --no-verify flag to bypass pre-commit hooks.
# This ensures code quality checks, linting, and security scans always run.
git_block_no_verify:
enabled: true
message: "Git operations with --no-verify are not permitted"
# Optional: Add exceptions for specific environments
# exceptions:
# - "CI_ENVIRONMENT"
# ---------------------------------------------------------------------------
# CLAUDE_CODE_ENFORCE_FULL_FILE_READ - Require complete file reads (Claude Code only)
# ---------------------------------------------------------------------------
# Ensures Claude reads entire files (up to a configurable limit) before
# processing. Prevents partial reads that might miss important context.
# Files larger than max_lines can still use offset/limit parameters.
# Note: This builtin only works with Claude Code.
# claude_code_enforce_full_file_read:
# enabled: true
# max_lines: 2000 # Files under this size must be read completely
# message: "Please read the entire file first (files under 2000 lines must be read completely)"
# ---------------------------------------------------------------------------
# PROTECTED_PATHS - User-defined read-only paths
# ---------------------------------------------------------------------------
# Declare specific files or directories as read-only while still allowing
# Claude to read and analyze them. Supports glob patterns. Uses a WHITELIST
# approach for bash commands - only known-safe read operations are allowed.
protected_paths:
enabled: false
message: "System path modification blocked by policy"
paths:
- "/etc/"
- "/System/"
- "~/.ssh/"
# Note: Read operations (cat, grep, less, etc.) are allowed
# Write operations (edit, rm, mv, sed -i, etc.) are blocked
# ---------------------------------------------------------------------------
# RULEBOOK_SECURITY_GUARDRAILS - Cupcake configuration protection
# ---------------------------------------------------------------------------
# Protects the .cupcake directory and other critical paths from any
# modification or inspection. This is the highest security level - blocks
# BOTH read and write operations. Essential for protecting Cupcake's own
# configuration and sensitive system files.
rulebook_security_guardrails:
message: "Cupcake configuration files are protected from modification"
# Protected paths (defaults to [".cupcake/"] if not specified)
protected_paths:
- ".cupcake/"
- ".git/hooks/"
# - "secrets/" # Add your own sensitive directories

View File

@@ -1,5 +1,9 @@
package cupcake.system
import rego.v1
# METADATA
# scope: package
# scope: document
# title: System Aggregation Entrypoint for Hybrid Model
# authors: ["Cupcake Engine"]
# custom:
@@ -8,10 +12,11 @@
# routing:
# required_events: []
# required_tools: []
package cupcake.system
import rego.v1
# The single entrypoint for the Hybrid Model.
# This uses the `walk()` built-in to recursively traverse data.cupcake.policies,
# automatically discovering and aggregating all decision verbs from all loaded
# policies, regardless of their package name or nesting depth.
evaluate := decision_set if {
decision_set := {
"halts": collect_verbs("halt"),
@@ -23,12 +28,18 @@ evaluate := decision_set if {
}
}
# Helper function to collect all decisions for a specific verb type.
# Uses walk() to recursively find all instances of the verb across
# the entire policy hierarchy under data.cupcake.policies.
collect_verbs(verb_name) := result if {
# Collect all matching verb sets from the policy tree
verb_sets := [value |
walk(data.cupcake.policies, [path, value])
path[count(path) - 1] == verb_name
]
# Flatten all sets into a single array
# Since Rego v1 decision verbs are sets, we need to convert to arrays
all_decisions := [decision |
some verb_set in verb_sets
some decision in verb_set
@@ -37,4 +48,5 @@ collect_verbs(verb_name) := result if {
result := all_decisions
}
# Default to empty arrays if no decisions found
default collect_verbs(_) := []

11
.openagent_context.json Normal file
View File

@@ -0,0 +1,11 @@
{
"context_dir_name": "context",
"context_root": "/home/trav/context",
"mission_context": "/home/trav/context/b2331579-f252-4806-9dc4-1cf6c3ff4cd5",
"mission_id": "b2331579-f252-4806-9dc4-1cf6c3ff4cd5",
"working_dir": "/home/trav/repos/noteflow",
"workspace_id": "b293c268-0b8e-4ddc-ad53-5fb34e43237a",
"workspace_name": "noteflow",
"workspace_root": "/home/trav/repos/noteflow",
"workspace_type": "host"
}

380
.opencode/plugin/cupcake.js Normal file
View File

@@ -0,0 +1,380 @@
/**
* Cupcake OpenCode Plugin
*
* Install: Copy this file to .opencode/plugin/cupcake.js
*
* This plugin integrates Cupcake policy enforcement with OpenCode.
* It intercepts tool executions and evaluates them against your policies.
*/
// src/types.ts
var DEFAULT_CONFIG = {
enabled: true,
cupcakePath: "cupcake",
harness: "opencode",
logLevel: "warn",
// Default to warn - info/debug are noisy in TUI
timeoutMs: 5e3,
failMode: "closed",
cacheDecisions: false,
showToasts: true,
toastDurationMs: 5e3
};
function getToastVariant(decision) {
switch (decision) {
case "allow":
return "success";
case "ask":
return "warning";
case "deny":
case "block":
return "error";
default:
return "info";
}
}
// src/event-builder.ts
function normalizeTool(tool) {
return tool;
}
function buildPreToolUseEvent(sessionId, cwd, tool, args, agent, messageId, callId) {
const event = {
hook_event_name: "PreToolUse",
session_id: sessionId,
cwd,
tool: normalizeTool(tool),
args
};
if (agent) {
event.agent = agent;
}
if (messageId) {
event.message_id = messageId;
}
if (callId) {
event.call_id = callId;
}
return event;
}
function buildPermissionEvent(sessionId, cwd, permissionId, permissionType, title, metadata, pattern, messageId, callId) {
const event = {
hook_event_name: "PermissionRequest",
session_id: sessionId,
cwd,
permission_id: permissionId,
permission_type: permissionType,
title,
metadata
};
if (pattern) {
event.pattern = pattern;
}
if (messageId) {
event.message_id = messageId;
}
if (callId) {
event.call_id = callId;
}
return event;
}
// src/executor.ts
async function executeCupcake(config, event) {
const startTime = Date.now();
const eventJson = JSON.stringify(event);
if (config.logLevel === "debug") {
console.error(`[cupcake] DEBUG: Executing cupcake`);
console.error(`[cupcake] DEBUG: Event:`, eventJson);
}
const proc = Bun.spawn([config.cupcakePath, "eval", "--harness", config.harness], {
stdin: "pipe",
stdout: "pipe",
stderr: "ignore"
});
proc.stdin.write(eventJson);
proc.stdin.end();
const timeoutPromise = new Promise((_, reject) => {
setTimeout(() => {
proc.kill();
reject(
new Error(
`Policy evaluation timed out after ${config.timeoutMs}ms. Consider optimizing policies or increasing timeout.`
)
);
}, config.timeoutMs);
});
try {
const [stdout, exitCode] = await Promise.race([
Promise.all([new Response(proc.stdout).text(), proc.exited]),
timeoutPromise
]);
const elapsed = Date.now() - startTime;
if (config.logLevel === "debug") {
console.error(`[cupcake] DEBUG: Cupcake response (${elapsed}ms):`, stdout);
}
if (exitCode !== 0) {
const error = new Error(`Cupcake exited with code ${exitCode}`);
if (config.failMode === "open") {
console.error(`[cupcake] ERROR: ${error.message}`);
console.error(`[cupcake] WARN: Allowing operation in fail-open mode.`);
return { decision: "allow" };
}
throw error;
}
const response = JSON.parse(stdout);
if (config.logLevel === "debug") {
console.error(`[cupcake] DEBUG: Decision: ${response.decision} (${elapsed}ms)`);
}
return response;
} catch (error) {
if (config.failMode === "open") {
console.error(`[cupcake] ERROR: ${error.message}`);
console.error(`[cupcake] WARN: Allowing operation in fail-open mode.`);
return { decision: "allow" };
}
throw error;
}
}
// src/enforcer.ts
function formatDecision(response) {
const { decision, reason, rule_id, severity } = response;
let title;
let message;
let blocked = false;
switch (decision) {
case "allow":
title = "Allowed";
message = reason || "Operation allowed by policy";
break;
case "deny":
case "block":
title = "Policy Violation";
message = reason || `Operation blocked by policy`;
blocked = true;
break;
case "ask":
title = "Approval Required";
message = reason || "This operation requires approval";
blocked = true;
break;
default:
title = "Unknown Decision";
message = `Policy returned unknown decision: ${decision}`;
blocked = true;
}
if (rule_id || severity) {
const details = [];
if (rule_id) details.push(`Rule: ${rule_id}`);
if (severity) details.push(`Severity: ${severity}`);
message += `
(${details.join(", ")})`;
}
return {
blocked,
title,
message,
variant: getToastVariant(decision),
decision,
ruleId: rule_id,
severity
};
}
function formatErrorMessage(formatted) {
let message = "";
if (formatted.decision === "deny" || formatted.decision === "block") {
message += "\u274C Policy Violation\n\n";
} else if (formatted.decision === "ask") {
message += "\u26A0\uFE0F Approval Required\n\n";
}
message += formatted.message;
if (formatted.decision === "ask") {
message += "\n\nNote: This operation requires manual approval. ";
message += "To proceed, review the policy and temporarily disable it if appropriate, ";
message += "then re-run the command.";
}
return message;
}
// src/index.ts
import { existsSync, readFileSync } from "fs";
import { join } from "path";
function loadConfig(directory) {
const configPath = join(directory, ".cupcake", "opencode.json");
if (existsSync(configPath)) {
try {
const configData = readFileSync(configPath, "utf-8");
const userConfig = JSON.parse(configData);
return { ...DEFAULT_CONFIG, ...userConfig };
} catch (error) {
console.error(`[cupcake] WARN: Failed to load config from ${configPath}: ${error.message}`);
console.error(`[cupcake] WARN: Using default configuration`);
}
}
return DEFAULT_CONFIG;
}
async function showToast(client, config, title, message, variant) {
if (!config.showToasts || !client) {
return;
}
try {
await client.tui.showToast({
body: {
title,
message,
variant,
duration: config.toastDurationMs
}
});
} catch (error) {
if (config.logLevel === "debug") {
console.error(`[cupcake] DEBUG: Failed to show toast: ${error.message}`);
}
}
}
function log(config, level, message, ...args) {
const levels = ["debug", "info", "warn", "error"];
const configLevel = levels.indexOf(config.logLevel);
const messageLevel = levels.indexOf(level);
if (messageLevel >= configLevel) {
const prefix = `[cupcake] ${level.toUpperCase()}:`;
if (args.length > 0) {
console.error(prefix, message, ...args);
} else {
console.error(prefix, message);
}
}
}
var CupcakePlugin = async ({ directory, client }) => {
const config = loadConfig(directory);
if (!config.enabled) {
log(config, "debug", "Plugin is disabled in configuration");
return {};
}
log(config, "debug", "Cupcake plugin initialized");
return {
/**
* Hook: tool.execute.before
*
* Fired before any tool execution. This is where we enforce policies.
* Throwing an error blocks the tool execution.
*/
"tool.execute.before": async (input, output) => {
try {
log(config, "debug", `tool.execute.before fired for ${input.tool}`);
log(config, "debug", "Args:", output.args);
const event = buildPreToolUseEvent(
input.sessionID || "unknown",
directory,
input.tool,
output.args,
void 0,
// agent - not provided in current hook
void 0,
// messageId - not provided in current hook
input.callID
);
const response = await executeCupcake(config, event);
const formatted = formatDecision(response);
if (formatted.decision !== "allow") {
await showToast(client, config, formatted.title, formatted.message, formatted.variant);
}
if (formatted.blocked) {
throw new Error(formatErrorMessage(formatted));
}
log(config, "debug", "Allowing tool execution");
} catch (error) {
throw error;
}
},
/**
* Hook: permission.ask
*
* Fired when OpenCode needs to request permission for an operation.
* This integrates with OpenCode's native permission UI.
*
* - Set output.status = "allow" to auto-approve
* - Set output.status = "deny" to auto-deny
* - Leave as "ask" to show native permission dialog
*/
"permission.ask": async (input, output) => {
try {
log(config, "debug", `permission.ask fired for ${input.type}`);
log(config, "debug", "Permission:", input);
const event = buildPermissionEvent(
input.sessionID,
directory,
input.id,
input.type,
input.title,
input.metadata,
input.pattern,
input.messageID,
input.callID
);
const response = await executeCupcake(config, event);
switch (response.decision) {
case "allow":
output.status = "allow";
log(config, "debug", `Auto-allowing permission: ${input.type}`);
break;
case "deny":
case "block":
output.status = "deny";
log(config, "debug", `Auto-denying permission: ${input.type}`);
await showToast(
client,
config,
"Permission Denied",
response.reason || `Permission ${input.type} blocked by policy`,
"error"
);
break;
case "ask":
default:
output.status = "ask";
log(config, "debug", `Deferring permission to user: ${input.type}`);
if (response.reason) {
await showToast(client, config, "Approval Recommended", response.reason, "warning");
}
break;
}
} catch (error) {
log(config, "error", `Permission evaluation failed: ${error.message}`);
output.status = "ask";
}
},
/**
* Hook: tool.execute.after
*
* Fired after tool execution. Used for audit logging.
* Cannot prevent execution (already happened).
*/
"tool.execute.after": async (input, output) => {
log(config, "debug", `tool.execute.after fired for ${input.tool}`);
log(config, "debug", "Output:", output.output?.substring(0, 200));
},
/**
* Hook: event
*
* Fired for all OpenCode events. Used for comprehensive audit logging.
*/
event: async ({ event }) => {
if (config.logLevel !== "debug") {
return;
}
const auditEvents = [
"tool.executed",
"permission.replied",
"file.edited",
"session.created",
"session.aborted"
];
if (auditEvents.includes(event.type)) {
log(config, "debug", `Audit event: ${event.type}`, event.properties);
}
}
};
};
export { CupcakePlugin };

View File

@@ -1,5 +1,5 @@
{
"$schema": "https://biomejs.dev/schemas/2.3.10/schema.json",
"$schema": "https://biomejs.dev/schemas/2.3.11/schema.json",
"vcs": {
"enabled": true,
"clientKind": "git",

View File

@@ -0,0 +1,35 @@
use std::sync::Arc;
use tauri::State;
use crate::error::Result;
use crate::grpc::types::analytics::{AnalyticsOverview, ListSpeakerStatsResult};
use crate::state::AppState;
#[tauri::command(rename_all = "snake_case")]
pub async fn get_analytics_overview(
state: State<'_, Arc<AppState>>,
start_time: f64,
end_time: f64,
project_id: Option<String>,
project_ids: Option<Vec<String>>,
) -> Result<AnalyticsOverview> {
state
.grpc_client
.get_analytics_overview(start_time, end_time, project_id, project_ids)
.await
}
#[tauri::command(rename_all = "snake_case")]
pub async fn list_speaker_stats(
state: State<'_, Arc<AppState>>,
start_time: f64,
end_time: f64,
project_id: Option<String>,
project_ids: Option<Vec<String>>,
) -> Result<ListSpeakerStatsResult> {
state
.grpc_client
.list_speaker_stats(start_time, end_time, project_id, project_ids)
.await
}

View File

@@ -2,6 +2,7 @@
//!
//! Each module corresponds to a functional area and exposes #[tauri::command] functions.
mod analytics;
mod annotation;
mod apps;
mod asr;
@@ -26,6 +27,7 @@ mod shell;
mod summary;
mod streaming_config;
mod sync;
mod tasks;
mod triggers;
mod webhooks;
@@ -39,6 +41,7 @@ mod playback_tests;
#[cfg(test)]
mod recording_tests;
pub use analytics::*;
pub use annotation::*;
pub use apps::*;
pub use asr::*;
@@ -65,6 +68,7 @@ pub use shell::*;
pub use summary::*;
pub use streaming_config::*;
pub use sync::*;
pub use tasks::*;
pub use testing::*;
pub use triggers::*;
pub use webhooks::*;

View File

@@ -0,0 +1,39 @@
use std::sync::Arc;
use tauri::State;
use crate::error::Result;
use crate::grpc::types::tasks::{ListTasksResult, Task};
use crate::state::AppState;
#[tauri::command(rename_all = "snake_case")]
pub async fn list_tasks(
state: State<'_, Arc<AppState>>,
statuses: Option<Vec<String>>,
limit: Option<i32>,
offset: Option<i32>,
project_id: Option<String>,
project_ids: Option<Vec<String>>,
meeting_id: Option<String>,
) -> Result<ListTasksResult> {
state
.grpc_client
.list_tasks(statuses, limit, offset, project_id, project_ids, meeting_id)
.await
}
#[tauri::command(rename_all = "snake_case")]
pub async fn update_task(
state: State<'_, Arc<AppState>>,
task_id: String,
text: Option<String>,
status: Option<String>,
assignee_person_id: Option<String>,
due_date: Option<f64>,
priority: Option<i32>,
) -> Result<Task> {
state
.grpc_client
.update_task(task_id, text, status, assignee_person_id, due_date, priority)
.await
}

View File

@@ -0,0 +1,80 @@
use crate::error::Result;
use crate::grpc::noteflow as pb;
use crate::grpc::types::analytics::{
AnalyticsOverview, DailyMeetingStats, ListSpeakerStatsResult, SpeakerStat,
};
use super::core::GrpcClient;
impl GrpcClient {
pub async fn get_analytics_overview(
&self,
start_time: f64,
end_time: f64,
project_id: Option<String>,
project_ids: Option<Vec<String>>,
) -> Result<AnalyticsOverview> {
let mut client = self.get_client()?;
let response = client
.get_analytics_overview(pb::GetAnalyticsOverviewRequest {
start_time,
end_time,
project_id,
project_ids: project_ids.unwrap_or_default(),
})
.await?
.into_inner();
Ok(AnalyticsOverview {
daily: response.daily.into_iter().map(map_daily_stats).collect(),
total_meetings: response.total_meetings,
total_duration: response.total_duration,
total_words: response.total_words,
total_segments: response.total_segments,
speaker_count: response.speaker_count,
})
}
pub async fn list_speaker_stats(
&self,
start_time: f64,
end_time: f64,
project_id: Option<String>,
project_ids: Option<Vec<String>>,
) -> Result<ListSpeakerStatsResult> {
let mut client = self.get_client()?;
let response = client
.list_speaker_stats(pb::ListSpeakerStatsRequest {
start_time,
end_time,
project_id,
project_ids: project_ids.unwrap_or_default(),
})
.await?
.into_inner();
Ok(ListSpeakerStatsResult {
speakers: response.speakers.into_iter().map(map_speaker_stat).collect(),
})
}
}
fn map_daily_stats(proto: pb::DailyMeetingStatsProto) -> DailyMeetingStats {
DailyMeetingStats {
date: proto.date,
meetings: proto.meetings,
total_duration: proto.total_duration,
word_count: proto.word_count,
}
}
fn map_speaker_stat(proto: pb::SpeakerStatProto) -> SpeakerStat {
SpeakerStat {
speaker_id: proto.speaker_id,
display_name: proto.display_name,
total_time: proto.total_time,
segment_count: proto.segment_count,
meeting_count: proto.meeting_count,
avg_confidence: proto.avg_confidence,
}
}

View File

@@ -12,8 +12,11 @@
//! - `observability`: Logs and metrics operations (Sprint 9)
//! - `asr`: ASR configuration operations (Sprint 19)
//! - `streaming`: Streaming configuration operations (Sprint 20)
//! - `tasks`: Task management operations (Bugfinder Sprint)
//! - `analytics`: Analytics aggregate operations (Bugfinder Sprint)
//! - `converters`: Protobuf to domain type converters
mod analytics;
mod annotations;
mod asr;
mod streaming;
@@ -29,6 +32,7 @@ mod oidc;
mod preferences;
mod projects;
mod sync;
mod tasks;
mod webhooks;
// Re-export the main types

View File

@@ -0,0 +1,144 @@
use crate::error::Result;
use crate::grpc::noteflow as pb;
use crate::grpc::types::tasks::{ListTasksResult, Task, TaskStatus, TaskWithMeeting};
use super::core::GrpcClient;
impl GrpcClient {
pub async fn list_tasks(
&self,
statuses: Option<Vec<String>>,
limit: Option<i32>,
offset: Option<i32>,
project_id: Option<String>,
project_ids: Option<Vec<String>>,
meeting_id: Option<String>,
) -> Result<ListTasksResult> {
let mut client = self.get_client()?;
let status_enums: Vec<i32> = statuses
.unwrap_or_default()
.iter()
.map(|s| string_to_task_status_proto(s))
.collect();
let response = client
.list_tasks(pb::ListTasksRequest {
statuses: status_enums,
limit: limit.unwrap_or(100),
offset: offset.unwrap_or(0),
project_id,
project_ids: project_ids.unwrap_or_default(),
meeting_id,
})
.await?
.into_inner();
Ok(ListTasksResult {
tasks: response.tasks.into_iter().map(map_task_with_meeting).collect(),
total_count: response.total_count,
})
}
pub async fn update_task(
&self,
task_id: String,
text: Option<String>,
status: Option<String>,
assignee_person_id: Option<String>,
due_date: Option<f64>,
priority: Option<i32>,
) -> Result<Task> {
let mut client = self.get_client()?;
let response = client
.update_task(pb::UpdateTaskRequest {
task_id,
text: text.unwrap_or_default(),
status: status.map(|s| string_to_task_status_proto(&s)).unwrap_or(0),
assignee_person_id: assignee_person_id.unwrap_or_default(),
due_date: due_date.unwrap_or(0.0),
priority: priority.unwrap_or(0),
})
.await?
.into_inner();
response
.task
.map(map_task)
.ok_or_else(|| crate::error::Error::InvalidInput("Task not found".into()))
}
}
fn string_to_task_status_proto(s: &str) -> i32 {
match s {
"open" => pb::TaskStatusProto::TaskStatusOpen as i32,
"done" => pb::TaskStatusProto::TaskStatusDone as i32,
"dismissed" => pb::TaskStatusProto::TaskStatusDismissed as i32,
_ => pb::TaskStatusProto::TaskStatusUnspecified as i32,
}
}
fn task_status_proto_to_enum(status: i32) -> TaskStatus {
match status {
1 => TaskStatus::Open,
2 => TaskStatus::Done,
3 => TaskStatus::Dismissed,
_ => TaskStatus::Open,
}
}
fn map_task(proto: pb::TaskProto) -> Task {
Task {
id: proto.id,
meeting_id: if proto.meeting_id.is_empty() {
None
} else {
Some(proto.meeting_id)
},
action_item_id: if proto.action_item_id == 0 {
None
} else {
Some(proto.action_item_id)
},
text: proto.text,
status: task_status_proto_to_enum(proto.status),
assignee_person_id: if proto.assignee_person_id.is_empty() {
None
} else {
Some(proto.assignee_person_id)
},
due_date: if proto.due_date == 0.0 {
None
} else {
Some(proto.due_date)
},
priority: proto.priority,
completed_at: if proto.completed_at == 0.0 {
None
} else {
Some(proto.completed_at)
},
}
}
fn map_task_with_meeting(proto: pb::TaskWithMeetingProto) -> TaskWithMeeting {
TaskWithMeeting {
task: proto.task.map(map_task).unwrap_or_else(|| Task {
id: String::new(),
meeting_id: None,
action_item_id: None,
text: String::new(),
status: TaskStatus::Open,
assignee_person_id: None,
due_date: None,
priority: 0,
completed_at: None,
}),
meeting_title: proto.meeting_title,
meeting_created_at: proto.meeting_created_at,
project_id: if proto.project_id.is_empty() {
None
} else {
Some(proto.project_id)
},
}
}

View File

@@ -2378,6 +2378,152 @@ pub struct UpdateWorkspaceSettingsRequest {
#[prost(message, optional, tag = "2")]
pub settings: ::core::option::Option<WorkspaceSettingsProto>,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TaskProto {
#[prost(string, tag = "1")]
pub id: ::prost::alloc::string::String,
#[prost(string, tag = "2")]
pub meeting_id: ::prost::alloc::string::String,
#[prost(int32, tag = "3")]
pub action_item_id: i32,
#[prost(string, tag = "4")]
pub text: ::prost::alloc::string::String,
#[prost(enumeration = "TaskStatusProto", tag = "5")]
pub status: i32,
#[prost(string, tag = "6")]
pub assignee_person_id: ::prost::alloc::string::String,
#[prost(double, tag = "7")]
pub due_date: f64,
#[prost(int32, tag = "8")]
pub priority: i32,
#[prost(double, tag = "9")]
pub completed_at: f64,
#[prost(double, tag = "10")]
pub created_at: f64,
#[prost(double, tag = "11")]
pub updated_at: f64,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct TaskWithMeetingProto {
#[prost(message, optional, tag = "1")]
pub task: ::core::option::Option<TaskProto>,
#[prost(string, tag = "2")]
pub meeting_title: ::prost::alloc::string::String,
#[prost(double, tag = "3")]
pub meeting_created_at: f64,
#[prost(string, tag = "4")]
pub project_id: ::prost::alloc::string::String,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListTasksRequest {
#[prost(enumeration = "TaskStatusProto", repeated, tag = "1")]
pub statuses: ::prost::alloc::vec::Vec<i32>,
#[prost(int32, tag = "2")]
pub limit: i32,
#[prost(int32, tag = "3")]
pub offset: i32,
#[prost(string, optional, tag = "4")]
pub project_id: ::core::option::Option<::prost::alloc::string::String>,
#[prost(string, repeated, tag = "5")]
pub project_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
#[prost(string, optional, tag = "6")]
pub meeting_id: ::core::option::Option<::prost::alloc::string::String>,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListTasksResponse {
#[prost(message, repeated, tag = "1")]
pub tasks: ::prost::alloc::vec::Vec<TaskWithMeetingProto>,
#[prost(int32, tag = "2")]
pub total_count: i32,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateTaskRequest {
#[prost(string, tag = "1")]
pub task_id: ::prost::alloc::string::String,
#[prost(string, tag = "2")]
pub text: ::prost::alloc::string::String,
#[prost(enumeration = "TaskStatusProto", tag = "3")]
pub status: i32,
#[prost(string, tag = "4")]
pub assignee_person_id: ::prost::alloc::string::String,
#[prost(double, tag = "5")]
pub due_date: f64,
#[prost(int32, tag = "6")]
pub priority: i32,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UpdateTaskResponse {
#[prost(message, optional, tag = "1")]
pub task: ::core::option::Option<TaskProto>,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetAnalyticsOverviewRequest {
#[prost(double, tag = "1")]
pub start_time: f64,
#[prost(double, tag = "2")]
pub end_time: f64,
#[prost(string, optional, tag = "3")]
pub project_id: ::core::option::Option<::prost::alloc::string::String>,
#[prost(string, repeated, tag = "4")]
pub project_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DailyMeetingStatsProto {
#[prost(string, tag = "1")]
pub date: ::prost::alloc::string::String,
#[prost(int32, tag = "2")]
pub meetings: i32,
#[prost(double, tag = "3")]
pub total_duration: f64,
#[prost(int32, tag = "4")]
pub word_count: i32,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GetAnalyticsOverviewResponse {
#[prost(message, repeated, tag = "1")]
pub daily: ::prost::alloc::vec::Vec<DailyMeetingStatsProto>,
#[prost(int32, tag = "2")]
pub total_meetings: i32,
#[prost(double, tag = "3")]
pub total_duration: f64,
#[prost(int32, tag = "4")]
pub total_words: i32,
#[prost(int32, tag = "5")]
pub total_segments: i32,
#[prost(int32, tag = "6")]
pub speaker_count: i32,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SpeakerStatProto {
#[prost(string, tag = "1")]
pub speaker_id: ::prost::alloc::string::String,
#[prost(string, tag = "2")]
pub display_name: ::prost::alloc::string::String,
#[prost(double, tag = "3")]
pub total_time: f64,
#[prost(int32, tag = "4")]
pub segment_count: i32,
#[prost(int32, tag = "5")]
pub meeting_count: i32,
#[prost(double, tag = "6")]
pub avg_confidence: f64,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListSpeakerStatsRequest {
#[prost(double, tag = "1")]
pub start_time: f64,
#[prost(double, tag = "2")]
pub end_time: f64,
#[prost(string, optional, tag = "3")]
pub project_id: ::core::option::Option<::prost::alloc::string::String>,
#[prost(string, repeated, tag = "4")]
pub project_ids: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListSpeakerStatsResponse {
#[prost(message, repeated, tag = "1")]
pub speakers: ::prost::alloc::vec::Vec<SpeakerStatProto>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum UpdateType {
@@ -2816,6 +2962,38 @@ impl ProjectRoleProto {
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum TaskStatusProto {
TaskStatusUnspecified = 0,
TaskStatusOpen = 1,
TaskStatusDone = 2,
TaskStatusDismissed = 3,
}
impl TaskStatusProto {
/// String value of the enum field names used in the ProtoBuf definition.
///
/// The values are not transformed in any way and thus are considered stable
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
pub fn as_str_name(&self) -> &'static str {
match self {
Self::TaskStatusUnspecified => "TASK_STATUS_UNSPECIFIED",
Self::TaskStatusOpen => "TASK_STATUS_OPEN",
Self::TaskStatusDone => "TASK_STATUS_DONE",
Self::TaskStatusDismissed => "TASK_STATUS_DISMISSED",
}
}
/// Creates an enum from field names used in the ProtoBuf definition.
pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
match value {
"TASK_STATUS_UNSPECIFIED" => Some(Self::TaskStatusUnspecified),
"TASK_STATUS_OPEN" => Some(Self::TaskStatusOpen),
"TASK_STATUS_DONE" => Some(Self::TaskStatusDone),
"TASK_STATUS_DISMISSED" => Some(Self::TaskStatusDismissed),
_ => None,
}
}
}
/// Generated client implementations.
pub mod note_flow_service_client {
#![allow(
@@ -4912,6 +5090,106 @@ pub mod note_flow_service_client {
.insert(GrpcMethod::new("noteflow.NoteFlowService", "GetActiveProject"));
self.inner.unary(req, path, codec).await
}
/// Task management (Bugfinder Sprint)
pub async fn list_tasks(
&mut self,
request: impl tonic::IntoRequest<super::ListTasksRequest>,
) -> std::result::Result<
tonic::Response<super::ListTasksResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/noteflow.NoteFlowService/ListTasks",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("noteflow.NoteFlowService", "ListTasks"));
self.inner.unary(req, path, codec).await
}
pub async fn update_task(
&mut self,
request: impl tonic::IntoRequest<super::UpdateTaskRequest>,
) -> std::result::Result<
tonic::Response<super::UpdateTaskResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/noteflow.NoteFlowService/UpdateTask",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("noteflow.NoteFlowService", "UpdateTask"));
self.inner.unary(req, path, codec).await
}
/// Analytics (Bugfinder Sprint)
pub async fn get_analytics_overview(
&mut self,
request: impl tonic::IntoRequest<super::GetAnalyticsOverviewRequest>,
) -> std::result::Result<
tonic::Response<super::GetAnalyticsOverviewResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/noteflow.NoteFlowService/GetAnalyticsOverview",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(
GrpcMethod::new("noteflow.NoteFlowService", "GetAnalyticsOverview"),
);
self.inner.unary(req, path, codec).await
}
pub async fn list_speaker_stats(
&mut self,
request: impl tonic::IntoRequest<super::ListSpeakerStatsRequest>,
) -> std::result::Result<
tonic::Response<super::ListSpeakerStatsResponse>,
tonic::Status,
> {
self.inner
.ready()
.await
.map_err(|e| {
tonic::Status::unknown(
format!("Service was not ready: {}", e.into()),
)
})?;
let codec = tonic::codec::ProstCodec::default();
let path = http::uri::PathAndQuery::from_static(
"/noteflow.NoteFlowService/ListSpeakerStats",
);
let mut req = request.into_request();
req.extensions_mut()
.insert(GrpcMethod::new("noteflow.NoteFlowService", "ListSpeakerStats"));
self.inner.unary(req, path, codec).await
}
/// Project membership management (Sprint 18)
pub async fn add_project_member(
&mut self,

View File

@@ -0,0 +1,34 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DailyMeetingStats {
pub date: String,
pub meetings: i32,
pub total_duration: f64,
pub word_count: i32,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnalyticsOverview {
pub daily: Vec<DailyMeetingStats>,
pub total_meetings: i32,
pub total_duration: f64,
pub total_words: i32,
pub total_segments: i32,
pub speaker_count: i32,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SpeakerStat {
pub speaker_id: String,
pub display_name: String,
pub total_time: f64,
pub segment_count: i32,
pub meeting_count: i32,
pub avg_confidence: f64,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ListSpeakerStatsResult {
pub speakers: Vec<SpeakerStat>,
}

View File

@@ -3,6 +3,7 @@
//! These types mirror the protobuf definitions and are used for
//! communication between Rust and the React frontend via Tauri.
pub mod analytics;
pub mod asr;
pub mod calendar;
pub mod core;
@@ -16,4 +17,5 @@ pub mod projects;
pub mod results;
pub mod streaming;
pub mod sync;
pub mod tasks;
pub mod webhooks;

View File

@@ -0,0 +1,43 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum TaskStatus {
#[default]
Open,
Done,
Dismissed,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Task {
pub id: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub meeting_id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub action_item_id: Option<i32>,
pub text: String,
pub status: TaskStatus,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub assignee_person_id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub due_date: Option<f64>,
pub priority: i32,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub completed_at: Option<f64>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskWithMeeting {
pub task: Task,
pub meeting_title: String,
pub meeting_created_at: f64,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub project_id: Option<String>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ListTasksResult {
pub tasks: Vec<TaskWithMeeting>,
pub total_count: i32,
}

View File

@@ -196,6 +196,12 @@ macro_rules! app_invoke_handler {
commands::reset_test_recording_state,
commands::inject_test_audio,
commands::inject_test_tone,
// Tasks (2 commands) - Bugfinder Sprint
commands::list_tasks,
commands::update_task,
// Analytics (2 commands) - Bugfinder Sprint
commands::get_analytics_overview,
commands::list_speaker_stats,
]
};
}

View File

@@ -1,5 +1,6 @@
import type { NoteFlowAPI } from '../../interface';
import type { TauriInvoke, TauriListen } from './types';
import { createAnalyticsApi } from './sections/analytics';
import { createAnnotationApi } from './sections/annotations';
import { createAppsApi } from './sections/apps';
import { createAsrApi } from './sections/asr';
@@ -17,6 +18,7 @@ import { createPlaybackApi } from './sections/playback';
import { createPreferencesApi } from './sections/preferences';
import { createProjectApi } from './sections/projects';
import { createSummarizationApi } from './sections/summarization';
import { createTaskApi } from './sections/tasks';
import { createTriggerApi } from './sections/triggers';
import { createWebhookApi } from './sections/webhooks';
@@ -42,5 +44,7 @@ export function createTauriAPI(invoke: TauriInvoke, listen: TauriListen): NoteFl
...createIntegrationApi(invoke),
...createObservabilityApi(invoke),
...createOidcApi(invoke),
...createTaskApi(invoke),
...createAnalyticsApi(invoke),
};
}

View File

@@ -142,6 +142,12 @@ export const TauriCommands = {
CHECK_TEST_ENVIRONMENT: 'check_test_environment',
INJECT_TEST_AUDIO: 'inject_test_audio',
INJECT_TEST_TONE: 'inject_test_tone',
// Tasks (Bugfinder Sprint)
LIST_TASKS: 'list_tasks',
UPDATE_TASK: 'update_task',
// Analytics (Bugfinder Sprint)
GET_ANALYTICS_OVERVIEW: 'get_analytics_overview',
LIST_SPEAKER_STATS: 'list_speaker_stats',
} as const;
/**

View File

@@ -0,0 +1,32 @@
import type {
AnalyticsOverview,
AnalyticsOverviewRequest,
ListSpeakerStatsRequest,
ListSpeakerStatsResponse,
} from '../../../types';
import type { NoteFlowAPI } from '../../../interface';
import { TauriCommands } from '../constants';
import type { TauriInvoke } from '../types';
export function createAnalyticsApi(
invoke: TauriInvoke
): Pick<NoteFlowAPI, 'getAnalyticsOverview' | 'listSpeakerStats'> {
return {
async getAnalyticsOverview(request: AnalyticsOverviewRequest): Promise<AnalyticsOverview> {
return invoke<AnalyticsOverview>(TauriCommands.GET_ANALYTICS_OVERVIEW, {
start_time: request.start_time,
end_time: request.end_time,
project_id: request.project_id,
project_ids: request.project_ids,
});
},
async listSpeakerStats(request: ListSpeakerStatsRequest): Promise<ListSpeakerStatsResponse> {
return invoke<ListSpeakerStatsResponse>(TauriCommands.LIST_SPEAKER_STATS, {
start_time: request.start_time,
end_time: request.end_time,
project_id: request.project_id,
project_ids: request.project_ids,
});
},
};
}

View File

@@ -0,0 +1,32 @@
import type { ListTasksRequest, ListTasksResponse, Task, UpdateTaskRequest } from '../../../types';
import type { NoteFlowAPI } from '../../../interface';
import { TauriCommands } from '../constants';
import type { TauriInvoke } from '../types';
export function createTaskApi(
invoke: TauriInvoke
): Pick<NoteFlowAPI, 'listTasks' | 'updateTask'> {
return {
async listTasks(request: ListTasksRequest): Promise<ListTasksResponse> {
return invoke<ListTasksResponse>(TauriCommands.LIST_TASKS, {
statuses: request.statuses,
limit: request.limit,
offset: request.offset,
project_id: request.project_id,
project_ids: request.project_ids,
meeting_id: request.meeting_id,
});
},
async updateTask(request: UpdateTaskRequest): Promise<Task> {
const response = await invoke<{ task: Task }>(TauriCommands.UPDATE_TASK, {
task_id: request.task_id,
text: request.text,
status: request.status,
assignee_person_id: request.assignee_person_id,
due_date: request.due_date,
priority: request.priority,
});
return response.task;
},
};
}

View File

@@ -21,7 +21,7 @@ function sanitizeWebhookRequest<T extends {
if (request.url !== undefined && !url) {
throw new Error('Webhook URL is required.');
}
if (request.events !== undefined && request.events.length === 0) {
if (request.events?.length === 0) {
throw new Error('Webhook events are required.');
}
const name = request.name?.trim();

View File

@@ -14,6 +14,8 @@
import type {
AddAnnotationRequest,
AddProjectMemberRequest,
AnalyticsOverview,
AnalyticsOverviewRequest,
ArchiveSummarizationTemplateRequest,
Annotation,
ASRConfiguration,
@@ -63,12 +65,16 @@ import type {
GetUserIntegrationsResponse,
GetWebhookDeliveriesResponse,
InitiateCalendarAuthResponse,
ListSpeakerStatsRequest,
ListSpeakerStatsResponse,
ListSummarizationTemplateVersionsRequest,
ListSummarizationTemplateVersionsResponse,
ListSummarizationTemplatesRequest,
ListSummarizationTemplatesResponse,
ListOidcPresetsResponse,
ListOidcProvidersResponse,
ListTasksRequest,
ListTasksResponse,
ListWorkspacesResponse,
ListCalendarEventsResponse,
ListMeetingsRequest,
@@ -104,11 +110,13 @@ import type {
Summary,
SetOAuthClientConfigRequest,
SetOAuthClientConfigResponse,
Task,
TriggerStatus,
UpdateASRConfigurationRequest,
UpdateASRConfigurationResult,
UpdateStreamingConfigurationRequest,
UpdateSummarizationTemplateRequest,
UpdateTaskRequest,
UpdateWorkspaceSettingsRequest,
UpdateAnnotationRequest,
UpdateOidcProviderRequest,
@@ -119,9 +127,10 @@ import type {
ValidateHuggingFaceTokenResult,
} from './types';
import type { TestAudioConfig, TestAudioResult, TestEnvironmentInfo } from './types/testing';
import type { TranscriptionStream } from './core/streams';
// Re-export TranscriptionStream from core module
export type { TranscriptionStream } from './core/streams';
export type { TranscriptionStream };
/**
* Main NoteFlow API interface
@@ -937,6 +946,18 @@ export interface NoteFlowAPI {
* @see gRPC endpoint: ListOidcPresets (unary)
*/
listOidcPresets(): Promise<ListOidcPresetsResponse>;
// --- Tasks (Strategy B) ---
listTasks(request: ListTasksRequest): Promise<ListTasksResponse>;
updateTask(request: UpdateTaskRequest): Promise<Task>;
// --- Analytics (Strategy B) ---
getAnalyticsOverview(request: AnalyticsOverviewRequest): Promise<AnalyticsOverview>;
listSpeakerStats(request: ListSpeakerStatsRequest): Promise<ListSpeakerStatsResponse>;
}
// --- API Instance Management ---

View File

@@ -118,3 +118,14 @@ export type WorkspaceRole = 'owner' | 'admin' | 'member' | 'viewer';
* - PROJECT_ROLE_ADMIN = 3
*/
export type ProjectRole = 'viewer' | 'editor' | 'admin';
/**
* Task lifecycle status
*
* gRPC enum values:
* - TASK_STATUS_UNSPECIFIED = 0
* - TASK_STATUS_OPEN = 1
* - TASK_STATUS_DONE = 2
* - TASK_STATUS_DISMISSED = 3
*/
export type TaskStatus = 'open' | 'done' | 'dismissed';

View File

@@ -0,0 +1,49 @@
/**
* Analytics Aggregate Types
*
* Types for server-backed analytics aggregates.
* These replace client-side computations for better scalability.
*/
export interface DailyMeetingStats {
date: string;
meetings: number;
total_duration: number;
word_count: number;
}
export interface AnalyticsOverview {
daily: DailyMeetingStats[];
total_meetings: number;
total_duration: number;
total_words: number;
total_segments: number;
speaker_count: number;
}
export interface AnalyticsOverviewRequest {
start_time: number;
end_time: number;
project_id?: string;
project_ids?: string[];
}
export interface SpeakerStat {
speaker_id: string;
display_name: string;
total_time: number;
segment_count: number;
meeting_count: number;
avg_confidence: number;
}
export interface ListSpeakerStatsRequest {
start_time: number;
end_time: number;
project_id?: string;
project_ids?: string[];
}
export interface ListSpeakerStatsResponse {
speakers: SpeakerStat[];
}

View File

@@ -27,9 +27,9 @@ export type ASRDevice = 'unspecified' | 'cpu' | 'cuda';
export type ASRComputeType = 'unspecified' | 'int8' | 'float16' | 'float32';
/**
* Job status for background tasks
* Job status for ASR background tasks (extends base JobStatus with 'unspecified')
*/
export type JobStatus = 'unspecified' | 'queued' | 'running' | 'completed' | 'failed' | 'cancelled';
export type ASRJobStatus = 'unspecified' | 'queued' | 'running' | 'completed' | 'failed' | 'cancelled';
/**
* Current ASR configuration and capabilities
@@ -79,7 +79,7 @@ export interface UpdateASRConfigurationResult {
jobId: string;
/** Initial job status */
status: JobStatus;
status: ASRJobStatus;
/** Whether the request was accepted */
accepted: boolean;
@@ -101,7 +101,7 @@ export interface ASRConfigurationJobStatus {
jobId: string;
/** Current status */
status: JobStatus;
status: ASRJobStatus;
/** Progress percentage (0.0-100.0) */
progressPercent: number;

View File

@@ -1,9 +1,10 @@
/**
* NoteFlow Feature Types
*
* Types for NER, calendar, webhooks, sync, observability, identity, OIDC, ASR, and model downloads.
* Types for NER, calendar, webhooks, sync, observability, identity, OIDC, ASR, model downloads, tasks, and analytics.
*/
export * from './analytics';
export * from './asr';
export * from './calendar';
export * from './identity';
@@ -13,4 +14,5 @@ export * from './observability';
export * from './oidc';
export * from './streaming';
export * from './sync';
export * from './tasks';
export * from './webhooks';

View File

@@ -0,0 +1,52 @@
import type { TaskStatus } from '../enums';
export interface Task {
id: string;
meeting_id: string | null;
action_item_id: number | null;
text: string;
status: TaskStatus;
assignee_person_id: string | null;
due_date: number | null;
priority: number;
completed_at: number | null;
}
/** Task with associated meeting metadata */
export interface TaskWithMeeting {
task: Task;
meeting_title: string;
meeting_created_at: number;
project_id: string | null;
}
/** Request to list tasks with filtering */
export interface ListTasksRequest {
statuses?: TaskStatus[];
limit?: number;
offset?: number;
project_id?: string;
project_ids?: string[];
meeting_id?: string;
}
/** Response from listing tasks */
export interface ListTasksResponse {
tasks: TaskWithMeeting[];
total_count: number;
}
/** Request to update a task */
export interface UpdateTaskRequest {
task_id: string;
text?: string;
status?: TaskStatus;
assignee_person_id?: string | null;
due_date?: number | null;
priority?: number;
}
/** Response from updating a task */
export interface UpdateTaskResponse {
task: Task;
}

View File

@@ -115,23 +115,11 @@ export interface SyncHistoryEvent {
error?: string;
}
export interface GetOAuthClientConfigRequest {
workspace_id?: string;
provider: string;
integration_type?: Integration['type'];
}
export interface GetOAuthClientConfigResponse {
config?: OAuthConfig;
}
export interface SetOAuthClientConfigRequest {
workspace_id?: string;
provider: string;
integration_type?: Integration['type'];
config: OAuthConfig;
}
export interface SetOAuthClientConfigResponse {
success: boolean;
}
// OAuth client config types are in features/calendar.ts
// Re-exported here for backwards compatibility
export type {
GetOAuthClientConfigRequest,
GetOAuthClientConfigResponse,
SetOAuthClientConfigRequest,
SetOAuthClientConfigResponse,
} from '../features/calendar';

View File

@@ -138,7 +138,10 @@ export function LogsTab() {
return true;
}
const metadataText = log.metadata ? JSON.stringify(log.metadata).toLowerCase() : '';
const correlationText = [log.traceId, log.spanId].filter(Boolean).join(' ').toLowerCase();
// Only server logs have traceId/spanId - use type guard
const traceId = 'traceId' in log ? log.traceId : undefined;
const spanId = 'spanId' in log ? log.spanId : undefined;
const correlationText = [traceId, spanId].filter(Boolean).join(' ').toLowerCase();
return (
log.message.toLowerCase().includes(query) ||
log.details?.toLowerCase().includes(query) ||

View File

@@ -125,7 +125,7 @@ export function PerformanceTab() {
queryKey: ['performance-metrics'],
queryFn: async () => {
const api = getAPI();
return api.getPerformanceMetrics({ history_minutes: 60 });
return api.getPerformanceMetrics({ history_limit: 60 });
},
refetchInterval: METRICS_REFRESH_INTERVAL_MS,
});

Some files were not shown because too many files have changed in this diff Show More