feat: local mcp server (#38797)

* feat: local mcp server

* feat(local-mcp): implement migrations

* fix: remove unsupported mcp args

* feat(local-mcp): tests

* fix(local-mcp): packages to adhere to minimumReleaseAge

* fix(mcp): import path for createSupabaseApiPlatform

* fix(local-mcp): move tests out of pages/api dir

* refactor: self-hosted execute sql logic

* fix: deps
Greg Richardson authored on 2025-09-23 12:00:08 -06:00, committed by GitHub
parent 765aac4f2f, commit 9820707d71
18 changed files with 643 additions and 141 deletions

View File

@@ -44,10 +44,10 @@ export async function executeSql<T = any>(
>,
signal?: AbortSignal,
headersInit?: HeadersInit,
fetcherOverride?: (
sql: string,
fetcherOverride?: (options: {
query: string
headers?: HeadersInit
) => Promise<{ data: T } | { error: ResponseError }>
}) => Promise<{ data: T } | { error: ResponseError }>
): Promise<{ result: T }> {
if (!projectRef) throw new Error('projectRef is required')
@@ -64,7 +64,7 @@ export async function executeSql<T = any>(
let error
if (fetcherOverride) {
const result = await fetcherOverride(sql, headers)
const result = await fetcherOverride({ query: sql, headers })
if ('data' in result) {
data = result.data
} else {

View File

@@ -1,4 +1,5 @@
import { createSupabaseApiPlatform, createSupabaseMcpServer } from '@supabase/mcp-server-supabase'
import { createSupabaseMcpServer } from '@supabase/mcp-server-supabase'
import { createSupabaseApiPlatform } from '@supabase/mcp-server-supabase/platform/api'
import { StreamTransport } from '@supabase/mcp-utils'
import { experimental_createMCPClient as createMCPClient } from 'ai'

View File

@@ -8,7 +8,7 @@ import { getDatabaseFunctions } from 'data/database-functions/database-functions
import { getDatabasePolicies } from 'data/database-policies/database-policies-query'
import { getEntityDefinitionsSql } from 'data/database/entity-definitions-query'
import { executeSql } from 'data/sql/execute-sql-query'
import { queryPgMetaSelfHosted } from 'lib/self-hosted'
import { executeQuery } from 'lib/api/self-hosted/query'
export const getFallbackTools = ({
projectRef,
@@ -46,7 +46,7 @@ export const getFallbackTools = ({
},
undefined,
headers,
IS_PLATFORM ? undefined : queryPgMetaSelfHosted
IS_PLATFORM ? undefined : executeQuery
)
: { result: [] }

View File

@@ -1,5 +1,12 @@
import { describe, it, expect, vi, beforeEach } from 'vitest'
import { constructHeaders, toSnakeCase } from './apiHelpers'
import type { IncomingHttpHeaders } from 'node:http'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import {
commaSeparatedStringIntoArray,
constructHeaders,
fromNodeHeaders,
toSnakeCase,
zBooleanString,
} from './apiHelpers'
vi.mock('lib/constants', () => ({
IS_PLATFORM: false,
@@ -129,4 +136,124 @@ describe('apiHelpers', () => {
expect(toSnakeCase(true)).toBe(true)
})
})
describe('zBooleanString', () => {
it('should transform "true" string to boolean true', () => {
const schema = zBooleanString()
const result = schema.parse('true')
expect(result).toBe(true)
})
it('should transform "false" string to boolean false', () => {
const schema = zBooleanString()
const result = schema.parse('false')
expect(result).toBe(false)
})
it('should throw error for invalid boolean string', () => {
const schema = zBooleanString()
expect(() => schema.parse('invalid')).toThrow('must be a boolean string')
})
it('should throw custom error message when provided', () => {
const customError = 'Custom boolean error'
const schema = zBooleanString(customError)
expect(() => schema.parse('invalid')).toThrow(customError)
})
it('should throw error for empty string', () => {
const schema = zBooleanString()
expect(() => schema.parse('')).toThrow('must be a boolean string')
})
it('should throw error for non-string input', () => {
const schema = zBooleanString()
expect(() => schema.parse(true)).toThrow()
expect(() => schema.parse(false)).toThrow()
expect(() => schema.parse(123)).toThrow()
})
})
describe('commaSeparatedStringIntoArray', () => {
it('should split comma-separated string into array', () => {
const result = commaSeparatedStringIntoArray('a,b,c')
expect(result).toEqual(['a', 'b', 'c'])
})
it('should trim whitespace from values', () => {
const result = commaSeparatedStringIntoArray('a, b , c')
expect(result).toEqual(['a', 'b', 'c'])
})
it('should filter out empty values', () => {
const result = commaSeparatedStringIntoArray('a,,b,')
expect(result).toEqual(['a', 'b'])
})
it('should handle single value', () => {
const result = commaSeparatedStringIntoArray('single')
expect(result).toEqual(['single'])
})
it('should handle empty string', () => {
const result = commaSeparatedStringIntoArray('')
expect(result).toEqual([])
})
it('should handle string with only commas', () => {
const result = commaSeparatedStringIntoArray(',,,')
expect(result).toEqual([])
})
})
describe('fromNodeHeaders', () => {
it('should convert simple node headers to fetch headers', () => {
const nodeHeaders: IncomingHttpHeaders = {
'content-type': 'application/json',
authorization: 'Bearer token',
}
const result = fromNodeHeaders(nodeHeaders)
expect(result.get('content-type')).toBe('application/json')
expect(result.get('authorization')).toBe('Bearer token')
})
it('should skip undefined values', () => {
const nodeHeaders: IncomingHttpHeaders = {
'content-type': 'application/json',
authorization: undefined,
accept: 'application/json',
}
const result = fromNodeHeaders(nodeHeaders)
expect(result.get('content-type')).toBe('application/json')
expect(result.get('authorization')).toBeNull()
expect(result.get('accept')).toBe('application/json')
})
it('should handle empty headers object', () => {
const nodeHeaders: IncomingHttpHeaders = {}
const result = fromNodeHeaders(nodeHeaders)
expect(Array.from(result.keys())).toEqual([])
})
it('should handle mixed array and string values', () => {
const nodeHeaders: IncomingHttpHeaders = {
'content-type': 'application/json',
'x-custom': ['value1', 'value2'],
authorization: 'Bearer token',
'x-empty': undefined,
}
const result = fromNodeHeaders(nodeHeaders)
expect(result.get('content-type')).toBe('application/json')
expect(result.get('authorization')).toBe('Bearer token')
expect(result.get('x-empty')).toBeNull()
expect(result.get('x-custom')).toBe('value1, value2')
})
})
})

View File

@@ -1,5 +1,7 @@
import { IS_PLATFORM } from 'lib/constants'
import { snakeCase } from 'lodash'
import type { IncomingHttpHeaders } from 'node:http'
import z from 'zod'
/**
* Construct headers for api request.
@@ -67,3 +69,53 @@ export const toSnakeCase = (object) => {
return object
}
}
/**
* Converts Node.js `IncomingHttpHeaders` to Fetch API `Headers`.
*/
export function fromNodeHeaders(nodeHeaders: IncomingHttpHeaders): Headers {
const headers = new Headers()
for (const [key, value] of Object.entries(nodeHeaders)) {
if (Array.isArray(value)) {
value.forEach((v) => headers.append(key, v))
} else if (value !== undefined) {
headers.append(key, value)
}
}
return headers
}
/**
* Zod transformer to parse boolean values from strings.
*
* Use when accepting a boolean value in a query parameter.
*/
export function zBooleanString(errorMsg?: string) {
return z.string().transform((value, ctx) => {
if (value === 'true') {
return true
}
if (value === 'false') {
return false
}
ctx.addIssue({
code: z.ZodIssueCode.custom,
message: errorMsg || 'must be a boolean string',
})
return z.NEVER
})
}
/**
* Transform a comma-separated string into an array of strings.
*
* Use when accepting a list of values in a query parameter.
*/
export function commaSeparatedStringIntoArray(value: string): string[] {
return value
.split(',')
.map((v) => v.trim())
.filter(Boolean)
}
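
For illustration, a minimal sketch of how these new helpers could be wired into a route's query schema (the schema shape and values below are hypothetical, not part of this commit):

import { z } from 'zod'
import { commaSeparatedStringIntoArray, zBooleanString } from 'lib/api/apiHelpers'

// Hypothetical query schema for a request like ?include_system=true&schemas=public,auth
const querySchema = z.object({
  include_system: zBooleanString().optional(),
  schemas: z.string().transform(commaSeparatedStringIntoArray).optional(),
})

const parsed = querySchema.parse({ include_system: 'true', schemas: 'public, auth' })
// parsed => { include_system: true, schemas: ['public', 'auth'] }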

View File

@@ -0,0 +1,50 @@
import {
ApplyMigrationOptions,
DatabaseOperations,
ExecuteSqlOptions,
} from '@supabase/mcp-server-supabase/platform'
import { executeQuery } from './query'
import { applyAndTrackMigrations, listMigrationVersions } from './migrations'
export type GetDatabaseOperationsOptions = {
headers?: HeadersInit
}
export function getDatabaseOperations({
headers,
}: GetDatabaseOperationsOptions): DatabaseOperations {
return {
async executeSql<T>(_projectRef: string, options: ExecuteSqlOptions) {
const { query } = options
const response = await executeQuery({ query, headers })
if (response.error) {
const { code, message } = response.error
throw new Error(`Error executing SQL: ${message} (code: ${code})`)
}
return response as T
},
async listMigrations() {
const response = await listMigrationVersions({ headers })
if (response.error) {
const { code, message } = response.error
throw new Error(`Error listing migrations: ${message} (code: ${code})`)
}
return response as any
},
async applyMigration<T>(_projectRef: string, options: ApplyMigrationOptions) {
const { query, name } = options
const response = await applyAndTrackMigrations({ query, name, headers })
if (response.error) {
const { code, message } = response.error
throw new Error(`Error applying migration: ${message} (code: ${code})`)
}
return response as T
},
}
}

View File

@@ -0,0 +1,73 @@
import { source } from 'common-tags'
import { makeRandomString } from 'lib/helpers'
import { executeQuery } from './query'
const listMigrationVersionsQuery = () =>
'select version, name from supabase_migrations.schema_migrations order by version'
const initializeHistoryTableQuery = () => `begin;
create schema if not exists supabase_migrations;
create table if not exists supabase_migrations.schema_migrations (version text not null primary key);
alter table supabase_migrations.schema_migrations add column if not exists statements text[];
alter table supabase_migrations.schema_migrations add column if not exists name text;
commit;`
const applyAndTrackMigrationsQuery = (query: string, name?: string) => {
// Escapes literals using postgres dollar quoted string
const dollar = `$${makeRandomString(20)}$`
const quote = (s?: string) => (s ? dollar + s + dollar : `''`)
return source`
begin;
-- apply sql from post body
${query};
-- track statements in history table
insert into supabase_migrations.schema_migrations (version, name, statements)
values (
to_char(current_timestamp, 'YYYYMMDDHHMISS'),
${quote(name)},
array[${quote(query)}]
);
commit;
`
}
export type ListMigrationVersionsOptions = {
headers?: HeadersInit
}
export async function listMigrationVersions({ headers }: ListMigrationVersionsOptions) {
return await executeQuery({ query: listMigrationVersionsQuery(), headers })
}
export type ApplyAndTrackMigrationsOptions = {
query: string
name?: string
headers?: HeadersInit
}
export async function applyAndTrackMigrations({
query,
name,
headers,
}: ApplyAndTrackMigrationsOptions) {
const initializeResponse = await executeQuery<void>({
query: initializeHistoryTableQuery(),
headers,
})
if (initializeResponse.error) {
return initializeResponse
}
const applyAndTrackResponse = await executeQuery({
query: applyAndTrackMigrationsQuery(query, name),
headers,
})
return applyAndTrackResponse
}

View File

@@ -0,0 +1,23 @@
import { fetchPost } from 'data/fetchers'
import { PG_META_URL } from 'lib/constants/index'
import { ResponseError } from 'types'
import { constructHeaders } from '../apiHelpers'
export type QueryOptions = {
query: string
headers?: HeadersInit
}
export async function executeQuery<T = unknown>({ query, headers }: QueryOptions) {
const response = await fetchPost<T[]>(
`${PG_META_URL}/query`,
{ query },
{ headers: constructHeaders(headers ?? {}) }
)
if (response instanceof ResponseError) {
return { error: response }
} else {
return { data: response }
}
}
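
As a usage sketch (the query and calling context are illustrative only), another self-hosted API route could call this helper like so:

import { executeQuery } from 'lib/api/self-hosted/query'

// Hypothetical caller: run a read-only query against the local pg-meta service
async function getPostgresVersion(headers?: HeadersInit) {
  const result = await executeQuery<{ version: string }>({
    query: 'select version()',
    headers,
  })

  if ('error' in result) {
    throw new Error(result.error.message)
  }
  return result.data[0]?.version
}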

View File

@@ -1,15 +0,0 @@
import { fetchPost } from 'data/fetchers'
import { constructHeaders } from 'lib/api/apiHelpers'
import { PG_META_URL } from 'lib/constants'
import type { ResponseError } from 'types'
export async function queryPgMetaSelfHosted(sql: string, headersInit?: { [prop: string]: any }) {
const headers = constructHeaders(headersInit ?? {})
const response = await fetchPost(`${PG_META_URL}/query`, { query: sql }, { headers })
if (response.error) {
return { error: response.error as ResponseError }
} else {
return { data: response }
}
}

View File

@@ -45,6 +45,7 @@
"@headlessui/react": "^1.7.17",
"@heroicons/react": "^2.1.3",
"@hookform/resolvers": "^3.1.1",
"@modelcontextprotocol/sdk": "^1.18.0",
"@monaco-editor/react": "^4.6.0",
"@next/bundle-analyzer": "15.3.1",
"@number-flow/react": "^0.3.2",
@@ -56,7 +57,7 @@
"@stripe/react-stripe-js": "^3.7.0",
"@stripe/stripe-js": "^7.5.0",
"@supabase/auth-js": "catalog:",
"@supabase/mcp-server-supabase": "^0.4.4",
"@supabase/mcp-server-supabase": "^0.5.4",
"@supabase/mcp-utils": "^0.2.0",
"@supabase/pg-meta": "workspace:*",
"@supabase/realtime-js": "catalog:",
@@ -193,6 +194,7 @@
"jsdom-testing-mocks": "^1.13.1",
"msw": "^2.3.0",
"next-router-mock": "^0.9.13",
"node-mocks-http": "^1.17.2",
"postcss": "^8.5.3",
"prettier": "3.2.4",
"raw-loader": "^4.0.2",

View File

@@ -1,5 +1,5 @@
import pgMeta from '@supabase/pg-meta'
import { ModelMessage, stepCountIs, generateText, Output } from 'ai'
import { generateText, ModelMessage, stepCountIs } from 'ai'
import { IS_PLATFORM } from 'common'
import { source } from 'common-tags'
import { executeSql } from 'data/sql/execute-sql-query'
@@ -16,8 +16,9 @@ import {
} from 'lib/ai/prompts'
import { getTools } from 'lib/ai/tools'
import apiWrapper from 'lib/api/apiWrapper'
import { queryPgMetaSelfHosted } from 'lib/self-hosted'
import { executeQuery } from 'lib/api/self-hosted/query'
import { NextApiRequest, NextApiResponse } from 'next'
import z from 'zod'
export const maxDuration = 60
@@ -78,10 +79,11 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
// Get a list of all schemas to add to context
const pgMetaSchemasList = pgMeta.schemas.list()
type Schemas = z.infer<(typeof pgMetaSchemasList)['zod']>
const { result: schemas } =
aiOptInLevel !== 'disabled'
? await executeSql(
? await executeSql<Schemas>(
{
projectRef,
connectionString,
@@ -92,7 +94,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
'Content-Type': 'application/json',
...(authorization && { Authorization: authorization }),
},
IS_PLATFORM ? undefined : queryPgMetaSelfHosted
IS_PLATFORM ? undefined : executeQuery
)
: { result: [] }

View File

@@ -3,6 +3,7 @@ import { convertToModelMessages, ModelMessage, stepCountIs, streamText } from 'a
import { source } from 'common-tags'
import { NextApiRequest, NextApiResponse } from 'next'
import { z } from 'zod/v4'
import { z as z3 } from 'zod/v3'
import { IS_PLATFORM } from 'common'
import { executeSql } from 'data/sql/execute-sql-query'
@@ -11,7 +12,6 @@ import { getModel } from 'lib/ai/model'
import { getOrgAIDetails } from 'lib/ai/org-ai-details'
import { getTools } from 'lib/ai/tools'
import apiWrapper from 'lib/api/apiWrapper'
import { queryPgMetaSelfHosted } from 'lib/self-hosted'
import {
CHAT_PROMPT,
@@ -21,6 +21,7 @@ import {
RLS_PROMPT,
SECURITY_PROMPT,
} from 'lib/ai/prompts'
import { executeQuery } from 'lib/api/self-hosted/query'
export const maxDuration = 120
@@ -138,10 +139,11 @@ async function handlePost(req: NextApiRequest, res: NextApiResponse) {
try {
// Get a list of all schemas to add to context
const pgMetaSchemasList = pgMeta.schemas.list()
type Schemas = z3.infer<(typeof pgMetaSchemasList)['zod']>
const { result: schemas } =
aiOptInLevel !== 'disabled'
? await executeSql(
? await executeSql<Schemas>(
{
projectRef,
connectionString,
@@ -152,7 +154,7 @@ async function handlePost(req: NextApiRequest, res: NextApiResponse) {
'Content-Type': 'application/json',
...(authorization && { Authorization: authorization }),
},
IS_PLATFORM ? undefined : queryPgMetaSelfHosted
IS_PLATFORM ? undefined : executeQuery
)
: { result: [] }

View File

@@ -0,0 +1,76 @@
import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'
import { createSupabaseMcpServer, SupabasePlatform } from '@supabase/mcp-server-supabase'
import { stripIndent } from 'common-tags'
import { commaSeparatedStringIntoArray, fromNodeHeaders } from 'lib/api/apiHelpers'
import { getDatabaseOperations } from 'lib/api/self-hosted/mcp'
import { DEFAULT_PROJECT } from 'lib/constants/api'
import { NextApiRequest, NextApiResponse } from 'next'
import { z } from 'zod'
const supportedFeatureGroupSchema = z.enum(['docs', 'database'])
const mcpQuerySchema = z.object({
features: z
.string()
.transform(commaSeparatedStringIntoArray)
.optional()
.describe(
stripIndent`
A comma-separated list of feature groups to filter tools by. If not provided, all tools are available.
The following feature groups are supported: ${supportedFeatureGroupSchema.options.map((group) => `\`${group}\``).join(', ')}.
`
)
.pipe(z.array(supportedFeatureGroupSchema).optional()),
})
const handler = async (req: NextApiRequest, res: NextApiResponse) => {
switch (req.method) {
case 'POST':
return handlePost(req, res)
default:
res.setHeader('Allow', ['POST'])
return res.status(405).json({ error: { message: `Method ${req.method} Not Allowed` } })
}
}
async function handlePost(req: NextApiRequest, res: NextApiResponse) {
const { error, data } = mcpQuerySchema.safeParse(req.query)
if (error) {
return res.status(400).json({ error: error.flatten().fieldErrors })
}
const { features } = data
const headers = fromNodeHeaders(req.headers)
const platform: SupabasePlatform = {
database: getDatabaseOperations({ headers }),
}
try {
const server = createSupabaseMcpServer({
platform,
projectId: DEFAULT_PROJECT.ref,
features,
})
const transport = new StreamableHTTPServerTransport({
sessionIdGenerator: undefined, // Stateless, don't use session management
enableJsonResponse: true, // Stateless, discourage SSE streams
})
await server.connect(transport)
await transport.handleRequest(req, res, req.body)
} catch (error) {
// Errors at this point will be due to MCP setup issues
// Future errors will be handled at the JSON-RPC level within the MCP protocol
if (error instanceof Error) {
return res.status(400).json({ error: error.message })
}
return res.status(500).json({ error: 'Unable to process MCP request', cause: error })
}
}
export default handler
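
For reference, a minimal sketch of connecting a client to the new endpoint over Streamable HTTP (the localhost URL, port, and client name are assumptions for local development, not part of this commit):

import { Client } from '@modelcontextprotocol/sdk/client/index.js'
import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js'

async function main() {
  // Assumes the studio dev server is running locally on port 3000
  const transport = new StreamableHTTPClientTransport(
    new URL('http://localhost:3000/api/mcp?features=database')
  )
  const client = new Client({ name: 'example-client', version: '0.0.1' })

  await client.connect(transport)
  const { tools } = await client.listTools()
  console.log(tools.map((tool) => tool.name))

  await client.close()
}

main().catch(console.error)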

View File

@@ -1,7 +1,6 @@
import { fetchPost } from 'data/fetchers'
import { constructHeaders } from 'lib/api/apiHelpers'
import apiWrapper from 'lib/api/apiWrapper'
import { PG_META_URL } from 'lib/constants'
import { executeQuery } from 'lib/api/self-hosted/query'
import { NextApiRequest, NextApiResponse } from 'next'
export default (req: NextApiRequest, res: NextApiResponse) =>
@@ -22,12 +21,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
const handlePost = async (req: NextApiRequest, res: NextApiResponse) => {
const { query } = req.body
const headers = constructHeaders(req.headers)
const response = await fetchPost(`${PG_META_URL}/query`, { query }, { headers })
const { data, error } = await executeQuery({ query, headers })
if (response.error) {
const { code, message } = response.error
return res.status(code).json({ message, formattedError: message })
if (error) {
const { code, message } = error
return res.status(code ?? 500).json({ message, formattedError: message })
} else {
return res.status(200).json(response)
return res.status(200).json(data)
}
}

View File

@@ -1,10 +1,8 @@
import { NextApiRequest, NextApiResponse } from 'next'
import { fetchPost } from 'data/fetchers'
import { constructHeaders } from 'lib/api/apiHelpers'
import apiWrapper from 'lib/api/apiWrapper'
import { PG_META_URL } from 'lib/constants'
import { makeRandomString } from 'lib/helpers'
import { applyAndTrackMigrations, listMigrationVersions } from 'lib/api/self-hosted/migrations'
export default (req: NextApiRequest, res: NextApiResponse) =>
apiWrapper(req, res, handler, { withAuth: true })
@@ -22,79 +20,29 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
res.status(405).json({ error: { message: `Method ${method} Not Allowed` } })
}
}
const listMigrationVersions =
'select version, name from supabase_migrations.schema_migrations order by version'
const handleGetAll = async (req: NextApiRequest, res: NextApiResponse) => {
const headers = constructHeaders(req.headers)
const { data, error } = await listMigrationVersions(headers)
const response = await fetchPost(
`${PG_META_URL}/query`,
{ query: listMigrationVersions },
{ headers }
)
if (response.error) {
const { code, message } = response.error
return res.status(code).json({ message })
if (error) {
const { code, message } = error
return res.status(code ?? 500).json({ message })
} else {
return res.status(200).json(response)
return res.status(200).json(data)
}
}
export const initialiseHistoryTable = `begin;
create schema if not exists supabase_migrations;
create table if not exists supabase_migrations.schema_migrations (version text not null primary key);
alter table supabase_migrations.schema_migrations add column if not exists statements text[];
alter table supabase_migrations.schema_migrations add column if not exists name text;
commit;`
export function applyAndTrackMigrations(query: string, name?: string) {
// Escapes literals using postgres dollar quoted string
const dollar = `$${makeRandomString(20)}$`
const quote = (s?: string) => (s ? dollar + s + dollar : `''`)
return `begin;
-- apply sql from post body
${query};
-- track statements in history table
insert into supabase_migrations.schema_migrations (version, name, statements)
values (
to_char(current_timestamp, 'YYYYMMDDHHMISS'),
${quote(name)},
array[${quote(query)}]
);
commit;`
}
const handlePost = async (req: NextApiRequest, res: NextApiResponse) => {
const headers = constructHeaders(req.headers)
const { query, name } = req.body
const { data, error } = await applyAndTrackMigrations({ query, name, headers })
const { error } = await fetchPost(
`${PG_META_URL}/query`,
{ query: initialiseHistoryTable },
{ headers }
)
if (error) {
const { code, message } = error
return res.status(code).json({ message, formattedError: message })
}
const { query, name } = req.body
const response = await fetchPost(
`${PG_META_URL}/query`,
{ query: applyAndTrackMigrations(query, name) },
{ headers }
)
if (response.error) {
const { code, message } = response.error
return res.status(code).json({ message, formattedError: message })
return res.status(code ?? 500).json({ message, formattedError: message })
} else {
return res.status(200).json(response)
return res.status(200).json(data)
}
}

View File

@@ -0,0 +1,78 @@
import { describe, it, expect, beforeEach } from 'vitest'
import { createMocks } from 'node-mocks-http'
import { mswServer } from 'tests/lib/msw'
import handler from '../../../../pages/api/mcp/index'
describe('/api/mcp', () => {
beforeEach(() => {
// Disable MSW for these tests
mswServer.close()
})
describe('Method handling', async () => {
it('should handle POST requests', async () => {
const { req, res } = createMocks({
method: 'POST',
query: {},
body: {},
})
await handler(req, res)
expect(res._getStatusCode()).not.toBe(405)
})
it('should return 405 for non-POST methods', async () => {
const { req, res } = createMocks({
method: 'GET',
})
await handler(req, res)
expect(res._getStatusCode()).toBe(405)
expect(JSON.parse(res._getData())).toEqual({
error: { message: 'Method GET Not Allowed' },
})
expect(res._getHeaders()).toEqual({ allow: ['POST'], 'content-type': 'application/json' })
})
})
describe('Query validation', async () => {
it('should accept valid feature groups', async () => {
const { req, res } = createMocks({
method: 'POST',
query: { features: 'docs,database' },
body: {},
})
await handler(req, res)
expect(res._getStatusCode()).not.toBe(400)
})
it('should reject invalid feature groups', async () => {
const { req, res } = createMocks({
method: 'POST',
query: { features: 'invalid,unknown' },
body: {},
})
await handler(req, res)
expect(res._getStatusCode()).toBe(400)
expect(JSON.parse(res._getData())).toHaveProperty('error')
})
it('should work without features parameter', async () => {
const { req, res } = createMocks({
method: 'POST',
query: {},
body: {},
})
await handler(req, res)
expect(res._getStatusCode()).not.toBe(400)
})
})
})

pnpm-lock.yaml generated
View File

@@ -771,6 +771,9 @@ importers:
'@hookform/resolvers':
specifier: ^3.1.1
version: 3.3.1(react-hook-form@7.47.0(react@18.3.1))
'@modelcontextprotocol/sdk':
specifier: ^1.18.0
version: 1.18.0(supports-color@8.1.1)
'@monaco-editor/react':
specifier: ^4.6.0
version: 4.6.0(monaco-editor@0.52.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@@ -805,8 +808,8 @@ importers:
specifier: 'catalog:'
version: 2.72.0-rc.11
'@supabase/mcp-server-supabase':
specifier: ^0.4.4
version: 0.4.4(supports-color@8.1.1)
specifier: ^0.5.4
version: 0.5.5(supports-color@8.1.1)
'@supabase/mcp-utils':
specifier: ^0.2.0
version: 0.2.1(supports-color@8.1.1)
@@ -1204,6 +1207,9 @@ importers:
next-router-mock:
specifier: ^0.9.13
version: 0.9.13(next@15.5.2(@babel/core@7.26.10(supports-color@8.1.1))(@opentelemetry/api@1.9.0)(@playwright/test@1.53.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.77.4))(react@18.3.1)
node-mocks-http:
specifier: ^1.17.2
version: 1.17.2(@types/node@22.13.14)
postcss:
specifier: ^8.5.3
version: 8.5.3
@@ -3282,9 +3288,6 @@ packages:
'@deno/eszip@0.83.0':
resolution: {integrity: sha512-gTKYMQ+uv20IUJuEBYkjovMPflFjX7caJ8cwA/sZVqic0L/PFP2gZMFt/GiCHc8eVejhlJLGxg0J4qehDq/f2A==}
'@deno/eszip@0.84.0':
resolution: {integrity: sha512-kfTiJ3jYWy57gV/jjd2McRZdfn2dXHxR3UKL6HQksLAMEmRILHo+pZmN1PAjj8UxQiTBQbybsNHGLaqgHeVntQ==}
'@deno/shim-deno-test@0.5.0':
resolution: {integrity: sha512-4nMhecpGlPi0cSzT67L+Tm+GOJqvuk8gqHBziqcUQOarnuIax1z96/gJHCSIz2Z0zhxE6Rzwb3IZXPtFh51j+w==}
@@ -4499,11 +4502,17 @@ packages:
autoprefixer: ^10.0.2
postcss: ^8.0.9
'@mjackson/headers@0.11.1':
resolution: {integrity: sha512-uXXhd4rtDdDwkqAuGef1nuafkCa1NlTmEc1Jzc0NL4YiA1yON1NFXuqJ3hOuKvNKQwkiDwdD+JJlKVyz4dunFA==}
'@mjackson/multipart-parser@0.10.1':
resolution: {integrity: sha512-cHMD6+ErH/DrEfC0N6Ru/+1eAdavxdV0C35PzSb5/SD7z3XoaDMc16xPJcb8CahWjSpqHY+Too9sAb6/UNuq7A==}
'@mjackson/node-fetch-server@0.2.0':
resolution: {integrity: sha512-EMlH1e30yzmTpGLQjlFmaDAjyOeZhng1/XCd7DExR8PNAnG/G1tyruZxEoUe11ClnwGhGrtsdnyyUx1frSzjng==}
'@modelcontextprotocol/sdk@1.12.1':
resolution: {integrity: sha512-KG1CZhZfWg+u8pxeM/mByJDScJSrjjxLc8fwQqbsS8xCjBmQfMNEBTotYdNanKekepnfRI85GtgQlctLFpcYPw==}
'@modelcontextprotocol/sdk@1.18.0':
resolution: {integrity: sha512-JvKyB6YwS3quM+88JPR0axeRgvdDu3Pv6mdZUy+w4qVkCzGgumb9bXG/TmtDRQv+671yaofVfXSQmFLlWU5qPQ==}
engines: {node: '>=18'}
'@monaco-editor/loader@1.4.0':
@@ -8115,13 +8124,16 @@ packages:
'@supabase/functions-js@2.4.4':
resolution: {integrity: sha512-WL2p6r4AXNGwop7iwvul2BvOtuJ1YQy8EbOd0dhG1oN1q8el/BIRSFCFnWAMM/vJJlHWLi4ad22sKbKr9mvjoA==}
'@supabase/mcp-server-supabase@0.4.4':
resolution: {integrity: sha512-GYgd4R+TTnQICjLxmdW0RRQREqG8Ix+1f9D8kroPASt25p/F60ohD8jPx53l7ym3qjb05Jy5tpJW2pss+ifV5g==}
'@supabase/mcp-server-supabase@0.5.5':
resolution: {integrity: sha512-te1XM2i+h3NBUgJ/8z9PkNCKaJ268VzFI3Qx5RA97s8eGtH94NyPy3lOIZAh3BFAOFHDpcB7Mn1b0oCTGFxg5g==}
hasBin: true
'@supabase/mcp-utils@0.2.1':
resolution: {integrity: sha512-T3LEAEKXOxHGVzhPvxqbAYbxluUKNxQpFnYVyRIazQJOQzZ03tCg+pp3LUYQi0HkWPIo+u+AgtULJVEvgeNr/Q==}
'@supabase/mcp-utils@0.2.2':
resolution: {integrity: sha512-hg4IR1iw2k3zdCiB5abvROSsVK/rOdUoyai3N97uG7c3NSQjWp0M6xPJEoH4TJE63pwY0oTc4eQAjXSmTlNK4Q==}
'@supabase/node-fetch@2.6.15':
resolution: {integrity: sha512-1ibVeYUacxWYi9i0cf5efil6adJ9WRyZBLivgjs+AUpewx1F3xPi7gLgaASI2SmIQxPoCEjAsLAzKPgMJVgOUQ==}
engines: {node: 4.x || >=6.0.0}
@@ -9239,6 +9251,10 @@ packages:
abstract-logging@2.0.1:
resolution: {integrity: sha512-2BjRTZxTPvheOvGbBslFSYOUkr+SjPtOnrLP33f+VIWLzezQpZcqVg7ja3L4dBXmzzgwT+a029jRx5PCi3JuiA==}
accepts@1.3.8:
resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==}
engines: {node: '>= 0.6'}
accepts@2.0.0:
resolution: {integrity: sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==}
engines: {node: '>= 0.6'}
@@ -10196,6 +10212,10 @@ packages:
constant-case@3.0.4:
resolution: {integrity: sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ==}
content-disposition@0.5.4:
resolution: {integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==}
engines: {node: '>= 0.6'}
content-disposition@1.0.0:
resolution: {integrity: sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==}
engines: {node: '>= 0.6'}
@@ -10682,6 +10702,10 @@ packages:
resolution: {integrity: sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==}
engines: {node: '>=0.10'}
depd@1.1.2:
resolution: {integrity: sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==}
engines: {node: '>= 0.6'}
depd@2.0.0:
resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==}
engines: {node: '>= 0.8'}
@@ -13677,6 +13701,10 @@ packages:
peerDependencies:
esbuild: ^0.25.2
media-typer@0.3.0:
resolution: {integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==}
engines: {node: '>= 0.6'}
media-typer@1.1.0:
resolution: {integrity: sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==}
engines: {node: '>= 0.8'}
@@ -13695,6 +13723,9 @@ packages:
resolution: {integrity: sha512-S3UwM3yj5mtUSEfP41UZmt/0SCoVYUcU1rkXv+BQ5Ig8ndL4sPoJNBUJERafdPb5jjHJGuMgytgKvKIf58XNBw==}
engines: {node: '>= 0.10.0'}
merge-descriptors@1.0.3:
resolution: {integrity: sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==}
merge-descriptors@2.0.0:
resolution: {integrity: sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==}
engines: {node: '>=18'}
@@ -13721,6 +13752,10 @@ packages:
meshoptimizer@0.18.1:
resolution: {integrity: sha512-ZhoIoL7TNV4s5B6+rx5mC//fw8/POGyNxS/DZyCJeiZ12ScLfVwRE/GfsxwiTkMYYD5DmK2/JXnEVXqL4rF+Sw==}
methods@1.1.2:
resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==}
engines: {node: '>= 0.6'}
micromark-core-commonmark@1.1.0:
resolution: {integrity: sha512-BgHO1aRbolh2hcrzL2d1La37V0Aoz73ymF8rAcKnohLy93titmv62E0gP8Hrx9PKcKrqCZ1BbLGbP3bEhoXYlw==}
@@ -14379,6 +14414,18 @@ packages:
node-mock-http@1.0.0:
resolution: {integrity: sha512-0uGYQ1WQL1M5kKvGRXWQ3uZCHtLTO8hln3oBjIusM75WoesZ909uQJs/Hb946i2SS+Gsrhkaa6iAO17jRIv6DQ==}
node-mocks-http@1.17.2:
resolution: {integrity: sha512-HVxSnjNzE9NzoWMx9T9z4MLqwMpLwVvA0oVZ+L+gXskYXEJ6tFn3Kx4LargoB6ie7ZlCLplv7QbWO6N+MysWGA==}
engines: {node: '>=14'}
peerDependencies:
'@types/express': ^4.17.21 || ^5.0.0
'@types/node': '*'
peerDependenciesMeta:
'@types/express':
optional: true
'@types/node':
optional: true
node-pty@1.0.0:
resolution: {integrity: sha512-wtBMWWS7dFZm/VgqElrTvtfMq4GzJ6+edFI0Y0zyzygUSZMgZdraDUMUhCIvkjhJjme15qWmbyJbtAx4ot4uZA==}
@@ -14532,9 +14579,6 @@ packages:
resolution: {integrity: sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==}
engines: {node: '>= 6'}
object-inspect@1.13.1:
resolution: {integrity: sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==}
object-inspect@1.13.4:
resolution: {integrity: sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==}
engines: {node: '>= 0.4'}
@@ -16420,10 +16464,6 @@ packages:
resolution: {integrity: sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==}
engines: {node: '>= 0.4'}
side-channel@1.0.6:
resolution: {integrity: sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==}
engines: {node: '>= 0.4'}
side-channel@1.1.0:
resolution: {integrity: sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==}
engines: {node: '>= 0.4'}
@@ -17357,6 +17397,10 @@ packages:
resolution: {integrity: sha512-G6zXWS1dLj6eagy6sVhOMQiLtJdxQBHIA9Z6HFUNLOlr6MFOgzV8wvmidtPONfPtEUv0uZsy77XJNzTAfwPDaA==}
engines: {node: '>=16'}
type-is@1.6.18:
resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==}
engines: {node: '>= 0.6'}
type-is@2.0.1:
resolution: {integrity: sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==}
engines: {node: '>= 0.6'}
@@ -20160,11 +20204,6 @@ snapshots:
'@deno/shim-deno': 0.18.2
undici: 6.21.2
'@deno/eszip@0.84.0':
dependencies:
'@deno/shim-deno': 0.18.2
undici: 6.21.2
'@deno/shim-deno-test@0.5.0': {}
'@deno/shim-deno@0.18.2':
@@ -21786,15 +21825,22 @@ snapshots:
lodash: 4.17.21
postcss: 8.5.3
'@mjackson/headers@0.11.1': {}
'@mjackson/multipart-parser@0.10.1':
dependencies:
'@mjackson/headers': 0.11.1
'@mjackson/node-fetch-server@0.2.0': {}
'@modelcontextprotocol/sdk@1.12.1(supports-color@8.1.1)':
'@modelcontextprotocol/sdk@1.18.0(supports-color@8.1.1)':
dependencies:
ajv: 6.12.6
content-type: 1.0.5
cors: 2.8.5
cross-spawn: 7.0.6
eventsource: 3.0.7
eventsource-parser: 3.0.6
express: 5.1.0(supports-color@8.1.1)
express-rate-limit: 7.5.0(express@5.1.0(supports-color@8.1.1))
pkce-challenge: 5.0.0
@@ -26167,11 +26213,11 @@ snapshots:
dependencies:
'@supabase/node-fetch': 2.6.15
'@supabase/mcp-server-supabase@0.4.4(supports-color@8.1.1)':
'@supabase/mcp-server-supabase@0.5.5(supports-color@8.1.1)':
dependencies:
'@deno/eszip': 0.84.0
'@modelcontextprotocol/sdk': 1.12.1(supports-color@8.1.1)
'@supabase/mcp-utils': 0.2.1(supports-color@8.1.1)
'@mjackson/multipart-parser': 0.10.1
'@modelcontextprotocol/sdk': 1.18.0(supports-color@8.1.1)
'@supabase/mcp-utils': 0.2.2(supports-color@8.1.1)
common-tags: 1.8.2
graphql: 16.11.0
openapi-fetch: 0.13.8
@@ -26181,7 +26227,15 @@ snapshots:
'@supabase/mcp-utils@0.2.1(supports-color@8.1.1)':
dependencies:
'@modelcontextprotocol/sdk': 1.12.1(supports-color@8.1.1)
'@modelcontextprotocol/sdk': 1.18.0(supports-color@8.1.1)
zod: 3.25.76
zod-to-json-schema: 3.24.5(zod@3.25.76)
transitivePeerDependencies:
- supports-color
'@supabase/mcp-utils@0.2.2(supports-color@8.1.1)':
dependencies:
'@modelcontextprotocol/sdk': 1.18.0(supports-color@8.1.1)
zod: 3.25.76
zod-to-json-schema: 3.24.5(zod@3.25.76)
transitivePeerDependencies:
@@ -27989,6 +28043,11 @@ snapshots:
abstract-logging@2.0.1: {}
accepts@1.3.8:
dependencies:
mime-types: 2.1.35
negotiator: 0.6.3
accepts@2.0.0:
dependencies:
mime-types: 3.0.1
@@ -29075,6 +29134,10 @@ snapshots:
tslib: 2.8.1
upper-case: 2.0.2
content-disposition@0.5.4:
dependencies:
safe-buffer: 5.2.1
content-disposition@1.0.0:
dependencies:
safe-buffer: 5.2.1
@@ -29521,6 +29584,8 @@ snapshots:
denque@2.1.0: {}
depd@1.1.2: {}
depd@2.0.0: {}
dependency-graph@0.11.0: {}
@@ -29773,7 +29838,7 @@ snapshots:
is-string: 1.0.7
is-typed-array: 1.1.13
is-weakref: 1.0.2
object-inspect: 1.13.1
object-inspect: 1.13.4
object-keys: 1.1.1
object.assign: 4.1.5
regexp.prototype.flags: 1.5.2
@@ -31782,7 +31847,7 @@ snapshots:
dependencies:
es-errors: 1.3.0
hasown: 2.0.2
side-channel: 1.0.6
side-channel: 1.1.0
internal-slot@1.1.0:
dependencies:
@@ -33207,6 +33272,8 @@ snapshots:
transitivePeerDependencies:
- supports-color
media-typer@0.3.0: {}
media-typer@1.1.0: {}
memfs@4.14.1:
@@ -33222,6 +33289,8 @@ snapshots:
memorystream@0.3.1: {}
merge-descriptors@1.0.3: {}
merge-descriptors@2.0.0: {}
merge-stream@2.0.0: {}
@@ -33236,6 +33305,8 @@ snapshots:
meshoptimizer@0.18.1: {}
methods@1.1.2: {}
micromark-core-commonmark@1.1.0:
dependencies:
decode-named-character-reference: 1.0.2
@@ -34316,6 +34387,21 @@ snapshots:
node-mock-http@1.0.0: {}
node-mocks-http@1.17.2(@types/node@22.13.14):
dependencies:
accepts: 1.3.8
content-disposition: 0.5.4
depd: 1.1.2
fresh: 0.5.2
merge-descriptors: 1.0.3
methods: 1.1.2
mime: 1.6.0
parseurl: 1.3.3
range-parser: 1.2.1
type-is: 1.6.18
optionalDependencies:
'@types/node': 22.13.14
node-pty@1.0.0:
dependencies:
nan: 2.22.1
@@ -34488,8 +34574,6 @@ snapshots:
object-hash@3.0.0: {}
object-inspect@1.13.1: {}
object-inspect@1.13.4: {}
object-keys@1.1.1: {}
@@ -36735,7 +36819,7 @@ snapshots:
'@babel/core': 7.26.10(supports-color@8.1.1)
'@babel/parser': 7.27.0
'@babel/plugin-transform-typescript': 7.27.0(@babel/core@7.26.10(supports-color@8.1.1))(supports-color@8.1.1)
'@modelcontextprotocol/sdk': 1.12.1(supports-color@8.1.1)
'@modelcontextprotocol/sdk': 1.18.0(supports-color@8.1.1)
commander: 10.0.1
cosmiconfig: 8.3.6(typescript@5.9.2)
deepmerge: 4.3.1
@@ -36895,13 +36979,6 @@ snapshots:
object-inspect: 1.13.4
side-channel-map: 1.0.1
side-channel@1.0.6:
dependencies:
call-bind: 1.0.7
es-errors: 1.3.0
get-intrinsic: 1.3.0
object-inspect: 1.13.1
side-channel@1.1.0:
dependencies:
es-errors: 1.3.0
@@ -37920,6 +37997,11 @@ snapshots:
type-fest@4.30.0: {}
type-is@1.6.18:
dependencies:
media-typer: 0.3.0
mime-types: 2.1.35
type-is@2.0.1:
dependencies:
content-type: 1.0.5

View File

@@ -23,3 +23,5 @@ minimumReleaseAge: 10080
minimumReleaseAgeExclude:
- 'ai'
- '@ai-sdk/*'
- '@supabase/mcp-server-supabase'
- '@supabase/mcp-utils'