a new hope (#38893)

* a new hope

* run tests in ci against cli mode

* summary

* try vercel action to run e2e against studio self hosted preview

* believe

* debug

* gh pages artifact

* test

* rm pages step

* fix automation bypass missing

* continue on error

* only install necessary deps for CI

* fix bypass

* remove

* fail job if test fails

* disable customer query if is_platform false

* vercel check

* fix var name, make comment update instead

* check bypass on runtime

* add env var

* fix tests going to project ref instead of default

* fix

* better dates in comment

* Update E2E test workflow to include flaky test detection and improve summary output

* fix

* fix dumb mistake
Author: Jordi Enric
Date: 2025-09-23 12:02:23 +02:00
Committed by: GitHub
Parent: dc5ab444df
Commit: 25abebc32e
15 changed files with 817 additions and 134 deletions

View File

@@ -1,4 +1,4 @@
name: Studio E2E Tests
name: Selfhosted Studio E2E Tests
on:
push:
branches: [master]
@@ -22,28 +22,33 @@ concurrency:
permissions:
contents: write
pull-requests: write
jobs:
test:
timeout-minutes: 60
runs-on: ubuntu-latest
# Make the job non-blocking
continue-on-error: true
# Require approval only for external contributors
environment: ${{ github.event.pull_request.author_association != 'MEMBER' && 'Studio E2E Tests' || '' }}
env:
EMAIL: ${{ secrets.CI_EMAIL }}
PASSWORD: ${{ secrets.CI_PASSWORD }}
PROJECT_REF: ${{ secrets.CI_PROJECT_REF }}
NEXT_PUBLIC_IS_PLATFORM: true
NEXT_PUBLIC_API_URL: https://api.supabase.green
VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }}
VERCEL_PROJECT_ID: ${{ secrets.VERCEL_STUDIO_HOSTED_PROJECT_ID }}
# Studio Self-Hosted project ID
VERCEL_PROJECT_ID: prj_CnatEuo7L6bUZAgmujMrm5P1rxtv
NEXT_PUBLIC_HCAPTCHA_SITE_KEY: 10000000-ffff-ffff-ffff-000000000001
VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO: ${{ secrets.VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO }}
steps:
- uses: actions/checkout@v4
- name: Verify Vercel bypass secret exists
run: |
if [ -z "${{ secrets.VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO }}" ]; then
echo "Required secret VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO is not set" >&2
exit 1
fi
- uses: pnpm/action-setup@v4
name: Install pnpm
with:
@@ -57,13 +62,16 @@ jobs:
- name: Install dependencies
run: pnpm i
# Deploy a preview to Vercel (CLI mode) and capture the URL
- name: Install Vercel CLI
run: pnpm add --global vercel@latest
- name: Pull Vercel Environment Information (Preview)
run: vercel pull --yes --environment=preview --token=${{ secrets.VERCEL_TOKEN }}
- name: Build Project Artifacts for Vercel
- name: Build Project Artifacts for Vercel (is_platform=false)
env:
NEXT_PUBLIC_IS_PLATFORM: false
run: vercel build --token=${{ secrets.VERCEL_TOKEN }}
- name: Deploy Project to Vercel and Get URL
@@ -74,13 +82,16 @@ jobs:
echo "DEPLOY_URL=$DEPLOY_URL" >> $GITHUB_OUTPUT
- name: Install Playwright Browsers
run: pnpm -C e2e/studio exec playwright install --with-deps
run: pnpm -C e2e/studio exec playwright install chromium --with-deps --only-shell
- name: Run Playwright tests
- name: 🚀 Run Playwright tests against Vercel Preview
id: playwright
continue-on-error: true
env:
AUTHENTICATION: true
STUDIO_URL: ${{ steps.deploy_vercel.outputs.DEPLOY_URL }}/dashboard
AUTHENTICATION: false
STUDIO_URL: ${{ steps.deploy_vercel.outputs.DEPLOY_URL }}
API_URL: ${{ steps.deploy_vercel.outputs.DEPLOY_URL }}
VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO: ${{ secrets.VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO }}
run: pnpm e2e
- uses: actions/upload-artifact@v4
@@ -91,3 +102,93 @@ jobs:
e2e/studio/playwright-report/
e2e/studio/test-results/
retention-days: 7
- name: Prepare summary (outputs)
if: always()
id: summarize
uses: actions/github-script@v7
with:
script: |
const fs = require('fs')
const p = 'e2e/studio/test-results/test-results.json'
// Initialize a summary object to hold test statistics.
let s={total:0,passed:0,failed:0,skipped:0,timedOut:0,interrupted:0,flaky:0,durationMs:0,note:''}
try {
const data = JSON.parse(fs.readFileSync(p,'utf8'))
// Recursively walk through the test suites to process each test.
const walk=suite=>{
if(!suite)return;
suite.specs?.forEach(spec=>{
spec.tests?.forEach(test=>{
s.total++;
// Get the last result of the test, as tests can be retried.
const lastResult = test.results[test.results.length - 1];
s.durationMs += lastResult.duration || 0;
// A test is considered flaky if it has more than one run and the final status is 'passed'.
const isFlaky = test.results.length > 1 && lastResult.status === 'passed'
// Count each test exactly once, under 'flaky' rather than 'passed' when it needed a retry.
const status = isFlaky ? 'flaky' : lastResult.status
s[status] = (s[status]||0)+1;
})
})
suite.suites?.forEach(walk)
}
data.suites?.forEach(walk)
} catch { s.note='No JSON report found or parse error.' }
// Generate the markdown for the summary comment.
const md = s.note ? `Note: ${s.note}` : `- Total: ${s.total}\n- Passed: ${s.passed||0}\n- Failed: ${s.failed||0}\n- Skipped: ${s.skipped||0}\n- Timed out: ${s.timedOut||0}\n- Interrupted: ${s.interrupted||0}\n- Flaky: ${s.flaky||0}\n- Duration: ${(s.durationMs/1000).toFixed(1)}s`
// Set the summary and flaky_count as outputs for subsequent steps.
core.setOutput('summary', md)
core.setOutput('flaky_count', s.flaky)
- name: Comment summary on PR
if: always() && github.event_name == 'pull_request'
uses: actions/github-script@v7
with:
script: |
const owner = context.repo.owner
const repo = context.repo.repo
const issue_number = context.issue.number
const summary = `${{ steps.summarize.outputs.summary }}`.replace(/^"|"$/g,'')
const runUrl = `https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}`
const marker = '<!-- studio-e2e-summary -->'
const now = new Date()
const weekday = now.toLocaleString('en-US', { weekday: 'long', timeZone: 'UTC' })
const day = now.toLocaleString('en-US', { day: 'numeric', timeZone: 'UTC' })
const month = now.toLocaleString('en-US', { month: 'long', timeZone: 'UTC' })
const year = now.toLocaleString('en-US', { year: 'numeric', timeZone: 'UTC' })
const time = now.toLocaleTimeString('en-US', {
hour: '2-digit',
minute: '2-digit',
second: '2-digit',
hour12: false,
timeZone: 'UTC',
})
const date = `${weekday} ${day}, ${month}, ${year} ${time} (UTC)`
const body = [
marker,
`**Studio E2E Results**`,
'',
summary,
'',
`Artifacts: ${runUrl}`,
'',
`Last updated: ${date}`
].join('\n')
const { data: comments } = await github.rest.issues.listComments({ owner, repo, issue_number, per_page: 100 })
const existing = comments.find(c => c.body && c.body.includes(marker))
if (existing) {
await github.rest.issues.updateComment({ owner, repo, comment_id: existing.id, body })
} else {
await github.rest.issues.createComment({ owner, repo, issue_number, body })
}
- name: Fail job if tests failed
if: steps.playwright.outcome != 'success' || steps.summarize.outputs.flaky_count > 0
run: |
echo "E2E tests failed" >&2
exit 1
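
For reference, the summarize step above walks the JSON report that Playwright's JSON reporter writes to e2e/studio/test-results/test-results.json. A rough sketch of the fields it actually reads (simplified; the real report contains more data, and nesting depth varies with describe blocks):

// Simplified shape of the Playwright JSON report that walk() traverses.
// The top-level report typically contains one suite per spec file.
interface JsonReportResult {
  status: 'passed' | 'failed' | 'skipped' | 'timedOut' | 'interrupted'
  duration: number // milliseconds for this attempt
}

interface JsonReportSuite {
  suites?: JsonReportSuite[] // nested describe blocks
  specs?: {
    tests?: {
      results: JsonReportResult[] // one entry per attempt; retries append entries
    }[]
  }[]
}

// Flakiness as defined by the step: retried at least once and passing on the final attempt.
const isFlaky = (results: JsonReportResult[]): boolean =>
  results.length > 1 && results[results.length - 1].status === 'passed'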

View File

@@ -57,7 +57,7 @@ export function ReportChartUpsell({
onMouseLeave={() => setIsHoveringUpgrade(false)}
className="mt-4"
>
<Link href={`/org/${orgSlug}/billing?panel=subscriptionPlan&source=reports`}>
<Link href={`/org/${orgSlug || '_'}/billing?panel=subscriptionPlan&source=reports`}>
Upgrade to{' '}
<span className="capitalize">
{!!report.availableIn?.length ? report.availableIn[0] : 'Pro'}

View File

@@ -5,6 +5,7 @@ import { get, handleError } from 'data/fetchers'
import { useAsyncCheckPermissions } from 'hooks/misc/useCheckPermissions'
import type { ResponseError } from 'types'
import { organizationKeys } from './keys'
import { IS_PLATFORM } from 'common'
export type OrganizationCustomerProfileVariables = {
slug?: string
@@ -52,7 +53,7 @@ export const useOrganizationCustomerProfileQuery = <TData = OrganizationCustomer
organizationKeys.customerProfile(slug),
({ signal }) => getOrganizationCustomerProfile({ slug }, signal),
{
enabled: enabled && canReadCustomerProfile && typeof slug !== 'undefined',
enabled: IS_PLATFORM && enabled && canReadCustomerProfile && typeof slug !== 'undefined',
...options,
}
)

View File

@@ -347,7 +347,7 @@ function createTabsState(projectRef: string) {
router.push(`/project/${router.query.ref}/editor`)
break
default:
router.push(`/project/${router.query.ref}/${editor}`)
router.push(`/project/${router.query.ref}/${editor === 'table' ? 'editor' : 'sql'}`)
}
}
}

View File

@@ -1,13 +1,21 @@
import path from 'path'
const toBoolean = (value?: string) => {
if (value == null) return false
const normalized = value.trim().toLowerCase()
return normalized === 'true'
}
export const env = {
STUDIO_URL: process.env.STUDIO_URL,
API_URL: process.env.API_URL || 'https://api.supabase.green',
AUTHENTICATION: process.env.AUTHENTICATION,
AUTHENTICATION: toBoolean(process.env.AUTHENTICATION),
EMAIL: process.env.EMAIL,
PASSWORD: process.env.PASSWORD,
PROJECT_REF: process.env.PROJECT_REF || 'default',
IS_PLATFORM: process.env.IS_PLATFORM || 'false',
VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO:
process.env.VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO || 'false',
}
export const STORAGE_STATE_PATH = path.join(__dirname, './playwright/.auth/user.json')
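
With AUTHENTICATION now parsed through toBoolean, the flag is a real boolean instead of a raw string (a string is truthy even when set to 'false'), so checks such as the storageState condition in the Playwright config behave as intended. A minimal sketch of the parsing semantics (parseFlag here is just an illustrative stand-in for toBoolean):

// Sketch: only the literal string "true" (any casing, surrounding whitespace ignored) enables a flag.
const parseFlag = (value?: string): boolean => value?.trim().toLowerCase() === 'true'

console.log(parseFlag('true'))    // true
console.log(parseFlag(' TRUE '))  // true
console.log(parseFlag('false'))   // false
console.log(parseFlag(undefined)) // false (unset env vars default to disabled)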

View File

@@ -25,6 +25,18 @@ setup('Global Setup', async ({ page }) => {
- Is Platform: ${IS_PLATFORM}
`)
/*
* If we're running in CI, make sure VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO
* is set (non-empty); the protected preview deployment can't be reached without it.
*/
const VERCEL_BYPASS = process.env.VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO
if (process.env.CI === 'true') {
if (!VERCEL_BYPASS || VERCEL_BYPASS.length === 0) {
throw new Error('VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO is not set')
}
}
/**
* Studio Check
*/

View File

@@ -2,7 +2,11 @@ import { expect, Page } from '@playwright/test'
import { env } from '../env.config'
import { test } from '../utils/test'
import { toUrl } from '../utils/to-url'
import { waitForApiResponse, waitForDatabaseToLoad } from '../utils/wait-for-response'
import {
waitForApiResponse,
waitForDatabaseToLoad,
createApiResponseWaiter,
} from '../utils/wait-for-response'
const databaseTableName = 'pw_database_table'
const databaseTableNameNew = 'pw_database_table_new'
@@ -61,13 +65,14 @@ const deleteTable = async (page: Page, tableName: string) => {
).toBeVisible()
}
test.describe('Database', () => {
test.describe.serial('Database', () => {
let page: Page
test.beforeAll(async ({ browser, ref }) => {
page = await browser.newPage()
const wait = createApiResponseWaiter(page, 'pg-meta', ref, 'query?key=entity-types-public-0')
await page.goto(toUrl(`/project/${ref}/editor`))
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=entity-types-public-0')
await wait
if ((await page.getByRole('button', { name: `View ${databaseTableName}` }).count()) > 0) {
await deleteTable(page, databaseTableName)
@@ -77,8 +82,9 @@ test.describe('Database', () => {
})
test.afterAll(async ({ ref }) => {
const wait = createApiResponseWaiter(page, 'pg-meta', ref, 'query?key=entity-types-public-0')
await page.goto(toUrl(`/project/${ref}/editor`))
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=entity-types-public-0')
await wait
if ((await page.getByRole('button', { name: `View ${databaseTableName}` }).count()) > 0) {
await deleteTable(page, databaseTableName)
}
@@ -86,10 +92,14 @@ test.describe('Database', () => {
test.describe('Schema Visualizer', () => {
test('actions works as expected', async ({ page, ref }) => {
const wait = createApiResponseWaiter(
page,
'pg-meta',
ref,
'tables?include_columns=true&included_schemas=public'
)
await page.goto(toUrl(`/project/${env.PROJECT_REF}/database/schemas?schema=public`))
// Wait for schema visualizer to load
await waitForDatabaseToLoad(page, ref)
await wait
// validates table and column exists
await page.waitForTimeout(500)
@@ -130,12 +140,16 @@ test.describe('Database', () => {
})
})
test.describe('Tables', () => {
test.describe.serial('Tables', () => {
test('actions works as expected', async ({ page, ref }) => {
const wait = createApiResponseWaiter(
page,
'pg-meta',
ref,
'tables?include_columns=true&included_schemas=public'
)
await page.goto(toUrl(`/project/${env.PROJECT_REF}/database/tables?schema=public`))
// Wait for database tables to be populated
await waitForDatabaseToLoad(page, ref)
await wait
// check new table button is present in public schema
await expect(page.getByRole('button', { name: 'New table' })).toBeVisible()
@@ -258,7 +272,7 @@ test.describe('Database', () => {
})
test.describe('Tables columns', () => {
test('everything works as expected', async ({ page, ref }) => {
test('can view, create, update, delete, and filter table columns', async ({ page, ref }) => {
await page.goto(toUrl(`/project/${env.PROJECT_REF}/database/tables?schema=public`))
// Wait for database tables to be populated
@@ -322,7 +336,7 @@ test.describe('Database', () => {
})
})
test.describe('Triggers', () => {
test.describe.serial('Triggers', () => {
test('actions works as expected', async ({ page, ref }) => {
await page.goto(toUrl(`/project/${env.PROJECT_REF}/database/triggers?schema=public`))
@@ -377,7 +391,7 @@ test.describe('Database', () => {
await page.getByRole('checkbox').nth(2).click()
await page.getByRole('button', { name: 'Choose a function to trigger' }).click()
await page.getByRole('paragraph').filter({ hasText: 'subscription_check_filters' }).click()
await page.getByRole('button', { name: 'Create trigger' }).click()
await page.getByRole('button', { name: /^(Create|Save) trigger$/ }).click()
// validate trigger creation
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=trigger-create')
@@ -395,7 +409,7 @@ test.describe('Database', () => {
await triggerRow.getByRole('button', { name: 'More options' }).click()
await page.getByRole('menuitem', { name: 'Edit trigger' }).click()
await page.getByRole('textbox', { name: 'Name of trigger' }).fill(databaseTriggerNameUpdated)
await page.getByRole('button', { name: 'Create trigger' }).click()
await page.getByRole('button', { name: /^(Create|Save) trigger$/ }).click()
// validate trigger update
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=trigger-update')
@@ -441,7 +455,6 @@ test.describe('Database', () => {
await page.getByTestId('schema-selector').click()
await page.getByPlaceholder('Find schema...').fill('auth')
await page.getByRole('option', { name: 'auth' }).click()
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=indexes-auth')
await page.waitForTimeout(500)
expect(page.getByText('sso_providers_pkey')).toBeVisible()
expect(page.getByText('confirmation_token_idx')).toBeVisible()
@@ -579,12 +592,13 @@ test.describe('Database', () => {
})
})
test.describe('Database Enumerated Types', () => {
test.describe.serial('Database Enumerated Types', () => {
test('actions works as expected', async ({ page, ref }) => {
await page.goto(toUrl(`/project/${env.PROJECT_REF}/database/types?schema=public`))
// Wait for database enumerated types to be populated
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=schemas')
// await waitForApiResponse(page, 'pg-meta', ref, 'query?key=schemas')
await page.waitForLoadState('networkidle')
// create new type button exists in public schema
await expect(page.getByRole('button', { name: 'Create type' })).toBeVisible()
@@ -593,23 +607,26 @@ test.describe('Database Enumerated Types', () => {
await page.getByTestId('schema-selector').click()
await page.getByPlaceholder('Find schema...').fill('auth')
await page.getByRole('option', { name: 'auth' }).click()
expect(page.getByText('factor_type')).toBeVisible()
expect(page.getByText('code_challenge_method')).toBeVisible()
await expect(page.getByText('factor_type')).toBeVisible()
await expect(page.getByText('code_challenge_method')).toBeVisible()
// create new type button does not exist in other schemas
expect(page.getByRole('button', { name: 'Create type' })).not.toBeVisible()
await expect(page.getByRole('button', { name: 'Create type' })).not.toBeVisible()
// filter by querying
await page.getByRole('textbox', { name: 'Search for a type' }).fill('code')
await page.waitForTimeout(500) // wait for enum types to be loaded
expect(page.getByText('factor_type')).not.toBeVisible()
expect(page.getByText('code_challenge_method')).toBeVisible()
await page.waitForTimeout(1000) // wait for enum types to be loaded
await expect(page.getByText('factor_type')).not.toBeVisible()
await expect(page.getByText('code_challenge_method')).toBeVisible()
})
test('CRUD operations works as expected', async ({ page, ref }) => {
const wait = createApiResponseWaiter(page, 'pg-meta', ref, 'query?key=schemas')
await page.goto(toUrl(`/project/${env.PROJECT_REF}/database/types?schema=public`))
// Wait for the schemas list to be populated
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=schemas')
await wait
// await page.waitForLoadState('networkidle')
// if enum exists, delete it.
await page.waitForTimeout(500)
@@ -661,12 +678,13 @@ test.describe('Database Enumerated Types', () => {
})
})
test.describe('Database Functions', () => {
test.describe.serial('Database Functions', () => {
test('actions works as expected', async ({ page, ref }) => {
await page.goto(toUrl(`/project/${env.PROJECT_REF}/database/functions?schema=public`))
// Wait for database functions to be populated
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=database-functions')
await page.waitForLoadState('networkidle')
// await waitForApiResponse(page, 'pg-meta', ref, 'query?key=database-functions')
// create a new function button exists in public schema
await expect(page.getByRole('button', { name: 'Create a new function' })).toBeVisible()
@@ -691,7 +709,8 @@ test.describe('Database Functions', () => {
await page.goto(toUrl(`/project/${env.PROJECT_REF}/database/functions?schema=public`))
// Wait for database functions to be populated
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=database-functions')
// await waitForApiResponse(page, 'pg-meta', ref, 'query?key=database-functions')
await page.waitForLoadState('networkidle')
// delete function if exists
if ((await page.getByRole('button', { name: databaseFunctionName }).count()) > 0) {
@@ -703,7 +722,7 @@ test.describe('Database Functions', () => {
.fill(databaseFunctionName)
await page.getByRole('button', { name: `Delete function ${databaseFunctionName}` }).click()
await expect(
page.getByText(`Successfully removed ${databaseFunctionName}`),
page.getByText(`Successfully removed function ${databaseFunctionName}`),
'Delete confirmation toast should be visible'
).toBeVisible({
timeout: 50000,
@@ -719,7 +738,7 @@ test.describe('Database Functions', () => {
END;`)
await page.waitForTimeout(500) // wait for text content to be visible
expect(await page.getByRole('presentation').textContent()).toBe(`BEGINEND;`)
await page.getByRole('button', { name: 'Confirm' }).click()
await page.getByRole('button', { name: 'Create function' }).click()
// validate function creation
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=functions-create')
@@ -737,7 +756,7 @@ END;`)
await functionRow.getByRole('button', { name: 'More options' }).click()
await page.getByRole('menuitem', { name: 'Edit function', exact: true }).click()
await page.getByRole('textbox', { name: 'Name of function' }).fill(databaseFunctionNameUpdated)
await page.getByRole('button', { name: 'Confirm' }).click()
await page.getByRole('button', { name: 'Save function' }).click()
// validate function update
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=functions-update')

View File

@@ -6,6 +6,8 @@ test.describe('Project', async () => {
test('Can navigate to project home page', async ({ page, ref }) => {
await page.goto(toUrl(`/project/${ref}`))
await expect(page.getByRole('link', { name: 'Tables' })).toBeVisible()
// The home page has 2 variants (classic and new). Both render an H1 heading.
// Assert on a stable, variant-agnostic selector.
await expect(page.getByRole('heading', { level: 1 })).toBeVisible()
})
})

View File

@@ -15,9 +15,36 @@ import { waitForApiResponseWithTimeout } from '../utils/wait-for-response-with-t
const tableNamePrefix = 'pw_table'
const columnName = 'pw_column'
const dismissToastsIfAny = async (page: Page) => {
const closeButtons = page.getByRole('button', { name: 'Close toast' })
const count = await closeButtons.count()
for (let i = 0; i < count; i++) {
await closeButtons.nth(i).click()
}
}
const createTable = async (page: Page, ref: string, tableName: string) => {
await page.getByRole('button', { name: 'New table', exact: true }).click()
await page.getByTestId('table-name-input').fill(tableName)
// Ensure no toast overlays block the dialog trigger
await dismissToastsIfAny(page)
const newTableBtn = page.getByRole('button', { name: 'New table', exact: true })
await expect(newTableBtn).toBeVisible()
await newTableBtn.click()
// If the "unable to find table" error state is showing,
// dismiss it via "Close tab" or "Head back" first
const closeTabBtn = page.getByRole('button', { name: 'Close tab' })
const headBackBtn = page.getByRole('button', { name: 'Head back' })
if ((await closeTabBtn.count()) > 0) {
await closeTabBtn.click()
}
if ((await headBackBtn.count()) > 0) {
await headBackBtn.click()
}
const nameInput = page.getByTestId('table-name-input')
await expect(nameInput).toBeVisible()
await nameInput.fill(tableName)
await page.getByTestId('created_at-extra-options').click()
await page.getByText('Is Nullable').click()
await page.getByTestId('created_at-extra-options').click({ force: true })
@@ -26,12 +53,10 @@ const createTable = async (page: Page, ref: string, tableName: string) => {
await page.getByText('Choose a column type...').click()
await page.getByRole('option', { name: 'text Variable-length' }).click()
await page.getByRole('button', { name: 'Save' }).click()
await waitForApiResponse(
page,
'pg-meta',
ref,
'tables?include_columns=true&included_schemas=public'
) // wait for table creation
// Wait specifically for tables list refresh instead of generic networkidle
await waitForApiResponseWithTimeout(page, (response) =>
response.url().includes('tables?include_columns=true&included_schemas=public')
)
// Also wait for the entity types list to refresh; this is best-effort since the response may already have completed before the table creation finished.
await waitForApiResponseWithTimeout(page, (response) =>
response.url().includes('query?key=entity-types-public-')
@@ -43,8 +68,10 @@ const createTable = async (page: Page, ref: string, tableName: string) => {
}
const deleteTable = async (page: Page, ref: string, tableName: string) => {
await page.getByLabel(`View ${tableName}`).nth(0).click()
await page.getByLabel(`View ${tableName}`).getByRole('button').nth(1).click()
const viewLocator = page.getByLabel(`View ${tableName}`)
if ((await viewLocator.count()) === 0) return
await viewLocator.nth(0).click()
await viewLocator.getByRole('button').nth(1).click({ force: true })
await page.getByText('Delete table').click()
await page.getByRole('checkbox', { name: 'Drop table with cascade?' }).click()
await page.getByRole('button', { name: 'Delete' }).click()
@@ -68,18 +95,13 @@ const deleteEnumIfExist = async (page: Page, ref: string, enumName: string) => {
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=', { method: 'POST' })
}
test.describe('table editor', () => {
test.describe.serial('table editor', () => {
let page: Page
test.beforeEach(async ({ ref }) => {
await resetLocalStorage(page, ref)
if (!page.url().includes('/editor')) {
await page.goto(toUrl(`/project/${ref}/editor?schema=public`))
}
// wait for all settings to load, and no more network request for 500ms
await page.waitForLoadState('networkidle')
await page.goto(toUrl(`/project/${ref}/editor?schema=public`))
await waitForTableToLoad(page, ref)
})
test.beforeAll(async ({ browser, ref }) => {
@@ -87,10 +109,10 @@ test.describe('table editor', () => {
await page.goto(toUrl(`/project/${ref}/editor?schema=public`))
await waitForTableToLoad(page, ref)
// Delete all tables with prefix pw_table
const tablesToDelete = await (
await page.getByRole('button', { name: 'View' }).allTextContents()
).filter((tableName) => tableName.startsWith(tableNamePrefix))
// Delete all tables with prefix pw_table (ensure page is stable first)
const viewButtons = page.getByRole('button', { name: /^View / })
const names = (await viewButtons.allTextContents()).map((t) => t.replace(/^View\s+/, '').trim())
const tablesToDelete = names.filter((tableName) => tableName.startsWith(tableNamePrefix))
for (const tableName of tablesToDelete) {
await deleteTable(page, ref, tableName)
@@ -101,16 +123,14 @@ test.describe('table editor', () => {
test.afterAll(async ({ ref }) => {
await resetLocalStorage(page, ref)
// Only navigate and wait if not already at /editor
if (!page.url().includes('/editor')) {
await page.goto(toUrl(`/project/${ref}/editor?schema=public`))
await waitForTableToLoad(page, ref) // wait for table data to load
}
// Always navigate explicitly to editor and wait for tables to be loaded
await page.goto(toUrl(`/project/${ref}/editor?schema=public`))
await waitForTableToLoad(page, ref)
// Delete all tables with prefix pw_table
const tablesToDelete = await (
await page.getByRole('button', { name: 'View' }).allTextContents()
).filter((tableName) => tableName.startsWith(tableNamePrefix))
const viewButtons = page.getByRole('button', { name: /^View / })
const names = (await viewButtons.allTextContents()).map((t) => t.replace(/^View\s+/, '').trim())
const tablesToDelete = names.filter((tableName) => tableName.startsWith(tableNamePrefix))
for (const tableName of tablesToDelete) {
await deleteTable(page, ref, tableName)
@@ -174,7 +194,7 @@ test.describe('table editor', () => {
})
test('switching schemas work as expected', async ({ ref }) => {
const authTableSso = 'sso_provider'
const authTableSso = 'identities'
const authTableMfa = 'mfa_factors'
// change schema from public to auth
@@ -358,10 +378,16 @@ test.describe('table editor', () => {
.getByRole('button')
.nth(1)
.click()
await page.getByRole('menuitem', { name: 'Export data' }).click()
const downloadCsvPromise = page.waitForEvent('download')
await page.getByRole('menuitem', { name: 'Export table as CSV' }).click()
const downloadCsv = await downloadCsvPromise
// Open the nested export submenu via hover and wait for it to report an open state (more stable in headless)
const exportDataItemCsv = page.getByRole('menuitem', { name: 'Export data' })
await expect(exportDataItemCsv).toBeVisible()
await exportDataItemCsv.hover()
await expect(exportDataItemCsv).toHaveAttribute('data-state', /open/)
await expect(page.getByRole('menuitem', { name: 'Export table as CSV' })).toBeVisible()
const [downloadCsv] = await Promise.all([
page.waitForEvent('download'),
page.getByRole('menuitem', { name: 'Export table as CSV' }).click(),
])
expect(downloadCsv.suggestedFilename()).toContain('.csv')
const downloadCsvPath = await downloadCsv.path()
@@ -377,16 +403,32 @@ test.describe('table editor', () => {
})
fs.unlinkSync(downloadCsvPath)
// Close submenu and parent menu to avoid UI leftovers
await page.keyboard.press('Escape')
await page.keyboard.press('Escape')
await page.waitForTimeout(500)
// expect to NOT find the Export data menu item
await expect(page.getByRole('menuitem', { name: 'Export data' })).not.toBeVisible()
// test export data via SQL + verify
await page
.getByRole('button', { name: `View ${tableNameUpdated}`, exact: true })
.getByRole('button')
.nth(1)
.click()
await page.getByRole('menuitem', { name: 'Export data' }).click()
const downloadSqlPromise = page.waitForEvent('download')
await page.getByRole('menuitem', { name: 'Export table as SQL' }).click()
const downloadSql = await downloadSqlPromise
// Open the nested export submenu via hover and wait for it to report an open state (more stable in headless)
const exportDataItemSql = page.getByRole('menuitem', { name: 'Export data' })
await expect(exportDataItemSql).toBeVisible()
await exportDataItemSql.hover({
force: true,
})
await expect(exportDataItemSql).toHaveAttribute('data-state', /open/)
await expect(page.getByRole('menuitem', { name: 'Export table as SQL' })).toBeVisible()
const [downloadSql] = await Promise.all([
page.waitForEvent('download'),
page.getByRole('menuitem', { name: 'Export table as SQL' }).click(),
])
expect(downloadSql.suggestedFilename()).toContain('.sql')
const downloadSqlPath = await downloadSql.path()
const sqlContent = fs.readFileSync(downloadSqlPath, 'utf-8')
@@ -398,53 +440,128 @@ test.describe('table editor', () => {
expect(sqlContent).toContain('123')
fs.unlinkSync(downloadSqlPath)
// Close submenu and parent menu to avoid UI leftovers
await page.keyboard.press('Escape')
await page.keyboard.press('Escape')
await page.waitForTimeout(500)
// test export data via CLI
await page
.getByRole('button', { name: `View ${tableNameUpdated}`, exact: true })
.getByRole('button')
.nth(1)
.click()
await page.getByRole('menuitem', { name: 'Export data' }).click()
const exportDataItemCli = page.getByRole('menuitem', { name: 'Export data' })
await expect(exportDataItemCli).toBeVisible()
await exportDataItemCli.hover({
force: true,
})
await expect(page.getByRole('menuitem', { name: 'Export table via CLI' })).toBeVisible()
await page.getByRole('menuitem', { name: 'Export table via CLI' }).click()
await expect(page.getByRole('heading', { name: 'Export table data via CLI' })).toBeVisible()
await page.getByRole('button', { name: 'Close' }).first().click()
// sort rows
await page.getByRole('button', { name: 'Sort' }).click()
await page.getByRole('button', { name: 'Pick a column to sort by' }).click()
await page.getByRole('menuitem', { name: columnNameUpdated }).click()
await page.getByRole('button', { name: 'Apply sorting' }).click()
await waitForGridDataToLoad(page, ref) // wait for sorted table data to load
await page.getByRole('button', { name: 'Sorted by 1 rule' }).click()
// Ensure all menus/dialogs are closed before continuing
await page.keyboard.press('Escape')
await page.keyboard.press('Escape')
await page.waitForTimeout(500)
})
// verify sorted row content
await page.waitForTimeout(500) // may take some time for sorting to complete
expect(await page.getByRole('gridcell').nth(3).textContent()).toBe('123')
expect(await page.getByRole('gridcell').nth(8).textContent()).toBe('456')
expect(await page.getByRole('gridcell').nth(13).textContent()).toBe('789')
test('filtering rows works as expected', async ({ ref }) => {
const tableName = 'pw_table_filtering'
const colName = 'pw_column'
if (!page.url().includes('/editor')) {
await page.goto(toUrl(`/project/${ref}/editor?schema=public`))
await waitForTableToLoad(page, ref)
}
await createTable(page, ref, tableName)
await page.getByRole('button', { name: `View ${tableName}`, exact: true }).click()
await page.waitForURL(/\/editor\/\d+\?schema=public$/)
for (const value of ['789', '456', '123']) {
await page.getByTestId('table-editor-insert-new-row').click()
await page.getByRole('menuitem', { name: 'Insert row Insert a new row' }).click()
await page.getByTestId(`${colName}-input`).fill(value)
await page.getByTestId('action-bar-save-row').click()
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=', { method: 'POST' })
}
// filter rows
await page.getByRole('button', { name: 'Filter', exact: true }).click()
await page.getByRole('button', { name: 'Add filter' }).click()
await page.getByRole('button', { name: 'id' }).click()
await page.getByRole('menuitem', { name: columnNameUpdated }).click()
await page.getByRole('menuitem', { name: colName }).click()
await page.getByRole('textbox', { name: 'Enter a value' }).fill('789')
await page.getByRole('button', { name: 'Apply filter' }).click()
await waitForGridDataToLoad(page, ref) // wait for filtered table data to load
await page.waitForTimeout(500) // may take some time for filtering to complete
await page.getByRole('button', { name: 'Filtered by 1 rule' }).click()
await waitForGridDataToLoad(page, ref)
await page.waitForTimeout(500)
await page.getByRole('button', { name: 'Filtered by 1 rule' }).click({ force: true })
await expect(page.getByRole('gridcell', { name: '789' })).toBeVisible()
await expect(page.getByRole('gridcell', { name: '456' })).not.toBeVisible()
await expect(page.getByRole('gridcell', { name: '123' })).not.toBeVisible()
// view table definition
await page.getByText('definition', { exact: true }).click()
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=table-definition-') // wait for table definition
await expect(page.locator('.view-lines')).toContainText(
`create table public.${tableNameUpdated} ( id bigint generated by default as identity not null, created_at timestamp with time zone null default now(), ${columnNameUpdated} text null, constraint ${tableNameGridEditor}_pkey primary key (id)) TABLESPACE pg_default;`
)
await deleteTable(page, ref, tableName)
})
await deleteTable(page, ref, tableNameUpdated)
test('view table definition works as expected', async ({ ref }) => {
const tableName = 'pw_table_definition'
const colName = 'pw_column'
if (!page.url().includes('/editor')) {
await page.goto(toUrl(`/project/${ref}/editor?schema=public`))
await waitForTableToLoad(page, ref)
}
await createTable(page, ref, tableName)
await page.getByRole('button', { name: `View ${tableName}`, exact: true }).click()
await page.waitForURL(/\/editor\/\d+\?schema=public$/)
await page.getByText('definition', { exact: true }).click()
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=table-definition-')
await expect(page.locator('.view-lines')).toContainText(
`create table public.${tableName} ( id bigint generated by default as identity not null, created_at timestamp with time zone null default now(), ${colName} text null, constraint ${tableName}_pkey primary key (id)) TABLESPACE pg_default;`
)
await deleteTable(page, ref, tableName)
})
test('sorting rows works as expected', async ({ ref }) => {
const tableName = 'pw_table_sorting'
const colName = 'pw_column'
// Ensure we're on editor
if (!page.url().includes('/editor')) {
await page.goto(toUrl(`/project/${ref}/editor?schema=public`))
await waitForTableToLoad(page, ref)
}
// Create a small table and three rows
await createTable(page, ref, tableName)
await page.getByRole('button', { name: `View ${tableName}`, exact: true }).click()
await page.waitForURL(/\/editor\/\d+\?schema=public$/)
for (const value of ['789', '456', '123']) {
await page.getByTestId('table-editor-insert-new-row').click()
await page.getByRole('menuitem', { name: 'Insert row Insert a new row' }).click()
await page.getByTestId(`${colName}-input`).fill(value)
await page.getByTestId('action-bar-save-row').click()
await waitForApiResponse(page, 'pg-meta', ref, 'query?key=', { method: 'POST' })
}
// Apply sorting
await page.getByRole('button', { name: 'Sort', exact: true }).click()
await page.getByRole('button', { name: 'Pick a column to sort by' }).click()
await page.getByRole('menuitem', { name: colName }).click()
await page.getByRole('button', { name: 'Apply sorting' }).click()
await waitForGridDataToLoad(page, ref)
await page.getByRole('button', { name: 'Sorted by 1 rule' }).click()
// Verify the rows are sorted ascending (lexicographically, since the values are text)
await page.waitForTimeout(500)
expect(await page.getByRole('gridcell').nth(3).textContent()).toBe('123')
expect(await page.getByRole('gridcell').nth(8).textContent()).toBe('456')
expect(await page.getByRole('gridcell').nth(13).textContent()).toBe('789')
// Cleanup
await deleteTable(page, ref, tableName)
})
test('importing, pagination and large data actions works as expected', async ({ ref }) => {
@@ -524,9 +641,10 @@ test.describe('table editor', () => {
await page.getByRole('row', { name: 'value 6 to export' }).getByRole('checkbox').click()
await page.getByRole('button', { name: 'Export' }).click()
const downloadSqlPromise = page.waitForEvent('download')
await page.getByRole('menuitem', { name: 'Export as SQL' }).click()
const downloadSql = await downloadSqlPromise
const [downloadSql] = await Promise.all([
page.waitForEvent('download'),
page.getByRole('menuitem', { name: 'Export as SQL' }).click(),
])
expect(downloadSql.suggestedFilename()).toContain('.sql')
const downloadSqlPath = await downloadSql.path()
const sqlContent = fs.readFileSync(downloadSqlPath, 'utf-8')
@@ -536,10 +654,16 @@ test.describe('table editor', () => {
await page.waitForTimeout(1000) // wait for event processing to complete
fs.unlinkSync(downloadSqlPath)
// Close menu to prevent overlap with next export
await page.keyboard.press('Escape')
await page.keyboard.press('Escape')
await page.waitForTimeout(500)
await page.getByRole('button', { name: 'Export' }).click()
const downloadJsonPromise = page.waitForEvent('download')
await page.getByRole('menuitem', { name: 'Export as JSON' }).click()
const downloadJson = await downloadJsonPromise
const [downloadJson] = await Promise.all([
page.waitForEvent('download'),
page.getByRole('menuitem', { name: 'Export as JSON' }).click(),
])
expect(downloadJson.suggestedFilename()).toContain('.json')
const downloadJsonPath = await downloadJson.path()
const jsonContent = fs.readFileSync(downloadJsonPath, 'utf-8')
@@ -549,10 +673,16 @@ test.describe('table editor', () => {
await page.waitForTimeout(1000) // wait for event processing to complete
fs.unlinkSync(downloadJsonPath)
// Close menu to prevent overlap with next export
await page.keyboard.press('Escape')
await page.keyboard.press('Escape')
await page.waitForTimeout(500)
await page.getByRole('button', { name: 'Export' }).click()
const downloadCsvPromise = page.waitForEvent('download')
await page.getByRole('menuitem', { name: 'Export as CSV' }).click()
const downloadCsv = await downloadCsvPromise
const [downloadCsv] = await Promise.all([
page.waitForEvent('download'),
page.getByRole('menuitem', { name: 'Export as CSV' }).click(),
])
expect(downloadCsv.suggestedFilename()).toContain('.csv')
const downloadCsvPath = await downloadCsv.path()
const csvContent = fs.readFileSync(downloadCsvPath, 'utf-8').replace(/\r?\n/g, '\n')
@@ -573,6 +703,11 @@ test.describe('table editor', () => {
await page.waitForTimeout(1000) // wait for event processing to complete
fs.unlinkSync(downloadCsvPath)
// Close menu to avoid leaving it open
await page.keyboard.press('Escape')
await page.keyboard.press('Escape')
await page.waitForTimeout(500)
// select all actions works (delete action)
await page.getByRole('checkbox', { name: 'Select All' }).click()
await page.getByRole('button', { name: 'Delete 98 rows' }).click()

View File

@@ -3,7 +3,8 @@
"version": "1.0.0",
"main": "index.js",
"scripts": {
"e2e": "playwright test"
"e2e": "playwright test",
"e2e:ui": "playwright test --ui"
},
"keywords": [],
"author": "",

View File

@@ -8,18 +8,24 @@ dotenv.config({ path: path.resolve(__dirname, '.env.local') })
const IS_CI = !!process.env.CI
export default defineConfig({
timeout: 60 * 1000,
timeout: 90 * 1000,
testDir: './features',
testMatch: /.*\.spec\.ts/,
forbidOnly: IS_CI,
retries: IS_CI ? 3 : 0,
maxFailures: 3,
fullyParallel: true,
use: {
baseURL: env.STUDIO_URL,
screenshot: 'off',
video: 'retain-on-failure',
headless: IS_CI,
headless: true || IS_CI,
trace: 'retain-on-failure',
permissions: ['clipboard-read', 'clipboard-write'],
extraHTTPHeaders: {
'x-vercel-protection-bypass': process.env.VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO,
'x-vercel-set-bypass-cookie': 'true',
},
},
projects: [
{
@@ -34,7 +40,9 @@ export default defineConfig({
use: {
browserName: 'chromium',
screenshot: 'off',
storageState: STORAGE_STATE_PATH,
// Only use storage state if authentication is enabled. When AUTHENTICATION=false
// we should not require a pre-generated storage state file.
storageState: env.AUTHENTICATION ? STORAGE_STATE_PATH : undefined,
},
},
],
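
The two extraHTTPHeaders are what let Playwright through Vercel's deployment protection on the preview: x-vercel-protection-bypass carries the automation bypass secret, and x-vercel-set-bypass-cookie asks Vercel to also set a bypass cookie so subsequent in-browser navigations are allowed. A minimal sketch of the same handshake outside Playwright (STUDIO_URL and the bypass env var are the ones the workflow sets; the example URL is a placeholder and the response codes are indicative):

// Sketch: hit the protected preview directly with the same bypass headers.
const previewUrl = process.env.STUDIO_URL ?? 'https://example-preview.vercel.app'
const bypassSecret = process.env.VERCEL_AUTOMATION_BYPASS_SELFHOSTED_STUDIO ?? ''

const res = await fetch(previewUrl, {
  headers: {
    'x-vercel-protection-bypass': bypassSecret, // the automation bypass secret
    'x-vercel-set-bypass-cookie': 'true', // also set a cookie for follow-up requests
  },
})
console.log(res.status) // 200 with a valid secret; Vercel's authentication page otherwise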

e2e/studio/supabase/.gitignore (new file, 8 lines)
View File

@@ -0,0 +1,8 @@
# Supabase
.branches
.temp
# dotenvx
.env.keys
.env.local
.env.*.local

View File

@@ -0,0 +1,335 @@
# For detailed configuration reference documentation, visit:
# https://supabase.com/docs/guides/local-development/cli/config
# A string used to distinguish different Supabase projects on the same host. Defaults to the
# working directory name when running `supabase init`.
project_id = "studio"
[api]
enabled = true
# Port to use for the API URL.
port = 54321
# Schemas to expose in your API. Tables, views and stored procedures in this schema will get API
# endpoints. `public` and `graphql_public` schemas are included by default.
schemas = ["public", "graphql_public"]
# Extra schemas to add to the search_path of every request.
extra_search_path = ["public", "extensions"]
# The maximum number of rows returns from a view, table, or stored procedure. Limits payload size
# for accidental or malicious requests.
max_rows = 1000
[api.tls]
# Enable HTTPS endpoints locally using a self-signed certificate.
enabled = false
[db]
# Port to use for the local database URL.
port = 54322
# Port used by db diff command to initialize the shadow database.
shadow_port = 54320
# The database major version to use. This has to be the same as your remote database's. Run `SHOW
# server_version;` on the remote database to check.
major_version = 17
[db.pooler]
enabled = false
# Port to use for the local connection pooler.
port = 54329
# Specifies when a server connection can be reused by other clients.
# Configure one of the supported pooler modes: `transaction`, `session`.
pool_mode = "transaction"
# How many server connections to allow per user/database pair.
default_pool_size = 20
# Maximum number of client connections allowed.
max_client_conn = 100
# [db.vault]
# secret_key = "env(SECRET_VALUE)"
[db.migrations]
# If disabled, migrations will be skipped during a db push or reset.
enabled = true
# Specifies an ordered list of schema files that describe your database.
# Supports glob patterns relative to supabase directory: "./schemas/*.sql"
schema_paths = []
[db.seed]
# If enabled, seeds the database after migrations during a db reset.
enabled = true
# Specifies an ordered list of seed files to load during db reset.
# Supports glob patterns relative to supabase directory: "./seeds/*.sql"
sql_paths = ["./seed.sql"]
[db.network_restrictions]
# Enable management of network restrictions.
enabled = false
# List of IPv4 CIDR blocks allowed to connect to the database.
# Defaults to allow all IPv4 connections. Set empty array to block all IPs.
allowed_cidrs = ["0.0.0.0/0"]
# List of IPv6 CIDR blocks allowed to connect to the database.
# Defaults to allow all IPv6 connections. Set empty array to block all IPs.
allowed_cidrs_v6 = ["::/0"]
[realtime]
enabled = true
# Bind realtime via either IPv4 or IPv6. (default: IPv4)
# ip_version = "IPv6"
# The maximum length in bytes of HTTP request headers. (default: 4096)
# max_header_length = 4096
[studio]
enabled = true
# Port to use for Supabase Studio.
port = 54323
# External URL of the API server that frontend connects to.
api_url = "http://127.0.0.1"
# OpenAI API Key to use for Supabase AI in the Supabase Studio.
openai_api_key = "env(OPENAI_API_KEY)"
# Email testing server. Emails sent with the local dev setup are not actually sent - rather, they
# are monitored, and you can view the emails that would have been sent from the web interface.
[inbucket]
enabled = true
# Port to use for the email testing server web interface.
port = 54324
# Uncomment to expose additional ports for testing user applications that send emails.
# smtp_port = 54325
# pop3_port = 54326
# admin_email = "admin@email.com"
# sender_name = "Admin"
[storage]
enabled = true
# The maximum file size allowed (e.g. "5MB", "500KB").
file_size_limit = "50MiB"
# Image transformation API is available to Supabase Pro plan.
# [storage.image_transformation]
# enabled = true
# Uncomment to configure local storage buckets
# [storage.buckets.images]
# public = false
# file_size_limit = "50MiB"
# allowed_mime_types = ["image/png", "image/jpeg"]
# objects_path = "./images"
[auth]
enabled = true
# The base URL of your website. Used as an allow-list for redirects and for constructing URLs used
# in emails.
site_url = "http://127.0.0.1:3000"
# A list of *exact* URLs that auth providers are permitted to redirect to post authentication.
additional_redirect_urls = ["https://127.0.0.1:3000"]
# How long tokens are valid for, in seconds. Defaults to 3600 (1 hour), maximum 604,800 (1 week).
jwt_expiry = 3600
# Path to JWT signing key. DO NOT commit your signing keys file to git.
# signing_keys_path = "./signing_keys.json"
# If disabled, the refresh token will never expire.
enable_refresh_token_rotation = true
# Allows refresh tokens to be reused after expiry, up to the specified interval in seconds.
# Requires enable_refresh_token_rotation = true.
refresh_token_reuse_interval = 10
# Allow/disallow new user signups to your project.
enable_signup = true
# Allow/disallow anonymous sign-ins to your project.
enable_anonymous_sign_ins = false
# Allow/disallow testing manual linking of accounts
enable_manual_linking = false
# Passwords shorter than this value will be rejected as weak. Minimum 6, recommended 8 or more.
minimum_password_length = 6
# Passwords that do not meet the following requirements will be rejected as weak. Supported values
# are: `letters_digits`, `lower_upper_letters_digits`, `lower_upper_letters_digits_symbols`
password_requirements = ""
[auth.rate_limit]
# Number of emails that can be sent per hour. Requires auth.email.smtp to be enabled.
email_sent = 2
# Number of SMS messages that can be sent per hour. Requires auth.sms to be enabled.
sms_sent = 30
# Number of anonymous sign-ins that can be made per hour per IP address. Requires enable_anonymous_sign_ins = true.
anonymous_users = 30
# Number of sessions that can be refreshed in a 5 minute interval per IP address.
token_refresh = 150
# Number of sign up and sign-in requests that can be made in a 5 minute interval per IP address (excludes anonymous users).
sign_in_sign_ups = 30
# Number of OTP / Magic link verifications that can be made in a 5 minute interval per IP address.
token_verifications = 30
# Number of Web3 logins that can be made in a 5 minute interval per IP address.
web3 = 30
# Configure one of the supported captcha providers: `hcaptcha`, `turnstile`.
# [auth.captcha]
# enabled = true
# provider = "hcaptcha"
# secret = ""
[auth.email]
# Allow/disallow new user signups via email to your project.
enable_signup = true
# If enabled, a user will be required to confirm any email change on both the old, and new email
# addresses. If disabled, only the new email is required to confirm.
double_confirm_changes = true
# If enabled, users need to confirm their email address before signing in.
enable_confirmations = false
# If enabled, users will need to reauthenticate or have logged in recently to change their password.
secure_password_change = false
# Controls the minimum amount of time that must pass before sending another signup confirmation or password reset email.
max_frequency = "1s"
# Number of characters used in the email OTP.
otp_length = 6
# Number of seconds before the email OTP expires (defaults to 1 hour).
otp_expiry = 3600
# Use a production-ready SMTP server
# [auth.email.smtp]
# enabled = true
# host = "smtp.sendgrid.net"
# port = 587
# user = "apikey"
# pass = "env(SENDGRID_API_KEY)"
# admin_email = "admin@email.com"
# sender_name = "Admin"
# Uncomment to customize email template
# [auth.email.template.invite]
# subject = "You have been invited"
# content_path = "./supabase/templates/invite.html"
[auth.sms]
# Allow/disallow new user signups via SMS to your project.
enable_signup = false
# If enabled, users need to confirm their phone number before signing in.
enable_confirmations = false
# Template for sending OTP to users
template = "Your code is {{ .Code }}"
# Controls the minimum amount of time that must pass before sending another sms otp.
max_frequency = "5s"
# Use pre-defined map of phone number to OTP for testing.
# [auth.sms.test_otp]
# 4152127777 = "123456"
# Configure logged in session timeouts.
# [auth.sessions]
# Force log out after the specified duration.
# timebox = "24h"
# Force log out if the user has been inactive longer than the specified duration.
# inactivity_timeout = "8h"
# This hook runs before a new user is created and allows developers to reject the request based on the incoming user object.
# [auth.hook.before_user_created]
# enabled = true
# uri = "pg-functions://postgres/auth/before-user-created-hook"
# This hook runs before a token is issued and allows you to add additional claims based on the authentication method used.
# [auth.hook.custom_access_token]
# enabled = true
# uri = "pg-functions://<database>/<schema>/<hook_name>"
# Configure one of the supported SMS providers: `twilio`, `twilio_verify`, `messagebird`, `textlocal`, `vonage`.
[auth.sms.twilio]
enabled = false
account_sid = ""
message_service_sid = ""
# DO NOT commit your Twilio auth token to git. Use environment variable substitution instead:
auth_token = "env(SUPABASE_AUTH_SMS_TWILIO_AUTH_TOKEN)"
# Multi-factor-authentication is available to Supabase Pro plan.
[auth.mfa]
# Control how many MFA factors can be enrolled at once per user.
max_enrolled_factors = 10
# Control MFA via App Authenticator (TOTP)
[auth.mfa.totp]
enroll_enabled = false
verify_enabled = false
# Configure MFA via Phone Messaging
[auth.mfa.phone]
enroll_enabled = false
verify_enabled = false
otp_length = 6
template = "Your code is {{ .Code }}"
max_frequency = "5s"
# Configure MFA via WebAuthn
# [auth.mfa.web_authn]
# enroll_enabled = true
# verify_enabled = true
# Use an external OAuth provider. The full list of providers are: `apple`, `azure`, `bitbucket`,
# `discord`, `facebook`, `github`, `gitlab`, `google`, `keycloak`, `linkedin_oidc`, `notion`, `twitch`,
# `twitter`, `slack`, `spotify`, `workos`, `zoom`.
[auth.external.apple]
enabled = false
client_id = ""
# DO NOT commit your OAuth provider secret to git. Use environment variable substitution instead:
secret = "env(SUPABASE_AUTH_EXTERNAL_APPLE_SECRET)"
# Overrides the default auth redirectUrl.
redirect_uri = ""
# Overrides the default auth provider URL. Used to support self-hosted gitlab, single-tenant Azure,
# or any other third-party OIDC providers.
url = ""
# If enabled, the nonce check will be skipped. Required for local sign in with Google auth.
skip_nonce_check = false
# Allow Solana wallet holders to sign in to your project via the Sign in with Solana (SIWS, EIP-4361) standard.
# You can configure "web3" rate limit in the [auth.rate_limit] section and set up [auth.captcha] if self-hosting.
[auth.web3.solana]
enabled = false
# Use Firebase Auth as a third-party provider alongside Supabase Auth.
[auth.third_party.firebase]
enabled = false
# project_id = "my-firebase-project"
# Use Auth0 as a third-party provider alongside Supabase Auth.
[auth.third_party.auth0]
enabled = false
# tenant = "my-auth0-tenant"
# tenant_region = "us"
# Use AWS Cognito (Amplify) as a third-party provider alongside Supabase Auth.
[auth.third_party.aws_cognito]
enabled = false
# user_pool_id = "my-user-pool-id"
# user_pool_region = "us-east-1"
# Use Clerk as a third-party provider alongside Supabase Auth.
[auth.third_party.clerk]
enabled = false
# Obtain from https://clerk.com/setup/supabase
# domain = "example.clerk.accounts.dev"
[edge_runtime]
enabled = true
# Supported request policies: `oneshot`, `per_worker`.
# `per_worker` (default) — enables hot reload during local development.
# `oneshot` — fallback mode if hot reload causes issues (e.g. in large repos or with symlinks).
policy = "per_worker"
# Port to attach the Chrome inspector for debugging edge functions.
inspector_port = 8083
# The Deno major version to use.
deno_version = 2
# [edge_runtime.secrets]
# secret_key = "env(SECRET_VALUE)"
[analytics]
enabled = true
port = 54327
# Configure one of the supported backends: `postgres`, `bigquery`.
backend = "postgres"
# Experimental features may be deprecated any time
[experimental]
# Configures Postgres storage engine to use OrioleDB (S3)
orioledb_version = ""
# Configures S3 bucket URL, eg. <bucket_name>.s3-<region>.amazonaws.com
s3_host = "env(S3_HOST)"
# Configures S3 bucket region, eg. us-east-1
s3_region = "env(S3_REGION)"
# Configures AWS_ACCESS_KEY_ID for S3 bucket
s3_access_key = "env(S3_ACCESS_KEY)"
# Configures AWS_SECRET_ACCESS_KEY for S3 bucket
s3_secret_key = "env(S3_SECRET_KEY)"

View File

@@ -16,6 +16,6 @@ export interface TestOptions {
export const test = base.extend<TestOptions>({
env: env.STUDIO_URL,
ref: env.PROJECT_REF,
ref: 'default',
apiUrl: env.API_URL,
})

View File

@@ -17,22 +17,75 @@ export async function waitForApiResponse(
action: string,
options?: Options
): Promise<void> {
// regex trims "/" both start and end.
return createApiResponseWaiter(page, basePath, ref, action, options)
}
function buildUrlMatcher(basePath: string, ref: string, action: string, method?: HttpMethod) {
// Normalize inputs and build a tolerant matcher that works across environments
const trimmedBasePath = basePath.replace(/^\/+|\/+$/g, '')
const httpMethod = options?.method
const refAlternatives = [ref, 'default']
await page.waitForResponse((response) => {
const urlMatches =
response.url().includes(`${trimmedBasePath}/${ref}/${action}`) ||
response.url().includes(`${trimmedBasePath}/default/${action}`)
return (response: any) => {
const url = response.url()
const requestMethod = response.request().method()
// checks HTTP method if exists
return httpMethod ? urlMatches && response.request().method() === httpMethod : urlMatches
})
// Must include base path and one of the ref alternatives
const hasBasePath = url.includes(`${trimmedBasePath}/`)
const hasRef = refAlternatives.some((r) => url.includes(`/${r}/`))
// The action match is tolerant of extra query params and their ordering
const hasAction = url.includes(action)
const urlMatches = hasBasePath && hasRef && hasAction
if (method) return urlMatches && requestMethod === method
return urlMatches
}
}
/**
* Starts listening for a specific API response and returns a promise you can await later.
* Use this to avoid races by creating the waiter BEFORE triggering navigation/clicks.
*
* Example:
* const wait = createApiResponseWaiter(page, 'pg-meta', ref, 'query?key=schemas')
* await page.goto(...)
* await wait
*/
export function createApiResponseWaiter(
page: Page,
basePath: string,
ref: string,
action: string,
options?: Options
): Promise<void> {
const matcher = buildUrlMatcher(basePath, ref, action, options?.method)
return page
.waitForResponse(matcher, { timeout: options?.timeout })
.then(() => {})
.catch((error) => {
const trimmedBasePath = basePath.replace(/^\/+|\/+$/g, '')
const message = `Error waiting for response: ${error}. Method: ${options?.method}, URL contains: ${trimmedBasePath}/(default|${ref})/${action}`
if (options?.soft) {
console.warn(`[soft-wait] ${message}`)
const fallback = options?.fallbackWaitMs ?? 0
if (fallback > 0) {
return page.waitForTimeout(fallback).then(() => {})
}
return
} else {
console.error(message)
throw error
}
})
}
type Options = {
method?: HttpMethod
timeout?: number
// When true, do not throw on timeout/error; optionally wait fallbackWaitMs and continue
soft?: boolean
fallbackWaitMs?: number
}
export async function waitForTableToLoad(page: Page, ref: string, schema?: string) {