Merge branch 'master' into chore/formatter

This commit is contained in:
Qiao Han
2023-03-01 12:35:32 +08:00
267 changed files with 13611 additions and 2414 deletions

View File

@@ -25,4 +25,4 @@ jobs:
with:
version: 1.0.0
- run: supabase functions deploy og-images --project-ref $PROJECT_ID
- run: supabase functions deploy og-images --project-ref $PROJECT_ID --no-verify-jwt

View File

@@ -41,7 +41,7 @@ To see how to Contribute, visit [Getting Started](./DEVELOPERS.md)
- [x] Alpha: We are testing Supabase with a closed set of customers
- [x] Public Alpha: Anyone can sign up over at [app.supabase.com](https://app.supabase.com). But go easy on us, there are a few kinks
- [x] Public Beta: Stable enough for most non-enterprise use-cases
- [ ] Public: Production-ready
- [ ] Public: General Availability [[status](https://supabase.com/docs/guides/getting-started/features#feature-status)]
We are currently in Public Beta. Watch "releases" of this repo to get notified of major updates.

View File

@@ -94,8 +94,15 @@ export default function Extensions() {
filters.length === 0 ? x : x.tags.some((item) => filters.includes(item))
)
.map((extension) => (
<Link passHref href={`/guides/database/extensions/${extension.name}`}>
<a target="_blank" className="no-underline">
<Link
passHref
href={`${
extension.link
? `/guides/database/extensions/${extension.name}`
: '/guides/database/extensions#full-list-of-extensions'
}`}
>
<a target={`${extension.link ? '_blank' : '_self'}`} className="no-underline">
<GlassPanel title={extension.name} background={false} key={extension.name}>
<p className="mt-4">
{extension.comment.charAt(0).toUpperCase() + extension.comment.slice(1)}

View File

@@ -553,8 +553,10 @@ export const functions = {
url: '/guides/functions/examples/connect-to-postgres',
items: [],
},
{ name: 'Discord Bot', url: '/guides/functions/examples/discord-bot', items: [] },
{ name: 'GitHub Actions', url: '/guides/functions/examples/github-actions', items: [] },
{ name: 'OG Image', url: '/guides/functions/examples/og-image', items: [] },
{ name: 'OpenAI', url: '/guides/functions/examples/openai', items: [] },
{ name: 'Storage Caching', url: '/guides/functions/examples/storage-caching', items: [] },
{ name: 'Stripe Webhooks', url: '/guides/functions/examples/stripe-webhooks', items: [] },
{ name: 'Telegram Bot', url: '/guides/functions/examples/telegram-bot', items: [] },
@@ -635,7 +637,7 @@ export const platform = {
url: undefined,
items: [
{ name: 'Access Control', url: '/guides/platform/access-control', items: [] },
{ name: 'Database Usage', url: '/guides/platform/database-usage', items: [] },
{ name: 'Database Size', url: '/guides/platform/database-size', items: [] },
{ name: 'HTTP Status Codes', url: '/guides/platform/http-status-codes', items: [] },
{ name: 'Logging', url: '/guides/platform/logs', items: [] },
{ name: 'Metrics', url: '/guides/platform/metrics', items: [] },
@@ -821,6 +823,7 @@ export const integrations = {
url: undefined,
items: [
{ name: 'Estuary', url: '/guides/integrations/estuary', items: [] },
{ name: 'OpenAI', url: '/guides/functions/examples/openai', items: [] },
{ name: 'pgMustard', url: '/guides/integrations/pgmustard', items: [] },
{ name: 'Prisma', url: '/guides/integrations/prisma', items: [] },
{ name: 'Sequin', url: '/guides/integrations/sequin', items: [] },

View File

@@ -38,7 +38,7 @@ const NavigationMenuGuideList: React.FC<Props> = ({ id, active }) => {
// disabled
// level !== 'home' && level !== id ? '-ml-8' : '',
!active ? 'opacity-0 invisible absolute' : '',
!active ? 'opacity-0 invisible absolute h-0 overflow-hidden' : '',
].join(' ')}
>
<NavigationMenuGuideListItems id={id} />

View File

@@ -1,34 +1,68 @@
import { MDXProvider } from '@mdx-js/react'
import { NextSeo } from 'next-seo'
import Head from 'next/head'
import { useRouter } from 'next/router'
import { FC } from 'react'
import components from '~/components'
import TableOfContents from '~/components/TableOfContents'
interface Props {
meta: { title: string; description?: string; hide_table_of_contents?: boolean; video?: string }
meta: {
title: string
description?: string
hide_table_of_contents?: boolean
video?: string
canonical?: string
}
children: any
toc?: any
menuItems: any
}
const Layout: FC<Props> = (props: Props) => {
const { asPath, basePath } = useRouter()
const hasTableOfContents =
props.toc !== undefined &&
props.toc.json.filter((item) => item.lvl !== 1 && item.lvl <= 3).length > 0
console.log('asPath', asPath)
return (
<>
<Head>
<title>{props.meta?.title} | Supabase Docs</title>
<title>
{asPath === '/' ? 'Supabase Docs' : `${props.meta?.title} | Supabase Docs`}
</title>
<meta name="description" content={props.meta?.description} />
<meta content="width=device-width, initial-scale=1" name="viewport" />
<link rel="icon" href="/docs/favicon.ico" />
<meta property="og:type" content="website" />
<meta property="og:site_name" content={props.meta?.title} />
<meta property="og:description" content={props.meta?.description} />
<meta property="og:title" content={props.meta?.title} />
<meta property="og:image" content={`https://supabase.com/docs/img/supabase-og-image.png`} />
<meta
name="twitter:image"
content={`https://supabase.com/docs/img/supabase-og-image.png`}
/>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
</Head>
<NextSeo
canonical={props.meta?.canonical ?? `https://supabase.com/docs${asPath}`}
openGraph={{
url: `https://supabase.com/docs${asPath}`,
type: 'article',
videos: props.meta?.video && [
{
// youtube based video meta
url: props.meta?.video,
width: 640,
height: 385,
type: 'application/x-shockwave-flash',
},
],
article: {
publishedTime: new Date().toISOString(),
modifiedTime: new Date().toISOString(),
authors: ['Supabase'],
},
}}
/>
<div className={['relative transition-all ease-out', 'duration-150 '].join(' ')}>
{/* <p className="text-brand-900 tracking-wider">Tutorials</p> */}
<article className="prose dark:prose-dar max-w-none">

View File

@@ -1,5 +1,6 @@
import { MDXProvider } from '@mdx-js/react'
import { NextSeo } from 'next-seo'
import Head from 'next/head'
import Link from 'next/link'
import { useRouter } from 'next/router'
import { FC, useEffect, useRef, useState } from 'react'
@@ -63,17 +64,27 @@ const Layout: FC<Props> = (props) => {
const hasTableOfContents = tocList.length > 0
// page type, ie, Auth, Database, Storage etc
const ogPageType = asPath.split('/')[2]
// open graph image url constructor
const ogImageUrl = `https://obuldanrptloktxcffvn.functions.supabase.co/og-images?site=docs${
ogPageType ? `&type=${ogPageType}` : ''
}&title=${encodeURIComponent(props.meta?.title)}&description=${encodeURIComponent(
props.meta?.description
)}`
return (
<>
<Head>
<title>{props.meta?.title} | Supabase Docs</title>
<meta name="description" content={props.meta?.description} />
<meta property="og:image" content={ogImageUrl} />
<meta name="twitter:image" content={ogImageUrl} />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
</Head>
<NextSeo
title={`${props.meta?.title} | Supabase Docs`}
description={props.meta?.description ? props.meta?.description : props.meta?.title}
canonical={props.meta?.canonical ?? `https://supabase.com/docs${asPath}`}
openGraph={{
title: props.meta?.title,
description: props.meta?.description,
url: `https://supabase.com/docs${asPath}`,
type: 'article',
videos: props.meta?.video && [
@@ -90,15 +101,6 @@ const Layout: FC<Props> = (props) => {
modifiedTime: new Date().toISOString(),
authors: ['Supabase'],
},
images: [
{
url: `https://obuldanrptloktxcffvn.functions.supabase.co/og-images?site=docs${
ogPageType ? `&type=${ogPageType}` : ''
}&title=${encodeURIComponent(props.meta?.title)}&description=${encodeURIComponent(
props.meta?.description
)}`,
},
],
}}
/>
<div className={['grid grid-cols-12 relative gap-4'].join(' ')}>

View File

@@ -50,7 +50,7 @@ const Layout: FC<Props> = (props: Props) => {
<Head>
<title>{props.meta?.title} | Supabase</title>
<meta name="description" content={props.meta?.description} />
<meta content="width=device-width, initial-scale=1" name="viewport" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="icon" href="/docs/favicon.ico" />
<meta property="og:type" content="website" />
<meta property="og:site_name" content={props.meta?.title} />

View File

@@ -2,6 +2,7 @@ import { createBrowserSupabaseClient } from '@supabase/auth-helpers-nextjs'
import { SessionContextProvider } from '@supabase/auth-helpers-react'
import { ThemeProvider } from 'common/Providers'
import { DefaultSeo } from 'next-seo'
import Head from 'next/head'
import { useRouter } from 'next/router'
import { useEffect, useState } from 'react'
import { AppPropsWithLayout } from 'types'
@@ -9,8 +10,8 @@ import ClippyProvider from '~/components/Clippy/ClippyProvider'
import { SearchProvider } from '~/components/DocSearch'
import Favicons from '~/components/Favicons'
import SiteLayout from '~/layouts/SiteLayout'
import { post } from '~/lib/fetchWrappers'
import { IS_PLATFORM } from '~/lib/constants'
import { post } from '~/lib/fetchWrappers'
import '../styles/algolia-search.scss'
import '../styles/ch.scss'
import '../styles/docsearch.scss'
@@ -59,34 +60,10 @@ function MyApp({ Component, pageProps }: AppPropsWithLayout) {
}, [router.events])
const SITE_TITLE = 'Supabase Documentation'
const SITE_DESCRIPTION = 'The open source Firebase alternative.'
const { basePath } = useRouter()
return (
<>
<Favicons />
<DefaultSeo
title={SITE_TITLE}
description={SITE_DESCRIPTION}
openGraph={{
type: 'website',
url: 'https://supabase.com/docs',
site_name: SITE_TITLE,
images: [
{
url: `https://supabase.com${basePath}/img/supabase-og-image.png`,
width: 800,
height: 600,
alt: 'Supabase Og Image',
},
],
}}
twitter={{
handle: '@supabase',
site: '@supabase',
cardType: 'summary_large_image',
}}
/>
{IS_PLATFORM ? (
<SessionContextProvider supabaseClient={supabase}>
<ThemeProvider>

View File

@@ -3,7 +3,9 @@ import { Html, Head, Main, NextScript } from 'next/document'
export default function Document() {
return (
<Html lang="en" className="dark">
<Head></Head>
<Head>
<meta name="viewport" content="width=device-width, initial-scale=1.0"></meta>
</Head>
<body>
<Main />
<NextScript />

View File

@@ -137,7 +137,8 @@ In order to get the most out of TypeScript and its IntelliSense, you should im
// and what to do when importing types
declare namespace App {
interface Supabase {
Database: import('./DatabaseDefinitions').Database
// Use the path to where you generated the types using the Supabase CLI.
Database: import('../types/supabase').Database;
SchemaName: 'public'
}

View File

@@ -38,6 +38,12 @@ Primary keys are **guaranteed not to change**. Columns, indices, constraints or
You may delete users directly or via the management console at Authentication > Users. Note that deleting a user from the `auth.users` table does not automatically sign out a user. As Supabase makes use of JSON Web Tokens (JWT), a user's JWT will remain "valid" until it has expired. Should you wish to immediately revoke access for a user, consider making use of a Row Level Security policy as described below.
<Admonition type="caution">
You cannot delete a user if they are the owner of any objects in Supabase Storage.
You will encounter an error when you try to delete an Auth user that owns any Storage objects. If this happens, try deleting all the objects for that user, or reassign ownership to another user.
</Admonition>
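A minimal sketch of that cleanup flow, assuming supabase-js v2 on the server with the service role key, and a hypothetical `avatars` bucket that stores one folder per user:

```ts
import { createClient } from '@supabase/supabase-js'

// Server-side only: the service role key bypasses RLS.
const admin = createClient('https://your-project.supabase.co', 'service-role-key')

async function deleteUserWithStorage(userId: string) {
  // Remove the user's objects first (folder-per-user layout is an assumption).
  const { data: objects } = await admin.storage.from('avatars').list(userId)
  if (objects?.length) {
    await admin.storage.from('avatars').remove(objects.map((o) => `${userId}/${o.name}`))
  }
  // With no owned objects left, the Auth user can be deleted.
  await admin.auth.admin.deleteUser(userId)
}
```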
## Public access
Since Row Level Security is enabled, this table is accessible via the API but no data will be returned unless we set up some Policies.
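For example, querying with the anon key before any policies exist succeeds but returns no rows; a rough sketch (the project URL, anon key, and `profiles` table are placeholders):

```ts
import { createClient } from '@supabase/supabase-js'

const supabase = createClient('https://your-project.supabase.co', 'public-anon-key')

// RLS is enabled and no policy grants SELECT, so data comes back as an empty array.
const { data, error } = await supabase.from('profiles').select()
console.log(data) // []
```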

View File

@@ -329,6 +329,129 @@ Tip: Make sure to enable RLS for all your tables, so that your tables are inacce
Supabase provides special "Service" keys, which can be used to bypass all RLS.
These should never be used in the browser or exposed to customers, but they are useful for administrative tasks.
### Testing policies
To test policies on the database itself (i.e., from the [SQL Editor](https://app.supabase.com/project/_/sql) or from `psql`) without switching to your frontend and logging in as different users, you can utilize the following helper SQL procedures ([credits](https://github.com/supabase/supabase/issues/7311#issuecomment-1398648114)):
```sql
grant anon, authenticated to postgres;
create or replace procedure auth.login_as_user (user_email text)
language plpgsql
as $$
declare
auth_user auth.users;
begin
select
* into auth_user
from
auth.users
where
email = user_email;
execute format('set request.jwt.claim.sub=%L', (auth_user).id::text);
execute format('set request.jwt.claim.role=%I', (auth_user).role);
execute format('set request.jwt.claim.email=%L', (auth_user).email);
execute format('set request.jwt.claims=%L', json_strip_nulls(json_build_object('app_metadata', (auth_user).raw_app_meta_data))::text);
raise notice '%', format( 'set role %I; -- logging in as %L (%L)', (auth_user).role, (auth_user).id, (auth_user).email);
execute format('set role %I', (auth_user).role);
end;
$$;
create or replace procedure auth.login_as_anon ()
language plpgsql
as $$
begin
set request.jwt.claim.sub='';
set request.jwt.claim.role='';
set request.jwt.claim.email='';
set request.jwt.claims='';
set role anon;
end;
$$;
create or replace procedure auth.logout ()
language plpgsql
as $$
begin
set request.jwt.claim.sub='';
set request.jwt.claim.role='';
set request.jwt.claim.email='';
set request.jwt.claims='';
set role postgres;
end;
$$;
```
To switch to a given user (by email), use `call auth.login_as_user('my@email.com');`. You can also switch to the `anon` role using `call auth.login_as_anon();`. When you are done, use `call auth.logout();` to return yourself to the `postgres` role.
These procedures can also be used for writing [pgTAP](/docs/guides/database/extensions/pgtap) unit tests for policies.
<details>
<summary>Click here to see an example `psql` interaction using this.</summary>
This example shows that the `public.profiles` table from the tutorial example can indeed be updated by the `postgres` role and the owner of the row but not from `anon` connections:
```shell
postgres=> select id, email from auth.users;
id | email
--------------------------------------+-------------------
d4f0aa86-e6f6-41d1-bd32-391f077cf1b9 | user1@example.com
15d6811a-16ee-4fa2-9b18-b63085688be4 | user2@example.com
4e1010bb-eb37-4a4d-a05a-b0ee315c9d56 | user3@example.com
(3 rows)
postgres=> table public.profiles;
id | updated_at | username | full_name | avatar_url | website
--------------------------------------+------------+----------+-----------+------------+---------
d4f0aa86-e6f6-41d1-bd32-391f077cf1b9 | | user1 | User 1 | |
15d6811a-16ee-4fa2-9b18-b63085688be4 | | user2 | User 2 | |
4e1010bb-eb37-4a4d-a05a-b0ee315c9d56 | | user3 | User 3 | |
(3 rows)
postgres=> call auth.login_as_anon();
CALL
postgres=> update public.profiles set updated_at=now();
UPDATE 0 -- anon users cannot update any profile but see all of them
postgres=> table public.profiles;
id | updated_at | username | full_name | avatar_url | website
--------------------------------------+------------+----------+-----------+------------+---------
d4f0aa86-e6f6-41d1-bd32-391f077cf1b9 | | user1 | User 1 | |
15d6811a-16ee-4fa2-9b18-b63085688be4 | | user2 | User 2 | |
4e1010bb-eb37-4a4d-a05a-b0ee315c9d56 | | user3 | User 3 | |
(3 rows)
postgres=> call auth.logout();
CALL
postgres=> call auth.login_as_user('user1@example.com');
NOTICE: set role authenticated; -- logging in as 'd4f0aa86-e6f6-41d1-bd32-391f077cf1b9' ('user1@example.com')
CALL
postgres=> update public.profiles set updated_at=now();
UPDATE 1 -- authenticated users can update their own profile and see all of them
postgres=> table public.profiles;
id | updated_at | username | full_name | avatar_url | website
--------------------------------------+-------------------------------+----------+-----------+------------+---------
15d6811a-16ee-4fa2-9b18-b63085688be4 | | user1 | User 1 | |
4e1010bb-eb37-4a4d-a05a-b0ee315c9d56 | | user2 | User 2 | |
d4f0aa86-e6f6-41d1-bd32-391f077cf1b9 | 2023-02-18 21:39:16.204612+00 | user3 | User 3 | |
(3 rows)
postgres=> call auth.logout();
CALL
postgres=> update public.profiles set updated_at=now();
UPDATE 3 -- the 'postgres' role can update and see all profiles
postgres=> table public.profiles;
id | updated_at | username | full_name | avatar_url | website
--------------------------------------+-------------------------------+----------+-----------+------------+---------
15d6811a-16ee-4fa2-9b18-b63085688be4 | 2023-02-18 21:40:08.216324+00 | user1 | User 1 | |
4e1010bb-eb37-4a4d-a05a-b0ee315c9d56 | 2023-02-18 21:40:08.216324+00 | user2 | User 2 | |
d4f0aa86-e6f6-41d1-bd32-391f077cf1b9 | 2023-02-18 21:40:08.216324+00 | user3 | User 3 | |
(3 rows)
```
</details>
## Deprecated features
We have deprecated some functions to ensure better performance and extensibility of RLS policies.

View File

@@ -160,6 +160,12 @@ use this potentially stale information to render a page.
## Frequently Asked Questions
### No session on the server side with Next.js route prefetching?
When you use route prefetching in Next.js via `<Link href="/...">` components or the `Router.push()` API, server-side requests can be sent before the browser processes the access and refresh tokens. This means those requests may not have any cookies set and your server code will render unauthenticated content.
To improve the experience for your users, we recommend redirecting users to one specific page after sign-in that does not include any route prefetching from Next.js. Once the Supabase client library running in the browser has obtained the access and refresh tokens from the URL fragment, you can send users to any pages that use prefetching.
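One possible shape for such a landing page, assuming a shared browser client exported from a hypothetical `lib/supabaseClient` module and a `/dashboard` destination:

```tsx
// pages/auth/callback.tsx — illustrative only
import { useEffect } from 'react'
import { useRouter } from 'next/router'
import { supabase } from '../../lib/supabaseClient' // assumed shared browser client

export default function AuthCallback() {
  const router = useRouter()

  useEffect(() => {
    // Once the client library has stored the session from the URL fragment,
    // it is safe to move on to pages that use prefetching.
    const { data } = supabase.auth.onAuthStateChange((event) => {
      if (event === 'SIGNED_IN') router.replace('/dashboard')
    })
    return () => data.subscription.unsubscribe()
  }, [router])

  return <p>Signing you in…</p>
}
```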
### How do I make the cookies `HttpOnly`?
This is not necessary. Both the access token and refresh token are designed to

View File

@@ -43,12 +43,12 @@ export const examples = [
{
name: 'With supabase-js',
description: 'Use the Supabase client inside your Edge Function.',
href: 'https://github.com/supabase/supabase/blob/master/examples/edge-functions/supabase/functions/select-from-table-with-auth-rls/index.ts',
href: '/guides/functions/auth',
},
{
name: 'With CORS headers',
description: 'Send CORS headers for invoking from the browser.',
href: 'https://github.com/supabase/supabase/blob/master/examples/edge-functions/supabase/functions/browser-with-cors/index.ts',
href: '/guides/functions/cors',
},
{
name: 'React Native with Stripe',
@@ -86,6 +86,71 @@ export const examples = [
description: `Get user location data from user's IP address.`,
href: 'https://github.com/supabase/supabase/tree/master/examples/edge-functions/supabase/functions/location',
},
{
name: 'Cloudflare Turnstile',
description: `Protecting Forms with Cloudflare Turnstile.`,
href: '/guides/functions/examples/cloudflare-turnstile',
},
{
name: 'Connect to Postgres',
description: `Connecting to Postgres from Edge Functions.`,
href: '/guides/functions/examples/connect-to-postgres',
},
{
name: 'GitHub Actions',
description: `Deploying Edge Functions with GitHub Actions.`,
href: '/guides/functions/examples/github-actions',
},
{
name: 'Oak Server Middleware',
description: `Request Routing with Oak server middleware.`,
href: 'https://github.com/supabase/supabase/tree/master/examples/edge-functions/supabase/functions/oak-server',
},
{
name: 'OpenAI',
description: `Using OpenAI in Edge Functions.`,
href: '/guides/functions/examples/openai',
},
{
name: 'Stripe Webhooks',
description: `Handling signed Stripe Webhooks with Edge Functions.`,
href: '/guides/functions/examples/stripe-webhooks',
},
{
name: 'Send emails',
description: `Send emails in Edge Functions.`,
href: 'https://github.com/supabase/supabase/tree/master/examples/edge-functions/supabase/functions/send-email-smtp',
},
{
name: 'Web Stream',
description: `Server-Sent Events in Edge Functions.`,
href: 'https://github.com/supabase/supabase/tree/master/examples/edge-functions/supabase/functions/streams',
},
{
name: 'Puppeteer',
description: `Generate screenshots with Puppeteer.`,
href: 'https://github.com/supabase/supabase/tree/master/examples/edge-functions/supabase/functions/puppeteer',
},
{
name: 'Discord Bot',
description: `Building a Slash Command Discord Bot with Edge Functions.`,
href: '/guides/functions/examples/discord-bot',
},
{
name: 'Telegram Bot',
description: `Building a Telegram Bot with Edge Functions.`,
href: '/guides/functions/examples/telegram-bot',
},
{
name: 'Upload File',
description: `Process multipart/form-data.`,
href: 'https://github.com/supabase/supabase/tree/master/examples/edge-functions/supabase/functions/file-upload-storage',
},
{
name: 'Upstash Redis',
description: `Build an Edge Functions Counter with Upstash Redis.`,
href: '/guides/functions/examples/upstash-redis',
},
]
export const Page = ({ children }) => <Layout meta={meta} children={children} hideToc={true} />

View File

@@ -20,7 +20,7 @@ By creating a supabase client with the auth context from the function, you can d
2. Run queries in the context of the user with [Row Level Security (RLS)](/docs/guides/auth/row-level-security) policies enforced.
```js lines=14,17-19,22-23 title=supabase/functions/select-from-table-with-auth-rls/index.ts
import { serve } from 'https://deno.land/std@0.131.0/http/server.ts'
import { serve } from 'https://deno.land/std@0.177.0/http/server.ts'
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2'
serve(async (req: Request) => {

View File

@@ -24,7 +24,7 @@ export const corsHeaders = {
You can then import and use the CORS headers within your functions:
```ts index.ts
import { serve } from 'https://deno.land/std@0.131.0/http/server.ts'
import { serve } from 'https://deno.land/std@0.177.0/http/server.ts'
import { corsHeaders } from '../_shared/cors.ts'
console.log(`Function "browser-with-cors" up and running!`)

View File

@@ -16,7 +16,7 @@ export const meta = {
></iframe>
</div>
[Clouflare Turnstile](https://www.cloudflare.com/products/turnstile/) is a friendly, free CAPTCHA replacement, and it works seamlessly with Supabase Edge Functions to protect your forms. [View on GitHub](https://github.com/supabase/supabase/tree/master/examples/edge-functions/supabase/functions/cloudflare-turnstile).
[Cloudflare Turnstile](https://www.cloudflare.com/products/turnstile/) is a friendly, free CAPTCHA replacement, and it works seamlessly with Supabase Edge Functions to protect your forms. [View on GitHub](https://github.com/supabase/supabase/tree/master/examples/edge-functions/supabase/functions/cloudflare-turnstile).
## Setup
@@ -34,7 +34,7 @@ supabase functions new cloudflare-turnstile
And add the code to the `index.ts` file:
```ts index.ts
import { serve } from 'https://deno.land/std@0.131.0/http/server.ts'
import { serve } from 'https://deno.land/std@0.177.0/http/server.ts'
import { corsHeaders } from '../_shared/cors.ts'
console.log('Hello from Cloudflare Turnstile!')

View File

@@ -4,6 +4,7 @@ export const meta = {
id: 'examples-postgres-on-the-edge',
title: 'Connect to Postgres',
description: 'Connecting to Postgres from Edge Functions.',
video: 'https://www.youtube.com/v/cl7EuF1-RsY',
}
<div class="video-container">
@@ -19,7 +20,7 @@ Supabase Edge Functions allow you to go beyond HTTP and can connect to your Post
```ts index.ts
import * as postgres from 'https://deno.land/x/postgres@v0.14.2/mod.ts'
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts'
import { serve } from 'https://deno.land/std@0.177.0/http/server.ts'
// Get the connection string from the environment variable "DATABASE_URL"
const databaseUrl = Deno.env.get('DATABASE_URL')!

View File

@@ -0,0 +1,178 @@
import Layout from '~/layouts/DefaultGuideLayout'
export const meta = {
id: 'examples-discord-bot',
title: 'Discord Bot',
description: 'Building a Slash Command Discord Bot with Edge Functions.',
video: 'https://www.youtube.com/v/J24Bvo_m7DM',
}
<div class="video-container">
<iframe
src="https://www.youtube-nocookie.com/embed/J24Bvo_m7DM"
frameBorder="1"
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
allowFullScreen
></iframe>
</div>
## Create an application on Discord Developer Portal
1. Go to [https://discord.com/developers/applications](https://discord.com/developers/applications) (log in with your Discord account if required).
2. Click the **New Application** button to the left of your profile picture.
3. Name your application and click on **Create**.
4. Go to **Bot** section, click on **Add Bot**, and finally on **Yes, do it!** to confirm.
That's it. A new application is created which will hold our Slash Command. Don't close the tab as we need information from this application page throughout our development.
Before we can write some code, we need to curl a discord endpoint to register a Slash Command in our app.
Fill `BOT_TOKEN` with the token available in the **Bot** section and `CLIENT_ID` with the ID available in the **General Information** section of the page, then run the command in your terminal.
```bash
BOT_TOKEN='replace_me_with_bot_token'
CLIENT_ID='replace_me_with_client_id'
curl -X POST \
-H 'Content-Type: application/json' \
-H "Authorization: Bot $BOT_TOKEN" \
-d '{"name":"hello","description":"Greet a person","options":[{"name":"name","description":"The name of the person","type":3,"required":true}]}' \
"https://discord.com/api/v8/applications/$CLIENT_ID/commands"
```
This will register a Slash Command named `hello` that accepts a parameter named `name` of type string.
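If you prefer registering the command from a script rather than curl, a rough TypeScript equivalent of the request above (the token and client ID values are placeholders):

```ts
const BOT_TOKEN = 'replace_me_with_bot_token'
const CLIENT_ID = 'replace_me_with_client_id'

// Same payload as the curl command: registers a `hello` command with a required `name` option.
const res = await fetch(`https://discord.com/api/v8/applications/${CLIENT_ID}/commands`, {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    Authorization: `Bot ${BOT_TOKEN}`,
  },
  body: JSON.stringify({
    name: 'hello',
    description: 'Greet a person',
    options: [{ name: 'name', description: 'The name of the person', type: 3, required: true }],
  }),
})
console.log(res.status, await res.json())
```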
## Code
```ts index.ts
// Sift is a small routing library that abstracts away details like starting a
// listener on a port, and provides a simple function (serve) that has an API
// to invoke a function for a specific path.
import { json, serve, validateRequest } from 'sift'
// TweetNaCl is a cryptography library that we use to verify requests
// from Discord.
import nacl from 'nacl'
enum DiscordCommandType {
Ping = 1,
ApplicationCommand = 2,
}
// For all requests to "/" endpoint, we want to invoke home() handler.
serve({
'/discord-bot': home,
})
// The main logic of the Discord Slash Command is defined in this function.
async function home(request: Request) {
// validateRequest() ensures that a request is of POST method and
// has the following headers.
const { error } = await validateRequest(request, {
POST: {
headers: ['X-Signature-Ed25519', 'X-Signature-Timestamp'],
},
})
if (error) {
return json({ error: error.message }, { status: error.status })
}
// verifySignature() verifies if the request is coming from Discord.
// When the request's signature is not valid, we return a 401 and this is
// important as Discord sends invalid requests to test our verification.
const { valid, body } = await verifySignature(request)
if (!valid) {
return json(
{ error: 'Invalid request' },
{
status: 401,
}
)
}
const { type = 0, data = { options: [] } } = JSON.parse(body)
// Discord performs Ping interactions to test our application.
// Type 1 in a request implies a Ping interaction.
if (type === DiscordCommandType.Ping) {
return json({
type: 1, // Type 1 in a response is a Pong interaction response type.
})
}
// Type 2 in a request is an ApplicationCommand interaction.
// It implies that a user has issued a command.
if (type === DiscordCommandType.ApplicationCommand) {
const { value } = data.options.find(
(option: { name: string; value: string }) => option.name === 'name'
)
return json({
// Type 4 responds with the below message retaining the user's
// input at the top.
type: 4,
data: {
content: `Hello, ${value}!`,
},
})
}
// We will return a bad request error as a valid Discord request
// shouldn't reach here.
return json({ error: 'bad request' }, { status: 400 })
}
/** Verify whether the request is coming from Discord. */
async function verifySignature(request: Request): Promise<{ valid: boolean; body: string }> {
const PUBLIC_KEY = Deno.env.get('DISCORD_PUBLIC_KEY')!
// Discord sends these headers with every request.
const signature = request.headers.get('X-Signature-Ed25519')!
const timestamp = request.headers.get('X-Signature-Timestamp')!
const body = await request.text()
const valid = nacl.sign.detached.verify(
new TextEncoder().encode(timestamp + body),
hexToUint8Array(signature),
hexToUint8Array(PUBLIC_KEY)
)
return { valid, body }
}
/** Converts a hexadecimal string to Uint8Array. */
function hexToUint8Array(hex: string) {
return new Uint8Array(hex.match(/.{1,2}/g)!.map((val) => parseInt(val, 16)))
}
```
## Deploy the Slash Command Handler
```bash
supabase functions deploy discord-bot --no-verify-jwt
supabase secrets set DISCORD_PUBLIC_KEY=your_public_key
```
Navigate to your Function details in the Supabase Dashboard to get your Endpoint URL.
### Configure Discord application to use our URL as interactions endpoint URL
1. Go back to your application (Greeter) page on Discord Developer Portal
2. Fill **INTERACTIONS ENDPOINT URL** field with the URL and click on **Save Changes**.
The application is now ready. Let's proceed to the next section to install it.
## Install the Slash Command on your Discord server
To use the `hello` Slash Command, we need to install our Greeter application on our Discord server. Here are the steps:
1. Go to **OAuth2** section of the Discord application page on Discord Developer Portal
2. Select `applications.commands` scope and click on the **Copy** button below.
3. Now paste and visit the URL on your browser. Select your server and click on **Authorize**.
Open Discord, type `/hello`, fill in the `name` option, and press **Enter**.
## Run locally
```bash
supabase functions serve discord-bot --no-verify-jwt --env-file ./supabase/.env.local
ngrok http 54321
```
export const Page = ({ children }) => <Layout meta={meta} children={children} />
export default Page

View File

@@ -50,7 +50,7 @@ export default function handler(req: Request) {
Create an `index.ts` file to execute the handler on incoming requests:
```ts index.ts
import { serve } from 'https://deno.land/std@0.131.0/http/server.ts'
import { serve } from 'https://deno.land/std@0.177.0/http/server.ts'
import handler from './handler.tsx'
console.log('Hello from og-image Function!')

View File

@@ -0,0 +1,84 @@
import Layout from '~/layouts/DefaultGuideLayout'
export const meta = {
id: 'examples-openai',
title: 'OpenAI',
description: 'Using OpenAI in Edge Functions.',
video: 'https://www.youtube.com/v/29p8kIqyU_Y',
}
<div class="video-container">
<iframe
src="https://www.youtube-nocookie.com/embed/29p8kIqyU_Y"
frameBorder="1"
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
allowFullScreen
></iframe>
</div>
Use the [OpenAI completions API](https://platform.openai.com/docs/api-reference/completions) in Supabase Edge Functions.
```ts index.ts
import 'xhr_polyfill'
import { serve } from 'std/server'
import { CreateCompletionRequest } from 'openai'
serve(async (req) => {
const { query } = await req.json()
const completionConfig: CreateCompletionRequest = {
model: 'text-davinci-003',
prompt: query,
max_tokens: 256,
temperature: 0,
stream: true,
}
return fetch('https://api.openai.com/v1/completions', {
method: 'POST',
headers: {
Authorization: `Bearer ${Deno.env.get('OPENAI_API_KEY')}`,
'Content-Type': 'application/json',
},
body: JSON.stringify(completionConfig),
})
})
```
## Run locally
```bash
supabase functions serve --env-file ./supabase/.env.local --no-verify-jwt
```
Use cURL or Postman to make a POST request to http://localhost:54321/functions/v1/openai.
```bash
curl -i --location --request POST http://localhost:54321/functions/v1/openai \
--header 'Content-Type: application/json' \
--data '{"query":"What is Supabase?"}'
```
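If you are invoking the function from TypeScript instead, a rough sketch that reads the streamed completion from the locally served function (the URL assumes the default `supabase start` ports):

```ts
const res = await fetch('http://localhost:54321/functions/v1/openai', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ query: 'What is Supabase?' }),
})

// The example sets `stream: true`, so read the response body incrementally.
const reader = res.body!.getReader()
const decoder = new TextDecoder()
while (true) {
  const { done, value } = await reader.read()
  if (done) break
  console.log(decoder.decode(value))
}
```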
## Deploy
```bash
supabase functions deploy --no-verify-jwt openai
supabase secrets set --env-file ./supabase/.env.local
```
## Go deeper
If you're interested in learning how to use this to build your own ChatGPT, read [the blog post](/blog/chatgpt-supabase-docs) and check out the video:
<div class="video-container">
<iframe
src="https://www.youtube-nocookie.com/embed/Yhtjd7yGGGA"
frameBorder="1"
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
allowFullScreen
></iframe>
</div>
export const Page = ({ children }) => <Layout meta={meta} children={children} />
export default Page

View File

@@ -73,7 +73,7 @@ serve(async (_req) => {
```bash
supabase start
supabase functions serve upstash-redis-counter --no-verify-jwt --env-file supabase/functions/upstash-redis-counter/.env
supabase functions serve --no-verify-jwt --env-file supabase/functions/upstash-redis-counter/.env
```
Navigate to http://localhost:54321/functions/v1/upstash-redis-counter.

View File

@@ -10,7 +10,7 @@ You can run your Edge Function locally using [`supabase functions serve`](/docs/
```bash
supabase start # start the supabase stack
supabase functions serve hello-world # start the Function watcher
supabase functions serve # start the Functions watcher
```
The `functions serve` command has hot-reloading capabilities. It will watch for any changes to your files and restart the Deno server.
@@ -20,7 +20,7 @@ The `functions serve` command has hot-reloading capabilities. It will watch for
While serving your local Edge Function, you can invoke it using curl:
```bash
curl --request POST 'http://localhost:54321/functions/v1/hello-world' \
curl --request POST 'http://localhost:54321/functions/v1/function-name' \
--header 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24ifQ.625_WdcF3KHqz5amU0x2X5WWHP-OEs_4qj0ssLNHzTs' \
--header 'Content-Type: application/json' \
--data '{ "name":"Functions" }'
@@ -37,7 +37,7 @@ const supabase = createClient(
'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0'
)
const { data, error } = await supabase.functions.invoke('hello-world', {
const { data, error } = await supabase.functions.invoke('function-name', {
body: { name: 'Functions' },
})
```

View File

@@ -46,7 +46,7 @@ console.log(Deno.env.get('MY_NAME'))
Now we can invoke our function locally, by serving it with our new `.env.local` file:
```bash
supabase functions serve hello-world --env-file ./supabase/.env.local
supabase functions serve --env-file ./supabase/.env.local
```
When the function starts you should see the name “Yoda” output to the terminal.
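For reference, a minimal sketch of a function that would produce that output (the `hello-world` file layout and greeting are illustrative):

```ts
// supabase/functions/hello-world/index.ts — illustrative sketch
import { serve } from 'https://deno.land/std@0.177.0/http/server.ts'

console.log(Deno.env.get('MY_NAME')) // prints "Yoda" when the watcher starts

serve(() => new Response(`Hello ${Deno.env.get('MY_NAME')}!`))
```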

View File

@@ -176,6 +176,7 @@ export const meta = {
Run your app on a platform of your choosing! By default an app should launch in your web browser.
Note that `supabase_flutter` is compatible with web, iOS, Android, macOS, and Windows apps.
Running the app on macOS requires additional configuration to [set the entitlements](https://docs.flutter.dev/development/platform-integration/macos/building#setting-up-entitlements).
</StepHikeCompact.Details>

View File

@@ -126,7 +126,7 @@ The fastest way to get started with supabase and Nuxt.js is to use the supabase-
<StepHikeCompact.Step step={5}>
<StepHikeCompact.Details title="Create client in app">
Next, in your Next.js app, create a file called supabase-client.js and add the following code to initialize the Supabase client and set your project's credentials:
Next, in your Nuxt.js app, create a file called supabase-client.js and add the following code to initialize the Supabase client and set your project's credentials:
</StepHikeCompact.Details>
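A minimal sketch of that file (the project URL and anon key are placeholders):

```ts
// supabase-client.js — replace the URL and anon key with your project's values
import { createClient } from '@supabase/supabase-js'

export const supabase = createClient('https://your-project.supabase.co', 'public-anon-key')
```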

View File

@@ -50,9 +50,9 @@ These variables will be exposed on the browser, and that's completely fine since
```js title=src/lib/supabaseClient.ts
import { createClient } from '@supabase/auth-helpers-sveltekit'
import { env } from '$env/dynamic/public'
import { PUBLIC_SUPABASE_URL, PUBLIC_SUPABASE_ANON_KEY } from '$env/static/public'
export const supabase = createClient(env.PUBLIC_SUPABASE_URL, env.PUBLIC_SUPABASE_ANON_KEY)
export const supabase = createClient(PUBLIC_SUPABASE_URL, PUBLIC_SUPABASE_ANON_KEY)
```
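As a usage sketch, the exported client can then be queried from a load function (the `todos` table is hypothetical):

```ts
// src/routes/+page.ts — illustrative only
import { supabase } from '$lib/supabaseClient'

export async function load() {
  const { data: todos } = await supabase.from('todos').select()
  return { todos: todos ?? [] }
}
```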
Optionally, update `src/routes/styles.css` with the [CSS from the example](https://raw.githubusercontent.com/supabase/supabase/master/examples/user-management/svelte-user-management/src/app.css).

View File

@@ -99,7 +99,7 @@ const handleLogin = async () => {
<h1 class="header">Supabase + Vue 3</h1>
<p class="description">Sign in via magic link with your email below</p>
<div>
<input class="inputField" type="email" placeholder="Your email" v-model="email" />
<input class="inputField" required type="email" placeholder="Your email" v-model="email" />
</div>
<div>
<input

View File

@@ -25,9 +25,9 @@ Supabase offers a variety of options for populating tables with data, including
Fill out the info in the table. The database is now set up.
### Step 2: Build UI on ILLA Builder
### Step 2: Build UI on ILLA Cloud
On [ILLA Builder](https://fast-try.illacloud.com/), click Create New to create a new application.
On [ILLA Cloud](https://cloud.illacloud.com/), click Create New to create a new application.
![Create new project on ILLA Builder](/docs/img/guides/integrations/illa/supabase-illa-create-project.png)

View File

@@ -8,7 +8,7 @@ export const meta = {
video: 'https://www.youtube.com/v/mw0DLwItue4',
}
[OneSignal](https://onesignal.com/) is a tool that allows you to send messages across different channels such as the following to keep your users engaged.
- Push notifications
- SMS
@@ -36,7 +36,6 @@ You can find the complete example app along with the edge functions code to send
![Ordering app UI](/docs/img/guides/integrations/onesignal/app-ui.png)
## Step 1: Getting started
Before we dive into the code, this guide assumes that you have the following ready
@@ -80,8 +79,7 @@ const supabase = createClient(supabaseUrl, supabaseAnonKey)
const Home: NextPage = () => {
const [user, setUser] = useState<User | null>(null)
const [oneSignalInitialized, setOneSignalInitialized] =
useState<boolean>(false)
const [oneSignalInitialized, setOneSignalInitialized] = useState<boolean>(false)
/**
* Initializes OneSignal SDK for a given Supabase User ID
@@ -123,9 +121,7 @@ const Home: NextPage = () => {
const { price } = Object.fromEntries(new FormData(event.currentTarget))
if (typeof price !== 'string') return
const { error } = await supabase
.from('orders')
.insert({ price: Number(price) })
const { error } = await supabase.from('orders').insert({ price: Number(price) })
if (error) {
alert(error.message)
}
@@ -142,15 +138,13 @@ const Home: NextPage = () => {
initialize()
const authListener = supabase.auth.onAuthStateChange(
async (event, session) => {
const user = session?.user ?? null
setUser(user)
if (user) {
initializeOneSignal(user.id)
}
const authListener = supabase.auth.onAuthStateChange(async (event, session) => {
const user = session?.user ?? null
setUser(user)
if (user) {
initializeOneSignal(user.id)
}
)
})
return () => {
authListener.data.subscription.unsubscribe()
@@ -175,10 +169,7 @@ const Home: NextPage = () => {
<option value="200">$200</option>
<option value="300">$300</option>
</select>
<button
type="submit"
className="py-1 px-4 text-lg bg-green-400 rounded"
>
<button type="submit" className="py-1 px-4 text-lg bg-green-400 rounded">
Place an Order
</button>
</form>
@@ -190,10 +181,7 @@ const Home: NextPage = () => {
name="email"
placeholder="Email"
/>
<button
type="submit"
className="py-1 px-4 text-lg bg-green-400 rounded"
>
<button type="submit" className="py-1 px-4 text-lg bg-green-400 rounded">
Send Magic Link
</button>
</form>
@@ -211,7 +199,7 @@ There is quite a bit of stuff going on here, but basically, its creating a si
Notice that inside the `initializeOneSignal()` function, we are setting the Supabase user ID as an [external user ID of OneSignal](https://documentation.onesignal.com/docs/external-user-ids). This allows us to later send push notifications to the user using their Supabase user ID from the backend, which is very handy.
```tsx
await OneSignal.setExternalUserId(uid);
await OneSignal.setExternalUserId(uid)
```
The front-end side of things is done here. Let's get into the backend.
@@ -237,7 +225,7 @@ supabase functions new notify
Replace the contents of `supabase/functions/notify/index.ts` with the following
```tsx
import { serve } from 'https://deno.land/std@0.131.0/http/server.ts'
import { serve } from 'https://deno.land/std@0.177.0/http/server.ts'
import * as OneSignal from 'https://esm.sh/@onesignal/node-onesignal@1.0.0-beta7'
const _OnesignalAppId_ = Deno.env.get('ONESIGNAL_APP_ID')!
@@ -263,12 +251,9 @@ serve(async (req) => {
}
const onesignalApiRes = await onesignal.createNotification(notification)
return new Response(
JSON.stringify({ onesignalResponse: onesignalApiRes }),
{
headers: { 'Content-Type': 'application/json' },
}
)
return new Response(JSON.stringify({ onesignalResponse: onesignalApiRes }), {
headers: { 'Content-Type': 'application/json' },
})
} catch (err) {
console.error('Failed to create OneSignal notification', err)
return new Response('Server error.', {
@@ -295,7 +280,6 @@ ONESIGNAL_REST_API_KEY=YOUR_ONESIGNAL_REST_API_KEY
![Where to find OneSignal User Auth Key](/docs/img/guides/integrations/onesignal/onesignal-api-key.png)
Once your environment variables are filled in, you can run the following command to set the environment variable.
```bash
@@ -313,35 +297,35 @@ supabase functions deploy notify --no-verify-jwt
Finally, we get to set up the database! Run the following SQL to set up the `orders` table.
```sql
create table if not exists public.orders (
id uuid not null primary key default uuid_generate_v4(),
created_at timestamptz not null default now(),
user_id uuid not null default auth.uid(),
create table
if not exists public.orders (
id uuid not null primary key default uuid_generate_v4 (),
created_at timestamptz not null default now (),
user_id uuid not null default auth.uid (),
price int8 not null
);
);
```
As you can see, the `orders` table has 4 columns and 3 of them have default values. That means all we need to send from the front-end app is the price. That is why our insert statement looked very simple.
```tsx
const { error } = await supabase.from('orders').insert({
price: 100,
})
price: 100,
})
```
Let's also set up the webhook so that whenever a new row is inserted into the `orders` table, it calls the edge function. Go to `Database > Webhooks` and create a new Database Webhook. The table should be set to `orders` and the event should be `INSERT`. The type should be HTTP Request, the HTTP method should be POST, and the URL should be the URL of your edge function. Hit confirm to save the webhook configuration.
![Supabase Webhooks configuration](/docs/img/guides/integrations/onesignal/webhook.png)
At this point, the app should be complete! Run your app locally with `npm run dev`, or deploy your app to a hosting service and see how you receive a push notification when you place an order!
Remember that if you decide to deploy your app to a hosting service, you would need to create another OneSignal app configured for your local address.
![Ordering app UI](/docs/img/guides/integrations/onesignal/app-ui.png)
## Resources
This particular example used Next.js, but you can apply the same principles to send push notifications, SMS, emails, and in-app notifications on other platforms as well.
- [OneSignal + Flutter + Supabase example](https://github.com/OneSignalDevelopers/onesignal-supabase-sample-integration-supabase)
- [OneSignal Mobile Quickstart](https://documentation.onesignal.com/docs/mobile-sdk-setup)

View File

@@ -17,6 +17,7 @@ This guide explains how to quickly connect the Postgres database provided by Sup
## Step 1: Get the connection string from Supabase project settings
Go to the settings page from the sidebar and navigate to the **Database** tab. You'll find the database's connection string with a placeholder for the password you provided when you created the project.
![Getting the connection string](/docs/img/guides/integrations/prisma/zntcsh3ic91gf1gy8j73.png)
## Step 2: Testing the connection
@@ -29,22 +30,17 @@ In case you dont have a Prisma project or this is your first time working wit
### Cloning the starter project
Navigate into a directory of your choice and run the following command in your terminal if you're on a Windows machine:
Navigate into a directory of your choice and run the following command in your terminal:
```bash
curl https://pris.ly/quickstart -L -o quickstart-main.tar.gz && tar -zxvf quickstart-main.tar.gz quickstart-main/typescript/starter && move quickstart-main\typescript\starter starter && rmdir /S /Q quickstart-main && del /Q quickstart-main.tar.gz
```
And if you're using macOS or Linux, run the following command:
```bash
curl -L https://pris.ly/quickstart | tar -xz --strip=2 quickstart-main/typescript/starter
curl https://codeload.github.com/prisma/prisma-examples/tar.gz/latest | tar -xz --strip=2 prisma-examples-latest/databases/postgresql-supabase
```
You can now navigate into the directory and install the project's dependencies:
```bash
cd starter && npm install
cd postgresql-supabase
npm install
```
### A look at the project's structure
@@ -52,53 +48,43 @@ cd starter && npm install
This project comes with TypeScript configured and has the following structure.
- A `prisma` directory which contains:
- A `dev.db` file: This is a SQLite database.
- A `schema.prisma` file: Where we define the different database models and relations between them.
- A `.env` file: Contains the `DATABASE_URL` variable, which Prisma will use.
- A `script.ts` file: where we will run some queries using Prisma Client.
This starter also comes with the following packages installed:
- A `seed.ts` file: This is the data used to seed your database.
- A `schema.prisma` file: Where you define the different database models and relations between them.
- A `script.ts` file: where you will run some queries using Prisma Client.
This starter also comes with the following packages installed:
- [`@prisma/client`](https://www.npmjs.com/package/@prisma/client): An auto-generated and type-safe query builder that's _tailored_ to your data.
- [`prisma`](https://www.npmjs.com/package/prisma): Prisma's command-line interface (CLI). It allows you to initialize new project assets, generate Prisma Client, and analyze existing database structures through introspection to automatically create your application models.
> Note: Prisma works with both JavaScript and TypeScript. However, to get the best possible development experience, using TypeScript is highly recommended.
### Configuring the project to use PostgreSQL
> Note: Prisma works with both JavaScript and TypeScript. However, to get the best possible development experience, using TypeScript is highly recommended.
By default, Prisma migrations will try to drop the `postgres` database, which can lead to conflicts with Supabase databases. For this scenario, use [Prisma Shadow Databases](https://www.prisma.io/docs/concepts/components/prisma-migrate/shadow-database#cloud-hosted-shadow-databases-must-be-created-manually).
### Configuring the project
Create a shadow database in your PostgreSQL server within the same Supabase project using the `psql` CLI and the `DATABASE_URL` from the previous steps (or use the local database).
Create a `.env` file at the root of your project:
```bash
psql postgresql://postgres:[YOUR-PASSWORD]@db.[YOUR-PROJECT-REF].supabase.co:5432
touch .env
```
After you connect to your project's PostgreSQL instance, create another database (e.g., `postgres_shadow`):
```bash
postgres=> CREATE DATABASE postgres_shadow;
postgres=> exit
```
Go ahead and delete the `prisma/dev.db` file because we will be switching to PostgreSQL.
In the `.env` file, update `DATABASE_URL` and `SHADOW_DATABASE_URL` to the connection string from **step 1**. The `.env` file should look like:
In the `.env` file, add a `DATABASE_URL` variable and add the connection string from **step 1**. The `.env` file should look like:
```env
# .env
DATABASE_URL="postgres://postgres:[YOUR-PASSWORD]@db.[YOUR-PROJECT-REF].supabase.co:5432/postgres"
SHADOW_DATABASE_URL="postgres://postgres:[YOUR-PASSWORD]@db.[YOUR-PROJECT-REF].supabase.co:5432/postgres_shadow"
```
In the `schema.prisma` file, change the `provider` from "sqlite" to `"postgresql"` and add the `shadowDatabaseUrl` property.
This is what your `schema.prisma` file should look like:
```go
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
shadowDatabaseUrl = env("SHADOW_DATABASE_URL")
provider = "postgresql"
url = env("DATABASE_URL")
}
generator client {
provider = "prisma-client-js"
}
model Post {
id Int @id @default(autoincrement())
title String
@@ -107,6 +93,7 @@ model Post {
author User? @relation(fields: [authorId], references: [id])
authorId Int?
}
model User {
id Int @id @default(autoincrement())
email String @unique
@@ -115,53 +102,81 @@ model User {
}
```
To test that everything works correctly, run the following command to create a migration:
```bash
prisma migrate dev --name init
npx prisma migrate dev --name init
```
You can optionally give your migration a name, depending on the changes you made. Since this is the project's first migration, you're setting the `--name` flag to “init”.
If everything works correctly, you should get the following message in your terminal:
You can optionally give your migration a name, depending on the changes you made. Since this is the project's first migration, you're setting the `--name` flag to “init”. If everything works correctly, you should get the following message in your terminal:
```text
Your database is now in sync with your schema.
:heavy_check_mark: Generated Prisma Client (2.x.x) to ./node_modules/@prisma/client in 111ms
:heavy_check_mark: Generated Prisma Client (4.x.x) to ./node_modules/@prisma/client in 111ms
```
This will create a `prisma/migrations` folder inside your `prisma` directory and synchronize your Prisma schema with your database schema.
> Note: if you want to skip the process of creating a migration history, you can use the [`db push`](https://www.prisma.io/docs/concepts/components/prisma-migrate/db-push) command instead of `migrate dev`.
> If you go to your Supabase project, in the table editor, you should see that two tables have been created, a `Post` and a `User` table.
> ![tables created in the UI](/docs/img/guides/integrations/prisma/7y4qq4wwvfrheti6r09u.png)
> That's it! You have now successfully connected a Prisma project to a PostgreSQL database hosted on Supabase and run your first migration.
> **Note**: If you want to skip the process of creating a migration history, you can use the [`prisma db push`](https://www.prisma.io/docs/concepts/components/prisma-migrate/db-push) command instead of `prisma migrate dev`. However, we recommend using `prisma migrate dev` to evolve your database schema in development.
> If you would like to get a conceptual overview of how Prisma Migrate works and which commands to use in what environment, refer to [this page in the Prisma documentation](https://www.prisma.io/docs/concepts/components/prisma-migrate/mental-model).
If you go to your Supabase project, in the table editor, you should see that three tables have been created: the `Post`, `User`, and `_prisma_migrations` tables. The `_prisma_migrations` table is used to keep track of your migration history.
![tables created in the UI](/docs/img/guides/integrations/prisma/7y4qq4wwvfrheti6r09u.png)
That's it! You have now successfully connected a Prisma project to a PostgreSQL database hosted on Supabase and run your first migration.
## Connection pooling with Supabase
If you're working in a serverless environment (for example Node.js functions hosted on AWS Lambda, Vercel or Netlify Functions), you need to set up [connection pooling](https://www.prisma.io/docs/guides/performance-and-optimization/connection-management#serverless-environments-faas) using a tool like [PgBouncer](https://www.pgbouncer.org/). That's because every function invocation may result in a [new connection to the database](https://www.prisma.io/docs/guides/performance-and-optimization/connection-management#the-serverless-challenge). Supabase [supports connection management using PgBouncer](https://supabase.io/blog/2021/04/02/supabase-pgbouncer#what-is-connection-pooling), which is enabled by default.
Go to the **Database** page from the sidebar in the Supabase dashboard and navigate to **connection pool** settings
If you're working in a serverless environment (for example Node.js functions hosted on AWS Lambda, Vercel or Netlify Functions), you need to set up [connection pooling](https://www.prisma.io/docs/guides/performance-and-optimization/connection-management#serverless-environments-faas) using a tool like [PgBouncer](https://www.pgbouncer.org/). That's because every function invocation may result in a [new connection to the database](https://www.prisma.io/docs/guides/performance-and-optimization/connection-management#the-serverless-challenge).
Supabase [supports connection management using PgBouncer](/docs/guides/database/connecting-to-postgres#connection-pool), which prevents a traffic spike from overwhelming your database.
Go to the **Database** page from the sidebar in the Supabase dashboard and navigate to **Connection pool** settings:
![Connection pool settings](/docs/img/guides/integrations/prisma/w0oowg8vq435ob5c3gf0.png)
When migrating, you need to use the non-pooled connection URL (like the one used in **step 1**). However, when deploying your app, use the pooled connection URL and add the `?pgbouncer=true` flag to the PostgreSQL connection URL. It's also recommended to minimize the number of concurrent connections by setting the `connection_limit` to `1`. The `.env` file should look like:
When updating your database schema, you need to use the non-pooled connection URL (like the one used in **step 1**). You can configure the non-pooled connection string by using the `directUrl` property in the datasource block.
Update your `.env` file with the following changes:
1. Rename the `DATABASE_URL` environment variable to `DIRECT_URL`
1. Create a `DATABASE_URL` environment variable and paste in the new connection string from the dashboard as its value
Append the `?pgbouncer=true` flag to the `DATABASE_URL` variable.
Your `.env` file should resemble the following:
```env
# .env
DATABASE_URL="postgres://postgres:[YOUR-PASSWORD]@db.[YOUR-PROJECT-REF].supabase.co:6543/postgres?pgbouncer=true&connection_limit=1"
SHADOW_DATABASE_URL="postgres://postgres:[YOUR-PASSWORD]@db.[YOUR-PROJECT-REF].supabase.co:5432/postgres_shadow"
# PostgreSQL connection string used for migrations
DIRECT_URL="postgres://postgres:[YOUR-PASSWORD]@db.[YOUR-PROJECT-REF].supabase.co:5432/postgres"
# PostgreSQL connection string with pgBouncer config — used by Prisma Client
DATABASE_URL="postgres://postgres:[YOUR-PASSWORD]@db.[YOUR-PROJECT-REF].supabase.co:6543/postgres?pgbouncer=true"
```
Prisma Migrate uses database transactions to check out the current state of the database and the migrations table. However, the Migration Engine is designed to use a single connection to the database, and does not support connection pooling with PgBouncer. If you attempt to run Prisma Migrate commands in any environment that uses PgBouncer for connection pooling, you might see the following error:
Update your Prisma schema by setting the `directUrl` in the datasource block:
```bash
Error: undefined: Database error
Error querying the database: db error: ERROR: prepared statement “s0” already exists
```
```go
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
directUrl = env("DIRECT_URL")
}
```
This is a known issue and it is being worked on; you can follow the progress on this [GitHub issue](https://github.com/prisma/prisma/issues/6485).
If you want to learn more about Prisma, check out the [docs](https://www.prisma.io/docs). Also in case you have any questions or run into any issue, feel free to start a discussion in the repo's [discussions section](https://github.com/prisma/prisma/discussions).
> **Note**: This feature is available from Prisma version [4.10.0](https://github.com/prisma/prisma/releases/tag/4.10.0) and higher.
If you want to learn more about Prisma, check out the [docs](https://www.prisma.io/docs/reference/api-reference/prisma-schema-reference#fields). Also in case you have any questions or run into any issue, feel free to start a discussion in the repo's [discussions section](https://github.com/prisma/prisma/discussions).
## Troubleshooting
If you run `prisma migrate dev --name init` multiple times, it sometimes asks if you want to recreate the whole schema. If you choose yes, it will delete the `public` schema and recreate it, and the default grants will be missing afterwards. If you run into this problem, add a helper SQL statement to fix the grants:
### Missing grants
If your database schema is out of sync with your migration history, `prisma migrate dev` will detect a migration history conflict or a [schema drift](https://www.prisma.io/docs/guides/database/developing-with-prisma-migrate/troubleshooting-development#schema-drift). When `prisma migrate dev` detects the drift, it might ask you to reset your database schema. If you choose yes, it will delete the `public` schema along with the default grants defined in your database.
If you run into this problem, create a draft migration using `prisma migrate dev --create-only`, and add the following helper SQL:
```sql
grant usage on schema public to postgres, anon, authenticated, service_role;
@@ -175,6 +190,98 @@ alter default privileges in schema public grant all on functions to postgres, anon, authenticated, service_role;
alter default privileges in schema public grant all on sequences to postgres, anon, authenticated, service_role;
```
Run `prisma migrate dev` to apply the draft migration to the database.
### Using Supabase Auth with Prisma
If you would like to use Supabase Auth and Prisma in your application, you will have to enable the `multiSchema` Preview feature flag in the `generator` block of your Prisma schema:
```go
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
  directUrl = env("DIRECT_URL")
}
generator client {
provider = "prisma-client-js"
previewFeatures = ["multiSchema"]
}
```
Next, specify the database schemas you would like to include in your Prisma schema:
```go
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
  directUrl = env("DIRECT_URL")
schemas = ["public", "auth"]
}
generator client {
provider = "prisma-client-js"
previewFeatures = ["multiSchema"]
}
```
You can then specify what schema a model or enum belongs to using the `@@schema` attribute:
```go
model User {
id Int @id
// ...
@@schema("auth") // or @@schema("public")
}
```
To learn more about using Prisma with multiple database schemas, refer to [this page in the Prisma docs](https://www.prisma.io/docs/guides/database/multi-schema#learn-more-about-the-multischema-preview-feature).
### Using PostgreSQL Row Level Security with Prisma
If you would like to use Row Level Security (RLS) with Prisma, check out the [Prisma Client Extension - Row Level Security example](https://github.com/prisma/prisma-client-extensions/tree/main/row-level-security) that provides the primitives you could use to build and extend Prisma Client in PostgreSQL.
Also check out [useSupabaseRowLevelSecurity](https://github.com/dthyresson/prisma-extension-supabase-rls) Prisma Client extension that supports [Supabase RLS](/docs/guides/auth/row-level-security#authrole) and policies written to use [Supabase auth](/docs/guides/auth/overview).
The example and extension use the [Prisma Client extensions](https://www.prisma.io/docs/concepts/components/prisma-client/client-extensions) Preview feature.
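Whichever client-side approach you use, the policies themselves are defined in Postgres. As a minimal sketch, assuming a hypothetical `profiles` table with a `user_id` column, a policy that restricts reads to the row owner could look like this:

```sql
-- Hypothetical table and column names; adjust to your own schema.
alter table profiles enable row level security;

-- Allow authenticated users to read only their own rows,
-- using the auth.uid() helper provided by Supabase.
create policy "Users can read own profile"
on profiles
for select
to authenticated
using (auth.uid() = user_id);
```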
### Enabling PostgreSQL extensions
If you would like to use a PostgreSQL extension with Prisma, enable the `postgresqlExtensions` Preview feature flag in the `generator` block of your Prisma schema:
```go
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
  directUrl = env("DIRECT_URL")
}
generator client {
provider = "prisma-client-js"
previewFeatures = ["postgresqlExtensions"]
}
```
Next, specify the extensions you need in the `datasource` block:
```go
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
  directUrl = env("DIRECT_URL")
extensions = [hstore(schema: "myHstoreSchema"), pg_trgm, postgis(version: "2.1")]
}
generator client {
provider = "prisma-client-js"
previewFeatures = ["postgresqlExtensions"]
}
```
To learn more about using Prisma with PostgreSQL extensions, refer to [this page in the Prisma docs](https://www.prisma.io/docs/concepts/components/prisma-schema/postgresql-extensions).
## Resources
- [Prisma](https://prisma.io) official website.

View File

@@ -155,6 +155,7 @@ As with the final activation stage of the process for setting up a vanity subdom
- Edge functions do not honor the custom domain or the vanity subdomain setting and they still have to be invoked via the `foobarbaz.supabase.co` domain.
- A Supabase project can—at this time—use either a Custom Domain or a Vanity Subdomain, but not both.
- Some authentication flows like Sign-in with Twitter set cookies to track the progress of the flow. Make sure you use only one domain in your frontend application for this reason. Mixing calls to the Supabase domain `foobarbaz.supabase.co` and your custom domain could cause those flows to stop working due to the [Same Origin Policy](https://developer.mozilla.org/en-US/docs/Web/Security/Same-origin_policy) enforced on cookies by the browser.
export const Page = ({ children }) => <Layout meta={meta} children={children} />

View File

@@ -0,0 +1,101 @@
import Layout from '~/layouts/DefaultGuideLayout'
export const meta = {
id: 'database-size',
title: 'Database size',
description: 'Understanding how database size applies to your subscription.',
}
Database size refers to the _monthly average storage usage_, as reported by Postgres. This metric is reported in your project's [billing usage](https://app.supabase.com/project/_/settings/billing/usage) and is updated daily. Throughout this document, we refer to both "database size" and "disk size":
- "Database size" is the total size of used storage from your database.
- "Disk size" describes the size of the underlying available storage.
## Database space management
### Database size
This SQL query will show the current size of your Postgres database:
```sql
select
sum(pg_database_size (pg_database.datname)) / (1024 * 1024) as db_size_mb
from
pg_database;
```
This value is reported in the [database settings page](https://app.supabase.com/project/_/settings/database).
Database space is consumed primarily by your data, indexes, and materialized views. You can reduce your database size by removing any of these and then running a vacuum operation.
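For example, the following sketch first lists the largest relations in the `public` schema and then removes objects you no longer need (the index and materialized view names are hypothetical):

```sql
-- List the largest tables, indexes, and materialized views in the public schema.
select
  c.relname,
  c.relkind,
  pg_size_pretty(pg_total_relation_size(c.oid)) as total_size
from pg_class c
join pg_namespace n on n.oid = c.relnamespace
where n.nspname = 'public'
  and c.relkind in ('r', 'i', 'm')
order by pg_total_relation_size(c.oid) desc
limit 10;

-- Hypothetical objects: drop whatever is no longer needed.
drop index if exists some_unused_index;
drop materialized view if exists some_stale_report;
```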
<Admonition type="note">
Depending on your billing tier, your database can go into read-only mode, which can prevent you from inserting or deleting data. There are instructions for managing read-only mode in the [Disk Management](#disk-management) section.
</Admonition>
### Vacuum operations
Postgres does not immediately reclaim the physical space used by dead tuples (i.e., deleted rows) in the DB. They are marked as "removed" until a [vacuum operation](https://www.postgresql.org/docs/current/routine-vacuuming.html) is executed. As a result, deleting data from your database may not immediately reduce the reported disk usage.
<Admonition type="note">
Vacuum operations can temporarily increase resource utilization, which may adversely impact the observed performance of your project until the maintenance is completed.
</Admonition>
Supabase projects have automatic vacuuming enabled, which ensures that these operations are performed regularly to keep the database healthy and performant.
It is possible to [fine-tune](https://www.percona.com/blog/2018/08/10/tuning-autovacuum-in-postgresql-and-autovacuum-internals/)
the [autovacuum parameters](https://www.enterprisedb.com/blog/postgresql-vacuum-and-analyze-best-practice-tips),
or [manually initiate](https://www.postgresql.org/docs/current/sql-vacuum.html) vacuum operations.
Running a manual vacuum after deleting large amounts of data from your DB could help reduce the database size reported by Postgres.
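As a rough sketch (the table name is hypothetical), a manual vacuum can be run per table:

```sql
-- Marks dead tuples as reusable space and refreshes planner statistics.
-- This does not usually shrink the files on disk.
vacuum (verbose, analyze) my_large_table;

-- VACUUM FULL rewrites the table and returns space to the operating system,
-- but it takes an exclusive lock on the table while it runs.
vacuum full my_large_table;
```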
### Preoccupied Space
New Supabase projects have a database size of ~40-60mb. This space includes pre-installed extensions, schemas, and default Postgres data. Additional database size is used when installing extensions, even if those extensions are inactive.
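To see which extensions are currently installed in your database, you can run:

```sql
-- Lists installed extensions and their versions.
select extname, extversion
from pg_extension
order by extname;
```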
## Disk management
Supabase uses network-attached storage to balance performance with scalability. The behavior of your disk depends on your billing tier.
### Paid Tier Behavior
Pro and Enterprise projects have auto-scaling Disk Storage.
Disk storage expands automatically when the database reaches 90% of the disk size. The disk is expanded to be 50% larger (e.g., 8GB -> 12GB). Auto-scaling can only take place once every 6 hours. If within those 6 hours you reach 95% of the disk space, your project will enter read-only mode.
<Admonition type="caution">
If you intend to import a large amount of data into your database that requires multiple disk expansions, [reach out to our team](https://app.supabase.com/support/new) first. For example, uploading more than 1.5x the current size of your database storage will put your database into [read-only mode](#read-only-mode).
</Admonition>
The maximum Disk Size for Pro Tier is 1024TB. If you need more than this, [contact us](https://app.supabase.com/support/new) to learn more about the Enterprise plan.
### Free Tier Behavior
Free Tier projects enter [read-only](#read-only-mode) mode when you exceed the 500mb limit. Once in read-only mode, you have several options:
- [Upgrade to the Pro or Enterprise tier](https://app.supabase.com/project/_/settings/billing/subscription) to enable auto-scaling and expand beyond the 500mb database size limit.
- [Disable read-only mode](#disabling-read-only-mode) and reduce your database size.
### Read-only mode
In some cases, Supabase may put your database into read-only mode to prevent it from exceeding the billing or disk limitations.
In read-only mode, clients will encounter errors such as `cannot execute INSERT in a read-only transaction`. Regular operation (read-write mode) is automatically re-enabled once usage falls below 95% of the disk size.
### Disabling read-only mode
You can manually override read-only mode to reduce disk size. To do this, run the following in the [SQL Editor](https://app.supabase.com/project/_/sql):
```sql
SET
default_transaction_read_only = 'off';
```
This allows you to delete data from within the session. After deleting data, you should run a vacuum to reclaim as much space as possible.
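As a minimal sketch, assuming a hypothetical `logs` table and retention window, you might remove old rows and then vacuum the table:

```sql
-- Hypothetical table and retention window.
delete from logs
where created_at < now() - interval '90 days';

-- Run as its own statement, outside an explicit transaction block.
vacuum logs;
```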
export const Page = ({ children }) => <Layout meta={meta} children={children} />
export default Page

View File

@@ -1,73 +0,0 @@
import Layout from '~/layouts/DefaultGuideLayout'
export const meta = {
id: 'database-usage',
title: 'Database usage',
description: 'Understanding how database usage applies to your subscription.',
}
Database size refers to the _monthly average storage usage_, as reported by Postgres. This metric is reported in your project's [billing usage](https://app.supabase.com/project/_/settings/billing/usage) and is updated daily.
Database size is the total size of used storage from your database, whereas disk size describes the size of the underlying available storage.
For an instantaneous live view of the DB size, you can execute in Postgres:
```sql
select
sum(pg_database_size(pg_database.datname)) / (1024 * 1024) as db_size_mb
from pg_database;
```
This value is also reported in the [database settings page](https://app.supabase.com/project/_/settings/database).
## Database storage management
Supabase uses network-attached storage to balance performance with scalability.
For Pro and Enterprise projects, disk size expands ~1.5x automatically (e.g., 8GB -> 12GB) when you reach 90% of the disk size.
Disk size expansion can only occur once every six hours.
Pro projects can store up to 1024TB.
All projects enter read-only mode when you reach 95% of the disk size. In read-only mode, clients will encounter errors such as `cannot execute INSERT in a read-only transaction`.
Regular operation (read-write mode) is automatically re-enabled once usage is below 95% of the disk size.
If you need more than 1024TB of disk size or require multiple storage expansions in a short period of time, [contact us](https://app.supabase.com/support/new) to learn more about the Enterprise plan.
### Increasing available disk size
1. [Upgrade to the Pro or Enterprise plan](https://app.supabase.com/project/_/settings/billing/subscription) to increase your quota and expand your disk size automatically.
2. Delete data from your project's database to lower its disk usage. If your database is already in read-only mode, run the following command to change the transaction mode to read-write for your session:
```sql
SET
default_transaction_read_only = 'off';
```
This allows you to delete data from within the session.
### Preoccupied Space
When launching a new project, your database size will be roughly ~40-60mb.
The space is used up by preinstalled extensions, schema/data used by our services that are offered with each project and default Postgres data.
When installing additional extensions, even if you don't actively use them, additional database size is used.
## Vacuum operations
Postgres does not immediately reclaim the physical space used by dead tuples (i.e., deleted rows) in the DB. Instead, they are internally marked as removed until a [vacuum operation](https://www.postgresql.org/docs/current/routine-vacuuming.html) is executed.
As a result, deleting data from your database may not immediately reduce the reported disk usage.
<Admonition type="note">
Vacuum operations can temporarily increase resource utilization, which may adversely impact the observed performance of your project until the maintenance is completed.
</Admonition>
Supabase projects have automatic vacuuming enabled, which ensures that these operations are performed regularly to keep the database healthy and performant.
However, it can be necessary to either [fine-tune](https://www.percona.com/blog/2018/08/10/tuning-autovacuum-in-postgresql-and-autovacuum-internals/)
the [autovacuum parameters](https://www.enterprisedb.com/blog/postgresql-vacuum-and-analyze-best-practice-tips),
or [manually initiate](https://www.postgresql.org/docs/current/sql-vacuum.html) vacuum operations.
For example, running a manual vacuum after deleting large amounts of data from your DB could help reduce the reported disk usage by Postgres.
export const Page = ({ children }) => <Layout meta={meta} children={children} />
export default Page

View File

@@ -66,7 +66,7 @@ Replication for Realtime is disabled for all tables in your new project. On the
### Migrate Storage objects
The new project has the old project's Storage buckets, but the Storage objects need to be migrated manually. Use this script to move storage objects from one project to another. If you have more than 10k objects, we can move the objects for you. Just contact us at [support@supabase.com](mailto:support@supabase.com).
The new project has the old project's Storage buckets, but the Storage objects need to be migrated manually. Use this script to move storage objects from one project to another.
```js
// npm install @supabase/supabase-js@1

View File

@@ -8,6 +8,159 @@ export const meta = {
The Supabase platform automatically optimizes your Postgres database to take advantage of the compute resources of the tier your project is on. However, these optimizations are based on assumptions about the type of workflow the project is being utilized for, and it is likely that better results can be obtained by tuning the database for your particular workflow.
## Examining Query Performance
Unoptimized queries are a major cause of poor database performance. The techniques on this page can help you identify and understand queries that take the most time and resources from your database.
Database performance is a large topic and many factors can contribute. Some of the most common causes of poor performance include:
* An inefficiently designed schema
* Inefficiently designed queries
* A lack of indexes causing slower than required queries over large tables
* Unused indexes causing slow `INSERT`, `UPDATE` and `DELETE` operations
* Not enough compute resources, such as memory, causing your database to go to disk for results too often
* Lock contention from multiple queries operating on highly utilized tables
* A large amount of bloat on your tables causing poor query planning
Thankfully there are solutions to all these issues, which we will cover in the following sections.
### Postgres Cumulative Statistics system
Postgres collects data about its own operations using the [cumulative statistics system](https://www.postgresql.org/docs/current/monitoring-stats.html). In addition to this, every Supabase project has the [pg_stat_statements extension](/docs/guides/database/extensions/pg_stat_statements) enabled by default. This extension records query execution performance details and is the best way to find inefficient queries. This information can be combined with the Postgres query plan analyzer to develop more efficient queries.
Here are some example queries to get you started.
#### Most frequently called queries:
```sql
select
auth.rolname,
statements.query,
statements.calls,
-- -- Postgres 13, 14, 15
statements.total_exec_time + statements.total_plan_time as total_time,
statements.min_exec_time + statements.min_plan_time as min_time,
statements.max_exec_time + statements.max_plan_time as max_time,
statements.mean_exec_time + statements.mean_plan_time as mean_time,
-- -- Postgres <= 12
-- total_time,
-- min_time,
-- max_time,
-- mean_time,
statements.rows / statements.calls as avg_rows
from pg_stat_statements as statements
inner join pg_authid as auth on statements.userid = auth.oid
order by
statements.calls desc
limit
100;
```
This query shows:
- query statistics, ordered by the number of times each query has been executed
- the role that ran the query
- the number of times it has been called
- the average number of rows returned
- the cumulative total time the query has spent running
- the min, max and mean query times.
This provides useful information about the queries you run most frequently. Queries with a high `max_time` or `mean_time` that are called often are good candidates for optimization.
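Once you have optimized a query, it can be useful to reset the collected statistics so you can measure the impact from a clean slate. A minimal sketch, assuming your role has permission to call it:

```sql
-- Clears all statistics collected by pg_stat_statements so far.
select pg_stat_statements_reset();
```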
#### Slowest queries by execution time:
```sql
select
auth.rolname,
statements.query,
statements.calls,
-- -- Postgres 13, 14, 15
statements.total_exec_time + statements.total_plan_time as total_time,
statements.min_exec_time + statements.min_plan_time as min_time,
statements.max_exec_time + statements.max_plan_time as max_time,
statements.mean_exec_time + statements.mean_plan_time as mean_time,
-- -- Postgres <= 12
-- total_time,
-- min_time,
-- max_time,
-- mean_time,
statements.rows / statements.calls as avg_rows
from pg_stat_statements as statements
inner join pg_authid as auth on statements.userid = auth.oid
order by
max_time desc
limit
100;
```
This query shows statistics about queries ordered by their maximum execution time. It is similar to the previous query ordered by calls, but it highlights outliers that may have high execution times. Queries with high maximum or mean execution times are good candidates for optimization.
#### Most time consuming queries:
```sql
select
auth.rolname,
statements.query,
statements.calls,
statements.total_exec_time + statements.total_plan_time as total_time,
to_char(((statements.total_exec_time + statements.total_plan_time)/sum(statements.total_exec_time + statements.total_plan_time) over()) * 100, 'FM90D0') || '%' as prop_total_time
from pg_stat_statements as statements
inner join pg_authid as auth on statements.userid = auth.oid
order by
total_time desc
limit
100;
```
This query will show you statistics about queries ordered by the cumulative total execution time. It shows the total time the query has spent running as well as the proportion of total execution time the query has taken up.
The most time-consuming queries are not necessarily bad; you may have very efficient, frequently run queries that end up accounting for a large share of total execution time. Even so, this view is useful for spotting queries that take up more time than they should.
### Hit rate
Generally for most applications a small percentage of data is accessed more regularly than the rest. To make sure that your regularly accessed data is available, Postgres tracks your data access patterns and keeps this in its [shared_buffers](https://www.postgresql.org/docs/15/runtime-config-resource.html#RUNTIME-CONFIG-RESOURCE-MEMORY) cache.
Applications with lower cache hit rates generally perform more poorly since they have to hit the disk to get results rather than serving them from memory. Very poor hit rates can also cause you to burst past your [Disk I/O limits](https://supabase.com/docs/guides/platform/compute-add-ons#disk-io-bandwidth) causing significant performance issues.
You can view your cache and index hit rate by executing the following query:
```sql
select
'index hit rate' as name,
(sum(idx_blks_hit)) / nullif(sum(idx_blks_hit + idx_blks_read),0) * 100 as ratio
from pg_statio_user_indexes
union all
select
'table hit rate' as name,
sum(heap_blks_hit) / nullif(sum(heap_blks_hit) + sum(heap_blks_read),0) * 100 as ratio
from pg_statio_user_tables;
```
This shows the ratio of data blocks fetched from the Postgres [shared_buffers](https://www.postgresql.org/docs/15/runtime-config-resource.html#RUNTIME-CONFIG-RESOURCE-MEMORY) cache against the data blocks that were read from disk/OS cache.
If either your index or table hit rate is below 99%, this can indicate that your compute plan is too small for your current workload and that you would benefit from more memory. [Upgrading your compute](https://supabase.com/docs/guides/platform/compute-add-ons) is easy and can be done from your [project dashboard](https://app.supabase.com/project/_/settings/billing/subscription).
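You can also inspect the memory-related settings your instance is currently running with (a quick sketch; the values depend on your compute add-on):

```sql
-- Size of the Postgres buffer cache.
show shared_buffers;

-- The planner's estimate of memory available for caching, including the OS cache.
show effective_cache_size;
```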
### Optimizing poor performing queries
Postgres has built in tooling to help you optimize poorly performing queries. You can use the [query plan analyzer](https://www.postgresql.org/docs/current/sql-explain.html) on any expensive queries that you have identified:
```sql
explain analyze <query-statement-here>;
```
Be careful using `explain analyze` with `insert`/`update`/`delete` queries, because the query will actually run, and could have unintended side-effects.
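One way to reduce that risk is to wrap the statement in a transaction and roll it back afterwards; a minimal sketch with a hypothetical `update`:

```sql
begin;

-- The statement is actually executed so that real timings are collected...
explain analyze
update my_table
set touched_at = now()
where id = 1;

-- ...but rolling back discards the changes it made.
rollback;
```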
Using the query plan analyzer to optimize your queries is a large topic, with a number of online resources available:
- [Official docs.](https://www.postgresql.org/docs/current/using-explain.html)
- [The Art of PostgreSQL.](https://theartofpostgresql.com/explain-plan-visualizer/)
- [Postgres Wiki.](https://wiki.postgresql.org/wiki/Using_EXPLAIN)
- [Enterprise DB.](https://www.enterprisedb.com/blog/postgresql-query-optimization-performance-tuning-with-explain-analyze)
You can pair the information available from `pg_stat_statements` with the detailed system metrics available [via your metrics endpoint](../platform/metrics) to better understand the behavior of your DB and the queries you're executing against it.
## Optimizing the number of connections
By default, the number of connections allowed to Postgres and PgBouncer is configured based on the resources available to the database.
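You can check the limit currently in effect, and how many connections are in use, with a couple of quick queries:

```sql
-- Maximum number of direct connections Postgres will accept.
show max_connections;

-- Connections currently open against the database.
select count(*) from pg_stat_activity;
```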
@@ -64,33 +217,6 @@ alter system reset <other-overridden-conf>;
Configuring the number of PgBouncer connections is not supported at this time.
## Examining Query Performance
Every Supabase project has [the pg_stat_statements extension](https://www.postgresql.org/docs/14/pgstatstatements.html) enabled by default. This extension records query execution performance details and is the best way to find queries that take the most time to execute. This information can be combined with the Postgres query plan analyzer to develop more efficient queries.
Obtaining information from pg_stat_statements:
```sql
select mean_exec_time + stddev_exec_time, * from pg_stat_statements order by 1 desc;
```
Using the query plan analyzer on your expensive queries:
```sql
explain analyze <query-statement-here>;
```
Be careful using `explain analyze` with `insert`/`update`/`delete` queries, because the query will actually run, and could have unintended side-effects.
Using the query plan analyzer to optimize your queries is a large topic, with a number of online resources available:
- [Official docs.](https://www.postgresql.org/docs/current/using-explain.html)
- [The Art of PostgreSQL.](https://theartofpostgresql.com/explain-plan-visualizer/)
- [Postgres Wiki.](https://wiki.postgresql.org/wiki/Using_EXPLAIN)
- [Enterprise DB.](https://www.enterprisedb.com/blog/postgresql-query-optimization-performance-tuning-with-explain-analyze)
You can pair the information available from `pg_stat_statements` with the detailed system metrics available [via your metrics endpoint](../platform/metrics) to better understand the behavior of your DB and the queries you're executing against it.
export const Page = ({ children }) => <Layout meta={meta} children={children} />
export default Page

View File

@@ -113,6 +113,7 @@ Build a basic Todo List with Supabase and your favorite frontend framework:
- In-depth self-hosting guide using Nginx [Blog](https://dev.to/chronsyn/self-hosting-with-supabase-1aii)
- Build an Email and Social Auth for Next JS with Supabase, Tailwind CSS 3.0 and TypeScript [Blog](https://creativedesignsguru.com/next-js-supabase-auth/)
- Link Shortener using Supabase and Ory [3-part Blog Series](https://www.ory.sh/tutorial-url-shortener-supabase-ory-integration-backend/)
- Building a CRUD API with FastAPI and Supabase: A Step-by-Step Guide [Blog](https://blog.theinfosecguy.xyz/building-a-crud-api-with-fastapi-and-supabase-a-step-by-step-guide)
### Example apps

View File

@@ -17,6 +17,7 @@ Dave Wilson
Div Arora
Egor Romanov
Fabrizio Fenoglio
Francesco Sansalvadore
Haydn Maley
Hieu Pham
Inian P

Binary file not shown.

Before

Width:  |  Height:  |  Size: 514 KiB

After

Width:  |  Height:  |  Size: 1.0 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 47 KiB

View File

@@ -18,12 +18,6 @@
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/api</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/cli</loc>
<changefreq>weekly</changefreq>
@@ -36,12 +30,6 @@
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/examples</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions</loc>
<changefreq>weekly</changefreq>
@@ -90,12 +78,6 @@
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/reference/index</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/handbook/contributing</loc>
<changefreq>weekly</changefreq>
@@ -108,30 +90,12 @@
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/new/auth</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/reference/index</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/tutorials/nextjs</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/api/generating-types</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/auth-captcha</loc>
<changefreq>weekly</changefreq>
@@ -216,6 +180,12 @@
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/api</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/arrays</loc>
<changefreq>weekly</changefreq>
@@ -318,60 +288,6 @@
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/auth</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/cicd-workflow</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/cors</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/debugging</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/import-maps</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/local-development</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/quickstart</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/schedule-functions</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/secrets</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/integrations/appsmith</loc>
<changefreq>weekly</changefreq>
@@ -534,6 +450,60 @@
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/auth</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/cicd-workflow</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/cors</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/debugging</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/import-maps</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/local-development</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/quickstart</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/schedule-functions</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/secrets</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/platform/access-control</loc>
<changefreq>weekly</changefreq>
@@ -559,7 +529,7 @@
</url>
<url>
<loc>https://supabase.com/docs/guides/platform/database-usage</loc>
<loc>https://supabase.com/docs/guides/platform/database-size</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
@@ -612,6 +582,12 @@
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/platform/ssl-enforcement</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/platform/sso</loc>
<changefreq>weekly</changefreq>
@@ -624,12 +600,24 @@
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/realtime/broadcast</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/realtime/postgres-changes</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/realtime/presence</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/realtime/quickstart</loc>
<changefreq>weekly</changefreq>
@@ -642,6 +630,12 @@
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/resources/examples</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/resources/glossary</loc>
<changefreq>weekly</changefreq>
@@ -715,49 +709,139 @@
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/http</loc>
<loc>https://supabase.com/docs/guides/auth/auth-helpers/auth-ui</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pgcron</loc>
<loc>https://supabase.com/docs/guides/auth/auth-helpers/nextjs-server-components</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pgnet</loc>
<loc>https://supabase.com/docs/guides/auth/auth-helpers/nextjs</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pgtap</loc>
<loc>https://supabase.com/docs/guides/auth/auth-helpers/remix</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/plv8</loc>
<loc>https://supabase.com/docs/guides/auth/auth-helpers/sveltekit</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/postgis</loc>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-apple</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/rum</loc>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-azure</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/uuid-ossp</loc>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-bitbucket</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-discord</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-facebook</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-github</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-gitlab</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-google</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-keycloak</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-linkedin</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-notion</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-slack</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-spotify</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-twitch</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-twitter</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-workos</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-zoom</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/api/generating-types</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
@@ -888,36 +972,6 @@
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/auth-helpers/auth-ui</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/auth-helpers/nextjs-server-components</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/auth-helpers/nextjs</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/auth-helpers/remix</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/auth-helpers/sveltekit</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/phone-login/messagebird</loc>
<changefreq>weekly</changefreq>
@@ -948,6 +1002,12 @@
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/examples/discord-bot</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/examples/github-actions</loc>
<changefreq>weekly</changefreq>
@@ -960,6 +1020,12 @@
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/examples/openai</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/examples/storage-caching</loc>
<changefreq>weekly</changefreq>
@@ -978,6 +1044,186 @@
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/functions/examples/upstash-redis</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/platform/sso/azure</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/platform/sso/gsuite</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/platform/sso/okta</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/http</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/hypopg</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pg-safeupdate</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pg_graphql</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pg_hashids</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pg_jsonschema</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pg_plan_filter</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pg_stat_monitor</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pg_stat_statements</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pgaudit</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pgcron</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pgjwt</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pgnet</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pgrepack</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pgroonga</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pgrouting</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pgsodium</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pgtap</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/pgvector</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/plpgsql_check</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/plv8</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/postgis</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/rum</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/timescaledb</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/uuid-ossp</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/database/extensions/wrappers</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/resources/migrating-to-supabase/firebase-auth</loc>
<changefreq>weekly</changefreq>
@@ -1003,25 +1249,7 @@
</url>
<url>
<loc>https://supabase.com/docs/guides/self-hosting/realtime/config</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/platform/sso/azure</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/platform/sso/gsuite</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/platform/sso/okta</loc>
<loc>https://supabase.com/docs/guides/resources/migrating-to-supabase/render</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
@@ -1032,114 +1260,36 @@
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/resources/postgres/dropping-all-tables-in-schema</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/resources/postgres/first-row-in-group</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/resources/postgres/which-version-of-postgres</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/self-hosting/realtime/config</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/self-hosting/storage/config</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-apple</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-azure</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-bitbucket</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-discord</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-facebook</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-github</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-gitlab</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-google</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-keycloak</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-linkedin</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-notion</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-slack</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-spotify</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-twitch</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-twitter</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-workos</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/guides/auth/social-login/auth-zoom</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/reference/javascript/initializing</loc>
<changefreq>weekly</changefreq>
@@ -1562,6 +1712,12 @@
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/reference/javascript/db-returns</loc>
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
<url>
<loc>https://supabase.com/docs/reference/javascript/functions-invoke</loc>
<changefreq>weekly</changefreq>
@@ -3867,4 +4023,4 @@
<changefreq>weekly</changefreq>
<changefreq>0.5</changefreq>
</url>
</urlset>
</urlset>

View File

@@ -11,7 +11,7 @@ date: '2023-02-07'
toc_depth: 3
---
We all know that Microsoft's real agenda for pouring billions into OpenAI to revive their favorite friend Clippy.
We all know that Microsoft's real agenda for pouring billions into OpenAI is to revive their favorite friend Clippy.
Today, we're doing our part to support the momentum by releasing “Supabase Clippy” for our docs (and we don't expect this name to last long before the lawyers catch on).
![Clippy](/images/blog/docsgpt/clippy.png)
@@ -30,7 +30,7 @@ Our product suite has grown in the past 2 years and our docs have grown as a res
### The “ask” interface
Developers have recently gained an the ability to trust a bot. Where Clippy failed, ChatGPT succeeded.
Developers have recently gained the ability to trust a bot. Where Clippy failed, ChatGPT succeeded.
This is convenient timing for us, since our documentation content is more than the average developer wants to consume in one go. Today we're providing a similar interface to ChatGPT which is trained on our own docs.

View File

@@ -30,7 +30,7 @@ const data: CommunityItem[] = [
},
{
title: 'GitHub',
stat: '44,000+',
stat: '46,000+',
statLabel: 'GitHub stars',
img: 'github.png',
invertImgDarkMode: true,
@@ -42,7 +42,7 @@ const data: CommunityItem[] = [
},
{
title: 'Twitter',
stat: '54,000+',
stat: '60,000+',
statLabel: 'Followers',
img: 'twitter.png',
detail: () => (

View File

@@ -1857,4 +1857,9 @@ module.exports = [
source: '/docs/reference/javascript/v0/rpc',
destination: '/docs/reference/javascript/rpc',
},
{
permanent: true,
source: '/docs/guides/platform/database-usage',
destination: '/docs/guides/platform/database-size',
},
]

View File

@@ -38,7 +38,7 @@ export default function MyApp({ Component, pageProps }: AppProps) {
return (
<>
<Head>
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
</Head>
<Meta />
<DefaultSeo

View File

@@ -966,6 +966,7 @@ const Beta = (props: Props) => {
<Layout hideHeader={true}>
<Head>
<title>{site_title}</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0"></meta>
<meta name="og:title" property="og:title" content={site_title} />
<meta name="twitter:site" content={site_title} />
<meta name="twitter:text:title" content={site_title} />

View File

@@ -83,34 +83,6 @@ export default function IndexPage() {
additional: '',
cta: 'Get Started',
},
// {
// name: 'Team',
// nameBadge: 'New',
// costUnit: 'per month per org',
// href: 'https://app.supabase.com/new/new-project',
// from: true,
// priceLabel: 'Starting from',
// priceMonthly: 599,
// description: 'For scaling teams with permissions & access controls',
// warning: '+ any additional usage',
// features: [
// 'Usage-based pricing',
// 'Organization member roles (ABAC)',
// 'SOC2',
// 'Priority email support & SLAs',
// '14 day backups',
// '100,000 monthly active users included',
// 'Standardized Security Questionnaire',
// 'SSO for Supabase Dashboard',
// '1 XS compute instance',
// '28 day log retention',
// ],
// scale: '',
// shutdown: '',
// preface: 'The following features will apply to all projects within the organization:',
// additional: '',
// cta: 'Get Started',
// },
{
name: 'Enterprise',
href: 'https://forms.supabase.com/enterprise',
@@ -127,8 +99,8 @@ export default function IndexPage() {
`Private Slack channel`,
`Uptime SLA`,
],
priceLabel: 'Custom quotas',
priceMonthly: 'Exclusive pricing',
priceLabel: '',
priceMonthly: 'Contact us',
preface: 'These apply to all projects within the organization:',
scale: '',
shutdown: '',
@@ -164,7 +136,7 @@ export default function IndexPage() {
name: 'Point in Time Recovery',
heroImg: 'addons-pitr-hero',
icon: 'pitr-upgrade',
price: 'Starts from $5',
price: 'Starts from $100',
description: 'Roll back to any specific point in time and ensure that data is not lost.',
leftCtaText: 'Documentation',
leftCtaLink: 'https://supabase.com/docs/guides/platform/backups',
@@ -319,7 +291,7 @@ export default function IndexPage() {
</p>
<p
className={`mt-2 gradient-text-scale-500 dark:gradient-text-scale-100 pb-1 ${
tier.name !== 'Enterprise' ? 'text-5xl' : 'text-4xl max-w-[75%]'
tier.name !== 'Enterprise' ? 'text-5xl' : 'text-4xl'
}`}
>
{tier.name !== 'Enterprise' && '$'}
@@ -750,13 +722,15 @@ export default function IndexPage() {
<>
<span
className={`text-scale-1200 ${
tier.name !== 'Enterprise' ? 'text-5xl' : 'text-4xl max-w-[75%]'
tier.name !== 'Enterprise' ? 'text-5xl' : 'text-4xl'
}`}
>
{tier.name !== 'Enterprise' && '$'}
{tier.priceMonthly}
</span>
<p className="p text-xs mt-1">per project per month</p>
{tier.name !== 'Enterprise' && (
<p className="p text-xs mt-1">per project per month</p>
)}
</>
{tier.warning && (
@@ -861,66 +835,12 @@ export default function IndexPage() {
<div className="mx-auto max-w-5xl gap-y-10 gap-x-10 lg:grid-cols-2">
<div className="sm:py-18 mx-auto px-6 py-16 md:py-24 lg:px-16 lg:py-24 xl:px-20">
<h2 className="h3 text-center">Frequently asked questions</h2>
<p className="p text-center">
Can&apos;t find the answer to your question, ask someone in the community either on
our Discord or GitHub.
</p>
<div className="p text-center mt-6">
<Link href="https://discord.supabase.com">
<a>
<Button
type="default"
className="mr-2"
size="small"
icon={
<svg
className="h-6 w-6"
fill="currentColor"
viewBox="0 0 71 55"
aria-hidden="true"
>
<path
fillRule="evenodd"
d="M60.1045 4.8978C55.5792 2.8214 50.7265 1.2916 45.6527 0.41542C45.5603 0.39851 45.468 0.440769 45.4204 0.525289C44.7963 1.6353 44.105 3.0834 43.6209 4.2216C38.1637 3.4046 32.7345 3.4046 27.3892 4.2216C26.905 3.0581 26.1886 1.6353 25.5617 0.525289C25.5141 0.443589 25.4218 0.40133 25.3294 0.41542C20.2584 1.2888 15.4057 2.8186 10.8776 4.8978C10.8384 4.9147 10.8048 4.9429 10.7825 4.9795C1.57795 18.7309 -0.943561 32.1443 0.293408 45.3914C0.299005 45.4562 0.335386 45.5182 0.385761 45.5576C6.45866 50.0174 12.3413 52.7249 18.1147 54.5195C18.2071 54.5477 18.305 54.5139 18.3638 54.4378C19.7295 52.5728 20.9469 50.6063 21.9907 48.5383C22.0523 48.4172 21.9935 48.2735 21.8676 48.2256C19.9366 47.4931 18.0979 46.6 16.3292 45.5858C16.1893 45.5041 16.1781 45.304 16.3068 45.2082C16.679 44.9293 17.0513 44.6391 17.4067 44.3461C17.471 44.2926 17.5606 44.2813 17.6362 44.3151C29.2558 49.6202 41.8354 49.6202 53.3179 44.3151C53.3935 44.2785 53.4831 44.2898 53.5502 44.3433C53.9057 44.6363 54.2779 44.9293 54.6529 45.2082C54.7816 45.304 54.7732 45.5041 54.6333 45.5858C52.8646 46.6197 51.0259 47.4931 49.0921 48.2228C48.9662 48.2707 48.9102 48.4172 48.9718 48.5383C50.038 50.6034 51.2554 52.5699 52.5959 54.435C52.6519 54.5139 52.7526 54.5477 52.845 54.5195C58.6464 52.7249 64.529 50.0174 70.6019 45.5576C70.6551 45.5182 70.6887 45.459 70.6943 45.3942C72.1747 30.0791 68.2147 16.7757 60.1968 4.9823C60.1772 4.9429 60.1437 4.9147 60.1045 4.8978ZM23.7259 37.3253C20.2276 37.3253 17.3451 34.1136 17.3451 30.1693C17.3451 26.225 20.1717 23.0133 23.7259 23.0133C27.308 23.0133 30.1626 26.2532 30.1066 30.1693C30.1066 34.1136 27.28 37.3253 23.7259 37.3253ZM47.3178 37.3253C43.8196 37.3253 40.9371 34.1136 40.9371 30.1693C40.9371 26.225 43.7636 23.0133 47.3178 23.0133C50.9 23.0133 53.7545 26.2532 53.6986 30.1693C53.6986 34.1136 50.9 37.3253 47.3178 37.3253Z"
clipRule="evenodd"
/>
</svg>
}
>
Discord
</Button>
</a>
</Link>
<Link href="https://github.com/supabase/supabase/discussions">
<a>
<Button
type="default"
size="small"
icon={
<svg
className="h-6 w-6"
fill="currentColor"
viewBox="0 0 24 24"
aria-hidden="true"
>
<path
fillRule="evenodd"
d="M12 2C6.477 2 2 6.484 2 12.017c0 4.425 2.865 8.18 6.839 9.504.5.092.682-.217.682-.483 0-.237-.008-.868-.013-1.703-2.782.605-3.369-1.343-3.369-1.343-.454-1.158-1.11-1.466-1.11-1.466-.908-.62.069-.608.069-.608 1.003.07 1.531 1.032 1.531 1.032.892 1.53 2.341 1.088 2.91.832.092-.647.35-1.088.636-1.338-2.22-.253-4.555-1.113-4.555-4.951 0-1.093.39-1.988 1.029-2.688-.103-.253-.446-1.272.098-2.65 0 0 .84-.27 2.75 1.026A9.564 9.564 0 0112 6.844c.85.004 1.705.115 2.504.337 1.909-1.296 2.747-1.027 2.747-1.027.546 1.379.202 2.398.1 2.651.64.7 1.028 1.595 1.028 2.688 0 3.848-2.339 4.695-4.566 4.943.359.309.678.92.678 1.855 0 1.338-.012 2.419-.012 2.747 0 .268.18.58.688.482A10.019 10.019 0 0022 12.017C22 6.484 17.522 2 12 2z"
clipRule="evenodd"
/>
</svg>
}
>
GitHub
</Button>
</a>
</Link>
</div>
<div className="mt-16">
{/* @ts-ignore */}
<div className="my-16">
<Accordion
type="default"
openBehaviour="multiple"
chevronAlign="right"
justified
size="medium"
className="text-scale-900 dark:text-white"
>
@@ -940,6 +860,28 @@ export default function IndexPage() {
})}
</Accordion>
</div>
<p className="p text-center">
Can&apos;t find the answer to your question? You can{' '}
<a
target="_blank"
href="https://app.supabase.com/support/new"
className="transition text-brand-900 hover:text-brand-1000"
>
open a support ticket
</a>{' '}
and our team of experts will be able to help.
</p>
<p className="p text-center">
For enterprise enquiries,{' '}
<a
target="_blank"
href="https://app.supabase.com/support/new"
className="transition text-brand-900 hover:text-brand-1000"
>
you can contact the team here
</a>
.
</p>
</div>
</div>
</div>

View File

@@ -87,8 +87,8 @@ An Information request about Supabase or feature request.
| Severity Level | Standard | Priority | Priority Plus |
| -------------- | ------------------------------------- | ------------------------------------- | -------------------------------------- |
| 1. Urgent | 1 business hour<br />24/7 × 365 | 1 business hour<br />24/7 × 365 | 1 business hour<br />24/7 × 365 |
| 2. High | 4 business hours<br />Monday - Friday | 2 business hours<br />Monday - Friday | 2 business hours<br />24/7 × 365 |
| 1. Urgent | 1 hour<br />24/7 × 365 | 1 hour<br />24/7 × 365 | 1 hour<br />24/7 × 365 |
| 2. High | 4 business hours<br />Monday - Friday | 2 business hours<br />Monday - Friday | 2 hours<br />24/7 × 365 |
| 3. Normal | 1 business day<br />Monday - Friday | 1 business day<br />Monday - Friday | 12 business hours<br />Monday - Friday |
| 4. Low | 2 business days<br />Monday - Friday | 2 business days<br />Monday - Friday | 1 business day<br />Monday - Friday |

View File

@@ -6,23 +6,17 @@
## Example Functions
The function examples are located in [`./supabase/functions`](./supabase/functions):
- [`browser-with-cors`](./supabase/functions/browser-with-cors/index.ts): Handle CORS headers for function invocations from browser environments.
- [`select-from-table-with-auth-rls`](./supabase/functions/select-from-table-with-auth-rls/index.ts): Retrieve data for an authenticated user via RLS.
- [`send-email-smtp`](./supabase/functions/send-email-smtp/index.ts): Send an email using SMTP credentials.
- [`stripe-webhooks`](./supabase/functions/stripe-webhooks/index.ts): Handle Stripe Webhooks.
- [`telegram-bot`](./supabase/functions/telegram-bot/index.ts): Webhook handler for Telegram bots using [grammY](https://grammy.dev/).
We're constantly adding new Function Examples; [check our docs](https://supabase.com/docs/guides/functions#examples) for a complete list!
## Develop locally
- Run `supabase start` (make sure your Docker daemon is running).
- Run `mv ./supabase/.env.local.example ./supabase/.env.local` to rename the local `.env` file.
- Set the required variables to run the desired edge functions in the `.env.local` file.
- Run `supabase functions serve your-function-name --env-file ./supabase/.env.local`
- Run `cp ./supabase/.env.local.example ./supabase/.env.local` to create your local `.env` file.
- Set the required variables for the corresponding edge functions in the `.env.local` file.
- Run `supabase functions serve --env-file ./supabase/.env.local --no-verify-jwt`
- Run the cURL command in the example function, use the [invoke method](https://supabase.com/docs/reference/javascript/invoke) on the Supabase client, or use the test client [app](./app/).
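Putting the updated steps together, the local workflow is roughly the following (a sketch that assumes Docker is running and your functions live under `./supabase/functions/`):

```bash
# Start the local Supabase stack (requires a running Docker daemon)
supabase start

# Create your local env file, then fill in the required variables
cp ./supabase/.env.local.example ./supabase/.env.local

# Serve all functions locally without JWT verification
supabase functions serve --env-file ./supabase/.env.local --no-verify-jwt
```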
## Test
## Test Client
This example includes a create-react-app in the [`./app/`](./app/) directory, which you can use as a sort of Postman to make test requests both locally and against your deployed functions.
@@ -45,10 +39,13 @@ Note: when testing locally, the select dropdown doesn't have any effect, and inv
- Link your project
- Within your project root run `supabase link --project-ref your-project-ref`
- Set up your secrets
- Run `supabase secrets set --env-file ./supabase/.env.local` to set the environment variables.
(This assumes your local and production secrets are the same. The recommended approach is to keep production secrets in a separate `.env` file and use that file to set the environment variables when deploying.)
- You can run `supabase secrets list` to check that it worked and also to see what other env vars are set by default.
- Deploy the function
- Within your project root run `supabase functions deploy your-function-name`
- In your [`./app/.env`](./app/.env) file, remove the `SUPA_FUNCTION_LOCALHOST` variable and restart your Expo app.
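As a rough end-to-end sketch, deploying one of the examples (with `your-function-name` and `your-project-ref` as placeholders) looks like this:

```bash
# Link the local project to your hosted Supabase project
supabase link --project-ref your-project-ref

# Push the function secrets (ideally from a separate production .env file)
supabase secrets set --env-file ./supabase/.env.local
supabase secrets list

# Deploy a single function
supabase functions deploy your-function-name
```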

View File

@@ -18,6 +18,7 @@
"node_modules/": true,
"app/": true,
"supabase/functions/": true
}
},
"deno.importMap": "./supabase/functions/import_map.json"
}
}

View File

@@ -1,3 +1,21 @@
# cloudflare-turnstile
CLOUDFLARE_TURNSTILE_SECRET_KEY=your_secret_key
# discord-bot
DISCORD_PUBLIC_KEY=
# location
IPINFO_TOKEN="your https://ipinfo.io token"
# openai
OPENAI_API_KEY="<YOUR API KEY HERE>"
# postgres-on-the-edge
DATABASE_URL=
# puppeteer
PUPPETEER_BROWSERLESS_IO_TOKEN=
# send-email-smtp
SMTP_HOSTNAME="your.hostname.com"
SMTP_PORT="2587"
@@ -5,13 +23,14 @@ SMTP_USERNAME="your_username"
SMTP_PASSWORD="your_password"
SMTP_FROM="no-reply@example.com"
# telegram-bot
TELEGRAM_BOT_TOKEN="get it from https://t.me/BotFather"
FUNCTION_SECRET="random secret"
# stripe-webhooks
STRIPE_API_KEY="<YOUR API KEY HERE>"
STRIPE_WEBHOOK_SIGNING_SECRET="<YOUR WEBHOOK SIGNING HERE>"
# openai
OPENAI_API_KEY="<YOUR API KEY HERE>"
# telegram-bot
TELEGRAM_BOT_TOKEN="get it from https://t.me/BotFather"
FUNCTION_SECRET="random secret"
# upstash-redis-counter
UPSTASH_REDIS_REST_URL=
UPSTASH_REDIS_REST_TOKEN=

View File

@@ -1,5 +1,5 @@
{
"deno.enable": true,
"deno.lint": true,
"deno.unstable": true
"deno.unstable": true,
"deno.importMap": "./import_map.json"
}

View File

@@ -2,7 +2,7 @@
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
import { serve } from 'https://deno.land/std@0.131.0/http/server.ts'
import { serve } from 'std/server'
import { corsHeaders } from '../_shared/cors.ts'
console.log(`Function "browser-with-cors" up and running!`)

View File

@@ -2,6 +2,10 @@
Turnstile is Cloudflare's CAPTCHA alternative: https://developers.cloudflare.com/turnstile/get-started/
## Watch the Video Tutorial
[![video tutorial](https://img.youtube.com/vi/OwW0znboh60/0.jpg)](https://www.youtube.com/watch?v=OwW0znboh60)
## Setup
- Follow these steps to set up a new site: https://developers.cloudflare.com/turnstile/get-started/

View File

@@ -2,7 +2,7 @@
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
import { serve } from 'https://deno.land/std@0.131.0/http/server.ts'
import { serve } from 'std/server'
import { corsHeaders } from '../_shared/cors.ts'
console.log(`Function "cloudflare-turnstile" up and running!`)
@@ -55,7 +55,7 @@ serve(async (req) => {
})
// To invoke:
// curl -i --location --request POST 'http://localhost:54321/functions/v1/' \
// curl -i --location --request POST 'http://localhost:54321/functions/v1/cloudflare-turnstile' \
// --header 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24iLCJleHAiOjE5ODM4MTI5OTZ9.CRXP1A7WOeoJeXxjNni43kdQwgnWNReilDMblYTn_I0' \
// --header 'Content-Type: application/json' \
// --data '{"token":"cf-turnstile-response"}'

View File

@@ -5,6 +5,10 @@
- https://deno.com/deploy/docs/tutorial-discord-slash
- https://discord.com/developers/docs/interactions/application-commands#slash-commands
## Watch the Video Tutorial
[![video tutorial](https://img.youtube.com/vi/J24Bvo_m7DM/0.jpg)](https://www.youtube.com/watch?v=J24Bvo_m7DM)
## Create an application on Discord Developer Portal
1. Go to [https://discord.com/developers/applications](https://discord.com/developers/applications) (login using your discord account if required).
@@ -59,6 +63,6 @@ Open Discord, type `/Promise` and press **Enter**.
## Run locally
```bash
supabase functions serve discord-bot --no-verify-jwt --env-file ./supabase/functions/discord-bot/.env
supabase functions serve discord-bot --no-verify-jwt --env-file ./supabase/.env.local
ngrok http 54321
```

View File

@@ -5,10 +5,10 @@
// Sift is a small routing library that abstracts away details like starting a
// listener on a port, and provides a simple function (serve) that has an API
// to invoke a function for a specific path.
import { json, serve, validateRequest } from 'https://deno.land/x/sift@0.6.0/mod.ts'
import { json, serve, validateRequest } from 'sift'
// TweetNaCl is a cryptography library that we use to verify requests
// from Discord.
import nacl from 'https://cdn.skypack.dev/tweetnacl@v1.0.3?dts'
import nacl from 'nacl'
enum DiscordCommandType {
Ping = 1,

View File

@@ -1,8 +1,8 @@
// This example shows how to use Edge Functions to read incoming multipart/form-data request,
// and write files to Supabase Storage and other fields to a database table.
import { Application, Router } from 'https://deno.land/x/oak/mod.ts'
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2'
import { Application } from 'oak'
import { createClient } from '@supabase/supabase-js'
const MB = 1024 * 1024
@@ -25,9 +25,9 @@ app.use(async (ctx) => {
const supabaseClient = createClient(
// Supabase API URL - env var exported by default.
Deno.env.get('SUPABASE_URL') ?? '',
Deno.env.get('SUPABASE_URL')!,
// Supabase API ANON KEY - env var exported by default.
Deno.env.get('SUPABASE_ANON_KEY') ?? ''
Deno.env.get('SUPABASE_ANON_KEY')!
)
//upload image to Storage

View File

@@ -15,9 +15,11 @@ Good luck trying to GET a T-Shirt!
### Serve this function locally
```bash
supabase functions serve --no-verify-jwt get-tshirt-competition
supabase functions serve --no-verify-jwt
```
Navigate to http://localhost:54321/functions/v1/get-tshirt-competition?email=testr@test.de&twitter=thorwebdev&size=2XL&answer=20
### Deploy this function
```bash

View File

@@ -2,8 +2,8 @@
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
import { serve } from 'https://deno.land/std@0.131.0/http/server.ts'
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2'
import { serve } from 'std/server'
import { createClient } from '@supabase/supabase-js'
import { corsHeaders } from '../_shared/cors.ts'
console.log(`Function "get-tshirt-competition" up and running!`)

View File

@@ -2,23 +2,18 @@
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
import { serve } from "https://deno.land/std@0.131.0/http/server.ts";
import { serve } from 'std/server'
console.log("Hello from Functions!");
console.log('Hello from Functions!')
serve(async (req) => {
const { name } = await req.json();
serve((_req) => {
const data = {
message: `I was deployed via GitHub Actions!`,
};
}
return new Response(JSON.stringify(data), {
headers: { "Content-Type": "application/json" },
});
});
headers: { 'Content-Type': 'application/json' },
})
})
// To invoke:
// curl -i --location --request POST 'http://localhost:54321/functions/v1/' \
// --header 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24ifQ.625_WdcF3KHqz5amU0x2X5WWHP-OEs_4qj0ssLNHzTs' \
// --header 'Content-Type: application/json' \
// --data '{"name":"Functions"}'
// To invoke: http://localhost:54321/functions/v1/github-action-deploy

View File

@@ -0,0 +1,20 @@
{
"imports": {
"denomailer": "https://deno.land/x/denomailer@0.12.0/mod.ts",
"nacl": "https://cdn.skypack.dev/tweetnacl@v1.0.3?dts",
"oak": "https://deno.land/x/oak@v11.1.0/mod.ts",
"og_edge": "https://deno.land/x/og_edge@0.0.4/mod.ts",
"openai": "https://esm.sh/openai@3.1.0",
"grammy": "https://deno.land/x/grammy@v1.8.3/mod.ts",
"react": "https://esm.sh/react@18.2.0",
"std/server": "https://deno.land/std@0.177.0/http/server.ts",
"stripe": "https://esm.sh/stripe@11.1.0?target=deno",
"sift": "https://deno.land/x/sift@0.6.0/mod.ts",
"@supabase/supabase-js": "https://esm.sh/@supabase/supabase-js@2.7.1",
"postgres": "https://deno.land/x/postgres@v0.14.2/mod.ts",
"puppeteer": "https://deno.land/x/puppeteer@16.2.0/mod.ts",
"React": "https://esm.sh/react@18.2.0?deno-std=0.177.0",
"upstash_redis": "https://deno.land/x/upstash_redis@v1.19.3/mod.ts",
"xhr_polyfill": "https://deno.land/x/xhr@0.3.0/mod.ts"
}
}

View File

@@ -4,6 +4,14 @@ This example shows how you can get user location based on the IP provided in X-F
You will need to sign up for an account at https://ipinfo.io and provide your token as the `IPINFO_TOKEN` environment variable ([learn how to set environment variables for your functions](https://supabase.com/docs/guides/functions#secrets-and-environment-variables)).
## Develop locally
```bash
supabase functions serve --env-file ./supabase/.env.local --no-verify-jwt
```
Navigate to http://localhost:54321/functions/v1/location
## Deploy
```bash

View File

@@ -2,20 +2,29 @@
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
import { serve } from "https://deno.land/std@0.131.0/http/server.ts"
import { serve } from 'std/server'
function ips(req: Request) {
return req.headers.get("x-forwarded-for")?.split(/\s*,\s*/);
return req.headers.get('x-forwarded-for')?.split(/\s*,\s*/)
}
serve(async (req) => {
const clientIps = ips(req) || [''];
const res = await fetch(`https://ipinfo.io/${clientIps[0]}?token=${Deno.env.get('IPINFO_TOKEN')}`, {
headers: { 'Content-Type': 'application/json'}});
const { city, country } = await res.json();
return new Response(
JSON.stringify(`You're accessing from ${city}, ${country}`),
{ headers: { "Content-Type": "application/json" } },
const clientIps = ips(req) || ['']
const res = await fetch(
`https://ipinfo.io/${clientIps[0]}?token=${Deno.env.get('IPINFO_TOKEN')}`,
{
headers: { 'Content-Type': 'application/json' },
}
)
if (res.ok) {
const { city, country } = await res.json()
return new Response(JSON.stringify(`You're accessing from ${city}, ${country}`), {
headers: { 'Content-Type': 'application/json' },
})
} else {
return new Response(await res.text(), {
status: 400,
})
}
})

View File

@@ -5,7 +5,7 @@ This example shows how you can write functions using Oak server middleware (http
## Run locally
```bash
supabase functions serve oak-server
supabase functions serve --no-verify-jwt
```
Use cURL or Postman to make a POST request to http://localhost:54321/functions/v1/oak-server.
@@ -20,5 +20,5 @@ Use cURL or Postman to make a POST request to http://localhost:54321/functions/v
## Deploy
```bash
supabase functions deploy oak-server
supabase functions deploy oak-server --no-verify-jwt
```

View File

@@ -1,4 +1,4 @@
import { Application, Router } from 'https://deno.land/x/oak/mod.ts'
import { Application, Router } from 'oak'
const router = new Router()
router

View File

@@ -9,7 +9,7 @@ Generate Open Graph images with Deno and Supabase Edge Functions and cache the g
## Run locally
```bash
supabase functions serve og-image-with-storage-cdn --no-verify-jwt
supabase functions serve --no-verify-jwt
```
Navigate to http://localhost:54321/functions/v1/og-image-with-storage-cdn?ticketNumber=3524&username=thorwebdev&name=Thor%20%E9%9B%B7%E7%A5%9E%20Schaeff

View File

@@ -1,6 +1,6 @@
import React from 'https://esm.sh/react@18.2.0?deno-std=0.140.0'
import { ImageResponse } from 'https://deno.land/x/og_edge@0.0.4/mod.ts'
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2'
import React from 'React'
import { ImageResponse } from 'og_edge'
import { createClient } from '@supabase/supabase-js'
import { corsHeaders } from '../_shared/cors.ts'
const STORAGE_URL = 'https://obuldanrptloktxcffvn.supabase.co/storage/v1/object/public/images/lw6'
@@ -213,7 +213,7 @@ export async function handler(req: Request) {
})
if (error) throw error
return generatedImage
return await fetch(`${STORAGE_URL}/tickets/${username}.png?v=3`)
} catch (error) {
return new Response(JSON.stringify({ error: error.message }), {
headers: { ...corsHeaders, 'Content-Type': 'application/json' },

View File

@@ -2,7 +2,7 @@
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
import { serve } from 'https://deno.land/std@0.140.0/http/server.ts'
import { serve } from 'std/server'
import { handler } from './handler.tsx'

View File

@@ -9,7 +9,7 @@ cp supabase/.env.local.example supabase/.env.local
## Run locally
```bash
supabase functions serve --no-verify-jwt --env-file ./supabase/.env.local openai
supabase functions serve --env-file ./supabase/.env.local --no-verify-jwt
```
Use cURL or Postman to make a POST request to http://localhost:54321/functions/v1/openai.

View File

@@ -1,6 +1,6 @@
import 'https://deno.land/x/xhr@0.3.0/mod.ts'
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts'
import { CreateCompletionRequest } from 'https://esm.sh/openai@3.1.0'
import 'xhr_polyfill'
import { serve } from 'std/server'
import { CreateCompletionRequest } from 'openai'
serve(async (req) => {
const { query } = await req.json()
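The hunk above only shows the changed imports; for context, here is a minimal sketch of how the rest of this handler might call OpenAI's completions REST endpoint (the model name and parameters are illustrative assumptions, not taken from the diff):

```ts
import 'xhr_polyfill'
import { serve } from 'std/server'
import { CreateCompletionRequest } from 'openai'

serve(async (req) => {
  const { query } = await req.json()

  // Illustrative model and parameters; adjust to your use case.
  const completionConfig: CreateCompletionRequest = {
    model: 'text-davinci-003',
    prompt: query,
    max_tokens: 256,
    temperature: 0,
  }

  // Call the OpenAI completions REST endpoint with the key stored as a function secret.
  const res = await fetch('https://api.openai.com/v1/completions', {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${Deno.env.get('OPENAI_API_KEY')}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify(completionConfig),
  })

  return new Response(JSON.stringify(await res.json()), {
    headers: { 'Content-Type': 'application/json' },
  })
})
```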

View File

@@ -8,7 +8,7 @@ Generate Open Graph images with Deno and Supabase Edge Functions, no framework n
## Run locally
```bash
supabase functions serve opengraph --no-verify-jwt
supabase functions serve --no-verify-jwt
```
Navigate to http://localhost:54321/functions/v1/opengraph

View File

@@ -1,5 +1,5 @@
import React from 'https://esm.sh/react@18.2.0?deno-std=0.140.0'
import { ImageResponse } from 'https://deno.land/x/og_edge@0.0.4/mod.ts'
import React from 'React'
import { ImageResponse } from 'og_edge'
export function handler(req: Request) {
return new ImageResponse(

View File

@@ -2,7 +2,7 @@
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
import { serve } from 'https://deno.land/std@0.140.0/http/server.ts'
import { serve } from 'std/server'
import { handler } from './handler.tsx'

View File

@@ -1,5 +1,5 @@
import * as postgres from 'https://deno.land/x/postgres@v0.14.2/mod.ts'
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts'
import * as postgres from 'postgres'
import { serve } from 'std/server'
// Get the connection string from the environment variable "DATABASE_URL"
const databaseUrl = Deno.env.get('DATABASE_URL')!

View File

@@ -4,6 +4,14 @@ This example shows how you can use Puppeteer and a headless-browser to generate
Since Edge Functions cannot run a headless browser instance due to resource constraints, you will need to use a hosted browser service such as https://browserless.io.
## Develop locally
```bash
supabase functions serve --env-file ./supabase/.env.local --no-verify-jwt
```
Navigate to http://localhost:54321/functions/v1/puppeteer
## Deploy
```bash

View File

@@ -1,11 +1,14 @@
import { serve } from 'https://deno.land/std@0.131.0/http/server.ts'
import puppeteer from 'https://deno.land/x/puppeteer@16.2.0/mod.ts'
import { serve } from 'std/server'
import puppeteer from 'puppeteer'
serve(async (req) => {
try {
console.log(`wss://chrome.browserless.io?token=${Deno.env.get('PUPPETEER_BROWSERLESS_IO_KEY')}`)
// Visit browserless.io to get your free API token
const browser = await puppeteer.connect({
browserWSEndpoint: 'wss://chrome.browserless.io?token=YOUR_API_TOKEN',
browserWSEndpoint: `wss://chrome.browserless.io?token=${Deno.env.get(
'PUPPETEER_BROWSERLESS_IO_KEY'
)}`,
})
const page = await browser.newPage()
@@ -17,7 +20,7 @@ serve(async (req) => {
return new Response(screenshot, { headers: { 'Content-Type': 'image/png' } })
} catch (e) {
console.error(e)
return new Response(JSON.stringify(`Error occurred when generating the screenshot`), {
return new Response(JSON.stringify({ error: e.message }), {
headers: { 'Content-Type': 'application/json' },
status: 500,
})

View File

@@ -2,8 +2,8 @@
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
import { serve } from 'https://deno.land/std@0.131.0/http/server.ts'
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2'
import { serve } from 'std/server'
import { createClient } from '@supabase/supabase-js'
const corsHeaders = {
'Access-Control-Allow-Origin': '*',

View File

@@ -2,8 +2,8 @@
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
import { serve } from 'https://deno.land/std@0.131.0/http/server.ts'
import { createClient, SupabaseClient } from 'https://esm.sh/@supabase/supabase-js@2'
import { serve } from 'std/server'
import { createClient, SupabaseClient } from '@supabase/supabase-js'
const corsHeaders = {
'Access-Control-Allow-Origin': '*',

View File

@@ -2,8 +2,8 @@
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
import { serve } from 'https://deno.land/std@0.131.0/http/server.ts'
import { createClient } from 'https://esm.sh/@supabase/supabase-js@2'
import { serve } from 'std/server'
import { createClient } from '@supabase/supabase-js'
import { corsHeaders } from '../_shared/cors.ts'
console.log(`Function "select-from-table-with-auth-rls" up and running!`)

View File

@@ -13,4 +13,4 @@ Note: `SMTP_PORT` must be a port other than `25`, `465`, and `587` as Deno Deplo
## Test locally
- `cp ./supabase/.env.local.example ./supabase/.env.local`
- `supabase functions serve send-email-smtp --env-file ./supabase/.env.local`
- `supabase functions serve --env-file ./supabase/.env.local`

View File

@@ -2,14 +2,14 @@
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
import { serve } from 'https://deno.land/std@0.131.0/http/server.ts'
import { SmtpClient } from 'https://deno.land/x/denomailer@0.12.0/mod.ts'
import { serve } from 'std/server'
import { SmtpClient } from 'denomailer'
const smtp = new SmtpClient()
console.log(`Function "send-email-smtp" up and running!`)
serve(async (req) => {
serve(async (_req) => {
await smtp.connect({
hostname: Deno.env.get('SMTP_HOSTNAME')!,
port: Number(Deno.env.get('SMTP_PORT')!),
@@ -24,7 +24,7 @@ serve(async (req) => {
subject: `Hello from Supabase Edge Functions`,
content: `Hello Functions \o/`,
})
} catch (error: any) {
} catch (error) {
return new Response(error.message, { status: 500 })
}
@@ -41,7 +41,7 @@ serve(async (req) => {
})
// To invoke:
// curl -i --location --request POST 'http://localhost:54321/functions/v1/' \
// curl -i --location --request POST 'http://localhost:54321/functions/v1/send-email-smtp' \
// --header 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24ifQ.625_WdcF3KHqz5amU0x2X5WWHP-OEs_4qj0ssLNHzTs' \
// --header 'Content-Type: application/json' \
// --data '{"name":"Functions"}'

View File

@@ -3,7 +3,7 @@
## Run locally
```bash
supabase functions serve --no-verify-jwt streams
supabase functions serve --no-verify-jwt
```
Use cURL or Postman to make a GET request to http://localhost:54321/functions/v1/streams.

View File

@@ -1,4 +1,4 @@
import { serve } from 'https://deno.land/std@0.168.0/http/server.ts'
import { serve } from 'std/server'
const msg = new TextEncoder().encode('data: hello\r\n\r\n')

View File

@@ -9,7 +9,7 @@ Also check out our full Stripe Payments examples for [React Native (Expo)](https
## Test locally
- Terminal 1:
- `supabase functions serve --no-verify-jwt stripe-webhooks --env-file ./supabase/.env.local`
- `supabase functions serve --no-verify-jwt --env-file ./supabase/.env.local`
- Terminal 2:
- `stripe listen --forward-to localhost:54321/functions/v1/`
- Terminal 3 (optional):

View File

@@ -2,20 +2,21 @@
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
import { serve } from 'https://deno.land/std@0.132.0/http/server.ts'
import { serve } from 'std/server'
// esm.sh is used to compile stripe-node to be compatible with ES modules.
import Stripe from 'https://esm.sh/stripe@10.13.0?target=deno&no-check&deno-std=0.132.0'
// Import via bare specifier thanks to the import_map.json file.
import Stripe from 'stripe'
const stripe = Stripe(Deno.env.get('STRIPE_API_KEY'), {
const stripe = new Stripe(Deno.env.get('STRIPE_API_KEY') as string, {
// This is needed to use the Fetch API rather than relying on the Node http
// package.
apiVersion: '2022-11-15',
httpClient: Stripe.createFetchHttpClient(),
})
// This is needed in order to use the Web Crypto API in Deno.
const cryptoProvider = Stripe.createSubtleCryptoProvider()
console.log(`Function "stripe-webhooks" up and running!`)
console.log('Hello from Stripe Webhook!')
serve(async (request) => {
const signature = request.headers.get('Stripe-Signature')
@@ -27,8 +28,8 @@ serve(async (request) => {
try {
receivedEvent = await stripe.webhooks.constructEventAsync(
body,
signature,
Deno.env.get('STRIPE_WEBHOOK_SIGNING_SECRET'),
signature!,
Deno.env.get('STRIPE_WEBHOOK_SIGNING_SECRET')!,
undefined,
cryptoProvider
)
@@ -36,25 +37,5 @@ serve(async (request) => {
return new Response(err.message, { status: 400 })
}
console.log(`🔔 Event received: ${receivedEvent.id}`)
// Secondly, we use this event to query the Stripe API in order to avoid
// handling any forged event. If available, we use the idempotency key.
const requestOptions =
receivedEvent.request && receivedEvent.request.idempotency_key
? {
idempotencyKey: receivedEvent.request.idempotency_key,
}
: {}
let retrievedEvent
try {
retrievedEvent = await stripe.events.retrieve(receivedEvent.id, requestOptions)
} catch (err) {
return new Response(err.message, { status: 400 })
}
return new Response(JSON.stringify({ id: retrievedEvent.id, status: 'ok' }), {
status: 200,
headers: { 'Content-Type': 'application/json' },
})
return new Response(JSON.stringify({ ok: true }), { status: 200 })
})

View File

@@ -2,34 +2,28 @@
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
import { serve } from "https://deno.land/std@0.131.0/http/server.ts"
import { serve } from 'std/server'
console.log(`Function "telegram-bot" up and running!`)
import { Bot, webhookCallback } from "https://deno.land/x/grammy@v1.8.3/mod.ts";
import { Bot, webhookCallback } from 'grammy'
const bot = new Bot(Deno.env.get('TELEGRAM_BOT_TOKEN') || '');
const bot = new Bot(Deno.env.get('TELEGRAM_BOT_TOKEN') || '')
bot.command("start", (ctx) => ctx.reply("Welcome! Up and running."));
bot.command('start', (ctx) => ctx.reply('Welcome! Up and running.'))
bot.command('ping', (ctx) => ctx.reply(`Pong! ${new Date()} ${Date.now()}`))
const handleUpdate = webhookCallback(bot, "std/http");
const handleUpdate = webhookCallback(bot, 'std/http')
serve(async (req) => {
try {
const url = new URL(req.url);
if (url.searchParams.get('secret') !== Deno.env.get('FUNCTION_SECRET'))
const url = new URL(req.url)
if (url.searchParams.get('secret') !== Deno.env.get('FUNCTION_SECRET'))
return new Response('not allowed', { status: 405 })
return await handleUpdate(req);
return await handleUpdate(req)
} catch (err) {
console.error(err);
console.error(err)
}
})
// To invoke:
// curl -i --location --request POST 'http://localhost:54321/functions/v1/' \
// --header 'Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZS1kZW1vIiwicm9sZSI6ImFub24ifQ.625_WdcF3KHqz5amU0x2X5WWHP-OEs_4qj0ssLNHzTs' \
// --header 'Content-Type: application/json' \
// --data '{"name":"Functions"}'

View File

@@ -1,2 +0,0 @@
UPSTASH_REDIS_REST_URL=
UPSTASH_REDIS_REST_TOKEN=

View File

@@ -9,7 +9,7 @@ Create a Redis database using the [Upstash Console](https://console.upstash.com/
Select the `Global` type to minimize the latency from all edge locations. Copy the `UPSTASH_REDIS_REST_URL` and `UPSTASH_REDIS_REST_TOKEN` to your .env file. You'll find them under **Details > REST API > .env**.
```bash
cp supabase/functions/upstash-redis-counter/.env.example supabase/functions/upstash-redis-counter/.env
cp supabase/.env.local.example supabase/.env.local
```
## Run locally
@@ -18,7 +18,7 @@ Make sure you have the latest version of the [Supabase CLI installed](https://su
```bash
supabase start
supabase functions serve upstash-redis-counter --no-verify-jwt --env-file supabase/functions/upstash-redis-counter/.env
supabase functions serve --no-verify-jwt --env-file supabase/.env.local
```
Navigate to http://localhost:54321/functions/v1/upstash-redis-counter.
@@ -27,5 +27,5 @@ Navigate to http://localhost:54321/functions/v1/upstash-redis-counter.
```bash
supabase functions deploy upstash-redis-counter --no-verify-jwt
supabase secrets set --env-file supabase/functions/upstash-redis-counter/.env
supabase secrets set --env-file supabase/.env.local
```

View File

@@ -2,8 +2,8 @@
// https://deno.land/manual/getting_started/setup_your_environment
// This enables autocomplete, go to definition, etc.
import { serve } from 'https://deno.land/std@0.177.0/http/server.ts'
import { Redis } from 'https://deno.land/x/upstash_redis@v1.19.3/mod.ts'
import { serve } from 'std/server'
import { Redis } from 'upstash_redis'
console.log(`Function "upstash-redis-counter" up and running!`)

View File

@@ -0,0 +1,3 @@
# Update these with your Supabase details from your project settings > API
NEXT_PUBLIC_SUPABASE_URL=https://your-project.supabase.co
NEXT_PUBLIC_SUPABASE_ANON_KEY=your-anon-key

View File

@@ -0,0 +1,3 @@
{
"extends": "next/core-web-vitals"
}

View File

@@ -0,0 +1,36 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
# local env files
.env*.local
# vercel
.vercel
# typescript
*.tsbuildinfo
next-env.d.ts

View File

@@ -0,0 +1,75 @@
# Todo example using Supabase
- Frontend:
- [Next.js](https://github.com/vercel/next.js) - a React framework for production.
- [Tailwind](https://tailwindcss.com/) for styling and layout.
- [Supabase.js](https://supabase.com/docs/library/getting-started) for user management and realtime data syncing.
- Backend:
- [app.supabase.com](https://app.supabase.com/): hosted Postgres database with restful API for usage with Supabase.js.
## Deploy with Vercel
The Vercel deployment will guide you through creating a Supabase account and project. After installation of the Supabase integration, all relevant environment variables will be set up so that the project is usable immediately after deployment 🚀
[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/git/external?repository-url=https%3A%2F%2Fgithub.com%2Fsupabase%2Fexamples%2Ftree%2Fmain%2Fsupabase-js-v1%2Ftodo-list%2Fnextjs-todo-list&project-name=supabase-todo-list&repository-name=supabase-todo-list&demo-title=Todo%20list&demo-description=An%20example%20web%20app%20using%20Supabase%20and%20Next.js&demo-url=https%3A%2F%2Fsupabase-nextjs-todo-list.vercel.app&demo-image=https%3A%2F%2Fi.imgur.com%2FGJauPlN.png&integration-ids=oac_jUduyjQgOyzev1fjrW83NYOv&external-id=supabase-todo-list)
## Build from scratch
### 1. Create new project
Sign up to Supabase - [https://app.supabase.com](https://app.supabase.com) and create a new project. Wait for your database to start.
### 2. Run "Todo List" Quickstart
Once your database has started, run the "Todo List" quickstart. Inside your project, open the `SQL editor` tab and scroll down until you see `TODO LIST: Build a basic todo list with Row Level Security`.
### 3. Get the URL and Key
Go to the Project Settings (the cog icon), open the API tab, and find your API URL and `anon` key; you'll need these in the next step.
The `anon` key is your client-side API key. It allows "anonymous access" to your database until the user has logged in. Once they have logged in, the client switches to the user's own login token. This enables row level security for your data. Read more about this [below](#postgres-row-level-security).
![image](https://user-images.githubusercontent.com/10214025/88916245-528c2680-d298-11ea-8a71-708f93e1ce4f.png)
**_NOTE_**: The `service_role` key has full access to your data, bypassing any security policies. These keys have to be kept secret and are meant to be used in server environments and never on a client or browser.
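As a minimal sketch (assuming the `NEXT_PUBLIC_*` variables from `.env.local.example` above), this is how the `anon` key is typically used on the client, with Row Level Security deciding which rows each request can read:

```ts
import { createClient } from '@supabase/supabase-js'

// The anon key is safe to use in the browser: Row Level Security,
// not the key itself, decides which rows a request can see.
const supabase = createClient(
  process.env.NEXT_PUBLIC_SUPABASE_URL!,
  process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!
)

export async function fetchMyTodos() {
  // Before sign-in this returns no rows; once the user is signed in,
  // the policies shown below scope the query to that user's own todos.
  const { data, error } = await supabase.from('todos').select('*')
  if (error) throw error
  return data
}
```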
## Supabase details
### Postgres Row level security
This project uses very high-level authorization using Postgres' Row Level Security.
When you start a Postgres database on Supabase, we populate it with an `auth` schema, and some helper functions.
When a user logs in, they are issued a JWT with the role `authenticated` and their UUID.
We can use these details to provide fine-grained control over what each user can and cannot do.
This is a trimmed-down schema, with the policies:
```sql
create table todos (
id bigint generated by default as identity primary key,
user_id uuid references auth.users not null,
task text check (char_length(task) > 3),
is_complete boolean default false,
inserted_at timestamp with time zone default timezone('utc'::text, now()) not null
);
alter table todos enable row level security;
create policy "Individuals can create todos." on todos for
insert with check (auth.uid() = user_id);
create policy "Individuals can view their own todos. " on todos for
select using (auth.uid() = user_id);
create policy "Individuals can update their own todos." on todos for
update using (auth.uid() = user_id);
create policy "Individuals can delete their own todos." on todos for
delete using (auth.uid() = user_id);
```
## Authors
- [Supabase](https://supabase.com)
Supabase is open source. We'd love for you to follow along and get involved at https://github.com/supabase/supabase

View File

@@ -0,0 +1,151 @@
import { Database } from '@/lib/schema'
import { Session, useSupabaseClient } from '@supabase/auth-helpers-react'
import { useEffect, useState } from 'react'
type Todos = Database['public']['Tables']['todos']['Row']
export default function TodoList({ session }: { session: Session }) {
const supabase = useSupabaseClient<Database>()
const [todos, setTodos] = useState<Todos[]>([])
const [newTaskText, setNewTaskText] = useState('')
const [errorText, setErrorText] = useState('')
const user = session.user
useEffect(() => {
const fetchTodos = async () => {
const { data: todos, error } = await supabase
.from('todos')
.select('*')
.order('id', { ascending: true })
if (error) console.log('error', error)
else setTodos(todos)
}
fetchTodos()
}, [supabase])
const addTodo = async (taskText: string) => {
let task = taskText.trim()
if (task.length) {
const { data: todo, error } = await supabase
.from('todos')
.insert({ task, user_id: user.id })
.select()
.single()
if (error) {
setErrorText(error.message)
} else {
setTodos([...todos, todo])
setNewTaskText('')
}
}
}
const deleteTodo = async (id: number) => {
try {
await supabase.from('todos').delete().eq('id', id).throwOnError()
setTodos(todos.filter((x) => x.id != id))
} catch (error) {
console.log('error', error)
}
}
return (
<div className="w-full">
<h1 className="mb-12">Todo List.</h1>
<form
onSubmit={(e) => {
e.preventDefault()
addTodo(newTaskText)
}}
className="flex gap-2 my-2"
>
<input
className="rounded w-full p-2"
type="text"
placeholder="make coffee"
value={newTaskText}
onChange={(e) => {
setErrorText('')
setNewTaskText(e.target.value)
}}
/>
<button className="btn-black" type="submit">
Add
</button>
</form>
{!!errorText && <Alert text={errorText} />}
<div className="bg-white shadow overflow-hidden rounded-md">
<ul>
{todos.map((todo) => (
<Todo key={todo.id} todo={todo} onDelete={() => deleteTodo(todo.id)} />
))}
</ul>
</div>
</div>
)
}
const Todo = ({ todo, onDelete }: { todo: Todos; onDelete: () => void }) => {
const supabase = useSupabaseClient<Database>()
const [isCompleted, setIsCompleted] = useState(todo.is_complete)
const toggle = async () => {
try {
const { data } = await supabase
.from('todos')
.update({ is_complete: !isCompleted })
.eq('id', todo.id)
.throwOnError()
.select()
.single()
if (data) setIsCompleted(data.is_complete)
} catch (error) {
console.log('error', error)
}
}
return (
<li className="w-full block cursor-pointer hover:bg-gray-200 focus:outline-none focus:bg-gray-200 transition duration-150 ease-in-out">
<div className="flex items-center px-4 py-4 sm:px-6">
<div className="min-w-0 flex-1 flex items-center">
<div className="text-sm leading-5 font-medium truncate">{todo.task}</div>
</div>
<div>
<input
className="cursor-pointer"
onChange={(e) => toggle()}
type="checkbox"
checked={isCompleted ? true : false}
/>
</div>
<button
onClick={(e) => {
e.preventDefault()
e.stopPropagation()
onDelete()
}}
className="w-4 h-4 ml-2 border-2 hover:border-black rounded"
>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20" fill="gray">
<path
fillRule="evenodd"
d="M4.293 4.293a1 1 0 011.414 0L10 8.586l4.293-4.293a1 1 0 111.414 1.414L11.414 10l4.293 4.293a1 1 0 01-1.414 1.414L10 11.414l-4.293 4.293a1 1 0 01-1.414-1.414L8.586 10 4.293 5.707a1 1 0 010-1.414z"
clipRule="evenodd"
/>
</svg>
</button>
</div>
</li>
)
}
const Alert = ({ text }: { text: string }) => (
<div className="rounded-md bg-red-100 p-4 my-3">
<div className="text-sm leading-5 text-red-700">{text}</div>
</div>
)

View File

@@ -0,0 +1,7 @@
import { createClient } from '@supabase/supabase-js'
import { Database } from './schema'
export const supabase = createClient<Database>(
  process.env.NEXT_PUBLIC_SUPABASE_URL!,
  process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!
)

View File

@@ -0,0 +1,49 @@
export type Json =
| string
| number
| boolean
| null
| { [key: string]: Json }
| Json[]
export interface Database {
public: {
Tables: {
todos: {
Row: {
id: number
inserted_at: string
is_complete: boolean | null
task: string | null
user_id: string
}
Insert: {
id?: number
inserted_at?: string
is_complete?: boolean | null
task?: string | null
user_id: string
}
Update: {
id?: number
inserted_at?: string
is_complete?: boolean | null
task?: string | null
user_id?: string
}
}
}
Views: {
[_ in never]: never
}
Functions: {
[_ in never]: never
}
Enums: {
[_ in never]: never
}
CompositeTypes: {
[_ in never]: never
}
}
}

View File

@@ -0,0 +1,6 @@
/** @type {import('next').NextConfig} */
const nextConfig = {
reactStrictMode: true,
}
module.exports = nextConfig

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff