diff --git a/.github/workflows/ai-tests.yml b/.github/workflows/ai-tests.yml
index 9a9605579282d..357b2600c328c 100644
--- a/.github/workflows/ai-tests.yml
+++ b/.github/workflows/ai-tests.yml
@@ -22,7 +22,7 @@ jobs:
runs-on: ubuntu-latest
env:
- OPENAI_KEY: ${{ secrets.OPENAI_KEY }}
+ OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
defaults:
run:
diff --git a/.github/workflows/search.yml b/.github/workflows/search.yml
index 8f60b73aa9190..b25871774950a 100644
--- a/.github/workflows/search.yml
+++ b/.github/workflows/search.yml
@@ -22,7 +22,7 @@ jobs:
runs-on: ubuntu-latest
env:
- OPENAI_KEY: ${{ secrets.OPENAI_KEY }}
+ OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
SUPABASE_ACCESS_TOKEN: ${{ secrets.SUPABASE_ACCESS_TOKEN }}
PROJECT_ID: ${{ secrets.SEARCH_SUPABASE_PROJECT_ID }}
NEXT_PUBLIC_SUPABASE_URL: ${{ secrets.SEARCH_SUPABASE_URL }}
diff --git a/apps/database-new/supabase/config.toml b/apps/database-new/supabase/config.toml
index 36bcd21299f32..0d1a103efdb1f 100644
--- a/apps/database-new/supabase/config.toml
+++ b/apps/database-new/supabase/config.toml
@@ -47,6 +47,8 @@ enabled = true
port = 54323
# External URL of the API server that frontend connects to.
api_url = "http://localhost"
+# OpenAI API Key to use for Supabase AI in the Supabase Studio.
+openai_api_key = "env(OPENAI_API_KEY)"
# Email testing server. Emails sent with the local dev setup are not actually sent - rather, they
# are monitored, and you can view the emails that would have been sent from the web interface.
diff --git a/apps/docs/.env.example b/apps/docs/.env.example
index 81e97461ea0df..735dd88086dac 100644
--- a/apps/docs/.env.example
+++ b/apps/docs/.env.example
@@ -3,7 +3,7 @@ NEXT_PUBLIC_SUPABASE_URL=http://localhost:54321
NEXT_PUBLIC_SUPABASE_ANON_KEY=
# PRIVATE
-OPENAI_KEY=
+OPENAI_API_KEY=
SUPABASE_SERVICE_ROLE_KEY=
SEARCH_GITHUB_APP_ID=
SEARCH_GITHUB_APP_INSTALLATION_ID=
diff --git a/apps/docs/components/MDX/social_provider_setup.mdx b/apps/docs/components/MDX/social_provider_setup.mdx
index b270d56d74501..6e8f01d65cfef 100644
--- a/apps/docs/components/MDX/social_provider_setup.mdx
+++ b/apps/docs/components/MDX/social_provider_setup.mdx
@@ -4,3 +4,9 @@ The next step requires a callback URL, which looks like this: `https://
+
+For testing OAuth locally with the Supabase CLI, please see the [local development docs](/docs/guides/cli/local-development#use-auth-locally).
+
+
diff --git a/apps/docs/content/guides/ai/examples/headless-vector-search.mdx b/apps/docs/content/guides/ai/examples/headless-vector-search.mdx
index 3dc03951be392..9c6de702bca46 100644
--- a/apps/docs/content/guides/ai/examples/headless-vector-search.mdx
+++ b/apps/docs/content/guides/ai/examples/headless-vector-search.mdx
@@ -40,7 +40,7 @@ Now we can use the [Headless Vector Search](https://github.com/supabase/headless
1. Clone the repo to your local machine: `git clone git@github.com:supabase/headless-vector-search.git`
2. Link the repo to your remote project: `supabase link --project-ref XXX`
3. Apply the database migrations: `supabase db push`
-4. Set your OpenAI key as a secret: `supabase secrets set OPENAI_KEY=sk-xxx`
+4. Set your OpenAI key as a secret: `supabase secrets set OPENAI_API_KEY=sk-xxx`
5. Deploy the Edge Functions: `supabase functions deploy --no-verify-jwt`
6. Expose `docs` schema via API in Supabase Dashboard [settings](https://supabase.com/dashboard/project/_/settings/api) > `API Settings` > `Exposed schemas`
@@ -66,11 +66,11 @@ jobs:
with:
supabase-url: 'https://your-project-ref.supabase.co' # Update this to your project URL.
supabase-service-role-key: ${{ secrets.SUPABASE_SERVICE_ROLE_KEY }}
- openai-key: ${{ secrets.OPENAI_KEY }}
+ openai-key: ${{ secrets.OPENAI_API_KEY }}
docs-root-path: 'docs' # the path to the root of your md(x) files
```
-Make sure to choose the latest version, and set your `SUPABASE_SERVICE_ROLE_KEY` and `OPENAI_KEY` as repository secrets in your repo settings (settings > secrets > actions).
+Make sure to choose the latest version, and set your `SUPABASE_SERVICE_ROLE_KEY` and `OPENAI_API_KEY` as repository secrets in your repo settings (settings > secrets > actions).
### Add a search interface
diff --git a/apps/docs/content/guides/ai/examples/nextjs-vector-search.mdx b/apps/docs/content/guides/ai/examples/nextjs-vector-search.mdx
index 2cd43c7944bbd..975218aef0247 100644
--- a/apps/docs/content/guides/ai/examples/nextjs-vector-search.mdx
+++ b/apps/docs/content/guides/ai/examples/nextjs-vector-search.mdx
@@ -287,7 +287,7 @@ With our database set up, we need to process and store all `.mdx` files in the `
SUPABASE_SERVICE_ROLE_KEY=
# Get your key at https://platform.openai.com/account/api-keys
- OPENAI_KEY=
+ OPENAI_API_KEY=
```
diff --git a/apps/docs/content/guides/database/tables.mdx b/apps/docs/content/guides/database/tables.mdx
index 54945eef4eafd..fe9cdf9c178ef 100644
--- a/apps/docs/content/guides/database/tables.mdx
+++ b/apps/docs/content/guides/database/tables.mdx
@@ -79,8 +79,7 @@ You must define the "data type" when you create a column.
### Data types
-Every column is a predefined type. PostgreSQL provides many [default types](https://www.postgresql.org/docs/current/datatype.html), and you can even design your own (or use extensions)
-if the default types don't fit your needs. You can use any data type that Postgres supports via the SQL editor. We only support a subset of these in the Table Editor in an effort to keep the experience simple for people with less experience with databases.
+Every column is a predefined type. PostgreSQL provides many [default types](https://www.postgresql.org/docs/current/datatype.html), and you can even design your own (or use extensions) if the default types don't fit your needs. You can use any data type that Postgres supports via the SQL editor. We only support a subset of these in the Table Editor in an effort to keep the experience simple for people with less experience with databases.
Show/Hide default data types
diff --git a/apps/docs/content/guides/functions/ai-models.mdx b/apps/docs/content/guides/functions/ai-models.mdx
index 89f14e1e51f6f..3d3fddbdd2012 100644
--- a/apps/docs/content/guides/functions/ai-models.mdx
+++ b/apps/docs/content/guides/functions/ai-models.mdx
@@ -70,7 +70,7 @@ Inference via larger models is supported via [Ollama](https://ollama.com/). In t
### Running locally
-1. Install Ollama and pull the Mistral model
+1. [Install Ollama](https://github.com/ollama/ollama?tab=readme-ov-file#ollama) and pull the Mistral model
```
ollama pull mistral
diff --git a/apps/docs/pages/api/ai/docs.ts b/apps/docs/pages/api/ai/docs.ts
index 2f6ab7e7883cd..40d35d01105bf 100644
--- a/apps/docs/pages/api/ai/docs.ts
+++ b/apps/docs/pages/api/ai/docs.ts
@@ -5,7 +5,7 @@ import OpenAI from 'openai'
export const runtime = 'edge'
-const openAiKey = process.env.OPENAI_KEY
+const openAiKey = process.env.OPENAI_API_KEY
const supabaseUrl = process.env.NEXT_PUBLIC_SUPABASE_URL as string
const supabaseServiceKey = process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY as string
@@ -13,7 +13,7 @@ export default async function handler(req: NextRequest) {
if (!openAiKey) {
return new Response(
JSON.stringify({
- error: 'No OPENAI_KEY set. Create this environment variable to use AI features.',
+ error: 'No OPENAI_API_KEY set. Create this environment variable to use AI features.',
}),
{
status: 500,
diff --git a/apps/docs/scripts/search/generate-embeddings.ts b/apps/docs/scripts/search/generate-embeddings.ts
index a6931a1836be7..cfac32cae35ae 100644
--- a/apps/docs/scripts/search/generate-embeddings.ts
+++ b/apps/docs/scripts/search/generate-embeddings.ts
@@ -22,7 +22,7 @@ async function generateEmbeddings() {
const requiredEnvVars = [
'NEXT_PUBLIC_SUPABASE_URL',
'SUPABASE_SERVICE_ROLE_KEY',
- 'OPENAI_KEY',
+ 'OPENAI_API_KEY',
'NEXT_PUBLIC_MISC_USE_URL',
'NEXT_PUBLIC_MISC_USE_ANON_KEY',
'SEARCH_GITHUB_APP_ID',
@@ -162,10 +162,11 @@ async function generateEmbeddings() {
console.log(`[${path}] Adding ${sections.length} page sections (with embeddings)`)
for (const { slug, heading, content } of sections) {
// OpenAI recommends replacing newlines with spaces for best results (specific to embeddings)
+ // force a redeploy
const input = content.replace(/\n/g, ' ')
try {
- const openai = new OpenAI({ apiKey: process.env.OPENAI_KEY })
+ const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY })
const embeddingResponse = await openai.embeddings.create({
model: 'text-embedding-ada-002',
diff --git a/apps/studio/components/interfaces/Auth/AuthProvidersFormValidation.tsx b/apps/studio/components/interfaces/Auth/AuthProvidersFormValidation.tsx
index fd8186c108b9e..650933b6470ae 100644
--- a/apps/studio/components/interfaces/Auth/AuthProvidersFormValidation.tsx
+++ b/apps/studio/components/interfaces/Auth/AuthProvidersFormValidation.tsx
@@ -35,25 +35,24 @@ const PROVIDER_EMAIL = {
type: 'boolean',
},
MAILER_OTP_EXP: {
- title: 'Mailer OTP Expiration',
+ title: 'Email OTP Expiration',
type: 'number',
description: 'Duration before an email otp / link expires.',
units: 'seconds',
},
- PASSWORD_MIN_LENGTH: {
- title: 'Min password length',
- description: 'Users will not be able to use a password shorter than this.',
+ MAILER_OTP_LENGTH: {
+ title: 'Email OTP Length',
type: 'number',
+ description: 'Number of digits in the email OTP',
+ units: 'number',
},
},
validationSchema: object().shape({
- PASSWORD_MIN_LENGTH: number()
- .required('A password is required.')
- .min(6, 'Password length must be at least 6 characters long'),
MAILER_OTP_EXP: number()
.min(0, 'Must be more than 0')
.max(86400, 'Must be no more than 86400')
.required('This is required'),
+ MAILER_OTP_LENGTH: number().min(6, 'Must be at least 6').max(10, 'Must be no more than 10'),
}),
misc: {
iconKey: 'email-icon2',
diff --git a/apps/studio/components/interfaces/Database/Functions/CreateFunction/CreateFunctionHeader.tsx b/apps/studio/components/interfaces/Database/Functions/CreateFunction/CreateFunctionHeader.tsx
index bf8eb775cfc85..c3354a3693066 100644
--- a/apps/studio/components/interfaces/Database/Functions/CreateFunction/CreateFunctionHeader.tsx
+++ b/apps/studio/components/interfaces/Database/Functions/CreateFunction/CreateFunctionHeader.tsx
@@ -12,12 +12,7 @@ export const CreateFunctionHeader = ({
setAssistantVisible: (v: boolean) => void
}) => {
return (
-
+
- We auto-scale your disk as you need more storage, but can only do this every 6 hours.
- If you upload more than 1.5x the current size of your storage, your database will go
- into read-only mode. If you know how big your database is going to be, you can
- manually increase the size here.
-
+
+
+ Importing a lot of data?
+
+
- Read more about{' '}
-
- disk management
-
- .
-
-
+Read more about [disk management](https://supabase.com/docs/guides/platform/database-size#disk-management).
+`}
+ />
+
+
) : (
-
+
+
+ {projectSubscriptionData?.plan?.id === 'free'
? 'Disk size configuration is not available for projects on the Free plan'
- : 'Disk size configuration is only available when disabling the spend cap.'
- }
- actions={
-
- }
- >
-
+ : 'Disk size configuration is only available when the spend cap has been disabled'}
+
+
{projectSubscriptionData?.plan?.id === 'free' ? (
If you are intending to use more than 500MB of disk space, then you will need to
@@ -185,8 +179,20 @@ const DiskSizeConfiguration = ({ disabled = false }: DiskSizeConfigurationProps)
disable your spend cap.
)}
-
-
+
+
+
)}
{() =>
currentDiskSize >= maxDiskSize ? (
- <>
-
- You cannot manually expand the disk size any more than {maxDiskSize} GB. If you
- need more than this, contact us to learn more about the Enterprise plan.
-
- >
+
+
+ Maximum manual disk size increase reached
+
+
+ You cannot manually expand the disk size any more than {maxDiskSize}GB. If you
+ need more than this, contact us via support for help.
+
+
+
+
) : (
<>
-
- {isAbleToResizeDatabase
- ? `Upon updating your disk size, the next disk size update will only be available from ${dayjs().format(
- 'DD MMM YYYY, HH:mm (ZZ)'
- )}`
- : `Your database was last resized at ${dayjs(lastDatabaseResizeAt).format(
- 'DD MMM YYYY, HH:mm (ZZ)'
- )}. You can resize your database again in approximately ${formattedTimeTillNextAvailableResize}`}
-
+
+
+
+ This operation is only possible every 6 hours
+
+
+ {isAbleToResizeDatabase
+ ? `Upon updating your disk size, the next disk size update will only be available from ${dayjs().format(
+ 'DD MMM YYYY, HH:mm (ZZ)'
+ )}`
+ : `Your database was last resized at ${dayjs(lastDatabaseResizeAt).format(
+ 'DD MMM YYYY, HH:mm (ZZ)'
+ )}. You can resize your database again in approximately ${formattedTimeTillNextAvailableResize}`}
+
+ {
const { mutate: resetPassword, isLoading } = useResetPasswordMutation({
onSuccess: async () => {
toast.success(
- `If you registered using your email and password, you will receive a password reset email.`
+ `If you registered using your email and password, you will receive a password reset email. The password reset link expires in 10 minutes.`
)
await router.push('/sign-in')
},
diff --git a/apps/studio/components/interfaces/SignIn/SignUpForm.tsx b/apps/studio/components/interfaces/SignIn/SignUpForm.tsx
index 17bcfbb61e2c2..10856dd563a29 100644
--- a/apps/studio/components/interfaces/SignIn/SignUpForm.tsx
+++ b/apps/studio/components/interfaces/SignIn/SignUpForm.tsx
@@ -60,7 +60,7 @@ const SignUpForm = () => {
>
You've successfully signed up. Please check your email to confirm your account before
- signing in to the Supabase dashboard
+ signing in to the Supabase dashboard. The confirmation link expires in 10 minutes.