diff --git a/apps/studio/components/interfaces/Auth/Overview/OverviewMonitoring.tsx b/apps/studio/components/interfaces/Auth/Overview/OverviewMonitoring.tsx deleted file mode 100644 index 098ff90beeced..0000000000000 --- a/apps/studio/components/interfaces/Auth/Overview/OverviewMonitoring.tsx +++ /dev/null @@ -1,16 +0,0 @@ -import { ScaffoldSectionTitle, ScaffoldSection } from 'components/layouts/Scaffold' -import { Card } from 'ui' - -export const OverviewMonitoring = () => { - return ( - - Monitoring -
- - - - -
-
- ) -} diff --git a/apps/studio/components/interfaces/Auth/Overview/OverviewUsage.constants.ts b/apps/studio/components/interfaces/Auth/Overview/OverviewUsage.constants.ts index caad4e200302b..0e858125dfa3d 100644 --- a/apps/studio/components/interfaces/Auth/Overview/OverviewUsage.constants.ts +++ b/apps/studio/components/interfaces/Auth/Overview/OverviewUsage.constants.ts @@ -43,16 +43,39 @@ export const AUTH_COMBINED_QUERY = () => ` select 'signUpCount' as metric, cast(count(case when action = 'user_signedup' then 1 else null end) as float64) from base - + union all - select 'signInLatency' as metric, - coalesce(round(avg(case when action = 'login' then duration_ns else null end) / 1000000, 2), 0) - from base - + select 'apiTotalRequests' as metric, + cast(count(*) as float64) as value + from edge_logs + cross join unnest(metadata) as m + cross join unnest(m.request) as request + cross join unnest(m.response) as response + cross join unnest(response.headers) as h + where path like '%auth/v1%' + union all - select 'signUpLatency' as metric, - coalesce(round(avg(case when action = 'user_signedup' then duration_ns else null end) / 1000000, 2), 0) - from base + select 'apiErrorRequests' as metric, + cast(count(*) as float64) as value + from edge_logs + cross join unnest(metadata) as m + cross join unnest(m.request) as request + cross join unnest(m.response) as response + cross join unnest(response.headers) as h + where path like '%auth/v1%' + and response.status_code >= 400 and response.status_code <= 599 + + union all + select 'authErrorRequests' as metric, + cast(count(*) as float64) as value + from edge_logs + cross join unnest(metadata) as m + cross join unnest(m.request) as request + cross join unnest(m.response) as response + cross join unnest(response.headers) as h + where path like '%auth/v1%' + and response.status_code >= 400 and response.status_code <= 599 + and h.x_sb_error_code is not null ` export const fetchAllAuthMetrics = async (projectRef: string, 
period: 'current' | 'previous') => { @@ -66,22 +89,31 @@ export const fetchAllAuthMetrics = async (projectRef: string, period: 'current' export const processAllAuthMetrics = (currentData: any[], previousData: any[]) => { const processData = (data: any[]) => { if (!data || !Array.isArray(data)) { - return { activeUsers: 0, passwordResets: 0, signInLatency: 0, signUpLatency: 0 } + return { activeUsers: 0, signUps: 0, apiErrorRate: 0, authErrorRate: 0 } } const result = data.reduce( (acc, row) => { const { metric, value } = row if (metric === 'activeUsers') acc.activeUsers = value || 0 - if (metric === 'passwordResetRequests') acc.passwordResets = value || 0 - if (metric === 'signInLatency') acc.signInLatency = value || 0 - if (metric === 'signUpLatency') acc.signUpLatency = value || 0 + if (metric === 'signUpCount') acc.signUps = value || 0 + if (metric === 'apiTotalRequests') acc._apiTotal = value || 0 + if (metric === 'apiErrorRequests') acc._apiErrors = value || 0 + if (metric === 'authErrorRequests') acc._authErrors = value || 0 return acc }, - { activeUsers: 0, passwordResets: 0, signInLatency: 0, signUpLatency: 0 } + { activeUsers: 0, signUps: 0, _apiTotal: 0, _apiErrors: 0, _authErrors: 0 } as any ) - return result + const apiErrorRate = result._apiTotal > 0 ? (result._apiErrors / result._apiTotal) * 100 : 0 + const authErrorRate = result._apiTotal > 0 ? 
(result._authErrors / result._apiTotal) * 100 : 0 + + return { + activeUsers: result.activeUsers, + signUps: result.signUps, + apiErrorRate, + authErrorRate, + } } return { diff --git a/apps/studio/components/interfaces/Auth/Overview/OverviewUsage.tsx b/apps/studio/components/interfaces/Auth/Overview/OverviewUsage.tsx index dac8b7b29aba9..2ddc6452fa863 100644 --- a/apps/studio/components/interfaces/Auth/Overview/OverviewUsage.tsx +++ b/apps/studio/components/interfaces/Auth/Overview/OverviewUsage.tsx @@ -9,17 +9,14 @@ import { useParams } from 'common' import { ChevronRight, Loader2 } from 'lucide-react' import { Reports } from 'icons' import { - getChangeSign, getChangeColor, fetchAllAuthMetrics, processAllAuthMetrics, calculatePercentageChange, } from './OverviewUsage.constants' import { useQuery } from '@tanstack/react-query' -import { useMemo } from 'react' -import { ReportChartV2 } from 'components/interfaces/Reports/v2/ReportChartV2' -import { createAuthReportConfig } from 'data/reports/v2/auth.config' import dayjs from 'dayjs' +import { ArrowUpIcon, ArrowDownIcon } from 'lucide-react' const StatCard = ({ title, @@ -27,19 +24,32 @@ const StatCard = ({ previous, loading, suffix = '', + invert = false, + href, }: { title: string current: number previous: number loading: boolean suffix?: string + invert?: boolean + href?: string }) => { - const changeColor = getChangeColor(previous) - const changeSign = getChangeSign(previous) - const formattedCurrent = suffix === 'ms' ? current.toFixed(2) : current + const isZeroChange = previous === 0 + const changeColor = isZeroChange + ? 'text-foreground-lighter' + : invert + ? previous >= 0 + ? 'text-destructive' + : 'text-brand' + : getChangeColor(previous) + const formattedCurrent = + suffix === 'ms' ? current.toFixed(2) : suffix === '%' ? current.toFixed(1) : Math.round(current) + const ArrowIcon = previous >= 0 ? ArrowUpIcon : ArrowDownIcon + const signChar = previous > 0 ? '+' : previous < 0 ? 
'-' : '' - return ( - + const Inner = ( +

{title}

-

{`${formattedCurrent}${suffix}`}

-

- {`${changeSign}${previous.toFixed(1)}%`} -

+

{`${formattedCurrent}${suffix}`}

+
+ {!isZeroChange && } + {`${signChar}${Math.abs(previous).toFixed(1)}%`} +
)}
) + + return href ? {Inner} : Inner } export const OverviewUsage = () => { @@ -84,55 +97,13 @@ export const OverviewUsage = () => { metrics.current.activeUsers, metrics.previous.activeUsers ) - const passwordResetChange = calculatePercentageChange( - metrics.current.passwordResets, - metrics.previous.passwordResets - ) - const signInLatencyChange = calculatePercentageChange( - metrics.current.signInLatency, - metrics.previous.signInLatency - ) - const signUpLatencyChange = calculatePercentageChange( - metrics.current.signUpLatency, - metrics.previous.signUpLatency - ) + + const signUpsChange = calculatePercentageChange(metrics.current.signUps, metrics.previous.signUps) const endDate = dayjs().toISOString() const startDate = dayjs().subtract(24, 'hour').toISOString() - const signUpChartConfig = useMemo(() => { - const config = createAuthReportConfig({ - projectRef: ref as string, - startDate, - endDate, - interval: '1h', - filters: { status_code: null }, - }) - const chart = config.find((c) => c.id === 'signups') - if (chart) { - return { ...chart, defaultChartStyle: 'bar' } - } - return chart - }, [ref, startDate, endDate]) - - const signInChartConfig = useMemo(() => { - const config = createAuthReportConfig({ - projectRef: ref as string, - startDate, - endDate, - interval: '1h', - filters: { status_code: null }, - }) - const chart = config.find((c) => c.id === 'sign-in-attempts') - if (chart) { - return { ...chart, defaultChartStyle: 'bar' } - } - return chart - }, [ref, startDate, endDate]) - - const updateDateRange = (from: string, to: string) => { - console.log('Date range update:', from, to) - } + // No charts on overview; keep date range for link only return ( @@ -150,54 +121,42 @@ export const OverviewUsage = () => {
-
- {signUpChartConfig && ( - - )} - {signInChartConfig && ( - - )} -
) diff --git a/apps/studio/pages/project/[ref]/auth/overview.tsx b/apps/studio/pages/project/[ref]/auth/overview.tsx index 86f8f300fe14a..0a42fba0eb92a 100644 --- a/apps/studio/pages/project/[ref]/auth/overview.tsx +++ b/apps/studio/pages/project/[ref]/auth/overview.tsx @@ -5,7 +5,6 @@ import { ScaffoldContainer, ScaffoldSection } from 'components/layouts/Scaffold' import { PageLayout } from 'components/layouts/PageLayout/PageLayout' import { DocsButton } from 'components/ui/DocsButton' import { DOCS_URL } from 'lib/constants' -import { OverviewMonitoring } from 'components/interfaces/Auth/Overview/OverviewMonitoring' import { OverviewUsage } from 'components/interfaces/Auth/Overview/OverviewUsage' import { OverviewLearnMore } from 'components/interfaces/Auth/Overview/OverviewLearnMore' import { useRouter } from 'next/router' @@ -32,7 +31,6 @@ const AuthOverview: NextPageWithLayout = () => { return (
-
@@ -48,7 +46,7 @@ AuthOverview.getLayout = (page) => ( secondaryActions={
- All reports Last 24 hours + Last 24 hours
diff --git a/apps/www/_blog/2025-08-16-testing-for-vibe-coders-from-zero-to-production-confidence.mdx b/apps/www/_blog/2025-08-16-testing-for-vibe-coders-from-zero-to-production-confidence.mdx new file mode 100644 index 0000000000000..a263477f56cb2 --- /dev/null +++ b/apps/www/_blog/2025-08-16-testing-for-vibe-coders-from-zero-to-production-confidence.mdx @@ -0,0 +1,493 @@ +--- +title: 'Testing for Vibe Coders: From Zero to Production Confidence' +description: 'Build a testing strategy that prevents production disasters without turning development into a slog. Learn which tests matter, which tools to use, and how to catch bugs before your users do.' +categories: + - developers +tags: + - vibe-coding +date: '2025-08-16:10:00' +toc_depth: 3 +author: prashant +--- + +Testing feels like homework until your users find the bugs first. This guide shows you how to build a testing strategy that actually prevents production disasters without turning development into a slog. You will learn which tests matter, which tools are simple enough to stick with, and how to catch the bugs that embarrass you in front of users. + +Supabase helps because it is just Postgres at the core with an integrated suite of tools. You can run a full local stack, write tests against real Postgres schema and policies, and promote changes the same way you ship code. Start simple and layer more as your app grows. + +## Tests that actually matter + +Most developers write the wrong tests first. Unit tests feel productive because they are fast to write and always pass. But they miss the bugs that actually break your app in production. + +Integration tests do the heavy lifting. They check that your database, API routes, auth, and third-party calls work together. These catch the "works on my machine" issues that unit tests miss entirely. + +Start with integration tests on your core features. 
Add unit tests only for complex logic like price calculations, date handling, and data transforms where bugs are expensive. Save end-to-end tests for critical user flows like login, checkout, and content creation. Visual tests are optional unless pixel-perfect UI is your main value proposition. + +This order catches real-world bugs without turning testing into a full-time job. You want tests that fail when something is actually broken, not tests that fail because you refactored a function name. + +## Pick tools and stick with them + +Tool-hopping burns more hours than imperfect tools ever will. Pick one tool per category and move on. For JavaScript and TypeScript projects, use Jest or Vitest for unit and integration tests. For end-to-end testing, Playwright handles modern web apps better than Selenium ever did. + +The secret weapon is Supabase local development. Running `supabase start` gives you a real Postgres database, auth system, and generated APIs on your machine. Your tests run against the same schema, Row Level Security policies, and API endpoints that your production app uses. No mocking, no fake data, no surprises when you deploy. + +If you are building Python services, pytest works the same way. For testing SQL policies and functions directly, pgTAP lets you write tests in SQL, but save that for later when your database logic gets complex. + +## Start with a minimal setup + +Prove your testing pipeline works before writing complex tests. 
Add these scripts to your package.json: + +```json +{ + "scripts": { + "test": "vitest run", + "test:watch": "vitest", + "test:e2e": "playwright test" + } +} +``` + +Write one simple test to verify everything works: + +```tsx +import { expect, test } from 'vitest' +import { formatPrice } from '../src/lib/format' + +test('formats cents into dollars', () => { + expect(formatPrice(1999)).toBe('$19.99') + expect(formatPrice(0)).toBe('$0.00') +}) +``` + +If this passes in watch mode and in continuous integration, your test harness is solid. Now you can point tests at your real application stack. + +## Test against your real database + +Create a test client that connects to your local Supabase instance. Keep your service role keys secure and use the anonymous key for user-level operations: + +```tsx +import { createClient } from '@supabase/supabase-js' + +export const supabase = createClient( + process.env.SUPABASE_URL || 'http://localhost:54321', + process.env.SUPABASE_ANON_KEY || 'your-local-anon-key' +) +``` + +Write integration tests that verify your most critical systems work together. This test confirms that Supabase Auth, database triggers, and Row Level Security all work correctly: + +```tsx +import { expect, test, beforeEach } from 'vitest' +import { supabase } from './setup' + +beforeEach(async () => { + await supabase.from('profiles').delete().neq('id', '') +}) + +test('sign up creates a profile row via trigger', async () => { + const email = `test-${Date.now()}@example.com` + const { data, error } = await supabase.auth.signUp({ + email, + password: 'Pass1234!', + }) + + expect(error).toBeNull() + expect(data.user?.email).toBe(email) + + const { data: profile } = await supabase + .from('profiles') + .select('*') + .eq('id', data.user?.id) + .single() + + expect(profile).toBeTruthy() +}) +``` + +One test covers authentication, database triggers, and data access policies. That is efficient testing. 
+ +## Focus on expensive failures first + +Write tests for the areas where bugs cost you the most money or reputation. Authentication and authorization failures expose user data or lock people out of their accounts. Money calculations that are wrong by even a penny destroy trust. Data validation bugs let malicious users break your application. + +Test that logged-out users cannot access protected endpoints. Verify that users can only see their own data under Row Level Security. Confirm that session refresh works correctly. For business logic, verify that totals and taxes calculate correctly, discounts do not create negative prices, and webhook handlers are idempotent so duplicate deliveries do not double-charge customers. + +Check that email addresses, dates, and user IDs are validated properly. Ensure that dangerous input gets rejected on the server side, not just in the browser. Test your critical user flows like signup, onboarding, checkout, content creation, and file uploads. + +A single test in these areas prevents entire categories of production incidents. Focus your testing time where failure hurts the most. + +## Test Supabase-specific features + +Row Level Security is easy to forget during development, and forgetting it leaves your database wide open. 
Write tests that prove users cannot see each other's data: + +```tsx +test('users cannot see each other's posts', async () => { + const u1 = await supabase.auth.signUp({ + email: 'u1@test.com', + password: 'pass' + }) + const u2 = await supabase.auth.signUp({ + email: 'u2@test.com', + password: 'pass' + }) + + await supabase.auth.signInWithPassword({ + email: 'u1@test.com', + password: 'pass' + }) + const { data: post } = await supabase + .from('posts') + .insert({ title: 'secret' }) + .select() + .single() + + await supabase.auth.signInWithPassword({ + email: 'u2@test.com', + password: 'pass' + }) + const { data: rows } = await supabase + .from('posts') + .select() + .eq('id', post!.id) + + expect(rows?.length ?? 0).toBe(0) +}) +``` + +Test database triggers that create profile rows after user signup or update timestamps on data changes. If your app relies on these triggers, make sure they fire correctly. + +For file storage, test that uploads work but unauthorized users cannot read or delete files: + +```tsx +test('upload to avatars bucket works', async () => { + const file = new File(['test'], 'avatar.jpg', { type: 'image/jpeg' }) + const { data, error } = await supabase.storage + .from('avatars') + .upload(`avatar-${Date.now()}.jpg`, file) + + expect(error).toBeNull() + expect(data?.path).toBeTruthy() +}) +``` + +If you use Supabase Realtime for collaborative features, write a test that subscribes to table changes and verifies that events arrive after you insert data. + +## Generate test data that looks real + +Your first few tests work fine with hardcoded values like `test-${Date.now()}@example.com`. But eventually you need to test pagination, search results, or how your app handles varied user data. Writing 50 manual insert statements gets old fast. 
+ +Start with simple helper functions that create test records: + +```tsx +export async function createTestUser(overrides = {}) { + const email = `user-${Date.now()}@example.com` + const { data, error } = await supabase.auth.signUp({ + email, + password: 'TestPass123!', + ...overrides, + }) + + if (error) throw error + return data.user +} + +export async function createTestPost(userId: string, overrides = {}) { + const { data, error } = await supabase + .from('posts') + .insert({ + user_id: userId, + title: 'Test post', + content: 'Test content', + ...overrides, + }) + .select() + .single() + + if (error) throw error + return data +} +``` + +Now your tests are cleaner: + +```tsx +test('search returns relevant posts', async () => { + const user = await createTestUser() + await createTestPost(user.id, { title: 'JavaScript tips' }) + await createTestPost(user.id, { title: 'Python tricks' }) + + const { data } = await supabase.from('posts').select().textSearch('title', 'JavaScript') + + expect(data).toHaveLength(1) +}) +``` + +When you need realistic variety, use Faker.js: + +```bash +npm install @faker-js/faker --save-dev +``` + +```tsx +import { faker } from '@faker-js/faker' + +export async function createTestUser(overrides = {}) { + const { data, error } = await supabase.auth.signUp({ + email: faker.internet.email(), + password: 'TestPass123!', + ...overrides, + }) + + if (error) throw error + + await supabase + .from('profiles') + .update({ + display_name: faker.person.fullName(), + bio: faker.lorem.paragraph(), + avatar_url: faker.image.avatar(), + }) + .eq('id', data.user.id) + + return data.user +} +``` + +For tests that need volume, write a seed script that populates your database with realistic data: + +```tsx +// tests/seed.ts +import { createClient } from '@supabase/supabase-js' +import { faker } from '@faker-js/faker' + +const supabase = createClient('http://localhost:54321', process.env.SUPABASE_SERVICE_ROLE_KEY) + +async function seed() { + // Create 10 
users with posts + for (let i = 0; i < 10; i++) { + const { data: user } = await supabase.auth.admin.createUser({ + email: faker.internet.email(), + password: 'TestPass123!', + email_confirm: true, + }) + + // Each user gets 3-7 posts + const postCount = faker.number.int({ min: 3, max: 7 }) + for (let j = 0; j < postCount; j++) { + await supabase.from('posts').insert({ + user_id: user.user.id, + title: faker.lorem.sentence(), + content: faker.lorem.paragraphs(3), + published_at: faker.date.recent({ days: 30 }), + }) + } + } + + console.log('Seed complete') +} + +seed() +``` + +Run it with `npx tsx tests/seed.ts` when you need fresh data. Better yet, add it to your database reset flow: + +```bash +supabase db reset && npx tsx tests/seed.ts +``` + +This gives you a baseline dataset that looks like real usage. Your pagination tests work correctly, search returns varied results, and you catch UI bugs that only show up with different name lengths or content volumes. + +Keep your seed data simple at first. Add complexity only when you actually need to test against it. Ten users with a few posts each covers most testing scenarios. You can always generate more data for specific performance tests. + +## Keep authentication tests simple + +OAuth testing (for Login with Google, Login with Apple, etc.) on localhost is painful, so mix your approaches. Mock external provider calls in unit tests to verify your callback logic works. Use Supabase Admin APIs in integration tests to create confirmed users quickly without going through the full signup flow. Use Playwright for one or two complete OAuth flows with a dedicated test application and saved login state. + +This gives you fast feedback during development and confidence that production flows work correctly. + +## Make async tests reliable + +Flaky tests destroy team confidence in your test suite. Always await promises in your tests and explicitly test error conditions. 
Use fake timers instead of sleeping to make time-dependent tests deterministic. Reset your database state between tests so they do not interfere with each other. Retry network calls in tests the same way your production code does. + +If a test fails only in continuous integration, capture logs and debugging artifacts. Fix flaky tests immediately or delete them. A reliable test suite that catches real bugs is better than a comprehensive suite that cries wolf. + +## Run tests in continuous integration + +Set up GitHub Actions to run your tests on every pull request and merge to main. Start Supabase locally in CI with `supabase start` and point your tests at the local instance. Split fast unit and integration tests from slower end-to-end tests into separate jobs. Gate your deployments on fast tests passing, but let end-to-end tests run in parallel. + +Keep your CI builds fast by running tests in parallel and caching dependencies. Developers stop running tests if they take too long. + +## Test-driven development with AI coding assistants + +Testing becomes even more important when you are using AI to write code quickly. Large language models are creative assistants, but they make subtle mistakes. A test suite turns your AI pair programmer from a creative helper into a reliable co-pilot. + +The workflow is simple. Write or update a test that describes what you want. Ask the AI to implement the feature. Run the tests and feed any failures back to the model. The test is your contract. If the AI goes off track, the test catches it immediately. + +This works especially well for API contract tests that verify status codes and response shapes, Row Level Security policies that prevent users from seeing each other's data, money calculations that prevent rounding errors, and webhook handlers that need to be idempotent. + +Done correctly, tests make AI-assisted development faster and more reliable. You can iterate quickly without accidentally breaking existing functionality. 
+ +## Build in a weekend, test forever + +If you have been coding your project for a while and haven't started to add tests, don't worry. It's not too late. Here is how to retrofit testing onto your existing application and maintain good habits going forward. + +### Add tests to your existing project + +Start by installing your testing framework and setting up Supabase local development: + +```bash +npm install vitest @supabase/supabase-js --save-dev +supabase init +supabase link --project-ref YOUR_PROJECT_ID +supabase db pull +supabase start +``` + +This captures your existing database schema as migration files and starts a local Supabase instance that matches your production setup. + +Create a simple test configuration in `vitest.config.js`: + +```jsx +import { defineConfig } from 'vitest/config' + +export default defineConfig({ + test: { + environment: 'node', + setupFiles: ['./tests/setup.ts'], + }, +}) +``` + +Write your first integration test for the most critical feature in your app. If it is a social app, test that users can create posts and see their own posts but not other users' posts. If it is an e-commerce app, test that the checkout calculation is correct. If it is a content management system, test that publishing and unpublishing work properly. + +Pick the one feature that would hurt the most if it broke, and write a test for it first. This gives you immediate confidence that your core functionality works correctly. + +### Test your authentication system + +Most weekend projects have basic authentication but skip Row Level Security. 
Write a test that creates two users, has one create some data, and verifies the other cannot see it: + +```tsx +import { createClient } from '@supabase/supabase-js' + +const supabase = createClient('http://localhost:54321', process.env.SUPABASE_ANON_KEY) + +test('users cannot access each other data', async () => { + // Create two test users + const user1 = await supabase.auth.signUp({ + email: 'user1@test.com', + password: 'password123', + }) + + const user2 = await supabase.auth.signUp({ + email: 'user2@test.com', + password: 'password123', + }) + + // User 1 creates some data + await supabase.auth.signInWithPassword({ + email: 'user1@test.com', + password: 'password123', + }) + + const { data: created } = await supabase + .from('posts') + .insert({ title: 'Private post' }) + .select() + .single() + + // User 2 tries to access it + await supabase.auth.signInWithPassword({ + email: 'user2@test.com', + password: 'password123', + }) + + const { data: accessed } = await supabase.from('posts').select().eq('id', created.id) + + expect(accessed).toHaveLength(0) +}) +``` + +If this test fails, you need to add Row Level Security policies to your tables. If it passes, your data is properly isolated between users. + +### Add tests as you build new features + +From now on, write a test before you add each new feature. This prevents regressions and gives you confidence that changes work correctly. The pattern is simple: describe what the feature should do in a test, implement the feature, and verify the test passes. 
+ +For a new feature like user profiles, write the test first: + +```tsx +test('users can update their own profile', async () => { + const { data: user } = await supabase.auth.signUp({ + email: 'profile@test.com', + password: 'password123', + }) + + const { error } = await supabase + .from('profiles') + .update({ display_name: 'New Name' }) + .eq('id', user.user.id) + + expect(error).toBeNull() + + const { data: profile } = await supabase + .from('profiles') + .select('display_name') + .eq('id', user.user.id) + .single() + + expect(profile.display_name).toBe('New Name') +}) +``` + +Then implement the feature and verify the test passes. This workflow catches bugs before they reach users and documents how your features are supposed to work. + +### Set up continuous integration + +Add a GitHub Actions workflow that runs your tests on every push: + +```yaml +name: Tests +on: [push, pull_request] + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: '18' + - uses: supabase/setup-cli@v1 + + - name: Install dependencies + run: npm ci + + - name: Start Supabase + run: supabase start + + - name: Run tests + run: npm test +``` + +This ensures your tests run in a clean environment and catch issues before they reach production. Tests that pass locally but fail in CI usually indicate missing environment setup or flaky timing assumptions. + +### Maintain good testing habits + +Make testing part of your daily workflow. Run tests in watch mode while developing so you get immediate feedback when something breaks. Reset your local database regularly with `supabase db reset` to ensure your tests work against a clean schema. + +When you fix a bug, write a test that would have caught it. This prevents the same bug from coming back and gradually improves your test coverage in the most important areas. + +Review your tests monthly and delete ones that no longer add value. 
Tests that are hard to maintain or frequently break for trivial reasons hurt more than they help. Keep your test suite focused on the functionality that matters most to your users. + +The goal is not perfect test coverage but reliable protection against the bugs that would hurt your business. A small suite of well-targeted tests beats a comprehensive suite that breaks constantly and slows down development. + +## Your testing workflow + +Make these habits automatic. During daily development, run tests in watch mode, reset your local database when things get messy, and write a test when you fix any bug. For each pull request, ensure your tests pass locally, run a quick end-to-end check on critical flows, and fix any flaky tests immediately. + +Monthly, review your test suite and remove obsolete tests, refresh your seed data to match current usage patterns, and update testing dependencies to stay current with security patches. + +## You do not need perfect coverage + +You need tests in the right places that run against your real schema and integrate into your daily development flow. Supabase makes this straightforward because you can run the entire stack locally, test your actual Postgres policies and triggers, and deploy the same migrations you test with. + +Start with integration tests for your core features, add a few end-to-end tests for critical user flows, protect your authentication and business logic, and automate the rest. Your users will notice fewer bugs, and you will ship new features with confidence instead of anxiety. 
diff --git a/apps/www/_blog/2025-08-16-the-vibe-coding-master-checklist.mdx b/apps/www/_blog/2025-08-16-the-vibe-coding-master-checklist.mdx new file mode 100644 index 0000000000000..4a94c60aabc68 --- /dev/null +++ b/apps/www/_blog/2025-08-16-the-vibe-coding-master-checklist.mdx @@ -0,0 +1,303 @@ +--- +title: 'The Vibe Coding Master Checklist' +description: 'Get your AI-generated app ready for production with this comprehensive guide covering security, performance, and deployment best practices.' +categories: + - developers +tags: + - vibe-coding +date: '2025-08-16:10:00' +toc_depth: 3 +author: prashant +--- + +Get your app ready for production. + +Vibe coding has transformed how we build software. AI-powered tools like [Lovable](https://lovable.dev/), [Bolt](https://bolt.new/), [v0](https://v0.app/), [Figma Make](https://www.figma.com/make/), and others let you describe your app in plain language and watch it come to life. You can go from idea to working prototype faster than ever before. + +But getting an app to "work" and getting it ready for real users are two different challenges. Your weekend prototype needs security hardening, performance optimization, and deployment planning before it can handle actual traffic and protect user data. + +This guide covers the essential steps to bridge that gap. You'll learn how to audit your AI-generated code, optimize for production, and deploy with confidence. Whether you built with these or another tool, these practices will help you ship something users can actually rely on. + +## Why Supabase works so well for vibe coders + +When you're building with AI tools, you want to focus on your app's unique features, not wrestle with backend infrastructure. That's where Supabase shines as the ideal foundation for vibe-coded applications. 
+ +Unlike piecing together separate services for your [database](https://supabase.com/database), [authentication](https://supabase.com/auth), [storage](https://supabase.com/storage), [edge functions](https://supabase.com/edge-functions), and more, Supabase gives you everything integrated from the start. Your AI tool can request "Supabase Auth for user management" and immediately get secure authentication with social logins, magic links, and proper session handling. No configuration headaches or security gaps. + +The same integration applies across the platform. Your database, real-time subscriptions, file storage, and edge functions all work together seamlessly. When your AI-generated code needs to store user files, implement real-time features, or run server-side logic, these components communicate naturally without custom integration work. + +Tight integration serves the needs of developers of all skill levels and applications of all levels of sophistication, but solo developers and small teams especially benefit from the time and effort saved. Authentication, for example, is notoriously complex to implement securely. With Supabase Auth, you get enterprise-grade security features like Row Level Security, proper password hashing, and session management built in. Your AI tool can focus on your app's business logic while Supabase handles the infrastructure. + +The platform's Postgres foundation means you're building on proven, scalable technology from day one. As your vibe-coded weekend project grows, you won't hit arbitrary limits or need to migrate to "real" infrastructure. The same database that powers your prototype can scale to millions of users. + +For vibe coders specifically, this integration eliminates the biggest obstacle between prototype and production: the backend complexity that AI tools often struggle with. 
Your generated frontend code works immediately with Supabase's auto-generated APIs, and security, performance, and reliability features are available when you need them, not bolted on as an afterthought. + +## The prototype to production gap + +AI tools excel at creating functional demos quickly. They generate working code, set up databases, and handle basic user flows. But they prioritize speed over production concerns like security, scalability, and maintainability. + +Common gaps include hard-coded API keys in frontend code, missing input validation and error handling, unoptimized database queries and large bundle sizes, basic authentication without proper security controls, and no monitoring, backups, or disaster recovery. + +The good news? You can address these systematically without starting over. + +## Security audit and hardening + +Security should be your first priority when moving to production. Start by testing your app's basic security controls. + +### Authentication and authorization review + +Test your login system thoroughly. Try accessing private pages while logged out by typing URLs directly into your browser. If your app has different user roles (such as "admin", "member", or "visitor"), create test accounts for each and verify they only see appropriate content. + +When working with your AI tool, request specific security features: "implement secure password storage," "add session timeouts," and "ensure proper logout functionality." Tools like Supabase Auth handle these concerns automatically, letting you focus on your application rather than security infrastructure. + +### Input validation and data protection + +Your app should validate all user input before processing it. This means checking that email fields contain valid email formats and that numeric fields only accept numbers. It also prevents attackers from injecting malicious code through form submissions. 
+ +Ask your AI tool to "review all form inputs for proper validation and sanitization" to address this systematically. + +### API security and secrets management + +Check that sensitive information like API keys aren't exposed in your frontend code. Open your browser's developer tools, go to the Network tab, and click around your app to see what requests it makes. Look for exposed passwords or personal data, and test whether rapid repeated requests might overwhelm your system. + +One critical issue: some AI tools embed API keys directly in code that users can access. This creates serious security risks. Request that your tool "scan the codebase for exposed API keys and move them to environment variables." + +### Database security + +Your database needs protection at multiple levels. If you're using Supabase, implement Row Level Security (RLS) to ensure users only access their own data. Test this by logging in as different users and confirming they can't see each other's information. + +Request "Row Level Security policies" and "user data isolation" when working with AI tools. Check Supabase's RLS documentation for specific implementation guidance. 
+ +### Security checklist and prompt + +Use this comprehensive approach when you're ready to audit your application's security: + +**Essential security tasks:** + +- Test authentication flow (login/logout multiple times, test private URLs when logged out) +- Validate user inputs (review all forms for proper validation and sanitization) +- Secure API endpoints (inspect requests in Network tab, test rate limiting) +- Protect credentials (scan for exposed API keys, move to environment variables) +- Implement database security (enable RLS, test user data isolation) + +**Security audit prompt:** + +``` +Conduct a comprehensive security audit of my application and implement these measures: + +Authentication & Access Control: +- Ensure secure password storage with proper hashing +- Add session timeouts and proper logout functionality +- Implement user role restrictions and data isolation +- Use Supabase Auth for authentication handling + +Data Protection: +- Enable Row Level Security (RLS) policies for user data (especially for Supabase) +- Review all form inputs for proper validation and sanitization +- Add rate limiting to prevent spam attacks + +Application Security: +- Implement error handling that doesn't reveal sensitive information +- Hide database connection details from users +- Scan for API keys in frontend components and move to environment variables + +Provide a summary of specific changes made to improve security. + +``` + +## Data modeling and management + +Well-structured data becomes more important as your app grows. Your database should organize information logically and handle validation automatically. + +### Schema design and relationships + +Most apps organize data into related tables. A restaurant review app might have separate tables for users, restaurants, and reviews, with clear connections between them. This structure makes your app easier to maintain and query efficiently. 
+ +Ask your AI tool to review your database schema for proper relationships, constraints, and data types. Well-designed schemas prevent data corruption and make future changes easier. + +### Data validation and backups + +Set appropriate data types for your database columns. If a field should only contain whole numbers, use an integer type. This ensures your application always receives predictable data formats. + +Also verify that automated backups are enabled. Most managed database services, including Supabase, offer automatic backup configuration to protect against data loss. + +### Database checklist and optimization + +**Key database tasks:** + +- Review database schema (check tables, relationships, constraints) +- Optimize queries (add indexes for frequently queried columns) +- Configure backups (enable automatic database backups) +- Plan for growth (design schema for future changes) + +**Database optimization prompt:** + +``` +Review my database schema and ensure it includes: + +Structure & Relationships: +- Proper table relationships with primary/foreign keys +- Normalized structure avoiding data duplication +- Junction tables for many-to-many relationships + +Data Integrity: +- Unique constraints on emails, usernames, phone numbers +- NOT NULL requirements where appropriate +- Proper data types and validation rules + +Performance & Maintenance: +- Indexes on frequently queried columns +- Migration planning for future schema changes +- Backup and recovery configuration + +Explain the architectural decisions and suggest improvements. + +``` + +## Performance and user experience + +Performance directly impacts user satisfaction. Slow apps frustrate users and hurt conversion rates. + +### Speed optimization + +Run your app through Google's PageSpeed Insights to get specific performance metrics and recommendations. This tool identifies exactly what's slowing down your app and provides actionable suggestions. 
+ +Common performance issues include oversized images, unused JavaScript, and slow database queries. Your AI tool can address these systematically when given specific feedback from PageSpeed. + +### Database performance + +If your app feels sluggish despite a fast interface, database queries might be the bottleneck. Ask your AI tool to analyze query performance and add indexes where needed. Indexes make frequently accessed data much faster to retrieve. + +### User experience improvements + +Click through your app and note any confusing interactions or slow responses. Take screenshots of problem areas to give your AI tool visual context when requesting fixes. + +Focus on clear error messages, consistent navigation, and mobile responsiveness. These improvements don't fix "bugs" but significantly impact usability. + +### Performance checklist and optimization + +**Performance priorities:** + +- Audit loading speed (run PageSpeed Insights, implement recommendations) +- Optimize database queries (debug slow queries, add appropriate indexes) +- Optimize assets (compress images, implement lazy loading, minimize bundles) +- Improve user experience (add loading states, test mobile performance) + +**Performance optimization prompt:** + +``` +My application needs performance optimization. Here's my PageSpeed data: + +[Include your specific PageSpeed results here] + +Implement optimizations in this order: + +1. Image Optimization + - Compress large images (target ≤100KB where possible) + - Implement lazy loading for off-screen images + - Use modern formats like WebP where supported + +2. Code Optimization + - Remove unused JavaScript and CSS + - Minify remaining files + - Split large bundles into smaller chunks + +3. Loading Improvements + - Add loading states for async operations + - Implement pagination for large data sets + - Reduce layout shifts with proper sizing + +For each optimization: +1. Identify the specific issue in my code +2. Show the updated implementation +3. 
Explain the performance benefit +4. Estimate the PageSpeed improvement + +Work through these systematically, confirming each stage before proceeding. + +``` + +## Deployment best practices + +Moving from development to production requires careful environment configuration and monitoring. + +### Environment setup + +Your app should behave differently in development and production. Development might show detailed error messages for debugging, while production should hide these for security. Set up separate environment configurations and store sensitive information like API keys in environment variables, not in your code. + +Most deployment platforms like Vercel and Netlify handle environment variables through their dashboards, making secret management straightforward. + +### Error handling and monitoring + +Implement proper error boundaries so your app gracefully handles problems rather than crashing. Users should see helpful messages like "Sorry, we couldn't update your account" instead of technical error codes. + +Ask your AI tool to "implement error boundaries and user-friendly error pages" along with "basic performance monitoring and error tracking." + +### Automated deployment + +Set up automatic deployment from your code repository so updates go live without manual work. When you push code changes to GitHub, platforms like Vercel can automatically build and deploy your app. + +Request "automatic deployment from GitHub" and "basic testing before deployment" to ensure smooth updates. 
+ +### Deployment checklist and preparation + +**Deployment essentials:** + +- Configure environments (set up development and production configurations) +- Handle errors gracefully (add error boundaries and user-friendly error pages) +- Set up monitoring (add performance and error tracking) +- Optimize for search (add proper meta tags and SEO elements) + +**Deployment preparation prompt:** + +``` +Prepare my application for production deployment: + +Current Setup: +- Built with [your AI tool] +- Using Supabase for backend/database +- Deploying to [Vercel/Netlify/other] +- Custom domain: [yourdomain.com] + +Implementation Steps: + +1. Environment Configuration + - Move hard-coded config to environment variables + - Set up production vs. development environments + - Configure deployment platform settings + +2. Error Handling & UX + - Add error boundaries for component crashes + - Create user-friendly error pages (404, 500, network errors) + - Implement loading states for all async operations + +3. Production Optimization + - Optimize images and assets + - Remove development code and console.logs + - Add proper meta tags for SEO + - Ensure responsive design works on all devices + +4. Monitoring & Deployment + - Set up error tracking and basic analytics + - Configure automatic deployment from GitHub + - Test custom domain and SSL setup + +For each step, provide: +1. Exact code changes needed +2. Platform configuration settings +3. Simple test to verify functionality +4. Troubleshooting guidance + +Conclude with a final production checklist. + +``` + +## Ready for real users + +Moving from prototype to production doesn't have to be overwhelming. By following this systematic approach, you can address the most critical concerns first and build confidence in your application's readiness. + +The key is working with tools that support this transition. 
Supabase provides production-ready infrastructure from day one: managed Postgres databases, built-in authentication, file storage, real-time updates, and edge functions. With native integrations for popular AI coding tools, you can design your app and set up enterprise-grade backend infrastructure without switching contexts. + +Whether you built with Lovable, v0, or another AI tool, Supabase handles the complex backend requirements so you can focus on creating great user experiences. Your vibe-coded prototype can scale to serve real users with the confidence that comes from proper security, performance, and reliability. + +[Get started with Supabase](https://supabase.com/) and take your AI-generated app from weekend project to production-ready platform. diff --git a/apps/www/_blog/2025-08-16-vibe-coding-best-practices-for-prompting.mdx b/apps/www/_blog/2025-08-16-vibe-coding-best-practices-for-prompting.mdx new file mode 100644 index 0000000000000..b0c5892aaf9a1 --- /dev/null +++ b/apps/www/_blog/2025-08-16-vibe-coding-best-practices-for-prompting.mdx @@ -0,0 +1,241 @@ +--- +title: 'Vibe Coding: Best Practices for Prompting' +description: 'Master the art of communicating with AI coding assistants through effective prompting strategies, iterative refinement, and systematic approaches that turn ideas into deployable applications.' +categories: + - developers +tags: + - vibe-coding +date: '2025-08-16T10:00' +toc_depth: 3 +author: prashant +--- + +AI-powered tools like Lovable, Cursor, and Claude Code have transformed how we build software. You can now turn ideas into working applications by describing what you want in plain language. Instead of spending days on boilerplate setup and API configuration, you tell your AI assistant what you need and watch it build a working demo in minutes. + +But there's a crucial skill that separates effective vibe coders from frustrated ones: knowing how to communicate with AI tools.
A vague "make it look better" might produce unusable results, while a well-structured prompt generates clean, functional code that works in your specific context. + +This guide shows you how to get the most out of AI coding assistants through effective prompting strategies, iterative refinement techniques, and systematic approaches that turn your ideas into deployable applications. + +## Understand what your AI can and can't do + +Your AI assistant isn't a mind reader. It's more like a skilled developer who just joined your project. It knows coding patterns, frameworks, and best practices, but it doesn't know your specific application, users, or the decisions you made in previous prompts. + +The most effective vibe coders provide clear context rather than assuming the AI will "just know" what they want. Consider the difference between these prompts: + +**Vague:** "Build me a login page" +**Specific:** "Create a login form in React using Tailwind, connected to Supabase Auth, with error handling for expired tokens and social login options" + +The first prompt is like asking a chef for "food" while the second gives specific ingredients and cooking instructions. The detailed prompt provides enough context for the AI to generate code that integrates properly with your existing stack. + +Context accumulates throughout your coding session, but AI assistants usually start fresh with each new conversation. Successful vibe coders weave context into their prompts: "We have the login and task list working. Now implement filtering and archiving for completed tasks." This approach builds coherent applications rather than disconnected components. + +## The three-layer prompt structure + +The most effective prompts organize information into three distinct layers that give your AI assistant everything it needs to generate production-quality code: + +**Layer 1: Technical context and constraints** +Specify your stack, styling framework, and architectural patterns. 
This tells the AI how your code should look and behave within your existing project. + +**Layer 2: Functional requirements and user stories** +Describe what the feature does from a user's perspective, including specific behaviors and interactions. + +**Layer 3: Integration requirements and edge cases** + +Explain how this code connects with your existing application and handles real-world scenarios that separate demos from production-ready features. + +Here's an example three-layer prompt for a todo item component: + +``` +Create a TodoItem component with the following specifications: + +Technical context: +- React component using TypeScript +- Styled with Tailwind CSS using our design system +- Uses Lucide React icons for UI elements +- Follows existing component patterns with proper props interface + +Functional requirements: +- Display todo text with completion checkbox +- Show edit button that toggles inline editing mode +- Include delete button with confirmation dialog +- Visual distinction between completed and pending todos +- Smooth transitions between view and edit modes + +Integration and edge cases: +- Integrates with Supabase for state management +- Handle empty or whitespace-only todo text gracefully +- Optimistic UI updates during API calls +- Keyboard shortcuts: Enter to save, Escape to cancel +- Loading states for delete and update operations +- Prevent double-clicks on action buttons + +``` + +This structure eliminates guesswork and reduces back-and-forth iterations. Instead of getting generic code that needs extensive modification, you receive functionality that's much closer to your requirements on the first attempt. + +## Use iterative prompting + +Even well-structured prompts rarely produce perfect code on the first try, and that's normal. The power of vibe coding lies in rapid iteration cycles that let you refine and improve code in real time. + +Think of AI-generated code as a solid first draft that you sculpt into exactly what you need. 
Follow this cycle: + +**Prompt → Review → Ask for explanation/refactor → Build next step** + +This approach helps you uncover blind spots, add necessary improvements, and gradually transform demo code into production-ready functionality. + +### The "what could go wrong?" technique + +Even detailed prompts can miss edge cases. Use this follow-up prompt to identify potential issues: + +``` +What could go wrong with this code? What edge cases should I handle? + +``` + +For example, if your AI generates a function to fetch blog posts from an API, this follow-up might reveal the need to handle empty responses, invalid JSON, network timeouts, or missing data fields. The AI can then refactor the code to address these scenarios. + +### Security-focused iteration + +Security gaps often slip through initial code generation. Ask directly about security considerations: + +``` +What security best practices should I follow with this code? How should I handle authentication and sensitive data? + +``` + +This might surface recommendations about storing API keys in environment variables, implementing rate limiting, or adding input validation to prevent injection attacks. + +## Make AI teach you and fix its own issues + +Use your AI assistant as both a code generator and a teaching tool. Instead of accepting code at face value, ask it to explain its decisions: + +``` +Why did you choose this approach over alternatives? What are the trade-offs? + +``` + +This forces the AI to articulate its reasoning and helps you understand the implications of different implementation choices. + +You can also ask the AI to predict deployment issues before you encounter them: + +``` +If I deploy this code to production with Supabase, what potential problems should I watch for? + +``` + +This might reveal important considerations like enabling Row Level Security, adding password complexity rules, or implementing proper error logging. 
+ +Put the AI into "self-review mode" to catch issues proactively: + +``` +Review this code as if it's going live tomorrow. Identify security concerns, performance bottlenecks, and missing error handling. Suggest specific improvements. + +``` + +## Prompt templates for common tasks + +Once you understand the three-layer structure and iterative refinement, certain patterns emerge. Here are templates for common vibe coding scenarios: + +### Data modeling template + +``` +Technical context: +- Database: [Supabase/PostgreSQL/etc.] +- Language/Framework: [TypeScript/Python/etc.] +- Constraints: [Naming conventions, relationship patterns] + +Data requirements: +- Entity: [Name and purpose] +- Core fields: [Essential fields with types] +- Relationships: [Connections to other entities] +- Business rules: [Validation requirements, constraints] + +Integration considerations: +- Data validation: [Required fields, format requirements] +- Performance: [Indexing needs, query patterns] +- Security: [Access control, sensitive data handling] +- Migration: [How this fits with existing schema] + +Create a data model for [specific use case]. + +``` + +**Follow-up prompts:** + +- "Explain your column and index choices" +- "What queries will be slow at scale? Suggest optimizations" +- "Show me how to seed example data and query it with Supabase" + +### API endpoint template + +``` +Technical context: +- Framework: [Express/Next.js API routes/FastAPI/etc.] +- Authentication: [JWT/session-based/API keys] +- Data layer: [Database ORM, external APIs] +- Response format: [JSON structure preferences] + +Endpoint specification: +- Method and route: [GET/POST/etc.] 
/api/[path] +- Purpose: [What this endpoint accomplishes] +- Request format: [Body structure, query params, headers] +- Response format: [Success and error responses] +- Business logic: [Key operations and validations] + +Integration and edge cases: +- Authentication: [Access control, permission levels] +- Validation: [Input sanitization, required fields] +- Error handling: [Specific error scenarios and responses] +- Rate limiting: [Protection against abuse] +- External dependencies: [Third-party APIs, database queries] + +Create an API endpoint that [specific functionality]. + +``` + +### UI component template + +``` +Technical context: +- Framework: [React/Vue/Angular/etc.] +- Styling: [Tailwind/CSS modules/styled-components] +- State management: [useState/Zustand/Redux] +- Icon library: [Lucide/Heroicons/etc.] + +Component specification: +- Purpose: [What this component does] +- Props interface: [Expected props with types] +- User interactions: [Clicks, hovers, keyboard events] +- Visual states: [Loading, error, empty, success states] +- Accessibility: [ARIA labels, keyboard navigation] + +Integration considerations: +- Parent integration: [How it fits in the app] +- Performance: [Memoization, lazy loading needs] +- Error boundaries: [Failure handling] +- Mobile responsiveness: [Breakpoint considerations] +- Testing: [Key behaviors to verify] + +Create a [component name] component that [specific functionality]. + +``` + +These templates work because they mirror how AI needs to reason about code. They provide technical constraints, functional objectives, and real-world considerations upfront, leading to more accurate initial results. + +## Why context matters more than cleverness + +AI coding assistants are pattern matchers trained on clean, happy-path code examples. They default to what looks most common: functional snippets that work under typical conditions. 
Edge cases and security considerations are called "edge cases" because they appear less frequently in training data. + +Unless you explicitly prompt for comprehensive error handling, security measures, and edge case management, the AI will generate "good enough to run" code rather than "ready for production" code. + +This is why structured prompting isn't optional for serious vibe coding. The three-layer approach, iterative refinement, and explicit requests for security and error handling transform AI from a demo generator into a collaborative development partner. + +## Building with the right foundation + +Effective vibe coding requires more than good prompts; it needs the right infrastructure. Supabase provides an ideal foundation for AI-generated applications with its integrated Postgres development platform. + +When your AI assistant generates code that needs user authentication, database operations, file storage, or real-time features, Supabase handles these requirements seamlessly. Your prompts can focus on business logic while Supabase manages the complex backend infrastructure that typically causes integration headaches. + +The platform's instant APIs, built-in authentication, and real-time subscriptions work together cohesively, eliminating the need to stitch together multiple services. Whether you're building with Lovable, Replit, or any other AI coding tool, Supabase provides the production-ready backend that scales with your vibe-coded applications. + +Ready to put these prompting techniques into practice? Supabase gives you the integrated backend platform that works seamlessly with your favorite AI coding tools. [Start building with Supabase](https://supabase.com/) and turn your next idea into a production-ready application. 
diff --git a/apps/www/_blog/2025-08-17-the-vibe-coders-guide-to-supabase-environments.mdx b/apps/www/_blog/2025-08-17-the-vibe-coders-guide-to-supabase-environments.mdx new file mode 100644 index 0000000000000..8a32ec6dd9b34 --- /dev/null +++ b/apps/www/_blog/2025-08-17-the-vibe-coders-guide-to-supabase-environments.mdx @@ -0,0 +1,296 @@ +--- +title: "The Vibe Coder's Guide to Supabase Environments" +description: 'Build a professional deployment workflow for your Supabase project. Learn essential patterns that prevent 3am panic attacks while keeping your workflow fun, simple, and safe.' +categories: + - developers +tags: + - vibe-coding +date: '2025-08-17T10:00' +toc_depth: 3 +author: prashant +--- + +Setting up separate development and production environments does not have to be painful. This guide shows you how to build a professional deployment workflow for your Supabase project. **You will learn the essential patterns that prevent the 3am "I dropped production" panic attacks** while keeping your workflow fun, simple, and safe. + +Supabase is the open source [Postgres](https://supabase.com/database) development platform. At its core, it is just Postgres, but with an integrated suite: [Auth](https://supabase.com/auth), [Storage](https://supabase.com/storage), [Edge Functions](https://supabase.com/edge-functions), [Realtime](https://supabase.com/realtime), and [Vector](https://supabase.com/modules/vector) search. That means you can start hacking in minutes and also scale to millions when your app takes off. + +With this post, we'll explore how to set up a professional development and staging environment for our projects to prevent those late night panics. + +## Rule #1: never work directly on production + +The fastest way to ruin your night is to treat your one Supabase project as both your playground and your live app. One wrong `DROP TABLE` and your users are gone.
The simple fix is to create at least two projects: one for breaking things (development) and one for your users (production). Larger teams often add a staging project as well, but the minimum is two. + +Create them in the Supabase Dashboard and give them obvious names like `myapp-dev` and `myapp-prod`. Boring names reduce mistakes. Grab the Project Reference IDs from **Settings > General** and stash them in a safe place. + +Then set up the Supabase CLI: + +```bash +npm install supabase --save-dev +supabase init +``` + +This creates a `supabase/` directory, your single source of truth for migrations, functions, and seed data. Treat it like a ledger of every database change. Because it is just files, you can track it with Git, roll changes forward, and keep environments in sync. The flow should always be one direction: local development → dev project → production project. That is how you avoid the pain of trying to sync in multiple directions later. + +## Database migrations are git commits for your database + +Migrations are your safety net. Each one is a timestamped SQL file in `supabase/migrations/`. They record what changed and when, just like Git commits. This is how you avoid schema drift, where dev and prod quietly diverge until one day you cannot deploy without breaking things. + +Here is the basic workflow: + +1. Create a migration whenever you need to change the schema: + + ```bash + supabase migration new add_user_profiles + ``` + +2. Fill in the SQL, and always enable [Row Level Security](https://supabase.com/docs/guides/database/postgres/row-level-security). Without RLS, anyone with your project URL can read all your data. 
+ + ```sql + CREATE TABLE public.profiles ( + id UUID REFERENCES auth.users ON DELETE CASCADE, + username TEXT UNIQUE, + avatar_url TEXT, + created_at TIMESTAMPTZ DEFAULT NOW() + ); + ALTER TABLE public.profiles ENABLE ROW LEVEL SECURITY; + CREATE POLICY "Users can view own profile" ON profiles + FOR SELECT USING (auth.uid() = id); + ``` + +3. Test locally before touching any remote environment: + + ```bash + supabase db reset + ``` + +4. If you make changes in the Dashboard instead of SQL, capture them: + + ```bash + supabase db diff -f capture_dashboard_changes + ``` + +Because migrations must run in order on a fresh database, always reset locally to prove they work. If `supabase db reset` works, production will too. This habit prevents the subtle drift that causes late night panics. + +## Common pitfalls and how to avoid them + +Every developer hits the same landmines once. Knowing them up front means you only hit them once. + +- **Forgetting to enable RLS.** Without it, your tables are wide open. Always add `ALTER TABLE ... ENABLE ROW LEVEL SECURITY;`. +- **Deploying to the wrong environment.** Give production a different terminal theme, never store its credentials locally, and run `supabase status` to check before you push. +- **Migration conflicts.** If two migrations collide after a Git merge, rename one with a later timestamp and rerun `supabase db reset` to verify the order. +- **Exposed service role keys.** If one leaks, rotate it immediately in the Dashboard, update every environment, and scrub your Git history. + +Mistakes are inevitable. Guardrails keep them from becoming disasters. + +## GitHub autopilot with CI/CD + +Manual deploys are risky. Automating them with GitHub Actions removes the human error. The idea is simple: push to `develop` to deploy to staging, merge to `main` to deploy to production. + +Add your secrets in **Settings > Secrets and variables > Actions**. 
Then create `.github/workflows/deploy.yml`: + +```yaml +name: Deploy Supabase +on: + push: + branches: [main, develop] +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: supabase/setup-cli@v1 + with: { version: latest } + - name: Deploy to staging + if: github.ref == 'refs/heads/develop' + run: | + supabase link --project-ref ${{ secrets.STAGING_PROJECT_ID }} + supabase db push + - name: Deploy to production + if: github.ref == 'refs/heads/main' + run: | + supabase link --project-ref ${{ secrets.PRODUCTION_PROJECT_ID }} + supabase db push +``` + +Now deployments happen automatically with every push. You do not have to remember commands or worry about sending them to the wrong project. Larger teams often extend this with integration tests that run against staging before code can be promoted, but even this simple setup eliminates most accidents. + +## Backups are your safety net + +Every production app needs a backup plan. The best plans run without you thinking about them. Set up a GitHub Action to dump your database nightly. + + + +Even better, enable [Point in Time Recovery](https://supabase.com/docs/guides/platform/backups#point-in-time-recovery) (PITR) in the Supabase Dashboard. It lets you roll back to any point in time, not just to last night's snapshot. If your weekend project has suddenly taken off, you'll want to invest the money in keeping your environment solid. + + + +Backups are only useful if you know they work. Schedule a monthly drill: restore a backup to a new project, run through your app, and confirm the data is intact. If you cannot restore, you do not have a backup. + +For high stakes apps, combine PITR with read replicas and multi region deployments. That way you can recover from mistakes without downtime or lost data. + +## Environment variables without the oops + +Secrets are a common leak. The rule is simple: anything with `NEXT_PUBLIC_` is visible in browser code. Only use anon keys there. 
+ +Keep secrets like service role keys in `.env.local`, and never commit that file. Document what is required in `.env.example`. Then in your code, create two Supabase clients: one safe for the browser, one for server side code. + +For bigger teams, a secrets manager like Doppler, Vault, or GitHub's encrypted environment variables makes rotation and auditing easier. + +## Branches that match reality + +Your Git branches should map to your environments. Keep it simple: `main` for production, `develop` for staging, and `feat/*` for features. Supabase will even create preview branches for you automatically when you open a PR. Each one is a fully isolated Supabase instance with unique credentials, perfect for testing features before they hit staging. + +This structure keeps your workflow clean and prevents confusion about which branch is safe to merge. + +## Your deployment rituals + +With all the pieces in place, you need habits to tie them together. + +For daily development: + +- Start Supabase locally with `supabase start` +- Branch from `develop` +- Make schema changes with migrations +- Test with `supabase db reset` +- Commit and push + +For deployments: + +- Open a pull request from your feature branch into `develop` +- Test your changes in staging +- Open a pull request from `develop` into `main` +- Merge to deploy to production +- Monitor production for 15 minutes to catch issues quickly + +Pull requests are not just ceremony. They create an audit trail, trigger preview branches, and give you a chance to test before you touch production. That small delay saves hours of recovery work later. + +## Build in a weekend. Scale to millions. + +Let's say your weekend project took off and people are using it. Let's build separate dev and prod environments. To start, you will create two Supabase projects instead of one. Use the dev project for breaking things, keep the production project for your users. 
After that, you'll set up Vercel to automatically use the right database for each environment. + +### Separate your environments + +Create a development project in the Supabase Dashboard and name it `yourapp-dev`. Rename your existing project to `yourapp-prod` for clarity. Now you have a safe place to experiment. + +Extract your production schema and turn it into migration files: + +```bash +npm install supabase --save-dev +supabase init + +# Capture your production schema +supabase link --project-ref YOUR_PROD_PROJECT_ID +supabase db pull + +# Apply the same schema to development +supabase link --project-ref YOUR_DEV_PROJECT_ID +supabase db push +``` + +This creates migration files in `supabase/migrations/` that represent your current database structure. These files are your new source of truth for schema changes. + +### Configure Vercel environments + +Tell Vercel which database to use for each deployment. Go to your Vercel project settings and add environment variables: + +**Production environment** (only `main` branch): + +``` +NEXT_PUBLIC_SUPABASE_URL = https://yourapp-prod.supabase.co +NEXT_PUBLIC_SUPABASE_ANON_KEY = your_prod_anon_key +``` + +**Preview environment** (all other branches): + +``` +NEXT_PUBLIC_SUPABASE_URL = https://yourapp-dev.supabase.co +NEXT_PUBLIC_SUPABASE_ANON_KEY = your_dev_anon_key +``` + +Update your local `.env.local` to point to the dev project so you never accidentally test against production data. + +### Your new daily workflow + +The workflow stays almost identical to what you know, with one key difference: you never touch the production Supabase Dashboard again. + +**For regular features:** + +- Code locally (automatically uses dev database) +- Push any branch to get a Vercel preview: `git push origin feature/new-comments`. Vercel automatically creates a preview URL like `yourapp-git-feature-new-comments.vercel.app` that connects to your dev database with safe test data. 
+- Test the preview URL with fake data +- Merge to `main` when ready: Create a pull request on GitHub, review your changes, then merge. Vercel automatically deploys to your production domain using the production database. + +**For database changes:** + +- Create a migration: `supabase migration new add_comments_table` +- Write SQL in the generated file +- Test on dev: `supabase db push` +- Commit the migration file and push: `git add supabase/migrations/` then `git commit -m "Add comments table"` then `git push origin feature/comments`. The migration file gets committed to your repo like any other code. +- Production gets the same changes automatically + +### Automate production deployments + +Without automation, you would need to manually apply database changes to production every time you merge code. That means remembering to run `supabase db push` against your production project, which is error-prone and easy to forget. + +GitHub Actions solves this by watching your repository and automatically running commands when specific events happen. Set up GitHub Actions to handle production database changes. Create `.github/workflows/deploy.yml`: + +```yaml +name: Deploy to Production +on: + push: + branches: [main] + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: supabase/setup-cli@v1 + - name: Apply migrations to production + run: | + supabase link --project-ref ${{ secrets.PRODUCTION_PROJECT_ID }} + supabase db push + env: + SUPABASE_ACCESS_TOKEN: ${{ secrets.SUPABASE_ACCESS_TOKEN }} +``` + +This file tells GitHub: "Every time someone merges code into the `main` branch, automatically connect to the production Supabase project and apply any new migration files." Vercel handles your frontend deployment, but your database changes need this extra step. + +Here is what happens when you merge a pull request: + +1. Vercel automatically deploys your new frontend code to production +2. 
GitHub Actions triggers and connects to your production Supabase project +3. Any new migration files get applied to the production database +4. Your frontend and database stay in perfect sync + +Add your project IDs and access token to GitHub secrets. Now every merge to `main` automatically applies your migrations to production. No more forgetting to update the database. No more manual steps that can go wrong at 2am. + +### The safety net effect + +Every branch you push creates a preview deployment that uses development data. You can test destructive changes, experiment with new features, and invite others to try things without any risk to production. + +The key insight is that your development and production environments stay perfectly in sync through migrations. When a migration works in development, it will work in production. No more schema drift, no more surprise failures. + +### Enable Row Level Security immediately + +Your weekend project probably skipped RLS. Fix this now before you ship new features: + +```sql +ALTER TABLE your_table ENABLE ROW LEVEL SECURITY; +CREATE POLICY "Users can only access their own data" ON your_table + FOR ALL USING (auth.uid() = user_id); +``` + +Apply these policies to both environments. RLS is your last line of defense against data breaches. + + + +Even when building weekend projects, build with RLS enabled. You will write better, cleaner, safer code, and your database will protect you in case you don't. Most coding agents can help translate natural language (e.g., "People should only be able to see rows that belong to them in this table") into the necessary SQL for RLS. + + + +This setup takes one afternoon to implement but eliminates the fear of breaking production. You can move fast again while your users stay protected. The same tools, the same workflow, just organized safely. + +## Final word + +The path from vibe coder to confident deployer is not about memorizing every DevOps buzzword. 
It is about a handful of patterns that keep you safe: separate environments, migrations as save points, automated deployments, tested backups, and strict RLS. Supabase makes this easy because everything is Postgres, deeply integrated, and scalable from weekend project to millions of users. diff --git a/apps/www/components/Solutions/AIBuildersLogos.tsx b/apps/www/components/Solutions/AIBuildersLogos.tsx index 5b42fab1beeaf..f5a777b5b8bbf 100644 --- a/apps/www/components/Solutions/AIBuildersLogos.tsx +++ b/apps/www/components/Solutions/AIBuildersLogos.tsx @@ -1,86 +1,187 @@ import Link from 'next/link' -import React from 'react' +import React, { useEffect } from 'react' import { cn } from 'ui' +import styles from './ai-builders-logos.module.css' + const logos = [ - { - image: `/images/logos/publicity/lovable.svg`, - alt: 'lovable', - name: 'lovable', - href: 'https://lovable.dev/', - }, - { - image: `/images/logos/publicity/bolt.svg`, - alt: 'bolt', - name: 'bolt', - href: 'https://bolt.new', - }, - { - image: `/images/logos/publicity/v0.svg`, - alt: 'v0', - name: 'v0', - href: 'https://v0.dev', - }, - { - image: `/images/logos/publicity/figma.svg`, - alt: 'figma', - name: 'figma', - href: 'https://www.figma.com/make/', - }, - { - image: `/images/logos/publicity/tempo.svg`, - alt: 'tempo', - name: 'tempo', - href: 'https://tempo.new', - }, - { - image: `/images/logos/publicity/gumloop.svg`, - alt: 'gumloop', - name: 'gumloop', - href: 'https://gumloop.com', - }, - { - image: `/images/logos/publicity/co-com.svg`, - alt: 'co.com', - name: 'co-com', - href: 'https://co.dev', - }, + [ + { + image: `/images/logos/publicity/v0.svg`, + alt: 'v0', + name: 'v0', + href: 'https://v0.dev', + }, + { + image: `/images/logos/publicity/lovable.svg`, + alt: 'lovable', + name: 'lovable', + href: 'https://lovable.dev/', + }, + { + image: `/images/logos/publicity/bolt.svg`, + alt: 'bolt', + name: 'bolt', + href: 'https://bolt.new', + }, + ], + [ + { + image: 
`/images/logos/publicity/figma.svg`, + alt: 'figma', + name: 'figma', + href: 'https://www.figma.com/make/', + }, + { + image: `/images/logos/publicity/tempo.svg`, + alt: 'tempo', + name: 'tempo', + href: 'https://tempo.new', + }, + { + image: `/images/logos/publicity/gumloop.svg`, + alt: 'gumloop', + name: 'gumloop', + href: 'https://gumloop.com', + }, + ], + // { + // image: `/images/logos/publicity/co-com.svg`, + // alt: 'co.com', + // name: 'co-com', + // href: 'https://co.dev', + // }, ] interface Props { className?: string } +const stagger = 0.1 + +// duration in ms +const duration = 5000 + const EnterpriseLogos: React.FC = ({ className }) => { + const [index, setIndex] = React.useState(0) + const [animate, setAnimate] = React.useState(false) + + const currentLogos = logos[index].slice(0, 3) + const logosNext = logos[(index + 1) % logos.length].slice(0, 3) + + useEffect(() => { + const id = setTimeout(() => { + setAnimate(true) + }, 500) + + return () => { + clearTimeout(id) + } + }, []) + + useEffect(() => { + if (!animate) { + return + } + + function loop() { + setIndex((index) => (index + 1) % logos.length) + } + + const interval = setInterval(loop, duration) + + return () => { + clearInterval(interval) + } + }, [animate]) + return ( -
+
+ {currentLogos.map((logo, idx) => ( + + ))} +
+ + {animate && ( +
+ {logosNext.map((logo, idx) => ( + + ))} +
+ )} +
+ ) +} + +const Logo = ({ + logo, + state, + animate, + index, + stagger, +}: { + logo: (typeof logos)[0][0] + state: 'enter' | 'exit' + animate: boolean + index: number + stagger: number +}) => { + return ( + - {logos.map((logo) => ( - - {logo.alt} - - ))} - + {logo.alt} + ) } diff --git a/apps/www/components/Solutions/PostGrid.tsx b/apps/www/components/Solutions/PostGrid.tsx new file mode 100644 index 0000000000000..8c6198a58fda4 --- /dev/null +++ b/apps/www/components/Solutions/PostGrid.tsx @@ -0,0 +1,69 @@ +import { cn } from 'ui' +import SectionContainer from '../Layouts/SectionContainer' +import { getSortedPosts } from '~/lib/posts' +import Image from 'next/image' +import Link from 'next/link' + +interface PostGridProps { + id?: string + className?: string + + header: React.ReactNode + subheader: React.ReactNode + posts: ReturnType +} + +function PostGrid({ id, className, header, subheader, posts }: PostGridProps) { + const hasPosts = posts.length > 0 + + return ( + +
+

{header}

+

{subheader}

+
+ + {hasPosts ? ( +
+ {posts.map((post) => ( + + {post.thumb && ( +
+ {post.title +
+ )} + +
+

{post.title}

+

+ {post.readingTime} +

+
+ + ))} +
+ ) : ( +

No posts found

+ )} +
+ ) +} + +export default PostGrid + +function formatDate(date: string) { + return new Date(date).toLocaleDateString('en-US', { + month: 'long', + day: 'numeric', + year: 'numeric', + }) +} diff --git a/apps/www/components/Solutions/ai-builders-logos.module.css b/apps/www/components/Solutions/ai-builders-logos.module.css new file mode 100644 index 0000000000000..095061d90c52c --- /dev/null +++ b/apps/www/components/Solutions/ai-builders-logos.module.css @@ -0,0 +1,49 @@ +.logo { + --duration: 500ms; + --curve: ease; + + &[data-animate='false'] { + &[data-state='enter'] { + opacity: 0; + animation: none; + } + &[data-state='exit'] { + opacity: 1; + animation: none; + } + } + + &[data-state='enter'][data-animate='true'] { + animation: enter var(--duration) var(--curve) var(--delay) both; + } + + &[data-state='exit'][data-animate='true'] { + animation: exit var(--duration) var(--curve) var(--delay) both; + } +} + +@keyframes enter { + 0% { + translate: 0 32px; + filter: blur(4px); + opacity: 0; + } + 100% { + translate: 0 0; + filter: blur(0px); + opacity: 1; + } +} + +@keyframes exit { + 0% { + translate: 0 0; + filter: blur(0px); + opacity: 1; + } + 100% { + translate: 0 -32px; + filter: blur(4px); + opacity: 0; + } +} diff --git a/apps/www/data/Solutions.tsx b/apps/www/data/Solutions.tsx index 2e7399d6aa5a0..a2dbfaefbd485 100644 --- a/apps/www/data/Solutions.tsx +++ b/apps/www/data/Solutions.tsx @@ -7,6 +7,7 @@ import { PointerIcon, PuzzleIcon, TrendingUpIcon, + ZapIcon, } from 'lucide-react' export type SolutionTypes = Solutions[keyof Solutions] @@ -23,6 +24,7 @@ export enum Solutions { enterprise = 'enterprise', hackathon = 'hackathon', innovationTeams = 'innovation-teams', + vibeCoders = 'vibe-coders', } export const skillBasedSolutions = { @@ -90,6 +92,13 @@ export const skillBasedSolutions = { ), }, + { + id: Solutions.vibeCoders, + text: 'Vibe Coders', + description: '', + url: '/solutions/vibe-coders', + icon: ZapIcon, + }, ], } diff --git 
a/apps/www/data/solutions/solutions.utils.tsx b/apps/www/data/solutions/solutions.utils.tsx index 354c84b6e26b8..a80b0ba41b6dd 100644 --- a/apps/www/data/solutions/solutions.utils.tsx +++ b/apps/www/data/solutions/solutions.utils.tsx @@ -103,6 +103,11 @@ export interface FrameworkLinkProps { docs: string } +export interface PostGridProps { + header: React.ReactNode + subheader: React.ReactNode +} + export const FrameworkLink = ({ framework }: { framework: FrameworkLinkProps }) => { const isXs = useBreakpoint(640) return ( diff --git a/apps/www/data/solutions/vibe-coders.tsx b/apps/www/data/solutions/vibe-coders.tsx new file mode 100644 index 0000000000000..e201423c59c93 --- /dev/null +++ b/apps/www/data/solutions/vibe-coders.tsx @@ -0,0 +1,563 @@ +import { Timer, Zap, CheckCircle, Check, Sparkles } from 'lucide-react' +import { Button, Image } from 'ui' +import type { + FeaturesSection, + HeroSection, + Metadata, + PostGridProps, + Quotes, +} from './solutions.utils' +import dynamic from 'next/dynamic' + +import MainProducts from '../MainProducts' +import { PRODUCT_SHORTNAMES } from 'shared-data/products' +import { frameworks } from 'components/Hero/HeroFrameworks' +import { FrameworkLink, getEditors } from './solutions.utils' +import type { MPCSectionProps } from 'components/Solutions/MPCSection' +import { useBreakpoint } from 'common' + +const AuthVisual = dynamic(() => import('components/Products/AuthVisual')) +const ComputePricingCalculator = dynamic( + () => import('components/Pricing/ComputePricingCalculator') +) +const FunctionsVisual = dynamic(() => import('components/Products/FunctionsVisual')) +const RealtimeVisual = dynamic(() => import('components/Products/RealtimeVisual')) +const AIBuildersLogos = dynamic(() => import('components/Solutions/AIBuildersLogos')) + +const useVibeCodersContent: () => { + metadata: Metadata + heroSection: HeroSection + quotes: Quotes + why: FeaturesSection + platform: any + platformStarterSection: any + mcp: MPCSectionProps + 
postGrid: PostGridProps +} = () => { + const isXs = useBreakpoint(640) + const editors = getEditors(isXs) + + return { + metadata: { + metaTitle: 'Supabase for Vibe Coders', + metaDescription: + 'Your weekend prototype deserves production. Stop letting backend complexity kill your momentum. Supabase is the production-ready backend that works with your AI tools from day one.', + }, + heroSection: { + id: 'hero', + title: 'Vibe Coders', + h1: ( + <> + Your weekend prototype + deserves production + + ), + subheader: [ + <> + Weekend project. Real users. Now what? Stop letting backend + complexity kill your momentum. + , + ], + image: undefined, + ctas: [ + { + label: 'Start Your Project', + href: 'https://supabase.com/dashboard', + type: 'primary' as any, + }, + ], + }, + quotes: { + id: 'quotes', + items: [ + { + icon: '/images/logos/publicity/lovable.svg', + avatar: '/images/avatars/anton-osika.jpg', + author: 'Anton Osika', + authorTitle: 'Lovable - CEO', + quote: ( + <> + We chose Supabase because it's{' '} + extremely user friendly and{' '} + + covers all the needs to build full-stack applications + + . + + ), + }, + { + icon: '/images/logos/publicity/bolt.svg', + avatar: '/images/avatars/eric-simons.jpg', + author: 'Eric Simmons', + authorTitle: 'Bolt.new - CEO', + quote: ( + <> + Supabase is awesome. Supabase is the{' '} + key database integration that we + have...because it’s the{' '} + + best product in the world for storing and retrieving data + + . + + ), + }, + { + icon: '/images/logos/publicity/v0.svg', + avatar: '/images/avatars/guillermo-rauch.jpg', + author: 'Guillermo Rauch', + authorTitle: 'Vercel (v0) - CEO', + quote: ( + <> + v0 integrates with Supabase seamlessly. If + you ask v0 to generate an application and it needs Supabase,{' '} + + you’ll be prompted to create a Supabase account right there in the application + + . 
+ + ), + }, + ], + }, + why: { + id: 'why-supabase', + label: '', + heading: ( + <> + The Vibe Coder's Dilemma + + ), + subheading: + "Your AI assistant nails the prototype. Users actually want it. Then reality hits. Authentication breaks. Databases crash. Deployment becomes a nightmare. You're not alone. Every vibe coder hits this wall.", + features: [ + { + id: 'built-for-how-you-build', + icon: Zap, + heading: 'Built for how you build', + subheading: + 'Supabase is a complete production-ready back-end that includes everything you need to ship full-featured apps.', + }, + { + id: 'prototype-to-production', + icon: Timer, + heading: 'From prototype to production', + subheading: + 'Start with a weekend project and scale to millions of users. Supabase handles the complexity so you can focus on what matters - building great products.', + }, + { + id: 'vibe-coding-toolkit', + icon: CheckCircle, + heading: 'Break through with our Vibe Coding Toolkit', + subheading: + 'Tools, articles, and other resources to help you deploy your application to production with confidence.', + }, + ], + }, + platform: { + id: 'vibe-coding-platform', + title: ( + <> + Built for how you build + + ), + subheading: + 'Supabase is a production-ready backend that works with your AI tools from day one. No DevOps degree required. No months of setup. No "learning the hard way." Just ship.', + className: '', + features: [ + { + id: 'database', + title: 'Database', + isDatabase: true, + icon: MainProducts[PRODUCT_SHORTNAMES.DATABASE].icon, + subheading: ( + <> + A fully managed database that’s simple for creators and{' '} + trusted by enterprises. + + ), + className: 'lg:col-span-2 flex-col lg:flex-row', + image: ( +
+
+ + + + + + + + + {[ + { name: 'Jon Meyers', pub: 'All', active: false }, + { name: 'Chris Martin', pub: 'All', active: true }, + { name: 'Amy Quek', pub: 'No', active: false }, + { name: 'Riccardo Bussetti', pub: 'No', active: false }, + { name: 'Beng Eu', pub: 'All', active: false }, + { name: 'Tyler Hillery', pub: 'All', active: false }, + ].map((row) => ( + + + + + ))} + +
+ NAME + + PUBLICATION +
{row.name}{row.pub}
+
+
+
+ ), + highlights: ( +
    +
  • + 100% portable +
  • +
  • + Built-in Auth with RLS +
  • +
  • + Easy to extend +
  • +
+ ), + }, + { + id: 'authentication', + title: 'Authentication', + icon: MainProducts[PRODUCT_SHORTNAMES.AUTHENTICATION].icon, + subheading: ( + <> + Let your users{' '} + + login with email, Google, Apple, GitHub, and more + + . Secure and trusted. + + ), + className: '!border-l-0 sm:!border-l sm:!border-t-0', + image: , + }, + { + id: 'realtime', + title: 'Realtime', + icon: MainProducts[PRODUCT_SHORTNAMES.REALTIME].icon, + subheading: ( + <> + Build immersive{' '} + multi-player, collaborative experiences. + + ), + className: '!border-l-0', + image: ( + + ), + }, + { + id: 'edge-functions', + title: 'Edge Functions', + icon: MainProducts[PRODUCT_SHORTNAMES.FUNCTIONS].icon, + subheading: <>Custom backend logic when you want to dive into code., + className: '!border-l-0 sm:!border-l', + image: , + }, + { + id: 'storage', + title: 'Storage', + icon: MainProducts[PRODUCT_SHORTNAMES.STORAGE].icon, + subheading: ( + <> + Affordable and fast, for all the videos and + images you need in your app. 
+ + ), + className: '!border-l-0 lg:!border-l', + image: ( + Storage + ), + }, + { + id: 'vectors', + title: 'AI Ready', + icon: 'M4.13477 12.8129C4.13477 14.1481 4.43245 15.4138 4.96506 16.5471M12.925 4.02271C11.5644 4.02271 10.276 4.33184 9.12614 4.88371M21.7152 12.8129C21.7152 11.4644 21.4115 10.1867 20.8688 9.0447M12.925 21.6032C14.2829 21.6032 15.5689 21.2952 16.717 20.7454M16.717 20.7454C17.2587 21.5257 18.1612 22.0366 19.1831 22.0366C20.84 22.0366 22.1831 20.6935 22.1831 19.0366C22.1831 17.3798 20.84 16.0366 19.1831 16.0366C17.5263 16.0366 16.1831 17.3798 16.1831 19.0366C16.1831 19.6716 16.3804 20.2605 16.717 20.7454ZM4.96506 16.5471C4.16552 17.086 3.63965 17.9999 3.63965 19.0366C3.63965 20.6935 4.98279 22.0366 6.63965 22.0366C8.2965 22.0366 9.63965 20.6935 9.63965 19.0366C9.63965 17.3798 8.2965 16.0366 6.63965 16.0366C6.01951 16.0366 5.44333 16.2248 4.96506 16.5471ZM9.12614 4.88371C8.58687 4.08666 7.67444 3.56274 6.63965 3.56274C4.98279 3.56274 3.63965 4.90589 3.63965 6.56274C3.63965 8.2196 4.98279 9.56274 6.63965 9.56274C8.2965 9.56274 9.63965 8.2196 9.63965 6.56274C9.63965 5.94069 9.45032 5.36285 9.12614 4.88371ZM20.8688 9.0447C21.6621 8.50486 22.1831 7.59464 22.1831 6.56274C22.1831 4.90589 20.84 3.56274 19.1831 3.56274C17.5263 3.56274 16.1831 4.90589 16.1831 6.56274C16.1831 8.2196 17.5263 9.56274 19.1831 9.56274C19.8081 9.56274 20.3884 9.37165 20.8688 9.0447Z', + subheading: ( + <> + When you’re ready to explore vectors and{' '} + the power of AI, Supabase is there with + industry-standard tools to guide you. + + ), + className: '!border-l lg:!border-l-0', + image: ( + Vector embeddings + ), + }, + { + id: 'pricing', + title: 'Pricing for builders', + className: 'sm:col-span-2 flex-col', + icon: ( + + + + + + ), + subheading: ( + <> + A generous free tier, plus fair, flexible + pricing when you’re ready to scale. + + ), + image: ( +
+
+ +
+
+ ), + }, + ], + }, + platformStarterSection: { + id: 'platform-starter', + heading: ( + <> + Choose your platform to start building in + seconds + + ), + headingRight: ( + <> + Or, start with Supabase AI Prompts{' '} + + + ), + docsUrl: 'https://supabase.com/docs/guides/getting-started/ai-prompts', + leftFooter: ( +
+ {frameworks.map((framework) => ( + + ))} +
+ ),
+ aiPrompts: [
+ {
+ id: 'auth-setup',
+ title: 'Bootstrap Next.js app with Supabase Auth',
+ code: `1. Install @supabase/supabase-js and @supabase/ssr packages.
+2. Set up environment variables.
+3. Write two utility functions with \u0060createClient\u0060 functions to create a browser client and a server client.
+4. Hook up middleware to refresh auth tokens
+`,
+ language: 'markdown',
+ docsUrl:
+ 'https://supabase.com/docs/guides/getting-started/ai-prompts/nextjs-supabase-auth',
+ },
+ {
+ id: 'edge-functions',
+ title: 'Writing Supabase Edge Functions',
+ code: `1. Try to use Web APIs and Deno's core APIs instead of external dependencies (eg: use fetch instead of Axios, use WebSockets API instead of node-ws)
+2. If you are reusing utility methods between Edge Functions, add them to 'supabase/functions/_shared' and import using a relative path. Do NOT have cross dependencies between Edge Functions.
+3. Do NOT use bare specifiers when importing dependencies. If you need to use an external dependency, make sure it's prefixed with either 'npm:' or 'jsr:'. For example, '@supabase/supabase-js' should be written as 'npm:@supabase/supabase-js'.
+4. For external imports, always define a version. For example, 'npm:@express' should be written as 'npm:express@4.18.2'.
+5. For external dependencies, importing via 'npm:' and 'jsr:' is preferred. Minimize the use of imports from @'deno.land/x' , 'esm.sh' and @'unpkg.com' . If you have a package from one of those CDNs, you can replace the CDN hostname with 'npm:' specifier.
+`,
+ language: 'markdown',
+ docsUrl: 'https://supabase.com/docs/guides/getting-started/ai-prompts/edge-functions',
+ },
+ {
+ id: 'declarative-db-schema',
+ title: 'Declarative Database Schema',
+ code: `Mandatory Instructions for Supabase Declarative Schema Management
+## 1. 
**Exclusive Use of Declarative Schema** +-**All database schema modifications must be defined within '.sql' files located in the 'supabase/schemas/' directory.`, + language: 'markdown', + docsUrl: + 'https://supabase.com/docs/guides/getting-started/ai-prompts/declarative-database-schema', + }, + { + id: 'rls-policies', + title: 'Create RLS policies', + code: `You're a Supabase Postgres expert in writing row level security policies. Your purpose is to generate a policy with the constraints given by the user. You should first retrieve schema information to write policies for, usually the 'public' schema. +The output should use the following instructions: + +- The generated SQL must be valid SQL.`, + language: 'markdown', + docsUrl: + 'https://supabase.com/docs/guides/getting-started/ai-prompts/database-rls-policies', + }, + ], + }, + mcp: { + id: 'mcp', + heading: ( +
+ Supabase MCP server works seamlessly with{' '} + your favorite AI code editor +
+ ), + ctaLabel: 'Connect your AI tools', + documentationLink: '/docs/guides/getting-started/mcp', + frameworks: editors, + apiExamples: [ + { + lang: 'json', + title: 'macOS', + code: `{ +"mcpServers": { + "supabase": { + "command": "npx", + "args": [ + "-y", + "@supabase/mcp-server-supabase@latest", + "--read-only", + "--project-ref=" + ], + "env": { + "SUPABASE_ACCESS_TOKEN": "" + } + } +} +}`, + }, + { + lang: 'json', + title: 'Windows', + code: `{ +"mcpServers": { + "supabase": { + "command": "cmd", + "args": [ + "/c", + "npx", + "-y", + "@supabase/mcp-server-supabase@latest", + "--read-only", + "--project-ref=" + ], + "env": { + "SUPABASE_ACCESS_TOKEN": "" + } + } +} +}`, + }, + { + lang: 'json', + title: 'Windows (WSL)', + code: `{ +"mcpServers": { + "supabase": { + "command": "wsl", + "args": [ + "npx", + "-y", + "@supabase/mcp-server-supabase@latest", + "--read-only", + "--project-ref=" + ], + "env": { + "SUPABASE_ACCESS_TOKEN": "" + } + } +} +}`, + }, + { + lang: 'json', + title: 'Linux', + code: `{ + "mcpServers": { + "supabase": { + "command": "npx", + "args": [ + "-y", + "@supabase/mcp-server-supabase@latest", + "--read-only", + "--project-ref=" + ], + "env": { + "SUPABASE_ACCESS_TOKEN": "" + } + } + } +}`, + }, + ], + }, + postGrid: { + header: <>The Vibe Coding Toolkit, + subheader: ( + <> + Supabase gives you the tools to easily manage databases, authentication, and backend + infrastructure so you can build faster and ship with confidence. 
+ + ), + }, + } +} + +export { useVibeCodersContent as default } diff --git a/apps/www/lib/posts.tsx b/apps/www/lib/posts.tsx index 0da58e2d1acde..6ac170883f8d1 100644 --- a/apps/www/lib/posts.tsx +++ b/apps/www/lib/posts.tsx @@ -9,6 +9,25 @@ type Directories = '_blog' | '_case-studies' | '_customers' | '_alternatives' | // based on YYYY-MM-DD format export const FILENAME_SUBSTRING = 11 +export type Post = { + slug: string + title?: string + description?: string + author?: string + image?: string + thumb?: string + categories?: string[] + tags?: string[] + date?: string + toc_depth?: number + formattedDate: string + readingTime: string + url: string + path: string + + [key: string]: any // Allow additional properties from frontmatter +} + type GetSortedPostsParams = { directory: Directories limit?: number @@ -24,7 +43,7 @@ export const getSortedPosts = ({ tags, categories, currentPostSlug, -}: GetSortedPostsParams) => { +}: GetSortedPostsParams): Post[] => { //Finding directory named "blog" from the current working directory of Node. 
const postDirectory = path.join(process.cwd(), directory) diff --git a/apps/www/pages/solutions/ai-builders.tsx b/apps/www/pages/solutions/ai-builders.tsx index f3fd46a041239..84a624df9fef7 100644 --- a/apps/www/pages/solutions/ai-builders.tsx +++ b/apps/www/pages/solutions/ai-builders.tsx @@ -38,7 +38,7 @@ const Enterprise: NextPage = () => ( {...content.heroSection} className="[&_h1]:2xl:!text-5xl bg-default border-0 lg:pb-8 [&_.ph-footer]:mt-0 [&_.ph-footer]:lg:mt-16 [&_.ph-footer]:xl:mt-32" sectionContainerClassName="lg:gap-4" - footer={} + footer={} footerPosition="left" /> diff --git a/apps/www/pages/solutions/no-code.tsx b/apps/www/pages/solutions/no-code.tsx index 0600180f8d7db..c5b3bdcdb1efc 100644 --- a/apps/www/pages/solutions/no-code.tsx +++ b/apps/www/pages/solutions/no-code.tsx @@ -8,6 +8,8 @@ import Quotes from 'components/Solutions/Quotes' import SolutionsStickyNav from 'components/SolutionsStickyNav' import getContent from 'data/solutions/no-code' +import { getSortedPosts } from '~/lib/posts' +import { PostTypes } from '~/types/post' const PlatformSection = dynamic(() => import('components/Solutions/PlatformSection')) const PlatformStarterSection = dynamic(() => import('components/Solutions/TwoColumnsSection')) diff --git a/apps/www/pages/solutions/vibe-coders.tsx b/apps/www/pages/solutions/vibe-coders.tsx new file mode 100644 index 0000000000000..8cd8cacb98603 --- /dev/null +++ b/apps/www/pages/solutions/vibe-coders.tsx @@ -0,0 +1,65 @@ +import { NextPage } from 'next' +import dynamic from 'next/dynamic' +import { NextSeo } from 'next-seo' + +import Layout from 'components/Layouts/Default' +import ProductHeader from 'components/Sections/ProductHeader2' +import SolutionsStickyNav from 'components/SolutionsStickyNav' +import useVibeCodersContent from 'data/solutions/vibe-coders' +import { Solutions } from 'data/Solutions' +import Quotes from '~/components/Solutions/Quotes' +import PostGrid from '~/components/Solutions/PostGrid' +import { 
getSortedPosts, type Post } from '~/lib/posts' +import SectionContainer from '~/components/Layouts/SectionContainer' +import AIBuildersLogos from 'components/Solutions/AIBuildersLogos' + +const WhySupabase = dynamic(() => import('components/Solutions/FeaturesSection')) +const PlatformSection = dynamic(() => import('components/Solutions/PlatformSection')) +const PlatformStarterSection = dynamic(() => import('components/Solutions/TwoColumnsSection')) +const MPCSection = dynamic(() => import('components/Solutions/MPCSection')) + +const VibeCodersPage: NextPage<{ posts: Post[] }> = ({ posts }) => { + const content = useVibeCodersContent() + + return ( + <> + + + + + + + + + + +

+ Supabase is the backend platform for apps created by your favorite AI Builders. +

+ + +
+ +
+ + ) +} + +export const getStaticProps = async () => { + const posts = getSortedPosts({ directory: '_blog', limit: 6, tags: ['vibe-coding'] }) + return { props: { posts } } +} + +export default VibeCodersPage