Database Choices for Your Next.js Application


After architecting databases for 15+ Next.js applications ranging from simple blogs to enterprise SaaS platforms handling millions of users, I've learned that database choice makes or breaks your project. Here's the complete decision framework that guided my most successful projects.

The Database Decision Matrix

Before diving into specific databases, I evaluate every project using this matrix:

// database-evaluation.ts
interface DatabaseRequirements {
  dataStructure: 'relational' | 'document' | 'hybrid';
  consistency: 'strong' | 'eventual';
  scale: 'startup' | 'growth' | 'enterprise';
  realtime: boolean;
  complexity: 'simple' | 'moderate' | 'complex';
  budget: 'minimal' | 'moderate' | 'high';
  team: 'solo' | 'small' | 'large';
  deploymentSpeed: 'mvp' | 'production' | 'enterprise';
}
 
interface DatabaseOption {
  name: string;
  type: 'sql' | 'nosql' | 'hybrid';
  strengths: string[];
  weaknesses: string[];
  bestFor: string[];
  pricing: 'free-tier' | 'pay-as-you-go' | 'fixed';
  learningCurve: 'easy' | 'moderate' | 'steep';
  nextjsIntegration: 'excellent' | 'good' | 'manual';
}
 
const databases: DatabaseOption[] = [
  {
    name: 'Supabase',
    type: 'sql',
    strengths: ['Built-in auth', 'Real-time', 'Easy setup', 'Row Level Security'],
    weaknesses: ['Vendor lock-in', 'Limited complex queries', 'Young ecosystem'],
    bestFor: ['MVPs', 'Real-time apps', 'Solo developers', 'Rapid prototyping'],
    pricing: 'free-tier',
    learningCurve: 'easy',
    nextjsIntegration: 'excellent'
  },
  {
    name: 'Neon',
    type: 'sql',
    strengths: ['Serverless PostgreSQL', 'Branching', 'Auto-scaling', 'Cold start optimization'],
    weaknesses: ['Newer platform', 'Limited region availability'],
    bestFor: ['Serverless apps', 'CI/CD workflows', 'Variable traffic'],
    pricing: 'pay-as-you-go',
    learningCurve: 'moderate',
    nextjsIntegration: 'excellent'
  },
  // ... more databases
];
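
Nothing in this matrix is useful until it's applied consistently, so here's a minimal scoring sketch. The weights are illustrative only, not taken from any particular project:

// Continuing database-evaluation.ts: a hypothetical scoring helper
function scoreDatabase(req: DatabaseRequirements, option: DatabaseOption): number {
  let score = 0;
  // Managed free tiers win on minimal budgets
  if (req.budget === 'minimal' && option.pricing === 'free-tier') score += 2;
  // Solo developers benefit most from a gentle learning curve
  if (req.team === 'solo' && option.learningCurve === 'easy') score += 2;
  // Tight Next.js integration matters when racing to an MVP
  if (req.deploymentSpeed === 'mvp' && option.nextjsIntegration === 'excellent') score += 2;
  // Strong consistency and relational data both point at SQL
  if (req.consistency === 'strong' && option.type === 'sql') score += 1;
  if (req.dataStructure === 'relational' && option.type === 'sql') score += 1;
  return score;
}

// Rank the candidates for a given project
export function rankDatabases(req: DatabaseRequirements): DatabaseOption[] {
  return [...databases].sort((a, b) => scoreDatabase(req, b) - scoreDatabase(req, a));
}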

PostgreSQL: The Reliable Foundation

PostgreSQL remains my go-to choice for 70% of projects. Here's my production setup:

// lib/database.ts
import { Pool } from 'pg';
import { drizzle } from 'drizzle-orm/node-postgres';
import * as schema from './schema';
 
// Connection pooling for serverless
const pool = new Pool({
  connectionString: process.env.DATABASE_URL,
  max: 20,
  idleTimeoutMillis: 30000,
  connectionTimeoutMillis: 2000,
});
 
export const db = drizzle(pool, { schema });
 
// Schema definition with Drizzle ORM
// schema/users.ts
import { pgTable, serial, text, timestamp, boolean, integer } from 'drizzle-orm/pg-core';
 
export const users = pgTable('users', {
  id: serial('id').primaryKey(),
  email: text('email').unique().notNull(),
  name: text('name').notNull(),
  avatar: text('avatar'),
  createdAt: timestamp('created_at').defaultNow().notNull(),
  updatedAt: timestamp('updated_at').defaultNow().notNull(),
  isActive: boolean('is_active').default(true).notNull()
});
 
export const posts = pgTable('posts', {
  id: serial('id').primaryKey(),
  title: text('title').notNull(),
  content: text('content'),
  authorId: integer('author_id').references(() => users.id).notNull(),
  published: boolean('published').default(false).notNull(),
  createdAt: timestamp('created_at').defaultNow().notNull(),
  updatedAt: timestamp('updated_at').defaultNow().notNull()
});
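
Getting this schema into an actual database is drizzle-kit's job. A minimal config sketch, assuming a recent drizzle-kit with the schema files under ./schema:

// drizzle.config.ts
import { defineConfig } from 'drizzle-kit';

export default defineConfig({
  dialect: 'postgresql',
  schema: './schema',
  out: './drizzle', // generated SQL migrations land here
  dbCredentials: {
    url: process.env.DATABASE_URL!,
  },
});

From there, npx drizzle-kit generate emits the SQL migrations and npx drizzle-kit migrate applies them (command names current as of recent drizzle-kit versions).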

PostgreSQL with Server Actions

// app/actions/user-actions.ts
'use server';
 
import { db } from '@/lib/database';
import { users, posts } from '@/lib/schema';
import { eq } from 'drizzle-orm';
import { revalidatePath } from 'next/cache';
 
export async function createUser(formData: FormData) {
  const name = formData.get('name') as string;
  const email = formData.get('email') as string;
  
  try {
    const newUser = await db.insert(users)
      .values({ name, email })
      .returning();
      
    revalidatePath('/users');
    return { success: true, user: newUser[0] };
  } catch (error) {
    return { success: false, error: 'Failed to create user' };
  }
}
 
export async function getUserById(id: number) {
  const user = await db.select()
    .from(users)
    .where(eq(users.id, id))
    .limit(1);
    
  return user[0];
}
 
// Complex query with joins
export async function getUserWithPosts(userId: number) {
  const userWithPosts = await db.select({
    user: users,
    post: posts
  })
  .from(users)
  .leftJoin(posts, eq(posts.authorId, users.id))
  .where(eq(users.id, userId));
  
  return userWithPosts;
}
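
Wiring the action into the UI is then a single form attribute. A minimal page sketch (the route path is illustrative):

// app/users/new/page.tsx
import { createUser } from '@/app/actions/user-actions';

export default function NewUserPage() {
  return (
    <form action={createUser}>
      <input name="name" placeholder="Name" required />
      <input name="email" type="email" placeholder="Email" required />
      <button type="submit">Create user</button>
    </form>
  );
}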

MongoDB: When Flexibility Matters

For content-heavy applications with evolving schemas, I choose MongoDB:

// lib/mongodb.ts
import mongoose from 'mongoose';

if (!process.env.MONGODB_URI) {
  throw new Error('Invalid/Missing environment variable: "MONGODB_URI"');
}

const uri = process.env.MONGODB_URI;

// Cache the connection promise on the global object so hot reloads in
// development and warm serverless invocations in production reuse it
const globalWithMongoose = global as typeof globalThis & {
  _mongooseConn?: Promise<typeof mongoose>;
};

export default async function dbConnect() {
  if (!globalWithMongoose._mongooseConn) {
    globalWithMongoose._mongooseConn = mongoose.connect(uri);
  }
  return globalWithMongoose._mongooseConn;
}

// models/Article.ts
import { Schema, model, models } from 'mongoose';
 
interface IArticle {
  title: string;
  content: any; // Rich text content with flexible structure
  author: {
    name: string;
    email: string;
    bio?: string;
  };
  tags: string[];
  metadata: {
    views: number;
    likes: number;
    shares: number;
    seo: {
      metaTitle?: string;
      metaDescription?: string;
      keywords?: string[];
    };
  };
  publishedAt?: Date;
  createdAt: Date;
  updatedAt: Date;
}
 
const ArticleSchema = new Schema<IArticle>({
  title: { type: String, required: true },
  content: { type: Schema.Types.Mixed }, // Flexible content structure
  author: {
    name: { type: String, required: true },
    email: { type: String, required: true },
    bio: String
  },
  tags: [{ type: String }],
  metadata: {
    views: { type: Number, default: 0 },
    likes: { type: Number, default: 0 },
    shares: { type: Number, default: 0 },
    seo: {
      metaTitle: String,
      metaDescription: String,
      keywords: [String]
    }
  },
  publishedAt: Date,
  createdAt: { type: Date, default: Date.now },
  updatedAt: { type: Date, default: Date.now }
});
 
// Pre-save middleware
ArticleSchema.pre('save', function(next) {
  this.updatedAt = new Date();
  next();
});
 
export const Article = models.Article || model<IArticle>('Article', ArticleSchema);
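
One addition worth making for content-heavy collections: indexes that match the read patterns. A sketch against the schema above; declare these before the model is compiled so Mongoose picks them up:

// Full-text search across titles and tags
ArticleSchema.index({ title: 'text', tags: 'text' });
// The published feed sorts newest-first, so index publishedAt descending
ArticleSchema.index({ publishedAt: -1 });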

MongoDB API Routes

// app/api/articles/route.ts
import { NextRequest, NextResponse } from 'next/server';
import { Article } from '@/models/Article';
import dbConnect from '@/lib/mongodb';
 
export async function GET() {
  try {
    await dbConnect();
    
    const articles = await Article.find({ publishedAt: { $exists: true } })
      .sort({ publishedAt: -1 })
      .limit(20)
      .lean(); // Better performance for read-only operations
      
    return NextResponse.json(articles);
  } catch (error) {
    return NextResponse.json(
      { error: 'Failed to fetch articles' }, 
      { status: 500 }
    );
  }
}
 
export async function POST(request: NextRequest) {
  try {
    await dbConnect();
    
    const data = await request.json();
    const article = new Article(data);
    await article.save();
    
    return NextResponse.json(article, { status: 201 });
  } catch (error) {
    return NextResponse.json(
      { error: 'Failed to create article' }, 
      { status: 500 }
    );
  }
}
 
// Complex aggregation query. Keep this in its own module (e.g. lib/article-analytics.ts):
// App Router route files may only export HTTP method handlers.
export async function getArticleAnalytics() {
  await dbConnect();
  const analytics = await Article.aggregate([
    {
      $match: { publishedAt: { $exists: true } }
    },
    {
      $group: {
        _id: {
          year: { $year: '$publishedAt' },
          month: { $month: '$publishedAt' }
        },
        totalArticles: { $sum: 1 },
        totalViews: { $sum: '$metadata.views' },
        totalLikes: { $sum: '$metadata.likes' },
        avgViews: { $avg: '$metadata.views' }
      }
    },
    {
      $sort: { '_id.year': -1, '_id.month': -1 }
    }
  ]);
  
  return analytics;
}
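
A thin route handler can then expose the aggregation. A sketch assuming the function above is moved to lib/article-analytics.ts as suggested:

// app/api/analytics/route.ts: a hypothetical thin wrapper
import { NextResponse } from 'next/server';
import { getArticleAnalytics } from '@/lib/article-analytics';

export async function GET() {
  try {
    const analytics = await getArticleAnalytics();
    return NextResponse.json(analytics);
  } catch (error) {
    return NextResponse.json(
      { error: 'Failed to fetch analytics' },
      { status: 500 }
    );
  }
}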

Supabase: Rapid Development Paradise

For projects needing authentication, real-time features, and quick deployment:

// lib/supabase.ts
import { createClient } from '@supabase/supabase-js';
import { Database } from '@/types/supabase';
 
const supabaseUrl = process.env.NEXT_PUBLIC_SUPABASE_URL!;
const supabaseServiceKey = process.env.SUPABASE_SERVICE_ROLE_KEY!;
 
// Server-side instance. The service role key bypasses RLS:
// never import this into client components or expose the key to the browser.
export const supabase = createClient<Database>(supabaseUrl, supabaseServiceKey);
 
// Client-side instance (for auth and real-time)
export const supabaseClient = createClient<Database>(
  supabaseUrl, 
  process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!
);
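
The server-side client drops straight into Server Components. A minimal sketch reading the messages table used in the next section, assuming the generated Database types include it; since the service role key bypasses RLS, this query must stay on the server:

// app/messages/page.tsx: a minimal Server Component sketch
import { supabase } from '@/lib/supabase';

export default async function MessagesPage() {
  const { data: messages, error } = await supabase
    .from('messages')
    .select('id, content, created_at')
    .order('created_at', { ascending: false })
    .limit(50);

  if (error) return <p>Failed to load messages</p>;

  return (
    <ul>
      {(messages ?? []).map((m) => (
        <li key={m.id}>{m.content}</li>
      ))}
    </ul>
  );
}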

Supabase Real-time Features

// hooks/useRealtimeMessages.ts
import { useEffect, useState } from 'react';
import { supabaseClient } from '@/lib/supabase';
 
interface Message {
  id: string;
  content: string;
  user_id: string;
  created_at: string;
  user: {
    name: string;
    avatar: string;
  };
}
 
export function useRealtimeMessages(channelId: string) {
  const [messages, setMessages] = useState<Message[]>([]);
 
  useEffect(() => {
    // Load initial messages
    const loadMessages = async () => {
      const { data } = await supabaseClient
        .from('messages')
        .select(`
          *,
          user:user_id (name, avatar)
        `)
        .eq('channel_id', channelId)
        .order('created_at', { ascending: true });
        
      if (data) setMessages(data);
    };
 
    loadMessages();
 
    // Set up real-time subscription
    const subscription = supabaseClient
      .channel(`messages:${channelId}`)
      .on('postgres_changes', 
        { 
          event: 'INSERT', 
          schema: 'public', 
          table: 'messages',
          filter: `channel_id=eq.${channelId}`
        }, 
        (payload) => {
          // Realtime payloads carry only the raw row: the joined user
          // relation is not included, so a production version would refetch it
          setMessages(prev => [...prev, payload.new as Message]);
        }
      )
      .on('postgres_changes',
        {
          event: 'DELETE',
          schema: 'public',
          table: 'messages',
          filter: `channel_id=eq.${channelId}`
        },
        (payload) => {
          setMessages(prev => prev.filter(msg => msg.id !== payload.old.id));
        }
      )
      .subscribe();
 
    return () => {
      subscription.unsubscribe();
    };
  }, [channelId]);
 
  return messages;
}
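
Consuming the hook from a client component then takes a few lines; a hypothetical example:

// components/MessageList.tsx: a hypothetical consumer of the hook
'use client';

import { useRealtimeMessages } from '@/hooks/useRealtimeMessages';

export function MessageList({ channelId }: { channelId: string }) {
  const messages = useRealtimeMessages(channelId);

  return (
    <ul>
      {messages.map((msg) => (
        <li key={msg.id}>{msg.content}</li>
      ))}
    </ul>
  );
}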

Row Level Security with Supabase

-- Enable RLS on messages table
ALTER TABLE messages ENABLE ROW LEVEL SECURITY;
 
-- Users can only read messages from channels they belong to
CREATE POLICY "Users can read messages from their channels" ON messages
  FOR SELECT USING (
    EXISTS (
      SELECT 1 FROM channel_members 
      WHERE channel_members.channel_id = messages.channel_id 
      AND channel_members.user_id = auth.uid()
    )
  );
 
-- Users can only insert messages to channels they belong to
CREATE POLICY "Users can insert messages to their channels" ON messages
  FOR INSERT WITH CHECK (
    EXISTS (
      SELECT 1 FROM channel_members 
      WHERE channel_members.channel_id = messages.channel_id 
      AND channel_members.user_id = auth.uid()
    )
    AND auth.uid() = messages.user_id
  );
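
Both policies run the channel_members lookup on every read and write, so that lookup should be index-backed. A sketch, assuming the channel_members table referenced in the policies:

-- Composite index so the EXISTS subquery in both policies is a single index probe
CREATE INDEX idx_channel_members_user_channel
  ON channel_members (user_id, channel_id);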

Database Performance Optimization

Connection Pooling for Serverless

// lib/db-pool.ts
import { Pool } from 'pg';
 
class DatabasePool {
  private static instance: DatabasePool;
  private pool: Pool;
 
  private constructor() {
    this.pool = new Pool({
      connectionString: process.env.DATABASE_URL,
      max: process.env.NODE_ENV === 'production' ? 20 : 5,
      idleTimeoutMillis: 30000,
      connectionTimeoutMillis: 2000,
    });
 
    // Handle pool events
    this.pool.on('error', (err) => {
      console.error('Unexpected error on idle client', err);
    });
 
    this.pool.on('connect', () => {
      console.log('Connected to database');
    });
  }
 
  public static getInstance(): DatabasePool {
    if (!DatabasePool.instance) {
      DatabasePool.instance = new DatabasePool();
    }
    return DatabasePool.instance;
  }
 
  public async query(text: string, params?: any[]) {
    const start = Date.now();
    try {
      const res = await this.pool.query(text, params);
      const duration = Date.now() - start;
      console.log('Executed query', { text, duration, rows: res.rowCount });
      return res;
    } catch (error) {
      console.error('Query error', error);
      throw error;
    }
  }
 
  public async end() {
    await this.pool.end();
  }
}
 
export const dbPool = DatabasePool.getInstance();
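
Usage mirrors the raw pg API, parameterized as usual; for example:

// Example: a parameterized query through the shared pool
const { rows } = await dbPool.query(
  'SELECT id, email FROM users WHERE is_active = $1 LIMIT $2',
  [true, 10]
);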

Caching Strategy

// lib/cache.ts
import { Redis } from '@upstash/redis';
 
const redis = new Redis({
  url: process.env.UPSTASH_REDIS_REST_URL!,
  token: process.env.UPSTASH_REDIS_REST_TOKEN!,
});
 
export class CacheManager {
  async get<T>(key: string): Promise<T | null> {
    const cached = await redis.get(key);
    return cached ? JSON.parse(cached as string) : null;
  }
 
  async set(key: string, value: any, ttl: number = 3600): Promise<void> {
    await redis.setex(key, ttl, JSON.stringify(value));
  }
 
  async invalidate(pattern: string): Promise<void> {
    const keys = await redis.keys(pattern);
    if (keys.length > 0) {
      await redis.del(...keys);
    }
  }
}
 
export const cache = new CacheManager();
 
// Usage in API routes
export async function getCachedUser(id: number) {
  const cacheKey = `user:${id}`;
  
  // Try cache first
  const cached = await cache.get(cacheKey);
  if (cached) return cached;
  
  // Fetch from database
  const user = await db.select().from(users).where(eq(users.id, id)).limit(1);
  
  // Cache for 1 hour
  if (user[0]) {
    await cache.set(cacheKey, user[0], 3600);
  }
  
  return user[0];
}
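
The write path has to clear what the read path cached, or updates show up an hour late. A sketch of the companion mutation, reusing the imports from the server-actions file earlier plus the cache instance above:

// After a mutation, drop the stale cache entry before revalidating
export async function updateUser(id: number, data: { name?: string; email?: string }) {
  await db.update(users).set(data).where(eq(users.id, id));
  await cache.invalidate(`user:${id}`); // exact key; 'user:*' would clear every user
  revalidatePath('/users');
}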

Cost Analysis: Real Numbers

Based on my experience at different scales (prices are approximate snapshots and change often):

// cost-analysis.ts
interface CostBreakdown {
  database: string;
  monthlyActiveUsers: number;
  storageGB: number;
  monthlyQueries: number;
  monthlyCost: number;
  setupComplexity: 'low' | 'medium' | 'high';
  scalingCost: 'predictable' | 'variable' | 'expensive';
}
 
const costComparison: CostBreakdown[] = [
  {
    database: 'Supabase (Pro)',
    monthlyActiveUsers: 10000,
    storageGB: 8,
    monthlyQueries: 500000,
    monthlyCost: 25, // Pro plan
    setupComplexity: 'low',
    scalingCost: 'predictable'
  },
  {
    database: 'Neon (Scale)',
    monthlyActiveUsers: 10000,
    storageGB: 8,
    monthlyQueries: 500000,
    monthlyCost: 45, // Based on compute units
    setupComplexity: 'medium',
    scalingCost: 'variable'
  },
  {
    database: 'MongoDB Atlas (M10)',
    monthlyActiveUsers: 10000,
    storageGB: 8,
    monthlyQueries: 500000,
    monthlyCost: 57, // M10 cluster
    setupComplexity: 'medium',
    scalingCost: 'predictable'
  },
  {
    database: 'Self-hosted PostgreSQL',
    monthlyActiveUsers: 10000,
    storageGB: 8,
    monthlyQueries: 500000,
    monthlyCost: 35, // VPS + backup costs
    setupComplexity: 'high',
    scalingCost: 'expensive'
  }
];

Decision Framework: Choose Your Database

Choose PostgreSQL (Neon/Supabase) When:

  • Strong data consistency is critical
  • Complex relationships between entities
  • Team familiar with SQL
  • Need ACID transactions
  • Building traditional web applications

Choose MongoDB When:

  • Schema changes frequently
  • Storing complex, nested documents
  • Content management systems
  • Rapid prototyping with evolving requirements
  • Handling unstructured data

Choose Supabase When:

  • Need built-in authentication
  • Real-time features required
  • Small team or solo developer
  • Quick time-to-market
  • Want row-level security

Choose Firebase When:

  • Mobile app with web companion
  • Need offline synchronization
  • Real-time collaboration features
  • Want integrated auth and storage
  • Building consumer apps

Across all of these projects, the key insight is the same: start with your data model and user interactions, not with database features. The database should serve your application's needs, not dictate them. PostgreSQL handles 80% of use cases excellently, while specialized databases like MongoDB or Firebase excel in their specific domains.

Most importantly, you can always migrate later. Start with what gets you to market fastest, then optimize for scale when you have real user data.