Backend Performance Optimization

This chapter covers backend performance optimization, including API routes, server actions, and other server-side techniques.

Backend Performance Optimization

Backend performance is crucial for Smart Shelf's responsiveness and scalability. This section covers optimization strategies for API routes, server actions, and server-side processing.

API Route Optimization

Response Caching

Implement intelligent caching strategies to reduce server load and improve response times.

API Route with Caching

// app/api/products/route.ts
import { NextRequest, NextResponse } from 'next/server'
import { createClient } from '@/lib/supabase/server'
import { redis } from '@/lib/redis'

/**
 * GET /api/products — paginated, filterable product listing with a
 * Redis read-through cache in front of Supabase.
 *
 * Query params: page, limit, category (category_id), search (name ilike).
 * Responses carry X-Cache: HIT/MISS and s-maxage caching headers.
 */
export async function GET(request: NextRequest) {
  const { searchParams } = new URL(request.url)
  // Sanitize pagination inputs: fall back to defaults on non-numeric
  // values and clamp so a hostile query string cannot request negative
  // offsets or unbounded page sizes.
  const page = Math.max(1, parseInt(searchParams.get('page') ?? '1', 10) || 1)
  const limit = Math.min(100, Math.max(1, parseInt(searchParams.get('limit') ?? '20', 10) || 20))
  const category = searchParams.get('category')
  const search = searchParams.get('search')

  // Cache key encodes every parameter that affects the result set.
  const cacheKey = `products:${page}:${limit}:${category || 'all'}:${search || 'none'}`

  try {
    // Serve from Redis when possible to skip the database round trip.
    const cached = await redis.get(cacheKey)
    if (cached) {
      return NextResponse.json(JSON.parse(cached), {
        headers: {
          'Cache-Control': 's-maxage=300, stale-while-revalidate=600',
          'X-Cache': 'HIT'
        }
      })
    }

    // Fetch from database. Request an exact total row count alongside
    // the page of rows so pagination metadata reflects the whole result
    // set — previously `total` was just the length of the current page.
    const supabase = createClient()
    let query = supabase
      .from('products')
      .select(`
        id,
        name,
        sku,
        price,
        category:categories(name),
        inventory!inner(
          quantity_on_hand,
          warehouse:warehouses(name)
        )
      `, { count: 'exact' })

    // Apply filters
    if (category) {
      query = query.eq('category_id', category)
    }

    if (search) {
      query = query.ilike('name', `%${search}%`)
    }

    const { data: products, error, count } = await query
      .order('created_at', { ascending: false })
      .range((page - 1) * limit, page * limit - 1)

    if (error) throw error

    const response = {
      data: products,
      pagination: {
        page,
        limit,
        // Total rows matching the filters across all pages.
        total: count ?? 0
      }
    }

    // Cache the result for 5 minutes (matches the s-maxage below).
    await redis.setex(cacheKey, 300, JSON.stringify(response))

    return NextResponse.json(response, {
      headers: {
        'Cache-Control': 's-maxage=300, stale-while-revalidate=600',
        'X-Cache': 'MISS'
      }
    })
  } catch (error) {
    // Log the real error server-side; never leak details to clients.
    console.error('Products API error:', error)
    return NextResponse.json(
      { error: 'Internal Server Error' },
      { status: 500 }
    )
  }
}

Request Optimization Middleware

Create reusable middleware for common optimizations.

Compression and Rate Limiting

// lib/middleware/api-optimization.ts
import { NextRequest, NextResponse } from 'next/server'
import { redis } from '@/lib/redis'

/**
 * Wraps an API route handler and adds content-negotiation headers.
 *
 * Bug fix: the original set `Content-Encoding: gzip` without actually
 * compressing the body, which makes clients attempt to gunzip plain
 * JSON and fail. Compression is performed by the hosting platform /
 * reverse proxy; this wrapper only ensures shared caches store the
 * compressed and uncompressed variants separately.
 */
export function withCompression(handler: Function) {
  return async (request: NextRequest) => {
    const response = await handler(request)

    if (response instanceof NextResponse) {
      // Never claim an encoding we did not apply — only make caches
      // key on the negotiated Accept-Encoding.
      response.headers.set('Vary', 'Accept-Encoding')
    }

    return response
  }
}

/**
 * Fixed-window rate limiter keyed by client IP, backed by Redis
 * INCR + EXPIRE. Returns 429 with standard rate-limit headers when the
 * window budget is exhausted.
 *
 * @param options.limit  max requests per window (default 100)
 * @param options.window window length in seconds (default 60)
 */
export function withRateLimit(handler: Function, options = { limit: 100, window: 60 }) {
  return async (request: NextRequest) => {
    // x-forwarded-for may be a comma-separated proxy chain; the client
    // address is the first entry. Previously the whole chain was used
    // as the key, splitting one client's budget across routes/proxies.
    const forwarded = request.headers.get('x-forwarded-for')
    const ip = request.ip || forwarded?.split(',')[0]?.trim() || 'unknown'
    const key = `rate_limit:${ip}`

    const current = await redis.incr(key)
    if (current === 1) {
      // First hit in this window: start the window timer.
      await redis.expire(key, options.window)
    }

    if (current > options.limit) {
      return NextResponse.json(
        { error: 'Rate limit exceeded' },
        {
          status: 429,
          headers: {
            'Retry-After': options.window.toString(),
            'X-RateLimit-Limit': options.limit.toString(),
            'X-RateLimit-Remaining': '0',
            'X-RateLimit-Reset': (Date.now() + options.window * 1000).toString()
          }
        }
      )
    }

    const response = await handler(request)

    // Surface the remaining budget; clamp so concurrent bursts never
    // produce a negative value in the header.
    if (response instanceof NextResponse) {
      response.headers.set('X-RateLimit-Limit', options.limit.toString())
      response.headers.set('X-RateLimit-Remaining', Math.max(0, options.limit - current).toString())
    }

    return response
  }
}

/** Serialize request context plus the failure into one structured log line. */
function describeFailure(request: NextRequest, error: unknown): string {
  const isErr = error instanceof Error
  return JSON.stringify({
    url: request.url,
    method: request.method,
    error: isErr ? error.message : 'Unknown error',
    stack: isErr ? error.stack : undefined,
    timestamp: new Date().toISOString()
  })
}

/**
 * Last-resort error boundary for API route handlers: any uncaught
 * exception is logged (twice — raw, then structured for monitoring)
 * and converted into a generic 500 JSON response so internals are
 * never leaked to the client.
 */
export function withErrorHandling(handler: Function) {
  return async (request: NextRequest) => {
    try {
      return await handler(request)
    } catch (error) {
      console.error('API Error:', error)
      console.error(describeFailure(request, error))

      return NextResponse.json(
        { error: 'Internal Server Error' },
        { status: 500 }
      )
    }
  }
}

// Usage example.
// Composition is outside-in: withErrorHandling is the outermost layer,
// so it also catches anything thrown by the rate limiter or the
// compression wrapper, not just the handler itself.
export const optimizedHandler = withErrorHandling(
  withRateLimit(
    withCompression(async (request: NextRequest) => {
      // Your API logic here
      return NextResponse.json({ message: 'Success' })
    }),
    { limit: 50, window: 60 } // 50 requests per minute
  )
)

Response Optimization

Optimize API responses for better performance and smaller payload sizes.

Response Transformation

// lib/api/response-optimization.ts
import { NextResponse } from 'next/server'

interface OptimizeResponseOptions {
  minify?: boolean // only sets an X-Content-Minified marker header; no actual minification
  excludeFields?: string[] // field names stripped recursively from the payload
  transformDates?: boolean // normalize Date values / ISO-like strings (default true)
}

/**
 * Shapes an API payload before sending: strips excluded fields,
 * normalizes date values, and wraps the result in a NextResponse.
 *
 * NOTE(review): `minify` does not minify anything — it only sets an
 * informational `X-Content-Minified` header. NextResponse.json already
 * serializes without whitespace, so the flag is purely a marker.
 */
export function optimizeResponse(data: any, options: OptimizeResponseOptions = {}) {
  const {
    minify = false,
    excludeFields = [],
    transformDates = true
  } = options

  // Remove excluded fields (recursive walk; see removeFields).
  if (excludeFields.length > 0) {
    data = removeFields(data, excludeFields)
  }

  // Transform dates to ISO strings (recursive walk; see transformDateFields).
  if (transformDates) {
    data = transformDateFields(data)
  }

  // Create response with appropriate headers
  const response = NextResponse.json(data)
  
  // Redundant in practice: NextResponse.json already sets this header.
  response.headers.set('Content-Type', 'application/json')
  
  if (minify) {
    response.headers.set('X-Content-Minified', 'true')
  }
  
  return response
}

/**
 * Recursively strips the named keys from an object (or from every
 * element of an array). Primitives pass through untouched, and a new
 * structure is always built — the input is never mutated.
 */
function removeFields(obj: any, fieldsToRemove: string[]): any {
  if (Array.isArray(obj)) {
    return obj.map(entry => removeFields(entry, fieldsToRemove))
  }

  // Anything that is not a plain-ish object (null, primitives) is
  // returned as-is.
  if (obj === null || obj === undefined || typeof obj !== 'object') {
    return obj
  }

  const keptEntries = Object.entries(obj)
    .filter(([key]) => !fieldsToRemove.includes(key))
    .map(([key, value]) => [key, removeFields(value, fieldsToRemove)])

  return Object.fromEntries(keptEntries)
}

/**
 * Recursively normalizes date values to ISO-8601 strings:
 * - Date instances become their toISOString() form;
 * - strings matching an ISO timestamp shape are re-parsed and normalized.
 * Objects and arrays are rebuilt, so the input is never mutated.
 *
 * Bug fixes over the original:
 * - a Date encountered directly (top level or as an array element) was
 *   expanded into `{}` by Object.entries; it is now serialized;
 * - strings that match the pattern but are not a valid date (e.g.
 *   "2024-13-99T99:99:99") produced an Invalid Date whose toISOString()
 *   throws a RangeError; such strings are now kept as-is.
 */
function transformDateFields(obj: any): any {
  if (obj instanceof Date) {
    return obj.toISOString()
  }

  if (Array.isArray(obj)) {
    return obj.map(item => transformDateFields(item))
  }

  if (obj && typeof obj === 'object') {
    const newObj: any = {}
    for (const [key, value] of Object.entries(obj)) {
      if (value instanceof Date) {
        newObj[key] = value.toISOString()
      } else if (typeof value === 'string' && /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/.test(value)) {
        const parsed = new Date(value)
        // Keep the original string when parsing yields an Invalid Date.
        newObj[key] = Number.isNaN(parsed.getTime()) ? value : parsed.toISOString()
      } else {
        newObj[key] = transformDateFields(value)
      }
    }
    return newObj
  }

  return obj
}

Server Actions Optimization

Optimized Server Actions

Implement efficient server actions with proper caching and error handling.

Cached Server Actions

// lib/actions/products.ts
'use server'

import { revalidateTag, unstable_cache } from 'next/cache'
import { createClient } from '@/lib/supabase/server'
import { z } from 'zod'

// Input validation schemas
const ProductFiltersSchema = z.object({
  category: z.string().optional(), // category_id to filter on
  search: z.string().optional(), // matched against name and sku (ilike)
  offset: z.number().default(0), // zero-based row offset for paging
  limit: z.number().default(20), // page size
  sortBy: z.enum(['name', 'sku', 'price', 'created_at']).default('created_at'),
  sortOrder: z.enum(['asc', 'desc']).default('desc')
})

// Cached product listing. unstable_cache keys on its serialized
// arguments, so each distinct filter combination gets its own cache
// entry under the 'products' tag, revalidated every 5 minutes or
// whenever revalidateTag('products') runs.
export const getProducts = unstable_cache(
  async (filters: z.infer<typeof ProductFiltersSchema>) => {
    const validatedFilters = ProductFiltersSchema.parse(filters)
    const supabase = createClient()

    // Request an exact total row count along with the page of rows.
    // Bug fix: the original destructured `count` from the result but
    // never asked for it, so the returned count was always 0.
    let query = supabase
      .from('products')
      .select(`
        id,
        name,
        sku,
        price,
        category:categories(id, name),
        inventory!inner(
          quantity_on_hand,
          warehouse:warehouses(id, name)
        )
      `, { count: 'exact' })

    // Apply filters
    if (validatedFilters.category) {
      query = query.eq('category_id', validatedFilters.category)
    }

    if (validatedFilters.search) {
      // Case-insensitive match against either name or sku.
      query = query.or(`name.ilike.%${validatedFilters.search}%,sku.ilike.%${validatedFilters.search}%`)
    }

    const { data, error, count } = await query
      .order(validatedFilters.sortBy, { ascending: validatedFilters.sortOrder === 'asc' })
      .range(validatedFilters.offset, validatedFilters.offset + validatedFilters.limit - 1)

    if (error) {
      console.error('getProducts error:', error)
      throw new Error('Failed to fetch products')
    }

    return { data: data || [], count: count || 0 }
  },
  ['products'], // Cache key
  {
    tags: ['products'],
    revalidate: 300 // Cache for 5 minutes
  }
)

// Optimized create action with cache invalidation.
// Validates form input, rejects duplicate SKUs, inserts the product,
// and invalidates every cache tag that lists products.
export async function createProduct(formData: FormData) {
  const ProductSchema = z.object({
    name: z.string().min(1, 'Name is required'),
    sku: z.string().min(1, 'SKU is required'),
    price: z.number().positive('Price must be positive'),
    cost: z.number().positive('Cost must be positive'),
    category_id: z.string().uuid('Invalid category ID'),
    description: z.string().optional()
  })

  try {
    const validatedData = ProductSchema.parse({
      name: formData.get('name'),
      sku: formData.get('sku'),
      price: parseFloat(formData.get('price') as string),
      cost: parseFloat(formData.get('cost') as string),
      category_id: formData.get('category_id'),
      // Bug fix: FormData.get returns null for an absent field, but
      // z.string().optional() only accepts undefined — coerce so an
      // omitted description still validates.
      description: formData.get('description') ?? undefined
    })

    const supabase = createClient()

    // Duplicate-SKU pre-check. maybeSingle() resolves with data: null
    // when no row matches; single() would surface a "no rows" error
    // on the common (non-duplicate) path.
    const { data: existingProduct } = await supabase
      .from('products')
      .select('id')
      .eq('sku', validatedData.sku)
      .maybeSingle()

    if (existingProduct) {
      throw new Error('Product with this SKU already exists')
    }

    const { data, error } = await supabase
      .from('products')
      .insert(validatedData)
      .select()
      .single()

    if (error) {
      console.error('createProduct error:', error)
      throw new Error('Failed to create product')
    }

    // Invalidate every cache that can show product data.
    revalidateTag('products')
    revalidateTag('categories')
    revalidateTag('dashboard')

    return { success: true, data }
  } catch (error) {
    // Both validation failures and the explicit throws above land here;
    // the message is safe to show to the user.
    console.error('createProduct validation error:', error)
    return {
      success: false,
      error: error instanceof Error ? error.message : 'Invalid input'
    }
  }
}

// Batch operations for better performance.
/**
 * Applies many product updates concurrently.
 *
 * Fixes over the original version:
 * - the `id` is stripped from the update payload, so the primary-key
 *   column itself is never rewritten;
 * - Supabase query builders RESOLVE with `{ error }` rather than
 *   rejecting, so each settled result is also inspected for `error` —
 *   previously every database-level failure counted as a success.
 */
export async function updateProductsBatch(updates: Array<{ id: string; [key: string]: any }>) {
  const supabase = createClient()

  try {
    const promises = updates.map(({ id, ...fields }) =>
      supabase
        .from('products')
        .update(fields)
        .eq('id', id)
    )

    const results = await Promise.allSettled(promises)

    const successful = results.filter(
      result => result.status === 'fulfilled' && !result.value.error
    ).length
    const failed = updates.length - successful

    // Invalidate caches if any updates succeeded
    if (successful > 0) {
      revalidateTag('products')
      revalidateTag('inventory')
    }

    return {
      success: successful > 0,
      results: { successful, failed, total: updates.length }
    }
  } catch (error) {
    console.error('updateProductsBatch error:', error)
    throw new Error('Batch update failed')
  }
}

Background Job Processing

Handle heavy operations asynchronously to improve user experience.

Background Job System

// lib/jobs/background-jobs.ts
interface Job {
  id: string // UUID assigned at enqueue time
  type: string // dispatch key, see BackgroundJobQueue.executeJob
  data: any // type-specific payload passed to the job handler
  priority: number // higher runs first; default 0
  attempts: number // attempts made so far
  maxAttempts: number // permanent failure once attempts reaches this
  createdAt: Date
  processingAt?: Date // set each time an attempt starts
  completedAt?: Date // set on success
  failedAt?: Date // set when the retry budget is exhausted
  error?: string // last error message on permanent failure
}

/**
 * Minimal in-process job queue with priority ordering and bounded
 * retries.
 *
 * Bug fix: finished jobs are now RETAINED in the map instead of being
 * deleted, so getJobStatus() can actually report completion/failure —
 * the original deleted jobs the moment they finished, making the
 * status server action below always return null for done jobs.
 *
 * NOTE(review): `createClient` is referenced in exportInventory but is
 * never imported in this module — add the '@/lib/supabase/server'
 * import. Also note this queue is per-process and unbounded; for
 * production use a durable queue and evict finished jobs periodically.
 */
class BackgroundJobQueue {
  private jobs: Map<string, Job> = new Map()
  private processing = false

  /** Jobs that have neither completed nor permanently failed. */
  private pendingJobs(): Job[] {
    return Array.from(this.jobs.values()).filter(j => !j.completedAt && !j.failedAt)
  }

  /**
   * Enqueue a job and lazily start the worker loop.
   * @returns the new job's id, usable with getJobStatus().
   */
  async addJob(
    type: string,
    data: any,
    options: { priority?: number; maxAttempts?: number } = {}
  ) {
    const job: Job = {
      id: crypto.randomUUID(),
      type,
      data,
      priority: options.priority || 0,
      attempts: 0,
      maxAttempts: options.maxAttempts || 3,
      createdAt: new Date()
    }

    this.jobs.set(job.id, job)

    // Start processing if not already running.
    if (!this.processing) {
      this.processJobs()
    }

    return job.id
  }

  /** Worker loop: drains pending jobs in priority order, then exits. */
  private async processJobs() {
    this.processing = true

    let job: Job | undefined
    // Pick the highest-priority pending job each pass; the loop ends
    // when no pending job remains (finished jobs stay in the map for
    // status lookups but are filtered out here).
    while ((job = this.pendingJobs().sort((a, b) => b.priority - a.priority)[0])) {
      try {
        job.processingAt = new Date()
        job.attempts++

        await this.executeJob(job)

        job.completedAt = new Date()
        console.log(`Job ${job.id} completed successfully`)
      } catch (error) {
        console.error(`Job ${job.id} failed:`, error)

        // Mark permanently failed once the retry budget is spent;
        // otherwise the job stays pending and is retried next pass.
        if (job.attempts >= job.maxAttempts) {
          job.failedAt = new Date()
          job.error = error instanceof Error ? error.message : 'Unknown error'
        }
      }
    }

    this.processing = false
  }

  /** Dispatch a job to its type-specific handler. */
  private async executeJob(job: Job) {
    switch (job.type) {
      case 'export-inventory':
        await this.exportInventory(job.data)
        break
      case 'sync-products':
        await this.syncProducts(job.data)
        break
      case 'generate-report':
        await this.generateReport(job.data)
        break
      default:
        throw new Error(`Unknown job type: ${job.type}`)
    }
  }

  /** Heavy inventory export operation (simplified placeholder). */
  private async exportInventory(data: any) {
    const supabase = createClient()

    const { data: inventory } = await supabase
      .from('inventory')
      .select(`
        *,
        product:products(*),
        warehouse:warehouses(*)
      `)

    // Process and export data (simplified)
    console.log(`Exporting ${inventory?.length} inventory items`)

    // Simulate heavy processing
    await new Promise(resolve => setTimeout(resolve, 2000))
  }

  /** Product synchronization placeholder. */
  private async syncProducts(data: any) {
    console.log('Syncing products from external source')
    await new Promise(resolve => setTimeout(resolve, 5000))
  }

  /** Report generation placeholder. */
  private async generateReport(data: any) {
    console.log('Generating comprehensive report')
    await new Promise(resolve => setTimeout(resolve, 3000))
  }

  /** Returns the job record — including finished jobs — or null. */
  getJobStatus(jobId: string): Job | null {
    return this.jobs.get(jobId) || null
  }
}

// Process-wide singleton shared by the server actions below.
export const jobQueue = new BackgroundJobQueue()

// Server action to queue jobs.
// Enqueues a high-priority inventory export and returns the job id so
// the client can poll getJobStatus() for progress.
export async function queueInventoryExport(filters: any) {
  'use server'
  
  const jobId = await jobQueue.addJob('export-inventory', filters, {
    priority: 1,
    maxAttempts: 2
  })
  
  return { success: true, jobId }
}

// Server action: expose a serializable snapshot of a job's state for
// client-side polling. Returns null when the job id is unknown.
export async function getJobStatus(jobId: string) {
  'use server'

  const job = jobQueue.getJobStatus(jobId)
  if (!job) {
    return null
  }

  const { id, type, attempts, maxAttempts, completedAt, failedAt, error } = job
  return {
    id,
    type,
    attempts,
    maxAttempts,
    completed: Boolean(completedAt),
    failed: Boolean(failedAt),
    error
  }
}

Request/Response Optimization

Request Parsing and Validation

Optimize request parsing and validation for better performance.

Optimized Request Handler

// lib/api/request-optimization.ts
import { NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'

/** Result of request validation, discriminated so callers can narrow on `success`. */
type ValidationResult<T> =
  | { success: true; data: T }
  | { success: false; error: string }

/**
 * Parses the search params and/or body of a request and validates the
 * merged object against a zod schema. Body fields override same-named
 * query parameters.
 *
 * Fixes over the original:
 * - the return type is an explicit discriminated union, so
 *   `if (!result.success)` actually narrows `result.data` at call
 *   sites (previously `success` was inferred as plain `boolean` and
 *   `result.data` failed to type-check after the guard);
 * - the body-size limit is measured in bytes, not UTF-16 code units.
 */
export async function parseAndValidateRequest<T>(
  request: NextRequest,
  schema: z.ZodSchema<T>,
  options: {
    parseBody?: boolean
    parseSearchParams?: boolean
    maxBodySize?: number
  } = {}
): Promise<ValidationResult<T>> {
  const {
    parseBody = false,
    parseSearchParams = true,
    maxBodySize = 1024 * 1024 // 1MB default
  } = options

  try {
    let data: any = {}

    // Parse search parameters first; body fields merged later win.
    if (parseSearchParams) {
      const searchParams = new URL(request.url).searchParams
      const params: Record<string, string> = {}
      searchParams.forEach((value, key) => {
        params[key] = value
      })
      data = { ...data, ...params }
    }

    // Parse request body
    if (parseBody && request.body) {
      const contentType = request.headers.get('content-type')

      if (contentType?.includes('application/json')) {
        const bodyText = await request.text()

        // Enforce the limit in bytes — multi-byte characters make the
        // payload larger than bodyText.length suggests.
        if (Buffer.byteLength(bodyText, 'utf8') > maxBodySize) {
          throw new Error('Request body too large')
        }

        const body = JSON.parse(bodyText)
        data = { ...data, ...body }
      } else if (contentType?.includes('multipart/form-data')) {
        const formData = await request.formData()
        const formObject: Record<string, any> = {}

        formData.forEach((value, key) => {
          formObject[key] = value
        })

        data = { ...data, ...formObject }
      }
    }

    // Validate against schema; zod errors fall through to the catch.
    const validatedData = schema.parse(data)
    return { success: true, data: validatedData }
  } catch (error) {
    return {
      success: false,
      error: error instanceof Error ? error.message : 'Validation failed'
    }
  }
}

// Usage example
const ProductQuerySchema = z.object({
  // Query-string values arrive as strings: transform to number, then
  // validate the numeric result. Defaults apply when the param is absent.
  page: z.string().transform(Number).pipe(z.number().positive()).default('1'),
  limit: z.string().transform(Number).pipe(z.number().max(100)).default('20'),
  category: z.string().optional(),
  search: z.string().optional()
})

export async function GET(request: NextRequest) {
  const validation = await parseAndValidateRequest(
    request, 
    ProductQuerySchema,
    { parseSearchParams: true }
  )

  if (!validation.success) {
    return NextResponse.json(
      { error: validation.error },
      { status: 400 }
    )
  }

  // Use validated data
  const { page, limit, category, search } = validation.data
  
  // Proceed with API logic... (placeholder — a real handler must
  // return a response on this path as well)
}

Database Connection Optimization

Optimize database connections and queries for better performance.

Connection Pool Management

// lib/supabase/optimized-client.ts
import { createClient } from '@supabase/supabase-js'

interface ConnectionOptions {
  maxConnections?: number // concurrency cap enforced by withConnection (default 10)
  // NOTE(review): idleTimeout and connectionTimeout are accepted but not
  // used anywhere in this client — wire them up or remove them.
  idleTimeout?: number
  connectionTimeout?: number
}

/**
 * Thin wrapper around the Supabase client that adds a concurrency cap
 * and automatic retries with exponential backoff.
 *
 * Improvement over the original: the inner client is typed from the
 * factory's return type instead of `any`, so query builders keep their
 * types at call sites.
 */
class OptimizedSupabaseClient {
  private client: ReturnType<typeof createClient>
  private connectionCount = 0
  private readonly maxConnections: number

  constructor(url: string, key: string, options: ConnectionOptions = {}) {
    this.maxConnections = options.maxConnections || 10

    this.client = createClient(url, key, {
      db: {
        schema: 'public',
      },
      auth: {
        // Server-side usage: no session persistence or token refresh.
        autoRefreshToken: false,
        persistSession: false,
      },
      global: {
        headers: {
          'X-Client-Info': 'smart-shelf@1.0.0',
        },
      },
    })
  }

  /**
   * Runs an operation under the connection cap. Throws immediately when
   * the cap is reached (no queueing) — callers should back off and retry.
   */
  async withConnection<T>(operation: (client: ReturnType<typeof createClient>) => Promise<T>): Promise<T> {
    // Simple connection counting (in production, use proper pooling).
    if (this.connectionCount >= this.maxConnections) {
      throw new Error('Too many database connections')
    }

    this.connectionCount++

    try {
      return await operation(this.client)
    } finally {
      this.connectionCount--
    }
  }

  /**
   * Executes a query with up to `retries` retries and exponential
   * backoff (100ms, 200ms, ...). Errors that indicate a deterministic
   * outcome (duplicates, not-found) are never retried.
   */
  async query<T>(
    queryFn: (client: ReturnType<typeof createClient>) => any,
    retries: number = 2
  ): Promise<T> {
    return this.withConnection(async (client) => {
      let lastError: Error | null = null

      for (let i = 0; i <= retries; i++) {
        try {
          const result = await queryFn(client)

          // Supabase resolves with { data, error } rather than rejecting.
          if (result.error) {
            throw new Error(result.error.message)
          }

          return result.data
        } catch (error) {
          lastError = error as Error

          // Deterministic failures will not succeed on retry.
          if (error instanceof Error &&
              (error.message.includes('duplicate') ||
               error.message.includes('not found'))) {
            throw error
          }

          // Wait before retry (exponential backoff).
          if (i < retries) {
            await new Promise(resolve =>
              setTimeout(resolve, Math.pow(2, i) * 100)
            )
          }
        }
      }

      throw lastError || new Error('Query failed after retries')
    })
  }
}

// Export singleton instance.
// NOTE(review): this uses the service-role key, which bypasses
// row-level security — this module must only ever be imported from
// server-side code. The non-null assertions assume both env vars are
// set at process start; fail fast in config validation otherwise.
export const optimizedSupabase = new OptimizedSupabaseClient(
  process.env.NEXT_PUBLIC_SUPABASE_URL!,
  process.env.SUPABASE_SERVICE_ROLE_KEY!,
  {
    maxConnections: 20,
    idleTimeout: 30000,
    connectionTimeout: 5000
  }
)

Backend performance optimization is essential for providing responsive user experiences and supporting application scalability. These techniques ensure Smart Shelf can handle growing user bases and data volumes efficiently.