Security & Backup
This section covers production security measures, backup strategies, and disaster recovery procedures for Smart Shelf deployment.
Production Security
Environment Security
# .env.production (example - never commit actual values)
NODE_ENV=production
# Application
NEXT_PUBLIC_APP_URL=https://smart-shelf.vercel.app
NEXT_PUBLIC_APP_NAME=Smart Shelf
# Supabase (Production)
NEXT_PUBLIC_SUPABASE_URL=https://your-project.supabase.co
NEXT_PUBLIC_SUPABASE_ANON_KEY=your-anon-key
SUPABASE_SERVICE_ROLE_KEY=your-service-role-key
# Security
JWT_SECRET=your-super-secure-jwt-secret
# ENCRYPTION_KEY must be 32 bytes, hex-encoded (64 hex characters), for AES-256-GCM
ENCRYPTION_KEY=your-64-hex-char-encryption-key
# External Services
STRIPE_SECRET_KEY=sk_live_your-stripe-secret
STRIPE_WEBHOOK_SECRET=whsec_your-webhook-secret
# Monitoring
SENTRY_DSN=https://your-sentry-dsn
VERCEL_ANALYTICS_ID=your-analytics-id
SECURITY_WEBHOOK_URL=https://your-security-webhook
Secret Management
// lib/security/secrets.ts
import { ENV } from '@/lib/config/environment';
export class SecretManager {
private static secrets = new Map<string, string>();
static async getSecret(key: string): Promise<string> {
// Check cache first
if (this.secrets.has(key)) {
return this.secrets.get(key)!;
}
// In production, secrets should come from environment variables
const secret = process.env[key];
if (!secret) {
throw new Error(`Secret ${key} not found`);
}
// Cache for performance (be careful with sensitive data)
this.secrets.set(key, secret);
return secret;
}
static validateRequiredSecrets(): void {
const requiredSecrets = [
'JWT_SECRET',
'ENCRYPTION_KEY',
'SUPABASE_SERVICE_ROLE_KEY',
];
const missing = requiredSecrets.filter(secret => !process.env[secret]);
if (missing.length > 0) {
throw new Error(`Missing required secrets: ${missing.join(', ')}`);
}
}
static rotateSecret(key: string, newValue: string): void {
// Implementation for secret rotation
// This would typically involve:
// 1. Updating the secret in the secret management system
// 2. Invalidating cached values
// 3. Triggering application restart if needed
this.secrets.delete(key);
process.env[key] = newValue;
console.log(`Secret ${key} rotated successfully`);
}
}
// Validate secrets on startup
if (ENV.NODE_ENV === 'production') {
SecretManager.validateRequiredSecrets();
}
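A minimal usage sketch follows; the route path and the jose JWT library are illustrative assumptions, not part of Smart Shelf:

// app/api/token/route.ts (hypothetical consumer of SecretManager)
import { SignJWT } from 'jose'; // assumed JWT library, not prescribed by this doc
import { SecretManager } from '@/lib/security/secrets';

export async function POST() {
  // getSecret throws if the secret is missing, so misconfiguration fails fast
  const jwtSecret = await SecretManager.getSecret('JWT_SECRET');
  const token = await new SignJWT({ scope: 'api' })
    .setProtectedHeader({ alg: 'HS256' })
    .setIssuedAt()
    .setExpirationTime('1h')
    .sign(new TextEncoder().encode(jwtSecret));
  return Response.json({ token });
}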
Security Monitoring
// lib/security/monitoring.ts
import { createServiceRoleClient } from '@/lib/supabase/server';
export async function detectAnomalousActivity(userId: string) {
const supabase = createServiceRoleClient();
// Check for suspicious login patterns
const { data: recentLogins } = await supabase
.from('auth_logs')
.select('ip_address, created_at')
.eq('user_id', userId)
.gte('created_at', new Date(Date.now() - 3600000).toISOString()) // Last hour
.order('created_at', { ascending: false });
if (recentLogins && recentLogins.length > 10) {
// Too many login attempts
await alertSecurityTeam('high_login_frequency', userId);
}
// Check for geographic anomalies
const uniqueIPs = new Set(recentLogins?.map(login => login.ip_address));
if (uniqueIPs.size > 3) {
// Multiple IPs in short time
await alertSecurityTeam('multiple_locations', userId);
}
}
async function alertSecurityTeam(type: string, userId: string) {
// Send alert to security monitoring system
await fetch(process.env.SECURITY_WEBHOOK_URL!, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
alert: type,
userId,
timestamp: new Date().toISOString(),
severity: 'high',
}),
});
}
// Security metrics collection
export async function collectSecurityMetrics() {
const supabase = createServiceRoleClient();
  const dayAgo = new Date(Date.now() - 86400000).toISOString(); // Last 24h
  const [failedLogins, suspiciousActivities, blockedIPs] = await Promise.all([
    supabase
      .from('auth_logs')
      .select('*', { count: 'exact', head: true })
      .eq('success', false)
      .gte('created_at', dayAgo),
    supabase
      .from('security_events')
      .select('*', { count: 'exact', head: true })
      .eq('severity', 'high')
      .gte('created_at', dayAgo),
    supabase
      .from('blocked_ips')
      .select('*', { count: 'exact', head: true })
      .eq('is_active', true),
  ]);
  return {
    failedLogins: failedLogins.count ?? 0,
    suspiciousActivities: suspiciousActivities.count ?? 0,
    blockedIPs: blockedIPs.count ?? 0,
timestamp: new Date().toISOString(),
};
}
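These metrics can be exposed to a dashboard or external scraper; below is a sketch of a guarded route handler (the route path and the METRICS_TOKEN variable are assumptions, not part of Smart Shelf):

// app/api/security/metrics/route.ts (hypothetical endpoint)
import { NextRequest, NextResponse } from 'next/server';
import { collectSecurityMetrics } from '@/lib/security/monitoring';

export async function GET(request: NextRequest) {
  // Simple shared-secret guard; METRICS_TOKEN is an assumed environment variable
  if (request.headers.get('x-metrics-token') !== process.env.METRICS_TOKEN) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
  }
  return NextResponse.json(await collectSecurityMetrics());
}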
Rate Limiting and DDoS Protection
// lib/security/rate-limiting.ts
import { Redis } from 'ioredis';
const redis = new Redis(process.env.REDIS_URL!);
interface RateLimitConfig {
windowMs: number;
maxRequests: number;
blockDuration?: number;
}
const RATE_LIMITS: Record<string, RateLimitConfig> = {
api: { windowMs: 60 * 1000, maxRequests: 100 },
auth: { windowMs: 15 * 60 * 1000, maxRequests: 5, blockDuration: 60 * 60 * 1000 },
upload: { windowMs: 60 * 1000, maxRequests: 10 },
};
export async function checkRateLimit(
identifier: string,
limitType: keyof typeof RATE_LIMITS
): Promise<{ allowed: boolean; remaining: number; resetTime: number }> {
const config = RATE_LIMITS[limitType];
const key = `rate_limit:${limitType}:${identifier}`;
// Check if IP is currently blocked
const blockKey = `blocked:${identifier}`;
const isBlocked = await redis.get(blockKey);
if (isBlocked) {
const ttl = await redis.ttl(blockKey);
return {
allowed: false,
remaining: 0,
resetTime: Date.now() + (ttl * 1000),
};
}
  // Atomically increment the counter; start the window only on the first hit
  // so sustained traffic cannot keep extending it
  const requests = await redis.incr(key);
  if (requests === 1) {
    await redis.pexpire(key, config.windowMs);
  }
  if (requests > config.maxRequests) {
    // Block the identifier if configured
    if (config.blockDuration) {
      await redis.set(blockKey, '1', 'PX', config.blockDuration);
    }
    const windowTtl = await redis.pttl(key);
    return {
      allowed: false,
      remaining: 0,
      resetTime: Date.now() + Math.max(windowTtl, 0),
    };
  }
  return {
    allowed: true,
    remaining: config.maxRequests - requests,
    resetTime: Date.now() + config.windowMs,
  };
}
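A sketch of enforcing these limits in a route handler follows. It assumes the client IP arrives via the x-forwarded-for header and that the route runs on the Node.js runtime (ioredis does not work on the Edge runtime):

// app/api/example/route.ts (hypothetical route)
import { NextRequest, NextResponse } from 'next/server';
import { checkRateLimit } from '@/lib/security/rate-limiting';

export async function GET(request: NextRequest) {
  const ip = request.headers.get('x-forwarded-for')?.split(',')[0]?.trim() ?? 'unknown';
  const { allowed, remaining, resetTime } = await checkRateLimit(ip, 'api');
  if (!allowed) {
    return NextResponse.json(
      { error: 'Too many requests' },
      {
        status: 429,
        headers: { 'Retry-After': String(Math.ceil((resetTime - Date.now()) / 1000)) },
      }
    );
  }
  // ... handle the request normally
  return NextResponse.json({ ok: true, remaining });
}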
Backup Strategies
Database Backup
-- Automated backup configuration (managed by Supabase)
-- Point-in-time recovery: 7-day window
-- Geographic replication: Multiple regions
-- Backup verification: Automated testing
-- Manual backup for critical operations
CREATE OR REPLACE FUNCTION create_manual_backup(backup_name TEXT)
RETURNS void AS $$
BEGIN
  -- Export critical tables; COPY requires a literal path, so build each
  -- command dynamically (server-side COPY also needs file-write privileges)
  EXECUTE format('COPY products TO %L CSV HEADER', '/backups/' || backup_name || '_products.csv');
  EXECUTE format('COPY inventory TO %L CSV HEADER', '/backups/' || backup_name || '_inventory.csv');
  EXECUTE format('COPY sales_orders TO %L CSV HEADER', '/backups/' || backup_name || '_orders.csv');
-- Log backup creation
INSERT INTO backup_log (name, created_at, type)
VALUES (backup_name, NOW(), 'manual');
END;
$$ LANGUAGE plpgsql;
-- Backup verification
CREATE OR REPLACE FUNCTION verify_backup(p_backup_name TEXT)
RETURNS boolean AS $$
DECLARE
  product_count INTEGER;
  inventory_count INTEGER;
  order_count INTEGER;
BEGIN
  -- Count records in the backup tables; the parameter is prefixed so it
  -- does not shadow the backup_name column
  SELECT COUNT(*) INTO product_count FROM backup_products WHERE backup_name = p_backup_name;
  SELECT COUNT(*) INTO inventory_count FROM backup_inventory WHERE backup_name = p_backup_name;
  SELECT COUNT(*) INTO order_count FROM backup_orders WHERE backup_name = p_backup_name;
  -- Minimal sanity check: every backup table must contain rows
  IF product_count > 0 AND inventory_count > 0 AND order_count > 0 THEN
RETURN TRUE;
ELSE
RETURN FALSE;
END IF;
END;
$$ LANGUAGE plpgsql;
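These functions can then be invoked from application code through Supabase RPC; a minimal sketch, assuming the functions above are deployed:

// scripts/manual-backup.ts (illustrative caller)
import { createServiceRoleClient } from '@/lib/supabase/server';

async function runManualBackup(name: string) {
  const supabase = createServiceRoleClient();
  const { error } = await supabase.rpc('create_manual_backup', { backup_name: name });
  if (error) throw error;
  const { data: verified } = await supabase.rpc('verify_backup', { p_backup_name: name });
  console.log(verified ? '✅ Backup verified' : '❌ Backup verification failed');
}

runManualBackup(`pre_deploy_${new Date().toISOString().split('T')[0]}`);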
Application Backup
// scripts/backup.ts
import { createServiceRoleClient } from '@/lib/supabase/server';
import { writeFileSync, mkdirSync } from 'fs';
import { join } from 'path';
interface BackupOptions {
includeUserData?: boolean;
includeSystemSettings?: boolean;
encrypt?: boolean;
}
async function createApplicationBackup(options: BackupOptions = {}) {
const supabase = createServiceRoleClient();
const timestamp = new Date().toISOString().split('T')[0];
const backupDir = './backups';
// Ensure backup directory exists
mkdirSync(backupDir, { recursive: true });
try {
console.log('🔄 Creating application backup...');
const backup: any = {
timestamp,
version: process.env.NEXT_PUBLIC_APP_VERSION,
metadata: {
created_at: new Date().toISOString(),
created_by: 'automated_backup',
options,
},
};
// Backup system settings
if (options.includeSystemSettings !== false) {
const { data: settings } = await supabase
.from('system_settings')
.select('*');
backup.settings = settings;
}
// Backup user data (if requested)
if (options.includeUserData) {
const { data: users } = await supabase
.from('user_profiles')
.select('id, email, role, created_at, is_active');
backup.users = users;
const { data: warehouses } = await supabase
.from('warehouses')
.select('*');
backup.warehouses = warehouses;
}
// Backup critical configuration
const { data: features } = await supabase
.from('feature_flags')
.select('*');
backup.features = features;
const backupContent = JSON.stringify(backup, null, 2);
const filename = `app-backup-${timestamp}.json`;
const filepath = join(backupDir, filename);
// Encrypt backup if requested
if (options.encrypt) {
const encrypted = await encryptBackup(backupContent);
writeFileSync(filepath + '.enc', encrypted);
console.log(`✅ Encrypted backup created: ${filename}.enc`);
} else {
writeFileSync(filepath, backupContent);
console.log(`✅ Backup created: ${filename}`);
}
// Log backup creation
await supabase.from('backup_log').insert({
name: filename,
type: 'application',
size: backupContent.length,
encrypted: options.encrypt || false,
created_at: new Date().toISOString(),
});
return filename;
} catch (error) {
console.error('❌ Backup failed:', error);
throw error;
}
}
async function encryptBackup(content: string): Promise<string> {
const crypto = await import('crypto');
  const algorithm = 'aes-256-gcm';
  const key = Buffer.from(process.env.ENCRYPTION_KEY!, 'hex'); // 32 bytes, hex-encoded
  const iv = crypto.randomBytes(12); // 12-byte IV is the recommended size for GCM
  const cipher = crypto.createCipheriv(algorithm, key, iv); // createCipher is deprecated and ignores the IV
cipher.setAAD(Buffer.from('backup-data'));
let encrypted = cipher.update(content, 'utf8', 'hex');
encrypted += cipher.final('hex');
const authTag = cipher.getAuthTag();
return `${iv.toString('hex')}:${authTag.toString('hex')}:${encrypted}`;
}
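// Restores need the inverse operation; this is a sketch matching the
// iv:authTag:ciphertext layout produced by encryptBackup above.
async function decryptBackup(payload: string): Promise<string> {
  const crypto = await import('crypto');
  const [ivHex, authTagHex, encrypted] = payload.split(':');
  const key = Buffer.from(process.env.ENCRYPTION_KEY!, 'hex');
  const decipher = crypto.createDecipheriv('aes-256-gcm', key, Buffer.from(ivHex, 'hex'));
  decipher.setAAD(Buffer.from('backup-data'));
  decipher.setAuthTag(Buffer.from(authTagHex, 'hex'));
  let decrypted = decipher.update(encrypted, 'hex', 'utf8');
  decrypted += decipher.final('utf8'); // throws if the auth tag does not match
  return decrypted;
}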
// Automated backup schedule
async function scheduleBackups() {
// Daily application backup
setInterval(async () => {
try {
await createApplicationBackup({
includeSystemSettings: true,
encrypt: true,
});
} catch (error) {
console.error('Scheduled backup failed:', error);
}
}, 24 * 60 * 60 * 1000); // 24 hours
// Weekly full backup
setInterval(async () => {
try {
await createApplicationBackup({
includeUserData: true,
includeSystemSettings: true,
encrypt: true,
});
} catch (error) {
console.error('Weekly backup failed:', error);
}
}, 7 * 24 * 60 * 60 * 1000); // 7 days
}
export { createApplicationBackup, scheduleBackups };
// CLI usage
if (require.main === module) {
const options = {
includeUserData: process.argv.includes('--include-users'),
encrypt: process.argv.includes('--encrypt'),
};
createApplicationBackup(options);
}
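Note that setInterval only fires while a long-lived Node process stays up; on serverless hosts such as Vercel the schedule will not persist between invocations. Below is a sketch of driving the daily backup from a cron-triggered route instead (the route path and CRON_SECRET are assumptions; Vercel invokes paths listed under "crons" in vercel.json and sends the secret as a bearer token):

// app/api/cron/backup/route.ts (hypothetical cron target)
// vercel.json: { "crons": [{ "path": "/api/cron/backup", "schedule": "0 3 * * *" }] }
import { NextRequest, NextResponse } from 'next/server';
import { createApplicationBackup } from '@/scripts/backup';

export async function GET(request: NextRequest) {
  if (request.headers.get('authorization') !== `Bearer ${process.env.CRON_SECRET}`) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
  }
  const filename = await createApplicationBackup({
    includeSystemSettings: true,
    encrypt: true,
  });
  return NextResponse.json({ filename });
}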
File Storage Backup
// lib/backup/storage.ts
import { createServiceRoleClient } from '@/lib/supabase/server';
export async function backupStorageFiles() {
const supabase = createServiceRoleClient();
try {
// List all files in storage
const { data: files, error } = await supabase.storage
.from('files')
.list('', { limit: 1000 });
if (error) throw error;
const backupPromises = files.map(async (file) => {
// Download file
const { data: fileData } = await supabase.storage
.from('files')
.download(file.name);
if (fileData) {
// Upload to backup storage bucket
await supabase.storage
.from('backups')
.upload(`${new Date().toISOString().split('T')[0]}/${file.name}`, fileData);
}
});
await Promise.all(backupPromises);
console.log(`✅ Backed up ${files.length} files`);
} catch (error) {
console.error('❌ Storage backup failed:', error);
throw error;
}
}
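storage.list() returns at most one page of results, so buckets holding more than 1,000 objects need paging; a sketch using the offset option (the page size is an arbitrary choice):

// lib/backup/storage.ts (continued)
export async function listAllFiles(bucket: string) {
  const supabase = createServiceRoleClient();
  const pageSize = 1000;
  const all: { name: string }[] = [];
  for (let offset = 0; ; offset += pageSize) {
    const { data, error } = await supabase.storage
      .from(bucket)
      .list('', { limit: pageSize, offset });
    if (error) throw error;
    all.push(...(data ?? []));
    if (!data || data.length < pageSize) break; // last page reached
  }
  return all;
}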
Disaster Recovery
Recovery Procedures
// scripts/disaster-recovery.ts
import { createServiceRoleClient } from '@/lib/supabase/server';
import { createApplicationBackup } from './backup';
interface RecoveryOptions {
backupDate: string;
restoreData?: boolean;
restoreSettings?: boolean;
dryRun?: boolean;
}
async function initiateDisasterRecovery(options: RecoveryOptions) {
console.log('🚨 Initiating disaster recovery...');
if (options.dryRun) {
console.log('🔍 DRY RUN MODE - No changes will be made');
}
try {
// 1. Validate backup integrity
const isValid = await validateBackupIntegrity(options.backupDate);
if (!isValid) {
throw new Error('Backup integrity check failed');
}
// 2. Create recovery point
if (!options.dryRun) {
await createRecoveryPoint();
}
// 3. Restore application data
if (options.restoreData) {
await restoreApplicationData(options.backupDate, options.dryRun);
}
// 4. Restore system settings
if (options.restoreSettings) {
await restoreSystemSettings(options.backupDate, options.dryRun);
}
// 5. Verify system health
await verifySystemHealth();
console.log('✅ Disaster recovery completed successfully');
} catch (error) {
console.error('❌ Disaster recovery failed:', error);
// Attempt automatic rollback
if (!options.dryRun) {
console.log('🔄 Attempting automatic rollback...');
await rollbackRecovery();
}
throw error;
}
}
async function validateBackupIntegrity(backupDate: string): Promise<boolean> {
// Implementation to validate backup file integrity
// Check file checksums, validate JSON structure, etc.
return true;
}
async function createRecoveryPoint(): Promise<void> {
// Create a recovery point before making changes
await createApplicationBackup({
includeUserData: true,
includeSystemSettings: true,
encrypt: true,
});
}
async function restoreApplicationData(backupDate: string, dryRun?: boolean): Promise<void> {
  // Implementation to restore application data from backup
  console.log(`📊 Restoring application data from ${backupDate}`);
  if (!dryRun) {
    // Actual restore logic here
  }
}
async function restoreSystemSettings(backupDate: string, dryRun?: boolean): Promise<void> {
  // Implementation to restore system settings from backup (stub; called above)
  console.log(`⚙️ Restoring system settings from ${backupDate}${dryRun ? ' (dry run)' : ''}`);
}
async function rollbackRecovery(): Promise<void> {
  // Implementation to roll back to the recovery point created above (stub)
  console.log('↩️ Rolling back to the pre-recovery state');
}
async function verifySystemHealth(): Promise<void> {
  // Run comprehensive health checks; fetch needs an absolute URL in Node
  const response = await fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/health`);
  const health = await response.json();
if (health.status !== 'healthy') {
throw new Error('System health check failed after recovery');
}
}
export { initiateDisasterRecovery, verifySystemHealth };
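To mirror the CLI pattern used in scripts/backup.ts, here is a sketch of a command-line entry point; the flag names are assumptions:

// Example: npx tsx scripts/disaster-recovery.ts --date 2024-01-15 --dry-run
if (require.main === module) {
  const dateIdx = process.argv.indexOf('--date');
  initiateDisasterRecovery({
    backupDate: dateIdx > -1 ? process.argv[dateIdx + 1] : new Date().toISOString().split('T')[0],
    restoreData: process.argv.includes('--restore-data'),
    restoreSettings: process.argv.includes('--restore-settings'),
    dryRun: process.argv.includes('--dry-run'),
  }).catch(() => process.exit(1));
}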
Automated Recovery Testing
// scripts/test-recovery.ts
import { createServiceRoleClient } from '@/lib/supabase/server';
import { createApplicationBackup } from './backup';
import { initiateDisasterRecovery, verifySystemHealth } from './disaster-recovery';
async function testDisasterRecovery() {
console.log('🧪 Testing disaster recovery procedures...');
try {
// Test backup creation
const backupName = await createApplicationBackup({
includeUserData: true,
encrypt: true,
});
// Test backup restoration (dry run)
await initiateDisasterRecovery({
backupDate: new Date().toISOString().split('T')[0],
restoreData: true,
restoreSettings: true,
dryRun: true,
});
// Test health checks
await verifySystemHealth();
console.log('✅ Disaster recovery test completed successfully');
return {
success: true,
backupName,
timestamp: new Date().toISOString(),
};
} catch (error) {
console.error('❌ Disaster recovery test failed:', error);
return {
success: false,
      error: error instanceof Error ? error.message : String(error),
timestamp: new Date().toISOString(),
};
}
}
// Schedule regular recovery tests (requires a long-lived process; on
// serverless hosts trigger this from a cron route instead, as noted above)
setInterval(async () => {
const result = await testDisasterRecovery();
// Log test results
const supabase = createServiceRoleClient();
await supabase.from('recovery_tests').insert({
success: result.success,
details: result,
created_at: new Date().toISOString(),
});
}, 7 * 24 * 60 * 60 * 1000); // Weekly tests
Security and Backup Checklist
Security Measures
- Environment variables secured
- Secrets rotation implemented
- Rate limiting configured
- Security monitoring active
- DDoS protection enabled
- Regular security audits scheduled
Backup Strategy
- Daily automated backups
- Weekly full backups
- Backup encryption enabled
- Backup integrity verification
- Off-site backup storage
- Recovery procedures documented
Disaster Recovery
- Recovery procedures tested
- RTO and RPO defined
- Emergency contact list updated
- Recovery testing automated
- Rollback procedures prepared
- Communication plan established
This security and backup strategy keeps Smart Shelf well protected and able to recover quickly from disaster scenarios.