Data Protection

Data encryption, masking, retention policies, and privacy protection measures.

Smart Shelf implements comprehensive data protection measures to safeguard sensitive information throughout its lifecycle, from collection to disposal. Our data protection strategy encompasses encryption, access controls, data masking, and privacy-preserving techniques.

Data Classification

Classification Levels

// types/data-classification.ts
export enum DataClassification {
  PUBLIC = 'public',
  INTERNAL = 'internal',
  CONFIDENTIAL = 'confidential',
  RESTRICTED = 'restricted'
}

export interface DataClassificationMetadata {
  classification: DataClassification
  retention_period: number // days
  encryption_required: boolean
  masking_required: boolean
  audit_required: boolean
  geographic_restrictions: string[]
}

export const DATA_CLASSIFICATIONS: Record<DataClassification, DataClassificationMetadata> = {
  [DataClassification.PUBLIC]: {
    classification: DataClassification.PUBLIC,
    retention_period: 365,
    encryption_required: false,
    masking_required: false,
    audit_required: false,
    geographic_restrictions: []
  },
  [DataClassification.INTERNAL]: {
    classification: DataClassification.INTERNAL,
    retention_period: 2555, // 7 years
    encryption_required: true,
    masking_required: false,
    audit_required: true,
    geographic_restrictions: []
  },
  [DataClassification.CONFIDENTIAL]: {
    classification: DataClassification.CONFIDENTIAL,
    retention_period: 2555, // 7 years
    encryption_required: true,
    masking_required: true,
    audit_required: true,
    geographic_restrictions: ['EU', 'US', 'CA']
  },
  [DataClassification.RESTRICTED]: {
    classification: DataClassification.RESTRICTED,
    retention_period: 2555, // 7 years
    encryption_required: true,
    masking_required: true,
    audit_required: true,
    geographic_restrictions: ['local_only']
  }
}

Field-Level Classification

// lib/data-protection/classification.ts
import { DataClassification } from '../../types/data-classification'

export interface FieldClassification {
  field_name: string
  classification: DataClassification
  data_type: 'string' | 'number' | 'date' | 'boolean' | 'json'
  is_pii: boolean
  is_financial: boolean
  encryption_algorithm?: string
  masking_pattern?: string
}

export const FIELD_CLASSIFICATIONS: Record<string, FieldClassification> = {
  // User data
  'users.email': {
    field_name: 'email',
    classification: DataClassification.CONFIDENTIAL,
    data_type: 'string',
    is_pii: true,
    is_financial: false,
    encryption_algorithm: 'AES-256-GCM',
    masking_pattern: 'email'
  },
  'users.phone': {
    field_name: 'phone',
    classification: DataClassification.CONFIDENTIAL,
    data_type: 'string',
    is_pii: true,
    is_financial: false,
    encryption_algorithm: 'AES-256-GCM',
    masking_pattern: 'phone'
  },
  'users.full_name': {
    field_name: 'full_name',
    classification: DataClassification.CONFIDENTIAL,
    data_type: 'string',
    is_pii: true,
    is_financial: false,
    masking_pattern: 'name'
  },
  // Financial data
  'products.cost_price': {
    field_name: 'cost_price',
    classification: DataClassification.RESTRICTED,
    data_type: 'number',
    is_pii: false,
    is_financial: true,
    encryption_algorithm: 'AES-256-GCM',
    masking_pattern: 'currency'
  },
  'suppliers.bank_account': {
    field_name: 'bank_account',
    classification: DataClassification.RESTRICTED,
    data_type: 'string',
    is_pii: false,
    is_financial: true,
    encryption_algorithm: 'AES-256-GCM',
    masking_pattern: 'account_number'
  }
}
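
As a sketch of how this registry might be consulted at runtime, the illustrative helper below (not part of the modules above) resolves a `table.column` key and reports which fields of a row would require encryption before being persisted:

// lib/data-protection/classification-lookup.ts (illustrative helper)
import { FIELD_CLASSIFICATIONS, FieldClassification } from './classification'

export function getFieldClassification(table: string, column: string): FieldClassification | undefined {
  return FIELD_CLASSIFICATIONS[`${table}.${column}`]
}

export function fieldsRequiringEncryption(table: string, row: Record<string, unknown>): string[] {
  // A field needs encryption when its classification declares an encryption algorithm
  return Object.keys(row).filter(column => Boolean(getFieldClassification(table, column)?.encryption_algorithm))
}

// Example: fieldsRequiringEncryption('users', { email: 'a@b.com', role: 'staff' }) returns ['email']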

Encryption

Encryption at Rest

// lib/encryption/at-rest.ts
import crypto from 'crypto'

const ENCRYPTION_KEY = process.env.DATA_ENCRYPTION_KEY! // expected to be a 64-character hex string (32 bytes)
const ALGORITHM = 'aes-256-gcm'
const KEY = Buffer.from(ENCRYPTION_KEY, 'hex')

export class FieldEncryption {
  
  static encrypt(data: string, additionalData?: string): string {
    try {
      const iv = crypto.randomBytes(12) // 96-bit IV, the recommended size for GCM
      const cipher = crypto.createCipheriv(ALGORITHM, KEY, iv)
      
      if (additionalData) {
        cipher.setAAD(Buffer.from(additionalData, 'utf8'))
      }
      
      let encrypted = cipher.update(data, 'utf8', 'hex')
      encrypted += cipher.final('hex')
      
      const authTag = cipher.getAuthTag()
      
      return `${iv.toString('hex')}:${authTag.toString('hex')}:${encrypted}`
    } catch (error) {
      console.error('Encryption failed:', error)
      throw new Error('Data encryption failed')
    }
  }
  
  static decrypt(encryptedData: string, additionalData?: string): string {
    try {
      const [ivHex, authTagHex, encrypted] = encryptedData.split(':')
      
      const iv = Buffer.from(ivHex, 'hex')
      const authTag = Buffer.from(authTagHex, 'hex')
      
      const decipher = crypto.createDecipheriv(ALGORITHM, KEY, iv)
      decipher.setAuthTag(authTag)
      
      if (additionalData) {
        decipher.setAAD(Buffer.from(additionalData, 'utf8'))
      }
      
      let decrypted = decipher.update(encrypted, 'hex', 'utf8')
      decrypted += decipher.final('utf8')
      
      return decrypted
    } catch (error) {
      console.error('Decryption failed:', error)
      throw new Error('Data decryption failed')
    }
  }
}

// Database function for transparent encryption.
// Assumes an `execute_sql` helper RPC and the pgcrypto extension; in production the key
// should come from a server-side setting rather than being interpolated into the function body.
export async function createEncryptedField(tableName: string, fieldName: string) {
  const sql = `
    -- Create encrypted column
    ALTER TABLE ${tableName} 
    ADD COLUMN ${fieldName}_encrypted TEXT;
    
    -- Create trigger function for automatic encryption
    CREATE OR REPLACE FUNCTION encrypt_${tableName}_${fieldName}()
    RETURNS TRIGGER AS $$
    BEGIN
      IF NEW.${fieldName} IS NOT NULL THEN
        NEW.${fieldName}_encrypted = pgp_sym_encrypt(NEW.${fieldName}, '${ENCRYPTION_KEY}');
        NEW.${fieldName} = NULL; -- Clear plaintext
      END IF;
      RETURN NEW;
    END;
    $$ LANGUAGE plpgsql;
    
    -- Create trigger
    CREATE TRIGGER encrypt_${tableName}_${fieldName}_trigger
      BEFORE INSERT OR UPDATE ON ${tableName}
      FOR EACH ROW
      EXECUTE FUNCTION encrypt_${tableName}_${fieldName}();
  `
  
  await supabase.rpc('execute_sql', { sql })
}
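
A minimal usage sketch (assuming DATA_ENCRYPTION_KEY holds a 64-character hex string) shows the round trip; passing the record ID as additional authenticated data binds the ciphertext to that record so it cannot be swapped onto another row:

// Illustrative round trip with FieldEncryption (importing the module defined above)
import { FieldEncryption } from './at-rest'

const userId = 'user-123'
const ciphertext = FieldEncryption.encrypt('jane@example.com', userId) // "iv:authTag:payload" in hex
const plaintext = FieldEncryption.decrypt(ciphertext, userId)          // "jane@example.com"
// Decrypting with different additional data fails the GCM auth check and throws 'Data decryption failed'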

Encryption in Transit

// lib/encryption/in-transit.ts
export class TransitEncryption {
  
  // Client-side encryption before transmission
  static async encryptForTransmission(data: any, recipientPublicKey: string): Promise<string> {
    const key = await crypto.subtle.generateKey(
      { name: 'AES-GCM', length: 256 },
      true,
      ['encrypt', 'decrypt']
    )
    
    const iv = crypto.getRandomValues(new Uint8Array(12))
    const encodedData = new TextEncoder().encode(JSON.stringify(data))
    
    const encryptedData = await crypto.subtle.encrypt(
      { name: 'AES-GCM', iv },
      key,
      encodedData
    )
    
    // Encrypt the AES key with recipient's public key
    const exportedKey = await crypto.subtle.exportKey('raw', key)
    const encryptedKey = await this.encryptWithPublicKey(exportedKey, recipientPublicKey)
    
    return JSON.stringify({
      encryptedData: Array.from(new Uint8Array(encryptedData)),
      encryptedKey: Array.from(new Uint8Array(encryptedKey)),
      iv: Array.from(iv)
    })
  }
  
  private static async encryptWithPublicKey(data: ArrayBuffer, publicKeyPem: string): Promise<ArrayBuffer> {
    const publicKey = await crypto.subtle.importKey(
      'spki',
      this.pemToArrayBuffer(publicKeyPem),
      { name: 'RSA-OAEP', hash: 'SHA-256' },
      false,
      ['encrypt']
    )
    
    return await crypto.subtle.encrypt('RSA-OAEP', publicKey, data)
  }
  
  private static pemToArrayBuffer(pem: string): ArrayBuffer {
    const b64Lines = pem.replace(/-----[^-]+-----/g, '').replace(/\s/g, '')
    const b64 = b64Lines.replace(/\n/g, '')
    const binary = atob(b64)
    const bytes = new Uint8Array(binary.length)
    
    for (let i = 0; i < binary.length; i++) {
      bytes[i] = binary.charCodeAt(i)
    }
    
    return bytes.buffer
  }
}

Data Masking

Dynamic Data Masking

// lib/data-protection/masking.ts
import { DataClassification } from '../../types/data-classification'
import { FIELD_CLASSIFICATIONS, FieldClassification } from './classification'

export class DataMasking {
  
  static maskEmail(email: string): string {
    const [username, domain] = email.split('@')
    if (username.length <= 2) {
      return `${username[0]}*@${domain}`
    }
    return `${username.slice(0, 2)}***@${domain}`
  }
  
  static maskPhone(phone: string): string {
    const cleaned = phone.replace(/\D/g, '')
    if (cleaned.length === 10) {
      return `(***) ***-${cleaned.slice(-4)}`
    }
    return `***-***-${cleaned.slice(-4)}`
  }
  
  static maskName(name: string): string {
    const parts = name.split(' ')
    return parts.map(part => {
      if (part.length <= 1) return part
      return `${part[0]}${'*'.repeat(part.length - 1)}`
    }).join(' ')
  }
  
  static maskCurrency(amount: number): string {
    return '***.**'
  }
  
  static maskAccountNumber(account: string): string {
    if (account.length <= 4) return '****'
    return `****-****-****-${account.slice(-4)}`
  }
  
  static maskCreditCard(cardNumber: string): string {
    const cleaned = cardNumber.replace(/\D/g, '')
    return `****-****-****-${cleaned.slice(-4)}`
  }
  
  static maskSSN(ssn: string): string {
    const cleaned = ssn.replace(/\D/g, '')
    return `***-**-${cleaned.slice(-4)}`
  }
  
  // Apply masking based on field classification
  static maskField(value: any, fieldName: string, userRole: string): any {
    const classification = FIELD_CLASSIFICATIONS[fieldName]
    
    if (!classification || !this.shouldMask(classification, userRole)) {
      return value
    }
    
    if (value === null || value === undefined) {
      return value
    }
    
    switch (classification.masking_pattern) {
      case 'email':
        return this.maskEmail(value.toString())
      case 'phone':
        return this.maskPhone(value.toString())
      case 'name':
        return this.maskName(value.toString())
      case 'currency':
        return this.maskCurrency(parseFloat(value))
      case 'account_number':
        return this.maskAccountNumber(value.toString())
      case 'credit_card':
        return this.maskCreditCard(value.toString())
      case 'ssn':
        return this.maskSSN(value.toString())
      default:
        return '***'
    }
  }
  
  private static shouldMask(classification: FieldClassification, userRole: string): boolean {
    // Admin users see unmasked data
    if (userRole === 'admin') return false
    
    // Apply masking based on classification level
    if (classification.classification === DataClassification.RESTRICTED) {
      return !['admin', 'manager'].includes(userRole)
    }
    
    if (classification.classification === DataClassification.CONFIDENTIAL) {
      return userRole === 'viewer'
    }
    
    return false
  }
}
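
For example, a response layer could run every classified column of a row through maskField before returning it; maskRecord below is an illustrative wrapper, not part of the class above:

// Illustrative: mask all classified fields of a row for the caller's role
import { DataMasking } from './masking'

function maskRecord(table: string, row: Record<string, unknown>, userRole: string): Record<string, unknown> {
  const masked: Record<string, unknown> = {}
  for (const [column, value] of Object.entries(row)) {
    masked[column] = DataMasking.maskField(value, `${table}.${column}`, userRole)
  }
  return masked
}

// maskRecord('users', { email: 'jane@example.com', role: 'staff' }, 'viewer')
// -> { email: 'ja***@example.com', role: 'staff' }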

Database-Level Masking

-- Create masking functions
CREATE OR REPLACE FUNCTION mask_email(email TEXT, user_role TEXT)
RETURNS TEXT AS $$
BEGIN
  IF user_role = 'admin' THEN
    RETURN email;
  END IF;
  
  IF email IS NULL THEN
    RETURN NULL;
  END IF;
  
  RETURN CONCAT(
    LEFT(SPLIT_PART(email, '@', 1), 2),
    '***@',
    SPLIT_PART(email, '@', 2)
  );
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION mask_phone(phone TEXT, user_role TEXT)
RETURNS TEXT AS $$
BEGIN
  IF user_role IN ('admin', 'manager') THEN
    RETURN phone;
  END IF;
  
  IF phone IS NULL THEN
    RETURN NULL;
  END IF;
  
  RETURN CONCAT('***-***-', RIGHT(REGEXP_REPLACE(phone, '[^0-9]', '', 'g'), 4));
END;
$$ LANGUAGE plpgsql;

-- Create masked views
CREATE VIEW users_masked AS
SELECT 
  id,
  mask_email(email, current_setting('app.current_user_role', true)) as email,
  mask_phone(phone, current_setting('app.current_user_role', true)) as phone,
  CASE 
    WHEN current_setting('app.current_user_role', true) = 'viewer' 
    THEN CONCAT(LEFT(full_name, 1), '***')
    ELSE full_name
  END as full_name,
  role,
  created_at,
  updated_at
FROM users;
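
The masked view depends on the `app.current_user_role` session setting, which must be set on the same database session that runs the query. With a pooled client such as supabase-js, the simplest way to guarantee that is a single RPC, say a hypothetical `get_masked_users(role TEXT)` function that calls `set_config('app.current_user_role', role, true)` and then selects from `users_masked`; the application side then reduces to:

// Illustrative call to a hypothetical get_masked_users RPC (see the note above)
async function getMaskedUsers(role: string) {
  const { data, error } = await supabase.rpc('get_masked_users', { role })
  if (error) throw error
  return data
}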

Data Retention & Disposal

Retention Policies

// lib/data-protection/retention.ts
export interface RetentionPolicy {
  table_name: string
  retention_period_days: number
  archive_before_delete: boolean
  deletion_criteria: string
  notification_before_days: number
}

export const RETENTION_POLICIES: RetentionPolicy[] = [
  {
    table_name: 'audit_logs',
    retention_period_days: 2555, // 7 years
    archive_before_delete: true,
    deletion_criteria: 'created_at < NOW() - INTERVAL \'7 years\'',
    notification_before_days: 30
  },
  {
    table_name: 'user_sessions',
    retention_period_days: 90,
    archive_before_delete: false,
    deletion_criteria: 'expires_at < NOW() - INTERVAL \'90 days\'',
    notification_before_days: 7
  },
  {
    table_name: 'stock_movements',
    retention_period_days: 2555, // 7 years for financial records
    archive_before_delete: true,
    deletion_criteria: 'created_at < NOW() - INTERVAL \'7 years\'',
    notification_before_days: 30
  },
  {
    table_name: 'failed_login_attempts',
    retention_period_days: 30,
    archive_before_delete: false,
    deletion_criteria: 'created_at < NOW() - INTERVAL \'30 days\'',
    notification_before_days: 0
  }
]

export class DataRetentionManager {
  
  static async applyRetentionPolicies(): Promise<void> {
    for (const policy of RETENTION_POLICIES) {
      await this.applyRetentionPolicy(policy)
    }
  }
  
  private static async applyRetentionPolicy(policy: RetentionPolicy): Promise<void> {
    try {
      // Check for records approaching deletion
      if (policy.notification_before_days > 0) {
        await this.notifyUpcomingDeletion(policy)
      }
      
      // Archive records if required
      if (policy.archive_before_delete) {
        await this.archiveRecords(policy)
      }
      
      // Delete expired records
      await this.deleteExpiredRecords(policy)
      
      // Log retention action
      await this.logRetentionAction(policy)
      
    } catch (error) {
      console.error(`Retention policy failed for ${policy.table_name}:`, error)
    }
  }
  
  private static async notifyUpcomingDeletion(policy: RetentionPolicy): Promise<void> {
    // Records created before this cutoff will reach end of retention within the notification window
    const cutoff = new Date()
    cutoff.setDate(cutoff.getDate() - (policy.retention_period_days - policy.notification_before_days))
    
    const { count } = await supabase
      .from(policy.table_name)
      .select('*', { count: 'exact', head: true })
      .lt('created_at', cutoff.toISOString())
    
    if (count && count > 0) {
      const deletionDate = new Date()
      deletionDate.setDate(deletionDate.getDate() + policy.notification_before_days)
      
      await this.sendRetentionNotification({
        table_name: policy.table_name,
        records_count: count,
        deletion_date: deletionDate
      })
    }
  }
  
  private static async archiveRecords(policy: RetentionPolicy): Promise<void> {
    const archiveTableName = `${policy.table_name}_archive`
    
    // Create archive table if it doesn't exist (helper RPC mirrors the source table
    // and adds archived_at / archive_reason columns)
    await supabase.rpc('create_archive_table', {
      source_table: policy.table_name,
      archive_table: archiveTableName
    })
    
    // Copy expiring rows into the archive. supabase-js has no raw-filter API, so the
    // SQL deletion criteria is applied server-side via the execute_sql helper RPC.
    await supabase.rpc('execute_sql', {
      sql: `
        INSERT INTO ${archiveTableName}
        SELECT *, NOW() AS archived_at, 'retention_policy' AS archive_reason
        FROM ${policy.table_name}
        WHERE ${policy.deletion_criteria}
      `
    })
  }
  
  private static async deleteExpiredRecords(policy: RetentionPolicy): Promise<void> {
    // Apply the SQL deletion criteria server-side via the execute_sql helper RPC
    await supabase.rpc('execute_sql', {
      sql: `DELETE FROM ${policy.table_name} WHERE ${policy.deletion_criteria}`
    })
    
    console.log(`Deleted expired records from ${policy.table_name}`)
  }
  
  private static async logRetentionAction(policy: RetentionPolicy): Promise<void> {
    await supabase
      .from('data_retention_log')
      .insert({
        table_name: policy.table_name,
        action: 'retention_policy_applied',
        retention_period_days: policy.retention_period_days,
        executed_at: new Date().toISOString()
      })
  }
  
  private static async sendRetentionNotification(details: any): Promise<void> {
    // Send notification to data protection officer
    console.log('Retention notification:', details)
    // Implementation would send email/alert
  }
}
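
The retention manager is meant to run on a schedule rather than inside request handlers. A minimal sketch of a nightly job (the script path and scheduler are assumptions; any cron-style runner works):

// scripts/apply-retention.ts (hypothetical scheduled job)
import { DataRetentionManager } from '../lib/data-protection/retention'

async function main() {
  // Invoked nightly by cron or the hosting platform's scheduler
  await DataRetentionManager.applyRetentionPolicies()
}

main().catch(error => {
  console.error('Retention run failed:', error)
  process.exit(1)
})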

Secure Data Disposal

// lib/data-protection/disposal.ts
import crypto from 'crypto'

export class SecureDataDisposal {
  
  static async secureDelete(tableName: string, recordId: string): Promise<void> {
    try {
      // Step 1: Create audit record before deletion
      await this.createDeletionAuditRecord(tableName, recordId)
      
      // Step 2: Overwrite sensitive fields with random data
      await this.overwriteSensitiveFields(tableName, recordId)
      
      // Step 3: Wait for database checkpoint
      await this.waitForCheckpoint()
      
      // Step 4: Perform actual deletion
      await supabase
        .from(tableName)
        .delete()
        .eq('id', recordId)
      
      // Step 5: Verify deletion
      await this.verifyDeletion(tableName, recordId)
      
    } catch (error) {
      console.error('Secure deletion failed:', error)
      throw error
    }
  }
  
  private static async createDeletionAuditRecord(tableName: string, recordId: string): Promise<void> {
    await supabase
      .from('secure_deletion_log')
      .insert({
        table_name: tableName,
        record_id: recordId,
        deletion_requested_at: new Date().toISOString(),
        deletion_method: 'secure_overwrite'
      })
  }
  
  private static async overwriteSensitiveFields(tableName: string, recordId: string): Promise<void> {
    const sensitiveFields = this.getSensitiveFields(tableName)
    
    if (sensitiveFields.length === 0) return
    
    const overwriteData: any = {}
    
    sensitiveFields.forEach(field => {
      overwriteData[field] = this.generateSecureRandomData(field)
    })
    
    await supabase
      .from(tableName)
      .update(overwriteData)
      .eq('id', recordId)
  }
  
  private static getSensitiveFields(tableName: string): string[] {
    const fieldMappings: Record<string, string[]> = {
      users: ['email', 'phone', 'full_name', 'avatar_url'],
      customers: ['email', 'phone', 'address', 'payment_info'],
      suppliers: ['contact_email', 'phone', 'bank_account'],
      audit_logs: ['details', 'ip_address', 'user_agent']
    }
    
    return fieldMappings[tableName] || []
  }
  
  private static generateSecureRandomData(fieldType: string): string {
    const randomData = crypto.randomBytes(32).toString('hex')
    
    switch (fieldType) {
      case 'email':
        return `deleted_${randomData.slice(0, 8)}@deleted.local`
      case 'phone':
        return '+1-000-000-0000'
      case 'name':
      case 'full_name':
        return 'DELETED USER'
      case 'address':
        return 'DELETED ADDRESS'
      default:
        return `DELETED_${randomData.slice(0, 16)}`
    }
  }
  
  private static async waitForCheckpoint(): Promise<void> {
    // Force a database checkpoint so the overwritten values are flushed to disk
    // (assumes a `pg_checkpoint` helper function is exposed as an RPC with sufficient privileges)
    await supabase.rpc('pg_checkpoint')
    
    // Wait a brief moment for the checkpoint to complete
    await new Promise(resolve => setTimeout(resolve, 1000))
  }
  
  private static async verifyDeletion(tableName: string, recordId: string): Promise<void> {
    const { data, error } = await supabase
      .from(tableName)
      .select('id')
      .eq('id', recordId)
      .single()
    
    if (data) {
      throw new Error(`Deletion verification failed: Record ${recordId} still exists in ${tableName}`)
    }
    
    // Update audit log with successful deletion
    await supabase
      .from('secure_deletion_log')
      .update({
        deletion_completed_at: new Date().toISOString(),
        deletion_verified: true
      })
      .eq('table_name', tableName)
      .eq('record_id', recordId)
  }
}
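
A typical call site (a sketch; authorization and confirmation checks are placeholders) wires the routine into an explicit deletion request:

// Illustrative: securely dispose of a single user record after an approved request
import { SecureDataDisposal } from './disposal'

async function handleApprovedDeletion(recordId: string): Promise<void> {
  // Authorization, confirmation, and legal-hold checks happen before this point
  await SecureDataDisposal.secureDelete('users', recordId)
}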

Privacy Protection

GDPR Compliance

// lib/data-protection/gdpr.ts
export class GDPRCompliance {
  
  // Right to access - export user data
  static async exportPersonalData(userId: string): Promise<any> {
    const personalData = await Promise.all([
      supabase.from('users').select('*').eq('id', userId),
      supabase.from('user_preferences').select('*').eq('user_id', userId),
      supabase.from('audit_logs').select('*').eq('user_id', userId),
      supabase.from('user_sessions').select('*').eq('user_id', userId)
    ])
    
    return {
      user_profile: personalData[0].data,
      preferences: personalData[1].data,
      activity_logs: personalData[2].data,
      session_history: personalData[3].data,
      export_timestamp: new Date().toISOString(),
      format_version: '1.0',
      legal_basis: 'GDPR Article 15 - Right of Access'
    }
  }
  
  // Right to rectification - update personal data
  static async updatePersonalData(userId: string, updates: any): Promise<void> {
    // Validate that only personal data fields are being updated
    const allowedFields = ['full_name', 'email', 'phone', 'preferences']
    const validatedUpdates = Object.keys(updates)
      .filter(key => allowedFields.includes(key))
      .reduce((obj, key) => {
        obj[key] = updates[key]
        return obj
      }, {} as any)
    
    await supabase
      .from('users')
      .update({
        ...validatedUpdates,
        updated_at: new Date().toISOString()
      })
      .eq('id', userId)
    
    // Log the rectification
    await supabase
      .from('gdpr_actions')
      .insert({
        user_id: userId,
        action_type: 'rectification',
        fields_updated: Object.keys(validatedUpdates),
        performed_at: new Date().toISOString()
      })
  }
  
  // Right to erasure - delete personal data
  static async erasePersonalData(userId: string): Promise<void> {
    try {
      // Step 1: Export data for compliance record
      const exportedData = await this.exportPersonalData(userId)
      
      // Step 2: Store erasure request
      await supabase
        .from('gdpr_erasure_requests')
        .insert({
          user_id: userId,
          requested_at: new Date().toISOString(),
          exported_data: exportedData
        })
      
      // Step 3: Anonymize rather than delete to maintain referential integrity
      await supabase
        .from('users')
        .update({
          email: `anonymized_${userId}@deleted.local`,
          full_name: 'Anonymized User',
          phone: null,
          avatar_url: null,
          is_active: false,
          gdpr_deleted: true,
          gdpr_deleted_at: new Date().toISOString()
        })
        .eq('id', userId)
      
      // Step 4: Delete or anonymize related data
      await this.anonymizeRelatedData(userId)
      
      // Step 5: Invalidate all sessions
      await supabase
        .from('user_sessions')
        .delete()
        .eq('user_id', userId)
      
    } catch (error) {
      console.error('GDPR erasure failed:', error)
      throw error
    }
  }
  
  private static async anonymizeRelatedData(userId: string): Promise<void> {
    // Anonymize audit logs
    await supabase
      .from('audit_logs')
      .update({
        details: null,
        ip_address: '0.0.0.0',
        user_agent: 'anonymized'
      })
      .eq('user_id', userId)
    
    // Keep business records but remove personal identifiers
    await supabase
      .from('stock_movements')
      .update({
        notes: 'User data anonymized per GDPR request'
      })
      .eq('created_by', userId)
  }
  
  // Data portability
  static async generatePortabilityExport(userId: string): Promise<string> {
    const userData = await this.exportPersonalData(userId)
    
    const portabilityData = {
      ...userData,
      export_format: 'JSON',
      portability_standard: 'GDPR Article 20',
      data_controller: 'Smart Shelf Inc.',
      export_purpose: 'data_portability'
    }
    
    return JSON.stringify(portabilityData, null, 2)
  }
}
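
In practice these methods back the subject-access workflows. A sketch of the two most common requests (authentication of the data subject and delivery of the export file are out of scope here):

// Illustrative GDPR request handling
import { GDPRCompliance } from './gdpr'

async function handleAccessRequest(userId: string): Promise<string> {
  // Returns a JSON document the verified data subject can download
  return GDPRCompliance.generatePortabilityExport(userId)
}

async function handleErasureRequest(userId: string): Promise<void> {
  await GDPRCompliance.erasePersonalData(userId)
}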

Consent Management

// lib/data-protection/consent.ts
export enum ConsentType {
  FUNCTIONAL = 'functional',
  ANALYTICS = 'analytics',
  MARKETING = 'marketing',
  PROFILING = 'profiling'
}

export interface ConsentRecord {
  user_id: string
  consent_type: ConsentType
  granted: boolean
  granted_at?: string
  withdrawn_at?: string
  legal_basis: string
  consent_version: string
  ip_address: string
  user_agent: string
}

export class ConsentManager {
  
  static async recordConsent(
    userId: string,
    consentType: ConsentType,
    granted: boolean,
    metadata: Partial<ConsentRecord>
  ): Promise<void> {
    const consentRecord: ConsentRecord = {
      user_id: userId,
      consent_type: consentType,
      granted,
      granted_at: granted ? new Date().toISOString() : undefined,
      withdrawn_at: !granted ? new Date().toISOString() : undefined,
      legal_basis: metadata.legal_basis || 'consent',
      consent_version: metadata.consent_version || '1.0',
      ip_address: metadata.ip_address || 'unknown',
      user_agent: metadata.user_agent || 'unknown'
    }
    
    await supabase
      .from('user_consents')
      .upsert(consentRecord, {
        onConflict: 'user_id,consent_type'
      })
  }
  
  static async checkConsent(userId: string, consentType: ConsentType): Promise<boolean> {
    const { data } = await supabase
      .from('user_consents')
      .select('granted, withdrawn_at')
      .eq('user_id', userId)
      .eq('consent_type', consentType)
      .single()
    
    return Boolean(data?.granted && !data.withdrawn_at)
  }
  
  static async getConsentHistory(userId: string): Promise<ConsentRecord[]> {
    const { data } = await supabase
      .from('user_consents')
      .select('*')
      .eq('user_id', userId)
      .order('granted_at', { ascending: false })
    
    return data || []
  }
  
  static async withdrawAllConsent(userId: string): Promise<void> {
    await supabase
      .from('user_consents')
      .update({
        granted: false,
        withdrawn_at: new Date().toISOString()
      })
      .eq('user_id', userId)
  }
}
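
For example, consent can be captured at sign-up and then checked before any optional processing runs (a sketch; the request metadata fields are placeholders):

// Illustrative consent flow
import { ConsentManager, ConsentType } from './consent'

async function onSignupConsent(userId: string, acceptedAnalytics: boolean, req: { ip: string; userAgent: string }) {
  await ConsentManager.recordConsent(userId, ConsentType.ANALYTICS, acceptedAnalytics, {
    consent_version: '1.0',
    ip_address: req.ip,
    user_agent: req.userAgent
  })
}

async function trackEvent(userId: string, eventName: string): Promise<void> {
  // Only forward analytics events if consent is currently granted and not withdrawn
  if (await ConsentManager.checkConsent(userId, ConsentType.ANALYTICS)) {
    // ...send eventName to the analytics pipeline
  }
}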

This data protection framework helps Smart Shelf safeguard sensitive information throughout its lifecycle and supports compliance with international regulations such as the GDPR.