Caching Strategy

Multi-level caching system in DesQTA

Caching Overview

DesQTA implements a sophisticated multi-level caching system to optimize performance, reduce API calls, and enable offline functionality.

Caching Layers

┌─────────────────────────────────────┐
│   Memory Cache (LRU, TTL)           │  ← Fastest, in-memory
├─────────────────────────────────────┤
│   IndexedDB Cache (Browser)         │  ← Persistent, browser storage
├─────────────────────────────────────┤
│   File System Cache (Rust)          │  ← Persistent, backend storage
├─────────────────────────────────────┤
│   Database Cache (SQLite)           │  ← Structured, queryable
└─────────────────────────────────────┘

Layer 1: Memory Cache

Implementation

// src/utils/cache.ts

interface CacheItem<T> {
  data: T;
  expires: number; // epoch ms after which the entry is stale
}

/**
 * In-memory LRU cache with per-entry TTL.
 *
 * Uses Map insertion order as the recency order: a hit re-inserts the
 * entry at the tail, so the head of the Map is always the
 * least-recently-used key and is the one evicted at capacity.
 */
class Cache {
  private cache: Map<string, CacheItem<any>>;
  private maxSize: number;

  constructor(maxSize = 100) {
    this.cache = new Map();
    this.maxSize = maxSize;
  }

  set<T>(key: string, data: T, ttlMinutes: number): void {
    // Delete first so re-setting an existing key moves it to the
    // most-recently-used position instead of keeping its old slot.
    this.cache.delete(key);

    // Evict the least-recently-used entry if at capacity.
    if (this.cache.size >= this.maxSize) {
      const lruKey = this.cache.keys().next().value;
      if (lruKey !== undefined) {
        this.cache.delete(lruKey);
      }
    }

    const expires = Date.now() + ttlMinutes * 60 * 1000;
    this.cache.set(key, { data, expires });
  }

  get<T>(key: string): T | null {
    const item = this.cache.get(key);
    if (!item) return null;

    // Expired entries are removed lazily on read.
    if (Date.now() > item.expires) {
      this.cache.delete(key);
      return null;
    }

    // Refresh recency: move the entry to the most-recently-used position.
    this.cache.delete(key);
    this.cache.set(key, item);

    return item.data as T;
  }

  delete(key: string): void {
    this.cache.delete(key);
  }

  clear(): void {
    this.cache.clear();
  }

  /** Iterate current keys in LRU-to-MRU order (used for pattern invalidation). */
  keys(): IterableIterator<string> {
    return this.cache.keys();
  }
}

export const cache = new Cache(100);

Usage

import { cache } from '../../utils/cache';

// Set cache
cache.set('userInfo', user, 60); // 60 minutes TTL

// Get cache
const cached = cache.get<UserInfo>('userInfo');
if (cached) {
  return cached;
}

// Delete cache
cache.delete('userInfo');

Layer 2: IndexedDB Cache

Implementation

// src/lib/services/idbCache.ts

const DB_NAME = 'DesQTA';
const DB_VERSION = 1;
const STORE_NAME = 'cache';

/**
 * Open (and lazily create) the DesQTA IndexedDB database.
 * The object store is created on first open via `onupgradeneeded`.
 */
async function openDB(): Promise<IDBDatabase> {
  return new Promise((resolve, reject) => {
    const openRequest = indexedDB.open(DB_NAME, DB_VERSION);

    // Runs only when the database is created or the version bumps.
    openRequest.onupgradeneeded = (event) => {
      const db = (event.target as IDBOpenDBRequest).result;
      if (!db.objectStoreNames.contains(STORE_NAME)) {
        db.createObjectStore(STORE_NAME);
      }
    };

    openRequest.onsuccess = () => resolve(openRequest.result);
    openRequest.onerror = () => reject(openRequest.error);
  });
}

/**
 * Persist `value` under `key` in IndexedDB.
 *
 * @param ttlMinutes minutes until expiry; `null`/`undefined` means the
 *   entry never expires. A TTL of 0 means "already expired".
 */
export async function setIdb<T>(key: string, value: T, ttlMinutes?: number | null): Promise<void> {
  const db = await openDB();
  const transaction = db.transaction([STORE_NAME], 'readwrite');
  const store = transaction.objectStore(STORE_NAME);

  const cacheItem = {
    data: value,
    // `== null` rather than truthiness: a TTL of 0 must mean
    // "expires immediately", not "never expires".
    expires: ttlMinutes == null ? null : Date.now() + ttlMinutes * 60 * 1000,
  };

  await new Promise<void>((resolve, reject) => {
    store.put(cacheItem, key);
    // Resolve on transaction commit, not just request success — the put
    // is only durable once the transaction completes.
    transaction.oncomplete = () => resolve();
    transaction.onerror = () => reject(transaction.error);
    transaction.onabort = () => reject(transaction.error);
  });
}

/**
 * Two-level read: memory cache first, then IndexedDB.
 * Expired IndexedDB entries are deleted lazily and reported as a miss.
 *
 * @param memKey kept for API compatibility; `memGet` already closes over it.
 */
export async function getWithIdbFallback<T>(
  memKey: string,
  idbKey: string,
  memGet: () => T | null
): Promise<T | null> {
  // Compare against null so cached falsy values (0, '', false) still hit.
  const memCached = memGet();
  if (memCached !== null) return memCached;

  // Fall back to IndexedDB; any failure degrades to a cache miss.
  try {
    const db = await openDB();
    const transaction = db.transaction([STORE_NAME], 'readonly');
    const store = transaction.objectStore(STORE_NAME);

    const cacheItem = await new Promise<any>((resolve, reject) => {
      const request = store.get(idbKey);
      request.onsuccess = () => resolve(request.result);
      request.onerror = () => reject(request.error);
    });

    if (cacheItem) {
      // `expires === null` means the entry never expires.
      if (cacheItem.expires && Date.now() > cacheItem.expires) {
        // Expired: best-effort fire-and-forget delete, then miss.
        const deleteTransaction = db.transaction([STORE_NAME], 'readwrite');
        deleteTransaction.objectStore(STORE_NAME).delete(idbKey);
        return null;
      }

      return cacheItem.data as T;
    }
  } catch (error) {
    console.error('IndexedDB error:', error);
  }

  return null;
}

Layer 3: useDataLoader Integration (the unified access path that ties the storage layers together)

Unified Caching

// src/lib/utils/useDataLoader.ts

/**
 * Unified cached data loader: memory cache → IndexedDB → network.
 * Cache hits trigger a silent background refresh; fresh fetches are
 * written through to both cache layers.
 */
export async function useDataLoader<T>(options: DataLoaderOptions<T>): Promise<T | null> {
  const {
    cacheKey,
    ttlMinutes = 10,
    fetcher,
    skipCache = false,
  } = options;

  // Bypass both cache layers entirely when requested.
  if (skipCache) {
    return await fetcher();
  }

  // Step 1: memory cache. Compare against null so cached falsy values
  // (0, '', false) still count as hits.
  const memCached = cache.get<T>(cacheKey);
  if (memCached !== null) {
    triggerBackgroundSync(cacheKey, fetcher);
    return memCached;
  }

  // Step 2: IndexedDB fallback; repopulate the memory layer on a hit.
  const idbCached = await getWithIdbFallback<T>(
    cacheKey,
    cacheKey,
    () => cache.get<T>(cacheKey)
  );
  if (idbCached !== null) {
    cache.set(cacheKey, idbCached, ttlMinutes);
    triggerBackgroundSync(cacheKey, fetcher);
    return idbCached;
  }

  // Step 3: fetch fresh data.
  const freshData = await fetcher();

  // Don't poison the cache layers with a null/undefined fetch result.
  if (freshData != null) {
    cache.set(cacheKey, freshData, ttlMinutes);
    await setIdb(cacheKey, freshData, ttlMinutes);
  }

  return freshData;
}

Layer 4: Backend File Cache

Rust File Caching

// src-tauri/src/utils/cache.rs

use std::fs;
use std::path::PathBuf;
use serde::{Deserialize, Serialize};

/// Filesystem-backed cache: one JSON file per key under the app data dir.
pub struct FileCache;

impl FileCache {
    /// Directory holding the cache files: `<data_dir>/DesQTA/cache`.
    fn cache_dir() -> PathBuf {
        // Fall back to the OS temp dir instead of panicking on platforms
        // where no user data directory can be determined.
        let mut dir = dirs::data_dir().unwrap_or_else(std::env::temp_dir);
        dir.push("DesQTA");
        dir.push("cache");
        dir
    }

    /// Load and deserialize the cached value for `key`.
    ///
    /// Returns `None` on a miss, on an expired entry (deleted), or on a
    /// corrupt entry (also deleted so it stops failing on every read).
    ///
    /// NOTE(review): `key` is interpolated into the path unsanitized —
    /// callers must not pass untrusted keys (path traversal risk).
    pub fn get<T: for<'de> Deserialize<'de>>(key: &str) -> Option<T> {
        let path = Self::cache_dir().join(format!("{}.json", key));

        let data = fs::read_to_string(&path).ok()?;
        match serde_json::from_str::<CacheItem<T>>(&data) {
            // Expiry is a Unix timestamp in seconds.
            Ok(item) if item.expires > chrono::Utc::now().timestamp() => Some(item.data),
            Ok(_) => {
                // Expired: best-effort delete, then report a miss.
                let _ = fs::remove_file(&path);
                None
            }
            Err(_) => {
                // Corrupt JSON: remove it so subsequent reads don't keep failing.
                let _ = fs::remove_file(&path);
                None
            }
        }
    }

    /// Serialize `data` and write it under `key` with the given TTL (minutes).
    pub fn set<T: Serialize>(key: &str, data: T, ttl_minutes: i64) -> Result<(), String> {
        let dir = Self::cache_dir();
        fs::create_dir_all(&dir)
            .map_err(|e| format!("Failed to create cache dir: {}", e))?;

        // Store expiry as a Unix timestamp (seconds), matching `get`.
        let expires = chrono::Utc::now().timestamp() + (ttl_minutes * 60);
        let item = CacheItem { data, expires };

        let path = dir.join(format!("{}.json", key));
        let json = serde_json::to_string(&item)
            .map_err(|e| format!("Failed to serialize: {}", e))?;

        fs::write(&path, json)
            .map_err(|e| format!("Failed to write cache: {}", e))?;

        Ok(())
    }
}

/// On-disk representation of one cached value.
#[derive(Serialize, Deserialize)]
struct CacheItem<T> {
    data: T,
    // Unix timestamp (seconds) after which the entry is stale.
    expires: i64,
}

Layer 5: Database Cache

SQLite Caching

// src-tauri/src/utils/database.rs

use rusqlite::{Connection, params};

/// SQLite-backed cache with a single key/value/expiry table.
pub struct DatabaseCache {
    conn: Connection,
}

impl DatabaseCache {
    /// Open (and create if needed) `<data_dir>/DesQTA/cache.db` and
    /// ensure the `cache` table exists.
    pub fn new() -> Result<Self, String> {
        let mut db_path = dirs::data_dir().ok_or("No data dir")?;
        db_path.push("DesQTA");
        // The parent directory must exist before Connection::open,
        // which will not create it.
        std::fs::create_dir_all(&db_path)
            .map_err(|e| format!("Failed to create data dir: {}", e))?;
        db_path.push("cache.db");

        let conn = Connection::open(&db_path)
            .map_err(|e| format!("Failed to open database: {}", e))?;

        // Create cache table. Note: `?` alone cannot convert
        // rusqlite::Error into this function's String error type, so
        // map_err is required here.
        conn.execute(
            "CREATE TABLE IF NOT EXISTS cache (
                key TEXT PRIMARY KEY,
                value TEXT NOT NULL,
                expires INTEGER NOT NULL
            )",
            [],
        )
        .map_err(|e| format!("Failed to create table: {}", e))?;

        Ok(Self { conn })
    }

    /// Fetch and deserialize a non-expired entry; `None` on miss,
    /// expiry, or any query/deserialization failure.
    pub fn get<T: for<'de> Deserialize<'de>>(&self, key: &str) -> Option<T> {
        let now = chrono::Utc::now().timestamp();

        // Expiry is filtered in SQL so expired rows never reach Rust.
        let mut stmt = self.conn.prepare(
            "SELECT value FROM cache WHERE key = ? AND expires > ?"
        ).ok()?;

        let value: String = stmt.query_row(
            params![key, now],
            |row| row.get(0)
        ).ok()?;

        serde_json::from_str(&value).ok()
    }

    /// Serialize `value` and upsert it under `key` with a TTL in minutes.
    pub fn set<T: Serialize>(&self, key: &str, value: T, ttl_minutes: i64) -> Result<(), String> {
        // Expiry stored as a Unix timestamp (seconds), matching `get`.
        let expires = chrono::Utc::now().timestamp() + (ttl_minutes * 60);
        let json = serde_json::to_string(&value)
            .map_err(|e| format!("Failed to serialize: {}", e))?;

        self.conn.execute(
            "INSERT OR REPLACE INTO cache (key, value, expires) VALUES (?1, ?2, ?3)",
            params![key, json, expires],
        )
        .map_err(|e| format!("Failed to insert: {}", e))?;

        Ok(())
    }

    /// Delete every expired row; intended to run periodically.
    pub fn cleanup_expired(&self) -> Result<(), String> {
        let now = chrono::Utc::now().timestamp();
        self.conn.execute(
            "DELETE FROM cache WHERE expires < ?",
            params![now],
        )
        .map_err(|e| format!("Failed to cleanup: {}", e))?;

        Ok(())
    }
}

Cache TTL Strategy

TTL Presets

/** TTL presets (in minutes), keyed by how volatile the cached data is. */
export const CACHE_TTL = {
  /** API responses, frequently changing. */
  SHORT: 5,
  /** User data, moderately stable. */
  MEDIUM: 15,
  /** Static content, rarely changes. */
  LONG: 60,
  /** Configuration, almost never changes (24 hours). */
  VERY_LONG: 1440,
} as const;

Usage Examples

// Short TTL for dynamic data
cache.set('assessments', data, CACHE_TTL.SHORT);

// Medium TTL for user info
cache.set('userInfo', user, CACHE_TTL.MEDIUM);

// Long TTL for static content
cache.set('subjects', subjects, CACHE_TTL.LONG);

// Very long TTL for config
cache.set('seqtaConfig', config, CACHE_TTL.VERY_LONG);

Background Synchronization

Automatic Refresh

/**
 * Silently refresh a cache entry in the background.
 * No-op while offline; failures are logged and never surface to the UI.
 *
 * @param ttlMinutes TTL for the refreshed entry. Defaults to 10 to stay
 *   backward-compatible with existing two-argument callers; pass the
 *   entry's original TTL to avoid resetting it to a fixed value.
 */
async function triggerBackgroundSync<T>(
  cacheKey: string,
  fetcher: () => Promise<T>,
  ttlMinutes: number = 10
): Promise<void> {
  // Never hit the network in offline mode; keep whatever is cached.
  const offline = await isOfflineMode();
  if (offline) return;

  try {
    const freshData = await fetcher();
    // Update both cache layers silently.
    cache.set(cacheKey, freshData, ttlMinutes);
    // Awaited so an IndexedDB failure is caught here instead of
    // becoming an unhandled promise rejection.
    await setIdb(cacheKey, freshData, ttlMinutes);
  } catch (error) {
    // Fail silently, keep cached data.
    logger.debug('Background sync failed', { cacheKey, error });
  }
}

Cache Invalidation

Manual Invalidation

// Clear specific cache
cache.delete('assessments');
await clearIdb('assessments');

// Clear all cache
cache.clear();
await clearAllIdb();

Pattern-Based Invalidation

/**
 * Delete every memory-cache entry whose key contains `pattern`
 * (substring match, case-sensitive).
 */
function invalidatePattern(pattern: string): void {
  // Snapshot the matching keys first so we never mutate the cache
  // while iterating its live key iterator.
  const matching = [...cache.keys()].filter((key) => key.includes(pattern));
  for (const key of matching) {
    cache.delete(key);
  }
}

// Invalidate all assessment-related cache
invalidatePattern('assessment');

Cache Statistics

Monitoring Cache Performance

/** Running hit/miss counters for monitoring cache effectiveness. */
class CacheStats {
  hits = 0;
  misses = 0;

  recordHit(): void {
    this.hits += 1;
  }

  recordMiss(): void {
    this.misses += 1;
  }

  /** Fraction of recorded lookups that hit; 0 when nothing recorded yet. */
  get hitRate(): number {
    const total = this.hits + this.misses;
    if (total === 0) return 0;
    return this.hits / total;
  }
}

export const cacheStats = new CacheStats();

Best Practices

1. Choose Appropriate TTL

// ✅ Good - Short TTL for dynamic data
cache.set('assessments', data, 10);

// ❌ Avoid - Long TTL for frequently changing data
cache.set('assessments', data, 1440);

2. Use Background Sync

// ✅ Good - Background sync for stale data
const data = await useDataLoader({
  cacheKey: 'data',
  fetcher: fetchData,
  shouldSyncInBackground: () => true
});

// ❌ Avoid - Always fetching fresh
const data = await fetchData(); // No caching

3. Invalidate on Updates

// ✅ Good - Invalidate after update
async function updateAssessment(id: string) {
  await saveAssessment(id);
  cache.delete('assessments'); // Invalidate cache
}

// ❌ Avoid - Stale cache after update
async function updateAssessment(id: string) {
  await saveAssessment(id);
  // Cache still has old data
}

Next Steps