resourceCache

Global cache shared by all resource instances, providing direct cache access, TTL expiration, LRU eviction, and batch update and invalidation operations.

API

import { resourceCache } from '@hellajs/resource';

interface ResourceCache {
  readonly map: Map<unknown, CacheEntry<unknown>>;
  readonly config: CacheConfig;
  setConfig(config: Partial<CacheConfig>): void;
  set<K, T>(key: K, data: T, cacheTime?: number): K;
  get<T>(key: unknown): T | undefined;
  update<T>(key: unknown, updater: T | ((old: T | undefined) => T)): boolean;
  cleanup(): void;
  updateMultiple<T>(updates: Array<CacheUpdate<T>>): void;
  invalidate(key: unknown): void;
  invalidateMultiple(keys: unknown[]): void;
  generateKeys<T>(): (template: (params: T) => unknown) => (params: T) => unknown;
  createInvalidator(resources: Array<Pick<Resource<any>, 'invalidate'>>): void;
}

interface CacheConfig {
  maxSize?: number;
  enableLRU?: boolean;
}

interface CacheEntry<T> {
  data: T;
  timestamp: number;
  cacheTime: number;
  lastAccess: number;
}

interface CacheUpdate<T> {
  key: unknown;
  updater: T | ((old: T | undefined) => T);
}

map

Readonly access to the internal cache Map containing all cached entries.

import { resourceCache } from '@hellajs/resource';

// Check cache size
console.log('Cache entries:', resourceCache.map.size);

// Inspect all cache keys
for (const [key, entry] of resourceCache.map) {
  console.log(`Key: ${key}, Data:`, entry.data);
}

config

Readonly access to current cache configuration settings.

import { resourceCache } from '@hellajs/resource';

console.log('Max cache size:', resourceCache.config.maxSize);
console.log('LRU enabled:', resourceCache.config.enableLRU);

setConfig

Updates the global cache configuration with new settings.

import { resourceCache } from '@hellajs/resource';

// Update cache configuration
resourceCache.setConfig({
  maxSize: 2000,      // Maximum cache entries
  enableLRU: true     // Enable LRU eviction
});

// Partial updates work too
resourceCache.setConfig({ maxSize: 5000 });

set

Stores data in the cache with optional time-to-live.

import { resourceCache } from '@hellajs/resource';

// Set with TTL (5 minutes)
resourceCache.set('user:123', { name: 'John' }, 300000);

// Set without TTL (permanent until manual removal)
resourceCache.set('config', { theme: 'dark' });

// Returns the key for type safety
const typedKey = resourceCache.set('user:456', userData, 60000);

get

Retrieves data from the cache by key.

import { resourceCache } from '@hellajs/resource';

// Get cached data
const user = resourceCache.get('user:123');
if (user) {
  console.log('Cached user:', user.name);
} else {
  console.log('User not found in cache');
}

// Type safety with consistent keys
type UserKey = `user:${number}`;
const userKey: UserKey = 'user:123';
const userData = resourceCache.get(userKey);

update

Updates existing cached data using an updater function or direct value.

import { resourceCache } from '@hellajs/resource';

// Update with function
const updated = resourceCache.update('user:123', old => ({
  ...old,
  lastSeen: Date.now()
}));

if (updated) {
  console.log('User updated successfully');
} else {
  console.log('User not found or expired');
}

// Update with direct value
resourceCache.update('settings', { theme: 'light', lang: 'en' });

cleanup

Performs cleanup of expired cache entries to free memory.

import { resourceCache } from '@hellajs/resource';

// Manual cleanup of expired entries
resourceCache.cleanup();

// Periodic cleanup
setInterval(() => {
  resourceCache.cleanup();
  console.log('Cache cleaned up');
}, 60000); // Every minute

updateMultiple

Updates multiple cache entries in a batch operation.

import { resourceCache } from '@hellajs/resource';

// Batch update multiple users
resourceCache.updateMultiple([
  {
    key: 'user:1',
    updater: user => ({ ...user, online: true })
  },
  {
    key: 'user:2',
    updater: user => ({ ...user, online: false })
  },
  {
    key: 'user:3',
    updater: { status: 'active', lastLogin: Date.now() }
  }
]);

invalidate

Removes a single entry from the cache by key.

import { resourceCache } from '@hellajs/resource';

// Remove specific cache entry
resourceCache.invalidate('user:123');

// Entry is now gone
console.log(resourceCache.get('user:123')); // undefined

invalidateMultiple

Removes multiple entries from the cache by keys.

import { resourceCache } from '@hellajs/resource';

// Remove multiple related entries
resourceCache.invalidateMultiple([
  'user:1',
  'user:2',
  'posts:user:1',
  'settings:user:1'
]);

// All specified entries are removed

generateKeys

Creates typed key-generator functions from a template, so the same parameters always produce the same cache key.

import { resourceCache } from '@hellajs/resource';

// Create typed key generators
const userKeyGen = resourceCache.generateKeys<{id: number}>()
  (params => `user:${params.id}`);

const postKeyGen = resourceCache.generateKeys<{userId: number, postId: number}>()
  (params => `user:${params.userId}:post:${params.postId}`);

// Use generators for consistent keys
const userKey = userKeyGen({ id: 123 }); // 'user:123'
const postKey = postKeyGen({ userId: 123, postId: 456 }); // 'user:123:post:456'

// Keys are consistent across your application
resourceCache.set(userKey, userData);
const cachedUser = resourceCache.get(userKey);

createInvalidator

Immediately invalidates all provided resources by calling their invalidate methods.

import { resourceCache, resource } from '@hellajs/resource';

const userResource = resource(fetchUser);
const userPostsResource = resource(fetchUserPosts);
const userSettingsResource = resource(fetchUserSettings);

// Invalidate all user-related resources at once
resourceCache.createInvalidator([
  userResource,
  userPostsResource,
  userSettingsResource
]);

// All three resources are now invalidated

Key Concepts

Global Cache Management

The resourceCache operates as a single, global cache shared by all resource instances across your application. Unlike individual resource caches, this provides centralized control and allows different resources to share cached data when they use the same keys.
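
For example, two unrelated parts of an application can read and write the same entry as long as they agree on the key. This is a minimal sketch using only the resourceCache calls documented above; the key name and data are illustrative.

import { resourceCache } from '@hellajs/resource';

// Module A caches the current session user under a shared key
resourceCache.set('session:current-user', { id: 1, name: 'Ada' }, 300000);

// Module B, elsewhere in the app, reads the same entry
const currentUser = resourceCache.get('session:current-user');
console.log('Shared entry:', currentUser); // { id: 1, name: 'Ada' }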

TTL and Expiration

Cache entries have a time-to-live (TTL) that determines when they expire. Expired entries are automatically cleaned up during cache operations to prevent memory leaks. The cleanup process is throttled to avoid performance impact.
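
A minimal sketch of the lifecycle, assuming an expired entry is gone no later than the next cleanup() call; the example forces a cleanup rather than waiting for the throttled automatic one, and the timings are illustrative.

import { resourceCache } from '@hellajs/resource';

// Cache a token with a one-second TTL
resourceCache.set('auth:token', 'abc123', 1000);
console.log(resourceCache.get('auth:token')); // 'abc123'

setTimeout(() => {
  // The TTL has elapsed; force removal of expired entries
  resourceCache.cleanup();
  console.log(resourceCache.get('auth:token')); // undefined
}, 1500);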

LRU Eviction Strategy

When the cache reaches its maximum size limit, the Least Recently Used (LRU) eviction strategy automatically removes the entries that were accessed least recently to make room for new data. This keeps memory usage within the configured bound while retaining frequently accessed data.
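
A small sketch of the effect, assuming entries are evicted as new ones are inserted once maxSize is reached; which entries are removed depends on access order, since reading an entry refreshes its lastAccess.

import { resourceCache } from '@hellajs/resource';

// Keep at most 100 entries and evict the least recently used ones
resourceCache.setConfig({ maxSize: 100, enableLRU: true });

// Fill the cache past its limit
for (let i = 0; i < 150; i++) {
  resourceCache.set(`item:${i}`, { value: i });
}

// Recently accessed entries are the last candidates for eviction
resourceCache.get('item:149');

// The cache stays within the configured bound
console.log(resourceCache.map.size); // at most 100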

Important Considerations

Efficient Cache Patterns

Prefer selective invalidation over clearing the entire cache, and batch related updates to reduce overhead.

// Use selective key invalidation instead of clearing entire cache
const clearUserData = (userId: number) => {
  const keysToInvalidate = [
    `user:${userId}`,
    `user:${userId}:posts`, 
    `user:${userId}:settings`,
    `user:${userId}:profile`
  ];
  
  resourceCache.invalidateMultiple(keysToInvalidate);
};

// Batch similar operations to reduce overhead
const updateUserStatuses = (userUpdates: Array<{id: number, online: boolean}>) => {
  const updates = userUpdates.map(({id, online}) => ({
    key: `user:${id}`,
    updater: (user: any) => ({ ...user, online })
  }));
  
  resourceCache.updateMultiple(updates);
};

Consistent Key Design

Use hierarchical, predictable key patterns for better organization.

// ✅ Good key patterns - hierarchical and consistent
'user:123'
'user:123:posts'  
'user:123:settings'
'posts:category:tech'

// ❌ Avoid random or inconsistent patterns
'random-123'
'user_data_for_john'
'PostsInTechCategory'

// Generate keys consistently with typed functions
type UserCacheKey = `user:${number}`;
type PostCacheKey = `user:${number}:posts:${number}`;

const getUserKey = (id: number): UserCacheKey => `user:${id}`;
const getPostKey = (userId: number, postId: number): PostCacheKey => 
  `user:${userId}:posts:${postId}`;

// Type-safe usage
resourceCache.set(getUserKey(123), userData);
const user = resourceCache.get(getUserKey(123));

Production Memory Management

Monitor and control cache size to prevent memory issues.

// Configure appropriate limits for your application
resourceCache.setConfig({
  maxSize: 2000,      // Adjust based on available memory
  enableLRU: true     // Enable automatic cleanup
});

// Monitor cache usage in production
setInterval(() => {
  const usage = resourceCache.map.size;
  const limit = resourceCache.config.maxSize ?? Infinity; // maxSize is optional
  const percentage = Math.round((usage / limit) * 100);
  
  console.log(`Cache usage: ${usage}/${limit} (${percentage}%)`);
  
  // Alert if cache usage is high
  if (percentage > 90) {
    console.warn('Cache usage critical - consider increasing maxSize');
  }
}, 30000);