huge update: query-parameter-aware cache keys and project list invalidation in lib/cache.ts
lib/cache.ts (47 changed lines)
@@ -2,12 +2,30 @@ import { cache } from './redis';
 
 // API Response caching
 export const apiCache = {
-  async getProjects() {
-    return await cache.get('api:projects');
+  // Generate cache key based on query parameters
+  generateProjectsKey(params: Record<string, string | null> = {}) {
+    const { page = '1', limit = '50', category, featured, published, difficulty, search } = params;
+    const keyParts = ['api:projects'];
+
+    if (page !== '1') keyParts.push(`page:${page}`);
+    if (limit !== '50') keyParts.push(`limit:${limit}`);
+    if (category) keyParts.push(`cat:${category}`);
+    if (featured !== null) keyParts.push(`feat:${featured}`);
+    if (published !== null) keyParts.push(`pub:${published}`);
+    if (difficulty) keyParts.push(`diff:${difficulty}`);
+    if (search) keyParts.push(`search:${search}`);
+
+    return keyParts.join(':');
+  },
+
+  async getProjects(params: Record<string, string | null> = {}) {
+    const key = this.generateProjectsKey(params);
+    return await cache.get(key);
   },
 
-  async setProjects(projects: unknown, ttlSeconds = 300) {
-    return await cache.set('api:projects', projects, ttlSeconds);
+  async setProjects(params: Record<string, string | null> = {}, projects: unknown, ttlSeconds = 300) {
+    const key = this.generateProjectsKey(params);
+    return await cache.set(key, projects, ttlSeconds);
   },
 
   async getProject(id: number) {
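
For reference, a minimal sketch of how a route handler might drive the new parameter-aware cache, assuming values come straight from URLSearchParams.get (so absent parameters arrive as null, which is what the featured !== null and published !== null checks expect). The import path, listProjects, and loadProjectsFromDb are illustrative, not part of this commit:

import { apiCache } from './cache';

// Placeholder for the real database query (not part of this commit).
async function loadProjectsFromDb(params: Record<string, string | null>): Promise<unknown> {
  return [];
}

// Illustrative route-handler usage: values come straight from URLSearchParams.get,
// so absent parameters are null and fall out of the generated key.
export async function listProjects(searchParams: URLSearchParams) {
  const params: Record<string, string | null> = {
    // page/limit normalized to their defaults so they are omitted from the key
    page: searchParams.get('page') ?? '1',
    limit: searchParams.get('limit') ?? '50',
    category: searchParams.get('category'),
    featured: searchParams.get('featured'),
    published: searchParams.get('published'),
    difficulty: searchParams.get('difficulty'),
    search: searchParams.get('search'),
  };

  // ?published=true                       -> key 'api:projects:pub:true'
  // ?featured=true&published=true&limit=6 -> key 'api:projects:limit:6:feat:true:pub:true'
  const cached = await apiCache.getProjects(params);
  if (cached) return cached;

  const projects = await loadProjectsFromDb(params);
  await apiCache.setProjects(params, projects, 300); // cache for 5 minutes
  return projects;
}
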
@@ -20,11 +38,28 @@ export const apiCache = {
 
   async invalidateProject(id: number) {
     await cache.del(`api:project:${id}`);
-    await cache.del('api:projects');
+    // Invalidate all project list caches
+    await this.invalidateAllProjectLists();
   },
 
+  async invalidateAllProjectLists() {
+    // Clear all project list caches by pattern
+    // This is a simplified approach - in production you'd use Redis SCAN
+    const commonKeys = [
+      'api:projects',
+      'api:projects:pub:true',
+      'api:projects:feat:true:pub:true:limit:6',
+      'api:projects:page:1:limit:50',
+      'api:projects:pub:true:page:1:limit:50'
+    ];
+
+    for (const key of commonKeys) {
+      await cache.del(key);
+    }
+  },
+
   async invalidateAll() {
-    await cache.del('api:projects');
+    await this.invalidateAllProjectLists();
     // Clear all project caches
     const keys = await this.getAllProjectKeys();
     for (const key of keys) {
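
The commit's own comment in invalidateAllProjectLists calls the hardcoded key list a simplified approach and points at Redis SCAN for production use. Below is a hedged sketch of that idea, assuming direct access to an ioredis client (the cache wrapper imported from './redis' only exposes get, set, and del in this diff); invalidateByPattern and the connection fallback are illustrative names. Matching by pattern also avoids keeping a hand-maintained list in sync with the exact segment order that generateProjectsKey emits (page, limit, cat, feat, pub, diff, search):

import Redis from 'ioredis';

// Illustrative only: assumes the app can reach Redis directly via ioredis.
const redis = new Redis(process.env.REDIS_URL ?? 'redis://localhost:6379');

// Delete every key matching a pattern using incremental SCAN,
// which walks the keyspace without blocking the server the way KEYS would.
export async function invalidateByPattern(pattern = 'api:projects*') {
  let cursor = '0';
  do {
    const [nextCursor, keys] = await redis.scan(cursor, 'MATCH', pattern, 'COUNT', 100);
    cursor = nextCursor;
    if (keys.length > 0) {
      await redis.unlink(...keys); // UNLINK frees memory asynchronously
    }
  } while (cursor !== '0');
}

A helper like this could then back invalidateProject and invalidateAll so they no longer depend on the commonKeys list staying in sync with generated keys.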