Add caching layer for improved performance

2025-05-12 16:45:28 +02:00
parent 8022fceff4
commit e1c6a35325
3 changed files with 139 additions and 27 deletions

lib/cache.ts (new file, 79 additions)

@@ -0,0 +1,79 @@
// Simple in-memory cache implementation (Redis simulation for development)
// In production, this would be replaced with actual Redis client

interface CacheEntry {
  value: any
  expiry: number
}

class SimpleCache {
  private cache = new Map<string, CacheEntry>()

  set(key: string, value: any, ttlSeconds = 300): void {
    const expiry = Date.now() + (ttlSeconds * 1000)
    this.cache.set(key, { value, expiry })
  }

  get<T>(key: string): T | null {
    const entry = this.cache.get(key)
    if (!entry) return null

    if (Date.now() > entry.expiry) {
      this.cache.delete(key)
      return null
    }

    return entry.value as T
  }

  del(key: string): void {
    this.cache.delete(key)
  }

  clear(): void {
    this.cache.clear()
  }

  // Clean expired entries
  cleanup(): void {
    const now = Date.now()
    Array.from(this.cache.entries()).forEach(([key, entry]) => {
      if (now > entry.expiry) {
        this.cache.delete(key)
      }
    })
  }
}

// Global cache instance
export const cache = new SimpleCache()

// Cleanup expired entries every 5 minutes
setInterval(() => {
  cache.cleanup()
}, 5 * 60 * 1000)

export function getCacheKey(...parts: (string | number)[]): string {
  return parts.join(':')
}

export async function cacheWrapper<T>(
  key: string,
  fetcher: () => Promise<T>,
  ttl = 300
): Promise<T> {
  // Try to get from cache first
  const cached = cache.get<T>(key)
  if (cached !== null) {
    return cached
  }

  // Fetch fresh data
  const data = await fetcher()

  // Cache the result
  cache.set(key, data, ttl)

  return data
}
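
The cacheWrapper helper is added here but not yet called anywhere in this commit. A minimal usage sketch, with a hypothetical lookupSources fetcher and a hypothetical 'sources' key prefix standing in for whatever expensive call a route would actually wrap:

import { cacheWrapper, getCacheKey } from '../lib/cache'

// Hypothetical stand-in for any slow async lookup (DB query, HTTP call, ...)
async function lookupSources(domain: string): Promise<string[]> {
  return ['example-source']
}

export async function getSources(domain: string): Promise<string[]> {
  const key = getCacheKey('sources', domain) // e.g. "sources:example.com"
  // First call runs lookupSources and caches the result for 10 minutes;
  // later calls within the TTL return the cached array without invoking the fetcher.
  return cacheWrapper(key, () => lookupSources(domain), 600)
}

Note that cache.get returns null on a miss, so a fetcher that legitimately resolves to null will never be served from the cache and will run on every call.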

pages/api/cache/stats.ts (new file, 16 additions)

@@ -0,0 +1,16 @@
import type { NextApiRequest, NextApiResponse } from "next"
import { cache } from "../../../lib/cache"
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  if (req.method !== "GET") return res.status(405).json({ error: "Method not allowed" })

  // Simple cache statistics
  const stats = {
    cache_size: (cache as any).cache?.size || 0,
    cache_cleanup_interval: '5 minutes',
    last_cleanup: 'Automatic',
    cache_implementation: 'In-memory (development mode)'
  }

  res.json(stats)
}
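
For a quick manual check of the endpoint, something like the following can be run against a local dev server; the port and the printed values are assumptions, only the field names come from the handler above:

// Assumes the Next.js dev server is listening on localhost:3000 (Node 18+ for global fetch)
async function printCacheStats(): Promise<void> {
  const res = await fetch('http://localhost:3000/api/cache/stats')
  const stats = await res.json()
  // Expected fields per the handler: cache_size, cache_cleanup_interval,
  // last_cleanup and cache_implementation
  console.log(stats)
}

printCacheStats()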

Modified file (path not shown)

@@ -2,6 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next'
 import sqlite3 from 'sqlite3'
 import path from 'path'
 import { rateLimit, getRateLimitHeaders } from '../../../lib/rate-limiter'
+import { cache, getCacheKey } from '../../../lib/cache'

 type CheckResponse = {
   is_problematic: boolean
@@ -79,6 +80,14 @@ export default async function handler(
     return res.status(400).json({ error: 'Invalid URL format' })
   }

+  // Check cache first
+  const cacheKey = getCacheKey('domain_check', domain)
+  const cachedResult = cache.get<CheckResponse>(cacheKey)
+  if (cachedResult) {
+    return res.status(200).json(cachedResult)
+  }
+
   const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
   const db = new sqlite3.Database(dbPath)
@@ -99,16 +108,17 @@ export default async function handler(
       )
     })

+    let result: CheckResponse
+
     if (sources.length === 0) {
-      return res.status(200).json({
+      result = {
         is_problematic: false,
         risk_level: 0,
         categories: [],
         message: 'Stránka nie je v našej databáze problematických zdrojov',
         source_count: 0
-      })
-    }
+      }
+    } else {
       const maxRiskLevel = Math.max(...sources.map(s => s.risk_level))
       const allCategories = sources
         .map(s => s.categories)
@@ -128,13 +138,20 @@ export default async function handler(
       message = 'NÍZKE RIZIKO: Táto stránka je označená ako problematická'
     }

-      return res.status(200).json({
+      result = {
         is_problematic: true,
         risk_level: maxRiskLevel,
         categories: uniqueCategories,
         message,
         source_count: sources.length
-      })
-    }
+      }
+    }
+
+    // Cache the result (5 minutes for non-problematic, 15 minutes for problematic)
+    const cacheTtl = result.is_problematic ? 900 : 300
+    cache.set(cacheKey, result, cacheTtl)
+
+    return res.status(200).json(result)
   } catch (error) {
     console.error('Database error:', error)
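
The comment at the top of lib/cache.ts says the in-memory store stands in for Redis during development. A rough sketch of what the production get/set could look like, assuming the ioredis package and a REDIS_URL environment variable (neither is part of this commit); unlike SimpleCache, these calls are asynchronous, so call sites would need to await them:

import Redis from 'ioredis'

// Hypothetical Redis-backed variant of the cache interface; the connection URL is an assumption
const redis = new Redis(process.env.REDIS_URL ?? 'redis://localhost:6379')

export async function redisGet<T>(key: string): Promise<T | null> {
  const raw = await redis.get(key)
  return raw ? (JSON.parse(raw) as T) : null
}

export async function redisSet(key: string, value: unknown, ttlSeconds = 300): Promise<void> {
  // 'EX' lets Redis expire the key itself, replacing the setInterval cleanup used above
  await redis.set(key, JSON.stringify(value), 'EX', ttlSeconds)
}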