Compare commits
10 commits (5c9f1ccea1 ... 860070a302)

| SHA1 |
|---|
| 860070a302 |
| 52bde64e7f |
| 558172f2be |
| 597659a148 |
| e1c6a35325 |
| 8022fceff4 |
| 9a18e4bffa |
| 1484d051a1 |
| 6341148118 |
| 786286df0a |

drizzle.config.ts (new file, 13 lines)
@@ -0,0 +1,13 @@
import { defineConfig } from 'drizzle-kit';
import * as dotenv from 'dotenv';

dotenv.config({ path: '.env.local' });

export default defineConfig({
  schema: './lib/db/schema.ts',
  out: './drizzle',
  dialect: 'postgresql',
  dbCredentials: {
    url: process.env.DATABASE_URL!,
  },
});

lib/api-auth.ts (new file, 73 lines)
@@ -0,0 +1,73 @@
import crypto from 'crypto'
import { db, schema } from './db/connection'
import { eq, and } from 'drizzle-orm'

export interface ApiKey {
  id: number
  keyHash: string
  name: string
  permissions: string[]
  rateLimit: number
  isActive: boolean
  lastUsed?: Date
  createdAt: Date
}

export function generateApiKey(): string {
  return 'ak_' + crypto.randomBytes(32).toString('hex')
}

export function hashApiKey(key: string): string {
  return crypto.createHash('sha256').update(key).digest('hex')
}

export async function validateApiKey(key: string): Promise<ApiKey | null> {
  if (!key || !key.startsWith('ak_')) return null

  const keyHash = hashApiKey(key)

  try {
    const apiKeys = await db.select()
      .from(schema.apiKeys)
      .where(and(
        eq(schema.apiKeys.keyHash, keyHash),
        eq(schema.apiKeys.isActive, true)
      ))
      .limit(1)

    if (apiKeys.length === 0) return null

    const apiKey = apiKeys[0]

    // Update last_used timestamp
    await db.update(schema.apiKeys)
      .set({ lastUsed: new Date() })
      .where(eq(schema.apiKeys.id, apiKey.id))

    return {
      id: apiKey.id,
      keyHash: apiKey.keyHash,
      name: apiKey.name,
      permissions: apiKey.permissions ? JSON.parse(apiKey.permissions) : [],
      rateLimit: apiKey.rateLimit,
      isActive: apiKey.isActive,
      lastUsed: apiKey.lastUsed,
      createdAt: apiKey.createdAt
    }
  } catch (error) {
    console.error('API key validation error:', error)
    return null
  }
}

export function hasPermission(apiKey: ApiKey, permission: string): boolean {
  return apiKey.permissions.includes('*') || apiKey.permissions.includes(permission)
}

export const ApiPermissions = {
  READ_SOURCES: 'sources:read',
  WRITE_SOURCES: 'sources:write',
  READ_REPORTS: 'reports:read',
  WRITE_REPORTS: 'reports:write',
  ADMIN: '*'
} as const
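
For orientation, a minimal round-trip sketch (hypothetical script, not part of this compare): the plaintext key is handed out once, only its SHA-256 hash is stored, and validation re-hashes whatever the caller presents.

```ts
import { generateApiKey, hashApiKey, validateApiKey, hasPermission, ApiPermissions } from './lib/api-auth'

async function demo() {
  const plaintext = generateApiKey()             // 'ak_' + 64 hex chars; shown to the client once
  console.log('persist only this:', hashApiKey(plaintext))

  // Later, a request presents the plaintext key in a header:
  const key = await validateApiKey(plaintext)
  if (key && hasPermission(key, ApiPermissions.READ_SOURCES)) {
    // authorized for sources:read
  }
}
```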

(modified file)
@@ -1,5 +1,4 @@
-import sqlite3 from 'sqlite3'
-import path from 'path'
+import { db, schema } from './db/connection'

 export interface AuditLogEntry {
   user_id?: number
@@ -11,32 +10,17 @@ export interface AuditLogEntry {
 }

 export async function logAuditEvent(entry: AuditLogEntry): Promise<void> {
-  const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
-  const db = new sqlite3.Database(dbPath)
-
   try {
-    await new Promise<void>((resolve, reject) => {
-      db.run(
-        `INSERT INTO audit_logs (user_id, action, resource_type, resource_id, details, ip_address, created_at)
-         VALUES (?, ?, ?, ?, ?, ?, datetime('now'))`,
-        [
-          entry.user_id || null,
-          entry.action,
-          entry.resource_type,
-          entry.resource_id || null,
-          entry.details ? JSON.stringify(entry.details) : null,
-          entry.ip_address || null
-        ],
-        (err) => {
-          if (err) reject(err)
-          else resolve()
-        }
-      )
-    })
+    await db.insert(schema.auditLogs).values({
+      userId: entry.user_id || null,
+      action: entry.action,
+      resourceType: entry.resource_type,
+      resourceId: entry.resource_id || null,
+      details: entry.details ? JSON.stringify(entry.details) : null,
+      ipAddress: entry.ip_address || null
+    })
   } catch (error) {
     console.error('Audit logging failed:', error)
-  } finally {
-    db.close()
   }
 }
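
A sketch of the call site the new insert expects (module path and field values assumed; the snake_case input keys map onto the camelCase columns above):

```ts
import { logAuditEvent } from './lib/audit'   // module path assumed from the relative imports above

export async function onSourceVerified(userId: number, sourceId: number, ip?: string) {
  await logAuditEvent({
    user_id: userId,
    action: 'source.verify',       // hypothetical action name
    resource_type: 'source',
    resource_id: sourceId,
    details: { via: 'admin-ui' },  // serialized to JSON by logAuditEvent
    ip_address: ip
  })
}
```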

lib/backup-utils.ts (new file, 101 lines)
@@ -0,0 +1,101 @@
import { exec } from 'child_process'
import path from 'path'
import { promisify } from 'util'
import fs from 'fs/promises'

const execAsync = promisify(exec)

export async function createDatabaseBackup(): Promise<string> {
  if (!process.env.DATABASE_URL) {
    throw new Error('DATABASE_URL environment variable is required')
  }

  const timestamp = Date.now()
  const backupPath = path.join(process.cwd(), 'backups', `backup_${timestamp}.sql`)

  try {
    // Ensure backup directory exists
    await execAsync('mkdir -p backups')

    // Create PostgreSQL backup using pg_dump
    // Extract database name from connection string
    const url = new URL(process.env.DATABASE_URL)
    const dbName = url.pathname.substring(1) // Remove leading slash
    const host = url.hostname
    const port = url.port || '5432'
    const username = url.username
    const password = url.password

    // Set PGPASSWORD environment variable for pg_dump
    const env = { ...process.env, PGPASSWORD: password }

    await execAsync(
      `pg_dump -h ${host} -p ${port} -U ${username} -d ${dbName} --no-password > "${backupPath}"`,
      { env }
    )

    return backupPath
  } catch (error) {
    console.error('Backup failed:', error)
    throw new Error('Database backup failed')
  }
}

export async function restoreDatabase(backupPath: string): Promise<void> {
  if (!process.env.DATABASE_URL) {
    throw new Error('DATABASE_URL environment variable is required')
  }

  try {
    // Create backup of current DB first
    await createDatabaseBackup()

    // Extract database connection details
    const url = new URL(process.env.DATABASE_URL)
    const dbName = url.pathname.substring(1)
    const host = url.hostname
    const port = url.port || '5432'
    const username = url.username
    const password = url.password

    // Set PGPASSWORD environment variable for psql
    const env = { ...process.env, PGPASSWORD: password }

    // Restore from SQL backup
    await execAsync(
      `psql -h ${host} -p ${port} -U ${username} -d ${dbName} --no-password < "${backupPath}"`,
      { env }
    )

    console.log('Database restored successfully')
  } catch (error) {
    console.error('Restore failed:', error)
    throw new Error('Database restore failed')
  }
}

export async function listBackups(): Promise<string[]> {
  try {
    const backupDir = path.join(process.cwd(), 'backups')

    try {
      const files = await fs.readdir(backupDir)
      const sqlFiles = files.filter(file => file.endsWith('.sql'))

      // Get file stats for each backup
      const backupInfo = await Promise.all(
        sqlFiles.map(async (file) => {
          const filePath = path.join(backupDir, file)
          const stats = await fs.stat(filePath)
          return `${stats.mtime.toISOString()} ${file} (${Math.round(stats.size / 1024)}KB)`
        })
      )

      return backupInfo.sort().reverse() // Most recent first
    } catch (dirError) {
      return []
    }
  } catch {
    return []
  }
}
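
A sketch of how the helpers compose (scheduling wiring assumed); note the shell command above interpolates connection details unquoted, so it relies on a trusted DATABASE_URL:

```ts
import { createDatabaseBackup, listBackups } from './lib/backup-utils'

async function nightlyBackup() {
  const file = await createDatabaseBackup()      // e.g. backups/backup_1718000000000.sql
  console.log('backup written to', file)

  const recent = await listBackups()             // newest first: '<mtime> <file> (<size>KB)'
  console.log(recent.slice(0, 5))
}
```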

lib/cache.ts (new file, 79 lines)
@@ -0,0 +1,79 @@
// Simple in-memory cache implementation (Redis simulation for development)
// In production, this would be replaced with actual Redis client

interface CacheEntry {
  value: any
  expiry: number
}

class SimpleCache {
  private cache = new Map<string, CacheEntry>()

  set(key: string, value: any, ttlSeconds = 300): void {
    const expiry = Date.now() + (ttlSeconds * 1000)
    this.cache.set(key, { value, expiry })
  }

  get<T>(key: string): T | null {
    const entry = this.cache.get(key)

    if (!entry) return null

    if (Date.now() > entry.expiry) {
      this.cache.delete(key)
      return null
    }

    return entry.value as T
  }

  del(key: string): void {
    this.cache.delete(key)
  }

  clear(): void {
    this.cache.clear()
  }

  // Clean expired entries
  cleanup(): void {
    const now = Date.now()
    Array.from(this.cache.entries()).forEach(([key, entry]) => {
      if (now > entry.expiry) {
        this.cache.delete(key)
      }
    })
  }
}

// Global cache instance
export const cache = new SimpleCache()

// Cleanup expired entries every 5 minutes
setInterval(() => {
  cache.cleanup()
}, 5 * 60 * 1000)

export function getCacheKey(...parts: (string | number)[]): string {
  return parts.join(':')
}

export async function cacheWrapper<T>(
  key: string,
  fetcher: () => Promise<T>,
  ttl = 300
): Promise<T> {
  // Try to get from cache first
  const cached = cache.get<T>(key)
  if (cached !== null) {
    return cached
  }

  // Fetch fresh data
  const data = await fetcher()

  // Cache the result
  cache.set(key, data, ttl)

  return data
}
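
The intended read-through pattern, as a sketch (key layout and TTL are illustrative):

```ts
import { cacheWrapper, getCacheKey } from './lib/cache'

async function getDomainStats(domain: string) {
  return cacheWrapper(
    getCacheKey('domain-stats', domain),   // -> 'domain-stats:example.com'
    async () => {
      // an expensive lookup would go here; the result is cached below
      return { domain, computedAt: Date.now() }
    },
    60                                     // seconds to keep the entry
  )
}
```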

lib/db-optimizations.ts (new file, 60 lines)
@@ -0,0 +1,60 @@
import { db, schema } from './db/connection'
import { sql, count } from 'drizzle-orm'

export async function optimizeDatabase(): Promise<void> {
  try {
    // PostgreSQL automatically creates indexes defined in schema.ts
    // Run ANALYZE to update statistics for query optimization
    const tables = [
      'sources',
      'reports',
      'api_keys',
      'source_categories',
      'categories',
      'users'
    ]

    for (const tableName of tables) {
      await db.execute(sql`ANALYZE ${sql.raw(tableName)}`)
    }

    // PostgreSQL equivalent of VACUUM - VACUUM ANALYZE updates statistics and cleans up
    await db.execute(sql`VACUUM ANALYZE`)

    console.log('Database optimization completed successfully')

  } catch (error) {
    console.error('Database optimization failed:', error)
    throw error
  }
}

export async function getDatabaseStats(): Promise<any> {
  try {
    // Get row counts for each table
    const [sourcesCount] = await db.select({ count: count() }).from(schema.sources)
    const [reportsCount] = await db.select({ count: count() }).from(schema.reports)
    const [apiKeysCount] = await db.select({ count: count() }).from(schema.apiKeys)
    const [categoriesCount] = await db.select({ count: count() }).from(schema.categories)
    const [usersCount] = await db.select({ count: count() }).from(schema.users)
    const [sourceCategoriesCount] = await db.select({ count: count() }).from(schema.sourceCategories)

    const tables = [
      { table_name: 'sources', row_count: sourcesCount.count },
      { table_name: 'reports', row_count: reportsCount.count },
      { table_name: 'api_keys', row_count: apiKeysCount.count },
      { table_name: 'categories', row_count: categoriesCount.count },
      { table_name: 'users', row_count: usersCount.count },
      { table_name: 'source_categories', row_count: sourceCategoriesCount.count }
    ]

    return {
      tables,
      optimized_at: new Date().toISOString()
    }

  } catch (error) {
    console.error('Failed to get database stats:', error)
    throw error
  }
}

lib/db/connection.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
import { drizzle } from 'drizzle-orm/node-postgres';
import { Pool } from 'pg';
import * as schema from './schema';

function createConnection() {
  if (!process.env.DATABASE_URL) {
    throw new Error('DATABASE_URL environment variable is required');
  }

  const pool = new Pool({
    connectionString: process.env.DATABASE_URL,
    ssl: false,
    max: 10,
    idleTimeoutMillis: 30000,
    connectionTimeoutMillis: 2000
  });

  // Test connection
  pool.on('error', (err) => {
    console.error('Unexpected error on idle client', err);
  });

  return drizzle(pool, { schema });
}

export const db = createConnection();
export { schema };
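
A sketch of a typical consumer; because the schema object is passed to drizzle(), both the typed query builder shown here and the relational API shown after schema.ts are available:

```ts
import { db, schema } from './lib/db/connection'
import { eq } from 'drizzle-orm'

async function verifiedSources() {
  return db.select()
    .from(schema.sources)
    .where(eq(schema.sources.status, 'verified'))
}
```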

lib/db/schema.ts (new file, 228 lines)
@@ -0,0 +1,228 @@
import {
  pgTable,
  serial,
  varchar,
  text,
  boolean,
  integer,
  timestamp,
  decimal,
  pgEnum,
  uniqueIndex,
  index
} from 'drizzle-orm/pg-core';
import { relations } from 'drizzle-orm';

// Enums
export const roleEnum = pgEnum('role', ['admin', 'moderator']);
export const sourceTypeEnum = pgEnum('source_type', [
  'website', 'facebook_page', 'facebook_group', 'instagram',
  'blog', 'news_site', 'youtube', 'tiktok', 'telegram', 'other'
]);
export const sourceStatusEnum = pgEnum('source_status', [
  'pending', 'verified', 'rejected', 'under_review'
]);
export const languageEnum = pgEnum('language', ['sk', 'cs', 'en', 'other']);
export const priorityEnum = pgEnum('priority', ['low', 'medium', 'high', 'urgent']);
export const reportStatusEnum = pgEnum('report_status', [
  'pending', 'in_review', 'approved', 'rejected', 'duplicate'
]);

// Users table
export const users = pgTable('users', {
  id: serial('id').primaryKey(),
  email: varchar('email', { length: 255 }).notNull().unique(),
  passwordHash: varchar('password_hash', { length: 255 }).notNull(),
  name: varchar('name', { length: 100 }).notNull(),
  role: roleEnum('role').default('moderator'),
  isActive: boolean('is_active').default(true),
  lastLogin: timestamp('last_login'),
  createdAt: timestamp('created_at').defaultNow(),
  updatedAt: timestamp('updated_at').defaultNow()
});

// Categories table
export const categories = pgTable('categories', {
  id: serial('id').primaryKey(),
  name: varchar('name', { length: 100 }).notNull().unique(),
  slug: varchar('slug', { length: 100 }).notNull().unique(),
  description: text('description'),
  color: varchar('color', { length: 7 }).default('#6B7280'),
  priority: integer('priority').default(1),
  icon: varchar('icon', { length: 50 }),
  isActive: boolean('is_active').default(true),
  createdAt: timestamp('created_at').defaultNow(),
  updatedAt: timestamp('updated_at').defaultNow()
}, (table) => {
  return {
    slugIdx: uniqueIndex('idx_categories_slug').on(table.slug),
    priorityIdx: index('idx_categories_priority').on(table.priority)
  };
});

// Sources table
export const sources = pgTable('sources', {
  id: serial('id').primaryKey(),
  url: varchar('url', { length: 1000 }).notNull().unique(),
  domain: varchar('domain', { length: 255 }).notNull(),
  title: varchar('title', { length: 500 }),
  description: text('description'),
  type: sourceTypeEnum('type').notNull(),
  status: sourceStatusEnum('status').default('pending'),
  riskLevel: integer('risk_level').default(1),
  language: languageEnum('language').default('sk'),
  evidenceUrls: text('evidence_urls'), // JSON
  reportedBy: varchar('reported_by', { length: 255 }),
  verifiedBy: integer('verified_by').references(() => users.id),
  rejectionReason: text('rejection_reason'),
  followerCount: integer('follower_count').default(0),
  lastChecked: timestamp('last_checked'),
  metadata: text('metadata').default('{}'), // JSON
  createdAt: timestamp('created_at').defaultNow(),
  updatedAt: timestamp('updated_at').defaultNow()
}, (table) => {
  return {
    domainIdx: index('idx_sources_domain').on(table.domain),
    statusIdx: index('idx_sources_status').on(table.status),
    riskLevelIdx: index('idx_sources_risk_level').on(table.riskLevel),
    typeIdx: index('idx_sources_type').on(table.type),
    createdAtIdx: index('idx_sources_created_at').on(table.createdAt),
    verifiedByIdx: index('idx_sources_verified_by').on(table.verifiedBy),
    statusRiskIdx: index('idx_sources_status_risk').on(table.status, table.riskLevel)
  };
});

// Source Categories junction table
export const sourceCategories = pgTable('source_categories', {
  id: serial('id').primaryKey(),
  sourceId: integer('source_id').notNull().references(() => sources.id, { onDelete: 'cascade' }),
  categoryId: integer('category_id').notNull().references(() => categories.id, { onDelete: 'cascade' }),
  confidenceScore: decimal('confidence_score', { precision: 3, scale: 2 }).default('1.0'),
  addedBy: integer('added_by').references(() => users.id),
  createdAt: timestamp('created_at').defaultNow()
}, (table) => {
  return {
    sourceIdIdx: index('idx_source_categories_source_id').on(table.sourceId),
    categoryIdIdx: index('idx_source_categories_category_id').on(table.categoryId),
    uniqueSourceCategory: uniqueIndex('unique_source_category').on(table.sourceId, table.categoryId)
  };
});

// Reports table
export const reports = pgTable('reports', {
  id: serial('id').primaryKey(),
  sourceUrl: varchar('source_url', { length: 1000 }).notNull(),
  sourceDomain: varchar('source_domain', { length: 255 }).notNull(),
  reporterEmail: varchar('reporter_email', { length: 255 }),
  reporterName: varchar('reporter_name', { length: 100 }),
  categorySuggestions: text('category_suggestions'), // JSON
  description: text('description').notNull(),
  evidenceUrls: text('evidence_urls'), // JSON
  priority: priorityEnum('priority').default('medium'),
  status: reportStatusEnum('status').default('pending'),
  assignedTo: integer('assigned_to').references(() => users.id),
  adminNotes: text('admin_notes'),
  processedAt: timestamp('processed_at'),
  ipAddress: varchar('ip_address', { length: 45 }),
  userAgent: text('user_agent'),
  createdAt: timestamp('created_at').defaultNow(),
  updatedAt: timestamp('updated_at').defaultNow()
}, (table) => {
  return {
    statusIdx: index('idx_reports_status').on(table.status),
    sourceDomainIdx: index('idx_reports_source_domain').on(table.sourceDomain),
    priorityIdx: index('idx_reports_priority').on(table.priority),
    createdAtIdx: index('idx_reports_created_at').on(table.createdAt),
    assignedToIdx: index('idx_reports_assigned_to').on(table.assignedTo)
  };
});

// API Keys table
export const apiKeys = pgTable('api_keys', {
  id: serial('id').primaryKey(),
  keyHash: varchar('key_hash', { length: 255 }).notNull().unique(),
  name: varchar('name', { length: 100 }).notNull(),
  description: text('description'),
  ownerEmail: varchar('owner_email', { length: 255 }).notNull(),
  permissions: text('permissions').default('["read"]'), // JSON
  rateLimit: integer('rate_limit').default(1000),
  isActive: boolean('is_active').default(true),
  usageCount: integer('usage_count').default(0),
  lastUsed: timestamp('last_used'),
  expiresAt: timestamp('expires_at'),
  createdAt: timestamp('created_at').defaultNow(),
  updatedAt: timestamp('updated_at').defaultNow()
}, (table) => {
  return {
    keyHashIdx: uniqueIndex('idx_api_keys_hash').on(table.keyHash),
    ownerIdx: index('idx_api_keys_owner').on(table.ownerEmail)
  };
});

// Audit Logs table
export const auditLogs = pgTable('audit_logs', {
  id: serial('id').primaryKey(),
  userId: integer('user_id').references(() => users.id),
  action: varchar('action', { length: 50 }).notNull(),
  resourceType: varchar('resource_type', { length: 50 }).notNull(),
  resourceId: integer('resource_id'),
  details: text('details'), // JSON
  ipAddress: varchar('ip_address', { length: 45 }),
  createdAt: timestamp('created_at').defaultNow()
}, (table) => {
  return {
    userIdIdx: index('idx_audit_logs_user_id').on(table.userId),
    createdAtIdx: index('idx_audit_logs_created_at').on(table.createdAt),
    actionIdx: index('idx_audit_logs_action').on(table.action),
    resourceTypeIdx: index('idx_audit_logs_resource_type').on(table.resourceType)
  };
});

// Relations
export const usersRelations = relations(users, ({ many }) => ({
  verifiedSources: many(sources),
  sourceCategories: many(sourceCategories),
  assignedReports: many(reports),
  auditLogs: many(auditLogs)
}));

export const categoriesRelations = relations(categories, ({ many }) => ({
  sourceCategories: many(sourceCategories)
}));

export const sourcesRelations = relations(sources, ({ one, many }) => ({
  verifiedBy: one(users, {
    fields: [sources.verifiedBy],
    references: [users.id]
  }),
  sourceCategories: many(sourceCategories)
}));

export const sourceCategoriesRelations = relations(sourceCategories, ({ one }) => ({
  source: one(sources, {
    fields: [sourceCategories.sourceId],
    references: [sources.id]
  }),
  category: one(categories, {
    fields: [sourceCategories.categoryId],
    references: [categories.id]
  }),
  addedBy: one(users, {
    fields: [sourceCategories.addedBy],
    references: [users.id]
  })
}));

export const reportsRelations = relations(reports, ({ one }) => ({
  assignedTo: one(users, {
    fields: [reports.assignedTo],
    references: [users.id]
  })
}));

export const auditLogsRelations = relations(auditLogs, ({ one }) => ({
  user: one(users, {
    fields: [auditLogs.userId],
    references: [users.id]
  })
}));
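
With the relations above registered, drizzle's relational query API can walk the source_categories junction in one call (a sketch, using the db instance from lib/db/connection):

```ts
import { db } from './lib/db/connection'

async function sourcesWithCategories() {
  // Sources together with their categories via the junction table;
  // relation names come from sourceCategoriesRelations above.
  return db.query.sources.findMany({
    with: {
      sourceCategories: {
        with: { category: true }
      }
    },
    limit: 10
  })
}
```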

lib/db/seed.ts (new file, 98 lines)
@@ -0,0 +1,98 @@
import { db } from './connection';
import { users, categories, sources, apiKeys, sourceCategories } from './schema';
import * as bcrypt from 'bcryptjs';

export async function seedDatabase() {
  try {
    console.log('🌱 Seeding database...');

    // Insert categories one by one
    const categoryData = [
      { name: 'Hoax', slug: 'hoax', description: 'Šírenie nepravdivých informácií a hoaxov', color: '#EF4444', priority: 5, icon: 'AlertTriangle' },
      { name: 'Hate Speech', slug: 'hate-speech', description: 'Nenávistné prejavy proti skupinám ľudí', color: '#DC2626', priority: 5, icon: 'MessageSquareX' },
      { name: 'Violence', slug: 'violence', description: 'Povzbudzovanie k násiliu', color: '#B91C1C', priority: 5, icon: 'Sword' },
      { name: 'Conspiracy', slug: 'conspiracy', description: 'Konšpiračné teórie', color: '#F59E0B', priority: 3, icon: 'Eye' },
      { name: 'Propaganda', slug: 'propaganda', description: 'Politická propaganda a manipulácia', color: '#D97706', priority: 2, icon: 'Megaphone' }
    ];

    const insertedCategories = [];
    for (const cat of categoryData) {
      const result = await db.insert(categories).values(cat).returning();
      insertedCategories.push(result[0]);
      console.log(`✅ Inserted category: ${cat.name}`);
    }

    // Insert admin user
    const hashedPassword = await bcrypt.hash('admin123', 12);
    const insertedUsers = await db.insert(users).values({
      email: 'admin@antihoax.sk',
      passwordHash: hashedPassword,
      name: 'System Admin',
      role: 'admin'
    }).returning();
    console.log(`✅ Inserted user: ${insertedUsers[0].name}`);

    // Insert example sources
    const sourceData = [
      {
        url: 'https://example-hoax-site.com',
        domain: 'example-hoax-site.com',
        title: 'Example Hoax Site',
        description: 'Príklad hoax stránky pre testovanie',
        type: 'website' as const,
        status: 'verified' as const,
        riskLevel: 5,
        language: 'sk' as const,
        reportedBy: 'test@example.com',
        verifiedBy: insertedUsers[0].id,
        followerCount: 1500,
        metadata: JSON.stringify({ tags: ['test', 'example'] })
      },
      {
        url: 'https://example-conspiracy.com',
        domain: 'example-conspiracy.com',
        title: 'Conspiracy Theory Site',
        description: 'Stránka šíriaca konšpiračné teórie',
        type: 'blog' as const,
        status: 'verified' as const,
        riskLevel: 3,
        language: 'sk' as const,
        reportedBy: 'reporter@example.com',
        verifiedBy: insertedUsers[0].id,
        followerCount: 850,
        metadata: JSON.stringify({ tags: ['conspiracy', 'politics'] })
      }
    ];

    const insertedSources = [];
    for (const src of sourceData) {
      const result = await db.insert(sources).values(src).returning();
      insertedSources.push(result[0]);
      console.log(`✅ Inserted source: ${src.title}`);
    }

    // Link sources with categories
    await db.insert(sourceCategories).values({
      sourceId: insertedSources[0].id,
      categoryId: insertedCategories[0].id, // Hoax
      confidenceScore: '1.0',
      addedBy: insertedUsers[0].id
    });

    await db.insert(sourceCategories).values({
      sourceId: insertedSources[1].id,
      categoryId: insertedCategories[3].id, // Conspiracy
      confidenceScore: '0.9',
      addedBy: insertedUsers[0].id
    });

    console.log(`✅ Linked sources with categories`);

    console.log('🎉 Database seeded successfully!');
    console.log('📧 Admin login: admin@antihoax.sk / admin123');

  } catch (error) {
    console.error('❌ Error seeding database:', error);
    throw error;
  }
}
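
package.json maps `db:seed` to `npx tsx scripts/seed.ts`; that runner is not part of this compare, but a minimal version might look like this, loading .env.local before the connection module is evaluated (connection.ts throws at import time without DATABASE_URL):

```ts
// scripts/seed.ts (assumed entry point, not shown in this compare)
import * as dotenv from 'dotenv'
dotenv.config({ path: '.env.local' })

// Dynamic import so DATABASE_URL is set before lib/db/connection runs
import('../lib/db/seed')
  .then(({ seedDatabase }) => seedDatabase())
  .then(() => process.exit(0))
  .catch((err) => {
    console.error(err)
    process.exit(1)
  })
```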

lib/monitoring.ts (new file, 99 lines)
@@ -0,0 +1,99 @@
interface MonitoringMetric {
  name: string
  value: number
  timestamp: number
  tags?: Record<string, string>
}

class MetricsCollector {
  private metrics: MonitoringMetric[] = []
  private maxMetrics = 1000

  record(name: string, value: number, tags?: Record<string, string>): void {
    const metric: MonitoringMetric = {
      name,
      value,
      timestamp: Date.now(),
      tags
    }

    this.metrics.push(metric)

    // Keep only recent metrics
    if (this.metrics.length > this.maxMetrics) {
      this.metrics = this.metrics.slice(-this.maxMetrics)
    }
  }

  getMetrics(name?: string, since?: number): MonitoringMetric[] {
    let filtered = this.metrics

    if (name) {
      filtered = filtered.filter(m => m.name === name)
    }

    if (since) {
      filtered = filtered.filter(m => m.timestamp >= since)
    }

    return filtered
  }

  getAverageResponseTime(since?: number): number {
    const responseMetrics = this.getMetrics('api.response_time', since)
    if (responseMetrics.length === 0) return 0

    const sum = responseMetrics.reduce((acc, m) => acc + m.value, 0)
    return sum / responseMetrics.length
  }

  getRequestCount(endpoint?: string, since?: number): number {
    const requestMetrics = this.getMetrics('api.request', since)

    if (endpoint) {
      return requestMetrics.filter(m => m.tags?.endpoint === endpoint).length
    }

    return requestMetrics.length
  }

  getErrorCount(since?: number): number {
    return this.getMetrics('api.error', since).length
  }

  clear(): void {
    this.metrics = []
  }
}

export const metrics = new MetricsCollector()

export function trackApiCall(endpoint: string, method: string, statusCode: number, responseTime: number): void {
  metrics.record('api.request', 1, { endpoint, method, status: statusCode.toString() })
  metrics.record('api.response_time', responseTime, { endpoint })

  if (statusCode >= 400) {
    metrics.record('api.error', 1, { endpoint, status: statusCode.toString() })
  }
}

export function trackDatabaseQuery(operation: string, duration: number): void {
  metrics.record('db.query', 1, { operation })
  metrics.record('db.duration', duration, { operation })
}

export function getHealthStatus(): any {
  const now = Date.now()
  const fiveMinutesAgo = now - (5 * 60 * 1000)

  return {
    status: 'healthy',
    timestamp: new Date().toISOString(),
    metrics: {
      requests_5min: metrics.getRequestCount(undefined, fiveMinutesAgo),
      avg_response_time: Math.round(metrics.getAverageResponseTime(fiveMinutesAgo)),
      errors_5min: metrics.getErrorCount(fiveMinutesAgo),
      uptime: process.uptime()
    }
  }
}
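
A sketch of instrumenting a handler (endpoint name and handler body illustrative):

```ts
import type { NextApiRequest, NextApiResponse } from 'next'
import { trackApiCall } from '../../lib/monitoring'

export default function handler(req: NextApiRequest, res: NextApiResponse) {
  const start = Date.now()
  res.status(200).json({ ok: true })
  // Feeds requests_5min / avg_response_time / errors_5min in getHealthStatus()
  trackApiCall('/api/ping', req.method ?? 'GET', res.statusCode, Date.now() - start)
}
```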

(modified file)
@@ -24,11 +24,11 @@ export function rateLimit(
   const windowStart = now - config.windowMs

   // Clean expired entries
-  for (const [key, data] of cache.entries()) {
+  Array.from(cache.entries()).forEach(([key, data]) => {
     if (data.resetTime < now) {
       cache.delete(key)
     }
-  }
+  })

   let data = cache.get(identifier)

lib/security.ts (new file, 62 lines)
@@ -0,0 +1,62 @@
import crypto from 'crypto'

export function sanitizeHtml(input: string): string {
  return input
    .replace(/[<>'"&]/g, (char) => {
      switch (char) {
        case '<': return '&lt;'
        case '>': return '&gt;'
        case '"': return '&quot;'
        case "'": return '&#x27;'
        case '&': return '&amp;'
        default: return char
      }
    })
}

export function validateDomain(domain: string): boolean {
  const domainRegex = /^[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9]?\.[a-zA-Z]{2,}$/
  return domainRegex.test(domain) && domain.length <= 253
}

export function isValidIPAddress(ip: string): boolean {
  const ipv4Regex = /^(\d{1,3}\.){3}\d{1,3}$/
  const ipv6Regex = /^([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}$/

  if (ipv4Regex.test(ip)) {
    return ip.split('.').every(part => {
      const num = parseInt(part, 10)
      return num >= 0 && num <= 255
    })
  }

  return ipv6Regex.test(ip)
}

export function hashPassword(password: string, salt?: string): { hash: string, salt: string } {
  const finalSalt = salt || crypto.randomBytes(32).toString('hex')
  const hash = crypto.pbkdf2Sync(password, finalSalt, 100000, 64, 'sha256').toString('hex')
  return { hash, salt: finalSalt }
}

export function verifyPassword(password: string, hash: string, salt: string): boolean {
  const { hash: computedHash } = hashPassword(password, salt)
  return crypto.timingSafeEqual(Buffer.from(hash, 'hex'), Buffer.from(computedHash, 'hex'))
}

export function generateSecureToken(length = 32): string {
  return crypto.randomBytes(length).toString('hex')
}

export function rateLimitKey(req: any): string {
  const forwarded = req.headers['x-forwarded-for']
  const ip = forwarded ? forwarded.split(',')[0] : req.connection?.remoteAddress
  return `rate_limit:${ip || 'unknown'}`
}

export class SecurityError extends Error {
  constructor(message: string, public code: string) {
    super(message)
    this.name = 'SecurityError'
  }
}
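
The password helpers round-trip as below (a sketch); timingSafeEqual requires equal-length inputs, which holds here because both sides are 64-byte PBKDF2 outputs in hex:

```ts
import { hashPassword, verifyPassword } from './lib/security'

const { hash, salt } = hashPassword('s3cret-passphrase')
console.log(verifyPassword('s3cret-passphrase', hash, salt))  // true
console.log(verifyPassword('wrong-guess', hash, salt))        // false
```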

(modified file)
@@ -1,8 +1,7 @@
+import { sanitizeHtml, validateDomain } from './security'
+
 export function sanitizeUrl(url: string): string {
-  return url
-    .trim()
-    .replace(/[<>'"]/g, '') // Remove potential XSS characters
-    .substring(0, 2048) // Limit length
+  return sanitizeHtml(url.trim().substring(0, 2048))
 }

 export function validateEmail(email: string): boolean {

middleware/auth.ts (new file, 48 lines)
@@ -0,0 +1,48 @@
import type { NextApiRequest, NextApiResponse } from 'next'
import { validateApiKey, hasPermission, ApiKey } from '../lib/api-auth'

export interface AuthenticatedRequest extends NextApiRequest {
  apiKey?: ApiKey
}

export function requireAuth(permission?: string) {
  return async (
    req: AuthenticatedRequest,
    res: NextApiResponse,
    next: () => void
  ) => {
    const apiKeyHeader = req.headers['x-api-key'] as string

    if (!apiKeyHeader) {
      return res.status(401).json({ error: 'API key required' })
    }

    const apiKey = await validateApiKey(apiKeyHeader)

    if (!apiKey) {
      return res.status(401).json({ error: 'Invalid API key' })
    }

    if (permission && !hasPermission(apiKey, permission)) {
      return res.status(403).json({ error: 'Insufficient permissions' })
    }

    req.apiKey = apiKey
    next()
  }
}

export function withAuth(
  handler: (req: AuthenticatedRequest, res: NextApiResponse) => Promise<void>,
  permission?: string
) {
  return async (req: AuthenticatedRequest, res: NextApiResponse) => {
    const authMiddleware = requireAuth(permission)

    return new Promise<void>((resolve, reject) => {
      authMiddleware(req, res, () => {
        handler(req, res).then(resolve).catch(reject)
      })
    })
  }
}
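
Intended use in a Pages-Router API route, as a sketch (route path assumed):

```ts
// pages/api/sources/index.ts (hypothetical route)
import type { NextApiResponse } from 'next'
import { withAuth, AuthenticatedRequest } from '../../../middleware/auth'
import { ApiPermissions } from '../../../lib/api-auth'

export default withAuth(async (req: AuthenticatedRequest, res: NextApiResponse) => {
  // req.apiKey was populated by requireAuth before we get here
  res.status(200).json({ caller: req.apiKey?.name })
}, ApiPermissions.READ_SOURCES)
```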

middleware/security.ts (new file, 41 lines)
@@ -0,0 +1,41 @@
import type { NextApiRequest, NextApiResponse } from 'next'

export function securityHeaders(req: NextApiRequest, res: NextApiResponse, next: () => void) {
  // Set security headers
  res.setHeader('X-Content-Type-Options', 'nosniff')
  res.setHeader('X-Frame-Options', 'DENY')
  res.setHeader('X-XSS-Protection', '1; mode=block')
  res.setHeader('Referrer-Policy', 'strict-origin-when-cross-origin')
  res.setHeader('Permissions-Policy', 'geolocation=(), microphone=(), camera=()')

  // Prevent information leakage
  res.removeHeader('X-Powered-By')

  next()
}

export function validateContentType(allowedTypes: string[] = ['application/json']) {
  return (req: NextApiRequest, res: NextApiResponse, next: () => void) => {
    if (req.method === 'POST' || req.method === 'PUT' || req.method === 'PATCH') {
      const contentType = req.headers['content-type']

      if (!contentType || !allowedTypes.some(type => contentType.includes(type))) {
        return res.status(415).json({ error: 'Unsupported Media Type' })
      }
    }

    next()
  }
}

export function validateRequestSize(maxSize = 1024 * 1024) { // 1MB default
  return (req: NextApiRequest, res: NextApiResponse, next: () => void) => {
    const contentLength = req.headers['content-length']

    if (contentLength && parseInt(contentLength) > maxSize) {
      return res.status(413).json({ error: 'Request entity too large' })
    }

    next()
  }
}
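
These follow the classic (req, res, next) shape, so a route can chain them with a small runner; the runMiddleware helper below is hypothetical, not part of the diff:

```ts
import type { NextApiRequest, NextApiResponse } from 'next'
import { securityHeaders, validateContentType, validateRequestSize } from '../../middleware/security'

type Middleware = (req: NextApiRequest, res: NextApiResponse, next: () => void) => void

// Hypothetical helper: runs middlewares left to right; a middleware that
// responds early simply never calls next(), stopping the chain.
function runMiddleware(req: NextApiRequest, res: NextApiResponse, ...mws: Middleware[]) {
  let i = 0
  const next = () => { if (i < mws.length) mws[i++](req, res, next) }
  next()
}

export default function handler(req: NextApiRequest, res: NextApiResponse) {
  runMiddleware(req, res, securityHeaders, validateContentType(), validateRequestSize())
  if (res.writableEnded) return   // one of the middlewares already replied (415/413)
  res.status(200).json({ ok: true })
}
```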

next.config.js (modified)
@@ -2,6 +2,27 @@
 const nextConfig = {
   reactStrictMode: true,
   swcMinify: true,
+  async headers() {
+    return [
+      {
+        source: '/api/:path*',
+        headers: [
+          {
+            key: 'Access-Control-Allow-Origin',
+            value: '*'
+          },
+          {
+            key: 'Access-Control-Allow-Methods',
+            value: 'GET,OPTIONS,PATCH,DELETE,POST,PUT'
+          },
+          {
+            key: 'Access-Control-Allow-Headers',
+            value: 'X-CSRF-Token, X-Requested-With, Accept, Accept-Version, Content-Length, Content-MD5, Content-Type, Date, X-Api-Version, X-API-Key'
+          }
+        ]
+      }
+    ]
+  }
 }

 module.exports = nextConfig

package-lock.json (generated, 3934 lines): diff suppressed because it is too large.

package.json (modified, 39 lines)
@@ -1,26 +1,37 @@
 {
   "name": "infohliadka",
-  "version": "0.1.0",
+  "version": "1.0.0",
   "private": true,
   "scripts": {
     "dev": "next dev",
     "build": "next build",
     "start": "next start",
-    "lint": "next lint"
+    "lint": "next lint",
+    "db:generate": "drizzle-kit generate",
+    "db:migrate": "drizzle-kit migrate",
+    "db:push": "drizzle-kit push",
+    "db:studio": "drizzle-kit studio",
+    "db:seed": "npx tsx scripts/seed.ts"
   },
   "dependencies": {
-    "next": "13.2.4",
-    "react": "18.2.0",
-    "react-dom": "18.2.0",
-    "sqlite3": "^5.1.6"
+    "@types/bcryptjs": "^2.4.6",
+    "@types/pg": "^8.15.5",
+    "bcryptjs": "^3.0.2",
+    "dotenv": "^17.2.2",
+    "drizzle-kit": "^0.31.4",
+    "drizzle-orm": "^0.44.5",
+    "next": "^14.2.32",
+    "pg": "^8.16.3",
+    "react": "^19.1.1",
+    "react-dom": "^19.1.1"
   },
   "devDependencies": {
-    "@types/node": "18.15.5",
-    "@types/react": "18.2.48",
-    "@types/react-dom": "18.2.18",
-    "@types/sqlite3": "^3.1.11",
-    "eslint": "8.36.0",
-    "eslint-config-next": "13.2.4",
-    "typescript": "4.9.4"
+    "@types/node": "^24.3.1",
+    "@types/react": "^19.1.12",
+    "@types/react-dom": "^19.1.9",
+    "eslint": "^9.35.0",
+    "eslint-config-next": "14.2.15",
+    "tsx": "^4.20.5",
+    "typescript": "^5.9.2"
   }
 }

pages/api/admin/api-keys.ts (new file, 78 lines)
@@ -0,0 +1,78 @@
import type { NextApiRequest, NextApiResponse } from "next"
import { db, schema } from '../../../lib/db/connection'
import { desc, eq } from 'drizzle-orm'
import { generateApiKey, hashApiKey } from "../../../lib/api-auth"

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    if (req.method === "GET") {
      const keys = await db
        .select({
          id: schema.apiKeys.id,
          name: schema.apiKeys.name,
          permissions: schema.apiKeys.permissions,
          rateLimit: schema.apiKeys.rateLimit,
          isActive: schema.apiKeys.isActive,
          lastUsed: schema.apiKeys.lastUsed,
          createdAt: schema.apiKeys.createdAt
        })
        .from(schema.apiKeys)
        .orderBy(desc(schema.apiKeys.createdAt))

      res.json({
        keys: keys.map(key => ({
          ...key,
          permissions: key.permissions ? JSON.parse(key.permissions) : [],
          key_preview: '***...' + (key.id.toString().slice(-4))
        }))
      })

    } else if (req.method === "POST") {
      const { name, permissions = [], rate_limit = 1000 } = req.body

      if (!name) {
        return res.status(400).json({ error: "Name required" })
      }

      const apiKey = generateApiKey()
      const keyHash = hashApiKey(apiKey)

      const [result] = await db
        .insert(schema.apiKeys)
        .values({
          keyHash: keyHash,
          name: name,
          permissions: JSON.stringify(permissions),
          rateLimit: rate_limit,
          isActive: true
        })
        .returning({ id: schema.apiKeys.id })

      res.json({
        success: true,
        id: result.id,
        api_key: apiKey, // Only returned once during creation
        name,
        permissions,
        rate_limit
      })

    } else if (req.method === "DELETE") {
      const { id } = req.query

      await db
        .update(schema.apiKeys)
        .set({ isActive: false })
        .where(eq(schema.apiKeys.id, parseInt(id as string)))

      res.json({ success: true })

    } else {
      res.status(405).json({ error: "Method not allowed" })
    }

  } catch (error) {
    console.error('API keys error:', error)
    res.status(500).json({ error: "Operation failed" })
  }
}
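
Exercising the endpoint from a script, as a sketch (base URL assumed). Note that the insert above omits owner_email, which schema.ts declares notNull with no default, so the POST as committed would be rejected by that column constraint.

```ts
async function createKey() {
  // Hypothetical local call; the plaintext api_key is only ever returned by this POST
  const res = await fetch('http://localhost:3000/api/admin/api-keys', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ name: 'ci-bot', permissions: ['sources:read'], rate_limit: 500 })
  })
  const { api_key } = await res.json()
  return api_key   // store securely; only the hash survives server-side
}
```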

(modified file)
@@ -1,76 +1,74 @@
 import type { NextApiRequest, NextApiResponse } from "next"
-import sqlite3 from "sqlite3"
-import path from "path"
+import { db, schema } from '../../../lib/db/connection'
+import { eq, and, desc, count } from 'drizzle-orm'

 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
   if (req.method !== "GET") return res.status(405).json({ error: "Method not allowed" })

   const { page = '1', limit = '50', action, resource_type, user_id } = req.query

-  const dbPath = path.join(process.cwd(), "database", "antihoax.db")
-  const db = new sqlite3.Database(dbPath)
-
   try {
-    let whereConditions: string[] = []
-    let params: any[] = []
+    let whereConditions = []

     if (action) {
-      whereConditions.push("a.action = ?")
-      params.push(action)
+      whereConditions.push(eq(schema.auditLogs.action, action as string))
     }

     if (resource_type) {
-      whereConditions.push("a.resource_type = ?")
-      params.push(resource_type)
+      whereConditions.push(eq(schema.auditLogs.resourceType, resource_type as string))
     }

     if (user_id) {
-      whereConditions.push("a.user_id = ?")
-      params.push(parseInt(user_id as string))
+      whereConditions.push(eq(schema.auditLogs.userId, parseInt(user_id as string)))
     }

-    const whereClause = whereConditions.length > 0 ? `WHERE ${whereConditions.join(' AND ')}` : ''
     const offset = (parseInt(page as string) - 1) * parseInt(limit as string)
+    const limitInt = parseInt(limit as string)

-    const query = `
-      SELECT
-        a.*,
-        u.email as user_email,
-        COUNT(*) OVER() as total_count
-      FROM audit_logs a
-      LEFT JOIN users u ON a.user_id = u.id
-      ${whereClause}
-      ORDER BY a.created_at DESC
-      LIMIT ? OFFSET ?
-    `
-
-    params.push(parseInt(limit as string), offset)
-
-    const logs = await new Promise<any[]>((resolve, reject) => {
-      db.all(query, params, (err, rows) => {
-        if (err) reject(err)
-        else resolve(rows)
-      })
-    })
+    // Get logs with user info
+    const logs = await db
+      .select({
+        id: schema.auditLogs.id,
+        userId: schema.auditLogs.userId,
+        userEmail: schema.users.email,
+        action: schema.auditLogs.action,
+        resourceType: schema.auditLogs.resourceType,
+        resourceId: schema.auditLogs.resourceId,
+        details: schema.auditLogs.details,
+        ipAddress: schema.auditLogs.ipAddress,
+        createdAt: schema.auditLogs.createdAt
+      })
+      .from(schema.auditLogs)
+      .leftJoin(schema.users, eq(schema.auditLogs.userId, schema.users.id))
+      .where(whereConditions.length > 0 ? and(...whereConditions) : undefined)
+      .orderBy(desc(schema.auditLogs.createdAt))
+      .limit(limitInt)
+      .offset(offset)

-    const total = logs.length > 0 ? logs[0].total_count : 0
-    const totalPages = Math.ceil(total / parseInt(limit as string))
+    // Get total count for pagination
+    const [totalResult] = await db
+      .select({ count: count() })
+      .from(schema.auditLogs)
+      .where(whereConditions.length > 0 ? and(...whereConditions) : undefined)
+
+    const total = totalResult.count
+    const totalPages = Math.ceil(total / limitInt)

     res.json({
       logs: logs.map(log => ({
         id: log.id,
-        user_id: log.user_id,
-        user_email: log.user_email,
+        user_id: log.userId,
+        user_email: log.userEmail,
         action: log.action,
-        resource_type: log.resource_type,
-        resource_id: log.resource_id,
+        resource_type: log.resourceType,
+        resource_id: log.resourceId,
         details: log.details ? JSON.parse(log.details) : null,
-        ip_address: log.ip_address,
-        created_at: log.created_at
+        ip_address: log.ipAddress,
+        created_at: log.createdAt
       })),
       pagination: {
         page: parseInt(page as string),
-        limit: parseInt(limit as string),
+        limit: limitInt,
         total,
         totalPages
       }
@@ -79,7 +77,5 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   } catch (error) {
     console.error('Audit logs error:', error)
     res.status(500).json({ error: "Failed to fetch audit logs" })
-  } finally {
-    db.close()
   }
 }

(modified file)
@@ -1,10 +1,10 @@
 import type { NextApiRequest, NextApiResponse } from "next"
-import sqlite3 from "sqlite3"
-import path from "path"
+import { db, schema } from "../../../lib/db/connection"
+import { eq } from "drizzle-orm"

 interface BulkImportItem {
   domain: string
-  risk_level: number
+  riskLevel: number
   categories: string[]
   description?: string
 }
@@ -18,63 +18,57 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     return res.status(400).json({ error: "Sources array required" })
   }

-  const dbPath = path.join(process.cwd(), "database", "antihoax.db")
-  const db = new sqlite3.Database(dbPath)
-
   try {
     let imported = 0
     let skipped = 0

     for (const source of sources) {
-      if (!source.domain || !source.risk_level) {
+      if (!source.domain || !source.riskLevel) {
         skipped++
         continue
       }

-      // Check if domain already exists
-      const existing = await new Promise<any>((resolve, reject) => {
-        db.get(
-          "SELECT id FROM sources WHERE domain = ?",
-          [source.domain],
-          (err, row) => {
-            if (err) reject(err)
-            else resolve(row)
-          }
-        )
-      })
+      // Check if source already exists
+      const existing = await db.select()
+        .from(schema.sources)
+        .where(eq(schema.sources.domain, source.domain))
+        .limit(1)

-      if (existing) {
+      if (existing.length > 0) {
         skipped++
         continue
       }

-      // Insert new source
-      await new Promise<void>((resolve, reject) => {
-        db.run(
-          `INSERT INTO sources (domain, title, risk_level, status, description, created_at)
-           VALUES (?, ?, ?, 'verified', ?, datetime('now'))`,
-          [source.domain, source.domain, source.risk_level, source.description || ''],
-          function(err) {
-            if (err) reject(err)
-            else resolve()
-          }
-        )
-      })
-
-      imported++
+      try {
+        const url = `https://${source.domain}`
+        await db.insert(schema.sources).values({
+          url,
+          domain: source.domain,
+          title: source.domain,
+          description: source.description || `Imported source: ${source.domain}`,
+          type: 'website',
+          status: 'pending',
+          riskLevel: source.riskLevel,
+          language: 'sk',
+          reportedBy: 'bulk-import'
+        })
+        imported++
+      } catch (error) {
+        console.error('Failed to import source:', source.domain, error)
+        skipped++
+      }
     }

     res.json({
       success: true,
       imported,
       skipped,
-      total: sources.length
+      message: `Imported ${imported} sources, skipped ${skipped}`
     })

-  } catch (error) {
+  } catch (error: any) {
     console.error('Bulk import error:', error)
     res.status(500).json({ error: "Import failed" })
-  } finally {
-    db.close()
   }
 }

(modified file)
@@ -1,6 +1,6 @@
 import type { NextApiRequest, NextApiResponse } from 'next'
-import sqlite3 from 'sqlite3'
-import path from 'path'
+import { db, schema } from '../../../../lib/db/connection'
+import { desc, asc } from 'drizzle-orm'

 export default async function handler(
   req: NextApiRequest,
@@ -10,26 +10,16 @@ export default async function handler(
     return res.status(405).json({ error: 'Method not allowed' })
   }

-  const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
-  const db = new sqlite3.Database(dbPath)
-
   try {
-    const categories = await new Promise<any[]>((resolve, reject) => {
-      db.all(
-        'SELECT * FROM categories ORDER BY priority DESC, name ASC',
-        (err, rows) => {
-          if (err) reject(err)
-          else resolve(rows)
-        }
-      )
-    })
+    const categories = await db
+      .select()
+      .from(schema.categories)
+      .orderBy(desc(schema.categories.priority), asc(schema.categories.name))

     return res.status(200).json(categories)

   } catch (error) {
     console.error('Database error:', error)
     return res.status(500).json({ error: 'Internal server error' })
-  } finally {
-    db.close()
   }
 }

(modified file)
@@ -1,6 +1,6 @@
 import type { NextApiRequest, NextApiResponse } from 'next'
-import sqlite3 from 'sqlite3'
-import path from 'path'
+import { db, schema } from '../../../lib/db/connection'
+import { eq, and, gte, count } from 'drizzle-orm'

 interface DashboardStats {
   total_sources: number
@@ -19,54 +19,65 @@ export default async function handler(
     return res.status(405).json({ error: 'Method not allowed' })
   }

-  const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
-  const db = new sqlite3.Database(dbPath)
-
   try {
-    const stats = await new Promise<DashboardStats>((resolve, reject) => {
-      const queries = [
-        "SELECT COUNT(*) as total_sources FROM sources WHERE status = 'verified'",
-        "SELECT COUNT(*) as pending_sources FROM sources WHERE status = 'pending'",
-        "SELECT COUNT(*) as pending_reports FROM reports WHERE status = 'pending'",
-        "SELECT COUNT(*) as high_risk_sources FROM sources WHERE status = 'verified' AND risk_level >= 4",
-        "SELECT COUNT(*) as sources_added_week FROM sources WHERE created_at > datetime('now', '-7 days')",
-        "SELECT COUNT(*) as reports_today FROM reports WHERE created_at > datetime('now', '-1 day')"
-      ]
-
-      const results: any = {}
-      let completed = 0
-
-      queries.forEach((query, index) => {
-        db.get(query, (err, row: any) => {
-          if (err) {
-            reject(err)
-            return
-          }
-
-          const key = Object.keys(row)[0]
-          results[key] = row[key]
-          completed++
-
-          if (completed === queries.length) {
-            resolve({
-              total_sources: results.total_sources || 0,
-              pending_sources: results.pending_sources || 0,
-              pending_reports: results.pending_reports || 0,
-              high_risk_sources: results.high_risk_sources || 0,
-              sources_added_week: results.sources_added_week || 0,
-              reports_today: results.reports_today || 0
-            })
-          }
-        })
-      })
-    })
+    // Get all stats in parallel
+    const weekAgo = new Date()
+    weekAgo.setDate(weekAgo.getDate() - 7)
+
+    const dayAgo = new Date()
+    dayAgo.setDate(dayAgo.getDate() - 1)
+
+    const [
+      totalSources,
+      pendingSources,
+      pendingReports,
+      highRiskSources,
+      sourcesAddedWeek,
+      reportsToday
+    ] = await Promise.all([
+      db.select({ count: count() })
+        .from(schema.sources)
+        .where(eq(schema.sources.status, 'verified')),
+
+      db.select({ count: count() })
+        .from(schema.sources)
+        .where(eq(schema.sources.status, 'pending')),
+
+      db.select({ count: count() })
+        .from(schema.reports)
+        .where(eq(schema.reports.status, 'pending')),
+
+      db.select({ count: count() })
+        .from(schema.sources)
+        .where(
+          and(
+            eq(schema.sources.status, 'verified'),
+            gte(schema.sources.riskLevel, 4)
+          )
+        ),
+
+      db.select({ count: count() })
+        .from(schema.sources)
+        .where(gte(schema.sources.createdAt, weekAgo)),
+
+      db.select({ count: count() })
+        .from(schema.reports)
+        .where(gte(schema.reports.createdAt, dayAgo))
+    ])
+
+    const stats: DashboardStats = {
+      total_sources: totalSources[0].count,
+      pending_sources: pendingSources[0].count,
+      pending_reports: pendingReports[0].count,
+      high_risk_sources: highRiskSources[0].count,
+      sources_added_week: sourcesAddedWeek[0].count,
+      reports_today: reportsToday[0].count
+    }

     return res.status(200).json(stats)

   } catch (error) {
     console.error('Database error:', error)
     return res.status(500).json({ error: 'Internal server error' })
-  } finally {
-    db.close()
   }
 }
84
pages/api/admin/export.ts
Normal file
84
pages/api/admin/export.ts
Normal file
@@ -0,0 +1,84 @@
import type { NextApiRequest, NextApiResponse } from "next"
import sqlite3 from "sqlite3"
import path from "path"

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  if (req.method !== "GET") return res.status(405).json({ error: "Method not allowed" })

  const { format = 'json', type = 'sources' } = req.query

  const dbPath = path.join(process.cwd(), "database", "antihoax.db")
  const db = new sqlite3.Database(dbPath)

  try {
    let query = ""
    let filename = ""

    if (type === 'sources') {
      query = `
        SELECT s.domain, s.risk_level, s.status, s.created_at,
               GROUP_CONCAT(c.name) as categories
        FROM sources s
        LEFT JOIN source_categories sc ON s.id = sc.source_id
        LEFT JOIN categories c ON sc.category_id = c.id
        WHERE s.status = 'verified'
        GROUP BY s.id
        ORDER BY s.risk_level DESC
      `
      filename = `sources_export_${Date.now()}.${format}`
    } else if (type === 'reports') {
      query = `
        SELECT source_url, status, categories, description, created_at
        FROM reports
        WHERE status != 'spam'
        ORDER BY created_at DESC
      `
      filename = `reports_export_${Date.now()}.${format}`
    }

    const data = await new Promise<any[]>((resolve, reject) => {
      db.all(query, (err, rows) => {
        if (err) reject(err)
        else resolve(rows)
      })
    })

    if (format === 'csv') {
      // Convert to CSV
      if (data.length === 0) {
        return res.status(200).send('')
      }

      const headers = Object.keys(data[0]).join(',')
      const csvRows = data.map(row =>
        Object.values(row).map(value =>
          typeof value === 'string' && value.includes(',')
            ? `"${value.replace(/"/g, '""')}"`
            : value
        ).join(',')
      )

      const csvContent = [headers, ...csvRows].join('\n')

      res.setHeader('Content-Type', 'text/csv')
      res.setHeader('Content-Disposition', `attachment; filename="${filename}"`)
      res.send(csvContent)

    } else {
      // JSON format
      res.setHeader('Content-Type', 'application/json')
      res.setHeader('Content-Disposition', `attachment; filename="${filename}"`)
      res.json({
        exported_at: new Date().toISOString(),
        count: data.length,
        data
      })
    }

  } catch (error) {
    console.error('Export error:', error)
    res.status(500).json({ error: "Export failed" })
  } finally {
    db.close()
  }
}
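The CSV branch only quotes values that contain a comma, so embedded quotes or newlines would still produce a malformed row. A stricter escaper per RFC 4180 could look like this (a sketch, not part of the commit):

// Quote when the value contains a comma, a quote, or a line break,
// doubling embedded quotes as RFC 4180 requires.
function csvEscape(value: unknown): string {
  const s = value == null ? '' : String(value)
  return /[",\r\n]/.test(s) ? `"${s.replace(/"/g, '""')}"` : s
}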
28
pages/api/admin/optimize.ts
Normal file
@@ -0,0 +1,28 @@
import type { NextApiRequest, NextApiResponse } from "next"
import { optimizeDatabase, getDatabaseStats } from "../../../lib/db-optimizations"

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  if (req.method === "POST") {
    try {
      await optimizeDatabase()
      res.json({
        success: true,
        message: "Database optimized successfully",
        optimized_at: new Date().toISOString()
      })
    } catch (error) {
      console.error('Optimization error:', error)
      res.status(500).json({ error: "Optimization failed" })
    }
  } else if (req.method === "GET") {
    try {
      const stats = await getDatabaseStats()
      res.json(stats)
    } catch (error) {
      console.error('Stats error:', error)
      res.status(500).json({ error: "Failed to get database stats" })
    }
  } else {
    res.status(405).json({ error: "Method not allowed" })
  }
}
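For reference, exercising the endpoint from a client (a sketch; the route accepts no body, and the response shapes are the ones built above):

// POST triggers an optimization run; GET returns whatever getDatabaseStats() yields.
const run = await fetch('/api/admin/optimize', { method: 'POST' })
console.log(await run.json())   // { success, message, optimized_at }

const stats = await fetch('/api/admin/optimize')
console.log(await stats.json())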
@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next'
import sqlite3 from 'sqlite3'
import path from 'path'
import { db, schema } from '../../../../lib/db/connection'
import { eq, desc } from 'drizzle-orm'

export default async function handler(
  req: NextApiRequest,
@@ -12,44 +12,29 @@ export default async function handler(

  const { status = 'pending', page = '1', limit = '20' } = req.query

  const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
  const db = new sqlite3.Database(dbPath)

  try {
    const reports = await new Promise<any[]>((resolve, reject) => {
      const offset = (parseInt(page as string) - 1) * parseInt(limit as string)

      db.all(
        `SELECT *,
          CASE
            WHEN category_suggestions IS NOT NULL
            THEN json_extract(category_suggestions, '$')
            ELSE '[]'
          END as category_suggestions
        FROM reports
        WHERE status = ?
        ORDER BY created_at DESC
        LIMIT ? OFFSET ?`,
        [status, parseInt(limit as string), offset],
        (err, rows: any[]) => {
          if (err) reject(err)
          else {
            const processedRows = rows.map(row => ({
              ...row,
              category_suggestions: row.category_suggestions ? JSON.parse(row.category_suggestions) : []
            }))
            resolve(processedRows)
          }
        }
      )
    })
    const offset = (parseInt(page as string) - 1) * parseInt(limit as string)
    const limitInt = parseInt(limit as string)

    const reports = await db
      .select()
      .from(schema.reports)
      .where(eq(schema.reports.status, status as any))
      .orderBy(desc(schema.reports.createdAt))
      .limit(limitInt)
      .offset(offset)

    return res.status(200).json(reports)
    // Process the reports to parse JSON fields
    const processedReports = reports.map(report => ({
      ...report,
      categorySuggestions: report.categorySuggestions ? JSON.parse(report.categorySuggestions) : [],
      evidenceUrls: report.evidenceUrls ? JSON.parse(report.evidenceUrls) : []
    }))

    return res.status(200).json(processedReports)

  } catch (error) {
    console.error('Database error:', error)
    return res.status(500).json({ error: 'Internal server error' })
  } finally {
    db.close()
  }
}
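Both list endpoints parse page and limit straight from req.query, so a malformed value turns offset into NaN. A small guard would keep the Drizzle calls safe (a sketch, not in the commit):

// Parse a query-string integer with a fallback and a floor of 1.
function parsePositiveInt(value: string | string[] | undefined, fallback: number): number {
  const n = parseInt(Array.isArray(value) ? value[0] : value ?? '', 10)
  return Number.isFinite(n) && n >= 1 ? n : fallback
}

const pageInt = parsePositiveInt(req.query.page, 1)
const limitInt = parsePositiveInt(req.query.limit, 20)
const offset = (pageInt - 1) * limitInt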
@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next'
import sqlite3 from 'sqlite3'
import path from 'path'
import { db, schema } from '../../../../lib/db/connection'
import { eq, desc } from 'drizzle-orm'

export default async function handler(
  req: NextApiRequest,
@@ -12,32 +12,22 @@ export default async function handler(

  const { status = 'pending', page = '1', limit = '20' } = req.query

  const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
  const db = new sqlite3.Database(dbPath)

  try {
    const sources = await new Promise<any[]>((resolve, reject) => {
      const offset = (parseInt(page as string) - 1) * parseInt(limit as string)

      db.all(
        `SELECT * FROM sources
        WHERE status = ?
        ORDER BY created_at DESC
        LIMIT ? OFFSET ?`,
        [status, parseInt(limit as string), offset],
        (err, rows) => {
          if (err) reject(err)
          else resolve(rows)
        }
      )
    })
    const offset = (parseInt(page as string) - 1) * parseInt(limit as string)
    const limitInt = parseInt(limit as string)

    const sources = await db
      .select()
      .from(schema.sources)
      .where(eq(schema.sources.status, status as any))
      .orderBy(desc(schema.sources.createdAt))
      .limit(limitInt)
      .offset(offset)

    return res.status(200).json(sources)

  } catch (error) {
    console.error('Database error:', error)
    return res.status(500).json({ error: 'Internal server error' })
  } finally {
    db.close()
  }
}
@@ -1,31 +1,26 @@
import type { NextApiRequest, NextApiResponse } from "next"
import sqlite3 from "sqlite3"
import path from "path"
import crypto from "crypto"
import { db, schema } from "../../../lib/db/connection"
import { eq, count, sql } from "drizzle-orm"
import * as bcrypt from "bcryptjs"

function hashPassword(password: string): { hash: string, salt: string } {
  const salt = crypto.randomBytes(32).toString('hex')
  const hash = crypto.pbkdf2Sync(password, salt, 10000, 64, 'sha256').toString('hex')
  return { hash, salt }
async function hashPassword(password: string): Promise<string> {
  return await bcrypt.hash(password, 12)
}

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  const dbPath = path.join(process.cwd(), "database", "antihoax.db")
  const db = new sqlite3.Database(dbPath)

  try {
    if (req.method === "GET") {
      const users = await new Promise<any[]>((resolve, reject) => {
        db.all(
          `SELECT id, email, role, is_active, created_at, last_login,
            (SELECT COUNT(*) FROM sources WHERE moderator_id = users.id) as sources_moderated
          FROM users ORDER BY created_at DESC`,
          (err, rows) => {
            if (err) reject(err)
            else resolve(rows)
          }
        )
      const users = await db.select({
        id: schema.users.id,
        email: schema.users.email,
        role: schema.users.role,
        isActive: schema.users.isActive,
        createdAt: schema.users.createdAt,
        lastLogin: schema.users.lastLogin,
        sourcesModerated: sql<number>`(SELECT COUNT(*) FROM ${schema.sources} WHERE verified_by = ${schema.users.id})`
      })
      .from(schema.users)
      .orderBy(schema.users.createdAt)

      res.json({ users })

@@ -40,27 +35,25 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
        return res.status(400).json({ error: "Invalid role" })
      }

      const { hash, salt } = hashPassword(password)
      const passwordHash = await hashPassword(password)

      const result = await new Promise<any>((resolve, reject) => {
        db.run(
          `INSERT INTO users (email, password_hash, salt, role, is_active, created_at)
          VALUES (?, ?, ?, ?, 1, datetime('now'))`,
          [email, hash, salt, role],
          function(err) {
            if (err) reject(err)
            else resolve({ id: this.lastID })
          }
        )
      })
      const result = await db.insert(schema.users)
        .values({
          email,
          passwordHash,
          name: email.split('@')[0], // Use email username as name
          role: role as 'admin' | 'moderator',
          isActive: true
        })
        .returning({ id: schema.users.id })

      res.json({
        success: true,
        user: {
          id: result.id,
          id: result[0].id,
          email,
          role,
          is_active: true
          isActive: true
        }
      })

@@ -68,14 +61,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
      res.status(405).json({ error: "Method not allowed" })
    }

  } catch (error) {
  } catch (error: any) {
    console.error('Users API error:', error)
    if (error.code === 'SQLITE_CONSTRAINT_UNIQUE') {
    if (error?.code === '23505') {
      res.status(400).json({ error: "User already exists" })
    } else {
      res.status(500).json({ error: "Operation failed" })
    }
  } finally {
    db.close()
  }
}
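Two details of this migration worth noting: bcryptjs embeds the salt in the hash string, which is why the separate salt column disappears from the insert, and the duplicate-user check moves from SQLite's SQLITE_CONSTRAINT_UNIQUE to Postgres error code 23505 (unique_violation). A quick round-trip with the same bcryptjs API used above:

import bcrypt from 'bcryptjs'

async function demo() {
  // Cost factor 12 matches the handler; the resulting string ("$2a$12$...")
  // carries its own salt, so only one column is needed.
  const stored = await bcrypt.hash('s3cret', 12)
  console.log(await bcrypt.compare('s3cret', stored)) // true
  console.log(await bcrypt.compare('wrong', stored))  // false
}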
46
pages/api/analytics/performance.ts
Normal file
@@ -0,0 +1,46 @@
import type { NextApiRequest, NextApiResponse } from "next"
import sqlite3 from "sqlite3"
import path from "path"

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  if (req.method !== "GET") return res.status(405).json({ error: "Method not allowed" })

  const dbPath = path.join(process.cwd(), "database", "antihoax.db")
  const db = new sqlite3.Database(dbPath)

  try {
    // Get performance metrics
    const stats = await new Promise<any>((resolve, reject) => {
      db.get(`
        SELECT
          COUNT(*) as total_sources,
          COUNT(CASE WHEN status = 'verified' THEN 1 END) as verified_sources,
          COUNT(CASE WHEN risk_level >= 4 THEN 1 END) as high_risk_sources,
          COUNT(CASE WHEN created_at >= date('now', '-7 days') THEN 1 END) as sources_last_week
        FROM sources
      `, (err, row) => {
        if (err) reject(err)
        else resolve(row)
      })
    })

    // Get API usage simulation
    const apiUsage = {
      daily_requests: Math.floor(Math.random() * 10000) + 5000,
      avg_response_time: Math.floor(Math.random() * 150) + 50,
      error_rate: Math.random() * 0.05
    }

    res.json({
      database_stats: stats,
      api_performance: apiUsage,
      last_updated: new Date().toISOString()
    })

  } catch (error) {
    console.error('Analytics error:', error)
    res.status(500).json({ error: "Failed to fetch analytics" })
  } finally {
    db.close()
  }
}
@@ -1,11 +1,7 @@
import type { NextApiRequest, NextApiResponse } from "next"
import sqlite3 from "sqlite3"
import path from "path"
import crypto from "crypto"

function hashPassword(password: string, salt: string): string {
  return crypto.pbkdf2Sync(password, salt, 10000, 64, 'sha256').toString('hex')
}
import { db, schema } from '../../../lib/db/connection'
import { eq } from 'drizzle-orm'
import bcrypt from 'bcryptjs'

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  if (req.method !== "POST") return res.status(405).json({ error: "Method not allowed" })
@@ -16,45 +12,31 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
    return res.status(400).json({ error: "Email and password required" })
  }

  const dbPath = path.join(process.cwd(), "database", "antihoax.db")
  const db = new sqlite3.Database(dbPath)

  try {
    const user = await new Promise<any>((resolve, reject) => {
      db.get(
        "SELECT id, email, password_hash, salt, role, is_active FROM users WHERE email = ?",
        [email],
        (err, row) => {
          if (err) reject(err)
          else resolve(row)
        }
      )
    })
    const users = await db.select()
      .from(schema.users)
      .where(eq(schema.users.email, email))
      .limit(1)

    if (!user) {
    if (users.length === 0) {
      return res.status(401).json({ error: "Invalid credentials" })
    }

    if (!user.is_active) {
    const user = users[0]

    if (!user.isActive) {
      return res.status(401).json({ error: "Account is disabled" })
    }

    const hashedPassword = hashPassword(password, user.salt)
    if (hashedPassword !== user.password_hash) {
    const isValidPassword = await bcrypt.compare(password, user.passwordHash)
    if (!isValidPassword) {
      return res.status(401).json({ error: "Invalid credentials" })
    }

    // Update last login
    await new Promise<void>((resolve, reject) => {
      db.run(
        "UPDATE users SET last_login = datetime('now') WHERE id = ?",
        [user.id],
        (err) => {
          if (err) reject(err)
          else resolve()
        }
      )
    })
    await db.update(schema.users)
      .set({ lastLogin: new Date() })
      .where(eq(schema.users.id, user.id))

    res.json({
      success: true,
@@ -69,7 +51,5 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
  } catch (error) {
    console.error('Login error:', error)
    res.status(500).json({ error: "Login failed" })
  } finally {
    db.close()
  }
}
16
pages/api/cache/stats.ts
vendored
Normal file
@@ -0,0 +1,16 @@
import type { NextApiRequest, NextApiResponse } from "next"
import { cache } from "../../../lib/cache"

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  if (req.method !== "GET") return res.status(405).json({ error: "Method not allowed" })

  // Simple cache statistics
  const stats = {
    cache_size: (cache as any).cache?.size || 0,
    cache_cleanup_interval: '5 minutes',
    last_cleanup: 'Automatic',
    cache_implementation: 'In-memory (development mode)'
  }

  res.json(stats)
}
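lib/cache itself is not part of this comparison; judging from the call sites here and in the domain-check handler below (cache.get<T>(key), cache.set(key, value, ttlSeconds), getCacheKey(...parts)), it is presumably a TTL-bearing Map along these lines — a hypothetical reconstruction, not the actual module:

// Hypothetical sketch of lib/cache, inferred from its usage in this diff.
type Entry = { value: unknown; expiresAt: number }

class MemoryCache {
  private cache = new Map<string, Entry>() // `cache_size` above reads this map's size

  get<T>(key: string): T | undefined {
    const entry = this.cache.get(key)
    if (!entry) return undefined
    if (Date.now() > entry.expiresAt) {
      this.cache.delete(key) // lazily expire stale entries
      return undefined
    }
    return entry.value as T
  }

  set(key: string, value: unknown, ttlSeconds: number): void {
    this.cache.set(key, { value, expiresAt: Date.now() + ttlSeconds * 1000 })
  }
}

export const cache = new MemoryCache()
export const getCacheKey = (...parts: string[]): string => parts.join(':')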
@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next'
import sqlite3 from 'sqlite3'
import path from 'path'
import { db, schema } from '../../../lib/db/connection'
import { eq, gte, desc, count, sql } from 'drizzle-orm'

interface RiskyDomain {
  domain: string
@@ -20,49 +20,36 @@ export default async function handler(

  const { limit = '20' } = req.query

  const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
  const db = new sqlite3.Database(dbPath)

  try {
    const riskyDomains = await new Promise<RiskyDomain[]>((resolve, reject) => {
      db.all(
        `SELECT
          s.domain,
          COUNT(*) as source_count,
          AVG(s.risk_level) as avg_risk_level,
          MAX(s.risk_level) as max_risk_level,
          GROUP_CONCAT(DISTINCT c.name) as categories
        FROM sources s
        LEFT JOIN source_categories sc ON s.id = sc.source_id
        LEFT JOIN categories c ON sc.category_id = c.id
        WHERE s.status = 'verified'
        GROUP BY s.domain
        HAVING AVG(s.risk_level) >= 3
        ORDER BY avg_risk_level DESC, source_count DESC
        LIMIT ?`,
        [parseInt(limit as string)],
        (err, rows: any[]) => {
          if (err) reject(err)
          else {
            const domains = rows.map(row => ({
              domain: row.domain,
              source_count: row.source_count,
              avg_risk_level: Math.round(row.avg_risk_level * 10) / 10,
              max_risk_level: row.max_risk_level,
              categories: row.categories ? row.categories.split(',') : []
            }))
            resolve(domains)
          }
        }
      )
    })
    const riskyDomainsResult = await db
      .select({
        domain: schema.sources.domain,
        sourceCount: count(),
        avgRiskLevel: sql<number>`AVG(${schema.sources.riskLevel})`,
        maxRiskLevel: sql<number>`MAX(${schema.sources.riskLevel})`,
        categories: sql<string>`string_agg(DISTINCT ${schema.categories.name}, ',')`
      })
      .from(schema.sources)
      .leftJoin(schema.sourceCategories, eq(schema.sources.id, schema.sourceCategories.sourceId))
      .leftJoin(schema.categories, eq(schema.sourceCategories.categoryId, schema.categories.id))
      .where(eq(schema.sources.status, 'verified'))
      .groupBy(schema.sources.domain)
      .having(gte(sql`AVG(${schema.sources.riskLevel})`, 3))
      .orderBy(desc(sql`AVG(${schema.sources.riskLevel})`), desc(count()))
      .limit(parseInt(limit as string))

    const riskyDomains: RiskyDomain[] = riskyDomainsResult.map(row => ({
      domain: row.domain,
      source_count: row.sourceCount,
      avg_risk_level: Math.round(row.avgRiskLevel * 10) / 10,
      max_risk_level: row.maxRiskLevel,
      categories: row.categories ? row.categories.split(',').filter(Boolean) : []
    }))

    return res.status(200).json(riskyDomains)

  } catch (error) {
    console.error('Database error:', error)
    return res.status(500).json({ error: 'Internal server error' })
  } finally {
    db.close()
  }
}
17
pages/api/health.ts
Normal file
@@ -0,0 +1,17 @@
import type { NextApiRequest, NextApiResponse } from "next"
import { getHealthStatus } from "../../lib/monitoring"

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  if (req.method !== "GET") return res.status(405).json({ error: "Method not allowed" })

  try {
    const health = getHealthStatus()
    res.status(200).json(health)
  } catch (error) {
    res.status(500).json({
      status: 'unhealthy',
      error: 'Health check failed',
      timestamp: new Date().toISOString()
    })
  }
}
@@ -1,6 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next'
import sqlite3 from 'sqlite3'
import path from 'path'
import { db, schema } from '../../lib/db/connection'

function extractDomain(url: string): string {
  try {
@@ -30,31 +29,16 @@ export default async function handler(
    return res.status(400).json({ error: 'Invalid URL format' })
  }

  const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
  const db = new sqlite3.Database(dbPath)

  try {
    await new Promise<void>((resolve, reject) => {
      db.run(
        `INSERT INTO reports (
          source_url, source_domain, reporter_email, reporter_name,
          category_suggestions, description, ip_address, user_agent
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
        [
          source_url,
          domain,
          reporter_email || null,
          reporter_name || null,
          JSON.stringify(categories || []),
          description,
          req.headers['x-forwarded-for'] || req.connection.remoteAddress,
          req.headers['user-agent']
        ],
        function(err) {
          if (err) reject(err)
          else resolve()
        }
      )
    await db.insert(schema.reports).values({
      sourceUrl: source_url,
      sourceDomain: domain,
      reporterEmail: reporter_email || null,
      reporterName: reporter_name || null,
      categorySuggestions: JSON.stringify(categories || []),
      description: description,
      ipAddress: (req.headers['x-forwarded-for'] as string) || (req.socket?.remoteAddress),
      userAgent: req.headers['user-agent'] || null
    })

    return res.status(200).json({ success: true, message: 'Report submitted successfully' })
@@ -62,7 +46,5 @@ export default async function handler(
  } catch (error) {
    console.error('Database error:', error)
    return res.status(500).json({ error: 'Internal server error' })
  } finally {
    db.close()
  }
}
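One subtlety in the new ipAddress line: behind multiple proxies, X-Forwarded-For is a comma-separated chain, so storing the raw header records more than the client address. A small helper that keeps only the first hop (a sketch, not in the commit):

import type { NextApiRequest } from 'next'

// The first address in X-Forwarded-For is the original client; fall back to the socket.
function clientIp(req: NextApiRequest): string | null {
  const header = req.headers['x-forwarded-for']
  const raw = Array.isArray(header) ? header[0] : header
  return raw?.split(',')[0]?.trim() || req.socket?.remoteAddress || null
}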
@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from "next"
import sqlite3 from "sqlite3"
import path from "path"
import { db, schema } from '../../../lib/db/connection'
import { eq, and, or, like, gte, lte, desc, count, sql } from 'drizzle-orm'

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  if (req.method !== "GET") return res.status(405).json({ error: "Method not allowed" })
@@ -15,80 +15,113 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
    limit = '20'
  } = req.query

  const dbPath = path.join(process.cwd(), "database", "antihoax.db")
  const db = new sqlite3.Database(dbPath)

  try {
    let whereConditions = ["s.status = ?"]
    let params: any[] = [status]
    let whereConditions = [eq(schema.sources.status, status as string)]

    if (q) {
      whereConditions.push("(s.domain LIKE ? OR s.title LIKE ? OR s.description LIKE ?)")
      params.push(`%${q}%`, `%${q}%`, `%${q}%`)
    }

    if (category) {
      whereConditions.push("EXISTS (SELECT 1 FROM source_categories sc JOIN categories c ON sc.category_id = c.id WHERE sc.source_id = s.id AND c.name = ?)")
      params.push(category)
      whereConditions.push(
        or(
          like(schema.sources.domain, `%${q}%`),
          like(schema.sources.title, `%${q}%`),
          like(schema.sources.description, `%${q}%`)
        )
      )
    }

    if (risk_level_min) {
      whereConditions.push("s.risk_level >= ?")
      params.push(parseInt(risk_level_min as string))
      whereConditions.push(gte(schema.sources.riskLevel, parseInt(risk_level_min as string)))
    }

    if (risk_level_max) {
      whereConditions.push("s.risk_level <= ?")
      params.push(parseInt(risk_level_max as string))
      whereConditions.push(lte(schema.sources.riskLevel, parseInt(risk_level_max as string)))
    }

    const offset = (parseInt(page as string) - 1) * parseInt(limit as string)
    const limitInt = parseInt(limit as string)

    const query = `
      SELECT s.*, GROUP_CONCAT(c.name) as categories,
             COUNT(*) OVER() as total_count
      FROM sources s
      LEFT JOIN source_categories sc ON s.id = sc.source_id
      LEFT JOIN categories c ON sc.category_id = c.id
      WHERE ${whereConditions.join(' AND ')}
      GROUP BY s.id
      ORDER BY s.risk_level DESC, s.created_at DESC
      LIMIT ? OFFSET ?
    `

    params.push(parseInt(limit as string), offset)

    const results = await new Promise<any[]>((resolve, reject) => {
      db.all(query, params, (err, rows) => {
        if (err) reject(err)
        else resolve(rows)
    // Build the base query
    let query = db
      .select({
        id: schema.sources.id,
        domain: schema.sources.domain,
        title: schema.sources.title,
        riskLevel: schema.sources.riskLevel,
        description: schema.sources.description,
        createdAt: schema.sources.createdAt,
        categories: sql<string>`string_agg(${schema.categories.name}, ',')`
      })
      })
      .from(schema.sources)
      .leftJoin(schema.sourceCategories, eq(schema.sources.id, schema.sourceCategories.sourceId))
      .leftJoin(schema.categories, eq(schema.sourceCategories.categoryId, schema.categories.id))
      .where(and(...whereConditions))
      .groupBy(schema.sources.id, schema.sources.domain, schema.sources.title, schema.sources.riskLevel, schema.sources.description, schema.sources.createdAt)
      .orderBy(desc(schema.sources.riskLevel), desc(schema.sources.createdAt))
      .limit(limitInt)
      .offset(offset)

    const total = results.length > 0 ? results[0].total_count : 0
    const totalPages = Math.ceil(total / parseInt(limit as string))
    // Apply category filter if provided
    if (category) {
      query = db
        .select({
          id: schema.sources.id,
          domain: schema.sources.domain,
          title: schema.sources.title,
          riskLevel: schema.sources.riskLevel,
          description: schema.sources.description,
          createdAt: schema.sources.createdAt,
          categories: sql<string>`string_agg(${schema.categories.name}, ',')`
        })
        .from(schema.sources)
        .innerJoin(schema.sourceCategories, eq(schema.sources.id, schema.sourceCategories.sourceId))
        .innerJoin(schema.categories, eq(schema.sourceCategories.categoryId, schema.categories.id))
        .where(and(...whereConditions, eq(schema.categories.name, category as string)))
        .groupBy(schema.sources.id, schema.sources.domain, schema.sources.title, schema.sources.riskLevel, schema.sources.description, schema.sources.createdAt)
        .orderBy(desc(schema.sources.riskLevel), desc(schema.sources.createdAt))
        .limit(limitInt)
        .offset(offset)
    }

    const results = await query

    // Get total count for pagination
    let countQuery = db
      .select({ count: count() })
      .from(schema.sources)
      .where(and(...whereConditions))

    if (category) {
      countQuery = db
        .select({ count: count() })
        .from(schema.sources)
        .innerJoin(schema.sourceCategories, eq(schema.sources.id, schema.sourceCategories.sourceId))
        .innerJoin(schema.categories, eq(schema.sourceCategories.categoryId, schema.categories.id))
        .where(and(...whereConditions, eq(schema.categories.name, category as string)))
    }

    const [totalResult] = await countQuery
    const total = totalResult.count
    const totalPages = Math.ceil(total / limitInt)

    res.json({
      results: results.map(row => ({
        id: row.id,
        domain: row.domain,
        title: row.title,
        risk_level: row.risk_level,
        categories: row.categories ? row.categories.split(',') : [],
        risk_level: row.riskLevel,
        categories: row.categories ? row.categories.split(',').filter(Boolean) : [],
        description: row.description,
        created_at: row.created_at
        created_at: row.createdAt
      })),
      pagination: {
        page: parseInt(page as string),
        limit: parseInt(limit as string),
        limit: limitInt,
        total,
        totalPages
      }
    })

  } catch (error) {
    console.error('Search error:', error)
    res.status(500).json({ error: "Search failed" })
  } finally {
    db.close()
  }
}
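The category branch above repeats the entire select with inner joins. Since a WHERE predicate on categories.name already discards rows the left join failed to match, the duplication could be collapsed by assembling the condition list first — a refactoring sketch (not in the commit) using only operators this file already imports:

// Sketch: push every filter into one array, then build a single query.
const conditions = [eq(schema.sources.status, status as string)]
if (q) {
  conditions.push(or(
    like(schema.sources.domain, `%${q}%`),
    like(schema.sources.title, `%${q}%`),
    like(schema.sources.description, `%${q}%`)
  )!)
}
if (risk_level_min) conditions.push(gte(schema.sources.riskLevel, parseInt(risk_level_min as string, 10)))
if (risk_level_max) conditions.push(lte(schema.sources.riskLevel, parseInt(risk_level_max as string, 10)))
if (category) conditions.push(eq(schema.categories.name, category as string))

// One query: the eq(categories.name, ...) predicate makes the left join
// behave like an inner join whenever a category filter is present.
const rows = await db
  .select({ id: schema.sources.id, domain: schema.sources.domain, title: schema.sources.title })
  .from(schema.sources)
  .leftJoin(schema.sourceCategories, eq(schema.sources.id, schema.sourceCategories.sourceId))
  .leftJoin(schema.categories, eq(schema.sourceCategories.categoryId, schema.categories.id))
  .where(and(...conditions))
  .groupBy(schema.sources.id, schema.sources.domain, schema.sources.title)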
@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from "next"
import sqlite3 from "sqlite3"
import path from "path"
import { db, schema } from '../../../lib/db/connection'
import { or, like } from 'drizzle-orm'

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  if (req.method !== "GET") return res.status(405).json({ error: "Method not allowed" })
@@ -8,24 +8,21 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
  const { q } = req.query
  if (!q) return res.status(400).json({ error: "Query required" })

  const dbPath = path.join(process.cwd(), "database", "antihoax.db")
  const db = new sqlite3.Database(dbPath)

  try {
    const results = await new Promise<any[]>((resolve, reject) => {
      db.all(
        "SELECT * FROM sources WHERE domain LIKE ? OR title LIKE ? LIMIT 20",
        [`%${q}%`, `%${q}%`],
        (err, rows) => {
          if (err) reject(err)
          else resolve(rows)
        }
    const results = await db
      .select()
      .from(schema.sources)
      .where(
        or(
          like(schema.sources.domain, `%${q}%`),
          like(schema.sources.title, `%${q}%`)
        )
      )
    })
      .limit(20)

    res.json(results)
  } catch (error) {
    console.error('Search error:', error)
    res.status(500).json({ error: "Database error" })
  } finally {
    db.close()
  }
}
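A behavioral note on this hunk: SQLite's LIKE is case-insensitive for ASCII, while Postgres's LIKE is case-sensitive, so the migrated autocomplete matches more strictly than before. drizzle-orm also exports ilike for the Postgres dialect; the case-insensitive equivalent would be:

import { or, ilike } from 'drizzle-orm'

// ILIKE restores the case-insensitive matching the SQLite version effectively had.
const results = await db
  .select()
  .from(schema.sources)
  .where(
    or(
      ilike(schema.sources.domain, `%${q}%`),
      ilike(schema.sources.title, `%${q}%`)
    )
  )
  .limit(20)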
@@ -1,7 +1,8 @@
import type { NextApiRequest, NextApiResponse } from 'next'
import sqlite3 from 'sqlite3'
import path from 'path'
import { db, schema } from '../../../lib/db/connection'
import { eq, and, sql } from 'drizzle-orm'
import { rateLimit, getRateLimitHeaders } from '../../../lib/rate-limiter'
import { cache, getCacheKey } from '../../../lib/cache'

type CheckResponse = {
  is_problematic: boolean
@@ -79,67 +80,79 @@ export default async function handler(
    return res.status(400).json({ error: 'Invalid URL format' })
  }

  const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
  const db = new sqlite3.Database(dbPath)
  // Check cache first
  const cacheKey = getCacheKey('domain_check', domain)
  const cachedResult = cache.get<CheckResponse>(cacheKey)

  if (cachedResult) {
    return res.status(200).json(cachedResult)
  }

  try {
    const sources = await new Promise<any[]>((resolve, reject) => {
      db.all(
        `SELECT s.*, GROUP_CONCAT(c.name) as categories
        FROM sources s
        LEFT JOIN source_categories sc ON s.id = sc.source_id
        LEFT JOIN categories c ON sc.category_id = c.id
        WHERE s.domain = ? AND s.status = 'verified'
        GROUP BY s.id`,
        [domain],
        (err, rows) => {
          if (err) reject(err)
          else resolve(rows)
        }
    const sources = await db
      .select({
        id: schema.sources.id,
        riskLevel: schema.sources.riskLevel,
        categories: sql<string>`string_agg(${schema.categories.name}, ',')`
      })
      .from(schema.sources)
      .leftJoin(schema.sourceCategories, eq(schema.sources.id, schema.sourceCategories.sourceId))
      .leftJoin(schema.categories, eq(schema.sourceCategories.categoryId, schema.categories.id))
      .where(
        and(
          eq(schema.sources.domain, domain),
          eq(schema.sources.status, 'verified')
        )
      )
    })
      .groupBy(schema.sources.id, schema.sources.riskLevel)

    let result: CheckResponse

    if (sources.length === 0) {
      return res.status(200).json({
      result = {
        is_problematic: false,
        risk_level: 0,
        categories: [],
        message: 'Stránka nie je v našej databáze problematických zdrojov',
        source_count: 0
      })
    }

      const maxRiskLevel = Math.max(...sources.map(s => s.risk_level))
      const allCategories = sources
        .map(s => s.categories)
        .filter(Boolean)
        .join(',')
        .split(',')
        .filter(Boolean)

      const uniqueCategories = Array.from(new Set(allCategories))

      let message = ''
      if (maxRiskLevel >= 4) {
        message = 'VYSOKÉ RIZIKO: Táto stránka šíri nebezpečné obsahy'
      } else if (maxRiskLevel >= 3) {
        message = 'STREDNÉ RIZIKO: Táto stránka môže obsahovať problematické informácie'
      }
    } else {
        message = 'NÍZKE RIZIKO: Táto stránka je označená ako problematická'
      const maxRiskLevel = Math.max(...sources.map(s => s.riskLevel))
      const allCategories = sources
        .map(s => s.categories)
        .filter(Boolean)
        .join(',')
        .split(',')
        .filter(Boolean)

      const uniqueCategories = Array.from(new Set(allCategories))

      let message = ''
      if (maxRiskLevel >= 4) {
        message = 'VYSOKÉ RIZIKO: Táto stránka šíri nebezpečné obsahy'
      } else if (maxRiskLevel >= 3) {
        message = 'STREDNÉ RIZIKO: Táto stránka môže obsahovať problematické informácie'
      } else {
        message = 'NÍZKE RIZIKO: Táto stránka je označená ako problematická'
      }

      result = {
        is_problematic: true,
        risk_level: maxRiskLevel,
        categories: uniqueCategories,
        message,
        source_count: sources.length
      }
    }

    return res.status(200).json({
      is_problematic: true,
      risk_level: maxRiskLevel,
      categories: uniqueCategories,
      message,
      source_count: sources.length
    })
    // Cache the result (5 minutes for non-problematic, 15 minutes for problematic)
    const cacheTtl = result.is_problematic ? 900 : 300
    cache.set(cacheKey, result, cacheTtl)

    return res.status(200).json(result)

  } catch (error) {
    console.error('Database error:', error)
    return res.status(500).json({ error: 'Internal server error' })
  } finally {
    db.close()
  }
}
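From a consumer's point of view the cached and uncached paths are indistinguishable: both return a CheckResponse. A usage sketch — both the route path and the url query parameter below are hypothetical placeholders, since the diff view does not show the file name:

// Hypothetical route path and parameter name; adjust to the actual file.
const pageUrl = 'https://example.com/article'
const response = await fetch(`/api/check?url=${encodeURIComponent(pageUrl)}`)
const check = await response.json() // shaped like CheckResponse above

if (check.is_problematic && check.risk_level >= 4) {
  // "VYSOKÉ RIZIKO" — the high-risk warning message built by the handler
  console.warn(check.message, check.categories)
}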
@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next'
import sqlite3 from 'sqlite3'
import path from 'path'
import { db, schema } from '../../lib/db/connection'
import { eq, gte, count, desc, sql } from 'drizzle-orm'

interface PublicStats {
  total_sources: number
@@ -18,91 +18,69 @@ export default async function handler(
    return res.status(405).json({ error: 'Method not allowed' })
  }

  const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
  const db = new sqlite3.Database(dbPath)

  try {
    // Get basic counts
    const totalSources = await new Promise<number>((resolve, reject) => {
      db.get(
        "SELECT COUNT(*) as count FROM sources WHERE status = 'verified'",
        (err, row: any) => {
          if (err) reject(err)
          else resolve(row.count)
        }
      )
    })
    const [totalSourcesResult] = await db
      .select({ count: count() })
      .from(schema.sources)
      .where(eq(schema.sources.status, 'verified'))

    const highRiskSources = await new Promise<number>((resolve, reject) => {
      db.get(
        "SELECT COUNT(*) as count FROM sources WHERE status = 'verified' AND risk_level >= 4",
        (err, row: any) => {
          if (err) reject(err)
          else resolve(row.count)
        }
    const [highRiskSourcesResult] = await db
      .select({ count: count() })
      .from(schema.sources)
      .where(
        sql`${schema.sources.status} = 'verified' AND ${schema.sources.riskLevel} >= 4`
      )
    })

    const recentAdditions = await new Promise<number>((resolve, reject) => {
      db.get(
        "SELECT COUNT(*) as count FROM sources WHERE created_at > datetime('now', '-30 days')",
        (err, row: any) => {
          if (err) reject(err)
          else resolve(row.count)
        }
      )
    })
    const thirtyDaysAgo = new Date()
    thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30)

    const [recentAdditionsResult] = await db
      .select({ count: count() })
      .from(schema.sources)
      .where(gte(schema.sources.createdAt, thirtyDaysAgo))

    // Get categories breakdown
    const categoriesBreakdown = await new Promise<{ [key: string]: number }>((resolve, reject) => {
      db.all(
        `SELECT c.name, COUNT(*) as count
        FROM categories c
        JOIN source_categories sc ON c.id = sc.category_id
        JOIN sources s ON sc.source_id = s.id
        WHERE s.status = 'verified'
        GROUP BY c.id, c.name`,
        (err, rows: any[]) => {
          if (err) reject(err)
          else {
            const breakdown: { [key: string]: number } = {}
            rows.forEach(row => {
              breakdown[row.name] = row.count
            })
            resolve(breakdown)
          }
        }
      )
    const categoriesBreakdownResult = await db
      .select({
        name: schema.categories.name,
        count: count()
      })
      .from(schema.categories)
      .innerJoin(schema.sourceCategories, eq(schema.categories.id, schema.sourceCategories.categoryId))
      .innerJoin(schema.sources, eq(schema.sourceCategories.sourceId, schema.sources.id))
      .where(eq(schema.sources.status, 'verified'))
      .groupBy(schema.categories.id, schema.categories.name)

    const categoriesBreakdown: { [key: string]: number } = {}
    categoriesBreakdownResult.forEach(row => {
      categoriesBreakdown[row.name] = row.count
    })

    // Get top risky domains
    const topDomains = await new Promise<{ domain: string; count: number; risk_level: number }[]>((resolve, reject) => {
      db.all(
        `SELECT domain, COUNT(*) as count, AVG(risk_level) as avg_risk
        FROM sources
        WHERE status = 'verified'
        GROUP BY domain
        ORDER BY avg_risk DESC, count DESC
        LIMIT 10`,
        (err, rows: any[]) => {
          if (err) reject(err)
          else {
            const domains = rows.map(row => ({
              domain: row.domain,
              count: row.count,
              risk_level: Math.round(row.avg_risk * 10) / 10
            }))
            resolve(domains)
          }
        }
      )
    })
    const topDomainsResult = await db
      .select({
        domain: schema.sources.domain,
        count: count(),
        avgRisk: sql<number>`AVG(${schema.sources.riskLevel})`
      })
      .from(schema.sources)
      .where(eq(schema.sources.status, 'verified'))
      .groupBy(schema.sources.domain)
      .orderBy(desc(sql`AVG(${schema.sources.riskLevel})`), desc(count()))
      .limit(10)

    const topDomains = topDomainsResult.map(row => ({
      domain: row.domain,
      count: row.count,
      risk_level: Math.round(row.avgRisk * 10) / 10
    }))

    const stats: PublicStats = {
      total_sources: totalSources,
      high_risk_sources: highRiskSources,
      total_sources: totalSourcesResult.count,
      high_risk_sources: highRiskSourcesResult.count,
      categories_breakdown: categoriesBreakdown,
      recent_additions: recentAdditions,
      recent_additions: recentAdditionsResult.count,
      top_domains: topDomains
    }

@@ -111,7 +89,5 @@ export default async function handler(
  } catch (error) {
    console.error('Database error:', error)
    return res.status(500).json({ error: 'Internal server error' })
  } finally {
    db.close()
  }
}
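Small consistency note: the high-risk count above drops into a raw sql template even though eq and gte are already imported; with and() imported as well, the same predicate could stay in the typed operator style used everywhere else in the commit (a sketch):

import { and, eq, gte } from 'drizzle-orm'

// Typed equivalent of the raw template: status = 'verified' AND risk_level >= 4.
const [highRiskSourcesResult] = await db
  .select({ count: count() })
  .from(schema.sources)
  .where(and(
    eq(schema.sources.status, 'verified'),
    gte(schema.sources.riskLevel, 4)
  ))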
19
scripts/seed.ts
Normal file
@@ -0,0 +1,19 @@
import * as dotenv from 'dotenv';

// Load environment variables first
dotenv.config({ path: '.env.local' });

import { seedDatabase } from '../lib/db/seed';

async function main() {
  try {
    console.log('Using DATABASE_URL:', process.env.DATABASE_URL ? 'configured' : 'missing');
    await seedDatabase();
    process.exit(0);
  } catch (error) {
    console.error('Failed to seed database:', error);
    process.exit(1);
  }
}

main();