Migrate from SQLite to PostgreSQL with Drizzle ORM

- Updated all packages to latest versions (React 19, Next.js 14.2.32)
- Replaced sqlite3 with pg and drizzle-orm dependencies
- Created complete PostgreSQL schema with relationships and indexes
- Migrated all API endpoints from SQLite to Drizzle queries
- Added database seeding with sample data
- Updated authentication to use bcrypt instead of pbkdf2
- Configured connection pooling for PostgreSQL
- Updated app version to 1.0.0
- All endpoints tested and working correctly
This commit is contained in:
2025-09-06 12:56:33 +02:00
parent 52bde64e7f
commit 860070a302
26 changed files with 2526 additions and 2403 deletions

13
drizzle.config.ts Normal file
View File

@@ -0,0 +1,13 @@
import { defineConfig } from 'drizzle-kit';
import * as dotenv from 'dotenv';

// Load local environment variables (DATABASE_URL) before reading them below.
dotenv.config({ path: '.env.local' });

// Fail fast with a clear error instead of smuggling a possibly-undefined value
// past the compiler with a non-null assertion; mirrors the guard in
// lib/db/connection.ts so both entry points report misconfiguration the same way.
const databaseUrl = process.env.DATABASE_URL;
if (!databaseUrl) {
  throw new Error('DATABASE_URL environment variable is required');
}

/**
 * drizzle-kit configuration: where the schema lives, where generated
 * migrations go, and the PostgreSQL connection credentials.
 */
export default defineConfig({
  schema: './lib/db/schema.ts',
  out: './drizzle',
  dialect: 'postgresql',
  dbCredentials: {
    url: databaseUrl,
  },
});

View File

@@ -1,16 +1,16 @@
import sqlite3 from 'sqlite3'
import path from 'path'
import crypto from 'crypto' import crypto from 'crypto'
import { db, schema } from './db/connection'
import { eq, and } from 'drizzle-orm'
export interface ApiKey { export interface ApiKey {
id: number id: number
key_hash: string keyHash: string
name: string name: string
permissions: string[] permissions: string[]
rate_limit: number rateLimit: number
is_active: boolean isActive: boolean
last_used?: string lastUsed?: Date
created_at: string createdAt: Date
} }
export function generateApiKey(): string { export function generateApiKey(): string {
@@ -25,48 +25,38 @@ export async function validateApiKey(key: string): Promise<ApiKey | null> {
if (!key || !key.startsWith('ak_')) return null if (!key || !key.startsWith('ak_')) return null
const keyHash = hashApiKey(key) const keyHash = hashApiKey(key)
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
const db = new sqlite3.Database(dbPath)
try { try {
const apiKey = await new Promise<ApiKey | null>((resolve, reject) => { const apiKeys = await db.select()
db.get( .from(schema.apiKeys)
'SELECT * FROM api_keys WHERE key_hash = ? AND is_active = 1', .where(and(
[keyHash], eq(schema.apiKeys.keyHash, keyHash),
(err, row: any) => { eq(schema.apiKeys.isActive, true)
if (err) reject(err) ))
else if (row) { .limit(1)
resolve({
...row,
permissions: row.permissions ? JSON.parse(row.permissions) : []
})
} else {
resolve(null)
}
}
)
})
if (apiKey) { if (apiKeys.length === 0) return null
// Update last_used timestamp
await new Promise<void>((resolve, reject) => { const apiKey = apiKeys[0]
db.run(
'UPDATE api_keys SET last_used = datetime("now") WHERE id = ?', // Update last_used timestamp
[apiKey.id], await db.update(schema.apiKeys)
(err) => { .set({ lastUsed: new Date() })
if (err) reject(err) .where(eq(schema.apiKeys.id, apiKey.id))
else resolve()
} return {
) id: apiKey.id,
}) keyHash: apiKey.keyHash,
name: apiKey.name,
permissions: apiKey.permissions ? JSON.parse(apiKey.permissions) : [],
rateLimit: apiKey.rateLimit,
isActive: apiKey.isActive,
lastUsed: apiKey.lastUsed,
createdAt: apiKey.createdAt
} }
return apiKey
} catch (error) { } catch (error) {
console.error('API key validation error:', error) console.error('API key validation error:', error)
return null return null
} finally {
db.close()
} }
} }

View File

@@ -1,5 +1,4 @@
import sqlite3 from 'sqlite3' import { db, schema } from './db/connection'
import path from 'path'
export interface AuditLogEntry { export interface AuditLogEntry {
user_id?: number user_id?: number
@@ -11,32 +10,17 @@ export interface AuditLogEntry {
} }
export async function logAuditEvent(entry: AuditLogEntry): Promise<void> { export async function logAuditEvent(entry: AuditLogEntry): Promise<void> {
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
const db = new sqlite3.Database(dbPath)
try { try {
await new Promise<void>((resolve, reject) => { await db.insert(schema.auditLogs).values({
db.run( userId: entry.user_id || null,
`INSERT INTO audit_logs (user_id, action, resource_type, resource_id, details, ip_address, created_at) action: entry.action,
VALUES (?, ?, ?, ?, ?, ?, datetime('now'))`, resourceType: entry.resource_type,
[ resourceId: entry.resource_id || null,
entry.user_id || null, details: entry.details ? JSON.stringify(entry.details) : null,
entry.action, ipAddress: entry.ip_address || null
entry.resource_type,
entry.resource_id || null,
entry.details ? JSON.stringify(entry.details) : null,
entry.ip_address || null
],
(err) => {
if (err) reject(err)
else resolve()
}
)
}) })
} catch (error) { } catch (error) {
console.error('Audit logging failed:', error) console.error('Audit logging failed:', error)
} finally {
db.close()
} }
} }

View File

@@ -1,19 +1,38 @@
import { exec } from 'child_process' import { exec } from 'child_process'
import path from 'path' import path from 'path'
import { promisify } from 'util' import { promisify } from 'util'
import fs from 'fs/promises'
const execAsync = promisify(exec) const execAsync = promisify(exec)
export async function createDatabaseBackup(): Promise<string> { export async function createDatabaseBackup(): Promise<string> {
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db') if (!process.env.DATABASE_URL) {
const backupPath = path.join(process.cwd(), 'backups', `backup_${Date.now()}.db`) throw new Error('DATABASE_URL environment variable is required')
}
const timestamp = Date.now()
const backupPath = path.join(process.cwd(), 'backups', `backup_${timestamp}.sql`)
try { try {
// Ensure backup directory exists // Ensure backup directory exists
await execAsync('mkdir -p backups') await execAsync('mkdir -p backups')
// Create SQLite backup using .backup command // Create PostgreSQL backup using pg_dump
await execAsync(`sqlite3 "${dbPath}" ".backup '${backupPath}'"`) // Extract database name from connection string
const url = new URL(process.env.DATABASE_URL)
const dbName = url.pathname.substring(1) // Remove leading slash
const host = url.hostname
const port = url.port || '5432'
const username = url.username
const password = url.password
// Set PGPASSWORD environment variable for pg_dump
const env = { ...process.env, PGPASSWORD: password }
await execAsync(
`pg_dump -h ${host} -p ${port} -U ${username} -d ${dbName} --no-password > "${backupPath}"`,
{ env }
)
return backupPath return backupPath
} catch (error) { } catch (error) {
@@ -23,14 +42,30 @@ export async function createDatabaseBackup(): Promise<string> {
} }
export async function restoreDatabase(backupPath: string): Promise<void> { export async function restoreDatabase(backupPath: string): Promise<void> {
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db') if (!process.env.DATABASE_URL) {
throw new Error('DATABASE_URL environment variable is required')
}
try { try {
// Create backup of current DB first // Create backup of current DB first
await createDatabaseBackup() await createDatabaseBackup()
// Copy backup to main location // Extract database connection details
await execAsync(`cp "${backupPath}" "${dbPath}"`) const url = new URL(process.env.DATABASE_URL)
const dbName = url.pathname.substring(1)
const host = url.hostname
const port = url.port || '5432'
const username = url.username
const password = url.password
// Set PGPASSWORD environment variable for psql
const env = { ...process.env, PGPASSWORD: password }
// Restore from SQL backup
await execAsync(
`psql -h ${host} -p ${port} -U ${username} -d ${dbName} --no-password < "${backupPath}"`,
{ env }
)
console.log('Database restored successfully') console.log('Database restored successfully')
} catch (error) { } catch (error) {
@@ -41,8 +76,25 @@ export async function restoreDatabase(backupPath: string): Promise<void> {
export async function listBackups(): Promise<string[]> { export async function listBackups(): Promise<string[]> {
try { try {
const { stdout } = await execAsync('ls -la backups/*.db 2>/dev/null || true') const backupDir = path.join(process.cwd(), 'backups')
return stdout.trim() ? stdout.trim().split('\n') : []
try {
const files = await fs.readdir(backupDir)
const sqlFiles = files.filter(file => file.endsWith('.sql'))
// Get file stats for each backup
const backupInfo = await Promise.all(
sqlFiles.map(async (file) => {
const filePath = path.join(backupDir, file)
const stats = await fs.stat(filePath)
return `${stats.mtime.toISOString()} ${file} (${Math.round(stats.size / 1024)}KB)`
})
)
return backupInfo.sort().reverse() // Most recent first
} catch (dirError) {
return []
}
} catch { } catch {
return [] return []
} }

View File

@@ -1,99 +1,60 @@
import sqlite3 from 'sqlite3' import { db, schema } from './db/connection'
import path from 'path' import { sql, count } from 'drizzle-orm'
export async function optimizeDatabase(): Promise<void> { export async function optimizeDatabase(): Promise<void> {
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
const db = new sqlite3.Database(dbPath)
try { try {
// Create performance indexes // PostgreSQL automatically creates indexes defined in schema.ts
const indexes = [ // Run ANALYZE to update statistics for query optimization
'CREATE INDEX IF NOT EXISTS idx_sources_domain ON sources(domain)', const tables = [
'CREATE INDEX IF NOT EXISTS idx_sources_status_risk ON sources(status, risk_level)', 'sources',
'CREATE INDEX IF NOT EXISTS idx_sources_created ON sources(created_at)', 'reports',
'CREATE INDEX IF NOT EXISTS idx_reports_status ON reports(status)', 'api_keys',
'CREATE INDEX IF NOT EXISTS idx_reports_created ON reports(created_at)', 'source_categories',
'CREATE INDEX IF NOT EXISTS idx_api_keys_hash ON api_keys(key_hash)', 'categories',
'CREATE INDEX IF NOT EXISTS idx_audit_logs_created ON audit_logs(created_at)', 'users'
'CREATE INDEX IF NOT EXISTS idx_source_categories ON source_categories(source_id, category_id)'
] ]
for (const indexQuery of indexes) { for (const tableName of tables) {
await new Promise<void>((resolve, reject) => { await db.execute(sql`ANALYZE ${sql.raw(tableName)}`)
db.run(indexQuery, (err) => {
if (err) reject(err)
else resolve()
})
})
} }
// Analyze tables for query optimization // PostgreSQL equivalent of VACUUM - VACUUM ANALYZE updates statistics and cleans up
const analyzeTables = [ await db.execute(sql`VACUUM ANALYZE`)
'ANALYZE sources',
'ANALYZE reports',
'ANALYZE api_keys',
'ANALYZE audit_logs',
'ANALYZE source_categories'
]
for (const analyzeQuery of analyzeTables) {
await new Promise<void>((resolve, reject) => {
db.run(analyzeQuery, (err) => {
if (err) reject(err)
else resolve()
})
})
}
// Vacuum database to optimize storage
await new Promise<void>((resolve, reject) => {
db.run('VACUUM', (err) => {
if (err) reject(err)
else resolve()
})
})
console.log('Database optimization completed successfully') console.log('Database optimization completed successfully')
} catch (error) { } catch (error) {
console.error('Database optimization failed:', error) console.error('Database optimization failed:', error)
throw error throw error
} finally {
db.close()
} }
} }
export async function getDatabaseStats(): Promise<any> { export async function getDatabaseStats(): Promise<any> {
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
const db = new sqlite3.Database(dbPath)
try { try {
const stats = await new Promise<any>((resolve, reject) => { // Get row counts for each table
db.all(` const [sourcesCount] = await db.select({ count: count() }).from(schema.sources)
SELECT const [reportsCount] = await db.select({ count: count() }).from(schema.reports)
name as table_name, const [apiKeysCount] = await db.select({ count: count() }).from(schema.apiKeys)
COUNT(*) as row_count const [categoriesCount] = await db.select({ count: count() }).from(schema.categories)
FROM ( const [usersCount] = await db.select({ count: count() }).from(schema.users)
SELECT 'sources' as name UNION const [sourceCategoriesCount] = await db.select({ count: count() }).from(schema.sourceCategories)
SELECT 'reports' as name UNION
SELECT 'api_keys' as name UNION const tables = [
SELECT 'audit_logs' as name { table_name: 'sources', row_count: sourcesCount.count },
) tables { table_name: 'reports', row_count: reportsCount.count },
`, (err, rows) => { { table_name: 'api_keys', row_count: apiKeysCount.count },
if (err) reject(err) { table_name: 'categories', row_count: categoriesCount.count },
else resolve(rows) { table_name: 'users', row_count: usersCount.count },
}) { table_name: 'source_categories', row_count: sourceCategoriesCount.count }
}) ]
return { return {
tables: stats, tables,
optimized_at: new Date().toISOString() optimized_at: new Date().toISOString()
} }
} catch (error) { } catch (error) {
console.error('Failed to get database stats:', error) console.error('Failed to get database stats:', error)
throw error throw error
} finally {
db.close()
} }
} }

27
lib/db/connection.ts Normal file
View File

@@ -0,0 +1,27 @@
import { drizzle } from 'drizzle-orm/node-postgres';
import { Pool } from 'pg';
import * as schema from './schema';

/**
 * Build the shared Drizzle database handle backed by a pg connection pool.
 * Throws at module load when DATABASE_URL is unset, so misconfiguration is
 * caught at startup rather than on the first query.
 */
function createConnection() {
  const connectionString = process.env.DATABASE_URL;
  if (!connectionString) {
    throw new Error('DATABASE_URL environment variable is required');
  }

  // Pool settings: at most 10 clients, idle clients recycled after 30s,
  // and a 2s cap on waiting for a free connection.
  const pgPool = new Pool({
    connectionString,
    ssl: false,
    max: 10,
    idleTimeoutMillis: 30000,
    connectionTimeoutMillis: 2000
  });

  // Errors raised by idle clients in the pool are logged, not fatal.
  pgPool.on('error', (err) => {
    console.error('Unexpected error on idle client', err);
  });

  return drizzle(pgPool, { schema });
}

// Single shared handle for the whole app; schema is re-exported for callers
// that build queries against the table definitions.
export const db = createConnection();
export { schema };

228
lib/db/schema.ts Normal file
View File

@@ -0,0 +1,228 @@
import {
pgTable,
serial,
varchar,
text,
boolean,
integer,
timestamp,
decimal,
pgEnum,
uniqueIndex,
index
} from 'drizzle-orm/pg-core';
import { relations } from 'drizzle-orm';
// PostgreSQL enum types shared by the tables below.

// Account roles for moderation staff.
export const roleEnum = pgEnum('role', ['admin', 'moderator']);
// Kind of platform a tracked source lives on.
export const sourceTypeEnum = pgEnum('source_type', [
  'website', 'facebook_page', 'facebook_group', 'instagram',
  'blog', 'news_site', 'youtube', 'tiktok', 'telegram', 'other'
]);
// Moderation lifecycle state of a source.
export const sourceStatusEnum = pgEnum('source_status', [
  'pending', 'verified', 'rejected', 'under_review'
]);
// Primary content language of a source.
export const languageEnum = pgEnum('language', ['sk', 'cs', 'en', 'other']);
// Triage priority for incoming reports.
export const priorityEnum = pgEnum('priority', ['low', 'medium', 'high', 'urgent']);
// Processing state of a user-submitted report.
export const reportStatusEnum = pgEnum('report_status', [
  'pending', 'in_review', 'approved', 'rejected', 'duplicate'
]);
// Users table — moderation/admin accounts that log into the backend.
export const users = pgTable('users', {
  id: serial('id').primaryKey(),
  email: varchar('email', { length: 255 }).notNull().unique(),
  // Password hash only (seed data uses bcrypt); plaintext is never stored.
  passwordHash: varchar('password_hash', { length: 255 }).notNull(),
  name: varchar('name', { length: 100 }).notNull(),
  // New accounts default to the less-privileged 'moderator' role.
  role: roleEnum('role').default('moderator'),
  // Deactivation flag — accounts are disabled, not deleted.
  isActive: boolean('is_active').default(true),
  lastLogin: timestamp('last_login'),
  createdAt: timestamp('created_at').defaultNow(),
  updatedAt: timestamp('updated_at').defaultNow()
});
// Categories table — disinformation categories a source can be tagged with.
export const categories = pgTable('categories', {
  id: serial('id').primaryKey(),
  name: varchar('name', { length: 100 }).notNull().unique(),
  // NOTE(review): slug is already .unique(); the uniqueIndex below duplicates
  // that constraint at the DDL level — confirm whether both are intended.
  slug: varchar('slug', { length: 100 }).notNull().unique(),
  description: text('description'),
  color: varchar('color', { length: 7 }).default('#6B7280'), // hex color, e.g. '#EF4444'
  priority: integer('priority').default(1),
  icon: varchar('icon', { length: 50 }),
  isActive: boolean('is_active').default(true),
  createdAt: timestamp('created_at').defaultNow(),
  updatedAt: timestamp('updated_at').defaultNow()
}, (t) => ({
  slugIdx: uniqueIndex('idx_categories_slug').on(t.slug),
  priorityIdx: index('idx_categories_priority').on(t.priority)
}));
// Sources table — tracked disinformation sources and their moderation state.
export const sources = pgTable('sources', {
  id: serial('id').primaryKey(),
  url: varchar('url', { length: 1000 }).notNull().unique(),
  domain: varchar('domain', { length: 255 }).notNull(),
  title: varchar('title', { length: 500 }),
  description: text('description'),
  type: sourceTypeEnum('type').notNull(),
  status: sourceStatusEnum('status').default('pending'),
  riskLevel: integer('risk_level').default(1),
  language: languageEnum('language').default('sk'),
  evidenceUrls: text('evidence_urls'), // JSON array serialized as text
  reportedBy: varchar('reported_by', { length: 255 }),
  // Moderator who verified this source; nullable while pending.
  verifiedBy: integer('verified_by').references(() => users.id),
  rejectionReason: text('rejection_reason'),
  followerCount: integer('follower_count').default(0),
  lastChecked: timestamp('last_checked'),
  metadata: text('metadata').default('{}'), // JSON object serialized as text
  createdAt: timestamp('created_at').defaultNow(),
  updatedAt: timestamp('updated_at').defaultNow()
}, (t) => ({
  // Single-column lookups used by the API endpoints.
  domainIdx: index('idx_sources_domain').on(t.domain),
  statusIdx: index('idx_sources_status').on(t.status),
  riskLevelIdx: index('idx_sources_risk_level').on(t.riskLevel),
  typeIdx: index('idx_sources_type').on(t.type),
  createdAtIdx: index('idx_sources_created_at').on(t.createdAt),
  verifiedByIdx: index('idx_sources_verified_by').on(t.verifiedBy),
  // Composite index for combined status + risk filtering.
  statusRiskIdx: index('idx_sources_status_risk').on(t.status, t.riskLevel)
}));
// Source Categories junction table — many-to-many link between sources and
// categories; rows cascade away when either side is deleted.
export const sourceCategories = pgTable('source_categories', {
  id: serial('id').primaryKey(),
  sourceId: integer('source_id').notNull().references(() => sources.id, { onDelete: 'cascade' }),
  categoryId: integer('category_id').notNull().references(() => categories.id, { onDelete: 'cascade' }),
  // How confident the tagger is in this assignment (0.00–9.99, default 1.0).
  confidenceScore: decimal('confidence_score', { precision: 3, scale: 2 }).default('1.0'),
  addedBy: integer('added_by').references(() => users.id),
  createdAt: timestamp('created_at').defaultNow()
}, (t) => ({
  sourceIdIdx: index('idx_source_categories_source_id').on(t.sourceId),
  categoryIdIdx: index('idx_source_categories_category_id').on(t.categoryId),
  // A source may carry each category at most once.
  uniqueSourceCategory: uniqueIndex('unique_source_category').on(t.sourceId, t.categoryId)
}));
// Reports table — public submissions flagging a URL for moderator review.
export const reports = pgTable('reports', {
  id: serial('id').primaryKey(),
  sourceUrl: varchar('source_url', { length: 1000 }).notNull(),
  sourceDomain: varchar('source_domain', { length: 255 }).notNull(),
  reporterEmail: varchar('reporter_email', { length: 255 }),
  reporterName: varchar('reporter_name', { length: 100 }),
  categorySuggestions: text('category_suggestions'), // JSON array serialized as text
  description: text('description').notNull(),
  evidenceUrls: text('evidence_urls'), // JSON array serialized as text
  priority: priorityEnum('priority').default('medium'),
  status: reportStatusEnum('status').default('pending'),
  // Moderator working on this report; nullable until triaged.
  assignedTo: integer('assigned_to').references(() => users.id),
  adminNotes: text('admin_notes'),
  processedAt: timestamp('processed_at'),
  ipAddress: varchar('ip_address', { length: 45 }), // 45 chars fits IPv6
  userAgent: text('user_agent'),
  createdAt: timestamp('created_at').defaultNow(),
  updatedAt: timestamp('updated_at').defaultNow()
}, (t) => ({
  statusIdx: index('idx_reports_status').on(t.status),
  sourceDomainIdx: index('idx_reports_source_domain').on(t.sourceDomain),
  priorityIdx: index('idx_reports_priority').on(t.priority),
  createdAtIdx: index('idx_reports_created_at').on(t.createdAt),
  assignedToIdx: index('idx_reports_assigned_to').on(t.assignedTo)
}));
// API Keys table — hashed API credentials; raw keys are never stored.
export const apiKeys = pgTable('api_keys', {
  id: serial('id').primaryKey(),
  // NOTE(review): keyHash is already .unique(); the uniqueIndex below
  // duplicates that constraint — confirm whether both are intended.
  keyHash: varchar('key_hash', { length: 255 }).notNull().unique(),
  name: varchar('name', { length: 100 }).notNull(),
  description: text('description'),
  ownerEmail: varchar('owner_email', { length: 255 }).notNull(),
  permissions: text('permissions').default('["read"]'), // JSON array serialized as text
  rateLimit: integer('rate_limit').default(1000),
  // Keys are revoked by flipping this flag rather than deleting the row.
  isActive: boolean('is_active').default(true),
  usageCount: integer('usage_count').default(0),
  lastUsed: timestamp('last_used'),
  expiresAt: timestamp('expires_at'),
  createdAt: timestamp('created_at').defaultNow(),
  updatedAt: timestamp('updated_at').defaultNow()
}, (t) => ({
  keyHashIdx: uniqueIndex('idx_api_keys_hash').on(t.keyHash),
  ownerIdx: index('idx_api_keys_owner').on(t.ownerEmail)
}));
// Audit Logs table — append-only record of admin/API actions.
export const auditLogs = pgTable('audit_logs', {
  id: serial('id').primaryKey(),
  // Nullable: entries can be written without an acting user.
  userId: integer('user_id').references(() => users.id),
  action: varchar('action', { length: 50 }).notNull(),
  resourceType: varchar('resource_type', { length: 50 }).notNull(),
  resourceId: integer('resource_id'),
  details: text('details'), // JSON serialized as text
  ipAddress: varchar('ip_address', { length: 45 }),
  createdAt: timestamp('created_at').defaultNow()
}, (t) => ({
  userIdIdx: index('idx_audit_logs_user_id').on(t.userId),
  createdAtIdx: index('idx_audit_logs_created_at').on(t.createdAt),
  actionIdx: index('idx_audit_logs_action').on(t.action),
  resourceTypeIdx: index('idx_audit_logs_resource_type').on(t.resourceType)
}));
// Relations — Drizzle relation metadata used by the relational query API
// (db.query.*). These declarations do not create foreign keys; the FKs live
// in the table definitions above.
export const usersRelations = relations(users, ({ many }) => ({
  verifiedSources: many(sources),
  sourceCategories: many(sourceCategories),
  assignedReports: many(reports),
  auditLogs: many(auditLogs)
}));
// A category can be attached to many sources via the junction table.
export const categoriesRelations = relations(categories, ({ many }) => ({
  sourceCategories: many(sourceCategories)
}));
// Each source optionally points at the moderator who verified it.
export const sourcesRelations = relations(sources, ({ one, many }) => ({
  verifiedBy: one(users, {
    fields: [sources.verifiedBy],
    references: [users.id]
  }),
  sourceCategories: many(sourceCategories)
}));
// Junction rows resolve to their source, category, and the user who added them.
export const sourceCategoriesRelations = relations(sourceCategories, ({ one }) => ({
  source: one(sources, {
    fields: [sourceCategories.sourceId],
    references: [sources.id]
  }),
  category: one(categories, {
    fields: [sourceCategories.categoryId],
    references: [categories.id]
  }),
  addedBy: one(users, {
    fields: [sourceCategories.addedBy],
    references: [users.id]
  })
}));
// Reports resolve to the moderator they are assigned to.
export const reportsRelations = relations(reports, ({ one }) => ({
  assignedTo: one(users, {
    fields: [reports.assignedTo],
    references: [users.id]
  })
}));
// Audit entries resolve to the acting user (userId is nullable).
export const auditLogsRelations = relations(auditLogs, ({ one }) => ({
  user: one(users, {
    fields: [auditLogs.userId],
    references: [users.id]
  })
}));

98
lib/db/seed.ts Normal file
View File

@@ -0,0 +1,98 @@
import { db } from './connection';
import { users, categories, sources, apiKeys, sourceCategories } from './schema';
import * as bcrypt from 'bcryptjs';

/**
 * Populate an empty database with baseline data: moderation categories,
 * a default admin account, two example sources, and their category links.
 * Logs progress to the console and rethrows on failure so callers can abort.
 *
 * NOTE(review): not idempotent — a second run will violate the unique
 * constraints on categories.slug / users.email; run against a fresh DB only.
 */
export async function seedDatabase() {
  try {
    console.log('🌱 Seeding database...');

    // Moderation categories (hex color, priority 1–5, icon name).
    const categoryData = [
      { name: 'Hoax', slug: 'hoax', description: 'Šírenie nepravdivých informácií a hoaxov', color: '#EF4444', priority: 5, icon: 'AlertTriangle' },
      { name: 'Hate Speech', slug: 'hate-speech', description: 'Nenávistné prejavy proti skupinám ľudí', color: '#DC2626', priority: 5, icon: 'MessageSquareX' },
      { name: 'Violence', slug: 'violence', description: 'Povzbudzovanie k násiliu', color: '#B91C1C', priority: 5, icon: 'Sword' },
      { name: 'Conspiracy', slug: 'conspiracy', description: 'Konšpiračné teórie', color: '#F59E0B', priority: 3, icon: 'Eye' },
      { name: 'Propaganda', slug: 'propaganda', description: 'Politická propaganda a manipulácia', color: '#D97706', priority: 2, icon: 'Megaphone' }
    ];

    // Single multi-row INSERT ... RETURNING instead of one round-trip per row;
    // PostgreSQL returns the rows in insertion order, so index lookups below hold.
    const insertedCategories = await db.insert(categories).values(categoryData).returning();
    for (const cat of insertedCategories) {
      console.log(`✅ Inserted category: ${cat.name}`);
    }

    // Default admin account.
    // SECURITY NOTE: credentials are hard-coded and printed below — acceptable
    // only for local development; never run this seed against production.
    const hashedPassword = await bcrypt.hash('admin123', 12);
    const insertedUsers = await db.insert(users).values({
      email: 'admin@antihoax.sk',
      passwordHash: hashedPassword,
      name: 'System Admin',
      role: 'admin'
    }).returning();
    console.log(`✅ Inserted user: ${insertedUsers[0].name}`);

    // Example sources, marked as verified by the admin created above.
    const sourceData = [
      {
        url: 'https://example-hoax-site.com',
        domain: 'example-hoax-site.com',
        title: 'Example Hoax Site',
        description: 'Príklad hoax stránky pre testovanie',
        type: 'website' as const,
        status: 'verified' as const,
        riskLevel: 5,
        language: 'sk' as const,
        reportedBy: 'test@example.com',
        verifiedBy: insertedUsers[0].id,
        followerCount: 1500,
        metadata: JSON.stringify({ tags: ['test', 'example'] })
      },
      {
        url: 'https://example-conspiracy.com',
        domain: 'example-conspiracy.com',
        title: 'Conspiracy Theory Site',
        description: 'Stránka šíriaca konšpiračné teórie',
        type: 'blog' as const,
        status: 'verified' as const,
        riskLevel: 3,
        language: 'sk' as const,
        reportedBy: 'reporter@example.com',
        verifiedBy: insertedUsers[0].id,
        followerCount: 850,
        metadata: JSON.stringify({ tags: ['conspiracy', 'politics'] })
      }
    ];
    const insertedSources = await db.insert(sources).values(sourceData).returning();
    for (const src of insertedSources) {
      console.log(`✅ Inserted source: ${src.title}`);
    }

    // Link each example source to its category in one batch insert.
    await db.insert(sourceCategories).values([
      {
        sourceId: insertedSources[0].id,
        categoryId: insertedCategories[0].id, // Hoax
        confidenceScore: '1.0',
        addedBy: insertedUsers[0].id
      },
      {
        sourceId: insertedSources[1].id,
        categoryId: insertedCategories[3].id, // Conspiracy
        confidenceScore: '0.9',
        addedBy: insertedUsers[0].id
      }
    ]);
    console.log(`✅ Linked sources with categories`);

    console.log('🎉 Database seeded successfully!');
    console.log('📧 Admin login: admin@antihoax.sk / admin123');
  } catch (error) {
    console.error('❌ Error seeding database:', error);
    throw error;
  }
}

3260
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,26 +1,37 @@
{ {
"name": "infohliadka", "name": "infohliadka",
"version": "0.2.0", "version": "1.0.0",
"private": true, "private": true,
"scripts": { "scripts": {
"dev": "next dev", "dev": "next dev",
"build": "next build", "build": "next build",
"start": "next start", "start": "next start",
"lint": "next lint" "lint": "next lint",
"db:generate": "drizzle-kit generate",
"db:migrate": "drizzle-kit migrate",
"db:push": "drizzle-kit push",
"db:studio": "drizzle-kit studio",
"db:seed": "npx tsx scripts/seed.ts"
}, },
"dependencies": { "dependencies": {
"next": "14.2.15", "@types/bcryptjs": "^2.4.6",
"react": "18.3.1", "@types/pg": "^8.15.5",
"react-dom": "18.3.1", "bcryptjs": "^3.0.2",
"sqlite3": "^5.1.7" "dotenv": "^17.2.2",
"drizzle-kit": "^0.31.4",
"drizzle-orm": "^0.44.5",
"next": "^14.2.32",
"pg": "^8.16.3",
"react": "^19.1.1",
"react-dom": "^19.1.1"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "20.14.15", "@types/node": "^24.3.1",
"@types/react": "18.3.11", "@types/react": "^19.1.12",
"@types/react-dom": "18.3.1", "@types/react-dom": "^19.1.9",
"@types/sqlite3": "^3.1.11", "eslint": "^9.35.0",
"eslint": "8.57.1",
"eslint-config-next": "14.2.15", "eslint-config-next": "14.2.15",
"typescript": "5.6.3" "tsx": "^4.20.5",
"typescript": "^5.9.2"
} }
} }

View File

@@ -1,24 +1,23 @@
import type { NextApiRequest, NextApiResponse } from "next" import type { NextApiRequest, NextApiResponse } from "next"
import sqlite3 from "sqlite3" import { db, schema } from '../../../lib/db/connection'
import path from "path" import { desc, eq } from 'drizzle-orm'
import { generateApiKey, hashApiKey } from "../../../lib/api-auth" import { generateApiKey, hashApiKey } from "../../../lib/api-auth"
export default async function handler(req: NextApiRequest, res: NextApiResponse) { export default async function handler(req: NextApiRequest, res: NextApiResponse) {
const dbPath = path.join(process.cwd(), "database", "antihoax.db")
const db = new sqlite3.Database(dbPath)
try { try {
if (req.method === "GET") { if (req.method === "GET") {
const keys = await new Promise<any[]>((resolve, reject) => { const keys = await db
db.all( .select({
`SELECT id, name, permissions, rate_limit, is_active, last_used, created_at id: schema.apiKeys.id,
FROM api_keys ORDER BY created_at DESC`, name: schema.apiKeys.name,
(err, rows) => { permissions: schema.apiKeys.permissions,
if (err) reject(err) rateLimit: schema.apiKeys.rateLimit,
else resolve(rows) isActive: schema.apiKeys.isActive,
} lastUsed: schema.apiKeys.lastUsed,
) createdAt: schema.apiKeys.createdAt
}) })
.from(schema.apiKeys)
.orderBy(desc(schema.apiKeys.createdAt))
res.json({ res.json({
keys: keys.map(key => ({ keys: keys.map(key => ({
@@ -38,17 +37,16 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const apiKey = generateApiKey() const apiKey = generateApiKey()
const keyHash = hashApiKey(apiKey) const keyHash = hashApiKey(apiKey)
const result = await new Promise<any>((resolve, reject) => { const [result] = await db
db.run( .insert(schema.apiKeys)
`INSERT INTO api_keys (key_hash, name, permissions, rate_limit, is_active, created_at) .values({
VALUES (?, ?, ?, ?, 1, datetime('now'))`, keyHash: keyHash,
[keyHash, name, JSON.stringify(permissions), rate_limit], name: name,
function(err) { permissions: JSON.stringify(permissions),
if (err) reject(err) rateLimit: rate_limit,
else resolve({ id: this.lastID }) isActive: true
} })
) .returning({ id: schema.apiKeys.id })
})
res.json({ res.json({
success: true, success: true,
@@ -62,16 +60,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
} else if (req.method === "DELETE") { } else if (req.method === "DELETE") {
const { id } = req.query const { id } = req.query
await new Promise<void>((resolve, reject) => { await db
db.run( .update(schema.apiKeys)
'UPDATE api_keys SET is_active = 0 WHERE id = ?', .set({ isActive: false })
[id], .where(eq(schema.apiKeys.id, parseInt(id as string)))
(err) => {
if (err) reject(err)
else resolve()
}
)
})
res.json({ success: true }) res.json({ success: true })
@@ -82,7 +74,5 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
} catch (error) { } catch (error) {
console.error('API keys error:', error) console.error('API keys error:', error)
res.status(500).json({ error: "Operation failed" }) res.status(500).json({ error: "Operation failed" })
} finally {
db.close()
} }
} }

View File

@@ -1,76 +1,74 @@
import type { NextApiRequest, NextApiResponse } from "next" import type { NextApiRequest, NextApiResponse } from "next"
import sqlite3 from "sqlite3" import { db, schema } from '../../../lib/db/connection'
import path from "path" import { eq, and, desc, count } from 'drizzle-orm'
export default async function handler(req: NextApiRequest, res: NextApiResponse) { export default async function handler(req: NextApiRequest, res: NextApiResponse) {
if (req.method !== "GET") return res.status(405).json({ error: "Method not allowed" }) if (req.method !== "GET") return res.status(405).json({ error: "Method not allowed" })
const { page = '1', limit = '50', action, resource_type, user_id } = req.query const { page = '1', limit = '50', action, resource_type, user_id } = req.query
const dbPath = path.join(process.cwd(), "database", "antihoax.db")
const db = new sqlite3.Database(dbPath)
try { try {
let whereConditions: string[] = [] let whereConditions = []
let params: any[] = []
if (action) { if (action) {
whereConditions.push("a.action = ?") whereConditions.push(eq(schema.auditLogs.action, action as string))
params.push(action)
} }
if (resource_type) { if (resource_type) {
whereConditions.push("a.resource_type = ?") whereConditions.push(eq(schema.auditLogs.resourceType, resource_type as string))
params.push(resource_type)
} }
if (user_id) { if (user_id) {
whereConditions.push("a.user_id = ?") whereConditions.push(eq(schema.auditLogs.userId, parseInt(user_id as string)))
params.push(parseInt(user_id as string))
} }
const whereClause = whereConditions.length > 0 ? `WHERE ${whereConditions.join(' AND ')}` : ''
const offset = (parseInt(page as string) - 1) * parseInt(limit as string) const offset = (parseInt(page as string) - 1) * parseInt(limit as string)
const limitInt = parseInt(limit as string)
const query = ` // Get logs with user info
SELECT const logs = await db
a.*, .select({
u.email as user_email, id: schema.auditLogs.id,
COUNT(*) OVER() as total_count userId: schema.auditLogs.userId,
FROM audit_logs a userEmail: schema.users.email,
LEFT JOIN users u ON a.user_id = u.id action: schema.auditLogs.action,
${whereClause} resourceType: schema.auditLogs.resourceType,
ORDER BY a.created_at DESC resourceId: schema.auditLogs.resourceId,
LIMIT ? OFFSET ? details: schema.auditLogs.details,
` ipAddress: schema.auditLogs.ipAddress,
createdAt: schema.auditLogs.createdAt
params.push(parseInt(limit as string), offset)
const logs = await new Promise<any[]>((resolve, reject) => {
db.all(query, params, (err, rows) => {
if (err) reject(err)
else resolve(rows)
}) })
}) .from(schema.auditLogs)
.leftJoin(schema.users, eq(schema.auditLogs.userId, schema.users.id))
.where(whereConditions.length > 0 ? and(...whereConditions) : undefined)
.orderBy(desc(schema.auditLogs.createdAt))
.limit(limitInt)
.offset(offset)
const total = logs.length > 0 ? logs[0].total_count : 0 // Get total count for pagination
const totalPages = Math.ceil(total / parseInt(limit as string)) const [totalResult] = await db
.select({ count: count() })
.from(schema.auditLogs)
.where(whereConditions.length > 0 ? and(...whereConditions) : undefined)
const total = totalResult.count
const totalPages = Math.ceil(total / limitInt)
res.json({ res.json({
logs: logs.map(log => ({ logs: logs.map(log => ({
id: log.id, id: log.id,
user_id: log.user_id, user_id: log.userId,
user_email: log.user_email, user_email: log.userEmail,
action: log.action, action: log.action,
resource_type: log.resource_type, resource_type: log.resourceType,
resource_id: log.resource_id, resource_id: log.resourceId,
details: log.details ? JSON.parse(log.details) : null, details: log.details ? JSON.parse(log.details) : null,
ip_address: log.ip_address, ip_address: log.ipAddress,
created_at: log.created_at created_at: log.createdAt
})), })),
pagination: { pagination: {
page: parseInt(page as string), page: parseInt(page as string),
limit: parseInt(limit as string), limit: limitInt,
total, total,
totalPages totalPages
} }
@@ -79,7 +77,5 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
} catch (error) { } catch (error) {
console.error('Audit logs error:', error) console.error('Audit logs error:', error)
res.status(500).json({ error: "Failed to fetch audit logs" }) res.status(500).json({ error: "Failed to fetch audit logs" })
} finally {
db.close()
} }
} }

View File

@@ -1,10 +1,10 @@
import type { NextApiRequest, NextApiResponse } from "next" import type { NextApiRequest, NextApiResponse } from "next"
import sqlite3 from "sqlite3" import { db, schema } from "../../../lib/db/connection"
import path from "path" import { eq } from "drizzle-orm"
interface BulkImportItem { interface BulkImportItem {
domain: string domain: string
risk_level: number riskLevel: number
categories: string[] categories: string[]
description?: string description?: string
} }
@@ -18,63 +18,57 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
return res.status(400).json({ error: "Sources array required" }) return res.status(400).json({ error: "Sources array required" })
} }
const dbPath = path.join(process.cwd(), "database", "antihoax.db")
const db = new sqlite3.Database(dbPath)
try { try {
let imported = 0 let imported = 0
let skipped = 0 let skipped = 0
for (const source of sources) { for (const source of sources) {
if (!source.domain || !source.risk_level) { if (!source.domain || !source.riskLevel) {
skipped++ skipped++
continue continue
} }
// Check if domain already exists // Check if source already exists
const existing = await new Promise<any>((resolve, reject) => { const existing = await db.select()
db.get( .from(schema.sources)
"SELECT id FROM sources WHERE domain = ?", .where(eq(schema.sources.domain, source.domain))
[source.domain], .limit(1)
(err, row) => {
if (err) reject(err)
else resolve(row)
}
)
})
if (existing) { if (existing.length > 0) {
skipped++ skipped++
continue continue
} }
// Insert new source // Insert new source
await new Promise<void>((resolve, reject) => { try {
db.run( const url = `https://${source.domain}`
`INSERT INTO sources (domain, title, risk_level, status, description, created_at) await db.insert(schema.sources).values({
VALUES (?, ?, ?, 'verified', ?, datetime('now'))`, url,
[source.domain, source.domain, source.risk_level, source.description || ''], domain: source.domain,
function(err) { title: source.domain,
if (err) reject(err) description: source.description || `Imported source: ${source.domain}`,
else resolve() type: 'website',
} status: 'pending',
) riskLevel: source.riskLevel,
}) language: 'sk',
reportedBy: 'bulk-import'
imported++ })
imported++
} catch (error) {
console.error('Failed to import source:', source.domain, error)
skipped++
}
} }
res.json({ res.json({
success: true, success: true,
imported, imported,
skipped, skipped,
total: sources.length message: `Imported ${imported} sources, skipped ${skipped}`
}) })
} catch (error) { } catch (error: any) {
console.error('Bulk import error:', error) console.error('Bulk import error:', error)
res.status(500).json({ error: "Import failed" }) res.status(500).json({ error: "Import failed" })
} finally {
db.close()
} }
} }

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next' import type { NextApiRequest, NextApiResponse } from 'next'
import sqlite3 from 'sqlite3' import { db, schema } from '../../../../lib/db/connection'
import path from 'path' import { desc, asc } from 'drizzle-orm'
export default async function handler( export default async function handler(
req: NextApiRequest, req: NextApiRequest,
@@ -10,26 +10,16 @@ export default async function handler(
return res.status(405).json({ error: 'Method not allowed' }) return res.status(405).json({ error: 'Method not allowed' })
} }
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
const db = new sqlite3.Database(dbPath)
try { try {
const categories = await new Promise<any[]>((resolve, reject) => { const categories = await db
db.all( .select()
'SELECT * FROM categories ORDER BY priority DESC, name ASC', .from(schema.categories)
(err, rows) => { .orderBy(desc(schema.categories.priority), asc(schema.categories.name))
if (err) reject(err)
else resolve(rows)
}
)
})
return res.status(200).json(categories) return res.status(200).json(categories)
} catch (error) { } catch (error) {
console.error('Database error:', error) console.error('Database error:', error)
return res.status(500).json({ error: 'Internal server error' }) return res.status(500).json({ error: 'Internal server error' })
} finally {
db.close()
} }
} }

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next' import type { NextApiRequest, NextApiResponse } from 'next'
import sqlite3 from 'sqlite3' import { db, schema } from '../../../lib/db/connection'
import path from 'path' import { eq, and, gte, count } from 'drizzle-orm'
interface DashboardStats { interface DashboardStats {
total_sources: number total_sources: number
@@ -19,54 +19,65 @@ export default async function handler(
return res.status(405).json({ error: 'Method not allowed' }) return res.status(405).json({ error: 'Method not allowed' })
} }
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
const db = new sqlite3.Database(dbPath)
try { try {
const stats = await new Promise<DashboardStats>((resolve, reject) => { // Get all stats in parallel
const queries = [ const weekAgo = new Date()
"SELECT COUNT(*) as total_sources FROM sources WHERE status = 'verified'", weekAgo.setDate(weekAgo.getDate() - 7)
"SELECT COUNT(*) as pending_sources FROM sources WHERE status = 'pending'",
"SELECT COUNT(*) as pending_reports FROM reports WHERE status = 'pending'", const dayAgo = new Date()
"SELECT COUNT(*) as high_risk_sources FROM sources WHERE status = 'verified' AND risk_level >= 4", dayAgo.setDate(dayAgo.getDate() - 1)
"SELECT COUNT(*) as sources_added_week FROM sources WHERE created_at > datetime('now', '-7 days')",
"SELECT COUNT(*) as reports_today FROM reports WHERE created_at > datetime('now', '-1 day')" const [
] totalSources,
pendingSources,
const results: any = {} pendingReports,
let completed = 0 highRiskSources,
sourcesAddedWeek,
queries.forEach((query, index) => { reportsToday
db.get(query, (err, row: any) => { ] = await Promise.all([
if (err) { db.select({ count: count() })
reject(err) .from(schema.sources)
return .where(eq(schema.sources.status, 'verified')),
}
db.select({ count: count() })
const key = Object.keys(row)[0] .from(schema.sources)
results[key] = row[key] .where(eq(schema.sources.status, 'pending')),
completed++
db.select({ count: count() })
if (completed === queries.length) { .from(schema.reports)
resolve({ .where(eq(schema.reports.status, 'pending')),
total_sources: results.total_sources || 0,
pending_sources: results.pending_sources || 0, db.select({ count: count() })
pending_reports: results.pending_reports || 0, .from(schema.sources)
high_risk_sources: results.high_risk_sources || 0, .where(
sources_added_week: results.sources_added_week || 0, and(
reports_today: results.reports_today || 0 eq(schema.sources.status, 'verified'),
}) gte(schema.sources.riskLevel, 4)
} )
}) ),
})
}) db.select({ count: count() })
.from(schema.sources)
.where(gte(schema.sources.createdAt, weekAgo)),
db.select({ count: count() })
.from(schema.reports)
.where(gte(schema.reports.createdAt, dayAgo))
])
const stats: DashboardStats = {
total_sources: totalSources[0].count,
pending_sources: pendingSources[0].count,
pending_reports: pendingReports[0].count,
high_risk_sources: highRiskSources[0].count,
sources_added_week: sourcesAddedWeek[0].count,
reports_today: reportsToday[0].count
}
return res.status(200).json(stats) return res.status(200).json(stats)
} catch (error) { } catch (error) {
console.error('Database error:', error) console.error('Database error:', error)
return res.status(500).json({ error: 'Internal server error' }) return res.status(500).json({ error: 'Internal server error' })
} finally {
db.close()
} }
} }

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next' import type { NextApiRequest, NextApiResponse } from 'next'
import sqlite3 from 'sqlite3' import { db, schema } from '../../../../lib/db/connection'
import path from 'path' import { eq, desc } from 'drizzle-orm'
export default async function handler( export default async function handler(
req: NextApiRequest, req: NextApiRequest,
@@ -12,44 +12,29 @@ export default async function handler(
const { status = 'pending', page = '1', limit = '20' } = req.query const { status = 'pending', page = '1', limit = '20' } = req.query
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
const db = new sqlite3.Database(dbPath)
try { try {
const reports = await new Promise<any[]>((resolve, reject) => { const offset = (parseInt(page as string) - 1) * parseInt(limit as string)
const offset = (parseInt(page as string) - 1) * parseInt(limit as string) const limitInt = parseInt(limit as string)
db.all( const reports = await db
`SELECT *, .select()
CASE .from(schema.reports)
WHEN category_suggestions IS NOT NULL .where(eq(schema.reports.status, status as any))
THEN json_extract(category_suggestions, '$') .orderBy(desc(schema.reports.createdAt))
ELSE '[]' .limit(limitInt)
END as category_suggestions .offset(offset)
FROM reports
WHERE status = ?
ORDER BY created_at DESC
LIMIT ? OFFSET ?`,
[status, parseInt(limit as string), offset],
(err, rows: any[]) => {
if (err) reject(err)
else {
const processedRows = rows.map(row => ({
...row,
category_suggestions: row.category_suggestions ? JSON.parse(row.category_suggestions) : []
}))
resolve(processedRows)
}
}
)
})
return res.status(200).json(reports) // Process the reports to parse JSON fields
const processedReports = reports.map(report => ({
...report,
categorySuggestions: report.categorySuggestions ? JSON.parse(report.categorySuggestions) : [],
evidenceUrls: report.evidenceUrls ? JSON.parse(report.evidenceUrls) : []
}))
return res.status(200).json(processedReports)
} catch (error) { } catch (error) {
console.error('Database error:', error) console.error('Database error:', error)
return res.status(500).json({ error: 'Internal server error' }) return res.status(500).json({ error: 'Internal server error' })
} finally {
db.close()
} }
} }

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next' import type { NextApiRequest, NextApiResponse } from 'next'
import sqlite3 from 'sqlite3' import { db, schema } from '../../../../lib/db/connection'
import path from 'path' import { eq, desc } from 'drizzle-orm'
export default async function handler( export default async function handler(
req: NextApiRequest, req: NextApiRequest,
@@ -12,32 +12,22 @@ export default async function handler(
const { status = 'pending', page = '1', limit = '20' } = req.query const { status = 'pending', page = '1', limit = '20' } = req.query
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
const db = new sqlite3.Database(dbPath)
try { try {
const sources = await new Promise<any[]>((resolve, reject) => { const offset = (parseInt(page as string) - 1) * parseInt(limit as string)
const offset = (parseInt(page as string) - 1) * parseInt(limit as string) const limitInt = parseInt(limit as string)
db.all( const sources = await db
`SELECT * FROM sources .select()
WHERE status = ? .from(schema.sources)
ORDER BY created_at DESC .where(eq(schema.sources.status, status as any))
LIMIT ? OFFSET ?`, .orderBy(desc(schema.sources.createdAt))
[status, parseInt(limit as string), offset], .limit(limitInt)
(err, rows) => { .offset(offset)
if (err) reject(err)
else resolve(rows)
}
)
})
return res.status(200).json(sources) return res.status(200).json(sources)
} catch (error) { } catch (error) {
console.error('Database error:', error) console.error('Database error:', error)
return res.status(500).json({ error: 'Internal server error' }) return res.status(500).json({ error: 'Internal server error' })
} finally {
db.close()
} }
} }

View File

@@ -1,31 +1,26 @@
import type { NextApiRequest, NextApiResponse } from "next" import type { NextApiRequest, NextApiResponse } from "next"
import sqlite3 from "sqlite3" import { db, schema } from "../../../lib/db/connection"
import path from "path" import { eq, count, sql } from "drizzle-orm"
import crypto from "crypto" import * as bcrypt from "bcryptjs"
function hashPassword(password: string): { hash: string, salt: string } { async function hashPassword(password: string): Promise<string> {
const salt = crypto.randomBytes(32).toString('hex') return await bcrypt.hash(password, 12)
const hash = crypto.pbkdf2Sync(password, salt, 10000, 64, 'sha256').toString('hex')
return { hash, salt }
} }
export default async function handler(req: NextApiRequest, res: NextApiResponse) { export default async function handler(req: NextApiRequest, res: NextApiResponse) {
const dbPath = path.join(process.cwd(), "database", "antihoax.db")
const db = new sqlite3.Database(dbPath)
try { try {
if (req.method === "GET") { if (req.method === "GET") {
const users = await new Promise<any[]>((resolve, reject) => { const users = await db.select({
db.all( id: schema.users.id,
`SELECT id, email, role, is_active, created_at, last_login, email: schema.users.email,
(SELECT COUNT(*) FROM sources WHERE moderator_id = users.id) as sources_moderated role: schema.users.role,
FROM users ORDER BY created_at DESC`, isActive: schema.users.isActive,
(err, rows) => { createdAt: schema.users.createdAt,
if (err) reject(err) lastLogin: schema.users.lastLogin,
else resolve(rows) sourcesModerated: sql<number>`(SELECT COUNT(*) FROM ${schema.sources} WHERE verified_by = ${schema.users.id})`
}
)
}) })
.from(schema.users)
.orderBy(schema.users.createdAt)
res.json({ users }) res.json({ users })
@@ -40,27 +35,25 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
return res.status(400).json({ error: "Invalid role" }) return res.status(400).json({ error: "Invalid role" })
} }
const { hash, salt } = hashPassword(password) const passwordHash = await hashPassword(password)
const result = await new Promise<any>((resolve, reject) => { const result = await db.insert(schema.users)
db.run( .values({
`INSERT INTO users (email, password_hash, salt, role, is_active, created_at) email,
VALUES (?, ?, ?, ?, 1, datetime('now'))`, passwordHash,
[email, hash, salt, role], name: email.split('@')[0], // Use email username as name
function(err) { role: role as 'admin' | 'moderator',
if (err) reject(err) isActive: true
else resolve({ id: this.lastID }) })
} .returning({ id: schema.users.id })
)
})
res.json({ res.json({
success: true, success: true,
user: { user: {
id: result.id, id: result[0].id,
email, email,
role, role,
is_active: true isActive: true
} }
}) })
@@ -70,12 +63,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
} catch (error: any) { } catch (error: any) {
console.error('Users API error:', error) console.error('Users API error:', error)
if (error?.code === 'SQLITE_CONSTRAINT_UNIQUE') { if (error?.code === '23505') {
res.status(400).json({ error: "User already exists" }) res.status(400).json({ error: "User already exists" })
} else { } else {
res.status(500).json({ error: "Operation failed" }) res.status(500).json({ error: "Operation failed" })
} }
} finally {
db.close()
} }
} }

View File

@@ -1,11 +1,7 @@
import type { NextApiRequest, NextApiResponse } from "next" import type { NextApiRequest, NextApiResponse } from "next"
import sqlite3 from "sqlite3" import { db, schema } from '../../../lib/db/connection'
import path from "path" import { eq } from 'drizzle-orm'
import crypto from "crypto" import bcrypt from 'bcryptjs'
function hashPassword(password: string, salt: string): string {
return crypto.pbkdf2Sync(password, salt, 10000, 64, 'sha256').toString('hex')
}
export default async function handler(req: NextApiRequest, res: NextApiResponse) { export default async function handler(req: NextApiRequest, res: NextApiResponse) {
if (req.method !== "POST") return res.status(405).json({ error: "Method not allowed" }) if (req.method !== "POST") return res.status(405).json({ error: "Method not allowed" })
@@ -16,45 +12,31 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
return res.status(400).json({ error: "Email and password required" }) return res.status(400).json({ error: "Email and password required" })
} }
const dbPath = path.join(process.cwd(), "database", "antihoax.db")
const db = new sqlite3.Database(dbPath)
try { try {
const user = await new Promise<any>((resolve, reject) => { const users = await db.select()
db.get( .from(schema.users)
"SELECT id, email, password_hash, salt, role, is_active FROM users WHERE email = ?", .where(eq(schema.users.email, email))
[email], .limit(1)
(err, row) => {
if (err) reject(err)
else resolve(row)
}
)
})
if (!user) { if (users.length === 0) {
return res.status(401).json({ error: "Invalid credentials" }) return res.status(401).json({ error: "Invalid credentials" })
} }
if (!user.is_active) { const user = users[0]
if (!user.isActive) {
return res.status(401).json({ error: "Account is disabled" }) return res.status(401).json({ error: "Account is disabled" })
} }
const hashedPassword = hashPassword(password, user.salt) const isValidPassword = await bcrypt.compare(password, user.passwordHash)
if (hashedPassword !== user.password_hash) { if (!isValidPassword) {
return res.status(401).json({ error: "Invalid credentials" }) return res.status(401).json({ error: "Invalid credentials" })
} }
// Update last login // Update last login
await new Promise<void>((resolve, reject) => { await db.update(schema.users)
db.run( .set({ lastLogin: new Date() })
"UPDATE users SET last_login = datetime('now') WHERE id = ?", .where(eq(schema.users.id, user.id))
[user.id],
(err) => {
if (err) reject(err)
else resolve()
}
)
})
res.json({ res.json({
success: true, success: true,
@@ -69,7 +51,5 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
} catch (error) { } catch (error) {
console.error('Login error:', error) console.error('Login error:', error)
res.status(500).json({ error: "Login failed" }) res.status(500).json({ error: "Login failed" })
} finally {
db.close()
} }
} }

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next' import type { NextApiRequest, NextApiResponse } from 'next'
import sqlite3 from 'sqlite3' import { db, schema } from '../../../lib/db/connection'
import path from 'path' import { eq, gte, desc, count, sql } from 'drizzle-orm'
interface RiskyDomain { interface RiskyDomain {
domain: string domain: string
@@ -20,49 +20,36 @@ export default async function handler(
const { limit = '20' } = req.query const { limit = '20' } = req.query
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
const db = new sqlite3.Database(dbPath)
try { try {
const riskyDomains = await new Promise<RiskyDomain[]>((resolve, reject) => { const riskyDomainsResult = await db
db.all( .select({
`SELECT domain: schema.sources.domain,
s.domain, sourceCount: count(),
COUNT(*) as source_count, avgRiskLevel: sql<number>`AVG(${schema.sources.riskLevel})`,
AVG(s.risk_level) as avg_risk_level, maxRiskLevel: sql<number>`MAX(${schema.sources.riskLevel})`,
MAX(s.risk_level) as max_risk_level, categories: sql<string>`string_agg(DISTINCT ${schema.categories.name}, ',')`
GROUP_CONCAT(DISTINCT c.name) as categories })
FROM sources s .from(schema.sources)
LEFT JOIN source_categories sc ON s.id = sc.source_id .leftJoin(schema.sourceCategories, eq(schema.sources.id, schema.sourceCategories.sourceId))
LEFT JOIN categories c ON sc.category_id = c.id .leftJoin(schema.categories, eq(schema.sourceCategories.categoryId, schema.categories.id))
WHERE s.status = 'verified' .where(eq(schema.sources.status, 'verified'))
GROUP BY s.domain .groupBy(schema.sources.domain)
HAVING AVG(s.risk_level) >= 3 .having(gte(sql`AVG(${schema.sources.riskLevel})`, 3))
ORDER BY avg_risk_level DESC, source_count DESC .orderBy(desc(sql`AVG(${schema.sources.riskLevel})`), desc(count()))
LIMIT ?`, .limit(parseInt(limit as string))
[parseInt(limit as string)],
(err, rows: any[]) => { const riskyDomains: RiskyDomain[] = riskyDomainsResult.map(row => ({
if (err) reject(err) domain: row.domain,
else { source_count: row.sourceCount,
const domains = rows.map(row => ({ avg_risk_level: Math.round(row.avgRiskLevel * 10) / 10,
domain: row.domain, max_risk_level: row.maxRiskLevel,
source_count: row.source_count, categories: row.categories ? row.categories.split(',').filter(Boolean) : []
avg_risk_level: Math.round(row.avg_risk_level * 10) / 10, }))
max_risk_level: row.max_risk_level,
categories: row.categories ? row.categories.split(',') : []
}))
resolve(domains)
}
}
)
})
return res.status(200).json(riskyDomains) return res.status(200).json(riskyDomains)
} catch (error) { } catch (error) {
console.error('Database error:', error) console.error('Database error:', error)
return res.status(500).json({ error: 'Internal server error' }) return res.status(500).json({ error: 'Internal server error' })
} finally {
db.close()
} }
} }

View File

@@ -1,6 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next' import type { NextApiRequest, NextApiResponse } from 'next'
import sqlite3 from 'sqlite3' import { db, schema } from '../../lib/db/connection'
import path from 'path'
function extractDomain(url: string): string { function extractDomain(url: string): string {
try { try {
@@ -30,31 +29,16 @@ export default async function handler(
return res.status(400).json({ error: 'Invalid URL format' }) return res.status(400).json({ error: 'Invalid URL format' })
} }
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
const db = new sqlite3.Database(dbPath)
try { try {
await new Promise<void>((resolve, reject) => { await db.insert(schema.reports).values({
db.run( sourceUrl: source_url,
`INSERT INTO reports ( sourceDomain: domain,
source_url, source_domain, reporter_email, reporter_name, reporterEmail: reporter_email || null,
category_suggestions, description, ip_address, user_agent reporterName: reporter_name || null,
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, categorySuggestions: JSON.stringify(categories || []),
[ description: description,
source_url, ipAddress: (req.headers['x-forwarded-for'] as string) || (req.socket?.remoteAddress),
domain, userAgent: req.headers['user-agent'] || null
reporter_email || null,
reporter_name || null,
JSON.stringify(categories || []),
description,
req.headers['x-forwarded-for'] || req.connection.remoteAddress,
req.headers['user-agent']
],
function(err) {
if (err) reject(err)
else resolve()
}
)
}) })
return res.status(200).json({ success: true, message: 'Report submitted successfully' }) return res.status(200).json({ success: true, message: 'Report submitted successfully' })
@@ -62,7 +46,5 @@ export default async function handler(
} catch (error) { } catch (error) {
console.error('Database error:', error) console.error('Database error:', error)
return res.status(500).json({ error: 'Internal server error' }) return res.status(500).json({ error: 'Internal server error' })
} finally {
db.close()
} }
} }

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from "next" import type { NextApiRequest, NextApiResponse } from "next"
import sqlite3 from "sqlite3" import { db, schema } from '../../../lib/db/connection'
import path from "path" import { eq, and, or, like, gte, lte, desc, count, sql } from 'drizzle-orm'
export default async function handler(req: NextApiRequest, res: NextApiResponse) { export default async function handler(req: NextApiRequest, res: NextApiResponse) {
if (req.method !== "GET") return res.status(405).json({ error: "Method not allowed" }) if (req.method !== "GET") return res.status(405).json({ error: "Method not allowed" })
@@ -15,80 +15,113 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
limit = '20' limit = '20'
} = req.query } = req.query
const dbPath = path.join(process.cwd(), "database", "antihoax.db")
const db = new sqlite3.Database(dbPath)
try { try {
let whereConditions = ["s.status = ?"] let whereConditions = [eq(schema.sources.status, status as string)]
let params: any[] = [status]
if (q) { if (q) {
whereConditions.push("(s.domain LIKE ? OR s.title LIKE ? OR s.description LIKE ?)") whereConditions.push(
params.push(`%${q}%`, `%${q}%`, `%${q}%`) or(
} like(schema.sources.domain, `%${q}%`),
like(schema.sources.title, `%${q}%`),
if (category) { like(schema.sources.description, `%${q}%`)
whereConditions.push("EXISTS (SELECT 1 FROM source_categories sc JOIN categories c ON sc.category_id = c.id WHERE sc.source_id = s.id AND c.name = ?)") )
params.push(category) )
} }
if (risk_level_min) { if (risk_level_min) {
whereConditions.push("s.risk_level >= ?") whereConditions.push(gte(schema.sources.riskLevel, parseInt(risk_level_min as string)))
params.push(parseInt(risk_level_min as string))
} }
if (risk_level_max) { if (risk_level_max) {
whereConditions.push("s.risk_level <= ?") whereConditions.push(lte(schema.sources.riskLevel, parseInt(risk_level_max as string)))
params.push(parseInt(risk_level_max as string))
} }
const offset = (parseInt(page as string) - 1) * parseInt(limit as string) const offset = (parseInt(page as string) - 1) * parseInt(limit as string)
const limitInt = parseInt(limit as string)
const query = ` // Build the base query
SELECT s.*, GROUP_CONCAT(c.name) as categories, let query = db
COUNT(*) OVER() as total_count .select({
FROM sources s id: schema.sources.id,
LEFT JOIN source_categories sc ON s.id = sc.source_id domain: schema.sources.domain,
LEFT JOIN categories c ON sc.category_id = c.id title: schema.sources.title,
WHERE ${whereConditions.join(' AND ')} riskLevel: schema.sources.riskLevel,
GROUP BY s.id description: schema.sources.description,
ORDER BY s.risk_level DESC, s.created_at DESC createdAt: schema.sources.createdAt,
LIMIT ? OFFSET ? categories: sql<string>`string_agg(${schema.categories.name}, ',')`
`
params.push(parseInt(limit as string), offset)
const results = await new Promise<any[]>((resolve, reject) => {
db.all(query, params, (err, rows) => {
if (err) reject(err)
else resolve(rows)
}) })
}) .from(schema.sources)
.leftJoin(schema.sourceCategories, eq(schema.sources.id, schema.sourceCategories.sourceId))
.leftJoin(schema.categories, eq(schema.sourceCategories.categoryId, schema.categories.id))
.where(and(...whereConditions))
.groupBy(schema.sources.id, schema.sources.domain, schema.sources.title, schema.sources.riskLevel, schema.sources.description, schema.sources.createdAt)
.orderBy(desc(schema.sources.riskLevel), desc(schema.sources.createdAt))
.limit(limitInt)
.offset(offset)
const total = results.length > 0 ? results[0].total_count : 0 // Apply category filter if provided
const totalPages = Math.ceil(total / parseInt(limit as string)) if (category) {
query = db
.select({
id: schema.sources.id,
domain: schema.sources.domain,
title: schema.sources.title,
riskLevel: schema.sources.riskLevel,
description: schema.sources.description,
createdAt: schema.sources.createdAt,
categories: sql<string>`string_agg(${schema.categories.name}, ',')`
})
.from(schema.sources)
.innerJoin(schema.sourceCategories, eq(schema.sources.id, schema.sourceCategories.sourceId))
.innerJoin(schema.categories, eq(schema.sourceCategories.categoryId, schema.categories.id))
.where(and(...whereConditions, eq(schema.categories.name, category as string)))
.groupBy(schema.sources.id, schema.sources.domain, schema.sources.title, schema.sources.riskLevel, schema.sources.description, schema.sources.createdAt)
.orderBy(desc(schema.sources.riskLevel), desc(schema.sources.createdAt))
.limit(limitInt)
.offset(offset)
}
const results = await query
// Get total count for pagination
let countQuery = db
.select({ count: count() })
.from(schema.sources)
.where(and(...whereConditions))
if (category) {
countQuery = db
.select({ count: count() })
.from(schema.sources)
.innerJoin(schema.sourceCategories, eq(schema.sources.id, schema.sourceCategories.sourceId))
.innerJoin(schema.categories, eq(schema.sourceCategories.categoryId, schema.categories.id))
.where(and(...whereConditions, eq(schema.categories.name, category as string)))
}
const [totalResult] = await countQuery
const total = totalResult.count
const totalPages = Math.ceil(total / limitInt)
res.json({ res.json({
results: results.map(row => ({ results: results.map(row => ({
id: row.id, id: row.id,
domain: row.domain, domain: row.domain,
title: row.title, title: row.title,
risk_level: row.risk_level, risk_level: row.riskLevel,
categories: row.categories ? row.categories.split(',') : [], categories: row.categories ? row.categories.split(',').filter(Boolean) : [],
description: row.description, description: row.description,
created_at: row.created_at created_at: row.createdAt
})), })),
pagination: { pagination: {
page: parseInt(page as string), page: parseInt(page as string),
limit: parseInt(limit as string), limit: limitInt,
total, total,
totalPages totalPages
} }
}) })
} catch (error) { } catch (error) {
console.error('Search error:', error)
res.status(500).json({ error: "Search failed" }) res.status(500).json({ error: "Search failed" })
} finally {
db.close()
} }
} }

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from "next" import type { NextApiRequest, NextApiResponse } from "next"
import sqlite3 from "sqlite3" import { db, schema } from '../../../lib/db/connection'
import path from "path" import { or, like } from 'drizzle-orm'
export default async function handler(req: NextApiRequest, res: NextApiResponse) { export default async function handler(req: NextApiRequest, res: NextApiResponse) {
if (req.method !== "GET") return res.status(405).json({ error: "Method not allowed" }) if (req.method !== "GET") return res.status(405).json({ error: "Method not allowed" })
@@ -8,24 +8,21 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const { q } = req.query const { q } = req.query
if (!q) return res.status(400).json({ error: "Query required" }) if (!q) return res.status(400).json({ error: "Query required" })
const dbPath = path.join(process.cwd(), "database", "antihoax.db")
const db = new sqlite3.Database(dbPath)
try { try {
const results = await new Promise<any[]>((resolve, reject) => { const results = await db
db.all( .select()
"SELECT * FROM sources WHERE domain LIKE ? OR title LIKE ? LIMIT 20", .from(schema.sources)
[`%${q}%`, `%${q}%`], .where(
(err, rows) => { or(
if (err) reject(err) like(schema.sources.domain, `%${q}%`),
else resolve(rows) like(schema.sources.title, `%${q}%`)
} )
) )
}) .limit(20)
res.json(results) res.json(results)
} catch (error) { } catch (error) {
console.error('Search error:', error)
res.status(500).json({ error: "Database error" }) res.status(500).json({ error: "Database error" })
} finally {
db.close()
} }
} }

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next' import type { NextApiRequest, NextApiResponse } from 'next'
import sqlite3 from 'sqlite3' import { db, schema } from '../../../lib/db/connection'
import path from 'path' import { eq, and, sql } from 'drizzle-orm'
import { rateLimit, getRateLimitHeaders } from '../../../lib/rate-limiter' import { rateLimit, getRateLimitHeaders } from '../../../lib/rate-limiter'
import { cache, getCacheKey } from '../../../lib/cache' import { cache, getCacheKey } from '../../../lib/cache'
@@ -88,25 +88,23 @@ export default async function handler(
return res.status(200).json(cachedResult) return res.status(200).json(cachedResult)
} }
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
const db = new sqlite3.Database(dbPath)
try { try {
const sources = await new Promise<any[]>((resolve, reject) => { const sources = await db
db.all( .select({
`SELECT s.*, GROUP_CONCAT(c.name) as categories id: schema.sources.id,
FROM sources s riskLevel: schema.sources.riskLevel,
LEFT JOIN source_categories sc ON s.id = sc.source_id categories: sql<string>`string_agg(${schema.categories.name}, ',')`
LEFT JOIN categories c ON sc.category_id = c.id })
WHERE s.domain = ? AND s.status = 'verified' .from(schema.sources)
GROUP BY s.id`, .leftJoin(schema.sourceCategories, eq(schema.sources.id, schema.sourceCategories.sourceId))
[domain], .leftJoin(schema.categories, eq(schema.sourceCategories.categoryId, schema.categories.id))
(err, rows) => { .where(
if (err) reject(err) and(
else resolve(rows) eq(schema.sources.domain, domain),
} eq(schema.sources.status, 'verified')
)
) )
}) .groupBy(schema.sources.id, schema.sources.riskLevel)
let result: CheckResponse let result: CheckResponse
@@ -119,7 +117,7 @@ export default async function handler(
source_count: 0 source_count: 0
} }
} else { } else {
const maxRiskLevel = Math.max(...sources.map(s => s.risk_level)) const maxRiskLevel = Math.max(...sources.map(s => s.riskLevel))
const allCategories = sources const allCategories = sources
.map(s => s.categories) .map(s => s.categories)
.filter(Boolean) .filter(Boolean)
@@ -156,7 +154,5 @@ export default async function handler(
} catch (error) { } catch (error) {
console.error('Database error:', error) console.error('Database error:', error)
return res.status(500).json({ error: 'Internal server error' }) return res.status(500).json({ error: 'Internal server error' })
} finally {
db.close()
} }
} }

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next' import type { NextApiRequest, NextApiResponse } from 'next'
import sqlite3 from 'sqlite3' import { db, schema } from '../../lib/db/connection'
import path from 'path' import { eq, gte, count, desc, sql } from 'drizzle-orm'
interface PublicStats { interface PublicStats {
total_sources: number total_sources: number
@@ -18,91 +18,69 @@ export default async function handler(
return res.status(405).json({ error: 'Method not allowed' }) return res.status(405).json({ error: 'Method not allowed' })
} }
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
const db = new sqlite3.Database(dbPath)
try { try {
// Get basic counts // Get basic counts
const totalSources = await new Promise<number>((resolve, reject) => { const [totalSourcesResult] = await db
db.get( .select({ count: count() })
"SELECT COUNT(*) as count FROM sources WHERE status = 'verified'", .from(schema.sources)
(err, row: any) => { .where(eq(schema.sources.status, 'verified'))
if (err) reject(err)
else resolve(row.count)
}
)
})
const highRiskSources = await new Promise<number>((resolve, reject) => { const [highRiskSourcesResult] = await db
db.get( .select({ count: count() })
"SELECT COUNT(*) as count FROM sources WHERE status = 'verified' AND risk_level >= 4", .from(schema.sources)
(err, row: any) => { .where(
if (err) reject(err) sql`${schema.sources.status} = 'verified' AND ${schema.sources.riskLevel} >= 4`
else resolve(row.count)
}
) )
})
const recentAdditions = await new Promise<number>((resolve, reject) => { const thirtyDaysAgo = new Date()
db.get( thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30)
"SELECT COUNT(*) as count FROM sources WHERE created_at > datetime('now', '-30 days')",
(err, row: any) => { const [recentAdditionsResult] = await db
if (err) reject(err) .select({ count: count() })
else resolve(row.count) .from(schema.sources)
} .where(gte(schema.sources.createdAt, thirtyDaysAgo))
)
})
// Get categories breakdown // Get categories breakdown
const categoriesBreakdown = await new Promise<{ [key: string]: number }>((resolve, reject) => { const categoriesBreakdownResult = await db
db.all( .select({
`SELECT c.name, COUNT(*) as count name: schema.categories.name,
FROM categories c count: count()
JOIN source_categories sc ON c.id = sc.category_id })
JOIN sources s ON sc.source_id = s.id .from(schema.categories)
WHERE s.status = 'verified' .innerJoin(schema.sourceCategories, eq(schema.categories.id, schema.sourceCategories.categoryId))
GROUP BY c.id, c.name`, .innerJoin(schema.sources, eq(schema.sourceCategories.sourceId, schema.sources.id))
(err, rows: any[]) => { .where(eq(schema.sources.status, 'verified'))
if (err) reject(err) .groupBy(schema.categories.id, schema.categories.name)
else {
const breakdown: { [key: string]: number } = {} const categoriesBreakdown: { [key: string]: number } = {}
rows.forEach(row => { categoriesBreakdownResult.forEach(row => {
breakdown[row.name] = row.count categoriesBreakdown[row.name] = row.count
})
resolve(breakdown)
}
}
)
}) })
// Get top risky domains // Get top risky domains
const topDomains = await new Promise<{ domain: string; count: number; risk_level: number }[]>((resolve, reject) => { const topDomainsResult = await db
db.all( .select({
`SELECT domain, COUNT(*) as count, AVG(risk_level) as avg_risk domain: schema.sources.domain,
FROM sources count: count(),
WHERE status = 'verified' avgRisk: sql<number>`AVG(${schema.sources.riskLevel})`
GROUP BY domain })
ORDER BY avg_risk DESC, count DESC .from(schema.sources)
LIMIT 10`, .where(eq(schema.sources.status, 'verified'))
(err, rows: any[]) => { .groupBy(schema.sources.domain)
if (err) reject(err) .orderBy(desc(sql`AVG(${schema.sources.riskLevel})`), desc(count()))
else { .limit(10)
const domains = rows.map(row => ({
domain: row.domain, const topDomains = topDomainsResult.map(row => ({
count: row.count, domain: row.domain,
risk_level: Math.round(row.avg_risk * 10) / 10 count: row.count,
})) risk_level: Math.round(row.avgRisk * 10) / 10
resolve(domains) }))
}
}
)
})
const stats: PublicStats = { const stats: PublicStats = {
total_sources: totalSources, total_sources: totalSourcesResult.count,
high_risk_sources: highRiskSources, high_risk_sources: highRiskSourcesResult.count,
categories_breakdown: categoriesBreakdown, categories_breakdown: categoriesBreakdown,
recent_additions: recentAdditions, recent_additions: recentAdditionsResult.count,
top_domains: topDomains top_domains: topDomains
} }
@@ -111,7 +89,5 @@ export default async function handler(
} catch (error) { } catch (error) {
console.error('Database error:', error) console.error('Database error:', error)
return res.status(500).json({ error: 'Internal server error' }) return res.status(500).json({ error: 'Internal server error' })
} finally {
db.close()
} }
} }

19
scripts/seed.ts Normal file
View File

@@ -0,0 +1,19 @@
import * as dotenv from 'dotenv';
// Load environment variables first
dotenv.config({ path: '.env.local' });
import { seedDatabase } from '../lib/db/seed';
async function main() {
try {
console.log('Using DATABASE_URL:', process.env.DATABASE_URL ? 'configured' : 'missing');
await seedDatabase();
process.exit(0);
} catch (error) {
console.error('Failed to seed database:', error);
process.exit(1);
}
}
main();