migrate from SQLite to PostgreSQL with Drizzle ORM
- Updated all packages to latest versions (React 19, Next.js 14.2.32) - Replaced sqlite3 with pg and drizzle-orm dependencies - Created complete PostgreSQL schema with relationships and indexes - Migrated all API endpoints from SQLite to Drizzle queries - Added database seeding with sample data - Updated authentication to use bcrypt instead of pbkdf2 - Configured connection pooling for PostgreSQL - Updated app version to 1.0.0 - All endpoints tested and working correctly
This commit is contained in:
@@ -1,16 +1,16 @@
|
||||
import sqlite3 from 'sqlite3'
|
||||
import path from 'path'
|
||||
import crypto from 'crypto'
|
||||
import { db, schema } from './db/connection'
|
||||
import { eq, and } from 'drizzle-orm'
|
||||
|
||||
export interface ApiKey {
|
||||
id: number
|
||||
key_hash: string
|
||||
keyHash: string
|
||||
name: string
|
||||
permissions: string[]
|
||||
rate_limit: number
|
||||
is_active: boolean
|
||||
last_used?: string
|
||||
created_at: string
|
||||
rateLimit: number
|
||||
isActive: boolean
|
||||
lastUsed?: Date
|
||||
createdAt: Date
|
||||
}
|
||||
|
||||
export function generateApiKey(): string {
|
||||
@@ -25,48 +25,38 @@ export async function validateApiKey(key: string): Promise<ApiKey | null> {
|
||||
if (!key || !key.startsWith('ak_')) return null
|
||||
|
||||
const keyHash = hashApiKey(key)
|
||||
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
|
||||
const db = new sqlite3.Database(dbPath)
|
||||
|
||||
try {
|
||||
const apiKey = await new Promise<ApiKey | null>((resolve, reject) => {
|
||||
db.get(
|
||||
'SELECT * FROM api_keys WHERE key_hash = ? AND is_active = 1',
|
||||
[keyHash],
|
||||
(err, row: any) => {
|
||||
if (err) reject(err)
|
||||
else if (row) {
|
||||
resolve({
|
||||
...row,
|
||||
permissions: row.permissions ? JSON.parse(row.permissions) : []
|
||||
})
|
||||
} else {
|
||||
resolve(null)
|
||||
}
|
||||
}
|
||||
)
|
||||
})
|
||||
const apiKeys = await db.select()
|
||||
.from(schema.apiKeys)
|
||||
.where(and(
|
||||
eq(schema.apiKeys.keyHash, keyHash),
|
||||
eq(schema.apiKeys.isActive, true)
|
||||
))
|
||||
.limit(1)
|
||||
|
||||
if (apiKey) {
|
||||
// Update last_used timestamp
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
db.run(
|
||||
'UPDATE api_keys SET last_used = datetime("now") WHERE id = ?',
|
||||
[apiKey.id],
|
||||
(err) => {
|
||||
if (err) reject(err)
|
||||
else resolve()
|
||||
}
|
||||
)
|
||||
})
|
||||
if (apiKeys.length === 0) return null
|
||||
|
||||
const apiKey = apiKeys[0]
|
||||
|
||||
// Update last_used timestamp
|
||||
await db.update(schema.apiKeys)
|
||||
.set({ lastUsed: new Date() })
|
||||
.where(eq(schema.apiKeys.id, apiKey.id))
|
||||
|
||||
return {
|
||||
id: apiKey.id,
|
||||
keyHash: apiKey.keyHash,
|
||||
name: apiKey.name,
|
||||
permissions: apiKey.permissions ? JSON.parse(apiKey.permissions) : [],
|
||||
rateLimit: apiKey.rateLimit,
|
||||
isActive: apiKey.isActive,
|
||||
lastUsed: apiKey.lastUsed,
|
||||
createdAt: apiKey.createdAt
|
||||
}
|
||||
|
||||
return apiKey
|
||||
} catch (error) {
|
||||
console.error('API key validation error:', error)
|
||||
return null
|
||||
} finally {
|
||||
db.close()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import sqlite3 from 'sqlite3'
|
||||
import path from 'path'
|
||||
import { db, schema } from './db/connection'
|
||||
|
||||
export interface AuditLogEntry {
|
||||
user_id?: number
|
||||
@@ -11,32 +10,17 @@ export interface AuditLogEntry {
|
||||
}
|
||||
|
||||
export async function logAuditEvent(entry: AuditLogEntry): Promise<void> {
|
||||
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
|
||||
const db = new sqlite3.Database(dbPath)
|
||||
|
||||
try {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
db.run(
|
||||
`INSERT INTO audit_logs (user_id, action, resource_type, resource_id, details, ip_address, created_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, datetime('now'))`,
|
||||
[
|
||||
entry.user_id || null,
|
||||
entry.action,
|
||||
entry.resource_type,
|
||||
entry.resource_id || null,
|
||||
entry.details ? JSON.stringify(entry.details) : null,
|
||||
entry.ip_address || null
|
||||
],
|
||||
(err) => {
|
||||
if (err) reject(err)
|
||||
else resolve()
|
||||
}
|
||||
)
|
||||
await db.insert(schema.auditLogs).values({
|
||||
userId: entry.user_id || null,
|
||||
action: entry.action,
|
||||
resourceType: entry.resource_type,
|
||||
resourceId: entry.resource_id || null,
|
||||
details: entry.details ? JSON.stringify(entry.details) : null,
|
||||
ipAddress: entry.ip_address || null
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Audit logging failed:', error)
|
||||
} finally {
|
||||
db.close()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,19 +1,38 @@
|
||||
import { exec } from 'child_process'
|
||||
import path from 'path'
|
||||
import { promisify } from 'util'
|
||||
import fs from 'fs/promises'
|
||||
|
||||
const execAsync = promisify(exec)
|
||||
|
||||
export async function createDatabaseBackup(): Promise<string> {
|
||||
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
|
||||
const backupPath = path.join(process.cwd(), 'backups', `backup_${Date.now()}.db`)
|
||||
if (!process.env.DATABASE_URL) {
|
||||
throw new Error('DATABASE_URL environment variable is required')
|
||||
}
|
||||
|
||||
const timestamp = Date.now()
|
||||
const backupPath = path.join(process.cwd(), 'backups', `backup_${timestamp}.sql`)
|
||||
|
||||
try {
|
||||
// Ensure backup directory exists
|
||||
await execAsync('mkdir -p backups')
|
||||
|
||||
// Create SQLite backup using .backup command
|
||||
await execAsync(`sqlite3 "${dbPath}" ".backup '${backupPath}'"`)
|
||||
// Create PostgreSQL backup using pg_dump
|
||||
// Extract database name from connection string
|
||||
const url = new URL(process.env.DATABASE_URL)
|
||||
const dbName = url.pathname.substring(1) // Remove leading slash
|
||||
const host = url.hostname
|
||||
const port = url.port || '5432'
|
||||
const username = url.username
|
||||
const password = url.password
|
||||
|
||||
// Set PGPASSWORD environment variable for pg_dump
|
||||
const env = { ...process.env, PGPASSWORD: password }
|
||||
|
||||
await execAsync(
|
||||
`pg_dump -h ${host} -p ${port} -U ${username} -d ${dbName} --no-password > "${backupPath}"`,
|
||||
{ env }
|
||||
)
|
||||
|
||||
return backupPath
|
||||
} catch (error) {
|
||||
@@ -23,14 +42,30 @@ export async function createDatabaseBackup(): Promise<string> {
|
||||
}
|
||||
|
||||
export async function restoreDatabase(backupPath: string): Promise<void> {
|
||||
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
|
||||
if (!process.env.DATABASE_URL) {
|
||||
throw new Error('DATABASE_URL environment variable is required')
|
||||
}
|
||||
|
||||
try {
|
||||
// Create backup of current DB first
|
||||
await createDatabaseBackup()
|
||||
|
||||
// Copy backup to main location
|
||||
await execAsync(`cp "${backupPath}" "${dbPath}"`)
|
||||
// Extract database connection details
|
||||
const url = new URL(process.env.DATABASE_URL)
|
||||
const dbName = url.pathname.substring(1)
|
||||
const host = url.hostname
|
||||
const port = url.port || '5432'
|
||||
const username = url.username
|
||||
const password = url.password
|
||||
|
||||
// Set PGPASSWORD environment variable for psql
|
||||
const env = { ...process.env, PGPASSWORD: password }
|
||||
|
||||
// Restore from SQL backup
|
||||
await execAsync(
|
||||
`psql -h ${host} -p ${port} -U ${username} -d ${dbName} --no-password < "${backupPath}"`,
|
||||
{ env }
|
||||
)
|
||||
|
||||
console.log('Database restored successfully')
|
||||
} catch (error) {
|
||||
@@ -41,8 +76,25 @@ export async function restoreDatabase(backupPath: string): Promise<void> {
|
||||
|
||||
export async function listBackups(): Promise<string[]> {
|
||||
try {
|
||||
const { stdout } = await execAsync('ls -la backups/*.db 2>/dev/null || true')
|
||||
return stdout.trim() ? stdout.trim().split('\n') : []
|
||||
const backupDir = path.join(process.cwd(), 'backups')
|
||||
|
||||
try {
|
||||
const files = await fs.readdir(backupDir)
|
||||
const sqlFiles = files.filter(file => file.endsWith('.sql'))
|
||||
|
||||
// Get file stats for each backup
|
||||
const backupInfo = await Promise.all(
|
||||
sqlFiles.map(async (file) => {
|
||||
const filePath = path.join(backupDir, file)
|
||||
const stats = await fs.stat(filePath)
|
||||
return `${stats.mtime.toISOString()} ${file} (${Math.round(stats.size / 1024)}KB)`
|
||||
})
|
||||
)
|
||||
|
||||
return backupInfo.sort().reverse() // Most recent first
|
||||
} catch (dirError) {
|
||||
return []
|
||||
}
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
|
||||
@@ -1,99 +1,60 @@
|
||||
import sqlite3 from 'sqlite3'
|
||||
import path from 'path'
|
||||
import { db, schema } from './db/connection'
|
||||
import { sql, count } from 'drizzle-orm'
|
||||
|
||||
export async function optimizeDatabase(): Promise<void> {
|
||||
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
|
||||
const db = new sqlite3.Database(dbPath)
|
||||
|
||||
try {
|
||||
// Create performance indexes
|
||||
const indexes = [
|
||||
'CREATE INDEX IF NOT EXISTS idx_sources_domain ON sources(domain)',
|
||||
'CREATE INDEX IF NOT EXISTS idx_sources_status_risk ON sources(status, risk_level)',
|
||||
'CREATE INDEX IF NOT EXISTS idx_sources_created ON sources(created_at)',
|
||||
'CREATE INDEX IF NOT EXISTS idx_reports_status ON reports(status)',
|
||||
'CREATE INDEX IF NOT EXISTS idx_reports_created ON reports(created_at)',
|
||||
'CREATE INDEX IF NOT EXISTS idx_api_keys_hash ON api_keys(key_hash)',
|
||||
'CREATE INDEX IF NOT EXISTS idx_audit_logs_created ON audit_logs(created_at)',
|
||||
'CREATE INDEX IF NOT EXISTS idx_source_categories ON source_categories(source_id, category_id)'
|
||||
// PostgreSQL automatically creates indexes defined in schema.ts
|
||||
// Run ANALYZE to update statistics for query optimization
|
||||
const tables = [
|
||||
'sources',
|
||||
'reports',
|
||||
'api_keys',
|
||||
'source_categories',
|
||||
'categories',
|
||||
'users'
|
||||
]
|
||||
|
||||
for (const indexQuery of indexes) {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
db.run(indexQuery, (err) => {
|
||||
if (err) reject(err)
|
||||
else resolve()
|
||||
})
|
||||
})
|
||||
for (const tableName of tables) {
|
||||
await db.execute(sql`ANALYZE ${sql.raw(tableName)}`)
|
||||
}
|
||||
|
||||
// Analyze tables for query optimization
|
||||
const analyzeTables = [
|
||||
'ANALYZE sources',
|
||||
'ANALYZE reports',
|
||||
'ANALYZE api_keys',
|
||||
'ANALYZE audit_logs',
|
||||
'ANALYZE source_categories'
|
||||
]
|
||||
|
||||
for (const analyzeQuery of analyzeTables) {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
db.run(analyzeQuery, (err) => {
|
||||
if (err) reject(err)
|
||||
else resolve()
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
// Vacuum database to optimize storage
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
db.run('VACUUM', (err) => {
|
||||
if (err) reject(err)
|
||||
else resolve()
|
||||
})
|
||||
})
|
||||
// PostgreSQL equivalent of VACUUM - VACUUM ANALYZE updates statistics and cleans up
|
||||
await db.execute(sql`VACUUM ANALYZE`)
|
||||
|
||||
console.log('Database optimization completed successfully')
|
||||
|
||||
} catch (error) {
|
||||
console.error('Database optimization failed:', error)
|
||||
throw error
|
||||
} finally {
|
||||
db.close()
|
||||
}
|
||||
}
|
||||
|
||||
export async function getDatabaseStats(): Promise<any> {
|
||||
const dbPath = path.join(process.cwd(), 'database', 'antihoax.db')
|
||||
const db = new sqlite3.Database(dbPath)
|
||||
|
||||
try {
|
||||
const stats = await new Promise<any>((resolve, reject) => {
|
||||
db.all(`
|
||||
SELECT
|
||||
name as table_name,
|
||||
COUNT(*) as row_count
|
||||
FROM (
|
||||
SELECT 'sources' as name UNION
|
||||
SELECT 'reports' as name UNION
|
||||
SELECT 'api_keys' as name UNION
|
||||
SELECT 'audit_logs' as name
|
||||
) tables
|
||||
`, (err, rows) => {
|
||||
if (err) reject(err)
|
||||
else resolve(rows)
|
||||
})
|
||||
})
|
||||
// Get row counts for each table
|
||||
const [sourcesCount] = await db.select({ count: count() }).from(schema.sources)
|
||||
const [reportsCount] = await db.select({ count: count() }).from(schema.reports)
|
||||
const [apiKeysCount] = await db.select({ count: count() }).from(schema.apiKeys)
|
||||
const [categoriesCount] = await db.select({ count: count() }).from(schema.categories)
|
||||
const [usersCount] = await db.select({ count: count() }).from(schema.users)
|
||||
const [sourceCategoriesCount] = await db.select({ count: count() }).from(schema.sourceCategories)
|
||||
|
||||
const tables = [
|
||||
{ table_name: 'sources', row_count: sourcesCount.count },
|
||||
{ table_name: 'reports', row_count: reportsCount.count },
|
||||
{ table_name: 'api_keys', row_count: apiKeysCount.count },
|
||||
{ table_name: 'categories', row_count: categoriesCount.count },
|
||||
{ table_name: 'users', row_count: usersCount.count },
|
||||
{ table_name: 'source_categories', row_count: sourceCategoriesCount.count }
|
||||
]
|
||||
|
||||
return {
|
||||
tables: stats,
|
||||
tables,
|
||||
optimized_at: new Date().toISOString()
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error('Failed to get database stats:', error)
|
||||
throw error
|
||||
} finally {
|
||||
db.close()
|
||||
}
|
||||
}
|
||||
27
lib/db/connection.ts
Normal file
27
lib/db/connection.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { drizzle } from 'drizzle-orm/node-postgres';
|
||||
import { Pool } from 'pg';
|
||||
import * as schema from './schema';
|
||||
|
||||
function createConnection() {
|
||||
if (!process.env.DATABASE_URL) {
|
||||
throw new Error('DATABASE_URL environment variable is required');
|
||||
}
|
||||
|
||||
const pool = new Pool({
|
||||
connectionString: process.env.DATABASE_URL,
|
||||
ssl: false,
|
||||
max: 10,
|
||||
idleTimeoutMillis: 30000,
|
||||
connectionTimeoutMillis: 2000
|
||||
});
|
||||
|
||||
// Test connection
|
||||
pool.on('error', (err) => {
|
||||
console.error('Unexpected error on idle client', err);
|
||||
});
|
||||
|
||||
return drizzle(pool, { schema });
|
||||
}
|
||||
|
||||
export const db = createConnection();
|
||||
export { schema };
|
||||
228
lib/db/schema.ts
Normal file
228
lib/db/schema.ts
Normal file
@@ -0,0 +1,228 @@
|
||||
import {
|
||||
pgTable,
|
||||
serial,
|
||||
varchar,
|
||||
text,
|
||||
boolean,
|
||||
integer,
|
||||
timestamp,
|
||||
decimal,
|
||||
pgEnum,
|
||||
uniqueIndex,
|
||||
index
|
||||
} from 'drizzle-orm/pg-core';
|
||||
import { relations } from 'drizzle-orm';
|
||||
|
||||
// Enums
|
||||
export const roleEnum = pgEnum('role', ['admin', 'moderator']);
|
||||
export const sourceTypeEnum = pgEnum('source_type', [
|
||||
'website', 'facebook_page', 'facebook_group', 'instagram',
|
||||
'blog', 'news_site', 'youtube', 'tiktok', 'telegram', 'other'
|
||||
]);
|
||||
export const sourceStatusEnum = pgEnum('source_status', [
|
||||
'pending', 'verified', 'rejected', 'under_review'
|
||||
]);
|
||||
export const languageEnum = pgEnum('language', ['sk', 'cs', 'en', 'other']);
|
||||
export const priorityEnum = pgEnum('priority', ['low', 'medium', 'high', 'urgent']);
|
||||
export const reportStatusEnum = pgEnum('report_status', [
|
||||
'pending', 'in_review', 'approved', 'rejected', 'duplicate'
|
||||
]);
|
||||
|
||||
// Users table
|
||||
export const users = pgTable('users', {
|
||||
id: serial('id').primaryKey(),
|
||||
email: varchar('email', { length: 255 }).notNull().unique(),
|
||||
passwordHash: varchar('password_hash', { length: 255 }).notNull(),
|
||||
name: varchar('name', { length: 100 }).notNull(),
|
||||
role: roleEnum('role').default('moderator'),
|
||||
isActive: boolean('is_active').default(true),
|
||||
lastLogin: timestamp('last_login'),
|
||||
createdAt: timestamp('created_at').defaultNow(),
|
||||
updatedAt: timestamp('updated_at').defaultNow()
|
||||
});
|
||||
|
||||
// Categories table
|
||||
export const categories = pgTable('categories', {
|
||||
id: serial('id').primaryKey(),
|
||||
name: varchar('name', { length: 100 }).notNull().unique(),
|
||||
slug: varchar('slug', { length: 100 }).notNull().unique(),
|
||||
description: text('description'),
|
||||
color: varchar('color', { length: 7 }).default('#6B7280'),
|
||||
priority: integer('priority').default(1),
|
||||
icon: varchar('icon', { length: 50 }),
|
||||
isActive: boolean('is_active').default(true),
|
||||
createdAt: timestamp('created_at').defaultNow(),
|
||||
updatedAt: timestamp('updated_at').defaultNow()
|
||||
}, (table) => {
|
||||
return {
|
||||
slugIdx: uniqueIndex('idx_categories_slug').on(table.slug),
|
||||
priorityIdx: index('idx_categories_priority').on(table.priority)
|
||||
};
|
||||
});
|
||||
|
||||
// Sources table
|
||||
export const sources = pgTable('sources', {
|
||||
id: serial('id').primaryKey(),
|
||||
url: varchar('url', { length: 1000 }).notNull().unique(),
|
||||
domain: varchar('domain', { length: 255 }).notNull(),
|
||||
title: varchar('title', { length: 500 }),
|
||||
description: text('description'),
|
||||
type: sourceTypeEnum('type').notNull(),
|
||||
status: sourceStatusEnum('status').default('pending'),
|
||||
riskLevel: integer('risk_level').default(1),
|
||||
language: languageEnum('language').default('sk'),
|
||||
evidenceUrls: text('evidence_urls'), // JSON
|
||||
reportedBy: varchar('reported_by', { length: 255 }),
|
||||
verifiedBy: integer('verified_by').references(() => users.id),
|
||||
rejectionReason: text('rejection_reason'),
|
||||
followerCount: integer('follower_count').default(0),
|
||||
lastChecked: timestamp('last_checked'),
|
||||
metadata: text('metadata').default('{}'), // JSON
|
||||
createdAt: timestamp('created_at').defaultNow(),
|
||||
updatedAt: timestamp('updated_at').defaultNow()
|
||||
}, (table) => {
|
||||
return {
|
||||
domainIdx: index('idx_sources_domain').on(table.domain),
|
||||
statusIdx: index('idx_sources_status').on(table.status),
|
||||
riskLevelIdx: index('idx_sources_risk_level').on(table.riskLevel),
|
||||
typeIdx: index('idx_sources_type').on(table.type),
|
||||
createdAtIdx: index('idx_sources_created_at').on(table.createdAt),
|
||||
verifiedByIdx: index('idx_sources_verified_by').on(table.verifiedBy),
|
||||
statusRiskIdx: index('idx_sources_status_risk').on(table.status, table.riskLevel)
|
||||
};
|
||||
});
|
||||
|
||||
// Source Categories junction table
|
||||
export const sourceCategories = pgTable('source_categories', {
|
||||
id: serial('id').primaryKey(),
|
||||
sourceId: integer('source_id').notNull().references(() => sources.id, { onDelete: 'cascade' }),
|
||||
categoryId: integer('category_id').notNull().references(() => categories.id, { onDelete: 'cascade' }),
|
||||
confidenceScore: decimal('confidence_score', { precision: 3, scale: 2 }).default('1.0'),
|
||||
addedBy: integer('added_by').references(() => users.id),
|
||||
createdAt: timestamp('created_at').defaultNow()
|
||||
}, (table) => {
|
||||
return {
|
||||
sourceIdIdx: index('idx_source_categories_source_id').on(table.sourceId),
|
||||
categoryIdIdx: index('idx_source_categories_category_id').on(table.categoryId),
|
||||
uniqueSourceCategory: uniqueIndex('unique_source_category').on(table.sourceId, table.categoryId)
|
||||
};
|
||||
});
|
||||
|
||||
// Reports table
|
||||
export const reports = pgTable('reports', {
|
||||
id: serial('id').primaryKey(),
|
||||
sourceUrl: varchar('source_url', { length: 1000 }).notNull(),
|
||||
sourceDomain: varchar('source_domain', { length: 255 }).notNull(),
|
||||
reporterEmail: varchar('reporter_email', { length: 255 }),
|
||||
reporterName: varchar('reporter_name', { length: 100 }),
|
||||
categorySuggestions: text('category_suggestions'), // JSON
|
||||
description: text('description').notNull(),
|
||||
evidenceUrls: text('evidence_urls'), // JSON
|
||||
priority: priorityEnum('priority').default('medium'),
|
||||
status: reportStatusEnum('status').default('pending'),
|
||||
assignedTo: integer('assigned_to').references(() => users.id),
|
||||
adminNotes: text('admin_notes'),
|
||||
processedAt: timestamp('processed_at'),
|
||||
ipAddress: varchar('ip_address', { length: 45 }),
|
||||
userAgent: text('user_agent'),
|
||||
createdAt: timestamp('created_at').defaultNow(),
|
||||
updatedAt: timestamp('updated_at').defaultNow()
|
||||
}, (table) => {
|
||||
return {
|
||||
statusIdx: index('idx_reports_status').on(table.status),
|
||||
sourceDomainIdx: index('idx_reports_source_domain').on(table.sourceDomain),
|
||||
priorityIdx: index('idx_reports_priority').on(table.priority),
|
||||
createdAtIdx: index('idx_reports_created_at').on(table.createdAt),
|
||||
assignedToIdx: index('idx_reports_assigned_to').on(table.assignedTo)
|
||||
};
|
||||
});
|
||||
|
||||
// API Keys table
|
||||
export const apiKeys = pgTable('api_keys', {
|
||||
id: serial('id').primaryKey(),
|
||||
keyHash: varchar('key_hash', { length: 255 }).notNull().unique(),
|
||||
name: varchar('name', { length: 100 }).notNull(),
|
||||
description: text('description'),
|
||||
ownerEmail: varchar('owner_email', { length: 255 }).notNull(),
|
||||
permissions: text('permissions').default('["read"]'), // JSON
|
||||
rateLimit: integer('rate_limit').default(1000),
|
||||
isActive: boolean('is_active').default(true),
|
||||
usageCount: integer('usage_count').default(0),
|
||||
lastUsed: timestamp('last_used'),
|
||||
expiresAt: timestamp('expires_at'),
|
||||
createdAt: timestamp('created_at').defaultNow(),
|
||||
updatedAt: timestamp('updated_at').defaultNow()
|
||||
}, (table) => {
|
||||
return {
|
||||
keyHashIdx: uniqueIndex('idx_api_keys_hash').on(table.keyHash),
|
||||
ownerIdx: index('idx_api_keys_owner').on(table.ownerEmail)
|
||||
};
|
||||
});
|
||||
|
||||
// Audit Logs table
|
||||
export const auditLogs = pgTable('audit_logs', {
|
||||
id: serial('id').primaryKey(),
|
||||
userId: integer('user_id').references(() => users.id),
|
||||
action: varchar('action', { length: 50 }).notNull(),
|
||||
resourceType: varchar('resource_type', { length: 50 }).notNull(),
|
||||
resourceId: integer('resource_id'),
|
||||
details: text('details'), // JSON
|
||||
ipAddress: varchar('ip_address', { length: 45 }),
|
||||
createdAt: timestamp('created_at').defaultNow()
|
||||
}, (table) => {
|
||||
return {
|
||||
userIdIdx: index('idx_audit_logs_user_id').on(table.userId),
|
||||
createdAtIdx: index('idx_audit_logs_created_at').on(table.createdAt),
|
||||
actionIdx: index('idx_audit_logs_action').on(table.action),
|
||||
resourceTypeIdx: index('idx_audit_logs_resource_type').on(table.resourceType)
|
||||
};
|
||||
});
|
||||
|
||||
// Relations
|
||||
export const usersRelations = relations(users, ({ many }) => ({
|
||||
verifiedSources: many(sources),
|
||||
sourceCategories: many(sourceCategories),
|
||||
assignedReports: many(reports),
|
||||
auditLogs: many(auditLogs)
|
||||
}));
|
||||
|
||||
export const categoriesRelations = relations(categories, ({ many }) => ({
|
||||
sourceCategories: many(sourceCategories)
|
||||
}));
|
||||
|
||||
export const sourcesRelations = relations(sources, ({ one, many }) => ({
|
||||
verifiedBy: one(users, {
|
||||
fields: [sources.verifiedBy],
|
||||
references: [users.id]
|
||||
}),
|
||||
sourceCategories: many(sourceCategories)
|
||||
}));
|
||||
|
||||
export const sourceCategoriesRelations = relations(sourceCategories, ({ one }) => ({
|
||||
source: one(sources, {
|
||||
fields: [sourceCategories.sourceId],
|
||||
references: [sources.id]
|
||||
}),
|
||||
category: one(categories, {
|
||||
fields: [sourceCategories.categoryId],
|
||||
references: [categories.id]
|
||||
}),
|
||||
addedBy: one(users, {
|
||||
fields: [sourceCategories.addedBy],
|
||||
references: [users.id]
|
||||
})
|
||||
}));
|
||||
|
||||
export const reportsRelations = relations(reports, ({ one }) => ({
|
||||
assignedTo: one(users, {
|
||||
fields: [reports.assignedTo],
|
||||
references: [users.id]
|
||||
})
|
||||
}));
|
||||
|
||||
export const auditLogsRelations = relations(auditLogs, ({ one }) => ({
|
||||
user: one(users, {
|
||||
fields: [auditLogs.userId],
|
||||
references: [users.id]
|
||||
})
|
||||
}));
|
||||
98
lib/db/seed.ts
Normal file
98
lib/db/seed.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import { db } from './connection';
|
||||
import { users, categories, sources, apiKeys, sourceCategories } from './schema';
|
||||
import * as bcrypt from 'bcryptjs';
|
||||
|
||||
export async function seedDatabase() {
|
||||
try {
|
||||
console.log('🌱 Seeding database...');
|
||||
|
||||
// Insert categories one by one
|
||||
const categoryData = [
|
||||
{ name: 'Hoax', slug: 'hoax', description: 'Šírenie nepravdivých informácií a hoaxov', color: '#EF4444', priority: 5, icon: 'AlertTriangle' },
|
||||
{ name: 'Hate Speech', slug: 'hate-speech', description: 'Nenávistné prejavy proti skupinám ľudí', color: '#DC2626', priority: 5, icon: 'MessageSquareX' },
|
||||
{ name: 'Violence', slug: 'violence', description: 'Povzbudzovanie k násiliu', color: '#B91C1C', priority: 5, icon: 'Sword' },
|
||||
{ name: 'Conspiracy', slug: 'conspiracy', description: 'Konšpiračné teórie', color: '#F59E0B', priority: 3, icon: 'Eye' },
|
||||
{ name: 'Propaganda', slug: 'propaganda', description: 'Politická propaganda a manipulácia', color: '#D97706', priority: 2, icon: 'Megaphone' }
|
||||
];
|
||||
|
||||
const insertedCategories = [];
|
||||
for (const cat of categoryData) {
|
||||
const result = await db.insert(categories).values(cat).returning();
|
||||
insertedCategories.push(result[0]);
|
||||
console.log(`✅ Inserted category: ${cat.name}`);
|
||||
}
|
||||
|
||||
// Insert admin user
|
||||
const hashedPassword = await bcrypt.hash('admin123', 12);
|
||||
const insertedUsers = await db.insert(users).values({
|
||||
email: 'admin@antihoax.sk',
|
||||
passwordHash: hashedPassword,
|
||||
name: 'System Admin',
|
||||
role: 'admin'
|
||||
}).returning();
|
||||
console.log(`✅ Inserted user: ${insertedUsers[0].name}`);
|
||||
|
||||
// Insert example sources
|
||||
const sourceData = [
|
||||
{
|
||||
url: 'https://example-hoax-site.com',
|
||||
domain: 'example-hoax-site.com',
|
||||
title: 'Example Hoax Site',
|
||||
description: 'Príklad hoax stránky pre testovanie',
|
||||
type: 'website' as const,
|
||||
status: 'verified' as const,
|
||||
riskLevel: 5,
|
||||
language: 'sk' as const,
|
||||
reportedBy: 'test@example.com',
|
||||
verifiedBy: insertedUsers[0].id,
|
||||
followerCount: 1500,
|
||||
metadata: JSON.stringify({ tags: ['test', 'example'] })
|
||||
},
|
||||
{
|
||||
url: 'https://example-conspiracy.com',
|
||||
domain: 'example-conspiracy.com',
|
||||
title: 'Conspiracy Theory Site',
|
||||
description: 'Stránka šíriaca konšpiračné teórie',
|
||||
type: 'blog' as const,
|
||||
status: 'verified' as const,
|
||||
riskLevel: 3,
|
||||
language: 'sk' as const,
|
||||
reportedBy: 'reporter@example.com',
|
||||
verifiedBy: insertedUsers[0].id,
|
||||
followerCount: 850,
|
||||
metadata: JSON.stringify({ tags: ['conspiracy', 'politics'] })
|
||||
}
|
||||
];
|
||||
|
||||
const insertedSources = [];
|
||||
for (const src of sourceData) {
|
||||
const result = await db.insert(sources).values(src).returning();
|
||||
insertedSources.push(result[0]);
|
||||
console.log(`✅ Inserted source: ${src.title}`);
|
||||
}
|
||||
|
||||
// Link sources with categories
|
||||
await db.insert(sourceCategories).values({
|
||||
sourceId: insertedSources[0].id,
|
||||
categoryId: insertedCategories[0].id, // Hoax
|
||||
confidenceScore: '1.0',
|
||||
addedBy: insertedUsers[0].id
|
||||
});
|
||||
|
||||
await db.insert(sourceCategories).values({
|
||||
sourceId: insertedSources[1].id,
|
||||
categoryId: insertedCategories[3].id, // Conspiracy
|
||||
confidenceScore: '0.9',
|
||||
addedBy: insertedUsers[0].id
|
||||
});
|
||||
|
||||
console.log(`✅ Linked sources with categories`);
|
||||
|
||||
console.log('🎉 Database seeded successfully!');
|
||||
console.log('📧 Admin login: admin@antihoax.sk / admin123');
|
||||
|
||||
} catch (error) {
|
||||
console.error('❌ Error seeding database:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user