Enhanced domain extraction and bulk import functionality
This commit is contained in:
94
pages/admin/bulk-import.tsx
Normal file
94
pages/admin/bulk-import.tsx
Normal file
@@ -0,0 +1,94 @@
|
||||
import { useState } from "react"
|
||||
import type { NextPage } from "next"
|
||||
import Head from "next/head"
|
||||
import Link from "next/link"
|
||||
|
||||
const BulkImport: NextPage = () => {
|
||||
const [importData, setImportData] = useState("")
|
||||
const [loading, setLoading] = useState(false)
|
||||
const [result, setResult] = useState<any>(null)
|
||||
|
||||
const handleImport = async () => {
|
||||
if (!importData.trim()) return
|
||||
|
||||
setLoading(true)
|
||||
try {
|
||||
const lines = importData.trim().split('\n')
|
||||
const sources = lines.map(line => {
|
||||
const [domain, risk_level] = line.split(',')
|
||||
return {
|
||||
domain: domain?.trim(),
|
||||
risk_level: parseInt(risk_level?.trim()) || 3
|
||||
}
|
||||
}).filter(s => s.domain)
|
||||
|
||||
const response = await fetch('/api/admin/bulk-import', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ sources })
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
setResult(data)
|
||||
} catch (error) {
|
||||
setResult({ error: 'Import failed' })
|
||||
}
|
||||
setLoading(false)
|
||||
}
|
||||
|
||||
return (
|
||||
<div>
|
||||
<Head>
|
||||
<title>Bulk Import - Infohliadka</title>
|
||||
</Head>
|
||||
|
||||
<div style={{ padding: '20px' }}>
|
||||
<div style={{ marginBottom: '20px' }}>
|
||||
<Link href="/admin">← Back to Admin</Link>
|
||||
</div>
|
||||
|
||||
<h1>Bulk Import Sources</h1>
|
||||
<p>Import multiple sources at once. Format: domain,risk_level (one per line)</p>
|
||||
|
||||
<textarea
|
||||
value={importData}
|
||||
onChange={(e) => setImportData(e.target.value)}
|
||||
placeholder="example.com,4 badsite.sk,5 spam.org,2"
|
||||
rows={10}
|
||||
style={{ width: '100%', marginBottom: '10px' }}
|
||||
/>
|
||||
|
||||
<button
|
||||
onClick={handleImport}
|
||||
disabled={loading || !importData.trim()}
|
||||
style={{
|
||||
padding: '10px 20px',
|
||||
backgroundColor: loading ? '#ccc' : '#007bff',
|
||||
color: 'white',
|
||||
border: 'none',
|
||||
borderRadius: '4px'
|
||||
}}
|
||||
>
|
||||
{loading ? 'Importing...' : 'Import Sources'}
|
||||
</button>
|
||||
|
||||
{result && (
|
||||
<div style={{ marginTop: '20px', padding: '15px', backgroundColor: '#f8f9fa', border: '1px solid #ddd' }}>
|
||||
<h3>Import Result:</h3>
|
||||
{result.error ? (
|
||||
<p style={{ color: 'red' }}>Error: {result.error}</p>
|
||||
) : (
|
||||
<div>
|
||||
<p>Total processed: {result.total}</p>
|
||||
<p>Successfully imported: {result.imported}</p>
|
||||
<p>Skipped (duplicates/invalid): {result.skipped}</p>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default BulkImport
|
||||
80
pages/api/admin/bulk-import.ts
Normal file
80
pages/api/admin/bulk-import.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
import type { NextApiRequest, NextApiResponse } from "next"
|
||||
import sqlite3 from "sqlite3"
|
||||
import path from "path"
|
||||
|
||||
interface BulkImportItem {
|
||||
domain: string
|
||||
risk_level: number
|
||||
categories: string[]
|
||||
description?: string
|
||||
}
|
||||
|
||||
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
|
||||
if (req.method !== "POST") return res.status(405).json({ error: "Method not allowed" })
|
||||
|
||||
const { sources } = req.body as { sources: BulkImportItem[] }
|
||||
|
||||
if (!sources || !Array.isArray(sources)) {
|
||||
return res.status(400).json({ error: "Sources array required" })
|
||||
}
|
||||
|
||||
const dbPath = path.join(process.cwd(), "database", "antihoax.db")
|
||||
const db = new sqlite3.Database(dbPath)
|
||||
|
||||
try {
|
||||
let imported = 0
|
||||
let skipped = 0
|
||||
|
||||
for (const source of sources) {
|
||||
if (!source.domain || !source.risk_level) {
|
||||
skipped++
|
||||
continue
|
||||
}
|
||||
|
||||
// Check if domain already exists
|
||||
const existing = await new Promise<any>((resolve, reject) => {
|
||||
db.get(
|
||||
"SELECT id FROM sources WHERE domain = ?",
|
||||
[source.domain],
|
||||
(err, row) => {
|
||||
if (err) reject(err)
|
||||
else resolve(row)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
if (existing) {
|
||||
skipped++
|
||||
continue
|
||||
}
|
||||
|
||||
// Insert new source
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
db.run(
|
||||
`INSERT INTO sources (domain, title, risk_level, status, description, created_at)
|
||||
VALUES (?, ?, ?, 'verified', ?, datetime('now'))`,
|
||||
[source.domain, source.domain, source.risk_level, source.description || ''],
|
||||
function(err) {
|
||||
if (err) reject(err)
|
||||
else resolve()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
imported++
|
||||
}
|
||||
|
||||
res.json({
|
||||
success: true,
|
||||
imported,
|
||||
skipped,
|
||||
total: sources.length
|
||||
})
|
||||
|
||||
} catch (error) {
|
||||
console.error('Bulk import error:', error)
|
||||
res.status(500).json({ error: "Import failed" })
|
||||
} finally {
|
||||
db.close()
|
||||
}
|
||||
}
|
||||
@@ -25,6 +25,20 @@ function extractDomain(url: string): string {
|
||||
domain = domain.replace(/^www\./, '')
|
||||
domain = domain.replace(/^m\./, '')
|
||||
domain = domain.replace(/^mobile\./, '')
|
||||
domain = domain.replace(/^amp\./, '')
|
||||
|
||||
// Handle subdomains for known patterns - extract main domain for common TLDs
|
||||
if (domain.includes('.')) {
|
||||
const parts = domain.split('.')
|
||||
if (parts.length > 2) {
|
||||
const tld = parts[parts.length - 1]
|
||||
const sld = parts[parts.length - 2]
|
||||
// Extract main domain for common TLDs
|
||||
if (['com', 'org', 'net', 'sk', 'cz', 'hu', 'pl', 'eu', 'info'].includes(tld)) {
|
||||
domain = `${sld}.${tld}`
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return domain
|
||||
} catch {
|
||||
|
||||
Reference in New Issue
Block a user