crawler/main.go

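main.go is the crawler's entry point: it ensures the feeds directory exists, opens the feeds database, starts a dashboard server, and runs the import, check, stats, and cleanup loops as background goroutines while the crawl loop holds the main goroutine.
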
package main

import (
	"fmt"
	"os"
)

func main() {
	// Ensure feeds directory exists
	if err := os.MkdirAll("feeds", 0755); err != nil {
		fmt.Fprintf(os.Stderr, "Error creating feeds directory: %v\n", err)
		os.Exit(1)
	}

	crawler, err := NewCrawler("feeds/feeds.db")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error initializing crawler: %v\n", err)
		os.Exit(1)
	}
	defer crawler.Close()

	// Start dashboard in background
	go func() {
		if err := crawler.StartDashboard("0.0.0.0:4321"); err != nil {
			fmt.Fprintf(os.Stderr, "Dashboard error: %v\n", err)
		}
	}()

	// Initialize stats in background (can be slow with large DBs)
	go crawler.UpdateStats()

	// Start all loops independently
	fmt.Println("Starting import, crawl, check, and stats loops...")

	// Import loop (background)
	go crawler.ImportDomainsInBackground("vertices.txt.gz")

	// Check loop (background)
	go crawler.StartCheckLoop()

	// Stats loop (background) - updates once per minute
	go crawler.StartStatsLoop()

	// Cleanup loop (background) - removes old items once per hour
	go crawler.StartCleanupLoop()

	// Crawl loop (foreground - blocks forever)
	crawler.StartCrawlLoop()
}
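The loop methods referenced above live elsewhere in the package and are not included in this listing. Judging by the comments ("updates once per minute", "removes old items once per hour"), each is plausibly a time.Ticker goroutine. The sketch below is a hypothetical reconstruction under that assumption, not the project's actual code: Crawler and UpdateStats come from the file above, while CleanupOldItems is an invented placeholder for whatever the real cleanup work is.

// Hypothetical sketch of the periodic-loop pattern that main.go's
// comments suggest; the real StartStatsLoop and StartCleanupLoop are
// defined in files not shown here and may differ.
package main

import "time"

// StartStatsLoop re-runs UpdateStats once per minute, matching the
// "updates once per minute" comment in main.go.
func (c *Crawler) StartStatsLoop() {
	ticker := time.NewTicker(time.Minute)
	defer ticker.Stop()
	for range ticker.C {
		c.UpdateStats()
	}
}

// StartCleanupLoop removes old items once per hour. CleanupOldItems is
// an assumed helper name, not confirmed by the listing.
func (c *Crawler) StartCleanupLoop() {
	ticker := time.NewTicker(time.Hour)
	defer ticker.Stop()
	for range ticker.C {
		c.CleanupOldItems()
	}
}

One design note on main.go itself: because StartCrawlLoop runs in the foreground and blocks forever, the deferred crawler.Close() only executes if that loop ever returns; long-running daemons like this typically rely on process exit (or a signal handler) to release the database.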