Files
crawler/main.go
2026-02-01 19:05:50 -05:00

77 lines
2.0 KiB
Go

package main
import (
"fmt"
"os"
"os/signal"
"syscall"
)
// main wires up the crawler, launches its background loops, and blocks
// until SIGINT/SIGTERM, then closes the crawler cleanly.
func main() {
	// Connection string comes from the environment (DATABASE_URL or DB_* vars);
	// the empty argument tells NewCrawler to fall back to those.
	crawler, err := NewCrawler("")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error initializing crawler: %v\n", err)
		os.Exit(1)
	}

	// Graceful shutdown: buffer of 1 so a signal delivered before the
	// final receive below is not dropped.
	sigChan := make(chan os.Signal, 1)
	signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM)

	// Dashboard HTTP server runs in the background for the process lifetime.
	go func() {
		if err := crawler.StartDashboard("0.0.0.0:4321"); err != nil {
			fmt.Fprintf(os.Stderr, "Dashboard error: %v\n", err)
		}
	}()

	// Initialize stats in background (can be slow with large DBs).
	go crawler.UpdateStats()

	fmt.Println("Starting import and processing loops...")

	// NOTE(review): all loops below are fire-and-forget — they have no
	// context/WaitGroup, so they are simply abandoned at process exit.
	// That appears intentional (Close checkpoints the DB), but confirm the
	// loops tolerate being killed mid-iteration.

	// Import loop - imports .com domains from vertices.txt.gz.
	go crawler.ImportDomainsInBackground("vertices.txt.gz")
	// Test domains, added in addition to the imported set.
	go crawler.ImportTestDomains([]string{
		"news.ycombinator.com",
		"ycombinator.com",
	})
	// feed_check loop - checks feeds for new items.
	go crawler.StartFeedCheckLoop()
	// Stats loop - updates once per minute.
	go crawler.StartStatsLoop()
	// Cleanup loop - removes old items once per week.
	go crawler.StartCleanupLoop()
	// Maintenance loop - WAL checkpoints and integrity checks.
	go crawler.StartMaintenanceLoop()
	// TLD sync loop - syncs with IANA, marks dead TLDs, adds new ones.
	go crawler.startTLDSyncLoop()
	// Publish loop - autopublishes items for approved feeds.
	go crawler.StartPublishLoop()
	// Domain loop - domain_check + feed_crawl.
	go crawler.StartDomainLoop()

	// Block until a termination signal arrives.
	sig := <-sigChan
	fmt.Printf("\nReceived %v, shutting down gracefully...\n", sig)

	// Close crawler (checkpoints WAL and closes database).
	if err := crawler.Close(); err != nil {
		fmt.Fprintf(os.Stderr, "Error closing crawler: %v\n", err)
		os.Exit(1)
	}
	fmt.Println("Shutdown complete")
}