Phase 5: Remove dashboard code from crawler
Removed dashboard-related files (now in the standalone dashboard/ service):
- api_domains.go, api_feeds.go, api_publish.go, api_search.go
- dashboard.go, templates.go
- oauth.go, oauth_handlers.go, oauth_middleware.go, oauth_session.go
- routes.go
- static/dashboard.css, static/dashboard.js

Updated crawler.go (see the sketch below):
- Removed the cachedStats, cachedAllDomains, and statsMu fields
- Removed the StartStatsLoop function

Updated main.go:
- Removed the dashboard startup
- Removed the stats loop and the UpdateStats calls

The crawler now runs independently of the dashboard. Use the standalone dashboard/ service for the web interface.
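For context, the sketch below shows roughly what the removed stats caching in crawler.go looked like. It is a minimal, hypothetical reconstruction: the cachedStats, cachedAllDomains, and statsMu field names, the StartStatsLoop and UpdateStats names, and the once-per-minute refresh come from this commit, but the Stats type, the field types, and the loop body are assumptions, not the project's actual code.

package main

import (
    "fmt"
    "sync"
    "time"
)

// Stats is a hypothetical stand-in; the real type is not shown in this commit.
type Stats struct {
    Domains int
    Feeds   int
    Items   int
}

// Crawler shows only the dashboard-related fields that this commit removes.
type Crawler struct {
    // ... the crawler's real fields (DB handles, config, etc.) are omitted ...

    statsMu          sync.RWMutex // guarded the cached dashboard data
    cachedStats      Stats
    cachedAllDomains []string
}

// UpdateStats recomputes the cached values; the real database queries are
// replaced with placeholders here.
func (c *Crawler) UpdateStats() {
    c.statsMu.Lock()
    defer c.statsMu.Unlock()
    c.cachedStats = Stats{}         // placeholder for real query results
    c.cachedAllDomains = []string{} // placeholder for real query results
}

// StartStatsLoop (removed by this commit) refreshed the cache once per minute.
func (c *Crawler) StartStatsLoop() {
    for {
        c.UpdateStats()
        time.Sleep(time.Minute)
    }
}

func main() {
    c := &Crawler{}
    go c.StartStatsLoop()
    time.Sleep(2 * time.Second) // let the demo loop run briefly before exiting
    c.statsMu.RLock()
    fmt.Println("stats cached:", c.cachedStats)
    c.statsMu.RUnlock()
}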
main.go
@@ -19,16 +19,6 @@ func main() {
     sigChan := make(chan os.Signal, 1)
     signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM)
 
-    // Start dashboard in background
-    go func() {
-        if err := crawler.StartDashboard("0.0.0.0:4321"); err != nil {
-            fmt.Fprintf(os.Stderr, "Dashboard error: %v\n", err)
-        }
-    }()
-
-    // Initialize stats in background (can be slow with large DBs)
-    go crawler.UpdateStats()
-
     // Start all loops independently
     fmt.Println("Starting import and processing loops...")
 
@@ -44,9 +34,6 @@ func main() {
     // feed_check loop (background) - checks feeds for new items
     go crawler.StartFeedCheckLoop()
 
-    // Stats loop (background) - updates once per minute
-    go crawler.StartStatsLoop()
-
     // Cleanup loop (background) - removes old items once per week
     go crawler.StartCleanupLoop()
 
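After both hunks apply, the startup section of main.go reduces to the context lines kept above. The self-contained sketch below illustrates that shape; the Crawler stub, its loop bodies, the constructor call, and the final wait on sigChan are assumptions added so the snippet compiles on its own, while the signal setup, the printed message, and the StartFeedCheckLoop and StartCleanupLoop calls come from the diff.

package main

import (
    "fmt"
    "os"
    "os/signal"
    "syscall"
    "time"
)

// Crawler is a placeholder for the project's real crawler type.
type Crawler struct{}

// StartFeedCheckLoop stands in for the real feed-checking loop.
func (c *Crawler) StartFeedCheckLoop() {
    for {
        // check feeds for new items (real logic omitted)
        time.Sleep(time.Minute)
    }
}

// StartCleanupLoop stands in for the real cleanup loop.
func (c *Crawler) StartCleanupLoop() {
    for {
        // remove old items once per week (real logic omitted)
        time.Sleep(7 * 24 * time.Hour)
    }
}

func main() {
    crawler := &Crawler{} // the real constructor is not shown in this diff

    sigChan := make(chan os.Signal, 1)
    signal.Notify(sigChan, syscall.SIGINT, syscall.SIGTERM)

    // Start all loops independently -- no dashboard, no stats loop.
    fmt.Println("Starting import and processing loops...")

    // ... import and processing loops between the two hunks are omitted ...

    // feed_check loop (background) - checks feeds for new items
    go crawler.StartFeedCheckLoop()

    // Cleanup loop (background) - removes old items once per week
    go crawler.StartCleanupLoop()

    // Block until SIGINT/SIGTERM arrives (how the real main waits is assumed).
    <-sigChan
    fmt.Println("Shutting down")
}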