package main

import (
	"fmt"
	"time"
)

// DashboardStats holds all statistics for the dashboard
type DashboardStats struct {
	// Domain stats
	TotalDomains int `json:"total_domains"`
	HoldDomains  int `json:"hold_domains"`
	PassDomains  int `json:"pass_domains"`
	SkipDomains  int `json:"skip_domains"`

	// Feed stats
	TotalFeeds   int `json:"total_feeds"`
	RSSFeeds     int `json:"rss_feeds"`
	AtomFeeds    int `json:"atom_feeds"`
	UnknownFeeds int `json:"unknown_feeds"`

	// Crawl progress
	HostsProcessed int32 `json:"hosts_processed"`
	CrawlRate      int   `json:"crawl_rate"` // crawls per minute
	CheckRate      int   `json:"check_rate"` // feed checks per minute

	// Timing
	UpdatedAt time.Time `json:"updated_at"`
}

type TLDStat struct {
	TLD   string `json:"tld"`
	Count int    `json:"count"`
}

type RecentFeed struct {
	URL          string    `json:"url"`
	Title        string    `json:"title"`
	Type         string    `json:"type"`
	DiscoveredAt time.Time `json:"discovered_at"`
}

type DomainStat struct {
	Host       string `json:"host"`
	FeedsFound int    `json:"feeds_found"`
}

// commaFormat formats an integer with comma separators
func commaFormat(n int) string {
	s := fmt.Sprintf("%d", n)
	if len(s) <= 3 {
		return s
	}
	var result []byte
	for i, c := range s {
		if i > 0 && (len(s)-i)%3 == 0 {
			result = append(result, ',')
		}
		result = append(result, byte(c))
	}
	return string(result)
}

// UpdateStats recalculates and caches dashboard statistics
func (c *Crawler) UpdateStats() {
	fmt.Println("UpdateStats: calculating stats...")
	stats, err := c.calculateStats()
	if err != nil {
		fmt.Printf("UpdateStats: error calculating stats: %v\n", err)
		return
	}

	// Cache all domains with feeds (runs in background, so slow query is OK)
	fmt.Println("UpdateStats: fetching all domains...")
	allDomains := c.fetchAllDomainsFromDB()
	fmt.Printf("UpdateStats: got %d domains\n", len(allDomains))

	c.statsMu.Lock()
	c.cachedStats = stats
	c.cachedAllDomains = allDomains
	c.statsMu.Unlock()
	fmt.Println("UpdateStats: complete")
}

func (c *Crawler) fetchAllDomainsFromDB() []DomainStat {
	rows, err := c.db.Query(`
		SELECT tld, source_host, COUNT(*) as cnt
		FROM feeds
		GROUP BY tld, source_host
		ORDER BY tld, source_host
	`)
	if err != nil {
		fmt.Printf("fetchAllDomainsFromDB error: %v\n", err)
		return nil
	}
	defer rows.Close()

	var domains []DomainStat
	for rows.Next() {
		var ds DomainStat
		var tld string
		if err := rows.Scan(&tld, &ds.Host, &ds.FeedsFound); err != nil {
			continue
		}
		domains = append(domains, ds)
	}
	return domains
}

// GetDashboardStats returns cached statistics (returns empty stats if not yet cached)
func (c *Crawler) GetDashboardStats() (*DashboardStats, error) {
	c.statsMu.RLock()
	stats := c.cachedStats
	c.statsMu.RUnlock()

	if stats != nil {
		return stats, nil
	}

	// Return empty stats while background calculation runs (don't block HTTP requests)
	return &DashboardStats{UpdatedAt: time.Now()}, nil
}
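// UpdateStats and GetDashboardStats form a simple read-through cache:
// UpdateStats is intended to run from a background goroutine, while
// GetDashboardStats is safe to call from request handlers and never touches
// the database. A minimal wiring sketch, assuming c is the running *Crawler
// (the 30-second interval and the surrounding goroutine are illustrative
// assumptions, not part of this file):
//
//	go func() {
//		ticker := time.NewTicker(30 * time.Second)
//		defer ticker.Stop()
//		c.UpdateStats() // prime the cache once at startup
//		for range ticker.C {
//			c.UpdateStats()
//		}
//	}()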
// calculateStats collects all statistics for the dashboard
func (c *Crawler) calculateStats() (*DashboardStats, error) {
	stats := &DashboardStats{
		UpdatedAt:      time.Now(),
		HostsProcessed: c.hostsProcessed,
	}

	// Calculate crawl rate (crawls per minute), smoothed by +/-1 per update
	elapsed := time.Since(c.startTime).Minutes()
	if elapsed > 0 {
		actualRate := int(float64(c.hostsProcessed) / elapsed)
		if actualRate > c.displayedCrawlRate {
			c.displayedCrawlRate++
		} else if actualRate < c.displayedCrawlRate {
			c.displayedCrawlRate--
		}
		stats.CrawlRate = c.displayedCrawlRate

		// Calculate check rate (feed checks per minute), smoothed by +/-1 per update
		actualCheckRate := int(float64(c.feedsChecked) / elapsed)
		if actualCheckRate > c.displayedCheckRate {
			c.displayedCheckRate++
		} else if actualCheckRate < c.displayedCheckRate {
			c.displayedCheckRate--
		}
		stats.CheckRate = c.displayedCheckRate
	}

	// Get domain stats
	if err := c.collectDomainStats(stats); err != nil {
		return nil, err
	}

	// Get feed stats
	if err := c.collectFeedStats(stats); err != nil {
		return nil, err
	}

	return stats, nil
}

func (c *Crawler) collectDomainStats(stats *DashboardStats) error {
	// Use COUNT(*) for total count
	err := c.db.QueryRow("SELECT COUNT(*) FROM domains").Scan(&stats.TotalDomains)
	if err != nil {
		return err
	}

	// Single query to get all status counts (one index scan instead of three)
	rows, err := c.db.Query("SELECT status, COUNT(*) FROM domains GROUP BY status")
	if err != nil {
		return err
	}
	defer rows.Close()

	for rows.Next() {
		var status string
		var count int
		if err := rows.Scan(&status, &count); err != nil {
			continue
		}
		switch status {
		case "hold":
			stats.HoldDomains = count
		case "pass":
			stats.PassDomains = count
		case "skip":
			stats.SkipDomains = count
		}
	}
	return rows.Err()
}

func (c *Crawler) collectFeedStats(stats *DashboardStats) error {
	// Use COUNT(*) for total count
	err := c.db.QueryRow("SELECT COUNT(*) FROM feeds").Scan(&stats.TotalFeeds)
	if err != nil {
		return err
	}

	// Single query to get all type counts (one index scan instead of three)
	rows, err := c.db.Query("SELECT type, COUNT(*) FROM feeds GROUP BY type")
	if err != nil {
		return err
	}
	defer rows.Close()

	for rows.Next() {
		// type can be NULL in the database, so scan into a *string
		var feedType *string
		var count int
		if err := rows.Scan(&feedType, &count); err != nil {
			continue
		}
		if feedType == nil {
			stats.UnknownFeeds += count
		} else {
			switch *feedType {
			case "rss":
				stats.RSSFeeds = count
			case "atom":
				stats.AtomFeeds = count
			default:
				stats.UnknownFeeds += count
			}
		}
	}
	return rows.Err()
}
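// Example of consuming the cached stats from an HTTP handler, relying on the
// json tags on DashboardStats (illustrative sketch only; the handler name and
// the use of net/http and encoding/json are assumptions, not part of this file):
//
//	func (c *Crawler) handleDashboardStats(w http.ResponseWriter, r *http.Request) {
//		stats, err := c.GetDashboardStats() // never blocks; may be empty until the first UpdateStats run
//		if err != nil {
//			http.Error(w, err.Error(), http.StatusInternalServerError)
//			return
//		}
//		w.Header().Set("Content-Type", "application/json")
//		json.NewEncoder(w).Encode(stats)
//	}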