// crawler/crawler.go
package main

import (
	"database/sql"
	"fmt"
	"io"
	"net/http"
	"runtime"
	"strings"
	"sync"
	"sync/atomic"
	"time"

	"golang.org/x/net/html"
)

// Crawler discovers feeds by crawling domains and periodically re-checks
// the feeds it has found.
type Crawler struct {
	MaxDepth        int
	MaxPagesPerHost int
	Timeout         time.Duration
	UserAgent       string

	visited sync.Map // pages crawled across all hosts, for global dedup
	feedsMu sync.Mutex
	client  *http.Client

	hostsProcessed int32 // updated atomically by the workers
	feedsChecked   int32 // updated atomically by the workers
	startTime      time.Time
	db             *sql.DB

	// Dashboard display rates; written by the crawl/check loops.
	displayedCrawlRate int
	displayedCheckRate int
	domainsImported    int32

	// statsMu guards the cached dashboard data below it.
	cachedStats      *DashboardStats
	cachedAllDomains []DomainStat
	statsMu          sync.RWMutex
}

// NewCrawler opens the database at dbPath and returns a crawler with
// default limits.
func NewCrawler(dbPath string) (*Crawler, error) {
	db, err := OpenDatabase(dbPath)
	if err != nil {
		return nil, fmt.Errorf("failed to open database: %w", err)
	}
	return &Crawler{
		MaxDepth:        10,
		MaxPagesPerHost: 10,
		Timeout:         10 * time.Second,
		UserAgent:       "FeedCrawler/1.0",
		startTime:       time.Now(),
		db:              db,
		client: &http.Client{
			Timeout: 10 * time.Second,
			CheckRedirect: func(req *http.Request, via []*http.Request) error {
				if len(via) >= 10 {
					return fmt.Errorf("stopped after 10 redirects")
				}
				return nil
			},
		},
	}, nil
}
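
// Example wiring (a minimal sketch; assumes a main package that drives the
// crawler — everything named below exists in this file, but the wiring
// itself is hypothetical):
//
//	c, err := NewCrawler("crawler.db")
//	if err != nil {
//		log.Fatal(err)
//	}
//	defer c.Close()
//	go c.StartStatsLoop()
//	go c.StartCleanupLoop()
//	go c.StartCrawlLoop()
//	c.StartCheckLoop() // block on one loop to keep the process alive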

// Close releases the database handle.
func (c *Crawler) Close() error {
	if c.db != nil {
		return c.db.Close()
	}
	return nil
}

// StartStatsLoop updates cached stats once per minute. Run it in its own
// goroutine; it never returns.
func (c *Crawler) StartStatsLoop() {
	for {
		c.UpdateStats()
		time.Sleep(1 * time.Minute)
	}
}

// StartCleanupLoop runs item cleanup once per week.
func (c *Crawler) StartCleanupLoop() {
	for {
		deleted, err := c.CleanupOldItems()
		if err != nil {
			fmt.Printf("Cleanup error: %v\n", err)
		} else if deleted > 0 {
			fmt.Printf("Cleanup: removed %d old items\n", deleted)
		}
		time.Sleep(7 * 24 * time.Hour)
	}
}

// StartCrawlLoop runs the domain crawling loop independently: a pool of
// workers (one per CPU) drains a buffered channel that the loop refills
// in batches.
func (c *Crawler) StartCrawlLoop() {
	numWorkers := runtime.NumCPU()
	if numWorkers < 1 {
		numWorkers = 1
	}
	// Buffered channel for domain work.
	workChan := make(chan *Domain, 256)
	// Start workers.
	for i := 0; i < numWorkers; i++ {
		go func() {
			for domain := range workChan {
				feedsFound, crawlErr := c.crawlHost(domain.Host)
				errStr := ""
				if crawlErr != nil {
					errStr = crawlErr.Error()
				}
				if err := c.markDomainCrawled(domain.Host, feedsFound, errStr); err != nil {
					fmt.Printf("Error marking domain %s as crawled: %v\n", domain.Host, err)
				}
			}
		}()
	}
	const fetchSize = 100
	for {
		domains, err := c.GetUncheckedDomainsRandom(fetchSize)
		if err != nil {
			fmt.Printf("Error fetching domains: %v\n", err)
			time.Sleep(1 * time.Second)
			continue
		}
		if len(domains) == 0 {
			c.displayedCrawlRate = 0
			time.Sleep(1 * time.Second)
			continue
		}
		fmt.Printf("%s crawl: %d domains to check\n", time.Now().Format("15:04:05"), len(domains))
		for _, domain := range domains {
			workChan <- domain
		}
		time.Sleep(1 * time.Second)
	}
}
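
// Note: because the loop above re-queries every second, a domain may be
// fetched again while it is still queued in workChan, since
// GetUncheckedDomainsRandom only sees domains not yet marked crawled. The
// global visited map in crawlPage is what keeps such duplicate work cheap.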

// StartCheckLoop runs the feed checking loop independently, using the same
// worker-pool pattern as StartCrawlLoop.
func (c *Crawler) StartCheckLoop() {
	numWorkers := runtime.NumCPU()
	if numWorkers < 1 {
		numWorkers = 1
	}
	// Buffered channel for feed work.
	workChan := make(chan *Feed, 256)
	// Start workers.
	for i := 0; i < numWorkers; i++ {
		go func() {
			for feed := range workChan {
				c.CheckFeed(feed)
			}
		}()
	}
	const fetchSize = 100
	for {
		feeds, err := c.GetFeedsDueForCheck(fetchSize)
		if err != nil {
			fmt.Printf("Error fetching feeds: %v\n", err)
			time.Sleep(1 * time.Second)
			continue
		}
		if len(feeds) == 0 {
			c.displayedCheckRate = 0
			time.Sleep(1 * time.Second)
			continue
		}
		fmt.Printf("%s check: %d feeds to check\n", time.Now().Format("15:04:05"), len(feeds))
		for _, feed := range feeds {
			workChan <- feed
		}
		time.Sleep(1 * time.Second)
	}
}

// crawlHost crawls a single host and returns the number of feeds recorded
// for it.
func (c *Crawler) crawlHost(host string) (feedsFound int, err error) {
	atomic.AddInt32(&c.hostsProcessed, 1)
	localVisited := make(map[string]bool)
	pagesVisited := 0
	// Try HTTPS first, fall back to HTTP if no pages were visited.
	c.crawlPage("https://"+host, host, 0, localVisited, &pagesVisited)
	if pagesVisited == 0 {
		c.crawlPage("http://"+host, host, 0, localVisited, &pagesVisited)
	}
	// Count feeds found for this specific host.
	feedsFound, _ = c.GetFeedCountByHost(host)
	if pagesVisited == 0 {
		return feedsFound, fmt.Errorf("could not connect")
	}
	return feedsFound, nil
}

// crawlPage fetches one page, records any feeds it advertises, and recurses
// into its links until the depth or per-host page limit is reached.
func (c *Crawler) crawlPage(pageURL, sourceHost string, depth int, localVisited map[string]bool, pagesVisited *int) {
	if *pagesVisited >= c.MaxPagesPerHost || depth > c.MaxDepth {
		return
	}
	if localVisited[pageURL] {
		return
	}
	// Global dedup across all hosts: LoadOrStore both checks and marks.
	if _, visited := c.visited.LoadOrStore(pageURL, true); visited {
		return
	}
	localVisited[pageURL] = true
	*pagesVisited++
	body, contentType, headers, err := c.fetchPage(pageURL)
	if err != nil {
		return
	}
	// The page itself may be a feed (e.g. a direct RSS/Atom URL).
	if c.isFeedContent(body, contentType) {
		c.processFeed(pageURL, sourceHost, body, headers)
		return
	}
	doc, err := html.Parse(strings.NewReader(body))
	if err != nil {
		return
	}
	// Feeds advertised via <link> elements.
	feedLinks := c.extractFeedLinks(doc, pageURL)
	for _, fl := range feedLinks {
		c.addFeed(fl.URL, fl.Type, sourceHost, pageURL)
	}
	// Feeds linked from anchor tags.
	anchorFeeds := c.extractAnchorFeeds(doc, pageURL)
	for _, fl := range anchorFeeds {
		c.addFeed(fl.URL, fl.Type, sourceHost, pageURL)
	}
	if depth < c.MaxDepth {
		links := c.extractLinks(doc, pageURL)
		for _, link := range links {
			if shouldCrawl(link, pageURL) {
				c.crawlPage(link, sourceHost, depth+1, localVisited, pagesVisited)
			}
		}
	}
}

// fetchPage GETs pageURL and returns the body, the Content-Type, and the
// full response headers.
func (c *Crawler) fetchPage(pageURL string) (string, string, http.Header, error) {
	req, err := http.NewRequest("GET", pageURL, nil)
	if err != nil {
		return "", "", nil, err
	}
	req.Header.Set("User-Agent", c.UserAgent)
	resp, err := c.client.Do(req)
	if err != nil {
		return "", "", nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return "", "", nil, fmt.Errorf("status code: %d", resp.StatusCode)
	}
	// Cap the read at 10 MiB to guard against unbounded response bodies.
	bodyBytes, err := io.ReadAll(io.LimitReader(resp.Body, 10<<20))
	if err != nil {
		return "", "", nil, err
	}
	contentType := resp.Header.Get("Content-Type")
	return string(bodyBytes), contentType, resp.Header, nil
}