diff --git a/crawler.go b/crawler.go
index 9f948a6..ad6f457 100644
--- a/crawler.go
+++ b/crawler.go
@@ -156,6 +156,9 @@ func (c *Crawler) StartPublishLoop() {
 	sessions := make(map[string]*PDSSession)
 	publisher := NewPublisher(pdsHost)
 
+	// Refresh existing account profiles on startup
+	c.RefreshAllProfiles(publisher, feedPassword)
+
 	for {
 		// Get up to 50 unpublished items from approved feeds, sorted by discovered_at ASC
 		items, err := c.GetAllUnpublishedItems(50)
@@ -291,6 +294,62 @@ func (c *Crawler) getFeedInfo(feedURL string) *FeedInfo {
 	}
 }
 
+// RefreshAllProfiles updates profiles for all existing accounts with feed URLs.
+func (c *Crawler) RefreshAllProfiles(publisher *Publisher, feedPassword string) {
+	rows, err := c.db.Query(`
+		SELECT url, title, description, publish_account
+		FROM feeds
+		WHERE publish_account IS NOT NULL AND publish_account <> ''
+	`)
+	if err != nil {
+		fmt.Printf("RefreshProfiles: query error: %v\n", err)
+		return
+	}
+	defer rows.Close()
+
+	for rows.Next() {
+		var feedURL, account string
+		var title, description *string
+		if err := rows.Scan(&feedURL, &title, &description, &account); err != nil {
+			continue
+		}
+
+		// Login to account
+		session, err := publisher.CreateSession(account, feedPassword)
+		if err != nil {
+			fmt.Printf("RefreshProfiles: login failed for %s: %v\n", account, err)
+			continue
+		}
+
+		// Build profile, falling back to the account handle / a default blurb
+		displayName := StringValue(title)
+		if displayName == "" {
+			displayName = account
+		}
+		desc := StringValue(description)
+		if desc == "" {
+			desc = "News feed via 1440.news"
+		}
+		// Append the public feed URL to the description
+		feedURLFull := "https://" + feedURL
+		desc = desc + "\n\n" + feedURLFull
+
+		// Truncate on rune boundaries (byte slicing can split UTF-8 sequences)
+		if r := []rune(displayName); len(r) > 64 {
+			displayName = string(r[:61]) + "..."
+		}
+		if r := []rune(desc); len(r) > 256 {
+			desc = string(r[:253]) + "..."
+		}
+
+		if err := publisher.UpdateProfile(session, displayName, desc, nil); err != nil {
+			fmt.Printf("RefreshProfiles: update failed for %s: %v\n", account, err)
+		} else {
+			fmt.Printf("RefreshProfiles: updated %s\n", account)
+		}
+	}
+}
+
 // GetAllUnpublishedItems returns unpublished items from all approved feeds
 func (c *Crawler) GetAllUnpublishedItems(limit int) ([]Item, error) {
 	rows, err := c.db.Query(`