Files
crawler/api_publish.go
primal 7ec4207173 Migrate to normalized FK schema (domain_host, domain_tld)
Replace source_host column with proper FK to domains table using
composite key (domain_host, domain_tld). This enables JOIN queries
instead of string concatenation for domain lookups.

Changes:
- Update Feed struct: SourceHost/TLD → DomainHost/DomainTLD
- Update all SQL queries to use domain_host/domain_tld columns
- Add column aliases (as source_host) for API backwards compatibility
- Update trigram index from source_host to domain_host
- Add getDomainHost() helper for extracting host from domain

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-01 22:36:25 -05:00

1029 lines
29 KiB
Go

package main
import (
	"encoding/json"
	"fmt"
	"net/http"
	"os"
	"strconv"
	"strings"
	"time"
)
// handleAPIEnablePublish sets a feed's publish status to 'pass'.
// If account is not provided, it will be auto-derived from the feed URL.
func (c *Crawler) handleAPIEnablePublish(w http.ResponseWriter, r *http.Request) {
	q := r.URL.Query()
	feedURL := q.Get("url")
	account := q.Get("account")
	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}
	feedURL = normalizeURL(feedURL)

	// Derive the account handle from the feed URL when none was supplied.
	if account == "" {
		if account = DeriveHandleFromFeed(feedURL); account == "" {
			http.Error(w, "could not derive account handle from URL", http.StatusBadRequest)
			return
		}
	}

	// The feed must already be known to the crawler.
	feed, err := c.getFeed(feedURL)
	switch {
	case err != nil:
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	case feed == nil:
		http.Error(w, "feed not found", http.StatusNotFound)
		return
	}

	if err := c.SetPublishStatus(feedURL, "pass", account); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Best-effort count of items still awaiting publication.
	count, _ := c.GetUnpublishedItemCount(feedURL)

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"status":            "pass",
		"url":               feedURL,
		"account":           account,
		"unpublished_items": count,
	})
}
// handleAPIDeriveHandle shows what handle would be derived from a feed URL.
func (c *Crawler) handleAPIDeriveHandle(w http.ResponseWriter, r *http.Request) {
	target := r.URL.Query().Get("url")
	if target == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}
	// Pure preview: nothing is persisted, the derivation is just echoed back.
	resp := map[string]interface{}{
		"url":    target,
		"handle": DeriveHandleFromFeed(target),
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(resp)
}
// handleAPIDisablePublish sets a feed's publish status to 'skip'.
func (c *Crawler) handleAPIDisablePublish(w http.ResponseWriter, r *http.Request) {
	target := r.URL.Query().Get("url")
	if target == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}
	target = normalizeURL(target)
	// An empty account string accompanies the 'skip' status.
	if err := c.SetPublishStatus(target, "skip", ""); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"status": "skip",
		"url":    target,
	})
}
// handleAPIPublishEnabled returns all feeds with publish status 'pass'.
func (c *Crawler) handleAPIPublishEnabled(w http.ResponseWriter, r *http.Request) {
	feeds, err := c.GetFeedsByPublishStatus("pass")
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	type FeedPublishInfo struct {
		URL              string `json:"url"`
		Title            string `json:"title"`
		Account          string `json:"account"`
		UnpublishedCount int    `json:"unpublished_count"`
	}
	// Pre-sized non-nil slice so an empty result encodes as [] rather than null.
	result := make([]FeedPublishInfo, 0, len(feeds))
	for _, f := range feeds {
		// Count lookup is best-effort; a failed count reports zero.
		n, _ := c.GetUnpublishedItemCount(f.URL)
		result = append(result, FeedPublishInfo{
			URL:              f.URL,
			Title:            f.Title,
			Account:          f.PublishAccount,
			UnpublishedCount: n,
		})
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(result)
}
// handleAPIPublishDenied returns all feeds with publish status 'skip'.
func (c *Crawler) handleAPIPublishDenied(w http.ResponseWriter, r *http.Request) {
	feeds, err := c.GetFeedsByPublishStatus("skip")
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	type FeedDeniedInfo struct {
		URL        string `json:"url"`
		Title      string `json:"title"`
		SourceHost string `json:"source_host"`
	}
	// Pre-sized non-nil slice so an empty result encodes as [] rather than null.
	result := make([]FeedDeniedInfo, 0, len(feeds))
	for _, f := range feeds {
		result = append(result, FeedDeniedInfo{
			URL:   f.URL,
			Title: f.Title,
			// Reassemble the full host from the normalized FK columns.
			SourceHost: fullHost(f.DomainHost, f.DomainTLD),
		})
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(result)
}
// handleAPIPublishCandidates returns feeds pending review that have items.
// Query params:
//   - limit: max feeds to return (default 50, clamped to 1..200)
func (c *Crawler) handleAPIPublishCandidates(w http.ResponseWriter, r *http.Request) {
	limit := 50
	if l := r.URL.Query().Get("limit"); l != "" {
		// Strict parse with clamping; the previous Sscanf-based parse ignored
		// its error and allowed negative/zero limits through to the query layer.
		if n, err := strconv.Atoi(l); err == nil {
			limit = n
		}
		if limit < 1 {
			limit = 1
		} else if limit > 200 {
			limit = 200
		}
	}
	feeds, err := c.GetPublishCandidates(limit)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	type CandidateInfo struct {
		URL           string `json:"url"`
		Title         string `json:"title"`
		Category      string `json:"category"`
		SourceHost    string `json:"source_host"`
		ItemCount     int    `json:"item_count"`
		DerivedHandle string `json:"derived_handle"`
	}
	// Pre-sized non-nil slice so an empty result encodes as [] rather than null.
	result := make([]CandidateInfo, 0, len(feeds))
	for _, f := range feeds {
		result = append(result, CandidateInfo{
			URL:      f.URL,
			Title:    f.Title,
			Category: f.Category,
			// Reassemble the full host from the normalized FK columns.
			SourceHost:    fullHost(f.DomainHost, f.DomainTLD),
			ItemCount:     f.ItemCount,
			DerivedHandle: DeriveHandleFromFeed(f.URL),
		})
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(result)
}
// handleAPISetPublishStatus sets the publish status for a feed
// Status values:
//   - 'pass': Create account if needed, begin publishing
//   - 'hold': Crawl and store items but don't publish (default)
//   - 'skip': Stop crawling but keep existing data
//   - 'drop': Full cleanup - remove items, posts, and account
//
// Query params: url (required), status (required), account (optional; only
// honored for 'pass', derived from the feed URL when empty).
func (c *Crawler) handleAPISetPublishStatus(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	status := r.URL.Query().Get("status")
	account := r.URL.Query().Get("account")
	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}
	if status != "pass" && status != "skip" && status != "hold" && status != "drop" {
		http.Error(w, "status must be 'pass', 'hold', 'skip', or 'drop'", http.StatusBadRequest)
		return
	}
	feedURL = normalizeURL(feedURL)
	// Response accumulator; branches below add keys as they go.
	result := map[string]interface{}{
		"url":    feedURL,
		"status": status,
	}
	// Handle 'drop' - full cleanup then set to skip
	if status == "drop" {
		cleanup := c.cleanupFeedPublishing(feedURL)
		result["cleanup"] = cleanup
		// After dropping, set status to skip with no account
		if err := c.SetPublishStatus(feedURL, "skip", ""); err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		result["account"] = ""
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(result)
		return
	}
	// Handle 'pass' - create account if needed and publish
	if status == "pass" {
		if account == "" {
			account = DeriveHandleFromFeed(feedURL)
		}
		// Check if account exists on PDS, create if not.
		// NOTE: an account-creation error is reported in the response body
		// ("error" key) but does NOT abort the request; the status is still
		// written below.
		created, err := c.ensureFeedAccountExists(feedURL, account)
		if err != nil {
			result["error"] = err.Error()
		} else if created {
			result["account_created"] = true
		}
		result["account"] = account
	}
	// Handle 'hold' and 'skip' - just update status
	if status == "hold" || status == "skip" {
		// Get current account if any (don't change it)
		feed, _ := c.getFeed(feedURL)
		if feed != nil {
			account = feed.PublishAccount
		}
	}
	if err := c.SetPublishStatus(feedURL, status, account); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	result["account"] = account
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(result)
}
// ensureFeedAccountExists creates the PDS account for a feed if it doesn't exist
// Returns (created bool, error)
//
// Credentials come from the PDS_HOST / PDS_ADMIN_PASSWORD / FEED_PASSWORD
// environment variables, falling back to parsing a local pds.env file when
// PDS_HOST is unset. Profile setup and the directory follow are best-effort:
// their failures are logged but do not fail account creation.
func (c *Crawler) ensureFeedAccountExists(feedURL, account string) (bool, error) {
	// Load PDS credentials
	pdsHost := os.Getenv("PDS_HOST")
	pdsAdminPassword := os.Getenv("PDS_ADMIN_PASSWORD")
	feedPassword := os.Getenv("FEED_PASSWORD")
	if pdsHost == "" {
		// Fall back to KEY=VALUE lines in pds.env (read errors ignored).
		if envData, err := os.ReadFile("pds.env"); err == nil {
			for _, line := range strings.Split(string(envData), "\n") {
				line = strings.TrimSpace(line)
				if strings.HasPrefix(line, "PDS_HOST=") {
					pdsHost = strings.TrimPrefix(line, "PDS_HOST=")
				} else if strings.HasPrefix(line, "PDS_ADMIN_PASSWORD=") {
					pdsAdminPassword = strings.TrimPrefix(line, "PDS_ADMIN_PASSWORD=")
				} else if strings.HasPrefix(line, "FEED_PASSWORD=") {
					feedPassword = strings.TrimPrefix(line, "FEED_PASSWORD=")
				}
			}
		}
	}
	if pdsHost == "" || pdsAdminPassword == "" {
		return false, fmt.Errorf("PDS credentials not configured")
	}
	if feedPassword == "" {
		// Default shared password for feed accounts.
		feedPassword = "feed1440!"
	}
	publisher := NewPublisher(pdsHost)
	// account is already the full handle (e.g., "ycombinator-blog.1440.news")
	handle := account
	if !strings.HasSuffix(handle, ".1440.news") {
		handle = account + ".1440.news"
	}
	// Try to login - if successful, account exists
	_, err := publisher.CreateSession(handle, feedPassword)
	if err == nil {
		return false, nil // Account already exists
	}
	// Account doesn't exist, create it
	inviteCode, err := publisher.CreateInviteCode(pdsAdminPassword, 1)
	if err != nil {
		return false, fmt.Errorf("failed to create invite: %w", err)
	}
	email := handle + "@1440.news"
	session, err := publisher.CreateAccount(handle, email, feedPassword, inviteCode)
	if err != nil {
		return false, fmt.Errorf("failed to create account: %w", err)
	}
	fmt.Printf("Created account %s for feed %s\n", handle, feedURL)
	// Set up profile (best-effort; skipped if the feed row can't be loaded)
	feed, _ := c.getFeed(feedURL)
	if feed != nil {
		sourceHost := fullHost(feed.DomainHost, feed.DomainTLD)
		displayName := feed.Title
		if displayName == "" {
			displayName = sourceHost
		}
		description := feed.Description
		if description == "" {
			description = "News feed via 1440.news"
		}
		// Add feed URL to description
		feedURLFull := "https://" + feedURL
		description = feedURLFull + "\n\n" + description
		// Truncate to the 64/256-char limits used here, with an ellipsis.
		if len(displayName) > 64 {
			displayName = displayName[:61] + "..."
		}
		if len(description) > 256 {
			description = description[:253] + "..."
		}
		// Try to fetch favicon; upload failures are silently ignored.
		var avatar *BlobRef
		faviconData, mimeType, err := FetchFaviconBytes(sourceHost)
		if err == nil && len(faviconData) > 0 {
			avatar, _ = publisher.UploadBlob(session, faviconData, mimeType)
		}
		if err := publisher.UpdateProfile(session, displayName, description, avatar); err != nil {
			fmt.Printf("Failed to set profile for %s: %v\n", handle, err)
		}
	}
	// Have directory account follow this new account
	if err := publisher.FollowAsDirectory(session.DID); err != nil {
		fmt.Printf("Directory follow failed for %s: %v\n", handle, err)
	}
	return true, nil
}
// cleanupFeedPublishing removes all published content for a feed
// Returns a summary of what was cleaned up
//
// The summary always contains "posts_deleted", "account_deleted" and
// "items_cleared"; error-detail keys ("error", "session_error",
// "posts_delete_error", "account_delete_error") are added as encountered.
// Cleanup is best-effort: database items are cleared even when the PDS
// account cannot be reached.
func (c *Crawler) cleanupFeedPublishing(feedURL string) map[string]interface{} {
	result := map[string]interface{}{
		"posts_deleted":   0,
		"account_deleted": false,
		"items_cleared":   0,
	}
	// Get feed info to find the account
	feed, err := c.getFeed(feedURL)
	if err != nil || feed == nil {
		result["error"] = "feed not found"
		return result
	}
	if feed.PublishAccount == "" {
		// No account associated, just clear items
		itemsCleared, _ := c.db.Exec(`UPDATE items SET published_at = NULL WHERE feed_url = $1`, feedURL)
		result["items_cleared"] = itemsCleared
		return result
	}
	// Load PDS credentials
	pdsHost := os.Getenv("PDS_HOST")
	pdsAdminPassword := os.Getenv("PDS_ADMIN_PASSWORD")
	feedPassword := os.Getenv("FEED_PASSWORD")
	if pdsHost == "" {
		// Try loading from pds.env (KEY=VALUE lines; read errors ignored)
		if envData, err := os.ReadFile("pds.env"); err == nil {
			for _, line := range strings.Split(string(envData), "\n") {
				line = strings.TrimSpace(line)
				if strings.HasPrefix(line, "PDS_HOST=") {
					pdsHost = strings.TrimPrefix(line, "PDS_HOST=")
				} else if strings.HasPrefix(line, "PDS_ADMIN_PASSWORD=") {
					pdsAdminPassword = strings.TrimPrefix(line, "PDS_ADMIN_PASSWORD=")
				} else if strings.HasPrefix(line, "FEED_PASSWORD=") {
					feedPassword = strings.TrimPrefix(line, "FEED_PASSWORD=")
				}
			}
		}
	}
	if pdsHost == "" || feedPassword == "" {
		result["error"] = "PDS credentials not configured"
		// Still clear items in database
		itemsCleared, _ := c.db.Exec(`UPDATE items SET published_at = NULL WHERE feed_url = $1`, feedURL)
		result["items_cleared"] = itemsCleared
		return result
	}
	publisher := NewPublisher(pdsHost)
	// Try to authenticate as the feed account
	session, err := publisher.CreateSession(feed.PublishAccount, feedPassword)
	if err == nil && session != nil {
		// Delete all posts
		deleted, err := publisher.DeleteAllPosts(session)
		if err == nil {
			result["posts_deleted"] = deleted
		} else {
			result["posts_delete_error"] = err.Error()
		}
	} else {
		result["session_error"] = "could not authenticate to delete posts"
	}
	// Delete the account using admin API (needs both the admin password and
	// a session, since the session supplies the account's DID)
	if pdsAdminPassword != "" && session != nil {
		err := publisher.DeleteAccount(pdsAdminPassword, session.DID)
		if err == nil {
			result["account_deleted"] = true
		} else {
			result["account_delete_error"] = err.Error()
		}
	}
	// Clear published_at on all items
	itemsCleared, _ := c.db.Exec(`UPDATE items SET published_at = NULL WHERE feed_url = $1`, feedURL)
	result["items_cleared"] = itemsCleared
	// Clear publish_account on feed (best-effort; error ignored)
	c.db.Exec(`UPDATE feeds SET publish_account = NULL WHERE url = $1`, feedURL)
	return result
}
// handleAPIUnpublishedItems returns unpublished items for a feed.
// Query params:
//   - url: feed URL (required)
//   - limit: max items to return (default 50, clamped to 1..200)
func (c *Crawler) handleAPIUnpublishedItems(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}
	limit := 50
	if l := r.URL.Query().Get("limit"); l != "" {
		// Strict parse with clamping; the previous Sscanf-based parse ignored
		// its error and allowed negative/zero limits through to the query layer.
		if n, err := strconv.Atoi(l); err == nil {
			limit = n
		}
		if limit < 1 {
			limit = 1
		} else if limit > 200 {
			limit = 200
		}
	}
	items, err := c.GetUnpublishedItems(feedURL, limit)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	// Encode [] rather than null when there is nothing to publish.
	if items == nil {
		items = []*Item{}
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(items)
}
// handleAPITestPublish tests publishing a single item to PDS
// Requires: url (feed), itemId, handle, password, pds (optional, defaults to https://1440.news)
func (c *Crawler) handleAPITestPublish(w http.ResponseWriter, r *http.Request) {
	itemIDStr := r.URL.Query().Get("itemId")
	handle := r.URL.Query().Get("handle")
	password := r.URL.Query().Get("password")
	pdsHost := r.URL.Query().Get("pds")
	if itemIDStr == "" {
		http.Error(w, "itemId parameter required", http.StatusBadRequest)
		return
	}
	if handle == "" || password == "" {
		http.Error(w, "handle and password parameters required", http.StatusBadRequest)
		return
	}
	if pdsHost == "" {
		pdsHost = "https://1440.news"
	}
	// Reject non-numeric ids with a 400; the previous Sscanf-based parse
	// ignored its error and turned garbage input into a misleading 404 for id 0.
	itemID, err := strconv.ParseInt(itemIDStr, 10, 64)
	if err != nil {
		http.Error(w, "itemId must be an integer", http.StatusBadRequest)
		return
	}
	// Load the item; nullable columns are scanned into pointers and
	// normalized to zero values below.
	var item Item
	var guid, title, link, description, content, author *string
	var pubDate, updatedAt, publishedAt *time.Time
	var publishedUri *string
	err = c.db.QueryRow(`
SELECT id, feed_url, guid, title, link, description, content, author, pub_date, discovered_at, updated_at, published_at, published_uri
FROM items WHERE id = $1
`, itemID).Scan(
		&item.ID, &item.FeedURL, &guid, &title, &link,
		&description, &content, &author, &pubDate,
		&item.DiscoveredAt, &updatedAt, &publishedAt, &publishedUri,
	)
	if err != nil {
		http.Error(w, "item not found: "+err.Error(), http.StatusNotFound)
		return
	}
	item.GUID = StringValue(guid)
	item.Title = StringValue(title)
	item.Link = StringValue(link)
	item.Description = StringValue(description)
	item.Content = StringValue(content)
	item.Author = StringValue(author)
	if pubDate != nil {
		item.PubDate = *pubDate
	}
	// Create publisher and authenticate
	publisher := NewPublisher(pdsHost)
	session, err := publisher.CreateSession(handle, password)
	if err != nil {
		http.Error(w, "auth failed: "+err.Error(), http.StatusUnauthorized)
		return
	}
	// Publish the item
	uri, err := publisher.PublishItem(session, &item)
	if err != nil {
		http.Error(w, "publish failed: "+err.Error(), http.StatusInternalServerError)
		return
	}
	// Mark as published
	c.MarkItemPublished(item.ID, uri)
	// Use PubDate for rkey to match createdAt ordering, fall back to DiscoveredAt
	rkeyTime := item.PubDate
	if rkeyTime.IsZero() {
		rkeyTime = item.DiscoveredAt
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"status": "published",
		"uri":    uri,
		"itemId": item.ID,
		"title":  item.Title,
		"rkey":   GenerateRkey(item.GUID, rkeyTime),
	})
}
// handleAPIPublishFeed publishes unpublished items for a feed
// Requires: url (feed), handle, password, pds (optional), limit (optional, default 10, clamped to 1..50)
func (c *Crawler) handleAPIPublishFeed(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	handle := r.URL.Query().Get("handle")
	password := r.URL.Query().Get("password")
	pdsHost := r.URL.Query().Get("pds")
	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}
	if handle == "" || password == "" {
		http.Error(w, "handle and password parameters required", http.StatusBadRequest)
		return
	}
	if pdsHost == "" {
		pdsHost = "https://1440.news"
	}
	limit := 10
	if l := r.URL.Query().Get("limit"); l != "" {
		// Strict parse with clamping; the previous Sscanf-based parse ignored
		// its error and let non-positive limits through.
		if n, err := strconv.Atoi(l); err == nil {
			limit = n
		}
		if limit < 1 {
			limit = 1
		} else if limit > 50 {
			limit = 50
		}
	}
	feedURL = normalizeURL(feedURL)
	// Get unpublished items (ordered by pubDate ASC - oldest first)
	items, err := c.GetUnpublishedItems(feedURL, limit)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	if len(items) == 0 {
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]interface{}{
			"status":    "no_items",
			"published": 0,
		})
		return
	}
	// Create publisher and authenticate
	publisher := NewPublisher(pdsHost)
	session, err := publisher.CreateSession(handle, password)
	if err != nil {
		http.Error(w, "auth failed: "+err.Error(), http.StatusUnauthorized)
		return
	}
	// Per-item outcome reported back to the caller.
	type PublishResult struct {
		ItemID int64  `json:"item_id"`
		Title  string `json:"title"`
		URI    string `json:"uri,omitempty"`
		Error  string `json:"error,omitempty"`
	}
	var results []PublishResult
	published := 0
	failed := 0
	for i, item := range items {
		result := PublishResult{
			ItemID: item.ID,
			Title:  item.Title,
		}
		uri, err := publisher.PublishItem(session, item)
		if err != nil {
			result.Error = err.Error()
			failed++
		} else {
			result.URI = uri
			c.MarkItemPublished(item.ID, uri)
			published++
		}
		results = append(results, result)
		// Add delay between posts to ensure unique timestamps for relay indexing
		if i < len(items)-1 {
			time.Sleep(1100 * time.Millisecond)
		}
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"status":    "complete",
		"published": published,
		"failed":    failed,
		"results":   results,
	})
}
// handleAPICreateAccount creates a new account on the PDS.
// Requires: handle, email, password, pds (optional), inviteCode (optional).
// If pdsAdminPassword is provided, it will create an invite code first.
func (c *Crawler) handleAPICreateAccount(w http.ResponseWriter, r *http.Request) {
	q := r.URL.Query()
	handle := q.Get("handle")
	email := q.Get("email")
	password := q.Get("password")
	pdsHost := q.Get("pds")
	inviteCode := q.Get("inviteCode")
	adminPassword := q.Get("pdsAdminPassword")

	if handle == "" || password == "" {
		http.Error(w, "handle and password parameters required", http.StatusBadRequest)
		return
	}
	if pdsHost == "" {
		pdsHost = "https://pds.1440.news"
	}
	if email == "" {
		// Generate a placeholder email from handle
		email = handle + "@1440.news"
	}

	publisher := NewPublisher(pdsHost)

	// With admin credentials and no explicit invite, mint a fresh invite code.
	if adminPassword != "" && inviteCode == "" {
		code, err := publisher.CreateInviteCode(adminPassword, 1)
		if err != nil {
			http.Error(w, "create invite failed: "+err.Error(), http.StatusInternalServerError)
			return
		}
		inviteCode = code
	}

	session, err := publisher.CreateAccount(handle, email, password, inviteCode)
	if err != nil {
		http.Error(w, "create account failed: "+err.Error(), http.StatusInternalServerError)
		return
	}

	// Directory follow is best-effort: failure is logged, not fatal.
	if err := publisher.FollowAsDirectory(session.DID); err != nil {
		fmt.Printf("API: directory follow failed for %s: %v\n", handle, err)
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"status": "created",
		"handle": session.Handle,
		"did":    session.DID,
	})
}
// handleAPIPublishFeedFull creates an account (if needed) and publishes items
// This is a convenience endpoint that combines account creation and publishing
// Requires: url (feed), pdsAdminPassword, pds (optional), limit (optional), feedPassword (optional)
//
// The feed must already be approved (publish_status == "pass"). When login
// as the feed account fails, the account is created via an admin invite code,
// its profile is populated from feed metadata (best-effort), and the directory
// account follows it. Items are then published with a delay between posts.
func (c *Crawler) handleAPIPublishFeedFull(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	pdsAdminPassword := r.URL.Query().Get("pdsAdminPassword")
	pdsHost := r.URL.Query().Get("pds")
	feedPassword := r.URL.Query().Get("feedPassword") // Password for new feed accounts
	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}
	if pdsAdminPassword == "" {
		http.Error(w, "pdsAdminPassword parameter required", http.StatusBadRequest)
		return
	}
	if pdsHost == "" {
		pdsHost = "https://pds.1440.news"
	}
	if feedPassword == "" {
		feedPassword = "feed1440!" // Default password for feed accounts
	}
	// Parse limit (default 10, capped at 50); Sscanf errors leave the default.
	limit := 10
	if l := r.URL.Query().Get("limit"); l != "" {
		fmt.Sscanf(l, "%d", &limit)
		if limit > 50 {
			limit = 50
		}
	}
	feedURL = normalizeURL(feedURL)
	// Get the feed to check its status and get the derived handle
	feed, err := c.getFeed(feedURL)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	if feed == nil {
		http.Error(w, "feed not found", http.StatusNotFound)
		return
	}
	if feed.PublishStatus != "pass" {
		http.Error(w, "feed is not approved for publishing (status: "+feed.PublishStatus+")", http.StatusBadRequest)
		return
	}
	handle := feed.PublishAccount
	if handle == "" {
		handle = DeriveHandleFromFeed(feedURL)
	}
	email := handle + "@1440.news"
	publisher := NewPublisher(pdsHost)
	// First, try to authenticate with the feed account
	session, err := publisher.CreateSession(handle, feedPassword)
	if err != nil {
		// Account doesn't exist, create it
		fmt.Printf("Account %s doesn't exist, creating...\n", handle)
		// Create invite code using PDS admin password
		inviteCode, err := publisher.CreateInviteCode(pdsAdminPassword, 1)
		if err != nil {
			http.Error(w, "create invite failed: "+err.Error(), http.StatusInternalServerError)
			return
		}
		// Create the account (note: assigns the OUTER session via `=`, so the
		// publish loop below uses the freshly created session)
		session, err = publisher.CreateAccount(handle, email, feedPassword, inviteCode)
		if err != nil {
			http.Error(w, "create account failed: "+err.Error(), http.StatusInternalServerError)
			return
		}
		fmt.Printf("Created account: %s (%s)\n", session.Handle, session.DID)
		// Set up profile with feed title and favicon
		sourceHost := fullHost(feed.DomainHost, feed.DomainTLD)
		displayName := feed.Title
		if displayName == "" {
			displayName = sourceHost
		}
		description := feed.Description
		// Try to fetch favicon for avatar (best-effort)
		var avatar *BlobRef
		faviconData, mimeType, err := FetchFaviconBytes(sourceHost)
		if err == nil && len(faviconData) > 0 {
			avatar, err = publisher.UploadBlob(session, faviconData, mimeType)
			if err != nil {
				fmt.Printf("Failed to upload favicon: %v\n", err)
			}
		}
		if err := publisher.UpdateProfile(session, displayName, description, avatar); err != nil {
			fmt.Printf("Failed to update profile: %v\n", err)
		} else {
			fmt.Printf("Set profile for %s: %s\n", handle, displayName)
		}
		// Have directory account follow this new account
		if err := publisher.FollowAsDirectory(session.DID); err != nil {
			fmt.Printf("API: directory follow failed for %s: %v\n", handle, err)
		}
	}
	// Get unpublished items
	items, err := c.GetUnpublishedItems(feedURL, limit)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	if len(items) == 0 {
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]interface{}{
			"status":    "no_items",
			"handle":    handle,
			"published": 0,
		})
		return
	}
	// Per-item outcome reported back to the caller
	type PublishResult struct {
		ItemID int64  `json:"item_id"`
		Title  string `json:"title"`
		URI    string `json:"uri,omitempty"`
		Error  string `json:"error,omitempty"`
	}
	var results []PublishResult
	published := 0
	failed := 0
	for i, item := range items {
		result := PublishResult{
			ItemID: item.ID,
			Title:  item.Title,
		}
		uri, err := publisher.PublishItem(session, item)
		if err != nil {
			result.Error = err.Error()
			failed++
		} else {
			result.URI = uri
			c.MarkItemPublished(item.ID, uri)
			published++
		}
		results = append(results, result)
		// Add delay between posts to ensure unique timestamps for relay indexing
		if i < len(items)-1 {
			time.Sleep(1100 * time.Millisecond)
		}
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"status":    "complete",
		"handle":    handle,
		"did":       session.DID,
		"published": published,
		"failed":    failed,
		"results":   results,
	})
}
// handleAPIUpdateProfile updates a profile for an existing account.
// Requires: handle, password, pds (optional), displayName (optional),
// description (optional), faviconUrl (optional).
func (c *Crawler) handleAPIUpdateProfile(w http.ResponseWriter, r *http.Request) {
	q := r.URL.Query()
	handle := q.Get("handle")
	password := q.Get("password")
	pdsHost := q.Get("pds")
	displayName := q.Get("displayName")
	description := q.Get("description")
	faviconURL := q.Get("faviconUrl")

	if handle == "" || password == "" {
		http.Error(w, "handle and password parameters required", http.StatusBadRequest)
		return
	}
	if pdsHost == "" {
		pdsHost = "https://pds.1440.news"
	}

	publisher := NewPublisher(pdsHost)
	session, err := publisher.CreateSession(handle, password)
	if err != nil {
		http.Error(w, "auth failed: "+err.Error(), http.StatusUnauthorized)
		return
	}

	// When a favicon URL is supplied, fetch it and upload as the avatar blob.
	var avatar *BlobRef
	if faviconURL != "" {
		data, mime, ferr := FetchFaviconBytes(faviconURL)
		if ferr != nil {
			http.Error(w, "fetch favicon failed: "+ferr.Error(), http.StatusBadRequest)
			return
		}
		if avatar, ferr = publisher.UploadBlob(session, data, mime); ferr != nil {
			http.Error(w, "upload favicon failed: "+ferr.Error(), http.StatusInternalServerError)
			return
		}
	}

	if err := publisher.UpdateProfile(session, displayName, description, avatar); err != nil {
		http.Error(w, "update profile failed: "+err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"status":      "updated",
		"handle":      handle,
		"displayName": displayName,
		"hasAvatar":   avatar != nil,
	})
}
// handleAPIResetAllPublishing clears all publish accounts and published_at timestamps.
func (c *Crawler) handleAPIResetAllPublishing(w http.ResponseWriter, r *http.Request) {
	// Detach every feed from its publish account.
	nAccounts, err := c.db.Exec(`UPDATE feeds SET publish_account = NULL WHERE publish_account IS NOT NULL`)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	// Forget that any item was ever published.
	nItems, err := c.db.Exec(`UPDATE items SET published_at = NULL WHERE published_at IS NOT NULL`)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	// Return every feed to the default 'hold' review state.
	nStatuses, err := c.db.Exec(`UPDATE feeds SET publish_status = 'hold' WHERE publish_status IS NOT NULL`)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"success":          true,
		"accounts_cleared": nAccounts,
		"items_cleared":    nItems,
		"status_reset":     nStatuses,
	})
}
// handleAPIRefreshProfiles refreshes all account profiles (avatars, descriptions).
// Requires: password (feed account password), pds (optional, defaults to pds.1440.news).
func (c *Crawler) handleAPIRefreshProfiles(w http.ResponseWriter, r *http.Request) {
	q := r.URL.Query()
	pass := q.Get("password")
	host := q.Get("pds")
	if pass == "" {
		http.Error(w, "password parameter required", http.StatusBadRequest)
		return
	}
	if host == "" {
		host = "https://pds.1440.news"
	}
	// Run RefreshAllProfiles synchronously; the request blocks until done.
	c.RefreshAllProfiles(NewPublisher(host), pass)
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"success": true,
		"message": "profiles refreshed",
	})
}