// File: crawler/api_feeds.go
// HTTP API handlers for feed inspection and listing (511 lines, 14 KiB).
package main
import (
	"encoding/json"
	"fmt"
	"net/http"
	"strconv"
	"strings"
	"time"

	"github.com/jackc/pgx/v5"
)
// handleAPIFeedInfo returns full metadata for a single feed as JSON.
// Query params: url (required). Responds 404 when the feed is unknown.
func (c *Crawler) handleAPIFeedInfo(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}
	// FeedDetails is the wire shape for this endpoint; omitempty drops
	// fields that were NULL in the database.
	type FeedDetails struct {
		URL            string  `json:"url"`
		Type           string  `json:"type,omitempty"`
		Category       string  `json:"category,omitempty"`
		Title          string  `json:"title,omitempty"`
		Description    string  `json:"description,omitempty"`
		Language       string  `json:"language,omitempty"`
		SiteURL        string  `json:"siteUrl,omitempty"`
		DiscoveredAt   string  `json:"discoveredAt,omitempty"`
		LastCrawledAt  string  `json:"lastCrawledAt,omitempty"`
		NextCrawlAt    string  `json:"nextCrawlAt,omitempty"`
		LastBuildDate  string  `json:"lastBuildDate,omitempty"`
		TTLMinutes     int     `json:"ttlMinutes,omitempty"`
		UpdatePeriod   string  `json:"updatePeriod,omitempty"`
		UpdateFreq     int     `json:"updateFreq,omitempty"`
		Status         string  `json:"status,omitempty"`
		ErrorCount     int     `json:"errorCount,omitempty"`
		LastError      string  `json:"lastError,omitempty"`
		ItemCount      int     `json:"itemCount,omitempty"`
		AvgPostFreqHrs float64 `json:"avgPostFreqHrs,omitempty"`
		OldestItemDate string  `json:"oldestItemDate,omitempty"`
		NewestItemDate string  `json:"newestItemDate,omitempty"`
		PublishStatus  string  `json:"publishStatus,omitempty"`
		PublishAccount string  `json:"publishAccount,omitempty"`
	}
	// Nullable columns scan into pointers; they are flattened below.
	var (
		out                                           FeedDetails
		category, title, description, language        *string
		siteURL, updatePeriod, status, lastError      *string
		publishStatus, publishAccount                 *string
		lastCrawled, nextCrawl, lastBuild             *time.Time
		oldestItem, newestItem                        *time.Time
		ttlMinutes, updateFreq, errorCount, itemCount *int
		avgFreq                                       *float64
		discovered                                    time.Time
	)
	err := c.db.QueryRow(`
	SELECT url, type, category, title, description, language, site_url,
	discovered_at, last_crawled_at, next_crawl_at, last_build_date,
	ttl_minutes, update_period, update_freq,
	status, error_count, last_error,
	item_count, avg_post_freq_hrs, oldest_item_date, newest_item_date,
	publish_status, publish_account
	FROM feeds WHERE url = $1
	`, feedURL).Scan(
		&out.URL, &out.Type, &category, &title, &description, &language, &siteURL,
		&discovered, &lastCrawled, &nextCrawl, &lastBuild,
		&ttlMinutes, &updatePeriod, &updateFreq,
		&status, &errorCount, &lastError,
		&itemCount, &avgFreq, &oldestItem, &newestItem,
		&publishStatus, &publishAccount,
	)
	switch {
	case err == pgx.ErrNoRows:
		http.Error(w, "feed not found", http.StatusNotFound)
		return
	case err != nil:
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	// Small helpers to collapse nullable values into their zero-value forms.
	fmtTime := func(t *time.Time) string {
		if t == nil {
			return ""
		}
		return t.Format(time.RFC3339)
	}
	intOf := func(p *int) int {
		if p == nil {
			return 0
		}
		return *p
	}
	out.Category = StringValue(category)
	out.Title = StringValue(title)
	out.Description = StringValue(description)
	out.Language = StringValue(language)
	out.SiteURL = StringValue(siteURL)
	out.DiscoveredAt = discovered.Format(time.RFC3339)
	out.LastCrawledAt = fmtTime(lastCrawled)
	out.NextCrawlAt = fmtTime(nextCrawl)
	out.LastBuildDate = fmtTime(lastBuild)
	out.OldestItemDate = fmtTime(oldestItem)
	out.NewestItemDate = fmtTime(newestItem)
	out.TTLMinutes = intOf(ttlMinutes)
	out.UpdateFreq = intOf(updateFreq)
	out.ErrorCount = intOf(errorCount)
	out.ItemCount = intOf(itemCount)
	out.UpdatePeriod = StringValue(updatePeriod)
	out.Status = StringValue(status)
	out.LastError = StringValue(lastError)
	out.PublishStatus = StringValue(publishStatus)
	out.PublishAccount = StringValue(publishAccount)
	if avgFreq != nil {
		out.AvgPostFreqHrs = *avgFreq
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(out)
}
// handleAPIFeedItems returns the most recent items for a single feed as JSON.
// Query params: url (required), limit (optional; default 50, max 100).
func (c *Crawler) handleAPIFeedItems(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}
	limit := 50
	if l := r.URL.Query().Get("limit"); l != "" {
		// Only accept a well-formed positive integer; anything else keeps the
		// default (fmt.Sscanf previously ignored errors and let negative or
		// garbage values reach the query layer).
		if n, err := strconv.Atoi(l); err == nil && n > 0 {
			limit = n
		}
		if limit > 100 {
			limit = 100
		}
	}
	items, err := c.GetItemsByFeed(feedURL, limit)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	// Encode an empty JSON array rather than null when there are no items.
	if items == nil {
		items = []*Item{}
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(items)
}
// handleAPIFeedsByStatus lists feeds matching a crawl status as a JSON array.
// Query params: status (required), limit (optional; default 100, max 500),
// offset (optional; default 0).
func (c *Crawler) handleAPIFeedsByStatus(w http.ResponseWriter, r *http.Request) {
	status := r.URL.Query().Get("status")
	if status == "" {
		http.Error(w, "status parameter required", http.StatusBadRequest)
		return
	}
	limit := 100
	offset := 0
	// Only accept well-formed non-negative integers; bad values keep the
	// defaults instead of flowing into SQL LIMIT/OFFSET.
	if l := r.URL.Query().Get("limit"); l != "" {
		if n, err := strconv.Atoi(l); err == nil && n > 0 {
			limit = n
		}
		if limit > 500 {
			limit = 500
		}
	}
	if o := r.URL.Query().Get("offset"); o != "" {
		if n, err := strconv.Atoi(o); err == nil && n >= 0 {
			offset = n
		}
	}
	rows, err := c.db.Query(`
	SELECT url, title, type, source_host, tld, status, error_count, last_error, item_count
	FROM feeds
	WHERE status = $1
	ORDER BY url ASC
	LIMIT $2 OFFSET $3
	`, status, limit, offset)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()
	// FeedInfo is the per-feed wire shape for this endpoint.
	type FeedInfo struct {
		URL        string `json:"url"`
		Title      string `json:"title,omitempty"`
		Type       string `json:"type"`
		SourceHost string `json:"source_host"`
		TLD        string `json:"tld"`
		Status     string `json:"status"`
		ErrorCount int    `json:"error_count,omitempty"`
		LastError  string `json:"last_error,omitempty"`
		ItemCount  int    `json:"item_count,omitempty"`
	}
	// Start non-nil so a no-match result encodes as [] rather than null
	// (matches handleAPIFeedItems' behavior).
	feeds := []FeedInfo{}
	for rows.Next() {
		var f FeedInfo
		var title, sourceHost, tld, lastError *string
		var errorCount, itemCount *int
		if err := rows.Scan(&f.URL, &title, &f.Type, &sourceHost, &tld, &f.Status, &errorCount, &lastError, &itemCount); err != nil {
			// Skip rows that fail to scan rather than aborting the listing.
			continue
		}
		f.Title = StringValue(title)
		f.SourceHost = StringValue(sourceHost)
		f.TLD = StringValue(tld)
		f.LastError = StringValue(lastError)
		if errorCount != nil {
			f.ErrorCount = *errorCount
		}
		if itemCount != nil {
			f.ItemCount = *itemCount
		}
		feeds = append(feeds, f)
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(feeds)
}
// handleAPIFeeds lists feeds with an optional publish_status filter.
// Query params: publish_status (optional), limit (optional; default 100,
// max 500), offset (optional; default 0).
func (c *Crawler) handleAPIFeeds(w http.ResponseWriter, r *http.Request) {
	publishStatus := r.URL.Query().Get("publish_status")
	limit := 100
	offset := 0
	// Only accept well-formed non-negative integers; bad values keep the
	// defaults instead of flowing into SQL LIMIT/OFFSET.
	if l := r.URL.Query().Get("limit"); l != "" {
		if n, err := strconv.Atoi(l); err == nil && n > 0 {
			limit = n
		}
		if limit > 500 {
			limit = 500
		}
	}
	if o := r.URL.Query().Get("offset"); o != "" {
		if n, err := strconv.Atoi(o); err == nil && n >= 0 {
			offset = n
		}
	}
	var rows pgx.Rows
	var err error
	if publishStatus != "" {
		rows, err = c.db.Query(`
	SELECT url, title, type, source_host, tld, status, error_count, last_error, item_count, publish_status, language
	FROM feeds
	WHERE publish_status = $1
	ORDER BY url ASC
	LIMIT $2 OFFSET $3
	`, publishStatus, limit, offset)
	} else {
		rows, err = c.db.Query(`
	SELECT url, title, type, source_host, tld, status, error_count, last_error, item_count, publish_status, language
	FROM feeds
	ORDER BY url ASC
	LIMIT $1 OFFSET $2
	`, limit, offset)
	}
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()
	// FeedInfo is the per-feed wire shape for this endpoint.
	type FeedInfo struct {
		URL           string `json:"url"`
		Title         string `json:"title,omitempty"`
		Type          string `json:"type"`
		SourceHost    string `json:"source_host"`
		TLD           string `json:"tld"`
		Status        string `json:"status"`
		ErrorCount    int    `json:"error_count,omitempty"`
		LastError     string `json:"last_error,omitempty"`
		ItemCount     int    `json:"item_count,omitempty"`
		PublishStatus string `json:"publish_status,omitempty"`
		Language      string `json:"language,omitempty"`
	}
	// Start non-nil so a no-match result encodes as [] rather than null.
	feeds := []FeedInfo{}
	for rows.Next() {
		var f FeedInfo
		var title, sourceHost, tld, lastError, pubStatus, language *string
		var errorCount, itemCount *int
		if err := rows.Scan(&f.URL, &title, &f.Type, &sourceHost, &tld, &f.Status, &errorCount, &lastError, &itemCount, &pubStatus, &language); err != nil {
			// Skip rows that fail to scan rather than aborting the listing.
			continue
		}
		f.Title = StringValue(title)
		f.SourceHost = StringValue(sourceHost)
		f.TLD = StringValue(tld)
		f.LastError = StringValue(lastError)
		f.PublishStatus = StringValue(pubStatus)
		f.Language = StringValue(language)
		if errorCount != nil {
			f.ErrorCount = *errorCount
		}
		if itemCount != nil {
			f.ItemCount = *itemCount
		}
		feeds = append(feeds, f)
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(feeds)
}
// filterFeeds writes a JSON envelope {"type":"feeds","data":[...]} of feeds
// matching the given optional filters. Empty tld/domain/status and an empty
// languages slice mean "no filter". The language value "unknown" matches
// rows whose language column is NULL or empty.
func (c *Crawler) filterFeeds(w http.ResponseWriter, tld, domain, status string, languages []string, limit, offset int) {
	var args []interface{}
	argNum := 1
	query := `
	SELECT url, title, type, category, source_host, tld, status, error_count, last_error, item_count, language
	FROM feeds
	WHERE 1=1`
	if tld != "" {
		query += fmt.Sprintf(" AND tld = $%d", argNum)
		args = append(args, tld)
		argNum++
	}
	if domain != "" {
		query += fmt.Sprintf(" AND source_host = $%d", argNum)
		args = append(args, domain)
		argNum++
	}
	if status != "" {
		query += fmt.Sprintf(" AND status = $%d", argNum)
		args = append(args, status)
		argNum++
	}
	if len(languages) > 0 {
		// Build IN clause for languages, handling 'unknown' as empty string
		placeholders := make([]string, len(languages))
		for i, lang := range languages {
			placeholders[i] = fmt.Sprintf("$%d", argNum)
			if lang == "unknown" {
				args = append(args, "")
			} else {
				args = append(args, lang)
			}
			argNum++
		}
		query += fmt.Sprintf(" AND COALESCE(language, '') IN (%s)", strings.Join(placeholders, ","))
	}
	query += fmt.Sprintf(" ORDER BY url ASC LIMIT $%d OFFSET $%d", argNum, argNum+1)
	args = append(args, limit, offset)
	rows, err := c.db.Query(query, args...)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()
	// FeedInfo is the per-feed wire shape for this endpoint.
	type FeedInfo struct {
		URL        string `json:"url"`
		Title      string `json:"title,omitempty"`
		Type       string `json:"type"`
		Category   string `json:"category"`
		SourceHost string `json:"source_host"`
		TLD        string `json:"tld"`
		Status     string `json:"status"`
		ErrorCount int    `json:"error_count,omitempty"`
		LastError  string `json:"last_error,omitempty"`
		ItemCount  int    `json:"item_count,omitempty"`
		Language   string `json:"language,omitempty"`
	}
	// Start non-nil so a no-match result encodes "data":[] rather than null.
	feeds := []FeedInfo{}
	for rows.Next() {
		var f FeedInfo
		var title, category, sourceHost, tldVal, lastError, language *string
		var errorCount, itemCount *int
		if err := rows.Scan(&f.URL, &title, &f.Type, &category, &sourceHost, &tldVal, &f.Status, &errorCount, &lastError, &itemCount, &language); err != nil {
			// Skip rows that fail to scan rather than aborting the listing.
			continue
		}
		f.Title = StringValue(title)
		// NULL/empty category defaults to "main" in the response.
		if category != nil && *category != "" {
			f.Category = *category
		} else {
			f.Category = "main"
		}
		f.SourceHost = StringValue(sourceHost)
		f.TLD = StringValue(tldVal)
		f.LastError = StringValue(lastError)
		if errorCount != nil {
			f.ErrorCount = *errorCount
		}
		if itemCount != nil {
			f.ItemCount = *itemCount
		}
		f.Language = StringValue(language)
		feeds = append(feeds, f)
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"type": "feeds",
		"data": feeds,
	})
}
// handleAPICheckFeed immediately checks a feed and returns items.
// Query params: url (required), force (optional; "true" clears the cached
// ETag/Last-Modified so the remote server must return a full response).
func (c *Crawler) handleAPICheckFeed(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}
	force := r.URL.Query().Get("force") == "true"
	feedURL = normalizeURL(feedURL)
	// Get the feed
	feed, err := c.getFeed(feedURL)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	if feed == nil {
		http.Error(w, "feed not found", http.StatusNotFound)
		return
	}
	// Clear cache headers if force is requested
	if force {
		feed.ETag = ""
		feed.LastModified = ""
	}
	// Force check the feed
	fmt.Printf("Force check feed: %s (force=%v)\n", feedURL, force)
	changed, checkErr := c.CheckFeed(feed)
	// Re-fetch the feed for post-check state. The original ignored the error
	// and dereferenced a possibly-nil feed below, which would panic if the
	// row vanished or the query failed mid-check.
	feed, err = c.getFeed(feedURL)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	if feed == nil {
		http.Error(w, "feed not found", http.StatusNotFound)
		return
	}
	// Get items
	items, _ := c.GetItemsByFeed(feedURL, 20)
	// ItemSummary is the compact per-item wire shape for this endpoint.
	type ItemSummary struct {
		Title   string `json:"title"`
		Link    string `json:"link"`
		PubDate string `json:"pub_date,omitempty"`
		Author  string `json:"author,omitempty"`
	}
	// Start non-nil so "items" encodes as [] rather than null.
	itemSummaries := []ItemSummary{}
	for _, item := range items {
		is := ItemSummary{
			Title:  item.Title,
			Link:   item.Link,
			Author: item.Author,
		}
		if !item.PubDate.IsZero() {
			is.PubDate = item.PubDate.Format("2006-01-02 15:04")
		}
		itemSummaries = append(itemSummaries, is)
	}
	result := map[string]interface{}{
		"url":       feedURL,
		"title":     feed.Title,
		"type":      feed.Type,
		"category":  feed.Category,
		"status":    feed.Status,
		"changed":   changed,
		"itemCount": feed.ItemCount,
		"items":     itemSummaries,
	}
	if checkErr != nil {
		result["error"] = checkErr.Error()
	}
	if feed.LastError != "" {
		result["lastError"] = feed.LastError
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(result)
}
// handleAPILanguages returns distinct languages with feed counts, most
// common first. NULL/empty language rows are bucketed under "unknown".
func (c *Crawler) handleAPILanguages(w http.ResponseWriter, r *http.Request) {
	rows, err := c.db.Query(`
	SELECT COALESCE(NULLIF(language, ''), 'unknown') as lang, COUNT(*) as cnt
	FROM feeds
	GROUP BY lang
	ORDER BY cnt DESC
	`)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()
	// LangInfo is the per-language wire shape for this endpoint.
	type LangInfo struct {
		Language string `json:"language"`
		Count    int    `json:"count"`
	}
	// Start non-nil so an empty table encodes as [] rather than null,
	// matching the other listing endpoints.
	languages := []LangInfo{}
	for rows.Next() {
		var l LangInfo
		if err := rows.Scan(&l.Language, &l.Count); err != nil {
			// Skip rows that fail to scan rather than aborting the listing.
			continue
		}
		languages = append(languages, l)
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(languages)
}