Added ability to toggle between alphabetical and feed count sorting when viewing domains under a TLD. Includes UI toggle in breadcrumb area and backend support for the sort parameter. Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

package main

import (
	"encoding/json"
	"fmt"
	"html/template"
	"net/http"
	"strings"
	"time"

	"github.com/jackc/pgx/v5"
)

// DashboardStats holds all statistics for the dashboard
type DashboardStats struct {
	// Domain stats
	TotalDomains     int `json:"total_domains"`
	CheckedDomains   int `json:"checked_domains"`
	UncheckedDomains int `json:"unchecked_domains"`

	// Feed stats
	TotalFeeds   int `json:"total_feeds"`
	RSSFeeds     int `json:"rss_feeds"`
	AtomFeeds    int `json:"atom_feeds"`
	UnknownFeeds int `json:"unknown_feeds"`

	// Crawl progress
	HostsProcessed int32 `json:"hosts_processed"`
	CrawlRate      int   `json:"crawl_rate"` // crawls per minute
	CheckRate      int   `json:"check_rate"` // feed checks per minute

	// Timing
	UpdatedAt time.Time `json:"updated_at"`
}

type TLDStat struct {
	TLD   string `json:"tld"`
	Count int    `json:"count"`
}

type RecentFeed struct {
	URL          string    `json:"url"`
	Title        string    `json:"title"`
	Type         string    `json:"type"`
	DiscoveredAt time.Time `json:"discovered_at"`
}

type DomainStat struct {
	Host       string `json:"host"`
	FeedsFound int    `json:"feeds_found"`
}

// commaFormat formats an integer with comma separators
func commaFormat(n int) string {
	s := fmt.Sprintf("%d", n)
	// Peel off a leading minus sign so it never picks up a comma
	// (e.g. -123456 would otherwise render as "-,123,456")
	sign := ""
	if strings.HasPrefix(s, "-") {
		sign, s = "-", s[1:]
	}
	if len(s) <= 3 {
		return sign + s
	}
	var result []byte
	for i, c := range s {
		if i > 0 && (len(s)-i)%3 == 0 {
			result = append(result, ',')
		}
		result = append(result, byte(c))
	}
	return sign + string(result)
}
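
// Worked examples (not in the original source) of the grouping rule above:
// commaFormat(1234567) returns "1,234,567", and with the sign handling,
// commaFormat(-123456) returns "-123,456".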

// UpdateStats recalculates and caches dashboard statistics
func (c *Crawler) UpdateStats() {
	fmt.Println("UpdateStats: calculating stats...")
	stats, err := c.calculateStats()
	if err != nil {
		fmt.Printf("UpdateStats: error calculating stats: %v\n", err)
		return
	}
	// Cache all domains with feeds (runs in background, so a slow query is OK)
	fmt.Println("UpdateStats: fetching all domains...")
	allDomains := c.fetchAllDomainsFromDB()
	fmt.Printf("UpdateStats: got %d domains\n", len(allDomains))

	c.statsMu.Lock()
	c.cachedStats = stats
	c.cachedAllDomains = allDomains
	c.statsMu.Unlock()
	fmt.Println("UpdateStats: complete")
}
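
// UpdateStats is meant to be driven from a background loop; the cache comment
// in handleAPIAllDomains suggests a once-per-minute cadence. A minimal caller
// sketch (hypothetical, not part of this file):
//
//	go func() {
//		ticker := time.NewTicker(time.Minute)
//		defer ticker.Stop()
//		for range ticker.C {
//			crawler.UpdateStats()
//		}
//	}()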

func (c *Crawler) fetchAllDomainsFromDB() []DomainStat {
	rows, err := c.db.Query(`
		SELECT tld, source_host, COUNT(*) as cnt FROM feeds
		GROUP BY tld, source_host
		ORDER BY tld, source_host
	`)
	if err != nil {
		fmt.Printf("fetchAllDomainsFromDB error: %v\n", err)
		return nil
	}
	defer rows.Close()

	var domains []DomainStat
	for rows.Next() {
		var ds DomainStat
		var tld string // selected for ordering; discarded after the scan
		if err := rows.Scan(&tld, &ds.Host, &ds.FeedsFound); err != nil {
			continue
		}
		domains = append(domains, ds)
	}
	return domains
}

// GetDashboardStats returns cached statistics (returns empty stats if not yet cached)
func (c *Crawler) GetDashboardStats() (*DashboardStats, error) {
	c.statsMu.RLock()
	stats := c.cachedStats
	c.statsMu.RUnlock()

	if stats != nil {
		return stats, nil
	}
	// Return empty stats while the background calculation runs (don't block HTTP requests)
	return &DashboardStats{UpdatedAt: time.Now()}, nil
}

// calculateStats collects all statistics for the dashboard
func (c *Crawler) calculateStats() (*DashboardStats, error) {
	stats := &DashboardStats{
		UpdatedAt:      time.Now(),
		HostsProcessed: c.hostsProcessed,
	}

	// Calculate crawl rate (crawls per minute), smoothed by +/-1 per update
	elapsed := time.Since(c.startTime).Minutes()
	if elapsed > 0 {
		actualRate := int(float64(c.hostsProcessed) / elapsed)
		if actualRate > c.displayedCrawlRate {
			c.displayedCrawlRate++
		} else if actualRate < c.displayedCrawlRate {
			c.displayedCrawlRate--
		}
		stats.CrawlRate = c.displayedCrawlRate

		// Calculate check rate (feed checks per minute), smoothed by +/-1 per update
		actualCheckRate := int(float64(c.feedsChecked) / elapsed)
		if actualCheckRate > c.displayedCheckRate {
			c.displayedCheckRate++
		} else if actualCheckRate < c.displayedCheckRate {
			c.displayedCheckRate--
		}
		stats.CheckRate = c.displayedCheckRate
	}

	// Get domain stats
	if err := c.collectDomainStats(stats); err != nil {
		return nil, err
	}

	// Get feed stats
	if err := c.collectFeedStats(stats); err != nil {
		return nil, err
	}

	return stats, nil
}
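
// The +/-1 smoothing above moves the displayed rate toward the actual rate by
// one unit per calculateStats call: if the true crawl rate jumps from 100 to
// 160 per minute, roughly 60 further updates pass before the dashboard shows
// it, which damps flicker at the cost of lag.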

func (c *Crawler) collectDomainStats(stats *DashboardStats) error {
	// Use COUNT(*) for the total count
	err := c.db.QueryRow("SELECT COUNT(*) FROM domains").Scan(&stats.TotalDomains)
	if err != nil {
		return err
	}

	// Single query to get all status counts (one index scan instead of three)
	rows, err := c.db.Query("SELECT status, COUNT(*) FROM domains GROUP BY status")
	if err != nil {
		return err
	}
	defer rows.Close()

	for rows.Next() {
		var status string
		var count int
		if err := rows.Scan(&status, &count); err != nil {
			continue
		}
		switch status {
		case "checked":
			stats.CheckedDomains = count
		case "unchecked":
			stats.UncheckedDomains = count
		}
	}
	return rows.Err()
}

func (c *Crawler) collectFeedStats(stats *DashboardStats) error {
	// Use COUNT(*) for the total count
	err := c.db.QueryRow("SELECT COUNT(*) FROM feeds").Scan(&stats.TotalFeeds)
	if err != nil {
		return err
	}

	// Single query to get all type counts (one index scan instead of three)
	rows, err := c.db.Query("SELECT type, COUNT(*) FROM feeds GROUP BY type")
	if err != nil {
		return err
	}
	defer rows.Close()

	for rows.Next() {
		var feedType *string
		var count int
		if err := rows.Scan(&feedType, &count); err != nil {
			continue
		}
		if feedType == nil {
			stats.UnknownFeeds += count
		} else {
			switch *feedType {
			case "rss":
				stats.RSSFeeds = count
			case "atom":
				stats.AtomFeeds = count
			default:
				stats.UnknownFeeds += count
			}
		}
	}
	return rows.Err()
}

// StartDashboard starts the web dashboard server
func (c *Crawler) StartDashboard(addr string) error {
	http.HandleFunc("/dashboard", c.handleDashboard)

	// Root handler for url.1440.news short URLs
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		host := r.Host
		// Strip the port if present
		if idx := strings.Index(host, ":"); idx != -1 {
			host = host[:idx]
		}

		// If this is url.1440.news, treat the path as a short code
		if host == "url.1440.news" {
			c.handleRedirect(w, r)
			return
		}

		// Otherwise, redirect the root path to the dashboard
		if r.URL.Path == "/" {
			http.Redirect(w, r, "/dashboard", http.StatusFound)
			return
		}

		// Unknown path
		http.NotFound(w, r)
	})

	http.HandleFunc("/api/stats", c.handleAPIStats)
	http.HandleFunc("/api/allDomains", c.handleAPIAllDomains)
	http.HandleFunc("/api/domainFeeds", c.handleAPIDomainFeeds)
	http.HandleFunc("/api/feedInfo", c.handleAPIFeedInfo)
	http.HandleFunc("/api/feedItems", c.handleAPIFeedItems)
	http.HandleFunc("/api/search", c.handleAPISearch)
	http.HandleFunc("/api/tlds", c.handleAPITLDs)
	http.HandleFunc("/api/tldDomains", c.handleAPITLDDomains)
	http.HandleFunc("/api/revisitDomain", c.handleAPIRevisitDomain)
	http.HandleFunc("/api/priorityCrawl", c.handleAPIPriorityCrawl)
	http.HandleFunc("/api/checkFeed", c.handleAPICheckFeed)
	http.HandleFunc("/api/domainsByStatus", c.handleAPIDomainsByStatus)
	http.HandleFunc("/api/feedsByStatus", c.handleAPIFeedsByStatus)
	http.HandleFunc("/api/filter", c.handleAPIFilter)
	http.HandleFunc("/api/enablePublish", c.handleAPIEnablePublish)
	http.HandleFunc("/api/disablePublish", c.handleAPIDisablePublish)
	http.HandleFunc("/api/publishEnabled", c.handleAPIPublishEnabled)
	http.HandleFunc("/api/publishDenied", c.handleAPIPublishDenied)
	http.HandleFunc("/api/publishCandidates", c.handleAPIPublishCandidates)
	http.HandleFunc("/api/setPublishStatus", c.handleAPISetPublishStatus)
	http.HandleFunc("/api/unpublishedItems", c.handleAPIUnpublishedItems)
	http.HandleFunc("/api/testPublish", c.handleAPITestPublish)
	http.HandleFunc("/api/deriveHandle", c.handleAPIDeriveHandle)
	http.HandleFunc("/api/publishFeed", c.handleAPIPublishFeed)
	http.HandleFunc("/api/createAccount", c.handleAPICreateAccount)
	http.HandleFunc("/api/publishFeedFull", c.handleAPIPublishFeedFull)
	http.HandleFunc("/api/updateProfile", c.handleAPIUpdateProfile)
	http.HandleFunc("/api/languages", c.handleAPILanguages)
	http.HandleFunc("/api/denyDomain", c.handleAPIDenyDomain)
	http.HandleFunc("/api/undenyDomain", c.handleAPIUndenyDomain)
	http.HandleFunc("/api/tldStats", c.handleAPITLDStats)
	http.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir("static"))))

	fmt.Printf("Dashboard running at http://%s\n", addr)
	return http.ListenAndServe(addr, nil)
}
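
// A few representative requests against a running dashboard (host and port
// are whatever addr was passed to StartDashboard; localhost:8080 below is an
// assumption for illustration):
//
//	curl 'http://localhost:8080/api/stats'
//	curl 'http://localhost:8080/api/tldDomains?tld=com&limit=50'
//	curl 'http://localhost:8080/api/filter?tld=com&show=domains&sort=feeds'
//
// The last request exercises the sort toggle described in the commit message:
// sort=feeds orders domains by feed count, any other value falls back to
// alphabetical order.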

func (c *Crawler) handleAPIAllDomains(w http.ResponseWriter, r *http.Request) {
	offset := 0
	limit := 100
	if o := r.URL.Query().Get("offset"); o != "" {
		fmt.Sscanf(o, "%d", &offset)
	}
	if l := r.URL.Query().Get("limit"); l != "" {
		fmt.Sscanf(l, "%d", &limit)
		if limit > 100 {
			limit = 100
		}
	}

	// Serve from the cache (updated once per minute in the background)
	c.statsMu.RLock()
	cached := c.cachedAllDomains
	c.statsMu.RUnlock()

	var domains []DomainStat
	if cached != nil && offset < len(cached) {
		end := offset + limit
		if end > len(cached) {
			end = len(cached)
		}
		domains = cached[offset:end]
	}
	if domains == nil {
		domains = []DomainStat{}
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(domains)
}
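
// Paging through the cached list is a matter of advancing offset; for
// example, assuming the same local server as above:
//
//	curl 'http://localhost:8080/api/allDomains?offset=200&limit=100'
//
// Note that limit is clamped to 100, so larger values are quietly reduced.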

func (c *Crawler) handleAPIDomainFeeds(w http.ResponseWriter, r *http.Request) {
	host := r.URL.Query().Get("host")
	if host == "" {
		http.Error(w, "host parameter required", http.StatusBadRequest)
		return
	}

	rows, err := c.db.Query(`
		SELECT url, title, type, status, error_count, last_error, item_count
		FROM feeds
		WHERE source_host = $1
		ORDER BY url ASC
		LIMIT 1000
	`, host)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()

	type FeedInfo struct {
		URL        string `json:"url"`
		Title      string `json:"title"`
		Type       string `json:"type"`
		Status     string `json:"status,omitempty"`
		ErrorCount int    `json:"error_count,omitempty"`
		LastError  string `json:"last_error,omitempty"`
		ItemCount  int    `json:"item_count,omitempty"`
	}

	// Initialized so an empty result encodes as [] rather than null,
	// matching the other list endpoints
	feeds := []FeedInfo{}
	for rows.Next() {
		var f FeedInfo
		var title, status, lastError *string
		var errorCount, itemCount *int
		if err := rows.Scan(&f.URL, &title, &f.Type, &status, &errorCount, &lastError, &itemCount); err != nil {
			continue
		}
		f.Title = StringValue(title)
		f.Status = StringValue(status)
		f.LastError = StringValue(lastError)
		if errorCount != nil {
			f.ErrorCount = *errorCount
		}
		if itemCount != nil {
			f.ItemCount = *itemCount
		}
		feeds = append(feeds, f)
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(feeds)
}

func (c *Crawler) handleAPIFeedInfo(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}

	type FeedDetails struct {
		URL            string  `json:"url"`
		Type           string  `json:"type,omitempty"`
		Title          string  `json:"title,omitempty"`
		Description    string  `json:"description,omitempty"`
		Language       string  `json:"language,omitempty"`
		SiteURL        string  `json:"siteUrl,omitempty"`
		DiscoveredAt   string  `json:"discoveredAt,omitempty"`
		LastCrawledAt  string  `json:"lastCrawledAt,omitempty"`
		LastBuildDate  string  `json:"lastBuildDate,omitempty"`
		TTLMinutes     int     `json:"ttlMinutes,omitempty"`
		UpdatePeriod   string  `json:"updatePeriod,omitempty"`
		UpdateFreq     int     `json:"updateFreq,omitempty"`
		Status         string  `json:"status,omitempty"`
		ErrorCount     int     `json:"errorCount,omitempty"`
		LastError      string  `json:"lastError,omitempty"`
		ItemCount      int     `json:"itemCount,omitempty"`
		AvgPostFreqHrs float64 `json:"avgPostFreqHrs,omitempty"`
		OldestItemDate string  `json:"oldestItemDate,omitempty"`
		NewestItemDate string  `json:"newestItemDate,omitempty"`
	}

	var f FeedDetails
	var title, description, language, siteUrl *string
	var lastCrawledAt, lastBuildDate *time.Time
	var updatePeriod, status, lastError *string
	var oldestItemDate, newestItemDate *time.Time
	var ttlMinutes, updateFreq, errorCount, itemCount *int
	var avgPostFreqHrs *float64
	var discoveredAt time.Time

	err := c.db.QueryRow(`
		SELECT url, type, title, description, language, site_url,
			discovered_at, last_crawled_at, last_build_date,
			ttl_minutes, update_period, update_freq,
			status, error_count, last_error,
			item_count, avg_post_freq_hrs, oldest_item_date, newest_item_date
		FROM feeds WHERE url = $1
	`, feedURL).Scan(
		&f.URL, &f.Type, &title, &description, &language, &siteUrl,
		&discoveredAt, &lastCrawledAt, &lastBuildDate,
		&ttlMinutes, &updatePeriod, &updateFreq,
		&status, &errorCount, &lastError,
		&itemCount, &avgPostFreqHrs, &oldestItemDate, &newestItemDate,
	)

	if err == pgx.ErrNoRows {
		http.Error(w, "feed not found", http.StatusNotFound)
		return
	}
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	f.Title = StringValue(title)
	f.Description = StringValue(description)
	f.Language = StringValue(language)
	f.SiteURL = StringValue(siteUrl)
	f.DiscoveredAt = discoveredAt.Format(time.RFC3339)
	if lastCrawledAt != nil {
		f.LastCrawledAt = lastCrawledAt.Format(time.RFC3339)
	}
	if lastBuildDate != nil {
		f.LastBuildDate = lastBuildDate.Format(time.RFC3339)
	}
	if ttlMinutes != nil {
		f.TTLMinutes = *ttlMinutes
	}
	f.UpdatePeriod = StringValue(updatePeriod)
	if updateFreq != nil {
		f.UpdateFreq = *updateFreq
	}
	f.Status = StringValue(status)
	if errorCount != nil {
		f.ErrorCount = *errorCount
	}
	f.LastError = StringValue(lastError)
	if itemCount != nil {
		f.ItemCount = *itemCount
	}
	if avgPostFreqHrs != nil {
		f.AvgPostFreqHrs = *avgPostFreqHrs
	}
	if oldestItemDate != nil {
		f.OldestItemDate = oldestItemDate.Format(time.RFC3339)
	}
	if newestItemDate != nil {
		f.NewestItemDate = newestItemDate.Format(time.RFC3339)
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(f)
}

func (c *Crawler) handleAPIFeedItems(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}

	limit := 50
	if l := r.URL.Query().Get("limit"); l != "" {
		fmt.Sscanf(l, "%d", &limit)
		if limit > 100 {
			limit = 100
		}
	}

	items, err := c.GetItemsByFeed(feedURL, limit)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	if items == nil {
		items = []*Item{}
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(items)
}

// SearchResult represents a search result with a feed and its matching items
type SearchResult struct {
	Feed  SearchFeed   `json:"feed"`
	Items []SearchItem `json:"items"`
}

type SearchFeed struct {
	URL            string  `json:"url"`
	Type           string  `json:"type"`
	Category       string  `json:"category"`
	Title          string  `json:"title"`
	Description    string  `json:"description"`
	Language       string  `json:"language"`
	SiteURL        string  `json:"site_url"`
	DiscoveredAt   string  `json:"discovered_at"`
	LastCrawledAt  string  `json:"last_crawled_at"`
	NextCrawlAt    string  `json:"next_crawl_at"`
	LastBuildDate  string  `json:"last_build_date"`
	TTLMinutes     int     `json:"ttl_minutes"`
	UpdatePeriod   string  `json:"update_period"`
	UpdateFreq     int     `json:"update_freq"`
	Status         string  `json:"status"`
	ErrorCount     int     `json:"error_count"`
	LastError      string  `json:"last_error"`
	LastErrorAt    string  `json:"last_error_at"`
	SourceURL      string  `json:"source_url"`
	SourceHost     string  `json:"source_host"`
	TLD            string  `json:"tld"`
	ItemCount      int     `json:"item_count"`
	AvgPostFreqHrs float64 `json:"avg_post_freq_hrs"`
	OldestItemDate string  `json:"oldest_item_date"`
	NewestItemDate string  `json:"newest_item_date"`
	NoUpdate       bool    `json:"no_update"`
}

type SearchItem struct {
	ID           int64  `json:"id"`
	FeedURL      string `json:"feed_url"`
	GUID         string `json:"guid"`
	Title        string `json:"title"`
	Link         string `json:"link"`
	Description  string `json:"description"`
	Content      string `json:"content"`
	Author       string `json:"author"`
	PubDate      string `json:"pub_date"`
	DiscoveredAt string `json:"discovered_at"`
	UpdatedAt    string `json:"updated_at"`
}

func (c *Crawler) handleAPISearch(w http.ResponseWriter, r *http.Request) {
	query := r.URL.Query().Get("q")
	if query == "" {
		http.Error(w, "q parameter required", http.StatusBadRequest)
		return
	}

	limit := 100
	if l := r.URL.Query().Get("limit"); l != "" {
		fmt.Sscanf(l, "%d", &limit)
		if limit > 500 {
			limit = 500
		}
	}

	// Results map: feedURL -> SearchResult
	results := make(map[string]*SearchResult)

	// Helper to scan a feed row into a SearchFeed
	scanFeed := func(rows pgx.Rows) (string, SearchFeed, bool) {
		var url string
		var feedType, category, title, description, language, siteUrl *string
		var discoveredAt time.Time
		var lastCrawledAt, nextCrawlAt, lastBuildDate *time.Time
		var ttlMinutes, updateFreq, errorCount, itemCount *int
		var updatePeriod, status, lastError *string
		var lastErrorAt *time.Time
		var sourceUrl, sourceHost, tld *string
		var avgPostFreqHrs *float64
		var oldestItemDate, newestItemDate *time.Time
		var noUpdate *bool

		if err := rows.Scan(&url, &feedType, &category, &title, &description, &language, &siteUrl,
			&discoveredAt, &lastCrawledAt, &nextCrawlAt, &lastBuildDate,
			&ttlMinutes, &updatePeriod, &updateFreq,
			&status, &errorCount, &lastError, &lastErrorAt,
			&sourceUrl, &sourceHost, &tld,
			&itemCount, &avgPostFreqHrs, &oldestItemDate, &newestItemDate, &noUpdate); err != nil {
			return "", SearchFeed{}, false
		}
		cat := StringValue(category)
		if cat == "" {
			cat = "main"
		}
		sf := SearchFeed{
			URL:          url,
			Type:         StringValue(feedType),
			Category:     cat,
			Title:        StringValue(title),
			Description:  StringValue(description),
			Language:     StringValue(language),
			SiteURL:      StringValue(siteUrl),
			DiscoveredAt: discoveredAt.Format(time.RFC3339),
			UpdatePeriod: StringValue(updatePeriod),
			Status:       StringValue(status),
			LastError:    StringValue(lastError),
			SourceURL:    StringValue(sourceUrl),
			SourceHost:   StringValue(sourceHost),
			TLD:          StringValue(tld),
		}
		if lastCrawledAt != nil {
			sf.LastCrawledAt = lastCrawledAt.Format(time.RFC3339)
		}
		if nextCrawlAt != nil {
			sf.NextCrawlAt = nextCrawlAt.Format(time.RFC3339)
		}
		if lastBuildDate != nil {
			sf.LastBuildDate = lastBuildDate.Format(time.RFC3339)
		}
		if ttlMinutes != nil {
			sf.TTLMinutes = *ttlMinutes
		}
		if updateFreq != nil {
			sf.UpdateFreq = *updateFreq
		}
		if errorCount != nil {
			sf.ErrorCount = *errorCount
		}
		if lastErrorAt != nil {
			sf.LastErrorAt = lastErrorAt.Format(time.RFC3339)
		}
		if itemCount != nil {
			sf.ItemCount = *itemCount
		}
		if avgPostFreqHrs != nil {
			sf.AvgPostFreqHrs = *avgPostFreqHrs
		}
		if oldestItemDate != nil {
			sf.OldestItemDate = oldestItemDate.Format(time.RFC3339)
		}
		if newestItemDate != nil {
			sf.NewestItemDate = newestItemDate.Format(time.RFC3339)
		}
		if noUpdate != nil {
			sf.NoUpdate = *noUpdate
		}
		return url, sf, true
	}

	// Search feeds by source_host (LIKE search for domain matching)
	hostRows, err := c.db.Query(`
		SELECT url, type, category, title, description, language, site_url,
			discovered_at, last_crawled_at, next_crawl_at, last_build_date,
			ttl_minutes, update_period, update_freq,
			status, error_count, last_error, last_error_at,
			source_url, source_host, tld,
			item_count, avg_post_freq_hrs, oldest_item_date, newest_item_date, no_update
		FROM feeds
		WHERE source_host ILIKE $1 OR url ILIKE $1
		LIMIT $2
	`, "%"+query+"%", limit)
	if err == nil {
		defer hostRows.Close()
		for hostRows.Next() {
			if url, feed, ok := scanFeed(hostRows); ok {
				if _, exists := results[url]; !exists {
					results[url] = &SearchResult{Feed: feed, Items: []SearchItem{}}
				}
			}
		}
	}

	// Search feeds via full-text search
	tsQuery := ToSearchQuery(query)
	feedRows, err := c.db.Query(`
		SELECT url, type, category, title, description, language, site_url,
			discovered_at, last_crawled_at, next_crawl_at, last_build_date,
			ttl_minutes, update_period, update_freq,
			status, error_count, last_error, last_error_at,
			source_url, source_host, tld,
			item_count, avg_post_freq_hrs, oldest_item_date, newest_item_date, no_update
		FROM feeds
		WHERE search_vector @@ to_tsquery('english', $1)
		LIMIT $2
	`, tsQuery, limit)
	if err == nil {
		defer feedRows.Close()
		for feedRows.Next() {
			if url, feed, ok := scanFeed(feedRows); ok {
				if _, exists := results[url]; !exists {
					results[url] = &SearchResult{Feed: feed, Items: []SearchItem{}}
				}
			}
		}
	}

	// Search items via full-text search
	itemRows, err := c.db.Query(`
		SELECT i.id, i.feed_url, i.guid, i.title, i.link, i.description, i.content, i.author, i.pub_date, i.discovered_at, i.updated_at
		FROM items i
		WHERE i.search_vector @@ to_tsquery('english', $1)
		ORDER BY i.pub_date DESC
		LIMIT $2
	`, tsQuery, limit)
	if err == nil {
		defer itemRows.Close()
		for itemRows.Next() {
			var id int64
			var feedUrl string
			var guid, title, link, description, content, author *string
			var pubDate, discoveredAt, updatedAt *time.Time
			if err := itemRows.Scan(&id, &feedUrl, &guid, &title, &link, &description, &content, &author, &pubDate, &discoveredAt, &updatedAt); err != nil {
				continue
			}

			item := SearchItem{
				ID:          id,
				FeedURL:     feedUrl,
				GUID:        StringValue(guid),
				Title:       StringValue(title),
				Link:        StringValue(link),
				Description: StringValue(description),
				Content:     StringValue(content),
				Author:      StringValue(author),
			}
			if pubDate != nil {
				item.PubDate = pubDate.Format(time.RFC3339)
			}
			if discoveredAt != nil {
				item.DiscoveredAt = discoveredAt.Format(time.RFC3339)
			}
			if updatedAt != nil {
				item.UpdatedAt = updatedAt.Format(time.RFC3339)
			}

			// Add to an existing result or create a new one
			if result, exists := results[feedUrl]; exists {
				result.Items = append(result.Items, item)
			} else {
				// Fetch feed info for this item's feed
				var fType, fCategory, fTitle, fDesc, fLang, fSiteUrl *string
				var fDiscoveredAt time.Time
				var fLastCrawledAt, fNextCrawlAt, fLastBuildDate *time.Time
				var fTTLMinutes, fUpdateFreq, fErrorCount, fItemCount *int
				var fUpdatePeriod, fStatus, fLastError *string
				var fLastErrorAt *time.Time
				var fSourceUrl, fSourceHost, fTLD *string
				var fAvgPostFreqHrs *float64
				var fOldestItemDate, fNewestItemDate *time.Time
				var fNoUpdate *bool

				c.db.QueryRow(`
					SELECT type, category, title, description, language, site_url,
						discovered_at, last_crawled_at, next_crawl_at, last_build_date,
						ttl_minutes, update_period, update_freq,
						status, error_count, last_error, last_error_at,
						source_url, source_host, tld,
						item_count, avg_post_freq_hrs, oldest_item_date, newest_item_date, no_update
					FROM feeds WHERE url = $1
				`, feedUrl).Scan(&fType, &fCategory, &fTitle, &fDesc, &fLang, &fSiteUrl,
					&fDiscoveredAt, &fLastCrawledAt, &fNextCrawlAt, &fLastBuildDate,
					&fTTLMinutes, &fUpdatePeriod, &fUpdateFreq,
					&fStatus, &fErrorCount, &fLastError, &fLastErrorAt,
					&fSourceUrl, &fSourceHost, &fTLD,
					&fItemCount, &fAvgPostFreqHrs, &fOldestItemDate, &fNewestItemDate, &fNoUpdate)

				fCat := StringValue(fCategory)
				if fCat == "" {
					fCat = "main"
				}
				sf := SearchFeed{
					URL:          feedUrl,
					Type:         StringValue(fType),
					Category:     fCat,
					Title:        StringValue(fTitle),
					Description:  StringValue(fDesc),
					Language:     StringValue(fLang),
					SiteURL:      StringValue(fSiteUrl),
					DiscoveredAt: fDiscoveredAt.Format(time.RFC3339),
					UpdatePeriod: StringValue(fUpdatePeriod),
					Status:       StringValue(fStatus),
					LastError:    StringValue(fLastError),
					SourceURL:    StringValue(fSourceUrl),
					SourceHost:   StringValue(fSourceHost),
					TLD:          StringValue(fTLD),
				}
				if fLastCrawledAt != nil {
					sf.LastCrawledAt = fLastCrawledAt.Format(time.RFC3339)
				}
				if fNextCrawlAt != nil {
					sf.NextCrawlAt = fNextCrawlAt.Format(time.RFC3339)
				}
				if fLastBuildDate != nil {
					sf.LastBuildDate = fLastBuildDate.Format(time.RFC3339)
				}
				if fTTLMinutes != nil {
					sf.TTLMinutes = *fTTLMinutes
				}
				if fUpdateFreq != nil {
					sf.UpdateFreq = *fUpdateFreq
				}
				if fErrorCount != nil {
					sf.ErrorCount = *fErrorCount
				}
				if fLastErrorAt != nil {
					sf.LastErrorAt = fLastErrorAt.Format(time.RFC3339)
				}
				if fItemCount != nil {
					sf.ItemCount = *fItemCount
				}
				if fAvgPostFreqHrs != nil {
					sf.AvgPostFreqHrs = *fAvgPostFreqHrs
				}
				if fOldestItemDate != nil {
					sf.OldestItemDate = fOldestItemDate.Format(time.RFC3339)
				}
				if fNewestItemDate != nil {
					sf.NewestItemDate = fNewestItemDate.Format(time.RFC3339)
				}
				if fNoUpdate != nil {
					sf.NoUpdate = *fNoUpdate
				}
				results[feedUrl] = &SearchResult{
					Feed:  sf,
					Items: []SearchItem{item},
				}
			}
		}
	}

	// Convert the map to a slice (initialized so an empty result encodes as [] rather than null)
	resultList := []SearchResult{}
	for _, res := range results {
		resultList = append(resultList, *res)
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(resultList)
}
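
// A search sketch (hypothetical host): the q value is matched three ways,
// as an ILIKE substring against source_host and url, and as a Postgres
// tsquery (via ToSearchQuery) against the feed and item search_vector
// columns; the union of all three is returned keyed by feed:
//
//	curl 'http://localhost:8080/api/search?q=climate&limit=50'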

func (c *Crawler) handleAPIDomainsByStatus(w http.ResponseWriter, r *http.Request) {
	status := r.URL.Query().Get("status")
	if status == "" {
		http.Error(w, "status parameter required", http.StatusBadRequest)
		return
	}

	limit := 100
	offset := 0
	if l := r.URL.Query().Get("limit"); l != "" {
		fmt.Sscanf(l, "%d", &limit)
		if limit > 500 {
			limit = 500
		}
	}
	if o := r.URL.Query().Get("offset"); o != "" {
		fmt.Sscanf(o, "%d", &offset)
	}

	rows, err := c.db.Query(`
		SELECT d.host, d.tld, d.status, d.last_error, COALESCE(f.feed_count, 0) as feed_count
		FROM domains d
		LEFT JOIN (
			SELECT source_host, COUNT(*) as feed_count
			FROM feeds
			GROUP BY source_host
		) f ON d.host = f.source_host
		WHERE d.status = $1
		ORDER BY d.host ASC
		LIMIT $2 OFFSET $3
	`, status, limit, offset)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()

	type DomainInfo struct {
		Host      string `json:"host"`
		TLD       string `json:"tld"`
		Status    string `json:"status"`
		LastError string `json:"last_error,omitempty"`
		FeedCount int    `json:"feed_count"`
	}

	// Initialized so an empty page encodes as [] rather than null
	domains := []DomainInfo{}
	for rows.Next() {
		var d DomainInfo
		var tld, lastError *string
		if err := rows.Scan(&d.Host, &tld, &d.Status, &lastError, &d.FeedCount); err != nil {
			continue
		}
		d.TLD = StringValue(tld)
		d.LastError = StringValue(lastError)
		domains = append(domains, d)
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(domains)
}

func (c *Crawler) handleAPIFeedsByStatus(w http.ResponseWriter, r *http.Request) {
	status := r.URL.Query().Get("status")
	if status == "" {
		http.Error(w, "status parameter required", http.StatusBadRequest)
		return
	}

	limit := 100
	offset := 0
	if l := r.URL.Query().Get("limit"); l != "" {
		fmt.Sscanf(l, "%d", &limit)
		if limit > 500 {
			limit = 500
		}
	}
	if o := r.URL.Query().Get("offset"); o != "" {
		fmt.Sscanf(o, "%d", &offset)
	}

	rows, err := c.db.Query(`
		SELECT url, title, type, source_host, tld, status, error_count, last_error, item_count
		FROM feeds
		WHERE status = $1
		ORDER BY url ASC
		LIMIT $2 OFFSET $3
	`, status, limit, offset)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()

	type FeedInfo struct {
		URL        string `json:"url"`
		Title      string `json:"title,omitempty"`
		Type       string `json:"type"`
		SourceHost string `json:"source_host"`
		TLD        string `json:"tld"`
		Status     string `json:"status"`
		ErrorCount int    `json:"error_count,omitempty"`
		LastError  string `json:"last_error,omitempty"`
		ItemCount  int    `json:"item_count,omitempty"`
	}

	// Initialized so an empty page encodes as [] rather than null
	feeds := []FeedInfo{}
	for rows.Next() {
		var f FeedInfo
		var title, sourceHost, tld, lastError *string
		var errorCount, itemCount *int
		if err := rows.Scan(&f.URL, &title, &f.Type, &sourceHost, &tld, &f.Status, &errorCount, &lastError, &itemCount); err != nil {
			continue
		}
		f.Title = StringValue(title)
		f.SourceHost = StringValue(sourceHost)
		f.TLD = StringValue(tld)
		f.LastError = StringValue(lastError)
		if errorCount != nil {
			f.ErrorCount = *errorCount
		}
		if itemCount != nil {
			f.ItemCount = *itemCount
		}
		feeds = append(feeds, f)
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(feeds)
}

func (c *Crawler) handleAPIRevisitDomain(w http.ResponseWriter, r *http.Request) {
	host := r.URL.Query().Get("host")
	if host == "" {
		http.Error(w, "host parameter required", http.StatusBadRequest)
		return
	}

	_, err := c.db.Exec(`
		UPDATE domains SET status = 'unchecked', last_error = NULL
		WHERE host = $1
	`, host)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]string{"status": "queued", "host": host})
}

// handleAPIPriorityCrawl immediately crawls a domain (adding it if it doesn't exist)
func (c *Crawler) handleAPIPriorityCrawl(w http.ResponseWriter, r *http.Request) {
	host := r.URL.Query().Get("host")
	if host == "" {
		http.Error(w, "host parameter required", http.StatusBadRequest)
		return
	}

	host = normalizeHost(host)

	// Add the domain if it doesn't exist, or reset it to unchecked
	_, err := c.db.Exec(`
		INSERT INTO domains (host, status, discovered_at, tld)
		VALUES ($1, 'unchecked', NOW(), $2)
		ON CONFLICT(host) DO UPDATE SET status = 'unchecked', last_error = NULL
	`, host, getTLD(host))
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Crawl synchronously
	fmt.Printf("Priority crawl: %s\n", host)
	feedsFound, crawlErr := c.crawlHost(host)

	errStr := ""
	if crawlErr != nil {
		errStr = crawlErr.Error()
	}

	// Mark as crawled
	c.markDomainCrawled(host, feedsFound, errStr)

	// Get the feeds we found
	feeds, _ := c.GetFeedsByHost(host)

	type FeedSummary struct {
		URL      string `json:"url"`
		Title    string `json:"title"`
		Type     string `json:"type"`
		Category string `json:"category"`
		Status   string `json:"status"`
	}
	var feedSummaries []FeedSummary
	for _, f := range feeds {
		feedSummaries = append(feedSummaries, FeedSummary{
			URL:      f.URL,
			Title:    f.Title,
			Type:     f.Type,
			Category: f.Category,
			Status:   f.Status,
		})
	}

	result := map[string]interface{}{
		"host":        host,
		"feeds_found": feedsFound,
		"feeds":       feedSummaries,
	}
	if crawlErr != nil {
		result["error"] = crawlErr.Error()
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(result)
}
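
// Because the crawl runs synchronously in the request handler, a priority
// crawl call does not return until crawlHost finishes; for example
// (hypothetical host):
//
//	curl 'http://localhost:8080/api/priorityCrawl?host=example.com'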

// handleAPICheckFeed immediately checks a feed and returns its items
func (c *Crawler) handleAPICheckFeed(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}
	force := r.URL.Query().Get("force") == "true"

	feedURL = normalizeURL(feedURL)

	// Get the feed
	feed, err := c.getFeed(feedURL)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	if feed == nil {
		http.Error(w, "feed not found", http.StatusNotFound)
		return
	}

	// Clear cache headers if force is requested
	if force {
		feed.ETag = ""
		feed.LastModified = ""
	}

	// Force-check the feed
	fmt.Printf("Force check feed: %s (force=%v)\n", feedURL, force)
	changed, checkErr := c.CheckFeed(feed)

	// Get updated feed info; guard against the feed disappearing mid-check
	feed, _ = c.getFeed(feedURL)
	if feed == nil {
		http.Error(w, "feed not found after check", http.StatusNotFound)
		return
	}

	// Get items
	items, _ := c.GetItemsByFeed(feedURL, 20)

	type ItemSummary struct {
		Title   string `json:"title"`
		Link    string `json:"link"`
		PubDate string `json:"pub_date,omitempty"`
		Author  string `json:"author,omitempty"`
	}
	var itemSummaries []ItemSummary
	for _, item := range items {
		is := ItemSummary{
			Title:  item.Title,
			Link:   item.Link,
			Author: item.Author,
		}
		if !item.PubDate.IsZero() {
			is.PubDate = item.PubDate.Format("2006-01-02 15:04")
		}
		itemSummaries = append(itemSummaries, is)
	}

	result := map[string]interface{}{
		"url":       feedURL,
		"title":     feed.Title,
		"type":      feed.Type,
		"category":  feed.Category,
		"status":    feed.Status,
		"changed":   changed,
		"itemCount": feed.ItemCount,
		"items":     itemSummaries,
	}
	if checkErr != nil {
		result["error"] = checkErr.Error()
	}
	if feed.LastError != "" {
		result["lastError"] = feed.LastError
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(result)
}
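
// force=true drops the stored ETag/Last-Modified validators before the
// check, so the next fetch bypasses HTTP conditional-request caching.
// For example (hypothetical host and feed URL, for illustration only):
//
//	curl 'http://localhost:8080/api/checkFeed?url=https://example.com/feed.xml&force=true'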

// handleAPIFilter handles flexible filtering with stackable parameters
func (c *Crawler) handleAPIFilter(w http.ResponseWriter, r *http.Request) {
	tld := r.URL.Query().Get("tld")
	domain := r.URL.Query().Get("domain")
	feedStatus := r.URL.Query().Get("feedStatus")
	domainStatus := r.URL.Query().Get("domainStatus")
	languages := r.URL.Query().Get("languages") // comma-separated list
	show := r.URL.Query().Get("show")           // "feeds" or "domains"
	sort := r.URL.Query().Get("sort")           // "alpha" or "feeds"

	limit := 100
	offset := 0
	if l := r.URL.Query().Get("limit"); l != "" {
		fmt.Sscanf(l, "%d", &limit)
		if limit > 500 {
			limit = 500
		}
	}
	if o := r.URL.Query().Get("offset"); o != "" {
		fmt.Sscanf(o, "%d", &offset)
	}

	// Parse languages into a slice
	var langList []string
	if languages != "" {
		for _, lang := range strings.Split(languages, ",") {
			lang = strings.TrimSpace(lang)
			if lang != "" {
				langList = append(langList, lang)
			}
		}
	}

	// Determine what to show based on the filters
	if show == "" {
		if feedStatus != "" || domain != "" || len(langList) > 0 {
			show = "feeds"
		} else {
			show = "domains"
		}
	}

	if show == "feeds" {
		c.filterFeeds(w, tld, domain, feedStatus, langList, limit, offset)
	} else {
		c.filterDomains(w, tld, domainStatus, sort, limit, offset)
	}
}

func (c *Crawler) filterDomains(w http.ResponseWriter, tld, status, sort string, limit, offset int) {
	var args []interface{}
	argNum := 1
	query := `
		SELECT d.host, d.tld, d.status, d.last_error, COALESCE(f.feed_count, 0) as feed_count
		FROM domains d
		LEFT JOIN (
			SELECT source_host, COUNT(*) as feed_count
			FROM feeds
			GROUP BY source_host
		) f ON d.host = f.source_host
		WHERE 1=1`

	if tld != "" {
		query += fmt.Sprintf(" AND d.tld = $%d", argNum)
		args = append(args, tld)
		argNum++
	}
	if status != "" {
		query += fmt.Sprintf(" AND d.status = $%d", argNum)
		args = append(args, status)
		argNum++
	}

	// Sort by feed count descending, or alphabetically by default
	if sort == "feeds" {
		query += fmt.Sprintf(" ORDER BY feed_count DESC, d.host ASC LIMIT $%d OFFSET $%d", argNum, argNum+1)
	} else {
		query += fmt.Sprintf(" ORDER BY d.host ASC LIMIT $%d OFFSET $%d", argNum, argNum+1)
	}
	args = append(args, limit, offset)

	rows, err := c.db.Query(query, args...)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()

	type DomainInfo struct {
		Host      string `json:"host"`
		TLD       string `json:"tld"`
		Status    string `json:"status"`
		LastError string `json:"last_error,omitempty"`
		FeedCount int    `json:"feed_count"`
	}

	var domains []DomainInfo
	for rows.Next() {
		var d DomainInfo
		var tldVal, lastError *string
		if err := rows.Scan(&d.Host, &tldVal, &d.Status, &lastError, &d.FeedCount); err != nil {
			continue
		}
		d.TLD = StringValue(tldVal)
		d.LastError = StringValue(lastError)
		domains = append(domains, d)
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"type": "domains",
		"data": domains,
	})
}
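
// The two ORDER BY branches above are the backend half of the sort toggle
// from the commit message. Under the same hypothetical local server, the
// domains of a TLD can be listed either way:
//
//	curl 'http://localhost:8080/api/filter?tld=org&show=domains&sort=alpha'
//	curl 'http://localhost:8080/api/filter?tld=org&show=domains&sort=feeds'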

func (c *Crawler) filterFeeds(w http.ResponseWriter, tld, domain, status string, languages []string, limit, offset int) {
	var args []interface{}
	argNum := 1
	query := `
		SELECT url, title, type, category, source_host, tld, status, error_count, last_error, item_count, language
		FROM feeds
		WHERE 1=1`

	if tld != "" {
		query += fmt.Sprintf(" AND tld = $%d", argNum)
		args = append(args, tld)
		argNum++
	}
	if domain != "" {
		query += fmt.Sprintf(" AND source_host = $%d", argNum)
		args = append(args, domain)
		argNum++
	}
	if status != "" {
		query += fmt.Sprintf(" AND status = $%d", argNum)
		args = append(args, status)
		argNum++
	}
	if len(languages) > 0 {
		// Build an IN clause for languages, treating 'unknown' as the empty string
		placeholders := make([]string, len(languages))
		for i, lang := range languages {
			placeholders[i] = fmt.Sprintf("$%d", argNum)
			if lang == "unknown" {
				args = append(args, "")
			} else {
				args = append(args, lang)
			}
			argNum++
		}
		query += fmt.Sprintf(" AND COALESCE(language, '') IN (%s)", strings.Join(placeholders, ","))
	}

	query += fmt.Sprintf(" ORDER BY url ASC LIMIT $%d OFFSET $%d", argNum, argNum+1)
	args = append(args, limit, offset)

	rows, err := c.db.Query(query, args...)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()

	type FeedInfo struct {
		URL        string `json:"url"`
		Title      string `json:"title,omitempty"`
		Type       string `json:"type"`
		Category   string `json:"category"`
		SourceHost string `json:"source_host"`
		TLD        string `json:"tld"`
		Status     string `json:"status"`
		ErrorCount int    `json:"error_count,omitempty"`
		LastError  string `json:"last_error,omitempty"`
		ItemCount  int    `json:"item_count,omitempty"`
		Language   string `json:"language,omitempty"`
	}

	var feeds []FeedInfo
	for rows.Next() {
		var f FeedInfo
		var title, category, sourceHost, tldVal, lastError, language *string
		var errorCount, itemCount *int
		if err := rows.Scan(&f.URL, &title, &f.Type, &category, &sourceHost, &tldVal, &f.Status, &errorCount, &lastError, &itemCount, &language); err != nil {
			continue
		}
		f.Title = StringValue(title)
		if category != nil && *category != "" {
			f.Category = *category
		} else {
			f.Category = "main"
		}
		f.SourceHost = StringValue(sourceHost)
		f.TLD = StringValue(tldVal)
		f.LastError = StringValue(lastError)
		if errorCount != nil {
			f.ErrorCount = *errorCount
		}
		if itemCount != nil {
			f.ItemCount = *itemCount
		}
		f.Language = StringValue(language)
		feeds = append(feeds, f)
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"type": "feeds",
		"data": feeds,
	})
}
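
// Language filtering treats "unknown" as feeds whose language column is NULL
// or empty, via the COALESCE(language, '') comparison above; a combined
// filter might look like (hypothetical host):
//
//	curl 'http://localhost:8080/api/filter?tld=de&languages=de,unknown&show=feeds'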

func (c *Crawler) handleAPITLDs(w http.ResponseWriter, r *http.Request) {
	// Simple count by TLD from the domains table (fast with an index);
	// feed counts are fetched separately per TLD if needed
	rows, err := c.db.Query(`
		SELECT tld, COUNT(*)::int as domain_count
		FROM domains
		WHERE tld IS NOT NULL AND tld != ''
		GROUP BY tld
		ORDER BY tld ASC
	`)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()

	type TLDInfo struct {
		TLD         string `json:"tld"`
		DomainCount int    `json:"domain_count"`
		FeedCount   int    `json:"feed_count"`
	}

	tlds := []TLDInfo{}
	for rows.Next() {
		var t TLDInfo
		if err := rows.Scan(&t.TLD, &t.DomainCount); err != nil {
			continue
		}
		tlds = append(tlds, t)
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(tlds)
}

func (c *Crawler) handleAPITLDDomains(w http.ResponseWriter, r *http.Request) {
	tld := r.URL.Query().Get("tld")
	if tld == "" {
		http.Error(w, "tld parameter required", http.StatusBadRequest)
		return
	}

	limit := 100
	offset := 0
	if l := r.URL.Query().Get("limit"); l != "" {
		fmt.Sscanf(l, "%d", &limit)
		if limit > 500 {
			limit = 500
		}
	}
	if o := r.URL.Query().Get("offset"); o != "" {
		fmt.Sscanf(o, "%d", &offset)
	}

	rows, err := c.db.Query(`
		SELECT d.host, d.status, d.last_error, COALESCE(f.feed_count, 0) as feed_count
		FROM domains d
		LEFT JOIN (
			SELECT source_host, COUNT(*) as feed_count
			FROM feeds
			GROUP BY source_host
		) f ON d.host = f.source_host
		WHERE d.tld = $1
		ORDER BY d.host ASC
		LIMIT $2 OFFSET $3
	`, tld, limit, offset)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()

	type DomainInfo struct {
		Host      string `json:"host"`
		Status    string `json:"status"`
		LastError string `json:"last_error,omitempty"`
		FeedCount int    `json:"feed_count"`
	}

	// Initialized so an empty page encodes as [] rather than null
	domains := []DomainInfo{}
	for rows.Next() {
		var d DomainInfo
		var lastError *string
		if err := rows.Scan(&d.Host, &d.Status, &lastError, &d.FeedCount); err != nil {
			continue
		}
		d.LastError = StringValue(lastError)
		domains = append(domains, d)
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(domains)
}

// handleAPIEnablePublish sets a feed's publish status to 'pass'.
// If account is not provided, it is auto-derived from the feed URL.
func (c *Crawler) handleAPIEnablePublish(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	account := r.URL.Query().Get("account")
	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}

	feedURL = normalizeURL(feedURL)

	// Auto-derive the account handle if not provided
	if account == "" {
		account = DeriveHandleFromFeed(feedURL)
		if account == "" {
			http.Error(w, "could not derive account handle from URL", http.StatusBadRequest)
			return
		}
	}

	// Check the feed exists
	feed, err := c.getFeed(feedURL)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	if feed == nil {
		http.Error(w, "feed not found", http.StatusNotFound)
		return
	}

	if err := c.SetPublishStatus(feedURL, "pass", account); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Get the unpublished count
	count, _ := c.GetUnpublishedItemCount(feedURL)

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"status":            "pass",
		"url":               feedURL,
		"account":           account,
		"unpublished_items": count,
	})
}

// handleAPIDeriveHandle shows what handle would be derived from a feed URL
func (c *Crawler) handleAPIDeriveHandle(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}

	handle := DeriveHandleFromFeed(feedURL)

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"url":    feedURL,
		"handle": handle,
	})
}

// handleAPIDisablePublish sets a feed's publish status to 'deny'
func (c *Crawler) handleAPIDisablePublish(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}

	feedURL = normalizeURL(feedURL)

	if err := c.SetPublishStatus(feedURL, "deny", ""); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"status": "deny",
		"url":    feedURL,
	})
}

// handleAPIPublishEnabled returns all feeds with publish status 'pass'
func (c *Crawler) handleAPIPublishEnabled(w http.ResponseWriter, r *http.Request) {
	feeds, err := c.GetFeedsByPublishStatus("pass")
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	type FeedPublishInfo struct {
		URL              string `json:"url"`
		Title            string `json:"title"`
		Account          string `json:"account"`
		UnpublishedCount int    `json:"unpublished_count"`
	}

	var result []FeedPublishInfo
	for _, f := range feeds {
		count, _ := c.GetUnpublishedItemCount(f.URL)
		result = append(result, FeedPublishInfo{
			URL:              f.URL,
			Title:            f.Title,
			Account:          f.PublishAccount,
			UnpublishedCount: count,
		})
	}

	if result == nil {
		result = []FeedPublishInfo{}
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(result)
}

// handleAPIPublishDenied returns all feeds with publish status 'deny'
func (c *Crawler) handleAPIPublishDenied(w http.ResponseWriter, r *http.Request) {
	feeds, err := c.GetFeedsByPublishStatus("deny")
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	type FeedDeniedInfo struct {
		URL        string `json:"url"`
		Title      string `json:"title"`
		SourceHost string `json:"source_host"`
	}

	var result []FeedDeniedInfo
	for _, f := range feeds {
		result = append(result, FeedDeniedInfo{
			URL:        f.URL,
			Title:      f.Title,
			SourceHost: f.SourceHost,
		})
	}

	if result == nil {
		result = []FeedDeniedInfo{}
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(result)
}

// handleAPIPublishCandidates returns feeds pending review that have items
func (c *Crawler) handleAPIPublishCandidates(w http.ResponseWriter, r *http.Request) {
	limit := 50
	if l := r.URL.Query().Get("limit"); l != "" {
		fmt.Sscanf(l, "%d", &limit)
		if limit > 200 {
			limit = 200
		}
	}

	feeds, err := c.GetPublishCandidates(limit)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	type CandidateInfo struct {
		URL           string `json:"url"`
		Title         string `json:"title"`
		Category      string `json:"category"`
		SourceHost    string `json:"source_host"`
		ItemCount     int    `json:"item_count"`
		DerivedHandle string `json:"derived_handle"`
	}

	var result []CandidateInfo
	for _, f := range feeds {
		result = append(result, CandidateInfo{
			URL:           f.URL,
			Title:         f.Title,
			Category:      f.Category,
			SourceHost:    f.SourceHost,
			ItemCount:     f.ItemCount,
			DerivedHandle: DeriveHandleFromFeed(f.URL),
		})
	}

	if result == nil {
		result = []CandidateInfo{}
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(result)
}

// handleAPISetPublishStatus sets the publish status for a feed;
// status must be 'pass', 'deny', or 'held'
func (c *Crawler) handleAPISetPublishStatus(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	status := r.URL.Query().Get("status")
	account := r.URL.Query().Get("account")

	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}
	if status != "pass" && status != "deny" && status != "held" {
		http.Error(w, "status must be 'pass', 'deny', or 'held'", http.StatusBadRequest)
		return
	}

	feedURL = normalizeURL(feedURL)

	// Auto-derive the account for 'pass' if not provided
	if status == "pass" && account == "" {
		account = DeriveHandleFromFeed(feedURL)
	}

	if err := c.SetPublishStatus(feedURL, status, account); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"url":     feedURL,
		"status":  status,
		"account": account,
	})
}

// handleAPIUnpublishedItems returns unpublished items for a feed
func (c *Crawler) handleAPIUnpublishedItems(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}

	limit := 50
	if l := r.URL.Query().Get("limit"); l != "" {
		fmt.Sscanf(l, "%d", &limit)
		if limit > 200 {
			limit = 200
		}
	}

	items, err := c.GetUnpublishedItems(feedURL, limit)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	if items == nil {
		items = []*Item{}
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(items)
}

// handleAPITestPublish tests publishing a single item to the PDS.
// Requires: itemId, handle, password; pds is optional and defaults to https://1440.news
func (c *Crawler) handleAPITestPublish(w http.ResponseWriter, r *http.Request) {
	itemIDStr := r.URL.Query().Get("itemId")
	handle := r.URL.Query().Get("handle")
	password := r.URL.Query().Get("password")
	pdsHost := r.URL.Query().Get("pds")

	if itemIDStr == "" {
		http.Error(w, "itemId parameter required", http.StatusBadRequest)
		return
	}
	if handle == "" || password == "" {
		http.Error(w, "handle and password parameters required", http.StatusBadRequest)
		return
	}
	if pdsHost == "" {
		pdsHost = "https://1440.news"
	}

	var itemID int64
	fmt.Sscanf(itemIDStr, "%d", &itemID)

	// Get the item
	var item Item
	var guid, title, link, description, content, author *string
	var pubDate, updatedAt, publishedAt *time.Time
	var publishedUri *string

	err := c.db.QueryRow(`
		SELECT id, feed_url, guid, title, link, description, content, author, pub_date, discovered_at, updated_at, published_at, published_uri
		FROM items WHERE id = $1
	`, itemID).Scan(
		&item.ID, &item.FeedURL, &guid, &title, &link,
		&description, &content, &author, &pubDate,
		&item.DiscoveredAt, &updatedAt, &publishedAt, &publishedUri,
	)
	if err != nil {
		http.Error(w, "item not found: "+err.Error(), http.StatusNotFound)
		return
	}

	item.GUID = StringValue(guid)
	item.Title = StringValue(title)
	item.Link = StringValue(link)
	item.Description = StringValue(description)
	item.Content = StringValue(content)
	item.Author = StringValue(author)
	if pubDate != nil {
		item.PubDate = *pubDate
	}

	// Create a publisher and authenticate
	publisher := NewPublisher(pdsHost)
	session, err := publisher.CreateSession(handle, password)
	if err != nil {
		http.Error(w, "auth failed: "+err.Error(), http.StatusUnauthorized)
		return
	}

	// Publish the item
	uri, err := publisher.PublishItem(session, &item)
	if err != nil {
		http.Error(w, "publish failed: "+err.Error(), http.StatusInternalServerError)
		return
	}

	// Mark as published
	c.MarkItemPublished(item.ID, uri)

	// Use PubDate for the rkey to match createdAt ordering, falling back to DiscoveredAt
	rkeyTime := item.PubDate
	if rkeyTime.IsZero() {
		rkeyTime = item.DiscoveredAt
	}
	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"status": "published",
		"uri":    uri,
		"itemId": item.ID,
		"title":  item.Title,
		"rkey":   GenerateRkey(item.GUID, rkeyTime),
	})
}
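
// A test publish pushes a single stored item to the PDS as the given account
// (hypothetical values throughout; pds defaults to https://1440.news when
// omitted):
//
//	curl 'http://localhost:8080/api/testPublish?itemId=42&handle=news.example.1440.news&password=app-password'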

// handleAPIPublishFeed publishes unpublished items for a feed
// Requires: url (feed), handle, password, pds (optional), limit (optional, default 10, max 50)
func (c *Crawler) handleAPIPublishFeed(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	handle := r.URL.Query().Get("handle")
	password := r.URL.Query().Get("password")
	pdsHost := r.URL.Query().Get("pds")

	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}
	if handle == "" || password == "" {
		http.Error(w, "handle and password parameters required", http.StatusBadRequest)
		return
	}
	if pdsHost == "" {
		pdsHost = "https://1440.news"
	}

	limit := 10
	if l := r.URL.Query().Get("limit"); l != "" {
		fmt.Sscanf(l, "%d", &limit)
		if limit > 50 {
			limit = 50
		}
	}

	feedURL = normalizeURL(feedURL)

	// Get unpublished items (ordered by pubDate ASC - oldest first)
	items, err := c.GetUnpublishedItems(feedURL, limit)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	if len(items) == 0 {
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]interface{}{
			"status":    "no_items",
			"published": 0,
		})
		return
	}

	// Create publisher and authenticate
	publisher := NewPublisher(pdsHost)
	session, err := publisher.CreateSession(handle, password)
	if err != nil {
		http.Error(w, "auth failed: "+err.Error(), http.StatusUnauthorized)
		return
	}

	type PublishResult struct {
		ItemID int64  `json:"item_id"`
		Title  string `json:"title"`
		URI    string `json:"uri,omitempty"`
		Error  string `json:"error,omitempty"`
	}

	var results []PublishResult
	published := 0
	failed := 0

	for i, item := range items {
		result := PublishResult{
			ItemID: item.ID,
			Title:  item.Title,
		}

		uri, err := publisher.PublishItem(session, item)
		if err != nil {
			result.Error = err.Error()
			failed++
		} else {
			result.URI = uri
			c.MarkItemPublished(item.ID, uri)
			published++
		}

		results = append(results, result)

		// Add delay between posts to ensure unique timestamps for relay indexing
		if i < len(items)-1 {
			time.Sleep(1100 * time.Millisecond)
		}
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"status":    "complete",
		"published": published,
		"failed":    failed,
		"results":   results,
	})
}
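
// Illustrative invocation (route path assumed):
//
//	curl 'http://localhost:8080/api/publish-feed?url=https://example.com/feed.xml&handle=feed.example&password=...&limit=10'
//
// Items are attempted oldest-first; a per-item result is returned even when
// some posts fail, so a partial batch still reports which URIs were created.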

// handleAPICreateAccount creates a new account on the PDS
// Requires: handle, password
// Optional: email (defaults to handle@1440.news), pds, inviteCode, pdsAdminPassword
// If pdsAdminPassword is provided and no inviteCode, it will create an invite code first
func (c *Crawler) handleAPICreateAccount(w http.ResponseWriter, r *http.Request) {
	handle := r.URL.Query().Get("handle")
	email := r.URL.Query().Get("email")
	password := r.URL.Query().Get("password")
	pdsHost := r.URL.Query().Get("pds")
	inviteCode := r.URL.Query().Get("inviteCode")
	pdsAdminPassword := r.URL.Query().Get("pdsAdminPassword")

	if handle == "" || password == "" {
		http.Error(w, "handle and password parameters required", http.StatusBadRequest)
		return
	}
	if pdsHost == "" {
		pdsHost = "https://pds.1440.news"
	}
	if email == "" {
		// Generate a placeholder email from handle
		email = handle + "@1440.news"
	}

	publisher := NewPublisher(pdsHost)

	// If PDS admin password provided, create an invite code first
	if pdsAdminPassword != "" && inviteCode == "" {
		code, err := publisher.CreateInviteCode(pdsAdminPassword, 1)
		if err != nil {
			http.Error(w, "create invite failed: "+err.Error(), http.StatusInternalServerError)
			return
		}
		inviteCode = code
	}

	// Create the account
	session, err := publisher.CreateAccount(handle, email, password, inviteCode)
	if err != nil {
		http.Error(w, "create account failed: "+err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"status": "created",
		"handle": session.Handle,
		"did":    session.DID,
	})
}
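
// Illustrative invocation (route path assumed). With pdsAdminPassword set and
// no inviteCode, the handler mints a one-use invite before creating the account:
//
//	curl 'http://localhost:8080/api/create-account?handle=feed.example&password=...&pdsAdminPassword=...'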

// handleAPIPublishFeedFull creates an account (if needed) and publishes items
// This is a convenience endpoint that combines account creation and publishing
// Requires: url (feed), pdsAdminPassword, pds (optional), limit (optional), feedPassword (optional)
func (c *Crawler) handleAPIPublishFeedFull(w http.ResponseWriter, r *http.Request) {
	feedURL := r.URL.Query().Get("url")
	pdsAdminPassword := r.URL.Query().Get("pdsAdminPassword")
	pdsHost := r.URL.Query().Get("pds")
	feedPassword := r.URL.Query().Get("feedPassword") // Password for new feed accounts

	if feedURL == "" {
		http.Error(w, "url parameter required", http.StatusBadRequest)
		return
	}
	if pdsAdminPassword == "" {
		http.Error(w, "pdsAdminPassword parameter required", http.StatusBadRequest)
		return
	}
	if pdsHost == "" {
		pdsHost = "https://pds.1440.news"
	}
	if feedPassword == "" {
		feedPassword = "feed1440!" // Default password for feed accounts
	}

	limit := 10
	if l := r.URL.Query().Get("limit"); l != "" {
		fmt.Sscanf(l, "%d", &limit)
		if limit > 50 {
			limit = 50
		}
	}

	feedURL = normalizeURL(feedURL)

	// Get the feed to check its status and get the derived handle
	feed, err := c.getFeed(feedURL)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	if feed == nil {
		http.Error(w, "feed not found", http.StatusNotFound)
		return
	}
	if feed.PublishStatus != "pass" {
		http.Error(w, "feed is not approved for publishing (status: "+feed.PublishStatus+")", http.StatusBadRequest)
		return
	}

	handle := feed.PublishAccount
	if handle == "" {
		handle = DeriveHandleFromFeed(feedURL)
	}
	email := handle + "@1440.news"

	publisher := NewPublisher(pdsHost)

	// First, try to authenticate with the feed account
	session, err := publisher.CreateSession(handle, feedPassword)
	if err != nil {
		// Account doesn't exist, create it
		fmt.Printf("Account %s doesn't exist, creating...\n", handle)

		// Create invite code using PDS admin password
		inviteCode, err := publisher.CreateInviteCode(pdsAdminPassword, 1)
		if err != nil {
			http.Error(w, "create invite failed: "+err.Error(), http.StatusInternalServerError)
			return
		}

		// Create the account
		session, err = publisher.CreateAccount(handle, email, feedPassword, inviteCode)
		if err != nil {
			http.Error(w, "create account failed: "+err.Error(), http.StatusInternalServerError)
			return
		}
		fmt.Printf("Created account: %s (%s)\n", session.Handle, session.DID)

		// Set up profile with feed title and favicon
		displayName := feed.Title
		if displayName == "" {
			displayName = feed.SourceHost
		}
		description := feed.Description

		// Try to fetch favicon for avatar
		var avatar *BlobRef
		faviconData, mimeType, err := FetchFavicon(feed.SourceHost)
		if err == nil && len(faviconData) > 0 {
			avatar, err = publisher.UploadBlob(session, faviconData, mimeType)
			if err != nil {
				fmt.Printf("Failed to upload favicon: %v\n", err)
			}
		}

		if err := publisher.UpdateProfile(session, displayName, description, avatar); err != nil {
			fmt.Printf("Failed to update profile: %v\n", err)
		} else {
			fmt.Printf("Set profile for %s: %s\n", handle, displayName)
		}
	}

	// Get unpublished items
	items, err := c.GetUnpublishedItems(feedURL, limit)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	if len(items) == 0 {
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]interface{}{
			"status":    "no_items",
			"handle":    handle,
			"published": 0,
		})
		return
	}

	type PublishResult struct {
		ItemID int64  `json:"item_id"`
		Title  string `json:"title"`
		URI    string `json:"uri,omitempty"`
		Error  string `json:"error,omitempty"`
	}

	var results []PublishResult
	published := 0
	failed := 0

	for i, item := range items {
		result := PublishResult{
			ItemID: item.ID,
			Title:  item.Title,
		}

		uri, err := publisher.PublishItem(session, item)
		if err != nil {
			result.Error = err.Error()
			failed++
		} else {
			result.URI = uri
			c.MarkItemPublished(item.ID, uri)
			published++
		}

		results = append(results, result)

		// Add delay between posts to ensure unique timestamps for relay indexing
		if i < len(items)-1 {
			time.Sleep(1100 * time.Millisecond)
		}
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"status":    "complete",
		"handle":    handle,
		"did":       session.DID,
		"published": published,
		"failed":    failed,
		"results":   results,
	})
}
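
// Illustrative end-to-end call (route path assumed). Only feeds whose
// publish_status is "pass" are accepted; everything else is rejected up front:
//
//	curl 'http://localhost:8080/api/publish-feed-full?url=https://example.com/feed.xml&pdsAdminPassword=...&limit=5'
//
// First-run behavior: account creation, favicon avatar upload, and profile
// setup all happen before any items are posted; later runs skip straight to
// publishing because CreateSession succeeds.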

// handleAPIUpdateProfile updates a profile for an existing account
// Requires: handle, password, pds (optional), displayName (optional), description (optional), faviconUrl (optional)
func (c *Crawler) handleAPIUpdateProfile(w http.ResponseWriter, r *http.Request) {
	handle := r.URL.Query().Get("handle")
	password := r.URL.Query().Get("password")
	pdsHost := r.URL.Query().Get("pds")
	displayName := r.URL.Query().Get("displayName")
	description := r.URL.Query().Get("description")
	faviconURL := r.URL.Query().Get("faviconUrl")

	if handle == "" || password == "" {
		http.Error(w, "handle and password parameters required", http.StatusBadRequest)
		return
	}
	if pdsHost == "" {
		pdsHost = "https://pds.1440.news"
	}

	publisher := NewPublisher(pdsHost)

	// Authenticate
	session, err := publisher.CreateSession(handle, password)
	if err != nil {
		http.Error(w, "auth failed: "+err.Error(), http.StatusUnauthorized)
		return
	}

	// Fetch favicon if URL provided
	var avatar *BlobRef
	if faviconURL != "" {
		faviconData, mimeType, err := FetchFavicon(faviconURL)
		if err != nil {
			http.Error(w, "fetch favicon failed: "+err.Error(), http.StatusBadRequest)
			return
		}
		avatar, err = publisher.UploadBlob(session, faviconData, mimeType)
		if err != nil {
			http.Error(w, "upload favicon failed: "+err.Error(), http.StatusInternalServerError)
			return
		}
	}

	// Update profile
	if err := publisher.UpdateProfile(session, displayName, description, avatar); err != nil {
		http.Error(w, "update profile failed: "+err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"status":      "updated",
		"handle":      handle,
		"displayName": displayName,
		"hasAvatar":   avatar != nil,
	})
}
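
// Illustrative invocation (route path assumed; faviconUrl is passed straight
// to FetchFavicon, matching how source hosts are fetched above):
//
//	curl 'http://localhost:8080/api/update-profile?handle=feed.example&password=...&displayName=Example%20Feed&faviconUrl=example.com'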

func (c *Crawler) handleDashboard(w http.ResponseWriter, r *http.Request) {
	stats, err := c.GetDashboardStats()
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	funcMap := template.FuncMap{
		"pct": func(a, b int) float64 {
			if b == 0 {
				return 0
			}
			return float64(a) * 100.0 / float64(b)
		},
		"comma": func(n interface{}) string {
			var val int
			switch v := n.(type) {
			case int:
				val = v
			case int32:
				val = int(v)
			case int64:
				val = int(v)
			default:
				return "0"
			}
			if val < 0 {
				return "-" + commaFormat(-val)
			}
			return commaFormat(val)
		},
	}

	tmpl, err := template.New("dashboard").Funcs(funcMap).Parse(dashboardHTML)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "text/html")
	tmpl.Execute(w, stats)
}
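
// The dashboard template is re-parsed on every request above, which keeps the
// handler self-contained. A parse-once alternative (sketch only; dashboardFuncs
// would be the FuncMap hoisted to package scope, and neither name exists in
// this file):
//
//	var dashboardTmpl = template.Must(
//		template.New("dashboard").Funcs(dashboardFuncs).Parse(dashboardHTML),
//	)
//
// handleDashboard would then call dashboardTmpl.Execute(w, stats) directly.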

func (c *Crawler) handleAPIStats(w http.ResponseWriter, r *http.Request) {
	stats, err := c.GetDashboardStats()
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(stats)
}

// handleRedirect handles short URL redirects for url.1440.news
func (c *Crawler) handleRedirect(w http.ResponseWriter, r *http.Request) {
	code := strings.TrimPrefix(r.URL.Path, "/")
	if code == "" {
		http.NotFound(w, r)
		return
	}

	// Look up the short URL
	shortURL, err := c.GetShortURL(code)
	if err != nil {
		http.NotFound(w, r)
		return
	}

	// Record the click asynchronously
	go func() {
		if err := c.RecordClick(code, r); err != nil {
			fmt.Printf("Failed to record click for %s: %v\n", code, err)
		}
	}()

	// Redirect to original URL
	http.Redirect(w, r, shortURL.OriginalURL, http.StatusFound)
}
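
// RecordClick receives *http.Request on a goroutine that can outlive the
// handler; reading only immutable fields keeps that safe. A defensive variant
// (sketch; RecordClickMeta is hypothetical and not defined in this codebase)
// would copy what it needs before spawning:
//
//	ua, ref, ip := r.UserAgent(), r.Referer(), r.RemoteAddr
//	go func() { _ = c.RecordClickMeta(code, ua, ref, ip) }()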

// handleAPILanguages returns distinct languages with counts
func (c *Crawler) handleAPILanguages(w http.ResponseWriter, r *http.Request) {
	rows, err := c.db.Query(`
		SELECT COALESCE(NULLIF(language, ''), 'unknown') as lang, COUNT(*) as cnt
		FROM feeds
		GROUP BY lang
		ORDER BY cnt DESC
	`)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer rows.Close()

	type LangInfo struct {
		Language string `json:"language"`
		Count    int    `json:"count"`
	}

	var languages []LangInfo
	for rows.Next() {
		var l LangInfo
		if err := rows.Scan(&l.Language, &l.Count); err != nil {
			continue
		}
		languages = append(languages, l)
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(languages)
}
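
// Response shape (counts illustrative):
//
//	[{"language":"en","count":41230},{"language":"unknown","count":1571}]
//
// Note that if no rows scan successfully, languages stays nil and encodes as
// JSON null rather than [].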

// handleAPITLDStats returns domain and feed counts for a specific TLD
func (c *Crawler) handleAPITLDStats(w http.ResponseWriter, r *http.Request) {
	tld := r.URL.Query().Get("tld")
	if tld == "" {
		http.Error(w, "tld parameter required", http.StatusBadRequest)
		return
	}

	var domainCount, feedCount int
	err := c.db.QueryRow(`SELECT COUNT(*) FROM domains WHERE tld = $1`, tld).Scan(&domainCount)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	err = c.db.QueryRow(`SELECT COUNT(*) FROM feeds WHERE tld = $1`, tld).Scan(&feedCount)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"tld":          tld,
		"domain_count": domainCount,
		"feed_count":   feedCount,
	})
}
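
// Illustrative invocation (route path assumed, counts illustrative):
//
//	curl 'http://localhost:8080/api/tld-stats?tld=com'
//	{"tld":"com","domain_count":123456,"feed_count":7890}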

// handleAPIDenyDomain denies a domain and all its feeds
func (c *Crawler) handleAPIDenyDomain(w http.ResponseWriter, r *http.Request) {
	host := r.URL.Query().Get("host")
	if host == "" {
		http.Error(w, "host parameter required", http.StatusBadRequest)
		return
	}

	// Update domain status to denied
	_, err := c.db.Exec(`UPDATE domains SET status = 'denied' WHERE host = $1`, host)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Deny all feeds from this domain
	feedsAffected, err := c.db.Exec(`UPDATE feeds SET publish_status = 'deny', status = 'dead' WHERE source_host = $1`, host)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"success": true,
		"host":    host,
		// Report a numeric row count; the raw command tag returned by Exec
		// does not JSON-encode usefully.
		"feeds_denied": feedsAffected.RowsAffected(),
	})
}

// handleAPIUndenyDomain removes denied status from a domain
func (c *Crawler) handleAPIUndenyDomain(w http.ResponseWriter, r *http.Request) {
	host := r.URL.Query().Get("host")
	if host == "" {
		http.Error(w, "host parameter required", http.StatusBadRequest)
		return
	}

	// Update domain status back to checked
	_, err := c.db.Exec(`UPDATE domains SET status = 'checked' WHERE host = $1 AND status = 'denied'`, host)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Restore feeds to held status and active
	feedsRestored, err := c.db.Exec(`UPDATE feeds SET publish_status = 'held', status = 'active' WHERE source_host = $1 AND status = 'dead'`, host)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	w.Header().Set("Content-Type", "application/json")
	json.NewEncoder(w).Encode(map[string]interface{}{
		"success":        true,
		"host":           host,
		"feeds_restored": feedsRestored.RowsAffected(),
	})
}
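
// Deny/undeny are not perfectly symmetric: deny marks every feed on the host
// dead, while undeny restores every dead feed on the host to held/active,
// including feeds that were already dead for unrelated reasons before the
// deny. Illustrative calls (route paths assumed):
//
//	curl 'http://localhost:8080/api/deny-domain?host=spam.example'
//	curl 'http://localhost:8080/api/undeny-domain?host=spam.example'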

const dashboardHTML = `<!DOCTYPE html>
<html>
<head>
	<title>1440.news Feed Crawler</title>
	<meta charset="utf-8">
	<link rel="stylesheet" href="/static/dashboard.css">
	<script src="/static/dashboard.js?v=20"></script>
</head>
<body>
	<h1>1440.news Feed Crawler</h1>

	<h2>Crawl Progress</h2>
	<div class="grid">
		<div class="card">
			<div class="stat-value" id="totalDomains">{{comma .TotalDomains}}</div>
			<div class="stat-label">Domains</div>
		</div>
		<div class="card">
			<div class="stat-value" id="checkedDomains">{{comma .CheckedDomains}}</div>
			<div class="stat-label">Checked</div>
			<div class="progress-bar">
				<div class="progress-fill" id="crawlProgress" style="width: {{printf "%.1f" (pct .CheckedDomains .TotalDomains)}}%"></div>
			</div>
		</div>
		<div class="card">
			<div class="stat-value" id="uncheckedDomains">{{comma .UncheckedDomains}}</div>
			<div class="stat-label">Unchecked</div>
		</div>
		<div class="card">
			<div class="stat-value" id="crawlRate">{{comma .CrawlRate}}</div>
			<div class="stat-label">crawls per min</div>
		</div>
		<div class="card">
			<div class="stat-value" id="checkRate">{{comma .CheckRate}}</div>
			<div class="stat-label">checks per min</div>
		</div>
	</div>

	<h2>Feeds Discovered</h2>
	<div class="grid">
		<div class="card">
			<div class="stat-value" id="totalFeeds">{{comma .TotalFeeds}}</div>
			<div class="stat-label">Total Feeds</div>
		</div>
		<div class="card">
			<div class="stat-value" style="color: #f90" id="rssFeeds">{{comma .RSSFeeds}}</div>
			<div class="stat-label">RSS Feeds</div>
		</div>
		<div class="card">
			<div class="stat-value" style="color: #09f" id="atomFeeds">{{comma .AtomFeeds}}</div>
			<div class="stat-label">Atom Feeds</div>
		</div>
		<div class="card">
			<div class="stat-value" style="color: #666" id="unknownFeeds">{{comma .UnknownFeeds}}</div>
			<div class="stat-label">Unknown Type</div>
		</div>
	</div>

	<div class="card" id="inputCard">
		<div id="commandButtons" style="margin-bottom: 10px;">
			<button class="cmd-btn" data-cmd="/tlds">tlds</button>
			<button class="cmd-btn" data-cmd="/publish">publish</button>
			<button class="cmd-btn" id="langBtn">lang</button>
			<span style="color: #333; margin: 0 4px;">|</span>
			<button class="cmd-btn" data-cmd="/domains unchecked">domains:unchecked</button>
			<button class="cmd-btn" data-cmd="/domains checked">domains:checked</button>
			<button class="cmd-btn" data-cmd="/domains error">domains:error</button>
			<span style="color: #333; margin: 0 4px;">|</span>
			<button class="cmd-btn" data-cmd="/feeds active">feeds:active</button>
			<button class="cmd-btn" data-cmd="/feeds error">feeds:error</button>
			<button class="cmd-btn" data-cmd="/feeds dead">feeds:dead</button>
		</div>
		<div id="langDropdown" style="display: none; margin-bottom: 10px; padding: 10px; background: #0a0a0a; border: 1px solid #333; border-radius: 4px; max-height: 200px; overflow-y: auto;">
			<div id="langList"></div>
		</div>
		<input type="text" id="commandInput" value="/help"
			style="width: 100%; padding: 12px; background: #0a0a0a; border: 1px solid #333; border-radius: 4px; color: #fff; font-size: 14px; font-family: monospace;">
	</div>

	<div class="card" id="outputCard">
		<div id="breadcrumb" style="margin-bottom: 10px; display: none;"></div>
		<div id="output"></div>
	</div>

	<div style="color: #333; font-size: 11px; margin-top: 10px;">v18</div>

	<div class="updated" id="updatedAt">Last updated: {{.UpdatedAt.Format "2006-01-02 15:04:05"}}</div>
</body>
</html>`