Compare commits


2 Commits

SHA1 Message Date
0c28766336 🐛 Fix items' published at 2025-04-06 13:14:44 +08:00
1f27667b7e Reduce the delay between two fetch feed timed task 2025-04-06 13:13:03 +08:00
3 changed files with 24 additions and 8 deletions

View File

@@ -14,14 +14,13 @@ func adminTriggerScanTask(c *fiber.Ctx) error {
 	var data struct {
 		Eager bool `json:"eager"`
-		Sources []string `json:"sources"`
 	}
 	if err := exts.BindAndValidate(c, &data); err != nil {
 		return err
 	}
-	go services.FetchFeedTimed()
+	go services.FetchFeed(data.Eager)
 	return c.SendStatus(fiber.StatusOK)
 }
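The endpoint now forwards the request's eager flag straight to services.FetchFeed instead of always going through the timed wrapper. The wrapper itself is not part of this diff; a minimal sketch of how it presumably relates to FetchFeed, assuming it simply runs a non-eager scan for the cron schedule:

// Sketch (assumed, not shown in this diff): the cron entry point runs a
// non-eager scan, so only feeds whose pull interval has elapsed get fetched.
func FetchFeedTimed() {
	FetchFeed()
}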

View File

@@ -22,10 +22,19 @@ func FetchFeedTimed() {
 func FetchFeed(eager ...bool) {
 	var feeds []models.SubscriptionFeed
+	if len(eager) > 0 && eager[0] {
 		if err := database.C.Where("is_enabled = ?", true).Find(&feeds).Error; err != nil {
 			log.Warn().Err(err).Msg("An error occurred when fetching feeds.")
 			return
 		}
+	} else {
+		if err := database.C.
+			Where("last_fetched_at IS NULL OR NOW() >= last_fetched_at + (pull_interval || ' hours')::interval").
+			Find(&feeds).Error; err != nil {
+			log.Warn().Err(err).Msg("An error occurred when fetching due feeds.")
+			return
+		}
+	}
 	log.Info().Int("count", len(feeds)).Msg("Ready to fetch feeds...")
@@ -56,7 +65,10 @@ func FetchFeed(eager ...bool) {
 		count += len(result)
 	}
-	database.C.Where("id IN ?", scannedFeed).Update("last_fetched_at", time.Now())
+	database.C.
+		Model(&models.SubscriptionFeed{}).
+		Where("id IN ?", scannedFeed).
+		Update("last_fetched_at", time.Now())
 	log.Info().Int("count", count).Msg("Scanned all feeds.")
 }
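The update now goes through Model(&models.SubscriptionFeed{}): a bare Where(...).Update(...) chain gives GORM no model to infer the target table from, so the previous one-liner likely never bumped last_fetched_at. A variant with the error surfaced, for illustration (same GORM v2 style as the surrounding code; the log message is hypothetical):

if err := database.C.
	Model(&models.SubscriptionFeed{}).
	Where("id IN ?", scannedFeed).
	Update("last_fetched_at", time.Now()).Error; err != nil {
	log.Warn().Err(err).Msg("An error occurred when updating last_fetched_at.")
}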
@@ -86,6 +98,8 @@ func feedReadWordpress(src models.SubscriptionFeed, eager ...bool) ([]models.Sub
 	date, err := time.Parse("2006-01-02T15:04:05", post.DateGMT)
 	if err == nil {
 		article.PublishedAt = date
+	} else {
+		article.PublishedAt = time.Now()
 	}
 	article.GenHash()
 	return *article
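This is the first of three fallbacks in this file: whenever the upstream date is missing or unparsable, PublishedAt is set to the fetch time instead of being left at Go's zero value. A small standalone illustration of the layout used here, with a made-up date_gmt value; the reference layout has no zone designator, so the parsed time comes back in UTC:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Hypothetical date_gmt value, shaped like the WordPress REST API output.
	raw := "2025-04-06T05:14:44"
	date, err := time.Parse("2006-01-02T15:04:05", raw)
	if err != nil {
		date = time.Now() // same fallback the diff adds
	}
	fmt.Println(date) // 2025-04-06 05:14:44 +0000 UTC
}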
@@ -162,6 +176,8 @@ func feedReadGuidedFeed(src models.SubscriptionFeed, eager ...bool) ([]models.Su
 	}
 	if item.PublishedParsed != nil {
 		parent.PublishedAt = *item.PublishedParsed
+	} else {
+		parent.PublishedAt = time.Now()
 	}
 	if item.Image != nil {
 		parent.Thumbnail = item.Image.URL
@@ -185,6 +201,7 @@ func feedReadWebpage(src models.SubscriptionFeed, eager ...bool) ([]models.Subsc
 	art := &article
 	art.GenHash()
 	art.FeedID = src.ID
+	art.PublishedAt = time.Now()
 	article = *art
 	return article
 }

View File

@@ -75,7 +75,7 @@ func main() {
 	// Configure timed tasks
 	quartz := cron.New(cron.WithLogger(cron.VerbosePrintfLogger(&log.Logger)))
 	quartz.AddFunc("@every 60m", services.DoAutoDatabaseCleanup)
-	quartz.AddFunc("@midnight", services.FetchFeedTimed)
+	quartz.AddFunc("@every 60m", services.FetchFeedTimed)
 	quartz.Start()
 	// Server
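The scan schedule moves from @midnight to @every 60m, matching the cleanup task; with the per-feed pull_interval check added above, an hourly run only touches feeds that are actually due. A minimal standalone sketch of the two cron descriptors involved, assuming the robfig/cron v3 package that the cron.New/AddFunc calls here suggest:

package main

import (
	"fmt"
	"time"

	"github.com/robfig/cron/v3"
)

func main() {
	c := cron.New()
	c.AddFunc("@every 60m", func() { fmt.Println("hourly:", time.Now()) }) // fixed 60-minute period
	c.AddFunc("@midnight", func() { fmt.Println("daily:", time.Now()) })   // alias for "0 0 * * *"
	c.Start()
	time.Sleep(2 * time.Hour) // demo only: keep the process alive to observe ticks
	c.Stop()
}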