Commit 321ab80

Add short caching for failed URL fetches
Previously, failed fetches (blocked sites, timeouts, etc.) were never cached, so every request for the same URL re-hit the target site. This wasted resources and could trigger further rate limiting. Failed results are now cached for 5 minutes.
Parent: 7807bc6

1 file changed: unfurlist.go (11 additions, 3 deletions)
--- a/unfurlist.go
+++ b/unfurlist.go
@@ -406,10 +406,18 @@ hasMatch:
 		result.Image, result.ImageWidth, result.ImageHeight = "", 0, 0
 	}
 
-	if mc := h.Cache; mc != nil && !result.Empty() {
+	if mc := h.Cache; mc != nil {
 		if cdata, err := json.Marshal(result); err == nil {
-			h.Log.Printf("Cache update for %q", link)
-			mc.Set(&memcache.Item{Key: mcKey(link), Value: snappy.Encode(nil, cdata)})
+			item := &memcache.Item{Key: mcKey(link), Value: snappy.Encode(nil, cdata)}
+			if result.Empty() {
+				// Cache failures briefly to avoid repeatedly hitting sites
+				// that block us. A short TTL ensures we retry eventually.
+				item.Expiration = 300 // 5 minutes
+				h.Log.Printf("Empty result cache for %q", link)
+			} else {
+				h.Log.Printf("Cache update for %q", link)
+			}
+			mc.Set(item)
 		}
 	}
 	return result
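
For reference, a minimal runnable sketch (not from this repository) of the gomemcache per-item TTL the change relies on: Item.Expiration is a TTL in seconds, and values under 30 days are treated as offsets from now, so 300 means five minutes. The memcached address and keys below are hypothetical.

    package main

    import (
    	"log"

    	"github.com/bradfitz/gomemcache/memcache"
    )

    func main() {
    	mc := memcache.New("127.0.0.1:11211") // hypothetical local memcached

    	// Successful result: no Expiration set (zero), so the item has
    	// no TTL and stays until memcached evicts it.
    	if err := mc.Set(&memcache.Item{
    		Key:   "unfurl:good-url",
    		Value: []byte(`{"title":"Example"}`),
    	}); err != nil {
    		log.Fatal(err)
    	}

    	// Failed (empty) result: a 5-minute TTL, so the URL gets
    	// retried once the entry expires.
    	if err := mc.Set(&memcache.Item{
    		Key:        "unfurl:blocked-url",
    		Value:      []byte(`{}`),
    		Expiration: 300, // seconds
    	}); err != nil {
    		log.Fatal(err)
    	}
    }

Leaving Expiration at zero, as the success path in the diff does, stores the item without a TTL, which is why only the empty-result branch needs to set it.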
