// nntpchan/contrib/backends/srndv2/src/srnd/varnish_cache.go
// (upstream repository archived 2023-08-12)
package srnd
import (
	"fmt"
	"io"
	"log"
	"net"
	"net/http"
	"net/url"
)
type VarnishCache struct {
varnish_url string
prefix string
handler *nullHandler
client *http.Client
2018-03-04 17:03:10 +05:00
workers int
threadsRegenChan chan ArticleEntry
2018-03-04 17:03:10 +05:00
invalidateChan chan *url.URL
}
// InvertPagination enables inverted pagination on the underlying handler.
func (self *VarnishCache) InvertPagination() {
	self.handler.invertPagination = true
}
func (self *VarnishCache) invalidate(r string) {
var langs []string
langs = append(langs, "")
self.handler.ForEachI18N(func(lang string) {
langs = append(langs, lang)
})
for _, lang := range langs {
u, _ := url.Parse(r)
if lang != "" {
q := u.Query()
q.Add("lang", lang)
u.RawQuery = q.Encode()
}
2018-03-04 17:03:10 +05:00
self.invalidateChan <- u
}
}
// doRequest sends a single PURGE request to varnish for u.
// A nil url is ignored.
func (self *VarnishCache) doRequest(u *url.URL) {
	if u == nil {
		return
	}
	resp, err := self.client.Do(&http.Request{
		Method: "PURGE",
		URL:    u,
	})
	if err != nil {
		log.Println("varnish cache error", err)
		return
	}
	// drain the body before closing so the transport can reuse the
	// underlying keep-alive connection
	io.Copy(io.Discard, resp.Body)
	resp.Body.Close()
}
// DeleteBoardMarkup invalidates every cached page of a board.
// Its invalidation set was a byte-for-byte duplicate of
// RegenerateBoard, so it delegates to it.
func (self *VarnishCache) DeleteBoardMarkup(group string) {
	self.RegenerateBoard(group)
}
// try to delete root post's page
func (self *VarnishCache) DeleteThreadMarkup(root_post_id string) {
2017-09-30 17:55:38 +05:00
id := HashMessageID(root_post_id)
self.invalidate(fmt.Sprintf("%s%sthread-%s.json", self.varnish_url, self.prefix, id))
self.invalidate(fmt.Sprintf("%s%st/%s/json", self.varnish_url, self.prefix, id))
self.invalidate(fmt.Sprintf("%s%sthread-%s.html", self.varnish_url, self.prefix, id))
self.invalidate(fmt.Sprintf("%s%st/%s/", self.varnish_url, self.prefix, id))
}
// RegenAll queues regeneration of every thread in every newsgroup.
// Used by the frontend to regenerate everything on startup.
func (self *VarnishCache) RegenAll() {
	for _, group := range self.handler.database.GetAllNewsgroups() {
		self.handler.database.GetGroupThreads(group, self.threadsRegenChan)
	}
}
2018-03-09 20:12:50 +05:00
func (self *VarnishCache) RegenFrontPage(pagestart int) {
self.invalidate(fmt.Sprintf("%s%s", self.varnish_url, self.prefix))
// TODO: this is also lazy af
self.invalidate(fmt.Sprintf("%s%shistory.html", self.varnish_url, self.prefix))
2018-03-09 20:12:50 +05:00
if self.handler.invertPagination {
self.invalidateUkko(50, pagestart-50)
} else {
self.invalidateUkko(pagestart, 0)
}
}
2018-03-09 20:12:50 +05:00
func (self *VarnishCache) invalidateUkko(pages, start int) {
self.invalidate(fmt.Sprintf("%s%sukko.html", self.varnish_url, self.prefix))
self.invalidate(fmt.Sprintf("%s%so/", self.varnish_url, self.prefix))
2017-09-30 17:55:38 +05:00
self.invalidate(fmt.Sprintf("%s%sukko.json", self.varnish_url, self.prefix))
self.invalidate(fmt.Sprintf("%s%so/json", self.varnish_url, self.prefix))
2018-03-09 20:12:50 +05:00
n := start
end := start + pages
for n < end {
2017-09-30 17:55:38 +05:00
self.invalidate(fmt.Sprintf("%s%so/%d/json", self.varnish_url, self.prefix, n))
self.invalidate(fmt.Sprintf("%s%so/%d/", self.varnish_url, self.prefix, n))
n++
}
}
// RegenerateBoard invalidates every cached page of a board, from the
// highest-numbered page down, then the board index itself.
func (self *VarnishCache) RegenerateBoard(group string) {
	pages, _ := self.handler.database.GetPagesPerBoard(group)
	for page := pages; page > 0; page-- {
		self.invalidate(fmt.Sprintf("%s%s%s-%d.html", self.varnish_url, self.prefix, group, page))
		self.invalidate(fmt.Sprintf("%s%sb/%s/%d/", self.varnish_url, self.prefix, group, page))
	}
	self.invalidate(fmt.Sprintf("%s%sb/%s/", self.varnish_url, self.prefix, group))
}
// RegenOnModEvent regenerates pages after a mod event: the affected
// thread, the board page it lives on, and (for page 0) the board index.
func (self *VarnishCache) RegenOnModEvent(newsgroup, msgid, root string, page int) {
	self.Regen(ArticleEntry{newsgroup, root})
	board := fmt.Sprintf("%s%sb/%s/", self.varnish_url, self.prefix, newsgroup)
	if page == 0 {
		self.invalidate(board)
	}
	self.invalidate(fmt.Sprintf("%s%d/", board, page))
}
func (self *VarnishCache) poll() {
for {
ent := <-self.threadsRegenChan
self.Regen(ent)
2017-10-13 16:58:10 +05:00
self.RegenerateBoard(ent.Newsgroup())
}
}
func (self *VarnishCache) Start() {
go self.poll()
2018-03-04 17:03:10 +05:00
workers := self.workers
if workers <= 0 {
workers = 1
}
for workers > 0 {
go self.doWorker()
workers--
}
}
// doWorker sends PURGE requests for queued urls, forever.
func (self *VarnishCache) doWorker() {
	for u := range self.invalidateChan {
		self.doRequest(u)
	}
}
func (self *VarnishCache) Regen(msg ArticleEntry) {
2017-09-30 17:55:38 +05:00
self.DeleteThreadMarkup(msg.MessageID())
}
2017-10-10 21:17:38 +05:00
func (self *VarnishCache) GetHandler() CacheHandler {
return self.handler
}
// Close is part of the cache interface.
func (self *VarnishCache) Close() {
	// nothing to do; workers run until process exit
}
// SetRequireCaptcha sets the captcha requirement flag on the underlying handler.
func (self *VarnishCache) SetRequireCaptcha(required bool) {
	self.handler.requireCaptcha = required
}
2018-03-04 17:03:10 +05:00
func NewVarnishCache(varnish_url, bind_addr, prefix, webroot, name, translations string, workers int, attachments bool, db Database, store ArticleStore) CacheInterface {
cache := new(VarnishCache)
2018-03-04 17:03:10 +05:00
cache.invalidateChan = make(chan *url.URL)
cache.threadsRegenChan = make(chan ArticleEntry)
2018-03-04 17:03:10 +05:00
cache.workers = workers
local_addr, err := net.ResolveTCPAddr("tcp", bind_addr)
if err != nil {
log.Fatalf("failed to resolve %s for varnish cache: %s", bind_addr, err)
}
cache.client = &http.Client{
Transport: &http.Transport{
Dial: func(network, addr string) (c net.Conn, err error) {
var remote_addr *net.TCPAddr
remote_addr, err = net.ResolveTCPAddr(network, addr)
if err == nil {
c, err = net.DialTCP(network, local_addr, remote_addr)
}
return
},
},
}
2017-04-19 20:55:51 +05:00
cache.prefix = "/"
cache.handler = &nullHandler{
prefix: prefix,
name: name,
attachments: attachments,
database: db,
requireCaptcha: true,
2017-10-10 21:17:38 +05:00
i18n: make(map[string]*I18N),
translations: translations,
}
cache.varnish_url = varnish_url
return cache
}