move srndv2 to nntpchan repo with vendored deps so that nothing breaks ever again
this deprecates the github.com/majestrate/srndv2 repo
This commit is contained in:
parent eb0ef957a4
commit 3a6cbf9de6
3 contrib/backends/srndv2/.gitignore vendored
@@ -1 +1,2 @@
 nntpchand
+srndv2
@@ -1,10 +1,9 @@
-GOPATH=$(PWD)
+REPO=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
 
-all: nntpchand
+all: srndv2
 
-nntpchand:
-	go build -o nntpchand -v nntpchan/cmd/nntpchan
+srndv2:
+	GOPATH=$(REPO) go build -v
 
 clean:
-	go clean -v
-	rm -f nntpchand
+	GOPATH=$(REPO) go clean -v
4 contrib/backends/srndv2/README.md Normal file
@@ -0,0 +1,4 @@
SRNDv2
======

**S**ome **R**andom **N**ews **D**aemon **v**ersion **2**
324 contrib/backends/srndv2/src/srnd/attachment.go Normal file
@@ -0,0 +1,324 @@
//
// attachment.go -- nntp attachements
//

package srnd

import (
  "bytes"
  "crypto/sha512"
  "encoding/base32"
  "encoding/base64"
  "errors"
  "io"
  "log"
  "mime"
  "mime/multipart"
  "net/textproto"
  "os"
  "path/filepath"
  "strings"
)

type NNTPAttachment interface {
  io.WriterTo
  io.Writer

  // the name of the file
  Filename() string
  // the filepath to the saved file
  Filepath() string
  // the mime type of the attachment
  Mime() string
  // the file extension of the attachment
  Extension() string
  // get the sha512 hash of the attachment
  Hash() []byte
  // do we need to generate a thumbnail?
  NeedsThumbnail() bool
  // mime header
  Header() textproto.MIMEHeader
  // make into a model
  ToModel(prefix string) AttachmentModel
  // base64'd file data
  Filedata() string
  // as raw string
  AsString() string
  // reset contents
  Reset()
  // get bytes
  Bytes() []byte
  // save to directory, filename is decided by the attachment
  Save(dir string) error
  // get body as io.ReadCloser
  OpenBody() (io.ReadCloser, error)
}

type nntpAttachment struct {
  ext      string
  mime     string
  filename string
  filepath string
  hash     []byte
  header   textproto.MIMEHeader
  body     *bytes.Buffer
  rawpath  string
  store    ArticleStore
}

type byteBufferReadCloser struct {
  b *bytes.Buffer
}

func (b *byteBufferReadCloser) Close() error {
  b.b.Reset()
  return nil
}

func (b *byteBufferReadCloser) Read(d []byte) (int, error) {
  return b.b.Read(d)
}

func (self *nntpAttachment) OpenBody() (io.ReadCloser, error) {
  if self.store != nil {
    return os.Open(self.store.AttachmentFilepath(self.filepath))
  } else {
    return &byteBufferReadCloser{
      self.body,
    }, nil
  }
}

func (self *nntpAttachment) Reset() {
  self.body = nil
  self.header = nil
  self.hash = nil
  self.filepath = ""
  self.filename = ""
  self.mime = ""
  self.ext = ""
  self.store = nil
}

func (self *nntpAttachment) ToModel(prefix string) AttachmentModel {
  return &attachment{
    prefix: prefix,
    Path:   self.Filepath(),
    Name:   self.Filename(),
  }
}

func (self *nntpAttachment) Bytes() []byte {
  if self.body == nil {
    return nil
  }
  return self.body.Bytes()
}

func (self *nntpAttachment) Save(dir string) (err error) {
  if self.body == nil {
    // no body wat
    err = errors.New("no attachment body")
  } else {
    fpath := filepath.Join(dir, self.filepath)
    if !CheckFile(fpath) {
      var f io.WriteCloser
      // does not exist so will will write it
      f, err = os.Create(fpath)
      if err == nil {
        _, err = f.Write(self.Bytes())
        f.Close()
      }
    }
  }
  return
}

func (self *nntpAttachment) Write(b []byte) (int, error) {
  if self.body == nil {
    self.body = new(bytes.Buffer)
  }
  return self.body.Write(b)
}

func (self *nntpAttachment) AsString() string {
  if self.body == nil {
    return ""
  }
  return string(self.Bytes())
}

func (self *nntpAttachment) Filedata() string {
  e := base64.StdEncoding
  str := e.EncodeToString(self.Bytes())
  e = nil
  return str
}

func (self *nntpAttachment) Filename() string {
  return self.filename
}

func (self *nntpAttachment) Filepath() string {
  return self.filepath
}

func (self *nntpAttachment) Mime() string {
  return self.mime
}

func (self *nntpAttachment) Extension() string {
  return self.ext
}

func (self *nntpAttachment) WriteTo(wr io.Writer) (int64, error) {
  w, err := wr.Write(self.Bytes())
  return int64(w), err
}

func (self *nntpAttachment) Hash() []byte {
  // hash it if we haven't already
  if self.hash == nil || len(self.hash) == 0 {
    h := sha512.Sum512(self.Bytes())
    self.hash = h[:]
  }
  return self.hash
}

// TODO: detect
func (self *nntpAttachment) NeedsThumbnail() bool {
  for _, ext := range []string{".png", ".jpeg", ".jpg", ".gif", ".bmp", ".webm", ".mp4", ".avi", ".mpeg", ".mpg", ".ogg", ".mp3", ".oga", ".opus", ".flac", ".ico", "m4a"} {
    if ext == strings.ToLower(self.ext) {
      return true
    }
  }
  return false
}

func (self *nntpAttachment) Header() textproto.MIMEHeader {
  return self.header
}

// create a plaintext attachment
func createPlaintextAttachment(msg []byte) NNTPAttachment {
  header := make(textproto.MIMEHeader)
  mime := "text/plain; charset=UTF-8"
  header.Set("Content-Type", mime)
  header.Set("Content-Transfer-Encoding", "base64")
  att := &nntpAttachment{
    mime:   mime,
    ext:    ".txt",
    header: header,
  }
  msg = bytes.Trim(msg, "\r")
  att.Write(msg)
  return att
}

// assumes base64'd
func createAttachment(content_type, fname string, body io.Reader) NNTPAttachment {

  media_type, _, err := mime.ParseMediaType(content_type)
  if err == nil {
    a := new(nntpAttachment)
    dec := base64.NewDecoder(base64.StdEncoding, body)
    _, err = io.Copy(a, dec)
    if err == nil {
      a.header = make(textproto.MIMEHeader)
      a.mime = media_type + "; charset=UTF-8"
      idx := strings.LastIndex(fname, ".")
      a.ext = ".txt"
      if idx > 0 {
        a.ext = fname[idx:]
      }
      a.header.Set("Content-Disposition", `form-data; filename="`+fname+`"; name="attachment"`)
      a.header.Set("Content-Type", a.mime)
      a.header.Set("Content-Transfer-Encoding", "base64")
      h := a.Hash()
      hashstr := base32.StdEncoding.EncodeToString(h[:])
      a.hash = h[:]
      a.filepath = hashstr + a.ext
      a.filename = fname
      return a
    }
  }
  return nil
}

func readAttachmentFromMimePartAndStore(part *multipart.Part, store ArticleStore) NNTPAttachment {
  hdr := part.Header
  att := &nntpAttachment{}
  att.store = store
  att.header = hdr
  content_type := hdr.Get("Content-Type")
  var err error
  att.mime, _, err = mime.ParseMediaType(content_type)
  att.filename = part.FileName()
  idx := strings.LastIndex(att.filename, ".")
  att.ext = ".txt"
  if idx > 0 {
    att.ext = att.filename[idx:]
  }
  h := sha512.New()
  transfer_encoding := hdr.Get("Content-Transfer-Encoding")
  var r io.Reader
  if transfer_encoding == "base64" {
    // decode
    r = base64.NewDecoder(base64.StdEncoding, part)
  } else {
    r = part
  }
  var fpath string
  var mw io.Writer
  if store == nil {
    mw = io.MultiWriter(att, h)
  } else {
    fname := randStr(10) + ".temp"
    fpath = filepath.Join(store.AttachmentDir(), fname)
    f, err := os.Create(fpath)
    if err != nil {
      log.Println("!!! failed to store attachment: ", err, "!!!")
      return nil
    }
    defer f.Close()
    if strings.ToLower(att.mime) == "text/plain" {
      mw = io.MultiWriter(f, h, att)
    } else {
      mw = io.MultiWriter(f, h)
    }
  }
  _, err = io.Copy(mw, r)
  if err != nil {
    log.Println("failed to read attachment from mimepart", err)
    if fpath != "" {
      DelFile(fpath)
    }
    return nil
  }
  hsh := h.Sum(nil)
  att.hash = hsh[:]
  enc := base32.StdEncoding
  hashstr := enc.EncodeToString(att.hash[:])
  att.filepath = hashstr + att.ext
  // we are good just return it
  if store == nil {
    return att
  }
  att_fpath := filepath.Join(store.AttachmentDir(), att.filepath)
  if !CheckFile(att_fpath) {
    // attachment isn't there
    // move it into it
    err = os.Rename(fpath, att_fpath)
  }
  if err == nil {
    // now thumbnail
    if !CheckFile(store.ThumbnailFilepath(att.filepath)) {
      store.GenerateThumbnail(att.filepath)
    }
  } else {
    // wtf?
    log.Println("!!! failed to store attachment", err, "!!!")
    DelFile(fpath)
  }
  return att
}
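A minimal sketch of how the helpers above might be exercised from inside the srnd package; the wrapper function, filename, and payload are illustrative and not part of the commit.

func exampleBuildAttachment() {
  // createAttachment expects a base64'd body and infers the extension from the filename
  payload := base64.StdEncoding.EncodeToString([]byte("hello attachment"))
  att := createAttachment("text/plain", "hello.txt", strings.NewReader(payload))
  if att == nil {
    log.Println("could not parse attachment")
    return
  }
  log.Println(att.Filename(), att.Mime(), att.Filepath(), att.NeedsThumbnail())
}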
50 contrib/backends/srndv2/src/srnd/cache_interface.go Normal file
@@ -0,0 +1,50 @@
package srnd

import (
  "log"
  "net/http"
)

type CacheInterface interface {
  RegenAll()
  RegenFrontPage()
  RegenOnModEvent(string, string, string, int)
  RegenerateBoard(group string)
  Regen(msg ArticleEntry)

  DeleteThreadMarkup(root_post_id string)
  DeleteBoardMarkup(group string)

  Start()
  Close()

  GetThreadChan() chan ArticleEntry
  GetGroupChan() chan groupRegenRequest
  GetHandler() http.Handler

  SetRequireCaptcha(required bool)
}

//TODO only pass needed config
func NewCache(cache_type, host, port, user, password string, cache_config, config map[string]string, db Database, store ArticleStore) CacheInterface {
  prefix := config["prefix"]
  webroot := config["webroot"]
  threads := mapGetInt(config, "regen_threads", 1)
  name := config["name"]
  attachments := mapGetInt(config, "allow_files", 1) == 1

  if cache_type == "file" {
    return NewFileCache(prefix, webroot, name, threads, attachments, db, store)
  }
  if cache_type == "null" {
    return NewNullCache(prefix, webroot, name, attachments, db, store)
  }
  if cache_type == "varnish" {
    url := cache_config["url"]
    bind_addr := cache_config["bind"]
    return NewVarnishCache(url, bind_addr, prefix, webroot, name, attachments, db, store)
  }

  log.Fatalf("invalid cache type: %s", cache_type)
  return nil
}
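For orientation, a sketch of wiring the factory above to the file-backed cache; db, store, and the option maps are assumed to come from the database, article store, and config code elsewhere in this commit.

func exampleNewFileBackedCache(db Database, store ArticleStore) CacheInterface {
  frontConf := map[string]string{
    "prefix":        "/",
    "webroot":       "webroot",
    "name":          "web.srndv2.test",
    "regen_threads": "2",
    "allow_files":   "1",
  }
  // cache_config is only consulted for the varnish backend, so an empty map is enough here
  c := NewCache("file", "", "", "", "", map[string]string{}, frontConf, db, store)
  c.Start()
  return c
}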
517 contrib/backends/srndv2/src/srnd/config.go Normal file
@@ -0,0 +1,517 @@
//
// config.go
//

package srnd

import (
  "encoding/base32"
  "fmt"
  "github.com/majestrate/configparser"
  "github.com/majestrate/nacl"
  "log"
  "net"
  "os"
  "path/filepath"
  "strings"
  "time"
)

type FeedConfig struct {
  policy           FeedPolicy
  quarks           map[string]string
  Addr             string
  sync             bool
  proxy_type       string
  proxy_addr       string
  username         string
  passwd           string
  linkauth_keyfile string
  tls_off          bool
  Name             string
  sync_interval    time.Duration
  connections      int
}

type APIConfig struct {
  srndAddr     string
  frontendAddr string
}

type CryptoConfig struct {
  privkey_file string
  cert_file    string
  hostname     string
  cert_dir     string
}

// pprof settings
type ProfilingConfig struct {
  bind   string
  enable bool
}

type HookConfig struct {
  name   string
  exec   string
  enable bool
}

type SRNdConfig struct {
  daemon   map[string]string
  crypto   *CryptoConfig
  store    map[string]string
  database map[string]string
  cache    map[string]string
  feeds    []FeedConfig
  frontend map[string]string
  system   map[string]string
  worker   map[string]string
  pprof    *ProfilingConfig
  hooks    []*HookConfig
}

// check for config files
// generate defaults on demand
func CheckConfig() {
  if !CheckFile("srnd.ini") {
    log.Println("No srnd.ini file found in working directory...")
    if !CheckFile(os.Getenv("SRND_INI_PATH")) {
      log.Printf("No config file found at %s...", os.Getenv("SRND_INI_PATH"))
      var conf *configparser.Configuration
      if !InstallerEnabled() {
        log.Println("Creating srnd.ini in working directory...")
        conf = GenSRNdConfig()
      } else {
        res := make(chan *configparser.Configuration)
        installer := NewInstaller(res)
        go installer.Start()
        conf = <-res
        installer.Stop()
        close(res)
      }
      err := configparser.Save(conf, "srnd.ini")
      if err != nil {
        log.Fatal("cannot generate srnd.ini", err)
      }
    }
    if !CheckFile("feeds.ini") {
      if !CheckFile(os.Getenv("SRND_FEEDS_INI_PATH")) {
        log.Println("no feeds.ini, creating...")
        err := GenFeedsConfig()
        if err != nil {
          log.Fatal("cannot generate feeds.ini", err)
        }
      }
    }
  }
}

// generate default feeds.ini
func GenFeedsConfig() error {
  conf := configparser.NewConfiguration()
  sect := conf.NewSection("feed-dummy")
  sect.Add("proxy-type", "socks4a")
  sect.Add("proxy-host", "127.0.0.1")
  sect.Add("proxy-port", "9050")
  sect.Add("host", "dummy")
  sect.Add("port", "119")
  sect.Add("connections", "1")

  sect = conf.NewSection("dummy")
  sect.Add("overchan.*", "1")
  sect.Add("ano.paste", "0")
  sect.Add("ctl", "1")

  return configparser.Save(conf, "feeds.ini")
}

// generate default srnd.ini
func GenSRNdConfig() *configparser.Configuration {
  conf := configparser.NewConfiguration()

  // nntp related section
  sect := conf.NewSection("nntp")
  sect.Add("instance_name", "test.srndv2.tld")
  sect.Add("bind", "127.0.0.1:1199")
  sect.Add("sync_on_start", "1")
  sect.Add("allow_anon", "0")
  sect.Add("allow_anon_attachments", "0")
  sect.Add("allow_attachments", "1")
  sect.Add("require_tls", "1")
  sect.Add("anon_nntp", "0")
  sect.Add("feeds", filepath.Join(".", "feeds.d"))
  sect.Add("archive", "0")
  sect.Add("article_lifetime", "0")

  // profiling settings
  sect = conf.NewSection("pprof")
  sect.Add("enable", "0")
  sect.Add("bind", "127.0.0.1:17000")

  // dummy hook
  sect = conf.NewSection("hook-dummy")
  sect.Add("enable", "0")
  sect.Add("exec", "/bin/true")

  // crypto related section
  sect = conf.NewSection("crypto")
  sect.Add("tls-keyname", "overchan")
  sect.Add("tls-hostname", "!!put-hostname-or-ip-of-server-here")
  sect.Add("tls-trust-dir", "certs")

  // article store section
  sect = conf.NewSection("articles")

  sect.Add("store_dir", "articles")
  sect.Add("incoming_dir", "/tmp/articles")
  sect.Add("attachments_dir", "webroot/img")
  sect.Add("thumbs_dir", "webroot/thm")
  sect.Add("convert_bin", "/usr/bin/convert")
  sect.Add("ffmpegthumbnailer_bin", "/usr/bin/ffmpeg")
  sect.Add("sox_bin", "/usr/bin/sox")
  sect.Add("placeholder_thumbnail", "contrib/static/placeholder.png")
  sect.Add("compression", "0")

  // database backend config
  sect = conf.NewSection("database")

  sect.Add("type", "postgres")
  sect.Add("schema", "srnd")
  sect.Add("host", "/var/run/postgresql")
  sect.Add("port", "")
  sect.Add("user", "")
  sect.Add("password", "")
  sect.Add("maxconns", "10")
  sect.Add("connlife", "10")
  sect.Add("connidle", "10")

  // cache backend config
  sect = conf.NewSection("cache")
  // defaults to null
  sect.Add("type", "null")

  // baked in static html frontend
  sect = conf.NewSection("frontend")
  sect.Add("enable", "1")
  sect.Add("allow_files", "1")
  sect.Add("regen_on_start", "0")
  sect.Add("regen_threads", "2")
  sect.Add("bind", "[::]:18000")
  sect.Add("name", "web.srndv2.test")
  sect.Add("webroot", "webroot")
  sect.Add("minimize_html", "0")
  sect.Add("prefix", "/")
  sect.Add("static_files", "contrib")
  sect.Add("templates", "contrib/templates/default")
  sect.Add("translations", "contrib/translations")
  sect.Add("markup_script", "contrib/lua/memeposting.lua")
  sect.Add("locale", "en")
  sect.Add("domain", "localhost")
  sect.Add("json-api", "0")
  sect.Add("json-api-username", "fucking-change-this-value")
  sect.Add("json-api-password", "seriously-fucking-change-this-value")
  secret_bytes := nacl.RandBytes(8)
  secret := base32.StdEncoding.EncodeToString(secret_bytes)
  sect.Add("api-secret", secret)

  return conf
}

// save a list of feeds to overwrite feeds.ini
func SaveFeeds(feeds []FeedConfig) (err error) {
  conf := configparser.NewConfiguration()
  for _, feed := range feeds {
    if len(feed.Name) == 0 {
      // don't do feed with no name
      continue
    }
    sect := conf.NewSection("feed-" + feed.Name)
    if len(feed.proxy_type) > 0 {
      sect.Add("proxy-type", feed.proxy_type)
    }
    phost, pport, _ := net.SplitHostPort(feed.proxy_addr)
    sect.Add("proxy-host", phost)
    sect.Add("proxy-port", pport)
    host, port, _ := net.SplitHostPort(feed.Addr)
    sect.Add("host", host)
    sect.Add("port", port)
    sync := "0"
    if feed.sync {
      sync = "1"
    }
    sect.Add("sync", sync)
    interval := feed.sync_interval / time.Second
    sect.Add("sync-interval", fmt.Sprintf("%d", int(interval)))
    sect.Add("username", feed.username)
    sect.Add("password", feed.passwd)
    sect.Add("connections", fmt.Sprintf("%d", feed.connections))
    sect = conf.NewSection(feed.Name)
    for k, v := range feed.policy.rules {
      sect.Add(k, v)
    }
  }
  return configparser.Save(conf, "feeds.ini")
}

// read config files
func ReadConfig() *SRNdConfig {

  // begin read srnd.ini

  fname := "srnd.ini"

  if os.Getenv("SRND_INI_PATH") != "" {
    if CheckFile(os.Getenv("SRND_INI_PATH")) {
      log.Printf("found SRND config at %s...", os.Getenv("SRND_INI_PATH"))
      fname = os.Getenv("SRND_INI_PATH")
    }
  }
  var s *configparser.Section
  conf, err := configparser.Read(fname)
  if err != nil {
    log.Fatal("cannot read config file ", fname)
    return nil
  }
  var sconf SRNdConfig

  s, err = conf.Section("pprof")
  if err == nil {
    opts := s.Options()
    sconf.pprof = new(ProfilingConfig)
    sconf.pprof.enable = opts["enable"] == "1"
    sconf.pprof.bind = opts["bind"]
  }

  sections, _ := conf.Find("hook-*")
  if len(sections) > 0 {
    for _, hook := range sections {
      opts := hook.Options()
      sconf.hooks = append(sconf.hooks, &HookConfig{
        exec:   opts["exec"],
        enable: opts["enable"] == "1",
        name:   hook.Name(),
      })
    }
  }

  s, err = conf.Section("crypto")
  if err == nil {
    opts := s.Options()
    sconf.crypto = new(CryptoConfig)
    k := opts["tls-keyname"]
    h := opts["tls-hostname"]
    if strings.HasPrefix(h, "!") || len(h) == 0 {
      log.Fatal("please set tls-hostname to be the hostname or ip address of your server")
    } else {
      sconf.crypto.hostname = h
      sconf.crypto.privkey_file = k + "-" + h + ".key"
      sconf.crypto.cert_dir = opts["tls-trust-dir"]
      sconf.crypto.cert_file = filepath.Join(sconf.crypto.cert_dir, k+"-"+h+".crt")
    }
  } else {
    // we have no crypto section
    log.Println("!!! we will not use encryption for nntp as no crypto section is specified in srnd.ini")
  }
  s, err = conf.Section("nntp")
  if err != nil {
    log.Println("no section 'nntp' in srnd.ini")
    return nil
  }

  sconf.daemon = s.Options()

  s, err = conf.Section("database")
  if err != nil {
    log.Println("no section 'database' in srnd.ini")
    return nil
  }

  sconf.database = s.Options()

  s, err = conf.Section("cache")
  if err != nil {
    log.Println("no section 'cache' in srnd.ini")
    log.Println("falling back to default cache config")
    sconf.cache = make(map[string]string)
    sconf.cache["type"] = "file"
  } else {
    sconf.cache = s.Options()
  }

  s, err = conf.Section("articles")
  if err != nil {
    log.Println("no section 'articles' in srnd.ini")
    return nil
  }

  sconf.store = s.Options()

  // frontend config

  s, err = conf.Section("frontend")

  if err != nil {
    log.Println("no frontend section in srnd.ini, disabling frontend")
    sconf.frontend = make(map[string]string)
    sconf.frontend["enable"] = "0"
  } else {
    log.Println("frontend configured in srnd.ini")
    sconf.frontend = s.Options()
    _, ok := sconf.frontend["enable"]
    if !ok {
      // default to "0"
      sconf.frontend["enable"] = "0"
    }
    enable, _ := sconf.frontend["enable"]
    if enable == "1" {
      log.Println("frontend enabled in srnd.ini")
    } else {
      log.Println("frontend not enabled in srnd.ini, disabling frontend")
    }
  }

  // begin load feeds.ini

  fname = "feeds.ini"

  if os.Getenv("SRND_FEEDS_INI_PATH") != "" {
    if CheckFile(os.Getenv("SRND_FEEDS_INI_PATH")) {
      log.Printf("found feeds config at %s...", os.Getenv("SRND_FEEDS_INI_PATH"))
      fname = os.Getenv("SRND_FEEDS_INI_PATH")
    }
  }

  confs, err := feedParse(fname)
  if err != nil {
    log.Fatal("failed to parse", fname, err)
  }

  sconf.feeds = append(sconf.feeds, confs...)

  var feeds_ok bool
  // check for feeds option
  fname, feeds_ok = sconf.daemon["feeds"]

  if feeds_ok {
    // load feeds dir first
    feeds, err := filepath.Glob(filepath.Join(fname, "*.ini"))
    if err == nil {
      for _, f := range feeds {
        log.Println("load feed", f)
        confs, err := feedParse(f)
        if err != nil {
          log.Fatal("failed to parse feed", f, err)
        }
        sconf.feeds = append(sconf.feeds, confs...)
      }
    }
  }

  return &sconf
}

func feedParse(fname string) (confs []FeedConfig, err error) {

  conf, err := configparser.Read(fname)

  if err != nil {
    return nil, err
  }

  sections, err := conf.Find("feed-*")

  var num_sections int
  num_sections = len(sections)

  if num_sections > 0 {
    // load feeds
    for _, sect := range sections {
      var fconf FeedConfig
      // check for proxy settings
      val := sect.ValueOf("proxy-type")
      if len(val) > 0 && strings.ToLower(val) != "none" {
        fconf.proxy_type = strings.ToLower(val)
        proxy_host := sect.ValueOf("proxy-host")
        proxy_port := sect.ValueOf("proxy-port")
        fconf.proxy_addr = strings.Trim(proxy_host, " ") + ":" + strings.Trim(proxy_port, " ")
      }

      host := sect.ValueOf("host")
      port := sect.ValueOf("port")

      // check to see if we want to sync with them first
      val = sect.ValueOf("sync")
      if val == "1" {
        fconf.sync = true
        // sync interval in seconds
        i := mapGetInt(sect.Options(), "sync-interval", 60)
        if i < 60 {
          i = 60
        }
        fconf.sync_interval = time.Second * time.Duration(i)
      }

      // concurrent connection count
      fconf.connections = mapGetInt(sect.Options(), "connections", 1)

      // username / password auth
      fconf.username = sect.ValueOf("username")
      fconf.passwd = sect.ValueOf("password")
      fconf.tls_off = sect.ValueOf("disabletls") == "1"

      // load feed polcies
      sect_name := sect.Name()[5:]
      fconf.Name = sect_name
      if len(host) > 0 && len(port) > 0 {
        // host port specified
        fconf.Addr = host + ":" + port
      } else {
        // no host / port specified
        fconf.Addr = strings.Trim(sect_name, " ")
      }
      feed_sect, err := conf.Section(sect_name)
      if err != nil {
        log.Fatal("no section", sect_name, "in ", fname)
      }
      opts := feed_sect.Options()
      fconf.policy.rules = make(map[string]string)
      for k, v := range opts {
        fconf.policy.rules[k] = v
      }
      confs = append(confs, fconf)
    }
  }
  return
}

// fatals on failed validation
func (self *SRNdConfig) Validate() {
  // check for daemon section entries
  daemon_param := []string{"bind", "instance_name", "allow_anon", "allow_anon_attachments"}
  for _, p := range daemon_param {
    _, ok := self.daemon[p]
    if !ok {
      log.Fatalf("in section [nntp], no parameter '%s' provided", p)
    }
  }

  // check validity of store directories
  store_dirs := []string{"store", "incoming", "attachments", "thumbs"}
  for _, d := range store_dirs {
    k := d + "_dir"
    _, ok := self.store[k]
    if !ok {
      log.Fatalf("in section [store], no parameter '%s' provided", k)
    }
  }

  // check database parameters existing
  db_param := []string{"host", "port", "user", "password", "type", "schema"}
  for _, p := range db_param {
    _, ok := self.database[p]
    if !ok {
      log.Fatalf("in section [database], no parameter '%s' provided", p)
    }
  }
}
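A sketch of the startup order these helpers suggest; the wrapper function is illustrative, and error handling beyond the built-in log.Fatal calls is omitted.

func exampleLoadConfig() *SRNdConfig {
  CheckConfig()        // writes default srnd.ini / feeds.ini when missing
  conf := ReadConfig() // returns nil when a required section is absent
  if conf == nil {
    log.Fatal("failed to load srnd.ini")
  }
  conf.Validate() // log.Fatalf's on missing [nntp], [store] or [database] parameters
  return conf
}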
1091 contrib/backends/srndv2/src/srnd/daemon.go Normal file
File diff suppressed because it is too large
335 contrib/backends/srndv2/src/srnd/database.go Normal file
@@ -0,0 +1,335 @@
//
// database.go
//
package srnd

import (
  "log"
  "net"
  "strings"
  "time"
)

// a ( MessageID , newsgroup ) tuple
type ArticleEntry [2]string

func (self ArticleEntry) Newsgroup() string {
  return self[1]
}

func (self ArticleEntry) MessageID() string {
  return self[0]
}

// a (messageID , parent messageID) tuple
type MessageIDTuple [2]string

func (self MessageIDTuple) MessageID() string {
  return strings.Trim(self[0], " ")
}

func (self MessageIDTuple) Reference() string {
  r := strings.Trim(self[1], " ")
  if len(r) == 0 {
    return self.MessageID()
  }
  return r
}

// a ( time point, magnitude ) tuple
type PostEntry [2]int64

func (self PostEntry) Time() time.Time {
  return time.Unix(self[0], 0)
}

func (self PostEntry) Count() int64 {
  return self[1]
}

// stats about newsgroup postings
type NewsgroupStats struct {
  Posted []PostEntry
  Delted []PostEntry
  Hits   []PostEntry
  Start  time.Time
  End    time.Time
  Name   string
}

type PostingStatsEntry struct {
  Groups []NewsgroupStats
}

type PostingStats struct {
  History []PostingStatsEntry
}

type Database interface {
  Close()
  CreateTables()
  HasNewsgroup(group string) bool
  HasArticle(message_id string) bool
  HasArticleLocal(message_id string) bool
  RegisterNewsgroup(group string)
  RegisterArticle(article NNTPMessage) error
  GetAllArticlesInGroup(group string, send chan ArticleEntry)
  CountAllArticlesInGroup(group string) (int64, error)
  GetAllArticles() []ArticleEntry

  SetConnectionLifetime(seconds int)
  SetMaxOpenConns(n int)
  SetMaxIdleConns(n int)

  // check if a newsgroup is banned
  NewsgroupBanned(group string) (bool, error)

  // ban / unban newsgroup
  BanNewsgroup(group string) error
  UnbanNewsgroup(group string) error

  // delete an entire newsgroup
  // delete from the article store too
  NukeNewsgroup(group string, store ArticleStore)

  // return true if this is root post has expired
  IsExpired(root_message_id string) bool

  // get an article's MessageID given the hash of the MessageID
  // return an article entry or nil when it doesn't exist + and error if it happened
  GetMessageIDByHash(hash string) (ArticleEntry, error)

  // get root message_id, newsgroup, pageno for a post regardless if it's rootpost or not
  GetInfoForMessage(msgid string) (string, string, int64, error)

  // what page is the thread with this root post on?
  // return newsgroup, pageno
  GetPageForRootMessage(root_message_id string) (string, int64, error)

  // record that a message given a message id was posted signed by this pubkey
  RegisterSigned(message_id, pubkey string) error

  // get the number of articles we have
  ArticleCount() int64

  // return true if this thread has any replies
  ThreadHasReplies(root_message_id string) bool

  // get the number of posts in a certain newsgroup since N seconds ago
  // if N <= 0 then count all we have now
  CountPostsInGroup(group string, time_frame int64) int64

  // get all replies to a thread
  // if last > 0 then get that many of the last replies
  // start at reply number start
  GetThreadReplies(root_message_id string, start, last int) []string

  // count the number of replies to this thread
  CountThreadReplies(root_message_id string) int64

  // get all attachments for this message
  GetPostAttachments(message_id string) []string

  // get all attachments for this message
  GetPostAttachmentModels(prefix, message_id string) []AttachmentModel

  // return true if this newsgroup has posts
  GroupHasPosts(newsgroup string) bool

  // get all active threads on a board
  // send each thread's ArticleEntry down a channel
  GetGroupThreads(newsgroup string, send chan ArticleEntry)

  // get every message id for root posts that need to be expired in a newsgroup
  // threadcount is the upperbound limit to how many root posts we keep
  GetRootPostsForExpiration(newsgroup string, threadcount int) []string

  // get the number of pages a board has
  GetGroupPageCount(newsgroup string) int64

  // get board page number N
  // prefix and frontend are injected
  // does not load replies for thread, only gets root posts
  GetGroupForPage(prefix, frontend, newsgroup string, pageno, perpage int) BoardModel

  // get the root posts of the last N bumped threads in a given newsgroup or "" for ukko
  GetLastBumpedThreads(newsgroup string, threadcount int) []ArticleEntry

  // get root posts of last N bumped threads with pagination offset
  GetLastBumpedThreadsPaginated(newsgroup string, threadcount, offset int) []ArticleEntry

  // get the PostModels for replies to a thread
  // prefix is injected into the post models
  GetThreadReplyPostModels(prefix, rootMessageID string, start, limit int) []PostModel

  // get a post model for a post
  // prefix is injected into the post model
  GetPostModel(prefix, messageID string) PostModel

  // add a public key to the database
  AddModPubkey(pubkey string) error

  // mark that a mod with this pubkey can act on all boards
  MarkModPubkeyGlobal(pubkey string) error

  // revoke mod with this pubkey the privilege of being able to act on all boards
  UnMarkModPubkeyGlobal(pubkey string) error

  // check if this mod pubkey can moderate at a global level
  CheckModPubkeyGlobal(pubkey string) bool

  // check if a mod with this pubkey has permission to moderate at all
  CheckModPubkey(pubkey string) bool

  // check if a pubkey has admin privs
  CheckAdminPubkey(pubkey string) (bool, error)

  // mark a key as having admin privs
  MarkPubkeyAdmin(pubkey string) error

  // unmark a key as having admin privs
  UnmarkPubkeyAdmin(pubkey string) error

  // check if a mod with this pubkey can moderate on the given newsgroup
  CheckModPubkeyCanModGroup(pubkey, newsgroup string) bool

  // add a pubkey to be able to mod a newsgroup
  MarkModPubkeyCanModGroup(pubkey, newsgroup string) error

  // remote a pubkey to they can't mod a newsgroup
  UnMarkModPubkeyCanModGroup(pubkey, newsgroup string) error

  // ban an article
  BanArticle(messageID, reason string) error

  // check if an article is banned or not
  ArticleBanned(messageID string) bool

  // Get ip address given the encrypted version
  // return emtpy string if we don't have it
  GetIPAddress(encAddr string) (string, error)

  // check if an ip is banned from our local
  CheckIPBanned(addr string) (bool, error)

  // check if an encrypted ip is banned from our local
  CheckEncIPBanned(encAddr string) (bool, error)

  // ban an ip address from the local
  BanAddr(addr string) error

  // unban an ip address from the local
  UnbanAddr(addr string) error

  // ban an encrypted ip address from the remote
  BanEncAddr(encAddr string) error

  // return the encrypted version of an IPAddress
  // if it's not already there insert it into the database
  GetEncAddress(addr string) (string, error)

  // get the decryption key for an encrypted address
  // return empty string if we don't have it
  GetEncKey(encAddr string) (string, error)

  // delete an article from the database
  DeleteArticle(msg_id string) error

  // detele the existance of a thread from the threads table, does NOT remove replies
  DeleteThread(root_msg_id string) error

  // get threads per page for a newsgroup
  GetThreadsPerPage(group string) (int, error)

  // get pages per board for a newsgroup
  GetPagesPerBoard(group string) (int, error)

  // get every newsgroup we know of
  GetAllNewsgroups() []string

  // get all post models in a newsgroup
  // ordered from oldest to newest
  GetPostsInGroup(group string) ([]PostModel, error)

  // get the numerical id of the last , first article for a given group
  GetLastAndFirstForGroup(group string) (int64, int64, error)

  // get a message id give a newsgroup and the nntp id
  GetMessageIDForNNTPID(group string, id int64) (string, error)

  // get nntp id for a given message-id
  GetNNTPIDForMessageID(group, msgid string) (int64, error)

  // get the last N days post count in decending order
  GetLastDaysPosts(n int64) []PostEntry

  // get the last N days post count in decending order
  GetLastDaysPostsForGroup(newsgroup string, n int64) []PostEntry

  // get post history per month since beginning of time
  GetMonthlyPostHistory() []PostEntry

  // get the last N posts that were made globally
  GetLastPostedPostModels(prefix string, n int64) []PostModel

  // check if an nntp login cred is correct
  CheckNNTPLogin(username, passwd string) (bool, error)

  // add an nntp login credential
  AddNNTPLogin(username, passwd string) error

  // remove an nntp login credential
  RemoveNNTPLogin(username string) error

  // check if an nntp login credential given a user exists
  CheckNNTPUserExists(username string) (bool, error)

  // get the message ids of an article that has this header with the given value
  GetMessageIDByHeader(name, value string) ([]string, error)

  // get the headers for a message given its message-id
  GetHeadersForMessage(msgid string) (ArticleHeaders, error)

  // get all message-ids posted by posters in this cidr
  GetMessageIDByCIDR(cidr *net.IPNet) ([]string, error)

  // get all message-ids posted by poster with encrypted ip
  GetMessageIDByEncryptedIP(encaddr string) ([]string, error)

  // check if this public key is banned from posting
  PubkeyIsBanned(pubkey string) (bool, error)

  // ban a public key from posting
  BanPubkey(pubkey string) error

  // get all message-id posted before a time
  GetPostsBefore(t time.Time) ([]string, error)

  // get statistics about posting in a time slice
  GetPostingStats(granularity, begin, end int64) (PostingStats, error)

  // peform search query
  SearchQuery(prefix, group, text string, chnl chan PostModel) error

  // find posts with similar hash
  SearchByHash(prefix, group, posthash string, chnl chan PostModel) error

  // get full thread model
  GetThreadModel(prefix, root_msgid string) (ThreadModel, error)

  // get post models with nntp id in a newsgroup
  GetNNTPPostsInGroup(newsgroup string) ([]PostModel, error)

  // get post message-id where hash is similar to string
  GetCitesByPostHashLike(like string) ([]MessageIDTuple, error)
}

func NewDatabase(db_type, schema, host, port, user, password string) Database {
  if db_type == "postgres" {
    if schema == "srnd" {
      return NewPostgresDatabase(host, port, user, password)
    }
  }
  log.Fatalf("invalid database type: %s/%s", db_type, schema)
  return nil
}
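A sketch of opening the postgres backend through the factory above; the wrapper function is illustrative, and sconf stands in for a loaded SRNdConfig whose [database] keys follow GenSRNdConfig in config.go.

func exampleOpenDatabase(sconf *SRNdConfig) Database {
  d := sconf.database
  db := NewDatabase(d["type"], d["schema"], d["host"], d["port"], d["user"], d["password"])
  db.CreateTables()
  log.Println("articles known:", db.ArticleCount())
  return db
}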
151 contrib/backends/srndv2/src/srnd/expiration.go Normal file
@@ -0,0 +1,151 @@
//
// expiration.go
// content expiration
//
package srnd

import (
  "log"
  "os"
  "path/filepath"
  "time"
)

// content expiration interface
type ExpirationCore interface {
  // do expiration for a group
  ExpireGroup(newsgroup string, keep int)
  // Delete a single post and all children
  ExpirePost(messageID string)
  // expire all orphaned articles
  ExpireOrphans()
  // expire all articles posted before time
  ExpireBefore(t time.Time)
}

type ExpireCacheFunc func(string, string, string)

func createExpirationCore(database Database, store ArticleStore, ex ExpireCacheFunc) ExpirationCore {
  return expire{database, store, ex}
}

type deleteEvent string

func (self deleteEvent) Path() string {
  return string(self)
}

func (self deleteEvent) MessageID() string {
  return filepath.Base(string(self))
}

type expire struct {
  database    Database
  store       ArticleStore
  expireCache ExpireCacheFunc
}

func (self expire) ExpirePost(messageID string) {
  self.handleEvent(deleteEvent(self.store.GetFilename(messageID)))
  // get article headers
  headers := self.store.GetHeaders(messageID)
  if headers != nil {
    group := headers.Get("Newsgroups", "")
    // is this a root post ?
    ref := headers.Get("References", "")
    if ref == "" || ref == messageID {
      // ya, expire the entire thread
      self.ExpireThread(group, messageID)
    } else {
      self.expireCache(group, messageID, ref)
    }
  }
}

func (self expire) ExpireGroup(newsgroup string, keep int) {
  log.Println("Expire group", newsgroup, keep)
  threads := self.database.GetRootPostsForExpiration(newsgroup, keep)
  for _, root := range threads {
    self.ExpireThread(newsgroup, root)
  }
}

func (self expire) ExpireThread(group, rootMsgid string) {
  replies, err := self.database.GetMessageIDByHeader("References", rootMsgid)
  if err == nil {
    for _, reply := range replies {
      self.handleEvent(deleteEvent(self.store.GetFilename(reply)))
    }
  }
  self.database.DeleteThread(rootMsgid)
  self.expireCache(group, rootMsgid, rootMsgid)
}

func (self expire) ExpireBefore(t time.Time) {
  articles, err := self.database.GetPostsBefore(t)
  if err == nil {
    for _, msgid := range articles {
      self.ExpirePost(msgid)
    }
  } else {
    log.Println("failed to expire older posts", err)
  }
}

// expire all orphaned articles
func (self expire) ExpireOrphans() {
  // get all articles in database
  articles := self.database.GetAllArticles()
  if articles != nil {
    log.Println("expire all orphan posts")
    // for each article
    for _, article := range articles {
      // load headers
      hdr := self.store.GetHeaders(article.MessageID())
      if hdr == nil {
        // article does not exist?
        // ensure it's deleted
        self.ExpirePost(article.MessageID())
      } else {
        // check if we are a reply
        rootMsgid := hdr.Get("References", "")
        if len(rootMsgid) == 0 {
          // root post
        } else {
          // reply
          // do we have this root post?
          if self.store.HasArticle(rootMsgid) {
            // yes, do nothing
          } else {
            // no, expire post
            self.ExpirePost(article.MessageID())
          }
        }
      }
    }
  }
}

func (self expire) handleEvent(ev deleteEvent) {
  log.Println("expire", ev.MessageID())
  atts := self.database.GetPostAttachments(ev.MessageID())
  // remove all attachments
  if atts != nil {
    for _, att := range atts {
      img := self.store.AttachmentFilepath(att)
      os.Remove(img)
      thm := self.store.ThumbnailFilepath(att)
      os.Remove(thm)
    }
  }
  err := self.database.BanArticle(ev.MessageID(), "expired")
  if err != nil {
    log.Println("failed to ban for expiration", err)
  }
  err = self.database.DeleteArticle(ev.MessageID())
  if err != nil {
    log.Println("failed to delete article", err)
  }
  // remove article
  os.Remove(ev.Path())
}
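A sketch of wiring the expiration core above to a cache-invalidation callback; the wrapper function, the 30-day cutoff, and the callback body are illustrative, and db/store are assumed to exist.

func exampleExpireOldPosts(db Database, store ArticleStore) {
  exp := createExpirationCore(db, store, func(group, msgid, ref string) {
    // hypothetical callback: a real one would queue a regen for this thread
    log.Println("invalidate", group, msgid, ref)
  })
  // drop everything posted more than 30 days ago, then sweep orphans
  exp.ExpireBefore(time.Now().AddDate(0, 0, -30))
  exp.ExpireOrphans()
}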
13 contrib/backends/srndv2/src/srnd/feeds.ini.bak Normal file
@@ -0,0 +1,13 @@
[feed-dummy]
proxy-type=socks4a
proxy-host=127.0.0.1
proxy-port=9050
host=dummy
port=119
connections=1

[dummy]
overchan.*=1
ano.paste=0
ctl=1

0 contrib/backends/srndv2/src/srnd/file.txt Normal file
374 contrib/backends/srndv2/src/srnd/file_cache.go Normal file
@@ -0,0 +1,374 @@
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
type FileCache struct {
|
||||
database Database
|
||||
store ArticleStore
|
||||
|
||||
webroot_dir string
|
||||
name string
|
||||
|
||||
regen_threads int
|
||||
attachments bool
|
||||
requireCaptcha bool
|
||||
|
||||
prefix string
|
||||
regenThreadChan chan ArticleEntry
|
||||
regenGroupChan chan groupRegenRequest
|
||||
regenBoardMap map[string]groupRegenRequest
|
||||
regenThreadMap map[string]ArticleEntry
|
||||
regenCatalogMap map[string]bool
|
||||
|
||||
regenBoardTicker *time.Ticker
|
||||
ukkoTicker *time.Ticker
|
||||
longTermTicker *time.Ticker
|
||||
regenThreadTicker *time.Ticker
|
||||
regenCatalogTicker *time.Ticker
|
||||
|
||||
regenThreadLock sync.RWMutex
|
||||
regenBoardLock sync.RWMutex
|
||||
regenCatalogLock sync.RWMutex
|
||||
}
|
||||
|
||||
func (self *FileCache) DeleteBoardMarkup(group string) {
|
||||
pages64 := self.database.GetGroupPageCount(group)
|
||||
pages := int(pages64)
|
||||
for page := 0; page < pages; page++ {
|
||||
fname := self.getFilenameForBoardPage(group, page, false)
|
||||
os.Remove(fname)
|
||||
fname = self.getFilenameForBoardPage(group, page, true)
|
||||
os.Remove(fname)
|
||||
}
|
||||
}
|
||||
|
||||
// try to delete root post's page
|
||||
func (self *FileCache) DeleteThreadMarkup(root_post_id string) {
|
||||
fname := self.getFilenameForThread(root_post_id, false)
|
||||
os.Remove(fname)
|
||||
fname = self.getFilenameForThread(root_post_id, true)
|
||||
os.Remove(fname)
|
||||
}
|
||||
|
||||
func (self *FileCache) getFilenameForThread(root_post_id string, json bool) string {
|
||||
var ext string
|
||||
if json {
|
||||
ext = "json"
|
||||
} else {
|
||||
ext = "html"
|
||||
}
|
||||
fname := fmt.Sprintf("thread-%s.%s", HashMessageID(root_post_id), ext)
|
||||
return filepath.Join(self.webroot_dir, fname)
|
||||
}
|
||||
|
||||
func (self *FileCache) getFilenameForBoardPage(boardname string, pageno int, json bool) string {
|
||||
var ext string
|
||||
if json {
|
||||
ext = "json"
|
||||
} else {
|
||||
ext = "html"
|
||||
}
|
||||
fname := fmt.Sprintf("%s-%d.%s", boardname, pageno, ext)
|
||||
return filepath.Join(self.webroot_dir, fname)
|
||||
}
|
||||
|
||||
func (self *FileCache) getFilenameForCatalog(boardname string) string {
|
||||
fname := fmt.Sprintf("catalog-%s.html", boardname)
|
||||
return filepath.Join(self.webroot_dir, fname)
|
||||
}
|
||||
|
||||
// regen every newsgroup
|
||||
func (self *FileCache) RegenAll() {
|
||||
log.Println("regen all on http frontend")
|
||||
|
||||
// get all groups
|
||||
groups := self.database.GetAllNewsgroups()
|
||||
if groups != nil {
|
||||
for _, group := range groups {
|
||||
// send every thread for this group down the regen thread channel
|
||||
go self.database.GetGroupThreads(group, self.regenThreadChan)
|
||||
pages := self.database.GetGroupPageCount(group)
|
||||
var pg int64
|
||||
for pg = 0; pg < pages; pg++ {
|
||||
self.regenGroupChan <- groupRegenRequest{group, int(pg)}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *FileCache) regenLongTerm() {
|
||||
wr, err := os.Create(filepath.Join(self.webroot_dir, "history.html"))
|
||||
defer wr.Close()
|
||||
if err != nil {
|
||||
log.Println("cannot render history graph", err)
|
||||
return
|
||||
}
|
||||
template.genGraphs(self.prefix, wr, self.database)
|
||||
}
|
||||
|
||||
func (self *FileCache) pollLongTerm() {
|
||||
for {
|
||||
<-self.longTermTicker.C
|
||||
// regenerate long term stuff
|
||||
self.regenLongTerm()
|
||||
}
|
||||
}
|
||||
|
||||
func (self *FileCache) pollRegen() {
|
||||
for {
|
||||
select {
|
||||
// listen for regen board requests
|
||||
case req := <-self.regenGroupChan:
|
||||
self.regenBoardLock.Lock()
|
||||
self.regenBoardMap[fmt.Sprintf("%s|%s", req.group, req.page)] = req
|
||||
self.regenBoardLock.Unlock()
|
||||
|
||||
self.regenCatalogLock.Lock()
|
||||
self.regenCatalogMap[req.group] = true
|
||||
self.regenCatalogLock.Unlock()
|
||||
// listen for regen thread requests
|
||||
case entry := <-self.regenThreadChan:
|
||||
self.regenThreadLock.Lock()
|
||||
self.regenThreadMap[fmt.Sprintf("%s|%s", entry[0], entry[1])] = entry
|
||||
self.regenThreadLock.Unlock()
|
||||
// regen ukko
|
||||
case _ = <-self.ukkoTicker.C:
|
||||
self.regenUkko()
|
||||
self.RegenFrontPage()
|
||||
case _ = <-self.regenThreadTicker.C:
|
||||
self.regenThreadLock.Lock()
|
||||
for _, entry := range self.regenThreadMap {
|
||||
self.regenerateThread(entry, false)
|
||||
self.regenerateThread(entry, true)
|
||||
}
|
||||
self.regenThreadMap = make(map[string]ArticleEntry)
|
||||
self.regenThreadLock.Unlock()
|
||||
case _ = <-self.regenBoardTicker.C:
|
||||
self.regenBoardLock.Lock()
|
||||
for _, v := range self.regenBoardMap {
|
||||
self.regenerateBoardPage(v.group, v.page, false)
|
||||
self.regenerateBoardPage(v.group, v.page, true)
|
||||
}
|
||||
self.regenBoardMap = make(map[string]groupRegenRequest)
|
||||
self.regenBoardLock.Unlock()
|
||||
case _ = <-self.regenCatalogTicker.C:
|
||||
self.regenCatalogLock.Lock()
|
||||
for board, _ := range self.regenCatalogMap {
|
||||
self.regenerateCatalog(board)
|
||||
}
|
||||
self.regenCatalogMap = make(map[string]bool)
|
||||
self.regenCatalogLock.Unlock()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// regen every page of the board
|
||||
func (self *FileCache) RegenerateBoard(group string) {
|
||||
pages, _ := self.database.GetPagesPerBoard(group)
|
||||
for page := 0; page < pages; page++ {
|
||||
self.regenerateBoardPage(group, page, false)
|
||||
self.regenerateBoardPage(group, page, true)
|
||||
}
|
||||
}
|
||||
|
||||
// regenerate just a thread page
|
||||
func (self *FileCache) regenerateThread(root ArticleEntry, json bool) {
|
||||
msgid := root.MessageID()
|
||||
if self.store.HasArticle(msgid) {
|
||||
fname := self.getFilenameForThread(msgid, json)
|
||||
wr, err := os.Create(fname)
|
||||
defer wr.Close()
|
||||
if err != nil {
|
||||
log.Println("did not write", fname, err)
|
||||
return
|
||||
}
|
||||
template.genThread(self.attachments, self.requireCaptcha, root, self.prefix, self.name, wr, self.database, json)
|
||||
} else {
|
||||
log.Println("don't have root post", msgid, "not regenerating thread")
|
||||
}
|
||||
}
|
||||
|
||||
// regenerate just a page on a board
|
||||
func (self *FileCache) regenerateBoardPage(board string, page int, json bool) {
|
||||
fname := self.getFilenameForBoardPage(board, page, json)
|
||||
wr, err := os.Create(fname)
|
||||
defer wr.Close()
|
||||
if err != nil {
|
||||
log.Println("error generating board page", page, "for", board, err)
|
||||
return
|
||||
}
|
||||
template.genBoardPage(self.attachments, self.requireCaptcha, self.prefix, self.name, board, page, wr, self.database, json)
|
||||
}
|
||||
|
||||
// regenerate the catalog for a board
|
||||
func (self *FileCache) regenerateCatalog(board string) {
|
||||
fname := self.getFilenameForCatalog(board)
|
||||
wr, err := os.Create(fname)
|
||||
defer wr.Close()
|
||||
if err != nil {
|
||||
log.Println("error generating catalog for", board, err)
|
||||
return
|
||||
}
|
||||
template.genCatalog(self.prefix, self.name, board, wr, self.database)
|
||||
}
|
||||
|
||||
// regenerate the front page
|
||||
func (self *FileCache) RegenFrontPage() {
|
||||
indexwr, err1 := os.Create(filepath.Join(self.webroot_dir, "index.html"))
|
||||
defer indexwr.Close()
|
||||
if err1 != nil {
|
||||
log.Println("cannot render front page", err1)
|
||||
return
|
||||
}
|
||||
boardswr, err2 := os.Create(filepath.Join(self.webroot_dir, "boards.html"))
|
||||
defer boardswr.Close()
|
||||
if err2 != nil {
|
||||
log.Println("cannot render board list page", err2)
|
||||
return
|
||||
}
|
||||
|
||||
template.genFrontPage(10, self.prefix, self.name, indexwr, boardswr, self.database)
|
||||
|
||||
j_boardswr, err2 := os.Create(filepath.Join(self.webroot_dir, "boards.json"))
|
||||
g := self.database.GetAllNewsgroups()
|
||||
err := json.NewEncoder(j_boardswr).Encode(g)
|
||||
if err != nil {
|
||||
log.Println("cannot render boards.json", err)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// regenerate the overboard
|
||||
func (self *FileCache) regenUkko() {
|
||||
|
||||
// markup
|
||||
fname := filepath.Join(self.webroot_dir, "ukko.html")
|
||||
wr, err := os.Create(fname)
|
||||
defer wr.Close()
|
||||
if err != nil {
|
||||
log.Println("error generating ukko markup", err)
|
||||
return
|
||||
}
|
||||
template.genUkko(self.prefix, self.name, wr, self.database, false)
|
||||
|
||||
// json
|
||||
fname = filepath.Join(self.webroot_dir, "ukko.json")
|
||||
wr, err = os.Create(fname)
|
||||
defer wr.Close()
|
||||
if err != nil {
|
||||
log.Println("error generating ukko json", err)
|
||||
return
|
||||
}
|
||||
template.genUkko(self.prefix, self.name, wr, self.database, true)
|
||||
i := 0
|
||||
for i < 10 {
|
||||
fname := fmt.Sprintf("ukko-%d.html", i)
|
||||
jname := fmt.Sprintf("ukko-%d.json", i)
|
||||
f, err := os.Create(fname)
|
||||
if err != nil {
|
||||
log.Println("Failed to create html ukko", i, err)
|
||||
return
|
||||
}
|
||||
defer f.Close()
|
||||
template.genUkkoPaginated(self.prefix, self.name, f, self.database, i, false)
|
||||
j, err := os.Create(jname)
|
||||
if err != nil {
|
||||
log.Printf("failed to create json ukko", i, err)
|
||||
return
|
||||
}
|
||||
defer j.Close()
|
||||
template.genUkkoPaginated(self.prefix, self.name, j, self.database, i, true)
|
||||
}
|
||||
}
|
||||
|
||||
// regenerate pages after a mod event
|
||||
func (self *FileCache) RegenOnModEvent(newsgroup, msgid, root string, page int) {
|
||||
if root == msgid {
|
||||
fname := self.getFilenameForThread(root, false)
|
||||
os.Remove(fname)
|
||||
fname = self.getFilenameForThread(root, true)
|
||||
os.Remove(fname)
|
||||
} else {
|
||||
self.regenThreadChan <- ArticleEntry{root, newsgroup}
|
||||
}
|
||||
self.regenGroupChan <- groupRegenRequest{newsgroup, int(page)}
|
||||
self.regenUkko()
|
||||
}
|
||||
|
||||
func (self *FileCache) Start() {
|
||||
threads := self.regen_threads
|
||||
|
||||
// check for invalid number of threads
|
||||
if threads <= 0 {
|
||||
threads = 1
|
||||
}
|
||||
|
||||
// use N threads for regeneration
|
||||
for threads > 0 {
|
||||
go self.pollRegen()
|
||||
threads--
|
||||
}
|
||||
// run long term regen jobs
|
||||
go self.pollLongTerm()
|
||||
}
|
||||
|
||||
func (self *FileCache) Regen(msg ArticleEntry) {
|
||||
self.regenThreadChan <- msg
|
||||
self.RegenerateBoard(msg.Newsgroup())
|
||||
}
|
||||
|
||||
func (self *FileCache) GetThreadChan() chan ArticleEntry {
|
||||
return self.regenThreadChan
|
||||
}
|
||||
|
||||
func (self *FileCache) GetGroupChan() chan groupRegenRequest {
|
||||
return self.regenGroupChan
|
||||
}
|
||||
|
||||
func (self *FileCache) GetHandler() http.Handler {
|
||||
return http.FileServer(http.Dir(self.webroot_dir))
|
||||
}
|
||||
|
||||
func (self *FileCache) Close() {
|
||||
//nothig to do
|
||||
}
|
||||
|
||||
func (self *FileCache) SetRequireCaptcha(require bool) {
|
||||
self.requireCaptcha = require
|
||||
}
|
||||
|
||||
func NewFileCache(prefix, webroot, name string, threads int, attachments bool, db Database, store ArticleStore) CacheInterface {
|
||||
cache := new(FileCache)
|
||||
|
||||
cache.regenBoardTicker = time.NewTicker(time.Second * 10)
|
||||
cache.longTermTicker = time.NewTicker(time.Hour)
|
||||
cache.ukkoTicker = time.NewTicker(time.Second * 30)
|
||||
cache.regenThreadTicker = time.NewTicker(time.Second)
|
||||
cache.regenCatalogTicker = time.NewTicker(time.Second * 20)
|
||||
|
||||
cache.regenBoardMap = make(map[string]groupRegenRequest)
|
||||
cache.regenThreadMap = make(map[string]ArticleEntry)
|
||||
cache.regenCatalogMap = make(map[string]bool)
|
||||
|
||||
cache.regenThreadChan = make(chan ArticleEntry, 16)
|
||||
cache.regenGroupChan = make(chan groupRegenRequest, 8)
|
||||
|
||||
cache.prefix = prefix
|
||||
cache.webroot_dir = webroot
|
||||
cache.name = name
|
||||
cache.regen_threads = threads
|
||||
cache.attachments = attachments
|
||||
cache.database = db
|
||||
cache.store = store
|
||||
|
||||
return cache
|
||||
}
|
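For orientation, a minimal sketch of how this cache might be wired up inside package srnd. The listen address and webroot path are placeholder assumptions, and CacheInterface is assumed to expose Start, Close and GetHandler the way FileCache implements them above.

// exampleServeFileCache is a hypothetical helper, not part of the daemon.
func exampleServeFileCache(db Database, store ArticleStore) error {
	cache := NewFileCache("/", "webroot", "example.tld", 2, true, db, store)
	cache.Start()
	defer cache.Close()
	// serve the statically regenerated pages from the webroot
	return http.ListenAndServe("[::]:8080", cache.GetHandler())
}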
39
contrib/backends/srndv2/src/srnd/frontend.go
Normal file
39
contrib/backends/srndv2/src/srnd/frontend.go
Normal file
@ -0,0 +1,39 @@
|
||||
//
|
||||
// frontend.go
|
||||
// srnd frontend interfaces
|
||||
//
|
||||
//
|
||||
package srnd
|
||||
|
||||
const BumpLimit = 300
|
||||
|
||||
// ( message-id, references, newsgroup )
|
||||
type frontendPost [3]string
|
||||
|
||||
func (p frontendPost) MessageID() string {
|
||||
return p[0]
|
||||
}
|
||||
|
||||
func (p frontendPost) Reference() string {
|
||||
return p[1]
|
||||
}
|
||||
|
||||
func (p frontendPost) Newsgroup() string {
|
||||
return p[2]
|
||||
}
|
||||
|
||||
// frontend interface for any type of frontend
|
||||
type Frontend interface {
|
||||
|
||||
	// channel the frontend polls for new posts from the nntpd
|
||||
PostsChan() chan frontendPost
|
||||
|
||||
// run mainloop
|
||||
Mainloop()
|
||||
|
||||
// do we want posts from a newsgroup?
|
||||
AllowNewsgroup(group string) bool
|
||||
|
||||
// trigger a manual regen of indexes for a root post
|
||||
Regen(msg ArticleEntry)
|
||||
}
|
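As an illustration only, a minimal in-package Frontend that satisfies the interface above by accepting every newsgroup and draining its post channel; the type and constructor names are hypothetical.

// nullFrontend is a hypothetical no-op Frontend used to illustrate the interface.
type nullFrontend struct {
	posts chan frontendPost
}

func newNullFrontend() Frontend {
	return &nullFrontend{posts: make(chan frontendPost, 8)}
}

func (n *nullFrontend) PostsChan() chan frontendPost     { return n.posts }
func (n *nullFrontend) AllowNewsgroup(group string) bool { return true }
func (n *nullFrontend) Regen(msg ArticleEntry)           {}

func (n *nullFrontend) Mainloop() {
	// a real frontend would regenerate pages for each incoming post
	for range n.posts {
	}
}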
1567
contrib/backends/srndv2/src/srnd/frontend_http.go
Normal file
1567
contrib/backends/srndv2/src/srnd/frontend_http.go
Normal file
File diff suppressed because it is too large
Load Diff
54
contrib/backends/srndv2/src/srnd/frontend_multi.go
Normal file
54
contrib/backends/srndv2/src/srnd/frontend_multi.go
Normal file
@ -0,0 +1,54 @@
|
||||
//
|
||||
// frontend_multi.go
|
||||
// frontend multiplexer
|
||||
//
|
||||
|
||||
package srnd
|
||||
|
||||
// muxed frontend for holding many frontends
|
||||
type multiFrontend struct {
|
||||
muxedpostchan chan frontendPost
|
||||
frontends []Frontend
|
||||
}
|
||||
|
||||
func (self multiFrontend) AllowNewsgroup(newsgroup string) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (self multiFrontend) Regen(msg ArticleEntry) {
|
||||
for _, front := range self.frontends {
|
||||
front.Regen(msg)
|
||||
}
|
||||
}
|
||||
|
||||
func (self multiFrontend) Mainloop() {
|
||||
for idx := range self.frontends {
|
||||
go self.frontends[idx].Mainloop()
|
||||
}
|
||||
|
||||
	// poll for incoming posts and fan them out to every frontend that wants them
	chnl := self.PostsChan()
	for nntp := range chnl {
		for _, frontend := range self.frontends {
			if frontend.AllowNewsgroup(nntp.Newsgroup()) {
				frontend.PostsChan() <- nntp
			}
		}
	}
}
|
||||
|
||||
func (self multiFrontend) PostsChan() chan frontendPost {
|
||||
return self.muxedpostchan
|
||||
}
|
||||
|
||||
func MuxFrontends(fronts ...Frontend) Frontend {
|
||||
var front multiFrontend
|
||||
front.muxedpostchan = make(chan frontendPost, 64)
|
||||
front.frontends = fronts
|
||||
return front
|
||||
}
|
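Assuming two concrete frontends already exist, the multiplexer above would be used roughly like this; the message-id and newsgroup values are placeholders.

// exampleMux is a hypothetical illustration of MuxFrontends.
func exampleMux(a, b Frontend) {
	front := MuxFrontends(a, b)
	go front.Mainloop()
	// the daemon pushes incoming posts into the muxed channel;
	// Mainloop fans each post out to every frontend that allows the group
	front.PostsChan() <- frontendPost{"<msgid@example.tld>", "", "overchan.test"}
}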
16
contrib/backends/srndv2/src/srnd/hook.go
Normal file
16
contrib/backends/srndv2/src/srnd/hook.go
Normal file
@ -0,0 +1,16 @@
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"log"
|
||||
"os/exec"
|
||||
)
|
||||
|
||||
func ExecHook(config *HookConfig, group, msgid, ref string) {
	cmd := exec.Command(config.exec, group, msgid, ref)
	// run the hook and capture stdout+stderr so failures can be logged
	b, err := cmd.CombinedOutput()
	if err != nil {
		log.Println("calling hook", config.name, "failed:", err)
		log.Println(string(b))
	}
}
|
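A hedged sketch of calling the hook after an article lands; HookConfig values are normally read from configuration, and the hook name and script path below are placeholders.

// exampleRunHook is a hypothetical illustration of ExecHook.
func exampleRunHook() {
	conf := &HookConfig{
		name: "notify",                           // hypothetical hook name
		exec: "/usr/local/bin/nntpchan-hook.sh",  // hypothetical script path
	}
	// invoked as: <exec> <newsgroup> <message-id> <references>
	ExecHook(conf, "overchan.test", "<msgid@example.tld>", "")
}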
86
contrib/backends/srndv2/src/srnd/i18n.go
Normal file
86
contrib/backends/srndv2/src/srnd/i18n.go
Normal file
@ -0,0 +1,86 @@
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"github.com/majestrate/configparser"
|
||||
"golang.org/x/text/language"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type i18n struct {
|
||||
locale language.Tag
|
||||
// loaded translations
|
||||
translations map[string]string
|
||||
// loaded formats
|
||||
formats map[string]string
|
||||
// root directory for translations
|
||||
translation_dir string
|
||||
}
|
||||
|
||||
var i18nProvider *i18n = nil
|
||||
|
||||
//Read all .ini files in dir, where the filenames are BCP 47 tags
|
||||
//Use the language matcher to get the best match for the locale preference
|
||||
func InitI18n(locale, dir string) {
|
||||
	pref := language.Make(locale) // on a parse error the matcher below falls back to en-US
|
||||
|
||||
files, err := ioutil.ReadDir(dir)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
serverLangs := make([]language.Tag, 1)
|
||||
serverLangs[0] = language.AmericanEnglish // en-US fallback
|
||||
for _, file := range files {
|
||||
if filepath.Ext(file.Name()) == ".ini" {
|
||||
name := strings.TrimSuffix(file.Name(), ".ini")
|
||||
tag, err := language.Parse(name)
|
||||
if err == nil {
|
||||
serverLangs = append(serverLangs, tag)
|
||||
}
|
||||
}
|
||||
}
|
||||
matcher := language.NewMatcher(serverLangs)
|
||||
tag, _, _ := matcher.Match(pref)
|
||||
|
||||
fname := filepath.Join(dir, tag.String()+".ini")
|
||||
conf, err := configparser.Read(fname)
|
||||
if err != nil {
|
||||
log.Fatal("cannot read translation file for", tag.String(), err)
|
||||
}
|
||||
|
||||
formats, err := conf.Section("formats")
|
||||
if err != nil {
|
||||
log.Fatal("Cannot read formats sections in translations for", tag.String(), err)
|
||||
}
|
||||
translations, err := conf.Section("strings")
|
||||
if err != nil {
|
||||
log.Fatal("Cannot read strings sections in translations for", tag.String(), err)
|
||||
}
|
||||
|
||||
i18nProvider = &i18n{
|
||||
translation_dir: dir,
|
||||
formats: formats.Options(),
|
||||
translations: translations.Options(),
|
||||
locale: tag,
|
||||
}
|
||||
}
|
||||
|
||||
func (self *i18n) Translate(key string) string {
|
||||
return self.translations[key]
|
||||
}
|
||||
|
||||
func (self *i18n) Format(key string) string {
|
||||
return self.formats[key]
|
||||
}
|
||||
|
||||
//this signature seems to be expected by mustache
|
||||
func (self *i18n) Translations() (map[string]string, error) {
|
||||
return self.translations, nil
|
||||
}
|
||||
|
||||
func (self *i18n) Formats() (map[string]string, error) {
|
||||
return self.formats, nil
|
||||
}
|
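Roughly how the provider above is consumed elsewhere in the daemon; this is a sketch, and the lookup key is a hypothetical example.

// exampleTranslate is a hypothetical illustration of the i18n provider.
func exampleTranslate() string {
	// InitI18n runs once at startup and log.Fatal()s if the files cannot be read
	InitI18n("en-US", filepath.Join("contrib", "translations"))
	// "post_button" is a hypothetical key from the [strings] section
	return i18nProvider.Translate("post_button")
}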
503
contrib/backends/srndv2/src/srnd/installer.go
Normal file
503
contrib/backends/srndv2/src/srnd/installer.go
Normal file
@ -0,0 +1,503 @@
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"github.com/gorilla/mux"
|
||||
_ "github.com/lib/pq"
|
||||
"github.com/majestrate/configparser"
|
||||
"golang.org/x/text/language"
|
||||
"gopkg.in/tylerb/graceful.v1"
|
||||
"log"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
)
|
||||
|
||||
type handlePost func(*dialogNode, url.Values, *configparser.Configuration) (*dialogNode, error)
|
||||
type templateModel map[string]interface{}
|
||||
type prepareModel func(*dialogNode, error, *configparser.Configuration) templateModel
|
||||
|
||||
type dialogNode struct {
|
||||
parent *dialogNode
|
||||
children map[string]*dialogNode
|
||||
|
||||
post handlePost
|
||||
model prepareModel
|
||||
|
||||
templateName string
|
||||
}
|
||||
|
||||
type Installer struct {
|
||||
root *dialogNode
|
||||
currentNode *dialogNode
|
||||
currentErr error
|
||||
result chan *configparser.Configuration
|
||||
config *configparser.Configuration
|
||||
srv *graceful.Server
|
||||
hasTranslations bool
|
||||
}
|
||||
|
||||
func handleDBTypePost(self *dialogNode, form url.Values, conf *configparser.Configuration) (*dialogNode, error) {
|
||||
db := form.Get("db")
|
||||
log.Println("DB chosen: ", db)
|
||||
if db == "postgres" {
|
||||
return self.children["postgres"], nil
|
||||
}
|
||||
return self, nil
|
||||
}
|
||||
|
||||
func prepareDefaultModel(self *dialogNode, err error, conf *configparser.Configuration) templateModel {
|
||||
param := make(map[string]interface{})
|
||||
param["dialog"] = &BaseDialogModel{ErrorModel{err}, StepModel{self}}
|
||||
return param
|
||||
}
|
||||
|
||||
func preparePostgresDBModel(self *dialogNode, err error, conf *configparser.Configuration) templateModel {
|
||||
param := make(map[string]interface{})
|
||||
sect, _ := conf.Section("database")
|
||||
host := sect.ValueOf("host")
|
||||
port := sect.ValueOf("port")
|
||||
user := sect.ValueOf("user")
|
||||
param["dialog"] = &DBModel{ErrorModel{err}, StepModel{self}, user, host, port}
|
||||
return param
|
||||
}
|
||||
|
||||
func handlePostgresDBPost(self *dialogNode, form url.Values, conf *configparser.Configuration) (*dialogNode, error) {
|
||||
if form.Get("back") == "true" {
|
||||
return self.parent, nil
|
||||
}
|
||||
sect, _ := conf.Section("database")
|
||||
host := form.Get("host")
|
||||
port := form.Get("port")
|
||||
passwd := form.Get("password")
|
||||
user := form.Get("user")
|
||||
|
||||
err := checkPostgresConnection(host, port, user, passwd)
|
||||
if err != nil {
|
||||
return self, err
|
||||
}
|
||||
sect.Add("type", "postgres")
|
||||
sect.Add("schema", "srnd")
|
||||
sect.Add("host", host)
|
||||
sect.Add("port", port)
|
||||
sect.Add("password", passwd)
|
||||
sect.Add("user", user)
|
||||
|
||||
return self.children["next"], nil
|
||||
}
|
||||
|
||||
func prepareNNTPModel(self *dialogNode, err error, conf *configparser.Configuration) templateModel {
|
||||
param := make(map[string]interface{})
|
||||
sect, _ := conf.Section("nntp")
|
||||
name := sect.ValueOf("instance_name")
|
||||
param["dialog"] = &NameModel{ErrorModel{err}, StepModel{self}, name}
|
||||
return param
|
||||
}
|
||||
|
||||
func handleNNTPPost(self *dialogNode, form url.Values, conf *configparser.Configuration) (*dialogNode, error) {
|
||||
if form.Get("back") == "true" {
|
||||
return self.parent, nil
|
||||
}
|
||||
sect, _ := conf.Section("nntp")
|
||||
name := form.Get("nntp_name")
|
||||
|
||||
allow_attachments := form.Get("allow_attachments")
|
||||
if allow_attachments != "1" {
|
||||
allow_attachments = "0"
|
||||
}
|
||||
|
||||
allow_anon := form.Get("allow_anon")
|
||||
if allow_anon != "1" {
|
||||
allow_anon = "0"
|
||||
}
|
||||
|
||||
allow_anon_attachments := form.Get("allow_anon_attachments")
|
||||
if allow_anon_attachments != "1" {
|
||||
allow_anon_attachments = "0"
|
||||
}
|
||||
|
||||
require_tls := form.Get("require_tls")
|
||||
if require_tls != "1" {
|
||||
require_tls = "0"
|
||||
}
|
||||
|
||||
sect.Add("instance_name", name)
|
||||
sect.Add("allow_attachments", allow_attachments)
|
||||
sect.Add("allow_anon", allow_anon)
|
||||
sect.Add("require_tls", require_tls)
|
||||
|
||||
return self.children["next"], nil
|
||||
}
|
||||
|
||||
func handleCryptoPost(self *dialogNode, form url.Values, conf *configparser.Configuration) (*dialogNode, error) {
|
||||
if form.Get("back") == "true" {
|
||||
return self.parent, nil
|
||||
}
|
||||
sect, _ := conf.Section("crypto")
|
||||
host := form.Get("host")
|
||||
key := form.Get("key")
|
||||
|
||||
err := checkHost(host)
|
||||
if err != nil {
|
||||
return self, err
|
||||
}
|
||||
sect.Add("tls-hostname", host)
|
||||
sect.Add("tls-keyname", key)
|
||||
|
||||
return self.children["next"], nil
|
||||
}
|
||||
|
||||
func prepareCryptoModel(self *dialogNode, err error, conf *configparser.Configuration) templateModel {
|
||||
param := make(map[string]interface{})
|
||||
sect, _ := conf.Section("crypto")
|
||||
host := sect.ValueOf("tls-hostname")
|
||||
key := sect.ValueOf("tls-keyname")
|
||||
param["dialog"] = &CryptoModel{ErrorModel{err}, StepModel{self}, host, key}
|
||||
return param
|
||||
}
|
||||
|
||||
func prepareBinModel(self *dialogNode, err error, conf *configparser.Configuration) templateModel {
|
||||
param := make(map[string]interface{})
|
||||
sect, _ := conf.Section("articles")
|
||||
convert := sect.ValueOf("convert_bin")
|
||||
ffmpeg := sect.ValueOf("ffmpegthumbnailer_bin")
|
||||
sox := sect.ValueOf("sox_bin")
|
||||
param["dialog"] = &BinaryModel{ErrorModel{err}, StepModel{self}, convert, ffmpeg, sox}
|
||||
return param
|
||||
}
|
||||
|
||||
func handleBinPost(self *dialogNode, form url.Values, conf *configparser.Configuration) (*dialogNode, error) {
|
||||
if form.Get("back") == "true" {
|
||||
return self.parent, nil
|
||||
}
|
||||
sect, _ := conf.Section("articles")
|
||||
convert := form.Get("convert")
|
||||
ffmpeg := form.Get("ffmpeg")
|
||||
sox := form.Get("sox")
|
||||
|
||||
err := checkFile(convert)
|
||||
if err == nil {
|
||||
err = checkFile(ffmpeg)
|
||||
if err == nil {
|
||||
err = checkFile(sox)
|
||||
}
|
||||
}
|
||||
|
||||
sect.Add("convert_bin", convert)
|
||||
sect.Add("ffmpegthumbnailer_bin", ffmpeg)
|
||||
sect.Add("sox_bin", sox)
|
||||
|
||||
if err != nil {
|
||||
return self, err
|
||||
}
|
||||
|
||||
return self.children["next"], nil
|
||||
}
|
||||
|
||||
func handleCacheTypePost(self *dialogNode, form url.Values, conf *configparser.Configuration) (*dialogNode, error) {
|
||||
if form.Get("back") == "true" {
|
||||
return self.parent, nil
|
||||
}
|
||||
sect, _ := conf.Section("cache")
|
||||
|
||||
cache := form.Get("cache")
|
||||
log.Println("Cache chosen: ", cache)
|
||||
sect.Add("type", cache)
|
||||
if cache == "file" || cache == "null" || cache == "varnish" {
|
||||
return self.children["next"], nil
|
||||
}
|
||||
|
||||
return self, nil
|
||||
}
|
||||
|
||||
func prepareFrontendModel(self *dialogNode, err error, conf *configparser.Configuration) templateModel {
|
||||
param := make(map[string]interface{})
|
||||
sect, _ := conf.Section("frontend")
|
||||
name := sect.ValueOf("name")
|
||||
locale := sect.ValueOf("locale")
|
||||
param["dialog"] = &FrontendModel{ErrorModel{err}, StepModel{self}, name, locale}
|
||||
return param
|
||||
}
|
||||
|
||||
func handleFrontendPost(self *dialogNode, form url.Values, conf *configparser.Configuration) (*dialogNode, error) {
|
||||
if form.Get("back") == "true" {
|
||||
return self.parent, nil
|
||||
}
|
||||
var next *dialogNode
|
||||
|
||||
sect, _ := conf.Section("frontend")
|
||||
name := form.Get("name")
|
||||
locale := form.Get("locale")
|
||||
|
||||
allow_files := form.Get("allow_files")
|
||||
if allow_files != "1" {
|
||||
allow_files = "0"
|
||||
}
|
||||
|
||||
json_api := form.Get("json")
|
||||
if json_api != "1" {
|
||||
json_api = "0"
|
||||
next = self.children["next"]
|
||||
} else {
|
||||
next = self.children["json"]
|
||||
}
|
||||
|
||||
sect.Add("name", name)
|
||||
sect.Add("locale", locale)
|
||||
sect.Add("allow_files", allow_files)
|
||||
sect.Add("json-api", json_api)
|
||||
|
||||
err := checkLocale(locale)
|
||||
if err != nil {
|
||||
return self, err
|
||||
}
|
||||
|
||||
return next, nil
|
||||
}
|
||||
|
||||
func handleAPIPost(self *dialogNode, form url.Values, conf *configparser.Configuration) (*dialogNode, error) {
|
||||
if form.Get("back") == "true" {
|
||||
return self.parent, nil
|
||||
}
|
||||
sect, _ := conf.Section("frontend")
|
||||
user := form.Get("user")
|
||||
pass := form.Get("pass")
|
||||
secret := form.Get("secret")
|
||||
|
||||
sect.Add("json-api-username", user)
|
||||
sect.Add("json-api-password", pass)
|
||||
sect.Add("api-secret", secret)
|
||||
|
||||
return self.children["next"], nil
|
||||
}
|
||||
|
||||
func prepareAPIModel(self *dialogNode, err error, conf *configparser.Configuration) templateModel {
|
||||
param := make(map[string]interface{})
|
||||
sect, _ := conf.Section("frontend")
|
||||
user := sect.ValueOf("json-api-username")
|
||||
secret := sect.ValueOf("api-secret")
|
||||
param["dialog"] = &APIModel{ErrorModel{err}, StepModel{self}, user, secret}
|
||||
return param
|
||||
}
|
||||
|
||||
func handleKeyPost(self *dialogNode, form url.Values, conf *configparser.Configuration) (*dialogNode, error) {
|
||||
if form.Get("back") == "true" {
|
||||
return self.parent, nil
|
||||
}
|
||||
sect, _ := conf.Section("frontend")
|
||||
public := form.Get("public")
|
||||
|
||||
sect.Add("admin_key", public)
|
||||
return self.children["next"], nil
|
||||
}
|
||||
|
||||
func prepareKeyModel(self *dialogNode, err error, conf *configparser.Configuration) templateModel {
|
||||
param := make(map[string]interface{})
|
||||
public, secret := newSignKeypair()
|
||||
param["dialog"] = &KeyModel{ErrorModel{err}, StepModel{self}, public, secret}
|
||||
return param
|
||||
}
|
||||
|
||||
func (self *Installer) HandleInstallerGet(wr http.ResponseWriter, r *http.Request) {
|
||||
if !self.hasTranslations {
|
||||
t, _, _ := language.ParseAcceptLanguage(r.Header.Get("Accept-Language"))
|
||||
locale := ""
|
||||
if len(t) > 0 {
|
||||
locale = t[0].String()
|
||||
}
|
||||
InitI18n(locale, filepath.Join("contrib", "translations"))
|
||||
self.hasTranslations = true
|
||||
}
|
||||
if self.currentNode == nil {
|
||||
wr.WriteHeader(404)
|
||||
} else {
|
||||
m := self.currentNode.model(self.currentNode, self.currentErr, self.config)
|
||||
template.writeTemplate(self.currentNode.templateName, m, wr)
|
||||
}
|
||||
}
|
||||
|
||||
func (self *Installer) HandleInstallerPost(wr http.ResponseWriter, r *http.Request) {
|
||||
err := r.ParseForm()
|
||||
if err == nil {
|
||||
next, newErr := self.currentNode.post(self.currentNode, r.PostForm, self.config)
|
||||
if next == nil {
|
||||
self.result <- self.config
|
||||
//defer self.srv.Stop(10 * time.Second)
|
||||
}
|
||||
self.currentNode = next
|
||||
self.currentErr = newErr
|
||||
|
||||
http.Redirect(wr, r, r.URL.String(), http.StatusSeeOther) //redirect to the same url, but with a GET
|
||||
return
|
||||
}
|
||||
http.Error(wr, "Bad Request", http.StatusBadRequest)
|
||||
}
|
||||
|
||||
func NewInstaller(result chan *configparser.Configuration) *Installer {
|
||||
inst := new(Installer)
|
||||
inst.root = initInstallerTree()
|
||||
inst.currentNode = inst.root
|
||||
inst.result = result
|
||||
inst.config = GenSRNdConfig()
|
||||
inst.hasTranslations = false
|
||||
|
||||
m := mux.NewRouter()
|
||||
m.Path("/").HandlerFunc(inst.HandleInstallerGet).Methods("GET")
|
||||
m.Path("/").HandlerFunc(inst.HandleInstallerPost).Methods("POST")
|
||||
|
||||
inst.srv = &graceful.Server{
|
||||
Timeout: 10 * time.Second,
|
||||
NoSignalHandling: true,
|
||||
|
||||
Server: &http.Server{
|
||||
Addr: ":18000",
|
||||
Handler: m,
|
||||
},
|
||||
}
|
||||
|
||||
return inst
|
||||
}
|
||||
|
||||
func initInstallerTree() *dialogNode {
|
||||
root := &dialogNode{
|
||||
parent: nil,
|
||||
children: make(map[string]*dialogNode),
|
||||
post: handleDBTypePost,
|
||||
model: prepareDefaultModel,
|
||||
templateName: "inst_db.mustache",
|
||||
}
|
||||
|
||||
postgresDB := &dialogNode{
|
||||
parent: root,
|
||||
children: make(map[string]*dialogNode),
|
||||
post: handlePostgresDBPost,
|
||||
model: preparePostgresDBModel,
|
||||
templateName: "inst_postgres_db.mustache",
|
||||
}
|
||||
root.children["postgres"] = postgresDB
|
||||
|
||||
nntp := &dialogNode{
|
||||
parent: root,
|
||||
children: make(map[string]*dialogNode),
|
||||
post: handleNNTPPost,
|
||||
model: prepareNNTPModel,
|
||||
templateName: "inst_nntp.mustache",
|
||||
}
|
||||
postgresDB.children["next"] = nntp
|
||||
|
||||
crypto := &dialogNode{
|
||||
parent: nntp,
|
||||
children: make(map[string]*dialogNode),
|
||||
post: handleCryptoPost,
|
||||
model: prepareCryptoModel,
|
||||
templateName: "inst_crypto.mustache",
|
||||
}
|
||||
nntp.children["next"] = crypto
|
||||
|
||||
bins := &dialogNode{
|
||||
parent: crypto,
|
||||
children: make(map[string]*dialogNode),
|
||||
post: handleBinPost,
|
||||
model: prepareBinModel,
|
||||
templateName: "inst_bins.mustache",
|
||||
}
|
||||
crypto.children["next"] = bins
|
||||
|
||||
cache := &dialogNode{
|
||||
parent: bins,
|
||||
children: make(map[string]*dialogNode),
|
||||
post: handleCacheTypePost,
|
||||
model: prepareDefaultModel,
|
||||
templateName: "inst_cache.mustache",
|
||||
}
|
||||
bins.children["next"] = cache
|
||||
|
||||
frontend := &dialogNode{
|
||||
parent: cache,
|
||||
children: make(map[string]*dialogNode),
|
||||
post: handleFrontendPost,
|
||||
model: prepareFrontendModel,
|
||||
templateName: "inst_frontend.mustache",
|
||||
}
|
||||
cache.children["next"] = frontend
|
||||
|
||||
api := &dialogNode{
|
||||
parent: frontend,
|
||||
children: make(map[string]*dialogNode),
|
||||
post: handleAPIPost,
|
||||
model: prepareAPIModel,
|
||||
templateName: "inst_api.mustache",
|
||||
}
|
||||
frontend.children["json"] = api
|
||||
|
||||
key := &dialogNode{
|
||||
parent: frontend,
|
||||
children: make(map[string]*dialogNode),
|
||||
post: handleKeyPost,
|
||||
model: prepareKeyModel,
|
||||
templateName: "inst_key.mustache",
|
||||
}
|
||||
frontend.children["next"] = key
|
||||
api.children["next"] = key
|
||||
|
||||
return root
|
||||
}
|
||||
|
||||
func checkPostgresConnection(host, port, user, password string) error {
|
||||
var db_str string
|
||||
if len(user) > 0 {
|
||||
if len(password) > 0 {
|
||||
db_str = fmt.Sprintf("user=%s password=%s host=%s port=%s client_encoding='UTF8' connect_timeout=3", user, password, host, port)
|
||||
} else {
|
||||
db_str = fmt.Sprintf("user=%s host=%s port=%s client_encoding='UTF8' connect_timeout=3", user, host, port)
|
||||
}
|
||||
} else {
|
||||
if len(port) > 0 {
|
||||
db_str = fmt.Sprintf("host=%s port=%s client_encoding='UTF8' connect_timeout=3", host, port)
|
||||
} else {
|
||||
db_str = fmt.Sprintf("host=%s client_encoding='UTF8' connect_timeout=3", host)
|
||||
}
|
||||
}
|
||||
|
||||
	conn, err := sql.Open("postgres", db_str)
	if err == nil {
		// sql.Open does not dial; the test query below actually checks the connection
		_, err = conn.Exec("SELECT datname FROM pg_database")
		conn.Close()
	}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func checkLocale(locale string) error {
|
||||
_, err := language.Parse(locale)
|
||||
return err
|
||||
}
|
||||
|
||||
func checkFile(path string) error {
|
||||
_, err := os.Stat(path)
|
||||
return err
|
||||
}
|
||||
|
||||
func checkHost(host string) error {
|
||||
_, err := net.LookupHost(host)
|
||||
return err
|
||||
}
|
||||
|
||||
func (self *Installer) Start() {
|
||||
log.Println("starting installer on", self.srv.Server.Addr)
|
||||
log.Println("open up http://127.0.0.1:18000 to do initial configuration")
|
||||
self.srv.ListenAndServe()
|
||||
}
|
||||
|
||||
func (self *Installer) Stop() {
|
||||
self.srv.Stop(1 * time.Second)
|
||||
}
|
||||
|
||||
func InstallerEnabled() bool {
|
||||
return os.Getenv("SRND_NO_INSTALLER") != "1"
|
||||
}
|
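The installer above is normally driven like this: build it with a result channel, serve the dialog, and block until the final step posts a finished configuration. A sketch of the expected call pattern, with error handling omitted.

// exampleRunInstaller is a hypothetical illustration of the installer flow.
func exampleRunInstaller() *configparser.Configuration {
	if !InstallerEnabled() {
		return nil
	}
	result := make(chan *configparser.Configuration)
	installer := NewInstaller(result)
	go installer.Start() // serves the dialog on :18000
	conf := <-result     // delivered when the last dialog node is completed
	installer.Stop()
	return conf
}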
147
contrib/backends/srndv2/src/srnd/installer_models.go
Normal file
147
contrib/backends/srndv2/src/srnd/installer_models.go
Normal file
@ -0,0 +1,147 @@
|
||||
package srnd
|
||||
|
||||
type ErrorModel struct {
|
||||
Err error
|
||||
}
|
||||
|
||||
func (self *ErrorModel) Error() string {
|
||||
return self.Err.Error()
|
||||
}
|
||||
|
||||
func (self *ErrorModel) HasError() bool {
|
||||
return self.Err != nil
|
||||
}
|
||||
|
||||
type StepModel struct {
|
||||
Node *dialogNode
|
||||
}
|
||||
|
||||
func (self *StepModel) HasNext() bool {
|
||||
return len(self.Node.children) > 0
|
||||
}
|
||||
|
||||
func (self *StepModel) HasPrevious() bool {
|
||||
return self.Node.parent != nil
|
||||
}
|
||||
|
||||
type BaseDialogModel struct {
|
||||
ErrorModel
|
||||
StepModel
|
||||
}
|
||||
|
||||
type DBModel struct {
|
||||
ErrorModel
|
||||
StepModel
|
||||
|
||||
username string
|
||||
host string
|
||||
port string
|
||||
}
|
||||
|
||||
func (self *DBModel) Username() string {
|
||||
return self.username
|
||||
}
|
||||
|
||||
func (self *DBModel) Host() string {
|
||||
return self.host
|
||||
}
|
||||
|
||||
func (self *DBModel) Port() string {
|
||||
return self.port
|
||||
}
|
||||
|
||||
type NameModel struct {
|
||||
ErrorModel
|
||||
StepModel
|
||||
|
||||
name string
|
||||
}
|
||||
|
||||
func (self *NameModel) Name() string {
|
||||
return self.name
|
||||
}
|
||||
|
||||
type CryptoModel struct {
|
||||
ErrorModel
|
||||
StepModel
|
||||
|
||||
host string
|
||||
key string
|
||||
}
|
||||
|
||||
func (self *CryptoModel) Host() string {
|
||||
return self.host
|
||||
}
|
||||
|
||||
func (self *CryptoModel) Key() string {
|
||||
return self.key
|
||||
}
|
||||
|
||||
type BinaryModel struct {
|
||||
ErrorModel
|
||||
StepModel
|
||||
|
||||
convert string
|
||||
ffmpeg string
|
||||
sox string
|
||||
}
|
||||
|
||||
func (self *BinaryModel) Convert() string {
|
||||
return self.convert
|
||||
}
|
||||
|
||||
func (self *BinaryModel) FFmpeg() string {
|
||||
return self.ffmpeg
|
||||
}
|
||||
|
||||
func (self *BinaryModel) Sox() string {
|
||||
return self.sox
|
||||
}
|
||||
|
||||
type FrontendModel struct {
|
||||
ErrorModel
|
||||
StepModel
|
||||
|
||||
name string
|
||||
locale string
|
||||
}
|
||||
|
||||
func (self *FrontendModel) Name() string {
|
||||
return self.name
|
||||
}
|
||||
|
||||
func (self *FrontendModel) Locale() string {
|
||||
return self.locale
|
||||
}
|
||||
|
||||
type APIModel struct {
|
||||
ErrorModel
|
||||
StepModel
|
||||
|
||||
name string
|
||||
secret string
|
||||
}
|
||||
|
||||
func (self *APIModel) User() string {
|
||||
return self.name
|
||||
}
|
||||
|
||||
func (self *APIModel) Secret() string {
|
||||
return self.secret
|
||||
}
|
||||
|
||||
type KeyModel struct {
|
||||
ErrorModel
|
||||
StepModel
|
||||
|
||||
public string
|
||||
secret string
|
||||
}
|
||||
|
||||
func (self *KeyModel) Public() string {
|
||||
return self.public
|
||||
}
|
||||
|
||||
func (self *KeyModel) Secret() string {
|
||||
return self.secret
|
||||
}
|
23
contrib/backends/srndv2/src/srnd/line.go
Normal file
23
contrib/backends/srndv2/src/srnd/line.go
Normal file
@ -0,0 +1,23 @@
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
)
|
||||
|
||||
type LineWriter struct {
|
||||
w io.Writer
|
||||
}
|
||||
|
||||
func NewLineWriter(w io.Writer) *LineWriter {
|
||||
return &LineWriter{
|
||||
w: w,
|
||||
}
|
||||
}
|
||||
|
||||
func (l *LineWriter) Write(data []byte) (n int, err error) {
|
||||
n = len(data)
|
||||
data = bytes.Replace(data, []byte{13, 10}, []byte{10}, -1)
|
||||
_, err = l.w.Write(data)
|
||||
return
|
||||
}
|
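LineWriter simply rewrites CRLF to LF on the way through, for example:

// exampleLineWriter is a hypothetical illustration of LineWriter.
func exampleLineWriter() string {
	var buf bytes.Buffer
	lw := NewLineWriter(&buf)
	io.WriteString(lw, "Subject: test\r\nPath: example\r\n")
	return buf.String() // "Subject: test\nPath: example\n"
}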
111
contrib/backends/srndv2/src/srnd/markup.go
Normal file
111
contrib/backends/srndv2/src/srnd/markup.go
Normal file
@ -0,0 +1,111 @@
|
||||
// markup.go
|
||||
// memeposting markup parser
|
||||
//
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"github.com/mvdan/xurls"
|
||||
"html"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// copypasted from https://stackoverflow.com/questions/161738/what-is-the-best-regular-expression-to-check-if-a-string-is-a-valid-url
|
||||
// var re_external_link = regexp.MustCompile(`((?:(?:https?|ftp):\/\/)(?:\S+(?::\S*)?@)?(?:(?!(?:10|127)(?:\.\d{1,3}){3})(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))|(?:(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)(?:\.(?:[a-z\u00a1-\uffff0-9]-*)*[a-z\u00a1-\uffff0-9]+)*(?:\.(?:[a-z\u00a1-\uffff]{2,}))\.?)(?::\d{2,5})?(?:[/?#]\S*)?)`);
|
||||
var re_external_link = xurls.Strict
|
||||
var re_backlink = regexp.MustCompile(`>> ?([0-9a-f]+)`)
|
||||
var re_boardlink = regexp.MustCompile(`>>> ?/([0-9a-zA-Z\.]+)/`)
|
||||
var re_nntpboardlink = regexp.MustCompile(`news:([0-9a-zA-Z\.]+)`)
|
||||
|
||||
// find all backlinks in string
|
||||
func findBacklinks(msg string) (cites []string) {
|
||||
re := re_backlink.Copy()
|
||||
cmap := make(map[string]string)
|
||||
for _, cite := range re.FindAllString(msg, -1) {
|
||||
cmap[cite] = cite
|
||||
}
|
||||
for _, c := range cmap {
|
||||
cites = append(cites, c)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// parse backlink
|
||||
func backlink(word, prefix string) (markup string) {
|
||||
re := re_backlink.Copy()
|
||||
link := re.FindString(word)
|
||||
if len(link) > 2 {
|
||||
link = strings.Trim(link[2:], " ")
|
||||
if len(link) > 2 {
|
||||
url := template.findLink(prefix, link)
|
||||
if len(url) == 0 {
|
||||
return "<span class='memearrows'>>>" + link + "</span>"
|
||||
}
|
||||
// backlink exists
|
||||
parts := strings.Split(url, "#")
|
||||
longhash := ""
|
||||
if len(parts) > 1 {
|
||||
longhash = parts[1]
|
||||
}
|
||||
return `<a class='backlink' backlinkhash="` + longhash + `" href="` + url + `">>>` + link + "</a>"
|
||||
} else {
|
||||
return escapeline(word)
|
||||
}
|
||||
}
|
||||
return escapeline(word)
|
||||
}
|
||||
|
||||
func boardlink(word, prefix string, r *regexp.Regexp) (markup string) {
|
||||
re := r.Copy()
|
||||
l := re.FindStringSubmatch(word)
|
||||
if len(l[1]) > 2 {
|
||||
link := strings.ToLower(l[1])
|
||||
markup = `<a class="boardlink" href="` + prefix + "b/" + link + `">>>>/` + link + `/</a>`
|
||||
return
|
||||
}
|
||||
markup = escapeline(word)
|
||||
return
|
||||
}
|
||||
|
||||
func escapeline(line string) (markup string) {
|
||||
markup = html.EscapeString(line)
|
||||
return
|
||||
}
|
||||
|
||||
func formatline(line, prefix string) (markup string) {
|
||||
if len(line) > 0 {
|
||||
line_nospace := strings.Trim(line, " ")
|
||||
if strings.HasPrefix(line_nospace, ">") && !strings.HasPrefix(line_nospace, ">>") {
|
||||
// le ebin meme arrows
|
||||
markup += "<span class='memearrows'>"
|
||||
markup += escapeline(line)
|
||||
markup += "</span>"
|
||||
} else {
|
||||
// regular line
|
||||
// for each word
|
||||
for _, word := range strings.Split(line, " ") {
|
||||
if re_boardlink.MatchString(word) {
|
||||
markup += boardlink(word, prefix, re_boardlink)
|
||||
} else if re_nntpboardlink.MatchString(word) {
|
||||
markup += boardlink(word, prefix, re_nntpboardlink)
|
||||
} else if re_backlink.MatchString(word) {
|
||||
markup += backlink(word, prefix)
|
||||
} else {
|
||||
// linkify as needed
|
||||
word = escapeline(word)
|
||||
markup += re_external_link.ReplaceAllString(word, `<a href="$1">$1</a>`)
|
||||
}
|
||||
markup += " "
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func MEMEPosting(src, prefix string) (markup string) {
|
||||
for _, line := range strings.Split(src, "\n") {
|
||||
line = strings.Trim(line, "\r")
|
||||
markup += formatline(line, prefix) + "\n"
|
||||
}
|
||||
return extraMemePosting(markup, prefix)
|
||||
}
|
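A worked example of what the formatter above produces for a greentext line, derived from formatline and escapeline as written (a sketch; output shown for the default non-lua build).

// exampleMarkup is a hypothetical illustration of MEMEPosting output.
func exampleMarkup() string {
	// renders as: <span class='memearrows'>&gt;implying this renders</span>\n
	return MEMEPosting(">implying this renders", "/")
}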
127
contrib/backends/srndv2/src/srnd/markup_lua.go
Normal file
127
contrib/backends/srndv2/src/srnd/markup_lua.go
Normal file
@ -0,0 +1,127 @@
|
||||
// +build lua
|
||||
|
||||
package srnd
|
||||
|
||||
// #cgo pkg-config: lua5.2
|
||||
// #include <lua.h>
|
||||
// #include <lauxlib.h>
|
||||
// #include <lualib.h>
|
||||
// #include <stdlib.h>
|
||||
// #include <string.h>
|
||||
import "C"
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"log"
|
||||
"sync"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
const luaFuncName = "memeposting"
|
||||
|
||||
const markupErrorText = "[failed to render ur meme D:]"
|
||||
|
||||
// lua interpreter
|
||||
type Lua struct {
|
||||
mtx sync.RWMutex
|
||||
state *C.lua_State
|
||||
}
|
||||
|
||||
// do full GC cycle
|
||||
func (l *Lua) GC() {
|
||||
C.lua_gc(l.state, C.LUA_GCCOLLECT, 0)
|
||||
}
|
||||
|
||||
// close the interpreter
|
||||
// all resources are expunged and no operations can be done after this
|
||||
func (l *Lua) Close() {
|
||||
if l.state != nil {
|
||||
C.lua_close(l.state)
|
||||
}
|
||||
l.state = nil
|
||||
}
|
||||
|
||||
func (l *Lua) LoadFile(fname string) (err error) {
|
||||
cfname := C.CString(fname)
|
||||
//defer C.free(unsafe.Pointer(cfname))
|
||||
res := C.luaL_loadfilex(l.state, cfname, nil)
|
||||
if res == 0 {
|
||||
res = C.lua_pcallk(l.state, 0, C.LUA_MULTRET, 0, 0, nil)
|
||||
if res != 0 {
|
||||
err = errors.New(C.GoString(C.lua_tolstring(l.state, -1, nil)))
|
||||
}
|
||||
} else {
|
||||
// failed to load file
|
||||
err = errors.New("failed to load file " + fname)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (l *Lua) MEMEPosting(prefix, body string) (meme string) {
|
||||
l.mtx.Lock()
|
||||
defer l.mtx.Unlock()
|
||||
cf := C.CString(luaFuncName)
|
||||
C.lua_getglobal(l.state, cf)
|
||||
cp := C.CString(prefix)
|
||||
C.lua_pushstring(l.state, cp)
|
||||
cb := C.CString(body)
|
||||
C.lua_pushstring(l.state, cb)
|
||||
res := C.lua_pcallk(l.state, 2, 1, 0, 0, nil)
|
||||
var sz C.size_t
|
||||
cret := C.lua_tolstring(l.state, -1, &sz)
|
||||
|
||||
if sz > 0 {
|
||||
meme = C.GoStringN(cret, C.int(sz))
|
||||
}
|
||||
|
||||
C.lua_settop(l.state, -(1)-1)
|
||||
|
||||
if res != C.LUA_OK {
|
||||
// error
|
||||
log.Println("lua error:", meme)
|
||||
meme = markupErrorText
|
||||
}
|
||||
|
||||
// free buffers
|
||||
C.free(unsafe.Pointer(cb))
|
||||
C.free(unsafe.Pointer(cp))
|
||||
return
|
||||
}
|
||||
|
||||
// create a new lua interpreter
|
||||
func createLua() (l *Lua) {
|
||||
l = new(Lua)
|
||||
l.state = C.luaL_newstate()
|
||||
if l.state == nil {
|
||||
l = nil
|
||||
} else {
|
||||
// open stdlib
|
||||
C.luaL_openlibs(l.state)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
var luaInt *Lua
|
||||
|
||||
func extraMemePosting(src, prefix string) string {
|
||||
if luaInt == nil {
|
||||
return src
|
||||
}
|
||||
defer luaInt.GC()
|
||||
return luaInt.MEMEPosting(src, prefix)
|
||||
}
|
||||
|
||||
func SetMarkupScriptFile(fname string) error {
|
||||
if luaInt != nil {
|
||||
luaInt.Close()
|
||||
luaInt = nil
|
||||
}
|
||||
luaInt = createLua()
|
||||
err := luaInt.LoadFile(fname)
|
||||
if err == nil {
|
||||
return nil
|
||||
}
|
||||
luaInt.Close()
|
||||
luaInt = nil
|
||||
return err
|
||||
}
|
12
contrib/backends/srndv2/src/srnd/markup_nolua.go
Normal file
12
contrib/backends/srndv2/src/srnd/markup_nolua.go
Normal file
@ -0,0 +1,12 @@
|
||||
// +build !lua
|
||||
|
||||
package srnd
|
||||
|
||||
func extraMemePosting(src, prefix string) string {
|
||||
return src
|
||||
}
|
||||
|
||||
func SetMarkupScriptFile(fname string) error {
|
||||
// does nothing for non lua
|
||||
return nil
|
||||
}
|
457
contrib/backends/srndv2/src/srnd/message.go
Normal file
457
contrib/backends/srndv2/src/srnd/message.go
Normal file
@ -0,0 +1,457 @@
|
||||
//
|
||||
// message.go
|
||||
//
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"crypto/sha512"
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/majestrate/nacl"
|
||||
"io"
|
||||
"log"
|
||||
"mime"
|
||||
"mime/multipart"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type ArticleHeaders map[string][]string
|
||||
|
||||
func (self ArticleHeaders) Has(key string) bool {
|
||||
_, ok := self[key]
|
||||
return ok
|
||||
}
|
||||
|
||||
func (self ArticleHeaders) Set(key, val string) {
|
||||
self[key] = []string{val}
|
||||
}
|
||||
|
||||
func (self ArticleHeaders) Add(key, val string) {
|
||||
if self.Has(key) {
|
||||
self[key] = append(self[key], val)
|
||||
} else {
|
||||
self.Set(key, val)
|
||||
}
|
||||
}
|
||||
|
||||
func (self ArticleHeaders) Get(key, fallback string) string {
|
||||
val, ok := self[key]
|
||||
if ok {
|
||||
str := ""
|
||||
for _, k := range val {
|
||||
str += k + ", "
|
||||
}
|
||||
return str[:len(str)-2]
|
||||
} else {
|
||||
return fallback
|
||||
}
|
||||
}
|
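For clarity, how the header map above behaves:

// exampleHeaders is a hypothetical illustration of ArticleHeaders.
func exampleHeaders() {
	h := make(ArticleHeaders)
	h.Set("Newsgroups", "overchan.test")
	h.Add("Path", "node-a")
	h.Add("Path", "node-b")
	_ = h.Get("Path", "")             // multiple values join as "node-a, node-b"
	_ = h.Get("References", "<none>") // fallback when the header is absent
}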
||||
|
||||
type NNTPMessage interface {
|
||||
	// this message's message id
|
||||
MessageID() string
|
||||
// the parent message's messageid if it's specified
|
||||
Reference() string
|
||||
// the newsgroup this post is in
|
||||
Newsgroup() string
|
||||
// the name of the poster
|
||||
Name() string
|
||||
// any email address associated with the post
|
||||
Email() string
|
||||
// the subject of the post
|
||||
Subject() string
|
||||
// when this was posted
|
||||
Posted() int64
|
||||
// the path header
|
||||
Path() string
|
||||
// get signed part
|
||||
SignedPart() NNTPAttachment
|
||||
// append something to path
|
||||
// return message with new path
|
||||
AppendPath(part string) NNTPMessage
|
||||
// the type of this message usually a mimetype
|
||||
ContentType() string
|
||||
// was this post a sage?
|
||||
Sage() bool
|
||||
// was this post a root post?
|
||||
OP() bool
|
||||
// all attachments
|
||||
Attachments() []NNTPAttachment
|
||||
// all headers
|
||||
Headers() ArticleHeaders
|
||||
// write out everything
|
||||
WriteTo(wr io.Writer) error
|
||||
// write out body
|
||||
WriteBody(wr io.Writer) error
|
||||
// attach a file
|
||||
Attach(att NNTPAttachment)
|
||||
// get the plaintext message if it exists
|
||||
Message() string
|
||||
// pack the whole message and prepare for write
|
||||
Pack()
|
||||
// get the pubkey for this message if it was signed, otherwise empty string
|
||||
Pubkey() string
|
||||
// get the origin encrypted address, i2p destination or empty string for onion posters
|
||||
Addr() string
|
||||
// reset contents
|
||||
Reset()
|
||||
}
|
||||
|
||||
type nntpArticle struct {
|
||||
// mime header
|
||||
headers ArticleHeaders
|
||||
// multipart boundary
|
||||
boundary string
|
||||
// the text part of the message
|
||||
message string
|
||||
// any attachments
|
||||
attachments []NNTPAttachment
|
||||
// the inner nntp message to be verified
|
||||
signedPart *nntpAttachment
|
||||
}
|
||||
|
||||
func (self *nntpArticle) Reset() {
|
||||
self.headers = nil
|
||||
self.boundary = ""
|
||||
self.message = ""
|
||||
if self.attachments != nil {
|
||||
		for idx := range self.attachments {
|
||||
self.attachments[idx].Reset()
|
||||
self.attachments[idx] = nil
|
||||
}
|
||||
}
|
||||
self.attachments = nil
|
||||
if self.signedPart != nil {
|
||||
self.signedPart.Reset()
|
||||
self.signedPart = nil
|
||||
}
|
||||
}
|
||||
|
||||
func (self *nntpArticle) SignedPart() NNTPAttachment {
|
||||
return self.signedPart
|
||||
}
|
||||
|
||||
// create a simple plaintext nntp message
|
||||
func newPlaintextArticle(message, email, subject, name, instance, message_id, newsgroup string) NNTPMessage {
|
||||
nntp := &nntpArticle{
|
||||
headers: make(ArticleHeaders),
|
||||
}
|
||||
nntp.headers.Set("From", fmt.Sprintf("%s <%s>", name, email))
|
||||
nntp.headers.Set("Subject", subject)
|
||||
if isSage(subject) {
|
||||
nntp.headers.Set("X-Sage", "1")
|
||||
}
|
||||
nntp.headers.Set("Path", instance)
|
||||
nntp.headers.Set("Message-ID", message_id)
|
||||
// posted now
|
||||
nntp.headers.Set("Date", timeNowStr())
|
||||
nntp.headers.Set("Newsgroups", newsgroup)
|
||||
nntp.message = strings.Trim(message, "\r")
|
||||
nntp.Pack()
|
||||
return nntp
|
||||
}
|
||||
|
||||
// sign an article with a seed
|
||||
func signArticle(nntp NNTPMessage, seed []byte) (signed *nntpArticle, err error) {
|
||||
signed = new(nntpArticle)
|
||||
signed.headers = make(ArticleHeaders)
|
||||
h := nntp.Headers()
|
||||
// copy headers
|
||||
// copy into signed part
|
||||
for k := range h {
|
||||
if k == "X-PubKey-Ed25519" || k == "X-Signature-Ed25519-SHA512" {
|
||||
// don't set signature or pubkey header
|
||||
} else if k == "Content-Type" {
|
||||
signed.headers.Set(k, "message/rfc822; charset=UTF-8")
|
||||
} else {
|
||||
v := h[k][0]
|
||||
signed.headers.Set(k, v)
|
||||
}
|
||||
}
|
||||
sha := sha512.New()
|
||||
signed.signedPart = &nntpAttachment{}
|
||||
// write body to sign buffer
|
||||
mw := io.MultiWriter(sha, signed.signedPart)
|
||||
err = nntp.WriteTo(mw)
|
||||
mw.Write([]byte{10})
|
||||
if err == nil {
|
||||
// build keypair
|
||||
kp := nacl.LoadSignKey(seed)
|
||||
if kp == nil {
|
||||
log.Println("failed to load seed for signing article")
|
||||
return
|
||||
}
|
||||
defer kp.Free()
|
||||
sk := kp.Secret()
|
||||
pk := getSignPubkey(sk)
|
||||
		// sign the digest with the secret key
|
||||
digest := sha.Sum(nil)
|
||||
sig := cryptoSign(digest, sk)
|
||||
// log that we signed it
|
||||
log.Printf("signed %s pubkey=%s sig=%s hash=%s", nntp.MessageID(), pk, sig, hexify(digest))
|
||||
signed.headers.Set("X-Signature-Ed25519-SHA512", sig)
|
||||
signed.headers.Set("X-PubKey-Ed25519", pk)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (self *nntpArticle) WriteTo(wr io.Writer) (err error) {
|
||||
// write headers
|
||||
hdrs := self.headers
|
||||
for hdr, hdr_vals := range hdrs {
|
||||
for _, hdr_val := range hdr_vals {
|
||||
wr.Write([]byte(hdr))
|
||||
wr.Write([]byte(": "))
|
||||
wr.Write([]byte(hdr_val))
|
||||
_, err = wr.Write([]byte{10})
|
||||
if err != nil {
|
||||
log.Println("error while writing headers", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
// done headers
|
||||
_, err = wr.Write([]byte{10})
|
||||
if err != nil {
|
||||
log.Println("error while writing body", err)
|
||||
return
|
||||
}
|
||||
|
||||
// write body
|
||||
err = self.WriteBody(wr)
|
||||
return
|
||||
}
|
||||
|
||||
func (self *nntpArticle) Pubkey() string {
|
||||
return self.headers.Get("X-PubKey-Ed25519", self.headers.Get("X-Pubkey-Ed25519", ""))
|
||||
}
|
||||
|
||||
func (self *nntpArticle) MessageID() (msgid string) {
|
||||
for _, h := range []string{"Message-ID", "Messageid", "MessageID", "Message-Id"} {
|
||||
mid := self.headers.Get(h, "")
|
||||
if mid != "" {
|
||||
msgid = string(mid)
|
||||
return
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (self *nntpArticle) Pack() {
|
||||
if len(self.attachments) > 0 {
|
||||
if len(self.boundary) == 0 {
|
||||
			// we have no boundary, set it
|
||||
self.boundary = randStr(24)
|
||||
// set headers
|
||||
self.headers.Set("Mime-Version", "1.0")
|
||||
self.headers.Set("Content-Type", fmt.Sprintf("multipart/mixed; boundary=%s", self.boundary))
|
||||
}
|
||||
} else if self.signedPart == nil {
|
||||
self.headers.Set("Content-Type", "text/plain; charset=utf-8")
|
||||
}
|
||||
}
|
||||
|
||||
func (self *nntpArticle) Reference() string {
|
||||
return self.headers.Get("Reference", self.headers.Get("References", ""))
|
||||
}
|
||||
|
||||
func (self *nntpArticle) Newsgroup() string {
|
||||
return self.headers.Get("Newsgroups", "")
|
||||
}
|
||||
|
||||
func (self *nntpArticle) Name() string {
|
||||
from := self.headers.Get("From", "anonymous <a@no.n>")
|
||||
idx := strings.Index(from, "<")
|
||||
if idx > 1 {
|
||||
return from[:idx]
|
||||
}
|
||||
return "[Invalid From header]"
|
||||
}
|
||||
|
||||
func (self *nntpArticle) Addr() (addr string) {
|
||||
addr = self.headers.Get("X-Encrypted-Ip", "")
|
||||
if addr != "" {
|
||||
return
|
||||
}
|
||||
|
||||
addr = self.headers.Get("X-Encrypted-IP", "")
|
||||
if addr != "" {
|
||||
return
|
||||
}
|
||||
|
||||
addr = self.headers.Get("X-I2P-DestHash", "")
|
||||
if addr != "" {
|
||||
if addr == "None" {
|
||||
return ""
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
addr = self.headers.Get("X-I2p-Desthash", "")
|
||||
return
|
||||
}
|
||||
|
||||
func (self *nntpArticle) Email() string {
	from := self.headers.Get("From", "anonymous <a@no.n>")
	// the address sits between the angle brackets of the From header
	start := strings.Index(from, "<")
	end := strings.Index(from, ">")
	if start >= 0 && end > start+1 {
		return from[start+1 : end]
	}
	return "[Invalid From header]"
}
|
||||
|
||||
func (self *nntpArticle) Subject() string {
|
||||
return self.headers.Get("Subject", "")
|
||||
}
|
||||
|
||||
func (self *nntpArticle) Posted() int64 {
|
||||
posted := self.headers.Get("Date", "")
|
||||
t, err := time.Parse(time.RFC1123Z, posted)
|
||||
if err == nil {
|
||||
return t.Unix()
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (self *nntpArticle) Message() string {
|
||||
return strings.Trim(self.message, "\x00")
|
||||
}
|
||||
|
||||
func (self *nntpArticle) Path() string {
|
||||
return self.headers.Get("Path", "unspecified")
|
||||
}
|
||||
|
||||
func (self *nntpArticle) Headers() ArticleHeaders {
|
||||
return self.headers
|
||||
}
|
||||
|
||||
func (self *nntpArticle) AppendPath(part string) NNTPMessage {
|
||||
if self.headers.Has("Path") {
|
||||
self.headers.Set("Path", part+"!"+self.Path())
|
||||
} else {
|
||||
self.headers.Set("Path", part)
|
||||
}
|
||||
return self
|
||||
}
|
||||
func (self *nntpArticle) ContentType() string {
|
||||
// assumes text/plain if unspecified
|
||||
return self.headers.Get("Content-Type", "text/plain; charset=UTF-8")
|
||||
}
|
||||
|
||||
func (self *nntpArticle) Sage() bool {
|
||||
return self.headers.Get("X-Sage", "") == "1"
|
||||
}
|
||||
|
||||
func (self *nntpArticle) OP() bool {
|
||||
ref := self.Reference()
|
||||
return ref == "" || ref == self.MessageID()
|
||||
}
|
||||
|
||||
func (self *nntpArticle) Attachments() []NNTPAttachment {
|
||||
return self.attachments
|
||||
}
|
||||
|
||||
func (self *nntpArticle) Attach(att NNTPAttachment) {
|
||||
self.attachments = append(self.attachments, att)
|
||||
}
|
||||
|
||||
func (self *nntpArticle) WriteBody(wr io.Writer) (err error) {
|
||||
	// signed message: write the signed part out verbatim, without re-encoding
|
||||
if self.signedPart != nil {
|
||||
_, err = wr.Write(self.signedPart.Bytes())
|
||||
return
|
||||
}
|
||||
self.Pack()
|
||||
content_type := self.ContentType()
|
||||
_, params, err := mime.ParseMediaType(content_type)
|
||||
if err != nil {
|
||||
log.Println("failed to parse media type", err)
|
||||
return err
|
||||
}
|
||||
|
||||
boundary, ok := params["boundary"]
|
||||
if ok {
|
||||
w := multipart.NewWriter(NewLineWriter(wr))
|
||||
|
||||
err = w.SetBoundary(boundary)
|
||||
if err == nil {
|
||||
attachments := []NNTPAttachment{createPlaintextAttachment([]byte(self.message))}
|
||||
attachments = append(attachments, self.attachments...)
|
||||
for _, att := range attachments {
|
||||
if att == nil {
|
||||
continue
|
||||
}
|
||||
hdr := att.Header()
|
||||
hdr.Add("Content-Transfer-Encoding", "base64")
|
||||
				// assign with = (not :=) so a part creation error reaches the named return
				var part io.Writer
				part, err = w.CreatePart(hdr)
				if err != nil {
					log.Println("failed to create part", err)
					break
				}
|
||||
var buff [1024]byte
|
||||
var b io.ReadCloser
|
||||
b, err = att.OpenBody()
|
||||
if err == nil {
|
||||
enc := base64.NewEncoder(base64.StdEncoding, part)
|
||||
_, err = io.CopyBuffer(enc, b, buff[:])
|
||||
b.Close()
|
||||
enc.Close()
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
part = nil
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
log.Println("error writing part", err)
|
||||
}
|
||||
err = w.Close()
|
||||
w = nil
|
||||
} else {
|
||||
// write out message
|
||||
_, err = io.WriteString(wr, self.message)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// verify a signed message's body
|
||||
// innerHandler must close reader when done
|
||||
// returns error if one happens while verifying article
|
||||
func verifyMessage(pk, sig string, body io.Reader, innerHandler func(map[string][]string, io.Reader)) (err error) {
|
||||
log.Println("unwrapping signed message from", pk)
|
||||
pk_bytes := unhex(pk)
|
||||
sig_bytes := unhex(sig)
|
||||
h := sha512.New()
|
||||
pr, pw := io.Pipe()
|
||||
// read header
|
||||
// handle inner body
|
||||
go func(hdr_reader *io.PipeReader) {
|
||||
r := bufio.NewReader(hdr_reader)
|
||||
msg, err := readMIMEHeader(r)
|
||||
if err == nil {
|
||||
innerHandler(msg.Header, msg.Body)
|
||||
}
|
||||
hdr_reader.Close()
|
||||
}(pr)
|
||||
body = io.TeeReader(body, pw)
|
||||
// copy body 128 bytes at a time
|
||||
var buff [128]byte
|
||||
_, err = io.CopyBuffer(h, body, buff[:])
|
||||
if err == nil {
|
||||
hash := h.Sum(nil)
|
||||
log.Printf("hash=%s", hexify(hash))
|
||||
log.Printf("sig=%s", hexify(sig_bytes))
|
||||
if nacl.CryptoVerifyFucky(hash, sig_bytes, pk_bytes) {
|
||||
log.Println("signature is valid :^)")
|
||||
} else {
|
||||
err = errors.New("invalid signature")
|
||||
}
|
||||
}
|
||||
// flush pipe
|
||||
pw.Close()
|
||||
return
|
||||
}
|
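How signArticle and verifyMessage above pair up, roughly; the callback body and error handling here are assumptions, and the sketch lives inside package srnd so it can reach the unexported headers.

// exampleSignAndVerify is a hypothetical illustration of the sign/verify pair.
func exampleSignAndVerify(nntp NNTPMessage, seed []byte) error {
	signed, err := signArticle(nntp, seed)
	if err != nil {
		return err
	}
	pk := signed.Pubkey()
	sig := signed.headers.Get("X-Signature-Ed25519-SHA512", "")
	// verifyMessage hashes the signed part and checks the detached signature,
	// handing the inner article's headers and body to the callback on success
	return verifyMessage(pk, sig, strings.NewReader(signed.SignedPart().AsString()),
		func(hdr map[string][]string, body io.Reader) {
			// a real caller would parse and store the inner article here
		})
}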
364
contrib/backends/srndv2/src/srnd/mod.go
Normal file
364
contrib/backends/srndv2/src/srnd/mod.go
Normal file
@ -0,0 +1,364 @@
|
||||
//
|
||||
// mod.go
|
||||
// post moderation
|
||||
//
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// regenerate pages function
|
||||
type RegenFunc func(newsgroup, msgid, root string, page int)
|
||||
|
||||
// does an action for the administrator
|
||||
// takes in json
|
||||
type AdminFunc func(param map[string]interface{}) (interface{}, error)
|
||||
|
||||
// interface for moderation ui
|
||||
type ModUI interface {
|
||||
|
||||
// channel for daemon to poll for nntp articles from the mod ui
|
||||
MessageChan() chan NNTPMessage
|
||||
|
||||
// check if this key is allowed to access
|
||||
// return true if it can otherwise false
|
||||
CheckKey(privkey, scope string) (bool, error)
|
||||
|
||||
// serve the base page
|
||||
ServeModPage(wr http.ResponseWriter, r *http.Request)
|
||||
// handle a login POST request
|
||||
HandleLogin(wr http.ResponseWriter, r *http.Request)
|
||||
// handle a delete article request
|
||||
HandleDeletePost(wr http.ResponseWriter, r *http.Request)
|
||||
// handle a ban address request
|
||||
HandleBanAddress(wr http.ResponseWriter, r *http.Request)
|
||||
// handle an unban address request
|
||||
HandleUnbanAddress(wr http.ResponseWriter, r *http.Request)
|
||||
// handle add a pubkey
|
||||
HandleAddPubkey(wr http.ResponseWriter, r *http.Request)
|
||||
// handle removing a pubkey
|
||||
HandleDelPubkey(wr http.ResponseWriter, r *http.Request)
|
||||
// handle key generation
|
||||
HandleKeyGen(wr http.ResponseWriter, r *http.Request)
|
||||
// handle admin command
|
||||
HandleAdminCommand(wr http.ResponseWriter, r *http.Request)
|
||||
}
|
||||
|
||||
type ModEvent interface {
|
||||
// turn it into a string for putting into an article
|
||||
String() string
|
||||
// what type of mod event
|
||||
Action() string
|
||||
// what reason for the event
|
||||
Reason() string
|
||||
// what is the event acting on
|
||||
Target() string
|
||||
// scope of the event, regex of newsgroup
|
||||
Scope() string
|
||||
// when this mod event expires, unix nano
|
||||
Expires() int64
|
||||
}
|
||||
|
||||
type simpleModEvent string
|
||||
|
||||
func (self simpleModEvent) String() string {
|
||||
return string(self)
|
||||
}
|
||||
|
||||
func (self simpleModEvent) Action() string {
|
||||
return strings.Split(string(self), " ")[0]
|
||||
}
|
||||
|
||||
func (self simpleModEvent) Reason() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (self simpleModEvent) Target() string {
|
||||
return strings.Split(string(self), " ")[1]
|
||||
}
|
||||
|
||||
func (self simpleModEvent) Scope() string {
|
||||
// TODO: hard coded
|
||||
return "overchan.*"
|
||||
}
|
||||
|
||||
func (self simpleModEvent) Expires() int64 {
|
||||
// no expiration
|
||||
return -1
|
||||
}
|
||||
|
||||
// create an overchan-delete mod event
|
||||
func overchanDelete(msgid string) ModEvent {
|
||||
return simpleModEvent(fmt.Sprintf("delete %s", msgid))
|
||||
}
|
||||
|
||||
// create an overchan-inet-ban mod event
|
||||
func overchanInetBan(encAddr, key string, expire int64) ModEvent {
|
||||
return simpleModEvent(fmt.Sprintf("overchan-inet-ban %s:%s:%d", encAddr, key, expire))
|
||||
}
|
||||
|
||||
// moderation message
|
||||
// wraps multiple mod events
|
||||
// is turned into an NNTPMessage later
|
||||
type ModMessage []ModEvent
|
||||
|
||||
// write this mod message's body
|
||||
func (self ModMessage) WriteTo(wr io.Writer, delim []byte) (err error) {
|
||||
// write body
|
||||
	for _, ev := range self {
		if _, err = io.WriteString(wr, ev.String()); err != nil {
			return
		}
		if _, err = wr.Write(delim); err != nil {
			return
		}
	}
|
||||
return
|
||||
}
|
||||
|
||||
func ParseModEvent(line string) ModEvent {
|
||||
return simpleModEvent(line)
|
||||
}
|
||||
|
||||
// wrap mod message in an nntp message
|
||||
// does not sign
|
||||
func wrapModMessage(mm ModMessage) NNTPMessage {
|
||||
pathname := "nntpchan.censor"
|
||||
nntp := &nntpArticle{
|
||||
headers: make(ArticleHeaders),
|
||||
}
|
||||
nntp.headers.Set("Newsgroups", "ctl")
|
||||
nntp.headers.Set("Content-Type", "text/plain; charset=UTF-8")
|
||||
nntp.headers.Set("Message-ID", genMessageID(pathname))
|
||||
nntp.headers.Set("Date", timeNowStr())
|
||||
nntp.headers.Set("Path", pathname)
|
||||
// todo: set these maybe?
|
||||
nntp.headers.Set("From", "anon <a@n.on>")
|
||||
nntp.headers.Set("Subject", "censor")
|
||||
|
||||
var buff bytes.Buffer
|
||||
	// events are LF delimited
	_ = mm.WriteTo(&buff, []byte{10})
	// the joined events become the plaintext message body
	nntp.message = buff.String()
	buff.Reset()
|
||||
return nntp
|
||||
}
|
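Putting the pieces above together, a moderation delete would be built, wrapped and signed roughly like this; the target message-id is a placeholder.

// exampleModDelete is a hypothetical illustration of building a mod message.
func exampleModDelete(seed []byte) (NNTPMessage, error) {
	mm := ModMessage{overchanDelete("<somepost@example.tld>")}
	nntp := wrapModMessage(mm)
	// the ctl article only takes effect once signed by a trusted key
	return signArticle(nntp, seed)
}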
||||
|
||||
type ModEngine interface {
|
||||
// chan to send the mod engine posts given message_id
|
||||
MessageChan() chan string
|
||||
// delete post of a poster
|
||||
DeletePost(msgid string, regen RegenFunc) error
|
||||
// ban a cidr
|
||||
BanAddress(cidr string) error
|
||||
// do we allow this public key to delete this message-id ?
|
||||
AllowDelete(pubkey, msgid string) bool
|
||||
// do we allow this public key to do inet-ban?
|
||||
AllowBan(pubkey string) bool
|
||||
// load a mod message
|
||||
LoadMessage(msgid string) NNTPMessage
|
||||
}
|
||||
|
||||
type modEngine struct {
|
||||
database Database
|
||||
store ArticleStore
|
||||
chnl chan string
|
||||
}
|
||||
|
||||
func (self modEngine) LoadMessage(msgid string) NNTPMessage {
|
||||
return self.store.GetMessage(msgid)
|
||||
}
|
||||
|
||||
func (self modEngine) MessageChan() chan string {
|
||||
return self.chnl
|
||||
}
|
||||
|
||||
func (self modEngine) BanAddress(cidr string) (err error) {
|
||||
return self.database.BanAddr(cidr)
|
||||
}
|
||||
|
||||
func (self modEngine) DeletePost(msgid string, regen RegenFunc) (err error) {
|
||||
hdr, err := self.database.GetHeadersForMessage(msgid)
|
||||
var delposts []string
|
||||
var page int64
|
||||
var ref, group string
|
||||
rootmsgid := ""
|
||||
if hdr == nil {
|
||||
log.Println("failed to get headers for article", msgid, err)
|
||||
} else {
|
||||
ref = hdr.Get("References", "")
|
||||
group = hdr.Get("Newsgroups", "")
|
||||
if ref == "" || ref == msgid {
|
||||
// is root post
|
||||
// delete replies too
|
||||
repls := self.database.GetThreadReplies(msgid, 0, 0)
|
||||
if repls == nil {
|
||||
log.Println("cannot get thread replies for", msgid)
|
||||
} else {
|
||||
delposts = append(delposts, repls...)
|
||||
}
|
||||
|
||||
_, page, err = self.database.GetPageForRootMessage(msgid)
|
||||
ref = msgid
|
||||
rootmsgid = msgid
|
||||
} else {
|
||||
_, page, err = self.database.GetPageForRootMessage(ref)
|
||||
}
|
||||
}
|
||||
delposts = append(delposts, msgid)
|
||||
// get list of files to delete
|
||||
var delfiles []string
|
||||
for _, delmsg := range delposts {
|
||||
article := self.store.GetFilename(delmsg)
|
||||
delfiles = append(delfiles, article)
|
||||
// get attachments for post
|
||||
atts := self.database.GetPostAttachments(delmsg)
|
||||
if atts != nil {
|
||||
for _, att := range atts {
|
||||
img := self.store.AttachmentFilepath(att)
|
||||
thm := self.store.ThumbnailFilepath(att)
|
||||
delfiles = append(delfiles, img, thm)
|
||||
}
|
||||
}
|
||||
}
|
||||
// delete all files
|
||||
for _, f := range delfiles {
|
||||
log.Printf("delete file: %s", f)
|
||||
os.Remove(f)
|
||||
}
|
||||
|
||||
if rootmsgid != "" {
|
||||
self.database.DeleteThread(rootmsgid)
|
||||
}
|
||||
|
||||
for _, delmsg := range delposts {
|
||||
// delete article from post database
|
||||
err = self.database.DeleteArticle(delmsg)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
// ban article
|
||||
self.database.BanArticle(delmsg, "deleted by moderator")
|
||||
}
|
||||
regen(group, msgid, ref, int(page))
|
||||
return nil
|
||||
}
|
||||
|
||||
func (self modEngine) AllowBan(pubkey string) bool {
|
||||
is_admin, _ := self.database.CheckAdminPubkey(pubkey)
|
||||
if is_admin {
|
||||
// admins can do whatever
|
||||
return true
|
||||
}
|
||||
return self.database.CheckModPubkeyGlobal(pubkey)
|
||||
}
|
||||
|
||||
func (self modEngine) AllowDelete(pubkey, msgid string) (allow bool) {
|
||||
is_admin, _ := self.database.CheckAdminPubkey(pubkey)
|
||||
if is_admin {
|
||||
// admins can do whatever
|
||||
return true
|
||||
}
|
||||
if self.database.CheckModPubkeyGlobal(pubkey) {
|
||||
// globals can delete as they wish
|
||||
return true
|
||||
}
|
||||
// check for scoped permissions
|
||||
_, group, _, err := self.database.GetInfoForMessage(msgid)
|
||||
if err == nil && newsgroupValidFormat(group) {
|
||||
allow = self.database.CheckModPubkeyCanModGroup(pubkey, group)
|
||||
} else if err != nil {
|
||||
log.Println("db error in mod engine while checking permissions", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// run a mod engine logic mainloop
|
||||
func RunModEngine(mod ModEngine, regen RegenFunc) {
|
||||
|
||||
chnl := mod.MessageChan()
|
||||
for {
|
||||
msgid := <-chnl
|
||||
nntp := mod.LoadMessage(msgid)
|
||||
if nntp == nil {
|
||||
log.Println("failed to load mod message", msgid)
|
||||
continue
|
||||
}
|
||||
// sanity check
|
||||
if nntp.Newsgroup() == "ctl" {
|
||||
pubkey := nntp.Pubkey()
|
||||
for _, line := range strings.Split(nntp.Message(), "\n") {
|
||||
line = strings.Trim(line, "\r\t\n ")
|
||||
ev := ParseModEvent(line)
|
||||
action := ev.Action()
|
||||
if action == "delete" {
|
||||
msgid := ev.Target()
|
||||
if !ValidMessageID(msgid) {
|
||||
// invalid message-id
|
||||
log.Println("invalid message-id for mod delete", msgid, "from", pubkey)
|
||||
continue
|
||||
}
|
||||
// this is a delete action
|
||||
if mod.AllowDelete(pubkey, msgid) {
|
||||
err := mod.DeletePost(msgid, regen)
|
||||
if err != nil {
|
||||
log.Println(msgid, err)
|
||||
}
|
||||
} else {
|
||||
log.Printf("pubkey=%s will not delete %s not trusted", pubkey, msgid)
|
||||
}
|
||||
} else if action == "overchan-inet-ban" {
|
||||
// ban action
|
||||
target := ev.Target()
|
||||
if target[0] == '[' {
|
||||
// probably a literal ipv6 rangeban
|
||||
if mod.AllowBan(pubkey) {
|
||||
err := mod.BanAddress(target)
|
||||
if err != nil {
|
||||
log.Println("failed to do literal ipv6 range ban on", target, err)
|
||||
}
|
||||
} else {
|
||||
log.Println("ignoring literal ipv6 rangeban from", pubkey, "as they are not allowed to ban")
|
||||
}
|
||||
continue
|
||||
}
|
||||
parts := strings.Split(target, ":")
|
||||
if len(parts) == 3 {
|
||||
// encrypted ip
|
||||
encaddr, key := parts[0], parts[1]
|
||||
cidr := decAddr(encaddr, key)
|
||||
if cidr == "" {
|
||||
log.Println("failed to decrypt inet ban")
|
||||
} else if mod.AllowBan(pubkey) {
|
||||
err := mod.BanAddress(cidr)
|
||||
if err != nil {
|
||||
log.Println("failed to do range ban on", cidr, err)
|
||||
}
|
||||
} else {
|
||||
log.Println("ingoring encrypted-ip inet ban from", pubkey, "as they are not allowed to ban")
|
||||
}
|
||||
} else if len(parts) == 1 {
|
||||
// literal cidr
|
||||
cidr := parts[0]
|
||||
if mod.AllowBan(pubkey) {
|
||||
err := mod.BanAddress(cidr)
|
||||
if err != nil {
|
||||
log.Println("failed to do literal range ban on", cidr, err)
|
||||
}
|
||||
} else {
|
||||
log.Println("ingoring literal cidr range ban from", pubkey, "as they are not allowed to ban")
|
||||
}
|
||||
} else {
|
||||
log.Printf("invalid overchan-inet-ban: target=%s", target)
|
||||
}
|
||||
} else {
|
||||
log.Println("invalid mod action", action, "from", pubkey)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
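// Hedged wiring sketch (not part of this commit): how a modEngine could be
// constructed and driven by RunModEngine. It assumes RegenFunc has the shape
// func(group, msgid, root string, page int) implied by the regen(...) call in
// DeletePost above; the daemon's real setup may differ.
func exampleRunModEngine(db Database, store ArticleStore) {
	// build a mod engine over an existing database and article store
	mod := modEngine{
		database: db,
		store:    store,
		chnl:     make(chan string),
	}
	// regen callback; the real daemon passes its own RegenFunc
	regen := func(group, msgid, root string, page int) {
		log.Println("regen", group, "page", page, "after deleting", msgid)
	}
	go RunModEngine(mod, regen)
	// hand the engine a mod message-id to process (hypothetical value)
	mod.MessageChan() <- "<modmsg@example.tld>"
}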
|
733
contrib/backends/srndv2/src/srnd/mod_http.go
Normal file
@ -0,0 +1,733 @@
|
||||
//
|
||||
// mod_http.go
|
||||
//
|
||||
// http mod panel
|
||||
//
|
||||
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/gorilla/csrf"
|
||||
"github.com/gorilla/sessions"
|
||||
"github.com/majestrate/nacl"
|
||||
"io"
|
||||
"log"
|
||||
"net"
|
||||
"net/http"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type httpModUI struct {
|
||||
regenAll func()
|
||||
regen func(ArticleEntry)
|
||||
regenGroup func(string)
|
||||
delete func(string)
|
||||
deleteBoardPages func(string)
|
||||
modMessageChan chan NNTPMessage
|
||||
daemon *NNTPDaemon
|
||||
articles ArticleStore
|
||||
store *sessions.CookieStore
|
||||
prefix string
|
||||
mod_prefix string
|
||||
}
|
||||
|
||||
func createHttpModUI(frontend *httpFrontend) httpModUI {
|
||||
return httpModUI{frontend.regenAll, frontend.Regen, frontend.regenerateBoard, frontend.deleteThreadMarkup, frontend.deleteBoardMarkup, make(chan NNTPMessage), frontend.daemon, frontend.daemon.store, frontend.store, frontend.prefix, frontend.prefix + "mod/"}
|
||||
|
||||
}
|
||||
|
||||
func extractGroup(param map[string]interface{}) string {
|
||||
return extractParam(param, "newsgroup")
|
||||
}
|
||||
|
||||
func (self httpModUI) getAdminFunc(funcname string) AdminFunc {
|
||||
if funcname == "template.reload" {
|
||||
return func(param map[string]interface{}) (interface{}, error) {
|
||||
tname, ok := param["template"]
|
||||
if ok {
|
||||
t := ""
|
||||
switch tname.(type) {
|
||||
case string:
|
||||
t = tname.(string)
|
||||
default:
|
||||
return "failed to reload templates", errors.New("invalid parameters")
|
||||
}
|
||||
template.reloadTemplate(t)
|
||||
return "reloaded " + t, nil
|
||||
}
|
||||
template.reloadAllTemplates()
|
||||
return "reloaded all templates", nil
|
||||
}
|
||||
} else if funcname == "frontend.regen" {
|
||||
return func(param map[string]interface{}) (interface{}, error) {
|
||||
newsgroup := extractGroup(param)
|
||||
if len(newsgroup) > 0 {
|
||||
if self.daemon.database.HasNewsgroup(newsgroup) {
|
||||
go self.regenGroup(newsgroup)
|
||||
} else {
|
||||
return "failed to regen group", errors.New("no such board")
|
||||
}
|
||||
} else {
|
||||
go self.regenAll()
|
||||
}
|
||||
return "started regeneration", nil
|
||||
}
|
||||
} else if funcname == "thumbnail.regen" {
|
||||
return func(param map[string]interface{}) (interface{}, error) {
|
||||
threads, ok := param["threads"]
|
||||
t := 1
|
||||
if ok {
|
||||
switch threads.(type) {
|
||||
case int64:
|
||||
t = int(threads.(int64))
|
||||
if t <= 0 {
|
||||
return "failed to regen thumbnails", errors.New("invalid number of threads")
|
||||
}
|
||||
default:
|
||||
return "failed to regen thumbnails", errors.New("invalid parameters")
|
||||
}
|
||||
}
|
||||
log.Println("regenerating all thumbnails with", t, "threads")
|
||||
msgid := extractParam(param, "message-id")
|
||||
if ValidMessageID(msgid) {
|
||||
a := self.daemon.database.GetPostAttachments(msgid)
|
||||
go func(atts []string) {
|
||||
for _, att := range atts {
|
||||
self.articles.GenerateThumbnail(att)
|
||||
}
|
||||
}(a)
|
||||
return fmt.Sprintf("regenerating %d thumbnails for %s", len(a), msgid), nil
|
||||
}
|
||||
go reThumbnail(t, self.articles, true)
|
||||
return fmt.Sprintf("started rethumbnailing with %d threads", t), nil
|
||||
}
|
||||
} else if funcname == "frontend.add" {
|
||||
return func(param map[string]interface{}) (interface{}, error) {
|
||||
newsgroup := extractGroup(param)
|
||||
if len(newsgroup) > 0 && newsgroupValidFormat(newsgroup) && strings.HasPrefix(newsgroup, "overchan.") && newsgroup != "overchan." {
|
||||
if self.daemon.database.HasNewsgroup(newsgroup) {
|
||||
// we already have this newsgroup
|
||||
return "already have that newsgroup", nil
|
||||
} else {
|
||||
// we don't have this newsgroup
|
||||
log.Println("adding newsgroup", newsgroup)
|
||||
self.daemon.database.RegisterNewsgroup(newsgroup)
|
||||
return "added " + newsgroup, nil
|
||||
}
|
||||
}
|
||||
return "bad newsgroup", errors.New("invalid newsgroup name: " + newsgroup)
|
||||
}
|
||||
} else if funcname == "frontend.ban" {
|
||||
return func(param map[string]interface{}) (interface{}, error) {
|
||||
newsgroup := extractGroup(param)
|
||||
if len(newsgroup) > 0 {
|
||||
log.Println("banning", newsgroup)
|
||||
// check ban
|
||||
banned, err := self.daemon.database.NewsgroupBanned(newsgroup)
|
||||
if banned {
|
||||
// already banned
|
||||
return "cannot ban newsgroup", errors.New("already banned " + newsgroup)
|
||||
} else if err == nil {
|
||||
// do the ban here
|
||||
err = self.daemon.database.BanNewsgroup(newsgroup)
|
||||
// check for error
|
||||
if err == nil {
|
||||
// all gud
|
||||
return "banned " + newsgroup, nil
|
||||
} else {
|
||||
// error while banning
|
||||
return "error banning newsgroup", err
|
||||
}
|
||||
} else {
|
||||
// error checking ban
|
||||
return "cannot check ban", err
|
||||
}
|
||||
} else {
|
||||
// bad parameters
|
||||
return "cannot ban newsgroup", errors.New("invalid parameters")
|
||||
}
|
||||
}
|
||||
} else if funcname == "frontend.unban" {
|
||||
return func(param map[string]interface{}) (interface{}, error) {
|
||||
newsgroup := extractGroup(param)
|
||||
if len(newsgroup) > 0 {
|
||||
log.Println("unbanning", newsgroup)
|
||||
err := self.daemon.database.UnbanNewsgroup(newsgroup)
|
||||
if err == nil {
|
||||
return "unbanned " + newsgroup, nil
|
||||
} else {
|
||||
return "couldn't unban " + newsgroup, err
|
||||
}
|
||||
} else {
|
||||
return "cannot unban", errors.New("invalid paramters")
|
||||
}
|
||||
}
|
||||
} else if funcname == "frontend.nuke" {
|
||||
return func(param map[string]interface{}) (interface{}, error) {
|
||||
newsgroup := extractGroup(param)
|
||||
if len(newsgroup) > 0 {
|
||||
log.Println("nuking", newsgroup)
|
||||
// get every thread we have in this group
|
||||
for _, entry := range self.daemon.database.GetLastBumpedThreads(newsgroup, 10000) {
|
||||
// delete their thread page
|
||||
self.delete(entry.MessageID())
|
||||
}
|
||||
// delete every board page
|
||||
self.deleteBoardPages(newsgroup)
|
||||
go self.daemon.database.NukeNewsgroup(newsgroup, self.articles)
|
||||
return "nuke started", nil
|
||||
} else {
|
||||
return "cannot nuke", errors.New("invalid parameters")
|
||||
}
|
||||
}
|
||||
} else if funcname == "pubkey.add" {
|
||||
return func(param map[string]interface{}) (interface{}, error) {
|
||||
pubkey := extractParam(param, "pubkey")
|
||||
group := extractGroup(param)
|
||||
if group == "" {
|
||||
log.Println("pubkey.add global mod", pubkey)
|
||||
if self.daemon.database.CheckModPubkeyGlobal(pubkey) {
|
||||
return "already added", nil
|
||||
} else {
|
||||
err := self.daemon.database.MarkModPubkeyGlobal(pubkey)
|
||||
if err == nil {
|
||||
return "added", nil
|
||||
} else {
|
||||
return "error", err
|
||||
}
|
||||
}
|
||||
} else if newsgroupValidFormat(group) {
|
||||
log.Println("pubkey.add", group, "mod", pubkey)
|
||||
if self.daemon.database.CheckModPubkeyCanModGroup(pubkey, group) {
|
||||
return "already added", nil
|
||||
}
|
||||
err := self.daemon.database.MarkModPubkeyCanModGroup(pubkey, group)
|
||||
if err == nil {
|
||||
return "added", nil
|
||||
} else {
|
||||
return "error", err
|
||||
}
|
||||
} else {
|
||||
return "bad newsgroup: " + group, nil
|
||||
}
|
||||
}
|
||||
} else if funcname == "pubkey.del" {
|
||||
return func(param map[string]interface{}) (interface{}, error) {
|
||||
pubkey := extractParam(param, "pubkey")
|
||||
log.Println("pubkey.del", pubkey)
|
||||
if self.daemon.database.CheckModPubkeyGlobal(pubkey) {
|
||||
err := self.daemon.database.UnMarkModPubkeyGlobal(pubkey)
|
||||
if err == nil {
|
||||
return "removed", nil
|
||||
} else {
|
||||
return "error", err
|
||||
}
|
||||
} else {
|
||||
return "key not already trusted", nil
|
||||
}
|
||||
}
|
||||
|
||||
} else if funcname == "nntp.login.del" {
|
||||
return func(param map[string]interface{}) (interface{}, error) {
|
||||
username := extractParam(param, "username")
|
||||
if len(username) > 0 {
|
||||
exists, err := self.daemon.database.CheckNNTPUserExists(username)
|
||||
if exists {
|
||||
err = self.daemon.database.RemoveNNTPLogin(username)
|
||||
if err == nil {
|
||||
return "removed user", nil
|
||||
}
|
||||
return "", nil
|
||||
} else if err == nil {
|
||||
return "no such user", nil
|
||||
} else {
|
||||
return "", err
|
||||
}
|
||||
} else {
|
||||
return "no such user", nil
|
||||
}
|
||||
}
|
||||
} else if funcname == "nntp.login.add" {
|
||||
return func(param map[string]interface{}) (interface{}, error) {
|
||||
username := extractParam(param, "username")
|
||||
passwd := extractParam(param, "passwd")
|
||||
if len(username) > 0 && len(passwd) > 0 {
|
||||
log.Println("nntp.login.add", username)
|
||||
// check if the user already exists
|
||||
exists, err := self.daemon.database.CheckNNTPUserExists(username)
|
||||
if exists {
|
||||
// user is already there
|
||||
return "nntp user already exists", nil
|
||||
} else if err == nil {
|
||||
// now add the user
|
||||
err = self.daemon.database.AddNNTPLogin(username, passwd)
|
||||
// success adding?
|
||||
if err == nil {
|
||||
// yeh
|
||||
return "added nntp user", nil
|
||||
}
|
||||
// nah
|
||||
return "", err
|
||||
} else {
|
||||
// error happened
|
||||
return "", err
|
||||
}
|
||||
} else {
|
||||
return "invalid username or password format", nil
|
||||
}
|
||||
}
|
||||
} else if funcname == "feed.add" {
|
||||
return func(param map[string]interface{}) (interface{}, error) {
|
||||
host := extractParam(param, "host")
|
||||
port := extractParam(param, "port")
|
||||
name := extractParam(param, "name")
|
||||
if len(host) == 0 || len(port) == 0 || len(name) == 0 {
|
||||
// bad parameter
|
||||
return "", errors.New("please specific host, port and name")
|
||||
}
|
||||
// make new config
|
||||
conf := FeedConfig{
|
||||
policy: FeedPolicy{
|
||||
// default rules for default policy
|
||||
rules: map[string]string{"overchan.*": "0", "ctl": "1"},
|
||||
},
|
||||
Addr: host + ":" + port,
|
||||
Name: name,
|
||||
quarks: make(map[string]string),
|
||||
}
|
||||
err := self.daemon.addFeed(conf)
|
||||
if err == nil {
|
||||
return "feed added", err
|
||||
} else {
|
||||
return "", err
|
||||
}
|
||||
}
|
||||
} else if funcname == "feed.list" {
|
||||
return func(_ map[string]interface{}) (interface{}, error) {
|
||||
feeds := self.daemon.activeFeeds()
|
||||
return feeds, nil
|
||||
}
|
||||
} else if funcname == "feed.sync" {
|
||||
return func(_ map[string]interface{}) (interface{}, error) {
|
||||
go self.daemon.syncAllMessages()
|
||||
return "sync started", nil
|
||||
}
|
||||
} else if funcname == "feed.del" {
|
||||
return func(param map[string]interface{}) (interface{}, error) {
|
||||
name := extractParam(param, "name")
|
||||
self.daemon.removeFeed(name)
|
||||
return "okay", nil
|
||||
}
|
||||
} else if funcname == "store.expire" {
|
||||
return func(_ map[string]interface{}) (interface{}, error) {
|
||||
if self.daemon.expire == nil {
|
||||
// TODO: expire orphans?
|
||||
return "archive mode enabled, will not expire orphans", nil
|
||||
} else {
|
||||
go self.daemon.expire.ExpireOrphans()
|
||||
return "expiration started", nil
|
||||
}
|
||||
}
|
||||
} else if funcname == "frontend.posts" {
|
||||
// get all posts given parameters
|
||||
return func(param map[string]interface{}) (interface{}, error) {
|
||||
// by cidr
|
||||
cidr := extractParam(param, "cidr")
|
||||
// by encrypted ip
|
||||
encip := extractParam(param, "encip")
|
||||
|
||||
var err error
|
||||
var post_msgids []string
|
||||
if len(cidr) > 0 {
|
||||
var cnet *net.IPNet
|
||||
_, cnet, err = net.ParseCIDR(cidr)
|
||||
if err == nil {
|
||||
post_msgids, err = self.daemon.database.GetMessageIDByCIDR(cnet)
|
||||
}
|
||||
} else if len(encip) > 0 {
|
||||
post_msgids, err = self.daemon.database.GetMessageIDByEncryptedIP(encip)
|
||||
}
|
||||
return post_msgids, err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// handle an admin action
|
||||
func (self httpModUI) HandleAdminCommand(wr http.ResponseWriter, r *http.Request) {
|
||||
self.asAuthed("admin", func(url string) {
|
||||
action := strings.Split(url, "/admin/")[1]
|
||||
f := self.getAdminFunc(action)
|
||||
if f == nil {
|
||||
wr.WriteHeader(404)
|
||||
} else {
|
||||
var result interface{}
|
||||
var err error
|
||||
req := make(map[string]interface{})
|
||||
if r.Method == "POST" {
|
||||
dec := json.NewDecoder(r.Body)
|
||||
err = dec.Decode(&req)
|
||||
r.Body.Close()
|
||||
}
|
||||
if err == nil {
|
||||
result, err = f(req)
|
||||
}
|
||||
resp := make(map[string]interface{})
|
||||
if err == nil {
|
||||
resp["error"] = nil
|
||||
} else {
|
||||
resp["error"] = err.Error()
|
||||
}
|
||||
resp["result"] = result
|
||||
enc := json.NewEncoder(wr)
|
||||
enc.Encode(resp)
|
||||
}
|
||||
|
||||
}, wr, r)
|
||||
}
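// Hedged client-side sketch (not part of this commit) of calling the admin
// endpoint above. The mount path, port and newsgroup are assumptions, and a
// real request also needs the authenticated mod session cookie (and, depending
// on how the router is wired, the CSRF token) that asAuthed enforces.
func exampleAdminRequest() (map[string]interface{}, error) {
	// POST a JSON parameter object to one admin function by name
	body := strings.NewReader(`{"newsgroup": "overchan.example"}`)
	resp, err := http.Post("http://127.0.0.1:18000/mod/admin/frontend.add",
		"application/json", body)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	// the handler replies with {"error": ..., "result": ...} as encoded above
	result := make(map[string]interface{})
	err = json.NewDecoder(resp.Body).Decode(&result)
	return result, err
}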
|
||||
|
||||
func (self httpModUI) CheckPubkey(pubkey, scope string) (bool, error) {
|
||||
is_admin, err := self.daemon.database.CheckAdminPubkey(pubkey)
|
||||
if is_admin {
|
||||
// admin can do what they want
|
||||
return true, nil
|
||||
}
|
||||
if self.daemon.database.CheckModPubkeyGlobal(pubkey) {
|
||||
// this user is a global mod, can't do admin
|
||||
return scope != "admin", nil
|
||||
}
|
||||
// check for board specific mods
|
||||
if strings.Index(scope, "mod-") == 0 {
|
||||
group := scope[4:]
|
||||
if self.daemon.database.CheckModPubkeyCanModGroup(pubkey, group) {
|
||||
return true, nil
|
||||
}
|
||||
} else if scope == "login" {
|
||||
// check if a user can log in
|
||||
return self.daemon.database.CheckModPubkey(pubkey), nil
|
||||
}
|
||||
return false, err
|
||||
}
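// Scope strings understood by CheckPubkey, per the branches above:
//   "admin"            - admin keys only
//   "mod-<newsgroup>"  - keys allowed to moderate that board
//   "login"            - any registered mod key may log in
// Global mod keys satisfy every scope except "admin".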
|
||||
|
||||
func (self httpModUI) CheckKey(privkey, scope string) (bool, error) {
|
||||
privkey_bytes, err := hex.DecodeString(privkey)
|
||||
if err == nil {
|
||||
kp := nacl.LoadSignKey(privkey_bytes)
|
||||
if kp != nil {
|
||||
defer kp.Free()
|
||||
pubkey := hex.EncodeToString(kp.Public())
|
||||
return self.CheckPubkey(pubkey, scope)
|
||||
}
|
||||
}
|
||||
log.Println("invalid key format for key", privkey)
|
||||
return false, err
|
||||
}
|
||||
|
||||
func (self httpModUI) MessageChan() chan NNTPMessage {
|
||||
return self.modMessageChan
|
||||
}
|
||||
|
||||
func (self httpModUI) getSession(r *http.Request) *sessions.Session {
|
||||
s, _ := self.store.Get(r, "nntpchan-mod")
|
||||
return s
|
||||
}
|
||||
|
||||
// get the session's private key as bytes or nil if we don't have it
|
||||
func (self httpModUI) getSessionPrivkeyBytes(r *http.Request) []byte {
|
||||
s := self.getSession(r)
|
||||
k, ok := s.Values["privkey"]
|
||||
if ok {
|
||||
privkey_bytes, err := hex.DecodeString(k.(string))
|
||||
if err == nil {
|
||||
return privkey_bytes
|
||||
}
|
||||
log.Println("failed to decode private key bytes from session", err)
|
||||
} else {
|
||||
log.Println("failed to get private key from session, no private key in session?")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// returns true if the session is okay for a scope
|
||||
// otherwise redirect to login page
|
||||
func (self httpModUI) checkSession(r *http.Request, scope string) bool {
|
||||
s := self.getSession(r)
|
||||
k, ok := s.Values["privkey"]
|
||||
if ok {
|
||||
ok, err := self.CheckKey(k.(string), scope)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
return ok
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (self httpModUI) writeTemplate(wr http.ResponseWriter, r *http.Request, name string) {
|
||||
self.writeTemplateParam(wr, r, name, nil)
|
||||
}
|
||||
|
||||
func (self httpModUI) writeTemplateParam(wr http.ResponseWriter, r *http.Request, name string, param map[string]interface{}) {
|
||||
if param == nil {
|
||||
param = make(map[string]interface{})
|
||||
}
|
||||
param[csrf.TemplateTag] = csrf.TemplateField(r)
|
||||
param["prefix"] = self.prefix
|
||||
param["mod_prefix"] = self.mod_prefix
|
||||
io.WriteString(wr, template.renderTemplate(name, param))
|
||||
}
|
||||
|
||||
// do a function as authenticated
|
||||
// pass in the request path to the handler
|
||||
func (self httpModUI) asAuthed(scope string, handler func(string), wr http.ResponseWriter, r *http.Request) {
|
||||
if self.checkSession(r, scope) {
|
||||
handler(r.URL.Path)
|
||||
} else {
|
||||
wr.WriteHeader(403)
|
||||
}
|
||||
}
|
||||
|
||||
// do stuff to a certain message if with have it and are authed
|
||||
func (self httpModUI) asAuthedWithMessage(scope string, handler func(ArticleEntry, *http.Request) map[string]interface{}, wr http.ResponseWriter, req *http.Request) {
|
||||
self.asAuthed(scope, func(path string) {
|
||||
// get the long hash
|
||||
if strings.Count(path, "/") > 2 {
|
||||
// TODO: prefix detection
|
||||
longhash := strings.Split(path, "/")[3]
|
||||
// get the message id
|
||||
msg, err := self.daemon.database.GetMessageIDByHash(longhash)
|
||||
resp := make(map[string]interface{})
|
||||
if err == nil {
|
||||
group := msg.Newsgroup()
|
||||
if err == nil {
|
||||
if self.checkSession(req, "mod-"+group) {
|
||||
// we can moderate this group
|
||||
resp = handler(msg, req)
|
||||
} else {
|
||||
// no permission to moderate this group
|
||||
resp["error"] = fmt.Sprint("you don't have permission to moderate '%s'", group)
|
||||
}
|
||||
} else {
|
||||
resp["error"] = err.Error()
|
||||
}
|
||||
} else {
|
||||
resp["error"] = fmt.Sprint("don't have post %s, %s", longhash, err.Error())
|
||||
}
|
||||
enc := json.NewEncoder(wr)
|
||||
enc.Encode(resp)
|
||||
} else {
|
||||
wr.WriteHeader(404)
|
||||
}
|
||||
}, wr, req)
|
||||
}
|
||||
|
||||
func (self httpModUI) HandleAddPubkey(wr http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
func (self httpModUI) HandleDelPubkey(wr http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
func (self httpModUI) HandleUnbanAddress(wr http.ResponseWriter, r *http.Request) {
|
||||
self.asAuthed("ban", func(path string) {
|
||||
// extract the ip address
|
||||
// TODO: ip ranges and prefix detection
|
||||
if strings.Count(path, "/") > 2 {
|
||||
addr := strings.Split(path, "/")[3]
|
||||
resp := make(map[string]interface{})
|
||||
banned, err := self.daemon.database.CheckIPBanned(addr)
|
||||
if err != nil {
|
||||
resp["error"] = fmt.Sprintf("cannot tell if %s is banned: %s", addr, err.Error())
|
||||
} else if banned {
|
||||
// TODO: rangebans
|
||||
err = self.daemon.database.UnbanAddr(addr)
|
||||
if err == nil {
|
||||
resp["result"] = fmt.Sprintf("%s was unbanned", addr)
|
||||
} else {
|
||||
resp["error"] = err.Error()
|
||||
}
|
||||
} else {
|
||||
resp["error"] = fmt.Sprintf("%s was not banned", addr)
|
||||
}
|
||||
enc := json.NewEncoder(wr)
|
||||
enc.Encode(resp)
|
||||
} else {
|
||||
wr.WriteHeader(404)
|
||||
}
|
||||
}, wr, r)
|
||||
}
|
||||
|
||||
// handle ban logic
|
||||
func (self httpModUI) handleBanAddress(msg ArticleEntry, r *http.Request) map[string]interface{} {
|
||||
// get the article headers
|
||||
resp := make(map[string]interface{})
|
||||
msgid := msg.MessageID()
|
||||
hdr, err := self.daemon.database.GetHeadersForMessage(msgid)
|
||||
if hdr == nil {
|
||||
// we don't got it?!
|
||||
resp["error"] = fmt.Sprintf("could not load headers for %s: %s", msgid, err.Error())
|
||||
} else {
|
||||
// get the associated encrypted ip
|
||||
encip := hdr.Get("x-encrypted-ip", "")
|
||||
encip = strings.Trim(encip, "\t ")
|
||||
|
||||
if len(encip) == 0 {
|
||||
// no ip header detected
|
||||
resp["error"] = fmt.Sprintf("%s has no IP, ban Tor instead", msgid)
|
||||
} else {
|
||||
// get the ip address if we have it
|
||||
ip, err := self.daemon.database.GetIPAddress(encip)
|
||||
if len(ip) > 0 {
|
||||
// we have it
|
||||
// ban the address
|
||||
err = self.daemon.database.BanAddr(ip)
|
||||
// then we tell everyone about it
|
||||
var key string
|
||||
// TODO: we SHOULD have the key, but what if we do not?
|
||||
key, err = self.daemon.database.GetEncKey(encip)
|
||||
// create mod message
|
||||
// TODO: hardcoded ban period
|
||||
mm := ModMessage{overchanInetBan(encip, key, -1)}
|
||||
privkey_bytes := self.getSessionPrivkeyBytes(r)
|
||||
if privkey_bytes == nil {
|
||||
// this should not happen
|
||||
log.Println("failed to get privkey bytes from session")
|
||||
resp["error"] = "failed to get private key from session. wtf?"
|
||||
} else {
|
||||
// wrap and sign
|
||||
nntp := wrapModMessage(mm)
|
||||
nntp, err = signArticle(nntp, privkey_bytes)
|
||||
if err == nil {
|
||||
// federate
|
||||
self.modMessageChan <- nntp
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// we don't have it
|
||||
// ban the encrypted version
|
||||
err = self.daemon.database.BanEncAddr(encip)
|
||||
}
|
||||
if err == nil {
|
||||
result_msg := fmt.Sprintf("We banned %s", encip)
|
||||
if len(ip) > 0 {
|
||||
result_msg += fmt.Sprintf(" (%s)", ip)
|
||||
}
|
||||
resp["banned"] = result_msg
|
||||
} else {
|
||||
resp["error"] = err.Error()
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
return resp
|
||||
}
|
||||
|
||||
func (self httpModUI) handleDeletePost(msg ArticleEntry, r *http.Request) map[string]interface{} {
|
||||
var mm ModMessage
|
||||
resp := make(map[string]interface{})
|
||||
msgid := msg.MessageID()
|
||||
|
||||
mm = append(mm, overchanDelete(msgid))
|
||||
delmsgs := []string{}
|
||||
// get headers
|
||||
hdr, _ := self.daemon.database.GetHeadersForMessage(msgid)
|
||||
if hdr != nil {
|
||||
ref := hdr.Get("References", hdr.Get("Reference", ""))
|
||||
ref = strings.Trim(ref, "\t ")
|
||||
// is it a root post?
|
||||
if ref == "" {
|
||||
// load replies
|
||||
replies := self.daemon.database.GetThreadReplies(msgid, 0, 0)
|
||||
if replies != nil {
|
||||
for _, repl := range replies {
|
||||
// append mod line to mod message for reply
|
||||
mm = append(mm, overchanDelete(repl))
|
||||
// add to delete queue
|
||||
delmsgs = append(delmsgs, repl)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
delmsgs = append(delmsgs, msgid)
|
||||
// append mod line to mod message
|
||||
resp["deleted"] = delmsgs
|
||||
// only regen threads when we delete a non-root post
|
||||
|
||||
privkey_bytes := self.getSessionPrivkeyBytes(r)
|
||||
if privkey_bytes == nil {
|
||||
// crap this should never happen
|
||||
log.Println("failed to get private keys from session, not federating")
|
||||
} else {
|
||||
// wrap and sign mod message
|
||||
nntp, err := signArticle(wrapModMessage(mm), privkey_bytes)
|
||||
if err == nil {
|
||||
// send it off to federate
|
||||
self.modMessageChan <- nntp
|
||||
} else {
|
||||
resp["error"] = fmt.Sprintf("signing error: %s", err.Error())
|
||||
}
|
||||
}
|
||||
return resp
|
||||
}
|
||||
|
||||
// ban the address of a poster
|
||||
func (self httpModUI) HandleBanAddress(wr http.ResponseWriter, r *http.Request) {
|
||||
self.asAuthedWithMessage("ban", self.handleBanAddress, wr, r)
|
||||
}
|
||||
|
||||
// delete a post
|
||||
func (self httpModUI) HandleDeletePost(wr http.ResponseWriter, r *http.Request) {
|
||||
self.asAuthedWithMessage("login", self.handleDeletePost, wr, r)
|
||||
}
|
||||
|
||||
func (self httpModUI) HandleLogin(wr http.ResponseWriter, r *http.Request) {
|
||||
privkey := r.FormValue("privkey")
|
||||
msg := "failed login: "
|
||||
if len(privkey) == 0 {
|
||||
msg += "no key"
|
||||
} else {
|
||||
ok, err := self.CheckKey(privkey, "login")
|
||||
if err != nil {
|
||||
msg += fmt.Sprintf("%s", err)
|
||||
} else if ok {
|
||||
msg = "login okay"
|
||||
sess := self.getSession(r)
|
||||
sess.Values["privkey"] = privkey
|
||||
sess.Save(r, wr)
|
||||
} else {
|
||||
msg += "invalid key"
|
||||
}
|
||||
}
|
||||
self.writeTemplateParam(wr, r, "modlogin_result.mustache", map[string]interface{}{"message": msg, csrf.TemplateTag: csrf.TemplateField(r)})
|
||||
}
|
||||
|
||||
func (self httpModUI) HandleKeyGen(wr http.ResponseWriter, r *http.Request) {
|
||||
pk, sk := newSignKeypair()
|
||||
tripcode := makeTripcode(pk)
|
||||
self.writeTemplateParam(wr, r, "keygen.mustache", map[string]interface{}{"public": pk, "secret": sk, "tripcode": tripcode})
|
||||
}
|
||||
|
||||
func (self httpModUI) ServeModPage(wr http.ResponseWriter, r *http.Request) {
|
||||
if self.checkSession(r, "login") {
|
||||
wr.Header().Set("X-CSRF-Token", csrf.Token(r))
|
||||
// we are logged in
|
||||
url := r.URL.String()
|
||||
if strings.HasSuffix(url, "/mod/feeds") {
|
||||
// serve feeds page
|
||||
self.writeTemplate(wr, r, "modfeed.mustache")
|
||||
} else {
|
||||
// serve mod page
|
||||
self.writeTemplate(wr, r, "modpage.mustache")
|
||||
}
|
||||
} else {
|
||||
// we are not logged in
|
||||
// serve login page
|
||||
self.writeTemplate(wr, r, "modlogin.mustache")
|
||||
}
|
||||
if r.Body != nil {
|
||||
r.Body.Close()
|
||||
}
|
||||
}
|
320
contrib/backends/srndv2/src/srnd/model.go
Normal file
@ -0,0 +1,320 @@
|
||||
//
|
||||
// model.go
|
||||
// template model interfaces
|
||||
//
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"time"
|
||||
)
|
||||
|
||||
// base model type
|
||||
type BaseModel interface {
|
||||
|
||||
// site url prefix
|
||||
Prefix() string
|
||||
|
||||
// implements json.Marshaler
|
||||
MarshalJSON() ([]byte, error)
|
||||
|
||||
// to json string
|
||||
JSON() string
|
||||
}
|
||||
|
||||
type ThumbInfo struct {
|
||||
Width int
|
||||
Height int
|
||||
}
|
||||
|
||||
// for attachments
|
||||
type AttachmentModel interface {
|
||||
BaseModel
|
||||
|
||||
Thumbnail() string
|
||||
Source() string
|
||||
Filename() string
|
||||
Hash() string
|
||||
ThumbInfo() ThumbInfo
|
||||
}
|
||||
|
||||
// for individual posts
|
||||
type PostModel interface {
|
||||
BaseModel
|
||||
|
||||
CSSClass() string
|
||||
|
||||
MessageID() string
|
||||
PostHash() string
|
||||
ShortHash() string
|
||||
PostURL() string
|
||||
Frontend() string
|
||||
Subject() string
|
||||
Name() string
|
||||
Date() string
|
||||
OP() bool
|
||||
Attachments() []AttachmentModel
|
||||
NumAttachments() int
|
||||
Board() string
|
||||
Sage() bool
|
||||
Pubkey() string
|
||||
Reference() string
|
||||
ReferenceHash() string
|
||||
|
||||
RenderBody() string
|
||||
RenderPost() string
|
||||
RenderBodyPre() string
|
||||
|
||||
// replaces Truncate().RenderBody()
|
||||
RenderTruncatedBody() string
|
||||
|
||||
// replaces Truncate().RenderPost()
|
||||
RenderTruncatedPost() string
|
||||
|
||||
// returns true if this post was truncated
|
||||
IsTruncated() bool
|
||||
|
||||
IsI2P() bool
|
||||
IsTor() bool
|
||||
IsClearnet() bool
|
||||
|
||||
// deprecated
|
||||
// truncate body to a certain size
|
||||
// return copy
|
||||
Truncate() PostModel
|
||||
|
||||
// what is our position in this thread?
|
||||
// 0 for OP, nonzero for reply
|
||||
Index() int
|
||||
// set post index
|
||||
SetIndex(idx int)
|
||||
|
||||
// nntp id number
|
||||
NNTPID() int
|
||||
}
|
||||
|
||||
// interface for models that have a navbar
|
||||
type NavbarModel interface {
|
||||
Navbar() string
|
||||
}
|
||||
|
||||
// for threads
|
||||
type ThreadModel interface {
|
||||
BaseModel
|
||||
NavbarModel
|
||||
|
||||
SetAllowFiles(allow bool)
|
||||
AllowFiles() bool
|
||||
OP() PostModel
|
||||
Replies() []PostModel
|
||||
Board() string
|
||||
BoardURL() string
|
||||
// return a short version of the thread
|
||||
// does not include all replies
|
||||
Truncate() ThreadModel
|
||||
|
||||
// number of posts in this thread
|
||||
PostCount() int
|
||||
// number of images in this thread
|
||||
ImageCount() int
|
||||
// number of posts excluded during truncation
|
||||
// returns 0 if not truncated
|
||||
MissingPostCount() int
|
||||
// number of images excluded during truncation
|
||||
// returns 0 if not truncated
|
||||
MissingImageCount() int
|
||||
// returns true if this thread has truncated replies
|
||||
HasOmittedReplies() bool
|
||||
// returns true if this thread has truncated images
|
||||
HasOmittedImages() bool
|
||||
|
||||
// update the thread's replies
|
||||
Update(db Database)
|
||||
// is this thread dirty and needing updating?
|
||||
IsDirty() bool
|
||||
// mark thread as dirty
|
||||
MarkDirty()
|
||||
}
|
||||
|
||||
// board interface
|
||||
// for 1 page on a board
|
||||
type BoardModel interface {
|
||||
BaseModel
|
||||
NavbarModel
|
||||
|
||||
Frontend() string
|
||||
Name() string
|
||||
Threads() []ThreadModel
|
||||
|
||||
AllowFiles() bool
|
||||
SetAllowFiles(files bool)
|
||||
|
||||
// JUST update this thread
|
||||
// if we don't have it already loaded do nothing
|
||||
UpdateThread(message_id string, db Database)
|
||||
|
||||
// get a thread model with this id
|
||||
// returns nil if we don't have it
|
||||
GetThread(message_id string) ThreadModel
|
||||
|
||||
// put a thread back after updating externally
|
||||
PutThread(th ThreadModel)
|
||||
|
||||
// deprecated, use GetThread
|
||||
HasThread(message_id string) bool
|
||||
|
||||
// update the board's contents
|
||||
Update(db Database)
|
||||
}
|
||||
|
||||
type CatalogModel interface {
|
||||
BaseModel
|
||||
NavbarModel
|
||||
|
||||
Frontend() string
|
||||
Name() string
|
||||
Threads() []CatalogItemModel
|
||||
}
|
||||
|
||||
type CatalogItemModel interface {
|
||||
OP() PostModel
|
||||
ReplyCount() string
|
||||
Page() string
|
||||
}
|
||||
|
||||
type LinkModel interface {
|
||||
Text() string
|
||||
LinkURL() string
|
||||
}
|
||||
|
||||
// newsgroup model
|
||||
// every page on a newsgroup
|
||||
type GroupModel []BoardModel
|
||||
|
||||
// TODO: optimize using 1 query?
|
||||
// update every page
|
||||
func (self *GroupModel) UpdateAll(db Database) {
|
||||
m := *self
|
||||
for _, page := range m {
|
||||
page.Update(db)
|
||||
}
|
||||
}
|
||||
|
||||
// update a certain page
|
||||
// does nothing if out of bounds
|
||||
func (self *GroupModel) Update(page int, db Database) {
|
||||
m := *self
|
||||
if len(m) > page {
|
||||
m[page].Update(db)
|
||||
}
|
||||
}
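// Minimal usage sketch (not in this commit) of the two update paths above,
// assuming db is an open Database handle.
func exampleGroupModelUpdate(gm GroupModel, db Database) {
	// refresh just the front page of the board
	gm.Update(0, db)
	// or refresh every cached page
	gm.UpdateAll(db)
}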
|
||||
|
||||
type boardPageRow struct {
|
||||
Board string
|
||||
Hour int64
|
||||
Day int64
|
||||
All int64
|
||||
}
|
||||
|
||||
type boardPageRows []boardPageRow
|
||||
|
||||
func (self boardPageRows) Len() int {
|
||||
return len(self)
|
||||
}
|
||||
|
||||
func (self boardPageRows) Less(i, j int) bool {
|
||||
i_val := self[i]
|
||||
j_val := self[j]
|
||||
return (i_val.Day + i_val.Hour*24) > (j_val.Day + j_val.Hour*24)
|
||||
}
|
||||
|
||||
func (self boardPageRows) Swap(i, j int) {
|
||||
self[i], self[j] = self[j], self[i]
|
||||
}
|
||||
|
||||
type postsGraphRow struct {
|
||||
day time.Time
|
||||
Num int64
|
||||
mag int64
|
||||
}
|
||||
|
||||
func (p *postsGraphRow) GraphRune(r string) (s string) {
|
||||
var num int64
|
||||
if p.mag > 0 {
|
||||
num = p.Num / p.mag
|
||||
} else {
|
||||
num = p.Num
|
||||
}
|
||||
for num > 0 {
|
||||
s += r
|
||||
num--
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (p postsGraphRow) Date() (s string) {
|
||||
return p.day.Format(i18nProvider.Format("month_date_format"))
|
||||
}
|
||||
|
||||
func (p postsGraphRow) Day() (s string) {
|
||||
return p.day.Format(i18nProvider.Format("day_date_format"))
|
||||
}
|
||||
|
||||
func (p postsGraphRow) RegularGraph() (s string) {
|
||||
return p.GraphRune("=")
|
||||
}
|
||||
|
||||
// :0========3 overcock :3 graph of data
|
||||
func (p postsGraphRow) OvercockGraph() (s string) {
|
||||
var num int64
|
||||
if p.mag > 0 {
|
||||
num = p.Num / p.mag
|
||||
} else {
|
||||
num = p.Num
|
||||
}
|
||||
if num > 0 {
|
||||
s = ":0"
|
||||
num -= 1
|
||||
for num > 0 {
|
||||
s += "="
|
||||
num--
|
||||
}
|
||||
s += "3"
|
||||
} else {
|
||||
s = ":3"
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
type postsGraph []postsGraphRow
|
||||
|
||||
func (self postsGraph) Len() int {
|
||||
return len(self)
|
||||
}
|
||||
|
||||
func (self postsGraph) Less(i, j int) bool {
|
||||
i_val := self[i]
|
||||
j_val := self[j]
|
||||
return i_val.day.Unix() > j_val.day.Unix()
|
||||
}
|
||||
|
||||
func (self postsGraph) Swap(i, j int) {
|
||||
self[i], self[j] = self[j], self[i]
|
||||
}
|
||||
|
||||
func (self postsGraph) Scale() (graph postsGraph) {
|
||||
// find max
|
||||
max := int64(0)
|
||||
for _, p := range self {
|
||||
if p.Num > max {
|
||||
max = p.Num
|
||||
}
|
||||
}
|
||||
mag := max / 25
|
||||
for _, p := range self {
|
||||
p.mag = mag
|
||||
graph = append(graph, p)
|
||||
}
|
||||
return
|
||||
}
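// Illustrative use of Scale with made-up counts: a peak of 100 posts gives a
// magnitude of 100/25 = 4, so the busiest day renders as about 25 '=' runes
// via RegularGraph while quieter days shrink proportionally.
func exampleScaleGraph() postsGraph {
	g := postsGraph{
		{day: time.Now().AddDate(0, 0, -1), Num: 100},
		{day: time.Now(), Num: 40},
	}
	// after Scale every row carries mag = 4
	return g.Scale()
}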
|
||||
|
||||
type overviewModel []PostModel
|
706
contrib/backends/srndv2/src/srnd/model_mem.go
Normal file
@ -0,0 +1,706 @@
|
||||
//
|
||||
// model_mem.go
|
||||
//
|
||||
// models held in memory
|
||||
//
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type catalogModel struct {
|
||||
frontend string
|
||||
prefix string
|
||||
board string
|
||||
threads []CatalogItemModel
|
||||
}
|
||||
|
||||
type catalogItemModel struct {
|
||||
page int
|
||||
replycount int
|
||||
op PostModel
|
||||
}
|
||||
|
||||
func (self *catalogModel) Navbar() string {
|
||||
param := make(map[string]interface{})
|
||||
param["name"] = fmt.Sprintf("Catalog for %s", self.board)
|
||||
param["frontend"] = self.frontend
|
||||
var links []LinkModel
|
||||
links = append(links, linkModel{
|
||||
link: fmt.Sprintf("%sb/%s/", self.prefix, self.board),
|
||||
text: "Board index",
|
||||
})
|
||||
param["prefix"] = self.prefix
|
||||
param["links"] = links
|
||||
return template.renderTemplate("navbar.mustache", param)
|
||||
}
|
||||
|
||||
func (self *catalogModel) MarshalJSON() (b []byte, err error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (self *catalogModel) JSON() string {
|
||||
return "null"
|
||||
}
|
||||
|
||||
func (self *catalogModel) Frontend() string {
|
||||
return self.frontend
|
||||
}
|
||||
|
||||
func (self *catalogModel) Prefix() string {
|
||||
return self.prefix
|
||||
}
|
||||
|
||||
func (self *catalogModel) Name() string {
|
||||
return self.board
|
||||
}
|
||||
|
||||
func (self *catalogModel) Threads() []CatalogItemModel {
|
||||
return self.threads
|
||||
}
|
||||
|
||||
func (self *catalogItemModel) OP() PostModel {
|
||||
return self.op
|
||||
}
|
||||
|
||||
func (self *catalogItemModel) Page() string {
|
||||
return strconv.Itoa(self.page)
|
||||
}
|
||||
|
||||
func (self *catalogItemModel) ReplyCount() string {
|
||||
return strconv.Itoa(self.replycount)
|
||||
}
|
||||
|
||||
type boardModel struct {
|
||||
allowFiles bool
|
||||
frontend string
|
||||
prefix string
|
||||
board string
|
||||
page int
|
||||
pages int
|
||||
threads []ThreadModel
|
||||
}
|
||||
|
||||
func (self *boardModel) MarshalJSON() (b []byte, err error) {
|
||||
j := make(map[string]interface{})
|
||||
j["posts"] = self.threads
|
||||
j["page"] = self.page
|
||||
j["name"] = self.board
|
||||
return json.Marshal(j)
|
||||
}
|
||||
|
||||
func (self *boardModel) JSON() string {
|
||||
d, err := self.MarshalJSON()
|
||||
if err == nil {
|
||||
return string(d)
|
||||
} else {
|
||||
return "null"
|
||||
}
|
||||
}
|
||||
|
||||
func (self *boardModel) SetAllowFiles(allow bool) {
|
||||
self.allowFiles = allow
|
||||
}
|
||||
|
||||
func (self *boardModel) AllowFiles() bool {
|
||||
return self.allowFiles
|
||||
}
|
||||
|
||||
func (self *boardModel) PutThread(th ThreadModel) {
|
||||
idx := -1
|
||||
for i, t := range self.threads {
|
||||
if th.OP().MessageID() == t.OP().MessageID() {
|
||||
idx = i
|
||||
break
|
||||
}
|
||||
}
|
||||
if idx != -1 {
|
||||
self.threads[idx] = th
|
||||
}
|
||||
}
|
||||
|
||||
func (self *boardModel) Navbar() string {
|
||||
param := make(map[string]interface{})
|
||||
param["name"] = fmt.Sprintf("page %d for %s", self.page, self.board)
|
||||
param["frontend"] = self.frontend
|
||||
param["prefix"] = self.prefix
|
||||
param["links"] = self.PageList()
|
||||
return template.renderTemplate("navbar.mustache", param)
|
||||
}
|
||||
|
||||
func (self *boardModel) Board() string {
|
||||
return self.board
|
||||
}
|
||||
|
||||
func (self *boardModel) PageList() []LinkModel {
|
||||
var links []LinkModel
|
||||
for i := 0; i < self.pages; i++ {
|
||||
board := fmt.Sprintf("%sb/%s/%d/", self.prefix, self.board, i)
|
||||
if i == 0 {
|
||||
board = fmt.Sprintf("%sb/%s/", self.prefix, self.board)
|
||||
}
|
||||
links = append(links, linkModel{
|
||||
link: board,
|
||||
text: fmt.Sprintf("[ %d ]", i),
|
||||
})
|
||||
}
|
||||
return links
|
||||
}
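// Hedged example of the links PageList produces; the board name and prefix
// are made up.
func examplePageList() []LinkModel {
	b := boardModel{prefix: "/", board: "overchan.example", pages: 3}
	// yields "[ 0 ]" -> /b/overchan.example/, "[ 1 ]" -> /b/overchan.example/1/,
	// "[ 2 ]" -> /b/overchan.example/2/
	return b.PageList()
}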
|
||||
|
||||
func (self *boardModel) UpdateThread(messageID string, db Database) {
|
||||
|
||||
for _, th := range self.threads {
|
||||
if th.OP().MessageID() == messageID {
|
||||
// found it
|
||||
th.Update(db)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *boardModel) GetThread(messageID string) ThreadModel {
|
||||
for _, th := range self.threads {
|
||||
if th.OP().MessageID() == messageID {
|
||||
return th
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (self *boardModel) HasThread(messageID string) bool {
|
||||
return self.GetThread(messageID) != nil
|
||||
}
|
||||
|
||||
func (self *boardModel) Frontend() string {
|
||||
return self.frontend
|
||||
}
|
||||
|
||||
func (self *boardModel) Prefix() string {
|
||||
return self.prefix
|
||||
}
|
||||
|
||||
func (self *boardModel) Name() string {
|
||||
return self.board
|
||||
}
|
||||
|
||||
func (self *boardModel) Threads() []ThreadModel {
|
||||
return self.threads
|
||||
}
|
||||
|
||||
// refetch all threads on this page
|
||||
func (self *boardModel) Update(db Database) {
|
||||
// ignore error
|
||||
perpage, _ := db.GetThreadsPerPage(self.board)
|
||||
// refetch all on this page
|
||||
model := db.GetGroupForPage(self.prefix, self.frontend, self.board, self.page, int(perpage))
|
||||
for _, th := range model.Threads() {
|
||||
// XXX: do we really need to update it again?
|
||||
th.Update(db)
|
||||
}
|
||||
self.threads = model.Threads()
|
||||
}
|
||||
|
||||
type post struct {
|
||||
truncated bool
|
||||
prefix string
|
||||
board string
|
||||
PostName string
|
||||
PostSubject string
|
||||
PostMessage string
|
||||
message_rendered string
|
||||
Message_id string
|
||||
MessagePath string
|
||||
addr string
|
||||
Newsgroup string
|
||||
op bool
|
||||
Posted int64
|
||||
Parent string
|
||||
sage bool
|
||||
Key string
|
||||
Files []AttachmentModel
|
||||
HashLong string
|
||||
HashShort string
|
||||
URL string
|
||||
Tripcode string
|
||||
BodyMarkup string
|
||||
PostMarkup string
|
||||
PostPrefix string
|
||||
index int
|
||||
Type string
|
||||
nntp_id int
|
||||
}
|
||||
|
||||
func (self *post) NNTPID() int {
|
||||
return self.nntp_id
|
||||
}
|
||||
|
||||
func (self *post) Index() int {
|
||||
return self.index + 1
|
||||
}
|
||||
|
||||
func (self *post) NumImages() int {
|
||||
return len(self.Files)
|
||||
}
|
||||
|
||||
func (self *post) RepresentativeThumb() string {
|
||||
if len(self.Attachments()) > 0 {
|
||||
return self.Attachments()[0].Thumbnail()
|
||||
}
|
||||
//TODO don't hard-code this
|
||||
return self.prefix + "static/placeholder.png"
|
||||
}
|
||||
|
||||
func (self *post) MarshalJSON() (b []byte, err error) {
|
||||
// computed on the fly
|
||||
// TODO: don't do this
|
||||
self.HashLong = self.PostHash()
|
||||
self.HashShort = self.ShortHash()
|
||||
if len(self.Key) > 0 {
|
||||
self.Tripcode = makeTripcode(self.Key)
|
||||
}
|
||||
if len(self.PostMarkup) > 0 {
|
||||
self.PostMarkup = self.RenderPost()
|
||||
}
|
||||
self.PostPrefix = self.Prefix()
|
||||
// for liveui
|
||||
self.Type = "Post"
|
||||
self.Newsgroup = self.board
|
||||
self.URL = self.PostURL()
|
||||
return json.Marshal(*self)
|
||||
}
|
||||
|
||||
func (self *post) JSON() string {
|
||||
d, err := self.MarshalJSON()
|
||||
if err == nil {
|
||||
return string(d)
|
||||
} else {
|
||||
return "null"
|
||||
}
|
||||
}
|
||||
|
||||
type attachment struct {
|
||||
prefix string
|
||||
Path string
|
||||
Name string
|
||||
ThumbWidth int
|
||||
ThumbHeight int
|
||||
}
|
||||
|
||||
func (self *attachment) MarshalJSON() (b []byte, err error) {
|
||||
return json.Marshal(*self)
|
||||
}
|
||||
|
||||
func (self *attachment) JSON() string {
|
||||
d, err := self.MarshalJSON()
|
||||
if err == nil {
|
||||
return string(d)
|
||||
} else {
|
||||
return "null"
|
||||
}
|
||||
}
|
||||
|
||||
func (self *attachment) Hash() string {
|
||||
return strings.Split(self.Path, ".")[0]
|
||||
}
|
||||
|
||||
func (self *attachment) ThumbInfo() ThumbInfo {
|
||||
return ThumbInfo{
|
||||
Width: self.ThumbWidth,
|
||||
Height: self.ThumbHeight,
|
||||
}
|
||||
}
|
||||
|
||||
func (self *attachment) Prefix() string {
|
||||
return self.prefix
|
||||
}
|
||||
|
||||
func (self *attachment) Thumbnail() string {
|
||||
return self.prefix + "thm/" + self.Path + ".jpg"
|
||||
}
|
||||
|
||||
func (self *attachment) Source() string {
|
||||
return self.prefix + "img/" + self.Path
|
||||
}
|
||||
|
||||
func (self *attachment) Filename() string {
|
||||
return self.Name
|
||||
}
|
||||
|
||||
func PostModelFromMessage(parent, prefix string, nntp NNTPMessage) PostModel {
|
||||
p := new(post)
|
||||
p.PostName = nntp.Name()
|
||||
p.PostSubject = nntp.Subject()
|
||||
p.PostMessage = nntp.Message()
|
||||
p.MessagePath = nntp.Path()
|
||||
p.Message_id = nntp.MessageID()
|
||||
p.board = nntp.Newsgroup()
|
||||
p.Posted = nntp.Posted()
|
||||
p.op = nntp.OP()
|
||||
p.prefix = prefix
|
||||
p.Parent = parent
|
||||
p.addr = nntp.Addr()
|
||||
p.sage = nntp.Sage()
|
||||
p.Key = nntp.Pubkey()
|
||||
for _, att := range nntp.Attachments() {
|
||||
p.Files = append(p.Files, att.ToModel(prefix))
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
func (self *post) ReferenceHash() string {
|
||||
ref := self.Reference()
|
||||
if len(ref) > 0 {
|
||||
return HashMessageID(self.Reference())
|
||||
}
|
||||
return self.PostHash()
|
||||
}
|
||||
func (self *post) NumAttachments() int {
|
||||
return len(self.Files)
|
||||
}
|
||||
|
||||
func (self *post) RenderTruncatedBody() string {
|
||||
return self.Truncate().RenderBody()
|
||||
}
|
||||
|
||||
func (self *post) Reference() string {
|
||||
return self.Parent
|
||||
}
|
||||
|
||||
func (self *post) ShortHash() string {
|
||||
return ShortHashMessageID(self.MessageID())
|
||||
}
|
||||
|
||||
func (self *post) Pubkey() string {
|
||||
if len(self.Key) > 0 {
|
||||
return fmt.Sprintf("<label title=\"%s\">%s</label>", self.Key, makeTripcode(self.Key))
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (self *post) Sage() bool {
|
||||
return self.sage
|
||||
}
|
||||
|
||||
func (self *post) CSSClass() string {
|
||||
if self.OP() {
|
||||
return "post op"
|
||||
} else {
|
||||
return "post reply"
|
||||
}
|
||||
}
|
||||
|
||||
func (self *post) OP() bool {
|
||||
return self.Parent == self.Message_id || len(self.Parent) == 0
|
||||
}
|
||||
|
||||
func (self *post) Date() string {
|
||||
return time.Unix(self.Posted, 0).Format(i18nProvider.Format("full_date_format"))
|
||||
}
|
||||
|
||||
func (self *post) DateRFC() string {
|
||||
return time.Unix(self.Posted, 0).Format(time.RFC3339)
|
||||
}
|
||||
|
||||
func (self *post) TemplateDir() string {
|
||||
return filepath.Join("contrib", "templates", "default")
|
||||
}
|
||||
|
||||
func (self *post) MessageID() string {
|
||||
return self.Message_id
|
||||
}
|
||||
|
||||
func (self *post) Frontend() string {
|
||||
idx := strings.LastIndex(self.MessagePath, "!")
|
||||
if idx == -1 {
|
||||
return self.MessagePath
|
||||
}
|
||||
return self.MessagePath[idx+1:]
|
||||
}
|
||||
|
||||
func (self *post) Board() string {
|
||||
return self.board
|
||||
}
|
||||
|
||||
func (self *post) PostHash() string {
|
||||
return HashMessageID(self.Message_id)
|
||||
}
|
||||
|
||||
func (self *post) Name() string {
|
||||
return self.PostName
|
||||
}
|
||||
|
||||
func (self *post) Subject() string {
|
||||
return self.PostSubject
|
||||
}
|
||||
|
||||
func (self *post) Attachments() []AttachmentModel {
|
||||
return self.Files
|
||||
}
|
||||
|
||||
func (self *post) PostURL() string {
|
||||
return fmt.Sprintf("%st/%s/#%s", self.Prefix(), HashMessageID(self.Parent), self.PostHash())
|
||||
}
|
||||
|
||||
func (self *post) Prefix() string {
|
||||
if len(self.prefix) == 0 {
|
||||
// fall back if not set
|
||||
return "/"
|
||||
}
|
||||
return self.prefix
|
||||
}
|
||||
|
||||
func (self *post) IsClearnet() bool {
|
||||
return len(self.addr) == encAddrLen()
|
||||
}
|
||||
|
||||
func (self *post) IsI2P() bool {
|
||||
return len(self.addr) == i2pDestHashLen()
|
||||
}
|
||||
|
||||
func (self *post) IsTor() bool {
|
||||
return len(self.addr) == 0
|
||||
}
|
||||
|
||||
func (self *post) SetIndex(idx int) {
|
||||
self.index = idx
|
||||
}
|
||||
|
||||
func (self *post) RenderPost() string {
|
||||
param := make(map[string]interface{})
|
||||
param["post"] = self
|
||||
return template.renderTemplate("post.mustache", param)
|
||||
}
|
||||
|
||||
func (self *post) RenderTruncatedPost() string {
|
||||
return self.Truncate().RenderPost()
|
||||
}
|
||||
|
||||
func (self *post) IsTruncated() bool {
|
||||
return self.truncated
|
||||
}
|
||||
|
||||
func (self *post) Truncate() PostModel {
|
||||
message := self.PostMessage
|
||||
subject := self.PostSubject
|
||||
name := self.PostName
|
||||
if len(message) > 500 {
|
||||
message = message[:500] + "\n...\n[Post Truncated]\n"
|
||||
}
|
||||
if len(subject) > 100 {
|
||||
subject = subject[:100] + "..."
|
||||
}
|
||||
if len(name) > 100 {
|
||||
name = name[:100] + "..."
|
||||
}
|
||||
|
||||
return &post{
|
||||
truncated: true,
|
||||
prefix: self.prefix,
|
||||
board: self.board,
|
||||
PostName: name,
|
||||
PostSubject: subject,
|
||||
PostMessage: message,
|
||||
Message_id: self.Message_id,
|
||||
MessagePath: self.MessagePath,
|
||||
addr: self.addr,
|
||||
op: self.op,
|
||||
Posted: self.Posted,
|
||||
Parent: self.Parent,
|
||||
sage: self.sage,
|
||||
Key: self.Key,
|
||||
// TODO: copy?
|
||||
Files: self.Files,
|
||||
}
|
||||
}
|
||||
|
||||
func (self *post) RenderShortBody() string {
|
||||
return MEMEPosting(self.PostMessage, self.Prefix())
|
||||
}
|
||||
|
||||
func (self *post) RenderBodyPre() string {
|
||||
return self.PostMessage
|
||||
}
|
||||
|
||||
func (self *post) RenderBody() string {
|
||||
// :^)
|
||||
if len(self.message_rendered) == 0 {
|
||||
self.message_rendered = MEMEPosting(self.PostMessage, self.Prefix())
|
||||
}
|
||||
return self.message_rendered
|
||||
}
|
||||
|
||||
type thread struct {
|
||||
allowFiles bool
|
||||
prefix string
|
||||
links []LinkModel
|
||||
Posts []PostModel
|
||||
dirty bool
|
||||
truncatedPostCount int
|
||||
truncatedImageCount int
|
||||
}
|
||||
|
||||
func (self *thread) MarshalJSON() (b []byte, err error) {
|
||||
posts := []PostModel{self.OP()}
|
||||
posts = append(posts, self.Replies()...)
|
||||
return json.Marshal(posts)
|
||||
}
|
||||
|
||||
func (self *thread) JSON() string {
|
||||
d, err := self.MarshalJSON()
|
||||
if err == nil {
|
||||
return string(d)
|
||||
} else {
|
||||
return "null"
|
||||
}
|
||||
}
|
||||
|
||||
func (self *thread) IsDirty() bool {
|
||||
return self.dirty
|
||||
}
|
||||
|
||||
func (self *thread) MarkDirty() {
|
||||
self.dirty = true
|
||||
}
|
||||
|
||||
func (self *thread) Prefix() string {
|
||||
return self.prefix
|
||||
}
|
||||
|
||||
func (self *thread) Navbar() string {
|
||||
param := make(map[string]interface{})
|
||||
param["name"] = fmt.Sprintf("Thread %s", self.Posts[0].ShortHash())
|
||||
param["frontend"] = self.Board()
|
||||
param["links"] = self.links
|
||||
param["prefix"] = self.prefix
|
||||
return template.renderTemplate("navbar.mustache", param)
|
||||
}
|
||||
|
||||
func (self *thread) Board() string {
|
||||
return self.Posts[0].Board()
|
||||
}
|
||||
|
||||
func (self *thread) BoardURL() string {
|
||||
return fmt.Sprintf("%sb/%s/", self.Prefix(), self.Board())
|
||||
}
|
||||
|
||||
func (self *thread) PostCount() int {
|
||||
return len(self.Posts)
|
||||
}
|
||||
|
||||
func (self *thread) ImageCount() (count int) {
|
||||
for _, p := range self.Posts {
|
||||
count += p.NumAttachments()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// get our default template dir
|
||||
func defaultTemplateDir() string {
|
||||
return filepath.Join("contrib", "templates", "default")
|
||||
}
|
||||
|
||||
func createThreadModel(posts ...PostModel) ThreadModel {
|
||||
op := posts[0]
|
||||
group := op.Board()
|
||||
prefix := op.Prefix()
|
||||
return &thread{
|
||||
dirty: true,
|
||||
prefix: prefix,
|
||||
Posts: posts,
|
||||
links: []LinkModel{
|
||||
linkModel{
|
||||
text: group,
|
||||
link: fmt.Sprintf("%sb/%s/", prefix, group),
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (self *thread) OP() PostModel {
|
||||
return self.Posts[0]
|
||||
}
|
||||
|
||||
func (self *thread) Replies() []PostModel {
|
||||
if len(self.Posts) > 1 {
|
||||
var replies []PostModel
|
||||
// inject post index
|
||||
for idx, post := range self.Posts[1:] {
|
||||
if post != nil {
|
||||
post.SetIndex(idx + 1)
|
||||
replies = append(replies, post)
|
||||
}
|
||||
}
|
||||
return replies
|
||||
}
|
||||
return []PostModel{}
|
||||
}
|
||||
|
||||
func (self *thread) AllowFiles() bool {
|
||||
return self.allowFiles
|
||||
}
|
||||
|
||||
func (self *thread) SetAllowFiles(allow bool) {
|
||||
self.allowFiles = allow
|
||||
}
|
||||
|
||||
func (self *thread) Truncate() ThreadModel {
|
||||
trunc := 5
|
||||
if len(self.Posts) > trunc {
|
||||
t := &thread{
|
||||
allowFiles: self.allowFiles,
|
||||
links: self.links,
|
||||
Posts: append([]PostModel{self.Posts[0]}, self.Posts[len(self.Posts)-trunc:]...),
|
||||
prefix: self.prefix,
|
||||
dirty: false,
|
||||
}
|
||||
imgs := 0
|
||||
for _, p := range t.Posts {
|
||||
imgs += p.NumAttachments()
|
||||
}
|
||||
t.truncatedPostCount = len(self.Posts) - trunc
|
||||
t.truncatedImageCount = self.ImageCount() - imgs
|
||||
return t
|
||||
}
|
||||
return self
|
||||
}
|
||||
|
||||
func (self *thread) MissingPostCount() int {
|
||||
return self.truncatedPostCount
|
||||
}
|
||||
|
||||
func (self *thread) MissingImageCount() int {
|
||||
return self.truncatedImageCount
|
||||
}
|
||||
|
||||
func (self *thread) HasOmittedReplies() bool {
|
||||
return self.truncatedPostCount > 0
|
||||
}
|
||||
|
||||
func (self *thread) HasOmittedImages() bool {
|
||||
return self.truncatedImageCount > 0
|
||||
}
|
||||
|
||||
func (self *thread) Update(db Database) {
|
||||
root := self.Posts[0].MessageID()
|
||||
self.Posts = append([]PostModel{self.Posts[0]}, db.GetThreadReplyPostModels(self.prefix, root, 0, 0)...)
|
||||
self.dirty = false
|
||||
}
|
||||
|
||||
type linkModel struct {
|
||||
text string
|
||||
link string
|
||||
}
|
||||
|
||||
func (self linkModel) LinkURL() string {
|
||||
return self.link
|
||||
}
|
||||
|
||||
func (self linkModel) Text() string {
|
||||
return self.text
|
||||
}
|
1632
contrib/backends/srndv2/src/srnd/nntp.go
Normal file
File diff suppressed because it is too large
246
contrib/backends/srndv2/src/srnd/null_cache.go
Normal file
@ -0,0 +1,246 @@
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type NullCache struct {
|
||||
regenThreadChan chan ArticleEntry
|
||||
regenGroupChan chan groupRegenRequest
|
||||
handler *nullHandler
|
||||
}
|
||||
|
||||
type nullHandler struct {
|
||||
database Database
|
||||
attachments bool
|
||||
requireCaptcha bool
|
||||
name string
|
||||
prefix string
|
||||
}
|
||||
|
||||
func (self *nullHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
path := r.URL.Path
|
||||
_, file := filepath.Split(path)
|
||||
|
||||
isjson := strings.HasSuffix(path, "/json") || strings.HasSuffix(path, "/json/")
|
||||
|
||||
if strings.HasPrefix(path, "/t/") {
|
||||
// thread handler
|
||||
parts := strings.Split(path[3:], "/")
|
||||
hash := parts[0]
|
||||
msg, err := self.database.GetMessageIDByHash(hash)
|
||||
if err == nil {
|
||||
template.genThread(self.attachments, self.requireCaptcha, msg, self.prefix, self.name, w, self.database, isjson)
|
||||
return
|
||||
} else {
|
||||
goto notfound
|
||||
}
|
||||
}
|
||||
if strings.Trim(path, "/") == "overboard" {
|
||||
// generate ukko aka overboard
|
||||
template.genUkko(self.prefix, self.name, w, self.database, isjson)
|
||||
return
|
||||
}
|
||||
|
||||
if strings.HasPrefix(path, "/b/") {
|
||||
// board handler
|
||||
parts := strings.Split(path[3:], "/")
|
||||
page := 0
|
||||
group := parts[0]
|
||||
if len(parts) > 1 && parts[1] != "" && parts[1] != "json" {
|
||||
var err error
|
||||
page, err = strconv.Atoi(parts[1])
|
||||
if err != nil {
|
||||
goto notfound
|
||||
}
|
||||
}
|
||||
hasgroup := self.database.HasNewsgroup(group)
|
||||
if !hasgroup {
|
||||
goto notfound
|
||||
}
|
||||
pages := self.database.GetGroupPageCount(group)
|
||||
if page >= int(pages) {
|
||||
goto notfound
|
||||
}
|
||||
template.genBoardPage(self.attachments, self.requireCaptcha, self.prefix, self.name, group, page, w, self.database, isjson)
|
||||
return
|
||||
}
|
||||
|
||||
if strings.HasPrefix(path, "/o/") {
|
||||
page := 0
|
||||
parts := strings.Split(path[3:], "/")
|
||||
if parts[0] != "json" && parts[0] != "" {
|
||||
var err error
|
||||
page, err = strconv.Atoi(parts[0])
|
||||
if err != nil {
|
||||
goto notfound
|
||||
}
|
||||
}
|
||||
template.genUkkoPaginated(self.prefix, self.name, w, self.database, page, isjson)
|
||||
return
|
||||
}
|
||||
|
||||
if len(file) == 0 || file == "index.html" {
|
||||
template.genFrontPage(10, self.prefix, self.name, w, ioutil.Discard, self.database)
|
||||
return
|
||||
}
|
||||
|
||||
if file == "index.json" {
|
||||
// TODO: index.json
|
||||
goto notfound
|
||||
}
|
||||
if strings.HasPrefix(file, "history.html") {
|
||||
template.genGraphs(self.prefix, w, self.database)
|
||||
return
|
||||
}
|
||||
if strings.HasPrefix(file, "boards.html") {
|
||||
template.genFrontPage(10, self.prefix, self.name, ioutil.Discard, w, self.database)
|
||||
return
|
||||
}
|
||||
|
||||
if strings.HasPrefix(file, "boards.json") {
|
||||
b := self.database.GetAllNewsgroups()
|
||||
json.NewEncoder(w).Encode(b)
|
||||
return
|
||||
}
|
||||
|
||||
if strings.HasPrefix(file, "ukko.html") {
|
||||
template.genUkko(self.prefix, self.name, w, self.database, false)
|
||||
return
|
||||
}
|
||||
if strings.HasPrefix(file, "ukko.json") {
|
||||
template.genUkko(self.prefix, self.name, w, self.database, true)
|
||||
return
|
||||
}
|
||||
|
||||
if strings.HasPrefix(file, "ukko-") {
|
||||
page := getUkkoPage(file)
|
||||
template.genUkkoPaginated(self.prefix, self.name, w, self.database, page, isjson)
|
||||
return
|
||||
}
|
||||
if strings.HasPrefix(file, "thread-") {
|
||||
hash := getThreadHash(file)
|
||||
if len(hash) == 0 {
|
||||
goto notfound
|
||||
}
|
||||
msg, err := self.database.GetMessageIDByHash(hash)
|
||||
if err != nil {
|
||||
goto notfound
|
||||
}
|
||||
template.genThread(self.attachments, self.requireCaptcha, msg, self.prefix, self.name, w, self.database, isjson)
|
||||
return
|
||||
}
|
||||
if strings.HasPrefix(file, "catalog-") {
|
||||
group := getGroupForCatalog(file)
|
||||
if len(group) == 0 {
|
||||
goto notfound
|
||||
}
|
||||
hasgroup := self.database.HasNewsgroup(group)
|
||||
if !hasgroup {
|
||||
goto notfound
|
||||
}
|
||||
template.genCatalog(self.prefix, self.name, group, w, self.database)
|
||||
return
|
||||
} else {
|
||||
group, page := getGroupAndPage(file)
|
||||
if len(group) == 0 || page < 0 {
|
||||
goto notfound
|
||||
}
|
||||
hasgroup := self.database.HasNewsgroup(group)
|
||||
if !hasgroup {
|
||||
goto notfound
|
||||
}
|
||||
pages := self.database.GetGroupPageCount(group)
|
||||
if page >= int(pages) {
|
||||
goto notfound
|
||||
}
|
||||
template.genBoardPage(self.attachments, self.requireCaptcha, self.prefix, self.name, group, page, w, self.database, isjson)
|
||||
return
|
||||
}
|
||||
|
||||
notfound:
|
||||
template.renderNotFound(w, r, self.prefix, self.name)
|
||||
}
|
||||
|
||||
func (self *NullCache) DeleteBoardMarkup(group string) {
|
||||
}
|
||||
|
||||
// try to delete root post's page
|
||||
func (self *NullCache) DeleteThreadMarkup(root_post_id string) {
|
||||
}
|
||||
|
||||
// regen every newsgroup
|
||||
func (self *NullCache) RegenAll() {
|
||||
}
|
||||
|
||||
func (self *NullCache) RegenFrontPage() {
|
||||
}
|
||||
|
||||
func (self *NullCache) SetRequireCaptcha(required bool) {
|
||||
self.handler.requireCaptcha = required
|
||||
}
|
||||
|
||||
func (self *NullCache) pollRegen() {
|
||||
for {
|
||||
select {
|
||||
// consume regen requests
|
||||
case _ = <-self.regenGroupChan:
|
||||
{
|
||||
}
|
||||
case _ = <-self.regenThreadChan:
|
||||
{
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// regen every page of the board
|
||||
func (self *NullCache) RegenerateBoard(group string) {
|
||||
}
|
||||
|
||||
// regenerate pages after a mod event
|
||||
func (self *NullCache) RegenOnModEvent(newsgroup, msgid, root string, page int) {
|
||||
}
|
||||
|
||||
func (self *NullCache) Start() {
|
||||
go self.pollRegen()
|
||||
}
|
||||
|
||||
func (self *NullCache) Regen(msg ArticleEntry) {
|
||||
}
|
||||
|
||||
func (self *NullCache) GetThreadChan() chan ArticleEntry {
|
||||
return self.regenThreadChan
|
||||
}
|
||||
|
||||
func (self *NullCache) GetGroupChan() chan groupRegenRequest {
|
||||
return self.regenGroupChan
|
||||
}
|
||||
|
||||
func (self *NullCache) GetHandler() http.Handler {
|
||||
return self.handler
|
||||
}
|
||||
|
||||
func (self *NullCache) Close() {
|
||||
// nothing to do
|
||||
}
|
||||
|
||||
func NewNullCache(prefix, webroot, name string, attachments bool, db Database, store ArticleStore) CacheInterface {
|
||||
cache := new(NullCache)
|
||||
cache.regenThreadChan = make(chan ArticleEntry, 16)
|
||||
cache.regenGroupChan = make(chan groupRegenRequest, 8)
|
||||
cache.handler = &nullHandler{
|
||||
prefix: prefix,
|
||||
name: name,
|
||||
attachments: attachments,
|
||||
requireCaptcha: true,
|
||||
database: db,
|
||||
}
|
||||
|
||||
return cache
|
||||
}
|
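The null cache generates every page on demand and keeps nothing on disk. A minimal wiring sketch, assuming it sits inside package srnd, that net/http is imported, and that db and store come from the daemon's normal startup path (the listen address is an example value only):

	func serveNullCache(prefix, name string, db Database, store ArticleStore) error {
		cache := NewNullCache(prefix, "", name, true, db, store)
		cache.Start()
		// every request is rendered on the fly by nullHandler.ServeHTTP
		return http.ListenAndServe("127.0.0.1:18000", cache.GetHandler())
	}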
32
contrib/backends/srndv2/src/srnd/policy.go
Normal file
@ -0,0 +1,32 @@
|
||||
//
|
||||
// policy.go
|
||||
//
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"log"
|
||||
"regexp"
|
||||
)
|
||||
|
||||
type FeedPolicy struct {
|
||||
rules map[string]string
|
||||
}
|
||||
|
||||
// do we allow this newsgroup?
|
||||
func (self *FeedPolicy) AllowsNewsgroup(newsgroup string) (result bool) {
|
||||
var k, v string
|
||||
for k, v = range self.rules {
|
||||
match, err := regexp.MatchString(k, newsgroup)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
if match {
|
||||
if v == "0" {
|
||||
return false
|
||||
} else if v == "1" {
|
||||
result = true
|
||||
}
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
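A short illustration of the rule semantics above (example patterns, not part of this commit; the rules field is package-private, so this assumes in-package construction): a matching "0" rule always denies, while a matching "1" rule marks the newsgroup as allowed.

	policy := &FeedPolicy{rules: map[string]string{
		"overchan\\..*":   "1", // carry every overchan.* group
		"overchan\\.spam": "0", // but never this one
	}}
	policy.AllowsNewsgroup("overchan.random") // true
	policy.AllowsNewsgroup("overchan.spam")   // false, the "0" rule wins
	policy.AllowsNewsgroup("alt.binaries")    // false, no rule matches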
1911
contrib/backends/srndv2/src/srnd/postgres.go
Normal file
File diff suppressed because it is too large
21
contrib/backends/srndv2/src/srnd/sign_test.go
Normal file
@ -0,0 +1,21 @@
|
||||
package srnd
|
||||
|
||||
import "testing"
|
||||
import "fmt"
|
||||
import "github.com/majestrate/nacl"
|
||||
|
||||
func TestSignVerify(t *testing.T) {
|
||||
|
||||
msgid := "<asd@asd.asd>"
|
||||
secret := "asdasdasd"
|
||||
seed := parseTripcodeSecret(secret)
|
||||
kp := nacl.LoadSignKey(seed)
|
||||
defer kp.Free()
|
||||
pubkey := hexify(kp.Public())
|
||||
seckey := kp.Secret()
|
||||
sig := msgidFrontendSign(seckey, msgid)
|
||||
fmt.Println(sig, pubkey, msgid)
|
||||
if !verifyFrontendSig(pubkey, sig, msgid) {
|
||||
t.Fail()
|
||||
}
|
||||
}
|
31
contrib/backends/srndv2/src/srnd/srnd_test.go
Normal file
@ -0,0 +1,31 @@
|
||||
package srnd
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestGenFeedsConfig(t *testing.T) {
|
||||
|
||||
err := GenFeedsConfig()
|
||||
// Generate default feeds.ini
|
||||
if err != nil {
|
||||
|
||||
t.Error("Cannot generate feeds.ini", err)
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// func (self lineWriter) Write(data []byte) (n int, err error) {
|
||||
|
||||
//func OpenFileWriter(fname string) (io.WriteCloser, error) {
|
||||
|
||||
func TestOpenFileWriter(t *testing.T) {
|
||||
|
||||
_, err := OpenFileWriter("file.txt")
|
||||
// Open a writer for a test file
|
||||
if err != nil {
|
||||
|
||||
t.Error("Cant open file writer.", err)
|
||||
|
||||
}
|
||||
|
||||
}
|
560
contrib/backends/srndv2/src/srnd/store.go
Normal file
@ -0,0 +1,560 @@
|
||||
//
|
||||
// store.go
|
||||
//
|
||||
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"errors"
|
||||
"io"
|
||||
"log"
|
||||
"mime"
|
||||
"mime/multipart"
|
||||
"net/mail"
|
||||
"net/textproto"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type ArticleStore interface {
|
||||
|
||||
// full filepath to attachment directory
|
||||
AttachmentDir() string
|
||||
|
||||
// get the filepath for an attachment
|
||||
AttachmentFilepath(fname string) string
|
||||
// get the filepath for an attachment's thumbnail
|
||||
ThumbnailFilepath(fname string) string
|
||||
// do we have this article?
|
||||
HasArticle(msgid string) bool
|
||||
// create a file for a message
|
||||
CreateFile(msgid string) io.WriteCloser
|
||||
// get the filename of a message
|
||||
GetFilename(msgid string) string
|
||||
// open a message in the store for reading given its message-id
|
||||
// return io.ReadCloser, error
|
||||
OpenMessage(msgid string) (io.ReadCloser, error)
|
||||
// get article headers only
|
||||
GetHeaders(msgid string) ArticleHeaders
|
||||
// get mime header
|
||||
GetMIMEHeader(msgid string) textproto.MIMEHeader
|
||||
// get our temp directory for articles
|
||||
TempDir() string
|
||||
// get a list of all the attachments we have
|
||||
GetAllAttachments() ([]string, error)
|
||||
// generate a thumbnail
|
||||
GenerateThumbnail(fname string) (ThumbInfo, error)
|
||||
// generate all thumbnails for this message
|
||||
ThumbnailMessage(msgid string) []ThumbInfo
|
||||
// did we enable compression?
|
||||
Compression() bool
|
||||
// process body of nntp message, register attachments and the article
|
||||
// write the body into writer as we go through the body
|
||||
// does NOT write mime header
|
||||
ProcessMessageBody(wr io.Writer, hdr textproto.MIMEHeader, body io.Reader) error
|
||||
// register this post with the daemon
|
||||
RegisterPost(nntp NNTPMessage) error
|
||||
// register signed message
|
||||
RegisterSigned(msgid, pk string) error
|
||||
|
||||
GetMessage(msgid string) NNTPMessage
|
||||
|
||||
// get size of message on disk
|
||||
GetMessageSize(msgid string) (int64, error)
|
||||
|
||||
// get thumbnail info of file by path
|
||||
ThumbInfo(fpath string) (ThumbInfo, error)
|
||||
}
|
||||
type articleStore struct {
|
||||
directory string
|
||||
temp string
|
||||
attachments string
|
||||
thumbs string
|
||||
database Database
|
||||
convert_path string
|
||||
ffmpeg_path string
|
||||
sox_path string
|
||||
identify_path string
|
||||
placeholder string
|
||||
compression bool
|
||||
compWriter *gzip.Writer
|
||||
}
|
||||
|
||||
func createArticleStore(config map[string]string, database Database) ArticleStore {
|
||||
store := &articleStore{
|
||||
directory: config["store_dir"],
|
||||
temp: config["incoming_dir"],
|
||||
attachments: config["attachments_dir"],
|
||||
thumbs: config["thumbs_dir"],
|
||||
convert_path: config["convert_bin"],
|
||||
identify_path: config["identify_path"],
|
||||
ffmpeg_path: config["ffmpegthumbnailer_bin"],
|
||||
sox_path: config["sox_bin"],
|
||||
placeholder: config["placeholder_thumbnail"],
|
||||
database: database,
|
||||
compression: config["compression"] == "1",
|
||||
}
|
||||
store.Init()
|
||||
return store
|
||||
}
|
||||
|
||||
func (self *articleStore) AttachmentDir() string {
|
||||
return self.attachments
|
||||
}
|
||||
|
||||
func (self *articleStore) Compression() bool {
|
||||
return self.compression
|
||||
}
|
||||
|
||||
func (self *articleStore) TempDir() string {
|
||||
return self.temp
|
||||
}
|
||||
|
||||
// initialize article store
|
||||
func (self *articleStore) Init() {
|
||||
EnsureDir(self.directory)
|
||||
EnsureDir(self.temp)
|
||||
EnsureDir(self.attachments)
|
||||
EnsureDir(self.thumbs)
|
||||
if !CheckFile(self.convert_path) {
|
||||
log.Fatal("cannot find executable for convert: ", self.convert_path, " not found")
|
||||
}
|
||||
if !CheckFile(self.ffmpeg_path) {
|
||||
log.Fatal("connt find executable for ffmpegthumbnailer: ", self.ffmpeg_path, " not found")
|
||||
}
|
||||
if !CheckFile(self.sox_path) {
|
||||
log.Fatal("connt find executable for sox: ", self.sox_path, " not found")
|
||||
}
|
||||
if !CheckFile(self.placeholder) {
|
||||
log.Println("falling back to use default placeholder image")
|
||||
self.placeholder = "contrib/static/placeholder.png"
|
||||
if !CheckFile(self.placeholder) {
|
||||
log.Fatal("cannot find thumbnail placeholder file: ", self.placeholder, " not found")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *articleStore) RegisterSigned(msgid, pk string) (err error) {
|
||||
err = self.database.RegisterSigned(msgid, pk)
|
||||
return
|
||||
}
|
||||
|
||||
func (self *articleStore) isAudio(fname string) bool {
|
||||
for _, ext := range []string{".mp3", ".ogg", ".oga", ".opus", ".flac", ".m4a"} {
|
||||
if strings.HasSuffix(strings.ToLower(fname), ext) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (self *articleStore) ThumbnailMessage(msgid string) (infos []ThumbInfo) {
|
||||
atts := self.database.GetPostAttachments(msgid)
|
||||
for _, att := range atts {
|
||||
if CheckFile(self.ThumbnailFilepath(att)) {
|
||||
continue
|
||||
}
|
||||
info, err := self.GenerateThumbnail(att)
|
||||
if err == nil {
|
||||
infos = append(infos, info)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// is this an image format we need convert for?
|
||||
func (self *articleStore) isImage(fname string) bool {
|
||||
for _, ext := range []string{".gif", ".ico", ".png", ".jpeg", ".jpg", ".png", ".webp"} {
|
||||
if strings.HasSuffix(strings.ToLower(fname), ext) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// is this a video file?
|
||||
func (self *articleStore) isVideo(fname string) bool {
|
||||
for _, ext := range []string{".mpeg", ".ogv", ".mkv", ".avi", ".mp4", ".webm"} {
|
||||
if strings.HasSuffix(strings.ToLower(fname), ext) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (self *articleStore) ThumbInfo(fpath string) (ThumbInfo, error) {
|
||||
var info ThumbInfo
|
||||
log.Println("made thumbnail for", fpath)
|
||||
cmd := exec.Command(self.identify_path, "-format", "%[fx:w] %[fx:h]", fpath)
|
||||
output, err := cmd.Output()
|
||||
if err == nil {
|
||||
parts := strings.Split(string(output), " ")
|
||||
if len(parts) == 2 {
|
||||
info.Width, err = strconv.Atoi(parts[0])
|
||||
if err == nil {
|
||||
info.Height, err = strconv.Atoi(parts[1])
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log.Println("failed to determine size of thumbnail")
|
||||
}
|
||||
return info, err
|
||||
}
|
||||
|
||||
func (self *articleStore) GenerateThumbnail(fname string) (info ThumbInfo, err error) {
|
||||
outfname := self.ThumbnailFilepath(fname)
|
||||
infname := self.AttachmentFilepath(fname)
|
||||
tmpfname := ""
|
||||
var cmd *exec.Cmd
|
||||
if self.isImage(fname) {
|
||||
if strings.HasSuffix(infname, ".gif") {
|
||||
infname += "[0]"
|
||||
}
|
||||
cmd = exec.Command(self.convert_path, "-thumbnail", "200", infname, outfname)
|
||||
} else if self.isAudio(fname) {
|
||||
tmpfname = infname + ".wav"
|
||||
cmd = exec.Command(self.ffmpeg_path, "-i", infname, tmpfname)
|
||||
var out []byte
|
||||
|
||||
out, err = cmd.CombinedOutput()
|
||||
|
||||
if err == nil {
|
||||
cmd = exec.Command(self.sox_path, tmpfname, "-n", "spectrogram", "-a", "-d", "0:10", "-r", "-p", "6", "-x", "200", "-y", "150", "-o", outfname)
|
||||
} else {
|
||||
log.Println("error making thumbnail", string(out))
|
||||
}
|
||||
|
||||
} else if self.isVideo(fname) || strings.HasSuffix(fname, ".txt") {
|
||||
cmd = exec.Command(self.ffmpeg_path, "-i", infname, "-vf", "scale=300:200", "-vframes", "1", outfname)
|
||||
}
|
||||
if cmd == nil {
|
||||
log.Println("use placeholder for", infname)
|
||||
os.Link(self.placeholder, outfname)
|
||||
} else {
|
||||
exec_out, err := cmd.CombinedOutput()
|
||||
if err == nil {
|
||||
log.Println("made thumbnail for", infname)
|
||||
} else {
|
||||
log.Println("error generating thumbnail", string(exec_out))
|
||||
}
|
||||
}
|
||||
if len(tmpfname) > 0 {
|
||||
DelFile(tmpfname)
|
||||
}
|
||||
return info, err
|
||||
}
|
||||
|
||||
func (self *articleStore) GetAllAttachments() (names []string, err error) {
|
||||
var f *os.File
|
||||
f, err = os.Open(self.attachments)
|
||||
if err == nil {
|
||||
names, err = f.Readdirnames(0)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (self *articleStore) OpenMessage(msgid string) (rc io.ReadCloser, err error) {
|
||||
fname := self.GetFilename(msgid)
|
||||
var f *os.File
|
||||
f, err = os.Open(fname)
|
||||
if err == nil {
|
||||
if self.compression {
|
||||
// read gzip header
|
||||
var hdr [2]byte
|
||||
_, err = f.Read(hdr[:])
|
||||
// seek back to beginning
|
||||
f.Seek(0, 0)
|
||||
if err == nil {
|
||||
if hdr[0] == 0x1f && hdr[1] == 0x8b {
|
||||
// gzip header detected
|
||||
rc, err = gzip.NewReader(f)
|
||||
} else {
|
||||
// fall back to uncompressed
|
||||
rc = f
|
||||
}
|
||||
} else {
|
||||
// error reading file
|
||||
f.Close()
|
||||
rc = nil
|
||||
}
|
||||
// will fall back to regular file if gzip header not found
|
||||
} else {
|
||||
// compression disabled
|
||||
// assume uncompressed
|
||||
rc = f
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
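// Illustrative sketch (not part of this commit) of the same gzip sniffing
// used in OpenMessage above, written as a stand-alone helper: Peek leaves the
// two magic bytes 0x1f 0x8b in the buffer, so no seek back is needed. All
// packages used here are already imported by this file.
func openMaybeGzipped(fname string) (io.Reader, io.Closer, error) {
	f, err := os.Open(fname)
	if err != nil {
		return nil, nil, err
	}
	br := bufio.NewReader(f)
	if magic, perr := br.Peek(2); perr == nil && magic[0] == 0x1f && magic[1] == 0x8b {
		gz, gerr := gzip.NewReader(br)
		if gerr != nil {
			f.Close()
			return nil, nil, gerr
		}
		return gz, f, nil
	}
	// too short or not compressed: hand back the buffered reader as-is
	return br, f, nil
}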
func (self *articleStore) RegisterPost(nntp NNTPMessage) (err error) {
|
||||
err = self.database.RegisterArticle(nntp)
|
||||
return
|
||||
}
|
||||
|
||||
func (self *articleStore) saveAttachment(att NNTPAttachment) {
|
||||
fpath := att.Filepath()
|
||||
upload := self.AttachmentFilepath(fpath)
|
||||
if !CheckFile(upload) {
|
||||
// attachment does not exist on disk
|
||||
f, err := os.Create(upload)
|
||||
if f != nil {
|
||||
_, err = att.WriteTo(f)
|
||||
f.Close()
|
||||
}
|
||||
if err != nil {
|
||||
log.Println("failed to save attachemnt", fpath, err)
|
||||
}
|
||||
}
|
||||
att.Reset()
|
||||
self.thumbnailAttachment(fpath)
|
||||
}
|
||||
|
||||
// generate attachment thumbnail
|
||||
func (self *articleStore) thumbnailAttachment(fpath string) {
|
||||
thumb := self.ThumbnailFilepath(fpath)
|
||||
if !CheckFile(thumb) {
|
||||
_, err := self.GenerateThumbnail(fpath)
|
||||
if err != nil {
|
||||
log.Println("failed to generate thumbnail for", fpath, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (self *articleStore) GetMessageSize(msgid string) (sz int64, err error) {
|
||||
var info os.FileInfo
|
||||
info, err = os.Stat(self.GetFilename(msgid))
|
||||
if err == nil {
|
||||
sz = info.Size()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// get the filepath for an attachment
|
||||
func (self *articleStore) AttachmentFilepath(fname string) string {
|
||||
return filepath.Join(self.attachments, fname)
|
||||
}
|
||||
|
||||
// get the filepath for a thumbnail
|
||||
func (self *articleStore) ThumbnailFilepath(fname string) string {
|
||||
// all thumbnails are jpegs now
|
||||
//if strings.HasSuffix(fname, ".gif") {
|
||||
// return filepath.Join(self.thumbs, fname)
|
||||
//}
|
||||
return filepath.Join(self.thumbs, fname+".jpg")
|
||||
}
|
||||
|
||||
// create a file for this article
|
||||
func (self *articleStore) CreateFile(messageID string) io.WriteCloser {
|
||||
fname := self.GetFilename(messageID)
|
||||
if CheckFile(fname) {
|
||||
// already exists
|
||||
log.Println("article with message-id", messageID, "already exists, not saving")
|
||||
return nil
|
||||
}
|
||||
file, err := os.Create(fname)
|
||||
if err != nil {
|
||||
log.Println("cannot open file", fname)
|
||||
return nil
|
||||
}
|
||||
return file
|
||||
}
|
||||
|
||||
// return true if we have an article
|
||||
func (self *articleStore) HasArticle(messageID string) bool {
|
||||
return CheckFile(self.GetFilename(messageID))
|
||||
}
|
||||
|
||||
// get the filename for this article
|
||||
func (self *articleStore) GetFilename(messageID string) string {
|
||||
if !ValidMessageID(messageID) {
|
||||
log.Println("!!! bug: tried to open invalid message", messageID, "!!!")
|
||||
return ""
|
||||
}
|
||||
return filepath.Join(self.directory, messageID)
|
||||
}
|
||||
|
||||
func (self *articleStore) GetHeaders(messageID string) (hdr ArticleHeaders) {
|
||||
txthdr := self.getMIMEHeader(messageID)
|
||||
if txthdr != nil {
|
||||
hdr = make(ArticleHeaders)
|
||||
for k, val := range txthdr {
|
||||
for _, v := range val {
|
||||
hdr.Add(k, v)
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (self *articleStore) GetMIMEHeader(messageID string) textproto.MIMEHeader {
|
||||
return self.getMIMEHeader(messageID)
|
||||
}
|
||||
|
||||
// get article with headers only
|
||||
func (self *articleStore) getMIMEHeader(messageID string) (hdr textproto.MIMEHeader) {
|
||||
if ValidMessageID(messageID) {
|
||||
fname := self.GetFilename(messageID)
|
||||
f, err := os.Open(fname)
|
||||
if f != nil {
|
||||
r := bufio.NewReader(f)
|
||||
var msg *mail.Message
|
||||
msg, err = readMIMEHeader(r)
|
||||
f.Close()
|
||||
if msg != nil {
	// guard against a nil message when readMIMEHeader failed
	hdr = textproto.MIMEHeader(msg.Header)
}
|
||||
}
|
||||
if err != nil {
|
||||
log.Println("failed to load article headers for", messageID, err)
|
||||
}
|
||||
}
|
||||
return hdr
|
||||
}
|
||||
|
||||
func (self *articleStore) ProcessMessageBody(wr io.Writer, hdr textproto.MIMEHeader, body io.Reader) (err error) {
|
||||
err = read_message_body(body, hdr, self, wr, false, func(nntp NNTPMessage) {
|
||||
err = self.RegisterPost(nntp)
|
||||
if err == nil {
|
||||
pk := hdr.Get("X-PubKey-Ed25519")
|
||||
if len(pk) > 0 {
|
||||
// signed and valid
|
||||
err = self.RegisterSigned(getMessageID(hdr), pk)
|
||||
if err != nil {
|
||||
log.Println("register signed failed", err)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log.Println("error procesing message body", err)
|
||||
}
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
func (self *articleStore) GetMessage(msgid string) (nntp NNTPMessage) {
|
||||
r, err := self.OpenMessage(msgid)
|
||||
if err == nil {
|
||||
defer r.Close()
|
||||
br := bufio.NewReader(r)
|
||||
msg, err := readMIMEHeader(br)
|
||||
if err == nil {
|
||||
chnl := make(chan NNTPMessage)
|
||||
hdr := textproto.MIMEHeader(msg.Header)
|
||||
err = read_message_body(msg.Body, hdr, nil, nil, true, func(nntp NNTPMessage) {
|
||||
c := chnl
|
||||
// inject pubkey for mod
|
||||
nntp.Headers().Set("X-PubKey-Ed25519", hdr.Get("X-PubKey-Ed25519"))
|
||||
c <- nntp
|
||||
close(c)
|
||||
})
|
||||
nntp = <-chnl
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// read message body with mimeheader pre-read
|
||||
// calls callback for each read nntp message
|
||||
// if writer is not nil and discardAttachmentBody is false the message body will be written to the writer and the nntp message will not be filled
|
||||
// if writer is not nil and discardAttachmentBody is true the message body will be discarded and writer ignored
|
||||
// if writer is nil and discardAttachmentBody is true the body is discarded entirely
|
||||
// if writer is nil and discardAttachmentBody is false the body is loaded into the nntp message
|
||||
// if the body contains a signed message it unwraps 1 layer of signing
|
||||
func read_message_body(body io.Reader, hdr map[string][]string, store ArticleStore, wr io.Writer, discardAttachmentBody bool, callback func(NNTPMessage)) error {
|
||||
nntp := new(nntpArticle)
|
||||
nntp.headers = ArticleHeaders(hdr)
|
||||
content_type := nntp.ContentType()
|
||||
media_type, params, err := mime.ParseMediaType(content_type)
|
||||
if err != nil {
|
||||
log.Println("failed to parse media type", err, "for mime", content_type)
|
||||
nntp.Reset()
|
||||
return err
|
||||
}
|
||||
if wr != nil && !discardAttachmentBody {
|
||||
body = io.TeeReader(body, wr)
|
||||
}
|
||||
boundary, ok := params["boundary"]
|
||||
if ok || content_type == "multipart/mixed" {
|
||||
partReader := multipart.NewReader(body, boundary)
|
||||
for {
|
||||
part, err := partReader.NextPart()
|
||||
if err == io.EOF {
|
||||
callback(nntp)
|
||||
return nil
|
||||
} else if err == nil {
|
||||
hdr := part.Header
|
||||
// get content type of part
|
||||
part_type := hdr.Get("Content-Type")
|
||||
// parse content type
|
||||
media_type, _, err = mime.ParseMediaType(part_type)
|
||||
if err == nil {
|
||||
if media_type == "text/plain" {
|
||||
att := readAttachmentFromMimePartAndStore(part, store)
|
||||
if att == nil {
|
||||
log.Println("failed to load plaintext attachment")
|
||||
} else {
|
||||
if att.Filename() == "" {
|
||||
// message part
|
||||
nntp.message = att.AsString()
|
||||
} else {
|
||||
// plaintext attachment
|
||||
nntp.Attach(att)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// non plaintext gets added to attachments
|
||||
att := readAttachmentFromMimePartAndStore(part, store)
|
||||
if att == nil {
|
||||
// failed to read attachment
|
||||
log.Println("failed to read attachment of type", media_type)
|
||||
} else {
|
||||
nntp.Attach(att)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log.Println("part has no content type", err)
|
||||
}
|
||||
part.Close()
|
||||
part = nil
|
||||
} else {
|
||||
log.Println("failed to load part! ", err)
|
||||
nntp.Reset()
|
||||
return err
|
||||
}
|
||||
}
|
||||
} else if media_type == "message/rfc822" {
|
||||
// tripcoded message
|
||||
sig := nntp.headers.Get("X-Signature-Ed25519-Sha512", "")
|
||||
pk := nntp.Pubkey()
|
||||
if pk == "" || sig == "" {
|
||||
log.Println("invalid sig or pubkey", sig, pk)
|
||||
nntp.Reset()
|
||||
return errors.New("invalid headers")
|
||||
}
|
||||
// process inner body
|
||||
// verify message
|
||||
err = verifyMessage(pk, sig, body, func(h map[string][]string, innerBody io.Reader) {
|
||||
// handle inner message
|
||||
err := read_message_body(innerBody, h, store, nil, true, callback)
|
||||
if err != nil {
|
||||
log.Println("error reading inner signed message", err)
|
||||
}
|
||||
})
|
||||
if err != nil {
|
||||
log.Println("error reading inner message", err)
|
||||
}
|
||||
} else {
|
||||
// plaintext attachment
|
||||
b := new(bytes.Buffer)
|
||||
_, err = io.Copy(b, body)
|
||||
if err == nil {
|
||||
nntp.message = b.String()
|
||||
callback(nntp)
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
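For orientation, a hedged sketch of pushing an incoming article through the store (hypothetical helper, not part of this commit; writing the MIME header to the file is omitted for brevity and the names are illustrative):

	func storeIncoming(store ArticleStore, msgid string, r io.Reader) error {
		tp := textproto.NewReader(bufio.NewReader(r))
		hdr, err := tp.ReadMIMEHeader()
		if err != nil {
			return err
		}
		wr := store.CreateFile(msgid)
		if wr == nil {
			return errors.New("article already stored")
		}
		defer wr.Close()
		// ProcessMessageBody writes only the body; a real caller would write
		// the header to wr first and thumbnail the message afterwards
		return store.ProcessMessageBody(wr, hdr, tp.R)
	}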
518
contrib/backends/srndv2/src/srnd/templates.go
Normal file
@ -0,0 +1,518 @@
|
||||
//
|
||||
// templates.go
|
||||
// template model interfaces
|
||||
//
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"github.com/cbroglie/mustache"
|
||||
tinyhtml "github.com/whyrusleeping/tinyhtml"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/http"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
)
|
||||
|
||||
type templateEngine struct {
|
||||
// loaded templates
|
||||
templates map[string]string
|
||||
// root directory for templates
|
||||
template_dir string
|
||||
// mutex for accessing templates
|
||||
templates_mtx sync.RWMutex
|
||||
// do we want to minimize the html generated?
|
||||
Minimize bool
|
||||
// database
|
||||
DB Database
|
||||
}
|
||||
|
||||
func (self *templateEngine) templateCached(name string) (ok bool) {
|
||||
self.templates_mtx.Lock()
|
||||
_, ok = self.templates[name]
|
||||
self.templates_mtx.Unlock()
|
||||
return
|
||||
}
|
||||
|
||||
// explicitly reload a template
|
||||
func (self *templateEngine) reloadTemplate(name string) {
|
||||
self.templates_mtx.Lock()
|
||||
self.templates[name] = self.loadTemplate(name)
|
||||
self.templates_mtx.Unlock()
|
||||
}
|
||||
|
||||
// check if we have this template
|
||||
func (self *templateEngine) hasTemplate(name string) bool {
|
||||
return CheckFile(self.templateFilepath(name))
|
||||
}
|
||||
|
||||
// explicitly reload all loaded templates
|
||||
func (self *templateEngine) reloadAllTemplates() {
|
||||
loadThese := []string{}
|
||||
// get all the names of the templates we have loaded
|
||||
self.templates_mtx.Lock()
|
||||
for tname, _ := range self.templates {
|
||||
loadThese = append(loadThese, tname)
|
||||
}
|
||||
self.templates_mtx.Unlock()
|
||||
// for each template we have loaded, reload the contents from file
|
||||
for _, tname := range loadThese {
|
||||
self.reloadTemplate(tname)
|
||||
}
|
||||
}
|
||||
|
||||
// get the updated post model (currently fetched straight from the database)
|
||||
func (self *templateEngine) updatePostModel(prefix, frontend, msgid, rootmsgid, group string, db Database) PostModel {
|
||||
return db.GetPostModel(prefix, msgid)
|
||||
/*
|
||||
// get board
|
||||
self.groups_mtx.Lock()
|
||||
board := self.groups[group]
|
||||
self.groups_mtx.Unlock()
|
||||
|
||||
var th ThreadModel
|
||||
if msgid == rootmsgid {
|
||||
// new thread
|
||||
if len(board) > 0 {
|
||||
page := board[0]
|
||||
page.Update(db)
|
||||
th = page.GetThread(rootmsgid)
|
||||
}
|
||||
} else {
|
||||
// reply
|
||||
for _, page := range board {
|
||||
t := page.GetThread(rootmsgid)
|
||||
if t != nil {
|
||||
th = t
|
||||
th.Update(db)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if th == nil {
|
||||
// reload board, this will be a heavy operation
|
||||
board.UpdateAll(db)
|
||||
// find it
|
||||
for _, page := range board {
|
||||
t := page.GetThread(rootmsgid)
|
||||
if t != nil {
|
||||
th = t
|
||||
th.Update(db)
|
||||
break
|
||||
}
|
||||
}
|
||||
for _, page := range board {
|
||||
updateLinkCacheForBoard(page)
|
||||
}
|
||||
self.groups_mtx.Lock()
|
||||
self.groups[group] = board
|
||||
self.groups_mtx.Unlock()
|
||||
}
|
||||
if th == nil {
|
||||
if rootmsgid == msgid {
|
||||
return db.GetPostModel(prefix, rootmsgid)
|
||||
}
|
||||
log.Println("template could not find thread", rootmsgid, "in", group)
|
||||
return nil
|
||||
}
|
||||
|
||||
// found
|
||||
m := th.OP()
|
||||
if m.MessageID() == msgid {
|
||||
return m
|
||||
}
|
||||
for _, p := range th.Replies() {
|
||||
if p.MessageID() == msgid {
|
||||
// found as reply
|
||||
return p
|
||||
}
|
||||
}
|
||||
log.Println("template could not find post model for thread", rootmsgid, "in", group)
|
||||
// not found
|
||||
return nil
|
||||
*/
|
||||
}
|
||||
|
||||
// get the filepath to a template
|
||||
func (self *templateEngine) templateFilepath(name string) string {
|
||||
if strings.Count(name, "..") > 0 {
|
||||
return ""
|
||||
}
|
||||
return filepath.Join(self.template_dir, name)
|
||||
}
|
||||
|
||||
// load a template from file, return as string
|
||||
func (self *templateEngine) loadTemplate(name string) (t string) {
|
||||
b, err := ioutil.ReadFile(self.templateFilepath(name))
|
||||
if err == nil {
|
||||
t = string(b)
|
||||
} else {
|
||||
log.Println("error loading template", err)
|
||||
t = err.Error()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// get a template, if it's not cached load from file and cache it
|
||||
func (self *templateEngine) getTemplate(name string) (t string) {
|
||||
if !self.templateCached(name) {
|
||||
self.templates_mtx.Lock()
|
||||
self.templates[name] = self.loadTemplate(name)
|
||||
self.templates_mtx.Unlock()
|
||||
}
|
||||
self.templates_mtx.Lock()
|
||||
t, _ = self.templates[name]
|
||||
self.templates_mtx.Unlock()
|
||||
return
|
||||
}
|
||||
|
||||
// render a template, self-explanatory
|
||||
func (self *templateEngine) renderTemplate(name string, obj map[string]interface{}) string {
|
||||
t := self.getTemplate(name)
|
||||
obj["i18n"] = i18nProvider
|
||||
s, err := mustache.Render(t, obj)
|
||||
if err == nil {
|
||||
return s
|
||||
} else {
|
||||
return err.Error()
|
||||
}
|
||||
}
|
||||
|
||||
// write a template to an io.Writer
|
||||
func (self *templateEngine) writeTemplate(name string, obj map[string]interface{}, wr io.Writer) (err error) {
|
||||
str := self.renderTemplate(name, obj)
|
||||
var r io.Reader
|
||||
r = bytes.NewBufferString(str)
|
||||
if self.Minimize {
|
||||
r = tinyhtml.New(r)
|
||||
}
|
||||
_, err = io.Copy(wr, r)
|
||||
return
|
||||
}
|
||||
|
||||
// easy wrapper for json.NewEncoder
|
||||
func (self *templateEngine) renderJSON(wr io.Writer, obj interface{}) {
|
||||
err := json.NewEncoder(wr).Encode(obj)
|
||||
if err != nil {
|
||||
log.Println("error rendering json", err)
|
||||
}
|
||||
}
|
||||
|
||||
// get a board model given a newsgroup
|
||||
// load an updated board model if we don't have it
|
||||
func (self *templateEngine) obtainBoard(prefix, frontend, group string, db Database) (model GroupModel) {
|
||||
// warning, we attempt to do smart reloading
|
||||
// dark magic may lurk here
|
||||
p := db.GetGroupPageCount(group)
|
||||
pages := int(p)
|
||||
perpage, _ := db.GetThreadsPerPage(group)
|
||||
// reload all the pages
|
||||
var newModel GroupModel
|
||||
for page := 0; page < pages; page++ {
|
||||
newModel = append(newModel, db.GetGroupForPage(prefix, frontend, group, page, int(perpage)))
|
||||
}
|
||||
model = newModel
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (self *templateEngine) genCatalog(prefix, frontend, group string, wr io.Writer, db Database) {
|
||||
board := self.obtainBoard(prefix, frontend, group, db)
|
||||
catalog := new(catalogModel)
|
||||
catalog.prefix = prefix
|
||||
catalog.frontend = frontend
|
||||
catalog.board = group
|
||||
|
||||
for page, bm := range board {
|
||||
for _, th := range bm.Threads() {
|
||||
th.Update(db)
|
||||
catalog.threads = append(catalog.threads, &catalogItemModel{op: th.OP(), page: page, replycount: len(th.Replies())})
|
||||
}
|
||||
}
|
||||
self.writeTemplate("catalog.mustache", map[string]interface{}{"board": catalog}, wr)
|
||||
}
|
||||
|
||||
// generate a board page
|
||||
func (self *templateEngine) genBoardPage(allowFiles, requireCaptcha bool, prefix, frontend, newsgroup string, page int, wr io.Writer, db Database, json bool) {
|
||||
// get the board page model
|
||||
perpage, _ := db.GetThreadsPerPage(newsgroup)
|
||||
boardPage := db.GetGroupForPage(prefix, frontend, newsgroup, page, int(perpage))
|
||||
boardPage.Update(db)
|
||||
// render it
|
||||
if json {
|
||||
self.renderJSON(wr, boardPage)
|
||||
} else {
|
||||
form := renderPostForm(prefix, newsgroup, "", allowFiles, requireCaptcha)
|
||||
self.writeTemplate("board.mustache", map[string]interface{}{"board": boardPage, "page": page, "form": form}, wr)
|
||||
}
|
||||
}
|
||||
|
||||
func (self *templateEngine) genUkko(prefix, frontend string, wr io.Writer, database Database, json bool) {
|
||||
self.genUkkoPaginated(prefix, frontend, wr, database, 0, json)
|
||||
}
|
||||
|
||||
func (self *templateEngine) genUkkoPaginated(prefix, frontend string, wr io.Writer, database Database, page int, json bool) {
|
||||
var threads []ThreadModel
|
||||
for _, article := range database.GetLastBumpedThreadsPaginated("", 10, page*10) {
|
||||
root := article[0]
|
||||
thread, err := database.GetThreadModel(prefix, root)
|
||||
if err == nil {
|
||||
threads = append(threads, thread)
|
||||
}
|
||||
}
|
||||
obj := map[string]interface{}{"prefix": prefix, "threads": threads, "page": page}
|
||||
if page > 0 {
|
||||
obj["prev"] = map[string]interface{}{"no": page - 1}
|
||||
}
|
||||
if page < 10 {
|
||||
obj["next"] = map[string]interface{}{"no": page + 1}
|
||||
}
|
||||
if json {
|
||||
self.renderJSON(wr, obj)
|
||||
} else {
|
||||
// render ukko navbar
|
||||
navbar := make(map[string]interface{})
|
||||
navbar["name"] = "Overboard"
|
||||
navbar["frontend"] = frontend
|
||||
navbar["prefix"] = prefix
|
||||
// inject navbar
|
||||
obj["navbar"] = self.renderTemplate("navbar.mustache", navbar)
|
||||
// render
|
||||
self.writeTemplate("ukko.mustache", obj, wr)
|
||||
}
|
||||
}
|
||||
|
||||
func (self *templateEngine) genThread(allowFiles, requireCaptcha bool, root ArticleEntry, prefix, frontend string, wr io.Writer, db Database, json bool) {
|
||||
newsgroup := root.Newsgroup()
|
||||
msgid := root.MessageID()
|
||||
|
||||
if !db.HasArticleLocal(msgid) {
|
||||
log.Println("don't have", msgid, "locally, not regenerating")
|
||||
return
|
||||
}
|
||||
|
||||
t, err := db.GetThreadModel(prefix, msgid)
|
||||
if err == nil {
|
||||
if json {
|
||||
self.renderJSON(wr, t)
|
||||
} else {
|
||||
form := renderPostForm(prefix, newsgroup, msgid, allowFiles, requireCaptcha)
|
||||
self.writeTemplate("thread.mustache", map[string]interface{}{"thread": t, "board": map[string]interface{}{"Name": newsgroup, "Frontend": frontend, "AllowFiles": allowFiles}, "form": form}, wr)
|
||||
}
|
||||
} else {
|
||||
log.Println("templates: error getting thread for ", msgid, err.Error())
|
||||
}
|
||||
/*
|
||||
// get the board model, don't update the board
|
||||
board := self.obtainBoard(prefix, frontend, newsgroup, false, db)
|
||||
// find the thread model in question
|
||||
for _, pagemodel := range board {
|
||||
t := pagemodel.GetThread(msgid)
|
||||
if t != nil {
|
||||
// update thread
|
||||
t.Update(db)
|
||||
// render it
|
||||
if json {
|
||||
self.renderJSON(wr, t)
|
||||
} else {
|
||||
form := renderPostForm(prefix, newsgroup, msgid, allowFiles)
|
||||
self.writeTemplate("thread.mustache", map[string]interface{}{"thread": t, "board": pagemodel, "form": form}, wr)
|
||||
}
|
||||
return
|
||||
}
|
||||
}
|
||||
log.Println("thread not found for message id", msgid)
|
||||
return
|
||||
|
||||
// we didn't find it D:
|
||||
// reload everything
|
||||
// TODO: should we reload everything!?
|
||||
b := self.obtainBoard(prefix, frontend, newsgroup, true, db)
|
||||
// find the thread model in question
|
||||
for _, pagemodel := range b {
|
||||
t := pagemodel.GetThread(msgid)
|
||||
if t != nil {
|
||||
// we found it
|
||||
// render thread
|
||||
t.Update(db)
|
||||
if json {
|
||||
self.renderJSON(wr, t)
|
||||
} else {
|
||||
form := renderPostForm(prefix, newsgroup, msgid, allowFiles)
|
||||
self.writeTemplate("thread.mustache", map[string]interface{}{"thread": t, "board": pagemodel, "form": form}, wr)
|
||||
}
|
||||
self.groups_mtx.Lock()
|
||||
self.groups[newsgroup] = b
|
||||
self.groups_mtx.Unlock()
|
||||
return
|
||||
}
|
||||
}
|
||||
// it's not there wtf
|
||||
log.Println("thread not found for message id", msgid)
|
||||
*/
|
||||
}
|
||||
|
||||
// change the directory we are using for templates
|
||||
func (self *templateEngine) changeTemplateDir(dirname string) {
|
||||
log.Println("change template directory to", dirname)
|
||||
self.template_dir = dirname
|
||||
self.reloadAllTemplates()
|
||||
}
|
||||
|
||||
func (self *templateEngine) createNotFoundHandler(prefix, frontend string) (h http.Handler) {
|
||||
h = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
self.renderNotFound(w, r, prefix, frontend)
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// default renderer of 404 pages
|
||||
func (self *templateEngine) renderNotFound(wr http.ResponseWriter, r *http.Request, prefix, frontend string) {
|
||||
wr.WriteHeader(404)
|
||||
opts := make(map[string]interface{})
|
||||
opts["prefix"] = prefix
|
||||
opts["frontend"] = frontend
|
||||
self.writeTemplate("404.mustache", opts, wr)
|
||||
}
|
||||
|
||||
func newTemplateEngine(dir string) *templateEngine {
|
||||
return &templateEngine{
|
||||
templates: make(map[string]string),
|
||||
template_dir: dir,
|
||||
}
|
||||
}
|
||||
|
||||
func (self *templateEngine) findLink(prefix, hash string) (url string) {
|
||||
ents, _ := self.DB.GetCitesByPostHashLike(hash)
|
||||
if len(ents) > 0 {
|
||||
url = fmt.Sprintf("%st/%s/#%s", prefix, HashMessageID(ents[0].Reference()), HashMessageID(ents[0].MessageID()))
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
var template = newTemplateEngine(defaultTemplateDir())
|
||||
|
||||
func renderPostForm(prefix, board, op_msg_id string, files, captcha bool) string {
|
||||
url := prefix + "post/" + board
|
||||
button := "New Thread"
|
||||
if op_msg_id != "" {
|
||||
button = "Reply"
|
||||
}
|
||||
return template.renderTemplate("postform.mustache", map[string]interface{}{"post_url": url, "reference": op_msg_id, "button": button, "files": files, "prefix": prefix, "DisableCaptcha": !captcha})
|
||||
}
|
||||
|
||||
// generate misc graphs
|
||||
func (self *templateEngine) genGraphs(prefix string, wr io.Writer, db Database) {
|
||||
|
||||
//
|
||||
// begin gen history.html
|
||||
//
|
||||
|
||||
var all_posts postsGraph
|
||||
// this may take a bit
|
||||
posts := db.GetMonthlyPostHistory()
|
||||
|
||||
if posts == nil {
|
||||
// wtf?
|
||||
} else {
|
||||
for _, entry := range posts {
|
||||
all_posts = append(all_posts, postsGraphRow{
|
||||
day: entry.Time(),
|
||||
Num: entry.Count(),
|
||||
})
|
||||
}
|
||||
}
|
||||
sort.Sort(all_posts)
|
||||
|
||||
_, err := io.WriteString(wr, self.renderTemplate("graph_history.mustache", map[string]interface{}{"history": all_posts}))
|
||||
if err != nil {
|
||||
log.Println("error writing history graph", err)
|
||||
}
|
||||
|
||||
//
|
||||
// end gen history.html
|
||||
//
|
||||
|
||||
}
|
||||
|
||||
// generate front page and board list
|
||||
func (self *templateEngine) genFrontPage(top_count int, prefix, frontend_name string, indexwr, boardswr io.Writer, db Database) {
|
||||
// the graph for the front page
|
||||
var frontpage_graph boardPageRows
|
||||
|
||||
// for each group
|
||||
groups := db.GetAllNewsgroups()
|
||||
for _, group := range groups {
|
||||
// posts this hour
|
||||
hour := db.CountPostsInGroup(group, 3600)
|
||||
// posts today
|
||||
day := db.CountPostsInGroup(group, 86400)
|
||||
// posts total
|
||||
all := db.CountPostsInGroup(group, 0)
|
||||
frontpage_graph = append(frontpage_graph, boardPageRow{
|
||||
All: all,
|
||||
Day: day,
|
||||
Hour: hour,
|
||||
Board: group,
|
||||
})
|
||||
}
|
||||
|
||||
var posts_graph postsGraph
|
||||
|
||||
posts := db.GetLastDaysPosts(10)
|
||||
if posts == nil {
|
||||
// wtf?
|
||||
} else {
|
||||
for _, entry := range posts {
|
||||
posts_graph = append(posts_graph, postsGraphRow{
|
||||
day: entry.Time(),
|
||||
Num: entry.Count(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
models := db.GetLastPostedPostModels(prefix, 20)
|
||||
|
||||
wr := indexwr
|
||||
|
||||
param := make(map[string]interface{})
|
||||
|
||||
param["overview"] = self.renderTemplate("overview.mustache", map[string]interface{}{"overview": overviewModel(models)})
|
||||
|
||||
sort.Sort(posts_graph)
|
||||
param["postsgraph"] = self.renderTemplate("posts_graph.mustache", map[string]interface{}{"graph": posts_graph})
|
||||
sort.Sort(frontpage_graph)
|
||||
if len(frontpage_graph) > top_count {
|
||||
param["boardgraph"] = frontpage_graph[:top_count]
|
||||
} else {
|
||||
param["boardgraph"] = frontpage_graph
|
||||
}
|
||||
param["frontend"] = frontend_name
|
||||
param["totalposts"] = db.ArticleCount()
|
||||
|
||||
// render and inject navbar
|
||||
param["navbar"] = self.renderTemplate("navbar.mustache", map[string]interface{}{"name": "Front Page", "frontend": frontend_name, "prefix": prefix})
|
||||
|
||||
_, err := io.WriteString(wr, self.renderTemplate("frontpage.mustache", param))
|
||||
if err != nil {
|
||||
log.Println("error writing front page", err)
|
||||
}
|
||||
|
||||
wr = boardswr
|
||||
param["graph"] = frontpage_graph
|
||||
_, err = io.WriteString(wr, self.renderTemplate("boardlist.mustache", param))
|
||||
if err != nil {
|
||||
log.Println("error writing board list page", err)
|
||||
}
|
||||
}
|
||||
|
||||
func ReloadTemplates() {
|
||||
log.Println("reload templates")
|
||||
template.reloadAllTemplates()
|
||||
}
|
45
contrib/backends/srndv2/src/srnd/templates_test.go
Normal file
@ -0,0 +1,45 @@
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"log"
|
||||
"os"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func makeBenchmarkDB() Database {
|
||||
return NewDatabase("postgres", "srnd", "/var/run/postgresql", "", "", "")
|
||||
}
|
||||
|
||||
func BenchmarkRenderBoardPage(b *testing.B) {
|
||||
db := makeBenchmarkDB()
|
||||
db.CreateTables()
|
||||
defer db.Close()
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
wr, err := os.Create("boardpage.html")
|
||||
if err == nil {
|
||||
template.genBoardPage(true, true, "prefix", "test", "overchan.random", 0, wr, db, false)
|
||||
} else {
|
||||
log.Println("did not write", "boardpage.html", err)
|
||||
}
|
||||
wr.Close()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func BenchmarkRenderThread(b *testing.B) {
|
||||
db := makeBenchmarkDB()
|
||||
db.CreateTables()
|
||||
defer db.Close()
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
wr, err := os.Create("thread.html")
|
||||
if err == nil {
|
||||
template.genThread(true, true, ArticleEntry{"<c49be1451427261@nntp.nsfl.tk>", "overchan.random"}, "prefix", "frontend", wr, db, false)
|
||||
} else {
|
||||
log.Println("did not write", "thread.html", err)
|
||||
}
|
||||
wr.Close()
|
||||
}
|
||||
})
|
||||
}
|
169
contrib/backends/srndv2/src/srnd/tls.go
Normal file
@ -0,0 +1,169 @@
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"crypto/rand"
|
||||
"crypto/rsa"
|
||||
"crypto/tls"
|
||||
"crypto/x509"
|
||||
"crypto/x509/pkix"
|
||||
"encoding/pem"
|
||||
"errors"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"math/big"
|
||||
"net"
|
||||
"net/textproto"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var TlsNotSupported = errors.New("TLS not supported")
|
||||
var TlsFailedToLoadCA = errors.New("could not load CA files")
|
||||
|
||||
// handle STARTTLS on connection
|
||||
func HandleStartTLS(conn net.Conn, config *tls.Config) (econn *textproto.Conn, state tls.ConnectionState, err error) {
|
||||
if config == nil {
|
||||
_, err = io.WriteString(conn, "580 can not intitiate TLS negotiation\r\n")
|
||||
if err == nil {
|
||||
err = TlsNotSupported
|
||||
}
|
||||
} else {
|
||||
_, err = io.WriteString(conn, "382 Continue with TLS negotiation\r\n")
|
||||
if err == nil {
|
||||
// begin tls crap here
|
||||
tconn := tls.Server(conn, config)
|
||||
err = tconn.Handshake()
|
||||
state = tconn.ConnectionState()
|
||||
if err == nil {
|
||||
econn = textproto.NewConn(tconn)
|
||||
return
|
||||
} else {
|
||||
certs := state.PeerCertificates
|
||||
if len(certs) == 0 {
|
||||
log.Println("starttls failed, no peer certs provided")
|
||||
} else {
|
||||
for _, cert := range certs {
|
||||
for _, dns := range cert.DNSNames {
|
||||
log.Println("starttls peer cert from", dns, "not valid")
|
||||
}
|
||||
}
|
||||
}
|
||||
tconn.Close()
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func SendStartTLS(conn net.Conn, config *tls.Config) (econn *textproto.Conn, state tls.ConnectionState, err error) {
|
||||
_, err = io.WriteString(conn, "STARTTLS\r\n")
|
||||
if err == nil {
|
||||
r := bufio.NewReader(conn)
|
||||
var line string
|
||||
line, err = r.ReadString(10)
|
||||
if strings.HasPrefix(line, "382 ") {
|
||||
// we gud
|
||||
tconn := tls.Client(conn, config)
|
||||
// tls okay
|
||||
log.Println("TLS Handshake done", config.ServerName)
|
||||
state = tconn.ConnectionState()
|
||||
econn = textproto.NewConn(tconn)
|
||||
return
|
||||
} else {
|
||||
// it won't do tls
|
||||
err = TlsNotSupported
|
||||
}
|
||||
r = nil
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
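// Illustrative sketch (not part of this commit): upgrading an outbound feed
// connection with SendStartTLS. The peer address, the greeting handling and
// the way the *CryptoConfig is obtained are assumptions made for the example.
func dialFeedWithTLS(addr string, ccfg *CryptoConfig) (*textproto.Conn, error) {
	tcfg, err := GenTLS(ccfg)
	if err != nil {
		return nil, err
	}
	conn, err := net.Dial("tcp", addr)
	if err != nil {
		return nil, err
	}
	// consume the server greeting before requesting TLS
	if _, err := bufio.NewReader(conn).ReadString('\n'); err != nil {
		conn.Close()
		return nil, err
	}
	econn, _, err := SendStartTLS(conn, tcfg)
	if err != nil {
		conn.Close()
		return nil, err
	}
	return econn, nil
}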
// create base tls certificate
|
||||
func newTLSCert() x509.Certificate {
|
||||
return x509.Certificate{
|
||||
Subject: pkix.Name{
|
||||
Organization: []string{"overchan"},
|
||||
},
|
||||
NotBefore: time.Now(),
|
||||
NotAfter: time.Date(9005, 1, 1, 1, 1, 1, 1, time.UTC),
|
||||
ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth, x509.ExtKeyUsageClientAuth},
|
||||
BasicConstraintsValid: true,
|
||||
IsCA: true,
|
||||
}
|
||||
}
|
||||
|
||||
// generate tls config, private key and certificate
|
||||
func GenTLS(cfg *CryptoConfig) (tcfg *tls.Config, err error) {
|
||||
EnsureDir(cfg.cert_dir)
|
||||
// check for private key
|
||||
if !CheckFile(cfg.privkey_file) {
|
||||
// no private key, let's generate it
|
||||
log.Println("generating 4096 RSA private key...")
|
||||
k := newTLSCert()
|
||||
var priv *rsa.PrivateKey
|
||||
priv, err = rsa.GenerateKey(rand.Reader, 4096)
|
||||
if err == nil {
|
||||
serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 256)
|
||||
k.SerialNumber, err = rand.Int(rand.Reader, serialNumberLimit)
|
||||
k.DNSNames = append(k.DNSNames, cfg.hostname)
|
||||
k.Subject.CommonName = cfg.hostname
|
||||
if err == nil {
|
||||
var derBytes []byte
|
||||
derBytes, err = x509.CreateCertificate(rand.Reader, &k, &k, &priv.PublicKey, priv)
|
||||
var f io.WriteCloser
|
||||
f, err = os.Create(cfg.cert_file)
|
||||
if err == nil {
|
||||
err = pem.Encode(f, &pem.Block{Type: "CERTIFICATE", Bytes: derBytes})
|
||||
f.Close()
|
||||
if err == nil {
|
||||
f, err = os.Create(cfg.privkey_file)
|
||||
if err == nil {
|
||||
err = pem.Encode(f, &pem.Block{Type: "RSA PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(priv)})
|
||||
f.Close()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if err == nil {
|
||||
|
||||
caPool := x509.NewCertPool()
|
||||
var m []string
|
||||
log.Println("checking", cfg.cert_dir, "for certificates")
|
||||
m, err = filepath.Glob(filepath.Join(cfg.cert_dir, "*.crt"))
|
||||
log.Println("loading", len(m), "trusted certificates")
|
||||
var data []byte
|
||||
for _, f := range m {
|
||||
var d []byte
|
||||
d, err = ioutil.ReadFile(f)
|
||||
if err == nil {
|
||||
data = append(data, d...)
|
||||
} else {
|
||||
return
|
||||
}
|
||||
}
|
||||
ok := caPool.AppendCertsFromPEM(data)
|
||||
if !ok {
|
||||
err = TlsFailedToLoadCA
|
||||
return
|
||||
}
|
||||
// we should have the key generated and stored by now
|
||||
var cert tls.Certificate
|
||||
cert, err = tls.LoadX509KeyPair(cfg.cert_file, cfg.privkey_file)
|
||||
if err == nil {
|
||||
tcfg = &tls.Config{
|
||||
CipherSuites: []uint16{tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384},
|
||||
RootCAs: caPool,
|
||||
ClientCAs: caPool,
|
||||
Certificates: []tls.Certificate{cert},
|
||||
ClientAuth: tls.RequireAndVerifyClientCert,
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
90
contrib/backends/srndv2/src/srnd/tools.go
Normal file
@ -0,0 +1,90 @@
|
||||
//
|
||||
// tools.go -- srndv2 cli tool functions
|
||||
//
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"log"
|
||||
"os"
|
||||
)
|
||||
|
||||
// worker for thumbnailer tool
|
||||
func rethumb(chnl chan string, store ArticleStore, missing bool) {
|
||||
for {
|
||||
fname, has := <-chnl
|
||||
if !has {
|
||||
return
|
||||
}
|
||||
thm := store.ThumbnailFilepath(fname)
|
||||
if CheckFile(thm) {
|
||||
if missing {
|
||||
continue
|
||||
}
|
||||
log.Println("remove old thumbnail", thm)
|
||||
os.Remove(thm)
|
||||
}
|
||||
log.Println("generate thumbnail for", fname)
|
||||
store.GenerateThumbnail(fname)
|
||||
}
|
||||
}
|
||||
|
||||
// run thumbnailer
|
||||
func ThumbnailTool(threads int, missing bool) {
|
||||
conf := ReadConfig()
|
||||
if conf == nil {
|
||||
log.Println("cannot load config, ReadConfig() returned nil")
|
||||
return
|
||||
}
|
||||
store := createArticleStore(conf.store, nil)
|
||||
reThumbnail(threads, store, missing)
|
||||
}
|
||||
|
||||
func RegenTool() {
|
||||
conf := ReadConfig()
|
||||
db_host := conf.database["host"]
|
||||
db_port := conf.database["port"]
|
||||
db_user := conf.database["user"]
|
||||
db_passwd := conf.database["password"]
|
||||
db_type := conf.database["type"]
|
||||
db_sche := conf.database["schema"]
|
||||
db := NewDatabase(db_type, db_sche, db_host, db_port, db_user, db_passwd)
|
||||
groups := db.GetAllNewsgroups()
|
||||
if groups != nil {
|
||||
for _, group := range groups {
|
||||
go regenGroup(group, db)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func regenGroup(name string, db Database) {
|
||||
log.Println("regenerating", name)
|
||||
}
|
||||
|
||||
// run thumbnailer tool with the specified number of threads
|
||||
func reThumbnail(threads int, store ArticleStore, missing bool) {
|
||||
|
||||
chnl := make(chan string)
|
||||
|
||||
for threads > 0 {
|
||||
go rethumb(chnl, store, missing)
|
||||
threads--
|
||||
}
|
||||
|
||||
files, err := store.GetAllAttachments()
|
||||
if err == nil {
|
||||
for _, fname := range files {
|
||||
chnl <- fname
|
||||
}
|
||||
} else {
|
||||
log.Println("failed to read attachment directory", err)
|
||||
}
|
||||
close(chnl)
|
||||
log.Println("Rethumbnailing done")
|
||||
}
|
||||
|
||||
// generate a keypair from the command line
|
||||
func KeygenTool() {
|
||||
pub, sec := newSignKeypair()
|
||||
log.Println("public key:", pub)
|
||||
log.Println("secret key:", sec)
|
||||
}
|
645
contrib/backends/srndv2/src/srnd/util.go
Normal file
@ -0,0 +1,645 @@
|
||||
//
|
||||
// util.go -- various utilities
|
||||
//
|
||||
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"crypto/sha1"
|
||||
"crypto/sha512"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"github.com/majestrate/nacl"
|
||||
"io"
|
||||
"log"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/mail"
|
||||
"net/textproto"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
func DelFile(fname string) {
|
||||
if CheckFile(fname) {
|
||||
os.Remove(fname)
|
||||
}
|
||||
}
|
||||
|
||||
func CheckFile(fname string) bool {
|
||||
if _, err := os.Stat(fname); os.IsNotExist(err) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func IsDir(dirname string) bool {
|
||||
stat, err := os.Stat(dirname)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
return stat.IsDir()
|
||||
}
|
||||
|
||||
// ensure a directory exists
|
||||
func EnsureDir(dirname string) {
|
||||
stat, err := os.Stat(dirname)
|
||||
if os.IsNotExist(err) {
|
||||
os.Mkdir(dirname, 0755)
|
||||
} else if !stat.IsDir() {
|
||||
os.Remove(dirname)
|
||||
os.Mkdir(dirname, 0755)
|
||||
}
|
||||
}
|
||||
|
||||
var exp_valid_message_id = regexp.MustCompilePOSIX(`^<[a-zA-Z0-9$.]{2,128}@[a-zA-Z0-9\-.]{2,63}>$`)
|
||||
|
||||
func ValidMessageID(id string) bool {
|
||||
return exp_valid_message_id.MatchString(id)
|
||||
}
|
||||
|
||||
// message id hash
|
||||
func HashMessageID(msgid string) string {
|
||||
return fmt.Sprintf("%x", sha1.Sum([]byte(msgid)))
|
||||
}
|
||||
|
||||
// short message id hash
|
||||
func ShortHashMessageID(msgid string) string {
|
||||
return strings.ToLower(HashMessageID(msgid)[:18])
|
||||
}
|
||||
|
||||
// will this message id produce quads?
|
||||
func MessageIDWillDoQuads(msgid string) bool {
|
||||
h := HashMessageID(msgid)
|
||||
return h[0] == h[1] && h[1] == h[2] && h[2] == h[3]
|
||||
}
|
||||
|
||||
// will this message id produce trips?
|
||||
func MessageIDWillDoTrips(msgid string) bool {
|
||||
h := HashMessageID(msgid)
|
||||
return h[0] == h[1] && h[1] == h[2]
|
||||
}
|
||||
|
||||
// will this message id produce dubs?
|
||||
func MessageIDWillDoDubs(msgid string) bool {
|
||||
h := HashMessageID(msgid)
|
||||
return h[0] == h[1]
|
||||
}
|
||||
|
||||
// shorter message id hash
|
||||
func ShorterHashMessageID(msgid string) string {
|
||||
return strings.ToLower(HashMessageID(msgid)[:10])
|
||||
}
|
||||
|
||||
func OpenFileWriter(fname string) (io.WriteCloser, error) {
|
||||
return os.Create(fname)
|
||||
}
|
||||
|
||||
// make a random string
|
||||
func randStr(length int) string {
|
||||
return hex.EncodeToString(nacl.RandBytes(length))[length:]
|
||||
}
|
||||
|
||||
// time for right now as int64
|
||||
func timeNow() int64 {
|
||||
return time.Now().UTC().Unix()
|
||||
}
|
||||
|
||||
// sanitize data for nntp
|
||||
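// e.g. nntpSanitize("hi\r\n.\r\nbye") == "hi\n .\nbye"
// (dot-stuffing: a body line consisting of a single "." would otherwise end the article early)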
func nntpSanitize(data string) (ret string) {
|
||||
parts := strings.Split(data, "\n")
|
||||
lines := len(parts)
|
||||
for idx, part := range parts {
|
||||
part = strings.Replace(part, "\n", "", -1)
|
||||
part = strings.Replace(part, "\r", "", -1)
|
||||
if part == "." {
|
||||
part = " ."
|
||||
}
|
||||
ret += part
|
||||
if idx+1 < lines {
|
||||
ret += "\n"
|
||||
}
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
type int64Sorter []int64
|
||||
|
||||
func (self int64Sorter) Len() int {
|
||||
return len(self)
|
||||
}
|
||||
|
||||
func (self int64Sorter) Less(i, j int) bool {
|
||||
return self[i] < self[j]
|
||||
}
|
||||
|
||||
func (self int64Sorter) Swap(i, j int) {
|
||||
tmp := self[j]
|
||||
self[j] = self[i]
|
||||
self[i] = tmp
|
||||
}
|
||||
|
||||
// obtain the "real" ip address
|
||||
func getRealIP(name string) string {
|
||||
if len(name) > 0 {
|
||||
ip, err := net.ResolveIPAddr("ip", name)
|
||||
if err == nil {
|
||||
if ip.IP.IsGlobalUnicast() {
|
||||
return ip.IP.String()
|
||||
}
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// check that we have permission to access this
|
||||
// fatal on fail
|
||||
func checkPerms(fname string) {
|
||||
fstat, err := os.Stat(fname)
|
||||
if err != nil {
|
||||
log.Fatalf("Cannot access %s, %s", fname, err)
|
||||
}
|
||||
// check if we can access this dir
|
||||
if fstat.IsDir() {
|
||||
tmpfname := filepath.Join(fname, ".test")
|
||||
f, err := os.Create(tmpfname)
|
||||
if err != nil {
|
||||
log.Fatalf("No Write access in %s, %s", fname, err)
|
||||
}
|
||||
err = f.Close()
|
||||
if err != nil {
|
||||
log.Fatalf("failed to close test file %s !? %s", tmpfname, err)
|
||||
}
|
||||
err = os.Remove(tmpfname)
|
||||
if err != nil {
|
||||
log.Fatalf("failed to remove test file %s, %s", tmpfname, err)
|
||||
}
|
||||
} else {
|
||||
// this isn't a dir, treat it like a regular file
|
||||
f, err := os.Open(fname)
|
||||
if err != nil {
|
||||
log.Fatalf("cannot read file %s, %s", fname, err)
|
||||
}
|
||||
f.Close()
|
||||
}
|
||||
}
|
||||
|
||||
// number of bytes to use in otp
|
||||
func encAddrBytes() int {
|
||||
return 64
|
||||
}
|
||||
|
||||
// length of an encrypted clearnet address
|
||||
func encAddrLen() int {
|
||||
return 88
|
||||
}
|
||||
|
||||
// length of an i2p dest hash
|
||||
func i2pDestHashLen() int {
|
||||
return 44
|
||||
}
|
||||
|
||||
// given an address
|
||||
// generate a new encryption key for it
|
||||
// return the encryption key and the encrypted address
|
||||
func newAddrEnc(addr string) (string, string) {
|
||||
key_bytes := nacl.RandBytes(encAddrBytes())
|
||||
key := base64.StdEncoding.EncodeToString(key_bytes)
|
||||
return key, encAddr(addr, key)
|
||||
}
|
||||
|
||||
// xor address with a one time pad
|
||||
// if the address isn't long enough it's padded with spaces
|
||||
func encAddr(addr, key string) string {
|
||||
key_bytes, err := base64.StdEncoding.DecodeString(key)
|
||||
|
||||
if err != nil {
|
||||
log.Println("encAddr() key base64 decode", err)
|
||||
return ""
|
||||
}
|
||||
|
||||
if len(addr) > len(key_bytes) {
|
||||
log.Println("encAddr() len(addr) > len(key_bytes)")
|
||||
return ""
|
||||
}
|
||||
|
||||
// pad with spaces
|
||||
for len(addr) < len(key_bytes) {
|
||||
addr += " "
|
||||
}
|
||||
|
||||
addr_bytes := []byte(addr)
|
||||
res_bytes := make([]byte, len(addr_bytes))
|
||||
for idx, b := range key_bytes {
|
||||
res_bytes[idx] = addr_bytes[idx] ^ b
|
||||
}
|
||||
|
||||
return base64.StdEncoding.EncodeToString(res_bytes)
|
||||
}
|
||||
|
||||
func checkError(err error) {
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
// decrypt an address
|
||||
// strips any whitespaces
|
||||
func decAddr(encaddr, key string) string {
|
||||
encaddr_bytes, err := base64.StdEncoding.DecodeString(encaddr)
|
||||
if err != nil {
|
||||
log.Println("decAddr() encaddr base64 decode", err)
|
||||
return ""
|
||||
}
|
||||
if len(encaddr) != len(key) {
|
||||
log.Println("decAddr() len(encaddr_bytes) != len(key)")
|
||||
return ""
|
||||
}
|
||||
key_bytes, err := base64.StdEncoding.DecodeString(key)
|
||||
if err != nil {
|
||||
log.Println("decAddr() key base64 decode", err)
|
||||
}
|
||||
res_bytes := make([]byte, len(key))
|
||||
for idx, b := range key_bytes {
|
||||
res_bytes[idx] = encaddr_bytes[idx] ^ b
|
||||
}
|
||||
res := string(res_bytes)
|
||||
return strings.Trim(res, " ")
|
||||
}
|
||||
|
||||
var exp_valid_newsgroup = regexp.MustCompilePOSIX(`^[a-zA-Z0-9.]{1,128}$`)
|
||||
|
||||
func newsgroupValidFormat(newsgroup string) bool {
|
||||
return exp_valid_newsgroup.MatchString(newsgroup)
|
||||
}
|
||||
|
||||
func ValidNewsgroup(newsgroup string) bool {
|
||||
return newsgroupValidFormat(newsgroup)
|
||||
}
|
||||
|
||||
// generate a new signing keypair
|
||||
// public, secret
|
||||
func newSignKeypair() (string, string) {
|
||||
kp := nacl.GenSignKeypair()
|
||||
defer kp.Free()
|
||||
pk := kp.Public()
|
||||
sk := kp.Seed()
|
||||
return hex.EncodeToString(pk), hex.EncodeToString(sk)
|
||||
}
|
||||
|
||||
// make a utf-8 tripcode
|
||||
func makeTripcode(pk string) string {
|
||||
data, err := hex.DecodeString(pk)
|
||||
if err == nil {
|
||||
tripcode := ""
|
||||
// here is the python code this is based off of
|
||||
// i do something slightly different but this is the base
|
||||
//
|
||||
// for x in range(0, length / 2):
|
||||
// pub_short += '&#%i;' % (9600 + int(full_pubkey_hex[x*2:x*2+2], 16))
|
||||
// length -= length / 2
|
||||
// for x in range(0, length):
|
||||
// pub_short += '&#%i;' % (9600 + int(full_pubkey_hex[-(length*2):][x*2:x*2+2], 16))
|
||||
//
|
||||
for _, c := range data {
|
||||
ch := 9600
|
||||
ch += int(c)
|
||||
tripcode += fmt.Sprintf("&#%04d;", ch)
|
||||
}
|
||||
return tripcode
|
||||
}
|
||||
return "[invalid]"
|
||||
}
|
||||
|
||||
// generate a new message id with base name
|
||||
func genMessageID(name string) string {
|
||||
return fmt.Sprintf("<%s%d@%s>", randStr(5), timeNow(), name)
|
||||
}
|
||||
|
||||
// time now as a string timestamp
|
||||
func timeNowStr() string {
|
||||
return time.Unix(timeNow(), 0).UTC().Format(time.RFC1123Z)
|
||||
}
|
||||
|
||||
func queryGetInt64(q url.Values, key string, fallback int64) int64 {
|
||||
val := q.Get(key)
|
||||
if val != "" {
|
||||
i, err := strconv.ParseInt(val, 10, 64)
|
||||
if err == nil {
|
||||
return i
|
||||
}
|
||||
}
|
||||
return fallback
|
||||
}
|
||||
|
||||
// get from a map an int given a key or fall back to a default value
|
||||
func mapGetInt(m map[string]string, key string, fallback int) int {
|
||||
val, ok := m[key]
|
||||
if ok {
|
||||
i, err := strconv.ParseInt(val, 10, 32)
|
||||
if err == nil {
|
||||
return int(i)
|
||||
}
|
||||
}
|
||||
return fallback
|
||||
}
|
||||
|
||||
func isSage(str string) bool {
|
||||
str = strings.ToLower(str)
|
||||
return str == "sage" || strings.HasPrefix(str, "sage ")
|
||||
}
|
||||
|
||||
func unhex(str string) []byte {
|
||||
buff, _ := hex.DecodeString(str)
|
||||
return buff
|
||||
}
|
||||
|
||||
func hexify(data []byte) string {
|
||||
return hex.EncodeToString(data)
|
||||
}
|
||||
|
||||
// extract pubkey from secret key
|
||||
// return as hex
|
||||
func getSignPubkey(sk []byte) string {
|
||||
k, _ := nacl.GetSignPubkey(sk)
|
||||
return hexify(k)
|
||||
}
|
||||
|
||||
// sign data with secret key the fucky srnd way
|
||||
// return signature as hex
|
||||
func cryptoSign(h, sk []byte) string {
|
||||
// sign
|
||||
sig := nacl.CryptoSignFucky(h, sk)
|
||||
if sig == nil {
|
||||
return "[failed to sign]"
|
||||
}
|
||||
return hexify(sig)
|
||||
}
|
||||
|
||||
// given a tripcode after the #
|
||||
// make a seed byteslice
|
||||
func parseTripcodeSecret(str string) []byte {
|
||||
// try decoding hex
|
||||
raw := unhex(str)
|
||||
keylen := nacl.CryptoSignSeedLen()
|
||||
if raw == nil || len(raw) != keylen {
|
||||
// treat this as a "regular" chan tripcode
|
||||
// decode as bytes then pad the rest with 0s if it doesn't fit
|
||||
raw = make([]byte, keylen)
|
||||
str_bytes := []byte(str)
|
||||
if len(str_bytes) > keylen {
|
||||
copy(raw, str_bytes[:keylen])
|
||||
} else {
|
||||
copy(raw, str_bytes)
|
||||
}
|
||||
}
|
||||
return raw
|
||||
}
|
||||
|
||||
// generate a login salt for nntp users
|
||||
func genLoginCredSalt() (salt string) {
|
||||
salt = randStr(128)
|
||||
return
|
||||
}
|
||||
|
||||
// do nntp login credential hash given password and salt
|
||||
func nntpLoginCredHash(passwd, salt string) (str string) {
|
||||
var b []byte
|
||||
b = append(b, []byte(passwd)...)
|
||||
b = append(b, []byte(salt)...)
|
||||
h := sha512.Sum512(b)
|
||||
str = base64.StdEncoding.EncodeToString(h[:])
|
||||
return
|
||||
}
|
||||
|
||||
func IsSubnet(cidr string) (bool, *net.IPNet) {
|
||||
_, ipnet, err := net.ParseCIDR(cidr)
|
||||
if err == nil {
|
||||
return true, ipnet
|
||||
}
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func IPNet2MinMax(inet *net.IPNet) (min, max net.IP) {
|
||||
netb := []byte(inet.IP)
|
||||
maskb := []byte(inet.Mask)
|
||||
maxb := make([]byte, len(netb))
|
||||
|
||||
for i := range maxb {
|
||||
maxb[i] = netb[i] | (^maskb[i])
|
||||
}
|
||||
min = net.IP(netb)
|
||||
max = net.IP(maxb)
|
||||
return
|
||||
}
|
||||
|
||||
func ZeroIPString(ip net.IP) string {
|
||||
p := ip
|
||||
|
||||
if len(ip) == 0 {
|
||||
return "<nil>"
|
||||
}
|
||||
|
||||
if p4 := p.To4(); len(p4) == net.IPv4len {
|
||||
return fmt.Sprintf("%03d.%03d.%03d.%03d", p4[0], p4[1], p4[2], p4[3])
|
||||
}
|
||||
if len(p) == net.IPv6len {
|
||||
//>IPv6
|
||||
//ishygddt
|
||||
return fmt.Sprintf("[%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x]", p[0], p[1], p[2], p[3], p[4], p[5], p[6], p[7], p[8], p[9], p[10], p[11], p[12], p[13], p[14], p[15])
|
||||
}
|
||||
return "?"
|
||||
}
|
||||
|
||||
func getThreadHash(file string) (thread string) {
|
||||
exp := regexp.MustCompilePOSIX(`thread-([0-9a-f]+)\.*`)
|
||||
matches := exp.FindStringSubmatch(file)
|
||||
if len(matches) != 2 {
|
||||
return ""
|
||||
}
|
||||
thread = matches[1]
|
||||
return
|
||||
}
|
||||
|
||||
func getUkkoPage(file string) (page int) {
|
||||
exp := regexp.MustCompilePOSIX(`ukko-([0-9]+)\.*`)
|
||||
matches := exp.FindStringSubmatch(file)
|
||||
if len(matches) != 2 {
|
||||
return
|
||||
}
|
||||
var err error
|
||||
page, err = strconv.Atoi(matches[1])
|
||||
if err != nil {
|
||||
page = 0
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func getGroupAndPage(file string) (board string, page int) {
|
||||
exp := regexp.MustCompilePOSIX(`(.*)-([0-9]+)\.*`)
|
||||
matches := exp.FindStringSubmatch(file)
|
||||
if len(matches) != 3 {
|
||||
return "", -1
|
||||
}
|
||||
var err error
|
||||
board = matches[1]
|
||||
tmp := matches[2]
|
||||
page, err = strconv.Atoi(tmp)
|
||||
if err != nil {
|
||||
page = -1
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func getGroupForCatalog(file string) (group string) {
|
||||
exp := regexp.MustCompilePOSIX(`catalog-(.+)\.html`)
|
||||
matches := exp.FindStringSubmatch(file)
|
||||
if len(matches) != 2 {
|
||||
return ""
|
||||
}
|
||||
group = matches[1]
|
||||
return
|
||||
}
|
||||
|
||||
// get a message id from a mime header
|
||||
// checks many values
|
||||
func getMessageID(hdr textproto.MIMEHeader) (msgid string) {
|
||||
msgid = hdr.Get("Message-Id")
|
||||
if msgid == "" {
|
||||
msgid = hdr.Get("Message-ID")
|
||||
}
|
||||
if msgid == "" {
|
||||
msgid = hdr.Get("message-id")
|
||||
}
|
||||
if msgid == "" {
|
||||
msgid = hdr.Get("MESSAGE-ID")
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func getMessageIDFromArticleHeaders(hdr ArticleHeaders) (msgid string) {
|
||||
msgid = hdr.Get("Message-Id", hdr.Get("Message-ID", hdr.Get("message-id", hdr.Get("MESSAGE-ID", ""))))
|
||||
return
|
||||
}
|
||||
|
||||
func readMIMEHeader(r *bufio.Reader) (msg *mail.Message, err error) {
|
||||
msg, err = mail.ReadMessage(r)
|
||||
/*
|
||||
hdr = make(textproto.MIMEHeader)
|
||||
for {
|
||||
var str string
|
||||
str, err = r.ReadString(10)
|
||||
if err != nil {
|
||||
hdr = nil
|
||||
return
|
||||
}
|
||||
str = strings.Trim(str, "\r")
|
||||
str = strings.Trim(str, "\n")
|
||||
if str == "" {
|
||||
break
|
||||
}
|
||||
idx := strings.Index(str, ": ")
|
||||
if idx > 0 {
|
||||
hdrname := strings.Trim(str[:idx], " ")
|
||||
hdrval := strings.Trim(str[idx+2:], "\r\n")
|
||||
hdr.Add(hdrname, hdrval)
|
||||
} else {
|
||||
log.Println("invalid header", str)
|
||||
}
|
||||
}
|
||||
*/
|
||||
return
|
||||
}
|
||||
|
||||
// write out a mime header to a writer
|
||||
func writeMIMEHeader(wr io.Writer, hdr map[string][]string) (err error) {
|
||||
// write headers
|
||||
for k, vals := range hdr {
|
||||
for _, val := range vals {
|
||||
wr.Write([]byte(k))
|
||||
wr.Write([]byte(": "))
|
||||
wr.Write([]byte(val))
|
||||
_, err = wr.Write([]byte{10})
|
||||
}
|
||||
}
|
||||
// end of headers
|
||||
_, err = wr.Write([]byte{10})
|
||||
return
|
||||
}
|
||||
|
||||
// like ioutil.Discard but an io.WriteCloser
|
||||
type discardCloser struct {
|
||||
}
|
||||
|
||||
func (*discardCloser) Write(data []byte) (n int, err error) {
|
||||
n = len(data)
|
||||
return
|
||||
}
|
||||
|
||||
func (*discardCloser) Close() (err error) {
|
||||
return
|
||||
}
|
||||
|
||||
// like ioutil.Discard but an io.WriteCloser
|
||||
var Discard = new(discardCloser)
|
||||
|
||||
func extractParamFallback(param map[string]interface{}, k, fallback string) string {
|
||||
v, ok := param[k]
|
||||
if ok {
|
||||
return v.(string)
|
||||
}
|
||||
return fallback
|
||||
}
|
||||
|
||||
func extractParam(param map[string]interface{}, k string) string {
|
||||
return extractParamFallback(param, k, "")
|
||||
}
|
||||
|
||||
// get real ip address from an http request
|
||||
func extractRealIP(r *http.Request) (ip string, err error) {
|
||||
ip, _, err = net.SplitHostPort(r.RemoteAddr)
|
||||
if err != nil {
|
||||
log.Println("extract real ip: ", err)
|
||||
}
|
||||
// TODO: have in config upstream proxy ip and check for that
|
||||
if strings.HasPrefix(ip, "127.") {
|
||||
// if it's loopback check headers for reverse proxy headers
|
||||
// TODO: make sure this isn't a tor user being sneaky
|
||||
ip = getRealIP(r.Header.Get("X-Real-IP"))
|
||||
if ip == "" {
|
||||
// try X-Forwarded-For if X-Real-IP not set
|
||||
_ip := r.Header.Get("X-Forwarded-For")
|
||||
parts := strings.Split(_ip, ",")
|
||||
_ip = parts[0]
|
||||
ip = getRealIP(_ip)
|
||||
}
|
||||
if ip == "" {
|
||||
ip = "127.0.0.1"
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func serverPubkeyIsValid(pubkey string) bool {
|
||||
b := unhex(pubkey)
|
||||
return b != nil && len(b) == nacl.CryptoSignPubKeySize()
|
||||
}
|
||||
|
||||
func verifyFrontendSig(pubkey, sig, msgid string) bool {
|
||||
s := unhex(sig)
|
||||
k := unhex(pubkey)
|
||||
h := sha512.Sum512([]byte(msgid))
|
||||
return nacl.CryptoVerifyFucky(h[:], s, k)
|
||||
}
|
||||
|
||||
func msgidFrontendSign(sk []byte, msgid string) string {
|
||||
h := sha512.Sum512([]byte(msgid))
|
||||
return cryptoSign(h[:], sk)
|
||||
}
|
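For orientation, a minimal sketch of how a few of the helpers above fit together (illustrative only, not part of the commit; it assumes it is compiled inside the srnd package, and the server name and address values are placeholders):

// illustrative sketch, not part of the original commit
func exampleUtilRoundTrip() {
	// generate and validate a message id for a hypothetical server name
	msgid := genMessageID("example.tld")
	if !ValidMessageID(msgid) {
		log.Fatal("generated message id failed validation")
	}
	log.Println("short hash:", ShortHashMessageID(msgid))

	// encrypt a placeholder address with a fresh one-time pad, then recover it
	key, enc := newAddrEnc("10.0.0.1")
	if decAddr(enc, key) != "10.0.0.1" {
		log.Fatal("address round trip failed")
	}
}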
170
contrib/backends/srndv2/src/srnd/varnish_cache.go
Normal file
@ -0,0 +1,170 @@
|
||||
package srnd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
)
|
||||
|
||||
type VarnishCache struct {
|
||||
varnish_url string
|
||||
prefix string
|
||||
handler *nullHandler
|
||||
client *http.Client
|
||||
regenThreadChan chan ArticleEntry
|
||||
regenGroupChan chan groupRegenRequest
|
||||
}
|
||||
|
||||
func (self *VarnishCache) invalidate(r string) {
|
||||
u, _ := url.Parse(r)
|
||||
resp, err := self.client.Do(&http.Request{
|
||||
Method: "PURGE",
|
||||
URL: u,
|
||||
})
|
||||
if err == nil {
|
||||
resp.Body.Close()
|
||||
} else {
|
||||
log.Println("varnish cache error", err)
|
||||
}
|
||||
}
|
||||
|
||||
func (self *VarnishCache) DeleteBoardMarkup(group string) {
|
||||
n, _ := self.handler.database.GetPagesPerBoard(group)
|
||||
for n > 0 {
|
||||
go self.invalidate(fmt.Sprintf("%s%s%s-%d.html", self.varnish_url, self.prefix, group, n))
|
||||
go self.invalidate(fmt.Sprintf("%s%sb/%s/%d/", self.varnish_url, self.prefix, group, n))
|
||||
n--
|
||||
}
|
||||
self.invalidate(fmt.Sprintf("%s%sb/%s/", self.varnish_url, self.prefix, group))
|
||||
}
|
||||
|
||||
// try to delete root post's page
|
||||
func (self *VarnishCache) DeleteThreadMarkup(root_post_id string) {
|
||||
self.invalidate(fmt.Sprintf("%s%sthread-%s.html", self.varnish_url, self.prefix, HashMessageID(root_post_id)))
|
||||
self.invalidate(fmt.Sprintf("%s%st/%s/", self.varnish_url, self.prefix, HashMessageID(root_post_id)))
|
||||
}
|
||||
|
||||
// regen every newsgroup
|
||||
func (self *VarnishCache) RegenAll() {
|
||||
// we do this since it's used by regen on startup for the frontend
|
||||
groups := self.handler.database.GetAllNewsgroups()
|
||||
for _, group := range groups {
|
||||
self.handler.database.GetGroupThreads(group, self.regenThreadChan)
|
||||
}
|
||||
}
|
||||
|
||||
func (self *VarnishCache) RegenFrontPage() {
|
||||
self.invalidate(fmt.Sprintf("%s%s", self.varnish_url, self.prefix))
|
||||
}
|
||||
|
||||
func (self *VarnishCache) invalidateUkko() {
|
||||
// TODO: invalidate paginated ukko
|
||||
self.invalidate(fmt.Sprintf("%s%sukko.html", self.varnish_url, self.prefix))
|
||||
self.invalidate(fmt.Sprintf("%s%soverboard/", self.varnish_url, self.prefix))
|
||||
self.invalidate(fmt.Sprintf("%s%so/", self.varnish_url, self.prefix))
|
||||
// TODO: this is lazy af
|
||||
self.RegenFrontPage()
|
||||
}
|
||||
|
||||
func (self *VarnishCache) pollRegen() {
|
||||
for {
|
||||
select {
|
||||
// consume regen requests
|
||||
case ev := <-self.regenGroupChan:
|
||||
{
|
||||
self.invalidate(fmt.Sprintf("%s%s%s-%d.html", self.varnish_url, self.prefix, ev.group, ev.page))
|
||||
self.invalidate(fmt.Sprintf("%s%sb/%s/%d/", self.varnish_url, self.prefix, ev.group, ev.page))
|
||||
if ev.page == 0 {
|
||||
self.invalidate(fmt.Sprintf("%s%sb/%s/", self.varnish_url, self.prefix, ev.group))
|
||||
}
|
||||
}
|
||||
case ev := <-self.regenThreadChan:
|
||||
{
|
||||
self.Regen(ev)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// regen every page of the board
|
||||
func (self *VarnishCache) RegenerateBoard(group string) {
|
||||
n, _ := self.handler.database.GetPagesPerBoard(group)
|
||||
for n > 0 {
|
||||
go self.invalidate(fmt.Sprintf("%s%s%s-%d.html", self.varnish_url, self.prefix, group, n))
|
||||
go self.invalidate(fmt.Sprintf("%s%s%s/%d/", self.varnish_url, self.prefix, group, n))
|
||||
n--
|
||||
}
|
||||
self.invalidate(fmt.Sprintf("%s%sb/%s/", self.varnish_url, self.prefix, group))
|
||||
}
|
||||
|
||||
// regenerate pages after a mod event
|
||||
func (self *VarnishCache) RegenOnModEvent(newsgroup, msgid, root string, page int) {
|
||||
self.regenGroupChan <- groupRegenRequest{newsgroup, page}
|
||||
self.regenThreadChan <- ArticleEntry{newsgroup, root}
|
||||
}
|
||||
|
||||
func (self *VarnishCache) Start() {
|
||||
go self.pollRegen()
|
||||
}
|
||||
|
||||
func (self *VarnishCache) Regen(msg ArticleEntry) {
|
||||
go self.invalidate(fmt.Sprintf("%s%s%s-%d.html", self.varnish_url, self.prefix, msg.Newsgroup(), 0))
|
||||
go self.invalidate(fmt.Sprintf("%s%s%s/%d/", self.varnish_url, self.prefix, msg.Newsgroup(), 0))
|
||||
go self.invalidate(fmt.Sprintf("%s%sthread-%s.html", self.varnish_url, self.prefix, HashMessageID(msg.MessageID())))
|
||||
go self.invalidate(fmt.Sprintf("%s%st/%s/", self.varnish_url, self.prefix, HashMessageID(msg.MessageID())))
|
||||
self.invalidateUkko()
|
||||
}
|
||||
|
||||
func (self *VarnishCache) GetThreadChan() chan ArticleEntry {
|
||||
return self.regenThreadChan
|
||||
}
|
||||
|
||||
func (self *VarnishCache) GetGroupChan() chan groupRegenRequest {
|
||||
return self.regenGroupChan
|
||||
}
|
||||
|
||||
func (self *VarnishCache) GetHandler() http.Handler {
|
||||
return self.handler
|
||||
}
|
||||
|
||||
func (self *VarnishCache) Close() {
|
||||
// nothing to do
|
||||
}
|
||||
|
||||
func (self *VarnishCache) SetRequireCaptcha(required bool) {
|
||||
self.handler.requireCaptcha = required
|
||||
}
|
||||
|
||||
func NewVarnishCache(varnish_url, bind_addr, prefix, webroot, name string, attachments bool, db Database, store ArticleStore) CacheInterface {
|
||||
cache := new(VarnishCache)
|
||||
cache.regenThreadChan = make(chan ArticleEntry, 16)
|
||||
cache.regenGroupChan = make(chan groupRegenRequest, 8)
|
||||
local_addr, err := net.ResolveTCPAddr("tcp", bind_addr)
|
||||
if err != nil {
|
||||
log.Fatalf("failed to resolve %s for varnish cache: %s", bind_addr, err)
|
||||
}
|
||||
cache.client = &http.Client{
|
||||
Transport: &http.Transport{
|
||||
Dial: func(network, addr string) (c net.Conn, err error) {
|
||||
var remote_addr *net.TCPAddr
|
||||
remote_addr, err = net.ResolveTCPAddr(network, addr)
|
||||
if err == nil {
|
||||
c, err = net.DialTCP(network, local_addr, remote_addr)
|
||||
}
|
||||
return
|
||||
},
|
||||
},
|
||||
}
|
||||
cache.prefix = prefix
|
||||
cache.handler = &nullHandler{
|
||||
prefix: prefix,
|
||||
name: name,
|
||||
attachments: attachments,
|
||||
database: db,
|
||||
requireCaptcha: true,
|
||||
}
|
||||
cache.varnish_url = varnish_url
|
||||
return cache
|
||||
}
|
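As a usage note (an illustrative sketch only, not part of the commit): the cache purges pages by sending HTTP PURGE requests to the configured varnish URL, and regen work is fed in through the two channels. Assuming an already configured Database and ArticleStore (db and store are placeholders below), wiring it up might look like:

// illustrative sketch, not part of the original commit; db and store are placeholders
cache := NewVarnishCache("http://127.0.0.1:6081/", "127.0.0.1:0", "/", "webroot", "nntpchan", true, db, store)
cache.Start()
// queue a thread regen; the poller translates it into PURGE requests for the thread and overboard pages
cache.GetThreadChan() <- ArticleEntry{"overchan.test", "<example@example.tld>"}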
12
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/.gitignore
generated
vendored
Normal file
@ -0,0 +1,12 @@
|
||||
*.6
|
||||
*.8
|
||||
*.o
|
||||
*.so
|
||||
*.out
|
||||
*.go~
|
||||
*.cgo?.*
|
||||
_cgo_*
|
||||
_obj
|
||||
_test
|
||||
_testmain.go
|
||||
*.swp
|
9
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/.travis.yml
generated
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
language: go
|
||||
|
||||
go:
|
||||
- 1.4
|
||||
- 1.5
|
||||
- 1.6
|
||||
- tip
|
||||
|
||||
script: make ci
|
19
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
Copyright (c) 2009 Michael Hoisie
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
25
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/Makefile
generated
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
.PHONY: all
|
||||
all: fmt vet lint test
|
||||
|
||||
.PHONY:
|
||||
get-deps:
|
||||
go get github.com/golang/lint/golint
|
||||
|
||||
.PHONY: test
|
||||
test:
|
||||
go test -cover ./...
|
||||
|
||||
.PHONY: fmt
|
||||
fmt:
|
||||
go fmt ./...
|
||||
|
||||
.PHONY: vet
|
||||
vet:
|
||||
go vet ./...
|
||||
|
||||
.PHONY: lint
|
||||
lint:
|
||||
golint ./...
|
||||
|
||||
.PHONY: ci
|
||||
ci: fmt vet test
|
181
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/README.md
generated
vendored
Normal file
@ -0,0 +1,181 @@
|
||||
# Mustache Template Engine for Go
|
||||
|
||||
[](https://travis-ci.org/cbroglie/mustache)
|
||||
|
||||
## Why a Fork?
|
||||
|
||||
I forked [hoisie/mustache](https://github.com/hoisie/mustache) because it does not appear to be maintained, and I wanted to add the following functionality:
|
||||
- Update the API to follow the idiomatic Go convention of returning errors (this is a breaking change)
|
||||
- Add option to treat missing variables as errors
|
||||
|
||||
## Overview
|
||||
|
||||
This library is an implementation of the Mustache template language in Go.
|
||||
|
||||
### Mustache Spec Compliance
|
||||
|
||||
https://github.com/mustache/spec contains the formal standard for Mustache, and it is added as a submodule (using v1.1.3) for testing compliance. Currently ~40% of tests are failing, and the optional lambda support is not implemented. You can see which tests are disabled (because they are failing) by looking at spec_test.go. Getting all tests passing is my top priority (time permitting), and any PRs to that end are welcome.
|
||||
|
||||
## Documentation
|
||||
|
||||
For more information about mustache, check out the [mustache project page](http://github.com/defunkt/mustache) or the [mustache manual](http://mustache.github.com/mustache.5.html).
|
||||
|
||||
Also check out some [example mustache files](http://github.com/defunkt/mustache/tree/master/examples/).
|
||||
|
||||
## Installation
|
||||
To install mustache.go, simply run `go get github.com/cbroglie/mustache`. To use it in a program, use `import "github.com/cbroglie/mustache"`.
|
||||
|
||||
## Usage
|
||||
There are four main methods in this package:
|
||||
|
||||
```go
|
||||
Render(data string, context ...interface{}) (string, error)
|
||||
|
||||
RenderFile(filename string, context ...interface{}) (string, error)
|
||||
|
||||
ParseString(data string) (*Template, error)
|
||||
|
||||
ParseFile(filename string) (*Template, error)
|
||||
```
|
||||
|
||||
There are also two additional methods for using layouts (explained below), as well as several more that provide custom partial retrieval.
|
||||
|
||||
The Render method takes a string and a data source, which is generally a map or struct, and returns the output string. If the template file contains an error, the return value is a description of the error. There's a similar method, RenderFile, which takes a filename as an argument and uses that for the template contents.
|
||||
|
||||
```go
|
||||
data, err := mustache.Render("hello {{c}}", map[string]string{"c": "world"})
|
||||
```
|
||||
|
||||
If you're planning to render the same template multiple times, you can do it more efficiently by compiling the template first:
|
||||
|
||||
```go
|
||||
tmpl, _ := mustache.ParseString("hello {{c}}")
|
||||
var buf bytes.Buffer
|
||||
for i := 0; i < 10; i++ {
|
||||
tmpl.FRender(&buf, map[string]string{"c": "world"})
|
||||
}
|
||||
```
|
||||
|
||||
For more example usage, please see `mustache_test.go`
|
||||
|
||||
## Escaping
|
||||
|
||||
mustache.go follows the official mustache HTML escaping rules. That is, if you enclose a variable with two curly brackets, `{{var}}`, the contents are HTML-escaped. For instance, strings like `5 > 2` are converted to `5 &gt; 2`. To use raw characters, use three curly brackets `{{{var}}}`.
|
||||
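For example (an illustrative snippet, not from the original README):

```go
escaped, _ := mustache.Render("{{var}}", map[string]string{"var": "5 > 2"}) // "5 &gt; 2"
raw, _ := mustache.Render("{{{var}}}", map[string]string{"var": "5 > 2"})   // "5 > 2"
```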
|
||||
## Layouts
|
||||
|
||||
It is a common pattern to include a template file as a "wrapper" for other templates. The wrapper may include a header and a footer, for instance. Mustache.go supports this pattern with the following two methods:
|
||||
|
||||
```go
|
||||
RenderInLayout(data string, layout string, context ...interface{}) (string, error)
|
||||
|
||||
RenderFileInLayout(filename string, layoutFile string, context ...interface{}) (string, error)
|
||||
```
|
||||
|
||||
The layout file must have a variable called `{{content}}`. For example, given the following files:
|
||||
|
||||
layout.html.mustache:
|
||||
|
||||
```html
|
||||
<html>
|
||||
<head><title>Hi</title></head>
|
||||
<body>
|
||||
{{{content}}}
|
||||
</body>
|
||||
</html>
|
||||
```
|
||||
|
||||
template.html.mustache:
|
||||
|
||||
```html
|
||||
<h1>Hello World!</h1>
|
||||
```
|
||||
|
||||
A call to `RenderFileInLayout("template.html.mustache", "layout.html.mustache", nil)` will produce:
|
||||
|
||||
```html
|
||||
<html>
|
||||
<head><title>Hi</title></head>
|
||||
<body>
|
||||
<h1>Hello World!</h1>
|
||||
</body>
|
||||
</html>
|
||||
```
|
||||
|
||||
## Custom PartialProvider
|
||||
|
||||
Mustache.go has been extended to support a user-defined repository for mustache partials, instead of the default of requiring file-based templates.
|
||||
|
||||
Several new top-level functions have been introduced to take advantage of this:
|
||||
|
||||
```go
|
||||
|
||||
func RenderPartials(data string, partials PartialProvider, context ...interface{}) (string, error)
|
||||
|
||||
func RenderInLayoutPartials(data string, layoutData string, partials PartialProvider, context ...interface{}) (string, error)
|
||||
|
||||
func ParseStringPartials(data string, partials PartialProvider) (*Template, error)
|
||||
|
||||
func ParseFilePartials(filename string, partials PartialProvider) (*Template, error)
|
||||
|
||||
```
|
||||
|
||||
A `PartialProvider` is any object that responds to `Get(string)
|
||||
(*Template,error)`, and two examples are provided: a `FileProvider` that
|
||||
recreates the old behavior (and is indeed used internally for backwards
|
||||
compatibility); and a `StaticProvider` alias for a `map[string]string`. Using
|
||||
either of these is simple:
|
||||
|
||||
```go
|
||||
|
||||
fp := &FileProvider{
|
||||
Paths: []string{ "", "/opt/mustache", "templates/" },
|
||||
Extensions: []string{ "", ".stache", ".mustache" },
|
||||
}
|
||||
|
||||
tmpl, err := ParseStringPartials("This partial is loaded from a file: {{>foo}}", fp)
|
||||
|
||||
sp := StaticProvider(map[string]string{
|
||||
"foo": "{{>bar}}",
|
||||
"bar": "some data",
|
||||
})
|
||||
|
||||
tmpl, err := ParseStringPartials("This partial is loaded from a map: {{>foo}}", sp)
|
||||
```
|
||||
|
||||
## A note about method receivers
|
||||
|
||||
Mustache.go supports calling methods on objects, but you have to be aware of Go's limitations. For example, let's say you have the following type:
|
||||
|
||||
```go
|
||||
type Person struct {
|
||||
FirstName string
|
||||
LastName string
|
||||
}
|
||||
|
||||
func (p *Person) Name1() string {
|
||||
return p.FirstName + " " + p.LastName
|
||||
}
|
||||
|
||||
func (p Person) Name2() string {
|
||||
return p.FirstName + " " + p.LastName
|
||||
}
|
||||
```
|
||||
|
||||
While they appear to be identical methods, `Name1` has a pointer receiver, and `Name2` has a value receiver. Objects of type `Person` (non-pointer) can only access `Name2`, while objects of type `*Person` (pointer) can access both. This is by design in the Go language.
|
||||
|
||||
So if you write the following:
|
||||
|
||||
```go
|
||||
mustache.Render("{{Name1}}", Person{"John", "Smith"})
|
||||
```
|
||||
|
||||
It'll be blank. You either have to use `&Person{"John", "Smith"}`, or call `Name2` instead.
|
||||
|
||||
## Supported features
|
||||
|
||||
* Variables
|
||||
* Comments
|
||||
* Change delimiter
|
||||
* Sections (boolean, enumerable, and inverted)
|
||||
* Partials
|
749
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/mustache.go
generated
vendored
Normal file
@ -0,0 +1,749 @@
|
||||
package mustache
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"html/template"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
|
||||
// AllowMissingVariables defines the behavior for a variable "miss." If it
|
||||
// is true (the default), an empty string is emitted. If it is false, an error
|
||||
// is generated instead.
|
||||
AllowMissingVariables = true
|
||||
)
|
||||
|
||||
// A TagType represents the specific type of mustache tag that a Tag
|
||||
// represents. The zero TagType is not a valid type.
|
||||
type TagType uint
|
||||
|
||||
// Constants representing the possible Tag types
|
||||
const (
|
||||
Invalid TagType = iota
|
||||
Variable
|
||||
Section
|
||||
InvertedSection
|
||||
Partial
|
||||
)
|
||||
|
||||
func (t TagType) String() string {
|
||||
if int(t) < len(tagNames) {
|
||||
return tagNames[t]
|
||||
}
|
||||
return "type" + strconv.Itoa(int(t))
|
||||
}
|
||||
|
||||
var tagNames = []string{
|
||||
Invalid: "Invalid",
|
||||
Variable: "Variable",
|
||||
Section: "Section",
|
||||
InvertedSection: "InvertedSection",
|
||||
Partial: "Partial",
|
||||
}
|
||||
|
||||
// Tag represents the different mustache tag types.
|
||||
//
|
||||
// Not all methods apply to all kinds of tags. Restrictions, if any, are noted
|
||||
// in the documentation for each method. Use the Type method to find out the
|
||||
// type of tag before calling type-specific methods. Calling a method
|
||||
// inappropriate to the type of tag causes a run time panic.
|
||||
type Tag interface {
|
||||
// Type returns the type of the tag.
|
||||
Type() TagType
|
||||
// Name returns the name of the tag.
|
||||
Name() string
|
||||
// Tags returns any child tags. It panics for tag types which cannot contain
|
||||
// child tags (i.e. variable tags).
|
||||
Tags() []Tag
|
||||
}
|
||||
|
||||
type textElement struct {
|
||||
text []byte
|
||||
}
|
||||
|
||||
type varElement struct {
|
||||
name string
|
||||
raw bool
|
||||
}
|
||||
|
||||
type sectionElement struct {
|
||||
name string
|
||||
inverted bool
|
||||
startline int
|
||||
elems []interface{}
|
||||
}
|
||||
|
||||
type partialElement struct {
|
||||
name string
|
||||
prov PartialProvider
|
||||
}
|
||||
|
||||
// Template represents a compiled mustache template
|
||||
type Template struct {
|
||||
data string
|
||||
otag string
|
||||
ctag string
|
||||
p int
|
||||
curline int
|
||||
dir string
|
||||
elems []interface{}
|
||||
partial PartialProvider
|
||||
}
|
||||
|
||||
type parseError struct {
|
||||
line int
|
||||
message string
|
||||
}
|
||||
|
||||
// Tags returns the mustache tags for the given template
|
||||
func (tmpl *Template) Tags() []Tag {
|
||||
return extractTags(tmpl.elems)
|
||||
}
|
||||
|
||||
func extractTags(elems []interface{}) []Tag {
|
||||
tags := make([]Tag, 0, len(elems))
|
||||
for _, elem := range elems {
|
||||
switch elem := elem.(type) {
|
||||
case *varElement:
|
||||
tags = append(tags, elem)
|
||||
case *sectionElement:
|
||||
tags = append(tags, elem)
|
||||
case *partialElement:
|
||||
tags = append(tags, elem)
|
||||
}
|
||||
}
|
||||
return tags
|
||||
}
|
||||
|
||||
func (e *varElement) Type() TagType {
|
||||
return Variable
|
||||
}
|
||||
|
||||
func (e *varElement) Name() string {
|
||||
return e.name
|
||||
}
|
||||
|
||||
func (e *varElement) Tags() []Tag {
|
||||
panic("mustache: Tags on Variable type")
|
||||
}
|
||||
|
||||
func (e *sectionElement) Type() TagType {
|
||||
if e.inverted {
|
||||
return InvertedSection
|
||||
}
|
||||
return Section
|
||||
}
|
||||
|
||||
func (e *sectionElement) Name() string {
|
||||
return e.name
|
||||
}
|
||||
|
||||
func (e *sectionElement) Tags() []Tag {
|
||||
return extractTags(e.elems)
|
||||
}
|
||||
|
||||
func (e *partialElement) Type() TagType {
|
||||
return Partial
|
||||
}
|
||||
|
||||
func (e *partialElement) Name() string {
|
||||
return e.name
|
||||
}
|
||||
|
||||
func (e *partialElement) Tags() []Tag {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p parseError) Error() string {
|
||||
return fmt.Sprintf("line %d: %s", p.line, p.message)
|
||||
}
|
||||
|
||||
func (tmpl *Template) readString(s string) (string, error) {
|
||||
newlines := 0
|
||||
for i := tmpl.p; ; i++ {
|
||||
//are we at the end of the string?
|
||||
if i+len(s) > len(tmpl.data) {
|
||||
return tmpl.data[tmpl.p:], io.EOF
|
||||
}
|
||||
|
||||
if tmpl.data[i] == '\n' {
|
||||
newlines++
|
||||
}
|
||||
|
||||
if tmpl.data[i] != s[0] {
|
||||
continue
|
||||
}
|
||||
|
||||
match := true
|
||||
for j := 1; j < len(s); j++ {
|
||||
if s[j] != tmpl.data[i+j] {
|
||||
match = false
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if match {
|
||||
e := i + len(s)
|
||||
text := tmpl.data[tmpl.p:e]
|
||||
tmpl.p = e
|
||||
|
||||
tmpl.curline += newlines
|
||||
return text, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (tmpl *Template) parsePartial(name string) (*partialElement, error) {
|
||||
var prov PartialProvider
|
||||
if tmpl.partial == nil {
|
||||
prov = &FileProvider{
|
||||
Paths: []string{tmpl.dir, " "},
|
||||
}
|
||||
} else {
|
||||
prov = tmpl.partial
|
||||
}
|
||||
|
||||
return &partialElement{
|
||||
name: name,
|
||||
prov: prov,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (tmpl *Template) parseSection(section *sectionElement) error {
|
||||
for {
|
||||
text, err := tmpl.readString(tmpl.otag)
|
||||
|
||||
if err == io.EOF {
|
||||
return parseError{section.startline, "Section " + section.name + " has no closing tag"}
|
||||
}
|
||||
|
||||
// put text into an item
|
||||
text = text[0 : len(text)-len(tmpl.otag)]
|
||||
section.elems = append(section.elems, &textElement{[]byte(text)})
|
||||
if tmpl.p < len(tmpl.data) && tmpl.data[tmpl.p] == '{' {
|
||||
text, err = tmpl.readString("}" + tmpl.ctag)
|
||||
} else {
|
||||
text, err = tmpl.readString(tmpl.ctag)
|
||||
}
|
||||
|
||||
if err == io.EOF {
|
||||
//put the remaining text in a block
|
||||
return parseError{tmpl.curline, "unmatched open tag"}
|
||||
}
|
||||
|
||||
//trim the close tag off the text
|
||||
tag := strings.TrimSpace(text[0 : len(text)-len(tmpl.ctag)])
|
||||
|
||||
if len(tag) == 0 {
|
||||
return parseError{tmpl.curline, "empty tag"}
|
||||
}
|
||||
switch tag[0] {
|
||||
case '!':
|
||||
//ignore comment
|
||||
break
|
||||
case '#', '^':
|
||||
name := strings.TrimSpace(tag[1:])
|
||||
|
||||
//ignore the newline when a section starts
|
||||
if len(tmpl.data) > tmpl.p && tmpl.data[tmpl.p] == '\n' {
|
||||
tmpl.p++
|
||||
} else if len(tmpl.data) > tmpl.p+1 && tmpl.data[tmpl.p] == '\r' && tmpl.data[tmpl.p+1] == '\n' {
|
||||
tmpl.p += 2
|
||||
}
|
||||
|
||||
se := sectionElement{name, tag[0] == '^', tmpl.curline, []interface{}{}}
|
||||
err := tmpl.parseSection(&se)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
section.elems = append(section.elems, &se)
|
||||
case '/':
|
||||
name := strings.TrimSpace(tag[1:])
|
||||
if name != section.name {
|
||||
return parseError{tmpl.curline, "interleaved closing tag: " + name}
|
||||
}
|
||||
return nil
|
||||
case '>':
|
||||
name := strings.TrimSpace(tag[1:])
|
||||
partial, err := tmpl.parsePartial(name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
section.elems = append(section.elems, partial)
|
||||
case '=':
|
||||
if tag[len(tag)-1] != '=' {
|
||||
return parseError{tmpl.curline, "Invalid meta tag"}
|
||||
}
|
||||
tag = strings.TrimSpace(tag[1 : len(tag)-1])
|
||||
newtags := strings.SplitN(tag, " ", 2)
|
||||
if len(newtags) == 2 {
|
||||
tmpl.otag = newtags[0]
|
||||
tmpl.ctag = newtags[1]
|
||||
}
|
||||
case '{':
|
||||
if tag[len(tag)-1] == '}' {
|
||||
//use a raw tag
|
||||
section.elems = append(section.elems, &varElement{tag[1 : len(tag)-1], true})
|
||||
}
|
||||
default:
|
||||
section.elems = append(section.elems, &varElement{tag, false})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (tmpl *Template) parse() error {
|
||||
for {
|
||||
text, err := tmpl.readString(tmpl.otag)
|
||||
if err == io.EOF {
|
||||
//put the remaining text in a block
|
||||
tmpl.elems = append(tmpl.elems, &textElement{[]byte(text)})
|
||||
return nil
|
||||
}
|
||||
|
||||
// put text into an item
|
||||
text = text[0 : len(text)-len(tmpl.otag)]
|
||||
tmpl.elems = append(tmpl.elems, &textElement{[]byte(text)})
|
||||
|
||||
if tmpl.p < len(tmpl.data) && tmpl.data[tmpl.p] == '{' {
|
||||
text, err = tmpl.readString("}" + tmpl.ctag)
|
||||
} else {
|
||||
text, err = tmpl.readString(tmpl.ctag)
|
||||
}
|
||||
|
||||
if err == io.EOF {
|
||||
//put the remaining text in a block
|
||||
return parseError{tmpl.curline, "unmatched open tag"}
|
||||
}
|
||||
|
||||
//trim the close tag off the text
|
||||
tag := strings.TrimSpace(text[0 : len(text)-len(tmpl.ctag)])
|
||||
if len(tag) == 0 {
|
||||
return parseError{tmpl.curline, "empty tag"}
|
||||
}
|
||||
switch tag[0] {
|
||||
case '!':
|
||||
//ignore comment
|
||||
break
|
||||
case '#', '^':
|
||||
name := strings.TrimSpace(tag[1:])
|
||||
|
||||
if len(tmpl.data) > tmpl.p && tmpl.data[tmpl.p] == '\n' {
|
||||
tmpl.p++
|
||||
} else if len(tmpl.data) > tmpl.p+1 && tmpl.data[tmpl.p] == '\r' && tmpl.data[tmpl.p+1] == '\n' {
|
||||
tmpl.p += 2
|
||||
}
|
||||
|
||||
se := sectionElement{name, tag[0] == '^', tmpl.curline, []interface{}{}}
|
||||
err := tmpl.parseSection(&se)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
tmpl.elems = append(tmpl.elems, &se)
|
||||
case '/':
|
||||
return parseError{tmpl.curline, "unmatched close tag"}
|
||||
case '>':
|
||||
name := strings.TrimSpace(tag[1:])
|
||||
partial, err := tmpl.parsePartial(name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
tmpl.elems = append(tmpl.elems, partial)
|
||||
case '=':
|
||||
if tag[len(tag)-1] != '=' {
|
||||
return parseError{tmpl.curline, "Invalid meta tag"}
|
||||
}
|
||||
tag = strings.TrimSpace(tag[1 : len(tag)-1])
|
||||
newtags := strings.SplitN(tag, " ", 2)
|
||||
if len(newtags) == 2 {
|
||||
tmpl.otag = newtags[0]
|
||||
tmpl.ctag = newtags[1]
|
||||
}
|
||||
case '{':
|
||||
//use a raw tag
|
||||
if tag[len(tag)-1] == '}' {
|
||||
tmpl.elems = append(tmpl.elems, &varElement{tag[1 : len(tag)-1], true})
|
||||
}
|
||||
default:
|
||||
tmpl.elems = append(tmpl.elems, &varElement{tag, false})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Evaluate interfaces and pointers looking for a value that can look up the name, via a
|
||||
// struct field, method, or map key, and return the result of the lookup.
|
||||
func lookup(contextChain []interface{}, name string, allowMissing bool) (reflect.Value, error) {
|
||||
// dot notation
|
||||
if name != "." && strings.Contains(name, ".") {
|
||||
parts := strings.SplitN(name, ".", 2)
|
||||
|
||||
v, err := lookup(contextChain, parts[0], allowMissing)
|
||||
if err != nil {
|
||||
return v, err
|
||||
}
|
||||
return lookup([]interface{}{v}, parts[1], allowMissing)
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
fmt.Printf("Panic while looking up %q: %s\n", name, r)
|
||||
}
|
||||
}()
|
||||
|
||||
Outer:
|
||||
for _, ctx := range contextChain {
|
||||
v := ctx.(reflect.Value)
|
||||
for v.IsValid() {
|
||||
typ := v.Type()
|
||||
if n := v.Type().NumMethod(); n > 0 {
|
||||
for i := 0; i < n; i++ {
|
||||
m := typ.Method(i)
|
||||
mtyp := m.Type
|
||||
if m.Name == name && mtyp.NumIn() == 1 {
|
||||
return v.Method(i).Call(nil)[0], nil
|
||||
}
|
||||
}
|
||||
}
|
||||
if name == "." {
|
||||
return v, nil
|
||||
}
|
||||
switch av := v; av.Kind() {
|
||||
case reflect.Ptr:
|
||||
v = av.Elem()
|
||||
case reflect.Interface:
|
||||
v = av.Elem()
|
||||
case reflect.Struct:
|
||||
ret := av.FieldByName(name)
|
||||
if ret.IsValid() {
|
||||
return ret, nil
|
||||
}
|
||||
continue Outer
|
||||
case reflect.Map:
|
||||
ret := av.MapIndex(reflect.ValueOf(name))
|
||||
if ret.IsValid() {
|
||||
return ret, nil
|
||||
}
|
||||
continue Outer
|
||||
default:
|
||||
continue Outer
|
||||
}
|
||||
}
|
||||
}
|
||||
if allowMissing {
|
||||
return reflect.Value{}, nil
|
||||
}
|
||||
return reflect.Value{}, fmt.Errorf("Missing variable %q", name)
|
||||
}
|
||||
|
||||
func isEmpty(v reflect.Value) bool {
|
||||
if !v.IsValid() || v.Interface() == nil {
|
||||
return true
|
||||
}
|
||||
|
||||
valueInd := indirect(v)
|
||||
if !valueInd.IsValid() {
|
||||
return true
|
||||
}
|
||||
switch val := valueInd; val.Kind() {
|
||||
case reflect.Bool:
|
||||
return !val.Bool()
|
||||
case reflect.Slice:
|
||||
return val.Len() == 0
|
||||
case reflect.String:
|
||||
return len(strings.TrimSpace(val.String())) == 0
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func indirect(v reflect.Value) reflect.Value {
|
||||
loop:
|
||||
for v.IsValid() {
|
||||
switch av := v; av.Kind() {
|
||||
case reflect.Ptr:
|
||||
v = av.Elem()
|
||||
case reflect.Interface:
|
||||
v = av.Elem()
|
||||
default:
|
||||
break loop
|
||||
}
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
func renderSection(section *sectionElement, contextChain []interface{}, buf io.Writer) error {
|
||||
value, err := lookup(contextChain, section.name, true)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var context = contextChain[len(contextChain)-1].(reflect.Value)
|
||||
var contexts = []interface{}{}
|
||||
// if the value is nil, check if it's an inverted section
|
||||
isEmpty := isEmpty(value)
|
||||
if isEmpty && !section.inverted || !isEmpty && section.inverted {
|
||||
return nil
|
||||
} else if !section.inverted {
|
||||
valueInd := indirect(value)
|
||||
switch val := valueInd; val.Kind() {
|
||||
case reflect.Slice:
|
||||
for i := 0; i < val.Len(); i++ {
|
||||
contexts = append(contexts, val.Index(i))
|
||||
}
|
||||
case reflect.Array:
|
||||
for i := 0; i < val.Len(); i++ {
|
||||
contexts = append(contexts, val.Index(i))
|
||||
}
|
||||
case reflect.Map, reflect.Struct:
|
||||
contexts = append(contexts, value)
|
||||
default:
|
||||
contexts = append(contexts, context)
|
||||
}
|
||||
} else if section.inverted {
|
||||
contexts = append(contexts, context)
|
||||
}
|
||||
|
||||
chain2 := make([]interface{}, len(contextChain)+1)
|
||||
copy(chain2[1:], contextChain)
|
||||
//by default we execute the section
|
||||
for _, ctx := range contexts {
|
||||
chain2[0] = ctx
|
||||
for _, elem := range section.elems {
|
||||
renderElement(elem, chain2, buf)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func renderElement(element interface{}, contextChain []interface{}, buf io.Writer) error {
|
||||
switch elem := element.(type) {
|
||||
case *textElement:
|
||||
buf.Write(elem.text)
|
||||
case *varElement:
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
fmt.Printf("Panic while looking up %q: %s\n", elem.name, r)
|
||||
}
|
||||
}()
|
||||
val, err := lookup(contextChain, elem.name, AllowMissingVariables)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if val.IsValid() {
|
||||
if elem.raw {
|
||||
fmt.Fprint(buf, val.Interface())
|
||||
} else {
|
||||
s := fmt.Sprint(val.Interface())
|
||||
template.HTMLEscape(buf, []byte(s))
|
||||
}
|
||||
}
|
||||
case *sectionElement:
|
||||
if err := renderSection(elem, contextChain, buf); err != nil {
|
||||
return err
|
||||
}
|
||||
case *partialElement:
|
||||
partial, err := elem.prov.Get(elem.name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := partial.renderTemplate(contextChain, buf); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (tmpl *Template) renderTemplate(contextChain []interface{}, buf io.Writer) error {
|
||||
for _, elem := range tmpl.elems {
|
||||
if err := renderElement(elem, contextChain, buf); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// FRender uses the given data source - generally a map or struct - to
|
||||
// render the compiled template to an io.Writer.
|
||||
func (tmpl *Template) FRender(out io.Writer, context ...interface{}) error {
|
||||
var contextChain []interface{}
|
||||
for _, c := range context {
|
||||
val := reflect.ValueOf(c)
|
||||
contextChain = append(contextChain, val)
|
||||
}
|
||||
return tmpl.renderTemplate(contextChain, out)
|
||||
}
|
||||
|
||||
// Render uses the given data source - generally a map or struct - to render
|
||||
// the compiled template and return the output.
|
||||
func (tmpl *Template) Render(context ...interface{}) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := tmpl.FRender(&buf, context...)
|
||||
return buf.String(), err
|
||||
}
|
||||
|
||||
// RenderInLayout uses the given data source - generally a map or struct - to
|
||||
// render the compiled template and layout "wrapper" template and return the
|
||||
// output.
|
||||
func (tmpl *Template) RenderInLayout(layout *Template, context ...interface{}) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
err := tmpl.FRenderInLayout(&buf, layout, context...)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// FRenderInLayout uses the given data source - generally a map or
|
||||
// struct - to render the compiled template and a layout "wrapper"
|
||||
// template to an io.Writer.
|
||||
func (tmpl *Template) FRenderInLayout(out io.Writer, layout *Template, context ...interface{}) error {
|
||||
content, err := tmpl.Render(context...)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
allContext := make([]interface{}, len(context)+1)
|
||||
copy(allContext[1:], context)
|
||||
allContext[0] = map[string]string{"content": content}
|
||||
return layout.FRender(out, allContext...)
|
||||
}
|
||||
|
||||
// ParseString compiles a mustache template string. The resulting output can
|
||||
// be used to efficiently render the template multiple times with different data
|
||||
// sources.
|
||||
func ParseString(data string) (*Template, error) {
|
||||
return ParseStringPartials(data, nil)
|
||||
}
|
||||
|
||||
// ParseStringPartials compiles a mustache template string, retrieving any
|
||||
// required partials from the given provider. The resulting output can be used
|
||||
// to efficiently render the template multiple times with different data
|
||||
// sources.
|
||||
func ParseStringPartials(data string, partials PartialProvider) (*Template, error) {
|
||||
cwd := os.Getenv("CWD")
|
||||
tmpl := Template{data, "{{", "}}", 0, 1, cwd, []interface{}{}, partials}
|
||||
err := tmpl.parse()
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &tmpl, err
|
||||
}
|
||||
|
||||
// ParseFile loads a mustache template string from a file and compiles it. The
|
||||
// resulting output can be used to efficiently render the template multiple
|
||||
// times with different data sources.
|
||||
func ParseFile(filename string) (*Template, error) {
|
||||
return ParseFilePartials(filename, nil)
|
||||
}
|
||||
|
||||
// ParseFilePartials loads a mustache template string from a file, retrieving any
|
||||
// required partials from the given provider, and compiles it. The resulting
|
||||
// output can be used to efficiently render the template multiple times with
|
||||
// different data sources.
|
||||
func ParseFilePartials(filename string, partials PartialProvider) (*Template, error) {
|
||||
data, err := ioutil.ReadFile(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
dirname, _ := path.Split(filename)
|
||||
|
||||
tmpl := Template{string(data), "{{", "}}", 0, 1, dirname, []interface{}{}, partials}
|
||||
err = tmpl.parse()
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &tmpl, nil
|
||||
}
|
||||
|
||||
// Render compiles a mustache template string and uses the given data source
|
||||
// - generally a map or struct - to render the template and return the output.
|
||||
func Render(data string, context ...interface{}) (string, error) {
|
||||
return RenderPartials(data, nil, context...)
|
||||
}
|
||||
|
||||
// RenderPartials compiles a mustache template string and uses the given partial
|
||||
// provider and data source - generally a map or struct - to render the template
|
||||
// and return the output.
|
||||
func RenderPartials(data string, partials PartialProvider, context ...interface{}) (string, error) {
|
||||
var tmpl *Template
|
||||
var err error
|
||||
if partials == nil {
|
||||
tmpl, err = ParseString(data)
|
||||
} else {
|
||||
tmpl, err = ParseStringPartials(data, partials)
|
||||
}
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return tmpl.Render(context...)
|
||||
}
|
||||
|
||||
// RenderInLayout compiles a mustache template string and layout "wrapper" and
|
||||
// uses the given data source - generally a map or struct - to render the
|
||||
// compiled templates and return the output.
|
||||
func RenderInLayout(data string, layoutData string, context ...interface{}) (string, error) {
|
||||
return RenderInLayoutPartials(data, layoutData, nil, context...)
|
||||
}
|
||||
|
||||
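// RenderInLayoutPartials compiles a mustache template string and layout "wrapper", retrieving any
// required partials from the given provider, and uses the given data source - generally a map or
// struct - to render the compiled templates and return the output.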
func RenderInLayoutPartials(data string, layoutData string, partials PartialProvider, context ...interface{}) (string, error) {
|
||||
var layoutTmpl, tmpl *Template
|
||||
var err error
|
||||
if partials == nil {
|
||||
layoutTmpl, err = ParseString(layoutData)
|
||||
} else {
|
||||
layoutTmpl, err = ParseStringPartials(layoutData, partials)
|
||||
}
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if partials == nil {
|
||||
tmpl, err = ParseString(data)
|
||||
} else {
|
||||
tmpl, err = ParseStringPartials(data, partials)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return tmpl.RenderInLayout(layoutTmpl, context...)
|
||||
}
|
||||
|
||||
// RenderFile loads a mustache template string from a file and compiles it, and
|
||||
// then uses the given data source - generally a map or struct - to render
|
||||
// the template and return the output.
|
||||
func RenderFile(filename string, context ...interface{}) (string, error) {
|
||||
tmpl, err := ParseFile(filename)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return tmpl.Render(context...)
|
||||
}
|
||||
|
||||
// RenderFileInLayout loads a mustache template string and layout "wrapper"
|
||||
// template string from files and compiles them, and then uses the given
|
||||
// data source - generally a map or struct - to render the compiled templates
|
||||
// and return the output.
|
||||
func RenderFileInLayout(filename string, layoutFile string, context ...interface{}) (string, error) {
|
||||
layoutTmpl, err := ParseFile(layoutFile)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
tmpl, err := ParseFile(filename)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return tmpl.RenderInLayout(layoutTmpl, context...)
|
||||
}
|
545
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/mustache_test.go
generated
vendored
Normal file
@ -0,0 +1,545 @@
|
||||
package mustache
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
type Test struct {
|
||||
tmpl string
|
||||
context interface{}
|
||||
expected string
|
||||
err error
|
||||
}
|
||||
|
||||
type Data struct {
|
||||
A bool
|
||||
B string
|
||||
}
|
||||
|
||||
type User struct {
|
||||
Name string
|
||||
ID int64
|
||||
}
|
||||
|
||||
type Settings struct {
|
||||
Allow bool
|
||||
}
|
||||
|
||||
func (u User) Func1() string {
|
||||
return u.Name
|
||||
}
|
||||
|
||||
func (u *User) Func2() string {
|
||||
return u.Name
|
||||
}
|
||||
|
||||
func (u *User) Func3() (map[string]string, error) {
|
||||
return map[string]string{"name": u.Name}, nil
|
||||
}
|
||||
|
||||
func (u *User) Func4() (map[string]string, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (u *User) Func5() (*Settings, error) {
|
||||
return &Settings{true}, nil
|
||||
}
|
||||
|
||||
func (u *User) Func6() ([]interface{}, error) {
|
||||
var v []interface{}
|
||||
v = append(v, &Settings{true})
|
||||
return v, nil
|
||||
}
|
||||
|
||||
func (u User) Truefunc1() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (u *User) Truefunc2() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func makeVector(n int) []interface{} {
|
||||
var v []interface{}
|
||||
for i := 0; i < n; i++ {
|
||||
v = append(v, &User{"Mike", 1})
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
type Category struct {
|
||||
Tag string
|
||||
Description string
|
||||
}
|
||||
|
||||
func (c Category) DisplayName() string {
|
||||
return c.Tag + " - " + c.Description
|
||||
}
|
||||
|
||||
var tests = []Test{
|
||||
{`hello world`, nil, "hello world", nil},
|
||||
{`hello {{name}}`, map[string]string{"name": "world"}, "hello world", nil},
|
||||
{`{{var}}`, map[string]string{"var": "5 > 2"}, "5 &gt; 2", nil},
|
||||
{`{{{var}}}`, map[string]string{"var": "5 > 2"}, "5 > 2", nil},
|
||||
{`{{a}}{{b}}{{c}}{{d}}`, map[string]string{"a": "a", "b": "b", "c": "c", "d": "d"}, "abcd", nil},
|
||||
{`0{{a}}1{{b}}23{{c}}456{{d}}89`, map[string]string{"a": "a", "b": "b", "c": "c", "d": "d"}, "0a1b23c456d89", nil},
|
||||
{`hello {{! comment }}world`, map[string]string{}, "hello world", nil},
|
||||
{`{{ a }}{{=<% %>=}}<%b %><%={{ }}=%>{{ c }}`, map[string]string{"a": "a", "b": "b", "c": "c"}, "abc", nil},
|
||||
{`{{ a }}{{= <% %> =}}<%b %><%= {{ }}=%>{{c}}`, map[string]string{"a": "a", "b": "b", "c": "c"}, "abc", nil},
|
||||
|
||||
//section tests
|
||||
{`{{#A}}{{B}}{{/A}}`, Data{true, "hello"}, "hello", nil},
|
||||
{`{{#A}}{{{B}}}{{/A}}`, Data{true, "5 > 2"}, "5 > 2", nil},
|
||||
{`{{#A}}{{B}}{{/A}}`, Data{true, "5 > 2"}, "5 > 2", nil},
|
||||
{`{{#A}}{{B}}{{/A}}`, Data{false, "hello"}, "", nil},
|
||||
{`{{a}}{{#b}}{{b}}{{/b}}{{c}}`, map[string]string{"a": "a", "b": "b", "c": "c"}, "abc", nil},
|
||||
{`{{#A}}{{B}}{{/A}}`, struct {
|
||||
A []struct {
|
||||
B string
|
||||
}
|
||||
}{[]struct {
|
||||
B string
|
||||
}{{"a"}, {"b"}, {"c"}}},
|
||||
"abc",
|
||||
nil,
|
||||
},
|
||||
{`{{#A}}{{b}}{{/A}}`, struct{ A []map[string]string }{[]map[string]string{{"b": "a"}, {"b": "b"}, {"b": "c"}}}, "abc", nil},
|
||||
|
||||
{`{{#users}}{{Name}}{{/users}}`, map[string]interface{}{"users": []User{{"Mike", 1}}}, "Mike", nil},
|
||||
|
||||
{`{{#users}}gone{{Name}}{{/users}}`, map[string]interface{}{"users": nil}, "", nil},
|
||||
{`{{#users}}gone{{Name}}{{/users}}`, map[string]interface{}{"users": (*User)(nil)}, "", nil},
|
||||
{`{{#users}}gone{{Name}}{{/users}}`, map[string]interface{}{"users": []User{}}, "", nil},
|
||||
|
||||
{`{{#users}}{{Name}}{{/users}}`, map[string]interface{}{"users": []*User{{"Mike", 1}}}, "Mike", nil},
|
||||
{`{{#users}}{{Name}}{{/users}}`, map[string]interface{}{"users": []interface{}{&User{"Mike", 12}}}, "Mike", nil},
|
||||
{`{{#users}}{{Name}}{{/users}}`, map[string]interface{}{"users": makeVector(1)}, "Mike", nil},
|
||||
{`{{Name}}`, User{"Mike", 1}, "Mike", nil},
|
||||
{`{{Name}}`, &User{"Mike", 1}, "Mike", nil},
|
||||
{"{{#users}}\n{{Name}}\n{{/users}}", map[string]interface{}{"users": makeVector(2)}, "Mike\nMike\n", nil},
|
||||
{"{{#users}}\r\n{{Name}}\r\n{{/users}}", map[string]interface{}{"users": makeVector(2)}, "Mike\r\nMike\r\n", nil},
|
||||
{"{{#users}}Hi {{Name}}{{/users}}", map[string]interface{}{"users": ""}, "", nil},
|
||||
{"{{#users}}Hi {{Name}}{{/users}}", map[string]interface{}{"users": []interface{}{}}, "", nil},
|
||||
{"{{#users}}Hi {{Name}}{{/users}}", map[string]interface{}{"users": false}, "", nil},
|
||||
|
||||
//section does not exist
|
||||
{`{{#has}}{{/has}}`, &User{"Mike", 1}, "", nil},
|
||||
|
||||
// implicit iterator tests
|
||||
{`"{{#list}}({{.}}){{/list}}"`, map[string]interface{}{"list": []string{"a", "b", "c", "d", "e"}}, "\"(a)(b)(c)(d)(e)\"", nil},
|
||||
{`"{{#list}}({{.}}){{/list}}"`, map[string]interface{}{"list": []int{1, 2, 3, 4, 5}}, "\"(1)(2)(3)(4)(5)\"", nil},
|
||||
{`"{{#list}}({{.}}){{/list}}"`, map[string]interface{}{"list": []float64{1.10, 2.20, 3.30, 4.40, 5.50}}, "\"(1.1)(2.2)(3.3)(4.4)(5.5)\"", nil},
|
||||
|
||||
//inverted section tests
|
||||
{`{{a}}{{^b}}b{{/b}}{{c}}`, map[string]interface{}{"a": "a", "b": false, "c": "c"}, "abc", nil},
|
||||
{`{{^a}}b{{/a}}`, map[string]interface{}{"a": false}, "b", nil},
|
||||
{`{{^a}}b{{/a}}`, map[string]interface{}{"a": true}, "", nil},
|
||||
{`{{^a}}b{{/a}}`, map[string]interface{}{"a": "nonempty string"}, "", nil},
|
||||
{`{{^a}}b{{/a}}`, map[string]interface{}{"a": []string{}}, "b", nil},
|
||||
{`{{a}}{{^b}}b{{/b}}{{c}}`, map[string]string{"a": "a", "c": "c"}, "abc", nil},
|
||||
|
||||
//function tests
|
||||
{`{{#users}}{{Func1}}{{/users}}`, map[string]interface{}{"users": []User{{"Mike", 1}}}, "Mike", nil},
|
||||
{`{{#users}}{{Func1}}{{/users}}`, map[string]interface{}{"users": []*User{{"Mike", 1}}}, "Mike", nil},
|
||||
{`{{#users}}{{Func2}}{{/users}}`, map[string]interface{}{"users": []*User{{"Mike", 1}}}, "Mike", nil},
|
||||
|
||||
{`{{#users}}{{#Func3}}{{name}}{{/Func3}}{{/users}}`, map[string]interface{}{"users": []*User{{"Mike", 1}}}, "Mike", nil},
|
||||
{`{{#users}}{{#Func4}}{{name}}{{/Func4}}{{/users}}`, map[string]interface{}{"users": []*User{{"Mike", 1}}}, "", nil},
|
||||
{`{{#Truefunc1}}abcd{{/Truefunc1}}`, User{"Mike", 1}, "abcd", nil},
|
||||
{`{{#Truefunc1}}abcd{{/Truefunc1}}`, &User{"Mike", 1}, "abcd", nil},
|
||||
{`{{#Truefunc2}}abcd{{/Truefunc2}}`, &User{"Mike", 1}, "abcd", nil},
|
||||
{`{{#Func5}}{{#Allow}}abcd{{/Allow}}{{/Func5}}`, &User{"Mike", 1}, "abcd", nil},
|
||||
{`{{#user}}{{#Func5}}{{#Allow}}abcd{{/Allow}}{{/Func5}}{{/user}}`, map[string]interface{}{"user": &User{"Mike", 1}}, "abcd", nil},
|
||||
{`{{#user}}{{#Func6}}{{#Allow}}abcd{{/Allow}}{{/Func6}}{{/user}}`, map[string]interface{}{"user": &User{"Mike", 1}}, "abcd", nil},
|
||||
|
||||
//context chaining
|
||||
{`hello {{#section}}{{name}}{{/section}}`, map[string]interface{}{"section": map[string]string{"name": "world"}}, "hello world", nil},
|
||||
{`hello {{#section}}{{name}}{{/section}}`, map[string]interface{}{"name": "bob", "section": map[string]string{"name": "world"}}, "hello world", nil},
|
||||
{`hello {{#bool}}{{#section}}{{name}}{{/section}}{{/bool}}`, map[string]interface{}{"bool": true, "section": map[string]string{"name": "world"}}, "hello world", nil},
|
||||
{`{{#users}}{{canvas}}{{/users}}`, map[string]interface{}{"canvas": "hello", "users": []User{{"Mike", 1}}}, "hello", nil},
|
||||
{`{{#categories}}{{DisplayName}}{{/categories}}`, map[string][]*Category{
|
||||
"categories": {&Category{"a", "b"}},
|
||||
}, "a - b", nil},
|
||||
|
||||
//dotted names(dot notation)
|
||||
{`"{{person.name}}" == "{{#person}}{{name}}{{/person}}"`, map[string]interface{}{"person": map[string]string{"name": "Joe"}}, `"Joe" == "Joe"`, nil},
|
||||
{`"{{{person.name}}}" == "{{#person}}{{{name}}}{{/person}}"`, map[string]interface{}{"person": map[string]string{"name": "Joe"}}, `"Joe" == "Joe"`, nil},
|
||||
{`"{{a.b.c.d.e.name}}" == "Phil"`, map[string]interface{}{"a": map[string]interface{}{"b": map[string]interface{}{"c": map[string]interface{}{"d": map[string]interface{}{"e": map[string]string{"name": "Phil"}}}}}}, `"Phil" == "Phil"`, nil},
|
||||
{`"{{#a}}{{b.c.d.e.name}}{{/a}}" == "Phil"`, map[string]interface{}{"a": map[string]interface{}{"b": map[string]interface{}{"c": map[string]interface{}{"d": map[string]interface{}{"e": map[string]string{"name": "Phil"}}}}}, "b": map[string]interface{}{"c": map[string]interface{}{"d": map[string]interface{}{"e": map[string]string{"name": "Wrong"}}}}}, `"Phil" == "Phil"`, nil},
|
||||
{`{{#a}}{{b.c}}{{/a}}`, map[string]interface{}{"a": map[string]interface{}{"b": map[string]string{}}, "b": map[string]string{"c": "ERROR"}}, "", nil},
|
||||
}
|
||||
|
||||
func TestBasic(t *testing.T) {
|
||||
// Default behavior, AllowMissingVariables=true
|
||||
for _, test := range tests {
|
||||
output, err := Render(test.tmpl, test.context)
|
||||
if err != nil {
|
||||
t.Errorf("%q expected %q but got error %q", test.tmpl, test.expected, err.Error())
|
||||
} else if output != test.expected {
|
||||
t.Errorf("%q expected %q got %q", test.tmpl, test.expected, output)
|
||||
}
|
||||
}
|
||||
|
||||
// Now set AllowMissingVariables=false and test again
|
||||
AllowMissingVariables = false
|
||||
defer func() { AllowMissingVariables = true }()
|
||||
for _, test := range tests {
|
||||
output, err := Render(test.tmpl, test.context)
|
||||
if err != nil {
|
||||
t.Errorf("%s expected %s but got error %s", test.tmpl, test.expected, err.Error())
|
||||
} else if output != test.expected {
|
||||
t.Errorf("%q expected %q got %q", test.tmpl, test.expected, output)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var missing = []Test{
|
||||
//does not exist
|
||||
{`{{dne}}`, map[string]string{"name": "world"}, "", nil},
|
||||
{`{{dne}}`, User{"Mike", 1}, "", nil},
|
||||
{`{{dne}}`, &User{"Mike", 1}, "", nil},
|
||||
//dotted names(dot notation)
|
||||
{`"{{a.b.c}}" == ""`, map[string]interface{}{}, `"" == ""`, nil},
|
||||
{`"{{a.b.c.name}}" == ""`, map[string]interface{}{"a": map[string]interface{}{"b": map[string]string{}}, "c": map[string]string{"name": "Jim"}}, `"" == ""`, nil},
|
||||
}
|
||||
|
||||
func TestMissing(t *testing.T) {
|
||||
// Default behavior, AllowMissingVariables=true
|
||||
for _, test := range missing {
|
||||
output, err := Render(test.tmpl, test.context)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
} else if output != test.expected {
|
||||
t.Errorf("%q expected %q got %q", test.tmpl, test.expected, output)
|
||||
}
|
||||
}
|
||||
|
||||
// Now set AllowMissingVariables=false and confirm we get errors.
|
||||
AllowMissingVariables = false
|
||||
defer func() { AllowMissingVariables = true }()
|
||||
for _, test := range missing {
|
||||
output, err := Render(test.tmpl, test.context)
|
||||
if err == nil {
|
||||
t.Errorf("%q expected missing variable error but got %q", test.tmpl, output)
|
||||
} else if strings.Index(err.Error(), "Missing variable") == -1 {
|
||||
t.Errorf("%q expected missing variable error but got %q", test.tmpl, err.Error())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestFile(t *testing.T) {
|
||||
filename := path.Join(path.Join(os.Getenv("PWD"), "tests"), "test1.mustache")
|
||||
expected := "hello world"
|
||||
output, err := RenderFile(filename, map[string]string{"name": "world"})
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
} else if output != expected {
|
||||
t.Errorf("testfile expected %q got %q", expected, output)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFRender(t *testing.T) {
|
||||
filename := path.Join(path.Join(os.Getenv("PWD"), "tests"), "test1.mustache")
|
||||
expected := "hello world"
|
||||
tmpl, err := ParseFile(filename)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
var buf bytes.Buffer
|
||||
err = tmpl.FRender(&buf, map[string]string{"name": "world"})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
output := buf.String()
|
||||
if output != expected {
|
||||
t.Fatalf("testfile expected %q got %q", expected, output)
|
||||
}
|
||||
}
|
||||
|
||||
func TestPartial(t *testing.T) {
|
||||
filename := path.Join(path.Join(os.Getenv("PWD"), "tests"), "test2.mustache")
|
||||
expected := "hello world"
|
||||
tmpl, err := ParseFile(filename)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
output, err := tmpl.Render(map[string]string{"Name": "world"})
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
} else if output != expected {
|
||||
t.Errorf("testpartial expected %q got %q", expected, output)
|
||||
return
|
||||
}
|
||||
|
||||
expectedTags := []tag{
|
||||
{
|
||||
Type: Partial,
|
||||
Name: "partial",
|
||||
},
|
||||
}
|
||||
compareTags(t, tmpl.Tags(), expectedTags)
|
||||
}
|
||||
|
||||
/*
|
||||
func TestSectionPartial(t *testing.T) {
|
||||
filename := path.Join(path.Join(os.Getenv("PWD"), "tests"), "test3.mustache")
|
||||
expected := "Mike\nJoe\n"
|
||||
context := map[string]interface{}{"users": []User{{"Mike", 1}, {"Joe", 2}}}
|
||||
output := RenderFile(filename, context)
|
||||
if output != expected {
|
||||
t.Fatalf("testSectionPartial expected %q got %q", expected, output)
|
||||
}
|
||||
}
|
||||
*/
|
||||
func TestMultiContext(t *testing.T) {
|
||||
output, err := Render(`{{hello}} {{World}}`, map[string]string{"hello": "hello"}, struct{ World string }{"world"})
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
output2, err := Render(`{{hello}} {{World}}`, struct{ World string }{"world"}, map[string]string{"hello": "hello"})
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
if output != "hello world" || output2 != "hello world" {
|
||||
t.Errorf("TestMultiContext expected %q got %q", "hello world", output)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
var malformed = []Test{
|
||||
{`{{#a}}{{}}{{/a}}`, Data{true, "hello"}, "", fmt.Errorf("line 1: empty tag")},
|
||||
{`{{}}`, nil, "", fmt.Errorf("line 1: empty tag")},
|
||||
{`{{}`, nil, "", fmt.Errorf("line 1: unmatched open tag")},
|
||||
{`{{`, nil, "", fmt.Errorf("line 1: unmatched open tag")},
|
||||
//invalid syntax - https://github.com/hoisie/mustache/issues/10
|
||||
{`{{#a}}{{#b}}{{/a}}{{/b}}}`, map[string]interface{}{}, "", fmt.Errorf("line 1: interleaved closing tag: a")},
|
||||
}
|
||||
|
||||
func TestMalformed(t *testing.T) {
|
||||
for _, test := range malformed {
|
||||
output, err := Render(test.tmpl, test.context)
|
||||
if err != nil {
|
||||
if test.err == nil {
|
||||
t.Error(err)
|
||||
} else if test.err.Error() != err.Error() {
|
||||
t.Errorf("%q expected error %q but got error %q", test.tmpl, test.err.Error(), err.Error())
|
||||
}
|
||||
} else {
|
||||
if test.err == nil {
|
||||
t.Errorf("%q expected %q got %q", test.tmpl, test.expected, output)
|
||||
} else {
|
||||
t.Errorf("%q expected error %q but got %q", test.tmpl, test.err.Error(), output)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type LayoutTest struct {
|
||||
layout string
|
||||
tmpl string
|
||||
context interface{}
|
||||
expected string
|
||||
}
|
||||
|
||||
var layoutTests = []LayoutTest{
|
||||
{`Header {{content}} Footer`, `Hello World`, nil, `Header Hello World Footer`},
|
||||
{`Header {{content}} Footer`, `Hello {{s}}`, map[string]string{"s": "World"}, `Header Hello World Footer`},
|
||||
{`Header {{content}} Footer`, `Hello {{content}}`, map[string]string{"content": "World"}, `Header Hello World Footer`},
|
||||
{`Header {{extra}} {{content}} Footer`, `Hello {{content}}`, map[string]string{"content": "World", "extra": "extra"}, `Header extra Hello World Footer`},
|
||||
{`Header {{content}} {{content}} Footer`, `Hello {{content}}`, map[string]string{"content": "World"}, `Header Hello World Hello World Footer`},
|
||||
}
|
||||
|
||||
func TestLayout(t *testing.T) {
|
||||
for _, test := range layoutTests {
|
||||
output, err := RenderInLayout(test.tmpl, test.layout, test.context)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
} else if output != test.expected {
|
||||
t.Errorf("%q expected %q got %q", test.tmpl, test.expected, output)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestLayoutToWriter(t *testing.T) {
|
||||
for _, test := range layoutTests {
|
||||
tmpl, err := ParseString(test.tmpl)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
continue
|
||||
}
|
||||
layoutTmpl, err := ParseString(test.layout)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
continue
|
||||
}
|
||||
var buf bytes.Buffer
|
||||
err = tmpl.FRenderInLayout(&buf, layoutTmpl, test.context)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
} else if buf.String() != test.expected {
|
||||
t.Errorf("%q expected %q got %q", test.tmpl, test.expected, buf.String())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type Person struct {
|
||||
FirstName string
|
||||
LastName string
|
||||
}
|
||||
|
||||
func (p *Person) Name1() string {
|
||||
return p.FirstName + " " + p.LastName
|
||||
}
|
||||
|
||||
func (p Person) Name2() string {
|
||||
return p.FirstName + " " + p.LastName
|
||||
}
|
||||
|
||||
func TestPointerReceiver(t *testing.T) {
|
||||
p := Person{"John", "Smith"}
|
||||
tests := []struct {
|
||||
tmpl string
|
||||
context interface{}
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
tmpl: "{{Name1}}",
|
||||
context: &p,
|
||||
expected: "John Smith",
|
||||
},
|
||||
{
|
||||
tmpl: "{{Name2}}",
|
||||
context: &p,
|
||||
expected: "John Smith",
|
||||
},
|
||||
{
|
||||
tmpl: "{{Name1}}",
|
||||
context: p,
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
tmpl: "{{Name2}}",
|
||||
context: p,
|
||||
expected: "John Smith",
|
||||
},
|
||||
}
|
||||
for _, test := range tests {
|
||||
output, err := Render(test.tmpl, test.context)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
} else if output != test.expected {
|
||||
t.Errorf("expected %q got %q", test.expected, output)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type tag struct {
|
||||
Type TagType
|
||||
Name string
|
||||
Tags []tag
|
||||
}
|
||||
|
||||
type tagsTest struct {
|
||||
tmpl string
|
||||
tags []tag
|
||||
}
|
||||
|
||||
var tagTests = []tagsTest{
|
||||
{
|
||||
tmpl: `hello world`,
|
||||
tags: nil,
|
||||
},
|
||||
{
|
||||
tmpl: `hello {{name}}`,
|
||||
tags: []tag{
|
||||
{
|
||||
Type: Variable,
|
||||
Name: "name",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
tmpl: `{{#name}}hello {{name}}{{/name}}{{^name}}hello {{name2}}{{/name}}`,
|
||||
tags: []tag{
|
||||
{
|
||||
Type: Section,
|
||||
Name: "name",
|
||||
Tags: []tag{
|
||||
{
|
||||
Type: Variable,
|
||||
Name: "name",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: InvertedSection,
|
||||
Name: "name",
|
||||
Tags: []tag{
|
||||
{
|
||||
Type: Variable,
|
||||
Name: "name2",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func TestTags(t *testing.T) {
|
||||
for _, test := range tagTests {
|
||||
testTags(t, &test)
|
||||
}
|
||||
}
|
||||
|
||||
func testTags(t *testing.T, test *tagsTest) {
|
||||
tmpl, err := ParseString(test.tmpl)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
return
|
||||
}
|
||||
compareTags(t, tmpl.Tags(), test.tags)
|
||||
}
|
||||
|
||||
func compareTags(t *testing.T, actual []Tag, expected []tag) {
|
||||
if len(actual) != len(expected) {
|
||||
t.Errorf("expected %d tags, got %d", len(expected), len(actual))
|
||||
return
|
||||
}
|
||||
for i, tag := range actual {
|
||||
if tag.Type() != expected[i].Type {
|
||||
t.Errorf("expected %s, got %s", expected[i].Type, tag.Type())
|
||||
return
|
||||
}
|
||||
if tag.Name() != expected[i].Name {
|
||||
t.Errorf("expected %s, got %s", expected[i].Name, tag.Name())
|
||||
return
|
||||
}
|
||||
|
||||
switch tag.Type() {
|
||||
case Variable:
|
||||
if len(expected[i].Tags) != 0 {
|
||||
t.Errorf("expected %d tags, got 0", len(expected[i].Tags))
|
||||
return
|
||||
}
|
||||
case Section, InvertedSection:
|
||||
compareTags(t, tag.Tags(), expected[i].Tags)
|
||||
case Partial:
|
||||
compareTags(t, tag.Tags(), expected[i].Tags)
|
||||
case Invalid:
|
||||
t.Errorf("invalid tag type: %s", tag.Type())
|
||||
return
|
||||
default:
|
||||
t.Errorf("invalid tag type: %s", tag.Type())
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
80
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/partials.go
generated
vendored
Normal file
@ -0,0 +1,80 @@
|
||||
package mustache
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path"
|
||||
)
|
||||
|
||||
// PartialProvider comprises the behaviors required of a struct to be able to provide partials to the mustache rendering
|
||||
// engine.
|
||||
type PartialProvider interface {
|
||||
// Get accepts the name of a partial and returns the parsed partial, if it could be found; a valid but empty
|
||||
// template, if it could not be found; or nil and error if an error occurred (other than an inability to find
|
||||
// the partial).
|
||||
Get(name string) (*Template, error)
|
||||
}
|
||||
|
||||
// FileProvider implements the PartialProvider interface by providing partials drawn from a filesystem. When a partial
|
||||
// named `NAME` is requested, FileProvider searches each listed path for a file named as `NAME` followed by any of the
|
||||
// listed extensions. The default for `Paths` is to search the current working directory. The default for `Extensions`
|
||||
// is to examine, in order, no extension; then ".mustache"; then ".stache".
|
||||
type FileProvider struct {
|
||||
Paths []string
|
||||
Extensions []string
|
||||
}
|
||||
|
||||
func (fp *FileProvider) Get(name string) (*Template, error) {
|
||||
var filename string
|
||||
|
||||
var paths []string
|
||||
if fp.Paths != nil {
|
||||
paths = fp.Paths
|
||||
} else {
|
||||
paths = []string{""}
|
||||
}
|
||||
|
||||
var exts []string
|
||||
if fp.Extensions != nil {
|
||||
exts = fp.Extensions
|
||||
} else {
|
||||
exts = []string{"", ".mustache", ".stache"}
|
||||
}
|
||||
|
||||
for _, p := range paths {
|
||||
for _, e := range exts {
|
||||
name := path.Join(p, name+e)
|
||||
f, err := os.Open(name)
|
||||
if err == nil {
|
||||
filename = name
|
||||
f.Close()
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if filename == "" {
|
||||
return ParseString("")
|
||||
}
|
||||
|
||||
return ParseFile(filename)
|
||||
}
|
||||
|
||||
var _ PartialProvider = (*FileProvider)(nil)
|
||||
|
||||
// StaticProvider implements the PartialProvider interface by providing partials drawn from a map, which maps partial
|
||||
// name to template contents.
|
||||
type StaticProvider struct {
|
||||
Partials map[string]string
|
||||
}
|
||||
|
||||
func (sp *StaticProvider) Get(name string) (*Template, error) {
|
||||
if sp.Partials != nil {
|
||||
if data, ok := sp.Partials[name]; ok {
|
||||
return ParseStringPartials(data, sp)
|
||||
}
|
||||
}
|
||||
|
||||
return ParseString("")
|
||||
}
|
||||
|
||||
var _ PartialProvider = (*StaticProvider)(nil)
|
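The PartialProvider interface and the two providers above are the library's hook for supplying partials. A minimal usage sketch, using only calls visible in this vendored code (StaticProvider, ParseStringPartials, Template.Render); the template text and context are illustrative, not taken from the repository:

package main

import (
	"fmt"

	"github.com/cbroglie/mustache"
)

func main() {
	// Partials are looked up by name; here "greeting" backs {{>greeting}}.
	partials := &mustache.StaticProvider{Partials: map[string]string{
		"greeting": "Hello, {{name}}!",
	}}

	// Parse a template that pulls in the partial, then render it against a context.
	tmpl, err := mustache.ParseStringPartials("{{>greeting}} Welcome back.", partials)
	if err != nil {
		panic(err)
	}

	out, err := tmpl.Render(map[string]string{"name": "world"})
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // Hello, world! Welcome back.
}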
31
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/Changes
generated
vendored
Normal file
@ -0,0 +1,31 @@
|
||||
2011-03-20: v1.1.2
|
||||
Added tests for standalone tags at string boundaries.
|
||||
Added tests for rendering lambda returns after delimiter changes.
|
||||
|
||||
2011-03-20: v1.0.3
|
||||
Added tests for standalone tags at string boundaries.
|
||||
Added tests for rendering lambda returns after delimiter changes.
|
||||
|
||||
2011-03-05: v1.1.1
|
||||
Added tests for indented inline sections.
|
||||
Added tests for Windows-style newlines.
|
||||
|
||||
2011-03-05: v1.0.2
|
||||
Added tests for indented inline sections.
|
||||
Added tests for Windows-style newlines.
|
||||
|
||||
2011-03-04: v1.1.0
|
||||
Implicit iterators.
|
||||
A single period (`.`) may now be used as a name in Interpolation tags,
|
||||
which represents the top of stack (cast as a String).
|
||||
Dotted names.
|
||||
Names containing one or more periods should be resolved as chained
|
||||
properties; naïvely, this is like nesting section tags, but with some
|
||||
built-in scoping protections.
|
||||
|
||||
2011-03-02: v1.0.1
|
||||
Clarifying a point in the README about version compliance.
|
||||
Adding high-level documentation to each spec file.
|
||||
|
||||
2011-02-28: v1.0.0
|
||||
Initial Release
|
65
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/README.md
generated
vendored
Normal file
@ -0,0 +1,65 @@
|
||||
The repository at https://github.com/mustache/spec is the formal standard for
|
||||
Mustache. It defines both normal usage and edge-case behavior for libraries
|
||||
parsing the Mustache templating language (or a superset thereof).
|
||||
|
||||
The specification is developed as a series of YAML files, under the `specs`
|
||||
directory.
|
||||
|
||||
Versioning
|
||||
----------
|
||||
This specification is being [semantically versioned](http://semver.org).
|
||||
Roughly described, major version changes will always represent backwards
|
||||
incompatible changes, minor version changes will always represent new language
|
||||
features and will be backwards compatible, and patch ('tiny') version changes
|
||||
will always be bug fixes. For the purposes of semantic versioning, the public
|
||||
API is the contents of the `specs` directory and the algorithm for testing
|
||||
against it.
|
||||
|
||||
Mustache implementations SHOULD report the most recent version of the spec
|
||||
(major and minor version numbers). If an implementation has support for any
|
||||
optional modules, they SHOULD indicate so with a remark attached to the
|
||||
version number (e.g. "vX.Y, including lambdas" or "v.X.Y+λ"). It is
|
||||
RECOMMENDED that implementations not supporting at least v1.0.0 of this spec
|
||||
refer to themselves as "Mustache-like", or "Mustache-inspired".
|
||||
|
||||
Alternate Formats
|
||||
-----------------
|
||||
|
||||
Since YAML is a reasonably complex format that not every language has good
|
||||
tools for working with, we also provide JSON versions of the specs on a
|
||||
best-effort basis.
|
||||
|
||||
These should be identical to the YAML specifications, but if you find the need
|
||||
to regenerate them, they can be trivially rebuilt by invoking `rake build`.
|
||||
|
||||
It is also worth noting that some specifications (notably, the lambda module)
|
||||
rely on YAML "tags" to denote special types of data (e.g. source code). Since
|
||||
JSON offers no way to denote this, a special key ("`__tag__`") is injected
|
||||
with the name of the tag as its value. See `TESTING.md` for more information
|
||||
about handling tagged data.
|
||||
|
||||
Optional Modules
|
||||
----------------
|
||||
|
||||
Specification files beginning with a tilde (`~`) describe optional modules.
|
||||
At present, the only module being described as optional is regarding support
|
||||
for lambdas. As a guideline, a module may be a candidate for optionality
|
||||
when:
|
||||
|
||||
* It does not affect the core syntax of the language.
|
||||
* It does not significantly affect the output of rendered templates.
|
||||
* It concerns implementation language features or data types that are not
|
||||
common to or core in every targeted language.
|
||||
* The lack of support by an implementation does not diminish the usage of
|
||||
Mustache in the target language.
|
||||
|
||||
As an example, the lambda module is primarily concerned with the handling of a
|
||||
particular data type (code). This is a type of data that may be difficult to
|
||||
support in some languages, and users of those languages will not see the lack
|
||||
as an 'inconsistency' between implementations.
|
||||
|
||||
Support for specific pragmas or syntax extensions, however, are best managed
|
||||
outside this core specification, as adjunct specifications.
|
||||
|
||||
Implementors are strongly encouraged to support any and all modules they are
|
||||
reasonably capable of supporting.
|
27
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/Rakefile
generated
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
require 'json'
|
||||
require 'yaml'
|
||||
|
||||
# Our custom YAML tags must retain their magic.
|
||||
%w[ code ].each do |tag|
|
||||
YAML::add_builtin_type(tag) { |_,val| val.merge(:__tag__ => tag) }
|
||||
end
|
||||
|
||||
desc 'Build all alternate versions of the specs.'
|
||||
multitask :build => [ 'build:json' ]
|
||||
|
||||
namespace :build do
|
||||
note = 'Do not edit this file; changes belong in the appropriate YAML file.'
|
||||
|
||||
desc 'Build JSON versions of the specs.'
|
||||
task :json do
|
||||
rm(Dir['specs/*.json'], :verbose => false)
|
||||
Dir.glob('specs/*.yml').each do |filename|
|
||||
json_file = filename.gsub('.yml', '.json')
|
||||
|
||||
File.open(json_file, 'w') do |file|
|
||||
doc = YAML.load_file(filename)
|
||||
file << doc.merge(:__ATTN__ => note).to_json()
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
46
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/TESTING.md
generated
vendored
Normal file
@ -0,0 +1,46 @@
|
||||
Testing your Mustache implementation against this specification should be
|
||||
relatively simple. If you have a readily available testing framework on your
|
||||
platform, your task may be even simpler.
|
||||
|
||||
In general, the process for each `.yml` file is as follows:
|
||||
|
||||
1. Use a YAML parser to load the file.
|
||||
|
||||
2. For each test in the 'tests' array:
|
||||
|
||||
1. Ensure that each element of the 'partials' hash (if it exists) is
|
||||
stored in a place where the interpreter will look for it.
|
||||
|
||||
2. If your implementation will not support lambdas, feel free to skip over
|
||||
the optional '~lambdas.yml' file.
|
||||
|
||||
2.1. If your implementation will support lambdas, ensure that each member of
|
||||
'data' tagged with '!code' is properly processed into a language-
|
||||
specific lambda reference.
|
||||
|
||||
* e.g. Given this YAML data hash:
|
||||
|
||||
`{ x: !code { ruby: 'proc { "x" }', perl: 'sub { "x" }' } }`
|
||||
|
||||
a Ruby-based Mustache implementation would process it such that it
|
||||
was equivalent to this Ruby hash:
|
||||
|
||||
`{ 'x' => proc { "x" } }`
|
||||
|
||||
* If your implementation language does not currently have lambda
|
||||
examples in the spec, feel free to implement them and send a pull
|
||||
request.
|
||||
|
||||
* The JSON version of the spec represents these tagged values as a hash
|
||||
with a '`__tag__`' key of 'code'.
|
||||
|
||||
3. Render the template (stored in the 'template' key) with the given 'data'
|
||||
hash.
|
||||
|
||||
4. Compare the results of your rendering against the 'expected' value; any
|
||||
differences should be reported, along with any useful debugging
|
||||
information.
|
||||
|
||||
* Of note, the 'desc' key contains a rough one-line description of the
|
||||
behavior being tested -- this is most useful in conjunction with the
|
||||
file name and test 'name'.
|
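The steps above translate fairly directly into a driver for this Go port. A rough sketch, assuming gopkg.in/yaml.v2 for parsing and the spec field names (name, data, template, expected, partials); neither the package nor the runner below is part of this repository, and the optional lambda module is skipped:

package main

import (
	"fmt"
	"io/ioutil"

	"github.com/cbroglie/mustache"
	yaml "gopkg.in/yaml.v2"
)

// specFile mirrors the layout of one spec YAML file.
type specFile struct {
	Tests []struct {
		Name     string                 `yaml:"name"`
		Data     map[string]interface{} `yaml:"data"`
		Template string                 `yaml:"template"`
		Expected string                 `yaml:"expected"`
		Partials map[string]string      `yaml:"partials"`
	} `yaml:"tests"`
}

// runSpec loads one .yml file, renders each test's template with its data and
// partials, and reports mismatches against the expected output.
// Note: nested YAML maps decode as map[interface{}]interface{} with yaml.v2;
// a full runner would normalize them to map[string]interface{} first.
func runSpec(path string) error {
	raw, err := ioutil.ReadFile(path)
	if err != nil {
		return err
	}
	var spec specFile
	if err := yaml.Unmarshal(raw, &spec); err != nil {
		return err
	}
	for _, test := range spec.Tests {
		provider := &mustache.StaticProvider{Partials: test.Partials}
		tmpl, err := mustache.ParseStringPartials(test.Template, provider)
		if err != nil {
			return err
		}
		got, err := tmpl.Render(test.Data)
		if err != nil {
			return err
		}
		if got != test.Expected {
			fmt.Printf("%s: expected %q got %q\n", test.Name, test.Expected, got)
		}
	}
	return nil
}

func main() {
	if err := runSpec("spec/specs/comments.yml"); err != nil {
		panic(err)
	}
}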
1
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/specs/comments.json
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"__ATTN__":"Do not edit this file; changes belong in the appropriate YAML file.","overview":"Comment tags represent content that should never appear in the resulting\noutput.\n\nThe tag's content may contain any substring (including newlines) EXCEPT the\nclosing delimiter.\n\nComment tags SHOULD be treated as standalone when appropriate.\n","tests":[{"name":"Inline","data":{},"expected":"1234567890","template":"12345{{! Comment Block! }}67890","desc":"Comment blocks should be removed from the template."},{"name":"Multiline","data":{},"expected":"1234567890\n","template":"12345{{!\n This is a\n multi-line comment...\n}}67890\n","desc":"Multiline comments should be permitted."},{"name":"Standalone","data":{},"expected":"Begin.\nEnd.\n","template":"Begin.\n{{! Comment Block! }}\nEnd.\n","desc":"All standalone comment lines should be removed."},{"name":"Indented Standalone","data":{},"expected":"Begin.\nEnd.\n","template":"Begin.\n {{! Indented Comment Block! }}\nEnd.\n","desc":"All standalone comment lines should be removed."},{"name":"Standalone Line Endings","data":{},"expected":"|\r\n|","template":"|\r\n{{! Standalone Comment }}\r\n|","desc":"\"\\r\\n\" should be considered a newline for standalone tags."},{"name":"Standalone Without Previous Line","data":{},"expected":"!","template":" {{! I'm Still Standalone }}\n!","desc":"Standalone tags should not require a newline to precede them."},{"name":"Standalone Without Newline","data":{},"expected":"!\n","template":"!\n {{! I'm Still Standalone }}","desc":"Standalone tags should not require a newline to follow them."},{"name":"Multiline Standalone","data":{},"expected":"Begin.\nEnd.\n","template":"Begin.\n{{!\nSomething's going on here...\n}}\nEnd.\n","desc":"All standalone comment lines should be removed."},{"name":"Indented Multiline Standalone","data":{},"expected":"Begin.\nEnd.\n","template":"Begin.\n {{!\n Something's going on here...\n }}\nEnd.\n","desc":"All standalone comment lines should be removed."},{"name":"Indented Inline","data":{},"expected":" 12 \n","template":" 12 {{! 34 }}\n","desc":"Inline comments should not strip whitespace"},{"name":"Surrounding Whitespace","data":{},"expected":"12345 67890","template":"12345 {{! Comment Block! }} 67890","desc":"Comment removal should preserve surrounding whitespace."}]}
|
103
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/specs/comments.yml
generated
vendored
Normal file
@ -0,0 +1,103 @@
|
||||
overview: |
|
||||
Comment tags represent content that should never appear in the resulting
|
||||
output.
|
||||
|
||||
The tag's content may contain any substring (including newlines) EXCEPT the
|
||||
closing delimiter.
|
||||
|
||||
Comment tags SHOULD be treated as standalone when appropriate.
|
||||
tests:
|
||||
- name: Inline
|
||||
desc: Comment blocks should be removed from the template.
|
||||
data: { }
|
||||
template: '12345{{! Comment Block! }}67890'
|
||||
expected: '1234567890'
|
||||
|
||||
- name: Multiline
|
||||
desc: Multiline comments should be permitted.
|
||||
data: { }
|
||||
template: |
|
||||
12345{{!
|
||||
This is a
|
||||
multi-line comment...
|
||||
}}67890
|
||||
expected: |
|
||||
1234567890
|
||||
|
||||
- name: Standalone
|
||||
desc: All standalone comment lines should be removed.
|
||||
data: { }
|
||||
template: |
|
||||
Begin.
|
||||
{{! Comment Block! }}
|
||||
End.
|
||||
expected: |
|
||||
Begin.
|
||||
End.
|
||||
|
||||
- name: Indented Standalone
|
||||
desc: All standalone comment lines should be removed.
|
||||
data: { }
|
||||
template: |
|
||||
Begin.
|
||||
{{! Indented Comment Block! }}
|
||||
End.
|
||||
expected: |
|
||||
Begin.
|
||||
End.
|
||||
|
||||
- name: Standalone Line Endings
|
||||
desc: '"\r\n" should be considered a newline for standalone tags.'
|
||||
data: { }
|
||||
template: "|\r\n{{! Standalone Comment }}\r\n|"
|
||||
expected: "|\r\n|"
|
||||
|
||||
- name: Standalone Without Previous Line
|
||||
desc: Standalone tags should not require a newline to precede them.
|
||||
data: { }
|
||||
template: " {{! I'm Still Standalone }}\n!"
|
||||
expected: "!"
|
||||
|
||||
- name: Standalone Without Newline
|
||||
desc: Standalone tags should not require a newline to follow them.
|
||||
data: { }
|
||||
template: "!\n {{! I'm Still Standalone }}"
|
||||
expected: "!\n"
|
||||
|
||||
- name: Multiline Standalone
|
||||
desc: All standalone comment lines should be removed.
|
||||
data: { }
|
||||
template: |
|
||||
Begin.
|
||||
{{!
|
||||
Something's going on here...
|
||||
}}
|
||||
End.
|
||||
expected: |
|
||||
Begin.
|
||||
End.
|
||||
|
||||
- name: Indented Multiline Standalone
|
||||
desc: All standalone comment lines should be removed.
|
||||
data: { }
|
||||
template: |
|
||||
Begin.
|
||||
{{!
|
||||
Something's going on here...
|
||||
}}
|
||||
End.
|
||||
expected: |
|
||||
Begin.
|
||||
End.
|
||||
|
||||
- name: Indented Inline
|
||||
desc: Inline comments should not strip whitespace
|
||||
data: { }
|
||||
template: " 12 {{! 34 }}\n"
|
||||
expected: " 12 \n"
|
||||
|
||||
- name: Surrounding Whitespace
|
||||
desc: Comment removal should preserve surrounding whitespace.
|
||||
data: { }
|
||||
template: '12345 {{! Comment Block! }} 67890'
|
||||
expected: '12345 67890'
|
1
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/specs/delimiters.json
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"__ATTN__":"Do not edit this file; changes belong in the appropriate YAML file.","overview":"Set Delimiter tags are used to change the tag delimiters for all content\nfollowing the tag in the current compilation unit.\n\nThe tag's content MUST be any two non-whitespace sequences (separated by\nwhitespace) EXCEPT an equals sign ('=') followed by the current closing\ndelimiter.\n\nSet Delimiter tags SHOULD be treated as standalone when appropriate.\n","tests":[{"name":"Pair Behavior","data":{"text":"Hey!"},"expected":"(Hey!)","template":"{{=<% %>=}}(<%text%>)","desc":"The equals sign (used on both sides) should permit delimiter changes."},{"name":"Special Characters","data":{"text":"It worked!"},"expected":"(It worked!)","template":"({{=[ ]=}}[text])","desc":"Characters with special meaning regexen should be valid delimiters."},{"name":"Sections","data":{"section":true,"data":"I got interpolated."},"expected":"[\n I got interpolated.\n |data|\n\n {{data}}\n I got interpolated.\n]\n","template":"[\n{{#section}}\n {{data}}\n |data|\n{{/section}}\n\n{{= | | =}}\n|#section|\n {{data}}\n |data|\n|/section|\n]\n","desc":"Delimiters set outside sections should persist."},{"name":"Inverted Sections","data":{"section":false,"data":"I got interpolated."},"expected":"[\n I got interpolated.\n |data|\n\n {{data}}\n I got interpolated.\n]\n","template":"[\n{{^section}}\n {{data}}\n |data|\n{{/section}}\n\n{{= | | =}}\n|^section|\n {{data}}\n |data|\n|/section|\n]\n","desc":"Delimiters set outside inverted sections should persist."},{"name":"Partial Inheritence","data":{"value":"yes"},"expected":"[ .yes. ]\n[ .yes. ]\n","template":"[ {{>include}} ]\n{{= | | =}}\n[ |>include| ]\n","desc":"Delimiters set in a parent template should not affect a partial.","partials":{"include":".{{value}}."}},{"name":"Post-Partial Behavior","data":{"value":"yes"},"expected":"[ .yes. .yes. ]\n[ .yes. .|value|. ]\n","template":"[ {{>include}} ]\n[ .{{value}}. .|value|. ]\n","desc":"Delimiters set in a partial should not affect the parent template.","partials":{"include":".{{value}}. {{= | | =}} .|value|."}},{"name":"Surrounding Whitespace","data":{},"expected":"| |","template":"| {{=@ @=}} |","desc":"Surrounding whitespace should be left untouched."},{"name":"Outlying Whitespace (Inline)","data":{},"expected":" | \n","template":" | {{=@ @=}}\n","desc":"Whitespace should be left untouched."},{"name":"Standalone Tag","data":{},"expected":"Begin.\nEnd.\n","template":"Begin.\n{{=@ @=}}\nEnd.\n","desc":"Standalone lines should be removed from the template."},{"name":"Indented Standalone Tag","data":{},"expected":"Begin.\nEnd.\n","template":"Begin.\n {{=@ @=}}\nEnd.\n","desc":"Indented standalone lines should be removed from the template."},{"name":"Standalone Line Endings","data":{},"expected":"|\r\n|","template":"|\r\n{{= @ @ =}}\r\n|","desc":"\"\\r\\n\" should be considered a newline for standalone tags."},{"name":"Standalone Without Previous Line","data":{},"expected":"=","template":" {{=@ @=}}\n=","desc":"Standalone tags should not require a newline to precede them."},{"name":"Standalone Without Newline","data":{},"expected":"=\n","template":"=\n {{=@ @=}}","desc":"Standalone tags should not require a newline to follow them."},{"name":"Pair with Padding","data":{},"expected":"||","template":"|{{= @ @ =}}|","desc":"Superfluous in-tag whitespace should be ignored."}]}
|
158
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/specs/delimiters.yml
generated
vendored
Normal file
@ -0,0 +1,158 @@
|
||||
overview: |
|
||||
Set Delimiter tags are used to change the tag delimiters for all content
|
||||
following the tag in the current compilation unit.
|
||||
|
||||
The tag's content MUST be any two non-whitespace sequences (separated by
|
||||
whitespace) EXCEPT an equals sign ('=') followed by the current closing
|
||||
delimiter.
|
||||
|
||||
Set Delimiter tags SHOULD be treated as standalone when appropriate.
|
||||
tests:
|
||||
- name: Pair Behavior
|
||||
desc: The equals sign (used on both sides) should permit delimiter changes.
|
||||
data: { text: 'Hey!' }
|
||||
template: '{{=<% %>=}}(<%text%>)'
|
||||
expected: '(Hey!)'
|
||||
|
||||
- name: Special Characters
|
||||
desc: Characters with special meaning regexen should be valid delimiters.
|
||||
data: { text: 'It worked!' }
|
||||
template: '({{=[ ]=}}[text])'
|
||||
expected: '(It worked!)'
|
||||
|
||||
- name: Sections
|
||||
desc: Delimiters set outside sections should persist.
|
||||
data: { section: true, data: 'I got interpolated.' }
|
||||
template: |
|
||||
[
|
||||
{{#section}}
|
||||
{{data}}
|
||||
|data|
|
||||
{{/section}}
|
||||
|
||||
{{= | | =}}
|
||||
|#section|
|
||||
{{data}}
|
||||
|data|
|
||||
|/section|
|
||||
]
|
||||
expected: |
|
||||
[
|
||||
I got interpolated.
|
||||
|data|
|
||||
|
||||
{{data}}
|
||||
I got interpolated.
|
||||
]
|
||||
|
||||
- name: Inverted Sections
|
||||
desc: Delimiters set outside inverted sections should persist.
|
||||
data: { section: false, data: 'I got interpolated.' }
|
||||
template: |
|
||||
[
|
||||
{{^section}}
|
||||
{{data}}
|
||||
|data|
|
||||
{{/section}}
|
||||
|
||||
{{= | | =}}
|
||||
|^section|
|
||||
{{data}}
|
||||
|data|
|
||||
|/section|
|
||||
]
|
||||
expected: |
|
||||
[
|
||||
I got interpolated.
|
||||
|data|
|
||||
|
||||
{{data}}
|
||||
I got interpolated.
|
||||
]
|
||||
|
||||
- name: Partial Inheritence
|
||||
desc: Delimiters set in a parent template should not affect a partial.
|
||||
data: { value: 'yes' }
|
||||
partials:
|
||||
include: '.{{value}}.'
|
||||
template: |
|
||||
[ {{>include}} ]
|
||||
{{= | | =}}
|
||||
[ |>include| ]
|
||||
expected: |
|
||||
[ .yes. ]
|
||||
[ .yes. ]
|
||||
|
||||
- name: Post-Partial Behavior
|
||||
desc: Delimiters set in a partial should not affect the parent template.
|
||||
data: { value: 'yes' }
|
||||
partials:
|
||||
include: '.{{value}}. {{= | | =}} .|value|.'
|
||||
template: |
|
||||
[ {{>include}} ]
|
||||
[ .{{value}}. .|value|. ]
|
||||
expected: |
|
||||
[ .yes. .yes. ]
|
||||
[ .yes. .|value|. ]
|
||||
|
||||
# Whitespace Sensitivity
|
||||
|
||||
- name: Surrounding Whitespace
|
||||
desc: Surrounding whitespace should be left untouched.
|
||||
data: { }
|
||||
template: '| {{=@ @=}} |'
|
||||
expected: '| |'
|
||||
|
||||
- name: Outlying Whitespace (Inline)
|
||||
desc: Whitespace should be left untouched.
|
||||
data: { }
|
||||
template: " | {{=@ @=}}\n"
|
||||
expected: " | \n"
|
||||
|
||||
- name: Standalone Tag
|
||||
desc: Standalone lines should be removed from the template.
|
||||
data: { }
|
||||
template: |
|
||||
Begin.
|
||||
{{=@ @=}}
|
||||
End.
|
||||
expected: |
|
||||
Begin.
|
||||
End.
|
||||
|
||||
- name: Indented Standalone Tag
|
||||
desc: Indented standalone lines should be removed from the template.
|
||||
data: { }
|
||||
template: |
|
||||
Begin.
|
||||
{{=@ @=}}
|
||||
End.
|
||||
expected: |
|
||||
Begin.
|
||||
End.
|
||||
|
||||
- name: Standalone Line Endings
|
||||
desc: '"\r\n" should be considered a newline for standalone tags.'
|
||||
data: { }
|
||||
template: "|\r\n{{= @ @ =}}\r\n|"
|
||||
expected: "|\r\n|"
|
||||
|
||||
- name: Standalone Without Previous Line
|
||||
desc: Standalone tags should not require a newline to precede them.
|
||||
data: { }
|
||||
template: " {{=@ @=}}\n="
|
||||
expected: "="
|
||||
|
||||
- name: Standalone Without Newline
|
||||
desc: Standalone tags should not require a newline to follow them.
|
||||
data: { }
|
||||
template: "=\n {{=@ @=}}"
|
||||
expected: "=\n"
|
||||
|
||||
# Whitespace Insensitivity
|
||||
|
||||
- name: Pair with Padding
|
||||
desc: Superfluous in-tag whitespace should be ignored.
|
||||
data: { }
|
||||
template: '|{{= @ @ =}}|'
|
||||
expected: '||'
|
1
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/specs/interpolation.json
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
238
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/specs/interpolation.yml
generated
vendored
Normal file
@ -0,0 +1,238 @@
|
||||
overview: |
|
||||
Interpolation tags are used to integrate dynamic content into the template.
|
||||
|
||||
The tag's content MUST be a non-whitespace character sequence NOT containing
|
||||
the current closing delimiter.
|
||||
|
||||
This tag's content names the data to replace the tag. A single period (`.`)
|
||||
indicates that the item currently sitting atop the context stack should be
|
||||
used; otherwise, name resolution is as follows:
|
||||
1) Split the name on periods; the first part is the name to resolve, any
|
||||
remaining parts should be retained.
|
||||
2) Walk the context stack from top to bottom, finding the first context
|
||||
that is a) a hash containing the name as a key OR b) an object responding
|
||||
to a method with the given name.
|
||||
3) If the context is a hash, the data is the value associated with the
|
||||
name.
|
||||
4) If the context is an object, the data is the value returned by the
|
||||
method with the given name.
|
||||
5) If any name parts were retained in step 1, each should be resolved
|
||||
against a context stack containing only the result from the former
|
||||
resolution. If any part fails resolution, the result should be considered
|
||||
falsey, and should interpolate as the empty string.
|
||||
Data should be coerced into a string (and escaped, if appropriate) before
|
||||
interpolation.
|
||||
|
||||
The Interpolation tags MUST NOT be treated as standalone.
|
||||
tests:
|
||||
- name: No Interpolation
|
||||
desc: Mustache-free templates should render as-is.
|
||||
data: { }
|
||||
template: |
|
||||
Hello from {Mustache}!
|
||||
expected: |
|
||||
Hello from {Mustache}!
|
||||
|
||||
- name: Basic Interpolation
|
||||
desc: Unadorned tags should interpolate content into the template.
|
||||
data: { subject: "world" }
|
||||
template: |
|
||||
Hello, {{subject}}!
|
||||
expected: |
|
||||
Hello, world!
|
||||
|
||||
- name: HTML Escaping
|
||||
desc: Basic interpolation should be HTML escaped.
|
||||
data: { forbidden: '& " < >' }
|
||||
template: |
|
||||
These characters should be HTML escaped: {{forbidden}}
|
||||
expected: |
|
||||
These characters should be HTML escaped: & " < >
|
||||
|
||||
- name: Triple Mustache
|
||||
desc: Triple mustaches should interpolate without HTML escaping.
|
||||
data: { forbidden: '& " < >' }
|
||||
template: |
|
||||
These characters should not be HTML escaped: {{{forbidden}}}
|
||||
expected: |
|
||||
These characters should not be HTML escaped: & " < >
|
||||
|
||||
- name: Ampersand
|
||||
desc: Ampersand should interpolate without HTML escaping.
|
||||
data: { forbidden: '& " < >' }
|
||||
template: |
|
||||
These characters should not be HTML escaped: {{&forbidden}}
|
||||
expected: |
|
||||
These characters should not be HTML escaped: & " < >
|
||||
|
||||
- name: Basic Integer Interpolation
|
||||
desc: Integers should interpolate seamlessly.
|
||||
data: { mph: 85 }
|
||||
template: '"{{mph}} miles an hour!"'
|
||||
expected: '"85 miles an hour!"'
|
||||
|
||||
- name: Triple Mustache Integer Interpolation
|
||||
desc: Integers should interpolate seamlessly.
|
||||
data: { mph: 85 }
|
||||
template: '"{{{mph}}} miles an hour!"'
|
||||
expected: '"85 miles an hour!"'
|
||||
|
||||
- name: Ampersand Integer Interpolation
|
||||
desc: Integers should interpolate seamlessly.
|
||||
data: { mph: 85 }
|
||||
template: '"{{&mph}} miles an hour!"'
|
||||
expected: '"85 miles an hour!"'
|
||||
|
||||
- name: Basic Decimal Interpolation
|
||||
desc: Decimals should interpolate seamlessly with proper significance.
|
||||
data: { power: 1.210 }
|
||||
template: '"{{power}} jiggawatts!"'
|
||||
expected: '"1.21 jiggawatts!"'
|
||||
|
||||
- name: Triple Mustache Decimal Interpolation
|
||||
desc: Decimals should interpolate seamlessly with proper significance.
|
||||
data: { power: 1.210 }
|
||||
template: '"{{{power}}} jiggawatts!"'
|
||||
expected: '"1.21 jiggawatts!"'
|
||||
|
||||
- name: Ampersand Decimal Interpolation
|
||||
desc: Decimals should interpolate seamlessly with proper significance.
|
||||
data: { power: 1.210 }
|
||||
template: '"{{&power}} jiggawatts!"'
|
||||
expected: '"1.21 jiggawatts!"'
|
||||
|
||||
# Context Misses
|
||||
|
||||
- name: Basic Context Miss Interpolation
|
||||
desc: Failed context lookups should default to empty strings.
|
||||
data: { }
|
||||
template: "I ({{cannot}}) be seen!"
|
||||
expected: "I () be seen!"
|
||||
|
||||
- name: Triple Mustache Context Miss Interpolation
|
||||
desc: Failed context lookups should default to empty strings.
|
||||
data: { }
|
||||
template: "I ({{{cannot}}}) be seen!"
|
||||
expected: "I () be seen!"
|
||||
|
||||
- name: Ampersand Context Miss Interpolation
|
||||
desc: Failed context lookups should default to empty strings.
|
||||
data: { }
|
||||
template: "I ({{&cannot}}) be seen!"
|
||||
expected: "I () be seen!"
|
||||
|
||||
# Dotted Names
|
||||
|
||||
- name: Dotted Names - Basic Interpolation
|
||||
desc: Dotted names should be considered a form of shorthand for sections.
|
||||
data: { person: { name: 'Joe' } }
|
||||
template: '"{{person.name}}" == "{{#person}}{{name}}{{/person}}"'
|
||||
expected: '"Joe" == "Joe"'
|
||||
|
||||
- name: Dotted Names - Triple Mustache Interpolation
|
||||
desc: Dotted names should be considered a form of shorthand for sections.
|
||||
data: { person: { name: 'Joe' } }
|
||||
template: '"{{{person.name}}}" == "{{#person}}{{{name}}}{{/person}}"'
|
||||
expected: '"Joe" == "Joe"'
|
||||
|
||||
- name: Dotted Names - Ampersand Interpolation
|
||||
desc: Dotted names should be considered a form of shorthand for sections.
|
||||
data: { person: { name: 'Joe' } }
|
||||
template: '"{{&person.name}}" == "{{#person}}{{&name}}{{/person}}"'
|
||||
expected: '"Joe" == "Joe"'
|
||||
|
||||
- name: Dotted Names - Arbitrary Depth
|
||||
desc: Dotted names should be functional to any level of nesting.
|
||||
data:
|
||||
a: { b: { c: { d: { e: { name: 'Phil' } } } } }
|
||||
template: '"{{a.b.c.d.e.name}}" == "Phil"'
|
||||
expected: '"Phil" == "Phil"'
|
||||
|
||||
- name: Dotted Names - Broken Chains
|
||||
desc: Any falsey value prior to the last part of the name should yield ''.
|
||||
data:
|
||||
a: { }
|
||||
template: '"{{a.b.c}}" == ""'
|
||||
expected: '"" == ""'
|
||||
|
||||
- name: Dotted Names - Broken Chain Resolution
|
||||
desc: Each part of a dotted name should resolve only against its parent.
|
||||
data:
|
||||
a: { b: { } }
|
||||
c: { name: 'Jim' }
|
||||
template: '"{{a.b.c.name}}" == ""'
|
||||
expected: '"" == ""'
|
||||
|
||||
- name: Dotted Names - Initial Resolution
|
||||
desc: The first part of a dotted name should resolve as any other name.
|
||||
data:
|
||||
a: { b: { c: { d: { e: { name: 'Phil' } } } } }
|
||||
b: { c: { d: { e: { name: 'Wrong' } } } }
|
||||
template: '"{{#a}}{{b.c.d.e.name}}{{/a}}" == "Phil"'
|
||||
expected: '"Phil" == "Phil"'
|
||||
|
||||
- name: Dotted Names - Context Precedence
|
||||
desc: Dotted names should be resolved against former resolutions.
|
||||
data:
|
||||
a: { b: { } }
|
||||
b: { c: 'ERROR' }
|
||||
template: '{{#a}}{{b.c}}{{/a}}'
|
||||
expected: ''
|
||||
|
||||
# Whitespace Sensitivity
|
||||
|
||||
- name: Interpolation - Surrounding Whitespace
|
||||
desc: Interpolation should not alter surrounding whitespace.
|
||||
data: { string: '---' }
|
||||
template: '| {{string}} |'
|
||||
expected: '| --- |'
|
||||
|
||||
- name: Triple Mustache - Surrounding Whitespace
|
||||
desc: Interpolation should not alter surrounding whitespace.
|
||||
data: { string: '---' }
|
||||
template: '| {{{string}}} |'
|
||||
expected: '| --- |'
|
||||
|
||||
- name: Ampersand - Surrounding Whitespace
|
||||
desc: Interpolation should not alter surrounding whitespace.
|
||||
data: { string: '---' }
|
||||
template: '| {{&string}} |'
|
||||
expected: '| --- |'
|
||||
|
||||
- name: Interpolation - Standalone
|
||||
desc: Standalone interpolation should not alter surrounding whitespace.
|
||||
data: { string: '---' }
|
||||
template: " {{string}}\n"
|
||||
expected: " ---\n"
|
||||
|
||||
- name: Triple Mustache - Standalone
|
||||
desc: Standalone interpolation should not alter surrounding whitespace.
|
||||
data: { string: '---' }
|
||||
template: " {{{string}}}\n"
|
||||
expected: " ---\n"
|
||||
|
||||
- name: Ampersand - Standalone
|
||||
desc: Standalone interpolation should not alter surrounding whitespace.
|
||||
data: { string: '---' }
|
||||
template: " {{&string}}\n"
|
||||
expected: " ---\n"
|
||||
|
||||
# Whitespace Insensitivity
|
||||
|
||||
- name: Interpolation With Padding
|
||||
desc: Superfluous in-tag whitespace should be ignored.
|
||||
data: { string: "---" }
|
||||
template: '|{{ string }}|'
|
||||
expected: '|---|'
|
||||
|
||||
- name: Triple Mustache With Padding
|
||||
desc: Superfluous in-tag whitespace should be ignored.
|
||||
data: { string: "---" }
|
||||
template: '|{{{ string }}}|'
|
||||
expected: '|---|'
|
||||
|
||||
- name: Ampersand With Padding
|
||||
desc: Superfluous in-tag whitespace should be ignored.
|
||||
data: { string: "---" }
|
||||
template: '|{{& string }}|'
|
||||
expected: '|---|'
|
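The dotted-name resolution described in the interpolation overview above (split the name on periods, walk the context stack for the first part, then resolve the remaining parts only against that result) can be sketched in a few lines of Go. lookup is a hypothetical helper restricted to map contexts, not the library's actual resolver:

package main

import (
	"fmt"
	"strings"
)

// lookup resolves a possibly dotted name against a context stack, where the
// last element of stack is the top. A broken chain returns (nil, false),
// which the spec says should interpolate as the empty string.
func lookup(stack []map[string]interface{}, name string) (interface{}, bool) {
	parts := strings.Split(name, ".")

	// Find the first context (top to bottom) containing the head key.
	var cur interface{}
	found := false
	for i := len(stack) - 1; i >= 0; i-- {
		if v, ok := stack[i][parts[0]]; ok {
			cur, found = v, true
			break
		}
	}
	if !found {
		return nil, false
	}

	// Resolve each retained part against the former resolution only.
	for _, p := range parts[1:] {
		m, ok := cur.(map[string]interface{})
		if !ok {
			return nil, false
		}
		if cur, ok = m[p]; !ok {
			return nil, false
		}
	}
	return cur, true
}

func main() {
	stack := []map[string]interface{}{
		{"person": map[string]interface{}{"name": "Joe"}},
	}
	v, _ := lookup(stack, "person.name")
	fmt.Println(v) // Joe
}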
1
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/specs/inverted.json
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
193
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/specs/inverted.yml
generated
vendored
Normal file
@ -0,0 +1,193 @@
|
||||
overview: |
|
||||
Inverted Section tags and End Section tags are used in combination to wrap a
|
||||
section of the template.
|
||||
|
||||
These tags' content MUST be a non-whitespace character sequence NOT
|
||||
containing the current closing delimiter; each Inverted Section tag MUST be
|
||||
followed by an End Section tag with the same content within the same
|
||||
section.
|
||||
|
||||
This tag's content names the data to replace the tag. Name resolution is as
|
||||
follows:
|
||||
1) Split the name on periods; the first part is the name to resolve, any
|
||||
remaining parts should be retained.
|
||||
2) Walk the context stack from top to bottom, finding the first context
|
||||
that is a) a hash containing the name as a key OR b) an object responding
|
||||
to a method with the given name.
|
||||
3) If the context is a hash, the data is the value associated with the
|
||||
name.
|
||||
4) If the context is an object and the method with the given name has an
|
||||
arity of 1, the method SHOULD be called with a String containing the
|
||||
unprocessed contents of the sections; the data is the value returned.
|
||||
5) Otherwise, the data is the value returned by calling the method with
|
||||
the given name.
|
||||
6) If any name parts were retained in step 1, each should be resolved
|
||||
against a context stack containing only the result from the former
|
||||
resolution. If any part fails resolution, the result should be considered
|
||||
falsey, and should interpolate as the empty string.
|
||||
If the data is not of a list type, it is coerced into a list as follows: if
|
||||
the data is truthy (e.g. `!!data == true`), use a single-element list
|
||||
containing the data, otherwise use an empty list.
|
||||
|
||||
This section MUST NOT be rendered unless the data list is empty.
|
||||
|
||||
Inverted Section and End Section tags SHOULD be treated as standalone when
|
||||
appropriate.
|
||||
tests:
|
||||
- name: Falsey
|
||||
desc: Falsey sections should have their contents rendered.
|
||||
data: { boolean: false }
|
||||
template: '"{{^boolean}}This should be rendered.{{/boolean}}"'
|
||||
expected: '"This should be rendered."'
|
||||
|
||||
- name: Truthy
|
||||
desc: Truthy sections should have their contents omitted.
|
||||
data: { boolean: true }
|
||||
template: '"{{^boolean}}This should not be rendered.{{/boolean}}"'
|
||||
expected: '""'
|
||||
|
||||
- name: Context
|
||||
desc: Objects and hashes should behave like truthy values.
|
||||
data: { context: { name: 'Joe' } }
|
||||
template: '"{{^context}}Hi {{name}}.{{/context}}"'
|
||||
expected: '""'
|
||||
|
||||
- name: List
|
||||
desc: Lists should behave like truthy values.
|
||||
data: { list: [ { n: 1 }, { n: 2 }, { n: 3 } ] }
|
||||
template: '"{{^list}}{{n}}{{/list}}"'
|
||||
expected: '""'
|
||||
|
||||
- name: Empty List
|
||||
desc: Empty lists should behave like falsey values.
|
||||
data: { list: [ ] }
|
||||
template: '"{{^list}}Yay lists!{{/list}}"'
|
||||
expected: '"Yay lists!"'
|
||||
|
||||
- name: Doubled
|
||||
desc: Multiple inverted sections per template should be permitted.
|
||||
data: { bool: false, two: 'second' }
|
||||
template: |
|
||||
{{^bool}}
|
||||
* first
|
||||
{{/bool}}
|
||||
* {{two}}
|
||||
{{^bool}}
|
||||
* third
|
||||
{{/bool}}
|
||||
expected: |
|
||||
* first
|
||||
* second
|
||||
* third
|
||||
|
||||
- name: Nested (Falsey)
|
||||
desc: Nested falsey sections should have their contents rendered.
|
||||
data: { bool: false }
|
||||
template: "| A {{^bool}}B {{^bool}}C{{/bool}} D{{/bool}} E |"
|
||||
expected: "| A B C D E |"
|
||||
|
||||
- name: Nested (Truthy)
|
||||
desc: Nested truthy sections should be omitted.
|
||||
data: { bool: true }
|
||||
template: "| A {{^bool}}B {{^bool}}C{{/bool}} D{{/bool}} E |"
|
||||
expected: "| A E |"
|
||||
|
||||
- name: Context Misses
|
||||
desc: Failed context lookups should be considered falsey.
|
||||
data: { }
|
||||
template: "[{{^missing}}Cannot find key 'missing'!{{/missing}}]"
|
||||
expected: "[Cannot find key 'missing'!]"
|
||||
|
||||
# Dotted Names
|
||||
|
||||
- name: Dotted Names - Truthy
|
||||
desc: Dotted names should be valid for Inverted Section tags.
|
||||
data: { a: { b: { c: true } } }
|
||||
template: '"{{^a.b.c}}Not Here{{/a.b.c}}" == ""'
|
||||
expected: '"" == ""'
|
||||
|
||||
- name: Dotted Names - Falsey
|
||||
desc: Dotted names should be valid for Inverted Section tags.
|
||||
data: { a: { b: { c: false } } }
|
||||
template: '"{{^a.b.c}}Not Here{{/a.b.c}}" == "Not Here"'
|
||||
expected: '"Not Here" == "Not Here"'
|
||||
|
||||
- name: Dotted Names - Broken Chains
|
||||
desc: Dotted names that cannot be resolved should be considered falsey.
|
||||
data: { a: { } }
|
||||
template: '"{{^a.b.c}}Not Here{{/a.b.c}}" == "Not Here"'
|
||||
expected: '"Not Here" == "Not Here"'
|
||||
|
||||
# Whitespace Sensitivity
|
||||
|
||||
- name: Surrounding Whitespace
|
||||
desc: Inverted sections should not alter surrounding whitespace.
|
||||
data: { boolean: false }
|
||||
template: " | {{^boolean}}\t|\t{{/boolean}} | \n"
|
||||
expected: " | \t|\t | \n"
|
||||
|
||||
- name: Internal Whitespace
|
||||
desc: Inverted should not alter internal whitespace.
|
||||
data: { boolean: false }
|
||||
template: " | {{^boolean}} {{! Important Whitespace }}\n {{/boolean}} | \n"
|
||||
expected: " | \n | \n"
|
||||
|
||||
- name: Indented Inline Sections
|
||||
desc: Single-line sections should not alter surrounding whitespace.
|
||||
data: { boolean: false }
|
||||
template: " {{^boolean}}NO{{/boolean}}\n {{^boolean}}WAY{{/boolean}}\n"
|
||||
expected: " NO\n WAY\n"
|
||||
|
||||
- name: Standalone Lines
|
||||
desc: Standalone lines should be removed from the template.
|
||||
data: { boolean: false }
|
||||
template: |
|
||||
| This Is
|
||||
{{^boolean}}
|
||||
|
|
||||
{{/boolean}}
|
||||
| A Line
|
||||
expected: |
|
||||
| This Is
|
||||
|
|
||||
| A Line
|
||||
|
||||
- name: Standalone Indented Lines
|
||||
desc: Standalone indented lines should be removed from the template.
|
||||
data: { boolean: false }
|
||||
template: |
|
||||
| This Is
|
||||
{{^boolean}}
|
||||
|
|
||||
{{/boolean}}
|
||||
| A Line
|
||||
expected: |
|
||||
| This Is
|
||||
|
|
||||
| A Line
|
||||
|
||||
- name: Standalone Line Endings
|
||||
desc: '"\r\n" should be considered a newline for standalone tags.'
|
||||
data: { boolean: false }
|
||||
template: "|\r\n{{^boolean}}\r\n{{/boolean}}\r\n|"
|
||||
expected: "|\r\n|"
|
||||
|
||||
- name: Standalone Without Previous Line
|
||||
desc: Standalone tags should not require a newline to precede them.
|
||||
data: { boolean: false }
|
||||
template: " {{^boolean}}\n^{{/boolean}}\n/"
|
||||
expected: "^\n/"
|
||||
|
||||
- name: Standalone Without Newline
|
||||
desc: Standalone tags should not require a newline to follow them.
|
||||
data: { boolean: false }
|
||||
template: "^{{^boolean}}\n/\n {{/boolean}}"
|
||||
expected: "^\n/\n"
|
||||
|
||||
# Whitespace Insensitivity
|
||||
|
||||
- name: Padding
|
||||
desc: Superfluous in-tag whitespace should be ignored.
|
||||
data: { boolean: false }
|
||||
template: '|{{^ boolean }}={{/ boolean }}|'
|
||||
expected: '|=|'
|
1
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/specs/partials.json
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"__ATTN__":"Do not edit this file; changes belong in the appropriate YAML file.","overview":"Partial tags are used to expand an external template into the current\ntemplate.\n\nThe tag's content MUST be a non-whitespace character sequence NOT containing\nthe current closing delimiter.\n\nThis tag's content names the partial to inject. Set Delimiter tags MUST NOT\naffect the parsing of a partial. The partial MUST be rendered against the\ncontext stack local to the tag. If the named partial cannot be found, the\nempty string SHOULD be used instead, as in interpolations.\n\nPartial tags SHOULD be treated as standalone when appropriate. If this tag\nis used standalone, any whitespace preceding the tag should treated as\nindentation, and prepended to each line of the partial before rendering.\n","tests":[{"name":"Basic Behavior","data":{},"expected":"\"from partial\"","template":"\"{{>text}}\"","desc":"The greater-than operator should expand to the named partial.","partials":{"text":"from partial"}},{"name":"Failed Lookup","data":{},"expected":"\"\"","template":"\"{{>text}}\"","desc":"The empty string should be used when the named partial is not found.","partials":{}},{"name":"Context","data":{"text":"content"},"expected":"\"*content*\"","template":"\"{{>partial}}\"","desc":"The greater-than operator should operate within the current context.","partials":{"partial":"*{{text}}*"}},{"name":"Recursion","data":{"content":"X","nodes":[{"content":"Y","nodes":[]}]},"expected":"X<Y<>>","template":"{{>node}}","desc":"The greater-than operator should properly recurse.","partials":{"node":"{{content}}<{{#nodes}}{{>node}}{{/nodes}}>"}},{"name":"Surrounding Whitespace","data":{},"expected":"| \t|\t |","template":"| {{>partial}} |","desc":"The greater-than operator should not alter surrounding whitespace.","partials":{"partial":"\t|\t"}},{"name":"Inline Indentation","data":{"data":"|"},"expected":" | >\n>\n","template":" {{data}} {{> partial}}\n","desc":"Whitespace should be left untouched.","partials":{"partial":">\n>"}},{"name":"Standalone Line Endings","data":{},"expected":"|\r\n>|","template":"|\r\n{{>partial}}\r\n|","desc":"\"\\r\\n\" should be considered a newline for standalone tags.","partials":{"partial":">"}},{"name":"Standalone Without Previous Line","data":{},"expected":" >\n >>","template":" {{>partial}}\n>","desc":"Standalone tags should not require a newline to precede them.","partials":{"partial":">\n>"}},{"name":"Standalone Without Newline","data":{},"expected":">\n >\n >","template":">\n {{>partial}}","desc":"Standalone tags should not require a newline to follow them.","partials":{"partial":">\n>"}},{"name":"Standalone Indentation","data":{"content":"<\n->"},"expected":"\\\n |\n <\n->\n |\n/\n","template":"\\\n {{>partial}}\n/\n","desc":"Each line of the partial should be indented before rendering.","partials":{"partial":"|\n{{{content}}}\n|\n"}},{"name":"Padding Whitespace","data":{"boolean":true},"expected":"|[]|","template":"|{{> partial }}|","desc":"Superfluous in-tag whitespace should be ignored.","partials":{"partial":"[]"}}]}
|
109
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/specs/partials.yml
generated
vendored
Normal file
@ -0,0 +1,109 @@
|
||||
overview: |
Partial tags are used to expand an external template into the current
template.

The tag's content MUST be a non-whitespace character sequence NOT containing
the current closing delimiter.

This tag's content names the partial to inject. Set Delimiter tags MUST NOT
affect the parsing of a partial. The partial MUST be rendered against the
context stack local to the tag. If the named partial cannot be found, the
empty string SHOULD be used instead, as in interpolations.

Partial tags SHOULD be treated as standalone when appropriate. If this tag
is used standalone, any whitespace preceding the tag should be treated as
indentation, and prepended to each line of the partial before rendering.
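As a quick illustration, the vendored Go implementation resolves partials through a partial provider; the sketch below mirrors how spec_test.go (added later in this diff) drives it with a StaticProvider. The unkeyed struct literal is copied from that test to avoid guessing the field name, and the import path is assumed from the vendor layout:

```go
package main

import (
	"fmt"

	"github.com/cbroglie/mustache" // assumed vendored import path
)

func main() {
	// StaticProvider maps partial names to their templates.
	out, err := mustache.RenderPartials(
		`"{{>greeting}}"`,
		&mustache.StaticProvider{map[string]string{"greeting": "hello {{name}}"}},
		map[string]string{"name": "world"},
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // "hello world"
}
```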
tests:
|
||||
- name: Basic Behavior
|
||||
desc: The greater-than operator should expand to the named partial.
|
||||
data: { }
|
||||
template: '"{{>text}}"'
|
||||
partials: { text: 'from partial' }
|
||||
expected: '"from partial"'
|
||||
|
||||
- name: Failed Lookup
|
||||
desc: The empty string should be used when the named partial is not found.
|
||||
data: { }
|
||||
template: '"{{>text}}"'
|
||||
partials: { }
|
||||
expected: '""'
|
||||
|
||||
- name: Context
|
||||
desc: The greater-than operator should operate within the current context.
|
||||
data: { text: 'content' }
|
||||
template: '"{{>partial}}"'
|
||||
partials: { partial: '*{{text}}*' }
|
||||
expected: '"*content*"'
|
||||
|
||||
- name: Recursion
|
||||
desc: The greater-than operator should properly recurse.
|
||||
data: { content: "X", nodes: [ { content: "Y", nodes: [] } ] }
|
||||
template: '{{>node}}'
|
||||
partials: { node: '{{content}}<{{#nodes}}{{>node}}{{/nodes}}>' }
|
||||
expected: 'X<Y<>>'
|
||||
|
||||
# Whitespace Sensitivity
|
||||
|
||||
- name: Surrounding Whitespace
|
||||
desc: The greater-than operator should not alter surrounding whitespace.
|
||||
data: { }
|
||||
template: '| {{>partial}} |'
|
||||
partials: { partial: "\t|\t" }
|
||||
expected: "| \t|\t |"
|
||||
|
||||
- name: Inline Indentation
|
||||
desc: Whitespace should be left untouched.
|
||||
data: { data: '|' }
|
||||
template: " {{data}} {{> partial}}\n"
|
||||
partials: { partial: ">\n>" }
|
||||
expected: " | >\n>\n"
|
||||
|
||||
- name: Standalone Line Endings
|
||||
desc: '"\r\n" should be considered a newline for standalone tags.'
|
||||
data: { }
|
||||
template: "|\r\n{{>partial}}\r\n|"
|
||||
partials: { partial: ">" }
|
||||
expected: "|\r\n>|"
|
||||
|
||||
- name: Standalone Without Previous Line
|
||||
desc: Standalone tags should not require a newline to precede them.
|
||||
data: { }
|
||||
template: " {{>partial}}\n>"
|
||||
partials: { partial: ">\n>"}
|
||||
expected: " >\n >>"
|
||||
|
||||
- name: Standalone Without Newline
|
||||
desc: Standalone tags should not require a newline to follow them.
|
||||
data: { }
|
||||
template: ">\n {{>partial}}"
|
||||
partials: { partial: ">\n>" }
|
||||
expected: ">\n >\n >"
|
||||
|
||||
- name: Standalone Indentation
|
||||
desc: Each line of the partial should be indented before rendering.
|
||||
data: { content: "<\n->" }
|
||||
template: |
|
||||
\
|
||||
{{>partial}}
|
||||
/
|
||||
partials:
|
||||
partial: |
|
||||
|
|
||||
{{{content}}}
|
||||
|
|
||||
expected: |
|
||||
\
|
||||
|
|
||||
<
|
||||
->
|
||||
|
|
||||
/
|
||||
|
||||
# Whitespace Insensitivity
|
||||
|
||||
- name: Padding Whitespace
|
||||
desc: Superfluous in-tag whitespace should be ignored.
|
||||
data: { boolean: true }
|
||||
template: "|{{> partial }}|"
|
||||
partials: { partial: "[]" }
|
||||
expected: '|[]|'
|
1
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/specs/sections.json
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
263
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/specs/sections.yml
generated
vendored
Normal file
@ -0,0 +1,263 @@
|
||||
overview: |
Section tags and End Section tags are used in combination to wrap a section
of the template for iteration.

These tags' content MUST be a non-whitespace character sequence NOT
containing the current closing delimiter; each Section tag MUST be followed
by an End Section tag with the same content within the same section.

This tag's content names the data to replace the tag. Name resolution is as
follows:
1) Split the name on periods; the first part is the name to resolve, any
remaining parts should be retained.
2) Walk the context stack from top to bottom, finding the first context
that is a) a hash containing the name as a key OR b) an object responding
to a method with the given name.
3) If the context is a hash, the data is the value associated with the
name.
4) If the context is an object and the method with the given name has an
arity of 1, the method SHOULD be called with a String containing the
unprocessed contents of the sections; the data is the value returned.
5) Otherwise, the data is the value returned by calling the method with
the given name.
6) If any name parts were retained in step 1, each should be resolved
against a context stack containing only the result from the former
resolution. If any part fails resolution, the result should be considered
falsey, and should interpolate as the empty string.
If the data is not of a list type, it is coerced into a list as follows: if
the data is truthy (e.g. `!!data == true`), use a single-element list
containing the data, otherwise use an empty list.

For each element in the data list, the element MUST be pushed onto the
context stack, the section MUST be rendered, and the element MUST be popped
off the context stack.

Section and End Section tags SHOULD be treated as standalone when
appropriate.
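To ground the iteration rule, here is a minimal sketch using the vendored package's Render function (import path assumed from the vendor layout): each list element is pushed onto the context stack while the section body renders.

```go
package main

import (
	"fmt"

	"github.com/cbroglie/mustache" // assumed vendored import path
)

func main() {
	// Each element of "list" becomes the top of the context stack in turn,
	// so {{item}} resolves against the current element.
	out, err := mustache.Render(
		`"{{#list}}{{item}}{{/list}}"`,
		map[string]interface{}{
			"list": []map[string]int{{"item": 1}, {"item": 2}, {"item": 3}},
		},
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // "123"
}
```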
tests:
|
||||
- name: Truthy
|
||||
desc: Truthy sections should have their contents rendered.
|
||||
data: { boolean: true }
|
||||
template: '"{{#boolean}}This should be rendered.{{/boolean}}"'
|
||||
expected: '"This should be rendered."'
|
||||
|
||||
- name: Falsey
|
||||
desc: Falsey sections should have their contents omitted.
|
||||
data: { boolean: false }
|
||||
template: '"{{#boolean}}This should not be rendered.{{/boolean}}"'
|
||||
expected: '""'
|
||||
|
||||
- name: Context
|
||||
desc: Objects and hashes should be pushed onto the context stack.
|
||||
data: { context: { name: 'Joe' } }
|
||||
template: '"{{#context}}Hi {{name}}.{{/context}}"'
|
||||
expected: '"Hi Joe."'
|
||||
|
||||
- name: Deeply Nested Contexts
|
||||
desc: All elements on the context stack should be accessible.
|
||||
data:
|
||||
a: { one: 1 }
|
||||
b: { two: 2 }
|
||||
c: { three: 3 }
|
||||
d: { four: 4 }
|
||||
e: { five: 5 }
|
||||
template: |
|
||||
{{#a}}
|
||||
{{one}}
|
||||
{{#b}}
|
||||
{{one}}{{two}}{{one}}
|
||||
{{#c}}
|
||||
{{one}}{{two}}{{three}}{{two}}{{one}}
|
||||
{{#d}}
|
||||
{{one}}{{two}}{{three}}{{four}}{{three}}{{two}}{{one}}
|
||||
{{#e}}
|
||||
{{one}}{{two}}{{three}}{{four}}{{five}}{{four}}{{three}}{{two}}{{one}}
|
||||
{{/e}}
|
||||
{{one}}{{two}}{{three}}{{four}}{{three}}{{two}}{{one}}
|
||||
{{/d}}
|
||||
{{one}}{{two}}{{three}}{{two}}{{one}}
|
||||
{{/c}}
|
||||
{{one}}{{two}}{{one}}
|
||||
{{/b}}
|
||||
{{one}}
|
||||
{{/a}}
|
||||
expected: |
|
||||
1
|
||||
121
|
||||
12321
|
||||
1234321
|
||||
123454321
|
||||
1234321
|
||||
12321
|
||||
121
|
||||
1
|
||||
|
||||
- name: List
|
||||
desc: Lists should be iterated; list items should visit the context stack.
|
||||
data: { list: [ { item: 1 }, { item: 2 }, { item: 3 } ] }
|
||||
template: '"{{#list}}{{item}}{{/list}}"'
|
||||
expected: '"123"'
|
||||
|
||||
- name: Empty List
|
||||
desc: Empty lists should behave like falsey values.
|
||||
data: { list: [ ] }
|
||||
template: '"{{#list}}Yay lists!{{/list}}"'
|
||||
expected: '""'
|
||||
|
||||
- name: Doubled
|
||||
desc: Multiple sections per template should be permitted.
|
||||
data: { bool: true, two: 'second' }
|
||||
template: |
|
||||
{{#bool}}
|
||||
* first
|
||||
{{/bool}}
|
||||
* {{two}}
|
||||
{{#bool}}
|
||||
* third
|
||||
{{/bool}}
|
||||
expected: |
|
||||
* first
|
||||
* second
|
||||
* third
|
||||
|
||||
- name: Nested (Truthy)
|
||||
desc: Nested truthy sections should have their contents rendered.
|
||||
data: { bool: true }
|
||||
template: "| A {{#bool}}B {{#bool}}C{{/bool}} D{{/bool}} E |"
|
||||
expected: "| A B C D E |"
|
||||
|
||||
- name: Nested (Falsey)
|
||||
desc: Nested falsey sections should be omitted.
|
||||
data: { bool: false }
|
||||
template: "| A {{#bool}}B {{#bool}}C{{/bool}} D{{/bool}} E |"
|
||||
expected: "| A E |"
|
||||
|
||||
- name: Context Misses
|
||||
desc: Failed context lookups should be considered falsey.
|
||||
data: { }
|
||||
template: "[{{#missing}}Found key 'missing'!{{/missing}}]"
|
||||
expected: "[]"
|
||||
|
||||
# Implicit Iterators
|
||||
|
||||
- name: Implicit Iterator - String
|
||||
desc: Implicit iterators should directly interpolate strings.
|
||||
data:
|
||||
list: [ 'a', 'b', 'c', 'd', 'e' ]
|
||||
template: '"{{#list}}({{.}}){{/list}}"'
|
||||
expected: '"(a)(b)(c)(d)(e)"'
|
||||
|
||||
- name: Implicit Iterator - Integer
|
||||
desc: Implicit iterators should cast integers to strings and interpolate.
|
||||
data:
|
||||
list: [ 1, 2, 3, 4, 5 ]
|
||||
template: '"{{#list}}({{.}}){{/list}}"'
|
||||
expected: '"(1)(2)(3)(4)(5)"'
|
||||
|
||||
- name: Implicit Iterator - Decimal
|
||||
desc: Implicit iterators should cast decimals to strings and interpolate.
|
||||
data:
|
||||
list: [ 1.10, 2.20, 3.30, 4.40, 5.50 ]
|
||||
template: '"{{#list}}({{.}}){{/list}}"'
|
||||
expected: '"(1.1)(2.2)(3.3)(4.4)(5.5)"'
|
||||
|
||||
- name: Implicit Iterator - Array
|
||||
desc: Implicit iterators should allow iterating over nested arrays.
|
||||
data:
|
||||
list: [ [1, 2, 3], ['a', 'b', 'c'] ]
|
||||
template: '"{{#list}}({{#.}}{{.}}{{/.}}){{/list}}"'
|
||||
expected: '"(123)(abc)"'
|
||||
|
||||
# Dotted Names
|
||||
|
||||
- name: Dotted Names - Truthy
|
||||
desc: Dotted names should be valid for Section tags.
|
||||
data: { a: { b: { c: true } } }
|
||||
template: '"{{#a.b.c}}Here{{/a.b.c}}" == "Here"'
|
||||
expected: '"Here" == "Here"'
|
||||
|
||||
- name: Dotted Names - Falsey
|
||||
desc: Dotted names should be valid for Section tags.
|
||||
data: { a: { b: { c: false } } }
|
||||
template: '"{{#a.b.c}}Here{{/a.b.c}}" == ""'
|
||||
expected: '"" == ""'
|
||||
|
||||
- name: Dotted Names - Broken Chains
|
||||
desc: Dotted names that cannot be resolved should be considered falsey.
|
||||
data: { a: { } }
|
||||
template: '"{{#a.b.c}}Here{{/a.b.c}}" == ""'
|
||||
expected: '"" == ""'
|
||||
|
||||
# Whitespace Sensitivity
|
||||
|
||||
- name: Surrounding Whitespace
|
||||
desc: Sections should not alter surrounding whitespace.
|
||||
data: { boolean: true }
|
||||
template: " | {{#boolean}}\t|\t{{/boolean}} | \n"
|
||||
expected: " | \t|\t | \n"
|
||||
|
||||
- name: Internal Whitespace
|
||||
desc: Sections should not alter internal whitespace.
|
||||
data: { boolean: true }
|
||||
template: " | {{#boolean}} {{! Important Whitespace }}\n {{/boolean}} | \n"
|
||||
expected: " | \n | \n"
|
||||
|
||||
- name: Indented Inline Sections
|
||||
desc: Single-line sections should not alter surrounding whitespace.
|
||||
data: { boolean: true }
|
||||
template: " {{#boolean}}YES{{/boolean}}\n {{#boolean}}GOOD{{/boolean}}\n"
|
||||
expected: " YES\n GOOD\n"
|
||||
|
||||
- name: Standalone Lines
|
||||
desc: Standalone lines should be removed from the template.
|
||||
data: { boolean: true }
|
||||
template: |
|
||||
| This Is
|
||||
{{#boolean}}
|
||||
|
|
||||
{{/boolean}}
|
||||
| A Line
|
||||
expected: |
|
||||
| This Is
|
||||
|
|
||||
| A Line
|
||||
|
||||
- name: Indented Standalone Lines
|
||||
desc: Indented standalone lines should be removed from the template.
|
||||
data: { boolean: true }
|
||||
template: |
|
||||
| This Is
|
||||
{{#boolean}}
|
||||
|
|
||||
{{/boolean}}
|
||||
| A Line
|
||||
expected: |
|
||||
| This Is
|
||||
|
|
||||
| A Line
|
||||
|
||||
- name: Standalone Line Endings
|
||||
desc: '"\r\n" should be considered a newline for standalone tags.'
|
||||
data: { boolean: true }
|
||||
template: "|\r\n{{#boolean}}\r\n{{/boolean}}\r\n|"
|
||||
expected: "|\r\n|"
|
||||
|
||||
- name: Standalone Without Previous Line
|
||||
desc: Standalone tags should not require a newline to precede them.
|
||||
data: { boolean: true }
|
||||
template: " {{#boolean}}\n#{{/boolean}}\n/"
|
||||
expected: "#\n/"
|
||||
|
||||
- name: Standalone Without Newline
|
||||
desc: Standalone tags should not require a newline to follow them.
|
||||
data: { boolean: true }
|
||||
template: "#{{#boolean}}\n/\n {{/boolean}}"
|
||||
expected: "#\n/\n"
|
||||
|
||||
# Whitespace Insensitivity
|
||||
|
||||
- name: Padding
|
||||
desc: Superfluous in-tag whitespace should be ignored.
|
||||
data: { boolean: true }
|
||||
template: '|{{# boolean }}={{/ boolean }}|'
|
||||
expected: '|=|'
|
1
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/specs/~lambdas.json
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
159
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec/specs/~lambdas.yml
generated
vendored
Normal file
@ -0,0 +1,159 @@
|
||||
overview: |
Lambdas are a special-cased data type for use in interpolations and
sections.

When used as the data value for an Interpolation tag, the lambda MUST be
treatable as an arity 0 function, and invoked as such. The returned value
MUST be rendered against the default delimiters, then interpolated in place
of the lambda.

When used as the data value for a Section tag, the lambda MUST be treatable
as an arity 1 function, and invoked as such (passing a String containing the
unprocessed section contents). The returned value MUST be rendered against
the current delimiters, then interpolated in place of the section.
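Note that the vendored copy does not implement lambdas (spec_test.go below maps ~lambdas.json to nil), so the sketch here only illustrates the calling convention the overview describes, using plain Go function types rather than the mustache API:

```go
package main

import "fmt"

// Arity-0 lambda: used for interpolation; its return value is rendered
// against the default delimiters before being interpolated.
type interpolationLambda func() string

// Arity-1 lambda: used for sections; it receives the unprocessed section
// text, and its return value is rendered against the current delimiters.
type sectionLambda func(raw string) string

func main() {
	var hello interpolationLambda = func() string { return "world" }
	var wrap sectionLambda = func(raw string) string { return "__" + raw + "__" }

	fmt.Println(hello())      // world
	fmt.Println(wrap("FILE")) // __FILE__
}
```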
tests:
|
||||
- name: Interpolation
|
||||
desc: A lambda's return value should be interpolated.
|
||||
data:
|
||||
lambda: !code
|
||||
ruby: 'proc { "world" }'
|
||||
perl: 'sub { "world" }'
|
||||
js: 'function() { return "world" }'
|
||||
php: 'return "world";'
|
||||
python: 'lambda: "world"'
|
||||
clojure: '(fn [] "world")'
|
||||
lisp: '(lambda () "world")'
|
||||
template: "Hello, {{lambda}}!"
|
||||
expected: "Hello, world!"
|
||||
|
||||
- name: Interpolation - Expansion
|
||||
desc: A lambda's return value should be parsed.
|
||||
data:
|
||||
planet: "world"
|
||||
lambda: !code
|
||||
ruby: 'proc { "{{planet}}" }'
|
||||
perl: 'sub { "{{planet}}" }'
|
||||
js: 'function() { return "{{planet}}" }'
|
||||
php: 'return "{{planet}}";'
|
||||
python: 'lambda: "{{planet}}"'
|
||||
clojure: '(fn [] "{{planet}}")'
|
||||
lisp: '(lambda () "{{planet}}")'
|
||||
template: "Hello, {{lambda}}!"
|
||||
expected: "Hello, world!"
|
||||
|
||||
- name: Interpolation - Alternate Delimiters
|
||||
desc: A lambda's return value should parse with the default delimiters.
|
||||
data:
|
||||
planet: "world"
|
||||
lambda: !code
|
||||
ruby: 'proc { "|planet| => {{planet}}" }'
|
||||
perl: 'sub { "|planet| => {{planet}}" }'
|
||||
js: 'function() { return "|planet| => {{planet}}" }'
|
||||
php: 'return "|planet| => {{planet}}";'
|
||||
python: 'lambda: "|planet| => {{planet}}"'
|
||||
clojure: '(fn [] "|planet| => {{planet}}")'
|
||||
lisp: '(lambda () "|planet| => {{planet}}")'
|
||||
template: "{{= | | =}}\nHello, (|&lambda|)!"
|
||||
expected: "Hello, (|planet| => world)!"
|
||||
|
||||
- name: Interpolation - Multiple Calls
|
||||
desc: Interpolated lambdas should not be cached.
|
||||
data:
|
||||
lambda: !code
|
||||
ruby: 'proc { $calls ||= 0; $calls += 1 }'
|
||||
perl: 'sub { no strict; $calls += 1 }'
|
||||
js: 'function() { return (g=(function(){return this})()).calls=(g.calls||0)+1 }'
|
||||
php: 'global $calls; return ++$calls;'
|
||||
python: 'lambda: globals().update(calls=globals().get("calls",0)+1) or calls'
|
||||
clojure: '(def g (atom 0)) (fn [] (swap! g inc))'
|
||||
lisp: '(let ((g 0)) (lambda () (incf g)))'
|
||||
template: '{{lambda}} == {{{lambda}}} == {{lambda}}'
|
||||
expected: '1 == 2 == 3'
|
||||
|
||||
- name: Escaping
|
||||
desc: Lambda results should be appropriately escaped.
|
||||
data:
|
||||
lambda: !code
|
||||
ruby: 'proc { ">" }'
|
||||
perl: 'sub { ">" }'
|
||||
js: 'function() { return ">" }'
|
||||
php: 'return ">";'
|
||||
python: 'lambda: ">"'
|
||||
clojure: '(fn [] ">")'
|
||||
lisp: '(lambda () ">")'
|
||||
template: "<{{lambda}}{{{lambda}}}"
|
||||
expected: "<>>"
|
||||
|
||||
- name: Section
|
||||
desc: Lambdas used for sections should receive the raw section string.
|
||||
data:
|
||||
x: 'Error!'
|
||||
lambda: !code
|
||||
ruby: 'proc { |text| text == "{{x}}" ? "yes" : "no" }'
|
||||
perl: 'sub { $_[0] eq "{{x}}" ? "yes" : "no" }'
|
||||
js: 'function(txt) { return (txt == "{{x}}" ? "yes" : "no") }'
|
||||
php: 'return ($text == "{{x}}") ? "yes" : "no";'
|
||||
python: 'lambda text: text == "{{x}}" and "yes" or "no"'
|
||||
clojure: '(fn [text] (if (= text "{{x}}") "yes" "no"))'
|
||||
lisp: '(lambda (text) (if (string= text "{{x}}") "yes" "no"))'
|
||||
template: "<{{#lambda}}{{x}}{{/lambda}}>"
|
||||
expected: "<yes>"
|
||||
|
||||
- name: Section - Expansion
|
||||
desc: Lambdas used for sections should have their results parsed.
|
||||
data:
|
||||
planet: "Earth"
|
||||
lambda: !code
|
||||
ruby: 'proc { |text| "#{text}{{planet}}#{text}" }'
|
||||
perl: 'sub { $_[0] . "{{planet}}" . $_[0] }'
|
||||
js: 'function(txt) { return txt + "{{planet}}" + txt }'
|
||||
php: 'return $text . "{{planet}}" . $text;'
|
||||
python: 'lambda text: "%s{{planet}}%s" % (text, text)'
|
||||
clojure: '(fn [text] (str text "{{planet}}" text))'
|
||||
lisp: '(lambda (text) (format nil "~a{{planet}}~a" text text))'
|
||||
template: "<{{#lambda}}-{{/lambda}}>"
|
||||
expected: "<-Earth->"
|
||||
|
||||
- name: Section - Alternate Delimiters
|
||||
desc: Lambdas used for sections should parse with the current delimiters.
|
||||
data:
|
||||
planet: "Earth"
|
||||
lambda: !code
|
||||
ruby: 'proc { |text| "#{text}{{planet}} => |planet|#{text}" }'
|
||||
perl: 'sub { $_[0] . "{{planet}} => |planet|" . $_[0] }'
|
||||
js: 'function(txt) { return txt + "{{planet}} => |planet|" + txt }'
|
||||
php: 'return $text . "{{planet}} => |planet|" . $text;'
|
||||
python: 'lambda text: "%s{{planet}} => |planet|%s" % (text, text)'
|
||||
clojure: '(fn [text] (str text "{{planet}} => |planet|" text))'
|
||||
lisp: '(lambda (text) (format nil "~a{{planet}} => |planet|~a" text text))'
|
||||
template: "{{= | | =}}<|#lambda|-|/lambda|>"
|
||||
expected: "<-{{planet}} => Earth->"
|
||||
|
||||
- name: Section - Multiple Calls
|
||||
desc: Lambdas used for sections should not be cached.
|
||||
data:
|
||||
lambda: !code
|
||||
ruby: 'proc { |text| "__#{text}__" }'
|
||||
perl: 'sub { "__" . $_[0] . "__" }'
|
||||
js: 'function(txt) { return "__" + txt + "__" }'
|
||||
php: 'return "__" . $text . "__";'
|
||||
python: 'lambda text: "__%s__" % (text)'
|
||||
clojure: '(fn [text] (str "__" text "__"))'
|
||||
lisp: '(lambda (text) (format nil "__~a__" text))'
|
||||
template: '{{#lambda}}FILE{{/lambda}} != {{#lambda}}LINE{{/lambda}}'
|
||||
expected: '__FILE__ != __LINE__'
|
||||
|
||||
- name: Inverted Section
|
||||
desc: Lambdas used for inverted sections should be considered truthy.
|
||||
data:
|
||||
static: 'static'
|
||||
lambda: !code
|
||||
ruby: 'proc { |text| false }'
|
||||
perl: 'sub { 0 }'
|
||||
js: 'function(txt) { return false }'
|
||||
php: 'return false;'
|
||||
python: 'lambda text: 0'
|
||||
clojure: '(fn [text] false)'
|
||||
lisp: '(lambda (text) (declare (ignore text)) nil)'
|
||||
template: "<{{^lambda}}{{static}}{{/lambda}}>"
|
||||
expected: "<>"
|
221
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/spec_test.go
generated
vendored
Normal file
@ -0,0 +1,221 @@
|
||||
package mustache
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var enabledTests = map[string]map[string]bool{
|
||||
"comments.json": map[string]bool{
|
||||
"Inline": true,
|
||||
"Multiline": true,
|
||||
"Standalone": false,
|
||||
"Indented Standalone": false,
|
||||
"Standalone Line Endings": false,
|
||||
"Standalone Without Previous Line": false,
|
||||
"Standalone Without Newline": false,
|
||||
"Multiline Standalone": false,
|
||||
"Indented Multiline Standalone": false,
|
||||
"Indented Inline": true,
|
||||
"Surrounding Whitespace": true,
|
||||
},
|
||||
"delimiters.json": map[string]bool{
|
||||
"Pair Behavior": true,
|
||||
"Special Characters": true,
|
||||
"Sections": false,
|
||||
"Inverted Sections": false,
|
||||
"Partial Inheritence": false,
|
||||
"Post-Partial Behavior": true,
|
||||
"Outlying Whitespace (Inline)": true,
|
||||
"Standalone Tag": false,
|
||||
"Indented Standalone Tag": false,
|
||||
"Pair with Padding": true,
|
||||
"Surrounding Whitespace": true,
|
||||
"Standalone Line Endings": false,
|
||||
"Standalone Without Previous Line": false,
|
||||
"Standalone Without Newline": false,
|
||||
},
|
||||
"interpolation.json": map[string]bool{
|
||||
"No Interpolation": true,
|
||||
"Basic Interpolation": true,
|
||||
"HTML Escaping": false,
|
||||
"Triple Mustache": true,
|
||||
"Ampersand": false,
|
||||
"Basic Integer Interpolation": true,
|
||||
"Triple Mustache Integer Interpolation": true,
|
||||
"Ampersand Integer Interpolation": false,
|
||||
"Basic Decimal Interpolation": true,
|
||||
"Triple Mustache Decimal Interpolation": true,
|
||||
"Ampersand Decimal Interpolation": false,
|
||||
"Basic Context Miss Interpolation": true,
|
||||
"Triple Mustache Context Miss Interpolation": true,
|
||||
"Ampersand Context Miss Interpolation": true,
|
||||
"Dotted Names - Basic Interpolation": true,
|
||||
"Dotted Names - Triple Mustache Interpolation": true,
|
||||
"Dotted Names - Ampersand Interpolation": false,
|
||||
"Dotted Names - Arbitrary Depth": true,
|
||||
"Dotted Names - Broken Chains": true,
|
||||
"Dotted Names - Broken Chain Resolution": true,
|
||||
"Dotted Names - Initial Resolution": true,
|
||||
"Interpolation - Surrounding Whitespace": true,
|
||||
"Triple Mustache - Surrounding Whitespace": true,
|
||||
"Ampersand - Surrounding Whitespace": false,
|
||||
"Interpolation - Standalone": true,
|
||||
"Triple Mustache - Standalone": true,
|
||||
"Ampersand - Standalone": false,
|
||||
"Interpolation With Padding": true,
|
||||
"Triple Mustache With Padding": false,
|
||||
"Ampersand With Padding": false,
|
||||
},
|
||||
"inverted.json": map[string]bool{
|
||||
"Falsey": true,
|
||||
"Truthy": true,
|
||||
"Context": true,
|
||||
"List": true,
|
||||
"Empty List": true,
|
||||
"Doubled": false,
|
||||
"Nested (Falsey)": true,
|
||||
"Nested (Truthy)": true,
|
||||
"Context Misses": true,
|
||||
"Dotted Names - Truthy": true,
|
||||
"Dotted Names - Falsey": true,
|
||||
"Internal Whitespace": true,
|
||||
"Indented Inline Sections": true,
|
||||
"Standalone Lines": false,
|
||||
"Standalone Indented Lines": false,
|
||||
"Padding": true,
|
||||
"Dotted Names - Broken Chains": true,
|
||||
"Surrounding Whitespace": true,
|
||||
"Standalone Line Endings": false,
|
||||
"Standalone Without Previous Line": false,
|
||||
"Standalone Without Newline": false,
|
||||
},
|
||||
"partials.json": map[string]bool{
|
||||
"Basic Behavior": true,
|
||||
"Failed Lookup": true,
|
||||
"Context": true,
|
||||
"Recursion": true,
|
||||
"Surrounding Whitespace": true,
|
||||
"Inline Indentation": true,
|
||||
"Standalone Line Endings": false,
|
||||
"Standalone Without Previous Line": false,
|
||||
"Standalone Without Newline": false,
|
||||
"Standalone Indentation": false,
|
||||
"Padding Whitespace": true,
|
||||
},
|
||||
"sections.json": map[string]bool{
|
||||
"Truthy": true,
|
||||
"Falsey": true,
|
||||
"Context": true,
|
||||
"Deeply Nested Contexts": false,
|
||||
"List": true,
|
||||
"Empty List": true,
|
||||
"Doubled": false,
|
||||
"Nested (Truthy)": true,
|
||||
"Nested (Falsey)": true,
|
||||
"Context Misses": true,
|
||||
"Implicit Iterator - String": true,
|
||||
"Implicit Iterator - Integer": true,
|
||||
"Implicit Iterator - Decimal": true,
|
||||
"Implicit Iterator - Array": true,
|
||||
"Dotted Names - Truthy": true,
|
||||
"Dotted Names - Falsey": true,
|
||||
"Dotted Names - Broken Chains": true,
|
||||
"Surrounding Whitespace": true,
|
||||
"Internal Whitespace": true,
|
||||
"Indented Inline Sections": true,
|
||||
"Standalone Lines": false,
|
||||
"Indented Standalone Lines": false,
|
||||
"Standalone Line Endings": false,
|
||||
"Standalone Without Previous Line": false,
|
||||
"Standalone Without Newline": false,
|
||||
"Padding": true,
|
||||
},
|
||||
"~lambdas.json": nil, // not implemented
|
||||
}
|
||||
|
||||
type specTest struct {
|
||||
Name string `json:"name"`
|
||||
Data interface{} `json:"data"`
|
||||
Expected string `json:"expected"`
|
||||
Template string `json:"template"`
|
||||
Description string `json:"desc"`
|
||||
Partials map[string]string `json:"partials"`
|
||||
}
|
||||
|
||||
type specTestSuite struct {
|
||||
Tests []specTest `json:"tests"`
|
||||
}
|
||||
|
||||
func TestSpec(t *testing.T) {
|
||||
root := filepath.Join(os.Getenv("PWD"), "spec", "specs")
|
||||
if _, err := os.Stat(root); err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
t.Fatalf("Could not find the specs folder at %s, ensure the submodule exists by running 'git submodule update --init'", root)
|
||||
}
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
paths, err := filepath.Glob(root + "/*.json")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
sort.Strings(paths)
|
||||
|
||||
for _, path := range paths {
|
||||
_, file := filepath.Split(path)
|
||||
enabled, ok := enabledTests[file]
|
||||
if !ok {
|
||||
t.Errorf("Unexpected file %s, consider adding to enabledFiles", file)
|
||||
continue
|
||||
}
|
||||
if enabled == nil {
|
||||
continue
|
||||
}
|
||||
b, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
var suite specTestSuite
|
||||
err = json.Unmarshal(b, &suite)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
for _, test := range suite.Tests {
|
||||
runTest(t, file, &test)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func runTest(t *testing.T, file string, test *specTest) {
|
||||
enabled, ok := enabledTests[file][test.Name]
|
||||
if !ok {
|
||||
t.Errorf("[%s %s]: Unexpected test, add to enabledTests", file, test.Name)
|
||||
}
|
||||
if !enabled {
|
||||
t.Logf("[%s %s]: Skipped", file, test.Name)
|
||||
return
|
||||
}
|
||||
|
||||
var out string
|
||||
var err error
|
||||
if len(test.Partials) > 0 {
|
||||
out, err = RenderPartials(test.Template, &StaticProvider{test.Partials}, test.Data)
|
||||
} else {
|
||||
out, err = Render(test.Template, test.Data)
|
||||
}
|
||||
if err != nil {
|
||||
t.Errorf("[%s %s]: %s", file, test.Name, err.Error())
|
||||
return
|
||||
}
|
||||
if out != test.Expected {
|
||||
t.Errorf("[%s %s]: Expected %q, got %q", file, test.Name, test.Expected, out)
|
||||
return
|
||||
}
|
||||
|
||||
t.Logf("[%s %s]: Passed", file, test.Name)
|
||||
}
|
1
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/tests/partial.mustache
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{{Name}}
|
1
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/tests/test1.mustache
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
hello {{name}}
|
1
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/tests/test2.mustache
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
hello {{> partial}}
|
3
contrib/backends/srndv2/src/srnd/vendor/github.com/cbroglie/mustache/tests/test3.mustache
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
{{#users}}
|
||||
{{> partial}}
|
||||
{{/users}}
|
8
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/.gitignore
generated
vendored
Normal file
@ -0,0 +1,8 @@
|
||||
# Generated test captchas
|
||||
capgen/*.png
|
||||
capgen/*.wav
|
||||
|
||||
# Programs
|
||||
capgen/capgen
|
||||
cangensounds/cangensounds
|
||||
capexample/capexample
|
19
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/LICENSE
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
Copyright (c) 2011-2014 Dmitry Chestnykh <dmitry@codingrobots.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
275
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/README.md
generated
vendored
Normal file
@ -0,0 +1,275 @@
|
||||
Package captcha
=====================

    import "github.com/dchest/captcha"

Package captcha implements generation and verification of image and audio
CAPTCHAs.

A captcha solution is the sequence of digits 0-9 with the defined length.
There are two captcha representations: image and audio.

An image representation is a PNG-encoded image with the solution printed on
it in such a way that makes it hard for computers to solve it using OCR.

An audio representation is a WAVE-encoded (8 kHz unsigned 8-bit) sound with the
spoken solution (currently in English, Russian, and Chinese). To make it hard
for computers to solve audio captcha, the voice that pronounces numbers has
random speed and pitch, and there is a randomly generated background noise
mixed into the sound.

This package doesn't require external files or libraries to generate captcha
representations; it is self-contained.

To make captchas one-time, the package includes a memory storage that stores
captcha ids, their solutions, and expiration time. Used captchas are removed
from the store immediately after calling Verify or VerifyString, while
unused captchas (user loaded a page with captcha, but didn't submit the
form) are collected automatically after the predefined expiration time.
Developers can also provide custom store (for example, which saves captcha
ids and solutions in database) by implementing Store interface and
registering the object with SetCustomStore.

Captchas are created by calling New, which returns the captcha id. Their
representations, though, are created on-the-fly by calling WriteImage or
WriteAudio functions. Created representations are not stored anywhere, but
subsequent calls to these functions with the same id will write the same
captcha solution. Reload function will create a new different solution for the
provided captcha, allowing users to "reload" captcha if they can't solve the
displayed one without reloading the whole page. Verify and VerifyString are
used to verify that the given solution is the right one for the given captcha
id.

Server provides an http.Handler which can serve image and audio
representations of captchas automatically from the URL. It can also be used
to reload captchas. Refer to Server function documentation for details, or
take a look at the example in "capexample" subdirectory.
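
A minimal sketch of that flow with the package's documented entry points (the handler paths and form field names here are illustrative, not part of the package):

```go
package main

import (
	"fmt"
	"log"
	"net/http"

	"github.com/dchest/captcha"
)

func main() {
	// Serve PNG/WAV representations at /captcha/<id>.png and /captcha/<id>.wav.
	http.Handle("/captcha/", captcha.Server(captcha.StdWidth, captcha.StdHeight))

	// Issue a new captcha id when rendering a form.
	http.HandleFunc("/form", func(w http.ResponseWriter, r *http.Request) {
		id := captcha.New()
		fmt.Fprintf(w, `<img src="/captcha/%s.png"> (id %s)`, id, id)
	})

	// Verify the submitted solution; the id is consumed on verification.
	http.HandleFunc("/submit", func(w http.ResponseWriter, r *http.Request) {
		if captcha.VerifyString(r.FormValue("captchaId"), r.FormValue("captchaSolution")) {
			fmt.Fprintln(w, "ok")
		} else {
			fmt.Fprintln(w, "wrong captcha")
		}
	})

	log.Fatal(http.ListenAndServe(":8080", nil))
}
```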
|
||||
|
||||
|
||||
Examples
|
||||
--------
|
||||
|
||||

|
||||
|
||||
[Audio](https://github.com/dchest/captcha/raw/master/capgen/example.wav)
|
||||
|
||||
|
||||
Constants
|
||||
---------
|
||||
|
||||
``` go
|
||||
const (
|
||||
// Default number of digits in captcha solution.
|
||||
DefaultLen = 6
|
||||
// The number of captchas created that triggers garbage collection used
|
||||
// by default store.
|
||||
CollectNum = 100
|
||||
// Expiration time of captchas used by default store.
|
||||
Expiration = 10 * time.Minute
|
||||
)
|
||||
```
|
||||
|
||||
``` go
|
||||
const (
|
||||
// Standard width and height of a captcha image.
|
||||
StdWidth = 240
|
||||
StdHeight = 80
|
||||
)
|
||||
```
|
||||
|
||||
|
||||
Variables
|
||||
---------
|
||||
|
||||
``` go
|
||||
var (
|
||||
ErrNotFound = errors.New("captcha: id not found")
|
||||
)
|
||||
```
|
||||
|
||||
|
||||
|
||||
Functions
|
||||
---------
|
||||
|
||||
### func New
|
||||
|
||||
func New() string
|
||||
|
||||
New creates a new captcha with the standard length, saves it in the internal
|
||||
storage and returns its id.
|
||||
|
||||
### func NewLen
|
||||
|
||||
func NewLen(length int) (id string)
|
||||
|
||||
NewLen is just like New, but accepts length of a captcha solution as the
|
||||
argument.
|
||||
|
||||
### func RandomDigits
|
||||
|
||||
func RandomDigits(length int) (b []byte)
|
||||
|
||||
RandomDigits returns a byte slice of the given length containing
|
||||
pseudorandom numbers in range 0-9. The slice can be used as a captcha
|
||||
solution.
|
||||
|
||||
### func Reload
|
||||
|
||||
func Reload(id string) bool
|
||||
|
||||
Reload generates and remembers new digits for the given captcha id. This
|
||||
function returns false if there is no captcha with the given id.
|
||||
|
||||
After calling this function, the image or audio presented to a user must be
|
||||
refreshed to show the new captcha representation (WriteImage and WriteAudio
|
||||
will write the new one).
|
||||
|
||||
### func Server
|
||||
|
||||
func Server(imgWidth, imgHeight int) http.Handler
|
||||
|
||||
Server returns a handler that serves HTTP requests with image or
|
||||
audio representations of captchas. Image dimensions are accepted as
|
||||
arguments. The server decides which captcha to serve based on the last URL
|
||||
path component: file name part must contain a captcha id, file extension —
|
||||
its format (PNG or WAV).
|
||||
|
||||
For example, for file name "LBm5vMjHDtdUfaWYXiQX.png" it serves an image captcha
|
||||
with id "LBm5vMjHDtdUfaWYXiQX", and for "LBm5vMjHDtdUfaWYXiQX.wav" it serves the
|
||||
same captcha in audio format.
|
||||
|
||||
To serve a captcha as a downloadable file, the URL must be constructed in
|
||||
such a way as if the file to serve is in the "download" subdirectory:
|
||||
"/download/LBm5vMjHDtdUfaWYXiQX.wav".
|
||||
|
||||
To reload captcha (get a different solution for the same captcha id), append
|
||||
"?reload=x" to URL, where x may be anything (for example, current time or a
|
||||
random number to make browsers refetch an image instead of loading it from
|
||||
cache).
|
||||
|
||||
By default, the Server serves audio in English language. To serve audio
|
||||
captcha in one of the other supported languages, append "lang" value, for
|
||||
example, "?lang=ru".
|
||||
|
||||
### func SetCustomStore
|
||||
|
||||
func SetCustomStore(s Store)
|
||||
|
||||
SetCustomStore sets custom storage for captchas, replacing the default
|
||||
memory store. This function must be called before generating any captchas.
|
||||
|
||||
### func Verify
|
||||
|
||||
func Verify(id string, digits []byte) bool
|
||||
|
||||
Verify returns true if the given digits are the ones that were used to
|
||||
create the given captcha id.
|
||||
|
||||
The function deletes the captcha with the given id from the internal
|
||||
storage, so that the same captcha can't be verified anymore.
|
||||
|
||||
### func VerifyString
|
||||
|
||||
func VerifyString(id string, digits string) bool
|
||||
|
||||
VerifyString is like Verify, but accepts a string of digits. It removes
|
||||
spaces and commas from the string, but any other characters, apart from
|
||||
digits and listed above, will cause the function to return false.
|
||||
|
||||
### func WriteAudio
|
||||
|
||||
func WriteAudio(w io.Writer, id string, lang string) error
|
||||
|
||||
WriteAudio writes WAV-encoded audio representation of the captcha with the
|
||||
given id and the given language. If there are no sounds for the given
|
||||
language, English is used.
|
||||
|
||||
### func WriteImage
|
||||
|
||||
func WriteImage(w io.Writer, id string, width, height int) error
|
||||
|
||||
WriteImage writes PNG-encoded image representation of the captcha with the
|
||||
given id. The image will have the given width and height.
|
||||
|
||||
|
||||
Types
|
||||
-----
|
||||
|
||||
``` go
|
||||
type Audio struct {
|
||||
// contains unexported fields
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### func NewAudio
|
||||
|
||||
func NewAudio(id string, digits []byte, lang string) *Audio
|
||||
|
||||
NewAudio returns a new audio captcha with the given digits, where each digit
|
||||
must be in range 0-9. Digits are pronounced in the given language. If there
|
||||
are no sounds for the given language, English is used.
|
||||
|
||||
Possible values for lang are "en", "ru", "zh".
|
||||
|
||||
### func (*Audio) EncodedLen
|
||||
|
||||
func (a *Audio) EncodedLen() int
|
||||
|
||||
EncodedLen returns the length of WAV-encoded audio captcha.
|
||||
|
||||
### func (*Audio) WriteTo
|
||||
|
||||
func (a *Audio) WriteTo(w io.Writer) (n int64, err error)
|
||||
|
||||
WriteTo writes captcha audio in WAVE format into the given io.Writer, and
|
||||
returns the number of bytes written and an error if any.
|
||||
|
||||
``` go
|
||||
type Image struct {
|
||||
*image.Paletted
|
||||
// contains unexported fields
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### func NewImage
|
||||
|
||||
func NewImage(id string, digits []byte, width, height int) *Image
|
||||
|
||||
NewImage returns a new captcha image of the given width and height with the
|
||||
given digits, where each digit must be in range 0-9.
|
||||
|
||||
### func (*Image) WriteTo
|
||||
|
||||
func (m *Image) WriteTo(w io.Writer) (int64, error)
|
||||
|
||||
WriteTo writes captcha image in PNG format into the given writer.
|
||||
|
||||
``` go
|
||||
type Store interface {
|
||||
// Set sets the digits for the captcha id.
|
||||
Set(id string, digits []byte)
|
||||
|
||||
// Get returns stored digits for the captcha id. Clear indicates
|
||||
// whether the captcha must be deleted from the store.
|
||||
Get(id string, clear bool) (digits []byte)
|
||||
}
|
||||
```
|
||||
|
||||
An object implementing Store interface can be registered with SetCustomStore
|
||||
function to handle storage and retrieval of captcha ids and solutions for
|
||||
them, replacing the default memory store.
|
||||
|
||||
It is the responsibility of an object to delete expired and used captchas
|
||||
when necessary (for example, the default memory store collects them in Set
|
||||
method after the certain amount of captchas has been stored.)
|
||||
|
||||
### func NewMemoryStore
|
||||
|
||||
func NewMemoryStore(collectNum int, expiration time.Duration) Store
|
||||
|
||||
NewMemoryStore returns a new standard memory store for captchas with the
|
||||
given collection threshold and expiration time in seconds. The returned
|
||||
store must be registered with SetCustomStore to replace the default one.
|
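As a sketch of the Store extension point described above, a custom store only has to implement the two documented methods and be registered before any captchas are generated. The map-backed example below is illustrative only; it is not safe for concurrent use and ignores expiration, which a real implementation (e.g. one backed by a database) would have to handle.

```go
package main

import "github.com/dchest/captcha"

// mapStore is a toy implementation of the captcha.Store interface.
type mapStore struct {
	digits map[string][]byte
}

// Set stores the solution digits for a captcha id.
func (s *mapStore) Set(id string, digits []byte) {
	s.digits[id] = digits
}

// Get returns the stored digits; when clear is true the entry is deleted,
// which is what makes captchas one-time.
func (s *mapStore) Get(id string, clear bool) []byte {
	d := s.digits[id]
	if clear {
		delete(s.digits, id)
	}
	return d
}

func main() {
	// Must be called before generating any captchas.
	captcha.SetCustomStore(&mapStore{digits: map[string][]byte{}})
	_ = captcha.NewLen(captcha.DefaultLen)
}
```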
232
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/audio.go
generated
vendored
Normal file
@ -0,0 +1,232 @@
|
||||
// Copyright 2011-2014 Dmitry Chestnykh. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package captcha
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/binary"
|
||||
"io"
|
||||
"math"
|
||||
)
|
||||
|
||||
const sampleRate = 8000 // Hz
|
||||
|
||||
var endingBeepSound []byte
|
||||
|
||||
func init() {
|
||||
endingBeepSound = changeSpeed(beepSound, 1.4)
|
||||
}
|
||||
|
||||
type Audio struct {
|
||||
body *bytes.Buffer
|
||||
digitSounds [][]byte
|
||||
rng siprng
|
||||
}
|
||||
|
||||
// NewAudio returns a new audio captcha with the given digits, where each digit
|
||||
// must be in range 0-9. Digits are pronounced in the given language. If there
|
||||
// are no sounds for the given language, English is used.
|
||||
//
|
||||
// Possible values for lang are "en", "ru", "zh".
|
||||
func NewAudio(id string, digits []byte, lang string) *Audio {
|
||||
a := new(Audio)
|
||||
|
||||
// Initialize PRNG.
|
||||
a.rng.Seed(deriveSeed(audioSeedPurpose, id, digits))
|
||||
|
||||
if sounds, ok := digitSounds[lang]; ok {
|
||||
a.digitSounds = sounds
|
||||
} else {
|
||||
a.digitSounds = digitSounds["en"]
|
||||
}
|
||||
numsnd := make([][]byte, len(digits))
|
||||
nsdur := 0
|
||||
for i, n := range digits {
|
||||
snd := a.randomizedDigitSound(n)
|
||||
nsdur += len(snd)
|
||||
numsnd[i] = snd
|
||||
}
|
||||
// Random intervals between digits (including beginning).
|
||||
intervals := make([]int, len(digits)+1)
|
||||
intdur := 0
|
||||
for i := range intervals {
|
||||
dur := a.rng.Int(sampleRate, sampleRate*3) // 1 to 3 seconds
|
||||
intdur += dur
|
||||
intervals[i] = dur
|
||||
}
|
||||
// Generate background sound.
|
||||
bg := a.makeBackgroundSound(a.longestDigitSndLen()*len(digits) + intdur)
|
||||
// Create buffer and write audio to it.
|
||||
sil := makeSilence(sampleRate / 5)
|
||||
bufcap := 3*len(beepSound) + 2*len(sil) + len(bg) + len(endingBeepSound)
|
||||
a.body = bytes.NewBuffer(make([]byte, 0, bufcap))
|
||||
// Write prelude, three beeps.
|
||||
a.body.Write(beepSound)
|
||||
a.body.Write(sil)
|
||||
a.body.Write(beepSound)
|
||||
a.body.Write(sil)
|
||||
a.body.Write(beepSound)
|
||||
// Write digits.
|
||||
pos := intervals[0]
|
||||
for i, v := range numsnd {
|
||||
mixSound(bg[pos:], v)
|
||||
pos += len(v) + intervals[i+1]
|
||||
}
|
||||
a.body.Write(bg)
|
||||
// Write ending (one beep).
|
||||
a.body.Write(endingBeepSound)
|
||||
return a
|
||||
}
|
||||
|
||||
// WriteTo writes captcha audio in WAVE format into the given io.Writer, and
|
||||
// returns the number of bytes written and an error if any.
|
||||
func (a *Audio) WriteTo(w io.Writer) (n int64, err error) {
|
||||
// Calculate padded length of PCM chunk data.
|
||||
bodyLen := uint32(a.body.Len())
|
||||
paddedBodyLen := bodyLen
|
||||
if bodyLen%2 != 0 {
|
||||
paddedBodyLen++
|
||||
}
|
||||
totalLen := uint32(len(waveHeader)) - 4 + paddedBodyLen
|
||||
// Header.
|
||||
header := make([]byte, len(waveHeader)+4) // includes 4 bytes for chunk size
|
||||
copy(header, waveHeader)
|
||||
// Put the length of whole RIFF chunk.
|
||||
binary.LittleEndian.PutUint32(header[4:], totalLen)
|
||||
// Put the length of WAVE chunk.
|
||||
binary.LittleEndian.PutUint32(header[len(waveHeader):], bodyLen)
|
||||
// Write header.
|
||||
nn, err := w.Write(header)
|
||||
n = int64(nn)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
// Write data.
|
||||
n, err = a.body.WriteTo(w)
|
||||
n += int64(nn)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
// Pad byte if chunk length is odd.
|
||||
// (As header has even length, we can check if n is odd, not chunk).
|
||||
if bodyLen != paddedBodyLen {
|
||||
w.Write([]byte{0})
|
||||
n++
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// EncodedLen returns the length of WAV-encoded audio captcha.
|
||||
func (a *Audio) EncodedLen() int {
|
||||
return len(waveHeader) + 4 + a.body.Len()
|
||||
}
|
||||
|
||||
func (a *Audio) makeBackgroundSound(length int) []byte {
|
||||
b := a.makeWhiteNoise(length, 4)
|
||||
for i := 0; i < length/(sampleRate/10); i++ {
|
||||
snd := reversedSound(a.digitSounds[a.rng.Intn(10)])
|
||||
snd = changeSpeed(snd, a.rng.Float(0.8, 1.4))
|
||||
place := a.rng.Intn(len(b) - len(snd))
|
||||
setSoundLevel(snd, a.rng.Float(0.2, 0.5))
|
||||
mixSound(b[place:], snd)
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
func (a *Audio) randomizedDigitSound(n byte) []byte {
|
||||
s := a.randomSpeed(a.digitSounds[n])
|
||||
setSoundLevel(s, a.rng.Float(0.75, 1.2))
|
||||
return s
|
||||
}
|
||||
|
||||
func (a *Audio) longestDigitSndLen() int {
|
||||
n := 0
|
||||
for _, v := range a.digitSounds {
|
||||
if n < len(v) {
|
||||
n = len(v)
|
||||
}
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
func (a *Audio) randomSpeed(b []byte) []byte {
|
||||
pitch := a.rng.Float(0.9, 1.2)
|
||||
return changeSpeed(b, pitch)
|
||||
}
|
||||
|
||||
func (a *Audio) makeWhiteNoise(length int, level uint8) []byte {
|
||||
noise := a.rng.Bytes(length)
|
||||
adj := 128 - level/2
|
||||
for i, v := range noise {
|
||||
v %= level
|
||||
v += adj
|
||||
noise[i] = v
|
||||
}
|
||||
return noise
|
||||
}
|
||||
|
||||
// mixSound mixes src into dst. Dst must have length equal to or greater than
|
||||
// src length.
|
||||
func mixSound(dst, src []byte) {
|
||||
for i, v := range src {
|
||||
av := int(v)
|
||||
bv := int(dst[i])
|
||||
if av < 128 && bv < 128 {
|
||||
dst[i] = byte(av * bv / 128)
|
||||
} else {
|
||||
dst[i] = byte(2*(av+bv) - av*bv/128 - 256)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func setSoundLevel(a []byte, level float64) {
|
||||
for i, v := range a {
|
||||
av := float64(v)
|
||||
switch {
|
||||
case av > 128:
|
||||
if av = (av-128)*level + 128; av < 128 {
|
||||
av = 128
|
||||
}
|
||||
case av < 128:
|
||||
if av = 128 - (128-av)*level; av > 128 {
|
||||
av = 128
|
||||
}
|
||||
default:
|
||||
continue
|
||||
}
|
||||
a[i] = byte(av)
|
||||
}
|
||||
}
|
||||
|
||||
// changeSpeed returns new PCM bytes from the bytes with the speed and pitch
|
||||
// changed to the given value that must be in range [0, x].
|
||||
func changeSpeed(a []byte, speed float64) []byte {
|
||||
b := make([]byte, int(math.Floor(float64(len(a))*speed)))
|
||||
var p float64
|
||||
for _, v := range a {
|
||||
for i := int(p); i < int(p+speed); i++ {
|
||||
b[i] = v
|
||||
}
|
||||
p += speed
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
func makeSilence(length int) []byte {
|
||||
b := make([]byte, length)
|
||||
for i := range b {
|
||||
b[i] = 128
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
func reversedSound(a []byte) []byte {
|
||||
n := len(a)
|
||||
b := make([]byte, n)
|
||||
for i, v := range a {
|
||||
b[n-1-i] = v
|
||||
}
|
||||
return b
|
||||
}
|
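The audio path above (NewAudio building the PCM body, WriteTo emitting the RIFF/WAVE framing with the padded chunk length) can be exercised directly; a minimal sketch, with the id and output filename chosen purely for illustration:

```go
package main

import (
	"log"
	"os"

	"github.com/dchest/captcha"
)

func main() {
	// Generate a random solution and render it as a WAV file on disk.
	digits := captcha.RandomDigits(captcha.DefaultLen)
	a := captcha.NewAudio("example-id", digits, "en")

	f, err := os.Create("captcha.wav")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	if _, err := a.WriteTo(f); err != nil {
		log.Fatal(err)
	}
}
```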
32
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/audio_test.go
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
||||
// Copyright 2011 Dmitry Chestnykh. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package captcha
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func BenchmarkNewAudio(b *testing.B) {
|
||||
b.StopTimer()
|
||||
d := RandomDigits(DefaultLen)
|
||||
id := randomId()
|
||||
b.StartTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
NewAudio(id, d, "")
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkAudioWriteTo(b *testing.B) {
|
||||
b.StopTimer()
|
||||
d := RandomDigits(DefaultLen)
|
||||
id := randomId()
|
||||
b.StartTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
a := NewAudio(id, d, "")
|
||||
n, _ := a.WriteTo(ioutil.Discard)
|
||||
b.SetBytes(n)
|
||||
}
|
||||
}
|
165
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/captcha.go
generated
vendored
Normal file
@ -0,0 +1,165 @@
// Copyright 2011 Dmitry Chestnykh. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

// Package captcha implements generation and verification of image and audio
// CAPTCHAs.
//
// A captcha solution is the sequence of digits 0-9 with the defined length.
// There are two captcha representations: image and audio.
//
// An image representation is a PNG-encoded image with the solution printed on
// it in such a way that makes it hard for computers to solve it using OCR.
//
// An audio representation is a WAVE-encoded (8 kHz unsigned 8-bit) sound with
// the spoken solution (currently in English, Russian, and Chinese). To make it
// hard for computers to solve audio captcha, the voice that pronounces numbers
// has random speed and pitch, and there is a randomly generated background
// noise mixed into the sound.
//
// This package doesn't require external files or libraries to generate captcha
// representations; it is self-contained.
//
// To make captchas one-time, the package includes a memory storage that stores
// captcha ids, their solutions, and expiration time. Used captchas are removed
// from the store immediately after calling Verify or VerifyString, while
// unused captchas (user loaded a page with captcha, but didn't submit the
// form) are collected automatically after the predefined expiration time.
// Developers can also provide custom store (for example, which saves captcha
// ids and solutions in database) by implementing Store interface and
// registering the object with SetCustomStore.
//
// Captchas are created by calling New, which returns the captcha id. Their
// representations, though, are created on-the-fly by calling WriteImage or
// WriteAudio functions. Created representations are not stored anywhere, but
// subsequent calls to these functions with the same id will write the same
// captcha solution. Reload function will create a new different solution for
// the provided captcha, allowing users to "reload" captcha if they can't solve
// the displayed one without reloading the whole page. Verify and VerifyString
// are used to verify that the given solution is the right one for the given
// captcha id.
//
// Server provides an http.Handler which can serve image and audio
// representations of captchas automatically from the URL. It can also be used
// to reload captchas. Refer to Server function documentation for details, or
// take a look at the example in "capexample" subdirectory.
package captcha

import (
	"bytes"
	"errors"
	"io"
	"time"
)

const (
	// Default number of digits in captcha solution.
	DefaultLen = 6
	// The number of captchas created that triggers garbage collection used
	// by default store.
	CollectNum = 100
	// Expiration time of captchas used by default store.
	Expiration = 10 * time.Minute
)

var (
	ErrNotFound = errors.New("captcha: id not found")
	// globalStore is a shared storage for captchas, generated by New function.
	globalStore = NewMemoryStore(CollectNum, Expiration)
)

// SetCustomStore sets custom storage for captchas, replacing the default
// memory store. This function must be called before generating any captchas.
func SetCustomStore(s Store) {
	globalStore = s
}

// New creates a new captcha with the standard length, saves it in the internal
// storage and returns its id.
func New() string {
	return NewLen(DefaultLen)
}

// NewLen is just like New, but accepts length of a captcha solution as the
// argument.
func NewLen(length int) (id string) {
	id = randomId()
	globalStore.Set(id, RandomDigits(length))
	return
}

// Reload generates and remembers new digits for the given captcha id. This
// function returns false if there is no captcha with the given id.
//
// After calling this function, the image or audio presented to a user must be
// refreshed to show the new captcha representation (WriteImage and WriteAudio
// will write the new one).
func Reload(id string) bool {
	old := globalStore.Get(id, false)
	if old == nil {
		return false
	}
	globalStore.Set(id, RandomDigits(len(old)))
	return true
}

// WriteImage writes PNG-encoded image representation of the captcha with the
// given id. The image will have the given width and height.
func WriteImage(w io.Writer, id string, width, height int) error {
	d := globalStore.Get(id, false)
	if d == nil {
		return ErrNotFound
	}
	_, err := NewImage(id, d, width, height).WriteTo(w)
	return err
}

// WriteAudio writes WAV-encoded audio representation of the captcha with the
// given id and the given language. If there are no sounds for the given
// language, English is used.
func WriteAudio(w io.Writer, id string, lang string) error {
	d := globalStore.Get(id, false)
	if d == nil {
		return ErrNotFound
	}
	_, err := NewAudio(id, d, lang).WriteTo(w)
	return err
}

// Verify returns true if the given digits are the ones that were used to
// create the given captcha id.
//
// The function deletes the captcha with the given id from the internal
// storage, so that the same captcha can't be verified anymore.
func Verify(id string, digits []byte) bool {
	if digits == nil || len(digits) == 0 {
		return false
	}
	reald := globalStore.Get(id, true)
	if reald == nil {
		return false
	}
	return bytes.Equal(digits, reald)
}

// VerifyString is like Verify, but accepts a string of digits. It removes
// spaces and commas from the string, but any other characters, apart from
// digits and listed above, will cause the function to return false.
func VerifyString(id string, digits string) bool {
	if digits == "" {
		return false
	}
	ns := make([]byte, len(digits))
	for i := range ns {
		d := digits[i]
		switch {
		case '0' <= d && d <= '9':
			ns[i] = d - '0'
		case d == ' ' || d == ',':
			// ignore
		default:
			return false
		}
	}
	return Verify(id, ns)
}
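Aside, not part of the commit: the package comment above already names the whole exported flow (New, WriteImage/WriteAudio or Server, Verify/VerifyString). A minimal sketch of how a frontend might wire those pieces together; the routes and form field names ("captchaId", "captchaSolution") are invented for illustration, and the import path assumes the upstream package location.

package main

import (
	"fmt"
	"net/http"

	"github.com/dchest/captcha"
)

// showForm renders a form with a fresh captcha image (illustrative markup).
func showForm(w http.ResponseWriter, r *http.Request) {
	id := captcha.New() // stores a new solution, returns its id
	fmt.Fprintf(w, `<form action="/submit" method="post">
<img src="/captcha/%s.png">
<input type="hidden" name="captchaId" value="%s">
<input name="captchaSolution">
<button>Post</button>
</form>`, id, id)
}

// submit checks the solution; VerifyString also deletes the captcha from the
// store, so a solved id cannot be replayed.
func submit(w http.ResponseWriter, r *http.Request) {
	if captcha.VerifyString(r.FormValue("captchaId"), r.FormValue("captchaSolution")) {
		fmt.Fprintln(w, "captcha solved")
	} else {
		fmt.Fprintln(w, "wrong or expired captcha")
	}
}

func main() {
	http.HandleFunc("/", showForm)
	http.HandleFunc("/submit", submit)
	// Server generates /captcha/<id>.png and /captcha/<id>.wav on demand.
	http.Handle("/captcha/", captcha.Server(captcha.StdWidth, captcha.StdHeight))
	http.ListenAndServe(":8080", nil)
}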
52
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/captcha_test.go
generated
vendored
Normal file
@ -0,0 +1,52 @@
|
||||
// Copyright 2011 Dmitry Chestnykh. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package captcha
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestNew(t *testing.T) {
|
||||
c := New()
|
||||
if c == "" {
|
||||
t.Errorf("expected id, got empty string")
|
||||
}
|
||||
}
|
||||
|
||||
func TestVerify(t *testing.T) {
|
||||
id := New()
|
||||
if Verify(id, []byte{0, 0}) {
|
||||
t.Errorf("verified wrong captcha")
|
||||
}
|
||||
id = New()
|
||||
d := globalStore.Get(id, false) // cheating
|
||||
if !Verify(id, d) {
|
||||
t.Errorf("proper captcha not verified")
|
||||
}
|
||||
}
|
||||
|
||||
func TestReload(t *testing.T) {
|
||||
id := New()
|
||||
d1 := globalStore.Get(id, false) // cheating
|
||||
Reload(id)
|
||||
d2 := globalStore.Get(id, false) // cheating again
|
||||
if bytes.Equal(d1, d2) {
|
||||
t.Errorf("reload didn't work: %v = %v", d1, d2)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRandomDigits(t *testing.T) {
|
||||
d1 := RandomDigits(10)
|
||||
for _, v := range d1 {
|
||||
if v > 9 {
|
||||
t.Errorf("digits not in range 0-9: %v", d1)
|
||||
}
|
||||
}
|
||||
d2 := RandomDigits(10)
|
||||
if bytes.Equal(d1, d2) {
|
||||
t.Errorf("digits seem to be not random")
|
||||
}
|
||||
}
|
214
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/font.go
generated
vendored
Normal file
@ -0,0 +1,214 @@
|
||||
// Copyright 2011 Dmitry Chestnykh. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package captcha
|
||||
|
||||
const (
|
||||
fontWidth = 11
|
||||
fontHeight = 18
|
||||
blackChar = 1
|
||||
)
|
||||
|
||||
var font = [][]byte{
|
||||
{ // 0
|
||||
0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0,
|
||||
0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0,
|
||||
0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0,
|
||||
0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0,
|
||||
1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1,
|
||||
0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0,
|
||||
0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0,
|
||||
0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0,
|
||||
0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0,
|
||||
},
|
||||
{ // 1
|
||||
0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0,
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
},
|
||||
{ // 2
|
||||
0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0,
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
|
||||
1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0,
|
||||
0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0,
|
||||
0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
},
|
||||
{ // 3
|
||||
0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
|
||||
1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0,
|
||||
0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
|
||||
1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0,
|
||||
},
|
||||
{ // 4
|
||||
0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0,
|
||||
0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0,
|
||||
0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0,
|
||||
0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0,
|
||||
0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0,
|
||||
0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0,
|
||||
0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0,
|
||||
0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0,
|
||||
0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0,
|
||||
1, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0,
|
||||
1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0,
|
||||
},
|
||||
{ // 5
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0,
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0,
|
||||
0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0,
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
|
||||
0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0,
|
||||
},
|
||||
{ // 6
|
||||
0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0,
|
||||
0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0,
|
||||
0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 0,
|
||||
1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0,
|
||||
1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0,
|
||||
1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1,
|
||||
0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0,
|
||||
0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0,
|
||||
0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0,
|
||||
},
|
||||
{ // 7
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
|
||||
1, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0,
|
||||
},
|
||||
{ // 8
|
||||
0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0,
|
||||
0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0,
|
||||
0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1,
|
||||
0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0,
|
||||
0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0,
|
||||
0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0,
|
||||
0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0,
|
||||
0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0,
|
||||
1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 0,
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0,
|
||||
0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0,
|
||||
},
|
||||
{ // 9
|
||||
0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0,
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
|
||||
0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1,
|
||||
0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1,
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1,
|
||||
0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0,
|
||||
0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0,
|
||||
0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0,
|
||||
0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0,
|
||||
},
|
||||
}
|
271
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/image.go
generated
vendored
Normal file
@ -0,0 +1,271 @@
|
||||
// Copyright 2011-2014 Dmitry Chestnykh. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package captcha
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"image"
|
||||
"image/color"
|
||||
"image/png"
|
||||
"io"
|
||||
"math"
|
||||
)
|
||||
|
||||
const (
|
||||
// Standard width and height of a captcha image.
|
||||
StdWidth = 240
|
||||
StdHeight = 80
|
||||
// Maximum absolute skew factor of a single digit.
|
||||
maxSkew = 0.7
|
||||
// Number of background circles.
|
||||
circleCount = 20
|
||||
)
|
||||
|
||||
type Image struct {
|
||||
*image.Paletted
|
||||
numWidth int
|
||||
numHeight int
|
||||
dotSize int
|
||||
rng siprng
|
||||
}
|
||||
|
||||
// NewImage returns a new captcha image of the given width and height with the
|
||||
// given digits, where each digit must be in range 0-9.
|
||||
func NewImage(id string, digits []byte, width, height int) *Image {
|
||||
m := new(Image)
|
||||
|
||||
// Initialize PRNG.
|
||||
m.rng.Seed(deriveSeed(imageSeedPurpose, id, digits))
|
||||
|
||||
m.Paletted = image.NewPaletted(image.Rect(0, 0, width, height), m.getRandomPalette())
|
||||
m.calculateSizes(width, height, len(digits))
|
||||
// Randomly position captcha inside the image.
|
||||
maxx := width - (m.numWidth+m.dotSize)*len(digits) - m.dotSize
|
||||
maxy := height - m.numHeight - m.dotSize*2
|
||||
var border int
|
||||
if width > height {
|
||||
border = height / 5
|
||||
} else {
|
||||
border = width / 5
|
||||
}
|
||||
x := m.rng.Int(border, maxx-border)
|
||||
y := m.rng.Int(border, maxy-border)
|
||||
// Draw digits.
|
||||
for _, n := range digits {
|
||||
m.drawDigit(font[n], x, y)
|
||||
x += m.numWidth + m.dotSize
|
||||
}
|
||||
// Draw strike-through line.
|
||||
m.strikeThrough()
|
||||
// Apply wave distortion.
|
||||
m.distort(m.rng.Float(5, 10), m.rng.Float(100, 200))
|
||||
// Fill image with random circles.
|
||||
m.fillWithCircles(circleCount, m.dotSize)
|
||||
return m
|
||||
}
|
||||
|
||||
func (m *Image) getRandomPalette() color.Palette {
|
||||
p := make([]color.Color, circleCount+1)
|
||||
// Transparent color.
|
||||
p[0] = color.RGBA{0xFF, 0xFF, 0xFF, 0x00}
|
||||
// Primary color.
|
||||
prim := color.RGBA{
|
||||
uint8(m.rng.Intn(129)),
|
||||
uint8(m.rng.Intn(129)),
|
||||
uint8(m.rng.Intn(129)),
|
||||
0xFF,
|
||||
}
|
||||
p[1] = prim
|
||||
// Circle colors.
|
||||
for i := 2; i <= circleCount; i++ {
|
||||
p[i] = m.randomBrightness(prim, 255)
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
// encodedPNG encodes an image to PNG and returns
|
||||
// the result as a byte slice.
|
||||
func (m *Image) encodedPNG() []byte {
|
||||
var buf bytes.Buffer
|
||||
if err := png.Encode(&buf, m.Paletted); err != nil {
|
||||
panic(err.Error())
|
||||
}
|
||||
return buf.Bytes()
|
||||
}
|
||||
|
||||
// WriteTo writes captcha image in PNG format into the given writer.
|
||||
func (m *Image) WriteTo(w io.Writer) (int64, error) {
|
||||
n, err := w.Write(m.encodedPNG())
|
||||
return int64(n), err
|
||||
}
|
||||
|
||||
func (m *Image) calculateSizes(width, height, ncount int) {
|
||||
// Goal: fit all digits inside the image.
|
||||
var border int
|
||||
if width > height {
|
||||
border = height / 4
|
||||
} else {
|
||||
border = width / 4
|
||||
}
|
||||
// Convert everything to floats for calculations.
|
||||
w := float64(width - border*2)
|
||||
h := float64(height - border*2)
|
||||
// fw takes into account 1-dot spacing between digits.
|
||||
fw := float64(fontWidth + 1)
|
||||
fh := float64(fontHeight)
|
||||
nc := float64(ncount)
|
||||
// Calculate the width of a single digit taking into account only the
|
||||
// width of the image.
|
||||
nw := w / nc
|
||||
// Calculate the height of a digit from this width.
|
||||
nh := nw * fh / fw
|
||||
// Digit too high?
|
||||
if nh > h {
|
||||
// Fit digits based on height.
|
||||
nh = h
|
||||
nw = fw / fh * nh
|
||||
}
|
||||
// Calculate dot size.
|
||||
m.dotSize = int(nh / fh)
|
||||
if m.dotSize < 1 {
|
||||
m.dotSize = 1
|
||||
}
|
||||
// Save everything, making the actual width smaller by 1 dot to account
|
||||
// for spacing between digits.
|
||||
m.numWidth = int(nw) - m.dotSize
|
||||
m.numHeight = int(nh)
|
||||
}
|
||||
|
||||
func (m *Image) drawHorizLine(fromX, toX, y int, colorIdx uint8) {
|
||||
for x := fromX; x <= toX; x++ {
|
||||
m.SetColorIndex(x, y, colorIdx)
|
||||
}
|
||||
}
|
||||
|
||||
func (m *Image) drawCircle(x, y, radius int, colorIdx uint8) {
|
||||
f := 1 - radius
|
||||
dfx := 1
|
||||
dfy := -2 * radius
|
||||
xo := 0
|
||||
yo := radius
|
||||
|
||||
m.SetColorIndex(x, y+radius, colorIdx)
|
||||
m.SetColorIndex(x, y-radius, colorIdx)
|
||||
m.drawHorizLine(x-radius, x+radius, y, colorIdx)
|
||||
|
||||
for xo < yo {
|
||||
if f >= 0 {
|
||||
yo--
|
||||
dfy += 2
|
||||
f += dfy
|
||||
}
|
||||
xo++
|
||||
dfx += 2
|
||||
f += dfx
|
||||
m.drawHorizLine(x-xo, x+xo, y+yo, colorIdx)
|
||||
m.drawHorizLine(x-xo, x+xo, y-yo, colorIdx)
|
||||
m.drawHorizLine(x-yo, x+yo, y+xo, colorIdx)
|
||||
m.drawHorizLine(x-yo, x+yo, y-xo, colorIdx)
|
||||
}
|
||||
}
|
||||
|
||||
func (m *Image) fillWithCircles(n, maxradius int) {
|
||||
maxx := m.Bounds().Max.X
|
||||
maxy := m.Bounds().Max.Y
|
||||
for i := 0; i < n; i++ {
|
||||
colorIdx := uint8(m.rng.Int(1, circleCount-1))
|
||||
r := m.rng.Int(1, maxradius)
|
||||
m.drawCircle(m.rng.Int(r, maxx-r), m.rng.Int(r, maxy-r), r, colorIdx)
|
||||
}
|
||||
}
|
||||
|
||||
func (m *Image) strikeThrough() {
|
||||
maxx := m.Bounds().Max.X
|
||||
maxy := m.Bounds().Max.Y
|
||||
y := m.rng.Int(maxy/3, maxy-maxy/3)
|
||||
amplitude := m.rng.Float(5, 20)
|
||||
period := m.rng.Float(80, 180)
|
||||
dx := 2.0 * math.Pi / period
|
||||
for x := 0; x < maxx; x++ {
|
||||
xo := amplitude * math.Cos(float64(y)*dx)
|
||||
yo := amplitude * math.Sin(float64(x)*dx)
|
||||
for yn := 0; yn < m.dotSize; yn++ {
|
||||
r := m.rng.Int(0, m.dotSize)
|
||||
m.drawCircle(x+int(xo), y+int(yo)+(yn*m.dotSize), r/2, 1)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (m *Image) drawDigit(digit []byte, x, y int) {
|
||||
skf := m.rng.Float(-maxSkew, maxSkew)
|
||||
xs := float64(x)
|
||||
r := m.dotSize / 2
|
||||
y += m.rng.Int(-r, r)
|
||||
for yo := 0; yo < fontHeight; yo++ {
|
||||
for xo := 0; xo < fontWidth; xo++ {
|
||||
if digit[yo*fontWidth+xo] != blackChar {
|
||||
continue
|
||||
}
|
||||
m.drawCircle(x+xo*m.dotSize, y+yo*m.dotSize, r, 1)
|
||||
}
|
||||
xs += skf
|
||||
x = int(xs)
|
||||
}
|
||||
}
|
||||
|
||||
func (m *Image) distort(amplude float64, period float64) {
|
||||
w := m.Bounds().Max.X
|
||||
h := m.Bounds().Max.Y
|
||||
|
||||
oldm := m.Paletted
|
||||
newm := image.NewPaletted(image.Rect(0, 0, w, h), oldm.Palette)
|
||||
|
||||
dx := 2.0 * math.Pi / period
|
||||
for x := 0; x < w; x++ {
|
||||
for y := 0; y < h; y++ {
|
||||
xo := amplude * math.Sin(float64(y)*dx)
|
||||
yo := amplude * math.Cos(float64(x)*dx)
|
||||
newm.SetColorIndex(x, y, oldm.ColorIndexAt(x+int(xo), y+int(yo)))
|
||||
}
|
||||
}
|
||||
m.Paletted = newm
|
||||
}
|
||||
|
||||
func (m *Image) randomBrightness(c color.RGBA, max uint8) color.RGBA {
|
||||
minc := min3(c.R, c.G, c.B)
|
||||
maxc := max3(c.R, c.G, c.B)
|
||||
if maxc > max {
|
||||
return c
|
||||
}
|
||||
n := m.rng.Intn(int(max-maxc)) - int(minc)
|
||||
return color.RGBA{
|
||||
uint8(int(c.R) + n),
|
||||
uint8(int(c.G) + n),
|
||||
uint8(int(c.B) + n),
|
||||
uint8(c.A),
|
||||
}
|
||||
}
|
||||
|
||||
func min3(x, y, z uint8) (m uint8) {
|
||||
m = x
|
||||
if y < m {
|
||||
m = y
|
||||
}
|
||||
if z < m {
|
||||
m = z
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func max3(x, y, z uint8) (m uint8) {
|
||||
m = x
|
||||
if y > m {
|
||||
m = y
|
||||
}
|
||||
if z > m {
|
||||
m = z
|
||||
}
|
||||
return
|
||||
}
|
40
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/image_test.go
generated
vendored
Normal file
@ -0,0 +1,40 @@
|
||||
// Copyright 2011 Dmitry Chestnykh. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package captcha
|
||||
|
||||
import "testing"
|
||||
|
||||
type byteCounter struct {
|
||||
n int64
|
||||
}
|
||||
|
||||
func (bc *byteCounter) Write(b []byte) (int, error) {
|
||||
bc.n += int64(len(b))
|
||||
return len(b), nil
|
||||
}
|
||||
|
||||
func BenchmarkNewImage(b *testing.B) {
|
||||
b.StopTimer()
|
||||
d := RandomDigits(DefaultLen)
|
||||
id := randomId()
|
||||
b.StartTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
NewImage(id, d, StdWidth, StdHeight)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkImageWriteTo(b *testing.B) {
|
||||
b.StopTimer()
|
||||
d := RandomDigits(DefaultLen)
|
||||
id := randomId()
|
||||
b.StartTimer()
|
||||
counter := &byteCounter{}
|
||||
for i := 0; i < b.N; i++ {
|
||||
img := NewImage(id, d, StdWidth, StdHeight)
|
||||
img.WriteTo(counter)
|
||||
b.SetBytes(counter.n)
|
||||
counter.n = 0
|
||||
}
|
||||
}
|
108
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/random.go
generated
vendored
Normal file
@ -0,0 +1,108 @@
// Copyright 2011-2014 Dmitry Chestnykh. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package captcha

import (
	"crypto/hmac"
	"crypto/rand"
	"crypto/sha256"
	"io"
)

// idLen is a length of captcha id string.
// (20 bytes of 62-letter alphabet give ~119 bits.)
const idLen = 20

// idChars are characters allowed in captcha id.
var idChars = []byte("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789")

// rngKey is a secret key used to deterministically derive seeds for
// PRNGs used in image and audio. Generated once during initialization.
var rngKey [32]byte

func init() {
	if _, err := io.ReadFull(rand.Reader, rngKey[:]); err != nil {
		panic("captcha: error reading random source: " + err.Error())
	}
}

// Purposes for seed derivation. The goal is to make deterministic PRNG produce
// different outputs for images and audio by using different derived seeds.
const (
	imageSeedPurpose = 0x01
	audioSeedPurpose = 0x02
)

// deriveSeed returns a 16-byte PRNG seed from rngKey, purpose, id and digits.
// Same purpose, id and digits will result in the same derived seed for this
// instance of running application.
//
//	out = HMAC(rngKey, purpose || id || 0x00 || digits) (cut to 16 bytes)
//
func deriveSeed(purpose byte, id string, digits []byte) (out [16]byte) {
	var buf [sha256.Size]byte
	h := hmac.New(sha256.New, rngKey[:])
	h.Write([]byte{purpose})
	io.WriteString(h, id)
	h.Write([]byte{0})
	h.Write(digits)
	sum := h.Sum(buf[:0])
	copy(out[:], sum)
	return
}

// RandomDigits returns a byte slice of the given length containing
// pseudorandom numbers in range 0-9. The slice can be used as a captcha
// solution.
func RandomDigits(length int) []byte {
	return randomBytesMod(length, 10)
}

// randomBytes returns a byte slice of the given length read from CSPRNG.
func randomBytes(length int) (b []byte) {
	b = make([]byte, length)
	if _, err := io.ReadFull(rand.Reader, b); err != nil {
		panic("captcha: error reading random source: " + err.Error())
	}
	return
}

// randomBytesMod returns a byte slice of the given length, where each byte is
// a random number modulo mod.
func randomBytesMod(length int, mod byte) (b []byte) {
	if length == 0 {
		return nil
	}
	if mod == 0 {
		panic("captcha: bad mod argument for randomBytesMod")
	}
	maxrb := 255 - byte(256%int(mod))
	b = make([]byte, length)
	i := 0
	for {
		r := randomBytes(length + (length / 4))
		for _, c := range r {
			if c > maxrb {
				// Skip this number to avoid modulo bias.
				continue
			}
			b[i] = c % mod
			i++
			if i == length {
				return
			}
		}
	}

}

// randomId returns a new random id string.
func randomId() string {
	b := randomBytesMod(idLen, byte(len(idChars)))
	for i, c := range b {
		b[i] = idChars[c]
	}
	return string(b)
}
87
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/server.go
generated
vendored
Normal file
@ -0,0 +1,87 @@
// Copyright 2011 Dmitry Chestnykh. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.

package captcha

import (
	"bytes"
	"net/http"
	"path"
	"strings"
	"time"
)

type captchaHandler struct {
	imgWidth  int
	imgHeight int
}

// Server returns a handler that serves HTTP requests with image or
// audio representations of captchas. Image dimensions are accepted as
// arguments. The server decides which captcha to serve based on the last URL
// path component: file name part must contain a captcha id, file extension —
// its format (PNG or WAV).
//
// For example, for file name "LBm5vMjHDtdUfaWYXiQX.png" it serves an image captcha
// with id "LBm5vMjHDtdUfaWYXiQX", and for "LBm5vMjHDtdUfaWYXiQX.wav" it serves the
// same captcha in audio format.
//
// To serve a captcha as a downloadable file, the URL must be constructed in
// such a way as if the file to serve is in the "download" subdirectory:
// "/download/LBm5vMjHDtdUfaWYXiQX.wav".
//
// To reload captcha (get a different solution for the same captcha id), append
// "?reload=x" to URL, where x may be anything (for example, current time or a
// random number to make browsers refetch an image instead of loading it from
// cache).
//
// By default, the Server serves audio in English language. To serve audio
// captcha in one of the other supported languages, append "lang" value, for
// example, "?lang=ru".
func Server(imgWidth, imgHeight int) http.Handler {
	return &captchaHandler{imgWidth, imgHeight}
}

func (h *captchaHandler) serve(w http.ResponseWriter, r *http.Request, id, ext, lang string, download bool) error {
	w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
	w.Header().Set("Pragma", "no-cache")
	w.Header().Set("Expires", "0")

	var content bytes.Buffer
	switch ext {
	case ".png":
		w.Header().Set("Content-Type", "image/png")
		WriteImage(&content, id, h.imgWidth, h.imgHeight)
	case ".wav":
		w.Header().Set("Content-Type", "audio/x-wav")
		WriteAudio(&content, id, lang)
	default:
		return ErrNotFound
	}

	if download {
		w.Header().Set("Content-Type", "application/octet-stream")
	}
	http.ServeContent(w, r, id+ext, time.Time{}, bytes.NewReader(content.Bytes()))
	return nil
}

func (h *captchaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	dir, file := path.Split(r.URL.Path)
	ext := path.Ext(file)
	id := file[:len(file)-len(ext)]
	if ext == "" || id == "" {
		http.NotFound(w, r)
		return
	}
	if r.FormValue("reload") != "" {
		Reload(id)
	}
	lang := strings.ToLower(r.FormValue("lang"))
	download := path.Base(dir) == "download"
	if h.serve(w, r, id, ext, lang, download) == ErrNotFound {
		http.NotFound(w, r)
	}
	// Ignore other errors.
}
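Aside, not part of the commit: given the handler above mounted under an assumed "/captcha/" prefix, these are the URL shapes it understands. The helper name captchaURLs is invented for this sketch; only the file name and query parameters matter to the handler.

// captchaURLs builds the URL variants served by captcha.Server for a given id.
func captchaURLs(id, nonce string) (png, wavRU, reloadPNG, download string) {
	png = "/captcha/" + id + ".png"                       // PNG image
	wavRU = "/captcha/" + id + ".wav?lang=ru"             // audio, Russian voice
	reloadPNG = "/captcha/" + id + ".png?reload=" + nonce // new solution, same id
	download = "/captcha/download/" + id + ".wav"         // forces a file download
	return
}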
278
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/siprng.go
generated
vendored
Normal file
@ -0,0 +1,278 @@
|
||||
// Copyright 2014 Dmitry Chestnykh. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package captcha
|
||||
|
||||
import "encoding/binary"
|
||||
|
||||
// siprng is PRNG based on SipHash-2-4.
|
||||
// (Note: it's not safe to use a single siprng from multiple goroutines.)
|
||||
type siprng struct {
|
||||
k0, k1, ctr uint64
|
||||
}
|
||||
|
||||
// siphash implements SipHash-2-4, accepting a uint64 as a message.
|
||||
func siphash(k0, k1, m uint64) uint64 {
|
||||
// Initialization.
|
||||
v0 := k0 ^ 0x736f6d6570736575
|
||||
v1 := k1 ^ 0x646f72616e646f6d
|
||||
v2 := k0 ^ 0x6c7967656e657261
|
||||
v3 := k1 ^ 0x7465646279746573
|
||||
t := uint64(8) << 56
|
||||
|
||||
// Compression.
|
||||
v3 ^= m
|
||||
|
||||
// Round 1.
|
||||
v0 += v1
|
||||
v1 = v1<<13 | v1>>(64-13)
|
||||
v1 ^= v0
|
||||
v0 = v0<<32 | v0>>(64-32)
|
||||
|
||||
v2 += v3
|
||||
v3 = v3<<16 | v3>>(64-16)
|
||||
v3 ^= v2
|
||||
|
||||
v0 += v3
|
||||
v3 = v3<<21 | v3>>(64-21)
|
||||
v3 ^= v0
|
||||
|
||||
v2 += v1
|
||||
v1 = v1<<17 | v1>>(64-17)
|
||||
v1 ^= v2
|
||||
v2 = v2<<32 | v2>>(64-32)
|
||||
|
||||
// Round 2.
|
||||
v0 += v1
|
||||
v1 = v1<<13 | v1>>(64-13)
|
||||
v1 ^= v0
|
||||
v0 = v0<<32 | v0>>(64-32)
|
||||
|
||||
v2 += v3
|
||||
v3 = v3<<16 | v3>>(64-16)
|
||||
v3 ^= v2
|
||||
|
||||
v0 += v3
|
||||
v3 = v3<<21 | v3>>(64-21)
|
||||
v3 ^= v0
|
||||
|
||||
v2 += v1
|
||||
v1 = v1<<17 | v1>>(64-17)
|
||||
v1 ^= v2
|
||||
v2 = v2<<32 | v2>>(64-32)
|
||||
|
||||
v0 ^= m
|
||||
|
||||
// Compress last block.
|
||||
v3 ^= t
|
||||
|
||||
// Round 1.
|
||||
v0 += v1
|
||||
v1 = v1<<13 | v1>>(64-13)
|
||||
v1 ^= v0
|
||||
v0 = v0<<32 | v0>>(64-32)
|
||||
|
||||
v2 += v3
|
||||
v3 = v3<<16 | v3>>(64-16)
|
||||
v3 ^= v2
|
||||
|
||||
v0 += v3
|
||||
v3 = v3<<21 | v3>>(64-21)
|
||||
v3 ^= v0
|
||||
|
||||
v2 += v1
|
||||
v1 = v1<<17 | v1>>(64-17)
|
||||
v1 ^= v2
|
||||
v2 = v2<<32 | v2>>(64-32)
|
||||
|
||||
// Round 2.
|
||||
v0 += v1
|
||||
v1 = v1<<13 | v1>>(64-13)
|
||||
v1 ^= v0
|
||||
v0 = v0<<32 | v0>>(64-32)
|
||||
|
||||
v2 += v3
|
||||
v3 = v3<<16 | v3>>(64-16)
|
||||
v3 ^= v2
|
||||
|
||||
v0 += v3
|
||||
v3 = v3<<21 | v3>>(64-21)
|
||||
v3 ^= v0
|
||||
|
||||
v2 += v1
|
||||
v1 = v1<<17 | v1>>(64-17)
|
||||
v1 ^= v2
|
||||
v2 = v2<<32 | v2>>(64-32)
|
||||
|
||||
v0 ^= t
|
||||
|
||||
// Finalization.
|
||||
v2 ^= 0xff
|
||||
|
||||
// Round 1.
|
||||
v0 += v1
|
||||
v1 = v1<<13 | v1>>(64-13)
|
||||
v1 ^= v0
|
||||
v0 = v0<<32 | v0>>(64-32)
|
||||
|
||||
v2 += v3
|
||||
v3 = v3<<16 | v3>>(64-16)
|
||||
v3 ^= v2
|
||||
|
||||
v0 += v3
|
||||
v3 = v3<<21 | v3>>(64-21)
|
||||
v3 ^= v0
|
||||
|
||||
v2 += v1
|
||||
v1 = v1<<17 | v1>>(64-17)
|
||||
v1 ^= v2
|
||||
v2 = v2<<32 | v2>>(64-32)
|
||||
|
||||
// Round 2.
|
||||
v0 += v1
|
||||
v1 = v1<<13 | v1>>(64-13)
|
||||
v1 ^= v0
|
||||
v0 = v0<<32 | v0>>(64-32)
|
||||
|
||||
v2 += v3
|
||||
v3 = v3<<16 | v3>>(64-16)
|
||||
v3 ^= v2
|
||||
|
||||
v0 += v3
|
||||
v3 = v3<<21 | v3>>(64-21)
|
||||
v3 ^= v0
|
||||
|
||||
v2 += v1
|
||||
v1 = v1<<17 | v1>>(64-17)
|
||||
v1 ^= v2
|
||||
v2 = v2<<32 | v2>>(64-32)
|
||||
|
||||
// Round 3.
|
||||
v0 += v1
|
||||
v1 = v1<<13 | v1>>(64-13)
|
||||
v1 ^= v0
|
||||
v0 = v0<<32 | v0>>(64-32)
|
||||
|
||||
v2 += v3
|
||||
v3 = v3<<16 | v3>>(64-16)
|
||||
v3 ^= v2
|
||||
|
||||
v0 += v3
|
||||
v3 = v3<<21 | v3>>(64-21)
|
||||
v3 ^= v0
|
||||
|
||||
v2 += v1
|
||||
v1 = v1<<17 | v1>>(64-17)
|
||||
v1 ^= v2
|
||||
v2 = v2<<32 | v2>>(64-32)
|
||||
|
||||
// Round 4.
|
||||
v0 += v1
|
||||
v1 = v1<<13 | v1>>(64-13)
|
||||
v1 ^= v0
|
||||
v0 = v0<<32 | v0>>(64-32)
|
||||
|
||||
v2 += v3
|
||||
v3 = v3<<16 | v3>>(64-16)
|
||||
v3 ^= v2
|
||||
|
||||
v0 += v3
|
||||
v3 = v3<<21 | v3>>(64-21)
|
||||
v3 ^= v0
|
||||
|
||||
v2 += v1
|
||||
v1 = v1<<17 | v1>>(64-17)
|
||||
v1 ^= v2
|
||||
v2 = v2<<32 | v2>>(64-32)
|
||||
|
||||
return v0 ^ v1 ^ v2 ^ v3
|
||||
}
|
||||
|
||||
// SetSeed sets a new secret seed for PRNG.
|
||||
func (p *siprng) Seed(k [16]byte) {
|
||||
p.k0 = binary.LittleEndian.Uint64(k[0:8])
|
||||
p.k1 = binary.LittleEndian.Uint64(k[8:16])
|
||||
p.ctr = 1
|
||||
}
|
||||
|
||||
// Uint64 returns a new pseudorandom uint64.
|
||||
func (p *siprng) Uint64() uint64 {
|
||||
v := siphash(p.k0, p.k1, p.ctr)
|
||||
p.ctr++
|
||||
return v
|
||||
}
|
||||
|
||||
func (p *siprng) Bytes(n int) []byte {
|
||||
// Since we don't have a buffer for generated bytes in siprng state,
|
||||
// we just generate enough 8-byte blocks and then cut the result to the
|
||||
// required length. Doing it this way, we lose generated bytes, and we
|
||||
// don't get the strictly sequential deterministic output from PRNG:
|
||||
// calling Uint64() and then Bytes(3) produces different output than
|
||||
// when calling them in the reverse order, but for our applications
|
||||
// this is OK.
|
||||
numBlocks := (n + 8 - 1) / 8
|
||||
b := make([]byte, numBlocks*8)
|
||||
for i := 0; i < len(b); i += 8 {
|
||||
binary.LittleEndian.PutUint64(b[i:], p.Uint64())
|
||||
}
|
||||
return b[:n]
|
||||
}
|
||||
|
||||
func (p *siprng) Int63() int64 {
|
||||
return int64(p.Uint64() & 0x7fffffffffffffff)
|
||||
}
|
||||
|
||||
func (p *siprng) Uint32() uint32 {
|
||||
return uint32(p.Uint64())
|
||||
}
|
||||
|
||||
func (p *siprng) Int31() int32 {
|
||||
return int32(p.Uint32() & 0x7fffffff)
|
||||
}
|
||||
|
||||
func (p *siprng) Intn(n int) int {
|
||||
if n <= 0 {
|
||||
panic("invalid argument to Intn")
|
||||
}
|
||||
if n <= 1<<31-1 {
|
||||
return int(p.Int31n(int32(n)))
|
||||
}
|
||||
return int(p.Int63n(int64(n)))
|
||||
}
|
||||
|
||||
func (p *siprng) Int63n(n int64) int64 {
|
||||
if n <= 0 {
|
||||
panic("invalid argument to Int63n")
|
||||
}
|
||||
max := int64((1 << 63) - 1 - (1<<63)%uint64(n))
|
||||
v := p.Int63()
|
||||
for v > max {
|
||||
v = p.Int63()
|
||||
}
|
||||
return v % n
|
||||
}
|
||||
|
||||
func (p *siprng) Int31n(n int32) int32 {
|
||||
if n <= 0 {
|
||||
panic("invalid argument to Int31n")
|
||||
}
|
||||
max := int32((1 << 31) - 1 - (1<<31)%uint32(n))
|
||||
v := p.Int31()
|
||||
for v > max {
|
||||
v = p.Int31()
|
||||
}
|
||||
return v % n
|
||||
}
|
||||
|
||||
func (p *siprng) Float64() float64 { return float64(p.Int63()) / (1 << 63) }
|
||||
|
||||
// Int returns a pseudorandom int in range [from, to].
|
||||
func (p *siprng) Int(from, to int) int {
|
||||
return p.Intn(to+1-from) + from
|
||||
}
|
||||
|
||||
// Float returns a pseudorandom float64 in range [from, to].
|
||||
func (p *siprng) Float(from, to float64) float64 {
|
||||
return (to-from)*p.Float64() + from
|
||||
}
|
54
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/siprng_test.go
generated
vendored
Normal file
@ -0,0 +1,54 @@
|
||||
package captcha
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSiphash(t *testing.T) {
|
||||
good := uint64(0xe849e8bb6ffe2567)
|
||||
cur := siphash(0, 0, 0)
|
||||
if cur != good {
|
||||
t.Fatalf("siphash: expected %x, got %x", good, cur)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSiprng(t *testing.T) {
|
||||
m := make(map[uint64]interface{})
|
||||
var yes interface{}
|
||||
r := siprng{}
|
||||
r.Seed([16]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15})
|
||||
for i := 0; i < 100000; i++ {
|
||||
v := r.Uint64()
|
||||
if _, ok := m[v]; ok {
|
||||
t.Errorf("siphash: collision on %d: %x", i, v)
|
||||
}
|
||||
m[v] = yes
|
||||
}
|
||||
}
|
||||
|
||||
func TestSiprngBytes(t *testing.T) {
|
||||
r := siprng{}
|
||||
r.Seed([16]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15})
|
||||
x := r.Bytes(32)
|
||||
if len(x) != 32 {
|
||||
t.Fatalf("siphash: wrong length: expected 32, got %d", len(x))
|
||||
}
|
||||
y := r.Bytes(32)
|
||||
if bytes.Equal(x, y) {
|
||||
t.Fatalf("siphash: stream repeats: %x = %x", x, y)
|
||||
}
|
||||
r.Seed([16]byte{})
|
||||
z := r.Bytes(32)
|
||||
if bytes.Equal(z, x) {
|
||||
t.Fatalf("siphash: outputs under different keys repeat: %x = %x", z, x)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkSiprng(b *testing.B) {
|
||||
b.SetBytes(8)
|
||||
p := &siprng{}
|
||||
for i := 0; i < b.N; i++ {
|
||||
p.Uint64()
|
||||
}
|
||||
}
|
7390
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/sounds.go
generated
vendored
Normal file
File diff suppressed because it is too large
117
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/store.go
generated
vendored
Normal file
@ -0,0 +1,117 @@
|
||||
// Copyright 2011 Dmitry Chestnykh. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package captcha
|
||||
|
||||
import (
|
||||
"container/list"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
// An object implementing Store interface can be registered with SetCustomStore
|
||||
// function to handle storage and retrieval of captcha ids and solutions for
|
||||
// them, replacing the default memory store.
|
||||
//
|
||||
// It is the responsibility of an object to delete expired and used captchas
|
||||
// when necessary (for example, the default memory store collects them in Set
|
||||
// method after the certain amount of captchas has been stored.)
|
||||
type Store interface {
|
||||
// Set sets the digits for the captcha id.
|
||||
Set(id string, digits []byte)
|
||||
|
||||
// Get returns stored digits for the captcha id. Clear indicates
|
||||
// whether the captcha must be deleted from the store.
|
||||
Get(id string, clear bool) (digits []byte)
|
||||
}
|
||||
|
||||
// expValue stores timestamp and id of captchas. It is used in the list inside
|
||||
// memoryStore for indexing generated captchas by timestamp to enable garbage
|
||||
// collection of expired captchas.
|
||||
type idByTimeValue struct {
|
||||
timestamp time.Time
|
||||
id string
|
||||
}
|
||||
|
||||
// memoryStore is an internal store for captcha ids and their values.
|
||||
type memoryStore struct {
|
||||
sync.RWMutex
|
||||
digitsById map[string][]byte
|
||||
idByTime *list.List
|
||||
// Number of items stored since last collection.
|
||||
numStored int
|
||||
// Number of saved items that triggers collection.
|
||||
collectNum int
|
||||
// Expiration time of captchas.
|
||||
expiration time.Duration
|
||||
}
|
||||
|
||||
// NewMemoryStore returns a new standard memory store for captchas with the
|
||||
// given collection threshold and expiration time (duration). The returned
|
||||
// store must be registered with SetCustomStore to replace the default one.
|
||||
func NewMemoryStore(collectNum int, expiration time.Duration) Store {
|
||||
s := new(memoryStore)
|
||||
s.digitsById = make(map[string][]byte)
|
||||
s.idByTime = list.New()
|
||||
s.collectNum = collectNum
|
||||
s.expiration = expiration
|
||||
return s
|
||||
}
|
||||
|
||||
func (s *memoryStore) Set(id string, digits []byte) {
|
||||
s.Lock()
|
||||
s.digitsById[id] = digits
|
||||
s.idByTime.PushBack(idByTimeValue{time.Now(), id})
|
||||
s.numStored++
|
||||
if s.numStored <= s.collectNum {
|
||||
s.Unlock()
|
||||
return
|
||||
}
|
||||
s.Unlock()
|
||||
go s.collect()
|
||||
}
|
||||
|
||||
func (s *memoryStore) Get(id string, clear bool) (digits []byte) {
|
||||
if !clear {
|
||||
// When we don't need to clear captcha, acquire read lock.
|
||||
s.RLock()
|
||||
defer s.RUnlock()
|
||||
} else {
|
||||
s.Lock()
|
||||
defer s.Unlock()
|
||||
}
|
||||
digits, ok := s.digitsById[id]
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
if clear {
|
||||
delete(s.digitsById, id)
|
||||
// XXX(dchest) Index (s.idByTime) will be cleaned when
|
||||
// collecting expired captchas. Can't clean it here, because
|
||||
// we don't store reference to expValue in the map.
|
||||
// Maybe store it?
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (s *memoryStore) collect() {
|
||||
now := time.Now()
|
||||
s.Lock()
|
||||
defer s.Unlock()
|
||||
s.numStored = 0
|
||||
for e := s.idByTime.Front(); e != nil; {
|
||||
ev, ok := e.Value.(idByTimeValue)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
if ev.timestamp.Add(s.expiration).Before(now) {
|
||||
delete(s.digitsById, ev.id)
|
||||
next := e.Next()
|
||||
s.idByTime.Remove(e)
|
||||
e = next
|
||||
} else {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
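Aside, not part of the commit: the Store interface defined in store.go can be backed by anything (the doc comment suggests a database). A deliberately bare sketch, with the invented type name mapStore and no expiration or garbage collection, registered via SetCustomStore:

package main

import (
	"sync"

	"github.com/dchest/captcha"
)

// mapStore is a minimal Store: a mutex-guarded map (illustration only).
type mapStore struct {
	mu sync.Mutex
	m  map[string][]byte
}

func (s *mapStore) Set(id string, digits []byte) {
	s.mu.Lock()
	s.m[id] = digits
	s.mu.Unlock()
}

func (s *mapStore) Get(id string, clear bool) []byte {
	s.mu.Lock()
	defer s.mu.Unlock()
	d := s.m[id]
	if clear {
		delete(s.m, id) // drop used captchas, as the default store does
	}
	return d
}

func main() {
	// Must run before any captcha is generated.
	captcha.SetCustomStore(&mapStore{m: make(map[string][]byte)})
	_ = captcha.New()
}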
79
contrib/backends/srndv2/src/srnd/vendor/github.com/dchest/captcha/store_test.go
generated
vendored
Normal file
@ -0,0 +1,79 @@
|
||||
// Copyright 2011 Dmitry Chestnykh. All rights reserved.
|
||||
// Use of this source code is governed by a MIT-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package captcha
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSetGet(t *testing.T) {
|
||||
s := NewMemoryStore(CollectNum, Expiration)
|
||||
id := "captcha id"
|
||||
d := RandomDigits(10)
|
||||
s.Set(id, d)
|
||||
d2 := s.Get(id, false)
|
||||
if d2 == nil || !bytes.Equal(d, d2) {
|
||||
t.Errorf("saved %v, getDigits returned got %v", d, d2)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetClear(t *testing.T) {
|
||||
s := NewMemoryStore(CollectNum, Expiration)
|
||||
id := "captcha id"
|
||||
d := RandomDigits(10)
|
||||
s.Set(id, d)
|
||||
d2 := s.Get(id, true)
|
||||
if d2 == nil || !bytes.Equal(d, d2) {
|
||||
t.Errorf("saved %v, getDigitsClear returned got %v", d, d2)
|
||||
}
|
||||
d2 = s.Get(id, false)
|
||||
if d2 != nil {
|
||||
t.Errorf("getDigitClear didn't clear (%q=%v)", id, d2)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCollect(t *testing.T) {
|
||||
//TODO(dchest): can't test automatic collection when saving, because
|
||||
//it's currently launched in a different goroutine.
|
||||
s := NewMemoryStore(10, -1)
|
||||
// create 10 ids
|
||||
ids := make([]string, 10)
|
||||
d := RandomDigits(10)
|
||||
for i := range ids {
|
||||
ids[i] = randomId()
|
||||
s.Set(ids[i], d)
|
||||
}
|
||||
s.(*memoryStore).collect()
|
||||
// Must be already collected
|
||||
nc := 0
|
||||
for i := range ids {
|
||||
d2 := s.Get(ids[i], false)
|
||||
if d2 != nil {
|
||||
t.Errorf("%d: not collected", i)
|
||||
nc++
|
||||
}
|
||||
}
|
||||
if nc > 0 {
|
||||
t.Errorf("= not collected %d out of %d captchas", nc, len(ids))
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkSetCollect(b *testing.B) {
|
||||
b.StopTimer()
|
||||
d := RandomDigits(10)
|
||||
s := NewMemoryStore(9999, -1)
|
||||
ids := make([]string, 1000)
|
||||
for i := range ids {
|
||||
ids[i] = randomId()
|
||||
}
|
||||
b.StartTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
for j := 0; j < 1000; j++ {
|
||||
s.Set(ids[j], d)
|
||||
}
|
||||
s.(*memoryStore).collect()
|
||||
}
|
||||
}
|
19
contrib/backends/srndv2/src/srnd/vendor/github.com/gorilla/context/.travis.yml
generated
vendored
Normal file
@ -0,0 +1,19 @@
language: go
sudo: false

matrix:
  include:
    - go: 1.3
    - go: 1.4
    - go: 1.5
    - go: 1.6
    - go: 1.7
    - go: tip
  allow_failures:
    - go: tip

script:
  - go get -t -v ./...
  - diff -u <(echo -n) <(gofmt -d .)
  - go vet $(go list ./... | grep -v /vendor/)
  - go test -v -race ./...
27
contrib/backends/srndv2/src/srnd/vendor/github.com/gorilla/context/LICENSE
generated
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
Copyright (c) 2012 Rodrigo Moraes. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
10
contrib/backends/srndv2/src/srnd/vendor/github.com/gorilla/context/README.md
generated
vendored
Normal file
@ -0,0 +1,10 @@
context
=======
[](https://travis-ci.org/gorilla/context)

gorilla/context is a general purpose registry for global request variables.

> Note: gorilla/context, having been born well before `context.Context` existed, does not play well
> with the shallow copying of the request that [`http.Request.WithContext`](https://golang.org/pkg/net/http/#Request.WithContext) (added to net/http Go 1.7 onwards) performs. You should either use *just* gorilla/context, or moving forward, the new `http.Request.Context()`.

Read the full documentation here: http://www.gorillatoolkit.org/pkg/context
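Aside, not part of the commit: a minimal sketch of the Set/Get/ClearHandler pattern that this README's "registry for global request variables" refers to. The key type, stored value, and handler are invented for illustration.

package main

import (
	"fmt"
	"net/http"

	"github.com/gorilla/context"
)

// keyType and userKey are invented for this sketch; any comparable value
// works as a key.
type keyType int

const userKey keyType = 0

func handler(w http.ResponseWriter, r *http.Request) {
	context.Set(r, userKey, "anon") // usually done earlier, e.g. in auth middleware
	fmt.Fprintln(w, context.Get(r, userKey))
}

func main() {
	// ClearHandler removes the request's entry from the global registry when
	// the wrapped handler returns, keeping the registry from growing without
	// bound.
	http.Handle("/", context.ClearHandler(http.HandlerFunc(handler)))
	http.ListenAndServe(":8080", nil)
}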
143
contrib/backends/srndv2/src/srnd/vendor/github.com/gorilla/context/context.go
generated
vendored
Normal file
@ -0,0 +1,143 @@
|
||||
// Copyright 2012 The Gorilla Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package context
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
var (
|
||||
mutex sync.RWMutex
|
||||
data = make(map[*http.Request]map[interface{}]interface{})
|
||||
datat = make(map[*http.Request]int64)
|
||||
)
|
||||
|
||||
// Set stores a value for a given key in a given request.
|
||||
func Set(r *http.Request, key, val interface{}) {
|
||||
mutex.Lock()
|
||||
if data[r] == nil {
|
||||
data[r] = make(map[interface{}]interface{})
|
||||
datat[r] = time.Now().Unix()
|
||||
}
|
||||
data[r][key] = val
|
||||
mutex.Unlock()
|
||||
}
|
||||
|
||||
// Get returns a value stored for a given key in a given request.
|
||||
func Get(r *http.Request, key interface{}) interface{} {
|
||||
mutex.RLock()
|
||||
if ctx := data[r]; ctx != nil {
|
||||
value := ctx[key]
|
||||
mutex.RUnlock()
|
||||
return value
|
||||
}
|
||||
mutex.RUnlock()
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetOk returns stored value and presence state like multi-value return of map access.
|
||||
func GetOk(r *http.Request, key interface{}) (interface{}, bool) {
|
||||
mutex.RLock()
|
||||
if _, ok := data[r]; ok {
|
||||
value, ok := data[r][key]
|
||||
mutex.RUnlock()
|
||||
return value, ok
|
||||
}
|
||||
mutex.RUnlock()
|
||||
return nil, false
|
||||
}
|
||||
|
||||
// GetAll returns all stored values for the request as a map. Nil is returned for invalid requests.
|
||||
func GetAll(r *http.Request) map[interface{}]interface{} {
|
||||
mutex.RLock()
|
||||
if context, ok := data[r]; ok {
|
||||
result := make(map[interface{}]interface{}, len(context))
|
||||
for k, v := range context {
|
||||
result[k] = v
|
||||
}
|
||||
mutex.RUnlock()
|
||||
return result
|
||||
}
|
||||
mutex.RUnlock()
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetAllOk returns all stored values for the request as a map and a boolean value that indicates if
|
||||
// the request was registered.
|
||||
func GetAllOk(r *http.Request) (map[interface{}]interface{}, bool) {
|
||||
mutex.RLock()
|
||||
context, ok := data[r]
|
||||
result := make(map[interface{}]interface{}, len(context))
|
||||
for k, v := range context {
|
||||
result[k] = v
|
||||
}
|
||||
mutex.RUnlock()
|
||||
return result, ok
|
||||
}
|
||||
|
||||
// Delete removes a value stored for a given key in a given request.
|
||||
func Delete(r *http.Request, key interface{}) {
|
||||
mutex.Lock()
|
||||
if data[r] != nil {
|
||||
delete(data[r], key)
|
||||
}
|
||||
mutex.Unlock()
|
||||
}
|
||||
|
||||
// Clear removes all values stored for a given request.
|
||||
//
|
||||
// This is usually called by a handler wrapper to clean up request
|
||||
// variables at the end of a request lifetime. See ClearHandler().
|
||||
func Clear(r *http.Request) {
|
||||
mutex.Lock()
|
||||
clear(r)
|
||||
mutex.Unlock()
|
||||
}
|
||||
|
||||
// clear is Clear without the lock.
|
||||
func clear(r *http.Request) {
|
||||
delete(data, r)
|
||||
delete(datat, r)
|
||||
}
|
||||
|
||||
// Purge removes request data stored for longer than maxAge, in seconds.
|
||||
// It returns the amount of requests removed.
|
||||
//
|
||||
// If maxAge <= 0, all request data is removed.
|
||||
//
|
||||
// This is only used for sanity check: in case context cleaning was not
|
||||
// properly set some request data can be kept forever, consuming an increasing
|
||||
// amount of memory. In case this is detected, Purge() must be called
|
||||
// periodically until the problem is fixed.
|
||||
func Purge(maxAge int) int {
|
||||
mutex.Lock()
|
||||
count := 0
|
||||
if maxAge <= 0 {
|
||||
count = len(data)
|
||||
data = make(map[*http.Request]map[interface{}]interface{})
|
||||
datat = make(map[*http.Request]int64)
|
||||
} else {
|
||||
min := time.Now().Unix() - int64(maxAge)
|
||||
for r := range data {
|
||||
if datat[r] < min {
|
||||
clear(r)
|
||||
count++
|
||||
}
|
||||
}
|
||||
}
|
||||
mutex.Unlock()
|
||||
return count
|
||||
}
|
||||
|
||||
// ClearHandler wraps an http.Handler and clears request values at the end
|
||||
// of a request lifetime.
|
||||
func ClearHandler(h http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
defer Clear(r)
|
||||
h.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
161
contrib/backends/srndv2/src/srnd/vendor/github.com/gorilla/context/context_test.go
generated
vendored
Normal file
@ -0,0 +1,161 @@
// Copyright 2012 The Gorilla Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package context

import (
	"net/http"
	"testing"
)

type keyType int

const (
	key1 keyType = iota
	key2
)

func TestContext(t *testing.T) {
	assertEqual := func(val interface{}, exp interface{}) {
		if val != exp {
			t.Errorf("Expected %v, got %v.", exp, val)
		}
	}

	r, _ := http.NewRequest("GET", "http://localhost:8080/", nil)
	emptyR, _ := http.NewRequest("GET", "http://localhost:8080/", nil)

	// Get()
	assertEqual(Get(r, key1), nil)

	// Set()
	Set(r, key1, "1")
	assertEqual(Get(r, key1), "1")
	assertEqual(len(data[r]), 1)

	Set(r, key2, "2")
	assertEqual(Get(r, key2), "2")
	assertEqual(len(data[r]), 2)

	//GetOk
	value, ok := GetOk(r, key1)
	assertEqual(value, "1")
	assertEqual(ok, true)

	value, ok = GetOk(r, "not exists")
	assertEqual(value, nil)
	assertEqual(ok, false)

	Set(r, "nil value", nil)
	value, ok = GetOk(r, "nil value")
	assertEqual(value, nil)
	assertEqual(ok, true)

	// GetAll()
	values := GetAll(r)
	assertEqual(len(values), 3)

	// GetAll() for empty request
	values = GetAll(emptyR)
	if values != nil {
		t.Error("GetAll didn't return nil value for invalid request")
	}

	// GetAllOk()
	values, ok = GetAllOk(r)
	assertEqual(len(values), 3)
	assertEqual(ok, true)

	// GetAllOk() for empty request
	values, ok = GetAllOk(emptyR)
	assertEqual(len(values), 0)
	assertEqual(ok, false)

	// Delete()
	Delete(r, key1)
	assertEqual(Get(r, key1), nil)
	assertEqual(len(data[r]), 2)

	Delete(r, key2)
	assertEqual(Get(r, key2), nil)
	assertEqual(len(data[r]), 1)

	// Clear()
	Clear(r)
	assertEqual(len(data), 0)
}

func parallelReader(r *http.Request, key string, iterations int, wait, done chan struct{}) {
	<-wait
	for i := 0; i < iterations; i++ {
		Get(r, key)
	}
	done <- struct{}{}

}

func parallelWriter(r *http.Request, key, value string, iterations int, wait, done chan struct{}) {
	<-wait
	for i := 0; i < iterations; i++ {
		Set(r, key, value)
	}
	done <- struct{}{}

}

func benchmarkMutex(b *testing.B, numReaders, numWriters, iterations int) {

	b.StopTimer()
	r, _ := http.NewRequest("GET", "http://localhost:8080/", nil)
	done := make(chan struct{})
	b.StartTimer()

	for i := 0; i < b.N; i++ {
		wait := make(chan struct{})

		for i := 0; i < numReaders; i++ {
			go parallelReader(r, "test", iterations, wait, done)
		}

		for i := 0; i < numWriters; i++ {
			go parallelWriter(r, "test", "123", iterations, wait, done)
		}

		close(wait)

		for i := 0; i < numReaders+numWriters; i++ {
			<-done
		}

	}

}

func BenchmarkMutexSameReadWrite1(b *testing.B) {
	benchmarkMutex(b, 1, 1, 32)
}
func BenchmarkMutexSameReadWrite2(b *testing.B) {
	benchmarkMutex(b, 2, 2, 32)
}
func BenchmarkMutexSameReadWrite4(b *testing.B) {
	benchmarkMutex(b, 4, 4, 32)
}
func BenchmarkMutex1(b *testing.B) {
	benchmarkMutex(b, 2, 8, 32)
}
func BenchmarkMutex2(b *testing.B) {
	benchmarkMutex(b, 16, 4, 64)
}
func BenchmarkMutex3(b *testing.B) {
	benchmarkMutex(b, 1, 2, 128)
}
func BenchmarkMutex4(b *testing.B) {
	benchmarkMutex(b, 128, 32, 256)
}
func BenchmarkMutex5(b *testing.B) {
	benchmarkMutex(b, 1024, 2048, 64)
}
func BenchmarkMutex6(b *testing.B) {
	benchmarkMutex(b, 2048, 1024, 512)
}
88
contrib/backends/srndv2/src/srnd/vendor/github.com/gorilla/context/doc.go
generated
vendored
Normal file
@ -0,0 +1,88 @@
// Copyright 2012 The Gorilla Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

/*
Package context stores values shared during a request lifetime.

Note: gorilla/context, having been born well before `context.Context` existed,
does not play well with the shallow copying of the request that
[`http.Request.WithContext`](https://golang.org/pkg/net/http/#Request.WithContext)
(added to net/http in Go 1.7 onwards) performs. You should either use *just*
gorilla/context, or moving forward, the new `http.Request.Context()`.

For example, a router can set variables extracted from the URL and later
application handlers can access those values, or it can be used to store
session values to be saved at the end of a request. There are several
other common uses.

The idea was posted by Brad Fitzpatrick to the go-nuts mailing list:

	http://groups.google.com/group/golang-nuts/msg/e2d679d303aa5d53

Here's the basic usage: first define the keys that you will need. The key
type is interface{} so a key can be of any type that supports equality.
Here we define a key using a custom int type to avoid name collisions:

	package foo

	import (
		"github.com/gorilla/context"
	)

	type key int

	const MyKey key = 0

Then set a variable. Variables are bound to an http.Request object, so you
need a request instance to set a value:

	context.Set(r, MyKey, "bar")

The application can later access the variable using the same key you provided:

	func MyHandler(w http.ResponseWriter, r *http.Request) {
		// val is "bar".
		val := context.Get(r, foo.MyKey)

		// returns ("bar", true)
		val, ok := context.GetOk(r, foo.MyKey)
		// ...
	}

And that's all about the basic usage. We discuss some other ideas below.

Any type can be stored in the context. To enforce a given type, make the key
private and wrap Get() and Set() to accept and return values of a specific
type:

	type key int

	const mykey key = 0

	// GetMyKey returns a value for this package from the request values.
	func GetMyKey(r *http.Request) SomeType {
		if rv := context.Get(r, mykey); rv != nil {
			return rv.(SomeType)
		}
		return nil
	}

	// SetMyKey sets a value for this package in the request values.
	func SetMyKey(r *http.Request, val SomeType) {
		context.Set(r, mykey, val)
	}

Variables must be cleared at the end of a request, to remove all values
that were stored. This can be done in an http.Handler, after a request was
served. Just call Clear() passing the request:

	context.Clear(r)

...or use ClearHandler(), which conveniently wraps an http.Handler to clear
variables at the end of a request lifetime.

The Routers from the packages gorilla/mux and gorilla/pat call Clear()
so if you are using either of them you don't need to clear the context manually.
*/
package context
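As a quick illustration of the typed-wrapper pattern the package doc above recommends, a hedged sketch follows; the package name, key and query-parameter name are assumptions for illustration, not part of gorilla/context:

```go
// Hypothetical wrapper around the pattern documented above: a middleware
// stores a typed value for the request and a later handler reads it back.
package user

import (
	"net/http"

	"github.com/gorilla/context"
)

type key int

const userKey key = 0

// SetUser and GetUser wrap context.Set/Get so callers only see the string type.
func SetUser(r *http.Request, name string) { context.Set(r, userKey, name) }

func GetUser(r *http.Request) string {
	if v := context.Get(r, userKey); v != nil {
		return v.(string)
	}
	return ""
}

// WithUser extracts the user from a query parameter and stores it before
// calling the wrapped handler; the parameter name is purely illustrative.
func WithUser(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		SetUser(r, r.URL.Query().Get("user"))
		next.ServeHTTP(w, r)
	})
}
```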
22
contrib/backends/srndv2/src/srnd/vendor/github.com/gorilla/csrf/.travis.yml
generated
vendored
Normal file
@ -0,0 +1,22 @@
language: go
sudo: false

matrix:
  include:
    - go: 1.2
    - go: 1.3
    - go: 1.4
    - go: 1.5
    - go: 1.6
    - go: tip
  allow_failures:
    - go: tip

install:
  - # skip

script:
  - go get -t -v ./...
  - diff -u <(echo -n) <(gofmt -d .)
  - go vet $(go list ./... | grep -v /vendor/)
  - go test -v -race ./...
27
contrib/backends/srndv2/src/srnd/vendor/github.com/gorilla/csrf/LICENSE
generated
vendored
Normal file
@ -0,0 +1,27 @@
Copyright (c) 2015, Matt Silverlock (matt@eatsleeprepeat.net) All rights
reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
232
contrib/backends/srndv2/src/srnd/vendor/github.com/gorilla/csrf/README.md
generated
vendored
Normal file
@ -0,0 +1,232 @@
# gorilla/csrf
[](https://godoc.org/github.com/gorilla/csrf) [](https://travis-ci.org/gorilla/csrf) [](https://sourcegraph.com/github.com/gorilla/csrf?badge)

gorilla/csrf is an HTTP middleware library that provides [cross-site request
forgery](http://blog.codinghorror.com/preventing-csrf-and-xsrf-attacks/) (CSRF)
protection. It includes:

* The `csrf.Protect` middleware/handler provides CSRF protection on routes
attached to a router or a sub-router.
* A `csrf.Token` function that provides the token to pass into your response,
whether that be an HTML form or a JSON response body.
* ... and a `csrf.TemplateField` helper that you can pass into your `html/template`
templates to replace a `{{ .csrfField }}` template tag with a hidden input
field.

gorilla/csrf is designed to work with any Go web framework, including:

* The [Gorilla](http://www.gorillatoolkit.org/) toolkit
* Go's built-in [net/http](http://golang.org/pkg/net/http/) package
* [Goji](https://goji.io) - see the [tailored fork](https://github.com/goji/csrf)
* [Gin](https://github.com/gin-gonic/gin)
* [Echo](https://github.com/labstack/echo)
* ... and any other router/framework that rallies around Go's `http.Handler` interface.

gorilla/csrf is also compatible with middleware 'helper' libraries like
[Alice](https://github.com/justinas/alice) and [Negroni](https://github.com/codegangsta/negroni).

## Install

With a properly configured Go toolchain:
```sh
go get github.com/gorilla/csrf
```

## Examples

* [HTML Forms](#html-forms)
* [JavaScript Apps](#javascript-applications)
* [Google App Engine](#google-app-engine)
* [Setting Options](#setting-options)

gorilla/csrf is easy to use: add the middleware to your router as shown below:

```go
CSRF := csrf.Protect([]byte("32-byte-long-auth-key"))
http.ListenAndServe(":8000", CSRF(r))
```

...and then collect the token with `csrf.Token(r)` in your handlers before
passing it to the template, JSON body or HTTP header (see below).

Note that the authentication key passed to `csrf.Protect([]byte(key))` should be
32 bytes long and persist across application restarts. Generating a random key
won't allow you to authenticate existing cookies and will break your CSRF
validation.

gorilla/csrf inspects the HTTP headers (first) and form body (second) on
subsequent POST/PUT/PATCH/DELETE/etc. requests for the token.

### HTML Forms

Here's the common use-case: HTML forms you want to provide CSRF protection for,
in order to protect against malicious POST requests being made:

```go
package main

import (
	"net/http"

	"github.com/gorilla/csrf"
	"github.com/gorilla/mux"
)

func main() {
	r := mux.NewRouter()
	r.HandleFunc("/signup", ShowSignupForm)
	// All POST requests without a valid token will return HTTP 403 Forbidden.
	r.HandleFunc("/signup/post", SubmitSignupForm)

	// Add the middleware to your router by wrapping it.
	http.ListenAndServe(":8000",
		csrf.Protect([]byte("32-byte-long-auth-key"))(r))
	// PS: Don't forget to pass csrf.Secure(false) if you're developing locally
	// over plain HTTP (just don't leave it on in production).
}

func ShowSignupForm(w http.ResponseWriter, r *http.Request) {
	// signup_form.tmpl just needs a {{ .csrfField }} template tag for
	// csrf.TemplateField to inject the CSRF token into. Easy!
	t.ExecuteTemplate(w, "signup_form.tmpl", map[string]interface{}{
		csrf.TemplateTag: csrf.TemplateField(r),
	})
	// We could also retrieve the token directly from csrf.Token(r) and
	// set it in the request header - w.Header.Set("X-CSRF-Token", token)
	// This is useful if you're sending JSON to clients or a front-end JavaScript
	// framework.
}

func SubmitSignupForm(w http.ResponseWriter, r *http.Request) {
	// We can trust that requests making it this far have satisfied
	// our CSRF protection requirements.
}
```

Note that the CSRF middleware will (by necessity) consume the request body if the
token is passed via POST form values. If you need to consume this in your
handler, insert your own middleware earlier in the chain to capture the request
body.

### JavaScript Applications

This approach is useful if you're using a front-end JavaScript framework like
React, Ember or Angular, or are providing a JSON API.

We'll also look at applying selective CSRF protection using
[gorilla/mux's](http://www.gorillatoolkit.org/pkg/mux) sub-routers,
as we don't handle any POST/PUT/DELETE requests with our top-level router.

```go
package main

import (
	"github.com/gorilla/csrf"
	"github.com/gorilla/mux"
)

func main() {
	r := mux.NewRouter()

	api := r.PathPrefix("/api").Subrouter()
	api.HandleFunc("/user/{id}", GetUser).Methods("GET")

	http.ListenAndServe(":8000",
		csrf.Protect([]byte("32-byte-long-auth-key"))(r))
}

func GetUser(w http.ResponseWriter, r *http.Request) {
	// Authenticate the request, get the id from the route params,
	// and fetch the user from the DB, etc.

	// Get the token and pass it in the CSRF header. Our JSON-speaking client
	// or JavaScript framework can now read the header and return the token
	// in its own "X-CSRF-Token" request header on the subsequent POST.
	w.Header().Set("X-CSRF-Token", csrf.Token(r))
	b, err := json.Marshal(user)
	if err != nil {
		http.Error(w, err.Error(), 500)
		return
	}

	w.Write(b)
}
```

### Google App Engine

If you're using [Google App
Engine](https://cloud.google.com/appengine/docs/go/how-requests-are-handled#Go_Requests_and_HTTP),
which doesn't allow you to hook into the default `http.ServeMux` directly,
you can still use gorilla/csrf (and gorilla/mux):

```go
package app

// Remember: appengine has its own package main
func init() {
	r := mux.NewRouter()
	r.HandleFunc("/", IndexHandler)
	// ...

	// We pass our CSRF-protected router to the DefaultServeMux
	http.Handle("/", csrf.Protect([]byte(your-key))(r))
}
```

### Setting Options

What about providing your own error handler and changing the HTTP header the
package inspects on requests? (i.e. an existing API you're porting to Go). Well,
gorilla/csrf provides options for changing these as you see fit:

```go
func main() {
	CSRF := csrf.Protect(
		[]byte("a-32-byte-long-key-goes-here"),
		csrf.RequestHeader("Authenticity-Token"),
		csrf.FieldName("authenticity_token"),
		csrf.ErrorHandler(http.HandlerFunc(serverError(403))),
	)

	r := mux.NewRouter()
	r.HandleFunc("/signup", GetSignupForm)
	r.HandleFunc("/signup/post", PostSignupForm)

	http.ListenAndServe(":8000", CSRF(r))
}
```

Not too bad, right?

If there's something you're confused about or a feature you would like to see
added, open an issue.

## Design Notes

Getting CSRF protection right is important, so here's some background:

* This library generates unique-per-request (masked) tokens as a mitigation
against the [BREACH attack](http://breachattack.com/).
* The 'base' (unmasked) token is stored in the session, which means that
multiple browser tabs won't cause a user problems as their per-request token
is compared with the base token.
* Operates on a "whitelist only" approach where safe (non-mutating) HTTP methods
(GET, HEAD, OPTIONS, TRACE) are the *only* methods where token validation is not
enforced.
* The design is based on the battle-tested
[Django](https://docs.djangoproject.com/en/1.8/ref/csrf/) and [Ruby on
Rails](http://api.rubyonrails.org/classes/ActionController/RequestForgeryProtection.html)
approaches.
* Cookies are authenticated and based on the [securecookie](https://github.com/gorilla/securecookie)
library. They're also Secure (issued over HTTPS only) and are HttpOnly
by default, because sane defaults are important.
* Go's `crypto/rand` library is used to generate the 32 byte (256 bit) tokens
and the one-time-pad used for masking them.

This library does not seek to be adventurous.

## License

BSD licensed. See the LICENSE file for details.
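Tying together the README's advice above on key persistence with the X-CSRF-Token hand-off it describes, here is a hedged sketch; the CSRF_AUTH_KEY environment variable and its hex encoding are assumptions for illustration, not part of gorilla/csrf:

```go
package main

import (
	"encoding/hex"
	"log"
	"net/http"
	"os"

	"github.com/gorilla/csrf"
	"github.com/gorilla/mux"
)

func main() {
	// Per the README's note, the key must be stable across restarts, so read
	// it from the environment instead of generating it at startup.
	// CSRF_AUTH_KEY and its hex encoding are assumptions for this sketch.
	key, err := hex.DecodeString(os.Getenv("CSRF_AUTH_KEY"))
	if err != nil || len(key) != 32 {
		log.Fatal("CSRF_AUTH_KEY must be 32 hex-encoded bytes")
	}

	r := mux.NewRouter()
	r.HandleFunc("/", func(w http.ResponseWriter, req *http.Request) {
		// Expose the token so a JSON/JavaScript client can echo it back
		// in its own X-CSRF-Token header on later POSTs.
		w.Header().Set("X-CSRF-Token", csrf.Token(req))
	})

	log.Fatal(http.ListenAndServe(":8000", csrf.Protect(key)(r)))
}
```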
29
contrib/backends/srndv2/src/srnd/vendor/github.com/gorilla/csrf/context.go
generated
vendored
Normal file
@ -0,0 +1,29 @@
// +build go1.7

package csrf

import (
	"context"
	"net/http"

	"github.com/pkg/errors"
)

func contextGet(r *http.Request, key string) (interface{}, error) {
	val := r.Context().Value(key)
	if val == nil {
		return nil, errors.Errorf("no value exists in the context for key %q", key)
	}

	return val, nil
}

func contextSave(r *http.Request, key string, val interface{}) *http.Request {
	ctx := r.Context()
	ctx = context.WithValue(ctx, key, val)
	return r.WithContext(ctx)
}

func contextClear(r *http.Request) {
	// no-op for go1.7+
}
28
contrib/backends/srndv2/src/srnd/vendor/github.com/gorilla/csrf/context_legacy.go
generated
vendored
Normal file
@ -0,0 +1,28 @@
// +build !go1.7

package csrf

import (
	"net/http"

	"github.com/gorilla/context"

	"github.com/pkg/errors"
)

func contextGet(r *http.Request, key string) (interface{}, error) {
	if val, ok := context.GetOk(r, key); ok {
		return val, nil
	}

	return nil, errors.Errorf("no value exists in the context for key %q", key)
}

func contextSave(r *http.Request, key string, val interface{}) *http.Request {
	context.Set(r, key, val)
	return r
}

func contextClear(r *http.Request) {
	context.Clear(r)
}
Some files were not shown because too many files have changed in this diff