package app

import (
	"encoding/base64"
	"io"
	"net/http"
	u "net/url"
	"os"
	"strconv"
	"strings"
	"syscall"
	"text/template"
	"time"

	"git.macaw.me/skunky/devianter"
	"golang.org/x/net/html"
)

/* INTERNAL */

// exit prints msg and terminates the process with the given status code.
func exit(msg string, code int) {
	println(msg)
	os.Exit(code)
}

// try logs the error, if any, and carries on.
func try(e error) {
	if e != nil {
		println(e.Error())
	}
}

// try_with_exitstatus terminates the process when err is non-nil.
func try_with_exitstatus(err error, code int) {
	if err != nil {
		exit(err.Error(), code)
	}
}

// ExecuteTemplate renders the named template from the in-memory Templates map
// and writes the result to the client.
func (s skunkyart) ExecuteTemplate(file string, data any) {
	var buf strings.Builder
	tmp := template.New(file)
	tmp, e := tmp.Parse(Templates[file])
	try(e)
	try(tmp.Execute(&buf, &data))
	wr(s.Writer, buf.String())
}
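
// Usage sketch (hypothetical template name; assumes the file was loaded by
// CopyTemplatesToMemory, which keys Templates as "<dir>/<filename>"):
//
//	s.ExecuteTemplate("html/index.htm", &page)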

// UrlBuilder joins path segments onto Host and CFG.BasePath, inserting "/"
// between segments unless the current or next segment is a query string
// starting with '?' or '&'.
func UrlBuilder(strs ...string) string {
	var str strings.Builder
	l := len(strs)
	str.WriteString(Host)
	str.WriteString(CFG.BasePath)
	for n, x := range strs {
		str.WriteString(x)
		if n+1 < l && !(strs[n+1][0] == '?' || strs[n+1][0] == '&') && !(x[0] == '?' || x[0] == '&') {
			str.WriteString("/")
		}
	}
	return str.String()
}
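
// Example of the joining behaviour (a sketch; assumes Host is
// "http://127.0.0.1:3003/" and CFG.BasePath is empty):
//
//	UrlBuilder("post", "artist", "work-123", "?p=2")
//	// -> "http://127.0.0.1:3003/post/artist/work-123?p=2"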

// ReturnHTTPError writes a minimal error page for the given HTTP status code.
func (s skunkyart) ReturnHTTPError(status int) {
	s.Writer.WriteHeader(status)

	var msg strings.Builder
	msg.WriteString(`<html><link rel="stylesheet" href="`)
	msg.WriteString(UrlBuilder("stylesheet"))
	msg.WriteString(`" /><h1>`)
	msg.WriteString(strconv.Itoa(status))
	msg.WriteString(" - ")
	msg.WriteString(http.StatusText(status))
	msg.WriteString("</h1></html>")

	wr(s.Writer, msg.String())
}

// Downloaded holds the result of a single HTTP GET made by Download.
type Downloaded struct {
	Headers http.Header
	Status  int
	Body    []byte
}

// Download performs a GET request against url, optionally through
// CFG.DownloadProxy, and returns the body, status code and headers.
func Download(url string) (d Downloaded) {
	cli := &http.Client{}
	if CFG.DownloadProxy != "" {
		proxy, e := u.Parse(CFG.DownloadProxy)
		try(e)
		cli.Transport = &http.Transport{Proxy: http.ProxyURL(proxy)}
	}

	req, e := http.NewRequest("GET", url, nil)
	try(e)
	req.Header.Set("User-Agent", "Mozilla/5.0 (X11; Linux x86_64; rv:123.0) Gecko/20100101 Firefox/123.0.0")

	resp, e := cli.Do(req)
	try(e)
	if e != nil {
		// a failed request leaves resp nil; return instead of dereferencing it
		return
	}
	defer resp.Body.Close()
	b, e := io.ReadAll(resp.Body)
	try(e)

	d.Body = b
	d.Status = resp.StatusCode
	d.Headers = resp.Header
	return
}
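
// Usage sketch (hypothetical wixmp URL):
//
//	d := Download("https://images-wixmp-0000.wixmp.com/f/img.png?token=abc")
//	if d.Status == 200 {
//		// d.Body holds the raw bytes, d.Headers e.g. the Content-Type
//	}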

// DownloadAndSendMedia proxies a wixmp image to the client, serving and
// storing it through the on-disk cache when caching is enabled.
func (s skunkyart) DownloadAndSendMedia(subdomain, path string) {
	var url strings.Builder
	url.WriteString("https://images-wixmp-")
	url.WriteString(subdomain)
	url.WriteString(".wixmp.com/")
	url.WriteString(path)
	url.WriteString("?token=")
	url.WriteString(s.Args.Get("token"))

	if CFG.Cache.Enabled {
		os.Mkdir(CFG.Cache.Path, 0700)
		fname := CFG.Cache.Path + "/" + base64.StdEncoding.EncodeToString([]byte(subdomain+path))
		file, e := os.Open(fname)

		if e != nil {
			// not cached yet: fetch, store and serve
			dwnld := Download(url.String())
			if dwnld.Status == 200 && strings.HasPrefix(dwnld.Headers.Get("Content-Type"), "image") {
				try(os.WriteFile(fname, dwnld.Body, 0700))
				s.Writer.Write(dwnld.Body)
			}
		} else {
			// serve the cached copy
			defer file.Close()
			content, e := io.ReadAll(file)
			try(e)
			s.Writer.Write(content)
		}
	} else if CFG.Proxy {
		dwnld := Download(url.String())
		s.Writer.Write(dwnld.Body)
	} else {
		s.Writer.WriteHeader(403)
		s.Writer.Write([]byte("Sorry, but media proxying is disabled on this instance."))
	}
}

// InitCacheSystem periodically scans the cache directory, removing entries
// older than Cache.Lifetime and pruning when the directory grows past
// Cache.MaxSize. It loops forever, sleeping Cache.UpdateInterval seconds
// between scans.
func InitCacheSystem() {
	c := &CFG.Cache
	for {
		dir, e := os.Open(c.Path)
		try(e)
		stat, e := dir.Stat()
		try(e)

		dirnames, e := dir.Readdirnames(-1)
		try(e)
		for _, a := range dirnames {
			a = c.Path + "/" + a
			if c.Lifetime != 0 {
				now := time.Now().UnixMilli()

				f, _ := os.Stat(a)
				sys := f.Sys().(*syscall.Stat_t)
				ctime := time.Unix(sys.Ctim.Unix()).UnixMilli()

				if ctime+c.Lifetime <= now {
					try(os.RemoveAll(a))
				}
			}
			if c.MaxSize != 0 && stat.Size() > c.MaxSize {
				try(os.RemoveAll(a))
			}
		}

		dir.Close()
		time.Sleep(time.Second * time.Duration(CFG.Cache.UpdateInterval))
	}
}
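
// InitCacheSystem never returns, so it is presumably started alongside the
// HTTP server in its own goroutine, e.g.:
//
//	go InitCacheSystem()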

// CopyTemplatesToMemory reads every file in the configured template
// directories into the Templates map, keyed by "<dir>/<filename>".
func CopyTemplatesToMemory() {
	for _, dirname := range CFG.Dirs {
		dir, e := os.ReadDir(dirname)
		try_with_exitstatus(e, 1)

		for _, x := range dir {
			n := dirname + "/" + x.Name()
			file, e := os.ReadFile(n)
			try_with_exitstatus(e, 1)
			Templates[n] = string(file)
		}
	}
}

/* PARSING HELPERS */

// ParseMedia converts a devianter.Media value into a URL. When proxying is
// enabled, the wixmp URL is rewritten to this instance's media/file route.
func ParseMedia(media devianter.Media) string {
	url := devianter.UrlFromMedia(media)
	if len(url) != 0 && CFG.Proxy {
		// drop the "https://images-wixmp-" prefix (21 bytes), then split the
		// rest into subdomain and path around ".wixmp.com/" (11 bytes)
		url = url[21:]
		dot := strings.Index(url, ".")

		return UrlBuilder("media", "file", url[:dot], url[dot+11:])
	}
	return url
}
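
// Sketch with a made-up media URL: if UrlFromMedia yields
// "https://images-wixmp-0000.wixmp.com/f/abc/img.png", the proxied result is
// UrlBuilder("media", "file", "0000", "f/abc/img.png").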

// ConvertDeviantArtUrlToSkunkyArt rewrites a deviantart.com art URL into the
// corresponding post URL on this instance. Stash links and URLs it cannot
// parse yield an empty string.
func ConvertDeviantArtUrlToSkunkyArt(url string) (output string) {
	if len(url) > 32 && url[27:32] != "stash" {
		// drop the "https://www.deviantart.com/" prefix (27 bytes)
		url = url[27:]
		toart := strings.Index(url, "/art/")
		if toart != -1 {
			output = UrlBuilder("post", url[:toart], url[toart+5:])
		}
	}
	return
}
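
// Usage sketch (hypothetical artist and title):
//
//	ConvertDeviantArtUrlToSkunkyArt("https://www.deviantart.com/artist/art/some-title-123456")
//	// -> UrlBuilder("post", "artist", "some-title-123456")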

// BuildUserPlate renders a small HTML plate with a user's avatar and a link
// to their page.
func BuildUserPlate(name string) string {
	var htm strings.Builder
	htm.WriteString(`<div class="user-plate"><img src="`)
	htm.WriteString(UrlBuilder("media", "emojitar", name, "?type=a"))
	htm.WriteString(`"><a href="`)
	htm.WriteString(UrlBuilder("group_user", "?type=about&q=", name))
	htm.WriteString(`">`)
	htm.WriteString(name)
	htm.WriteString(`</a></div>`)
	return htm.String()
}

// GetValueOfTag advances the tokenizer one step and returns the token's text
// if it is a text token, or an empty string otherwise.
func GetValueOfTag(t *html.Tokenizer) string {
	if t.Next() == html.TextToken {
		return string(t.Text())
	}
	return ""
}
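
// Usage sketch (hypothetical markup):
//
//	t := html.NewTokenizer(strings.NewReader("<title>hello</title>"))
//	t.Next()                  // consume the <title> start tag
//	title := GetValueOfTag(t) // "hello"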

// page navigation
type DeviationList struct {
	Pages int
	More  bool
}

// FIXME: on some art pages the first page can cause the navigation panel to be missing entirely.
func (s skunkyart) NavBase(c DeviationList) string {
	// TODO: make this clearer
	// page navigation
	var list strings.Builder
	list.WriteString("<br>")
	p := s.Page

	// helper that writes a link to the given page, or just the page number
	// when onpage is true (i.e. it is the current page)
	prevrev := func(msg string, page int, onpage bool) {
		if !onpage {
			list.WriteString(`<a href="?p=`)
			list.WriteString(strconv.Itoa(page))
			if s.Type != 0 {
				list.WriteString("&type=")
				list.WriteRune(s.Type)
			}
			if s.Query != "" {
				list.WriteString("&q=")
				list.WriteString(s.Query)
			}
			if f := s.Args.Get("folder"); f != "" {
				list.WriteString("&folder=")
				list.WriteString(f)
			}
			list.WriteString(`">`)
			list.WriteString(msg)
			list.WriteString("</a> ")
		} else {
			list.WriteString(strconv.Itoa(page))
			list.WriteString(" ")
		}
	}

	// previous page link
	if p > 1 {
		prevrev("<= Prev |", p-1, false)
	} else {
		p = 1
	}

	if c.Pages > 0 {
		// pages before the current one
		for x := p - 6; x < p && x > 0; x++ {
			prevrev(strconv.Itoa(x), x, false)
		}

		// pages after the current one
		for x := p; x <= p+6 && c.Pages > p+6; x++ {
			if x == p {
				prevrev("", x, true)
				x++
			}

			if x > p {
				prevrev(strconv.Itoa(x), x, false)
			}
		}
	}

	// next page link
	if c.More {
		prevrev("| Next =>", p+1, false)
	}

	return list.String()
}