feat(webui): add cached image proxy with configurable dir
Add disk-backed image proxy support to the web UI and expose it via `/api/image`. The proxy validates image URLs, fetches remote images with a timeout, stores image bytes + metadata in a local cache, and serves cached responses with proper content type and cache headers. Also add `SCRAPPR_IMAGE_CACHE` (default `.cache/webui-images`) and pass it through `cmd/outward-web` into `webui.Run`, with startup logging updated to include the cache location. This reduces repeated remote fetches and makes image delivery more reliable for the UI.
This commit is contained in:
@@ -1,14 +1,19 @@
|
||||
package webui
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"embed"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/fs"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"scrappr/internal/logx"
|
||||
"scrappr/internal/model"
|
||||
@@ -36,12 +41,28 @@ type CraftableEntry struct {
|
||||
Recipes []model.Recipe `json:"recipes"`
|
||||
}
|
||||
|
||||
func Run(addr, dataPath string) error {
|
||||
// imageProxy fetches remote wiki images on behalf of the web UI and serves
// them from a local on-disk cache keyed by the image URL.
type imageProxy struct {
	cacheDir string       // directory holding <key>.bin / <key>.json cache entries
	client   *http.Client // HTTP client used for remote fetches (configured with a timeout)
}

// cachedImageMeta is the JSON sidecar stored next to each cached image body.
type cachedImageMeta struct {
	ContentType string    `json:"content_type"` // Content-Type header to serve the cached bytes with
	SourceURL   string    `json:"source_url"`   // candidate URL the bytes were actually fetched from
	SavedAt     time.Time `json:"saved_at"`     // time the cache entry was written
}
|
||||
|
||||
func Run(addr, dataPath, imageCacheDir string) error {
|
||||
catalog, err := loadCatalog(dataPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
images, err := newImageProxy(imageCacheDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
staticFS, err := fs.Sub(staticFiles, "static")
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -63,13 +84,15 @@ func Run(addr, dataPath string) error {
|
||||
logx.Eventf("error", "catalog encode failed: %v", err)
|
||||
}
|
||||
})
|
||||
mux.HandleFunc("/api/image", images.handle)
|
||||
|
||||
logx.Eventf(
|
||||
"start",
|
||||
"web UI listening on %s using %s (%d craftable items)",
|
||||
"web UI listening on %s using %s (%d craftable items, image cache %s)",
|
||||
displayAddr(addr),
|
||||
dataPath,
|
||||
len(catalog.Craftables),
|
||||
imageCacheDir,
|
||||
)
|
||||
|
||||
return http.ListenAndServe(addr, mux)
|
||||
@@ -225,3 +248,228 @@ func setNoCache(w http.ResponseWriter) {
|
||||
w.Header().Set("Pragma", "no-cache")
|
||||
w.Header().Set("Expires", "0")
|
||||
}
|
||||
|
||||
func newImageProxy(cacheDir string) (*imageProxy, error) {
|
||||
if err := os.MkdirAll(cacheDir, 0o755); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &imageProxy{
|
||||
cacheDir: cacheDir,
|
||||
client: &http.Client{
|
||||
Timeout: 20 * time.Second,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (p *imageProxy) handle(w http.ResponseWriter, r *http.Request) {
|
||||
if r.Method != http.MethodGet {
|
||||
http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
|
||||
return
|
||||
}
|
||||
|
||||
rawURL := strings.TrimSpace(r.URL.Query().Get("url"))
|
||||
if rawURL == "" {
|
||||
http.Error(w, "missing url", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if !isAllowedImageURL(rawURL) {
|
||||
http.Error(w, "forbidden image host", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
cacheKey := cacheKeyFor(rawURL)
|
||||
if p.serveCached(w, cacheKey) {
|
||||
return
|
||||
}
|
||||
|
||||
contentType, body, sourceURL, err := p.fetchRemote(rawURL)
|
||||
if err != nil {
|
||||
logx.Eventf("warn", "image proxy failed for %s: %v", rawURL, err)
|
||||
http.Error(w, "image unavailable", http.StatusBadGateway)
|
||||
return
|
||||
}
|
||||
|
||||
if err := p.storeCached(cacheKey, contentType, body, sourceURL); err != nil {
|
||||
logx.Eventf("warn", "image cache store failed for %s: %v", rawURL, err)
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", contentType)
|
||||
w.Header().Set("Cache-Control", "public, max-age=86400")
|
||||
_, _ = w.Write(body)
|
||||
}
|
||||
|
||||
func (p *imageProxy) serveCached(w http.ResponseWriter, cacheKey string) bool {
|
||||
bodyPath := filepath.Join(p.cacheDir, cacheKey+".bin")
|
||||
metaPath := filepath.Join(p.cacheDir, cacheKey+".json")
|
||||
|
||||
metaBytes, err := os.ReadFile(metaPath)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
bodyBytes, err := os.ReadFile(bodyPath)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
var meta cachedImageMeta
|
||||
if err := json.Unmarshal(metaBytes, &meta); err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if meta.ContentType == "" {
|
||||
meta.ContentType = "image/png"
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", meta.ContentType)
|
||||
w.Header().Set("Cache-Control", "public, max-age=86400")
|
||||
_, _ = w.Write(bodyBytes)
|
||||
return true
|
||||
}
|
||||
|
||||
func (p *imageProxy) storeCached(cacheKey, contentType string, body []byte, sourceURL string) error {
|
||||
bodyPath := filepath.Join(p.cacheDir, cacheKey+".bin")
|
||||
metaPath := filepath.Join(p.cacheDir, cacheKey+".json")
|
||||
|
||||
meta := cachedImageMeta{
|
||||
ContentType: contentType,
|
||||
SourceURL: sourceURL,
|
||||
SavedAt: time.Now(),
|
||||
}
|
||||
|
||||
metaBytes, err := json.Marshal(meta)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := os.WriteFile(bodyPath, body, 0o644); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return os.WriteFile(metaPath, metaBytes, 0o644)
|
||||
}
|
||||
|
||||
func (p *imageProxy) fetchRemote(rawURL string) (string, []byte, string, error) {
|
||||
var lastErr error
|
||||
for _, candidate := range imageCandidates(rawURL) {
|
||||
req, err := http.NewRequest(http.MethodGet, candidate, nil)
|
||||
if err != nil {
|
||||
lastErr = err
|
||||
continue
|
||||
}
|
||||
|
||||
req.Header.Set("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/134.0.0.0 Safari/537.36")
|
||||
req.Header.Set("Accept", "image/avif,image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8")
|
||||
req.Header.Set("Accept-Language", "en-US,en;q=0.9")
|
||||
req.Header.Set("Referer", "https://outward.fandom.com/")
|
||||
req.Header.Set("Sec-Fetch-Dest", "image")
|
||||
req.Header.Set("Sec-Fetch-Mode", "no-cors")
|
||||
req.Header.Set("Sec-Fetch-Site", "cross-site")
|
||||
|
||||
resp, err := p.client.Do(req)
|
||||
if err != nil {
|
||||
lastErr = err
|
||||
continue
|
||||
}
|
||||
|
||||
body, readErr := io.ReadAll(resp.Body)
|
||||
_ = resp.Body.Close()
|
||||
if readErr != nil {
|
||||
lastErr = readErr
|
||||
continue
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
lastErr = fmt.Errorf("status %d from %s", resp.StatusCode, candidate)
|
||||
continue
|
||||
}
|
||||
|
||||
contentType := resp.Header.Get("Content-Type")
|
||||
if !strings.HasPrefix(contentType, "image/") {
|
||||
contentType = "image/png"
|
||||
}
|
||||
|
||||
logx.Eventf("cache", "fetched image %s", candidate)
|
||||
return contentType, body, candidate, nil
|
||||
}
|
||||
|
||||
if lastErr == nil {
|
||||
lastErr = fmt.Errorf("no usable candidate")
|
||||
}
|
||||
return "", nil, "", lastErr
|
||||
}
|
||||
|
||||
func imageCandidates(raw string) []string {
|
||||
seen := map[string]bool{}
|
||||
var out []string
|
||||
|
||||
push := func(value string) {
|
||||
value = strings.TrimSpace(value)
|
||||
if value == "" || seen[value] {
|
||||
return
|
||||
}
|
||||
seen[value] = true
|
||||
out = append(out, value)
|
||||
}
|
||||
|
||||
normalized := normalizeRemoteImageURL(raw)
|
||||
push(normalized)
|
||||
push(removeQuery(normalized))
|
||||
|
||||
withoutRevision := strings.Replace(normalized, "/revision/latest", "", 1)
|
||||
push(withoutRevision)
|
||||
push(removeQuery(withoutRevision))
|
||||
|
||||
push(raw)
|
||||
push(removeQuery(raw))
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
// normalizeRemoteImageURL canonicalizes a wiki image URL: it trims
// surrounding whitespace, upgrades protocol-relative ("//") URLs to https,
// and collapses ".../revision/latest/scale-to-width-down/<n>" back to
// ".../revision/latest", preserving any query string.
func normalizeRemoteImageURL(raw string) string {
	trimmed := strings.TrimSpace(raw)
	if strings.HasPrefix(trimmed, "//") {
		trimmed = "https:" + trimmed
	}

	base, query, hasQuery := strings.Cut(trimmed, "?")

	const scaledMarker = "/revision/latest/scale-to-width-down/"
	if idx := strings.Index(base, scaledMarker); idx >= 0 {
		base = base[:idx] + "/revision/latest"
	}

	if hasQuery {
		return base + "?" + query
	}
	return base
}
|
||||
|
||||
// removeQuery strips the query string: everything from the first "?" on.
func removeQuery(raw string) string {
	base, _, _ := strings.Cut(raw, "?")
	return base
}
|
||||
|
||||
// cacheKeyFor maps an image URL to a stable, filesystem-safe cache key:
// the lowercase hex SHA-256 digest of the whitespace-trimmed URL.
func cacheKeyFor(raw string) string {
	digest := sha256.Sum256([]byte(strings.TrimSpace(raw)))
	return fmt.Sprintf("%x", digest)
}
|
||||
|
||||
// isAllowedImageURL reports whether raw points at the Outward wiki's image
// CDN: only static.wikia.nocookie.net paths under /outward_gamepedia/images/
// are accepted, and only via http(s) or protocol-relative URLs, so the proxy
// cannot be used to fetch arbitrary hosts or schemes.
//
// Fix over the previous version: the URL scheme is now restricted; before,
// any scheme (ftp:, file:, ...) with a matching host/path passed the check.
func isAllowedImageURL(raw string) bool {
	parsed, err := url.Parse(strings.TrimSpace(raw))
	if err != nil {
		return false
	}

	// Empty scheme covers protocol-relative "//host/..." inputs, which
	// normalizeRemoteImageURL later upgrades to https.
	switch strings.ToLower(parsed.Scheme) {
	case "", "http", "https":
	default:
		return false
	}

	if strings.ToLower(parsed.Host) != "static.wikia.nocookie.net" {
		return false
	}

	return strings.Contains(parsed.Path, "/outward_gamepedia/images/")
}
|
||||
|
||||
Reference in New Issue
Block a user