imgproxy/stream.go
Ewan Higgs 4944dfab30
Support Last-Modified response header and support If-Modified-Since request header. (#1147)
* Always return Last-Modified and support If-Modified-Since.

* IMGPROXY_USE_LAST_MODIFIED config setting.

IMGPROXY_USE_LAST_MODIFIED (default: false), when enabled, makes imgproxy return the
Last-Modified time of the upstream image and also enables support for the
If-Modified-Since request header (returning a 304 if the image hasn't been
modified).

When both headers are present, If-None-Match takes precedence over If-Modified-Since (see the sketch below).
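
A minimal sketch of that precedence check; the package, helper name, and signature below are illustrative only, not imgproxy's actual notmodified API:

package notmodified_sketch

import (
	"net/http"
	"time"
)

// notModified reports whether a 304 Not Modified can be returned.
// If-None-Match, when present, takes precedence over If-Modified-Since.
func notModified(req *http.Request, etag string, lastModified time.Time) bool {
	if inm := req.Header.Get("If-None-Match"); inm != "" {
		return inm == "*" || inm == etag
	}
	if ims := req.Header.Get("If-Modified-Since"); ims != "" {
		if t, err := http.ParseTime(ims); err == nil {
			// Last-Modified has one-second resolution, so compare at that precision.
			return !lastModified.Truncate(time.Second).After(t)
		}
	}
	return false
}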

* Fixes based on DarthSim's feedback.

1. Don't worry about nil maps.
2. Fix a test now that we use config.LastModifiedEnabled (and move
   its location in the test file to a more sensible place).
3. Update GCS transport code based on the refactoring of DarthSim.

In this iteration, we pull the Updated time from the GCS object attributes
and format it as a string. We then parse it back in the notmodified module.
It seems a bit roundabout to do it this way. If we agree on the approach here,
then AWS and Azure can follow.
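
A rough sketch of that approach, assuming the cloud.google.com/go/storage client; the helper name below is hypothetical, and the real integration point in imgproxy's GCS transport differs:

package gcs_sketch

import (
	"net/http"

	"cloud.google.com/go/storage"
)

// attrsLastModified formats the object's Updated timestamp as an HTTP date
// string so it can be sent as a Last-Modified header and parsed back later.
func attrsLastModified(attrs *storage.ObjectAttrs) string {
	return attrs.Updated.UTC().Format(http.TimeFormat)
}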

* Support azure, fs, s3, and swift.

* Grab the headers for If-Modified-Since and Last-Modified before parsing them.

* Add tests for last-modified for fs.

* Support Last-Modified being passed when streaming an upstream file.

* Tests for Last-Modified for GCS and Azure

* Support s3 and swift tests. Sadly fakes3 doesn't support Last-Modified

* Test against forked gofakes3
2023-05-03 21:21:46 +06:00


package main

import (
	"context"
	"io"
	"mime"
	"net/http"
	"net/http/cookiejar"
	"path/filepath"
	"strconv"
	"sync"

	log "github.com/sirupsen/logrus"

	"github.com/imgproxy/imgproxy/v3/config"
	"github.com/imgproxy/imgproxy/v3/cookies"
	"github.com/imgproxy/imgproxy/v3/imagedata"
	"github.com/imgproxy/imgproxy/v3/imagetype"
	"github.com/imgproxy/imgproxy/v3/metrics"
	"github.com/imgproxy/imgproxy/v3/metrics/stats"
	"github.com/imgproxy/imgproxy/v3/options"
	"github.com/imgproxy/imgproxy/v3/router"
)
var (
	// Request headers forwarded from the client to the origin.
	streamReqHeaders = []string{
		"If-None-Match",
		"Accept-Encoding",
		"Range",
	}

	// Response headers copied from the origin response to the client,
	// including Last-Modified for conditional-request support.
	streamRespHeaders = []string{
		"Cache-Control",
		"Expires",
		"ETag",
		"Content-Type",
		"Content-Encoding",
		"Content-Range",
		"Accept-Ranges",
		"Last-Modified",
	}

	// Pool of reusable 4 KiB buffers for io.CopyBuffer.
	streamBufPool = sync.Pool{
		New: func() interface{} {
			buf := make([]byte, 4096)
			return &buf
		},
	}
)
func streamOriginImage(ctx context.Context, reqID string, r *http.Request, rw http.ResponseWriter, po *options.ProcessingOptions, imageURL string) {
	stats.IncImagesInProgress()
	defer stats.DecImagesInProgress()

	defer metrics.StartStreamingSegment(ctx)()

	var (
		cookieJar *cookiejar.Jar
		err       error
	)

	// Forward the whitelisted request headers to the origin.
	imgRequestHeader := make(http.Header)
	for _, k := range streamReqHeaders {
		if v := r.Header.Get(k); len(v) != 0 {
			imgRequestHeader.Set(k, v)
		}
	}

	if config.CookiePassthrough {
		cookieJar, err = cookies.JarFromRequest(r)
		checkErr(ctx, "streaming", err)
	}

	req, reqCancel, err := imagedata.BuildImageRequest(r.Context(), imageURL, imgRequestHeader, cookieJar)
	defer reqCancel()
	checkErr(ctx, "streaming", err)

	res, err := imagedata.SendRequest(req)
	if res != nil {
		defer res.Body.Close()
	}
	checkErr(ctx, "streaming", err)

	// Copy the whitelisted response headers (including Last-Modified) to the client.
	for _, k := range streamRespHeaders {
		vv := res.Header.Values(k)
		for _, v := range vv {
			rw.Header().Set(k, v)
		}
	}

	if res.ContentLength >= 0 {
		rw.Header().Set("Content-Length", strconv.Itoa(int(res.ContentLength)))
	}

	if res.StatusCode < 300 {
		var filename, ext, mimetype string

		_, filename = filepath.Split(req.URL.Path)
		ext = filepath.Ext(filename)

		if len(po.Filename) > 0 {
			filename = po.Filename
		} else {
			filename = filename[:len(filename)-len(ext)]
		}

		mimetype = rw.Header().Get("Content-Type")

		if len(ext) == 0 && len(mimetype) > 0 {
			if exts, err := mime.ExtensionsByType(mimetype); err == nil && len(exts) != 0 {
				ext = exts[0]
			}
		}

		rw.Header().Set("Content-Disposition", imagetype.ContentDisposition(filename, ext, po.ReturnAttachment))
	}

	setCacheControl(rw, po.Expires, map[string]string{
		"Cache-Control": rw.Header().Get("Cache-Control"),
		"Expires":       rw.Header().Get("Expires"),
	})
	setCanonical(rw, imageURL)
	rw.Header().Set("Content-Security-Policy", "script-src 'none'")

	rw.WriteHeader(res.StatusCode)

	// Stream the body through a pooled buffer to avoid per-request allocations.
	buf := streamBufPool.Get().(*[]byte)
	defer streamBufPool.Put(buf)

	_, copyerr := io.CopyBuffer(rw, res.Body, *buf)

	router.LogResponse(
		reqID, r, res.StatusCode, nil,
		log.Fields{
			"image_url":          imageURL,
			"processing_options": po,
		},
	)

	if copyerr != nil {
		panic(http.ErrAbortHandler)
	}
}