1
0
mirror of https://github.com/imgproxy/imgproxy.git synced 2025-01-23 11:14:48 +02:00

raw processing option

This commit is contained in:
DarthSim 2022-09-07 16:50:21 +06:00
parent 8a081a917e
commit 0f7281e56e
11 changed files with 245 additions and 10 deletions

View File

@ -2,6 +2,7 @@
## [Unreleased]
### Add
- Add [raw](https://docs.imgproxy.net/latest/generating_the_url?id=raw) processing option.
- (pro) Add encrypted source URL support.
## [3.7.2] - 2022-08-22

View File

@ -23,6 +23,7 @@ RUN apt-get update \
liblzma5 \
libzstd1 \
libpcre3 \
media-types \
&& rm -rf /var/lib/apt/lists/*
COPY --from=0 /usr/local/bin/imgproxy /usr/local/bin/

View File

@ -662,6 +662,21 @@ When set, imgproxy will skip the processing of the listed formats. Also availabl
Default: empty
### Raw
```
raw:%raw
```
When set to `1`, `t` or `true`, imgproxy will respond with a raw, unprocessed, and unchecked source image. There are some differences between the `raw` and `skip_processing` options:
* While the `skip_processing` option has some conditions to skip the processing, the `raw` option allows skipping the processing no matter what
* With the `raw` option set, imgproxy doesn't check the source image's type, resolution, and file size. Basically, the `raw` option allows streaming of any file type
* With the `raw` option set, imgproxy won't download the whole image to memory. Instead, it will stream the source image directly to the response, lowering memory usage
* Requests with the `raw` option set are not limited by the `IMGPROXY_CONCURRENCY` config
Default: `false`
### Cache buster
```

View File

@ -130,7 +130,7 @@ func headersToStore(res *http.Response) map[string]string {
return m
}
func requestImage(imageURL string, header http.Header, jar *cookiejar.Jar) (*http.Response, error) {
func BuildImageRequest(imageURL string, header http.Header, jar *cookiejar.Jar) (*http.Request, error) {
req, err := http.NewRequest("GET", imageURL, nil)
if err != nil {
return nil, ierrors.New(404, err.Error(), msgSourceImageIsUnreachable)
@ -158,12 +158,31 @@ func requestImage(imageURL string, header http.Header, jar *cookiejar.Jar) (*htt
}
}
return req, nil
}
// SendRequest performs the prepared source image request using the shared
// download client. Transport-level failures (including timeouts, which are
// normalized by checkTimeoutErr) are wrapped into a 500 error carrying the
// generic "source image is unreachable" message.
func SendRequest(req *http.Request) (*http.Response, error) {
	res, err := downloadClient.Do(req)
	if err == nil {
		return res, nil
	}

	return nil, ierrors.New(500, checkTimeoutErr(err).Error(), msgSourceImageIsUnreachable)
}
func requestImage(imageURL string, header http.Header, jar *cookiejar.Jar) (*http.Response, error) {
req, err := BuildImageRequest(imageURL, header, jar)
if err != nil {
return nil, err
}
res, err := SendRequest(req)
if err != nil {
return nil, err
}
if res.StatusCode == http.StatusNotModified {
res.Body.Close()
return nil, &ErrorNotModified{Message: "Not Modified", Headers: headersToStore(res)}
}

View File

@ -67,6 +67,15 @@ var (
}
)
// ByMime returns the image type whose registered MIME type equals the given
// string. It performs a reverse lookup over the package-level mimes table and
// returns Unknown when no image type is registered for that MIME type.
func ByMime(mime string) Type {
	for imgtype, registeredMime := range mimes {
		if registeredMime == mime {
			return imgtype
		}
	}

	return Unknown
}
func (it Type) String() string {
for k, v := range Types {
if v == it {

View File

@ -88,6 +88,20 @@ func StartProcessingSegment(ctx context.Context) context.CancelFunc {
return cancel
}
// StartStreamingSegment starts a "streaming" measurement segment in every
// enabled metrics backend (Prometheus, New Relic, and Datadog) and returns a
// single cancel function that finishes all of them at once.
func StartStreamingSegment(ctx context.Context) context.CancelFunc {
	cancelProm := prometheus.StartStreamingSegment()
	cancelNR := newrelic.StartSegment(ctx, "Streaming image")
	cancelDD := datadog.StartSpan(ctx, "streaming_image")

	return func() {
		cancelProm()
		cancelNR()
		cancelDD()
	}
}
func SendError(ctx context.Context, errType string, err error) {
prometheus.IncrementErrorsTotal(errType)
newrelic.SendError(ctx, errType, err)

View File

@ -195,6 +195,14 @@ func StartProcessingSegment() context.CancelFunc {
}
}
// StartStreamingSegment starts timing a "streaming" span and returns a
// function that records its duration. When Prometheus support is disabled,
// the returned function is a no-op.
func StartStreamingSegment() context.CancelFunc {
	if enabled {
		return startDuration(requestSpanDuration.With(prometheus.Labels{"span": "streaming"}))
	}

	return func() {}
}
func startDuration(m prometheus.Observer) context.CancelFunc {
t := time.Now()
return func() {

View File

@ -103,6 +103,8 @@ type ProcessingOptions struct {
Filename string
ReturnAttachment bool
Raw bool
UsedPresets []string
defaultQuality int
@ -775,6 +777,16 @@ func applySkipProcessingFormatsOption(po *ProcessingOptions, args []string) erro
return nil
}
// applyRawOption parses the `raw` processing option. When enabled, imgproxy
// streams the source image to the client without any processing or checks.
// It accepts a single boolean-like argument (`1`, `t`, or `true`).
func applyRawOption(po *ProcessingOptions, args []string) error {
	if len(args) > 1 {
		// Bug fix: the message previously said "return_attachment" — a
		// copy-paste leftover from applyReturnAttachmentOption.
		return fmt.Errorf("Invalid raw arguments: %v", args)
	}

	po.Raw = parseBoolOption(args[0])

	return nil
}
func applyFilenameOption(po *ProcessingOptions, args []string) error {
if len(args) > 1 {
return fmt.Errorf("Invalid filename arguments: %v", args)
@ -928,6 +940,8 @@ func applyURLOption(po *ProcessingOptions, name string, args []string) error {
// Handling options
case "skip_processing", "skp":
return applySkipProcessingFormatsOption(po, args)
case "raw":
return applyRawOption(po, args)
case "cachebuster", "cb":
return applyCacheBusterOption(po, args)
case "expires", "exp":

View File

@ -61,10 +61,10 @@ func setCacheControl(rw http.ResponseWriter, originHeaders map[string]string) {
var ttl int
if config.CacheControlPassthrough && originHeaders != nil {
if val, ok := originHeaders["Cache-Control"]; ok {
if val, ok := originHeaders["Cache-Control"]; ok && len(val) > 0 {
cacheControl = val
}
if val, ok := originHeaders["Expires"]; ok {
if val, ok := originHeaders["Expires"]; ok && len(val) > 0 {
expires = val
}
}
@ -92,6 +92,15 @@ func setVary(rw http.ResponseWriter) {
}
}
// setCanonical adds a `Link: <...>; rel="canonical"` response header pointing
// at the origin URL. The header is set only when the feature is enabled in the
// config and the origin URL uses an http(s) scheme.
func setCanonical(rw http.ResponseWriter, originURL string) {
	if !config.SetCanonicalHeader {
		return
	}

	if !strings.HasPrefix(originURL, "https://") && !strings.HasPrefix(originURL, "http://") {
		return
	}

	rw.Header().Set("Link", fmt.Sprintf(`<%s>; rel="canonical"`, originURL))
}
func respondWithImage(reqID string, r *http.Request, rw http.ResponseWriter, statusCode int, resultData *imagedata.ImageData, po *options.ProcessingOptions, originURL string, originData *imagedata.ImageData) {
var contentDisposition string
if len(po.Filename) > 0 {
@ -107,15 +116,9 @@ func respondWithImage(reqID string, r *http.Request, rw http.ResponseWriter, sta
rw.Header().Set("Content-DPR", strconv.FormatFloat(po.Dpr, 'f', 2, 32))
}
if config.SetCanonicalHeader {
if strings.HasPrefix(originURL, "https://") || strings.HasPrefix(originURL, "http://") {
linkHeader := fmt.Sprintf(`<%s>; rel="canonical"`, originURL)
rw.Header().Set("Link", linkHeader)
}
}
setCacheControl(rw, originData.Headers)
setVary(rw)
setCanonical(rw, originURL)
if config.EnableDebugHeaders {
rw.Header().Set("X-Origin-Content-Length", strconv.Itoa(len(originData.Data)))
@ -229,6 +232,11 @@ func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
))
}
if po.Raw {
streamOriginImage(ctx, reqID, r, rw, po, imageURL)
return
}
// SVG is a special case. Though saving to svg is not supported, SVG->SVG is.
if !vips.SupportsSave(po.Format) && po.Format != imagetype.Unknown && po.Format != imagetype.SVG {
sendErrAndPanic(ctx, "path_parsing", ierrors.New(

View File

@ -133,6 +133,10 @@ func withPanicHandler(h router.RouteHandler) router.RouteHandler {
return func(reqID string, rw http.ResponseWriter, r *http.Request) {
defer func() {
if rerr := recover(); rerr != nil {
if rerr == http.ErrAbortHandler {
panic(rerr)
}
err, ok := rerr.(error)
if !ok {
panic(rerr)

142
stream.go Normal file
View File

@ -0,0 +1,142 @@
package main
import (
"context"
"io"
"mime"
"net/http"
"net/http/cookiejar"
"net/url"
"path/filepath"
"strconv"
"sync"
log "github.com/sirupsen/logrus"
"github.com/imgproxy/imgproxy/v3/config"
"github.com/imgproxy/imgproxy/v3/cookies"
"github.com/imgproxy/imgproxy/v3/imagedata"
"github.com/imgproxy/imgproxy/v3/imagetype"
"github.com/imgproxy/imgproxy/v3/metrics"
"github.com/imgproxy/imgproxy/v3/metrics/stats"
"github.com/imgproxy/imgproxy/v3/options"
"github.com/imgproxy/imgproxy/v3/router"
)
var (
	// streamReqHeaders lists the client request headers that are forwarded
	// to the origin server when streaming a raw image.
	streamReqHeaders = []string{
		"If-None-Match",
		"Accept-Encoding",
		"Range",
	}

	// streamRespHeaders lists the origin response headers that are forwarded
	// back to the client when streaming a raw image.
	streamRespHeaders = []string{
		"Cache-Control",
		"Expires",
		"ETag",
		"Content-Type",
		"Content-Encoding",
		"Content-Range",
	}

	// streamBufPool recycles fixed-size 4 KiB copy buffers for io.CopyBuffer
	// so that streaming doesn't allocate a fresh buffer per request.
	// Pointers to slices are pooled to avoid an allocation on Put.
	streamBufPool = sync.Pool{
		New: func() interface{} {
			buf := make([]byte, 4096)
			return &buf
		},
	}
)
// streamOriginImage pipes the source image response directly to the client
// without downloading or processing it. It is used when the `raw` processing
// option is set. The origin body is copied through a pooled buffer, so the
// image is never held in memory as a whole.
func streamOriginImage(ctx context.Context, reqID string, r *http.Request, rw http.ResponseWriter, po *options.ProcessingOptions, imageURL string) {
	stats.IncImagesInProgress()
	defer stats.DecImagesInProgress()

	defer metrics.StartStreamingSegment(ctx)()

	var (
		cookieJar *cookiejar.Jar
		err       error
	)

	// Forward only the whitelisted client request headers to the origin.
	imgRequestHeader := make(http.Header)
	for _, k := range streamReqHeaders {
		if v := r.Header.Get(k); len(v) != 0 {
			imgRequestHeader.Set(k, v)
		}
	}

	if config.CookiePassthrough {
		cookieJar, err = cookies.JarFromRequest(r)
		checkErr(ctx, "streaming", err)
	}

	req, err := imagedata.BuildImageRequest(imageURL, imgRequestHeader, cookieJar)
	checkErr(ctx, "streaming", err)

	res, err := imagedata.SendRequest(req)
	checkErr(ctx, "streaming", err)
	defer res.Body.Close()

	// Copy the whitelisted origin response headers to the client response.
	// All of this must happen before rw.WriteHeader below.
	for _, k := range streamRespHeaders {
		vv := res.Header.Values(k)
		for _, v := range vv {
			rw.Header().Set(k, v)
		}
	}

	if res.ContentLength >= 0 {
		rw.Header().Set("Content-Length", strconv.Itoa(int(res.ContentLength)))
	}

	if res.StatusCode < 300 {
		// For successful responses, detect the image type to build a proper
		// Content-Disposition header: first from the origin's Content-Type,
		// then from the URL's file extension (also filling in Content-Type
		// from the extension in the latter case).
		imgtype := imagetype.Unknown

		if mimetype := rw.Header().Get("Content-Type"); len(mimetype) > 0 {
			imgtype = imagetype.ByMime(mimetype)
		} else {
			if u, uerr := url.Parse(imageURL); uerr == nil {
				if ext := filepath.Ext(u.Path); len(ext) > 1 {
					imgtype = imagetype.Types[ext[1:]]

					if mimetype := mime.TypeByExtension(ext); len(mimetype) > 0 {
						rw.Header().Set("Content-Type", mimetype)
					}
				}
			}
		}

		var contentDisposition string
		if len(po.Filename) > 0 {
			contentDisposition = imgtype.ContentDisposition(po.Filename, po.ReturnAttachment)
		} else {
			contentDisposition = imgtype.ContentDispositionFromURL(imageURL, po.ReturnAttachment)
		}

		rw.Header().Set("Content-Disposition", contentDisposition)
	}

	// Reuse the response's own Cache-Control/Expires values (possibly just
	// copied from the origin) so the shared cache-control logic applies.
	setCacheControl(rw, map[string]string{
		"Cache-Control": rw.Header().Get("Cache-Control"),
		"Expires":       rw.Header().Get("Expires"),
	})
	setCanonical(rw, imageURL)

	rw.WriteHeader(res.StatusCode)

	buf := streamBufPool.Get().(*[]byte)
	defer streamBufPool.Put(buf)

	if _, err := io.CopyBuffer(rw, res.Body, *buf); err != nil {
		// Headers are already sent, so we can't report an error response;
		// abort the connection instead (http.ErrAbortHandler is re-panicked
		// by the panic handler).
		panic(http.ErrAbortHandler)
	}

	router.LogResponse(
		reqID, r, res.StatusCode, nil,
		log.Fields{
			"image_url":          imageURL,
			"processing_options": po,
		},
	)
}