2019-06-03 19:02:46 +02:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
|
|
|
"context"
|
|
|
|
"fmt"
|
|
|
|
"net/http"
|
|
|
|
"strconv"
|
|
|
|
"strings"
|
|
|
|
"time"
|
|
|
|
)
|
|
|
|
|
|
|
|
var (
	// responseGzipBufPool provides reusable byte buffers for gzip-compressed
	// response bodies. Initialized by initProcessingHandler only when
	// conf.GZipCompression > 0; nil otherwise.
	responseGzipBufPool *bufPool
	// responseGzipPool provides reusable gzip writers paired with the
	// buffers above. Initialized only when gzip compression is enabled.
	responseGzipPool *gzipPool

	// processingSem limits the number of concurrently handled processing
	// requests to conf.Concurrency (acquired/released in handleProcessing).
	processingSem chan struct{}

	// headerVaryValue is the precomputed value of the Vary response header,
	// built once in initProcessingHandler from the enabled features.
	headerVaryValue string
)
|
|
|
|
|
2020-02-27 17:44:59 +02:00
|
|
|
func initProcessingHandler() error {
|
2019-06-03 19:02:46 +02:00
|
|
|
processingSem = make(chan struct{}, conf.Concurrency)
|
|
|
|
|
|
|
|
if conf.GZipCompression > 0 {
|
2020-02-27 17:44:59 +02:00
|
|
|
var err error
|
2019-06-03 19:02:46 +02:00
|
|
|
responseGzipBufPool = newBufPool("gzip", conf.Concurrency, conf.GZipBufferSize)
|
2020-02-27 17:44:59 +02:00
|
|
|
if responseGzipPool, err = newGzipPool(conf.Concurrency); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2019-06-03 19:02:46 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
vary := make([]string, 0)
|
|
|
|
|
|
|
|
if conf.EnableWebpDetection || conf.EnforceWebp {
|
|
|
|
vary = append(vary, "Accept")
|
|
|
|
}
|
|
|
|
|
|
|
|
if conf.GZipCompression > 0 {
|
|
|
|
vary = append(vary, "Accept-Encoding")
|
|
|
|
}
|
|
|
|
|
|
|
|
if conf.EnableClientHints {
|
|
|
|
vary = append(vary, "DPR", "Viewport-Width", "Width")
|
|
|
|
}
|
|
|
|
|
|
|
|
headerVaryValue = strings.Join(vary, ", ")
|
2020-02-27 17:44:59 +02:00
|
|
|
|
|
|
|
return nil
|
2019-06-03 19:02:46 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
func respondWithImage(ctx context.Context, reqID string, r *http.Request, rw http.ResponseWriter, data []byte) {
|
|
|
|
po := getProcessingOptions(ctx)
|
|
|
|
|
2019-08-13 13:42:47 +02:00
|
|
|
var contentDisposition string
|
|
|
|
if len(po.Filename) > 0 {
|
|
|
|
contentDisposition = po.Format.ContentDisposition(po.Filename)
|
|
|
|
} else {
|
|
|
|
contentDisposition = po.Format.ContentDispositionFromURL(getImageURL(ctx))
|
|
|
|
}
|
|
|
|
|
2019-06-03 19:02:46 +02:00
|
|
|
rw.Header().Set("Content-Type", po.Format.Mime())
|
2019-08-13 13:42:47 +02:00
|
|
|
rw.Header().Set("Content-Disposition", contentDisposition)
|
2019-06-03 19:02:46 +02:00
|
|
|
|
2020-02-04 11:23:41 +02:00
|
|
|
var cacheControl, expires string
|
|
|
|
|
|
|
|
if conf.CacheControlPassthrough {
|
|
|
|
cacheControl = getCacheControlHeader(ctx)
|
|
|
|
expires = getExpiresHeader(ctx)
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(cacheControl) == 0 && len(expires) == 0 {
|
|
|
|
cacheControl = fmt.Sprintf("max-age=%d, public", conf.TTL)
|
|
|
|
expires = time.Now().Add(time.Second * time.Duration(conf.TTL)).Format(http.TimeFormat)
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(cacheControl) > 0 {
|
|
|
|
rw.Header().Set("Cache-Control", cacheControl)
|
|
|
|
}
|
|
|
|
if len(expires) > 0 {
|
|
|
|
rw.Header().Set("Expires", expires)
|
|
|
|
}
|
|
|
|
|
2019-06-03 19:02:46 +02:00
|
|
|
if len(headerVaryValue) > 0 {
|
|
|
|
rw.Header().Set("Vary", headerVaryValue)
|
|
|
|
}
|
|
|
|
|
|
|
|
if conf.GZipCompression > 0 && strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") {
|
|
|
|
buf := responseGzipBufPool.Get(0)
|
|
|
|
defer responseGzipBufPool.Put(buf)
|
|
|
|
|
|
|
|
gz := responseGzipPool.Get(buf)
|
|
|
|
defer responseGzipPool.Put(gz)
|
|
|
|
|
|
|
|
gz.Write(data)
|
|
|
|
gz.Close()
|
|
|
|
|
|
|
|
rw.Header().Set("Content-Encoding", "gzip")
|
|
|
|
rw.Header().Set("Content-Length", strconv.Itoa(buf.Len()))
|
|
|
|
|
|
|
|
rw.WriteHeader(200)
|
|
|
|
rw.Write(buf.Bytes())
|
|
|
|
} else {
|
|
|
|
rw.Header().Set("Content-Length", strconv.Itoa(len(data)))
|
|
|
|
rw.WriteHeader(200)
|
|
|
|
rw.Write(data)
|
|
|
|
}
|
|
|
|
|
2019-09-16 11:53:45 +02:00
|
|
|
imageURL := getImageURL(ctx)
|
|
|
|
|
|
|
|
logResponse(reqID, r, 200, nil, &imageURL, po)
|
|
|
|
// logResponse(reqID, r, 200, getTimerSince(ctx), getImageURL(ctx), po))
|
|
|
|
}
|
|
|
|
|
|
|
|
func respondWithNotModified(ctx context.Context, reqID string, r *http.Request, rw http.ResponseWriter) {
|
|
|
|
rw.WriteHeader(304)
|
|
|
|
|
|
|
|
imageURL := getImageURL(ctx)
|
|
|
|
|
|
|
|
logResponse(reqID, r, 304, nil, &imageURL, getProcessingOptions(ctx))
|
2019-06-03 19:02:46 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) {
|
2019-09-16 11:53:45 +02:00
|
|
|
ctx := r.Context()
|
2019-06-03 19:02:46 +02:00
|
|
|
|
|
|
|
if newRelicEnabled {
|
|
|
|
var newRelicCancel context.CancelFunc
|
|
|
|
ctx, newRelicCancel = startNewRelicTransaction(ctx, rw, r)
|
|
|
|
defer newRelicCancel()
|
|
|
|
}
|
|
|
|
|
|
|
|
if prometheusEnabled {
|
|
|
|
prometheusRequestsTotal.Inc()
|
|
|
|
defer startPrometheusDuration(prometheusRequestDuration)()
|
|
|
|
}
|
|
|
|
|
|
|
|
processingSem <- struct{}{}
|
|
|
|
defer func() { <-processingSem }()
|
|
|
|
|
2019-09-16 11:53:45 +02:00
|
|
|
ctx, timeoutCancel := context.WithTimeout(ctx, time.Duration(conf.WriteTimeout)*time.Second)
|
2019-06-03 19:02:46 +02:00
|
|
|
defer timeoutCancel()
|
|
|
|
|
|
|
|
ctx, err := parsePath(ctx, r)
|
|
|
|
if err != nil {
|
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
ctx, downloadcancel, err := downloadImage(ctx)
|
|
|
|
defer downloadcancel()
|
|
|
|
if err != nil {
|
|
|
|
if newRelicEnabled {
|
|
|
|
sendErrorToNewRelic(ctx, err)
|
|
|
|
}
|
|
|
|
if prometheusEnabled {
|
|
|
|
incrementPrometheusErrorsTotal("download")
|
|
|
|
}
|
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
checkTimeout(ctx)
|
|
|
|
|
|
|
|
if conf.ETagEnabled {
|
|
|
|
eTag := calcETag(ctx)
|
|
|
|
rw.Header().Set("ETag", eTag)
|
|
|
|
|
|
|
|
if eTag == r.Header.Get("If-None-Match") {
|
2019-09-16 11:53:45 +02:00
|
|
|
respondWithNotModified(ctx, reqID, r, rw)
|
2019-06-03 19:02:46 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
checkTimeout(ctx)
|
|
|
|
|
|
|
|
imageData, processcancel, err := processImage(ctx)
|
|
|
|
defer processcancel()
|
|
|
|
if err != nil {
|
|
|
|
if newRelicEnabled {
|
|
|
|
sendErrorToNewRelic(ctx, err)
|
|
|
|
}
|
|
|
|
if prometheusEnabled {
|
|
|
|
incrementPrometheusErrorsTotal("processing")
|
|
|
|
}
|
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
checkTimeout(ctx)
|
|
|
|
|
|
|
|
respondWithImage(ctx, reqID, r, rw, imageData)
|
|
|
|
}
|