package main

import (
	"context"
	"crypto/tls"
	"fmt"
	"io"
	"io/ioutil"
	"net"
	"net/http"
	"time"

	"github.com/imgproxy/imgproxy/v2/imagemeta"
)

var (
	downloadClient *http.Client

	imageDataCtxKey          = ctxKey("imageData")
	cacheControlHeaderCtxKey = ctxKey("cacheControlHeader")
	expiresHeaderCtxKey      = ctxKey("expiresHeader")

	errSourceDimensionsTooBig      = newError(422, "Source image dimensions are too big", "Invalid source image")
	errSourceResolutionTooBig      = newError(422, "Source image resolution is too big", "Invalid source image")
	errSourceFileTooBig            = newError(422, "Source image file is too big", "Invalid source image")
	errSourceImageTypeNotSupported = newError(422, "Source image type not supported", "Invalid source image")
)

const msgSourceImageIsUnreachable = "Source image is unreachable"

var downloadBufPool *bufPool

// limitReader wraps an io.Reader and fails with errSourceFileTooBig once more
// than `left` bytes have been read through it.
type limitReader struct {
	r    io.Reader
	left int
}

func (lr *limitReader) Read(p []byte) (n int, err error) {
	n, err = lr.r.Read(p)
	lr.left -= n

	if err == nil && lr.left < 0 {
		err = errSourceFileTooBig
	}

	return
}
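
// limitReader is used by readAndCheckImage below to enforce conf.MaxSrcFileSize
// while the response body is being streamed, so oversized files are rejected
// early instead of being buffered in full.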

// initDownloading builds the shared HTTP client used to fetch source images,
// registering the optional local, S3, and GCS transports, and prepares the
// download buffer pool. It must run at startup before any image is downloaded.
func initDownloading() error {
	transport := &http.Transport{
		Proxy:               http.ProxyFromEnvironment,
		MaxIdleConns:        conf.Concurrency,
		MaxIdleConnsPerHost: conf.Concurrency,
		DisableCompression:  true,
		Dial:                (&net.Dialer{KeepAlive: 600 * time.Second}).Dial,
	}

	if conf.IgnoreSslVerification {
		transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
	}

	if conf.LocalFileSystemRoot != "" {
		transport.RegisterProtocol("local", newFsTransport())
	}

	if conf.S3Enabled {
		t, err := newS3Transport()
		if err != nil {
			return err
		}
		transport.RegisterProtocol("s3", t)
	}

	if conf.GCSEnabled {
		t, err := newGCSTransport()
		if err != nil {
			return err
		}
		transport.RegisterProtocol("gs", t)
	}

	downloadClient = &http.Client{
		Timeout:   time.Duration(conf.DownloadTimeout) * time.Second,
		Transport: transport,
	}

	downloadBufPool = newBufPool("download", conf.Concurrency, conf.DownloadBufferSize)

	imagemeta.SetMaxSvgCheckRead(conf.MaxSvgCheckBytes)

	return nil
}

// checkDimensions rejects images whose width or height exceeds
// conf.MaxSrcDimension or whose pixel count exceeds conf.MaxSrcResolution.
func checkDimensions(width, height int) error {
	if conf.MaxSrcDimension > 0 && (width > conf.MaxSrcDimension || height > conf.MaxSrcDimension) {
		return errSourceDimensionsTooBig
	}

	if width*height > conf.MaxSrcResolution {
		return errSourceResolutionTooBig
	}

	return nil
}

// checkTypeAndDimensions decodes just enough of the image to learn its format
// and size, then validates both before the rest of the file is buffered.
func checkTypeAndDimensions(r io.Reader) (imageType, error) {
	meta, err := imagemeta.DecodeMeta(r)
	if err == imagemeta.ErrFormat {
		return imageTypeUnknown, errSourceImageTypeNotSupported
	}
	if err != nil {
		return imageTypeUnknown, newUnexpectedError(err.Error(), 0)
	}

	imgtype, imgtypeOk := imageTypes[meta.Format()]
	if !imgtypeOk || !imageTypeLoadSupport(imgtype) {
		return imageTypeUnknown, errSourceImageTypeNotSupported
	}

	if err = checkDimensions(meta.Width(), meta.Height()); err != nil {
		return imageTypeUnknown, err
	}

	return imgtype, nil
}

// readAndCheckImage buffers the source image, validating its declared size,
// actual size, format, and dimensions along the way.
func readAndCheckImage(r io.Reader, contentLength int) (*imageData, error) {
	if conf.MaxSrcFileSize > 0 && contentLength > conf.MaxSrcFileSize {
		return nil, errSourceFileTooBig
	}

	buf := downloadBufPool.Get(contentLength)
	cancel := func() { downloadBufPool.Put(buf) }

	if conf.MaxSrcFileSize > 0 {
		r = &limitReader{r: r, left: conf.MaxSrcFileSize}
	}

	// checkTypeAndDimensions consumes only the image header; the tee mirrors
	// those bytes into buf so the ReadFrom below still captures the whole file.
	imgtype, err := checkTypeAndDimensions(io.TeeReader(r, buf))
	if err != nil {
		cancel()
		return nil, err
	}

	if _, err = buf.ReadFrom(r); err != nil {
		cancel()
		return nil, newError(404, err.Error(), msgSourceImageIsUnreachable)
	}

	return &imageData{buf.Bytes(), imgtype, cancel}, nil
}
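
// The cancel callback above returns buf to downloadBufPool; it is carried
// inside the returned imageData so that the buffer can be recycled once the
// image data is no longer needed (see imgdata.Close in downloadImage below).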

// requestImage fetches the source image over the shared download client and
// converts any transport or HTTP error into a 404 "unreachable" error.
func requestImage(imageURL string) (*http.Response, error) {
	req, err := http.NewRequest("GET", imageURL, nil)
	if err != nil {
		return nil, newError(404, err.Error(), msgSourceImageIsUnreachable).SetUnexpected(conf.ReportDownloadingErrors)
	}

	req.Header.Set("User-Agent", conf.UserAgent)

	res, err := downloadClient.Do(req)
	if err != nil {
		return res, newError(404, err.Error(), msgSourceImageIsUnreachable).SetUnexpected(conf.ReportDownloadingErrors)
	}

	if res.StatusCode != 200 {
		body, _ := ioutil.ReadAll(res.Body)
		msg := fmt.Sprintf("Can't download image; Status: %d; %s", res.StatusCode, string(body))
		return res, newError(404, msg, msgSourceImageIsUnreachable).SetUnexpected(conf.ReportDownloadingErrors)
	}

	return res, nil
}

// downloadImage fetches and validates the source image for the current request
// and stores the image data and the source's Cache-Control and Expires headers
// in the context. The returned CancelFunc must always be called once the
// caller is done with the image data.
func downloadImage(ctx context.Context) (context.Context, context.CancelFunc, error) {
	imageURL := getImageURL(ctx)

	if newRelicEnabled {
		newRelicCancel := startNewRelicSegment(ctx, "Downloading image")
		defer newRelicCancel()
	}

	if prometheusEnabled {
		defer startPrometheusDuration(prometheusDownloadDuration)()
	}

	res, err := requestImage(imageURL)
	if res != nil {
		defer res.Body.Close()
	}
	if err != nil {
		return ctx, func() {}, err
	}

	imgdata, err := readAndCheckImage(res.Body, int(res.ContentLength))
	if err != nil {
		return ctx, func() {}, err
	}

	ctx = context.WithValue(ctx, imageDataCtxKey, imgdata)
	ctx = context.WithValue(ctx, cacheControlHeaderCtxKey, res.Header.Get("Cache-Control"))
	ctx = context.WithValue(ctx, expiresHeaderCtxKey, res.Header.Get("Expires"))

	return ctx, imgdata.Close, err
}
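
// A minimal usage sketch (hypothetical caller; the actual wiring lives in the
// request handler elsewhere in the package):
//
//	ctx, cancel, err := downloadImage(ctx)
//	if err != nil {
//		return err
//	}
//	defer cancel()
//	imgdata := getImageData(ctx)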

func getImageData(ctx context.Context) *imageData {
	return ctx.Value(imageDataCtxKey).(*imageData)
}

func getCacheControlHeader(ctx context.Context) string {
	str, _ := ctx.Value(cacheControlHeaderCtxKey).(string)
	return str
}

func getExpiresHeader(ctx context.Context) string {
	str, _ := ctx.Value(expiresHeaderCtxKey).(string)
	return str
}