Mirror of https://github.com/imgproxy/imgproxy.git (synced 2025-10-30 23:08:02 +02:00)

Global refactoring
		| @@ -98,7 +98,7 @@ jobs: | ||||
|           command: curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | BINARY=golangci-lint sh -s -- -b $(go env GOPATH)/bin v1.18.0 | ||||
|       - run: | ||||
|           name: Lint imgproxy | ||||
|           command: golangci-lint run . | ||||
|           command: golangci-lint run | ||||
|  | ||||
|   build: | ||||
|     executor: imgproxy | ||||
| @@ -119,7 +119,7 @@ jobs: | ||||
|             - go-modules-{{ checksum "go.sum" }} | ||||
|       - run: | ||||
|           name: Build imgproxy | ||||
|           command: go test -v | ||||
|           command: go test -v ./... | ||||
|       - save_cache: | ||||
|           key: go-modules-{{ checksum "go.sum" }} | ||||
|           paths: | ||||
|   | ||||
| @@ -35,12 +35,12 @@ issues: | ||||
|     # False positives on CGO generated code | ||||
|     - linters: [staticcheck] | ||||
|       text: "SA4000:" | ||||
|       path: vips\.go | ||||
|       path: vips/* | ||||
|  | ||||
|     # False positives on CGO generated code | ||||
|     - linters: [gocritic] | ||||
|       text: "dupSubExpr" | ||||
|       path: vips\.go | ||||
|       path: vips/* | ||||
|  | ||||
|     - linters: [stylecheck] | ||||
|       text: "ST1005:" | ||||
|   | ||||
| @@ -1,10 +1,14 @@ | ||||
| package main | ||||
| package bufpool | ||||
| 
 | ||||
| import ( | ||||
| 	"bytes" | ||||
| 	"runtime" | ||||
| 	"sort" | ||||
| 	"sync" | ||||
| 
 | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imath" | ||||
| 	"github.com/imgproxy/imgproxy/v2/metrics/prometheus" | ||||
| ) | ||||
| 
 | ||||
| type intSlice []int | ||||
| @@ -13,7 +17,7 @@ func (p intSlice) Len() int           { return len(p) } | ||||
| func (p intSlice) Less(i, j int) bool { return p[i] < p[j] } | ||||
| func (p intSlice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] } | ||||
| 
 | ||||
| type bufPool struct { | ||||
| type Pool struct { | ||||
| 	name        string | ||||
| 	defaultSize int | ||||
| 	maxSize     int | ||||
| @@ -25,12 +29,12 @@ type bufPool struct { | ||||
| 	mutex sync.Mutex | ||||
| } | ||||
| 
 | ||||
| func newBufPool(name string, n int, defaultSize int) *bufPool { | ||||
| 	pool := bufPool{ | ||||
| func New(name string, n int, defaultSize int) *Pool { | ||||
| 	pool := Pool{ | ||||
| 		name:        name, | ||||
| 		defaultSize: defaultSize, | ||||
| 		buffers:     make([]*bytes.Buffer, n), | ||||
| 		calls:       make(intSlice, conf.BufferPoolCalibrationThreshold), | ||||
| 		calls:       make(intSlice, config.BufferPoolCalibrationThreshold), | ||||
| 	} | ||||
| 
 | ||||
| 	for i := range pool.buffers { | ||||
| @@ -40,7 +44,7 @@ func newBufPool(name string, n int, defaultSize int) *bufPool { | ||||
| 	return &pool | ||||
| } | ||||
| 
 | ||||
| func (p *bufPool) calibrateAndClean() { | ||||
| func (p *Pool) calibrateAndClean() { | ||||
| 	sort.Sort(p.calls) | ||||
| 
 | ||||
| 	pos := int(float64(len(p.calls)) * 0.95) | ||||
| @@ -49,8 +53,8 @@ func (p *bufPool) calibrateAndClean() { | ||||
| 	p.callInd = 0 | ||||
| 	p.maxSize = p.normalizeSize(score) | ||||
| 
 | ||||
| 	p.defaultSize = maxInt(p.defaultSize, p.calls[0]) | ||||
| 	p.maxSize = maxInt(p.defaultSize, p.maxSize) | ||||
| 	p.defaultSize = imath.Max(p.defaultSize, p.calls[0]) | ||||
| 	p.maxSize = imath.Max(p.defaultSize, p.maxSize) | ||||
| 
 | ||||
| 	cleaned := false | ||||
| 
 | ||||
| @@ -65,11 +69,11 @@ func (p *bufPool) calibrateAndClean() { | ||||
| 		runtime.GC() | ||||
| 	} | ||||
| 
 | ||||
| 	setPrometheusBufferDefaultSize(p.name, p.defaultSize) | ||||
| 	setPrometheusBufferMaxSize(p.name, p.maxSize) | ||||
| 	prometheus.SetBufferDefaultSize(p.name, p.defaultSize) | ||||
| 	prometheus.SetBufferMaxSize(p.name, p.maxSize) | ||||
| } | ||||
| 
 | ||||
| func (p *bufPool) Get(size int) *bytes.Buffer { | ||||
| func (p *Pool) Get(size int) *bytes.Buffer { | ||||
| 	p.mutex.Lock() | ||||
| 	defer p.mutex.Unlock() | ||||
| 
 | ||||
| @@ -111,7 +115,7 @@ func (p *bufPool) Get(size int) *bytes.Buffer { | ||||
| 
 | ||||
| 	buf.Reset() | ||||
| 
 | ||||
| 	growSize := maxInt(size, p.defaultSize) | ||||
| 	growSize := imath.Max(size, p.defaultSize) | ||||
| 
 | ||||
| 	if growSize > buf.Cap() { | ||||
| 		buf.Grow(growSize) | ||||
| @@ -120,7 +124,7 @@ func (p *bufPool) Get(size int) *bytes.Buffer { | ||||
| 	return buf | ||||
| } | ||||
| 
 | ||||
| func (p *bufPool) Put(buf *bytes.Buffer) { | ||||
| func (p *Pool) Put(buf *bytes.Buffer) { | ||||
| 	p.mutex.Lock() | ||||
| 	defer p.mutex.Unlock() | ||||
| 
 | ||||
| @@ -142,7 +146,7 @@ func (p *bufPool) Put(buf *bytes.Buffer) { | ||||
| 			p.buffers[i] = buf | ||||
| 
 | ||||
| 			if buf.Cap() > 0 { | ||||
| 				observePrometheusBufferSize(p.name, buf.Cap()) | ||||
| 				prometheus.ObserveBufferSize(p.name, buf.Cap()) | ||||
| 			} | ||||
| 
 | ||||
| 			return | ||||
| @@ -150,6 +154,6 @@ func (p *bufPool) Put(buf *bytes.Buffer) { | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| func (p *bufPool) normalizeSize(n int) int { | ||||
| func (p *Pool) normalizeSize(n int) int { | ||||
| 	return (n/bytes.MinRead + 2) * bytes.MinRead | ||||
| } | ||||
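The buffer pool moves out of package main into its own bufpool package: bufPool/newBufPool become the exported Pool/New, the global conf struct is replaced by the config package, maxInt/minInt by imath.Max/imath.Min, and the Prometheus helpers by the metrics/prometheus package. A minimal sketch of how the refactored package might be consumed; the pool name and sizes are illustrative rather than taken from this commit, and it assumes the prometheus helpers no-op when metrics are disabled:

package main

import (
	"fmt"

	"github.com/imgproxy/imgproxy/v2/bufpool"
)

func main() {
	// A pool named "download" with 4 reusable buffers and a 4 KiB default size.
	// Default and max sizes are recalibrated as the pool observes real usage
	// (see calibrateAndClean above).
	pool := bufpool.New("download", 4, 4096)

	buf := pool.Get(64 * 1024) // returns a buffer grown to at least 64 KiB
	buf.WriteString("hello")
	fmt.Println(buf.Len())

	pool.Put(buf) // hand the buffer back so it can be reused (and measured)
}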
| @@ -1,26 +1,28 @@ | ||||
| package main | ||||
| package bufreader | ||||
| 
 | ||||
| import ( | ||||
| 	"bufio" | ||||
| 	"bytes" | ||||
| 	"io" | ||||
| 
 | ||||
| 	"github.com/imgproxy/imgproxy/v2/imath" | ||||
| ) | ||||
| 
 | ||||
| type bufReader struct { | ||||
| type Reader struct { | ||||
| 	r   io.Reader | ||||
| 	buf *bytes.Buffer | ||||
| 	cur int | ||||
| } | ||||
| 
 | ||||
| func newBufReader(r io.Reader, buf *bytes.Buffer) *bufReader { | ||||
| 	br := bufReader{ | ||||
| func New(r io.Reader, buf *bytes.Buffer) *Reader { | ||||
| 	br := Reader{ | ||||
| 		r:   r, | ||||
| 		buf: buf, | ||||
| 	} | ||||
| 	return &br | ||||
| } | ||||
| 
 | ||||
| func (br *bufReader) Read(p []byte) (int, error) { | ||||
| func (br *Reader) Read(p []byte) (int, error) { | ||||
| 	if err := br.fill(br.cur + len(p)); err != nil { | ||||
| 		return 0, err | ||||
| 	} | ||||
| @@ -30,7 +32,7 @@ func (br *bufReader) Read(p []byte) (int, error) { | ||||
| 	return n, nil | ||||
| } | ||||
| 
 | ||||
| func (br *bufReader) ReadByte() (byte, error) { | ||||
| func (br *Reader) ReadByte() (byte, error) { | ||||
| 	if err := br.fill(br.cur + 1); err != nil { | ||||
| 		return 0, err | ||||
| 	} | ||||
| @@ -40,7 +42,7 @@ func (br *bufReader) ReadByte() (byte, error) { | ||||
| 	return b, nil | ||||
| } | ||||
| 
 | ||||
| func (br *bufReader) Discard(n int) (int, error) { | ||||
| func (br *Reader) Discard(n int) (int, error) { | ||||
| 	if n < 0 { | ||||
| 		return 0, bufio.ErrNegativeCount | ||||
| 	} | ||||
| @@ -52,12 +54,12 @@ func (br *bufReader) Discard(n int) (int, error) { | ||||
| 		return 0, err | ||||
| 	} | ||||
| 
 | ||||
| 	n = minInt(n, br.buf.Len()-br.cur) | ||||
| 	n = imath.Min(n, br.buf.Len()-br.cur) | ||||
| 	br.cur += n | ||||
| 	return n, nil | ||||
| } | ||||
| 
 | ||||
| func (br *bufReader) Peek(n int) ([]byte, error) { | ||||
| func (br *Reader) Peek(n int) ([]byte, error) { | ||||
| 	if n < 0 { | ||||
| 		return []byte{}, bufio.ErrNegativeCount | ||||
| 	} | ||||
| @@ -76,18 +78,18 @@ func (br *bufReader) Peek(n int) ([]byte, error) { | ||||
| 	return br.buf.Bytes()[br.cur : br.cur+n], nil | ||||
| } | ||||
| 
 | ||||
| func (br *bufReader) Flush() error { | ||||
| func (br *Reader) Flush() error { | ||||
| 	_, err := br.buf.ReadFrom(br.r) | ||||
| 	return err | ||||
| } | ||||
| 
 | ||||
| func (br *bufReader) fill(need int) error { | ||||
| func (br *Reader) fill(need int) error { | ||||
| 	n := need - br.buf.Len() | ||||
| 	if n <= 0 { | ||||
| 		return nil | ||||
| 	} | ||||
| 
 | ||||
| 	n = maxInt(4096, n) | ||||
| 	n = imath.Max(4096, n) | ||||
| 
 | ||||
| 	if _, err := br.buf.ReadFrom(io.LimitReader(br.r, int64(n))); err != nil { | ||||
| 		return err | ||||
							
								
								
									
config.go (607 lines, deleted)
							| @@ -1,607 +0,0 @@ | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"bufio" | ||||
| 	"encoding/hex" | ||||
| 	"flag" | ||||
| 	"fmt" | ||||
| 	"math" | ||||
| 	"os" | ||||
| 	"runtime" | ||||
| 	"strconv" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| func intEnvConfig(i *int, name string) { | ||||
| 	if env, err := strconv.Atoi(os.Getenv(name)); err == nil { | ||||
| 		*i = env | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func floatEnvConfig(i *float64, name string) { | ||||
| 	if env, err := strconv.ParseFloat(os.Getenv(name), 64); err == nil { | ||||
| 		*i = env | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func megaIntEnvConfig(f *int, name string) { | ||||
| 	if env, err := strconv.ParseFloat(os.Getenv(name), 64); err == nil { | ||||
| 		*f = int(env * 1000000) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func strEnvConfig(s *string, name string) { | ||||
| 	if env := os.Getenv(name); len(env) > 0 { | ||||
| 		*s = env | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func strSliceEnvConfig(s *[]string, name string) { | ||||
| 	if env := os.Getenv(name); len(env) > 0 { | ||||
| 		parts := strings.Split(env, ",") | ||||
|  | ||||
| 		for i, p := range parts { | ||||
| 			parts[i] = strings.TrimSpace(p) | ||||
| 		} | ||||
|  | ||||
| 		*s = parts | ||||
|  | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	*s = []string{} | ||||
| } | ||||
|  | ||||
| func boolEnvConfig(b *bool, name string) { | ||||
| 	if env, err := strconv.ParseBool(os.Getenv(name)); err == nil { | ||||
| 		*b = env | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func imageTypesEnvConfig(it *[]imageType, name string) { | ||||
| 	*it = []imageType{} | ||||
|  | ||||
| 	if env := os.Getenv(name); len(env) > 0 { | ||||
| 		parts := strings.Split(env, ",") | ||||
|  | ||||
| 		for _, p := range parts { | ||||
| 			pt := strings.TrimSpace(p) | ||||
| 			if t, ok := imageTypes[pt]; ok { | ||||
| 				*it = append(*it, t) | ||||
| 			} else { | ||||
| 				logWarning("Unknown image format to skip: %s", pt) | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func formatQualityEnvConfig(m map[imageType]int, name string) { | ||||
| 	if env := os.Getenv(name); len(env) > 0 { | ||||
| 		parts := strings.Split(env, ",") | ||||
|  | ||||
| 		for _, p := range parts { | ||||
| 			i := strings.Index(p, "=") | ||||
| 			if i < 0 { | ||||
| 				logWarning("Invalid format quality string: %s", p) | ||||
| 				continue | ||||
| 			} | ||||
|  | ||||
| 			imgtypeStr, qStr := strings.TrimSpace(p[:i]), strings.TrimSpace(p[i+1:]) | ||||
|  | ||||
| 			imgtype, ok := imageTypes[imgtypeStr] | ||||
| 			if !ok { | ||||
| 				logWarning("Invalid format: %s", p) | ||||
| 			} | ||||
|  | ||||
| 			q, err := strconv.Atoi(qStr) | ||||
| 			if err != nil || q <= 0 || q > 100 { | ||||
| 				logWarning("Invalid quality: %s", p) | ||||
| 			} | ||||
|  | ||||
| 			m[imgtype] = q | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func hexEnvConfig(b *[]securityKey, name string) error { | ||||
| 	var err error | ||||
|  | ||||
| 	if env := os.Getenv(name); len(env) > 0 { | ||||
| 		parts := strings.Split(env, ",") | ||||
|  | ||||
| 		keys := make([]securityKey, len(parts)) | ||||
|  | ||||
| 		for i, part := range parts { | ||||
| 			if keys[i], err = hex.DecodeString(part); err != nil { | ||||
| 				return fmt.Errorf("%s expected to be hex-encoded strings. Invalid: %s\n", name, part) | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		*b = keys | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func hexFileConfig(b *[]securityKey, filepath string) error { | ||||
| 	if len(filepath) == 0 { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	f, err := os.Open(filepath) | ||||
| 	if err != nil { | ||||
| 		return fmt.Errorf("Can't open file %s\n", filepath) | ||||
| 	} | ||||
|  | ||||
| 	keys := []securityKey{} | ||||
|  | ||||
| 	scanner := bufio.NewScanner(f) | ||||
| 	for scanner.Scan() { | ||||
| 		part := scanner.Text() | ||||
|  | ||||
| 		if len(part) == 0 { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		if key, err := hex.DecodeString(part); err == nil { | ||||
| 			keys = append(keys, key) | ||||
| 		} else { | ||||
| 			return fmt.Errorf("%s expected to contain hex-encoded strings. Invalid: %s\n", filepath, part) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err := scanner.Err(); err != nil { | ||||
| 		return fmt.Errorf("Failed to read file %s: %s", filepath, err) | ||||
| 	} | ||||
|  | ||||
| 	*b = keys | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func presetEnvConfig(p presets, name string) error { | ||||
| 	if env := os.Getenv(name); len(env) > 0 { | ||||
| 		presetStrings := strings.Split(env, ",") | ||||
|  | ||||
| 		for _, presetStr := range presetStrings { | ||||
| 			if err := parsePreset(p, presetStr); err != nil { | ||||
| 				return fmt.Errorf(err.Error()) | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func presetFileConfig(p presets, filepath string) error { | ||||
| 	if len(filepath) == 0 { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	f, err := os.Open(filepath) | ||||
| 	if err != nil { | ||||
| 		return fmt.Errorf("Can't open file %s\n", filepath) | ||||
| 	} | ||||
|  | ||||
| 	scanner := bufio.NewScanner(f) | ||||
| 	for scanner.Scan() { | ||||
| 		if err := parsePreset(p, scanner.Text()); err != nil { | ||||
| 			return fmt.Errorf(err.Error()) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err := scanner.Err(); err != nil { | ||||
| 		return fmt.Errorf("Failed to read presets file: %s", err) | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| type config struct { | ||||
| 	Network          string | ||||
| 	Bind             string | ||||
| 	ReadTimeout      int | ||||
| 	WriteTimeout     int | ||||
| 	KeepAliveTimeout int | ||||
| 	DownloadTimeout  int | ||||
| 	Concurrency      int | ||||
| 	MaxClients       int | ||||
|  | ||||
| 	TTL                     int | ||||
| 	CacheControlPassthrough bool | ||||
| 	SetCanonicalHeader      bool | ||||
|  | ||||
| 	SoReuseport bool | ||||
|  | ||||
| 	PathPrefix string | ||||
|  | ||||
| 	MaxSrcResolution   int | ||||
| 	MaxSrcFileSize     int | ||||
| 	MaxAnimationFrames int | ||||
| 	MaxSvgCheckBytes   int | ||||
|  | ||||
| 	JpegProgressive       bool | ||||
| 	PngInterlaced         bool | ||||
| 	PngQuantize           bool | ||||
| 	PngQuantizationColors int | ||||
| 	Quality               int | ||||
| 	FormatQuality         map[imageType]int | ||||
| 	StripMetadata         bool | ||||
| 	StripColorProfile     bool | ||||
| 	AutoRotate            bool | ||||
|  | ||||
| 	EnableWebpDetection bool | ||||
| 	EnforceWebp         bool | ||||
| 	EnableAvifDetection bool | ||||
| 	EnforceAvif         bool | ||||
| 	EnableClientHints   bool | ||||
|  | ||||
| 	SkipProcessingFormats []imageType | ||||
|  | ||||
| 	UseLinearColorspace bool | ||||
| 	DisableShrinkOnLoad bool | ||||
|  | ||||
| 	Keys          []securityKey | ||||
| 	Salts         []securityKey | ||||
| 	AllowInsecure bool | ||||
| 	SignatureSize int | ||||
|  | ||||
| 	Secret string | ||||
|  | ||||
| 	AllowOrigin string | ||||
|  | ||||
| 	UserAgent string | ||||
|  | ||||
| 	IgnoreSslVerification bool | ||||
| 	DevelopmentErrorsMode bool | ||||
|  | ||||
| 	AllowedSources      []string | ||||
| 	LocalFileSystemRoot string | ||||
| 	S3Enabled           bool | ||||
| 	S3Region            string | ||||
| 	S3Endpoint          string | ||||
| 	GCSEnabled          bool | ||||
| 	GCSKey              string | ||||
| 	ABSEnabled          bool | ||||
| 	ABSName             string | ||||
| 	ABSKey              string | ||||
| 	ABSEndpoint         string | ||||
|  | ||||
| 	ETagEnabled bool | ||||
|  | ||||
| 	BaseURL string | ||||
|  | ||||
| 	Presets     presets | ||||
| 	OnlyPresets bool | ||||
|  | ||||
| 	WatermarkData    string | ||||
| 	WatermarkPath    string | ||||
| 	WatermarkURL     string | ||||
| 	WatermarkOpacity float64 | ||||
|  | ||||
| 	FallbackImageData     string | ||||
| 	FallbackImagePath     string | ||||
| 	FallbackImageURL      string | ||||
| 	FallbackImageHTTPCode int | ||||
|  | ||||
| 	DataDogEnable bool | ||||
|  | ||||
| 	NewRelicAppName string | ||||
| 	NewRelicKey     string | ||||
|  | ||||
| 	PrometheusBind      string | ||||
| 	PrometheusNamespace string | ||||
|  | ||||
| 	BugsnagKey        string | ||||
| 	BugsnagStage      string | ||||
| 	HoneybadgerKey    string | ||||
| 	HoneybadgerEnv    string | ||||
| 	SentryDSN         string | ||||
| 	SentryEnvironment string | ||||
| 	SentryRelease     string | ||||
|  | ||||
| 	ReportDownloadingErrors bool | ||||
|  | ||||
| 	EnableDebugHeaders bool | ||||
|  | ||||
| 	FreeMemoryInterval             int | ||||
| 	DownloadBufferSize             int | ||||
| 	BufferPoolCalibrationThreshold int | ||||
| } | ||||
|  | ||||
| var conf = config{ | ||||
| 	Network:                        "tcp", | ||||
| 	Bind:                           ":8080", | ||||
| 	ReadTimeout:                    10, | ||||
| 	WriteTimeout:                   10, | ||||
| 	KeepAliveTimeout:               10, | ||||
| 	DownloadTimeout:                5, | ||||
| 	Concurrency:                    runtime.NumCPU() * 2, | ||||
| 	TTL:                            3600, | ||||
| 	MaxSrcResolution:               16800000, | ||||
| 	MaxAnimationFrames:             1, | ||||
| 	MaxSvgCheckBytes:               32 * 1024, | ||||
| 	SignatureSize:                  32, | ||||
| 	PngQuantizationColors:          256, | ||||
| 	Quality:                        80, | ||||
| 	FormatQuality:                  map[imageType]int{imageTypeAVIF: 50}, | ||||
| 	StripMetadata:                  true, | ||||
| 	StripColorProfile:              true, | ||||
| 	AutoRotate:                     true, | ||||
| 	UserAgent:                      fmt.Sprintf("imgproxy/%s", version), | ||||
| 	Presets:                        make(presets), | ||||
| 	WatermarkOpacity:               1, | ||||
| 	FallbackImageHTTPCode:          200, | ||||
| 	BugsnagStage:                   "production", | ||||
| 	HoneybadgerEnv:                 "production", | ||||
| 	SentryEnvironment:              "production", | ||||
| 	SentryRelease:                  fmt.Sprintf("imgproxy/%s", version), | ||||
| 	ReportDownloadingErrors:        true, | ||||
| 	FreeMemoryInterval:             10, | ||||
| 	BufferPoolCalibrationThreshold: 1024, | ||||
| } | ||||
|  | ||||
| func configure() error { | ||||
| 	keyPath := flag.String("keypath", "", "path of the file with hex-encoded key") | ||||
| 	saltPath := flag.String("saltpath", "", "path of the file with hex-encoded salt") | ||||
| 	presetsPath := flag.String("presets", "", "path of the file with presets") | ||||
| 	flag.Parse() | ||||
|  | ||||
| 	if port := os.Getenv("PORT"); len(port) > 0 { | ||||
| 		conf.Bind = fmt.Sprintf(":%s", port) | ||||
| 	} | ||||
|  | ||||
| 	strEnvConfig(&conf.Network, "IMGPROXY_NETWORK") | ||||
| 	strEnvConfig(&conf.Bind, "IMGPROXY_BIND") | ||||
| 	intEnvConfig(&conf.ReadTimeout, "IMGPROXY_READ_TIMEOUT") | ||||
| 	intEnvConfig(&conf.WriteTimeout, "IMGPROXY_WRITE_TIMEOUT") | ||||
| 	intEnvConfig(&conf.KeepAliveTimeout, "IMGPROXY_KEEP_ALIVE_TIMEOUT") | ||||
| 	intEnvConfig(&conf.DownloadTimeout, "IMGPROXY_DOWNLOAD_TIMEOUT") | ||||
| 	intEnvConfig(&conf.Concurrency, "IMGPROXY_CONCURRENCY") | ||||
| 	intEnvConfig(&conf.MaxClients, "IMGPROXY_MAX_CLIENTS") | ||||
|  | ||||
| 	intEnvConfig(&conf.TTL, "IMGPROXY_TTL") | ||||
| 	boolEnvConfig(&conf.CacheControlPassthrough, "IMGPROXY_CACHE_CONTROL_PASSTHROUGH") | ||||
| 	boolEnvConfig(&conf.SetCanonicalHeader, "IMGPROXY_SET_CANONICAL_HEADER") | ||||
|  | ||||
| 	boolEnvConfig(&conf.SoReuseport, "IMGPROXY_SO_REUSEPORT") | ||||
|  | ||||
| 	strEnvConfig(&conf.PathPrefix, "IMGPROXY_PATH_PREFIX") | ||||
|  | ||||
| 	megaIntEnvConfig(&conf.MaxSrcResolution, "IMGPROXY_MAX_SRC_RESOLUTION") | ||||
| 	intEnvConfig(&conf.MaxSrcFileSize, "IMGPROXY_MAX_SRC_FILE_SIZE") | ||||
| 	intEnvConfig(&conf.MaxSvgCheckBytes, "IMGPROXY_MAX_SVG_CHECK_BYTES") | ||||
|  | ||||
| 	intEnvConfig(&conf.MaxAnimationFrames, "IMGPROXY_MAX_ANIMATION_FRAMES") | ||||
|  | ||||
| 	strSliceEnvConfig(&conf.AllowedSources, "IMGPROXY_ALLOWED_SOURCES") | ||||
|  | ||||
| 	boolEnvConfig(&conf.JpegProgressive, "IMGPROXY_JPEG_PROGRESSIVE") | ||||
| 	boolEnvConfig(&conf.PngInterlaced, "IMGPROXY_PNG_INTERLACED") | ||||
| 	boolEnvConfig(&conf.PngQuantize, "IMGPROXY_PNG_QUANTIZE") | ||||
| 	intEnvConfig(&conf.PngQuantizationColors, "IMGPROXY_PNG_QUANTIZATION_COLORS") | ||||
| 	intEnvConfig(&conf.Quality, "IMGPROXY_QUALITY") | ||||
| 	formatQualityEnvConfig(conf.FormatQuality, "IMGPROXY_FORMAT_QUALITY") | ||||
| 	boolEnvConfig(&conf.StripMetadata, "IMGPROXY_STRIP_METADATA") | ||||
| 	boolEnvConfig(&conf.StripColorProfile, "IMGPROXY_STRIP_COLOR_PROFILE") | ||||
| 	boolEnvConfig(&conf.AutoRotate, "IMGPROXY_AUTO_ROTATE") | ||||
|  | ||||
| 	boolEnvConfig(&conf.EnableWebpDetection, "IMGPROXY_ENABLE_WEBP_DETECTION") | ||||
| 	boolEnvConfig(&conf.EnforceWebp, "IMGPROXY_ENFORCE_WEBP") | ||||
| 	boolEnvConfig(&conf.EnableAvifDetection, "IMGPROXY_ENABLE_AVIF_DETECTION") | ||||
| 	boolEnvConfig(&conf.EnforceAvif, "IMGPROXY_ENFORCE_AVIF") | ||||
| 	boolEnvConfig(&conf.EnableClientHints, "IMGPROXY_ENABLE_CLIENT_HINTS") | ||||
|  | ||||
| 	imageTypesEnvConfig(&conf.SkipProcessingFormats, "IMGPROXY_SKIP_PROCESSING_FORMATS") | ||||
|  | ||||
| 	boolEnvConfig(&conf.UseLinearColorspace, "IMGPROXY_USE_LINEAR_COLORSPACE") | ||||
| 	boolEnvConfig(&conf.DisableShrinkOnLoad, "IMGPROXY_DISABLE_SHRINK_ON_LOAD") | ||||
|  | ||||
| 	if err := hexEnvConfig(&conf.Keys, "IMGPROXY_KEY"); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	if err := hexEnvConfig(&conf.Salts, "IMGPROXY_SALT"); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	intEnvConfig(&conf.SignatureSize, "IMGPROXY_SIGNATURE_SIZE") | ||||
|  | ||||
| 	if err := hexFileConfig(&conf.Keys, *keyPath); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	if err := hexFileConfig(&conf.Salts, *saltPath); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	strEnvConfig(&conf.Secret, "IMGPROXY_SECRET") | ||||
|  | ||||
| 	strEnvConfig(&conf.AllowOrigin, "IMGPROXY_ALLOW_ORIGIN") | ||||
|  | ||||
| 	strEnvConfig(&conf.UserAgent, "IMGPROXY_USER_AGENT") | ||||
|  | ||||
| 	boolEnvConfig(&conf.IgnoreSslVerification, "IMGPROXY_IGNORE_SSL_VERIFICATION") | ||||
| 	boolEnvConfig(&conf.DevelopmentErrorsMode, "IMGPROXY_DEVELOPMENT_ERRORS_MODE") | ||||
|  | ||||
| 	strEnvConfig(&conf.LocalFileSystemRoot, "IMGPROXY_LOCAL_FILESYSTEM_ROOT") | ||||
|  | ||||
| 	boolEnvConfig(&conf.S3Enabled, "IMGPROXY_USE_S3") | ||||
| 	strEnvConfig(&conf.S3Region, "IMGPROXY_S3_REGION") | ||||
| 	strEnvConfig(&conf.S3Endpoint, "IMGPROXY_S3_ENDPOINT") | ||||
|  | ||||
| 	boolEnvConfig(&conf.GCSEnabled, "IMGPROXY_USE_GCS") | ||||
| 	strEnvConfig(&conf.GCSKey, "IMGPROXY_GCS_KEY") | ||||
|  | ||||
| 	boolEnvConfig(&conf.ABSEnabled, "IMGPROXY_USE_ABS") | ||||
| 	strEnvConfig(&conf.ABSName, "IMGPROXY_ABS_NAME") | ||||
| 	strEnvConfig(&conf.ABSKey, "IMGPROXY_ABS_KEY") | ||||
| 	strEnvConfig(&conf.ABSEndpoint, "IMGPROXY_ABS_ENDPOINT") | ||||
|  | ||||
| 	boolEnvConfig(&conf.ETagEnabled, "IMGPROXY_USE_ETAG") | ||||
|  | ||||
| 	strEnvConfig(&conf.BaseURL, "IMGPROXY_BASE_URL") | ||||
|  | ||||
| 	if err := presetEnvConfig(conf.Presets, "IMGPROXY_PRESETS"); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	if err := presetFileConfig(conf.Presets, *presetsPath); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	boolEnvConfig(&conf.OnlyPresets, "IMGPROXY_ONLY_PRESETS") | ||||
|  | ||||
| 	strEnvConfig(&conf.WatermarkData, "IMGPROXY_WATERMARK_DATA") | ||||
| 	strEnvConfig(&conf.WatermarkPath, "IMGPROXY_WATERMARK_PATH") | ||||
| 	strEnvConfig(&conf.WatermarkURL, "IMGPROXY_WATERMARK_URL") | ||||
| 	floatEnvConfig(&conf.WatermarkOpacity, "IMGPROXY_WATERMARK_OPACITY") | ||||
|  | ||||
| 	strEnvConfig(&conf.FallbackImageData, "IMGPROXY_FALLBACK_IMAGE_DATA") | ||||
| 	strEnvConfig(&conf.FallbackImagePath, "IMGPROXY_FALLBACK_IMAGE_PATH") | ||||
| 	strEnvConfig(&conf.FallbackImageURL, "IMGPROXY_FALLBACK_IMAGE_URL") | ||||
| 	intEnvConfig(&conf.FallbackImageHTTPCode, "IMGPROXY_FALLBACK_IMAGE_HTTP_CODE") | ||||
|  | ||||
| 	boolEnvConfig(&conf.DataDogEnable, "IMGPROXY_DATADOG_ENABLE") | ||||
|  | ||||
| 	strEnvConfig(&conf.NewRelicAppName, "IMGPROXY_NEW_RELIC_APP_NAME") | ||||
| 	strEnvConfig(&conf.NewRelicKey, "IMGPROXY_NEW_RELIC_KEY") | ||||
|  | ||||
| 	strEnvConfig(&conf.PrometheusBind, "IMGPROXY_PROMETHEUS_BIND") | ||||
| 	strEnvConfig(&conf.PrometheusNamespace, "IMGPROXY_PROMETHEUS_NAMESPACE") | ||||
|  | ||||
| 	strEnvConfig(&conf.BugsnagKey, "IMGPROXY_BUGSNAG_KEY") | ||||
| 	strEnvConfig(&conf.BugsnagStage, "IMGPROXY_BUGSNAG_STAGE") | ||||
| 	strEnvConfig(&conf.HoneybadgerKey, "IMGPROXY_HONEYBADGER_KEY") | ||||
| 	strEnvConfig(&conf.HoneybadgerEnv, "IMGPROXY_HONEYBADGER_ENV") | ||||
| 	strEnvConfig(&conf.SentryDSN, "IMGPROXY_SENTRY_DSN") | ||||
| 	strEnvConfig(&conf.SentryEnvironment, "IMGPROXY_SENTRY_ENVIRONMENT") | ||||
| 	strEnvConfig(&conf.SentryRelease, "IMGPROXY_SENTRY_RELEASE") | ||||
| 	boolEnvConfig(&conf.ReportDownloadingErrors, "IMGPROXY_REPORT_DOWNLOADING_ERRORS") | ||||
| 	boolEnvConfig(&conf.EnableDebugHeaders, "IMGPROXY_ENABLE_DEBUG_HEADERS") | ||||
|  | ||||
| 	intEnvConfig(&conf.FreeMemoryInterval, "IMGPROXY_FREE_MEMORY_INTERVAL") | ||||
| 	intEnvConfig(&conf.DownloadBufferSize, "IMGPROXY_DOWNLOAD_BUFFER_SIZE") | ||||
| 	intEnvConfig(&conf.BufferPoolCalibrationThreshold, "IMGPROXY_BUFFER_POOL_CALIBRATION_THRESHOLD") | ||||
|  | ||||
| 	if len(conf.Keys) != len(conf.Salts) { | ||||
| 		return fmt.Errorf("Number of keys and number of salts should be equal. Keys: %d, salts: %d", len(conf.Keys), len(conf.Salts)) | ||||
| 	} | ||||
| 	if len(conf.Keys) == 0 { | ||||
| 		logWarning("No keys defined, so signature checking is disabled") | ||||
| 		conf.AllowInsecure = true | ||||
| 	} | ||||
| 	if len(conf.Salts) == 0 { | ||||
| 		logWarning("No salts defined, so signature checking is disabled") | ||||
| 		conf.AllowInsecure = true | ||||
| 	} | ||||
|  | ||||
| 	if conf.SignatureSize < 1 || conf.SignatureSize > 32 { | ||||
| 		return fmt.Errorf("Signature size should be within 1 and 32, now - %d\n", conf.SignatureSize) | ||||
| 	} | ||||
|  | ||||
| 	if len(conf.Bind) == 0 { | ||||
| 		return fmt.Errorf("Bind address is not defined") | ||||
| 	} | ||||
|  | ||||
| 	if conf.ReadTimeout <= 0 { | ||||
| 		return fmt.Errorf("Read timeout should be greater than 0, now - %d\n", conf.ReadTimeout) | ||||
| 	} | ||||
|  | ||||
| 	if conf.WriteTimeout <= 0 { | ||||
| 		return fmt.Errorf("Write timeout should be greater than 0, now - %d\n", conf.WriteTimeout) | ||||
| 	} | ||||
| 	if conf.KeepAliveTimeout < 0 { | ||||
| 		return fmt.Errorf("KeepAlive timeout should be greater than or equal to 0, now - %d\n", conf.KeepAliveTimeout) | ||||
| 	} | ||||
|  | ||||
| 	if conf.DownloadTimeout <= 0 { | ||||
| 		return fmt.Errorf("Download timeout should be greater than 0, now - %d\n", conf.DownloadTimeout) | ||||
| 	} | ||||
|  | ||||
| 	if conf.Concurrency <= 0 { | ||||
| 		return fmt.Errorf("Concurrency should be greater than 0, now - %d\n", conf.Concurrency) | ||||
| 	} | ||||
|  | ||||
| 	if conf.MaxClients <= 0 { | ||||
| 		conf.MaxClients = conf.Concurrency * 10 | ||||
| 	} | ||||
|  | ||||
| 	if conf.TTL <= 0 { | ||||
| 		return fmt.Errorf("TTL should be greater than 0, now - %d\n", conf.TTL) | ||||
| 	} | ||||
|  | ||||
| 	if conf.MaxSrcResolution <= 0 { | ||||
| 		return fmt.Errorf("Max src resolution should be greater than 0, now - %d\n", conf.MaxSrcResolution) | ||||
| 	} | ||||
|  | ||||
| 	if conf.MaxSrcFileSize < 0 { | ||||
| 		return fmt.Errorf("Max src file size should be greater than or equal to 0, now - %d\n", conf.MaxSrcFileSize) | ||||
| 	} | ||||
|  | ||||
| 	if conf.MaxAnimationFrames <= 0 { | ||||
| 		return fmt.Errorf("Max animation frames should be greater than 0, now - %d\n", conf.MaxAnimationFrames) | ||||
| 	} | ||||
|  | ||||
| 	if conf.PngQuantizationColors < 2 { | ||||
| 		return fmt.Errorf("Png quantization colors should be greater than 1, now - %d\n", conf.PngQuantizationColors) | ||||
| 	} else if conf.PngQuantizationColors > 256 { | ||||
| 		return fmt.Errorf("Png quantization colors can't be greater than 256, now - %d\n", conf.PngQuantizationColors) | ||||
| 	} | ||||
|  | ||||
| 	if conf.Quality <= 0 { | ||||
| 		return fmt.Errorf("Quality should be greater than 0, now - %d\n", conf.Quality) | ||||
| 	} else if conf.Quality > 100 { | ||||
| 		return fmt.Errorf("Quality can't be greater than 100, now - %d\n", conf.Quality) | ||||
| 	} | ||||
|  | ||||
| 	if conf.IgnoreSslVerification { | ||||
| 		logWarning("Ignoring SSL verification is very unsafe") | ||||
| 	} | ||||
|  | ||||
| 	if conf.LocalFileSystemRoot != "" { | ||||
| 		stat, err := os.Stat(conf.LocalFileSystemRoot) | ||||
|  | ||||
| 		if err != nil { | ||||
| 			return fmt.Errorf("Cannot use local directory: %s", err) | ||||
| 		} | ||||
|  | ||||
| 		if !stat.IsDir() { | ||||
| 			return fmt.Errorf("Cannot use local directory: not a directory") | ||||
| 		} | ||||
|  | ||||
| 		if conf.LocalFileSystemRoot == "/" { | ||||
| 			logWarning("Exposing root via IMGPROXY_LOCAL_FILESYSTEM_ROOT is unsafe") | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if _, ok := os.LookupEnv("IMGPROXY_USE_GCS"); !ok && len(conf.GCSKey) > 0 { | ||||
| 		logWarning("Set IMGPROXY_USE_GCS to true since it may be required by future versions to enable GCS support") | ||||
| 		conf.GCSEnabled = true | ||||
| 	} | ||||
|  | ||||
| 	if conf.WatermarkOpacity <= 0 { | ||||
| 		return fmt.Errorf("Watermark opacity should be greater than 0") | ||||
| 	} else if conf.WatermarkOpacity > 1 { | ||||
| 		return fmt.Errorf("Watermark opacity should be less than or equal to 1") | ||||
| 	} | ||||
|  | ||||
| 	if conf.FallbackImageHTTPCode < 100 || conf.FallbackImageHTTPCode > 599 { | ||||
| 		return fmt.Errorf("Fallback image HTTP code should be between 100 and 599") | ||||
| 	} | ||||
|  | ||||
| 	if len(conf.PrometheusBind) > 0 && conf.PrometheusBind == conf.Bind { | ||||
| 		return fmt.Errorf("Can't use the same binding for the main server and Prometheus") | ||||
| 	} | ||||
|  | ||||
| 	if conf.FreeMemoryInterval <= 0 { | ||||
| 		return fmt.Errorf("Free memory interval should be greater than zero") | ||||
| 	} | ||||
|  | ||||
| 	if conf.DownloadBufferSize < 0 { | ||||
| 		return fmt.Errorf("Download buffer size should be greater than or equal to 0") | ||||
| 	} else if conf.DownloadBufferSize > math.MaxInt32 { | ||||
| 		return fmt.Errorf("Download buffer size can't be greater than %d", math.MaxInt32) | ||||
| 	} | ||||
|  | ||||
| 	if conf.BufferPoolCalibrationThreshold < 64 { | ||||
| 		return fmt.Errorf("Buffer pool calibration threshold should be greater than or equal to 64") | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
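Deleting config.go removes the package-level conf struct from package main; its fields reappear as exported variables of the new config package below, so call sites change from conf.Field to config.Field. A hypothetical call site sketching the shape of that migration (the real call sites are outside this excerpt):

package main

import (
	"fmt"
	"time"

	"github.com/imgproxy/imgproxy/v2/config"
)

// downloadTimeout shows the new access pattern: what used to be
// time.Duration(conf.DownloadTimeout)*time.Second now reads an exported
// package-level variable of the config package.
func downloadTimeout() time.Duration {
	return time.Duration(config.DownloadTimeout) * time.Second
}

func main() {
	fmt.Println(downloadTimeout()) // 5s with the defaults set by Reset below
}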
							
								
								
									
config/config.go (510 lines, new file)
							| @@ -0,0 +1,510 @@ | ||||
| package config | ||||
|  | ||||
| import ( | ||||
| 	"flag" | ||||
| 	"fmt" | ||||
| 	"math" | ||||
| 	"os" | ||||
| 	"runtime" | ||||
|  | ||||
| 	log "github.com/sirupsen/logrus" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config/configurators" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagetype" | ||||
| 	"github.com/imgproxy/imgproxy/v2/version" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	Network          string | ||||
| 	Bind             string | ||||
| 	ReadTimeout      int | ||||
| 	WriteTimeout     int | ||||
| 	KeepAliveTimeout int | ||||
| 	DownloadTimeout  int | ||||
| 	Concurrency      int | ||||
| 	MaxClients       int | ||||
|  | ||||
| 	TTL                     int | ||||
| 	CacheControlPassthrough bool | ||||
| 	SetCanonicalHeader      bool | ||||
|  | ||||
| 	SoReuseport bool | ||||
|  | ||||
| 	PathPrefix string | ||||
|  | ||||
| 	MaxSrcResolution   int | ||||
| 	MaxSrcFileSize     int | ||||
| 	MaxAnimationFrames int | ||||
| 	MaxSvgCheckBytes   int | ||||
|  | ||||
| 	JpegProgressive       bool | ||||
| 	PngInterlaced         bool | ||||
| 	PngQuantize           bool | ||||
| 	PngQuantizationColors int | ||||
| 	Quality               int | ||||
| 	FormatQuality         map[imagetype.Type]int | ||||
| 	StripMetadata         bool | ||||
| 	StripColorProfile     bool | ||||
| 	AutoRotate            bool | ||||
|  | ||||
| 	EnableWebpDetection bool | ||||
| 	EnforceWebp         bool | ||||
| 	EnableAvifDetection bool | ||||
| 	EnforceAvif         bool | ||||
| 	EnableClientHints   bool | ||||
|  | ||||
| 	SkipProcessingFormats []imagetype.Type | ||||
|  | ||||
| 	UseLinearColorspace bool | ||||
| 	DisableShrinkOnLoad bool | ||||
|  | ||||
| 	Keys          [][]byte | ||||
| 	Salts         [][]byte | ||||
| 	SignatureSize int | ||||
|  | ||||
| 	Secret string | ||||
|  | ||||
| 	AllowOrigin string | ||||
|  | ||||
| 	UserAgent string | ||||
|  | ||||
| 	IgnoreSslVerification bool | ||||
| 	DevelopmentErrorsMode bool | ||||
|  | ||||
| 	AllowedSources      []string | ||||
| 	LocalFileSystemRoot string | ||||
| 	S3Enabled           bool | ||||
| 	S3Region            string | ||||
| 	S3Endpoint          string | ||||
| 	GCSEnabled          bool | ||||
| 	GCSKey              string | ||||
| 	ABSEnabled          bool | ||||
| 	ABSName             string | ||||
| 	ABSKey              string | ||||
| 	ABSEndpoint         string | ||||
|  | ||||
| 	ETagEnabled bool | ||||
|  | ||||
| 	BaseURL string | ||||
|  | ||||
| 	Presets     []string | ||||
| 	OnlyPresets bool | ||||
|  | ||||
| 	WatermarkData    string | ||||
| 	WatermarkPath    string | ||||
| 	WatermarkURL     string | ||||
| 	WatermarkOpacity float64 | ||||
|  | ||||
| 	FallbackImageData     string | ||||
| 	FallbackImagePath     string | ||||
| 	FallbackImageURL      string | ||||
| 	FallbackImageHTTPCode int | ||||
|  | ||||
| 	DataDogEnable bool | ||||
|  | ||||
| 	NewRelicAppName string | ||||
| 	NewRelicKey     string | ||||
|  | ||||
| 	PrometheusBind      string | ||||
| 	PrometheusNamespace string | ||||
|  | ||||
| 	BugsnagKey   string | ||||
| 	BugsnagStage string | ||||
|  | ||||
| 	HoneybadgerKey string | ||||
| 	HoneybadgerEnv string | ||||
|  | ||||
| 	SentryDSN         string | ||||
| 	SentryEnvironment string | ||||
| 	SentryRelease     string | ||||
|  | ||||
| 	ReportDownloadingErrors bool | ||||
|  | ||||
| 	EnableDebugHeaders bool | ||||
|  | ||||
| 	FreeMemoryInterval             int | ||||
| 	DownloadBufferSize             int | ||||
| 	BufferPoolCalibrationThreshold int | ||||
| ) | ||||
|  | ||||
| func init() { | ||||
| 	Reset() | ||||
| } | ||||
|  | ||||
| func Reset() { | ||||
| 	Network = "tcp" | ||||
| 	Bind = ":8080" | ||||
| 	ReadTimeout = 10 | ||||
| 	WriteTimeout = 10 | ||||
| 	KeepAliveTimeout = 10 | ||||
| 	DownloadTimeout = 5 | ||||
| 	Concurrency = runtime.NumCPU() * 2 | ||||
| 	MaxClients = 0 | ||||
|  | ||||
| 	TTL = 3600 | ||||
| 	CacheControlPassthrough = false | ||||
| 	SetCanonicalHeader = false | ||||
|  | ||||
| 	SoReuseport = false | ||||
|  | ||||
| 	PathPrefix = "" | ||||
|  | ||||
| 	MaxSrcResolution = 16800000 | ||||
| 	MaxSrcFileSize = 0 | ||||
| 	MaxAnimationFrames = 1 | ||||
| 	MaxSvgCheckBytes = 32 * 1024 | ||||
|  | ||||
| 	JpegProgressive = false | ||||
| 	PngInterlaced = false | ||||
| 	PngQuantize = false | ||||
| 	PngQuantizationColors = 256 | ||||
| 	Quality = 80 | ||||
| 	FormatQuality = map[imagetype.Type]int{imagetype.AVIF: 50} | ||||
| 	StripMetadata = true | ||||
| 	StripColorProfile = true | ||||
| 	AutoRotate = true | ||||
|  | ||||
| 	EnableWebpDetection = false | ||||
| 	EnforceWebp = false | ||||
| 	EnableAvifDetection = false | ||||
| 	EnforceAvif = false | ||||
| 	EnableClientHints = false | ||||
|  | ||||
| 	SkipProcessingFormats = make([]imagetype.Type, 0) | ||||
|  | ||||
| 	UseLinearColorspace = false | ||||
| 	DisableShrinkOnLoad = false | ||||
|  | ||||
| 	Keys = make([][]byte, 0) | ||||
| 	Salts = make([][]byte, 0) | ||||
| 	SignatureSize = 32 | ||||
|  | ||||
| 	Secret = "" | ||||
|  | ||||
| 	AllowOrigin = "" | ||||
|  | ||||
| 	UserAgent = fmt.Sprintf("imgproxy/%s", version.Version()) | ||||
|  | ||||
| 	IgnoreSslVerification = false | ||||
| 	DevelopmentErrorsMode = false | ||||
|  | ||||
| 	AllowedSources = make([]string, 0) | ||||
| 	LocalFileSystemRoot = "" | ||||
| 	S3Enabled = false | ||||
| 	S3Region = "" | ||||
| 	S3Endpoint = "" | ||||
| 	GCSEnabled = false | ||||
| 	GCSKey = "" | ||||
| 	ABSEnabled = false | ||||
| 	ABSName = "" | ||||
| 	ABSKey = "" | ||||
| 	ABSEndpoint = "" | ||||
|  | ||||
| 	ETagEnabled = false | ||||
|  | ||||
| 	BaseURL = "" | ||||
|  | ||||
| 	Presets = make([]string, 0) | ||||
| 	OnlyPresets = false | ||||
|  | ||||
| 	WatermarkData = "" | ||||
| 	WatermarkPath = "" | ||||
| 	WatermarkURL = "" | ||||
| 	WatermarkOpacity = 1 | ||||
|  | ||||
| 	FallbackImageData = "" | ||||
| 	FallbackImagePath = "" | ||||
| 	FallbackImageURL = "" | ||||
| 	FallbackImageHTTPCode = 200 | ||||
|  | ||||
| 	DataDogEnable = false | ||||
|  | ||||
| 	NewRelicAppName = "" | ||||
| 	NewRelicKey = "" | ||||
|  | ||||
| 	PrometheusBind = "" | ||||
| 	PrometheusNamespace = "" | ||||
|  | ||||
| 	BugsnagKey = "" | ||||
| 	BugsnagStage = "production" | ||||
|  | ||||
| 	HoneybadgerKey = "" | ||||
| 	HoneybadgerEnv = "production" | ||||
|  | ||||
| 	SentryDSN = "" | ||||
| 	SentryEnvironment = "production" | ||||
| 	SentryRelease = fmt.Sprintf("imgproxy/%s", version.Version()) | ||||
|  | ||||
| 	ReportDownloadingErrors = true | ||||
|  | ||||
| 	EnableDebugHeaders = false | ||||
|  | ||||
| 	FreeMemoryInterval = 10 | ||||
| 	DownloadBufferSize = 0 | ||||
| 	BufferPoolCalibrationThreshold = 1024 | ||||
| } | ||||
|  | ||||
| func Configure() error { | ||||
| 	keyPath := flag.String("keypath", "", "path of the file with hex-encoded key") | ||||
| 	saltPath := flag.String("saltpath", "", "path of the file with hex-encoded salt") | ||||
| 	presetsPath := flag.String("presets", "", "path of the file with presets") | ||||
| 	flag.Parse() | ||||
|  | ||||
| 	if port := os.Getenv("PORT"); len(port) > 0 { | ||||
| 		Bind = fmt.Sprintf(":%s", port) | ||||
| 	} | ||||
|  | ||||
| 	configurators.String(&Network, "IMGPROXY_NETWORK") | ||||
| 	configurators.String(&Bind, "IMGPROXY_BIND") | ||||
| 	configurators.Int(&ReadTimeout, "IMGPROXY_READ_TIMEOUT") | ||||
| 	configurators.Int(&WriteTimeout, "IMGPROXY_WRITE_TIMEOUT") | ||||
| 	configurators.Int(&KeepAliveTimeout, "IMGPROXY_KEEP_ALIVE_TIMEOUT") | ||||
| 	configurators.Int(&DownloadTimeout, "IMGPROXY_DOWNLOAD_TIMEOUT") | ||||
| 	configurators.Int(&Concurrency, "IMGPROXY_CONCURRENCY") | ||||
| 	configurators.Int(&MaxClients, "IMGPROXY_MAX_CLIENTS") | ||||
|  | ||||
| 	configurators.Int(&TTL, "IMGPROXY_TTL") | ||||
| 	configurators.Bool(&CacheControlPassthrough, "IMGPROXY_CACHE_CONTROL_PASSTHROUGH") | ||||
| 	configurators.Bool(&SetCanonicalHeader, "IMGPROXY_SET_CANONICAL_HEADER") | ||||
|  | ||||
| 	configurators.Bool(&SoReuseport, "IMGPROXY_SO_REUSEPORT") | ||||
|  | ||||
| 	configurators.String(&PathPrefix, "IMGPROXY_PATH_PREFIX") | ||||
|  | ||||
| 	configurators.MegaInt(&MaxSrcResolution, "IMGPROXY_MAX_SRC_RESOLUTION") | ||||
| 	configurators.Int(&MaxSrcFileSize, "IMGPROXY_MAX_SRC_FILE_SIZE") | ||||
| 	configurators.Int(&MaxSvgCheckBytes, "IMGPROXY_MAX_SVG_CHECK_BYTES") | ||||
|  | ||||
| 	configurators.Int(&MaxAnimationFrames, "IMGPROXY_MAX_ANIMATION_FRAMES") | ||||
|  | ||||
| 	configurators.StringSlice(&AllowedSources, "IMGPROXY_ALLOWED_SOURCES") | ||||
|  | ||||
| 	configurators.Bool(&JpegProgressive, "IMGPROXY_JPEG_PROGRESSIVE") | ||||
| 	configurators.Bool(&PngInterlaced, "IMGPROXY_PNG_INTERLACED") | ||||
| 	configurators.Bool(&PngQuantize, "IMGPROXY_PNG_QUANTIZE") | ||||
| 	configurators.Int(&PngQuantizationColors, "IMGPROXY_PNG_QUANTIZATION_COLORS") | ||||
| 	configurators.Int(&Quality, "IMGPROXY_QUALITY") | ||||
| 	if err := configurators.ImageTypesQuality(FormatQuality, "IMGPROXY_FORMAT_QUALITY"); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	configurators.Bool(&StripMetadata, "IMGPROXY_STRIP_METADATA") | ||||
| 	configurators.Bool(&StripColorProfile, "IMGPROXY_STRIP_COLOR_PROFILE") | ||||
| 	configurators.Bool(&AutoRotate, "IMGPROXY_AUTO_ROTATE") | ||||
|  | ||||
| 	configurators.Bool(&EnableWebpDetection, "IMGPROXY_ENABLE_WEBP_DETECTION") | ||||
| 	configurators.Bool(&EnforceWebp, "IMGPROXY_ENFORCE_WEBP") | ||||
| 	configurators.Bool(&EnableAvifDetection, "IMGPROXY_ENABLE_AVIF_DETECTION") | ||||
| 	configurators.Bool(&EnforceAvif, "IMGPROXY_ENFORCE_AVIF") | ||||
| 	configurators.Bool(&EnableClientHints, "IMGPROXY_ENABLE_CLIENT_HINTS") | ||||
|  | ||||
| 	if err := configurators.ImageTypes(&SkipProcessingFormats, "IMGPROXY_SKIP_PROCESSING_FORMATS"); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	configurators.Bool(&UseLinearColorspace, "IMGPROXY_USE_LINEAR_COLORSPACE") | ||||
| 	configurators.Bool(&DisableShrinkOnLoad, "IMGPROXY_DISABLE_SHRINK_ON_LOAD") | ||||
|  | ||||
| 	if err := configurators.Hex(&Keys, "IMGPROXY_KEY"); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	if err := configurators.Hex(&Salts, "IMGPROXY_SALT"); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	configurators.Int(&SignatureSize, "IMGPROXY_SIGNATURE_SIZE") | ||||
|  | ||||
| 	if err := configurators.HexFile(&Keys, *keyPath); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	if err := configurators.HexFile(&Salts, *saltPath); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	configurators.String(&Secret, "IMGPROXY_SECRET") | ||||
|  | ||||
| 	configurators.String(&AllowOrigin, "IMGPROXY_ALLOW_ORIGIN") | ||||
|  | ||||
| 	configurators.String(&UserAgent, "IMGPROXY_USER_AGENT") | ||||
|  | ||||
| 	configurators.Bool(&IgnoreSslVerification, "IMGPROXY_IGNORE_SSL_VERIFICATION") | ||||
| 	configurators.Bool(&DevelopmentErrorsMode, "IMGPROXY_DEVELOPMENT_ERRORS_MODE") | ||||
|  | ||||
| 	configurators.String(&LocalFileSystemRoot, "IMGPROXY_LOCAL_FILESYSTEM_ROOT") | ||||
|  | ||||
| 	configurators.Bool(&S3Enabled, "IMGPROXY_USE_S3") | ||||
| 	configurators.String(&S3Region, "IMGPROXY_S3_REGION") | ||||
| 	configurators.String(&S3Endpoint, "IMGPROXY_S3_ENDPOINT") | ||||
|  | ||||
| 	configurators.Bool(&GCSEnabled, "IMGPROXY_USE_GCS") | ||||
| 	configurators.String(&GCSKey, "IMGPROXY_GCS_KEY") | ||||
|  | ||||
| 	configurators.Bool(&ABSEnabled, "IMGPROXY_USE_ABS") | ||||
| 	configurators.String(&ABSName, "IMGPROXY_ABS_NAME") | ||||
| 	configurators.String(&ABSKey, "IMGPROXY_ABS_KEY") | ||||
| 	configurators.String(&ABSEndpoint, "IMGPROXY_ABS_ENDPOINT") | ||||
|  | ||||
| 	configurators.Bool(&ETagEnabled, "IMGPROXY_USE_ETAG") | ||||
|  | ||||
| 	configurators.String(&BaseURL, "IMGPROXY_BASE_URL") | ||||
|  | ||||
| 	configurators.StringSlice(&Presets, "IMGPROXY_PRESETS") | ||||
| 	if err := configurators.StringSliceFile(&Presets, *presetsPath); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	configurators.Bool(&OnlyPresets, "IMGPROXY_ONLY_PRESETS") | ||||
|  | ||||
| 	configurators.String(&WatermarkData, "IMGPROXY_WATERMARK_DATA") | ||||
| 	configurators.String(&WatermarkPath, "IMGPROXY_WATERMARK_PATH") | ||||
| 	configurators.String(&WatermarkURL, "IMGPROXY_WATERMARK_URL") | ||||
| 	configurators.Float(&WatermarkOpacity, "IMGPROXY_WATERMARK_OPACITY") | ||||
|  | ||||
| 	configurators.String(&FallbackImageData, "IMGPROXY_FALLBACK_IMAGE_DATA") | ||||
| 	configurators.String(&FallbackImagePath, "IMGPROXY_FALLBACK_IMAGE_PATH") | ||||
| 	configurators.String(&FallbackImageURL, "IMGPROXY_FALLBACK_IMAGE_URL") | ||||
| 	configurators.Int(&FallbackImageHTTPCode, "IMGPROXY_FALLBACK_IMAGE_HTTP_CODE") | ||||
|  | ||||
| 	configurators.Bool(&DataDogEnable, "IMGPROXY_DATADOG_ENABLE") | ||||
|  | ||||
| 	configurators.String(&NewRelicAppName, "IMGPROXY_NEW_RELIC_APP_NAME") | ||||
| 	configurators.String(&NewRelicKey, "IMGPROXY_NEW_RELIC_KEY") | ||||
|  | ||||
| 	configurators.String(&PrometheusBind, "IMGPROXY_PROMETHEUS_BIND") | ||||
| 	configurators.String(&PrometheusNamespace, "IMGPROXY_PROMETHEUS_NAMESPACE") | ||||
|  | ||||
| 	configurators.String(&BugsnagKey, "IMGPROXY_BUGSNAG_KEY") | ||||
| 	configurators.String(&BugsnagStage, "IMGPROXY_BUGSNAG_STAGE") | ||||
| 	configurators.String(&HoneybadgerKey, "IMGPROXY_HONEYBADGER_KEY") | ||||
| 	configurators.String(&HoneybadgerEnv, "IMGPROXY_HONEYBADGER_ENV") | ||||
| 	configurators.String(&SentryDSN, "IMGPROXY_SENTRY_DSN") | ||||
| 	configurators.String(&SentryEnvironment, "IMGPROXY_SENTRY_ENVIRONMENT") | ||||
| 	configurators.String(&SentryRelease, "IMGPROXY_SENTRY_RELEASE") | ||||
| 	configurators.Bool(&ReportDownloadingErrors, "IMGPROXY_REPORT_DOWNLOADING_ERRORS") | ||||
| 	configurators.Bool(&EnableDebugHeaders, "IMGPROXY_ENABLE_DEBUG_HEADERS") | ||||
|  | ||||
| 	configurators.Int(&FreeMemoryInterval, "IMGPROXY_FREE_MEMORY_INTERVAL") | ||||
| 	configurators.Int(&DownloadBufferSize, "IMGPROXY_DOWNLOAD_BUFFER_SIZE") | ||||
| 	configurators.Int(&BufferPoolCalibrationThreshold, "IMGPROXY_BUFFER_POOL_CALIBRATION_THRESHOLD") | ||||
|  | ||||
| 	if len(Keys) != len(Salts) { | ||||
| 		return fmt.Errorf("Number of keys and number of salts should be equal. Keys: %d, salts: %d", len(Keys), len(Salts)) | ||||
| 	} | ||||
| 	if len(Keys) == 0 { | ||||
| 		log.Warning("No keys defined, so signature checking is disabled") | ||||
| 	} | ||||
| 	if len(Salts) == 0 { | ||||
| 		log.Warning("No salts defined, so signature checking is disabled") | ||||
| 	} | ||||
|  | ||||
| 	if SignatureSize < 1 || SignatureSize > 32 { | ||||
| 		return fmt.Errorf("Signature size should be within 1 and 32, now - %d\n", SignatureSize) | ||||
| 	} | ||||
|  | ||||
| 	if len(Bind) == 0 { | ||||
| 		return fmt.Errorf("Bind address is not defined") | ||||
| 	} | ||||
|  | ||||
| 	if ReadTimeout <= 0 { | ||||
| 		return fmt.Errorf("Read timeout should be greater than 0, now - %d\n", ReadTimeout) | ||||
| 	} | ||||
|  | ||||
| 	if WriteTimeout <= 0 { | ||||
| 		return fmt.Errorf("Write timeout should be greater than 0, now - %d\n", WriteTimeout) | ||||
| 	} | ||||
| 	if KeepAliveTimeout < 0 { | ||||
| 		return fmt.Errorf("KeepAlive timeout should be greater than or equal to 0, now - %d\n", KeepAliveTimeout) | ||||
| 	} | ||||
|  | ||||
| 	if DownloadTimeout <= 0 { | ||||
| 		return fmt.Errorf("Download timeout should be greater than 0, now - %d\n", DownloadTimeout) | ||||
| 	} | ||||
|  | ||||
| 	if Concurrency <= 0 { | ||||
| 		return fmt.Errorf("Concurrency should be greater than 0, now - %d\n", Concurrency) | ||||
| 	} | ||||
|  | ||||
| 	if MaxClients <= 0 { | ||||
| 		MaxClients = Concurrency * 10 | ||||
| 	} | ||||
|  | ||||
| 	if TTL <= 0 { | ||||
| 		return fmt.Errorf("TTL should be greater than 0, now - %d\n", TTL) | ||||
| 	} | ||||
|  | ||||
| 	if MaxSrcResolution <= 0 { | ||||
| 		return fmt.Errorf("Max src resolution should be greater than 0, now - %d\n", MaxSrcResolution) | ||||
| 	} | ||||
|  | ||||
| 	if MaxSrcFileSize < 0 { | ||||
| 		return fmt.Errorf("Max src file size should be greater than or equal to 0, now - %d\n", MaxSrcFileSize) | ||||
| 	} | ||||
|  | ||||
| 	if MaxAnimationFrames <= 0 { | ||||
| 		return fmt.Errorf("Max animation frames should be greater than 0, now - %d\n", MaxAnimationFrames) | ||||
| 	} | ||||
|  | ||||
| 	if PngQuantizationColors < 2 { | ||||
| 		return fmt.Errorf("Png quantization colors should be greater than 1, now - %d\n", PngQuantizationColors) | ||||
| 	} else if PngQuantizationColors > 256 { | ||||
| 		return fmt.Errorf("Png quantization colors can't be greater than 256, now - %d\n", PngQuantizationColors) | ||||
| 	} | ||||
|  | ||||
| 	if Quality <= 0 { | ||||
| 		return fmt.Errorf("Quality should be greater than 0, now - %d\n", Quality) | ||||
| 	} else if Quality > 100 { | ||||
| 		return fmt.Errorf("Quality can't be greater than 100, now - %d\n", Quality) | ||||
| 	} | ||||
|  | ||||
| 	if IgnoreSslVerification { | ||||
| 		log.Warning("Ignoring SSL verification is very unsafe") | ||||
| 	} | ||||
|  | ||||
| 	if LocalFileSystemRoot != "" { | ||||
| 		stat, err := os.Stat(LocalFileSystemRoot) | ||||
|  | ||||
| 		if err != nil { | ||||
| 			return fmt.Errorf("Cannot use local directory: %s", err) | ||||
| 		} | ||||
|  | ||||
| 		if !stat.IsDir() { | ||||
| 			return fmt.Errorf("Cannot use local directory: not a directory") | ||||
| 		} | ||||
|  | ||||
| 		if LocalFileSystemRoot == "/" { | ||||
| 			log.Warning("Exposing root via IMGPROXY_LOCAL_FILESYSTEM_ROOT is unsafe") | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if _, ok := os.LookupEnv("IMGPROXY_USE_GCS"); !ok && len(GCSKey) > 0 { | ||||
| 		log.Warning("Set IMGPROXY_USE_GCS to true since it may be required by future versions to enable GCS support") | ||||
| 		GCSEnabled = true | ||||
| 	} | ||||
|  | ||||
| 	if WatermarkOpacity <= 0 { | ||||
| 		return fmt.Errorf("Watermark opacity should be greater than 0") | ||||
| 	} else if WatermarkOpacity > 1 { | ||||
| 		return fmt.Errorf("Watermark opacity should be less than or equal to 1") | ||||
| 	} | ||||
|  | ||||
| 	if FallbackImageHTTPCode < 100 || FallbackImageHTTPCode > 599 { | ||||
| 		return fmt.Errorf("Fallback image HTTP code should be between 100 and 599") | ||||
| 	} | ||||
|  | ||||
| 	if len(PrometheusBind) > 0 && PrometheusBind == Bind { | ||||
| 		return fmt.Errorf("Can't use the same binding for the main server and Prometheus") | ||||
| 	} | ||||
|  | ||||
| 	if FreeMemoryInterval <= 0 { | ||||
| 		return fmt.Errorf("Free memory interval should be greater than zero") | ||||
| 	} | ||||
|  | ||||
| 	if DownloadBufferSize < 0 { | ||||
| 		return fmt.Errorf("Download buffer size should be greater than or equal to 0") | ||||
| 	} else if DownloadBufferSize > math.MaxInt32 { | ||||
| 		return fmt.Errorf("Download buffer size can't be greater than %d", math.MaxInt32) | ||||
| 	} | ||||
|  | ||||
| 	if BufferPoolCalibrationThreshold < 64 { | ||||
| 		return fmt.Errorf("Buffer pool calibration threshold should be greater than or equal to 64") | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
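Configure keeps the behavior of the old configure(): it parses the -keypath/-saltpath/-presets flags, layers IMGPROXY_* environment variables over the defaults set by Reset, and validates the result. How main consumes it is not part of this diff; a plausible startup sketch, where the log.Fatal handling is an assumption:

package main

import (
	log "github.com/sirupsen/logrus"

	"github.com/imgproxy/imgproxy/v2/config"
)

func main() {
	// Configure returns an error for invalid values (e.g. Quality > 100)
	// instead of logging and continuing.
	if err := config.Configure(); err != nil {
		log.Fatal(err)
	}

	log.Infof("starting imgproxy on %s (%s)", config.Bind, config.Network)
}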
							
								
								
									
config/configurators/configurators.go (186 lines, new file)
							| @@ -0,0 +1,186 @@ | ||||
| package configurators | ||||
|  | ||||
| import ( | ||||
| 	"bufio" | ||||
| 	"encoding/hex" | ||||
| 	"fmt" | ||||
| 	"os" | ||||
| 	"strconv" | ||||
| 	"strings" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagetype" | ||||
| ) | ||||
|  | ||||
| func Int(i *int, name string) { | ||||
| 	if env, err := strconv.Atoi(os.Getenv(name)); err == nil { | ||||
| 		*i = env | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func Float(i *float64, name string) { | ||||
| 	if env, err := strconv.ParseFloat(os.Getenv(name), 64); err == nil { | ||||
| 		*i = env | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func MegaInt(f *int, name string) { | ||||
| 	if env, err := strconv.ParseFloat(os.Getenv(name), 64); err == nil { | ||||
| 		*f = int(env * 1000000) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func String(s *string, name string) { | ||||
| 	if env := os.Getenv(name); len(env) > 0 { | ||||
| 		*s = env | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func StringSlice(s *[]string, name string) { | ||||
| 	if env := os.Getenv(name); len(env) > 0 { | ||||
| 		parts := strings.Split(env, ",") | ||||
|  | ||||
| 		for i, p := range parts { | ||||
| 			parts[i] = strings.TrimSpace(p) | ||||
| 		} | ||||
|  | ||||
| 		*s = parts | ||||
|  | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	*s = []string{} | ||||
| } | ||||
|  | ||||
| func StringSliceFile(s *[]string, filepath string) error { | ||||
| 	if len(filepath) == 0 { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	f, err := os.Open(filepath) | ||||
| 	if err != nil { | ||||
| 		return fmt.Errorf("Can't open file %s\n", filepath) | ||||
| 	} | ||||
|  | ||||
| 	scanner := bufio.NewScanner(f) | ||||
| 	for scanner.Scan() { | ||||
| 		if str := scanner.Text(); len(str) != 0 && !strings.HasPrefix(str, "#") { | ||||
| 			*s = append(*s, str) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err := scanner.Err(); err != nil { | ||||
| 		return fmt.Errorf("Failed to read presets file: %s", err) | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func Bool(b *bool, name string) { | ||||
| 	if env, err := strconv.ParseBool(os.Getenv(name)); err == nil { | ||||
| 		*b = env | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func ImageTypes(it *[]imagetype.Type, name string) error { | ||||
| 	*it = []imagetype.Type{} | ||||
|  | ||||
| 	if env := os.Getenv(name); len(env) > 0 { | ||||
| 		parts := strings.Split(env, ",") | ||||
|  | ||||
| 		for _, p := range parts { | ||||
| 			pt := strings.TrimSpace(p) | ||||
| 			if t, ok := imagetype.Types[pt]; ok { | ||||
| 				*it = append(*it, t) | ||||
| 			} else { | ||||
| 				return fmt.Errorf("Unknown image format to skip: %s", pt) | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func ImageTypesQuality(m map[imagetype.Type]int, name string) error { | ||||
| 	if env := os.Getenv(name); len(env) > 0 { | ||||
| 		parts := strings.Split(env, ",") | ||||
|  | ||||
| 		for _, p := range parts { | ||||
| 			i := strings.Index(p, "=") | ||||
| 			if i < 0 { | ||||
| 				return fmt.Errorf("Invalid format quality string: %s", p) | ||||
| 			} | ||||
|  | ||||
| 			imgtypeStr, qStr := strings.TrimSpace(p[:i]), strings.TrimSpace(p[i+1:]) | ||||
|  | ||||
| 			imgtype, ok := imagetype.Types[imgtypeStr] | ||||
| 			if !ok { | ||||
| 				return fmt.Errorf("Invalid format: %s", p) | ||||
| 			} | ||||
|  | ||||
| 			q, err := strconv.Atoi(qStr) | ||||
| 			if err != nil || q <= 0 || q > 100 { | ||||
| 				return fmt.Errorf("Invalid quality: %s", p) | ||||
| 			} | ||||
|  | ||||
| 			m[imgtype] = q | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func Hex(b *[][]byte, name string) error { | ||||
| 	var err error | ||||
|  | ||||
| 	if env := os.Getenv(name); len(env) > 0 { | ||||
| 		parts := strings.Split(env, ",") | ||||
|  | ||||
| 		keys := make([][]byte, len(parts)) | ||||
|  | ||||
| 		for i, part := range parts { | ||||
| 			if keys[i], err = hex.DecodeString(part); err != nil { | ||||
| 				return fmt.Errorf("%s expected to be hex-encoded strings. Invalid: %s\n", name, part) | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		*b = keys | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func HexFile(b *[][]byte, filepath string) error { | ||||
| 	if len(filepath) == 0 { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	f, err := os.Open(filepath) | ||||
| 	if err != nil { | ||||
| 		return fmt.Errorf("Can't open file %s\n", filepath) | ||||
| 	} | ||||
|  | ||||
| 	keys := [][]byte{} | ||||
|  | ||||
| 	scanner := bufio.NewScanner(f) | ||||
| 	for scanner.Scan() { | ||||
| 		part := scanner.Text() | ||||
|  | ||||
| 		if len(part) == 0 { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		if key, err := hex.DecodeString(part); err == nil { | ||||
| 			keys = append(keys, key) | ||||
| 		} else { | ||||
| 			return fmt.Errorf("%s expected to contain hex-encoded strings. Invalid: %s\n", filepath, part) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err := scanner.Err(); err != nil { | ||||
| 		return fmt.Errorf("Failed to read file %s: %s", filepath, err) | ||||
| 	} | ||||
|  | ||||
| 	*b = keys | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
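Each configurator only overwrites its target when the environment variable is set and parses, so package defaults survive unset or malformed values; note that, unlike the old env helpers, ImageTypes and ImageTypesQuality now return an error instead of logging a warning. A small sketch of that behavior:

package main

import (
	"fmt"
	"os"

	"github.com/imgproxy/imgproxy/v2/config/configurators"
)

func main() {
	quality := 80   // defaults stay in place when a variable is unset or invalid
	bind := ":8080"

	os.Setenv("IMGPROXY_QUALITY", "90")
	// IMGPROXY_BIND intentionally left unset.

	configurators.Int(&quality, "IMGPROXY_QUALITY")
	configurators.String(&bind, "IMGPROXY_BIND")

	fmt.Println(quality, bind) // 90 :8080
}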
							
								
								
									
crypt.go (41 lines, deleted)
							| @@ -1,41 +0,0 @@ | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"crypto/hmac" | ||||
| 	"crypto/sha256" | ||||
| 	"encoding/base64" | ||||
| 	"errors" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	errInvalidSignature         = errors.New("Invalid signature") | ||||
| 	errInvalidSignatureEncoding = errors.New("Invalid signature encoding") | ||||
| ) | ||||
|  | ||||
| type securityKey []byte | ||||
|  | ||||
| func validatePath(signature, path string) error { | ||||
| 	messageMAC, err := base64.RawURLEncoding.DecodeString(signature) | ||||
| 	if err != nil { | ||||
| 		return errInvalidSignatureEncoding | ||||
| 	} | ||||
|  | ||||
| 	for i := 0; i < len(conf.Keys); i++ { | ||||
| 		if hmac.Equal(messageMAC, signatureFor(path, i)) { | ||||
| 			return nil | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return errInvalidSignature | ||||
| } | ||||
|  | ||||
| func signatureFor(str string, pairInd int) []byte { | ||||
| 	mac := hmac.New(sha256.New, conf.Keys[pairInd]) | ||||
| 	mac.Write(conf.Salts[pairInd]) | ||||
| 	mac.Write([]byte(str)) | ||||
| 	expectedMAC := mac.Sum(nil) | ||||
| 	if conf.SignatureSize < 32 { | ||||
| 		return expectedMAC[:conf.SignatureSize] | ||||
| 	} | ||||
| 	return expectedMAC | ||||
| } | ||||
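For reference, the signature that validatePath checks is an HMAC-SHA256 keyed with one of the configured keys, fed the matching salt and then the path, and encoded with unpadded URL-safe base64. A standalone sketch of producing such a signature (single key/salt pair, untruncated signature):

```go
package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
	"fmt"
)

// sign mirrors signatureFor: HMAC-SHA256 keyed with the key, fed the salt and
// then the signed path, encoded with unpadded URL-safe base64.
func sign(key, salt []byte, path string) string {
	mac := hmac.New(sha256.New, key)
	mac.Write(salt)
	mac.Write([]byte(path))
	return base64.RawURLEncoding.EncodeToString(mac.Sum(nil))
}

func main() {
	// With the key/salt used in crypt_test.go this prints
	// dtLwhdnPPiu_epMl1LrzheLpvHas-4mwvY6L3Z8WwlY, the value the tests accept.
	fmt.Println(sign([]byte("test-key"), []byte("test-salt"), "asd"))
}
```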
| @@ -1,52 +0,0 @@ | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"testing" | ||||
|  | ||||
| 	"github.com/stretchr/testify/assert" | ||||
| 	"github.com/stretchr/testify/suite" | ||||
| ) | ||||
|  | ||||
| type CryptTestSuite struct{ MainTestSuite } | ||||
|  | ||||
| func (s *CryptTestSuite) SetupTest() { | ||||
| 	s.MainTestSuite.SetupTest() | ||||
|  | ||||
| 	conf.Keys = []securityKey{securityKey("test-key")} | ||||
| 	conf.Salts = []securityKey{securityKey("test-salt")} | ||||
| } | ||||
|  | ||||
| func (s *CryptTestSuite) TestValidatePath() { | ||||
| 	err := validatePath("dtLwhdnPPiu_epMl1LrzheLpvHas-4mwvY6L3Z8WwlY", "asd") | ||||
| 	assert.Nil(s.T(), err) | ||||
| } | ||||
|  | ||||
| func (s *CryptTestSuite) TestValidatePathTruncated() { | ||||
| 	conf.SignatureSize = 8 | ||||
|  | ||||
| 	err := validatePath("dtLwhdnPPis", "asd") | ||||
| 	assert.Nil(s.T(), err) | ||||
| } | ||||
|  | ||||
| func (s *CryptTestSuite) TestValidatePathInvalid() { | ||||
| 	err := validatePath("dtLwhdnPPis", "asd") | ||||
| 	assert.Error(s.T(), err) | ||||
| } | ||||
|  | ||||
| func (s *CryptTestSuite) TestValidatePathMultiplePairs() { | ||||
| 	conf.Keys = append(conf.Keys, securityKey("test-key2")) | ||||
| 	conf.Salts = append(conf.Salts, securityKey("test-salt2")) | ||||
|  | ||||
| 	err := validatePath("dtLwhdnPPiu_epMl1LrzheLpvHas-4mwvY6L3Z8WwlY", "asd") | ||||
| 	assert.Nil(s.T(), err) | ||||
|  | ||||
| 	err = validatePath("jbDffNPt1-XBgDccsaE-XJB9lx8JIJqdeYIZKgOqZpg", "asd") | ||||
| 	assert.Nil(s.T(), err) | ||||
|  | ||||
| 	err = validatePath("dtLwhdnPPis", "asd") | ||||
| 	assert.Error(s.T(), err) | ||||
| } | ||||
|  | ||||
| func TestCrypt(t *testing.T) { | ||||
| 	suite.Run(t, new(CryptTestSuite)) | ||||
| } | ||||
							
								
								
									
  91  datadog.go
| @@ -1,91 +0,0 @@ | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"errors" | ||||
| 	"net/http" | ||||
| 	"os" | ||||
| 	"time" | ||||
|  | ||||
| 	"gopkg.in/DataDog/dd-trace-go.v1/ddtrace/ext" | ||||
| 	"gopkg.in/DataDog/dd-trace-go.v1/ddtrace/tracer" | ||||
| ) | ||||
|  | ||||
| const ( | ||||
| 	dataDogSpanCtxKey = ctxKey("dataDogSpan") | ||||
| ) | ||||
|  | ||||
| func initDataDog() { | ||||
| 	if !conf.DataDogEnable { | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	name := os.Getenv("DD_SERVICE") | ||||
| 	if len(name) == 0 { | ||||
| 		name = "imgproxy" | ||||
| 	} | ||||
|  | ||||
| 	tracer.Start( | ||||
| 		tracer.WithService(name), | ||||
| 		tracer.WithServiceVersion(version), | ||||
| 		tracer.WithLogger(dataDogLogger{}), | ||||
| 	) | ||||
| } | ||||
|  | ||||
| func stopDataDog() { | ||||
| 	tracer.Stop() | ||||
| } | ||||
|  | ||||
| func startDataDogRootSpan(ctx context.Context, rw http.ResponseWriter, r *http.Request) (context.Context, context.CancelFunc, http.ResponseWriter) { | ||||
| 	span := tracer.StartSpan( | ||||
| 		"request", | ||||
| 		tracer.Measured(), | ||||
| 		tracer.SpanType("web"), | ||||
| 		tracer.Tag(ext.HTTPMethod, r.Method), | ||||
| 		tracer.Tag(ext.HTTPURL, r.RequestURI), | ||||
| 	) | ||||
| 	cancel := func() { span.Finish() } | ||||
| 	newRw := dataDogResponseWriter{rw, span} | ||||
|  | ||||
| 	return context.WithValue(ctx, dataDogSpanCtxKey, span), cancel, newRw | ||||
| } | ||||
|  | ||||
| func startDataDogSpan(ctx context.Context, name string) context.CancelFunc { | ||||
| 	rootSpan, _ := ctx.Value(dataDogSpanCtxKey).(tracer.Span) | ||||
| 	span := tracer.StartSpan(name, tracer.Measured(), tracer.ChildOf(rootSpan.Context())) | ||||
| 	return func() { span.Finish() } | ||||
| } | ||||
|  | ||||
| func sendErrorToDataDog(ctx context.Context, err error) { | ||||
| 	rootSpan, _ := ctx.Value(dataDogSpanCtxKey).(tracer.Span) | ||||
| 	rootSpan.Finish(tracer.WithError(err)) | ||||
| } | ||||
|  | ||||
| func sendTimeoutToDataDog(ctx context.Context, d time.Duration) { | ||||
| 	rootSpan, _ := ctx.Value(dataDogSpanCtxKey).(tracer.Span) | ||||
| 	rootSpan.SetTag("timeout_duration", d) | ||||
| 	rootSpan.Finish(tracer.WithError(errors.New("Timeout"))) | ||||
| } | ||||
|  | ||||
| type dataDogLogger struct { | ||||
| } | ||||
|  | ||||
| func (l dataDogLogger) Log(msg string) { | ||||
| 	logNotice(msg) | ||||
| } | ||||
|  | ||||
| type dataDogResponseWriter struct { | ||||
| 	rw   http.ResponseWriter | ||||
| 	span tracer.Span | ||||
| } | ||||
|  | ||||
| func (ddrw dataDogResponseWriter) Header() http.Header { | ||||
| 	return ddrw.rw.Header() | ||||
| } | ||||
| func (ddrw dataDogResponseWriter) Write(data []byte) (int, error) { | ||||
| 	return ddrw.rw.Write(data) | ||||
| } | ||||
| func (ddrw dataDogResponseWriter) WriteHeader(statusCode int) { | ||||
| 	ddrw.span.SetTag(ext.HTTPCode, statusCode) | ||||
| 	ddrw.rw.WriteHeader(statusCode) | ||||
| } | ||||
							
								
								
									
  249  download.go
| @@ -1,249 +0,0 @@ | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"compress/gzip" | ||||
| 	"context" | ||||
| 	"crypto/tls" | ||||
| 	"fmt" | ||||
| 	"io" | ||||
| 	"io/ioutil" | ||||
| 	"net" | ||||
| 	"net/http" | ||||
| 	"time" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagemeta" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	downloadClient *http.Client | ||||
|  | ||||
| 	imageDataCtxKey = ctxKey("imageData") | ||||
|  | ||||
| 	imageHeadersToStore = []string{ | ||||
| 		"Cache-Control", | ||||
| 		"Expires", | ||||
| 	} | ||||
|  | ||||
| 	errSourceResolutionTooBig      = newError(422, "Source image resolution is too big", "Invalid source image") | ||||
| 	errSourceFileTooBig            = newError(422, "Source image file is too big", "Invalid source image") | ||||
| 	errSourceImageTypeNotSupported = newError(422, "Source image type not supported", "Invalid source image") | ||||
| ) | ||||
|  | ||||
| const msgSourceImageIsUnreachable = "Source image is unreachable" | ||||
|  | ||||
| var downloadBufPool *bufPool | ||||
|  | ||||
| type hardLimitReader struct { | ||||
| 	r    io.Reader | ||||
| 	left int | ||||
| } | ||||
|  | ||||
| func (lr *hardLimitReader) Read(p []byte) (n int, err error) { | ||||
| 	if lr.left <= 0 { | ||||
| 		return 0, errSourceFileTooBig | ||||
| 	} | ||||
| 	if len(p) > lr.left { | ||||
| 		p = p[0:lr.left] | ||||
| 	} | ||||
| 	n, err = lr.r.Read(p) | ||||
| 	lr.left -= n | ||||
| 	return | ||||
| } | ||||
|  | ||||
| func initDownloading() error { | ||||
| 	transport := &http.Transport{ | ||||
| 		Proxy:               http.ProxyFromEnvironment, | ||||
| 		MaxIdleConns:        conf.Concurrency, | ||||
| 		MaxIdleConnsPerHost: conf.Concurrency, | ||||
| 		DisableCompression:  true, | ||||
| 		DialContext:         (&net.Dialer{KeepAlive: 600 * time.Second}).DialContext, | ||||
| 	} | ||||
|  | ||||
| 	if conf.IgnoreSslVerification { | ||||
| 		transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true} | ||||
| 	} | ||||
|  | ||||
| 	if conf.LocalFileSystemRoot != "" { | ||||
| 		transport.RegisterProtocol("local", newFsTransport()) | ||||
| 	} | ||||
|  | ||||
| 	if conf.S3Enabled { | ||||
| 		if t, err := newS3Transport(); err != nil { | ||||
| 			return err | ||||
| 		} else { | ||||
| 			transport.RegisterProtocol("s3", t) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if conf.GCSEnabled { | ||||
| 		if t, err := newGCSTransport(); err != nil { | ||||
| 			return err | ||||
| 		} else { | ||||
| 			transport.RegisterProtocol("gs", t) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if conf.ABSEnabled { | ||||
| 		if t, err := newAzureTransport(); err != nil { | ||||
| 			return err | ||||
| 		} else { | ||||
| 			transport.RegisterProtocol("abs", t) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	downloadClient = &http.Client{ | ||||
| 		Timeout:   time.Duration(conf.DownloadTimeout) * time.Second, | ||||
| 		Transport: transport, | ||||
| 	} | ||||
|  | ||||
| 	downloadBufPool = newBufPool("download", conf.Concurrency, conf.DownloadBufferSize) | ||||
|  | ||||
| 	imagemeta.SetMaxSvgCheckRead(conf.MaxSvgCheckBytes) | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func checkDimensions(width, height int) error { | ||||
| 	if width*height > conf.MaxSrcResolution { | ||||
| 		return errSourceResolutionTooBig | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func checkTypeAndDimensions(r io.Reader) (imageType, error) { | ||||
| 	meta, err := imagemeta.DecodeMeta(r) | ||||
| 	if err == imagemeta.ErrFormat { | ||||
| 		return imageTypeUnknown, errSourceImageTypeNotSupported | ||||
| 	} | ||||
| 	if err != nil { | ||||
| 		return imageTypeUnknown, wrapError(err, 0) | ||||
| 	} | ||||
|  | ||||
| 	imgtype, imgtypeOk := imageTypes[meta.Format()] | ||||
| 	if !imgtypeOk || !imageTypeLoadSupport(imgtype) { | ||||
| 		return imageTypeUnknown, errSourceImageTypeNotSupported | ||||
| 	} | ||||
|  | ||||
| 	if err = checkDimensions(meta.Width(), meta.Height()); err != nil { | ||||
| 		return imageTypeUnknown, err | ||||
| 	} | ||||
|  | ||||
| 	return imgtype, nil | ||||
| } | ||||
|  | ||||
| func readAndCheckImage(r io.Reader, contentLength int) (*imageData, error) { | ||||
| 	if conf.MaxSrcFileSize > 0 && contentLength > conf.MaxSrcFileSize { | ||||
| 		return nil, errSourceFileTooBig | ||||
| 	} | ||||
|  | ||||
| 	buf := downloadBufPool.Get(contentLength) | ||||
| 	cancel := func() { downloadBufPool.Put(buf) } | ||||
|  | ||||
| 	if conf.MaxSrcFileSize > 0 { | ||||
| 		r = &hardLimitReader{r: r, left: conf.MaxSrcFileSize} | ||||
| 	} | ||||
|  | ||||
| 	br := newBufReader(r, buf) | ||||
|  | ||||
| 	imgtype, err := checkTypeAndDimensions(br) | ||||
| 	if err != nil { | ||||
| 		cancel() | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	if err = br.Flush(); err != nil { | ||||
| 		cancel() | ||||
| 		return nil, newError(404, err.Error(), msgSourceImageIsUnreachable).SetUnexpected(conf.ReportDownloadingErrors) | ||||
| 	} | ||||
|  | ||||
| 	return &imageData{ | ||||
| 		Data:   buf.Bytes(), | ||||
| 		Type:   imgtype, | ||||
| 		cancel: cancel, | ||||
| 	}, nil | ||||
| } | ||||
|  | ||||
| func requestImage(imageURL string) (*http.Response, error) { | ||||
| 	req, err := http.NewRequest("GET", imageURL, nil) | ||||
| 	if err != nil { | ||||
| 		return nil, newError(404, err.Error(), msgSourceImageIsUnreachable).SetUnexpected(conf.ReportDownloadingErrors) | ||||
| 	} | ||||
|  | ||||
| 	req.Header.Set("User-Agent", conf.UserAgent) | ||||
|  | ||||
| 	res, err := downloadClient.Do(req) | ||||
| 	if err != nil { | ||||
| 		return res, newError(404, err.Error(), msgSourceImageIsUnreachable).SetUnexpected(conf.ReportDownloadingErrors) | ||||
| 	} | ||||
|  | ||||
| 	if res.StatusCode != 200 { | ||||
| 		body, _ := ioutil.ReadAll(res.Body) | ||||
| 		res.Body.Close() | ||||
|  | ||||
| 		msg := fmt.Sprintf("Can't download image; Status: %d; %s", res.StatusCode, string(body)) | ||||
| 		return res, newError(404, msg, msgSourceImageIsUnreachable).SetUnexpected(conf.ReportDownloadingErrors) | ||||
| 	} | ||||
|  | ||||
| 	return res, nil | ||||
| } | ||||
|  | ||||
| func downloadImage(imageURL string) (*imageData, error) { | ||||
| 	res, err := requestImage(imageURL) | ||||
| 	if res != nil { | ||||
| 		defer res.Body.Close() | ||||
| 	} | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	body := res.Body | ||||
| 	contentLength := int(res.ContentLength) | ||||
|  | ||||
| 	if res.Header.Get("Content-Encoding") == "gzip" { | ||||
| 		gzipBody, errGzip := gzip.NewReader(res.Body) | ||||
| 		if gzipBody != nil { | ||||
| 			defer gzipBody.Close() | ||||
| 		} | ||||
| 		if errGzip != nil { | ||||
| 			return nil, err | ||||
| 		} | ||||
| 		body = gzipBody | ||||
| 		contentLength = 0 | ||||
| 	} | ||||
|  | ||||
| 	imgdata, err := readAndCheckImage(body, contentLength) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	imgdata.Headers = make(map[string]string) | ||||
| 	for _, h := range imageHeadersToStore { | ||||
| 		if val := res.Header.Get(h); len(val) != 0 { | ||||
| 			imgdata.Headers[h] = val | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return imgdata, nil | ||||
| } | ||||
|  | ||||
| func downloadImageCtx(ctx context.Context) (context.Context, context.CancelFunc, error) { | ||||
| 	imageURL := getImageURL(ctx) | ||||
|  | ||||
| 	defer startDataDogSpan(ctx, "downloading_image")() | ||||
| 	defer startNewRelicSegment(ctx, "Downloading image")() | ||||
| 	defer startPrometheusDuration(prometheusDownloadDuration)() | ||||
|  | ||||
| 	imgdata, err := downloadImage(imageURL) | ||||
| 	if err != nil { | ||||
| 		return ctx, func() {}, err | ||||
| 	} | ||||
|  | ||||
| 	ctx = context.WithValue(ctx, imageDataCtxKey, imgdata) | ||||
|  | ||||
| 	return ctx, imgdata.Close, nil | ||||
| } | ||||
|  | ||||
| func getImageData(ctx context.Context) *imageData { | ||||
| 	return ctx.Value(imageDataCtxKey).(*imageData) | ||||
| } | ||||
							
								
								
									
  26  errorreport/bugsnag/bugsnag.go  Normal file
| @@ -0,0 +1,26 @@ | ||||
| package bugsnag | ||||
|  | ||||
| import ( | ||||
| 	"net/http" | ||||
|  | ||||
| 	"github.com/bugsnag/bugsnag-go" | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| ) | ||||
|  | ||||
| var enabled bool | ||||
|  | ||||
| func Init() { | ||||
| 	if len(config.BugsnagKey) > 0 { | ||||
| 		bugsnag.Configure(bugsnag.Configuration{ | ||||
| 			APIKey:       config.BugsnagKey, | ||||
| 			ReleaseStage: config.BugsnagStage, | ||||
| 		}) | ||||
| 		enabled = true | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func Report(err error, req *http.Request) { | ||||
| 	if enabled { | ||||
| 		bugsnag.Notify(err, req) | ||||
| 	} | ||||
| } | ||||
							
								
								
									
  21  errorreport/errorreport.go  Normal file
| @@ -0,0 +1,21 @@ | ||||
| package errorreport | ||||
|  | ||||
| import ( | ||||
| 	"net/http" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/errorreport/bugsnag" | ||||
| 	"github.com/imgproxy/imgproxy/v2/errorreport/honeybadger" | ||||
| 	"github.com/imgproxy/imgproxy/v2/errorreport/sentry" | ||||
| ) | ||||
|  | ||||
| func Init() { | ||||
| 	bugsnag.Init() | ||||
| 	honeybadger.Init() | ||||
| 	sentry.Init() | ||||
| } | ||||
|  | ||||
| func Report(err error, req *http.Request) { | ||||
| 	bugsnag.Report(err, req) | ||||
| 	honeybadger.Report(err, req) | ||||
| 	sentry.Report(err, req) | ||||
| } | ||||
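The errorreport facade hides which reporters are actually configured: Init enables only those whose keys or DSNs are set, and Report fans the error out to every enabled one. A hedged sketch of a caller (the handler and helper below are hypothetical):

```go
package main

import (
	"errors"
	"net/http"

	"github.com/imgproxy/imgproxy/v2/errorreport"
)

// processRequest is a stand-in for real work that can fail.
func processRequest(r *http.Request) error {
	return errors.New("something went wrong")
}

func main() {
	// Enables only the reporters whose keys/DSNs are configured.
	errorreport.Init()

	http.HandleFunc("/", func(rw http.ResponseWriter, r *http.Request) {
		if err := processRequest(r); err != nil {
			// Every enabled reporter receives the error plus the request context.
			errorreport.Report(err, r)
			http.Error(rw, "Internal error", http.StatusInternalServerError)
		}
	})

	//nolint:errcheck
	http.ListenAndServe(":8080", nil)
}
```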
							
								
								
									
  38  errorreport/honeybadger/honeybadger.go  Normal file
| @@ -0,0 +1,38 @@ | ||||
| package honeybadger | ||||
|  | ||||
| import ( | ||||
| 	"net/http" | ||||
| 	"strings" | ||||
|  | ||||
| 	"github.com/honeybadger-io/honeybadger-go" | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	enabled bool | ||||
|  | ||||
| 	headersReplacer = strings.NewReplacer("-", "_") | ||||
| ) | ||||
|  | ||||
| func Init() { | ||||
| 	if len(config.HoneybadgerKey) > 0 { | ||||
| 		honeybadger.Configure(honeybadger.Configuration{ | ||||
| 			APIKey: config.HoneybadgerKey, | ||||
| 			Env:    config.HoneybadgerEnv, | ||||
| 		}) | ||||
| 		enabled = true | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func Report(err error, req *http.Request) { | ||||
| 	if enabled { | ||||
| 		headers := make(honeybadger.CGIData) | ||||
|  | ||||
| 		for k, v := range req.Header { | ||||
| 			key := "HTTP_" + headersReplacer.Replace(strings.ToUpper(k)) | ||||
| 			headers[key] = v[0] | ||||
| 		} | ||||
|  | ||||
| 		honeybadger.Notify(err, req.URL, headers) | ||||
| 	} | ||||
| } | ||||
							
								
								
									
  39  errorreport/sentry/sentry.go  Normal file
| @@ -0,0 +1,39 @@ | ||||
| package sentry | ||||
|  | ||||
| import ( | ||||
| 	"net/http" | ||||
| 	"time" | ||||
|  | ||||
| 	"github.com/getsentry/sentry-go" | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	enabled bool | ||||
|  | ||||
| 	timeout = 5 * time.Second | ||||
| ) | ||||
|  | ||||
| func Init() { | ||||
| 	if len(config.SentryDSN) > 0 { | ||||
| 		sentry.Init(sentry.ClientOptions{ | ||||
| 			Dsn:         config.SentryDSN, | ||||
| 			Release:     config.SentryRelease, | ||||
| 			Environment: config.SentryEnvironment, | ||||
| 		}) | ||||
|  | ||||
| 		enabled = true | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func Report(err error, req *http.Request) { | ||||
| 	if enabled { | ||||
| 		hub := sentry.CurrentHub().Clone() | ||||
| 		hub.Scope().SetRequest(req) | ||||
| 		hub.Scope().SetLevel(sentry.LevelError) | ||||
| 		eventID := hub.CaptureException(err) | ||||
| 		if eventID != nil { | ||||
| 			hub.Flush(timeout) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
| @@ -1,75 +0,0 @@ | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"net/http" | ||||
| 	"strings" | ||||
| 	"time" | ||||
|  | ||||
| 	"github.com/bugsnag/bugsnag-go" | ||||
| 	"github.com/getsentry/sentry-go" | ||||
| 	"github.com/honeybadger-io/honeybadger-go" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	bugsnagEnabled     bool | ||||
| 	honeybadgerEnabled bool | ||||
| 	sentryEnabled      bool | ||||
|  | ||||
| 	headersReplacer = strings.NewReplacer("-", "_") | ||||
| 	sentryTimeout   = 5 * time.Second | ||||
| ) | ||||
|  | ||||
| func initErrorsReporting() { | ||||
| 	if len(conf.BugsnagKey) > 0 { | ||||
| 		bugsnag.Configure(bugsnag.Configuration{ | ||||
| 			APIKey:       conf.BugsnagKey, | ||||
| 			ReleaseStage: conf.BugsnagStage, | ||||
| 		}) | ||||
| 		bugsnagEnabled = true | ||||
| 	} | ||||
|  | ||||
| 	if len(conf.HoneybadgerKey) > 0 { | ||||
| 		honeybadger.Configure(honeybadger.Configuration{ | ||||
| 			APIKey: conf.HoneybadgerKey, | ||||
| 			Env:    conf.HoneybadgerEnv, | ||||
| 		}) | ||||
| 		honeybadgerEnabled = true | ||||
| 	} | ||||
|  | ||||
| 	if len(conf.SentryDSN) > 0 { | ||||
| 		sentry.Init(sentry.ClientOptions{ | ||||
| 			Dsn:         conf.SentryDSN, | ||||
| 			Release:     conf.SentryRelease, | ||||
| 			Environment: conf.SentryEnvironment, | ||||
| 		}) | ||||
|  | ||||
| 		sentryEnabled = true | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func reportError(err error, req *http.Request) { | ||||
| 	if bugsnagEnabled { | ||||
| 		bugsnag.Notify(err, req) | ||||
| 	} | ||||
|  | ||||
| 	if honeybadgerEnabled { | ||||
| 		headers := make(honeybadger.CGIData) | ||||
|  | ||||
| 		for k, v := range req.Header { | ||||
| 			key := "HTTP_" + headersReplacer.Replace(strings.ToUpper(k)) | ||||
| 			headers[key] = v[0] | ||||
| 		} | ||||
|  | ||||
| 		honeybadger.Notify(err, req.URL, headers) | ||||
| 	} | ||||
|  | ||||
| 	if sentryEnabled { | ||||
| 		hub := sentry.CurrentHub().Clone() | ||||
| 		hub.Scope().SetRequest(req) | ||||
| 		hub.Scope().SetLevel(sentry.LevelError) | ||||
| 		eventID := hub.CaptureException(err) | ||||
| 		if eventID != nil { | ||||
| 			hub.Flush(sentryTimeout) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
							
								
								
									
  13  etag.go
| @@ -7,6 +7,10 @@ import ( | ||||
| 	"encoding/json" | ||||
| 	"hash" | ||||
| 	"sync" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/version" | ||||
| ) | ||||
|  | ||||
| type eTagCalc struct { | ||||
| @@ -26,19 +30,18 @@ var eTagCalcPool = sync.Pool{ | ||||
| 	}, | ||||
| } | ||||
|  | ||||
| func calcETag(ctx context.Context) string { | ||||
| func calcETag(ctx context.Context, imgdata *imagedata.ImageData, po *options.ProcessingOptions) string { | ||||
| 	c := eTagCalcPool.Get().(*eTagCalc) | ||||
| 	defer eTagCalcPool.Put(c) | ||||
|  | ||||
| 	c.hash.Reset() | ||||
| 	c.hash.Write(getImageData(ctx).Data) | ||||
| 	c.hash.Write(imgdata.Data) | ||||
| 	footprint := c.hash.Sum(nil) | ||||
|  | ||||
| 	c.hash.Reset() | ||||
| 	c.hash.Write(footprint) | ||||
| 	c.hash.Write([]byte(version)) | ||||
| 	c.enc.Encode(conf) | ||||
| 	c.enc.Encode(getProcessingOptions(ctx)) | ||||
| 	c.hash.Write([]byte(version.Version())) | ||||
| 	c.enc.Encode(po) | ||||
|  | ||||
| 	return hex.EncodeToString(c.hash.Sum(nil)) | ||||
| } | ||||
|   | ||||
| @@ -7,14 +7,17 @@ import ( | ||||
| 	"net" | ||||
| 	"net/http" | ||||
| 	"os" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/config/configurators" | ||||
| ) | ||||
|  | ||||
| func healthcheck() int { | ||||
| 	network := conf.Network | ||||
| 	bind := conf.Bind | ||||
| 	network := config.Network | ||||
| 	bind := config.Bind | ||||
|  | ||||
| 	strEnvConfig(&network, "IMGPROXY_NETWORK") | ||||
| 	strEnvConfig(&bind, "IMGPROXY_BIND") | ||||
| 	configurators.String(&network, "IMGPROXY_NETWORK") | ||||
| 	configurators.String(&bind, "IMGPROXY_BIND") | ||||
|  | ||||
| 	httpc := http.Client{ | ||||
| 		Transport: &http.Transport{ | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| package main | ||||
| package ierrors | ||||
| 
 | ||||
| import ( | ||||
| 	"fmt" | ||||
| @@ -6,7 +6,7 @@ import ( | ||||
| 	"strings" | ||||
| ) | ||||
| 
 | ||||
| type imgproxyError struct { | ||||
| type Error struct { | ||||
| 	StatusCode    int | ||||
| 	Message       string | ||||
| 	PublicMessage string | ||||
| @@ -15,11 +15,11 @@ type imgproxyError struct { | ||||
| 	stack []uintptr | ||||
| } | ||||
| 
 | ||||
| func (e *imgproxyError) Error() string { | ||||
| func (e *Error) Error() string { | ||||
| 	return e.Message | ||||
| } | ||||
| 
 | ||||
| func (e *imgproxyError) FormatStack() string { | ||||
| func (e *Error) FormatStack() string { | ||||
| 	if e.stack == nil { | ||||
| 		return "" | ||||
| 	} | ||||
| @@ -27,25 +27,25 @@ func (e *imgproxyError) FormatStack() string { | ||||
| 	return formatStack(e.stack) | ||||
| } | ||||
| 
 | ||||
| func (e *imgproxyError) StackTrace() []uintptr { | ||||
| func (e *Error) StackTrace() []uintptr { | ||||
| 	return e.stack | ||||
| } | ||||
| 
 | ||||
| func (e *imgproxyError) SetUnexpected(u bool) *imgproxyError { | ||||
| func (e *Error) SetUnexpected(u bool) *Error { | ||||
| 	e.Unexpected = u | ||||
| 	return e | ||||
| } | ||||
| 
 | ||||
| func newError(status int, msg string, pub string) *imgproxyError { | ||||
| 	return &imgproxyError{ | ||||
| func New(status int, msg string, pub string) *Error { | ||||
| 	return &Error{ | ||||
| 		StatusCode:    status, | ||||
| 		Message:       msg, | ||||
| 		PublicMessage: pub, | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| func newUnexpectedError(msg string, skip int) *imgproxyError { | ||||
| 	return &imgproxyError{ | ||||
| func NewUnexpected(msg string, skip int) *Error { | ||||
| 	return &Error{ | ||||
| 		StatusCode:    500, | ||||
| 		Message:       msg, | ||||
| 		PublicMessage: "Internal error", | ||||
| @@ -55,11 +55,11 @@ func newUnexpectedError(msg string, skip int) *imgproxyError { | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| func wrapError(err error, skip int) *imgproxyError { | ||||
| 	if ierr, ok := err.(*imgproxyError); ok { | ||||
| func Wrap(err error, skip int) *Error { | ||||
| 	if ierr, ok := err.(*Error); ok { | ||||
| 		return ierr | ||||
| 	} | ||||
| 	return newUnexpectedError(err.Error(), skip+1) | ||||
| 	return NewUnexpected(err.Error(), skip+1) | ||||
| } | ||||
| 
 | ||||
| func callers(skip int) []uintptr { | ||||
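In the renamed package, New builds an error that carries an HTTP status plus a separate public message, while Wrap passes existing *ierrors.Error values through and upgrades anything else to an unexpected 500 with a captured stack trace. A short sketch assuming a caller outside the package (paths, codes, and messages are illustrative):

```go
package main

import (
	"os"

	"github.com/imgproxy/imgproxy/v2/ierrors"
)

func checkSource(path string) error {
	fi, err := os.Stat(path)
	if err != nil {
		// Wrap keeps an existing *ierrors.Error as-is; anything else becomes
		// an unexpected 500 with a captured stack trace.
		return ierrors.Wrap(err, 1)
	}

	if fi.Size() == 0 {
		// New carries an HTTP status plus a separate public-facing message.
		return ierrors.New(422, "Source image file is empty", "Invalid source image")
	}

	return nil
}

func main() {
	if err := checkSource("missing.png"); err != nil {
		println(err.Error())
	}
}
```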
| @@ -1,97 +0,0 @@ | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"bytes" | ||||
| 	"context" | ||||
| 	"encoding/base64" | ||||
| 	"fmt" | ||||
| 	"os" | ||||
| ) | ||||
|  | ||||
| type imageData struct { | ||||
| 	Data    []byte | ||||
| 	Type    imageType | ||||
| 	Headers map[string]string | ||||
|  | ||||
| 	cancel context.CancelFunc | ||||
| } | ||||
|  | ||||
| func (d *imageData) Close() { | ||||
| 	if d.cancel != nil { | ||||
| 		d.cancel() | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func getWatermarkData() (*imageData, error) { | ||||
| 	if len(conf.WatermarkData) > 0 { | ||||
| 		return base64ImageData(conf.WatermarkData, "watermark") | ||||
| 	} | ||||
|  | ||||
| 	if len(conf.WatermarkPath) > 0 { | ||||
| 		return fileImageData(conf.WatermarkPath, "watermark") | ||||
| 	} | ||||
|  | ||||
| 	if len(conf.WatermarkURL) > 0 { | ||||
| 		return remoteImageData(conf.WatermarkURL, "watermark") | ||||
| 	} | ||||
|  | ||||
| 	return nil, nil | ||||
| } | ||||
|  | ||||
| func getFallbackImageData() (*imageData, error) { | ||||
| 	if len(conf.FallbackImageData) > 0 { | ||||
| 		return base64ImageData(conf.FallbackImageData, "fallback image") | ||||
| 	} | ||||
|  | ||||
| 	if len(conf.FallbackImagePath) > 0 { | ||||
| 		return fileImageData(conf.FallbackImagePath, "fallback image") | ||||
| 	} | ||||
|  | ||||
| 	if len(conf.FallbackImageURL) > 0 { | ||||
| 		return remoteImageData(conf.FallbackImageURL, "fallback image") | ||||
| 	} | ||||
|  | ||||
| 	return nil, nil | ||||
| } | ||||
|  | ||||
| func base64ImageData(encoded, desc string) (*imageData, error) { | ||||
| 	data, err := base64.StdEncoding.DecodeString(encoded) | ||||
| 	if err != nil { | ||||
| 		return nil, fmt.Errorf("Can't decode %s data: %s", desc, err) | ||||
| 	} | ||||
|  | ||||
| 	imgtype, err := checkTypeAndDimensions(bytes.NewReader(data)) | ||||
| 	if err != nil { | ||||
| 		return nil, fmt.Errorf("Can't decode %s: %s", desc, err) | ||||
| 	} | ||||
|  | ||||
| 	return &imageData{Data: data, Type: imgtype}, nil | ||||
| } | ||||
|  | ||||
| func fileImageData(path, desc string) (*imageData, error) { | ||||
| 	f, err := os.Open(path) | ||||
| 	if err != nil { | ||||
| 		return nil, fmt.Errorf("Can't read %s: %s", desc, err) | ||||
| 	} | ||||
|  | ||||
| 	fi, err := f.Stat() | ||||
| 	if err != nil { | ||||
| 		return nil, fmt.Errorf("Can't read %s: %s", desc, err) | ||||
| 	} | ||||
|  | ||||
| 	imgdata, err := readAndCheckImage(f, int(fi.Size())) | ||||
| 	if err != nil { | ||||
| 		return nil, fmt.Errorf("Can't read %s: %s", desc, err) | ||||
| 	} | ||||
|  | ||||
| 	return imgdata, err | ||||
| } | ||||
|  | ||||
| func remoteImageData(imageURL, desc string) (*imageData, error) { | ||||
| 	imgdata, err := downloadImage(imageURL) | ||||
| 	if err != nil { | ||||
| 		return nil, fmt.Errorf("Can't download %s: %s", desc, err) | ||||
| 	} | ||||
|  | ||||
| 	return imgdata, nil | ||||
| } | ||||
							
								
								
									
  133  image_type.go
| @@ -1,133 +0,0 @@ | ||||
| package main | ||||
|  | ||||
| /* | ||||
| #cgo LDFLAGS: -s -w | ||||
| #include "vips.h" | ||||
| */ | ||||
| import "C" | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"net/url" | ||||
| 	"path/filepath" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| type imageType int | ||||
|  | ||||
| const ( | ||||
| 	imageTypeUnknown = imageType(C.UNKNOWN) | ||||
| 	imageTypeJPEG    = imageType(C.JPEG) | ||||
| 	imageTypePNG     = imageType(C.PNG) | ||||
| 	imageTypeWEBP    = imageType(C.WEBP) | ||||
| 	imageTypeGIF     = imageType(C.GIF) | ||||
| 	imageTypeICO     = imageType(C.ICO) | ||||
| 	imageTypeSVG     = imageType(C.SVG) | ||||
| 	imageTypeHEIC    = imageType(C.HEIC) | ||||
| 	imageTypeAVIF    = imageType(C.AVIF) | ||||
| 	imageTypeBMP     = imageType(C.BMP) | ||||
| 	imageTypeTIFF    = imageType(C.TIFF) | ||||
|  | ||||
| 	contentDispositionFilenameFallback = "image" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	imageTypes = map[string]imageType{ | ||||
| 		"jpeg": imageTypeJPEG, | ||||
| 		"jpg":  imageTypeJPEG, | ||||
| 		"png":  imageTypePNG, | ||||
| 		"webp": imageTypeWEBP, | ||||
| 		"gif":  imageTypeGIF, | ||||
| 		"ico":  imageTypeICO, | ||||
| 		"svg":  imageTypeSVG, | ||||
| 		"heic": imageTypeHEIC, | ||||
| 		"avif": imageTypeAVIF, | ||||
| 		"bmp":  imageTypeBMP, | ||||
| 		"tiff": imageTypeTIFF, | ||||
| 	} | ||||
|  | ||||
| 	mimes = map[imageType]string{ | ||||
| 		imageTypeJPEG: "image/jpeg", | ||||
| 		imageTypePNG:  "image/png", | ||||
| 		imageTypeWEBP: "image/webp", | ||||
| 		imageTypeGIF:  "image/gif", | ||||
| 		imageTypeICO:  "image/x-icon", | ||||
| 		imageTypeSVG:  "image/svg+xml", | ||||
| 		imageTypeHEIC: "image/heif", | ||||
| 		imageTypeAVIF: "image/avif", | ||||
| 		imageTypeBMP:  "image/bmp", | ||||
| 		imageTypeTIFF: "image/tiff", | ||||
| 	} | ||||
|  | ||||
| 	contentDispositionsFmt = map[imageType]string{ | ||||
| 		imageTypeJPEG: "inline; filename=\"%s.jpg\"", | ||||
| 		imageTypePNG:  "inline; filename=\"%s.png\"", | ||||
| 		imageTypeWEBP: "inline; filename=\"%s.webp\"", | ||||
| 		imageTypeGIF:  "inline; filename=\"%s.gif\"", | ||||
| 		imageTypeICO:  "inline; filename=\"%s.ico\"", | ||||
| 		imageTypeSVG:  "inline; filename=\"%s.svg\"", | ||||
| 		imageTypeHEIC: "inline; filename=\"%s.heic\"", | ||||
| 		imageTypeAVIF: "inline; filename=\"%s.avif\"", | ||||
| 		imageTypeBMP:  "inline; filename=\"%s.bmp\"", | ||||
| 		imageTypeTIFF: "inline; filename=\"%s.tiff\"", | ||||
| 	} | ||||
| ) | ||||
|  | ||||
| func (it imageType) String() string { | ||||
| 	for k, v := range imageTypes { | ||||
| 		if v == it { | ||||
| 			return k | ||||
| 		} | ||||
| 	} | ||||
| 	return "" | ||||
| } | ||||
|  | ||||
| func (it imageType) MarshalJSON() ([]byte, error) { | ||||
| 	for k, v := range imageTypes { | ||||
| 		if v == it { | ||||
| 			return []byte(fmt.Sprintf("%q", k)), nil | ||||
| 		} | ||||
| 	} | ||||
| 	return []byte("null"), nil | ||||
| } | ||||
|  | ||||
| func (it imageType) Mime() string { | ||||
| 	if mime, ok := mimes[it]; ok { | ||||
| 		return mime | ||||
| 	} | ||||
|  | ||||
| 	return "application/octet-stream" | ||||
| } | ||||
|  | ||||
| func (it imageType) ContentDisposition(filename string) string { | ||||
| 	format, ok := contentDispositionsFmt[it] | ||||
| 	if !ok { | ||||
| 		return "inline" | ||||
| 	} | ||||
|  | ||||
| 	return fmt.Sprintf(format, filename) | ||||
| } | ||||
|  | ||||
| func (it imageType) ContentDispositionFromURL(imageURL string) string { | ||||
| 	url, err := url.Parse(imageURL) | ||||
| 	if err != nil { | ||||
| 		return it.ContentDisposition(contentDispositionFilenameFallback) | ||||
| 	} | ||||
|  | ||||
| 	_, filename := filepath.Split(url.Path) | ||||
| 	if len(filename) == 0 { | ||||
| 		return it.ContentDisposition(contentDispositionFilenameFallback) | ||||
| 	} | ||||
|  | ||||
| 	return it.ContentDisposition(strings.TrimSuffix(filename, filepath.Ext(filename))) | ||||
| } | ||||
|  | ||||
| func (it imageType) SupportsAlpha() bool { | ||||
| 	return it != imageTypeJPEG && it != imageTypeBMP | ||||
| } | ||||
|  | ||||
| func (it imageType) SupportsColourProfile() bool { | ||||
| 	return it == imageTypeJPEG || | ||||
| 		it == imageTypeWEBP || | ||||
| 		it == imageTypeAVIF | ||||
| } | ||||
							
								
								
									
  142  imagedata/download.go  Normal file
| @@ -0,0 +1,142 @@ | ||||
| package imagedata | ||||
|  | ||||
| import ( | ||||
| 	"compress/gzip" | ||||
| 	"crypto/tls" | ||||
| 	"fmt" | ||||
| 	"io/ioutil" | ||||
| 	"net" | ||||
| 	"net/http" | ||||
| 	"time" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/ierrors" | ||||
|  | ||||
| 	azureTransport "github.com/imgproxy/imgproxy/v2/transport/azure" | ||||
| 	fsTransport "github.com/imgproxy/imgproxy/v2/transport/fs" | ||||
| 	gcsTransport "github.com/imgproxy/imgproxy/v2/transport/gcs" | ||||
| 	s3Transport "github.com/imgproxy/imgproxy/v2/transport/s3" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	downloadClient *http.Client | ||||
|  | ||||
| 	imageHeadersToStore = []string{ | ||||
| 		"Cache-Control", | ||||
| 		"Expires", | ||||
| 	} | ||||
| ) | ||||
|  | ||||
| const msgSourceImageIsUnreachable = "Source image is unreachable" | ||||
|  | ||||
| func initDownloading() error { | ||||
| 	transport := &http.Transport{ | ||||
| 		Proxy:               http.ProxyFromEnvironment, | ||||
| 		MaxIdleConns:        config.Concurrency, | ||||
| 		MaxIdleConnsPerHost: config.Concurrency, | ||||
| 		DisableCompression:  true, | ||||
| 		DialContext:         (&net.Dialer{KeepAlive: 600 * time.Second}).DialContext, | ||||
| 	} | ||||
|  | ||||
| 	if config.IgnoreSslVerification { | ||||
| 		transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true} | ||||
| 	} | ||||
|  | ||||
| 	if config.LocalFileSystemRoot != "" { | ||||
| 		transport.RegisterProtocol("local", fsTransport.New()) | ||||
| 	} | ||||
|  | ||||
| 	if config.S3Enabled { | ||||
| 		if t, err := s3Transport.New(); err != nil { | ||||
| 			return err | ||||
| 		} else { | ||||
| 			transport.RegisterProtocol("s3", t) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if config.GCSEnabled { | ||||
| 		if t, err := gcsTransport.New(); err != nil { | ||||
| 			return err | ||||
| 		} else { | ||||
| 			transport.RegisterProtocol("gs", t) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if config.ABSEnabled { | ||||
| 		if t, err := azureTransport.New(); err != nil { | ||||
| 			return err | ||||
| 		} else { | ||||
| 			transport.RegisterProtocol("abs", t) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	downloadClient = &http.Client{ | ||||
| 		Timeout:   time.Duration(config.DownloadTimeout) * time.Second, | ||||
| 		Transport: transport, | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func requestImage(imageURL string) (*http.Response, error) { | ||||
| 	req, err := http.NewRequest("GET", imageURL, nil) | ||||
| 	if err != nil { | ||||
| 		return nil, ierrors.New(404, err.Error(), msgSourceImageIsUnreachable).SetUnexpected(config.ReportDownloadingErrors) | ||||
| 	} | ||||
|  | ||||
| 	req.Header.Set("User-Agent", config.UserAgent) | ||||
|  | ||||
| 	res, err := downloadClient.Do(req) | ||||
| 	if err != nil { | ||||
| 		return res, ierrors.New(404, err.Error(), msgSourceImageIsUnreachable).SetUnexpected(config.ReportDownloadingErrors) | ||||
| 	} | ||||
|  | ||||
| 	if res.StatusCode != 200 { | ||||
| 		body, _ := ioutil.ReadAll(res.Body) | ||||
| 		res.Body.Close() | ||||
|  | ||||
| 		msg := fmt.Sprintf("Can't download image; Status: %d; %s", res.StatusCode, string(body)) | ||||
| 		return res, ierrors.New(404, msg, msgSourceImageIsUnreachable).SetUnexpected(config.ReportDownloadingErrors) | ||||
| 	} | ||||
|  | ||||
| 	return res, nil | ||||
| } | ||||
|  | ||||
| func download(imageURL string) (*ImageData, error) { | ||||
| 	res, err := requestImage(imageURL) | ||||
| 	if res != nil { | ||||
| 		defer res.Body.Close() | ||||
| 	} | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	body := res.Body | ||||
| 	contentLength := int(res.ContentLength) | ||||
|  | ||||
| 	if res.Header.Get("Content-Encoding") == "gzip" { | ||||
| 		gzipBody, errGzip := gzip.NewReader(res.Body) | ||||
| 		if gzipBody != nil { | ||||
| 			defer gzipBody.Close() | ||||
| 		} | ||||
| 		if errGzip != nil { | ||||
| 			return nil, errGzip | ||||
| 		} | ||||
| 		body = gzipBody | ||||
| 		contentLength = 0 | ||||
| 	} | ||||
|  | ||||
| 	imgdata, err := readAndCheckImage(body, contentLength) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	imgdata.Headers = make(map[string]string) | ||||
| 	for _, h := range imageHeadersToStore { | ||||
| 		if val := res.Header.Get(h); len(val) != 0 { | ||||
| 			imgdata.Headers[h] = val | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return imgdata, nil | ||||
| } | ||||
							
								
								
									
  135  imagedata/image_data.go  Normal file
| @@ -0,0 +1,135 @@ | ||||
| package imagedata | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"encoding/base64" | ||||
| 	"fmt" | ||||
| 	"os" | ||||
| 	"strings" | ||||
| 	"sync" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagetype" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	Watermark     *ImageData | ||||
| 	FallbackImage *ImageData | ||||
| ) | ||||
|  | ||||
| type ImageData struct { | ||||
| 	Type    imagetype.Type | ||||
| 	Data    []byte | ||||
| 	Headers map[string]string | ||||
|  | ||||
| 	cancel     context.CancelFunc | ||||
| 	cancelOnce sync.Once | ||||
| } | ||||
|  | ||||
| func (d *ImageData) Close() { | ||||
| 	d.cancelOnce.Do(func() { | ||||
| 		if d.cancel != nil { | ||||
| 			d.cancel() | ||||
| 		} | ||||
| 	}) | ||||
| } | ||||
|  | ||||
| func (d *ImageData) SetCancel(cancel context.CancelFunc) { | ||||
| 	d.cancel = cancel | ||||
| } | ||||
|  | ||||
| func Init() error { | ||||
| 	initRead() | ||||
|  | ||||
| 	if err := initDownloading(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err := loadWatermark(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err := loadFallbackImage(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func loadWatermark() (err error) { | ||||
| 	if len(config.WatermarkData) > 0 { | ||||
| 		Watermark, err = FromBase64(config.WatermarkData, "watermark") | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	if len(config.WatermarkPath) > 0 { | ||||
| 		Watermark, err = FromFile(config.WatermarkPath, "watermark") | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	if len(config.WatermarkURL) > 0 { | ||||
| 		Watermark, err = Download(config.WatermarkURL, "watermark") | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func loadFallbackImage() (err error) { | ||||
| 	if len(config.FallbackImageData) > 0 { | ||||
| 		FallbackImage, err = FromBase64(config.FallbackImageData, "fallback image") | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	if len(config.FallbackImagePath) > 0 { | ||||
| 		FallbackImage, err = FromFile(config.FallbackImagePath, "fallback image") | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	if len(config.FallbackImageURL) > 0 { | ||||
| 		FallbackImage, err = Download(config.FallbackImageURL, "fallback image") | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func FromBase64(encoded, desc string) (*ImageData, error) { | ||||
| 	dec := base64.NewDecoder(base64.StdEncoding, strings.NewReader(encoded)) | ||||
| 	size := 4 * (len(encoded)/3 + 1) | ||||
|  | ||||
| 	imgdata, err := readAndCheckImage(dec, size) | ||||
| 	if err != nil { | ||||
| 		return nil, fmt.Errorf("Can't decode %s: %s", desc, err) | ||||
| 	} | ||||
|  | ||||
| 	return imgdata, nil | ||||
| } | ||||
|  | ||||
| func FromFile(path, desc string) (*ImageData, error) { | ||||
| 	f, err := os.Open(path) | ||||
| 	if err != nil { | ||||
| 		return nil, fmt.Errorf("Can't read %s: %s", desc, err) | ||||
| 	} | ||||
|  | ||||
| 	fi, err := f.Stat() | ||||
| 	if err != nil { | ||||
| 		return nil, fmt.Errorf("Can't read %s: %s", desc, err) | ||||
| 	} | ||||
|  | ||||
| 	imgdata, err := readAndCheckImage(f, int(fi.Size())) | ||||
| 	if err != nil { | ||||
| 		return nil, fmt.Errorf("Can't read %s: %s", desc, err) | ||||
| 	} | ||||
|  | ||||
| 	return imgdata, nil | ||||
| } | ||||
|  | ||||
| func Download(imageURL, desc string) (*ImageData, error) { | ||||
| 	imgdata, err := download(imageURL) | ||||
| 	if err != nil { | ||||
| 		return nil, fmt.Errorf("Can't download %s: %s", desc, err) | ||||
| 	} | ||||
|  | ||||
| 	return imgdata, nil | ||||
| } | ||||
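A hedged sketch of loading image data through the new package outside the request path, e.g. in a test or a warm-up step; it assumes configuration has already been loaded, and the file path is illustrative:

```go
package main

import (
	"log"

	"github.com/imgproxy/imgproxy/v2/imagedata"
)

func main() {
	// Init sets up the download transports and preloads the configured
	// watermark/fallback images; it assumes the config package is already set up.
	if err := imagedata.Init(); err != nil {
		log.Fatal(err)
	}

	imgdata, err := imagedata.FromFile("./testdata/test1.png", "test image")
	if err != nil {
		log.Fatal(err)
	}
	// Close returns the pooled download buffer; the sync.Once guard makes
	// repeated calls harmless.
	defer imgdata.Close()

	log.Printf("loaded %s, %d bytes", imgdata.Type, len(imgdata.Data))
}
```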
							
								
								
									
  86  imagedata/read.go  Normal file
| @@ -0,0 +1,86 @@ | ||||
| package imagedata | ||||
|  | ||||
| import ( | ||||
| 	"io" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/bufpool" | ||||
| 	"github.com/imgproxy/imgproxy/v2/bufreader" | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/ierrors" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagemeta" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagetype" | ||||
| 	"github.com/imgproxy/imgproxy/v2/security" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	ErrSourceFileTooBig            = ierrors.New(422, "Source image file is too big", "Invalid source image") | ||||
| 	ErrSourceImageTypeNotSupported = ierrors.New(422, "Source image type not supported", "Invalid source image") | ||||
| ) | ||||
|  | ||||
| var downloadBufPool *bufpool.Pool | ||||
|  | ||||
| func initRead() { | ||||
| 	downloadBufPool = bufpool.New("download", config.Concurrency, config.DownloadBufferSize) | ||||
|  | ||||
| 	imagemeta.SetMaxSvgCheckRead(config.MaxSvgCheckBytes) | ||||
| } | ||||
|  | ||||
| type hardLimitReader struct { | ||||
| 	r    io.Reader | ||||
| 	left int | ||||
| } | ||||
|  | ||||
| func (lr *hardLimitReader) Read(p []byte) (n int, err error) { | ||||
| 	if lr.left <= 0 { | ||||
| 		return 0, ErrSourceFileTooBig | ||||
| 	} | ||||
| 	if len(p) > lr.left { | ||||
| 		p = p[0:lr.left] | ||||
| 	} | ||||
| 	n, err = lr.r.Read(p) | ||||
| 	lr.left -= n | ||||
| 	return | ||||
| } | ||||
|  | ||||
| func readAndCheckImage(r io.Reader, contentLength int) (*ImageData, error) { | ||||
| 	if config.MaxSrcFileSize > 0 && contentLength > config.MaxSrcFileSize { | ||||
| 		return nil, ErrSourceFileTooBig | ||||
| 	} | ||||
|  | ||||
| 	buf := downloadBufPool.Get(contentLength) | ||||
| 	cancel := func() { downloadBufPool.Put(buf) } | ||||
|  | ||||
| 	if config.MaxSrcFileSize > 0 { | ||||
| 		r = &hardLimitReader{r: r, left: config.MaxSrcFileSize} | ||||
| 	} | ||||
|  | ||||
| 	br := bufreader.New(r, buf) | ||||
|  | ||||
| 	meta, err := imagemeta.DecodeMeta(br) | ||||
| 	if err == imagemeta.ErrFormat { | ||||
| 		return nil, ErrSourceImageTypeNotSupported | ||||
| 	} | ||||
| 	if err != nil { | ||||
| 		return nil, ierrors.Wrap(err, 0) | ||||
| 	} | ||||
|  | ||||
| 	imgtype, imgtypeOk := imagetype.Types[meta.Format()] | ||||
| 	if !imgtypeOk { | ||||
| 		return nil, ErrSourceImageTypeNotSupported | ||||
| 	} | ||||
|  | ||||
| 	if err = security.CheckDimensions(meta.Width(), meta.Height()); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	if err = br.Flush(); err != nil { | ||||
| 		cancel() | ||||
| 		return nil, ierrors.New(404, err.Error(), msgSourceImageIsUnreachable).SetUnexpected(config.ReportDownloadingErrors) | ||||
| 	} | ||||
|  | ||||
| 	return &ImageData{ | ||||
| 		Data:   buf.Bytes(), | ||||
| 		Type:   imgtype, | ||||
| 		cancel: cancel, | ||||
| 	}, nil | ||||
| } | ||||
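hardLimitReader differs from io.LimitReader in that hitting the cap is an error, not a silent EOF, which is what turns an oversized upstream body into ErrSourceFileTooBig. A self-contained sketch of the same idea:

```go
package main

import (
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"strings"
)

var errTooBig = errors.New("source image file is too big")

// cappedReader mirrors hardLimitReader: reads pass through until the byte
// budget is spent, after which it fails loudly instead of reporting EOF.
type cappedReader struct {
	r    io.Reader
	left int
}

func (cr *cappedReader) Read(p []byte) (int, error) {
	if cr.left <= 0 {
		return 0, errTooBig
	}
	if len(p) > cr.left {
		p = p[:cr.left]
	}
	n, err := cr.r.Read(p)
	cr.left -= n
	return n, err
}

func main() {
	r := &cappedReader{r: strings.NewReader("0123456789"), left: 4}
	_, err := ioutil.ReadAll(r)
	fmt.Println(err) // source image file is too big
}
```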
							
								
								
									
  137  imagetype/imagetype.go  Normal file
| @@ -0,0 +1,137 @@ | ||||
| package imagetype | ||||
|  | ||||
| /* | ||||
| #cgo LDFLAGS: -s -w | ||||
| #include "imagetype.h" | ||||
| */ | ||||
| import "C" | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"net/url" | ||||
| 	"path/filepath" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| type Type int | ||||
|  | ||||
| const ( | ||||
| 	Unknown = Type(C.UNKNOWN) | ||||
| 	JPEG    = Type(C.JPEG) | ||||
| 	PNG     = Type(C.PNG) | ||||
| 	WEBP    = Type(C.WEBP) | ||||
| 	GIF     = Type(C.GIF) | ||||
| 	ICO     = Type(C.ICO) | ||||
| 	SVG     = Type(C.SVG) | ||||
| 	HEIC    = Type(C.HEIC) | ||||
| 	AVIF    = Type(C.AVIF) | ||||
| 	BMP     = Type(C.BMP) | ||||
| 	TIFF    = Type(C.TIFF) | ||||
|  | ||||
| 	contentDispositionFilenameFallback = "image" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	Types = map[string]Type{ | ||||
| 		"jpeg": JPEG, | ||||
| 		"jpg":  JPEG, | ||||
| 		"png":  PNG, | ||||
| 		"webp": WEBP, | ||||
| 		"gif":  GIF, | ||||
| 		"ico":  ICO, | ||||
| 		"svg":  SVG, | ||||
| 		"heic": HEIC, | ||||
| 		"avif": AVIF, | ||||
| 		"bmp":  BMP, | ||||
| 		"tiff": TIFF, | ||||
| 	} | ||||
|  | ||||
| 	mimes = map[Type]string{ | ||||
| 		JPEG: "image/jpeg", | ||||
| 		PNG:  "image/png", | ||||
| 		WEBP: "image/webp", | ||||
| 		GIF:  "image/gif", | ||||
| 		ICO:  "image/x-icon", | ||||
| 		SVG:  "image/svg+xml", | ||||
| 		HEIC: "image/heif", | ||||
| 		AVIF: "image/avif", | ||||
| 		BMP:  "image/bmp", | ||||
| 		TIFF: "image/tiff", | ||||
| 	} | ||||
|  | ||||
| 	contentDispositionsFmt = map[Type]string{ | ||||
| 		JPEG: "inline; filename=\"%s.jpg\"", | ||||
| 		PNG:  "inline; filename=\"%s.png\"", | ||||
| 		WEBP: "inline; filename=\"%s.webp\"", | ||||
| 		GIF:  "inline; filename=\"%s.gif\"", | ||||
| 		ICO:  "inline; filename=\"%s.ico\"", | ||||
| 		SVG:  "inline; filename=\"%s.svg\"", | ||||
| 		HEIC: "inline; filename=\"%s.heic\"", | ||||
| 		AVIF: "inline; filename=\"%s.avif\"", | ||||
| 		BMP:  "inline; filename=\"%s.bmp\"", | ||||
| 		TIFF: "inline; filename=\"%s.tiff\"", | ||||
| 	} | ||||
| ) | ||||
|  | ||||
| func (it Type) String() string { | ||||
| 	for k, v := range Types { | ||||
| 		if v == it { | ||||
| 			return k | ||||
| 		} | ||||
| 	} | ||||
| 	return "" | ||||
| } | ||||
|  | ||||
| func (it Type) MarshalJSON() ([]byte, error) { | ||||
| 	for k, v := range Types { | ||||
| 		if v == it { | ||||
| 			return []byte(fmt.Sprintf("%q", k)), nil | ||||
| 		} | ||||
| 	} | ||||
| 	return []byte("null"), nil | ||||
| } | ||||
|  | ||||
| func (it Type) Mime() string { | ||||
| 	if mime, ok := mimes[it]; ok { | ||||
| 		return mime | ||||
| 	} | ||||
|  | ||||
| 	return "application/octet-stream" | ||||
| } | ||||
|  | ||||
| func (it Type) ContentDisposition(filename string) string { | ||||
| 	format, ok := contentDispositionsFmt[it] | ||||
| 	if !ok { | ||||
| 		return "inline" | ||||
| 	} | ||||
|  | ||||
| 	return fmt.Sprintf(format, filename) | ||||
| } | ||||
|  | ||||
| func (it Type) ContentDispositionFromURL(imageURL string) string { | ||||
| 	url, err := url.Parse(imageURL) | ||||
| 	if err != nil { | ||||
| 		return it.ContentDisposition(contentDispositionFilenameFallback) | ||||
| 	} | ||||
|  | ||||
| 	_, filename := filepath.Split(url.Path) | ||||
| 	if len(filename) == 0 { | ||||
| 		return it.ContentDisposition(contentDispositionFilenameFallback) | ||||
| 	} | ||||
|  | ||||
| 	return it.ContentDisposition(strings.TrimSuffix(filename, filepath.Ext(filename))) | ||||
| } | ||||
|  | ||||
| func (it Type) SupportsAlpha() bool { | ||||
| 	return it != JPEG && it != BMP | ||||
| } | ||||
|  | ||||
| func (it Type) SupportsAnimation() bool { | ||||
| 	return it == GIF || it == WEBP | ||||
| } | ||||
|  | ||||
| func (it Type) SupportsColourProfile() bool { | ||||
| 	return it == JPEG || | ||||
| 		it == WEBP || | ||||
| 		it == AVIF | ||||
| } | ||||
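A small sketch of how the exported Type is meant to be consumed when building response headers (the URL is illustrative):

```go
package main

import (
	"fmt"

	"github.com/imgproxy/imgproxy/v2/imagetype"
)

func main() {
	it := imagetype.Types["png"]

	fmt.Println(it.Mime())              // image/png
	fmt.Println(it.SupportsAlpha())     // true
	fmt.Println(it.SupportsAnimation()) // false

	// The file name is derived from the URL path: inline; filename="cat.png"
	fmt.Println(it.ContentDispositionFromURL("https://example.com/pics/cat.png?width=100"))
}
```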
							
								
								
									
  13  imagetype/imagetype.h  Normal file
| @@ -0,0 +1,13 @@ | ||||
| enum ImgproxyImageTypes { | ||||
|   UNKNOWN = 0, | ||||
|   JPEG, | ||||
|   PNG, | ||||
|   WEBP, | ||||
|   GIF, | ||||
|   ICO, | ||||
|   SVG, | ||||
|   HEIC, | ||||
|   AVIF, | ||||
|   BMP, | ||||
|   TIFF | ||||
| }; | ||||
							
								
								
									
  40  imath/imath.go  Normal file
| @@ -0,0 +1,40 @@ | ||||
| package imath | ||||
|  | ||||
| import "math" | ||||
|  | ||||
| func Max(a, b int) int { | ||||
| 	if a > b { | ||||
| 		return a | ||||
| 	} | ||||
| 	return b | ||||
| } | ||||
|  | ||||
| func Min(a, b int) int { | ||||
| 	if a < b { | ||||
| 		return a | ||||
| 	} | ||||
| 	return b | ||||
| } | ||||
|  | ||||
| func MinNonZero(a, b int) int { | ||||
| 	switch { | ||||
| 	case a == 0: | ||||
| 		return b | ||||
| 	case b == 0: | ||||
| 		return a | ||||
| 	} | ||||
|  | ||||
| 	return Min(a, b) | ||||
| } | ||||
|  | ||||
| func Round(a float64) int { | ||||
| 	return int(math.Round(a)) | ||||
| } | ||||
|  | ||||
| func Scale(a int, scale float64) int { | ||||
| 	if a == 0 { | ||||
| 		return 0 | ||||
| 	} | ||||
|  | ||||
| 	return Round(float64(a) * scale) | ||||
| } | ||||
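The helpers are small, but the zero handling is worth pinning down: MinNonZero treats zero as "not set" rather than "smallest", and Scale rounds to the nearest integer while leaving zero untouched. For example:

```go
package main

import (
	"fmt"

	"github.com/imgproxy/imgproxy/v2/imath"
)

func main() {
	fmt.Println(imath.Max(3, 7))         // 7
	fmt.Println(imath.MinNonZero(0, 5))  // 5: zero counts as "not set"
	fmt.Println(imath.Scale(100, 0.333)) // 33: rounds to the nearest int
	fmt.Println(imath.Scale(0, 2.5))     // 0: zero dimensions stay zero
}
```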
| @@ -1,15 +0,0 @@ | ||||
| // +build !linux,!darwin !go1.11 | ||||
|  | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"net" | ||||
| ) | ||||
|  | ||||
| func listenReuseport(network, address string) (net.Listener, error) { | ||||
| 	if conf.SoReuseport { | ||||
| 		logWarning("SO_REUSEPORT support is not implemented for your OS or Go version") | ||||
| 	} | ||||
|  | ||||
| 	return net.Listen(network, address) | ||||
| } | ||||
							
								
								
									
  112  log.go
| @@ -1,112 +0,0 @@ | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"net/http" | ||||
|  | ||||
| 	logrus "github.com/sirupsen/logrus" | ||||
| ) | ||||
|  | ||||
| func initLog() error { | ||||
| 	logFormat := "pretty" | ||||
| 	strEnvConfig(&logFormat, "IMGPROXY_LOG_FORMAT") | ||||
|  | ||||
| 	switch logFormat { | ||||
| 	case "structured": | ||||
| 		logrus.SetFormatter(&logStructuredFormatter{}) | ||||
| 	case "json": | ||||
| 		logrus.SetFormatter(&logrus.JSONFormatter{}) | ||||
| 	default: | ||||
| 		logrus.SetFormatter(newLogPrettyFormatter()) | ||||
| 	} | ||||
|  | ||||
| 	logLevel := "info" | ||||
| 	strEnvConfig(&logLevel, "IMGPROXY_LOG_LEVEL") | ||||
|  | ||||
| 	levelLogLevel, err := logrus.ParseLevel(logLevel) | ||||
| 	if err != nil { | ||||
| 		levelLogLevel = logrus.InfoLevel | ||||
| 	} | ||||
|  | ||||
| 	logrus.SetLevel(levelLogLevel) | ||||
|  | ||||
| 	if isSyslogEnabled() { | ||||
| 		slHook, err := newSyslogHook() | ||||
| 		if err != nil { | ||||
| 			return fmt.Errorf("Unable to connect to syslog daemon: %s", err) | ||||
| 		} | ||||
|  | ||||
| 		logrus.AddHook(slHook) | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func logRequest(reqID string, r *http.Request) { | ||||
| 	path := r.RequestURI | ||||
|  | ||||
| 	logrus.WithFields(logrus.Fields{ | ||||
| 		"request_id": reqID, | ||||
| 		"method":     r.Method, | ||||
| 	}).Infof("Started %s", path) | ||||
| } | ||||
|  | ||||
| func logResponse(reqID string, r *http.Request, status int, err *imgproxyError, imageURL *string, po *processingOptions) { | ||||
| 	var level logrus.Level | ||||
|  | ||||
| 	switch { | ||||
| 	case status >= 500: | ||||
| 		level = logrus.ErrorLevel | ||||
| 	case status >= 400: | ||||
| 		level = logrus.WarnLevel | ||||
| 	default: | ||||
| 		level = logrus.InfoLevel | ||||
| 	} | ||||
|  | ||||
| 	fields := logrus.Fields{ | ||||
| 		"request_id": reqID, | ||||
| 		"method":     r.Method, | ||||
| 		"status":     status, | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| 		fields["error"] = err | ||||
|  | ||||
| 		if stack := err.FormatStack(); len(stack) > 0 { | ||||
| 			fields["stack"] = stack | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if imageURL != nil { | ||||
| 		fields["image_url"] = *imageURL | ||||
| 	} | ||||
|  | ||||
| 	if po != nil { | ||||
| 		fields["processing_options"] = po | ||||
| 	} | ||||
|  | ||||
| 	logrus.WithFields(fields).Logf( | ||||
| 		level, | ||||
| 		"Completed in %s %s", getTimerSince(r.Context()), r.RequestURI, | ||||
| 	) | ||||
| } | ||||
|  | ||||
| func logNotice(f string, args ...interface{}) { | ||||
| 	logrus.Infof(f, args...) | ||||
| } | ||||
|  | ||||
| func logWarning(f string, args ...interface{}) { | ||||
| 	logrus.Warnf(f, args...) | ||||
| } | ||||
|  | ||||
| func logError(f string, args ...interface{}) { | ||||
| 	logrus.Errorf(f, args...) | ||||
| } | ||||
|  | ||||
| func logFatal(f string, args ...interface{}) { | ||||
| 	logrus.Fatalf(f, args...) | ||||
| } | ||||
|  | ||||
| func logDebug(f string, args ...interface{}) { | ||||
| 	logrus.Debugf(f, args...) | ||||
| } | ||||
| @@ -1,4 +1,4 @@ | ||||
| package main | ||||
| package logger | ||||
| 
 | ||||
| import ( | ||||
| 	"bytes" | ||||
| @@ -30,12 +30,12 @@ func (p logKeys) Len() int           { return len(p) } | ||||
| func (p logKeys) Less(i, j int) bool { return logKeysPriorities[p[i]] > logKeysPriorities[p[j]] } | ||||
| func (p logKeys) Swap(i, j int)      { p[i], p[j] = p[j], p[i] } | ||||
| 
 | ||||
| type logPrettyFormatter struct { | ||||
| type prettyFormatter struct { | ||||
| 	levelFormat string | ||||
| } | ||||
| 
 | ||||
| func newLogPrettyFormatter() *logPrettyFormatter { | ||||
| 	f := new(logPrettyFormatter) | ||||
| func newPrettyFormatter() *prettyFormatter { | ||||
| 	f := new(prettyFormatter) | ||||
| 
 | ||||
| 	levelLenMax := 0 | ||||
| 	for _, level := range logrus.AllLevels { | ||||
| @@ -50,7 +50,7 @@ func newLogPrettyFormatter() *logPrettyFormatter { | ||||
| 	return f | ||||
| } | ||||
| 
 | ||||
| func (f *logPrettyFormatter) Format(entry *logrus.Entry) ([]byte, error) { | ||||
| func (f *prettyFormatter) Format(entry *logrus.Entry) ([]byte, error) { | ||||
| 	keys := make([]string, 0, len(entry.Data)) | ||||
| 	for k := range entry.Data { | ||||
| 		if k != "stack" { | ||||
| @@ -97,7 +97,7 @@ func (f *logPrettyFormatter) Format(entry *logrus.Entry) ([]byte, error) { | ||||
| 	return b.Bytes(), nil | ||||
| } | ||||
| 
 | ||||
| func (f *logPrettyFormatter) appendValue(b *bytes.Buffer, value interface{}) { | ||||
| func (f *prettyFormatter) appendValue(b *bytes.Buffer, value interface{}) { | ||||
| 	strValue, ok := value.(string) | ||||
| 	if !ok { | ||||
| 		strValue = fmt.Sprint(value) | ||||
| @@ -110,9 +110,9 @@ func (f *logPrettyFormatter) appendValue(b *bytes.Buffer, value interface{}) { | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| type logStructuredFormatter struct{} | ||||
| type structuredFormatter struct{} | ||||
| 
 | ||||
| func (f *logStructuredFormatter) Format(entry *logrus.Entry) ([]byte, error) { | ||||
| func (f *structuredFormatter) Format(entry *logrus.Entry) ([]byte, error) { | ||||
| 	keys := make([]string, 0, len(entry.Data)) | ||||
| 	for k := range entry.Data { | ||||
| 		keys = append(keys, k) | ||||
| @@ -141,7 +141,7 @@ func (f *logStructuredFormatter) Format(entry *logrus.Entry) ([]byte, error) { | ||||
| 	return b.Bytes(), nil | ||||
| } | ||||
| 
 | ||||
| func (f *logStructuredFormatter) appendKeyValue(b *bytes.Buffer, key string, value interface{}) { | ||||
| func (f *structuredFormatter) appendKeyValue(b *bytes.Buffer, key string, value interface{}) { | ||||
| 	if b.Len() != 0 { | ||||
| 		b.WriteByte(' ') | ||||
| 	} | ||||
							
								
								
									
logger/log.go (new file, 48 lines)
							| @@ -0,0 +1,48 @@ | ||||
| package logger | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"log" | ||||
| 	"os" | ||||
|  | ||||
| 	logrus "github.com/sirupsen/logrus" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config/configurators" | ||||
| ) | ||||
|  | ||||
| func Init() error { | ||||
| 	log.SetOutput(os.Stdout) | ||||
|  | ||||
| 	logFormat := "pretty" | ||||
| 	logLevel := "info" | ||||
|  | ||||
| 	configurators.String(&logFormat, "IMGPROXY_LOG_FORMAT") | ||||
| 	configurators.String(&logLevel, "IMGPROXY_LOG_LEVEL") | ||||
|  | ||||
| 	switch logFormat { | ||||
| 	case "structured": | ||||
| 		logrus.SetFormatter(&structuredFormatter{}) | ||||
| 	case "json": | ||||
| 		logrus.SetFormatter(&logrus.JSONFormatter{}) | ||||
| 	default: | ||||
| 		logrus.SetFormatter(newPrettyFormatter()) | ||||
| 	} | ||||
|  | ||||
| 	levelLogLevel, err := logrus.ParseLevel(logLevel) | ||||
| 	if err != nil { | ||||
| 		levelLogLevel = logrus.InfoLevel | ||||
| 	} | ||||
|  | ||||
| 	logrus.SetLevel(levelLogLevel) | ||||
|  | ||||
| 	if isSyslogEnabled() { | ||||
| 		slHook, err := newSyslogHook() | ||||
| 		if err != nil { | ||||
| 			return fmt.Errorf("Unable to connect to syslog daemon: %s", err) | ||||
| 		} | ||||
|  | ||||
| 		logrus.AddHook(slHook) | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
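
For illustration, a minimal sketch (not part of this diff) of how the new logger package is driven purely by environment variables; the values below are examples, not defaults mandated by the commit:

package main

import (
	"os"

	"github.com/imgproxy/imgproxy/v2/logger"
)

func main() {
	// Example settings; logger.Init falls back to the "pretty" format and
	// the "info" level when these variables are unset or invalid.
	os.Setenv("IMGPROXY_LOG_FORMAT", "json")
	os.Setenv("IMGPROXY_LOG_LEVEL", "debug")

	if err := logger.Init(); err != nil {
		// Init only fails when syslog is enabled but the daemon is unreachable.
		panic(err)
	}
}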
| @@ -1,10 +1,11 @@ | ||||
| package main | ||||
| package logger | ||||
| 
 | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"log/syslog" | ||||
| 	"os" | ||||
| 
 | ||||
| 	"github.com/imgproxy/imgproxy/v2/config/configurators" | ||||
| 	"github.com/sirupsen/logrus" | ||||
| ) | ||||
| 
 | ||||
| @@ -24,7 +25,7 @@ type syslogHook struct { | ||||
| } | ||||
| 
 | ||||
| func isSyslogEnabled() (enabled bool) { | ||||
| 	boolEnvConfig(&enabled, "IMGPROXY_SYSLOG_ENABLE") | ||||
| 	configurators.Bool(&enabled, "IMGPROXY_SYSLOG_ENABLE") | ||||
| 	return | ||||
| } | ||||
| 
 | ||||
| @@ -37,16 +38,16 @@ func newSyslogHook() (*syslogHook, error) { | ||||
| 		levelStr = "notice" | ||||
| 	) | ||||
| 
 | ||||
| 	strEnvConfig(&network, "IMGPROXY_SYSLOG_NETWORK") | ||||
| 	strEnvConfig(&addr, "IMGPROXY_SYSLOG_ADDRESS") | ||||
| 	strEnvConfig(&tag, "IMGPROXY_SYSLOG_TAG") | ||||
| 	strEnvConfig(&levelStr, "IMGPROXY_SYSLOG_LEVEL") | ||||
| 	configurators.String(&network, "IMGPROXY_SYSLOG_NETWORK") | ||||
| 	configurators.String(&addr, "IMGPROXY_SYSLOG_ADDRESS") | ||||
| 	configurators.String(&tag, "IMGPROXY_SYSLOG_TAG") | ||||
| 	configurators.String(&levelStr, "IMGPROXY_SYSLOG_LEVEL") | ||||
| 
 | ||||
| 	if l, ok := syslogLevels[levelStr]; ok { | ||||
| 		level = l | ||||
| 	} else { | ||||
| 		level = logrus.InfoLevel | ||||
| 		logWarning("Syslog level '%s' is invalid, 'info' is used", levelStr) | ||||
| 		logrus.Warningf("Syslog level '%s' is invalid, 'info' is used", levelStr) | ||||
| 	} | ||||
| 
 | ||||
| 	w, err := syslog.Dial(network, addr, syslog.LOG_NOTICE, tag) | ||||
| @@ -54,7 +55,7 @@ func newSyslogHook() (*syslogHook, error) { | ||||
| 	return &syslogHook{ | ||||
| 		writer:    w, | ||||
| 		levels:    logrus.AllLevels[:int(level)+1], | ||||
| 		formatter: &logStructuredFormatter{}, | ||||
| 		formatter: &structuredFormatter{}, | ||||
| 	}, err | ||||
| } | ||||
| 
 | ||||
							
								
								
									
main.go (71 lines changed)
							| @@ -3,81 +3,90 @@ package main | ||||
| import ( | ||||
| 	"context" | ||||
| 	"fmt" | ||||
| 	"log" | ||||
| 	"os" | ||||
| 	"os/signal" | ||||
| 	"runtime" | ||||
| 	"syscall" | ||||
| 	"time" | ||||
|  | ||||
| 	log "github.com/sirupsen/logrus" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/errorreport" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/logger" | ||||
| 	"github.com/imgproxy/imgproxy/v2/memory" | ||||
| 	"github.com/imgproxy/imgproxy/v2/metrics" | ||||
| 	"github.com/imgproxy/imgproxy/v2/metrics/prometheus" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/version" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| const version = "2.16.1" | ||||
|  | ||||
| type ctxKey string | ||||
|  | ||||
| func initialize() error { | ||||
| 	log.SetOutput(os.Stdout) | ||||
|  | ||||
| 	if err := initLog(); err != nil { | ||||
| 	if err := logger.Init(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err := configure(); err != nil { | ||||
| 	if err := config.Configure(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err := initNewrelic(); err != nil { | ||||
| 	if err := metrics.Init(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	initDataDog() | ||||
|  | ||||
| 	initPrometheus() | ||||
|  | ||||
| 	if err := initDownloading(); err != nil { | ||||
| 	if err := imagedata.Init(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	initErrorsReporting() | ||||
| 	initProcessingHandler() | ||||
|  | ||||
| 	if err := initVips(); err != nil { | ||||
| 	errorreport.Init() | ||||
|  | ||||
| 	if err := vips.Init(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err := checkPresets(conf.Presets); err != nil { | ||||
| 		shutdownVips() | ||||
| 	if err := options.ParsePresets(config.Presets); err != nil { | ||||
| 		vips.Shutdown() | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err := options.ValidatePresets(); err != nil { | ||||
| 		vips.Shutdown() | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func shutdown() { | ||||
| 	vips.Shutdown() | ||||
| 	metrics.Stop() | ||||
| } | ||||
|  | ||||
| func run() error { | ||||
| 	if err := initialize(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	defer shutdownVips() | ||||
|  | ||||
| 	defer stopDataDog() | ||||
| 	defer shutdown() | ||||
|  | ||||
| 	go func() { | ||||
| 		var logMemStats = len(os.Getenv("IMGPROXY_LOG_MEM_STATS")) > 0 | ||||
|  | ||||
| 		for range time.Tick(time.Duration(conf.FreeMemoryInterval) * time.Second) { | ||||
| 			freeMemory() | ||||
| 		for range time.Tick(time.Duration(config.FreeMemoryInterval) * time.Second) { | ||||
| 			memory.Free() | ||||
|  | ||||
| 			if logMemStats { | ||||
| 				var m runtime.MemStats | ||||
| 				runtime.ReadMemStats(&m) | ||||
| 				logDebug("MEMORY USAGE: Sys=%d HeapIdle=%d HeapInuse=%d", m.Sys/1024/1024, m.HeapIdle/1024/1024, m.HeapInuse/1024/1024) | ||||
| 				memory.LogStats() | ||||
| 			} | ||||
| 		} | ||||
| 	}() | ||||
|  | ||||
| 	ctx, cancel := context.WithCancel(context.Background()) | ||||
|  | ||||
| 	if err := startPrometheusServer(cancel); err != nil { | ||||
| 	if err := prometheus.StartServer(cancel); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| @@ -104,12 +113,12 @@ func main() { | ||||
| 		case "health": | ||||
| 			os.Exit(healthcheck()) | ||||
| 		case "version": | ||||
| 			fmt.Println(version) | ||||
| 			fmt.Println(version.Version()) | ||||
| 			os.Exit(0) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err := run(); err != nil { | ||||
| 		logFatal(err.Error()) | ||||
| 		log.Fatal(err) | ||||
| 	} | ||||
| } | ||||
|   | ||||
							
								
								
									
main_test.go (deleted, 29 lines)
							| @@ -1,29 +0,0 @@ | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"os" | ||||
| 	"testing" | ||||
|  | ||||
| 	"github.com/stretchr/testify/suite" | ||||
| ) | ||||
|  | ||||
| type MainTestSuite struct { | ||||
| 	suite.Suite | ||||
|  | ||||
| 	oldConf config | ||||
| } | ||||
|  | ||||
| func TestMain(m *testing.M) { | ||||
| 	initialize() | ||||
| 	os.Exit(m.Run()) | ||||
| } | ||||
|  | ||||
| func (s *MainTestSuite) SetupTest() { | ||||
| 	s.oldConf = conf | ||||
| 	// Reset presets | ||||
| 	conf.Presets = make(presets) | ||||
| } | ||||
|  | ||||
| func (s *MainTestSuite) TearDownTest() { | ||||
| 	conf = s.oldConf | ||||
| } | ||||
| @@ -1,9 +1,9 @@ | ||||
| // +build !linux | ||||
| 
 | ||||
| package main | ||||
| package memory | ||||
| 
 | ||||
| import "runtime/debug" | ||||
| 
 | ||||
| func freeMemory() { | ||||
| func Free() { | ||||
| 	debug.FreeOSMemory() | ||||
| } | ||||
| @@ -1,6 +1,6 @@ | ||||
| // +build linux | ||||
| 
 | ||||
| package main | ||||
| package memory | ||||
| 
 | ||||
| /* | ||||
| #include <features.h> | ||||
| @@ -13,7 +13,7 @@ void malloc_trim(size_t pad){} | ||||
| import "C" | ||||
| import "runtime/debug" | ||||
| 
 | ||||
| func freeMemory() { | ||||
| func Free() { | ||||
| 	debug.FreeOSMemory() | ||||
| 
 | ||||
| 	C.malloc_trim(0) | ||||
							
								
								
									
memory/stats.go (new file, 23 lines)
							| @@ -0,0 +1,23 @@ | ||||
| package memory | ||||
|  | ||||
| import ( | ||||
| 	"runtime" | ||||
|  | ||||
| 	log "github.com/sirupsen/logrus" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| func LogStats() { | ||||
| 	var m runtime.MemStats | ||||
| 	runtime.ReadMemStats(&m) | ||||
| 	log.Debugf( | ||||
| 		"GO MEMORY USAGE: Sys=%d HeapIdle=%d HeapInuse=%d", | ||||
| 		m.Sys/1024/1024, m.HeapIdle/1024/1024, m.HeapInuse/1024/1024, | ||||
| 	) | ||||
|  | ||||
| 	log.Debugf( | ||||
| 		"VIPS MEMORY USAGE: Cur=%d Max=%d Allocs=%d", | ||||
| 		int(vips.GetMem())/1024/1024, int(vips.GetMemHighwater())/1024/1024, int(vips.GetAllocs()), | ||||
| 	) | ||||
| } | ||||
							
								
								
									
metrics/datadog/datadog.go (new file, 118 lines)
							| @@ -0,0 +1,118 @@ | ||||
| package datadog | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"errors" | ||||
| 	"net/http" | ||||
| 	"os" | ||||
| 	"time" | ||||
|  | ||||
| 	log "github.com/sirupsen/logrus" | ||||
| 	"gopkg.in/DataDog/dd-trace-go.v1/ddtrace/ext" | ||||
| 	"gopkg.in/DataDog/dd-trace-go.v1/ddtrace/tracer" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/version" | ||||
| ) | ||||
|  | ||||
| type spanCtxKey struct{} | ||||
|  | ||||
| var enabled bool | ||||
|  | ||||
| func Init() { | ||||
| 	if !config.DataDogEnable { | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	name := os.Getenv("DD_SERVICE") | ||||
| 	if len(name) == 0 { | ||||
| 		name = "imgproxy" | ||||
| 	} | ||||
|  | ||||
| 	tracer.Start( | ||||
| 		tracer.WithService(name), | ||||
| 		tracer.WithServiceVersion(version.Version()), | ||||
| 		tracer.WithLogger(dataDogLogger{}), | ||||
| 	) | ||||
|  | ||||
| 	enabled = true | ||||
| } | ||||
|  | ||||
| func Stop() { | ||||
| 	if enabled { | ||||
| 		tracer.Stop() | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func StartRootSpan(ctx context.Context, rw http.ResponseWriter, r *http.Request) (context.Context, context.CancelFunc, http.ResponseWriter) { | ||||
| 	if !enabled { | ||||
| 		return ctx, func() {}, rw | ||||
| 	} | ||||
|  | ||||
| 	span := tracer.StartSpan( | ||||
| 		"request", | ||||
| 		tracer.Measured(), | ||||
| 		tracer.SpanType("web"), | ||||
| 		tracer.Tag(ext.HTTPMethod, r.Method), | ||||
| 		tracer.Tag(ext.HTTPURL, r.RequestURI), | ||||
| 	) | ||||
| 	cancel := func() { span.Finish() } | ||||
| 	newRw := dataDogResponseWriter{rw, span} | ||||
|  | ||||
| 	return context.WithValue(ctx, spanCtxKey{}, span), cancel, newRw | ||||
| } | ||||
|  | ||||
| func StartSpan(ctx context.Context, name string) context.CancelFunc { | ||||
| 	if !enabled { | ||||
| 		return func() {} | ||||
| 	} | ||||
|  | ||||
| 	if rootSpan, ok := ctx.Value(spanCtxKey{}).(tracer.Span); ok { | ||||
| 		span := tracer.StartSpan(name, tracer.Measured(), tracer.ChildOf(rootSpan.Context())) | ||||
| 		return func() { span.Finish() } | ||||
| 	} | ||||
|  | ||||
| 	return func() {} | ||||
| } | ||||
|  | ||||
| func SendError(ctx context.Context, err error) { | ||||
| 	if !enabled { | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	if rootSpan, ok := ctx.Value(spanCtxKey{}).(tracer.Span); ok { | ||||
| 		rootSpan.Finish(tracer.WithError(err)) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func SendTimeout(ctx context.Context, d time.Duration) { | ||||
| 	if !enabled { | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	if rootSpan, ok := ctx.Value(spanCtxKey{}).(tracer.Span); ok { | ||||
| 		rootSpan.SetTag("timeout_duration", d) | ||||
| 		rootSpan.Finish(tracer.WithError(errors.New("Timeout"))) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| type dataDogLogger struct { | ||||
| } | ||||
|  | ||||
| func (l dataDogLogger) Log(msg string) { | ||||
| 	log.Info(msg) | ||||
| } | ||||
|  | ||||
| type dataDogResponseWriter struct { | ||||
| 	rw   http.ResponseWriter | ||||
| 	span tracer.Span | ||||
| } | ||||
|  | ||||
| func (ddrw dataDogResponseWriter) Header() http.Header { | ||||
| 	return ddrw.rw.Header() | ||||
| } | ||||
| func (ddrw dataDogResponseWriter) Write(data []byte) (int, error) { | ||||
| 	return ddrw.rw.Write(data) | ||||
| } | ||||
| func (ddrw dataDogResponseWriter) WriteHeader(statusCode int) { | ||||
| 	ddrw.span.SetTag(ext.HTTPCode, statusCode) | ||||
| 	ddrw.rw.WriteHeader(statusCode) | ||||
| } | ||||
							
								
								
									
metrics/metrics.go (new file, 81 lines)
							| @@ -0,0 +1,81 @@ | ||||
| package metrics | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"net/http" | ||||
| 	"time" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/metrics/datadog" | ||||
| 	"github.com/imgproxy/imgproxy/v2/metrics/newrelic" | ||||
| 	"github.com/imgproxy/imgproxy/v2/metrics/prometheus" | ||||
| ) | ||||
|  | ||||
| func Init() error { | ||||
| 	prometheus.Init() | ||||
|  | ||||
| 	if err := newrelic.Init(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	datadog.Init() | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func Stop() { | ||||
| 	datadog.Stop() | ||||
| } | ||||
|  | ||||
| func StartRequest(ctx context.Context, rw http.ResponseWriter, r *http.Request) (context.Context, context.CancelFunc, http.ResponseWriter) { | ||||
| 	promCancel := prometheus.StartRequest() | ||||
| 	ctx, nrCancel, rw := newrelic.StartTransaction(ctx, rw, r) | ||||
| 	ctx, ddCancel, rw := datadog.StartRootSpan(ctx, rw, r) | ||||
|  | ||||
| 	cancel := func() { | ||||
| 		promCancel() | ||||
| 		nrCancel() | ||||
| 		ddCancel() | ||||
| 	} | ||||
|  | ||||
| 	return ctx, cancel, rw | ||||
| } | ||||
|  | ||||
| func StartDownloadingSegment(ctx context.Context) context.CancelFunc { | ||||
| 	promCancel := prometheus.StartDownloadingSegment() | ||||
| 	nrCancel := newrelic.StartSegment(ctx, "Downloading image") | ||||
| 	ddCancel := datadog.StartSpan(ctx, "downloading_image") | ||||
|  | ||||
| 	cancel := func() { | ||||
| 		promCancel() | ||||
| 		nrCancel() | ||||
| 		ddCancel() | ||||
| 	} | ||||
|  | ||||
| 	return cancel | ||||
| } | ||||
|  | ||||
| func StartProcessingSegment(ctx context.Context) context.CancelFunc { | ||||
| 	promCancel := prometheus.StartProcessingSegment() | ||||
| 	nrCancel := newrelic.StartSegment(ctx, "Processing image") | ||||
| 	ddCancel := datadog.StartSpan(ctx, "processing_image") | ||||
|  | ||||
| 	cancel := func() { | ||||
| 		promCancel() | ||||
| 		nrCancel() | ||||
| 		ddCancel() | ||||
| 	} | ||||
|  | ||||
| 	return cancel | ||||
| } | ||||
|  | ||||
| func SendError(ctx context.Context, errType string, err error) { | ||||
| 	prometheus.IncrementErrorsTotal(errType) | ||||
| 	newrelic.SendError(ctx, err) | ||||
| 	datadog.SendError(ctx, err) | ||||
| } | ||||
|  | ||||
| func SendTimeout(ctx context.Context, d time.Duration) { | ||||
| 	prometheus.IncrementErrorsTotal("timeout") | ||||
| 	newrelic.SendTimeout(ctx, d) | ||||
| 	datadog.SendTimeout(ctx, d) | ||||
| } | ||||
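
A hypothetical handler sketch (not part of this commit) showing how the facade is meant to be threaded through a request: each Start* helper returns a cancel func that records the duration when invoked, and the "processing" error type passed to SendError is only an example label.

package main

import (
	"context"
	"net/http"

	"github.com/imgproxy/imgproxy/v2/metrics"
)

// download and process stand in for the real pipeline stages.
func download(ctx context.Context) error { return nil }
func process(ctx context.Context) error  { return nil }

func handleProcessing(rw http.ResponseWriter, r *http.Request) {
	// Use the context and response writer returned here so child segments
	// and response tags attach to the right transaction/span.
	ctx, finish, rw := metrics.StartRequest(r.Context(), rw, r)
	defer finish()

	stop := metrics.StartDownloadingSegment(ctx)
	err := download(ctx)
	stop()

	if err == nil {
		stop = metrics.StartProcessingSegment(ctx)
		err = process(ctx)
		stop()
	}

	if err != nil {
		metrics.SendError(ctx, "processing", err)
		rw.WriteHeader(http.StatusInternalServerError)
		return
	}

	rw.WriteHeader(http.StatusOK)
}

func main() {
	if err := metrics.Init(); err != nil {
		panic(err)
	}

	http.HandleFunc("/", handleProcessing)
	http.ListenAndServe(":8080", nil)
}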
							
								
								
									
metrics/newrelic/newrelic.go (new file, 96 lines)
							| @@ -0,0 +1,96 @@ | ||||
| package newrelic | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"fmt" | ||||
| 	"net/http" | ||||
| 	"time" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/newrelic/go-agent/v3/newrelic" | ||||
| ) | ||||
|  | ||||
| type transactionCtxKey struct{} | ||||
|  | ||||
| var ( | ||||
| 	enabled = false | ||||
|  | ||||
| 	newRelicApp *newrelic.Application | ||||
| ) | ||||
|  | ||||
| func Init() error { | ||||
| 	if len(config.NewRelicKey) == 0 { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	name := config.NewRelicAppName | ||||
| 	if len(name) == 0 { | ||||
| 		name = "imgproxy" | ||||
| 	} | ||||
|  | ||||
| 	var err error | ||||
|  | ||||
| 	newRelicApp, err = newrelic.NewApplication( | ||||
| 		newrelic.ConfigAppName(name), | ||||
| 		newrelic.ConfigLicense(config.NewRelicKey), | ||||
| 	) | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return fmt.Errorf("Can't init New Relic agent: %s", err) | ||||
| 	} | ||||
|  | ||||
| 	enabled = true | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func StartTransaction(ctx context.Context, rw http.ResponseWriter, r *http.Request) (context.Context, context.CancelFunc, http.ResponseWriter) { | ||||
| 	if !enabled { | ||||
| 		return ctx, func() {}, rw | ||||
| 	} | ||||
|  | ||||
| 	txn := newRelicApp.StartTransaction("request") | ||||
| 	txn.SetWebRequestHTTP(r) | ||||
| 	newRw := txn.SetWebResponse(rw) | ||||
| 	cancel := func() { txn.End() } | ||||
| 	return context.WithValue(ctx, transactionCtxKey{}, txn), cancel, newRw | ||||
| } | ||||
|  | ||||
| func StartSegment(ctx context.Context, name string) context.CancelFunc { | ||||
| 	if !enabled { | ||||
| 		return func() {} | ||||
| 	} | ||||
|  | ||||
| 	if txn, ok := ctx.Value(transactionCtxKey{}).(*newrelic.Transaction); ok { | ||||
| 		segment := txn.StartSegment(name) | ||||
| 		return func() { segment.End() } | ||||
| 	} | ||||
|  | ||||
| 	return func() {} | ||||
| } | ||||
|  | ||||
| func SendError(ctx context.Context, err error) { | ||||
| 	if !enabled { | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	if txn, ok := ctx.Value(transactionCtxKey{}).(*newrelic.Transaction); ok { | ||||
| 		txn.NoticeError(err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func SendTimeout(ctx context.Context, d time.Duration) { | ||||
| 	if !enabled { | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	if txn, ok := ctx.Value(transactionCtxKey{}).(*newrelic.Transaction); ok { | ||||
| 		txn.NoticeError(newrelic.Error{ | ||||
| 			Message: "Timeout", | ||||
| 			Class:   "Timeout", | ||||
| 			Attributes: map[string]interface{}{ | ||||
| 				"time": d.Seconds(), | ||||
| 			}, | ||||
| 		}) | ||||
| 	} | ||||
| } | ||||
							
								
								
									
metrics/prometheus/prometheus.go (new file, 181 lines)
							| @@ -0,0 +1,181 @@ | ||||
| package prometheus | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"fmt" | ||||
| 	"net/http" | ||||
| 	"time" | ||||
|  | ||||
| 	"github.com/prometheus/client_golang/prometheus" | ||||
| 	"github.com/prometheus/client_golang/prometheus/promhttp" | ||||
| 	log "github.com/sirupsen/logrus" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/reuseport" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	enabled = false | ||||
|  | ||||
| 	requestsTotal      prometheus.Counter | ||||
| 	errorsTotal        *prometheus.CounterVec | ||||
| 	requestDuration    prometheus.Histogram | ||||
| 	downloadDuration   prometheus.Histogram | ||||
| 	processingDuration prometheus.Histogram | ||||
| 	bufferSize         *prometheus.HistogramVec | ||||
| 	bufferDefaultSize  *prometheus.GaugeVec | ||||
| 	bufferMaxSize      *prometheus.GaugeVec | ||||
| ) | ||||
|  | ||||
| func Init() { | ||||
| 	if len(config.PrometheusBind) == 0 { | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	requestsTotal = prometheus.NewCounter(prometheus.CounterOpts{ | ||||
| 		Namespace: config.PrometheusNamespace, | ||||
| 		Name:      "requests_total", | ||||
| 		Help:      "A counter of the total number of HTTP requests imgproxy processed.", | ||||
| 	}) | ||||
|  | ||||
| 	errorsTotal = prometheus.NewCounterVec(prometheus.CounterOpts{ | ||||
| 		Namespace: config.PrometheusNamespace, | ||||
| 		Name:      "errors_total", | ||||
| 		Help:      "A counter of the occurred errors separated by type.", | ||||
| 	}, []string{"type"}) | ||||
|  | ||||
| 	requestDuration = prometheus.NewHistogram(prometheus.HistogramOpts{ | ||||
| 		Namespace: config.PrometheusNamespace, | ||||
| 		Name:      "request_duration_seconds", | ||||
| 		Help:      "A histogram of the response latency.", | ||||
| 	}) | ||||
|  | ||||
| 	downloadDuration = prometheus.NewHistogram(prometheus.HistogramOpts{ | ||||
| 		Namespace: config.PrometheusNamespace, | ||||
| 		Name:      "download_duration_seconds", | ||||
| 		Help:      "A histogram of the source image downloading latency.", | ||||
| 	}) | ||||
|  | ||||
| 	processingDuration = prometheus.NewHistogram(prometheus.HistogramOpts{ | ||||
| 		Namespace: config.PrometheusNamespace, | ||||
| 		Name:      "processing_duration_seconds", | ||||
| 		Help:      "A histogram of the image processing latency.", | ||||
| 	}) | ||||
|  | ||||
| 	bufferSize = prometheus.NewHistogramVec(prometheus.HistogramOpts{ | ||||
| 		Namespace: config.PrometheusNamespace, | ||||
| 		Name:      "buffer_size_bytes", | ||||
| 		Help:      "A histogram of the buffer size in bytes.", | ||||
| 		Buckets:   prometheus.ExponentialBuckets(1024, 2, 14), | ||||
| 	}, []string{"type"}) | ||||
|  | ||||
| 	bufferDefaultSize = prometheus.NewGaugeVec(prometheus.GaugeOpts{ | ||||
| 		Namespace: config.PrometheusNamespace, | ||||
| 		Name:      "buffer_default_size_bytes", | ||||
| 		Help:      "A gauge of the buffer default size in bytes.", | ||||
| 	}, []string{"type"}) | ||||
|  | ||||
| 	bufferMaxSize = prometheus.NewGaugeVec(prometheus.GaugeOpts{ | ||||
| 		Namespace: config.PrometheusNamespace, | ||||
| 		Name:      "buffer_max_size_bytes", | ||||
| 		Help:      "A gauge of the buffer max size in bytes.", | ||||
| 	}, []string{"type"}) | ||||
|  | ||||
| 	prometheus.MustRegister( | ||||
| 		requestsTotal, | ||||
| 		errorsTotal, | ||||
| 		requestDuration, | ||||
| 		downloadDuration, | ||||
| 		processingDuration, | ||||
| 		bufferSize, | ||||
| 		bufferDefaultSize, | ||||
| 		bufferMaxSize, | ||||
| 	) | ||||
|  | ||||
| 	enabled = true | ||||
| } | ||||
|  | ||||
| func StartServer(cancel context.CancelFunc) error { | ||||
| 	if !enabled { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	s := http.Server{Handler: promhttp.Handler()} | ||||
|  | ||||
| 	l, err := reuseport.Listen("tcp", config.PrometheusBind) | ||||
| 	if err != nil { | ||||
| 		return fmt.Errorf("Can't start Prometheus metrics server: %s", err) | ||||
| 	} | ||||
|  | ||||
| 	go func() { | ||||
| 		log.Infof("Starting Prometheus server at %s", config.PrometheusBind) | ||||
| 		if err := s.Serve(l); err != nil && err != http.ErrServerClosed { | ||||
| 			log.Error(err) | ||||
| 		} | ||||
| 		cancel() | ||||
| 	}() | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func StartRequest() context.CancelFunc { | ||||
| 	return startDuration(requestDuration) | ||||
| } | ||||
|  | ||||
| func StartDownloadingSegment() context.CancelFunc { | ||||
| 	return startDuration(downloadDuration) | ||||
| } | ||||
|  | ||||
| func StartProcessingSegment() context.CancelFunc { | ||||
| 	return startDuration(processingDuration) | ||||
| } | ||||
|  | ||||
| func startDuration(m prometheus.Histogram) context.CancelFunc { | ||||
| 	if !enabled { | ||||
| 		return func() {} | ||||
| 	} | ||||
|  | ||||
| 	t := time.Now() | ||||
| 	return func() { | ||||
| 		m.Observe(time.Since(t).Seconds()) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func IncrementErrorsTotal(t string) { | ||||
| 	if enabled { | ||||
| 		errorsTotal.With(prometheus.Labels{"type": t}).Inc() | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func IncrementRequestsTotal() { | ||||
| 	if enabled { | ||||
| 		requestsTotal.Inc() | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func ObserveBufferSize(t string, size int) { | ||||
| 	if enabled { | ||||
| 		bufferSize.With(prometheus.Labels{"type": t}).Observe(float64(size)) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func SetBufferDefaultSize(t string, size int) { | ||||
| 	if enabled { | ||||
| 		bufferDefaultSize.With(prometheus.Labels{"type": t}).Set(float64(size)) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func SetBufferMaxSize(t string, size int) { | ||||
| 	if enabled { | ||||
| 		bufferMaxSize.With(prometheus.Labels{"type": t}).Set(float64(size)) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func AddGaugeFunc(name, help string, f func() float64) { | ||||
| 	gauge := prometheus.NewGaugeFunc(prometheus.GaugeOpts{ | ||||
| 		Namespace: config.PrometheusNamespace, | ||||
| 		Name:      name, | ||||
| 		Help:      help, | ||||
| 	}, f) | ||||
| 	prometheus.MustRegister(gauge) | ||||
| } | ||||
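
An illustrative sketch of how other packages can feed these helpers; the gauge name and the "download" buffer type below are made-up labels, and the buffer helpers are no-ops unless Init has registered the collectors:

package main

import (
	"runtime"

	"github.com/imgproxy/imgproxy/v2/metrics/prometheus"
)

func registerExampleMetrics() {
	// Hypothetical gauge, registered under the configured namespace.
	prometheus.AddGaugeFunc(
		"goroutines_total",
		"A gauge of the number of running goroutines.",
		func() float64 { return float64(runtime.NumGoroutine()) },
	)

	// Buffer pools report their sizes with a "type" label.
	prometheus.SetBufferDefaultSize("download", 4096)
	prometheus.SetBufferMaxSize("download", 1<<20)
	prometheus.ObserveBufferSize("download", 65536)
}

func main() {
	prometheus.Init()
	registerExampleMetrics()
}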
							
								
								
									
newrelic.go (deleted, 88 lines)
							| @@ -1,88 +0,0 @@ | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"fmt" | ||||
| 	"net/http" | ||||
| 	"time" | ||||
|  | ||||
| 	"github.com/newrelic/go-agent/v3/newrelic" | ||||
| ) | ||||
|  | ||||
| const ( | ||||
| 	newRelicTransactionCtxKey = ctxKey("newRelicTransaction") | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	newRelicEnabled = false | ||||
|  | ||||
| 	newRelicApp *newrelic.Application | ||||
| ) | ||||
|  | ||||
| func initNewrelic() error { | ||||
| 	if len(conf.NewRelicKey) == 0 { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	name := conf.NewRelicAppName | ||||
| 	if len(name) == 0 { | ||||
| 		name = "imgproxy" | ||||
| 	} | ||||
|  | ||||
| 	var err error | ||||
|  | ||||
| 	newRelicApp, err = newrelic.NewApplication( | ||||
| 		newrelic.ConfigAppName(name), | ||||
| 		newrelic.ConfigLicense(conf.NewRelicKey), | ||||
| 	) | ||||
|  | ||||
| 	if err != nil { | ||||
| 		return fmt.Errorf("Can't init New Relic agent: %s", err) | ||||
| 	} | ||||
|  | ||||
| 	newRelicEnabled = true | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func startNewRelicTransaction(ctx context.Context, rw http.ResponseWriter, r *http.Request) (context.Context, context.CancelFunc, http.ResponseWriter) { | ||||
| 	if !newRelicEnabled { | ||||
| 		return ctx, func() {}, rw | ||||
| 	} | ||||
|  | ||||
| 	txn := newRelicApp.StartTransaction("request") | ||||
| 	txn.SetWebRequestHTTP(r) | ||||
| 	newRw := txn.SetWebResponse(rw) | ||||
| 	cancel := func() { txn.End() } | ||||
| 	return context.WithValue(ctx, newRelicTransactionCtxKey, txn), cancel, newRw | ||||
| } | ||||
|  | ||||
| func startNewRelicSegment(ctx context.Context, name string) context.CancelFunc { | ||||
| 	if !newRelicEnabled { | ||||
| 		return func() {} | ||||
| 	} | ||||
|  | ||||
| 	txn := ctx.Value(newRelicTransactionCtxKey).(*newrelic.Transaction) | ||||
| 	segment := txn.StartSegment(name) | ||||
| 	return func() { segment.End() } | ||||
| } | ||||
|  | ||||
| func sendErrorToNewRelic(ctx context.Context, err error) { | ||||
| 	if newRelicEnabled { | ||||
| 		txn := ctx.Value(newRelicTransactionCtxKey).(*newrelic.Transaction) | ||||
| 		txn.NoticeError(err) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func sendTimeoutToNewRelic(ctx context.Context, d time.Duration) { | ||||
| 	if newRelicEnabled { | ||||
| 		txn := ctx.Value(newRelicTransactionCtxKey).(*newrelic.Transaction) | ||||
| 		txn.NoticeError(newrelic.Error{ | ||||
| 			Message: "Timeout", | ||||
| 			Class:   "Timeout", | ||||
| 			Attributes: map[string]interface{}{ | ||||
| 				"time": d.Seconds(), | ||||
| 			}, | ||||
| 		}) | ||||
| 	} | ||||
| } | ||||
							
								
								
									
options/gravity_type.go (new file, 52 lines)
							| @@ -0,0 +1,52 @@ | ||||
| package options | ||||
|  | ||||
| import "fmt" | ||||
|  | ||||
| type GravityType int | ||||
|  | ||||
| const ( | ||||
| 	GravityUnknown GravityType = iota | ||||
| 	GravityCenter | ||||
| 	GravityNorth | ||||
| 	GravityEast | ||||
| 	GravitySouth | ||||
| 	GravityWest | ||||
| 	GravityNorthWest | ||||
| 	GravityNorthEast | ||||
| 	GravitySouthWest | ||||
| 	GravitySouthEast | ||||
| 	GravitySmart | ||||
| 	GravityFocusPoint | ||||
| ) | ||||
|  | ||||
| var gravityTypes = map[string]GravityType{ | ||||
| 	"ce":   GravityCenter, | ||||
| 	"no":   GravityNorth, | ||||
| 	"ea":   GravityEast, | ||||
| 	"so":   GravitySouth, | ||||
| 	"we":   GravityWest, | ||||
| 	"nowe": GravityNorthWest, | ||||
| 	"noea": GravityNorthEast, | ||||
| 	"sowe": GravitySouthWest, | ||||
| 	"soea": GravitySouthEast, | ||||
| 	"sm":   GravitySmart, | ||||
| 	"fp":   GravityFocusPoint, | ||||
| } | ||||
|  | ||||
| func (gt GravityType) String() string { | ||||
| 	for k, v := range gravityTypes { | ||||
| 		if v == gt { | ||||
| 			return k | ||||
| 		} | ||||
| 	} | ||||
| 	return "" | ||||
| } | ||||
|  | ||||
| func (gt GravityType) MarshalJSON() ([]byte, error) { | ||||
| 	for k, v := range gravityTypes { | ||||
| 		if v == gt { | ||||
| 			return []byte(fmt.Sprintf("%q", k)), nil | ||||
| 		} | ||||
| 	} | ||||
| 	return []byte("null"), nil | ||||
| } | ||||
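
A quick sketch of the expected behaviour of the two-letter gravity codes; the printed values follow directly from the map above:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/imgproxy/imgproxy/v2/options"
)

func main() {
	gt := options.GravitySouthEast

	fmt.Println(gt.String()) // soea

	b, _ := json.Marshal(gt)
	fmt.Println(string(b)) // "soea"

	b, _ = json.Marshal(options.GravityUnknown)
	fmt.Println(string(b)) // null
}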
| @@ -1,13 +1,23 @@ | ||||
| package main | ||||
| package options | ||||
| 
 | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"strings" | ||||
| ) | ||||
| 
 | ||||
| type presets map[string]urlOptions | ||||
| var presets map[string]urlOptions | ||||
| 
 | ||||
| func parsePreset(p presets, presetStr string) error { | ||||
| func ParsePresets(presetStrs []string) error { | ||||
| 	for _, presetStr := range presetStrs { | ||||
| 		if err := parsePreset(presetStr); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func parsePreset(presetStr string) error { | ||||
| 	presetStr = strings.Trim(presetStr, " ") | ||||
| 
 | ||||
| 	if len(presetStr) == 0 || strings.HasPrefix(presetStr, "#") { | ||||
| @@ -38,16 +48,19 @@ func parsePreset(p presets, presetStr string) error { | ||||
| 		return fmt.Errorf("Invalid preset value: %s", presetStr) | ||||
| 	} | ||||
| 
 | ||||
| 	p[name] = opts | ||||
| 	if presets == nil { | ||||
| 		presets = make(map[string]urlOptions) | ||||
| 	} | ||||
| 	presets[name] = opts | ||||
| 
 | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func checkPresets(p presets) error { | ||||
| 	var po processingOptions | ||||
| func ValidatePresets() error { | ||||
| 	var po ProcessingOptions | ||||
| 
 | ||||
| 	for name, opts := range p { | ||||
| 		if err := applyProcessingOptions(&po, opts); err != nil { | ||||
| 	for name, opts := range presets { | ||||
| 		if err := applyURLOptions(&po, opts); err != nil { | ||||
| 			return fmt.Errorf("Error in preset `%s`: %s", name, err) | ||||
| 		} | ||||
| 	} | ||||
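
A small sketch of how the package-level preset registry is now expected to be filled and checked at startup; the preset strings are examples in the same "name=options" form the parser accepts:

package main

import (
	log "github.com/sirupsen/logrus"

	"github.com/imgproxy/imgproxy/v2/options"
)

func main() {
	// Example presets in the "name=option:args/option:args" form parsed above.
	presetStrs := []string{
		"default=resizing_type:fit",
		"thumb=resize:fill:100:100/sharpen:2",
	}

	if err := options.ParsePresets(presetStrs); err != nil {
		log.Fatal(err)
	}

	// Applies every parsed preset to an empty ProcessingOptions to catch
	// invalid option values early.
	if err := options.ValidatePresets(); err != nil {
		log.Fatal(err)
	}
}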
| @@ -1,109 +1,102 @@ | ||||
| package main | ||||
| package options | ||||
| 
 | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"testing" | ||||
| 
 | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/stretchr/testify/assert" | ||||
| 	"github.com/stretchr/testify/require" | ||||
| 	"github.com/stretchr/testify/suite" | ||||
| ) | ||||
| 
 | ||||
| type PresetsTestSuite struct{ MainTestSuite } | ||||
| type PresetsTestSuite struct{ suite.Suite } | ||||
| 
 | ||||
| func (s *PresetsTestSuite) SetupTest() { | ||||
| 	config.Reset() | ||||
| 	// Reset presets | ||||
| 	presets = make(map[string]urlOptions) | ||||
| } | ||||
| 
 | ||||
| func (s *PresetsTestSuite) TestParsePreset() { | ||||
| 	p := make(presets) | ||||
| 
 | ||||
| 	err := parsePreset(p, "test=resize:fit:100:200/sharpen:2") | ||||
| 	err := parsePreset("test=resize:fit:100:200/sharpen:2") | ||||
| 
 | ||||
| 	require.Nil(s.T(), err) | ||||
| 
 | ||||
| 	assert.Equal(s.T(), urlOptions{ | ||||
| 		urlOption{Name: "resize", Args: []string{"fit", "100", "200"}}, | ||||
| 		urlOption{Name: "sharpen", Args: []string{"2"}}, | ||||
| 	}, p["test"]) | ||||
| 	}, presets["test"]) | ||||
| } | ||||
| 
 | ||||
| func (s *PresetsTestSuite) TestParsePresetInvalidString() { | ||||
| 	p := make(presets) | ||||
| 
 | ||||
| 	presetStr := "resize:fit:100:200/sharpen:2" | ||||
| 	err := parsePreset(p, presetStr) | ||||
| 	err := parsePreset(presetStr) | ||||
| 
 | ||||
| 	assert.Equal(s.T(), fmt.Errorf("Invalid preset string: %s", presetStr), err) | ||||
| 	assert.Empty(s.T(), p) | ||||
| 	assert.Empty(s.T(), presets) | ||||
| } | ||||
| 
 | ||||
| func (s *PresetsTestSuite) TestParsePresetEmptyName() { | ||||
| 	p := make(presets) | ||||
| 
 | ||||
| 	presetStr := "=resize:fit:100:200/sharpen:2" | ||||
| 	err := parsePreset(p, presetStr) | ||||
| 	err := parsePreset(presetStr) | ||||
| 
 | ||||
| 	assert.Equal(s.T(), fmt.Errorf("Empty preset name: %s", presetStr), err) | ||||
| 	assert.Empty(s.T(), p) | ||||
| 	assert.Empty(s.T(), presets) | ||||
| } | ||||
| 
 | ||||
| func (s *PresetsTestSuite) TestParsePresetEmptyValue() { | ||||
| 	p := make(presets) | ||||
| 
 | ||||
| 	presetStr := "test=" | ||||
| 	err := parsePreset(p, presetStr) | ||||
| 	err := parsePreset(presetStr) | ||||
| 
 | ||||
| 	assert.Equal(s.T(), fmt.Errorf("Empty preset value: %s", presetStr), err) | ||||
| 	assert.Empty(s.T(), p) | ||||
| 	assert.Empty(s.T(), presets) | ||||
| } | ||||
| 
 | ||||
| func (s *PresetsTestSuite) TestParsePresetInvalidValue() { | ||||
| 	p := make(presets) | ||||
| 
 | ||||
| 	presetStr := "test=resize:fit:100:200/sharpen:2/blur" | ||||
| 	err := parsePreset(p, presetStr) | ||||
| 	err := parsePreset(presetStr) | ||||
| 
 | ||||
| 	assert.Equal(s.T(), fmt.Errorf("Invalid preset value: %s", presetStr), err) | ||||
| 	assert.Empty(s.T(), p) | ||||
| 	assert.Empty(s.T(), presets) | ||||
| } | ||||
| 
 | ||||
| func (s *PresetsTestSuite) TestParsePresetEmptyString() { | ||||
| 	p := make(presets) | ||||
| 
 | ||||
| 	err := parsePreset(p, "  ") | ||||
| 	err := parsePreset("  ") | ||||
| 
 | ||||
| 	assert.Nil(s.T(), err) | ||||
| 	assert.Empty(s.T(), p) | ||||
| 	assert.Empty(s.T(), presets) | ||||
| } | ||||
| 
 | ||||
| func (s *PresetsTestSuite) TestParsePresetComment() { | ||||
| 	p := make(presets) | ||||
| 
 | ||||
| 	err := parsePreset(p, "#  test=resize:fit:100:200/sharpen:2") | ||||
| 	err := parsePreset("#  test=resize:fit:100:200/sharpen:2") | ||||
| 
 | ||||
| 	assert.Nil(s.T(), err) | ||||
| 	assert.Empty(s.T(), p) | ||||
| 	assert.Empty(s.T(), presets) | ||||
| } | ||||
| 
 | ||||
| func (s *PresetsTestSuite) TestCheckPresets() { | ||||
| 	p := presets{ | ||||
| func (s *PresetsTestSuite) TestValidatePresets() { | ||||
| 	presets = map[string]urlOptions{ | ||||
| 		"test": urlOptions{ | ||||
| 			urlOption{Name: "resize", Args: []string{"fit", "100", "200"}}, | ||||
| 			urlOption{Name: "sharpen", Args: []string{"2"}}, | ||||
| 		}, | ||||
| 	} | ||||
| 
 | ||||
| 	err := checkPresets(p) | ||||
| 	err := ValidatePresets() | ||||
| 
 | ||||
| 	assert.Nil(s.T(), err) | ||||
| } | ||||
| 
 | ||||
| func (s *PresetsTestSuite) TestCheckPresetsInvalid() { | ||||
| 	p := presets{ | ||||
| func (s *PresetsTestSuite) TestValidatePresetsInvalid() { | ||||
| 	presets = map[string]urlOptions{ | ||||
| 		"test": urlOptions{ | ||||
| 			urlOption{Name: "resize", Args: []string{"fit", "-1", "-2"}}, | ||||
| 			urlOption{Name: "sharpen", Args: []string{"2"}}, | ||||
| 		}, | ||||
| 	} | ||||
| 
 | ||||
| 	err := checkPresets(p) | ||||
| 	err := ValidatePresets() | ||||
| 
 | ||||
| 	assert.Error(s.T(), err) | ||||
| } | ||||
										
											
(File diff suppressed because it is too large)
options/processing_options_test.go (new file, 596 lines)
							| @@ -0,0 +1,596 @@ | ||||
| package options | ||||
|  | ||||
| import ( | ||||
| 	"encoding/base64" | ||||
| 	"fmt" | ||||
| 	"net/http" | ||||
| 	"net/url" | ||||
| 	"testing" | ||||
|  | ||||
| 	"github.com/stretchr/testify/assert" | ||||
| 	"github.com/stretchr/testify/require" | ||||
| 	"github.com/stretchr/testify/suite" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagetype" | ||||
| ) | ||||
|  | ||||
| type ProcessingOptionsTestSuite struct{ suite.Suite } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) SetupTest() { | ||||
| 	config.Reset() | ||||
| 	// Reset presets | ||||
| 	presets = make(map[string]urlOptions) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseBase64URL() { | ||||
| 	originURL := "http://images.dev/lorem/ipsum.jpg?param=value" | ||||
| 	path := fmt.Sprintf("/size:100:100/%s.png", base64.RawURLEncoding.EncodeToString([]byte(originURL))) | ||||
| 	po, imageURL, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), originURL, imageURL) | ||||
| 	assert.Equal(s.T(), imagetype.PNG, po.Format) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseBase64URLWithoutExtension() { | ||||
| 	originURL := "http://images.dev/lorem/ipsum.jpg?param=value" | ||||
| 	path := fmt.Sprintf("/size:100:100/%s", base64.RawURLEncoding.EncodeToString([]byte(originURL))) | ||||
| 	po, imageURL, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), originURL, imageURL) | ||||
| 	assert.Equal(s.T(), imagetype.Unknown, po.Format) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseBase64URLWithBase() { | ||||
| 	config.BaseURL = "http://images.dev/" | ||||
|  | ||||
| 	originURL := "lorem/ipsum.jpg?param=value" | ||||
| 	path := fmt.Sprintf("/size:100:100/%s.png", base64.RawURLEncoding.EncodeToString([]byte(originURL))) | ||||
| 	po, imageURL, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), fmt.Sprintf("%s%s", config.BaseURL, originURL), imageURL) | ||||
| 	assert.Equal(s.T(), imagetype.PNG, po.Format) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePlainURL() { | ||||
| 	originURL := "http://images.dev/lorem/ipsum.jpg" | ||||
| 	path := fmt.Sprintf("/size:100:100/plain/%s@png", originURL) | ||||
| 	po, imageURL, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), originURL, imageURL) | ||||
| 	assert.Equal(s.T(), imagetype.PNG, po.Format) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePlainURLWithoutExtension() { | ||||
| 	originURL := "http://images.dev/lorem/ipsum.jpg" | ||||
| 	path := fmt.Sprintf("/size:100:100/plain/%s", originURL) | ||||
|  | ||||
| 	po, imageURL, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), originURL, imageURL) | ||||
| 	assert.Equal(s.T(), imagetype.Unknown, po.Format) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePlainURLEscaped() { | ||||
| 	originURL := "http://images.dev/lorem/ipsum.jpg?param=value" | ||||
| 	path := fmt.Sprintf("/size:100:100/plain/%s@png", url.PathEscape(originURL)) | ||||
| 	po, imageURL, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), originURL, imageURL) | ||||
| 	assert.Equal(s.T(), imagetype.PNG, po.Format) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePlainURLWithBase() { | ||||
| 	config.BaseURL = "http://images.dev/" | ||||
|  | ||||
| 	originURL := "lorem/ipsum.jpg" | ||||
| 	path := fmt.Sprintf("/size:100:100/plain/%s@png", originURL) | ||||
| 	po, imageURL, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), fmt.Sprintf("%s%s", config.BaseURL, originURL), imageURL) | ||||
| 	assert.Equal(s.T(), imagetype.PNG, po.Format) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePlainURLEscapedWithBase() { | ||||
| 	config.BaseURL = "http://images.dev/" | ||||
|  | ||||
| 	originURL := "lorem/ipsum.jpg?param=value" | ||||
| 	path := fmt.Sprintf("/size:100:100/plain/%s@png", url.PathEscape(originURL)) | ||||
| 	po, imageURL, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), fmt.Sprintf("%s%s", config.BaseURL, originURL), imageURL) | ||||
| 	assert.Equal(s.T(), imagetype.PNG, po.Format) | ||||
| } | ||||
|  | ||||
| // func (s *ProcessingOptionsTestSuite) TestParseURLAllowedSource() { | ||||
| // 	config.AllowedSources = []string{"local://", "http://images.dev/"} | ||||
|  | ||||
| // 	path := "/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| // 	_, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| // 	require.Nil(s.T(), err) | ||||
| // } | ||||
|  | ||||
| // func (s *ProcessingOptionsTestSuite) TestParseURLNotAllowedSource() { | ||||
| // 	config.AllowedSources = []string{"local://", "http://images.dev/"} | ||||
|  | ||||
| // 	path := "/plain/s3://images/lorem/ipsum.jpg" | ||||
| // 	_, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| // 	require.Error(s.T(), err) | ||||
| // } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathFormat() { | ||||
| 	path := "/format:webp/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), imagetype.WEBP, po.Format) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathResize() { | ||||
| 	path := "/resize:fill:100:200:1/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), ResizeFill, po.ResizingType) | ||||
| 	assert.Equal(s.T(), 100, po.Width) | ||||
| 	assert.Equal(s.T(), 200, po.Height) | ||||
| 	assert.True(s.T(), po.Enlarge) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathResizingType() { | ||||
| 	path := "/resizing_type:fill/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), ResizeFill, po.ResizingType) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathSize() { | ||||
| 	path := "/size:100:200:1/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), 100, po.Width) | ||||
| 	assert.Equal(s.T(), 200, po.Height) | ||||
| 	assert.True(s.T(), po.Enlarge) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathWidth() { | ||||
| 	path := "/width:100/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), 100, po.Width) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathHeight() { | ||||
| 	path := "/height:100/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), 100, po.Height) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathEnlarge() { | ||||
| 	path := "/enlarge:1/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.True(s.T(), po.Enlarge) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathExtend() { | ||||
| 	path := "/extend:1:so:10:20/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), true, po.Extend.Enabled) | ||||
| 	assert.Equal(s.T(), GravitySouth, po.Extend.Gravity.Type) | ||||
| 	assert.Equal(s.T(), 10.0, po.Extend.Gravity.X) | ||||
| 	assert.Equal(s.T(), 20.0, po.Extend.Gravity.Y) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathGravity() { | ||||
| 	path := "/gravity:soea/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), GravitySouthEast, po.Gravity.Type) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathGravityFocuspoint() { | ||||
| 	path := "/gravity:fp:0.5:0.75/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), GravityFocusPoint, po.Gravity.Type) | ||||
| 	assert.Equal(s.T(), 0.5, po.Gravity.X) | ||||
| 	assert.Equal(s.T(), 0.75, po.Gravity.Y) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathQuality() { | ||||
| 	path := "/quality:55/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), 55, po.Quality) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathBackground() { | ||||
| 	path := "/background:128:129:130/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.True(s.T(), po.Flatten) | ||||
| 	assert.Equal(s.T(), uint8(128), po.Background.R) | ||||
| 	assert.Equal(s.T(), uint8(129), po.Background.G) | ||||
| 	assert.Equal(s.T(), uint8(130), po.Background.B) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathBackgroundHex() { | ||||
| 	path := "/background:ffddee/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.True(s.T(), po.Flatten) | ||||
| 	assert.Equal(s.T(), uint8(0xff), po.Background.R) | ||||
| 	assert.Equal(s.T(), uint8(0xdd), po.Background.G) | ||||
| 	assert.Equal(s.T(), uint8(0xee), po.Background.B) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathBackgroundDisable() { | ||||
| 	path := "/background:fff/background:/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.False(s.T(), po.Flatten) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathBlur() { | ||||
| 	path := "/blur:0.2/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), float32(0.2), po.Blur) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathSharpen() { | ||||
| 	path := "/sharpen:0.2/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), float32(0.2), po.Sharpen) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathDpr() { | ||||
| 	path := "/dpr:2/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), 2.0, po.Dpr) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathWatermark() { | ||||
| 	path := "/watermark:0.5:soea:10:20:0.6/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.True(s.T(), po.Watermark.Enabled) | ||||
| 	assert.Equal(s.T(), GravitySouthEast, po.Watermark.Gravity.Type) | ||||
| 	assert.Equal(s.T(), 10.0, po.Watermark.Gravity.X) | ||||
| 	assert.Equal(s.T(), 20.0, po.Watermark.Gravity.Y) | ||||
| 	assert.Equal(s.T(), 0.6, po.Watermark.Scale) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathPreset() { | ||||
| 	presets["test1"] = urlOptions{ | ||||
| 		urlOption{Name: "resizing_type", Args: []string{"fill"}}, | ||||
| 	} | ||||
|  | ||||
| 	presets["test2"] = urlOptions{ | ||||
| 		urlOption{Name: "blur", Args: []string{"0.2"}}, | ||||
| 		urlOption{Name: "quality", Args: []string{"50"}}, | ||||
| 	} | ||||
|  | ||||
| 	path := "/preset:test1:test2/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), ResizeFill, po.ResizingType) | ||||
| 	assert.Equal(s.T(), float32(0.2), po.Blur) | ||||
| 	assert.Equal(s.T(), 50, po.Quality) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathPresetDefault() { | ||||
| 	presets["default"] = urlOptions{ | ||||
| 		urlOption{Name: "resizing_type", Args: []string{"fill"}}, | ||||
| 		urlOption{Name: "blur", Args: []string{"0.2"}}, | ||||
| 		urlOption{Name: "quality", Args: []string{"50"}}, | ||||
| 	} | ||||
|  | ||||
| 	path := "/quality:70/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), ResizeFill, po.ResizingType) | ||||
| 	assert.Equal(s.T(), float32(0.2), po.Blur) | ||||
| 	assert.Equal(s.T(), 70, po.Quality) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathPresetLoopDetection() { | ||||
| 	presets["test1"] = urlOptions{ | ||||
| 		urlOption{Name: "resizing_type", Args: []string{"fill"}}, | ||||
| 	} | ||||
|  | ||||
| 	presets["test2"] = urlOptions{ | ||||
| 		urlOption{Name: "blur", Args: []string{"0.2"}}, | ||||
| 		urlOption{Name: "quality", Args: []string{"50"}}, | ||||
| 	} | ||||
|  | ||||
| 	path := "/preset:test1:test2:test1/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	require.ElementsMatch(s.T(), po.UsedPresets, []string{"test1", "test2"}) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathCachebuster() { | ||||
| 	path := "/cachebuster:123/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), "123", po.CacheBuster) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathStripMetadata() { | ||||
| 	path := "/strip_metadata:true/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.True(s.T(), po.StripMetadata) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathWebpDetection() { | ||||
| 	config.EnableWebpDetection = true | ||||
|  | ||||
| 	path := "/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	headers := http.Header{"Accept": []string{"image/webp"}} | ||||
| 	po, _, err := ParsePath(path, headers) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), true, po.PreferWebP) | ||||
| 	assert.Equal(s.T(), false, po.EnforceWebP) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathWebpEnforce() { | ||||
| 	config.EnforceWebp = true | ||||
|  | ||||
| 	path := "/plain/http://images.dev/lorem/ipsum.jpg@png" | ||||
| 	headers := http.Header{"Accept": []string{"image/webp"}} | ||||
| 	po, _, err := ParsePath(path, headers) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), true, po.PreferWebP) | ||||
| 	assert.Equal(s.T(), true, po.EnforceWebP) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathWidthHeader() { | ||||
| 	config.EnableClientHints = true | ||||
|  | ||||
| 	path := "/plain/http://images.dev/lorem/ipsum.jpg@png" | ||||
| 	headers := http.Header{"Width": []string{"100"}} | ||||
| 	po, _, err := ParsePath(path, headers) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), 100, po.Width) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathWidthHeaderDisabled() { | ||||
| 	path := "/plain/http://images.dev/lorem/ipsum.jpg@png" | ||||
| 	headers := http.Header{"Width": []string{"100"}} | ||||
| 	po, _, err := ParsePath(path, headers) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), 0, po.Width) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathWidthHeaderRedefine() { | ||||
| 	config.EnableClientHints = true | ||||
|  | ||||
| 	path := "/width:150/plain/http://images.dev/lorem/ipsum.jpg@png" | ||||
| 	headers := http.Header{"Width": []string{"100"}} | ||||
| 	po, _, err := ParsePath(path, headers) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), 150, po.Width) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathViewportWidthHeader() { | ||||
| 	config.EnableClientHints = true | ||||
|  | ||||
| 	path := "/plain/http://images.dev/lorem/ipsum.jpg@png" | ||||
| 	headers := http.Header{"Viewport-Width": []string{"100"}} | ||||
| 	po, _, err := ParsePath(path, headers) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), 100, po.Width) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathViewportWidthHeaderDisabled() { | ||||
| 	path := "/plain/http://images.dev/lorem/ipsum.jpg@png" | ||||
| 	headers := http.Header{"Viewport-Width": []string{"100"}} | ||||
| 	po, _, err := ParsePath(path, headers) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), 0, po.Width) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathViewportWidthHeaderRedefine() { | ||||
| 	config.EnableClientHints = true | ||||
|  | ||||
| 	path := "/width:150/plain/http://images.dev/lorem/ipsum.jpg@png" | ||||
| 	headers := http.Header{"Viewport-Width": []string{"100"}} | ||||
| 	po, _, err := ParsePath(path, headers) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), 150, po.Width) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathDprHeader() { | ||||
| 	config.EnableClientHints = true | ||||
|  | ||||
| 	path := "/plain/http://images.dev/lorem/ipsum.jpg@png" | ||||
| 	headers := http.Header{"Dpr": []string{"2"}} | ||||
| 	po, _, err := ParsePath(path, headers) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), 2.0, po.Dpr) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathDprHeaderDisabled() { | ||||
| 	path := "/plain/http://images.dev/lorem/ipsum.jpg@png" | ||||
| 	headers := http.Header{"Dpr": []string{"2"}} | ||||
| 	po, _, err := ParsePath(path, headers) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), 1.0, po.Dpr) | ||||
| } | ||||
|  | ||||
| // func (s *ProcessingOptionsTestSuite) TestParsePathSigned() { | ||||
| // 	config.Keys = [][]byte{[]byte("test-key")} | ||||
| // 	config.Salts = [][]byte{[]byte("test-salt")} | ||||
|  | ||||
| // 	path := "/HcvNognEV1bW6f8zRqxNYuOkV0IUf1xloRb57CzbT4g/width:150/plain/http://images.dev/lorem/ipsum.jpg@png" | ||||
| // 	_, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| // 	require.Nil(s.T(), err) | ||||
| // } | ||||
|  | ||||
| // func (s *ProcessingOptionsTestSuite) TestParsePathSignedInvalid() { | ||||
| // 	config.Keys = [][]byte{[]byte("test-key")} | ||||
| // 	config.Salts = [][]byte{[]byte("test-salt")} | ||||
|  | ||||
| // 	path := "/unsafe/width:150/plain/http://images.dev/lorem/ipsum.jpg@png" | ||||
| // 	_, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| // 	require.Error(s.T(), err) | ||||
| // 	assert.Equal(s.T(), signature.ErrInvalidSignature.Error(), err.Error()) | ||||
| // } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathOnlyPresets() { | ||||
| 	config.OnlyPresets = true | ||||
| 	presets["test1"] = urlOptions{ | ||||
| 		urlOption{Name: "blur", Args: []string{"0.2"}}, | ||||
| 	} | ||||
| 	presets["test2"] = urlOptions{ | ||||
| 		urlOption{Name: "quality", Args: []string{"50"}}, | ||||
| 	} | ||||
|  | ||||
| 	path := "/test1:test2/plain/http://images.dev/lorem/ipsum.jpg" | ||||
|  | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), float32(0.2), po.Blur) | ||||
| 	assert.Equal(s.T(), 50, po.Quality) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseSkipProcessing() { | ||||
| 	path := "/skp:jpg:png/plain/http://images.dev/lorem/ipsum.jpg" | ||||
|  | ||||
| 	po, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), []imagetype.Type{imagetype.JPEG, imagetype.PNG}, po.SkipProcessingFormats) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseSkipProcessingInvalid() { | ||||
| 	path := "/skp:jpg:png:bad_format/plain/http://images.dev/lorem/ipsum.jpg" | ||||
|  | ||||
| 	_, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Error(s.T(), err) | ||||
| 	assert.Equal(s.T(), "Invalid image format in skip processing: bad_format", err.Error()) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseExpires() { | ||||
| 	path := "/exp:32503669200/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	_, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseExpiresExpired() { | ||||
| 	path := "/exp:1609448400/plain/http://images.dev/lorem/ipsum.jpg" | ||||
| 	_, _, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Error(s.T(), err) | ||||
| 	assert.Equal(s.T(), errExpiredURL.Error(), err.Error()) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseBase64URLOnlyPresets() { | ||||
| 	config.OnlyPresets = true | ||||
| 	presets["test1"] = urlOptions{ | ||||
| 		urlOption{Name: "blur", Args: []string{"0.2"}}, | ||||
| 	} | ||||
| 	presets["test2"] = urlOptions{ | ||||
| 		urlOption{Name: "quality", Args: []string{"50"}}, | ||||
| 	} | ||||
|  | ||||
| 	originURL := "http://images.dev/lorem/ipsum.jpg?param=value" | ||||
| 	path := fmt.Sprintf("/test1:test2/%s.png", base64.RawURLEncoding.EncodeToString([]byte(originURL))) | ||||
|  | ||||
| 	po, imageURL, err := ParsePath(path, make(http.Header)) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	assert.Equal(s.T(), float32(0.2), po.Blur) | ||||
| 	assert.Equal(s.T(), 50, po.Quality) | ||||
| 	assert.Equal(s.T(), originURL, imageURL) | ||||
| } | ||||
|  | ||||
| func TestProcessingOptions(t *testing.T) { | ||||
| 	suite.Run(t, new(ProcessingOptionsTestSuite)) | ||||
| } | ||||
							
								
								
									
options/resize_type.go (new file, 39 lines)
| @@ -0,0 +1,39 @@ | ||||
| package options | ||||
|  | ||||
| import "fmt" | ||||
|  | ||||
| type ResizeType int | ||||
|  | ||||
| const ( | ||||
| 	ResizeFit ResizeType = iota | ||||
| 	ResizeFill | ||||
| 	ResizeFillDown | ||||
| 	ResizeForce | ||||
| 	ResizeAuto | ||||
| ) | ||||
|  | ||||
| var resizeTypes = map[string]ResizeType{ | ||||
| 	"fit":       ResizeFit, | ||||
| 	"fill":      ResizeFill, | ||||
| 	"fill-down": ResizeFillDown, | ||||
| 	"force":     ResizeForce, | ||||
| 	"auto":      ResizeAuto, | ||||
| } | ||||
|  | ||||
| func (rt ResizeType) String() string { | ||||
| 	for k, v := range resizeTypes { | ||||
| 		if v == rt { | ||||
| 			return k | ||||
| 		} | ||||
| 	} | ||||
| 	return "" | ||||
| } | ||||
|  | ||||
| func (rt ResizeType) MarshalJSON() ([]byte, error) { | ||||
| 	for k, v := range resizeTypes { | ||||
| 		if v == rt { | ||||
| 			return []byte(fmt.Sprintf("%q", k)), nil | ||||
| 		} | ||||
| 	} | ||||
| 	return []byte("null"), nil | ||||
| } | ||||
							
								
								
									
options/url.go (new file, 81 lines)
| @@ -0,0 +1,81 @@ | ||||
| package options | ||||
|  | ||||
| import ( | ||||
| 	"encoding/base64" | ||||
| 	"errors" | ||||
| 	"fmt" | ||||
| 	"net/url" | ||||
| 	"strings" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| ) | ||||
|  | ||||
| const urlTokenPlain = "plain" | ||||
|  | ||||
| func decodeBase64URL(parts []string) (string, string, error) { | ||||
| 	var format string | ||||
|  | ||||
| 	encoded := strings.Join(parts, "") | ||||
| 	urlParts := strings.Split(encoded, ".") | ||||
|  | ||||
| 	if len(urlParts[0]) == 0 { | ||||
| 		return "", "", errors.New("Image URL is empty") | ||||
| 	} | ||||
|  | ||||
| 	if len(urlParts) > 2 { | ||||
| 		return "", "", fmt.Errorf("Multiple formats are specified: %s", encoded) | ||||
| 	} | ||||
|  | ||||
| 	if len(urlParts) == 2 && len(urlParts[1]) > 0 { | ||||
| 		format = urlParts[1] | ||||
| 	} | ||||
|  | ||||
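| 	// Trailing "=" padding is trimmed because RawURLEncoding expects unpadded input, so both padded and unpadded URLs are accepted | ||||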
| 	imageURL, err := base64.RawURLEncoding.DecodeString(strings.TrimRight(urlParts[0], "=")) | ||||
| 	if err != nil { | ||||
| 		return "", "", fmt.Errorf("Invalid url encoding: %s", encoded) | ||||
| 	} | ||||
|  | ||||
| 	fullURL := fmt.Sprintf("%s%s", config.BaseURL, string(imageURL)) | ||||
|  | ||||
| 	return fullURL, format, nil | ||||
| } | ||||
|  | ||||
| func decodePlainURL(parts []string) (string, string, error) { | ||||
| 	var format string | ||||
|  | ||||
| 	encoded := strings.Join(parts, "/") | ||||
| 	urlParts := strings.Split(encoded, "@") | ||||
|  | ||||
| 	if len(urlParts[0]) == 0 { | ||||
| 		return "", "", errors.New("Image URL is empty") | ||||
| 	} | ||||
|  | ||||
| 	if len(urlParts) > 2 { | ||||
| 		return "", "", fmt.Errorf("Multiple formats are specified: %s", encoded) | ||||
| 	} | ||||
|  | ||||
| 	if len(urlParts) == 2 && len(urlParts[1]) > 0 { | ||||
| 		format = urlParts[1] | ||||
| 	} | ||||
|  | ||||
| 	unescaped, err := url.PathUnescape(urlParts[0]) | ||||
| 	if err != nil { | ||||
| 		return "", "", fmt.Errorf("Invalid url encoding: %s", encoded) | ||||
| 	} | ||||
|  | ||||
| 	fullURL := fmt.Sprintf("%s%s", config.BaseURL, unescaped) | ||||
|  | ||||
| 	return fullURL, format, nil | ||||
| } | ||||
|  | ||||
| func DecodeURL(parts []string) (string, string, error) { | ||||
| 	if len(parts) == 0 { | ||||
| 		return "", "", errors.New("Image URL is empty") | ||||
| 	} | ||||
|  | ||||
| 	if parts[0] == urlTokenPlain && len(parts) > 1 { | ||||
| 		return decodePlainURL(parts[1:]) | ||||
| 	} | ||||
|  | ||||
| 	return decodeBase64URL(parts) | ||||
| } | ||||
							
								
								
									
options/url_options.go (new file, 36 lines)
| @@ -0,0 +1,36 @@ | ||||
| package options | ||||
|  | ||||
| import "strings" | ||||
|  | ||||
| type urlOption struct { | ||||
| 	Name string | ||||
| 	Args []string | ||||
| } | ||||
|  | ||||
| type urlOptions []urlOption | ||||
|  | ||||
| func parseURLOptions(opts []string) (urlOptions, []string) { | ||||
| 	parsed := make(urlOptions, 0, len(opts)) | ||||
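| 	// urlStart stays past the end of the slice until the first colon-less part is found; that part starts the source URL | ||||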
| 	urlStart := len(opts) + 1 | ||||
|  | ||||
| 	for i, opt := range opts { | ||||
| 		args := strings.Split(opt, ":") | ||||
|  | ||||
| 		if len(args) == 1 { | ||||
| 			urlStart = i | ||||
| 			break | ||||
| 		} | ||||
|  | ||||
| 		parsed = append(parsed, urlOption{Name: args[0], Args: args[1:]}) | ||||
| 	} | ||||
|  | ||||
| 	var rest []string | ||||
|  | ||||
| 	if urlStart < len(opts) { | ||||
| 		rest = opts[urlStart:] | ||||
| 	} else { | ||||
| 		rest = []string{} | ||||
| 	} | ||||
|  | ||||
| 	return parsed, rest | ||||
| } | ||||
							
								
								
									
process.go (deleted, 896 lines)
| @@ -1,896 +0,0 @@ | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"bytes" | ||||
| 	"context" | ||||
| 	"fmt" | ||||
| 	"math" | ||||
| 	"runtime" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagemeta" | ||||
| ) | ||||
|  | ||||
| const ( | ||||
| 	// https://chromium.googlesource.com/webm/libwebp/+/refs/heads/master/src/webp/encode.h#529 | ||||
| 	webpMaxDimension = 16383.0 | ||||
| ) | ||||
|  | ||||
| var errConvertingNonSvgToSvg = newError(422, "Converting non-SVG images to SVG is not supported", "Converting non-SVG images to SVG is not supported") | ||||
|  | ||||
| func imageTypeLoadSupport(imgtype imageType) bool { | ||||
| 	return imgtype == imageTypeSVG || | ||||
| 		imgtype == imageTypeICO || | ||||
| 		vipsTypeSupportLoad[imgtype] | ||||
| } | ||||
|  | ||||
| func imageTypeSaveSupport(imgtype imageType) bool { | ||||
| 	return imgtype == imageTypeSVG || vipsTypeSupportSave[imgtype] | ||||
| } | ||||
|  | ||||
| func imageTypeGoodForWeb(imgtype imageType) bool { | ||||
| 	return imgtype != imageTypeTIFF && | ||||
| 		imgtype != imageTypeBMP | ||||
| } | ||||
|  | ||||
| func canSwitchFormat(src, dst, want imageType) bool { | ||||
| 	return imageTypeSaveSupport(want) && | ||||
| 		(!vipsSupportAnimation(src) || | ||||
| 			(dst != imageTypeUnknown && !vipsSupportAnimation(dst)) || | ||||
| 			vipsSupportAnimation(want)) | ||||
| } | ||||
|  | ||||
| func extractMeta(img *vipsImage, baseAngle int, useOrientation bool) (int, int, int, bool) { | ||||
| 	width := img.Width() | ||||
| 	height := img.Height() | ||||
|  | ||||
| 	angle := 0 | ||||
| 	flip := false | ||||
|  | ||||
| 	if useOrientation { | ||||
| 		orientation := img.Orientation() | ||||
|  | ||||
| 		if orientation == 3 || orientation == 4 { | ||||
| 			angle = 180 | ||||
| 		} | ||||
| 		if orientation == 5 || orientation == 6 { | ||||
| 			angle = 90 | ||||
| 		} | ||||
| 		if orientation == 7 || orientation == 8 { | ||||
| 			angle = 270 | ||||
| 		} | ||||
| 		if orientation == 2 || orientation == 4 || orientation == 5 || orientation == 7 { | ||||
| 			flip = true | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if (angle+baseAngle)%180 != 0 { | ||||
| 		width, height = height, width | ||||
| 	} | ||||
|  | ||||
| 	return width, height, angle, flip | ||||
| } | ||||
|  | ||||
| func calcScale(width, height int, po *processingOptions, imgtype imageType) (float64, float64) { | ||||
| 	var wshrink, hshrink float64 | ||||
|  | ||||
| 	srcW, srcH := float64(width), float64(height) | ||||
| 	dstW, dstH := float64(po.Width), float64(po.Height) | ||||
|  | ||||
| 	if po.Width == 0 { | ||||
| 		dstW = srcW | ||||
| 	} | ||||
|  | ||||
| 	if dstW == srcW { | ||||
| 		wshrink = 1 | ||||
| 	} else { | ||||
| 		wshrink = srcW / dstW | ||||
| 	} | ||||
|  | ||||
| 	if po.Height == 0 { | ||||
| 		dstH = srcH | ||||
| 	} | ||||
|  | ||||
| 	if dstH == srcH { | ||||
| 		hshrink = 1 | ||||
| 	} else { | ||||
| 		hshrink = srcH / dstH | ||||
| 	} | ||||
|  | ||||
| 	if wshrink != 1 || hshrink != 1 { | ||||
| 		rt := po.ResizingType | ||||
|  | ||||
| 		if rt == resizeAuto { | ||||
| 			srcD := srcW - srcH | ||||
| 			dstD := dstW - dstH | ||||
|  | ||||
| 			if (srcD >= 0 && dstD >= 0) || (srcD < 0 && dstD < 0) { | ||||
| 				rt = resizeFill | ||||
| 			} else { | ||||
| 				rt = resizeFit | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		switch { | ||||
| 		case po.Width == 0 && rt != resizeForce: | ||||
| 			wshrink = hshrink | ||||
| 		case po.Height == 0 && rt != resizeForce: | ||||
| 			hshrink = wshrink | ||||
| 		case rt == resizeFit: | ||||
| 			wshrink = math.Max(wshrink, hshrink) | ||||
| 			hshrink = wshrink | ||||
| 		case rt == resizeFill || rt == resizeFillDown: | ||||
| 			wshrink = math.Min(wshrink, hshrink) | ||||
| 			hshrink = wshrink | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if !po.Enlarge && imgtype != imageTypeSVG { | ||||
| 		if wshrink < 1 { | ||||
| 			hshrink /= wshrink | ||||
| 			wshrink = 1 | ||||
| 		} | ||||
| 		if hshrink < 1 { | ||||
| 			wshrink /= hshrink | ||||
| 			hshrink = 1 | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if po.MinWidth > 0 { | ||||
| 		if minShrink := srcW / float64(po.MinWidth); minShrink < wshrink { | ||||
| 			hshrink /= wshrink / minShrink | ||||
| 			wshrink = minShrink | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if po.MinHeight > 0 { | ||||
| 		if minShrink := srcH / float64(po.MinHeight); minShrink < hshrink { | ||||
| 			wshrink /= hshrink / minShrink | ||||
| 			hshrink = minShrink | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	wshrink /= po.Dpr | ||||
| 	hshrink /= po.Dpr | ||||
|  | ||||
| 	if wshrink > srcW { | ||||
| 		wshrink = srcW | ||||
| 	} | ||||
|  | ||||
| 	if hshrink > srcH { | ||||
| 		hshrink = srcH | ||||
| 	} | ||||
|  | ||||
| 	return 1.0 / wshrink, 1.0 / hshrink | ||||
| } | ||||
|  | ||||
| func canScaleOnLoad(imgtype imageType, scale float64) bool { | ||||
| 	if imgtype == imageTypeSVG { | ||||
| 		return true | ||||
| 	} | ||||
|  | ||||
| 	if conf.DisableShrinkOnLoad || scale >= 1 { | ||||
| 		return false | ||||
| 	} | ||||
|  | ||||
| 	return imgtype == imageTypeJPEG || imgtype == imageTypeWEBP | ||||
| } | ||||
|  | ||||
| func canFitToBytes(imgtype imageType) bool { | ||||
| 	switch imgtype { | ||||
| 	case imageTypeJPEG, imageTypeWEBP, imageTypeAVIF, imageTypeTIFF: | ||||
| 		return true | ||||
| 	default: | ||||
| 		return false | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func calcJpegShink(scale float64, imgtype imageType) int { | ||||
| 	shrink := int(1.0 / scale) | ||||
|  | ||||
| 	switch { | ||||
| 	case shrink >= 8: | ||||
| 		return 8 | ||||
| 	case shrink >= 4: | ||||
| 		return 4 | ||||
| 	case shrink >= 2: | ||||
| 		return 2 | ||||
| 	} | ||||
|  | ||||
| 	return 1 | ||||
| } | ||||
|  | ||||
| func calcCropSize(orig int, crop float64) int { | ||||
| 	switch { | ||||
| 	case crop == 0.0: | ||||
| 		return 0 | ||||
| 	case crop >= 1.0: | ||||
| 		return int(crop) | ||||
| 	default: | ||||
| 		return maxInt(1, scaleInt(orig, crop)) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func calcPosition(width, height, innerWidth, innerHeight int, gravity *gravityOptions, allowOverflow bool) (left, top int) { | ||||
| 	if gravity.Type == gravityFocusPoint { | ||||
| 		pointX := scaleInt(width, gravity.X) | ||||
| 		pointY := scaleInt(height, gravity.Y) | ||||
|  | ||||
| 		left = pointX - innerWidth/2 | ||||
| 		top = pointY - innerHeight/2 | ||||
| 	} else { | ||||
| 		offX, offY := int(gravity.X), int(gravity.Y) | ||||
|  | ||||
| 		left = (width-innerWidth+1)/2 + offX | ||||
| 		top = (height-innerHeight+1)/2 + offY | ||||
|  | ||||
| 		if gravity.Type == gravityNorth || gravity.Type == gravityNorthEast || gravity.Type == gravityNorthWest { | ||||
| 			top = 0 + offY | ||||
| 		} | ||||
|  | ||||
| 		if gravity.Type == gravityEast || gravity.Type == gravityNorthEast || gravity.Type == gravitySouthEast { | ||||
| 			left = width - innerWidth - offX | ||||
| 		} | ||||
|  | ||||
| 		if gravity.Type == gravitySouth || gravity.Type == gravitySouthEast || gravity.Type == gravitySouthWest { | ||||
| 			top = height - innerHeight - offY | ||||
| 		} | ||||
|  | ||||
| 		if gravity.Type == gravityWest || gravity.Type == gravityNorthWest || gravity.Type == gravitySouthWest { | ||||
| 			left = 0 + offX | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	var minX, maxX, minY, maxY int | ||||
|  | ||||
| 	if allowOverflow { | ||||
| 		minX, maxX = -innerWidth+1, width-1 | ||||
| 		minY, maxY = -innerHeight+1, height-1 | ||||
| 	} else { | ||||
| 		minX, maxX = 0, width-innerWidth | ||||
| 		minY, maxY = 0, height-innerHeight | ||||
| 	} | ||||
|  | ||||
| 	left = maxInt(minX, minInt(left, maxX)) | ||||
| 	top = maxInt(minY, minInt(top, maxY)) | ||||
|  | ||||
| 	return | ||||
| } | ||||
|  | ||||
| func cropImage(img *vipsImage, cropWidth, cropHeight int, gravity *gravityOptions) error { | ||||
| 	if cropWidth == 0 && cropHeight == 0 { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	imgWidth, imgHeight := img.Width(), img.Height() | ||||
|  | ||||
| 	cropWidth = minNonZeroInt(cropWidth, imgWidth) | ||||
| 	cropHeight = minNonZeroInt(cropHeight, imgHeight) | ||||
|  | ||||
| 	if cropWidth >= imgWidth && cropHeight >= imgHeight { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	if gravity.Type == gravitySmart { | ||||
| 		if err := img.CopyMemory(); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 		if err := img.SmartCrop(cropWidth, cropHeight); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 		// Applying additional modifications after smart crop causes SIGSEGV on Alpine | ||||
| 		// so we have to copy memory after it | ||||
| 		return img.CopyMemory() | ||||
| 	} | ||||
|  | ||||
| 	left, top := calcPosition(imgWidth, imgHeight, cropWidth, cropHeight, gravity, false) | ||||
| 	return img.Crop(left, top, cropWidth, cropHeight) | ||||
| } | ||||
|  | ||||
| func prepareWatermark(wm *vipsImage, wmData *imageData, opts *watermarkOptions, imgWidth, imgHeight int) error { | ||||
| 	if err := wm.Load(wmData.Data, wmData.Type, 1, 1.0, 1); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	po := newProcessingOptions() | ||||
| 	po.ResizingType = resizeFit | ||||
| 	po.Dpr = 1 | ||||
| 	po.Enlarge = true | ||||
| 	po.Format = wmData.Type | ||||
|  | ||||
| 	if opts.Scale > 0 { | ||||
| 		po.Width = maxInt(scaleInt(imgWidth, opts.Scale), 1) | ||||
| 		po.Height = maxInt(scaleInt(imgHeight, opts.Scale), 1) | ||||
| 	} | ||||
|  | ||||
| 	if err := transformImage(context.Background(), wm, wmData.Data, po, wmData.Type); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err := wm.EnsureAlpha(); err != nil { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	if opts.Replicate { | ||||
| 		return wm.Replicate(imgWidth, imgHeight) | ||||
| 	} | ||||
|  | ||||
| 	left, top := calcPosition(imgWidth, imgHeight, wm.Width(), wm.Height(), &opts.Gravity, true) | ||||
|  | ||||
| 	return wm.Embed(imgWidth, imgHeight, left, top, rgbColor{0, 0, 0}, true) | ||||
| } | ||||
|  | ||||
| func applyWatermark(img *vipsImage, wmData *imageData, opts *watermarkOptions, framesCount int) error { | ||||
| 	if err := img.RgbColourspace(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err := img.CopyMemory(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	wm := new(vipsImage) | ||||
| 	defer wm.Clear() | ||||
|  | ||||
| 	width := img.Width() | ||||
| 	height := img.Height() | ||||
|  | ||||
| 	if err := prepareWatermark(wm, wmData, opts, width, height/framesCount); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if framesCount > 1 { | ||||
| 		if err := wm.Replicate(width, height); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	opacity := opts.Opacity * conf.WatermarkOpacity | ||||
|  | ||||
| 	return img.ApplyWatermark(wm, opacity) | ||||
| } | ||||
|  | ||||
| func copyMemoryAndCheckTimeout(ctx context.Context, img *vipsImage) error { | ||||
| 	err := img.CopyMemory() | ||||
| 	checkTimeout(ctx) | ||||
| 	return err | ||||
| } | ||||
|  | ||||
| func transformImage(ctx context.Context, img *vipsImage, data []byte, po *processingOptions, imgtype imageType) error { | ||||
| 	var ( | ||||
| 		err     error | ||||
| 		trimmed bool | ||||
| 	) | ||||
|  | ||||
| 	if po.Trim.Enabled { | ||||
| 		if err = img.Trim(po.Trim.Threshold, po.Trim.Smart, po.Trim.Color, po.Trim.EqualHor, po.Trim.EqualVer); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 		if err = copyMemoryAndCheckTimeout(ctx, img); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 		trimmed = true | ||||
| 	} | ||||
|  | ||||
| 	srcWidth, srcHeight, angle, flip := extractMeta(img, po.Rotate, po.AutoRotate) | ||||
|  | ||||
| 	cropWidth := calcCropSize(srcWidth, po.Crop.Width) | ||||
| 	cropHeight := calcCropSize(srcHeight, po.Crop.Height) | ||||
|  | ||||
| 	cropGravity := po.Crop.Gravity | ||||
| 	if cropGravity.Type == gravityUnknown { | ||||
| 		cropGravity = po.Gravity | ||||
| 	} | ||||
|  | ||||
| 	widthToScale := minNonZeroInt(cropWidth, srcWidth) | ||||
| 	heightToScale := minNonZeroInt(cropHeight, srcHeight) | ||||
|  | ||||
| 	wscale, hscale := calcScale(widthToScale, heightToScale, po, imgtype) | ||||
|  | ||||
| 	if cropWidth > 0 { | ||||
| 		cropWidth = maxInt(1, scaleInt(cropWidth, wscale)) | ||||
| 	} | ||||
| 	if cropHeight > 0 { | ||||
| 		cropHeight = maxInt(1, scaleInt(cropHeight, hscale)) | ||||
| 	} | ||||
| 	if cropGravity.Type != gravityFocusPoint { | ||||
| 		cropGravity.X *= wscale | ||||
| 		cropGravity.Y *= hscale | ||||
| 	} | ||||
|  | ||||
| 	prescale := math.Max(wscale, hscale) | ||||
|  | ||||
| 	if !trimmed && prescale != 1 && data != nil && canScaleOnLoad(imgtype, prescale) { | ||||
| 		jpegShrink := calcJpegShink(prescale, imgtype) | ||||
|  | ||||
| 		if imgtype != imageTypeJPEG || jpegShrink != 1 { | ||||
| 			// Do some scale-on-load | ||||
| 			if err = img.Load(data, imgtype, jpegShrink, prescale, 1); err != nil { | ||||
| 				return err | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		// Update scales after scale-on-load | ||||
| 		newWidth, newHeight, _, _ := extractMeta(img, po.Rotate, po.AutoRotate) | ||||
|  | ||||
| 		wscale = float64(srcWidth) * wscale / float64(newWidth) | ||||
| 		if srcWidth == scaleInt(srcWidth, wscale) { | ||||
| 			wscale = 1.0 | ||||
| 		} | ||||
|  | ||||
| 		hscale = float64(srcHeight) * hscale / float64(newHeight) | ||||
| 		if srcHeight == scaleInt(srcHeight, hscale) { | ||||
| 			hscale = 1.0 | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err = img.Rad2Float(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	iccImported := false | ||||
| 	convertToLinear := conf.UseLinearColorspace && (wscale != 1 || hscale != 1) | ||||
|  | ||||
| 	if convertToLinear || !img.IsSRGB() { | ||||
| 		if err = img.ImportColourProfile(); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 		iccImported = true | ||||
| 	} | ||||
|  | ||||
| 	if convertToLinear { | ||||
| 		if err = img.LinearColourspace(); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} else { | ||||
| 		if err = img.RgbColourspace(); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	hasAlpha := img.HasAlpha() | ||||
|  | ||||
| 	if wscale != 1 || hscale != 1 { | ||||
| 		if err = img.Resize(wscale, hscale, hasAlpha); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err = copyMemoryAndCheckTimeout(ctx, img); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err = img.Rotate(angle); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if flip { | ||||
| 		if err = img.Flip(); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err = img.Rotate(po.Rotate); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err = cropImage(img, cropWidth, cropHeight, &cropGravity); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	// Crop image to the result size | ||||
| 	resultWidth := scaleInt(po.Width, po.Dpr) | ||||
| 	resultHeight := scaleInt(po.Height, po.Dpr) | ||||
|  | ||||
| 	if po.ResizingType == resizeFillDown { | ||||
| 		if resultWidth > img.Width() { | ||||
| 			resultHeight = scaleInt(resultHeight, float64(img.Width())/float64(resultWidth)) | ||||
| 			resultWidth = img.Width() | ||||
| 		} | ||||
|  | ||||
| 		if resultHeight > img.Height() { | ||||
| 			resultWidth = scaleInt(resultWidth, float64(img.Height())/float64(resultHeight)) | ||||
| 			resultHeight = img.Height() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err = cropImage(img, resultWidth, resultHeight, &po.Gravity); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if po.Format == imageTypeWEBP { | ||||
| 		webpLimitShrink := float64(maxInt(img.Width(), img.Height())) / webpMaxDimension | ||||
|  | ||||
| 		if webpLimitShrink > 1.0 { | ||||
| 			scale := 1.0 / webpLimitShrink | ||||
| 			if err = img.Resize(scale, scale, hasAlpha); err != nil { | ||||
| 				return err | ||||
| 			} | ||||
| 			logWarning("WebP dimension size is limited to %d. The image is rescaled to %dx%d", int(webpMaxDimension), img.Width(), img.Height()) | ||||
|  | ||||
| 			if err = copyMemoryAndCheckTimeout(ctx, img); err != nil { | ||||
| 				return err | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	keepProfile := !po.StripColorProfile && po.Format.SupportsColourProfile() | ||||
|  | ||||
| 	if iccImported { | ||||
| 		if keepProfile { | ||||
| 			// We imported ICC profile and want to keep it, | ||||
| 			// so we need to export it | ||||
| 			if err = img.ExportColourProfile(); err != nil { | ||||
| 				return err | ||||
| 			} | ||||
| 		} else { | ||||
| 			// We imported ICC profile but don't want to keep it, | ||||
| 			// so we need to export image to sRGB for maximum compatibility | ||||
| 			if err = img.ExportColourProfileToSRGB(); err != nil { | ||||
| 				return err | ||||
| 			} | ||||
| 		} | ||||
| 	} else if !keepProfile { | ||||
| 		// We don't import ICC profile and don't want to keep it, | ||||
| 		// so we need to transform it to sRGB for maximum compatibility | ||||
| 		if err = img.TransformColourProfile(); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err = img.RgbColourspace(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if !keepProfile { | ||||
| 		if err = img.RemoveColourProfile(); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	transparentBg := po.Format.SupportsAlpha() && !po.Flatten | ||||
|  | ||||
| 	if hasAlpha && !transparentBg { | ||||
| 		if err = img.Flatten(po.Background); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err = copyMemoryAndCheckTimeout(ctx, img); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if po.Blur > 0 { | ||||
| 		if err = img.Blur(po.Blur); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if po.Sharpen > 0 { | ||||
| 		if err = img.Sharpen(po.Sharpen); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err = copyMemoryAndCheckTimeout(ctx, img); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if po.Extend.Enabled && (resultWidth > img.Width() || resultHeight > img.Height()) { | ||||
| 		offX, offY := calcPosition(resultWidth, resultHeight, img.Width(), img.Height(), &po.Extend.Gravity, false) | ||||
| 		if err = img.Embed(resultWidth, resultHeight, offX, offY, po.Background, transparentBg); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if po.Padding.Enabled { | ||||
| 		paddingTop := scaleInt(po.Padding.Top, po.Dpr) | ||||
| 		paddingRight := scaleInt(po.Padding.Right, po.Dpr) | ||||
| 		paddingBottom := scaleInt(po.Padding.Bottom, po.Dpr) | ||||
| 		paddingLeft := scaleInt(po.Padding.Left, po.Dpr) | ||||
| 		if err = img.Embed( | ||||
| 			img.Width()+paddingLeft+paddingRight, | ||||
| 			img.Height()+paddingTop+paddingBottom, | ||||
| 			paddingLeft, | ||||
| 			paddingTop, | ||||
| 			po.Background, | ||||
| 			transparentBg, | ||||
| 		); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if po.Watermark.Enabled && watermark != nil { | ||||
| 		if err = applyWatermark(img, watermark, &po.Watermark, 1); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err = img.RgbColourspace(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err := img.CastUchar(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if po.StripMetadata { | ||||
| 		if err := img.Strip(); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return copyMemoryAndCheckTimeout(ctx, img) | ||||
| } | ||||
|  | ||||
| func transformAnimated(ctx context.Context, img *vipsImage, data []byte, po *processingOptions, imgtype imageType) error { | ||||
| 	if po.Trim.Enabled { | ||||
| 		logWarning("Trim is not supported for animated images") | ||||
| 		po.Trim.Enabled = false | ||||
| 	} | ||||
|  | ||||
| 	imgWidth := img.Width() | ||||
|  | ||||
| 	frameHeight, err := img.GetInt("page-height") | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	framesCount := minInt(img.Height()/frameHeight, conf.MaxAnimationFrames) | ||||
|  | ||||
| 	// Double check dimensions because animated image has many frames | ||||
| 	if err = checkDimensions(imgWidth, frameHeight*framesCount); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	// Vips 8.8+ supports n-pages and doesn't load the whole animated image on header access | ||||
| 	if nPages, _ := img.GetIntDefault("n-pages", 0); nPages > framesCount { | ||||
| 		// Load only the needed frames | ||||
| 		if err = img.Load(data, imgtype, 1, 1.0, framesCount); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	delay, err := img.GetIntSliceDefault("delay", nil) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	loop, err := img.GetIntDefault("loop", 0) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	// Legacy fields | ||||
| 	// TODO: remove this in major update | ||||
| 	gifLoop, err := img.GetIntDefault("gif-loop", -1) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	gifDelay, err := img.GetIntDefault("gif-delay", -1) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	watermarkEnabled := po.Watermark.Enabled | ||||
| 	po.Watermark.Enabled = false | ||||
| 	defer func() { po.Watermark.Enabled = watermarkEnabled }() | ||||
|  | ||||
| 	frames := make([]*vipsImage, framesCount) | ||||
| 	defer func() { | ||||
| 		for _, frame := range frames { | ||||
| 			if frame != nil { | ||||
| 				frame.Clear() | ||||
| 			} | ||||
| 		} | ||||
| 	}() | ||||
|  | ||||
| 	for i := 0; i < framesCount; i++ { | ||||
| 		frame := new(vipsImage) | ||||
|  | ||||
| 		if err = img.Extract(frame, 0, i*frameHeight, imgWidth, frameHeight); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
|  | ||||
| 		frames[i] = frame | ||||
|  | ||||
| 		if err = transformImage(ctx, frame, nil, po, imgtype); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
|  | ||||
| 		if err = copyMemoryAndCheckTimeout(ctx, frame); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err = img.Arrayjoin(frames); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if watermarkEnabled && watermark != nil { | ||||
| 		if err = applyWatermark(img, watermark, &po.Watermark, framesCount); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err = img.CastUchar(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err = copyMemoryAndCheckTimeout(ctx, img); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if len(delay) == 0 { | ||||
| 		delay = make([]int, framesCount) | ||||
| 		for i := range delay { | ||||
| 			delay[i] = 40 | ||||
| 		} | ||||
| 	} else if len(delay) > framesCount { | ||||
| 		delay = delay[:framesCount] | ||||
| 	} | ||||
|  | ||||
| 	img.SetInt("page-height", frames[0].Height()) | ||||
| 	img.SetIntSlice("delay", delay) | ||||
| 	img.SetInt("loop", loop) | ||||
| 	img.SetInt("n-pages", framesCount) | ||||
|  | ||||
| 	// Legacy fields | ||||
| 	// TODO: remove this in major update | ||||
| 	if gifLoop >= 0 { | ||||
| 		img.SetInt("gif-loop", gifLoop) | ||||
| 	} | ||||
| 	if gifDelay >= 0 { | ||||
| 		img.SetInt("gif-delay", gifDelay) | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func getIcoData(imgdata *imageData) (*imageData, error) { | ||||
| 	icoMeta, err := imagemeta.DecodeIcoMeta(bytes.NewReader(imgdata.Data)) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	offset := icoMeta.BestImageOffset() | ||||
| 	size := icoMeta.BestImageSize() | ||||
|  | ||||
| 	data := imgdata.Data[offset : offset+size] | ||||
|  | ||||
| 	var format string | ||||
|  | ||||
| 	meta, err := imagemeta.DecodeMeta(bytes.NewReader(data)) | ||||
| 	if err != nil { | ||||
| 		// Looks like it's BMP with an incomplete header | ||||
| 		if d, err := imagemeta.FixBmpHeader(data); err == nil { | ||||
| 			format = "bmp" | ||||
| 			data = d | ||||
| 		} else { | ||||
| 			return nil, err | ||||
| 		} | ||||
| 	} else { | ||||
| 		format = meta.Format() | ||||
| 	} | ||||
|  | ||||
| 	if imgtype, ok := imageTypes[format]; ok && vipsTypeSupportLoad[imgtype] { | ||||
| 		return &imageData{ | ||||
| 			Data: data, | ||||
| 			Type: imgtype, | ||||
| 		}, nil | ||||
| 	} | ||||
|  | ||||
| 	return nil, fmt.Errorf("Can't load %s from ICO", meta.Format()) | ||||
| } | ||||
|  | ||||
| func saveImageToFitBytes(ctx context.Context, po *processingOptions, img *vipsImage) ([]byte, context.CancelFunc, error) { | ||||
| 	var diff float64 | ||||
| 	quality := po.getQuality() | ||||
|  | ||||
| 	for { | ||||
| 		result, cancel, err := img.Save(po.Format, quality) | ||||
| 		if len(result) <= po.MaxBytes || quality <= 10 || err != nil { | ||||
| 			return result, cancel, err | ||||
| 		} | ||||
| 		cancel() | ||||
|  | ||||
| 		checkTimeout(ctx) | ||||
|  | ||||
| 		delta := float64(len(result)) / float64(po.MaxBytes) | ||||
| 		switch { | ||||
| 		case delta > 3: | ||||
| 			diff = 0.25 | ||||
| 		case delta > 1.5: | ||||
| 			diff = 0.5 | ||||
| 		default: | ||||
| 			diff = 0.75 | ||||
| 		} | ||||
| 		quality = int(float64(quality) * diff) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func processImage(ctx context.Context) ([]byte, context.CancelFunc, error) { | ||||
| 	runtime.LockOSThread() | ||||
| 	defer runtime.UnlockOSThread() | ||||
|  | ||||
| 	defer startDataDogSpan(ctx, "processing_image")() | ||||
| 	defer startNewRelicSegment(ctx, "Processing image")() | ||||
| 	defer startPrometheusDuration(prometheusProcessingDuration)() | ||||
|  | ||||
| 	defer vipsCleanup() | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	imgdata := getImageData(ctx) | ||||
|  | ||||
| 	switch { | ||||
| 	case po.Format == imageTypeUnknown: | ||||
| 		switch { | ||||
| 		case po.PreferAvif && canSwitchFormat(imgdata.Type, imageTypeUnknown, imageTypeAVIF): | ||||
| 			po.Format = imageTypeAVIF | ||||
| 		case po.PreferWebP && canSwitchFormat(imgdata.Type, imageTypeUnknown, imageTypeWEBP): | ||||
| 			po.Format = imageTypeWEBP | ||||
| 		case imageTypeSaveSupport(imgdata.Type) && imageTypeGoodForWeb(imgdata.Type): | ||||
| 			po.Format = imgdata.Type | ||||
| 		default: | ||||
| 			po.Format = imageTypeJPEG | ||||
| 		} | ||||
| 	case po.EnforceAvif && canSwitchFormat(imgdata.Type, po.Format, imageTypeAVIF): | ||||
| 		po.Format = imageTypeAVIF | ||||
| 	case po.EnforceWebP && canSwitchFormat(imgdata.Type, po.Format, imageTypeWEBP): | ||||
| 		po.Format = imageTypeWEBP | ||||
| 	} | ||||
|  | ||||
| 	if po.Format == imageTypeSVG { | ||||
| 		if imgdata.Type != imageTypeSVG { | ||||
| 			return []byte{}, func() {}, errConvertingNonSvgToSvg | ||||
| 		} | ||||
|  | ||||
| 		return imgdata.Data, func() {}, nil | ||||
| 	} | ||||
|  | ||||
| 	if imgdata.Type == imageTypeSVG && !vipsTypeSupportLoad[imageTypeSVG] { | ||||
| 		return []byte{}, func() {}, errSourceImageTypeNotSupported | ||||
| 	} | ||||
|  | ||||
| 	if imgdata.Type == imageTypeICO { | ||||
| 		icodata, err := getIcoData(imgdata) | ||||
| 		if err != nil { | ||||
| 			return nil, func() {}, err | ||||
| 		} | ||||
|  | ||||
| 		imgdata = icodata | ||||
| 	} | ||||
|  | ||||
| 	animationSupport := conf.MaxAnimationFrames > 1 && vipsSupportAnimation(imgdata.Type) && vipsSupportAnimation(po.Format) | ||||
|  | ||||
| 	pages := 1 | ||||
| 	if animationSupport { | ||||
| 		pages = -1 | ||||
| 	} | ||||
|  | ||||
| 	img := new(vipsImage) | ||||
| 	defer img.Clear() | ||||
|  | ||||
| 	if err := img.Load(imgdata.Data, imgdata.Type, 1, 1.0, pages); err != nil { | ||||
| 		return nil, func() {}, err | ||||
| 	} | ||||
|  | ||||
| 	if animationSupport && img.IsAnimated() { | ||||
| 		if err := transformAnimated(ctx, img, imgdata.Data, po, imgdata.Type); err != nil { | ||||
| 			return nil, func() {}, err | ||||
| 		} | ||||
| 	} else { | ||||
| 		if err := transformImage(ctx, img, imgdata.Data, po, imgdata.Type); err != nil { | ||||
| 			return nil, func() {}, err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err := copyMemoryAndCheckTimeout(ctx, img); err != nil { | ||||
| 		return nil, func() {}, err | ||||
| 	} | ||||
|  | ||||
| 	if po.MaxBytes > 0 && canFitToBytes(po.Format) { | ||||
| 		return saveImageToFitBytes(ctx, po, img) | ||||
| 	} | ||||
|  | ||||
| 	return img.Save(po.Format, po.getQuality()) | ||||
| } | ||||
							
								
								
									
processing/apply_filters.go (new file, 48 lines)
| @@ -0,0 +1,48 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| func applyFilters(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	if po.Blur == 0 && po.Sharpen == 0 { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	if err := copyMemoryAndCheckTimeout(pctx.ctx, img); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err := img.RgbColourspace(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	// When image has alpha, we need to premultiply it to get rid of black edges | ||||
| 	if err := img.Premultiply(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if po.Blur > 0 { | ||||
| 		if err := img.Blur(po.Blur); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if po.Sharpen > 0 { | ||||
| 		if err := img.Sharpen(po.Sharpen); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err := img.Unpremultiply(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err := img.CastUchar(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	return copyMemoryAndCheckTimeout(pctx.ctx, img) | ||||
| } | ||||
							
								
								
									
processing/calc_position.go (new file, 52 lines)
| @@ -0,0 +1,52 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"github.com/imgproxy/imgproxy/v2/imath" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| ) | ||||
|  | ||||
| func calcPosition(width, height, innerWidth, innerHeight int, gravity *options.GravityOptions, allowOverflow bool) (left, top int) { | ||||
| 	if gravity.Type == options.GravityFocusPoint { | ||||
| 		pointX := imath.Scale(width, gravity.X) | ||||
| 		pointY := imath.Scale(height, gravity.Y) | ||||
|  | ||||
| 		left = pointX - innerWidth/2 | ||||
| 		top = pointY - innerHeight/2 | ||||
| 	} else { | ||||
| 		offX, offY := int(gravity.X), int(gravity.Y) | ||||
|  | ||||
| 		left = (width-innerWidth+1)/2 + offX | ||||
| 		top = (height-innerHeight+1)/2 + offY | ||||
|  | ||||
| 		if gravity.Type == options.GravityNorth || gravity.Type == options.GravityNorthEast || gravity.Type == options.GravityNorthWest { | ||||
| 			top = 0 + offY | ||||
| 		} | ||||
|  | ||||
| 		if gravity.Type == options.GravityEast || gravity.Type == options.GravityNorthEast || gravity.Type == options.GravitySouthEast { | ||||
| 			left = width - innerWidth - offX | ||||
| 		} | ||||
|  | ||||
| 		if gravity.Type == options.GravitySouth || gravity.Type == options.GravitySouthEast || gravity.Type == options.GravitySouthWest { | ||||
| 			top = height - innerHeight - offY | ||||
| 		} | ||||
|  | ||||
| 		if gravity.Type == options.GravityWest || gravity.Type == options.GravityNorthWest || gravity.Type == options.GravitySouthWest { | ||||
| 			left = 0 + offX | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	var minX, maxX, minY, maxY int | ||||
|  | ||||
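| 	// When overflow is allowed, the inner box may extend past the image edges as long as it still overlaps the image by at least one pixel | ||||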
| 	if allowOverflow { | ||||
| 		minX, maxX = -innerWidth+1, width-1 | ||||
| 		minY, maxY = -innerHeight+1, height-1 | ||||
| 	} else { | ||||
| 		minX, maxX = 0, width-innerWidth | ||||
| 		minY, maxY = 0, height-innerHeight | ||||
| 	} | ||||
|  | ||||
| 	left = imath.Max(minX, imath.Min(left, maxX)) | ||||
| 	top = imath.Max(minY, imath.Min(top, maxY)) | ||||
|  | ||||
| 	return | ||||
| } | ||||
							
								
								
									
processing/copy_and_check_timeout.go (new file, 14 lines)
| @@ -0,0 +1,14 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/router" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| func copyMemoryAndCheckTimeout(ctx context.Context, img *vips.Image) error { | ||||
| 	err := img.CopyMemory() | ||||
| 	router.CheckTimeout(ctx) | ||||
| 	return err | ||||
| } | ||||
							
								
								
									
processing/crop.go (new file, 62 lines)
| @@ -0,0 +1,62 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imath" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| func cropImage(img *vips.Image, cropWidth, cropHeight int, gravity *options.GravityOptions) error { | ||||
| 	if cropWidth == 0 && cropHeight == 0 { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	imgWidth, imgHeight := img.Width(), img.Height() | ||||
|  | ||||
| 	cropWidth = imath.MinNonZero(cropWidth, imgWidth) | ||||
| 	cropHeight = imath.MinNonZero(cropHeight, imgHeight) | ||||
|  | ||||
| 	if cropWidth >= imgWidth && cropHeight >= imgHeight { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	if gravity.Type == options.GravitySmart { | ||||
| 		if err := img.CopyMemory(); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 		if err := img.SmartCrop(cropWidth, cropHeight); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 		// Applying additional modifications after smart crop causes SIGSEGV on Alpine | ||||
| 		// so we have to copy memory after it | ||||
| 		return img.CopyMemory() | ||||
| 	} | ||||
|  | ||||
| 	left, top := calcPosition(imgWidth, imgHeight, cropWidth, cropHeight, gravity, false) | ||||
| 	return img.Crop(left, top, cropWidth, cropHeight) | ||||
| } | ||||
|  | ||||
| func crop(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	if err := cropImage(img, pctx.cropWidth, pctx.cropHeight, &pctx.cropGravity); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	// Crop image to the result size | ||||
| 	resultWidth := imath.Scale(po.Width, po.Dpr) | ||||
| 	resultHeight := imath.Scale(po.Height, po.Dpr) | ||||
|  | ||||
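| 	// fill-down never upscales: if the requested size exceeds the image, shrink it to fit while keeping the requested aspect ratio | ||||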
| 	if po.ResizingType == options.ResizeFillDown { | ||||
| 		if resultWidth > img.Width() { | ||||
| 			resultHeight = imath.Scale(resultHeight, float64(img.Width())/float64(resultWidth)) | ||||
| 			resultWidth = img.Width() | ||||
| 		} | ||||
|  | ||||
| 		if resultHeight > img.Height() { | ||||
| 			resultWidth = imath.Scale(resultWidth, float64(img.Height())/float64(resultHeight)) | ||||
| 			resultHeight = img.Height() | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return cropImage(img, resultWidth, resultHeight, &po.Gravity) | ||||
| } | ||||
							
								
								
									
processing/export_color_profile.go (new file, 43 lines)
| @@ -0,0 +1,43 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| func exportColorProfile(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	keepProfile := !po.StripColorProfile && po.Format.SupportsColourProfile() | ||||
|  | ||||
| 	if pctx.iccImported { | ||||
| 		if keepProfile { | ||||
| 			// We imported ICC profile and want to keep it, | ||||
| 			// so we need to export it | ||||
| 			if err := img.ExportColourProfile(); err != nil { | ||||
| 				return err | ||||
| 			} | ||||
| 		} else { | ||||
| 			// We imported ICC profile but don't want to keep it, | ||||
| 			// so we need to export image to sRGB for maximum compatibility | ||||
| 			if err := img.ExportColourProfileToSRGB(); err != nil { | ||||
| 				return err | ||||
| 			} | ||||
| 		} | ||||
| 	} else if !keepProfile { | ||||
| 		// We don't import ICC profile and don't want to keep it, | ||||
| 		// so we need to transform it to sRGB for maximum compatibility | ||||
| 		if err := img.TransformColourProfile(); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err := img.RgbColourspace(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if !keepProfile { | ||||
| 		return img.RemoveColourProfile() | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
							
								
								
									
processing/extend.go (new file, 20 lines)
| @@ -0,0 +1,20 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imath" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| func extend(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	resultWidth := imath.Scale(po.Width, po.Dpr) | ||||
| 	resultHeight := imath.Scale(po.Height, po.Dpr) | ||||
|  | ||||
| 	if !po.Extend.Enabled || (resultWidth <= img.Width() && resultHeight <= img.Height()) { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	offX, offY := calcPosition(resultWidth, resultHeight, img.Width(), img.Height(), &po.Extend.Gravity, false) | ||||
| 	return img.Embed(resultWidth, resultHeight, offX, offY) | ||||
| } | ||||
							
								
								
									
processing/finalize.go (new file, 25 lines)
| @@ -0,0 +1,25 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| func finalize(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	if err := img.RgbColourspace(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err := img.CastUchar(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if po.StripMetadata { | ||||
| 		if err := img.Strip(); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return copyMemoryAndCheckTimeout(pctx.ctx, img) | ||||
| } | ||||
							
								
								
									
processing/fix_webp_size.go (new file, 33 lines)
| @@ -0,0 +1,33 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagetype" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imath" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| 	log "github.com/sirupsen/logrus" | ||||
| ) | ||||
|  | ||||
| // https://chromium.googlesource.com/webm/libwebp/+/refs/heads/master/src/webp/encode.h#529 | ||||
| const webpMaxDimension = 16383.0 | ||||
|  | ||||
| func fixWebpSize(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	if po.Format != imagetype.WEBP { | ||||
| 		return nil | ||||
| 	} | ||||
| 	webpLimitShrink := float64(imath.Max(img.Width(), img.Height())) / webpMaxDimension | ||||
|  | ||||
| 	if webpLimitShrink <= 1.0 { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	scale := 1.0 / webpLimitShrink | ||||
| 	if err := img.Resize(scale, scale); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	log.Warningf("WebP dimension size is limited to %d. The image is rescaled to %dx%d", int(webpMaxDimension), img.Width(), img.Height()) | ||||
|  | ||||
| 	return copyMemoryAndCheckTimeout(pctx.ctx, img) | ||||
| } | ||||
							
								
								
									
processing/flatten.go (new file, 15 lines)
| @@ -0,0 +1,15 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| func flatten(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	if !po.Flatten && po.Format.SupportsAlpha() { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	return img.Flatten(po.Background) | ||||
| } | ||||
							
								
								
									
processing/import_color_profile.go (new file, 29 lines)
| @@ -0,0 +1,29 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| func importColorProfile(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	if err := img.Rad2Float(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	convertToLinear := config.UseLinearColorspace && (pctx.wscale != 1 || pctx.hscale != 1) | ||||
|  | ||||
| 	if convertToLinear || !img.IsSRGB() { | ||||
| 		if err := img.ImportColourProfile(); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 		pctx.iccImported = true | ||||
| 	} | ||||
|  | ||||
| 	if convertToLinear { | ||||
| 		return img.LinearColourspace() | ||||
| 	} | ||||
|  | ||||
| 	return img.RgbColourspace() | ||||
| } | ||||
							
								
								
									
processing/padding.go (new file, 26 lines)
| @@ -0,0 +1,26 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imath" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| func padding(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	if !po.Padding.Enabled { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	paddingTop := imath.Scale(po.Padding.Top, po.Dpr) | ||||
| 	paddingRight := imath.Scale(po.Padding.Right, po.Dpr) | ||||
| 	paddingBottom := imath.Scale(po.Padding.Bottom, po.Dpr) | ||||
| 	paddingLeft := imath.Scale(po.Padding.Left, po.Dpr) | ||||
|  | ||||
| 	return img.Embed( | ||||
| 		img.Width()+paddingLeft+paddingRight, | ||||
| 		img.Height()+paddingTop+paddingBottom, | ||||
| 		paddingLeft, | ||||
| 		paddingTop, | ||||
| 	) | ||||
| } | ||||
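A worked example with hypothetical values: po.Padding = {Top: 10, Right: 20, Bottom: 10, Left: 20} at po.Dpr = 2 gives scaled paddings of 20/40/20/40, so a 100×100 image is embedded into a (100+40+40)×(100+20+20) = 180×140 canvas, with the original pixels offset 40 px from the left and 20 px from the top.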
							
								
								
									
processing/pipeline.go (new file, 58 lines)
| @@ -0,0 +1,58 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagetype" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| type pipelineContext struct { | ||||
| 	ctx context.Context | ||||
|  | ||||
| 	imgtype imagetype.Type | ||||
|  | ||||
| 	trimmed bool | ||||
|  | ||||
| 	srcWidth  int | ||||
| 	srcHeight int | ||||
| 	angle     int | ||||
| 	flip      bool | ||||
|  | ||||
| 	cropWidth   int | ||||
| 	cropHeight  int | ||||
| 	cropGravity options.GravityOptions | ||||
|  | ||||
| 	wscale float64 | ||||
| 	hscale float64 | ||||
|  | ||||
| 	iccImported bool | ||||
| } | ||||
|  | ||||
| type pipelineStep func(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error | ||||
| type pipeline []pipelineStep | ||||
|  | ||||
| func (p pipeline) Run(ctx context.Context, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	pctx := pipelineContext{ | ||||
| 		ctx: ctx, | ||||
|  | ||||
| 		wscale: 1.0, | ||||
| 		hscale: 1.0, | ||||
|  | ||||
| 		cropGravity: po.Crop.Gravity, | ||||
| 	} | ||||
|  | ||||
| 	if pctx.cropGravity.Type == options.GravityUnknown { | ||||
| 		pctx.cropGravity = po.Gravity | ||||
| 	} | ||||
|  | ||||
| 	for _, step := range p { | ||||
| 		if err := step(&pctx, img, po, imgdata); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
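The pipeline type is simply an ordered slice of steps sharing one pipelineContext; mainPipeline and watermarkPipeline below are the two orderings this commit defines. As a minimal in-package sketch (hypothetical, not part of the change; the pipeline name is made up), a reduced pipeline could be composed and run like this:

    package processing

    import (
        "context"

        "github.com/imgproxy/imgproxy/v2/imagedata"
        "github.com/imgproxy/imgproxy/v2/options"
        "github.com/imgproxy/imgproxy/v2/vips"
    )

    // thumbnailPipeline is a hypothetical reduced pipeline built from steps
    // defined in this commit; it exists only to show the call shape.
    var thumbnailPipeline = pipeline{
        prepare,            // fills srcWidth/srcHeight, wscale/hscale, crop sizes
        scaleOnLoad,        // shrink-on-load for JPEG/WebP/SVG when possible
        importColorProfile, // ICC import, optional linear colourspace
        scale,              // the actual resize
        finalize,           // RGB colourspace, uchar cast, optional metadata strip
    }

    // runThumbnail shows how a composed pipeline is executed: every step gets
    // the shared pipelineContext plus the same image, options, and image data.
    func runThumbnail(ctx context.Context, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error {
        return thumbnailPipeline.Run(ctx, img, po, imgdata)
    }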
							
								
								
									
processing/prepare.go (new file, 176 lines)
| @@ -0,0 +1,176 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"math" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagetype" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imath" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| func extractMeta(img *vips.Image, baseAngle int, useOrientation bool) (int, int, int, bool) { | ||||
| 	width := img.Width() | ||||
| 	height := img.Height() | ||||
|  | ||||
| 	angle := 0 | ||||
| 	flip := false | ||||
|  | ||||
| 	if useOrientation { | ||||
| 		orientation := img.Orientation() | ||||
|  | ||||
| 		if orientation == 3 || orientation == 4 { | ||||
| 			angle = 180 | ||||
| 		} | ||||
| 		if orientation == 5 || orientation == 6 { | ||||
| 			angle = 90 | ||||
| 		} | ||||
| 		if orientation == 7 || orientation == 8 { | ||||
| 			angle = 270 | ||||
| 		} | ||||
| 		if orientation == 2 || orientation == 4 || orientation == 5 || orientation == 7 { | ||||
| 			flip = true | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if (angle+baseAngle)%180 != 0 { | ||||
| 		width, height = height, width | ||||
| 	} | ||||
|  | ||||
| 	return width, height, angle, flip | ||||
| } | ||||
|  | ||||
| func calcScale(width, height int, po *options.ProcessingOptions, imgtype imagetype.Type) (float64, float64) { | ||||
| 	var wshrink, hshrink float64 | ||||
|  | ||||
| 	srcW, srcH := float64(width), float64(height) | ||||
| 	dstW, dstH := float64(po.Width), float64(po.Height) | ||||
|  | ||||
| 	if po.Width == 0 { | ||||
| 		dstW = srcW | ||||
| 	} | ||||
|  | ||||
| 	if dstW == srcW { | ||||
| 		wshrink = 1 | ||||
| 	} else { | ||||
| 		wshrink = srcW / dstW | ||||
| 	} | ||||
|  | ||||
| 	if po.Height == 0 { | ||||
| 		dstH = srcH | ||||
| 	} | ||||
|  | ||||
| 	if dstH == srcH { | ||||
| 		hshrink = 1 | ||||
| 	} else { | ||||
| 		hshrink = srcH / dstH | ||||
| 	} | ||||
|  | ||||
| 	if wshrink != 1 || hshrink != 1 { | ||||
| 		rt := po.ResizingType | ||||
|  | ||||
| 		if rt == options.ResizeAuto { | ||||
| 			srcD := srcW - srcH | ||||
| 			dstD := dstW - dstH | ||||
|  | ||||
| 			if (srcD >= 0 && dstD >= 0) || (srcD < 0 && dstD < 0) { | ||||
| 				rt = options.ResizeFill | ||||
| 			} else { | ||||
| 				rt = options.ResizeFit | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		switch { | ||||
| 		case po.Width == 0 && rt != options.ResizeForce: | ||||
| 			wshrink = hshrink | ||||
| 		case po.Height == 0 && rt != options.ResizeForce: | ||||
| 			hshrink = wshrink | ||||
| 		case rt == options.ResizeFit: | ||||
| 			wshrink = math.Max(wshrink, hshrink) | ||||
| 			hshrink = wshrink | ||||
| 		case rt == options.ResizeFill || rt == options.ResizeFillDown: | ||||
| 			wshrink = math.Min(wshrink, hshrink) | ||||
| 			hshrink = wshrink | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if !po.Enlarge && imgtype != imagetype.SVG { | ||||
| 		if wshrink < 1 { | ||||
| 			hshrink /= wshrink | ||||
| 			wshrink = 1 | ||||
| 		} | ||||
| 		if hshrink < 1 { | ||||
| 			wshrink /= hshrink | ||||
| 			hshrink = 1 | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if po.MinWidth > 0 { | ||||
| 		if minShrink := srcW / float64(po.MinWidth); minShrink < wshrink { | ||||
| 			hshrink /= wshrink / minShrink | ||||
| 			wshrink = minShrink | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if po.MinHeight > 0 { | ||||
| 		if minShrink := srcH / float64(po.MinHeight); minShrink < hshrink { | ||||
| 			wshrink /= hshrink / minShrink | ||||
| 			hshrink = minShrink | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	wshrink /= po.Dpr | ||||
| 	hshrink /= po.Dpr | ||||
|  | ||||
| 	if wshrink > srcW { | ||||
| 		wshrink = srcW | ||||
| 	} | ||||
|  | ||||
| 	if hshrink > srcH { | ||||
| 		hshrink = srcH | ||||
| 	} | ||||
|  | ||||
| 	return 1.0 / wshrink, 1.0 / hshrink | ||||
| } | ||||
|  | ||||
| func calcCropSize(orig int, crop float64) int { | ||||
| 	switch { | ||||
| 	case crop == 0.0: | ||||
| 		return 0 | ||||
| 	case crop >= 1.0: | ||||
| 		return int(crop) | ||||
| 	default: | ||||
| 		return imath.Max(1, imath.Scale(orig, crop)) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func prepare(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	pctx.imgtype = imagetype.Unknown | ||||
| 	if imgdata != nil { | ||||
| 		pctx.imgtype = imgdata.Type | ||||
| 	} | ||||
|  | ||||
| 	pctx.srcWidth, pctx.srcHeight, pctx.angle, pctx.flip = extractMeta(img, po.Rotate, po.AutoRotate) | ||||
|  | ||||
| 	pctx.cropWidth = calcCropSize(pctx.srcWidth, po.Crop.Width) | ||||
| 	pctx.cropHeight = calcCropSize(pctx.srcHeight, po.Crop.Height) | ||||
|  | ||||
| 	widthToScale := imath.MinNonZero(pctx.cropWidth, pctx.srcWidth) | ||||
| 	heightToScale := imath.MinNonZero(pctx.cropHeight, pctx.srcHeight) | ||||
|  | ||||
| 	pctx.wscale, pctx.hscale = calcScale(widthToScale, heightToScale, po, pctx.imgtype) | ||||
|  | ||||
| 	if pctx.cropWidth > 0 { | ||||
| 		pctx.cropWidth = imath.Max(1, imath.Scale(pctx.cropWidth, pctx.wscale)) | ||||
| 	} | ||||
| 	if pctx.cropHeight > 0 { | ||||
| 		pctx.cropHeight = imath.Max(1, imath.Scale(pctx.cropHeight, pctx.hscale)) | ||||
| 	} | ||||
| 	if pctx.cropGravity.Type != options.GravityFocusPoint { | ||||
| 		pctx.cropGravity.X *= pctx.wscale | ||||
| 		pctx.cropGravity.Y *= pctx.hscale | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
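calcScale is the heart of prepare: it turns the requested width/height, resizing type, dpr, and min-* options into a pair of per-axis scale factors. The following hypothetical in-package test (not part of this commit) traces one easy case; it assumes options.NewProcessingOptions leaves MinWidth/MinHeight at zero, and Dpr is set explicitly to keep the arithmetic exact:

    package processing

    import (
        "testing"

        "github.com/imgproxy/imgproxy/v2/imagetype"
        "github.com/imgproxy/imgproxy/v2/options"
    )

    // TestCalcScaleFitSketch is a hypothetical in-package test that only
    // traces calcScale for a single easy case.
    func TestCalcScaleFitSketch(t *testing.T) {
        po := options.NewProcessingOptions()
        po.ResizingType = options.ResizeFit
        po.Width, po.Height = 250, 250
        po.Dpr = 1 // set explicitly so the arithmetic below stays exact

        // 1000x500 into 250x250: wshrink = 1000/250 = 4, hshrink = 500/250 = 2.
        // ResizeFit keeps the aspect ratio by taking the larger shrink for both
        // axes, so both scale factors are 1/4 and the result would be 250x125.
        wscale, hscale := calcScale(1000, 500, po, imagetype.JPEG)

        if wscale != 0.25 || hscale != 0.25 {
            t.Fatalf("unexpected scales: %f, %f", wscale, hscale)
        }
    }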
							
								
								
									
processing/processing.go (new file, 273 lines)
| @@ -0,0 +1,273 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"fmt" | ||||
| 	"runtime" | ||||
|  | ||||
| 	log "github.com/sirupsen/logrus" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagetype" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imath" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/router" | ||||
| 	"github.com/imgproxy/imgproxy/v2/security" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| var mainPipeline = pipeline{ | ||||
| 	trim, | ||||
| 	prepare, | ||||
| 	scaleOnLoad, | ||||
| 	importColorProfile, | ||||
| 	scale, | ||||
| 	rotateAndFlip, | ||||
| 	crop, | ||||
| 	fixWebpSize, | ||||
| 	exportColorProfile, | ||||
| 	applyFilters, | ||||
| 	extend, | ||||
| 	padding, | ||||
| 	flatten, | ||||
| 	watermark, | ||||
| 	finalize, | ||||
| } | ||||
|  | ||||
| func imageTypeGoodForWeb(imgtype imagetype.Type) bool { | ||||
| 	return imgtype != imagetype.TIFF && | ||||
| 		imgtype != imagetype.BMP | ||||
| } | ||||
|  | ||||
| // src  - the source image format | ||||
| // dst  - what the user specified | ||||
| // want - what we want to switch to | ||||
| func canSwitchFormat(src, dst, want imagetype.Type) bool { | ||||
| 	// If the format we want is not supported, we can't switch to it anyway | ||||
| 	return vips.SupportsSave(want) && | ||||
| 		// if src format doesn't support animation, we can switch to whatever we want | ||||
| 		(!src.SupportsAnimation() || | ||||
| 			// if user specified the format and it doesn't support animation, we can switch to whatever we want | ||||
| 			(dst != imagetype.Unknown && !dst.SupportsAnimation()) || | ||||
| 			// if the format we want supports animation, we can switch in any case | ||||
| 			want.SupportsAnimation()) | ||||
| } | ||||
|  | ||||
| func canFitToBytes(imgtype imagetype.Type) bool { | ||||
| 	switch imgtype { | ||||
| 	case imagetype.JPEG, imagetype.WEBP, imagetype.AVIF, imagetype.TIFF: | ||||
| 		return true | ||||
| 	default: | ||||
| 		return false | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func transformAnimated(ctx context.Context, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	if po.Trim.Enabled { | ||||
| 		log.Warning("Trim is not supported for animated images") | ||||
| 		po.Trim.Enabled = false | ||||
| 	} | ||||
|  | ||||
| 	imgWidth := img.Width() | ||||
|  | ||||
| 	frameHeight, err := img.GetInt("page-height") | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	framesCount := imath.Min(img.Height()/frameHeight, config.MaxAnimationFrames) | ||||
|  | ||||
| 	// Double check dimensions because animated image has many frames | ||||
| 	if err = security.CheckDimensions(imgWidth, frameHeight*framesCount); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	// Vips 8.8+ supports n-pages and doesn't load the whole animated image on header access | ||||
| 	if nPages, _ := img.GetIntDefault("n-pages", 0); nPages > framesCount { | ||||
| 		// Load only the needed frames | ||||
| 		if err = img.Load(imgdata, 1, 1.0, framesCount); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	delay, err := img.GetIntSliceDefault("delay", nil) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	loop, err := img.GetIntDefault("loop", 0) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	// Legacy fields | ||||
| 	// TODO: remove this in major update | ||||
| 	gifLoop, err := img.GetIntDefault("gif-loop", -1) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	gifDelay, err := img.GetIntDefault("gif-delay", -1) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	watermarkEnabled := po.Watermark.Enabled | ||||
| 	po.Watermark.Enabled = false | ||||
| 	defer func() { po.Watermark.Enabled = watermarkEnabled }() | ||||
|  | ||||
| 	frames := make([]*vips.Image, framesCount) | ||||
| 	defer func() { | ||||
| 		for _, frame := range frames { | ||||
| 			if frame != nil { | ||||
| 				frame.Clear() | ||||
| 			} | ||||
| 		} | ||||
| 	}() | ||||
|  | ||||
| 	for i := 0; i < framesCount; i++ { | ||||
| 		frame := new(vips.Image) | ||||
|  | ||||
| 		if err = img.Extract(frame, 0, i*frameHeight, imgWidth, frameHeight); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
|  | ||||
| 		frames[i] = frame | ||||
|  | ||||
| 		if err = mainPipeline.Run(ctx, frame, po, nil); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err = img.Arrayjoin(frames); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if watermarkEnabled && imagedata.Watermark != nil { | ||||
| 		if err = applyWatermark(img, imagedata.Watermark, &po.Watermark, framesCount); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err = img.CastUchar(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err = copyMemoryAndCheckTimeout(ctx, img); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if len(delay) == 0 { | ||||
| 		delay = make([]int, framesCount) | ||||
| 		for i := range delay { | ||||
| 			delay[i] = 40 | ||||
| 		} | ||||
| 	} else if len(delay) > framesCount { | ||||
| 		delay = delay[:framesCount] | ||||
| 	} | ||||
|  | ||||
| 	img.SetInt("page-height", frames[0].Height()) | ||||
| 	img.SetIntSlice("delay", delay) | ||||
| 	img.SetInt("loop", loop) | ||||
| 	img.SetInt("n-pages", framesCount) | ||||
|  | ||||
| 	// Legacy fields | ||||
| 	// TODO: remove this in major update | ||||
| 	if gifLoop >= 0 { | ||||
| 		img.SetInt("gif-loop", gifLoop) | ||||
| 	} | ||||
| 	if gifDelay >= 0 { | ||||
| 		img.SetInt("gif-delay", gifDelay) | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func saveImageToFitBytes(ctx context.Context, po *options.ProcessingOptions, img *vips.Image) (*imagedata.ImageData, error) { | ||||
| 	var diff float64 | ||||
| 	quality := po.GetQuality() | ||||
|  | ||||
| 	for { | ||||
| 		imgdata, err := img.Save(po.Format, quality) | ||||
| 		if len(imgdata.Data) <= po.MaxBytes || quality <= 10 || err != nil { | ||||
| 			return imgdata, err | ||||
| 		} | ||||
| 		imgdata.Close() | ||||
|  | ||||
| 		router.CheckTimeout(ctx) | ||||
|  | ||||
| 		delta := float64(len(imgdata.Data)) / float64(po.MaxBytes) | ||||
| 		switch { | ||||
| 		case delta > 3: | ||||
| 			diff = 0.25 | ||||
| 		case delta > 1.5: | ||||
| 			diff = 0.5 | ||||
| 		default: | ||||
| 			diff = 0.75 | ||||
| 		} | ||||
| 		quality = int(float64(quality) * diff) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func ProcessImage(ctx context.Context, imgdata *imagedata.ImageData, po *options.ProcessingOptions) (*imagedata.ImageData, error) { | ||||
| 	runtime.LockOSThread() | ||||
| 	defer runtime.UnlockOSThread() | ||||
|  | ||||
| 	defer vips.Cleanup() | ||||
|  | ||||
| 	switch { | ||||
| 	case po.Format == imagetype.Unknown: | ||||
| 		switch { | ||||
| 		case po.PreferAvif && canSwitchFormat(imgdata.Type, imagetype.Unknown, imagetype.AVIF): | ||||
| 			po.Format = imagetype.AVIF | ||||
| 		case po.PreferWebP && canSwitchFormat(imgdata.Type, imagetype.Unknown, imagetype.WEBP): | ||||
| 			po.Format = imagetype.WEBP | ||||
| 		case vips.SupportsSave(imgdata.Type) && imageTypeGoodForWeb(imgdata.Type): | ||||
| 			po.Format = imgdata.Type | ||||
| 		default: | ||||
| 			po.Format = imagetype.JPEG | ||||
| 		} | ||||
| 	case po.EnforceAvif && canSwitchFormat(imgdata.Type, po.Format, imagetype.AVIF): | ||||
| 		po.Format = imagetype.AVIF | ||||
| 	case po.EnforceWebP && canSwitchFormat(imgdata.Type, po.Format, imagetype.WEBP): | ||||
| 		po.Format = imagetype.WEBP | ||||
| 	} | ||||
|  | ||||
| 	if !vips.SupportsSave(po.Format) { | ||||
| 		return nil, fmt.Errorf("Can't save %s, probably not supported by your libvips", po.Format) | ||||
| 	} | ||||
|  | ||||
| 	animationSupport := config.MaxAnimationFrames > 1 && imgdata.Type.SupportsAnimation() && po.Format.SupportsAnimation() | ||||
|  | ||||
| 	pages := 1 | ||||
| 	if animationSupport { | ||||
| 		pages = -1 | ||||
| 	} | ||||
|  | ||||
| 	img := new(vips.Image) | ||||
| 	defer img.Clear() | ||||
|  | ||||
| 	if err := img.Load(imgdata, 1, 1.0, pages); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	if animationSupport && img.IsAnimated() { | ||||
| 		if err := transformAnimated(ctx, img, po, imgdata); err != nil { | ||||
| 			return nil, err | ||||
| 		} | ||||
| 	} else { | ||||
| 		if err := mainPipeline.Run(ctx, img, po, imgdata); err != nil { | ||||
| 			return nil, err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if err := copyMemoryAndCheckTimeout(ctx, img); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	if po.MaxBytes > 0 && canFitToBytes(po.Format) { | ||||
| 		return saveImageToFitBytes(ctx, po, img) | ||||
| 	} | ||||
|  | ||||
| 	return img.Save(po.Format, po.GetQuality()) | ||||
| } | ||||
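ProcessImage is the package's single exported entry point; the reworked handler below feeds it the downloaded source and the parsed options. A condensed, hypothetical sketch of that call shape (no fallback image, ETag, metrics, or concurrency handling; the function name processOne is illustrative only):

    package main

    import (
        "context"
        "net/http"

        "github.com/imgproxy/imgproxy/v2/imagedata"
        "github.com/imgproxy/imgproxy/v2/options"
        "github.com/imgproxy/imgproxy/v2/processing"
    )

    // processOne is a hypothetical condensation of handleProcessing below:
    // parse the options part of the path, download the source, run the
    // pipeline. The caller is responsible for closing the returned data.
    func processOne(ctx context.Context, path string, header http.Header) (*imagedata.ImageData, error) {
        po, imageURL, err := options.ParsePath(path, header)
        if err != nil {
            return nil, err
        }

        originData, err := imagedata.Download(imageURL, "source image")
        if err != nil {
            return nil, err
        }
        defer originData.Close()

        return processing.ProcessImage(ctx, originData, po)
    }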
							
								
								
									
processing/rotate_and_flip.go (new file, 21 lines)
| @@ -0,0 +1,21 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| func rotateAndFlip(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	if err := img.Rotate(pctx.angle); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if pctx.flip { | ||||
| 		if err := img.Flip(); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return img.Rotate(po.Rotate) | ||||
| } | ||||
							
								
								
									
processing/scale.go (new file, 17 lines)
| @@ -0,0 +1,17 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| func scale(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	if pctx.wscale != 1 || pctx.hscale != 1 { | ||||
| 		if err := img.Resize(pctx.wscale, pctx.hscale); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return copyMemoryAndCheckTimeout(pctx.ctx, img) | ||||
| } | ||||
							
								
								
									
processing/scale_on_load.go (new file, 72 lines)
| @@ -0,0 +1,72 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"math" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagetype" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imath" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| func canScaleOnLoad(imgtype imagetype.Type, scale float64) bool { | ||||
| 	if imgtype == imagetype.SVG { | ||||
| 		return true | ||||
| 	} | ||||
|  | ||||
| 	if config.DisableShrinkOnLoad || scale >= 1 { | ||||
| 		return false | ||||
| 	} | ||||
|  | ||||
| 	return imgtype == imagetype.JPEG || imgtype == imagetype.WEBP | ||||
| } | ||||
|  | ||||
| func calcJpegShink(scale float64, imgtype imagetype.Type) int { | ||||
| 	shrink := int(1.0 / scale) | ||||
|  | ||||
| 	switch { | ||||
| 	case shrink >= 8: | ||||
| 		return 8 | ||||
| 	case shrink >= 4: | ||||
| 		return 4 | ||||
| 	case shrink >= 2: | ||||
| 		return 2 | ||||
| 	} | ||||
|  | ||||
| 	return 1 | ||||
| } | ||||
|  | ||||
| func scaleOnLoad(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	prescale := math.Max(pctx.wscale, pctx.hscale) | ||||
|  | ||||
| 	if pctx.trimmed || prescale == 1 || imgdata == nil || !canScaleOnLoad(pctx.imgtype, prescale) { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	jpegShrink := calcJpegShink(prescale, pctx.imgtype) | ||||
|  | ||||
| 	if pctx.imgtype == imagetype.JPEG && jpegShrink == 1 { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	if err := img.Load(imgdata, jpegShrink, prescale, 1); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	// Update scales after scale-on-load | ||||
| 	newWidth, newHeight, _, _ := extractMeta(img, po.Rotate, po.AutoRotate) | ||||
|  | ||||
| 	pctx.wscale = float64(pctx.srcWidth) * pctx.wscale / float64(newWidth) | ||||
| 	if pctx.srcWidth == imath.Scale(pctx.srcWidth, pctx.wscale) { | ||||
| 		pctx.wscale = 1.0 | ||||
| 	} | ||||
|  | ||||
| 	pctx.hscale = float64(pctx.srcHeight) * pctx.hscale / float64(newHeight) | ||||
| 	if pctx.srcHeight == imath.Scale(pctx.srcHeight, pctx.hscale) { | ||||
| 		pctx.hscale = 1.0 | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
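For a feel of the bookkeeping above, with hypothetical numbers: a 4000 px-wide JPEG requested at wscale = hscale = 0.2 gives prescale = 0.2, so calcJpegShink returns 4 and libvips decodes the file at about 1000 px wide; the recomputed wscale then becomes 4000 × 0.2 / 1000 = 0.8, and the later scale step performs only the remaining 1.25× reduction.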
							
								
								
									
processing/trim.go (new file, 24 lines)
| @@ -0,0 +1,24 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| func trim(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	if !po.Trim.Enabled { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	if err := img.Trim(po.Trim.Threshold, po.Trim.Smart, po.Trim.Color, po.Trim.EqualHor, po.Trim.EqualVer); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 	if err := copyMemoryAndCheckTimeout(pctx.ctx, img); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	pctx.trimmed = true | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
							
								
								
									
processing/watermark.go (new file, 88 lines)
| @@ -0,0 +1,88 @@ | ||||
| package processing | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imath" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| var watermarkPipeline = pipeline{ | ||||
| 	prepare, | ||||
| 	scaleOnLoad, | ||||
| 	importColorProfile, | ||||
| 	scale, | ||||
| 	rotateAndFlip, | ||||
| 	exportColorProfile, | ||||
| 	finalize, | ||||
| } | ||||
|  | ||||
| func prepareWatermark(wm *vips.Image, wmData *imagedata.ImageData, opts *options.WatermarkOptions, imgWidth, imgHeight int) error { | ||||
| 	if err := wm.Load(wmData, 1, 1.0, 1); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	po := options.NewProcessingOptions() | ||||
| 	po.ResizingType = options.ResizeFit | ||||
| 	po.Dpr = 1 | ||||
| 	po.Enlarge = true | ||||
| 	po.Format = wmData.Type | ||||
|  | ||||
| 	if opts.Scale > 0 { | ||||
| 		po.Width = imath.Max(imath.Scale(imgWidth, opts.Scale), 1) | ||||
| 		po.Height = imath.Max(imath.Scale(imgHeight, opts.Scale), 1) | ||||
| 	} | ||||
|  | ||||
| 	if err := watermarkPipeline.Run(context.Background(), wm, po, wmData); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if opts.Replicate { | ||||
| 		return wm.Replicate(imgWidth, imgHeight) | ||||
| 	} | ||||
|  | ||||
| 	left, top := calcPosition(imgWidth, imgHeight, wm.Width(), wm.Height(), &opts.Gravity, true) | ||||
|  | ||||
| 	return wm.Embed(imgWidth, imgHeight, left, top) | ||||
| } | ||||
|  | ||||
| func applyWatermark(img *vips.Image, wmData *imagedata.ImageData, opts *options.WatermarkOptions, framesCount int) error { | ||||
| 	if err := img.RgbColourspace(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if err := img.CopyMemory(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	wm := new(vips.Image) | ||||
| 	defer wm.Clear() | ||||
|  | ||||
| 	width := img.Width() | ||||
| 	height := img.Height() | ||||
|  | ||||
| 	if err := prepareWatermark(wm, wmData, opts, width, height/framesCount); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	if framesCount > 1 { | ||||
| 		if err := wm.Replicate(width, height); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	opacity := opts.Opacity * config.WatermarkOpacity | ||||
|  | ||||
| 	return img.ApplyWatermark(wm, opacity) | ||||
| } | ||||
|  | ||||
| func watermark(pctx *pipelineContext, img *vips.Image, po *options.ProcessingOptions, imgdata *imagedata.ImageData) error { | ||||
| 	if !po.Watermark.Enabled || imagedata.Watermark == nil { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	return applyWatermark(img, imagedata.Watermark, &po.Watermark, 1) | ||||
| } | ||||
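Two numeric notes on the code above, with hypothetical values: prepareWatermark with opts.Scale = 0.2 on a 1000×800 frame fits the watermark into a 200×160 box (ResizeFit with Enlarge), and the opacity handed to ApplyWatermark is the per-request value multiplied by the global setting, so opts.Opacity 0.5 with config.WatermarkOpacity 0.8 yields 0.4.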
| @@ -7,79 +7,78 @@ import ( | ||||
| 	"strconv" | ||||
| 	"strings" | ||||
| 	"time" | ||||
|  | ||||
| 	log "github.com/sirupsen/logrus" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/errorreport" | ||||
| 	"github.com/imgproxy/imgproxy/v2/ierrors" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagetype" | ||||
| 	"github.com/imgproxy/imgproxy/v2/metrics" | ||||
| 	"github.com/imgproxy/imgproxy/v2/options" | ||||
| 	"github.com/imgproxy/imgproxy/v2/processing" | ||||
| 	"github.com/imgproxy/imgproxy/v2/router" | ||||
| 	"github.com/imgproxy/imgproxy/v2/security" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	processingSem chan struct{} | ||||
|  | ||||
| 	headerVaryValue string | ||||
| 	fallbackImage   *imageData | ||||
| ) | ||||
|  | ||||
| const ( | ||||
| 	fallbackImageUsedCtxKey = ctxKey("fallbackImageUsed") | ||||
| ) | ||||
| type fallbackImageUsedCtxKey struct{} | ||||
|  | ||||
| func initProcessingHandler() error { | ||||
| 	var err error | ||||
|  | ||||
| 	processingSem = make(chan struct{}, conf.Concurrency) | ||||
| func initProcessingHandler() { | ||||
| 	processingSem = make(chan struct{}, config.Concurrency) | ||||
|  | ||||
| 	vary := make([]string, 0) | ||||
|  | ||||
| 	if conf.EnableWebpDetection || conf.EnforceWebp { | ||||
| 	if config.EnableWebpDetection || config.EnforceWebp { | ||||
| 		vary = append(vary, "Accept") | ||||
| 	} | ||||
|  | ||||
| 	if conf.EnableClientHints { | ||||
| 	if config.EnableClientHints { | ||||
| 		vary = append(vary, "DPR", "Viewport-Width", "Width") | ||||
| 	} | ||||
|  | ||||
| 	headerVaryValue = strings.Join(vary, ", ") | ||||
|  | ||||
| 	if fallbackImage, err = getFallbackImageData(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func respondWithImage(ctx context.Context, reqID string, r *http.Request, rw http.ResponseWriter, data []byte) { | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	imgdata := getImageData(ctx) | ||||
|  | ||||
| func respondWithImage(reqID string, r *http.Request, rw http.ResponseWriter, resultData *imagedata.ImageData, po *options.ProcessingOptions, originURL string, originData *imagedata.ImageData) { | ||||
| 	var contentDisposition string | ||||
| 	if len(po.Filename) > 0 { | ||||
| 		contentDisposition = po.Format.ContentDisposition(po.Filename) | ||||
| 		contentDisposition = resultData.Type.ContentDisposition(po.Filename) | ||||
| 	} else { | ||||
| 		contentDisposition = po.Format.ContentDispositionFromURL(getImageURL(ctx)) | ||||
| 		contentDisposition = resultData.Type.ContentDispositionFromURL(originURL) | ||||
| 	} | ||||
|  | ||||
| 	rw.Header().Set("Content-Type", po.Format.Mime()) | ||||
| 	rw.Header().Set("Content-Type", resultData.Type.Mime()) | ||||
| 	rw.Header().Set("Content-Disposition", contentDisposition) | ||||
|  | ||||
| 	if conf.SetCanonicalHeader { | ||||
| 		origin := getImageURL(ctx) | ||||
| 		if strings.HasPrefix(origin, "https://") || strings.HasPrefix(origin, "http://") { | ||||
| 			linkHeader := fmt.Sprintf(`<%s>; rel="canonical"`, origin) | ||||
| 	if config.SetCanonicalHeader { | ||||
| 		if strings.HasPrefix(originURL, "https://") || strings.HasPrefix(originURL, "http://") { | ||||
| 			linkHeader := fmt.Sprintf(`<%s>; rel="canonical"`, originURL) | ||||
| 			rw.Header().Set("Link", linkHeader) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	var cacheControl, expires string | ||||
|  | ||||
| 	if conf.CacheControlPassthrough && imgdata.Headers != nil { | ||||
| 		if val, ok := imgdata.Headers["Cache-Control"]; ok { | ||||
| 	if config.CacheControlPassthrough && originData.Headers != nil { | ||||
| 		if val, ok := originData.Headers["Cache-Control"]; ok { | ||||
| 			cacheControl = val | ||||
| 		} | ||||
| 		if val, ok := imgdata.Headers["Expires"]; ok { | ||||
| 		if val, ok := originData.Headers["Expires"]; ok { | ||||
| 			expires = val | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	if len(cacheControl) == 0 && len(expires) == 0 { | ||||
| 		cacheControl = fmt.Sprintf("max-age=%d, public", conf.TTL) | ||||
| 		expires = time.Now().Add(time.Second * time.Duration(conf.TTL)).Format(http.TimeFormat) | ||||
| 		cacheControl = fmt.Sprintf("max-age=%d, public", config.TTL) | ||||
| 		expires = time.Now().Add(time.Second * time.Duration(config.TTL)).Format(http.TimeFormat) | ||||
| 	} | ||||
|  | ||||
| 	if len(cacheControl) > 0 { | ||||
| @@ -93,128 +92,174 @@ func respondWithImage(ctx context.Context, reqID string, r *http.Request, rw htt | ||||
| 		rw.Header().Set("Vary", headerVaryValue) | ||||
| 	} | ||||
|  | ||||
| 	if conf.EnableDebugHeaders { | ||||
| 		rw.Header().Set("X-Origin-Content-Length", strconv.Itoa(len(imgdata.Data))) | ||||
| 	if config.EnableDebugHeaders { | ||||
| 		rw.Header().Set("X-Origin-Content-Length", strconv.Itoa(len(originData.Data))) | ||||
| 	} | ||||
|  | ||||
| 	rw.Header().Set("Content-Length", strconv.Itoa(len(data))) | ||||
| 	rw.Header().Set("Content-Length", strconv.Itoa(len(resultData.Data))) | ||||
| 	statusCode := 200 | ||||
| 	if getFallbackImageUsed(ctx) { | ||||
| 		statusCode = conf.FallbackImageHTTPCode | ||||
| 	if getFallbackImageUsed(r.Context()) { | ||||
| 		statusCode = config.FallbackImageHTTPCode | ||||
| 	} | ||||
| 	rw.WriteHeader(statusCode) | ||||
| 	rw.Write(data) | ||||
| 	rw.Write(resultData.Data) | ||||
|  | ||||
| 	imageURL := getImageURL(ctx) | ||||
|  | ||||
| 	logResponse(reqID, r, statusCode, nil, &imageURL, po) | ||||
| } | ||||
|  | ||||
| func respondWithNotModified(ctx context.Context, reqID string, r *http.Request, rw http.ResponseWriter) { | ||||
| 	rw.WriteHeader(304) | ||||
|  | ||||
| 	imageURL := getImageURL(ctx) | ||||
|  | ||||
| 	logResponse(reqID, r, 304, nil, &imageURL, getProcessingOptions(ctx)) | ||||
| 	router.LogResponse( | ||||
| 		reqID, r, statusCode, nil, | ||||
| 		log.Fields{ | ||||
| 			"image_url":          originURL, | ||||
| 			"processing_options": po, | ||||
| 		}, | ||||
| 	) | ||||
| } | ||||
|  | ||||
| func handleProcessing(reqID string, rw http.ResponseWriter, r *http.Request) { | ||||
| 	ctx := r.Context() | ||||
|  | ||||
| 	var dataDogCancel context.CancelFunc | ||||
| 	ctx, dataDogCancel, rw = startDataDogRootSpan(ctx, rw, r) | ||||
| 	defer dataDogCancel() | ||||
|  | ||||
| 	var newRelicCancel context.CancelFunc | ||||
| 	ctx, newRelicCancel, rw = startNewRelicTransaction(ctx, rw, r) | ||||
| 	defer newRelicCancel() | ||||
|  | ||||
| 	incrementPrometheusRequestsTotal() | ||||
| 	defer startPrometheusDuration(prometheusRequestDuration)() | ||||
|  | ||||
| 	select { | ||||
| 	case processingSem <- struct{}{}: | ||||
| 	case <-ctx.Done(): | ||||
| 		panic(newError(499, "Request was cancelled before processing", "Cancelled")) | ||||
| 	} | ||||
| 	defer func() { <-processingSem }() | ||||
|  | ||||
| 	ctx, timeoutCancel := context.WithTimeout(ctx, time.Duration(conf.WriteTimeout)*time.Second) | ||||
| 	ctx, timeoutCancel := context.WithTimeout(r.Context(), time.Duration(config.WriteTimeout)*time.Second) | ||||
| 	defer timeoutCancel() | ||||
|  | ||||
| 	ctx, err := parsePath(ctx, r) | ||||
| 	var metricsCancel context.CancelFunc | ||||
| 	ctx, metricsCancel, rw = metrics.StartRequest(ctx, rw, r) | ||||
| 	defer metricsCancel() | ||||
|  | ||||
| 	path := r.RequestURI | ||||
| 	if queryStart := strings.IndexByte(path, '?'); queryStart >= 0 { | ||||
| 		path = path[:queryStart] | ||||
| 	} | ||||
|  | ||||
| 	if len(config.PathPrefix) > 0 { | ||||
| 		path = strings.TrimPrefix(path, config.PathPrefix) | ||||
| 	} | ||||
|  | ||||
| 	path = strings.TrimPrefix(path, "/") | ||||
| 	signature := "" | ||||
|  | ||||
| 	if signatureEnd := strings.IndexByte(path, '/'); signatureEnd > 0 { | ||||
| 		signature = path[:signatureEnd] | ||||
| 		path = path[signatureEnd:] | ||||
| 	} else { | ||||
| 		panic(ierrors.New(404, fmt.Sprintf("Invalid path: %s", path), "Invalid URL")) | ||||
| 	} | ||||
|  | ||||
| 	if err := security.VerifySignature(signature, path); err != nil { | ||||
| 		panic(ierrors.New(403, err.Error(), "Forbidden")) | ||||
| 	} | ||||
|  | ||||
| 	po, imageURL, err := options.ParsePath(path, r.Header) | ||||
| 	if err != nil { | ||||
| 		panic(err) | ||||
| 	} | ||||
|  | ||||
| 	ctx, downloadcancel, err := downloadImageCtx(ctx) | ||||
| 	defer downloadcancel() | ||||
| 	if err != nil { | ||||
| 		sendErrorToDataDog(ctx, err) | ||||
| 		sendErrorToNewRelic(ctx, err) | ||||
| 		incrementPrometheusErrorsTotal("download") | ||||
| 	if !security.VerifySourceURL(imageURL) { | ||||
| 		panic(ierrors.New(404, fmt.Sprintf("Source URL is not allowed: %s", imageURL), "Invalid source")) | ||||
| 	} | ||||
|  | ||||
| 		if fallbackImage == nil { | ||||
| 	// SVG is a special case. Though saving to svg is not supported, SVG->SVG is. | ||||
| 	if !vips.SupportsSave(po.Format) && po.Format != imagetype.Unknown && po.Format != imagetype.SVG { | ||||
| 		panic(ierrors.New( | ||||
| 			422, | ||||
| 			fmt.Sprintf("Resulting image format is not supported: %s", po.Format), | ||||
| 			"Invalid URL", | ||||
| 		)) | ||||
| 	} | ||||
|  | ||||
| 	// The heavy part starts here, so we need to restrict concurrency | ||||
| 	select { | ||||
| 	case processingSem <- struct{}{}: | ||||
| 	case <-ctx.Done(): | ||||
| 		// We don't actually need to check timeout here, | ||||
| 		// but it's an easy way to check if this is an actual timeout | ||||
| 		// or the request was cancelled | ||||
| 		router.CheckTimeout(ctx) | ||||
| 	} | ||||
| 	defer func() { <-processingSem }() | ||||
|  | ||||
| 	originData, err := func() (*imagedata.ImageData, error) { | ||||
| 		defer metrics.StartDownloadingSegment(ctx)() | ||||
| 		return imagedata.Download(imageURL, "source image") | ||||
| 	}() | ||||
| 	if err == nil { | ||||
| 		defer originData.Close() | ||||
| 	} else { | ||||
| 		metrics.SendError(ctx, "download", err) | ||||
|  | ||||
| 		if imagedata.FallbackImage == nil { | ||||
| 			panic(err) | ||||
| 		} | ||||
|  | ||||
| 		if ierr, ok := err.(*imgproxyError); !ok || ierr.Unexpected { | ||||
| 			reportError(err, r) | ||||
| 		if ierr, ok := err.(*ierrors.Error); !ok || ierr.Unexpected { | ||||
| 			errorreport.Report(err, r) | ||||
| 		} | ||||
|  | ||||
| 		logWarning("Could not load image. Using fallback image: %s", err.Error()) | ||||
| 		ctx = setFallbackImageUsedCtx(ctx) | ||||
| 		ctx = context.WithValue(ctx, imageDataCtxKey, fallbackImage) | ||||
| 		log.Warningf("Could not load image. Using fallback image: %s", err.Error()) | ||||
| 		r = r.WithContext(setFallbackImageUsedCtx(r.Context())) | ||||
| 		originData = imagedata.FallbackImage | ||||
| 	} | ||||
|  | ||||
| 	checkTimeout(ctx) | ||||
| 	router.CheckTimeout(ctx) | ||||
|  | ||||
| 	if conf.ETagEnabled && !getFallbackImageUsed(ctx) { | ||||
| 		eTag := calcETag(ctx) | ||||
| 	if config.ETagEnabled && !getFallbackImageUsed(ctx) { | ||||
| 		eTag := calcETag(ctx, originData, po) | ||||
| 		rw.Header().Set("ETag", eTag) | ||||
|  | ||||
| 		if eTag == r.Header.Get("If-None-Match") { | ||||
| 			respondWithNotModified(ctx, reqID, r, rw) | ||||
| 			rw.WriteHeader(304) | ||||
| 			router.LogResponse(reqID, r, 304, nil, log.Fields{"image_url": imageURL}) | ||||
| 			return | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	checkTimeout(ctx) | ||||
| 	router.CheckTimeout(ctx) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	if len(po.SkipProcessingFormats) > 0 { | ||||
| 		imgdata := getImageData(ctx) | ||||
| 	if originData.Type == po.Format || po.Format == imagetype.Unknown { | ||||
| 		// Don't process SVG | ||||
| 		if originData.Type == imagetype.SVG { | ||||
| 			respondWithImage(reqID, r, rw, originData, po, imageURL, originData) | ||||
| 			return | ||||
| 		} | ||||
|  | ||||
| 		if imgdata.Type == po.Format || po.Format == imageTypeUnknown { | ||||
| 		if len(po.SkipProcessingFormats) > 0 { | ||||
| 			for _, f := range po.SkipProcessingFormats { | ||||
| 				if f == imgdata.Type { | ||||
| 					po.Format = imgdata.Type | ||||
| 					respondWithImage(ctx, reqID, r, rw, imgdata.Data) | ||||
| 				if f == originData.Type { | ||||
| 					respondWithImage(reqID, r, rw, originData, po, imageURL, originData) | ||||
| 					return | ||||
| 				} | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	imageData, processcancel, err := processImage(ctx) | ||||
| 	defer processcancel() | ||||
| 	if err != nil { | ||||
| 		sendErrorToDataDog(ctx, err) | ||||
| 		sendErrorToNewRelic(ctx, err) | ||||
| 		incrementPrometheusErrorsTotal("processing") | ||||
| 		panic(err) | ||||
| 	if !vips.SupportsLoad(originData.Type) { | ||||
| 		panic(ierrors.New( | ||||
| 			422, | ||||
| 			fmt.Sprintf("Source image format is not supported: %s", originData.Type), | ||||
| 			"Invalid URL", | ||||
| 		)) | ||||
| 	} | ||||
|  | ||||
| 	checkTimeout(ctx) | ||||
| 	// At this point we can't allow requested format to be SVG as we can't save SVGs | ||||
| 	if po.Format == imagetype.SVG { | ||||
| 		panic(ierrors.New(422, "Resulting image format is not supported: svg", "Invalid URL")) | ||||
| 	} | ||||
|  | ||||
| 	respondWithImage(ctx, reqID, r, rw, imageData) | ||||
| 	resultData, err := func() (*imagedata.ImageData, error) { | ||||
| 		defer metrics.StartProcessingSegment(ctx)() | ||||
| 		return processing.ProcessImage(ctx, originData, po) | ||||
| 	}() | ||||
| 	if err != nil { | ||||
| 		metrics.SendError(ctx, "processing", err) | ||||
| 		panic(err) | ||||
| 	} | ||||
| 	defer resultData.Close() | ||||
|  | ||||
| 	router.CheckTimeout(ctx) | ||||
|  | ||||
| 	respondWithImage(reqID, r, rw, resultData, po, imageURL, originData) | ||||
| } | ||||
|  | ||||
| func setFallbackImageUsedCtx(ctx context.Context) context.Context { | ||||
| 	return context.WithValue(ctx, fallbackImageUsedCtxKey, true) | ||||
| 	return context.WithValue(ctx, fallbackImageUsedCtxKey{}, true) | ||||
| } | ||||
|  | ||||
| func getFallbackImageUsed(ctx context.Context) bool { | ||||
| 	result, _ := ctx.Value(fallbackImageUsedCtxKey).(bool) | ||||
| 	result, _ := ctx.Value(fallbackImageUsedCtxKey{}).(bool) | ||||
| 	return result | ||||
| } | ||||
|   | ||||
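The handler now performs the signature split itself before delegating to options.ParsePath, so every request path follows the shape used in the tests below, for example:

    /unsafe/rs:fill:4:4/plain/local:///test1.png

Here the first segment is the signature ("unsafe" is accepted in the tests only because no keys/salts are configured), "rs:fill:4:4" is the processing-options part, and everything after "plain/" is the source URL, optionally suffixed with @extension to force the output format.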
							
								
								
									
processing_handler_test.go (new file, 281 lines)
| @@ -0,0 +1,281 @@ | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"bytes" | ||||
| 	"io" | ||||
| 	"net/http" | ||||
| 	"net/http/httptest" | ||||
| 	"os" | ||||
| 	"path/filepath" | ||||
| 	"testing" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagemeta" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagetype" | ||||
| 	"github.com/imgproxy/imgproxy/v2/router" | ||||
| 	"github.com/imgproxy/imgproxy/v2/vips" | ||||
| 	"github.com/sirupsen/logrus" | ||||
| 	"github.com/stretchr/testify/assert" | ||||
| 	"github.com/stretchr/testify/suite" | ||||
| ) | ||||
|  | ||||
| type ProcessingHandlerTestSuite struct { | ||||
| 	suite.Suite | ||||
|  | ||||
| 	router *router.Router | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) SetupSuite() { | ||||
| 	config.Reset() | ||||
|  | ||||
| 	wd, err := os.Getwd() | ||||
| 	assert.Nil(s.T(), err) | ||||
|  | ||||
| 	config.LocalFileSystemRoot = filepath.Join(wd, "/testdata") | ||||
|  | ||||
| 	logrus.SetOutput(io.Discard) | ||||
|  | ||||
| 	initialize() | ||||
|  | ||||
| 	s.router = buildRouter() | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TearDownSuite() { | ||||
| 	shutdown() | ||||
| 	logrus.SetOutput(os.Stdout) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) SetupTest() { | ||||
| 	// We don't need config.LocalFileSystemRoot anymore as it is used | ||||
| 	// only during initialization | ||||
| 	config.Reset() | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) send(path string, header ...http.Header) *httptest.ResponseRecorder { | ||||
| 	req := httptest.NewRequest(http.MethodGet, path, nil) | ||||
| 	rw := httptest.NewRecorder() | ||||
|  | ||||
| 	if len(header) > 0 { | ||||
| 		req.Header = header[0] | ||||
| 	} | ||||
|  | ||||
| 	s.router.ServeHTTP(rw, req) | ||||
|  | ||||
| 	return rw | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) readTestFile(name string) []byte { | ||||
| 	wd, err := os.Getwd() | ||||
| 	assert.Nil(s.T(), err) | ||||
|  | ||||
| 	data, err := os.ReadFile(filepath.Join(wd, "testdata", name)) | ||||
| 	assert.Nil(s.T(), err) | ||||
|  | ||||
| 	return data | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) readBody(res *http.Response) []byte { | ||||
| 	data, err := io.ReadAll(res.Body) | ||||
| 	assert.Nil(s.T(), err) | ||||
| 	return data | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TestRequest() { | ||||
| 	rw := s.send("/unsafe/rs:fill:4:4/plain/local:///test1.png") | ||||
| 	res := rw.Result() | ||||
|  | ||||
| 	assert.Equal(s.T(), 200, res.StatusCode) | ||||
| 	assert.Equal(s.T(), "image/png", res.Header.Get("Content-Type")) | ||||
|  | ||||
| 	meta, err := imagemeta.DecodeMeta(res.Body) | ||||
|  | ||||
| 	assert.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), "png", meta.Format()) | ||||
| 	assert.Equal(s.T(), 4, meta.Width()) | ||||
| 	assert.Equal(s.T(), 4, meta.Height()) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TestSignatureValidationFailure() { | ||||
| 	config.Keys = [][]byte{[]byte("test-key")} | ||||
| 	config.Salts = [][]byte{[]byte("test-salt")} | ||||
|  | ||||
| 	rw := s.send("/unsafe/rs:fill:4:4/plain/local:///test1.png") | ||||
| 	res := rw.Result() | ||||
|  | ||||
| 	assert.Equal(s.T(), 403, res.StatusCode) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TestSignatureValidationSuccess() { | ||||
| 	config.Keys = [][]byte{[]byte("test-key")} | ||||
| 	config.Salts = [][]byte{[]byte("test-salt")} | ||||
|  | ||||
| 	rw := s.send("/My9d3xq_PYpVHsPrCyww0Kh1w5KZeZhIlWhsa4az1TI/rs:fill:4:4/plain/local:///test1.png") | ||||
| 	res := rw.Result() | ||||
|  | ||||
| 	assert.Equal(s.T(), 200, res.StatusCode) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TestSourceValidationFailure() { | ||||
| 	config.AllowedSources = []string{"https://"} | ||||
|  | ||||
| 	rw := s.send("/unsafe/rs:fill:4:4/plain/local:///test1.png") | ||||
| 	res := rw.Result() | ||||
|  | ||||
| 	assert.Equal(s.T(), 404, res.StatusCode) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TestSourceValidationSuccess() { | ||||
| 	config.AllowedSources = []string{"local:///"} | ||||
|  | ||||
| 	rw := s.send("/unsafe/rs:fill:4:4/plain/local:///test1.png") | ||||
| 	res := rw.Result() | ||||
|  | ||||
| 	assert.Equal(s.T(), 200, res.StatusCode) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TestSourceFormatNotSupported() { | ||||
| 	vips.DisableLoadSupport(imagetype.PNG) | ||||
| 	defer vips.ResetLoadSupport() | ||||
|  | ||||
| 	rw := s.send("/unsafe/rs:fill:4:4/plain/local:///test1.png") | ||||
| 	res := rw.Result() | ||||
|  | ||||
| 	assert.Equal(s.T(), 422, res.StatusCode) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TestResultingFormatNotSupported() { | ||||
| 	vips.DisableSaveSupport(imagetype.PNG) | ||||
| 	defer vips.ResetSaveSupport() | ||||
|  | ||||
| 	rw := s.send("/unsafe/rs:fill:4:4/plain/local:///test1.png@png") | ||||
| 	res := rw.Result() | ||||
|  | ||||
| 	assert.Equal(s.T(), 422, res.StatusCode) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TestSkipProcessingConfig() { | ||||
| 	config.SkipProcessingFormats = []imagetype.Type{imagetype.PNG} | ||||
|  | ||||
| 	rw := s.send("/unsafe/rs:fill:4:4/plain/local:///test1.png") | ||||
| 	res := rw.Result() | ||||
|  | ||||
| 	assert.Equal(s.T(), 200, res.StatusCode) | ||||
|  | ||||
| 	actual := s.readBody(res) | ||||
| 	expected := s.readTestFile("test1.png") | ||||
|  | ||||
| 	assert.True(s.T(), bytes.Equal(expected, actual)) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TestSkipProcessingPO() { | ||||
| 	rw := s.send("/unsafe/rs:fill:4:4/skp:png/plain/local:///test1.png") | ||||
| 	res := rw.Result() | ||||
|  | ||||
| 	assert.Equal(s.T(), 200, res.StatusCode) | ||||
|  | ||||
| 	actual := s.readBody(res) | ||||
| 	expected := s.readTestFile("test1.png") | ||||
|  | ||||
| 	assert.True(s.T(), bytes.Equal(expected, actual)) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TestSkipProcessingSameFormat() { | ||||
| 	config.SkipProcessingFormats = []imagetype.Type{imagetype.PNG} | ||||
|  | ||||
| 	rw := s.send("/unsafe/rs:fill:4:4/plain/local:///test1.png@png") | ||||
| 	res := rw.Result() | ||||
|  | ||||
| 	assert.Equal(s.T(), 200, res.StatusCode) | ||||
|  | ||||
| 	actual := s.readBody(res) | ||||
| 	expected := s.readTestFile("test1.png") | ||||
|  | ||||
| 	assert.True(s.T(), bytes.Equal(expected, actual)) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TestSkipProcessingDifferentFormat() { | ||||
| 	config.SkipProcessingFormats = []imagetype.Type{imagetype.PNG} | ||||
|  | ||||
| 	rw := s.send("/unsafe/rs:fill:4:4/plain/local:///test1.png@jpg") | ||||
| 	res := rw.Result() | ||||
|  | ||||
| 	assert.Equal(s.T(), 200, res.StatusCode) | ||||
|  | ||||
| 	actual := s.readBody(res) | ||||
| 	expected := s.readTestFile("test1.png") | ||||
|  | ||||
| 	assert.False(s.T(), bytes.Equal(expected, actual)) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TestSkipProcessingSVG() { | ||||
| 	rw := s.send("/unsafe/rs:fill:4:4/plain/local:///test1.svg") | ||||
| 	res := rw.Result() | ||||
|  | ||||
| 	assert.Equal(s.T(), 200, res.StatusCode) | ||||
|  | ||||
| 	actual := s.readBody(res) | ||||
| 	expected := s.readTestFile("test1.svg") | ||||
|  | ||||
| 	assert.True(s.T(), bytes.Equal(expected, actual)) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TestNotSkipProcessingSVGToJPG() { | ||||
| 	rw := s.send("/unsafe/rs:fill:4:4/plain/local:///test1.svg@jpg") | ||||
| 	res := rw.Result() | ||||
|  | ||||
| 	assert.Equal(s.T(), 200, res.StatusCode) | ||||
|  | ||||
| 	actual := s.readBody(res) | ||||
| 	expected := s.readTestFile("test1.svg") | ||||
|  | ||||
| 	assert.False(s.T(), bytes.Equal(expected, actual)) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TestErrorSavingToSVG() { | ||||
| 	rw := s.send("/unsafe/rs:fill:4:4/plain/local:///test1.png@svg") | ||||
| 	res := rw.Result() | ||||
|  | ||||
| 	assert.Equal(s.T(), 422, res.StatusCode) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TestCacheControlPassthrough() { | ||||
| 	config.CacheControlPassthrough = true | ||||
|  | ||||
| 	ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { | ||||
| 		data := s.readTestFile("test1.png") | ||||
| 		rw.Header().Set("Cache-Control", "fake-cache-control") | ||||
| 		rw.Header().Set("Expires", "fake-expires") | ||||
| 		rw.WriteHeader(200) | ||||
| 		rw.Write(data) | ||||
| 	})) | ||||
| 	defer ts.Close() | ||||
|  | ||||
| 	rw := s.send("/unsafe/rs:fill:4:4/plain/" + ts.URL) | ||||
| 	res := rw.Result() | ||||
|  | ||||
| 	assert.Equal(s.T(), "fake-cache-control", res.Header.Get("Cache-Control")) | ||||
| 	assert.Equal(s.T(), "fake-expires", res.Header.Get("Expires")) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingHandlerTestSuite) TestCacheControlPassthroughDisabled() { | ||||
| 	config.CacheControlPassthrough = false | ||||
|  | ||||
| 	ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { | ||||
| 		data := s.readTestFile("test1.png") | ||||
| 		rw.Header().Set("Cache-Control", "fake-cache-control") | ||||
| 		rw.Header().Set("Expires", "fake-expires") | ||||
| 		rw.WriteHeader(200) | ||||
| 		rw.Write(data) | ||||
| 	})) | ||||
| 	defer ts.Close() | ||||
|  | ||||
| 	rw := s.send("/unsafe/rs:fill:4:4/plain/" + ts.URL) | ||||
| 	res := rw.Result() | ||||
|  | ||||
| 	assert.NotEqual(s.T(), "fake-cache-control", res.Header.Get("Cache-Control")) | ||||
| 	assert.NotEqual(s.T(), "fake-expires", res.Header.Get("Expires")) | ||||
| } | ||||
|  | ||||
| func TestProcessingHandler(t *testing.T) { | ||||
| 	suite.Run(t, new(ProcessingHandlerTestSuite)) | ||||
| } | ||||
| @@ -1,628 +0,0 @@ | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"encoding/base64" | ||||
| 	"fmt" | ||||
| 	"net/http" | ||||
| 	"net/url" | ||||
| 	"testing" | ||||
|  | ||||
| 	"github.com/stretchr/testify/assert" | ||||
| 	"github.com/stretchr/testify/require" | ||||
| 	"github.com/stretchr/testify/suite" | ||||
| ) | ||||
|  | ||||
| type ProcessingOptionsTestSuite struct{ MainTestSuite } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) getRequest(uri string) *http.Request { | ||||
| 	return &http.Request{Method: "GET", RequestURI: uri, Header: make(http.Header)} | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseBase64URL() { | ||||
| 	imageURL := "http://images.dev/lorem/ipsum.jpg?param=value" | ||||
| 	req := s.getRequest(fmt.Sprintf("/unsafe/size:100:100/%s.png", base64.RawURLEncoding.EncodeToString([]byte(imageURL)))) | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), imageURL, getImageURL(ctx)) | ||||
| 	assert.Equal(s.T(), imageTypePNG, getProcessingOptions(ctx).Format) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseBase64URLWithoutExtension() { | ||||
| 	imageURL := "http://images.dev/lorem/ipsum.jpg?param=value" | ||||
| 	req := s.getRequest(fmt.Sprintf("/unsafe/size:100:100/%s", base64.RawURLEncoding.EncodeToString([]byte(imageURL)))) | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), imageURL, getImageURL(ctx)) | ||||
| 	assert.Equal(s.T(), imageTypeUnknown, getProcessingOptions(ctx).Format) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseBase64URLWithBase() { | ||||
| 	conf.BaseURL = "http://images.dev/" | ||||
|  | ||||
| 	imageURL := "lorem/ipsum.jpg?param=value" | ||||
| 	req := s.getRequest(fmt.Sprintf("/unsafe/size:100:100/%s.png", base64.RawURLEncoding.EncodeToString([]byte(imageURL)))) | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), fmt.Sprintf("%s%s", conf.BaseURL, imageURL), getImageURL(ctx)) | ||||
| 	assert.Equal(s.T(), imageTypePNG, getProcessingOptions(ctx).Format) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePlainURL() { | ||||
| 	imageURL := "http://images.dev/lorem/ipsum.jpg" | ||||
| 	req := s.getRequest(fmt.Sprintf("/unsafe/size:100:100/plain/%s@png", imageURL)) | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), imageURL, getImageURL(ctx)) | ||||
| 	assert.Equal(s.T(), imageTypePNG, getProcessingOptions(ctx).Format) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePlainURLWithoutExtension() { | ||||
| 	imageURL := "http://images.dev/lorem/ipsum.jpg" | ||||
| 	req := s.getRequest(fmt.Sprintf("/unsafe/size:100:100/plain/%s", imageURL)) | ||||
|  | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), imageURL, getImageURL(ctx)) | ||||
| 	assert.Equal(s.T(), imageTypeUnknown, getProcessingOptions(ctx).Format) | ||||
| } | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePlainURLEscaped() { | ||||
| 	imageURL := "http://images.dev/lorem/ipsum.jpg?param=value" | ||||
| 	req := s.getRequest(fmt.Sprintf("/unsafe/size:100:100/plain/%s@png", url.PathEscape(imageURL))) | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), imageURL, getImageURL(ctx)) | ||||
| 	assert.Equal(s.T(), imageTypePNG, getProcessingOptions(ctx).Format) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePlainURLWithBase() { | ||||
| 	conf.BaseURL = "http://images.dev/" | ||||
|  | ||||
| 	imageURL := "lorem/ipsum.jpg" | ||||
| 	req := s.getRequest(fmt.Sprintf("/unsafe/size:100:100/plain/%s@png", imageURL)) | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), fmt.Sprintf("%s%s", conf.BaseURL, imageURL), getImageURL(ctx)) | ||||
| 	assert.Equal(s.T(), imageTypePNG, getProcessingOptions(ctx).Format) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePlainURLEscapedWithBase() { | ||||
| 	conf.BaseURL = "http://images.dev/" | ||||
|  | ||||
| 	imageURL := "lorem/ipsum.jpg?param=value" | ||||
| 	req := s.getRequest(fmt.Sprintf("/unsafe/size:100:100/plain/%s@png", url.PathEscape(imageURL))) | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| 	assert.Equal(s.T(), fmt.Sprintf("%s%s", conf.BaseURL, imageURL), getImageURL(ctx)) | ||||
| 	assert.Equal(s.T(), imageTypePNG, getProcessingOptions(ctx).Format) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseURLAllowedSource() { | ||||
| 	conf.AllowedSources = []string{"local://", "http://images.dev/"} | ||||
|  | ||||
| 	req := s.getRequest("/unsafe/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	_, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseURLNotAllowedSource() { | ||||
| 	conf.AllowedSources = []string{"local://", "http://images.dev/"} | ||||
|  | ||||
| 	req := s.getRequest("/unsafe/plain/s3://images/lorem/ipsum.jpg") | ||||
| 	_, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Error(s.T(), err) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathFormat() { | ||||
| 	req := s.getRequest("/unsafe/format:webp/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), imageTypeWEBP, po.Format) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathResize() { | ||||
| 	req := s.getRequest("/unsafe/resize:fill:100:200:1/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), resizeFill, po.ResizingType) | ||||
| 	assert.Equal(s.T(), 100, po.Width) | ||||
| 	assert.Equal(s.T(), 200, po.Height) | ||||
| 	assert.True(s.T(), po.Enlarge) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathResizingType() { | ||||
| 	req := s.getRequest("/unsafe/resizing_type:fill/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), resizeFill, po.ResizingType) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathSize() { | ||||
| 	req := s.getRequest("/unsafe/size:100:200:1/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), 100, po.Width) | ||||
| 	assert.Equal(s.T(), 200, po.Height) | ||||
| 	assert.True(s.T(), po.Enlarge) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathWidth() { | ||||
| 	req := s.getRequest("/unsafe/width:100/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), 100, po.Width) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathHeight() { | ||||
| 	req := s.getRequest("/unsafe/height:100/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), 100, po.Height) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathEnlarge() { | ||||
| 	req := s.getRequest("/unsafe/enlarge:1/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.True(s.T(), po.Enlarge) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathExtend() { | ||||
| 	req := s.getRequest("/unsafe/extend:1:so:10:20/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), true, po.Extend.Enabled) | ||||
| 	assert.Equal(s.T(), gravitySouth, po.Extend.Gravity.Type) | ||||
| 	assert.Equal(s.T(), 10.0, po.Extend.Gravity.X) | ||||
| 	assert.Equal(s.T(), 20.0, po.Extend.Gravity.Y) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathGravity() { | ||||
| 	req := s.getRequest("/unsafe/gravity:soea/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), gravitySouthEast, po.Gravity.Type) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathGravityFocuspoint() { | ||||
| 	req := s.getRequest("/unsafe/gravity:fp:0.5:0.75/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), gravityFocusPoint, po.Gravity.Type) | ||||
| 	assert.Equal(s.T(), 0.5, po.Gravity.X) | ||||
| 	assert.Equal(s.T(), 0.75, po.Gravity.Y) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathQuality() { | ||||
| 	req := s.getRequest("/unsafe/quality:55/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), 55, po.Quality) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathBackground() { | ||||
| 	req := s.getRequest("/unsafe/background:128:129:130/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.True(s.T(), po.Flatten) | ||||
| 	assert.Equal(s.T(), uint8(128), po.Background.R) | ||||
| 	assert.Equal(s.T(), uint8(129), po.Background.G) | ||||
| 	assert.Equal(s.T(), uint8(130), po.Background.B) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathBackgroundHex() { | ||||
| 	req := s.getRequest("/unsafe/background:ffddee/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.True(s.T(), po.Flatten) | ||||
| 	assert.Equal(s.T(), uint8(0xff), po.Background.R) | ||||
| 	assert.Equal(s.T(), uint8(0xdd), po.Background.G) | ||||
| 	assert.Equal(s.T(), uint8(0xee), po.Background.B) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathBackgroundDisable() { | ||||
| 	req := s.getRequest("/unsafe/background:fff/background:/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.False(s.T(), po.Flatten) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathBlur() { | ||||
| 	req := s.getRequest("/unsafe/blur:0.2/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), float32(0.2), po.Blur) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathSharpen() { | ||||
| 	req := s.getRequest("/unsafe/sharpen:0.2/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), float32(0.2), po.Sharpen) | ||||
| } | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathDpr() { | ||||
| 	req := s.getRequest("/unsafe/dpr:2/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), 2.0, po.Dpr) | ||||
| } | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathWatermark() { | ||||
| 	req := s.getRequest("/unsafe/watermark:0.5:soea:10:20:0.6/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.True(s.T(), po.Watermark.Enabled) | ||||
| 	assert.Equal(s.T(), gravitySouthEast, po.Watermark.Gravity.Type) | ||||
| 	assert.Equal(s.T(), 10.0, po.Watermark.Gravity.X) | ||||
| 	assert.Equal(s.T(), 20.0, po.Watermark.Gravity.Y) | ||||
| 	assert.Equal(s.T(), 0.6, po.Watermark.Scale) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathPreset() { | ||||
| 	conf.Presets["test1"] = urlOptions{ | ||||
| 		urlOption{Name: "resizing_type", Args: []string{"fill"}}, | ||||
| 	} | ||||
|  | ||||
| 	conf.Presets["test2"] = urlOptions{ | ||||
| 		urlOption{Name: "blur", Args: []string{"0.2"}}, | ||||
| 		urlOption{Name: "quality", Args: []string{"50"}}, | ||||
| 	} | ||||
|  | ||||
| 	req := s.getRequest("/unsafe/preset:test1:test2/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), resizeFill, po.ResizingType) | ||||
| 	assert.Equal(s.T(), float32(0.2), po.Blur) | ||||
| 	assert.Equal(s.T(), 50, po.Quality) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathPresetDefault() { | ||||
| 	conf.Presets["default"] = urlOptions{ | ||||
| 		urlOption{Name: "resizing_type", Args: []string{"fill"}}, | ||||
| 		urlOption{Name: "blur", Args: []string{"0.2"}}, | ||||
| 		urlOption{Name: "quality", Args: []string{"50"}}, | ||||
| 	} | ||||
|  | ||||
| 	req := s.getRequest("/unsafe/quality:70/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), resizeFill, po.ResizingType) | ||||
| 	assert.Equal(s.T(), float32(0.2), po.Blur) | ||||
| 	assert.Equal(s.T(), 70, po.Quality) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathPresetLoopDetection() { | ||||
| 	conf.Presets["test1"] = urlOptions{ | ||||
| 		urlOption{Name: "resizing_type", Args: []string{"fill"}}, | ||||
| 	} | ||||
|  | ||||
| 	conf.Presets["test2"] = urlOptions{ | ||||
| 		urlOption{Name: "blur", Args: []string{"0.2"}}, | ||||
| 		urlOption{Name: "quality", Args: []string{"50"}}, | ||||
| 	} | ||||
|  | ||||
| 	req := s.getRequest("/unsafe/preset:test1:test2:test1/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	require.ElementsMatch(s.T(), po.UsedPresets, []string{"test1", "test2"}) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathCachebuster() { | ||||
| 	req := s.getRequest("/unsafe/cachebuster:123/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), "123", po.CacheBuster) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathStripMetadata() { | ||||
| 	req := s.getRequest("/unsafe/strip_metadata:true/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.True(s.T(), po.StripMetadata) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathWebpDetection() { | ||||
| 	conf.EnableWebpDetection = true | ||||
|  | ||||
| 	req := s.getRequest("/unsafe/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	req.Header.Set("Accept", "image/webp") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), true, po.PreferWebP) | ||||
| 	assert.Equal(s.T(), false, po.EnforceWebP) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathWebpEnforce() { | ||||
| 	conf.EnforceWebp = true | ||||
|  | ||||
| 	req := s.getRequest("/unsafe/plain/http://images.dev/lorem/ipsum.jpg@png") | ||||
| 	req.Header.Set("Accept", "image/webp") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), true, po.PreferWebP) | ||||
| 	assert.Equal(s.T(), true, po.EnforceWebP) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathWidthHeader() { | ||||
| 	conf.EnableClientHints = true | ||||
|  | ||||
| 	req := s.getRequest("/unsafe/plain/http://images.dev/lorem/ipsum.jpg@png") | ||||
| 	req.Header.Set("Width", "100") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), 100, po.Width) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathWidthHeaderDisabled() { | ||||
| 	req := s.getRequest("/unsafe/plain/http://images.dev/lorem/ipsum.jpg@png") | ||||
| 	req.Header.Set("Width", "100") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), 0, po.Width) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathWidthHeaderRedefine() { | ||||
| 	conf.EnableClientHints = true | ||||
|  | ||||
| 	req := s.getRequest("/unsafe/width:150/plain/http://images.dev/lorem/ipsum.jpg@png") | ||||
| 	req.Header.Set("Width", "100") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), 150, po.Width) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathViewportWidthHeader() { | ||||
| 	conf.EnableClientHints = true | ||||
|  | ||||
| 	req := s.getRequest("/unsafe/plain/http://images.dev/lorem/ipsum.jpg@png") | ||||
| 	req.Header.Set("Viewport-Width", "100") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), 100, po.Width) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathViewportWidthHeaderDisabled() { | ||||
| 	req := s.getRequest("/unsafe/plain/http://images.dev/lorem/ipsum.jpg@png") | ||||
| 	req.Header.Set("Viewport-Width", "100") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), 0, po.Width) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathViewportWidthHeaderRedefine() { | ||||
| 	conf.EnableClientHints = true | ||||
|  | ||||
| 	req := s.getRequest("/unsafe/width:150/plain/http://images.dev/lorem/ipsum.jpg@png") | ||||
| 	req.Header.Set("Viewport-Width", "100") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), 150, po.Width) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathDprHeader() { | ||||
| 	conf.EnableClientHints = true | ||||
|  | ||||
| 	req := s.getRequest("/unsafe/plain/http://images.dev/lorem/ipsum.jpg@png") | ||||
| 	req.Header.Set("DPR", "2") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), 2.0, po.Dpr) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathDprHeaderDisabled() { | ||||
| 	req := s.getRequest("/unsafe/plain/http://images.dev/lorem/ipsum.jpg@png") | ||||
| 	req.Header.Set("DPR", "2") | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), 1.0, po.Dpr) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathSigned() { | ||||
| 	conf.Keys = []securityKey{securityKey("test-key")} | ||||
| 	conf.Salts = []securityKey{securityKey("test-salt")} | ||||
| 	conf.AllowInsecure = false | ||||
|  | ||||
| 	req := s.getRequest("/HcvNognEV1bW6f8zRqxNYuOkV0IUf1xloRb57CzbT4g/width:150/plain/http://images.dev/lorem/ipsum.jpg@png") | ||||
| 	_, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathSignedInvalid() { | ||||
| 	conf.Keys = []securityKey{securityKey("test-key")} | ||||
| 	conf.Salts = []securityKey{securityKey("test-salt")} | ||||
| 	conf.AllowInsecure = false | ||||
|  | ||||
| 	req := s.getRequest("/unsafe/width:150/plain/http://images.dev/lorem/ipsum.jpg@png") | ||||
| 	_, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Error(s.T(), err) | ||||
| 	assert.Equal(s.T(), errInvalidSignature.Error(), err.Error()) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParsePathOnlyPresets() { | ||||
| 	conf.OnlyPresets = true | ||||
| 	conf.Presets["test1"] = urlOptions{ | ||||
| 		urlOption{Name: "blur", Args: []string{"0.2"}}, | ||||
| 	} | ||||
| 	conf.Presets["test2"] = urlOptions{ | ||||
| 		urlOption{Name: "quality", Args: []string{"50"}}, | ||||
| 	} | ||||
|  | ||||
| 	req := s.getRequest("/unsafe/test1:test2/plain/http://images.dev/lorem/ipsum.jpg") | ||||
|  | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), float32(0.2), po.Blur) | ||||
| 	assert.Equal(s.T(), 50, po.Quality) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseSkipProcessing() { | ||||
| 	req := s.getRequest("/unsafe/skp:jpg:png/plain/http://images.dev/lorem/ipsum.jpg") | ||||
|  | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), []imageType{imageTypeJPEG, imageTypePNG}, po.SkipProcessingFormats) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseSkipProcessingInvalid() { | ||||
| 	req := s.getRequest("/unsafe/skp:jpg:png:bad_format/plain/http://images.dev/lorem/ipsum.jpg") | ||||
|  | ||||
| 	_, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Error(s.T(), err) | ||||
| 	assert.Equal(s.T(), "Invalid image format in skip processing: bad_format", err.Error()) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseExpires() { | ||||
| 	req := s.getRequest("/unsafe/exp:32503669200/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	_, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseExpiresExpired() { | ||||
| 	req := s.getRequest("/unsafe/exp:1609448400/plain/http://images.dev/lorem/ipsum.jpg") | ||||
| 	_, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Error(s.T(), err) | ||||
| 	assert.Equal(s.T(), msgExpiredURL, err.Error()) | ||||
| } | ||||
|  | ||||
| func (s *ProcessingOptionsTestSuite) TestParseBase64URLOnlyPresets() { | ||||
| 	conf.OnlyPresets = true | ||||
| 	conf.Presets["test1"] = urlOptions{ | ||||
| 		urlOption{Name: "blur", Args: []string{"0.2"}}, | ||||
| 	} | ||||
| 	conf.Presets["test2"] = urlOptions{ | ||||
| 		urlOption{Name: "quality", Args: []string{"50"}}, | ||||
| 	} | ||||
|  | ||||
| 	imageURL := "http://images.dev/lorem/ipsum.jpg?param=value" | ||||
| 	req := s.getRequest(fmt.Sprintf("/unsafe/test1:test2/%s.png", base64.RawURLEncoding.EncodeToString([]byte(imageURL)))) | ||||
|  | ||||
| 	ctx, err := parsePath(context.Background(), req) | ||||
|  | ||||
| 	require.Nil(s.T(), err) | ||||
|  | ||||
| 	po := getProcessingOptions(ctx) | ||||
| 	assert.Equal(s.T(), float32(0.2), po.Blur) | ||||
| 	assert.Equal(s.T(), 50, po.Quality) | ||||
| } | ||||
| func TestProcessingOptions(t *testing.T) { | ||||
| 	suite.Run(t, new(ProcessingOptionsTestSuite)) | ||||
| } | ||||
prometheus.go (180 lines deleted)
							| @@ -1,180 +0,0 @@ | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"fmt" | ||||
| 	"net/http" | ||||
| 	"time" | ||||
|  | ||||
| 	"github.com/prometheus/client_golang/prometheus" | ||||
| 	"github.com/prometheus/client_golang/prometheus/promhttp" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	prometheusEnabled = false | ||||
|  | ||||
| 	prometheusRequestsTotal      prometheus.Counter | ||||
| 	prometheusErrorsTotal        *prometheus.CounterVec | ||||
| 	prometheusRequestDuration    prometheus.Histogram | ||||
| 	prometheusDownloadDuration   prometheus.Histogram | ||||
| 	prometheusProcessingDuration prometheus.Histogram | ||||
| 	prometheusBufferSize         *prometheus.HistogramVec | ||||
| 	prometheusBufferDefaultSize  *prometheus.GaugeVec | ||||
| 	prometheusBufferMaxSize      *prometheus.GaugeVec | ||||
| 	prometheusVipsMemory         prometheus.GaugeFunc | ||||
| 	prometheusVipsMaxMemory      prometheus.GaugeFunc | ||||
| 	prometheusVipsAllocs         prometheus.GaugeFunc | ||||
| ) | ||||
|  | ||||
| func initPrometheus() { | ||||
| 	if len(conf.PrometheusBind) == 0 { | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	prometheusRequestsTotal = prometheus.NewCounter(prometheus.CounterOpts{ | ||||
| 		Namespace: conf.PrometheusNamespace, | ||||
| 		Name:      "requests_total", | ||||
| 		Help:      "A counter of the total number of HTTP requests imgproxy processed.", | ||||
| 	}) | ||||
|  | ||||
| 	prometheusErrorsTotal = prometheus.NewCounterVec(prometheus.CounterOpts{ | ||||
| 		Namespace: conf.PrometheusNamespace, | ||||
| 		Name:      "errors_total", | ||||
| 		Help:      "A counter of the occurred errors separated by type.", | ||||
| 	}, []string{"type"}) | ||||
|  | ||||
| 	prometheusRequestDuration = prometheus.NewHistogram(prometheus.HistogramOpts{ | ||||
| 		Namespace: conf.PrometheusNamespace, | ||||
| 		Name:      "request_duration_seconds", | ||||
| 		Help:      "A histogram of the response latency.", | ||||
| 	}) | ||||
|  | ||||
| 	prometheusDownloadDuration = prometheus.NewHistogram(prometheus.HistogramOpts{ | ||||
| 		Namespace: conf.PrometheusNamespace, | ||||
| 		Name:      "download_duration_seconds", | ||||
| 		Help:      "A histogram of the source image downloading latency.", | ||||
| 	}) | ||||
|  | ||||
| 	prometheusProcessingDuration = prometheus.NewHistogram(prometheus.HistogramOpts{ | ||||
| 		Namespace: conf.PrometheusNamespace, | ||||
| 		Name:      "processing_duration_seconds", | ||||
| 		Help:      "A histogram of the image processing latency.", | ||||
| 	}) | ||||
|  | ||||
| 	prometheusBufferSize = prometheus.NewHistogramVec(prometheus.HistogramOpts{ | ||||
| 		Namespace: conf.PrometheusNamespace, | ||||
| 		Name:      "buffer_size_bytes", | ||||
| 		Help:      "A histogram of the buffer size in bytes.", | ||||
| 		Buckets:   prometheus.ExponentialBuckets(1024, 2, 14), | ||||
| 	}, []string{"type"}) | ||||
|  | ||||
| 	prometheusBufferDefaultSize = prometheus.NewGaugeVec(prometheus.GaugeOpts{ | ||||
| 		Namespace: conf.PrometheusNamespace, | ||||
| 		Name:      "buffer_default_size_bytes", | ||||
| 		Help:      "A gauge of the buffer default size in bytes.", | ||||
| 	}, []string{"type"}) | ||||
|  | ||||
| 	prometheusBufferMaxSize = prometheus.NewGaugeVec(prometheus.GaugeOpts{ | ||||
| 		Namespace: conf.PrometheusNamespace, | ||||
| 		Name:      "buffer_max_size_bytes", | ||||
| 		Help:      "A gauge of the buffer max size in bytes.", | ||||
| 	}, []string{"type"}) | ||||
|  | ||||
| 	prometheusVipsMemory = prometheus.NewGaugeFunc(prometheus.GaugeOpts{ | ||||
| 		Namespace: conf.PrometheusNamespace, | ||||
| 		Name:      "vips_memory_bytes", | ||||
| 		Help:      "A gauge of the vips tracked memory usage in bytes.", | ||||
| 	}, vipsGetMem) | ||||
|  | ||||
| 	prometheusVipsMaxMemory = prometheus.NewGaugeFunc(prometheus.GaugeOpts{ | ||||
| 		Namespace: conf.PrometheusNamespace, | ||||
| 		Name:      "vips_max_memory_bytes", | ||||
| 		Help:      "A gauge of the max vips tracked memory usage in bytes.", | ||||
| 	}, vipsGetMemHighwater) | ||||
|  | ||||
| 	prometheusVipsAllocs = prometheus.NewGaugeFunc(prometheus.GaugeOpts{ | ||||
| 		Namespace: conf.PrometheusNamespace, | ||||
| 		Name:      "vips_allocs", | ||||
| 		Help:      "A gauge of the number of active vips allocations.", | ||||
| 	}, vipsGetAllocs) | ||||
|  | ||||
| 	prometheus.MustRegister( | ||||
| 		prometheusRequestsTotal, | ||||
| 		prometheusErrorsTotal, | ||||
| 		prometheusRequestDuration, | ||||
| 		prometheusDownloadDuration, | ||||
| 		prometheusProcessingDuration, | ||||
| 		prometheusBufferSize, | ||||
| 		prometheusBufferDefaultSize, | ||||
| 		prometheusBufferMaxSize, | ||||
| 		prometheusVipsMemory, | ||||
| 		prometheusVipsMaxMemory, | ||||
| 		prometheusVipsAllocs, | ||||
| 	) | ||||
|  | ||||
| 	prometheusEnabled = true | ||||
| } | ||||
|  | ||||
| func startPrometheusServer(cancel context.CancelFunc) error { | ||||
| 	if !prometheusEnabled { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	s := http.Server{Handler: promhttp.Handler()} | ||||
|  | ||||
| 	l, err := listenReuseport("tcp", conf.PrometheusBind) | ||||
| 	if err != nil { | ||||
| 		return fmt.Errorf("Can't start Prometheus metrics server: %s", err) | ||||
| 	} | ||||
|  | ||||
| 	go func() { | ||||
| 		logNotice("Starting Prometheus server at %s", conf.PrometheusBind) | ||||
| 		if err := s.Serve(l); err != nil && err != http.ErrServerClosed { | ||||
| 			logError(err.Error()) | ||||
| 		} | ||||
| 		cancel() | ||||
| 	}() | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func startPrometheusDuration(m prometheus.Histogram) func() { | ||||
| 	if !prometheusEnabled { | ||||
| 		return func() {} | ||||
| 	} | ||||
|  | ||||
| 	t := time.Now() | ||||
| 	return func() { | ||||
| 		m.Observe(time.Since(t).Seconds()) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func incrementPrometheusErrorsTotal(t string) { | ||||
| 	if prometheusEnabled { | ||||
| 		prometheusErrorsTotal.With(prometheus.Labels{"type": t}).Inc() | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func incrementPrometheusRequestsTotal() { | ||||
| 	if prometheusEnabled { | ||||
| 		prometheusRequestsTotal.Inc() | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func observePrometheusBufferSize(t string, size int) { | ||||
| 	if prometheusEnabled { | ||||
| 		prometheusBufferSize.With(prometheus.Labels{"type": t}).Observe(float64(size)) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func setPrometheusBufferDefaultSize(t string, size int) { | ||||
| 	if prometheusEnabled { | ||||
| 		prometheusBufferDefaultSize.With(prometheus.Labels{"type": t}).Set(float64(size)) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func setPrometheusBufferMaxSize(t string, size int) { | ||||
| 	if prometheusEnabled { | ||||
| 		prometheusBufferMaxSize.With(prometheus.Labels{"type": t}).Set(float64(size)) | ||||
| 	} | ||||
| } | ||||
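The deleted helpers above use a small deferred-closure idiom: startPrometheusDuration returns a function that, when called, observes the elapsed time, so callers can time a whole block with a single defer. A self-contained sketch of that idiom (the metric name and workload are illustrative, not taken from this commit; the helpers themselves presumably move behind the metrics packages imported elsewhere in the diff):

    package main

    import (
        "time"

        "github.com/prometheus/client_golang/prometheus"
    )

    // observeDuration mirrors the startPrometheusDuration pattern: it returns
    // a closure that records the elapsed time when invoked.
    func observeDuration(h prometheus.Histogram) func() {
        start := time.Now()
        return func() { h.Observe(time.Since(start).Seconds()) }
    }

    func main() {
        h := prometheus.NewHistogram(prometheus.HistogramOpts{Name: "work_duration_seconds"})

        func() {
            // The trailing () matters: observeDuration runs now, the returned
            // closure runs when the surrounding function returns.
            defer observeDuration(h)()
            time.Sleep(10 * time.Millisecond) // stand-in for real work
        }()
    }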
reuseport/listen_no_reuseport.go (new file)
| @@ -0,0 +1,19 @@ | ||||
| // +build !linux,!darwin !go1.11 | ||||
|  | ||||
| package reuseport | ||||
|  | ||||
| import ( | ||||
| 	"net" | ||||
|  | ||||
| 	log "github.com/sirupsen/logrus" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| ) | ||||
|  | ||||
| func Listen(network, address string) (net.Listener, error) { | ||||
| 	if config.SoReuseport { | ||||
| 		log.Warning("SO_REUSEPORT support is not implemented for your OS or Go version") | ||||
| 	} | ||||
|  | ||||
| 	return net.Listen(network, address) | ||||
| } | ||||
| @@ -1,7 +1,7 @@ | ||||
| // +build linux darwin | ||||
| // +build go1.11 | ||||
| 
 | ||||
| package main | ||||
| package reuseport | ||||
| 
 | ||||
| import ( | ||||
| 	"context" | ||||
| @@ -9,10 +9,12 @@ import ( | ||||
| 	"syscall" | ||||
| 
 | ||||
| 	"golang.org/x/sys/unix" | ||||
| 
 | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| ) | ||||
| 
 | ||||
| func listenReuseport(network, address string) (net.Listener, error) { | ||||
| 	if !conf.SoReuseport { | ||||
| func Listen(network, address string) (net.Listener, error) { | ||||
| 	if !config.SoReuseport { | ||||
| 		return net.Listen(network, address) | ||||
| 	} | ||||
| 
 | ||||
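With the listener extracted into its own package, callers only see the exported reuseport.Listen function (the import path matches the one used in server.go below). A minimal usage sketch; the bind address and handler are illustrative:

    package main

    import (
        "net/http"

        log "github.com/sirupsen/logrus"

        "github.com/imgproxy/imgproxy/v2/reuseport"
    )

    func main() {
        // reuseport.Listen falls back to plain net.Listen when SO_REUSEPORT
        // is disabled in the config or unsupported on the platform.
        l, err := reuseport.Listen("tcp", ":8080")
        if err != nil {
            log.Fatal(err)
        }

        log.Fatal(http.Serve(l, http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
            rw.Write([]byte("ok"))
        })))
    }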
router/logging.go (new file, 55 lines)
							| @@ -0,0 +1,55 @@ | ||||
| package router | ||||
|  | ||||
| import ( | ||||
| 	"net/http" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/ierrors" | ||||
| 	log "github.com/sirupsen/logrus" | ||||
| ) | ||||
|  | ||||
| func LogRequest(reqID string, r *http.Request) { | ||||
| 	path := r.RequestURI | ||||
|  | ||||
| 	log.WithFields(log.Fields{ | ||||
| 		"request_id": reqID, | ||||
| 		"method":     r.Method, | ||||
| 	}).Infof("Started %s", path) | ||||
| } | ||||
|  | ||||
| func LogResponse(reqID string, r *http.Request, status int, err *ierrors.Error, additional ...log.Fields) { | ||||
| 	var level log.Level | ||||
|  | ||||
| 	switch { | ||||
| 	case status >= 500: | ||||
| 		level = log.ErrorLevel | ||||
| 	case status >= 400: | ||||
| 		level = log.WarnLevel | ||||
| 	default: | ||||
| 		level = log.InfoLevel | ||||
| 	} | ||||
|  | ||||
| 	fields := log.Fields{ | ||||
| 		"request_id": reqID, | ||||
| 		"method":     r.Method, | ||||
| 		"status":     status, | ||||
| 	} | ||||
|  | ||||
| 	if err != nil { | ||||
| 		fields["error"] = err | ||||
|  | ||||
| 		if stack := err.FormatStack(); len(stack) > 0 { | ||||
| 			fields["stack"] = stack | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	for _, f := range additional { | ||||
| 		for k, v := range f { | ||||
| 			fields[k] = v | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	log.WithFields(fields).Logf( | ||||
| 		level, | ||||
| 		"Completed in %s %s", ctxTime(r.Context()), r.RequestURI, | ||||
| 	) | ||||
| } | ||||
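A hypothetical route wired through the new router and logging helpers might look like the sketch below; the path, port, and extra log field are illustrative. LogRequest is invoked by the router itself, so handlers only report the completion line:

    package main

    import (
        "net/http"

        log "github.com/sirupsen/logrus"

        "github.com/imgproxy/imgproxy/v2/router"
    )

    // handlePing is a toy router.RouteHandler; the extra log.Fields end up on
    // the "Completed in ..." line alongside request_id, method, and status.
    func handlePing(reqID string, rw http.ResponseWriter, r *http.Request) {
        rw.WriteHeader(200)
        rw.Write([]byte("pong"))

        router.LogResponse(reqID, r, 200, nil, log.Fields{"handler": "ping"})
    }

    func main() {
        r := router.New("") // ServeHTTP logs "Started ..." via LogRequest
        r.GET("/ping", handlePing, true)

        log.Fatal(http.ListenAndServe(":8080", r))
    }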
| @@ -1,4 +1,4 @@ | ||||
| package main | ||||
| package router | ||||
| 
 | ||||
| import ( | ||||
| 	"net/http" | ||||
| @@ -6,6 +6,7 @@ import ( | ||||
| 	"strings" | ||||
| 
 | ||||
| 	nanoid "github.com/matoous/go-nanoid/v2" | ||||
| 	log "github.com/sirupsen/logrus" | ||||
| ) | ||||
| 
 | ||||
| const ( | ||||
| @@ -16,23 +17,23 @@ var ( | ||||
| 	requestIDRe = regexp.MustCompile(`^[A-Za-z0-9_\-]+$`) | ||||
| ) | ||||
| 
 | ||||
| type routeHandler func(string, http.ResponseWriter, *http.Request) | ||||
| type panicHandler func(string, http.ResponseWriter, *http.Request, error) | ||||
| type RouteHandler func(string, http.ResponseWriter, *http.Request) | ||||
| type PanicHandler func(string, http.ResponseWriter, *http.Request, error) | ||||
| 
 | ||||
| type route struct { | ||||
| 	Method  string | ||||
| 	Prefix  string | ||||
| 	Handler routeHandler | ||||
| 	Handler RouteHandler | ||||
| 	Exact   bool | ||||
| } | ||||
| 
 | ||||
| type router struct { | ||||
| type Router struct { | ||||
| 	prefix       string | ||||
| 	Routes       []*route | ||||
| 	PanicHandler panicHandler | ||||
| 	PanicHandler PanicHandler | ||||
| } | ||||
| 
 | ||||
| func (r *route) IsMatch(req *http.Request) bool { | ||||
| func (r *route) isMatch(req *http.Request) bool { | ||||
| 	if r.Method != req.Method { | ||||
| 		return false | ||||
| 	} | ||||
| @@ -44,34 +45,34 @@ func (r *route) IsMatch(req *http.Request) bool { | ||||
| 	return strings.HasPrefix(req.URL.Path, r.Prefix) | ||||
| } | ||||
| 
 | ||||
| func newRouter(prefix string) *router { | ||||
| 	return &router{ | ||||
| func New(prefix string) *Router { | ||||
| 	return &Router{ | ||||
| 		prefix: prefix, | ||||
| 		Routes: make([]*route, 0), | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| func (r *router) Add(method, prefix string, handler routeHandler, exact bool) { | ||||
| func (r *Router) Add(method, prefix string, handler RouteHandler, exact bool) { | ||||
| 	r.Routes = append( | ||||
| 		r.Routes, | ||||
| 		&route{Method: method, Prefix: r.prefix + prefix, Handler: handler, Exact: exact}, | ||||
| 	) | ||||
| } | ||||
| 
 | ||||
| func (r *router) GET(prefix string, handler routeHandler, exact bool) { | ||||
| func (r *Router) GET(prefix string, handler RouteHandler, exact bool) { | ||||
| 	r.Add(http.MethodGet, prefix, handler, exact) | ||||
| } | ||||
| 
 | ||||
| func (r *router) OPTIONS(prefix string, handler routeHandler, exact bool) { | ||||
| func (r *Router) OPTIONS(prefix string, handler RouteHandler, exact bool) { | ||||
| 	r.Add(http.MethodOptions, prefix, handler, exact) | ||||
| } | ||||
| 
 | ||||
| func (r *router) HEAD(prefix string, handler routeHandler, exact bool) { | ||||
| func (r *Router) HEAD(prefix string, handler RouteHandler, exact bool) { | ||||
| 	r.Add(http.MethodHead, prefix, handler, exact) | ||||
| } | ||||
| 
 | ||||
| func (r *router) ServeHTTP(rw http.ResponseWriter, req *http.Request) { | ||||
| 	req = req.WithContext(setTimerSince(req.Context())) | ||||
| func (r *Router) ServeHTTP(rw http.ResponseWriter, req *http.Request) { | ||||
| 	req = setRequestTime(req) | ||||
| 
 | ||||
| 	reqID := req.Header.Get(xRequestIDHeader) | ||||
| 
 | ||||
| @@ -92,16 +93,16 @@ func (r *router) ServeHTTP(rw http.ResponseWriter, req *http.Request) { | ||||
| 		} | ||||
| 	}() | ||||
| 
 | ||||
| 	logRequest(reqID, req) | ||||
| 	LogRequest(reqID, req) | ||||
| 
 | ||||
| 	for _, rr := range r.Routes { | ||||
| 		if rr.IsMatch(req) { | ||||
| 		if rr.isMatch(req) { | ||||
| 			rr.Handler(reqID, rw, req) | ||||
| 			return | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	logWarning("Route for %s is not defined", req.URL.Path) | ||||
| 	log.Warningf("Route for %s is not defined", req.URL.Path) | ||||
| 
 | ||||
| 	rw.WriteHeader(404) | ||||
| } | ||||
router/timer.go (new file, 43 lines)
							| @@ -0,0 +1,43 @@ | ||||
| package router | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"fmt" | ||||
| 	"net/http" | ||||
| 	"time" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/ierrors" | ||||
| 	"github.com/imgproxy/imgproxy/v2/metrics" | ||||
| ) | ||||
|  | ||||
| type timerSinceCtxKey = struct{} | ||||
|  | ||||
| func setRequestTime(r *http.Request) *http.Request { | ||||
| 	return r.WithContext( | ||||
| 		context.WithValue(r.Context(), timerSinceCtxKey{}, time.Now()), | ||||
| 	) | ||||
| } | ||||
|  | ||||
| func ctxTime(ctx context.Context) time.Duration { | ||||
| 	if t, ok := ctx.Value(timerSinceCtxKey{}).(time.Time); ok { | ||||
| 		return time.Since(t) | ||||
| 	} | ||||
| 	return 0 | ||||
| } | ||||
|  | ||||
| func CheckTimeout(ctx context.Context) { | ||||
| 	select { | ||||
| 	case <-ctx.Done(): | ||||
| 		d := ctxTime(ctx) | ||||
|  | ||||
| 		if ctx.Err() != context.DeadlineExceeded { | ||||
| 			panic(ierrors.New(499, fmt.Sprintf("Request was cancelled after %v", d), "Cancelled")) | ||||
| 		} | ||||
|  | ||||
| 		metrics.SendTimeout(ctx, d) | ||||
|  | ||||
| 		panic(ierrors.New(503, fmt.Sprintf("Timeout after %v", d), "Timeout")) | ||||
| 	default: | ||||
| 		// Go ahead | ||||
| 	} | ||||
| } | ||||
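CheckTimeout is meant to be called between expensive stages so a request whose deadline has passed (or that was cancelled) aborts early by panicking with a 503 or 499 ierrors.Error. A minimal sketch; the stages and timeout value are illustrative:

    package main

    import (
        "context"
        "fmt"
        "time"

        "github.com/imgproxy/imgproxy/v2/router"
    )

    func processStages(ctx context.Context) {
        // Call between expensive stages; a panic here is expected to be
        // turned into an error response by the surrounding panic handler.
        router.CheckTimeout(ctx) // e.g. before downloading the source
        router.CheckTimeout(ctx) // e.g. before decoding and resizing
    }

    func main() {
        ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
        defer cancel()

        processStages(ctx)
        fmt.Println("finished within the deadline")
    }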
security/image_size.go (new file, 16 lines)
							| @@ -0,0 +1,16 @@ | ||||
| package security | ||||
|  | ||||
| import ( | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/ierrors" | ||||
| ) | ||||
|  | ||||
| var ErrSourceResolutionTooBig = ierrors.New(422, "Source image resolution is too big", "Invalid source image") | ||||
|  | ||||
| func CheckDimensions(width, height int) error { | ||||
| 	if width*height > config.MaxSrcResolution { | ||||
| 		return ErrSourceResolutionTooBig | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
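A typical call site validates the decoded image header before spending CPU on the full image. A small sketch; the resolution limit and dimensions are illustrative:

    package main

    import (
        "fmt"

        "github.com/imgproxy/imgproxy/v2/config"
        "github.com/imgproxy/imgproxy/v2/security"
    )

    func main() {
        // Illustrative limit (~16.8 Mpx); the real value comes from configuration.
        config.MaxSrcResolution = 16_800_000

        fmt.Println(security.CheckDimensions(4000, 3000)) // nil: 12 Mpx is within the limit
        fmt.Println(security.CheckDimensions(8000, 6000)) // ErrSourceResolutionTooBig: 48 Mpx exceeds it
    }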
security/signature.go (new file, 45 lines)
							| @@ -0,0 +1,45 @@ | ||||
| package security | ||||
|  | ||||
| import ( | ||||
| 	"crypto/hmac" | ||||
| 	"crypto/sha256" | ||||
| 	"encoding/base64" | ||||
| 	"errors" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	ErrInvalidSignature         = errors.New("Invalid signature") | ||||
| 	ErrInvalidSignatureEncoding = errors.New("Invalid signature encoding") | ||||
| ) | ||||
|  | ||||
| func VerifySignature(signature, path string) error { | ||||
| 	if len(config.Keys) == 0 || len(config.Salts) == 0 { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	messageMAC, err := base64.RawURLEncoding.DecodeString(signature) | ||||
| 	if err != nil { | ||||
| 		return ErrInvalidSignatureEncoding | ||||
| 	} | ||||
|  | ||||
| 	for i := 0; i < len(config.Keys); i++ { | ||||
| 		if hmac.Equal(messageMAC, signatureFor(path, config.Keys[i], config.Salts[i], config.SignatureSize)) { | ||||
| 			return nil | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return ErrInvalidSignature | ||||
| } | ||||
|  | ||||
| func signatureFor(str string, key, salt []byte, signatureSize int) []byte { | ||||
| 	mac := hmac.New(sha256.New, key) | ||||
| 	mac.Write(salt) | ||||
| 	mac.Write([]byte(str)) | ||||
| 	expectedMAC := mac.Sum(nil) | ||||
| 	if signatureSize < 32 { | ||||
| 		return expectedMAC[:signatureSize] | ||||
| 	} | ||||
| 	return expectedMAC | ||||
| } | ||||
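Verification above implies the signing scheme: HMAC-SHA256 keyed with the key, fed the salt and then the path, base64 URL-encoded without padding, optionally truncated. A self-contained sketch of the signing side; the key, salt, and path match the test fixtures below, everything else is illustrative:

    package main

    import (
        "crypto/hmac"
        "crypto/sha256"
        "encoding/base64"
        "fmt"
    )

    // sign reproduces the scheme VerifySignature checks: HMAC-SHA256 keyed
    // with the key, fed the salt and then the path, RawURL-base64 encoded,
    // and optionally truncated to signatureSize bytes.
    func sign(path string, key, salt []byte, signatureSize int) string {
        mac := hmac.New(sha256.New, key)
        mac.Write(salt)
        mac.Write([]byte(path))
        sum := mac.Sum(nil)
        if signatureSize < 32 {
            sum = sum[:signatureSize]
        }
        return base64.RawURLEncoding.EncodeToString(sum)
    }

    func main() {
        // With the key/salt/path used in the tests below, this prints the
        // full-size signature those tests accept.
        fmt.Println(sign("asd", []byte("test-key"), []byte("test-salt"), 32))
    }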
security/signature_test.go (new file, 56 lines)
							| @@ -0,0 +1,56 @@ | ||||
| package security | ||||
|  | ||||
| import ( | ||||
| 	"testing" | ||||
|  | ||||
| 	"github.com/stretchr/testify/assert" | ||||
| 	"github.com/stretchr/testify/suite" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| ) | ||||
|  | ||||
| type SignatureTestSuite struct { | ||||
| 	suite.Suite | ||||
| } | ||||
|  | ||||
| func (s *SignatureTestSuite) SetupTest() { | ||||
| 	config.Reset() | ||||
|  | ||||
| 	config.Keys = [][]byte{[]byte("test-key")} | ||||
| 	config.Salts = [][]byte{[]byte("test-salt")} | ||||
| } | ||||
|  | ||||
| func (s *SignatureTestSuite) TestVerifySignature() { | ||||
| 	err := VerifySignature("dtLwhdnPPiu_epMl1LrzheLpvHas-4mwvY6L3Z8WwlY", "asd") | ||||
| 	assert.Nil(s.T(), err) | ||||
| } | ||||
|  | ||||
| func (s *SignatureTestSuite) TestVerifySignatureTruncated() { | ||||
| 	config.SignatureSize = 8 | ||||
|  | ||||
| 	err := VerifySignature("dtLwhdnPPis", "asd") | ||||
| 	assert.Nil(s.T(), err) | ||||
| } | ||||
|  | ||||
| func (s *SignatureTestSuite) TestVerifySignatureInvalid() { | ||||
| 	err := VerifySignature("dtLwhdnPPis", "asd") | ||||
| 	assert.Error(s.T(), err) | ||||
| } | ||||
|  | ||||
| func (s *SignatureTestSuite) TestVerifySignatureMultiplePairs() { | ||||
| 	config.Keys = append(config.Keys, []byte("test-key2")) | ||||
| 	config.Salts = append(config.Salts, []byte("test-salt2")) | ||||
|  | ||||
| 	err := VerifySignature("dtLwhdnPPiu_epMl1LrzheLpvHas-4mwvY6L3Z8WwlY", "asd") | ||||
| 	assert.Nil(s.T(), err) | ||||
|  | ||||
| 	err = VerifySignature("jbDffNPt1-XBgDccsaE-XJB9lx8JIJqdeYIZKgOqZpg", "asd") | ||||
| 	assert.Nil(s.T(), err) | ||||
|  | ||||
| 	err = VerifySignature("dtLwhdnPPis", "asd") | ||||
| 	assert.Error(s.T(), err) | ||||
| } | ||||
|  | ||||
| func TestSignature(t *testing.T) { | ||||
| 	suite.Run(t, new(SignatureTestSuite)) | ||||
| } | ||||
security/source.go (new file, 19 lines)
							| @@ -0,0 +1,19 @@ | ||||
| package security | ||||
|  | ||||
| import ( | ||||
| 	"strings" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| ) | ||||
|  | ||||
| func VerifySourceURL(imageURL string) bool { | ||||
| 	if len(config.AllowedSources) == 0 { | ||||
| 		return true | ||||
| 	} | ||||
| 	for _, val := range config.AllowedSources { | ||||
| 		if strings.HasPrefix(imageURL, string(val)) { | ||||
| 			return true | ||||
| 		} | ||||
| 	} | ||||
| 	return false | ||||
| } | ||||
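A quick illustration of the prefix check, assuming config.AllowedSources is a plain slice of string prefixes (as the code above suggests); the allow-list entries and URLs are illustrative:

    package main

    import (
        "fmt"

        "github.com/imgproxy/imgproxy/v2/config"
        "github.com/imgproxy/imgproxy/v2/security"
    )

    func main() {
        // An empty allow-list accepts everything; otherwise only URLs starting
        // with one of the configured prefixes pass.
        config.AllowedSources = []string{"local://", "https://images.example.com/"}

        fmt.Println(security.VerifySourceURL("https://images.example.com/a.jpg")) // true
        fmt.Println(security.VerifySourceURL("s3://bucket/a.jpg"))                // false
    }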
server.go (59 lines changed)
							| @@ -7,17 +7,24 @@ import ( | ||||
| 	"net/http" | ||||
| 	"time" | ||||
|  | ||||
| 	log "github.com/sirupsen/logrus" | ||||
| 	"golang.org/x/net/netutil" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/errorreport" | ||||
| 	"github.com/imgproxy/imgproxy/v2/ierrors" | ||||
| 	"github.com/imgproxy/imgproxy/v2/reuseport" | ||||
| 	"github.com/imgproxy/imgproxy/v2/router" | ||||
| ) | ||||
|  | ||||
| var ( | ||||
| 	imgproxyIsRunningMsg = []byte("imgproxy is running") | ||||
|  | ||||
| 	errInvalidSecret = newError(403, "Invalid secret", "Forbidden") | ||||
| 	errInvalidSecret = ierrors.New(403, "Invalid secret", "Forbidden") | ||||
| ) | ||||
|  | ||||
| func buildRouter() *router { | ||||
| 	r := newRouter(conf.PathPrefix) | ||||
| func buildRouter() *router.Router { | ||||
| 	r := router.New(config.PathPrefix) | ||||
|  | ||||
| 	r.PanicHandler = handlePanic | ||||
|  | ||||
| @@ -32,32 +39,28 @@ func buildRouter() *router { | ||||
| } | ||||
|  | ||||
| func startServer(cancel context.CancelFunc) (*http.Server, error) { | ||||
| 	l, err := listenReuseport(conf.Network, conf.Bind) | ||||
| 	l, err := reuseport.Listen(config.Network, config.Bind) | ||||
| 	if err != nil { | ||||
| 		return nil, fmt.Errorf("Can't start server: %s", err) | ||||
| 	} | ||||
| 	l = netutil.LimitListener(l, conf.MaxClients) | ||||
| 	l = netutil.LimitListener(l, config.MaxClients) | ||||
|  | ||||
| 	s := &http.Server{ | ||||
| 		Handler:        buildRouter(), | ||||
| 		ReadTimeout:    time.Duration(conf.ReadTimeout) * time.Second, | ||||
| 		ReadTimeout:    time.Duration(config.ReadTimeout) * time.Second, | ||||
| 		MaxHeaderBytes: 1 << 20, | ||||
| 	} | ||||
|  | ||||
| 	if conf.KeepAliveTimeout > 0 { | ||||
| 		s.IdleTimeout = time.Duration(conf.KeepAliveTimeout) * time.Second | ||||
| 	if config.KeepAliveTimeout > 0 { | ||||
| 		s.IdleTimeout = time.Duration(config.KeepAliveTimeout) * time.Second | ||||
| 	} else { | ||||
| 		s.SetKeepAlivesEnabled(false) | ||||
| 	} | ||||
|  | ||||
| 	if err := initProcessingHandler(); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	go func() { | ||||
| 		logNotice("Starting server at %s", conf.Bind) | ||||
| 		log.Infof("Starting server at %s", config.Bind) | ||||
| 		if err := s.Serve(l); err != nil && err != http.ErrServerClosed { | ||||
| 			logError(err.Error()) | ||||
| 			log.Error(err) | ||||
| 		} | ||||
| 		cancel() | ||||
| 	}() | ||||
| @@ -66,7 +69,7 @@ func startServer(cancel context.CancelFunc) (*http.Server, error) { | ||||
| } | ||||
|  | ||||
| func shutdownServer(s *http.Server) { | ||||
| 	logNotice("Shutting down the server...") | ||||
| 	log.Info("Shutting down the server...") | ||||
|  | ||||
| 	ctx, close := context.WithTimeout(context.Background(), 5*time.Second) | ||||
| 	defer close() | ||||
| @@ -74,10 +77,10 @@ func shutdownServer(s *http.Server) { | ||||
| 	s.Shutdown(ctx) | ||||
| } | ||||
|  | ||||
| func withCORS(h routeHandler) routeHandler { | ||||
| func withCORS(h router.RouteHandler) router.RouteHandler { | ||||
| 	return func(reqID string, rw http.ResponseWriter, r *http.Request) { | ||||
| 		if len(conf.AllowOrigin) > 0 { | ||||
| 			rw.Header().Set("Access-Control-Allow-Origin", conf.AllowOrigin) | ||||
| 		if len(config.AllowOrigin) > 0 { | ||||
| 			rw.Header().Set("Access-Control-Allow-Origin", config.AllowOrigin) | ||||
| 			rw.Header().Set("Access-Control-Allow-Methods", "GET, OPTIONS") | ||||
| 		} | ||||
|  | ||||
| @@ -85,12 +88,12 @@ func withCORS(h routeHandler) routeHandler { | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func withSecret(h routeHandler) routeHandler { | ||||
| 	if len(conf.Secret) == 0 { | ||||
| func withSecret(h router.RouteHandler) router.RouteHandler { | ||||
| 	if len(config.Secret) == 0 { | ||||
| 		return h | ||||
| 	} | ||||
|  | ||||
| 	authHeader := []byte(fmt.Sprintf("Bearer %s", conf.Secret)) | ||||
| 	authHeader := []byte(fmt.Sprintf("Bearer %s", config.Secret)) | ||||
|  | ||||
| 	return func(reqID string, rw http.ResponseWriter, r *http.Request) { | ||||
| 		if subtle.ConstantTimeCompare([]byte(r.Header.Get("Authorization")), authHeader) == 1 { | ||||
| @@ -102,17 +105,17 @@ func withSecret(h routeHandler) routeHandler { | ||||
| } | ||||
|  | ||||
| func handlePanic(reqID string, rw http.ResponseWriter, r *http.Request, err error) { | ||||
| 	ierr := wrapError(err, 3) | ||||
| 	ierr := ierrors.Wrap(err, 3) | ||||
|  | ||||
| 	if ierr.Unexpected { | ||||
| 		reportError(err, r) | ||||
| 		errorreport.Report(err, r) | ||||
| 	} | ||||
|  | ||||
| 	logResponse(reqID, r, ierr.StatusCode, ierr, nil, nil) | ||||
| 	router.LogResponse(reqID, r, ierr.StatusCode, ierr) | ||||
|  | ||||
| 	rw.WriteHeader(ierr.StatusCode) | ||||
|  | ||||
| 	if conf.DevelopmentErrorsMode { | ||||
| 	if config.DevelopmentErrorsMode { | ||||
| 		rw.Write([]byte(ierr.Message)) | ||||
| 	} else { | ||||
| 		rw.Write([]byte(ierr.PublicMessage)) | ||||
| @@ -120,18 +123,18 @@ func handlePanic(reqID string, rw http.ResponseWriter, r *http.Request, err erro | ||||
| } | ||||
|  | ||||
| func handleHealth(reqID string, rw http.ResponseWriter, r *http.Request) { | ||||
| 	logResponse(reqID, r, 200, nil, nil, nil) | ||||
| 	router.LogResponse(reqID, r, 200, nil) | ||||
| 	rw.WriteHeader(200) | ||||
| 	rw.Write(imgproxyIsRunningMsg) | ||||
| } | ||||
|  | ||||
| func handleHead(reqID string, rw http.ResponseWriter, r *http.Request) { | ||||
| 	logResponse(reqID, r, 200, nil, nil, nil) | ||||
| 	router.LogResponse(reqID, r, 200, nil) | ||||
| 	rw.WriteHeader(200) | ||||
| } | ||||
|  | ||||
| func handleFavicon(reqID string, rw http.ResponseWriter, r *http.Request) { | ||||
| 	logResponse(reqID, r, 200, nil, nil, nil) | ||||
| 	router.LogResponse(reqID, r, 200, nil) | ||||
| 	// TODO: Add a real favicon maybe? | ||||
| 	rw.WriteHeader(200) | ||||
| } | ||||
|   | ||||
testdata/test1.png (new binary file, vendored, 1.4 KiB)
testdata/test1.svg (new file, vendored, 3 lines, 136 B)
							| @@ -0,0 +1,3 @@ | ||||
| <svg width="200" height="100"> | ||||
|   <rect width="190" height="90" style="fill:rgb(0,0,0);stroke-width:5;stroke:rgb(255,255,255)" /> | ||||
| </svg> | ||||
timer.go (36 lines deleted)
							| @@ -1,36 +0,0 @@ | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"context" | ||||
| 	"fmt" | ||||
| 	"time" | ||||
| ) | ||||
|  | ||||
| var timerSinceCtxKey = ctxKey("timerSince") | ||||
|  | ||||
| func setTimerSince(ctx context.Context) context.Context { | ||||
| 	return context.WithValue(ctx, timerSinceCtxKey, time.Now()) | ||||
| } | ||||
|  | ||||
| func getTimerSince(ctx context.Context) time.Duration { | ||||
| 	return time.Since(ctx.Value(timerSinceCtxKey).(time.Time)) | ||||
| } | ||||
|  | ||||
| func checkTimeout(ctx context.Context) { | ||||
| 	select { | ||||
| 	case <-ctx.Done(): | ||||
| 		d := getTimerSince(ctx) | ||||
|  | ||||
| 		if ctx.Err() != context.DeadlineExceeded { | ||||
| 			panic(newError(499, fmt.Sprintf("Request was cancelled after %v", d), "Cancelled")) | ||||
| 		} | ||||
|  | ||||
| 		sendTimeoutToDataDog(ctx, d) | ||||
| 		sendTimeoutToNewRelic(ctx, d) | ||||
| 		incrementPrometheusErrorsTotal("timeout") | ||||
|  | ||||
| 		panic(newError(503, fmt.Sprintf("Timeout after %v", d), "Timeout")) | ||||
| 	default: | ||||
| 		// Go ahead | ||||
| 	} | ||||
| } | ||||
| @@ -1,4 +1,4 @@ | ||||
| package main | ||||
| package azure | ||||
| 
 | ||||
| import ( | ||||
| 	"context" | ||||
| @@ -8,23 +8,24 @@ import ( | ||||
| 	"strings" | ||||
| 
 | ||||
| 	"github.com/Azure/azure-storage-blob-go/azblob" | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| ) | ||||
| 
 | ||||
| type azureTransport struct { | ||||
| type transport struct { | ||||
| 	serviceURL *azblob.ServiceURL | ||||
| } | ||||
| 
 | ||||
| func newAzureTransport() (http.RoundTripper, error) { | ||||
| 	credential, err := azblob.NewSharedKeyCredential(conf.ABSName, conf.ABSKey) | ||||
| func New() (http.RoundTripper, error) { | ||||
| 	credential, err := azblob.NewSharedKeyCredential(config.ABSName, config.ABSKey) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 
 | ||||
| 	pipeline := azblob.NewPipeline(credential, azblob.PipelineOptions{}) | ||||
| 
 | ||||
| 	endpoint := conf.ABSEndpoint | ||||
| 	endpoint := config.ABSEndpoint | ||||
| 	if len(endpoint) == 0 { | ||||
| 		endpoint = fmt.Sprintf("https://%s.blob.core.windows.net", conf.ABSName) | ||||
| 		endpoint = fmt.Sprintf("https://%s.blob.core.windows.net", config.ABSName) | ||||
| 	} | ||||
| 	endpointURL, err := url.Parse(endpoint) | ||||
| 	if err != nil { | ||||
| @@ -33,10 +34,10 @@ func newAzureTransport() (http.RoundTripper, error) { | ||||
| 
 | ||||
| 	serviceURL := azblob.NewServiceURL(*endpointURL, pipeline) | ||||
| 
 | ||||
| 	return azureTransport{&serviceURL}, nil | ||||
| 	return transport{&serviceURL}, nil | ||||
| } | ||||
| 
 | ||||
| func (t azureTransport) RoundTrip(req *http.Request) (resp *http.Response, err error) { | ||||
| func (t transport) RoundTrip(req *http.Request) (resp *http.Response, err error) { | ||||
| 	containerURL := t.serviceURL.NewContainerURL(strings.ToLower(req.URL.Host)) | ||||
| 	blobURL := containerURL.NewBlockBlobURL(strings.TrimPrefix(req.URL.Path, "/")) | ||||
| 
 | ||||
| @@ -1,19 +1,21 @@ | ||||
| package main | ||||
| package fs | ||||
| 
 | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"net/http" | ||||
| 
 | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| ) | ||||
| 
 | ||||
| type fsTransport struct { | ||||
| type transport struct { | ||||
| 	fs http.Dir | ||||
| } | ||||
| 
 | ||||
| func newFsTransport() fsTransport { | ||||
| 	return fsTransport{fs: http.Dir(conf.LocalFileSystemRoot)} | ||||
| func New() transport { | ||||
| 	return transport{fs: http.Dir(config.LocalFileSystemRoot)} | ||||
| } | ||||
| 
 | ||||
| func (t fsTransport) RoundTrip(req *http.Request) (resp *http.Response, err error) { | ||||
| func (t transport) RoundTrip(req *http.Request) (resp *http.Response, err error) { | ||||
| 	f, err := t.fs.Open(req.URL.Path) | ||||
| 
 | ||||
| 	if err != nil { | ||||
| @@ -1,4 +1,4 @@ | ||||
| package main | ||||
| package gcs | ||||
| 
 | ||||
| import ( | ||||
| 	"context" | ||||
| @@ -8,21 +8,22 @@ import ( | ||||
| 	"strings" | ||||
| 
 | ||||
| 	"cloud.google.com/go/storage" | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"google.golang.org/api/option" | ||||
| ) | ||||
| 
 | ||||
| type gcsTransport struct { | ||||
| type transport struct { | ||||
| 	client *storage.Client | ||||
| } | ||||
| 
 | ||||
| func newGCSTransport() (http.RoundTripper, error) { | ||||
| func New() (http.RoundTripper, error) { | ||||
| 	var ( | ||||
| 		client *storage.Client | ||||
| 		err    error | ||||
| 	) | ||||
| 
 | ||||
| 	if len(conf.GCSKey) > 0 { | ||||
| 		client, err = storage.NewClient(context.Background(), option.WithCredentialsJSON([]byte(conf.GCSKey))) | ||||
| 	if len(config.GCSKey) > 0 { | ||||
| 		client, err = storage.NewClient(context.Background(), option.WithCredentialsJSON([]byte(config.GCSKey))) | ||||
| 	} else { | ||||
| 		client, err = storage.NewClient(context.Background()) | ||||
| 	} | ||||
| @@ -31,10 +32,10 @@ func newGCSTransport() (http.RoundTripper, error) { | ||||
| 		return nil, fmt.Errorf("Can't create GCS client: %s", err) | ||||
| 	} | ||||
| 
 | ||||
| 	return gcsTransport{client}, nil | ||||
| 	return transport{client}, nil | ||||
| } | ||||
| 
 | ||||
| func (t gcsTransport) RoundTrip(req *http.Request) (*http.Response, error) { | ||||
| func (t transport) RoundTrip(req *http.Request) (*http.Response, error) { | ||||
| 	bkt := t.client.Bucket(req.URL.Host) | ||||
| 	obj := bkt.Object(strings.TrimPrefix(req.URL.Path, "/")) | ||||
| 
 | ||||
| @@ -1,4 +1,4 @@ | ||||
| package main | ||||
| package s3 | ||||
| 
 | ||||
| import ( | ||||
| 	"fmt" | ||||
| @@ -7,22 +7,24 @@ import ( | ||||
| 	"github.com/aws/aws-sdk-go/aws" | ||||
| 	"github.com/aws/aws-sdk-go/aws/session" | ||||
| 	"github.com/aws/aws-sdk-go/service/s3" | ||||
| 
 | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| ) | ||||
| 
 | ||||
| // s3Transport implements RoundTripper for the 's3' protocol. | ||||
| type s3Transport struct { | ||||
| // transport implements RoundTripper for the 's3' protocol. | ||||
| type transport struct { | ||||
| 	svc *s3.S3 | ||||
| } | ||||
| 
 | ||||
| func newS3Transport() (http.RoundTripper, error) { | ||||
| func New() (http.RoundTripper, error) { | ||||
| 	s3Conf := aws.NewConfig() | ||||
| 
 | ||||
| 	if len(conf.S3Region) != 0 { | ||||
| 		s3Conf.Region = aws.String(conf.S3Region) | ||||
| 	if len(config.S3Region) != 0 { | ||||
| 		s3Conf.Region = aws.String(config.S3Region) | ||||
| 	} | ||||
| 
 | ||||
| 	if len(conf.S3Endpoint) != 0 { | ||||
| 		s3Conf.Endpoint = aws.String(conf.S3Endpoint) | ||||
| 	if len(config.S3Endpoint) != 0 { | ||||
| 		s3Conf.Endpoint = aws.String(config.S3Endpoint) | ||||
| 		s3Conf.S3ForcePathStyle = aws.Bool(true) | ||||
| 	} | ||||
| 
 | ||||
| @@ -35,10 +37,10 @@ func newS3Transport() (http.RoundTripper, error) { | ||||
| 		sess.Config.Region = aws.String("us-west-1") | ||||
| 	} | ||||
| 
 | ||||
| 	return s3Transport{s3.New(sess, s3Conf)}, nil | ||||
| 	return transport{s3.New(sess, s3Conf)}, nil | ||||
| } | ||||
| 
 | ||||
| func (t s3Transport) RoundTrip(req *http.Request) (resp *http.Response, err error) { | ||||
| func (t transport) RoundTrip(req *http.Request) (resp *http.Response, err error) { | ||||
| 	input := &s3.GetObjectInput{ | ||||
| 		Bucket: aws.String(req.URL.Host), | ||||
| 		Key:    aws.String(req.URL.Path), | ||||
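Each storage transport (azure, fs, gcs, s3) now lives in its own package and exposes a New() constructor returning an http.RoundTripper. The wiring is not part of this diff, but the obvious way to consume them is to mount each one on a shared http.Transport under a custom URL scheme; the scheme names and package paths below are assumptions, not taken from this commit:

package main

import (
	"net/http"

	// Package paths are assumed from the new package names in this commit.
	"github.com/imgproxy/imgproxy/v2/transport/azure"
	"github.com/imgproxy/imgproxy/v2/transport/fs"
	"github.com/imgproxy/imgproxy/v2/transport/gcs"
	"github.com/imgproxy/imgproxy/v2/transport/s3"
)

// newDownloadClient is a sketch; in practice each transport would only be
// registered when the corresponding storage is enabled in the config.
func newDownloadClient() (*http.Client, error) {
	transport := &http.Transport{}

	transport.RegisterProtocol("local", fs.New())

	for scheme, build := range map[string]func() (http.RoundTripper, error){
		"s3":  s3.New,
		"gs":  gcs.New,
		"abs": azure.New,
	} {
		rt, err := build()
		if err != nil {
			return nil, err
		}
		transport.RegisterProtocol(scheme, rt)
	}

	return &http.Client{Transport: transport}, nil
}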
							
								
								
									
utils.go (56 lines removed)
							| @@ -1,56 +0,0 @@ | ||||
| package main | ||||
|  | ||||
| import ( | ||||
| 	"math" | ||||
| 	"strings" | ||||
| 	"unsafe" | ||||
| ) | ||||
|  | ||||
| func maxInt(a, b int) int { | ||||
| 	if a > b { | ||||
| 		return a | ||||
| 	} | ||||
| 	return b | ||||
| } | ||||
|  | ||||
| func minInt(a, b int) int { | ||||
| 	if a < b { | ||||
| 		return a | ||||
| 	} | ||||
| 	return b | ||||
| } | ||||
|  | ||||
| func minNonZeroInt(a, b int) int { | ||||
| 	switch { | ||||
| 	case a == 0: | ||||
| 		return b | ||||
| 	case b == 0: | ||||
| 		return a | ||||
| 	} | ||||
|  | ||||
| 	return minInt(a, b) | ||||
| } | ||||
|  | ||||
| func roundToInt(a float64) int { | ||||
| 	return int(math.Round(a)) | ||||
| } | ||||
|  | ||||
| func scaleInt(a int, scale float64) int { | ||||
| 	if a == 0 { | ||||
| 		return 0 | ||||
| 	} | ||||
|  | ||||
| 	return roundToInt(float64(a) * scale) | ||||
| } | ||||
|  | ||||
| func trimAfter(s string, sep byte) string { | ||||
| 	i := strings.IndexByte(s, sep) | ||||
| 	if i < 0 { | ||||
| 		return s | ||||
| 	} | ||||
| 	return s[:i] | ||||
| } | ||||
|  | ||||
| func ptrToBytes(ptr unsafe.Pointer, size int) []byte { | ||||
| 	return (*[math.MaxInt32]byte)(ptr)[:int(size):int(size)] | ||||
| } | ||||
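utils.go disappears from the root package: ptrToBytes reappears inside the vips package later in this diff, while the integer helpers are presumably replaced by a small shared package (an imath package is assumed here; its real contents are not shown in this commit). A sketch of what such a package would contain:

// Assumed replacement for the deleted maxInt/minInt/roundToInt helpers.
// This is a sketch, not the actual imgproxy source.
package imath

import "math"

func Max(a, b int) int {
	if a > b {
		return a
	}
	return b
}

func Min(a, b int) int {
	if a < b {
		return a
	}
	return b
}

func Round(a float64) int {
	return int(math.Round(a))
}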
							
								
								
									
version/version.go (7 lines, new file)
							| @@ -0,0 +1,7 @@ | ||||
| package version | ||||
|  | ||||
| const version = "2.16.1" | ||||
|  | ||||
| func Version() string { | ||||
| 	return version | ||||
| } | ||||
| @@ -1,4 +1,4 @@ | ||||
| package main | ||||
| package vips | ||||
| 
 | ||||
| import "C" | ||||
| 
 | ||||
							
								
								
									
vips/color.go (34 lines, new file)
							| @@ -0,0 +1,34 @@ | ||||
| package vips | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"regexp" | ||||
| ) | ||||
|  | ||||
| var hexColorRegex = regexp.MustCompile("^([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$") | ||||
|  | ||||
| const ( | ||||
| 	hexColorLongFormat  = "%02x%02x%02x" | ||||
| 	hexColorShortFormat = "%1x%1x%1x" | ||||
| ) | ||||
|  | ||||
| type Color struct{ R, G, B uint8 } | ||||
|  | ||||
| func ColorFromHex(hexcolor string) (Color, error) { | ||||
| 	c := Color{} | ||||
|  | ||||
| 	if !hexColorRegex.MatchString(hexcolor) { | ||||
| 		return c, fmt.Errorf("Invalid hex color: %s", hexcolor) | ||||
| 	} | ||||
|  | ||||
| 	if len(hexcolor) == 3 { | ||||
| 		fmt.Sscanf(hexcolor, hexColorShortFormat, &c.R, &c.G, &c.B) | ||||
| 		c.R *= 17 | ||||
| 		c.G *= 17 | ||||
| 		c.B *= 17 | ||||
| 	} else { | ||||
| 		fmt.Sscanf(hexcolor, hexColorLongFormat, &c.R, &c.G, &c.B) | ||||
| 	} | ||||
|  | ||||
| 	return c, nil | ||||
| } | ||||
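ColorFromHex accepts both 3- and 6-digit hex strings; in the short form every parsed nibble is multiplied by 17 (0x11), which is what expands f to ff. A quick usage sketch, assuming the vips package is imported:

// "f80" parses as R=0xf, G=0x8, B=0x0 and is then scaled by 17,
// so it equals the long form "ff8800".
short, _ := vips.ColorFromHex("f80")
long, _ := vips.ColorFromHex("ff8800")
fmt.Println(short == long) // true: {R:255 G:136 B:0}

if _, err := vips.ColorFromHex("ff88"); err != nil {
	fmt.Println(err) // Invalid hex color: ff88 (neither 3 nor 6 hex digits)
}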
							
								
								
									
vips/ico.go (132 lines, new file)
							| @@ -0,0 +1,132 @@ | ||||
| package vips | ||||
|  | ||||
| /* | ||||
| #include "vips.h" | ||||
| */ | ||||
| import "C" | ||||
| import ( | ||||
| 	"bytes" | ||||
| 	"encoding/binary" | ||||
| 	"errors" | ||||
| 	"fmt" | ||||
| 	"unsafe" | ||||
|  | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagemeta" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagetype" | ||||
| ) | ||||
|  | ||||
| func (img *Image) loadIco(data []byte, shrink int, scale float64, pages int) error { | ||||
| 	icoMeta, err := imagemeta.DecodeIcoMeta(bytes.NewReader(data)) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	offset := icoMeta.BestImageOffset() | ||||
| 	size := icoMeta.BestImageSize() | ||||
|  | ||||
| 	internalData := data[offset : offset+size] | ||||
|  | ||||
| 	var format string | ||||
|  | ||||
| 	meta, err := imagemeta.DecodeMeta(bytes.NewReader(internalData)) | ||||
| 	if err != nil { | ||||
| 		// Looks like it's BMP with an incomplete header | ||||
| 		if d, err := imagemeta.FixBmpHeader(internalData); err == nil { | ||||
| 			format = "bmp" | ||||
| 			internalData = d | ||||
| 		} else { | ||||
| 			return err | ||||
| 		} | ||||
| 	} else { | ||||
| 		format = meta.Format() | ||||
| 	} | ||||
|  | ||||
| 	internalType, ok := imagetype.Types[format] | ||||
| 	if !ok || internalType == imagetype.ICO || !SupportsLoad(internalType) { | ||||
| 		return fmt.Errorf("Can't load %s from ICO", meta.Format()) | ||||
| 	} | ||||
|  | ||||
| 	imgdata := imagedata.ImageData{ | ||||
| 		Type: internalType, | ||||
| 		Data: internalData, | ||||
| 	} | ||||
|  | ||||
| 	return img.Load(&imgdata, shrink, scale, pages) | ||||
| } | ||||
|  | ||||
| func (img *Image) saveAsIco() (*imagedata.ImageData, error) { | ||||
| 	if img.Width() > 256 || img.Height() > 256 { | ||||
| 		return nil, errors.New("Image dimensions are too big. Max dimension size for ICO is 256") | ||||
| 	} | ||||
|  | ||||
| 	var ptr unsafe.Pointer | ||||
| 	imgsize := C.size_t(0) | ||||
|  | ||||
| 	defer func() { | ||||
| 		C.g_free_go(&ptr) | ||||
| 	}() | ||||
|  | ||||
| 	if C.vips_pngsave_go(img.VipsImage, &ptr, &imgsize, 0, 0, 256) != 0 { | ||||
| 		return nil, Error() | ||||
| 	} | ||||
|  | ||||
| 	b := ptrToBytes(ptr, int(imgsize)) | ||||
|  | ||||
| 	buf := new(bytes.Buffer) | ||||
| 	buf.Grow(22 + int(imgsize)) | ||||
|  | ||||
| 	// ICONDIR header | ||||
| 	if _, err := buf.Write([]byte{0, 0, 1, 0, 1, 0}); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	// ICONDIRENTRY | ||||
| 	if _, err := buf.Write([]byte{ | ||||
| 		byte(img.Width() % 256), | ||||
| 		byte(img.Height() % 256), | ||||
| 	}); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 	// Number of colors. Not supported in our case | ||||
| 	if err := buf.WriteByte(0); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 	// Reserved | ||||
| 	if err := buf.WriteByte(0); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 	// Color planes. Always 1 in our case | ||||
| 	if _, err := buf.Write([]byte{1, 0}); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 	// Bits per pixel | ||||
| 	if img.HasAlpha() { | ||||
| 		if _, err := buf.Write([]byte{32, 0}); err != nil { | ||||
| 			return nil, err | ||||
| 		} | ||||
| 	} else { | ||||
| 		if _, err := buf.Write([]byte{24, 0}); err != nil { | ||||
| 			return nil, err | ||||
| 		} | ||||
| 	} | ||||
| 	// Image data size | ||||
| 	if err := binary.Write(buf, binary.LittleEndian, uint32(imgsize)); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 	// Image data offset. Always 22 in our case | ||||
| 	if _, err := buf.Write([]byte{22, 0, 0, 0}); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	if _, err := buf.Write(b); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	imgdata := imagedata.ImageData{ | ||||
| 		Type: imagetype.ICO, | ||||
| 		Data: buf.Bytes(), | ||||
| 	} | ||||
|  | ||||
| 	return &imgdata, nil | ||||
| } | ||||
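saveAsIco encodes the image as a PNG and prepends a fixed 22-byte ICO preamble, which is why the data offset is always 22 and why dimensions are written modulo 256 (the width and height fields are single bytes, and 0 encodes 256). The layout written above, with all multi-byte fields little-endian:

    offset  size  field
    0       2     reserved (0)
    2       2     resource type (1 = icon)
    4       2     number of images (always 1 here)
    6       1     width  = img.Width() % 256
    7       1     height = img.Height() % 256
    8       1     palette size (0, not used)
    9       1     reserved (0)
    10      2     color planes (always 1)
    12      2     bits per pixel (32 with alpha, 24 without)
    14      4     PNG payload size
    18      4     payload offset (always 22)
    22      ...   PNG-encoded image data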
							
								
								
									
vips/testing_helpers.go (19 lines, new file)
							| @@ -0,0 +1,19 @@ | ||||
| package vips | ||||
|  | ||||
| import "github.com/imgproxy/imgproxy/v2/imagetype" | ||||
|  | ||||
| func DisableLoadSupport(it imagetype.Type) { | ||||
| 	typeSupportLoad[it] = false | ||||
| } | ||||
|  | ||||
| func ResetLoadSupport() { | ||||
| 	typeSupportLoad = make(map[imagetype.Type]bool) | ||||
| } | ||||
|  | ||||
| func DisableSaveSupport(it imagetype.Type) { | ||||
| 	typeSupportSave[it] = false | ||||
| } | ||||
|  | ||||
| func ResetSaveSupport() { | ||||
| 	typeSupportSave = make(map[imagetype.Type]bool) | ||||
| } | ||||
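These helpers let tests force the memoized support checks (introduced in the vips.go changes below) to report a type as unsupported without touching libvips. A hypothetical test using them (imports of testing, vips, and imagetype omitted):

// Hypothetical test: pretend the AVIF saver is unavailable.
func TestFallbackWhenAvifSaveUnsupported(t *testing.T) {
	vips.DisableSaveSupport(imagetype.AVIF)
	defer vips.ResetSaveSupport()

	if vips.SupportsSave(imagetype.AVIF) {
		t.Fatal("expected AVIF saving to be reported as unsupported")
	}
}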
| @@ -200,16 +200,27 @@ vips_image_set_array_int_go(VipsImage *image, const char *name, const int *array | ||||
| #endif | ||||
| } | ||||
| 
 | ||||
| gboolean | ||||
| vips_image_hasalpha_go(VipsImage * in) { | ||||
|   return vips_image_hasalpha(in); | ||||
| } | ||||
| 
 | ||||
| int | ||||
| vips_addalpha_go(VipsImage *in, VipsImage **out) { | ||||
|   return vips_addalpha(in, out, NULL); | ||||
| } | ||||
| 
 | ||||
| int | ||||
| vips_premultiply_go(VipsImage *in, VipsImage **out) { | ||||
|   if (!vips_image_hasalpha(in)) | ||||
|     return vips_copy(in, out, NULL); | ||||
| 
 | ||||
|   return vips_premultiply(in, out, NULL); | ||||
| } | ||||
| 
 | ||||
| int | ||||
| vips_unpremultiply_go(VipsImage *in, VipsImage **out) { | ||||
|   if (!vips_image_hasalpha(in)) | ||||
|     return vips_copy(in, out, NULL); | ||||
| 
 | ||||
|   return vips_unpremultiply(in, out, NULL); | ||||
| } | ||||
| 
 | ||||
| int | ||||
| vips_copy_go(VipsImage *in, VipsImage **out) { | ||||
|   return vips_copy(in, out, NULL); | ||||
| @@ -227,37 +238,21 @@ vips_rad2float_go(VipsImage *in, VipsImage **out) { | ||||
| 
 | ||||
| int | ||||
| vips_resize_go(VipsImage *in, VipsImage **out, double wscale, double hscale) { | ||||
|   return vips_resize(in, out, wscale, "vscale", hscale, NULL); | ||||
| } | ||||
|   if (!vips_image_hasalpha(in)) | ||||
|     return vips_resize(in, out, wscale, "vscale", hscale, NULL); | ||||
| 
 | ||||
| int | ||||
| vips_resize_with_premultiply(VipsImage *in, VipsImage **out, double wscale, double hscale) { | ||||
| 	VipsBandFormat format; | ||||
|   VipsImage *tmp1, *tmp2; | ||||
|   VipsBandFormat format = vips_band_format(in); | ||||
| 
 | ||||
|   format = vips_band_format(in); | ||||
|   VipsImage *base = vips_image_new(); | ||||
| 	VipsImage **t = (VipsImage **) vips_object_local_array(VIPS_OBJECT(base), 3); | ||||
| 
 | ||||
|   if (vips_premultiply(in, &tmp1, NULL)) | ||||
|     return 1; | ||||
|   int res = | ||||
|     vips_premultiply(in, &t[0], NULL) || | ||||
|     vips_resize(t[0], &t[1], wscale, "vscale", hscale, NULL) || | ||||
|     vips_unpremultiply(t[1], &t[2], NULL) || | ||||
|     vips_cast(t[2], out, format, NULL); | ||||
| 
 | ||||
| 	if (vips_resize(tmp1, &tmp2, wscale, "vscale", hscale, NULL)) { | ||||
|     clear_image(&tmp1); | ||||
| 		return 1; | ||||
|   } | ||||
|   swap_and_clear(&tmp1, tmp2); | ||||
| 
 | ||||
|   if (vips_unpremultiply(tmp1, &tmp2, NULL)) { | ||||
|     clear_image(&tmp1); | ||||
| 		return 1; | ||||
|   } | ||||
|   swap_and_clear(&tmp1, tmp2); | ||||
| 
 | ||||
|   if (vips_cast(tmp1, out, format, NULL)) { | ||||
|     clear_image(&tmp1); | ||||
| 		return 1; | ||||
|   } | ||||
| 
 | ||||
|   clear_image(&tmp1); | ||||
|   clear_image(&base); | ||||
| 
 | ||||
|   return 0; | ||||
| } | ||||
| @@ -353,6 +348,9 @@ vips_sharpen_go(VipsImage *in, VipsImage **out, double sigma) { | ||||
| 
 | ||||
| int | ||||
| vips_flatten_go(VipsImage *in, VipsImage **out, double r, double g, double b) { | ||||
|   if (!vips_image_hasalpha(in)) | ||||
|     return vips_copy(in, out, NULL); | ||||
| 
 | ||||
|   VipsArrayDouble *bg = vips_array_double_newv(3, r, g, b); | ||||
|   int res = vips_flatten(in, out, "background", bg, NULL); | ||||
|   vips_area_unref((VipsArea *)bg); | ||||
| @@ -453,23 +451,24 @@ vips_replicate_go(VipsImage *in, VipsImage **out, int width, int height) { | ||||
| } | ||||
| 
 | ||||
| int | ||||
| vips_embed_go(VipsImage *in, VipsImage **out, int x, int y, int width, int height, double *bg, int bgn) { | ||||
|   VipsArrayDouble *bga = vips_array_double_new(bg, bgn); | ||||
|   int ret = vips_embed( | ||||
|     in, out, x, y, width, height, | ||||
|     "extend", VIPS_EXTEND_BACKGROUND, | ||||
|     "background", bga, | ||||
|     NULL | ||||
|   ); | ||||
|   vips_area_unref((VipsArea *)bga); | ||||
| vips_embed_go(VipsImage *in, VipsImage **out, int x, int y, int width, int height) { | ||||
|   VipsImage *base = vips_image_new(); | ||||
| 	VipsImage **t = (VipsImage **) vips_object_local_array(VIPS_OBJECT(base), 2); | ||||
| 
 | ||||
|   int ret = | ||||
|     vips_colourspace(in, &t[0], VIPS_INTERPRETATION_sRGB, NULL) || | ||||
|     vips_ensure_alpha(t[0], &t[1]) || | ||||
|     vips_embed(t[1], out, x, y, width, height, "extend", VIPS_EXTEND_BLACK, NULL); | ||||
| 
 | ||||
|   clear_image(&base); | ||||
| 
 | ||||
|   return ret; | ||||
| } | ||||
| 
 | ||||
| int | ||||
| vips_ensure_alpha(VipsImage *in, VipsImage **out) { | ||||
|   if (vips_image_hasalpha_go(in)) { | ||||
|   if (vips_image_hasalpha(in)) | ||||
|     return vips_copy(in, out, NULL); | ||||
|   } | ||||
| 
 | ||||
|   return vips_bandjoin_const1(in, out, 255, NULL); | ||||
| } | ||||
| @@ -477,28 +476,33 @@ vips_ensure_alpha(VipsImage *in, VipsImage **out) { | ||||
| int | ||||
| vips_apply_watermark(VipsImage *in, VipsImage *watermark, VipsImage **out, double opacity) { | ||||
|   VipsImage *base = vips_image_new(); | ||||
| 	VipsImage **t = (VipsImage **) vips_object_local_array(VIPS_OBJECT(base), 5); | ||||
| 	VipsImage **t = (VipsImage **) vips_object_local_array(VIPS_OBJECT(base), 6); | ||||
| 
 | ||||
|   if (vips_ensure_alpha(watermark, &t[0])) { | ||||
|     clear_image(&base); | ||||
| 		return 1; | ||||
|   } | ||||
| 
 | ||||
| 	if (opacity < 1) { | ||||
|     if ( | ||||
|       vips_extract_band(watermark, &t[0], 0, "n", watermark->Bands - 1, NULL) || | ||||
|       vips_extract_band(watermark, &t[1], watermark->Bands - 1, "n", 1, NULL) || | ||||
| 		  vips_linear1(t[1], &t[2], opacity, 0, NULL) || | ||||
|       vips_bandjoin2(t[0], t[2], &t[3], NULL) | ||||
|       vips_extract_band(t[0], &t[1], 0, "n", t[0]->Bands - 1, NULL) || | ||||
|       vips_extract_band(t[0], &t[2], t[0]->Bands - 1, "n", 1, NULL) || | ||||
| 		  vips_linear1(t[2], &t[3], opacity, 0, NULL) || | ||||
|       vips_bandjoin2(t[1], t[3], &t[4], NULL) | ||||
|     ) { | ||||
|       clear_image(&base); | ||||
| 			return 1; | ||||
| 		} | ||||
| 	} else { | ||||
|     if (vips_copy(watermark, &t[3], NULL)) { | ||||
|     if (vips_copy(t[0], &t[4], NULL)) { | ||||
|       clear_image(&base); | ||||
|       return 1; | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|   int res = | ||||
|     vips_composite2(in, t[3], &t[4], VIPS_BLEND_MODE_OVER, "compositing_space", in->Type, NULL) || | ||||
|     vips_cast(t[4], out, vips_image_get_format(in), NULL); | ||||
|     vips_composite2(in, t[4], &t[5], VIPS_BLEND_MODE_OVER, "compositing_space", in->Type, NULL) || | ||||
|     vips_cast(t[5], out, vips_image_get_format(in), NULL); | ||||
| 
 | ||||
|   clear_image(&base); | ||||
| 
 | ||||
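vips_resize_go now hides alpha handling: images with an alpha channel are premultiplied, resized, unpremultiplied, and cast back to the original band format, so the separate vips_resize_with_premultiply entry point and the Go-side hasAlpha flag go away. The reason for this ordering is easiest to see on two neighbouring pixels, a fully transparent black one and an opaque white one, being averaged during downscaling:

    straight alpha:       color = (0 + 255) / 2 = 128, alpha = 128  -> grey fringe at 50% opacity
    premultiplied alpha:  color*alpha = (0 + 255) / 2 = 128, alpha = 128
                          unpremultiplied color = 128 / (128 / 255) = 255 -> white at 50% opacity

Premultiplying first keeps fully transparent pixels from bleeding their invisible color into the result; the same reasoning is why vips_premultiply_go and vips_unpremultiply_go now degrade to vips_copy when the image has no alpha channel.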
| @@ -1,4 +1,4 @@ | ||||
| package main | ||||
| package vips | ||||
| 
 | ||||
| /* | ||||
| #cgo pkg-config: vips | ||||
| @@ -8,26 +8,29 @@ package main | ||||
| */ | ||||
| import "C" | ||||
| import ( | ||||
| 	"bytes" | ||||
| 	"context" | ||||
| 	"encoding/binary" | ||||
| 	"errors" | ||||
| 	"fmt" | ||||
| 	"math" | ||||
| 	"os" | ||||
| 	"runtime" | ||||
| 	"unsafe" | ||||
| 
 | ||||
| 	log "github.com/sirupsen/logrus" | ||||
| 
 | ||||
| 	"github.com/imgproxy/imgproxy/v2/config" | ||||
| 	"github.com/imgproxy/imgproxy/v2/ierrors" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagedata" | ||||
| 	"github.com/imgproxy/imgproxy/v2/imagetype" | ||||
| 	"github.com/imgproxy/imgproxy/v2/metrics/prometheus" | ||||
| ) | ||||
| 
 | ||||
| type vipsImage struct { | ||||
| type Image struct { | ||||
| 	VipsImage *C.VipsImage | ||||
| } | ||||
| 
 | ||||
| var ( | ||||
| 	vipsTypeSupportLoad = make(map[imageType]bool) | ||||
| 	vipsTypeSupportSave = make(map[imageType]bool) | ||||
| 
 | ||||
| 	watermark *imageData | ||||
| 	typeSupportLoad = make(map[imagetype.Type]bool) | ||||
| 	typeSupportSave = make(map[imagetype.Type]bool) | ||||
| ) | ||||
| 
 | ||||
| var vipsConf struct { | ||||
| @@ -35,10 +38,9 @@ var vipsConf struct { | ||||
| 	PngInterlaced         C.int | ||||
| 	PngQuantize           C.int | ||||
| 	PngQuantizationColors C.int | ||||
| 	WatermarkOpacity      C.double | ||||
| } | ||||
| 
 | ||||
| func initVips() error { | ||||
| func Init() error { | ||||
| 	runtime.LockOSThread() | ||||
| 	defer runtime.UnlockOSThread() | ||||
| 
 | ||||
| @@ -66,62 +68,87 @@ func initVips() error { | ||||
| 		C.vips_cache_set_trace(C.gboolean(1)) | ||||
| 	} | ||||
| 
 | ||||
| 	for _, imgtype := range imageTypes { | ||||
| 		vipsTypeSupportLoad[imgtype] = int(C.vips_type_find_load_go(C.int(imgtype))) != 0 | ||||
| 		vipsTypeSupportSave[imgtype] = int(C.vips_type_find_save_go(C.int(imgtype))) != 0 | ||||
| 	} | ||||
| 	vipsConf.JpegProgressive = gbool(config.JpegProgressive) | ||||
| 	vipsConf.PngInterlaced = gbool(config.PngInterlaced) | ||||
| 	vipsConf.PngQuantize = gbool(config.PngQuantize) | ||||
| 	vipsConf.PngQuantizationColors = C.int(config.PngQuantizationColors) | ||||
| 
 | ||||
| 	if conf.JpegProgressive { | ||||
| 		vipsConf.JpegProgressive = C.int(1) | ||||
| 	} | ||||
| 
 | ||||
| 	if conf.PngInterlaced { | ||||
| 		vipsConf.PngInterlaced = C.int(1) | ||||
| 	} | ||||
| 
 | ||||
| 	if conf.PngQuantize { | ||||
| 		vipsConf.PngQuantize = C.int(1) | ||||
| 	} | ||||
| 
 | ||||
| 	vipsConf.PngQuantizationColors = C.int(conf.PngQuantizationColors) | ||||
| 
 | ||||
| 	vipsConf.WatermarkOpacity = C.double(conf.WatermarkOpacity) | ||||
| 
 | ||||
| 	if err := vipsLoadWatermark(); err != nil { | ||||
| 		C.vips_shutdown() | ||||
| 		return fmt.Errorf("Can't load watermark: %s", err) | ||||
| 	} | ||||
| 	prometheus.AddGaugeFunc( | ||||
| 		"vips_memory_bytes", | ||||
| 		"A gauge of the vips tracked memory usage in bytes.", | ||||
| 		GetMem, | ||||
| 	) | ||||
| 	prometheus.AddGaugeFunc( | ||||
| 		"vips_max_memory_bytes", | ||||
| 		"A gauge of the max vips tracked memory usage in bytes.", | ||||
| 		GetMemHighwater, | ||||
| 	) | ||||
| 	prometheus.AddGaugeFunc( | ||||
| 		"vips_allocs", | ||||
| 		"A gauge of the number of active vips allocations.", | ||||
| 		GetAllocs, | ||||
| 	) | ||||
| 
 | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func shutdownVips() { | ||||
| func Shutdown() { | ||||
| 	C.vips_shutdown() | ||||
| } | ||||
| 
 | ||||
| func vipsGetMem() float64 { | ||||
| func GetMem() float64 { | ||||
| 	return float64(C.vips_tracked_get_mem()) | ||||
| } | ||||
| 
 | ||||
| func vipsGetMemHighwater() float64 { | ||||
| func GetMemHighwater() float64 { | ||||
| 	return float64(C.vips_tracked_get_mem_highwater()) | ||||
| } | ||||
| 
 | ||||
| func vipsGetAllocs() float64 { | ||||
| func GetAllocs() float64 { | ||||
| 	return float64(C.vips_tracked_get_allocs()) | ||||
| } | ||||
| 
 | ||||
| func vipsCleanup() { | ||||
| func Cleanup() { | ||||
| 	C.vips_cleanup() | ||||
| } | ||||
| 
 | ||||
| func vipsError() error { | ||||
| 	return newUnexpectedError(C.GoString(C.vips_error_buffer()), 1) | ||||
| func Error() error { | ||||
| 	return ierrors.NewUnexpected(C.GoString(C.vips_error_buffer()), 1) | ||||
| } | ||||
| 
 | ||||
| func vipsLoadWatermark() (err error) { | ||||
| 	watermark, err = getWatermarkData() | ||||
| 	return | ||||
| func SupportsLoad(it imagetype.Type) bool { | ||||
| 	if sup, ok := typeSupportLoad[it]; ok { | ||||
| 		return sup | ||||
| 	} | ||||
| 
 | ||||
| 	sup := false | ||||
| 	if it == imagetype.ICO { | ||||
| 		sup = true | ||||
| 	} else { | ||||
| 		sup = int(C.vips_type_find_load_go(C.int(it))) != 0 | ||||
| 	} | ||||
| 
 | ||||
| 	typeSupportLoad[it] = sup | ||||
| 
 | ||||
| 	return sup | ||||
| } | ||||
| 
 | ||||
| func SupportsSave(it imagetype.Type) bool { | ||||
| 	if sup, ok := typeSupportSave[it]; ok { | ||||
| 		return sup | ||||
| 	} | ||||
| 
 | ||||
| 	sup := false | ||||
| 	if it == imagetype.ICO { | ||||
| 		// We save ICO content as PNG so we need to check it | ||||
| 		sup = int(C.vips_type_find_save_go(C.int(imagetype.PNG))) != 0 | ||||
| 	} else { | ||||
| 		sup = int(C.vips_type_find_save_go(C.int(it))) != 0 | ||||
| 	} | ||||
| 
 | ||||
| 	typeSupportSave[it] = sup | ||||
| 
 | ||||
| 	return sup | ||||
| } | ||||
| 
 | ||||
| func gbool(b bool) C.gboolean { | ||||
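SupportsLoad and SupportsSave replace the eager vipsTypeSupportLoad/vipsTypeSupportSave maps filled at startup: the first call per type asks libvips (vips_type_find_load_go / vips_type_find_save_go), caches the answer, and special-cases ICO, which is loaded through its embedded PNG/BMP payload and saved through the PNG saver. A caller-side sketch; the surrounding handler and variable names are assumptions:

// Sketch: rejecting unsupported source and result formats before processing.
if !vips.SupportsLoad(srcType) {
	return fmt.Errorf("source image type %v is unsupported", srcType)
}
if !vips.SupportsSave(dstType) {
	return fmt.Errorf("resulting image type %v is unsupported", dstType)
}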
| @@ -131,39 +158,51 @@ func gbool(b bool) C.gboolean { | ||||
| 	return C.gboolean(0) | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Width() int { | ||||
| func ptrToBytes(ptr unsafe.Pointer, size int) []byte { | ||||
| 	return (*[math.MaxInt32]byte)(ptr)[:int(size):int(size)] | ||||
| } | ||||
| 
 | ||||
| func (img *Image) Width() int { | ||||
| 	return int(img.VipsImage.Xsize) | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Height() int { | ||||
| func (img *Image) Height() int { | ||||
| 	return int(img.VipsImage.Ysize) | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Load(data []byte, imgtype imageType, shrink int, scale float64, pages int) error { | ||||
| func (img *Image) Load(imgdata *imagedata.ImageData, shrink int, scale float64, pages int) error { | ||||
| 	if imgdata.Type == imagetype.ICO { | ||||
| 		return img.loadIco(imgdata.Data, shrink, scale, pages) | ||||
| 	} | ||||
| 
 | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	data := unsafe.Pointer(&imgdata.Data[0]) | ||||
| 	dataSize := C.size_t(len(imgdata.Data)) | ||||
| 	err := C.int(0) | ||||
| 
 | ||||
| 	switch imgtype { | ||||
| 	case imageTypeJPEG: | ||||
| 		err = C.vips_jpegload_go(unsafe.Pointer(&data[0]), C.size_t(len(data)), C.int(shrink), &tmp) | ||||
| 	case imageTypePNG: | ||||
| 		err = C.vips_pngload_go(unsafe.Pointer(&data[0]), C.size_t(len(data)), &tmp) | ||||
| 	case imageTypeWEBP: | ||||
| 		err = C.vips_webpload_go(unsafe.Pointer(&data[0]), C.size_t(len(data)), C.double(scale), C.int(pages), &tmp) | ||||
| 	case imageTypeGIF: | ||||
| 		err = C.vips_gifload_go(unsafe.Pointer(&data[0]), C.size_t(len(data)), C.int(pages), &tmp) | ||||
| 	case imageTypeSVG: | ||||
| 		err = C.vips_svgload_go(unsafe.Pointer(&data[0]), C.size_t(len(data)), C.double(scale), &tmp) | ||||
| 	case imageTypeHEIC, imageTypeAVIF: | ||||
| 		err = C.vips_heifload_go(unsafe.Pointer(&data[0]), C.size_t(len(data)), &tmp) | ||||
| 	case imageTypeBMP: | ||||
| 		err = C.vips_bmpload_go(unsafe.Pointer(&data[0]), C.size_t(len(data)), &tmp) | ||||
| 	case imageTypeTIFF: | ||||
| 		err = C.vips_tiffload_go(unsafe.Pointer(&data[0]), C.size_t(len(data)), &tmp) | ||||
| 	switch imgdata.Type { | ||||
| 	case imagetype.JPEG: | ||||
| 		err = C.vips_jpegload_go(data, dataSize, C.int(shrink), &tmp) | ||||
| 	case imagetype.PNG: | ||||
| 		err = C.vips_pngload_go(data, dataSize, &tmp) | ||||
| 	case imagetype.WEBP: | ||||
| 		err = C.vips_webpload_go(data, dataSize, C.double(scale), C.int(pages), &tmp) | ||||
| 	case imagetype.GIF: | ||||
| 		err = C.vips_gifload_go(data, dataSize, C.int(pages), &tmp) | ||||
| 	case imagetype.SVG: | ||||
| 		err = C.vips_svgload_go(data, dataSize, C.double(scale), &tmp) | ||||
| 	case imagetype.HEIC, imagetype.AVIF: | ||||
| 		err = C.vips_heifload_go(data, dataSize, &tmp) | ||||
| 	case imagetype.BMP: | ||||
| 		err = C.vips_bmpload_go(data, dataSize, &tmp) | ||||
| 	case imagetype.TIFF: | ||||
| 		err = C.vips_tiffload_go(data, dataSize, &tmp) | ||||
| 	default: | ||||
| 		return errors.New("Unsupported image type to load") | ||||
| 	} | ||||
| 	if err != 0 { | ||||
| 		return vipsError() | ||||
| 		return Error() | ||||
| 	} | ||||
| 
 | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
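Load and Save now work with *imagedata.ImageData instead of raw byte slices plus a type argument: Load dispatches on imgdata.Type (routing ICO through loadIco above), while Save, shown in the next hunk, returns an ImageData whose cancel callback frees the C-allocated buffer. A rough round-trip sketch; srcData and the error-handling style are illustrative:

img := new(vips.Image)
defer img.Clear()

// srcData is assumed to be a *imagedata.ImageData fetched by the downloader.
if err := img.Load(srcData, 1, 1.0, 1); err != nil {
	return err
}
if err := img.Resize(0.5, 0.5); err != nil {
	return err
}

outData, err := img.Save(imagetype.JPEG, 80)
if err != nil {
	return err
}
// outData.Data points at memory allocated on the C side; the cancel function
// installed via SetCancel has to run once the bytes have been written out.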
| @@ -171,126 +210,59 @@ func (img *vipsImage) Load(data []byte, imgtype imageType, shrink int, scale flo | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Save(imgtype imageType, quality int) ([]byte, context.CancelFunc, error) { | ||||
| 	if imgtype == imageTypeICO { | ||||
| 		b, err := img.SaveAsIco() | ||||
| 		return b, func() {}, err | ||||
| func (img *Image) Save(imgtype imagetype.Type, quality int) (*imagedata.ImageData, error) { | ||||
| 	if imgtype == imagetype.ICO { | ||||
| 		return img.saveAsIco() | ||||
| 	} | ||||
| 
 | ||||
| 	var ptr unsafe.Pointer | ||||
| 
 | ||||
| 	cancel := func() { | ||||
| 		C.g_free_go(&ptr) | ||||
| 	} | ||||
| 
 | ||||
| 	err := C.int(0) | ||||
| 
 | ||||
| 	imgsize := C.size_t(0) | ||||
| 
 | ||||
| 	switch imgtype { | ||||
| 	case imageTypeJPEG: | ||||
| 	case imagetype.JPEG: | ||||
| 		err = C.vips_jpegsave_go(img.VipsImage, &ptr, &imgsize, C.int(quality), vipsConf.JpegProgressive) | ||||
| 	case imageTypePNG: | ||||
| 	case imagetype.PNG: | ||||
| 		err = C.vips_pngsave_go(img.VipsImage, &ptr, &imgsize, vipsConf.PngInterlaced, vipsConf.PngQuantize, vipsConf.PngQuantizationColors) | ||||
| 	case imageTypeWEBP: | ||||
| 	case imagetype.WEBP: | ||||
| 		err = C.vips_webpsave_go(img.VipsImage, &ptr, &imgsize, C.int(quality)) | ||||
| 	case imageTypeGIF: | ||||
| 	case imagetype.GIF: | ||||
| 		err = C.vips_gifsave_go(img.VipsImage, &ptr, &imgsize) | ||||
| 	case imageTypeAVIF: | ||||
| 	case imagetype.AVIF: | ||||
| 		err = C.vips_avifsave_go(img.VipsImage, &ptr, &imgsize, C.int(quality)) | ||||
| 	case imageTypeBMP: | ||||
| 	case imagetype.BMP: | ||||
| 		err = C.vips_bmpsave_go(img.VipsImage, &ptr, &imgsize) | ||||
| 	case imageTypeTIFF: | ||||
| 	case imagetype.TIFF: | ||||
| 		err = C.vips_tiffsave_go(img.VipsImage, &ptr, &imgsize, C.int(quality)) | ||||
| 	default: | ||||
| 		return nil, errors.New("Unsupported image type to save") | ||||
| 	} | ||||
| 	if err != 0 { | ||||
| 		C.g_free_go(&ptr) | ||||
| 		return nil, cancel, vipsError() | ||||
| 		cancel() | ||||
| 		return nil, Error() | ||||
| 	} | ||||
| 
 | ||||
| 	b := ptrToBytes(ptr, int(imgsize)) | ||||
| 	imgdata := imagedata.ImageData{ | ||||
| 		Type: imgtype, | ||||
| 		Data: ptrToBytes(ptr, int(imgsize)), | ||||
| 	} | ||||
| 
 | ||||
| 	return b, cancel, nil | ||||
| 	imgdata.SetCancel(cancel) | ||||
| 
 | ||||
| 	return &imgdata, nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) SaveAsIco() ([]byte, error) { | ||||
| 	if img.Width() > 256 || img.Height() > 256 { | ||||
| 		return nil, errors.New("Image dimensions is too big. Max dimension size for ICO is 256") | ||||
| 	} | ||||
| 
 | ||||
| 	var ptr unsafe.Pointer | ||||
| 	imgsize := C.size_t(0) | ||||
| 
 | ||||
| 	defer func() { | ||||
| 		C.g_free_go(&ptr) | ||||
| 	}() | ||||
| 
 | ||||
| 	if C.vips_pngsave_go(img.VipsImage, &ptr, &imgsize, 0, 0, 256) != 0 { | ||||
| 		return nil, vipsError() | ||||
| 	} | ||||
| 
 | ||||
| 	b := ptrToBytes(ptr, int(imgsize)) | ||||
| 
 | ||||
| 	buf := new(bytes.Buffer) | ||||
| 	buf.Grow(22 + int(imgsize)) | ||||
| 
 | ||||
| 	// ICONDIR header | ||||
| 	if _, err := buf.Write([]byte{0, 0, 1, 0, 1, 0}); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 
 | ||||
| 	// ICONDIRENTRY | ||||
| 	if _, err := buf.Write([]byte{ | ||||
| 		byte(img.Width() % 256), | ||||
| 		byte(img.Height() % 256), | ||||
| 	}); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 	// Number of colors. Not supported in our case | ||||
| 	if err := buf.WriteByte(0); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 	// Reserved | ||||
| 	if err := buf.WriteByte(0); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 	// Color planes. Always 1 in our case | ||||
| 	if _, err := buf.Write([]byte{1, 0}); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 	// Bits per pixel | ||||
| 	if img.HasAlpha() { | ||||
| 		if _, err := buf.Write([]byte{32, 0}); err != nil { | ||||
| 			return nil, err | ||||
| 		} | ||||
| 	} else { | ||||
| 		if _, err := buf.Write([]byte{24, 0}); err != nil { | ||||
| 			return nil, err | ||||
| 		} | ||||
| 	} | ||||
| 	// Image data size | ||||
| 	if err := binary.Write(buf, binary.LittleEndian, uint32(imgsize)); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 	// Image data offset. Always 22 in our case | ||||
| 	if _, err := buf.Write([]byte{22, 0, 0, 0}); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 
 | ||||
| 	if _, err := buf.Write(b); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
| 
 | ||||
| 	return buf.Bytes(), nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Clear() { | ||||
| func (img *Image) Clear() { | ||||
| 	if img.VipsImage != nil { | ||||
| 		C.clear_image(&img.VipsImage) | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Arrayjoin(in []*vipsImage) error { | ||||
| func (img *Image) Arrayjoin(in []*Image) error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	arr := make([]*C.VipsImage, len(in)) | ||||
| @@ -299,35 +271,53 @@ func (img *vipsImage) Arrayjoin(in []*vipsImage) error { | ||||
| 	} | ||||
| 
 | ||||
| 	if C.vips_arrayjoin_go(&arr[0], &tmp, C.int(len(arr))) != 0 { | ||||
| 		return vipsError() | ||||
| 		return Error() | ||||
| 	} | ||||
| 
 | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func vipsSupportAnimation(imgtype imageType) bool { | ||||
| 	return imgtype == imageTypeGIF || imgtype == imageTypeWEBP | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) IsAnimated() bool { | ||||
| func (img *Image) IsAnimated() bool { | ||||
| 	return C.vips_is_animated(img.VipsImage) > 0 | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) HasAlpha() bool { | ||||
| 	return C.vips_image_hasalpha_go(img.VipsImage) > 0 | ||||
| func (img *Image) HasAlpha() bool { | ||||
| 	return C.vips_image_hasalpha(img.VipsImage) > 0 | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) GetInt(name string) (int, error) { | ||||
| func (img *Image) Premultiply() error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if C.vips_premultiply_go(img.VipsImage, &tmp) != 0 { | ||||
| 		return Error() | ||||
| 	} | ||||
| 
 | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *Image) Unpremultiply() error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if C.vips_unpremultiply_go(img.VipsImage, &tmp) != 0 { | ||||
| 		return Error() | ||||
| 	} | ||||
| 
 | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *Image) GetInt(name string) (int, error) { | ||||
| 	var i C.int | ||||
| 
 | ||||
| 	if C.vips_image_get_int(img.VipsImage, cachedCString(name), &i) != 0 { | ||||
| 		return 0, vipsError() | ||||
| 		return 0, Error() | ||||
| 	} | ||||
| 	return int(i), nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) GetIntDefault(name string, def int) (int, error) { | ||||
| func (img *Image) GetIntDefault(name string, def int) (int, error) { | ||||
| 	if C.vips_image_get_typeof(img.VipsImage, cachedCString(name)) == 0 { | ||||
| 		return def, nil | ||||
| 	} | ||||
| @@ -335,12 +325,12 @@ func (img *vipsImage) GetIntDefault(name string, def int) (int, error) { | ||||
| 	return img.GetInt(name) | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) GetIntSlice(name string) ([]int, error) { | ||||
| func (img *Image) GetIntSlice(name string) ([]int, error) { | ||||
| 	var ptr unsafe.Pointer | ||||
| 	size := C.int(0) | ||||
| 
 | ||||
| 	if C.vips_image_get_array_int_go(img.VipsImage, cachedCString(name), (**C.int)(unsafe.Pointer(&ptr)), &size) != 0 { | ||||
| 		return nil, vipsError() | ||||
| 		return nil, Error() | ||||
| 	} | ||||
| 
 | ||||
| 	if size == 0 { | ||||
| @@ -357,7 +347,7 @@ func (img *vipsImage) GetIntSlice(name string) ([]int, error) { | ||||
| 	return out, nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) GetIntSliceDefault(name string, def []int) ([]int, error) { | ||||
| func (img *Image) GetIntSliceDefault(name string, def []int) ([]int, error) { | ||||
| 	if C.vips_image_get_typeof(img.VipsImage, cachedCString(name)) == 0 { | ||||
| 		return def, nil | ||||
| 	} | ||||
| @@ -365,11 +355,11 @@ func (img *vipsImage) GetIntSliceDefault(name string, def []int) ([]int, error) | ||||
| 	return img.GetIntSlice(name) | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) SetInt(name string, value int) { | ||||
| func (img *Image) SetInt(name string, value int) { | ||||
| 	C.vips_image_set_int(img.VipsImage, cachedCString(name), C.int(value)) | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) SetIntSlice(name string, value []int) { | ||||
| func (img *Image) SetIntSlice(name string, value []int) { | ||||
| 	in := make([]C.int, len(value)) | ||||
| 	for i, el := range value { | ||||
| 		in[i] = C.int(el) | ||||
| @@ -377,12 +367,12 @@ func (img *vipsImage) SetIntSlice(name string, value []int) { | ||||
| 	C.vips_image_set_array_int_go(img.VipsImage, cachedCString(name), &in[0], C.int(len(value))) | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) CastUchar() error { | ||||
| func (img *Image) CastUchar() error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if C.vips_image_get_format(img.VipsImage) != C.VIPS_FORMAT_UCHAR { | ||||
| 		if C.vips_cast_go(img.VipsImage, &tmp, C.VIPS_FORMAT_UCHAR) != 0 { | ||||
| 			return vipsError() | ||||
| 			return Error() | ||||
| 		} | ||||
| 		C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	} | ||||
| @@ -390,12 +380,12 @@ func (img *vipsImage) CastUchar() error { | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Rad2Float() error { | ||||
| func (img *Image) Rad2Float() error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if C.vips_image_get_coding(img.VipsImage) == C.VIPS_CODING_RAD { | ||||
| 		if C.vips_rad2float_go(img.VipsImage, &tmp) != 0 { | ||||
| 			return vipsError() | ||||
| 			return Error() | ||||
| 		} | ||||
| 		C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	} | ||||
| @@ -403,17 +393,11 @@ func (img *vipsImage) Rad2Float() error { | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Resize(wscale, hscale float64, hasAlpa bool) error { | ||||
| func (img *Image) Resize(wscale, hscale float64) error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if hasAlpa { | ||||
| 		if C.vips_resize_with_premultiply(img.VipsImage, &tmp, C.double(wscale), C.double(hscale)) != 0 { | ||||
| 			return vipsError() | ||||
| 		} | ||||
| 	} else { | ||||
| 		if C.vips_resize_go(img.VipsImage, &tmp, C.double(wscale), C.double(hscale)) != 0 { | ||||
| 			return vipsError() | ||||
| 		} | ||||
| 	if C.vips_resize_go(img.VipsImage, &tmp, C.double(wscale), C.double(hscale)) != 0 { | ||||
| 		return Error() | ||||
| 	} | ||||
| 
 | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| @@ -421,17 +405,17 @@ func (img *vipsImage) Resize(wscale, hscale float64, hasAlpa bool) error { | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Orientation() C.int { | ||||
| func (img *Image) Orientation() C.int { | ||||
| 	return C.vips_get_orientation(img.VipsImage) | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Rotate(angle int) error { | ||||
| func (img *Image) Rotate(angle int) error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	vipsAngle := (angle / 90) % 4 | ||||
| 
 | ||||
| 	if C.vips_rot_go(img.VipsImage, &tmp, C.VipsAngle(vipsAngle)) != 0 { | ||||
| 		return vipsError() | ||||
| 		return Error() | ||||
| 	} | ||||
| 
 | ||||
| 	C.vips_autorot_remove_angle(tmp) | ||||
| @@ -440,47 +424,47 @@ func (img *vipsImage) Rotate(angle int) error { | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Flip() error { | ||||
| func (img *Image) Flip() error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if C.vips_flip_horizontal_go(img.VipsImage, &tmp) != 0 { | ||||
| 		return vipsError() | ||||
| 		return Error() | ||||
| 	} | ||||
| 
 | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Crop(left, top, width, height int) error { | ||||
| func (img *Image) Crop(left, top, width, height int) error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if C.vips_extract_area_go(img.VipsImage, &tmp, C.int(left), C.int(top), C.int(width), C.int(height)) != 0 { | ||||
| 		return vipsError() | ||||
| 		return Error() | ||||
| 	} | ||||
| 
 | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Extract(out *vipsImage, left, top, width, height int) error { | ||||
| func (img *Image) Extract(out *Image, left, top, width, height int) error { | ||||
| 	if C.vips_extract_area_go(img.VipsImage, &out.VipsImage, C.int(left), C.int(top), C.int(width), C.int(height)) != 0 { | ||||
| 		return vipsError() | ||||
| 		return Error() | ||||
| 	} | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) SmartCrop(width, height int) error { | ||||
| func (img *Image) SmartCrop(width, height int) error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if C.vips_smartcrop_go(img.VipsImage, &tmp, C.int(width), C.int(height)) != 0 { | ||||
| 		return vipsError() | ||||
| 		return Error() | ||||
| 	} | ||||
| 
 | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Trim(threshold float64, smart bool, color rgbColor, equalHor bool, equalVer bool) error { | ||||
| func (img *Image) Trim(threshold float64, smart bool, color Color, equalHor bool, equalVer bool) error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if err := img.CopyMemory(); err != nil { | ||||
| @@ -490,58 +474,58 @@ func (img *vipsImage) Trim(threshold float64, smart bool, color rgbColor, equalH | ||||
| 	if C.vips_trim(img.VipsImage, &tmp, C.double(threshold), | ||||
| 		gbool(smart), C.double(color.R), C.double(color.G), C.double(color.B), | ||||
| 		gbool(equalHor), gbool(equalVer)) != 0 { | ||||
| 		return vipsError() | ||||
| 		return Error() | ||||
| 	} | ||||
| 
 | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) EnsureAlpha() error { | ||||
| func (img *Image) EnsureAlpha() error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if C.vips_ensure_alpha(img.VipsImage, &tmp) != 0 { | ||||
| 		return vipsError() | ||||
| 		return Error() | ||||
| 	} | ||||
| 
 | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Flatten(bg rgbColor) error { | ||||
| func (img *Image) Flatten(bg Color) error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if C.vips_flatten_go(img.VipsImage, &tmp, C.double(bg.R), C.double(bg.G), C.double(bg.B)) != 0 { | ||||
| 		return vipsError() | ||||
| 		return Error() | ||||
| 	} | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 
 | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Blur(sigma float32) error { | ||||
| func (img *Image) Blur(sigma float32) error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if C.vips_gaussblur_go(img.VipsImage, &tmp, C.double(sigma)) != 0 { | ||||
| 		return vipsError() | ||||
| 		return Error() | ||||
| 	} | ||||
| 
 | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Sharpen(sigma float32) error { | ||||
| func (img *Image) Sharpen(sigma float32) error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if C.vips_sharpen_go(img.VipsImage, &tmp, C.double(sigma)) != 0 { | ||||
| 		return vipsError() | ||||
| 		return Error() | ||||
| 	} | ||||
| 
 | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) ImportColourProfile() error { | ||||
| func (img *Image) ImportColourProfile() error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if img.VipsImage.Coding != C.VIPS_CODING_NONE { | ||||
| @@ -564,13 +548,13 @@ func (img *vipsImage) ImportColourProfile() error { | ||||
| 	if C.vips_icc_import_go(img.VipsImage, &tmp) == 0 { | ||||
| 		C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	} else { | ||||
| 		logWarning("Can't import ICC profile: %s", vipsError()) | ||||
| 		log.Warningf("Can't import ICC profile: %s", Error()) | ||||
| 	} | ||||
| 
 | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) ExportColourProfile() error { | ||||
| func (img *Image) ExportColourProfile() error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	// Don't export if there's no embedded profile or the embedded profile is sRGB | ||||
| @@ -581,13 +565,13 @@ func (img *vipsImage) ExportColourProfile() error { | ||||
| 	if C.vips_icc_export_go(img.VipsImage, &tmp) == 0 { | ||||
| 		C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	} else { | ||||
| 		logWarning("Can't export ICC profile: %s", vipsError()) | ||||
| 		log.Warningf("Can't export ICC profile: %s", Error()) | ||||
| 	} | ||||
| 
 | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) ExportColourProfileToSRGB() error { | ||||
| func (img *Image) ExportColourProfileToSRGB() error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	// Don't export if there's no embedded profile or the embedded profile is sRGB | ||||
| @@ -598,13 +582,13 @@ func (img *vipsImage) ExportColourProfileToSRGB() error { | ||||
| 	if C.vips_icc_export_srgb(img.VipsImage, &tmp) == 0 { | ||||
| 		C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	} else { | ||||
| 		logWarning("Can't export ICC profile: %s", vipsError()) | ||||
| 		log.Warningf("Can't export ICC profile: %s", Error()) | ||||
| 	} | ||||
| 
 | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) TransformColourProfile() error { | ||||
| func (img *Image) TransformColourProfile() error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	// Don't transform if there's no embedded profile or the embedded profile is sRGB | ||||
| @@ -615,42 +599,42 @@ func (img *vipsImage) TransformColourProfile() error { | ||||
| 	if C.vips_icc_transform_go(img.VipsImage, &tmp) == 0 { | ||||
| 		C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	} else { | ||||
| 		logWarning("Can't transform ICC profile: %s", vipsError()) | ||||
| 		log.Warningf("Can't transform ICC profile: %s", Error()) | ||||
| 	} | ||||
| 
 | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) RemoveColourProfile() error { | ||||
| func (img *Image) RemoveColourProfile() error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if C.vips_icc_remove(img.VipsImage, &tmp) == 0 { | ||||
| 		C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	} else { | ||||
| 		logWarning("Can't remove ICC profile: %s", vipsError()) | ||||
| 		log.Warningf("Can't remove ICC profile: %s", Error()) | ||||
| 	} | ||||
| 
 | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) IsSRGB() bool { | ||||
| func (img *Image) IsSRGB() bool { | ||||
| 	return img.VipsImage.Type == C.VIPS_INTERPRETATION_sRGB | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) LinearColourspace() error { | ||||
| func (img *Image) LinearColourspace() error { | ||||
| 	return img.Colorspace(C.VIPS_INTERPRETATION_scRGB) | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) RgbColourspace() error { | ||||
| func (img *Image) RgbColourspace() error { | ||||
| 	return img.Colorspace(C.VIPS_INTERPRETATION_sRGB) | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Colorspace(colorspace C.VipsInterpretation) error { | ||||
| func (img *Image) Colorspace(colorspace C.VipsInterpretation) error { | ||||
| 	if img.VipsImage.Type != colorspace { | ||||
| 		var tmp *C.VipsImage | ||||
| 
 | ||||
| 		if C.vips_colourspace_go(img.VipsImage, &tmp, colorspace) != 0 { | ||||
| 			return vipsError() | ||||
| 			return Error() | ||||
| 		} | ||||
| 		C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	} | ||||
| @@ -658,73 +642,53 @@ func (img *vipsImage) Colorspace(colorspace C.VipsInterpretation) error { | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) CopyMemory() error { | ||||
| func (img *Image) CopyMemory() error { | ||||
| 	var tmp *C.VipsImage | ||||
| 	if tmp = C.vips_image_copy_memory(img.VipsImage); tmp == nil { | ||||
| 		return vipsError() | ||||
| 		return Error() | ||||
| 	} | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Replicate(width, height int) error { | ||||
| func (img *Image) Replicate(width, height int) error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if C.vips_replicate_go(img.VipsImage, &tmp, C.int(width), C.int(height)) != 0 { | ||||
| 		return vipsError() | ||||
| 		return Error() | ||||
| 	} | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 
 | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Embed(width, height int, offX, offY int, bg rgbColor, transpBg bool) error { | ||||
| func (img *Image) Embed(width, height int, offX, offY int) error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if err := img.RgbColourspace(); err != nil { | ||||
| 		return err | ||||
| 	} | ||||
| 
 | ||||
| 	var bgc []C.double | ||||
| 	if transpBg { | ||||
| 		if !img.HasAlpha() { | ||||
| 			if C.vips_addalpha_go(img.VipsImage, &tmp) != 0 { | ||||
| 				return vipsError() | ||||
| 			} | ||||
| 			C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 		} | ||||
| 
 | ||||
| 		bgc = []C.double{C.double(0)} | ||||
| 	} else { | ||||
| 		bgc = []C.double{C.double(bg.R), C.double(bg.G), C.double(bg.B), 1.0} | ||||
| 	} | ||||
| 
 | ||||
| 	bgn := minInt(int(img.VipsImage.Bands), len(bgc)) | ||||
| 
 | ||||
| 	if C.vips_embed_go(img.VipsImage, &tmp, C.int(offX), C.int(offY), C.int(width), C.int(height), &bgc[0], C.int(bgn)) != 0 { | ||||
| 		return vipsError() | ||||
| 	if C.vips_embed_go(img.VipsImage, &tmp, C.int(offX), C.int(offY), C.int(width), C.int(height)) != 0 { | ||||
| 		return Error() | ||||
| 	} | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 
 | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) ApplyWatermark(wm *vipsImage, opacity float64) error { | ||||
| func (img *Image) ApplyWatermark(wm *Image, opacity float64) error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if C.vips_apply_watermark(img.VipsImage, wm.VipsImage, &tmp, C.double(opacity)) != 0 { | ||||
| 		return vipsError() | ||||
| 		return Error() | ||||
| 	} | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 
 | ||||
| 	return nil | ||||
| } | ||||
| 
 | ||||
| func (img *vipsImage) Strip() error { | ||||
| func (img *Image) Strip() error { | ||||
| 	var tmp *C.VipsImage | ||||
| 
 | ||||
| 	if C.vips_strip(img.VipsImage, &tmp) != 0 { | ||||
| 		return vipsError() | ||||
| 		return Error() | ||||
| 	} | ||||
| 	C.swap_and_clear(&img.VipsImage, tmp) | ||||
| 
 | ||||
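Embed has lost its background arguments: the C helper now converts to sRGB, forces an alpha channel, and extends the canvas with transparent pixels, so a solid background would presumably be applied as a separate Flatten step by the caller. A sketch of that split (the transparentBg flag and the ordering are assumptions, not shown in this diff):

if err := img.Embed(width, height, offX, offY); err != nil {
	return err
}
if !transparentBg { // hypothetical flag from the processing options
	if err := img.Flatten(bgColor); err != nil { // bgColor is a vips.Color
		return err
	}
}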
| @@ -4,19 +4,7 @@ | ||||
| #include <vips/vips7compat.h> | ||||
| #include <vips/vector.h> | ||||
| 
 | ||||
| enum ImgproxyImageTypes { | ||||
|   UNKNOWN = 0, | ||||
|   JPEG, | ||||
|   PNG, | ||||
|   WEBP, | ||||
|   GIF, | ||||
|   ICO, | ||||
|   SVG, | ||||
|   HEIC, | ||||
|   AVIF, | ||||
|   BMP, | ||||
|   TIFF | ||||
| }; | ||||
| #include "../imagetype/imagetype.h" | ||||
| 
 | ||||
| int vips_initialize(); | ||||
| 
 | ||||
| @@ -47,8 +35,9 @@ gboolean vips_is_animated(VipsImage * in); | ||||
| int vips_image_get_array_int_go(VipsImage *image, const char *name, int **out, int *n); | ||||
| void vips_image_set_array_int_go(VipsImage *image, const char *name, const int *array, int n); | ||||
| 
 | ||||
| gboolean vips_image_hasalpha_go(VipsImage * in); | ||||
| int vips_addalpha_go(VipsImage *in, VipsImage **out); | ||||
| int vips_premultiply_go(VipsImage *in, VipsImage **out); | ||||
| int vips_unpremultiply_go(VipsImage *in, VipsImage **out); | ||||
| 
 | ||||
| int vips_copy_go(VipsImage *in, VipsImage **out); | ||||
| 
 | ||||
| @@ -56,7 +45,6 @@ int vips_cast_go(VipsImage *in, VipsImage **out, VipsBandFormat format); | ||||
| int vips_rad2float_go(VipsImage *in, VipsImage **out); | ||||
| 
 | ||||
| int vips_resize_go(VipsImage *in, VipsImage **out, double wscale, double hscale); | ||||
| int vips_resize_with_premultiply(VipsImage *in, VipsImage **out, double wscale, double hscale); | ||||
| 
 | ||||
| int vips_icc_is_srgb_iec61966(VipsImage *in); | ||||
| int vips_has_embedded_icc(VipsImage *in); | ||||
| @@ -82,7 +70,7 @@ int vips_sharpen_go(VipsImage *in, VipsImage **out, double sigma); | ||||
| int vips_flatten_go(VipsImage *in, VipsImage **out, double r, double g, double b); | ||||
| 
 | ||||
| int vips_replicate_go(VipsImage *in, VipsImage **out, int across, int down); | ||||
| int vips_embed_go(VipsImage *in, VipsImage **out, int x, int y, int width, int height, double *bg, int bgn); | ||||
| int vips_embed_go(VipsImage *in, VipsImage **out, int x, int y, int width, int height); | ||||
| 
 | ||||
| int vips_ensure_alpha(VipsImage *in, VipsImage **out); | ||||
| 
 | ||||