Mirror of https://github.com/imgproxy/imgproxy.git (synced 2025-03-17 20:17:48 +02:00)

Commit 5d787f9367: Merge branch 'version/3'
Changed files and directories:

- .circleci
- .golangci.yml
- CHANGELOG.md
- bufpool
- bufreader
- config.go
- config
- cookies
- crypt.go, crypt_test.go
- docker
- docs (GETTING_STARTED.md, _sidebar.md, assets, autoquality.md, chained_pipelines.md, chaining_the_processing.md, configuration.md, datadog.md, generating_the_url.md, generating_the_url_basic.md, getting_the_image_info.md, image_formats_support.md, index.html, memory_usage_tweaks.md, object_detection.md, presets.md, signing_the_url.md, watermark.md)
- download.go
- errorreport
- errors_reporting.go
- etag.go, etag
- examples (signature-truncated.php, signature.c, signature.dart, signature.ex, signature.go, signature.java, signature.js, signature.php, signature.py, signature.rb, signature.swift)
- fs_transport.go, go.mod, go.sum, gzippool.go, healthcheck.go
- ierrors
- image_data.go, image_type.go, imagedata, imagemeta, imagetype, imath
- listen_no_reuseport.go, log.go, logger
- main.go, main_test.go, memory, metrics
- newrelic.go
- options (gravity_type.go, presets.go, presets_test.go, processing_options.go, processing_options_test.go, resize_type.go, url.go, url_options.go)
- process.go

Dockerfile (under docker/)
@@ -1,10 +1,34 @@
-FROM debian:buster
+FROM debian:bullseye-slim

 RUN apt-get -qq update \
-  && apt-get install -y --no-install-recommends bash ca-certificates build-essential \
-    curl git mercurial make binutils bison gcc gobject-introspection libglib2.0-dev \
-    libexpat1-dev libxml2-dev libfftw3-dev libjpeg-dev libpng-dev libwebp-dev libgif-dev \
-    libexif-dev liblcms2-dev libavcodec-dev libavformat-dev libavutil-dev libswscale-dev
+  && apt-get install -y --no-install-recommends \
+    bash \
+    ca-certificates \
+    build-essential \
+    curl \
+    git \
+    mercurial \
+    make \
+    binutils \
+    bison \
+    gcc \
+    gobject-introspection \
+    libglib2.0-dev \
+    libexpat1-dev \
+    libxml2-dev \
+    libfftw3-dev \
+    libjpeg-dev \
+    libpng-dev \
+    libwebp-dev \
+    libgif-dev \
+    librsvg2-dev \
+    libexif-dev \
+    liblcms2-dev \
+    libavcodec-dev \
+    libavformat-dev \
+    libavutil-dev \
+    libswscale-dev \
+    libopencv-dev

 RUN curl -s -S -L https://raw.githubusercontent.com/moovweb/gvm/master/binscripts/gvm-installer | bash -

@@ -12,7 +36,7 @@ RUN \
   mkdir /root/vips \
   && cd /root/vips \
   && curl -s -S -L -o vips_releases.json "https://api.github.com/repos/libvips/libvips/releases" \
-  && for VIPS_VERSION in "8.8" "8.9" "8.10"; do \
+  && for VIPS_VERSION in "8.9" "8.10" "8.11"; do \
     mkdir $VIPS_VERSION \
     && export VIPS_RELEASE=$(grep -m 1 "\"tag_name\": \"v$VIPS_VERSION." vips_releases.json | sed -E 's/.*"v([^"]+)".*/\1/') \
     && echo "Building Vips $VIPS_RELEASE as $VIPS_VERSION" \
.circleci/config.yml
@@ -16,8 +16,8 @@ workflows:
           - checkout_code
       matrix:
         parameters:
-          go_version: ["1.16", "1.15", "1.14"]
+          go_version: ["1.17", "1.16"] # Go 1.15 doesn't support io/fs
-          vips_version: ["8.10", "8.9", "8.8"]
+          vips_version: ["8.11", "8.10", "8.9"]

   release:
     jobs:
@@ -39,6 +39,7 @@ executors:
     working_directory: /go/src/imgproxy
     environment:
       BASH_ENV: "/root/.bashrc"
+      TEST_RESULTS: /tmp/test-results

 commands:
   setup_vips:
@@ -66,6 +67,12 @@ commands:
           echo 'export GOPATH=/go' >> $BASH_ENV
           echo 'export PATH="$PATH:$(go env GOPATH)/bin"' >> $BASH_ENV
           echo 'export CGO_LDFLAGS_ALLOW="-s|-w"' >> $BASH_ENV
+    - run:
+        name: Install gotestsum
+        command: |
+          mkdir -p /usr/local/bin && \
+            curl -Ls https://github.com/gotestyourself/gotestsum/releases/download/v1.6.4/gotestsum_1.6.4_linux_amd64.tar.gz | \
+            tar -xzC /usr/local/bin

 jobs:
   checkout_code:
@@ -98,7 +105,7 @@ jobs:
           command: curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | BINARY=golangci-lint sh -s -- -b $(go env GOPATH)/bin v1.18.0
       - run:
           name: Lint imgproxy
-          command: golangci-lint run .
+          command: golangci-lint run

   build:
     executor: imgproxy
@@ -119,11 +126,18 @@ jobs:
             - go-modules-{{ checksum "go.sum" }}
       - run:
           name: Build imgproxy
-          command: go test -v
+          command: |
+            mkdir -p $TEST_RESULTS && \
+              gotestsum --junitfile ${TEST_RESULTS}/gotestsum-report.xml -- ./...
      - save_cache:
          key: go-modules-{{ checksum "go.sum" }}
          paths:
            - "/go/pkg/mod"
+      - store_artifacts:
+          path: /tmp/test-results
+          destination: raw-test-output
+      - store_test_results:
+          path: /tmp/test-results

   publish_github_release:
     executor: imgproxy
.golangci.yml
@@ -35,12 +35,12 @@ issues:
     # False positives on CGO generated code
     - linters: [staticcheck]
       text: "SA4000:"
-      path: vips\.go
+      path: vips/*

     # False positives on CGO generated code
     - linters: [gocritic]
       text: "dupSubExpr"
-      path: vips\.go
+      path: vips/*

     - linters: [stylecheck]
       text: "ST1005:"
CHANGELOG.md (83 lines changed)
@@ -1,6 +1,45 @@
 # Changelog

 ## [Unreleased]
+### Added
+- Add `X-Origin-Width` and `X-Origin-Height` to debug headers.
+- Add `IMGPROXY_COOKIE_PASSTHROUGH` and `IMGPROXY_COOKIE_BASE_URL` configs.
+
+### Change
+- `dpr` processing option doesn't enlarge image unless `enlarge` is true.
+- `304 Not Modified` responses includes `Cache-Control`, `Expires`, and `Vary` headers.
+- imgproxy responds with `500` HTTP code when the source image downloading error seems temporary (timeout, server error, etc).
+- When `IMGPROXY_FALLBACK_IMAGE_HTTP_CODE` is zero, imgproxy responds with the usual HTTP code.
+- BMP support doesn't require ImageMagick.
+
+### Fix
+- Fix Client Hints behavior. `Width` is physical size, so we should divide it by `DPR` value.
+- Fix scale-on-load in some rare cases.
+
+## [3.0.0.beta1] - 2021-10-01
+### Added
+- (pro) [Autoquality](https://docs.imgproxy.net/autoquality).
+- (pro) [Object detection](https://docs.imgproxy.net/object_detection): `obj` [gravity](https://docs.imgproxy.net/generating_the_url?id=gravity) type, [blur_detections](https://docs.imgproxy.net/generating_the_url?id=blur-detections) processing option, [draw_detections](https://docs.imgproxy.net/generating_the_url?id=draw-detections) processing option.
+- (pro) [Chained pipelines](https://docs.imgproxy.net/chained_pipelines)
+- `IMGPROXY_FALLBACK_IMAGE_HTTP_CODE` config.
+- (pro) [fallback_image_url](https://docs.imgproxy.net/generating_the_url?id=fallback-image-url) processing option.
+- [expires](https://docs.imgproxy.net/generating_the_url?id=expires) processing option.
+- [skip processing](https://docs.imgproxy.net/generating_the_url?id=skip-processing) processing option.
+- [Datadog](./docs/datadog.md) metrics.
+- `force` and `fill-down` resizing types.
+- [min-width](https://docs.imgproxy.net/generating_the_url?id=min-width) and [min-height](https://docs.imgproxy.net/generating_the_url?id=min-height) processing options.
+- [format_quality](https://docs.imgproxy.net/generating_the_url?id=format-quality) processing option.
+
+### Change
+- ETag generator & checker uses source image ETag when possible.
+
+### Removed
+- Removed basic URL format, use [advanced one](./docs/generating_the_url.md) instead.
+- Removed `IMGPROXY_MAX_SRC_DIMENSION` config, use `IMGPROXY_MAX_SRC_RESOLUTION` instead.
+- Removed `IMGPROXY_GZIP_COMPRESSION` config.
+- Removed `IMGPROXY_MAX_GIF_FRAMES` config, use `IMGPROXY_MAX_ANIMATION_FRAMES` instead.
+- Removed `crop` resizing type, use [crop](./docs/generating_the_url.md#crop) processing option instead.
+- Dropped old libvips (<8.8) support.
+
 ## [2.17.0] - 2021-09-07
 ### Added

@@ -60,11 +99,11 @@
 ### Added
 - AVIF support.
 - Azure Blob Storage support.
-- `IMGPROXY_STRIP_COLOR_PROFILE` config and [strip_color_profile](https://docs.imgproxy.net/generating_the_url_advanced?id=strip-color-profile) processing option.
+- `IMGPROXY_STRIP_COLOR_PROFILE` config and [strip_color_profile](https://docs.imgproxy.net/generating_the_url?id=strip-color-profile) processing option.
 - `IMGPROXY_FORMAT_QUALITY` config.
-- `IMGPROXY_AUTO_ROTATE` config and [auto_rotate](https://docs.imgproxy.net/generating_the_url_advanced?id=auto-rotate) processing option.
+- `IMGPROXY_AUTO_ROTATE` config and [auto_rotate](https://docs.imgproxy.net/generating_the_url?id=auto-rotate) processing option.
-- [rotate](https://docs.imgproxy.net/generating_the_url_advanced?id=rotate) processing option.
+- [rotate](https://docs.imgproxy.net/generating_the_url?id=rotate) processing option.
-- `width` and `height` arguments of the [crop](https://docs.imgproxy.net/generating_the_url_advanced?id=crop) processing option can be less than `1` that is treated by imgproxy as a relative value (a.k.a. crop by percentage).
+- `width` and `height` arguments of the [crop](https://docs.imgproxy.net/generating_the_url?id=crop) processing option can be less than `1` that is treated by imgproxy as a relative value (a.k.a. crop by percentage).
 - (pro) Remove Adobe Illustrator garbage from SVGs.
 - (pro) Add IPTC tags to the `/info` response.

@@ -85,9 +124,9 @@
 ### Added
 - Ability to skip processing of some formats. See [Skip processing](https://docs.imgproxy.net/configuration?id=skip-processing).
 - (pro) PDF support.
-- (pro) [video_thumbnail_second](https://docs.imgproxy.net/generating_the_url_advanced?id=video-thumbnail-second) processing option.
+- (pro) [video_thumbnail_second](https://docs.imgproxy.net/generating_the_url?id=video-thumbnail-second) processing option.
-- (pro) [page](https://docs.imgproxy.net/generating_the_url_advanced?id=page) processing option.
+- (pro) [page](https://docs.imgproxy.net/generating_the_url?id=page) processing option.
-- (pro) [background_alpha](https://docs.imgproxy.net/generating_the_url_advanced?id=background-alpha) processing option.
+- (pro) [background_alpha](https://docs.imgproxy.net/generating_the_url?id=background-alpha) processing option.
 - (pro) `IMGPROXY_VIDEO_THUMBNAIL_PROBE_SIZE` and `IMGPROXY_VIDEO_THUMBNAIL_MAX_ANALYZE_DURATION` configs.

 ### Changed

@@ -105,8 +144,8 @@
 ## [2.14.0] - 2020-07-17
 ### Added
 - `IMGPROXY_PROMETHEUS_NAMESPACE` config.
-- [strip_metadata](https://docs.imgproxy.net/generating_the_url_advanced?id=strip-metadata) processing option.
+- [strip_metadata](https://docs.imgproxy.net/generating_the_url?id=strip-metadata) processing option.
-- (pro) Configurable unsharpening. See [Unsharpening](https://docs.imgproxy.net/configuration?id=unsharpening) configs and [unsharpening](https://docs.imgproxy.net/generating_the_url_advanced?id=unsharpening) processing option.
+- (pro) Configurable unsharpening. See [Unsharpening](https://docs.imgproxy.net/configuration?id=unsharpening) configs and [unsharpening](https://docs.imgproxy.net/generating_the_url?id=unsharpening) processing option.

 ### Changed
 - Better for libvips memory metrics for Prometheus.

@@ -128,7 +167,7 @@
 ## [2.13.0] - 2020-04-22
 ### Added
 - Fallback images.
-- [padding](https://docs.imgproxy.net/generating_the_url_advanced?id=padding) processing option.
+- [padding](https://docs.imgproxy.net/generating_the_url?id=padding) processing option.

 ### Changed
 - Optimized memory usage. Especially when dealing with animated images.

@@ -255,7 +294,7 @@
 ## [2.4.0] - 2019-08-20
 ### Added
 - `SO_REUSEPORT` socker option support. Can be enabled with `IMGPROXY_SO_REUSEPORT`.
-- [filename](./docs/generating_the_url_advanced.md#filename) option.
+- [filename](./docs/generating_the_url.md#filename) option.

 ### Changed
 - Better handling if non-sRGB images.

@@ -272,8 +311,8 @@
 - `libvips` v8.8 support: better processing of animated GIFs, built-in CMYK profile, better WebP scale-on-load, etc;
 - Animated WebP support. `IMGPROXY_MAX_GIF_FRAMES` is deprecated, use `IMGPROXY_MAX_ANIMATION_FRAMES`;
 - [HEIC support](./docs/image_formats_support.md#heic-support);
-- [crop](./docs/generating_the_url_advanced.md#crop) processing option. `resizing_type:crop` is deprecated;
+- [crop](./docs/generating_the_url.md#crop) processing option. `resizing_type:crop` is deprecated;
-- Offsets for [gravity](./docs/generating_the_url_advanced.md#gravity);
+- Offsets for [gravity](./docs/generating_the_url.md#gravity);
 - Resizing type `auto`. If both source and resulting dimensions have the same orientation (portrait or landscape), imgproxy will use `fill`. Otherwise, it will use `fit`;
 - Development errors mode. When `IMGPROXY_DEVELOPMENT_ERRORS_MODE` is true, imgproxy will respond with detailed error messages. Not recommended for production because some errors may contain stack trace;
 - `IMGPROXY_KEEP_ALIVE_TIMEOUT` config.

@@ -331,7 +370,7 @@ Fixed processing of images with embedded profiles that was broken in v2.2.8.
 ## [2.2.5] - 2019-02-21
 ### Added
-- [extend](./docs/generating_the_url_advanced.md#extend) processing option.
+- [extend](./docs/generating_the_url.md#extend) processing option.
 - `vips_memory_bytes`, `vips_max_memory_bytes` and `vips_allocs` metrics for Prometheus.

 ### Fixed

@@ -401,15 +440,15 @@ Fixed processing of images with embedded profiles that was broken in v2.2.8.
 ## [2.1.0] - 2018-11-16
 ### Added
-- [Plain source URLs](./docs/generating_the_url_advanced.md#plain) support.
+- [Plain source URLs](./docs/generating_the_url.md#plain) support.
 - [Serving images from Google Cloud Storage](./docs/serving_files_from_google_cloud_storage.md).
 - [Full support of GIFs](./docs/image_formats_support.md#gif-support) including animated ones.
 - [Watermarks](./docs/watermark.md).
 - [New Relic](./docs/new_relic.md) metrics.
 - [Prometheus](./docs/prometheus.md) metrics.
-- [DPR](./docs/generating_the_url_advanced.md#dpr) option (thanks to [selul](https://github.com/selul)).
+- [DPR](./docs/generating_the_url.md#dpr) option (thanks to [selul](https://github.com/selul)).
-- [Cache buster](./docs/generating_the_url_advanced.md#cache-buster) option.
+- [Cache buster](./docs/generating_the_url.md#cache-buster) option.
-- [Quality](./docs/generating_the_url_advanced.md#quality) option.
+- [Quality](./docs/generating_the_url.md#quality) option.
 - Support for custom [Amazon S3](./docs/serving_files_from_s3.md) endpoints.
 - Support for [Amazon S3](./docs/serving_files_from_s3.md) versioning.
 - [Client hints](./docs/configuration.md#client-hints-support) support (thanks to [selul](https://github.com/selul)).

@@ -438,13 +477,13 @@ Fixed processing of images with embedded profiles that was broken in v2.2.8.
 ## [2.0.0] - 2018-10-08
 All-You-Ever-Wanted release! :tada:
 ### Added
-- [New advanced URL format](./docs/generating_the_url_advanced.md). Unleash the full power of imgproxy v2.0.
+- [New advanced URL format](./docs/generating_the_url.md). Unleash the full power of imgproxy v2.0.
 - [Presets](./docs/presets.md). Shorten your urls by reusing processing options.
 - [Serving images from Amazon S3](./docs/serving_files_from_s3.md). Thanks to [@crohr](https://github.com/crohr), now we have a way to serve files from private S3 buckets.
 - [Autoconverting to WebP when supported by browser](./docs/configuration.md#webp-support-detection) (disabled by default). Use WebP as resulting format when browser supports it.
-- [Gaussian blur](./docs/generating_the_url_advanced.md#blur) and [sharpen](./docs/generating_the_url_advanced.md#sharpen) filters. Make your images look better than before.
+- [Gaussian blur](./docs/generating_the_url.md#blur) and [sharpen](./docs/generating_the_url.md#sharpen) filters. Make your images look better than before.
-- [Focus point gravity](./docs/generating_the_url_advanced.md#gravity). Tell imgproxy what point will be the center of the image.
+- [Focus point gravity](./docs/generating_the_url.md#gravity). Tell imgproxy what point will be the center of the image.
-- [Background color](./docs/generating_the_url_advanced.md#background). Control the color of background when converting PNG with alpha-channel to JPEG.
+- [Background color](./docs/generating_the_url.md#background). Control the color of background when converting PNG with alpha-channel to JPEG.

 ### Changed
 - Key and salt are not required anymore. When key or salt is not specified, signature checking is disabled.
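One entry above benefits from a number attached: "Fix Client Hints behavior. `Width` is physical size, so we should divide it by `DPR` value." The arithmetic behind that fix, sketched with illustrative names rather than imgproxy's actual internals:

```go
package main

import "fmt"

// logicalWidth is an illustrative helper, not imgproxy code: the Width
// client hint reports physical pixels, so the target width in CSS pixels
// is Width divided by DPR.
func logicalWidth(widthHint int, dpr float64) int {
	if dpr <= 0 {
		dpr = 1
	}
	return int(float64(widthHint) / dpr)
}

func main() {
	// A device sending Width: 840 and DPR: 2 should get a 420px-wide image.
	fmt.Println(logicalWidth(840, 2))
}
```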
bufpool/bufpool.go
@@ -1,10 +1,14 @@
-package main
+package bufpool

 import (
 	"bytes"
 	"runtime"
 	"sort"
 	"sync"
+
+	"github.com/imgproxy/imgproxy/v3/config"
+	"github.com/imgproxy/imgproxy/v3/imath"
+	"github.com/imgproxy/imgproxy/v3/metrics/prometheus"
 )

 type intSlice []int
@@ -13,7 +17,7 @@ func (p intSlice) Len() int { return len(p) }
 func (p intSlice) Less(i, j int) bool { return p[i] < p[j] }
 func (p intSlice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }

-type bufPool struct {
+type Pool struct {
 	name        string
 	defaultSize int
 	maxSize     int
@@ -25,12 +29,12 @@ type bufPool struct {
 	mutex sync.Mutex
 }

-func newBufPool(name string, n int, defaultSize int) *bufPool {
-	pool := bufPool{
+func New(name string, n int, defaultSize int) *Pool {
+	pool := Pool{
 		name:        name,
 		defaultSize: defaultSize,
 		buffers:     make([]*bytes.Buffer, n),
-		calls:       make(intSlice, conf.BufferPoolCalibrationThreshold),
+		calls:       make(intSlice, config.BufferPoolCalibrationThreshold),
 	}

 	for i := range pool.buffers {
@@ -40,7 +44,7 @@ func newBufPool(name string, n int, defaultSize int) *bufPool {
 	return &pool
 }

-func (p *bufPool) calibrateAndClean() {
+func (p *Pool) calibrateAndClean() {
 	sort.Sort(p.calls)

 	pos := int(float64(len(p.calls)) * 0.95)
@@ -49,8 +53,8 @@ func (p *bufPool) calibrateAndClean() {
 	p.callInd = 0
 	p.maxSize = p.normalizeSize(score)

-	p.defaultSize = maxInt(p.defaultSize, p.calls[0])
-	p.maxSize = maxInt(p.defaultSize, p.maxSize)
+	p.defaultSize = imath.Max(p.defaultSize, p.calls[0])
+	p.maxSize = imath.Max(p.defaultSize, p.maxSize)

 	cleaned := false

@@ -65,13 +69,11 @@ func (p *bufPool) calibrateAndClean() {
 		runtime.GC()
 	}

-	if prometheusEnabled {
-		setPrometheusBufferDefaultSize(p.name, p.defaultSize)
-		setPrometheusBufferMaxSize(p.name, p.maxSize)
-	}
+	prometheus.SetBufferDefaultSize(p.name, p.defaultSize)
+	prometheus.SetBufferMaxSize(p.name, p.maxSize)
 }

-func (p *bufPool) Get(size int) *bytes.Buffer {
+func (p *Pool) Get(size int) *bytes.Buffer {
 	p.mutex.Lock()
 	defer p.mutex.Unlock()

@@ -113,7 +115,7 @@ func (p *bufPool) Get(size int) *bytes.Buffer {
 	buf.Reset()

-	growSize := maxInt(size, p.defaultSize)
+	growSize := imath.Max(size, p.defaultSize)

 	if growSize > buf.Cap() {
 		buf.Grow(growSize)
@@ -122,7 +124,7 @@ func (p *bufPool) Get(size int) *bytes.Buffer {
 	return buf
 }

-func (p *bufPool) Put(buf *bytes.Buffer) {
+func (p *Pool) Put(buf *bytes.Buffer) {
 	p.mutex.Lock()
 	defer p.mutex.Unlock()

@@ -143,8 +145,8 @@ func (p *bufPool) Put(buf *bytes.Buffer) {
 		if b == nil {
 			p.buffers[i] = buf

-			if prometheusEnabled && buf.Cap() > 0 {
-				observePrometheusBufferSize(p.name, buf.Cap())
+			if buf.Cap() > 0 {
+				prometheus.ObserveBufferSize(p.name, buf.Cap())
 			}

 			return
@@ -152,6 +154,6 @@ func (p *bufPool) Put(buf *bytes.Buffer) {
 		}
 	}
 }

-func (p *bufPool) normalizeSize(n int) int {
+func (p *Pool) normalizeSize(n int) int {
 	return (n/bytes.MinRead + 2) * bytes.MinRead
 }
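The bufpool diff above exports the pool as `bufpool.Pool` with `New`, `Get`, and `Put`. Here is a minimal sketch of how a caller might use that API after the refactoring; the pool name and buffer sizes are made up for illustration, and only the three exported functions come from the diff:

```go
package main

import (
	"fmt"

	"github.com/imgproxy/imgproxy/v3/bufpool"
)

func main() {
	// A pool of 16 reusable buffers with a 64 KiB default size
	// (the name and numbers are arbitrary for this sketch).
	pool := bufpool.New("download", 16, 64*1024)

	// Get returns a buffer grown to at least the requested size.
	buf := pool.Get(128 * 1024)
	buf.WriteString("image bytes would go here")
	fmt.Println(buf.Len())

	// Put hands the buffer back so it can be reused and its size
	// fed into the pool's calibration.
	pool.Put(buf)
}
```

Note how the `prometheusEnabled` checks disappear from the pool itself: the metrics calls now go straight to the `metrics/prometheus` package.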
bufreader/bufreader.go (new file, 104 lines)
@@ -0,0 +1,104 @@
package bufreader

import (
    "bufio"
    "bytes"
    "io"

    "github.com/imgproxy/imgproxy/v3/imath"
)

type Reader struct {
    r   io.Reader
    buf *bytes.Buffer
    cur int
}

func New(r io.Reader, buf *bytes.Buffer) *Reader {
    br := Reader{
        r:   r,
        buf: buf,
    }
    return &br
}

func (br *Reader) Read(p []byte) (int, error) {
    if err := br.fill(br.cur + len(p)); err != nil {
        return 0, err
    }

    n := copy(p, br.buf.Bytes()[br.cur:])
    br.cur += n
    return n, nil
}

func (br *Reader) ReadByte() (byte, error) {
    if err := br.fill(br.cur + 1); err != nil {
        return 0, err
    }

    b := br.buf.Bytes()[br.cur]
    br.cur++
    return b, nil
}

func (br *Reader) Discard(n int) (int, error) {
    if n < 0 {
        return 0, bufio.ErrNegativeCount
    }
    if n == 0 {
        return 0, nil
    }

    if err := br.fill(br.cur + n); err != nil {
        return 0, err
    }

    n = imath.Min(n, br.buf.Len()-br.cur)
    br.cur += n
    return n, nil
}

func (br *Reader) Peek(n int) ([]byte, error) {
    if n < 0 {
        return []byte{}, bufio.ErrNegativeCount
    }
    if n == 0 {
        return []byte{}, nil
    }

    if err := br.fill(br.cur + n); err != nil {
        return []byte{}, err
    }

    if n > br.buf.Len()-br.cur {
        return br.buf.Bytes()[br.cur:], io.EOF
    }

    return br.buf.Bytes()[br.cur : br.cur+n], nil
}

func (br *Reader) Flush() error {
    _, err := br.buf.ReadFrom(br.r)
    return err
}

func (br *Reader) fill(need int) error {
    n := need - br.buf.Len()
    if n <= 0 {
        return nil
    }

    n = imath.Max(4096, n)

    if _, err := br.buf.ReadFrom(io.LimitReader(br.r, int64(n))); err != nil {
        return err
    }

    // Nothing was read, it's EOF
    if br.cur == br.buf.Len() {
        return io.EOF
    }

    return nil
}
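`bufreader.Reader` wraps an `io.Reader` and accumulates whatever has been read into a caller-supplied `bytes.Buffer`, which makes it possible to peek at the head of a stream before consuming it. A small usage sketch under assumed inputs (the PNG signature string below is fabricated test data, not something from this commit; only New, Peek, Discard, and Read come from the file above):

```go
package main

import (
	"bytes"
	"fmt"
	"strings"

	"github.com/imgproxy/imgproxy/v3/bufreader"
)

func main() {
	// Pretend this is a downloaded image stream.
	src := strings.NewReader("\x89PNG\r\n\x1a\n...rest of the file...")

	var buf bytes.Buffer // backing buffer; in imgproxy this could come from a pool
	br := bufreader.New(src, &buf)

	// Peek looks at the first bytes without advancing the reader.
	magic, err := br.Peek(8)
	if err != nil {
		panic(err)
	}
	fmt.Printf("magic: %q\n", magic)

	// Discard skips past the signature, then Read continues from there.
	br.Discard(8)
	rest := make([]byte, 16)
	n, _ := br.Read(rest)
	fmt.Printf("next %d bytes: %q\n", n, rest[:n])
}
```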
config.go (deleted, 661 lines)
@@ -1,661 +0,0 @@
package main

import (
    "bufio"
    "encoding/hex"
    "flag"
    "fmt"
    "math"
    "os"
    "regexp"
    "runtime"
    "strconv"
    "strings"
)

func intEnvConfig(i *int, name string) {
    if env, err := strconv.Atoi(os.Getenv(name)); err == nil {
        *i = env
    }
}

func floatEnvConfig(i *float64, name string) {
    if env, err := strconv.ParseFloat(os.Getenv(name), 64); err == nil {
        *i = env
    }
}

func megaIntEnvConfig(f *int, name string) {
    if env, err := strconv.ParseFloat(os.Getenv(name), 64); err == nil {
        *f = int(env * 1000000)
    }
}

func strEnvConfig(s *string, name string) {
    if env := os.Getenv(name); len(env) > 0 {
        *s = env
    }
}

func boolEnvConfig(b *bool, name string) {
    if env, err := strconv.ParseBool(os.Getenv(name)); err == nil {
        *b = env
    }
}

func imageTypesEnvConfig(it *[]imageType, name string) {
    *it = []imageType{}

    if env := os.Getenv(name); len(env) > 0 {
        parts := strings.Split(env, ",")

        for _, p := range parts {
            pt := strings.TrimSpace(p)
            if t, ok := imageTypes[pt]; ok {
                *it = append(*it, t)
            } else {
                logWarning("Unknown image format to skip: %s", pt)
            }
        }
    }
}

func formatQualityEnvConfig(m map[imageType]int, name string) {
    if env := os.Getenv(name); len(env) > 0 {
        parts := strings.Split(env, ",")

        for _, p := range parts {
            i := strings.Index(p, "=")
            if i < 0 {
                logWarning("Invalid format quality string: %s", p)
                continue
            }

            imgtypeStr, qStr := strings.TrimSpace(p[:i]), strings.TrimSpace(p[i+1:])

            imgtype, ok := imageTypes[imgtypeStr]
            if !ok {
                logWarning("Invalid format: %s", p)
            }

            q, err := strconv.Atoi(qStr)
            if err != nil || q <= 0 || q > 100 {
                logWarning("Invalid quality: %s", p)
            }

            m[imgtype] = q
        }
    }
}

func hexEnvConfig(b *[]securityKey, name string) error {
    var err error

    if env := os.Getenv(name); len(env) > 0 {
        parts := strings.Split(env, ",")

        keys := make([]securityKey, len(parts))

        for i, part := range parts {
            if keys[i], err = hex.DecodeString(part); err != nil {
                return fmt.Errorf("%s expected to be hex-encoded strings. Invalid: %s\n", name, part)
            }
        }

        *b = keys
    }

    return nil
}

func hexFileConfig(b *[]securityKey, filepath string) error {
    if len(filepath) == 0 {
        return nil
    }

    f, err := os.Open(filepath)
    if err != nil {
        return fmt.Errorf("Can't open file %s\n", filepath)
    }

    keys := []securityKey{}

    scanner := bufio.NewScanner(f)
    for scanner.Scan() {
        part := scanner.Text()

        if len(part) == 0 {
            continue
        }

        if key, err := hex.DecodeString(part); err == nil {
            keys = append(keys, key)
        } else {
            return fmt.Errorf("%s expected to contain hex-encoded strings. Invalid: %s\n", filepath, part)
        }
    }

    if err := scanner.Err(); err != nil {
        return fmt.Errorf("Failed to read file %s: %s", filepath, err)
    }

    *b = keys

    return nil
}

func presetEnvConfig(p presets, name string) error {
    if env := os.Getenv(name); len(env) > 0 {
        presetStrings := strings.Split(env, ",")

        for _, presetStr := range presetStrings {
            if err := parsePreset(p, presetStr); err != nil {
                return fmt.Errorf(err.Error())
            }
        }
    }

    return nil
}

func presetFileConfig(p presets, filepath string) error {
    if len(filepath) == 0 {
        return nil
    }

    f, err := os.Open(filepath)
    if err != nil {
        return fmt.Errorf("Can't open file %s\n", filepath)
    }

    scanner := bufio.NewScanner(f)
    for scanner.Scan() {
        if err := parsePreset(p, scanner.Text()); err != nil {
            return fmt.Errorf(err.Error())
        }
    }

    if err := scanner.Err(); err != nil {
        return fmt.Errorf("Failed to read presets file: %s", err)
    }

    return nil
}

func patternsEnvConfig(s *[]*regexp.Regexp, name string) {
    if env := os.Getenv(name); len(env) > 0 {
        parts := strings.Split(env, ",")
        result := make([]*regexp.Regexp, len(parts))

        for i, p := range parts {
            result[i] = regexpFromPattern(strings.TrimSpace(p))
        }

        *s = result
    } else {
        *s = []*regexp.Regexp{}
    }
}

func regexpFromPattern(pattern string) *regexp.Regexp {
    var result strings.Builder
    // Perform prefix matching
    result.WriteString("^")
    for i, part := range strings.Split(pattern, "*") {
        // Add a regexp match all without slashes for each wildcard character
        if i > 0 {
            result.WriteString("[^/]*")
        }

        // Quote other parts of the pattern
        result.WriteString(regexp.QuoteMeta(part))
    }
    // It is safe to use regexp.MustCompile since the expression is always valid
    return regexp.MustCompile(result.String())
}

type config struct {
    Network          string
    Bind             string
    ReadTimeout      int
    WriteTimeout     int
    KeepAliveTimeout int
    DownloadTimeout  int
    Concurrency      int
    MaxClients       int

    TTL                     int
    CacheControlPassthrough bool
    SetCanonicalHeader      bool

    SoReuseport bool

    PathPrefix string

    MaxSrcDimension    int
    MaxSrcResolution   int
    MaxSrcFileSize     int
    MaxAnimationFrames int
    MaxSvgCheckBytes   int

    JpegProgressive       bool
    PngInterlaced         bool
    PngQuantize           bool
    PngQuantizationColors int
    AvifSpeed             int
    Quality               int
    FormatQuality         map[imageType]int
    GZipCompression       int
    StripMetadata         bool
    StripColorProfile     bool
    AutoRotate            bool

    EnableWebpDetection bool
    EnforceWebp         bool
    EnableAvifDetection bool
    EnforceAvif         bool
    EnableClientHints   bool

    SkipProcessingFormats []imageType

    UseLinearColorspace bool
    DisableShrinkOnLoad bool

    Keys          []securityKey
    Salts         []securityKey
    AllowInsecure bool
    SignatureSize int

    Secret string

    AllowOrigin string

    UserAgent string

    IgnoreSslVerification bool
    DevelopmentErrorsMode bool

    AllowedSources      []*regexp.Regexp
    LocalFileSystemRoot string
    S3Enabled           bool
    S3Region            string
    S3Endpoint          string
    GCSEnabled          bool
    GCSKey              string
    ABSEnabled          bool
    ABSName             string
    ABSKey              string
    ABSEndpoint         string

    ETagEnabled bool

    BaseURL string

    Presets     presets
    OnlyPresets bool

    WatermarkData    string
    WatermarkPath    string
    WatermarkURL     string
    WatermarkOpacity float64

    FallbackImageData string
    FallbackImagePath string
    FallbackImageURL  string

    NewRelicAppName string
    NewRelicKey     string

    PrometheusBind      string
    PrometheusNamespace string

    BugsnagKey        string
    BugsnagStage      string
    HoneybadgerKey    string
    HoneybadgerEnv    string
    SentryDSN         string
    SentryEnvironment string
    SentryRelease     string
    AirbrakeProjecID  int
    AirbrakeProjecKey string
    AirbrakeEnv       string

    ReportDownloadingErrors bool

    EnableDebugHeaders bool

    FreeMemoryInterval             int
    DownloadBufferSize             int
    GZipBufferSize                 int
    BufferPoolCalibrationThreshold int
}

var conf = config{
    Network:                        "tcp",
    Bind:                           ":8080",
    ReadTimeout:                    10,
    WriteTimeout:                   10,
    KeepAliveTimeout:               10,
    DownloadTimeout:                5,
    Concurrency:                    runtime.NumCPU() * 2,
    TTL:                            3600,
    MaxSrcResolution:               16800000,
    MaxAnimationFrames:             1,
    MaxSvgCheckBytes:               32 * 1024,
    SignatureSize:                  32,
    PngQuantizationColors:          256,
    Quality:                        80,
    AvifSpeed:                      5,
    FormatQuality:                  map[imageType]int{imageTypeAVIF: 50},
    StripMetadata:                  true,
    StripColorProfile:              true,
    AutoRotate:                     true,
    UserAgent:                      fmt.Sprintf("imgproxy/%s", version),
    Presets:                        make(presets),
    WatermarkOpacity:               1,
    BugsnagStage:                   "production",
    HoneybadgerEnv:                 "production",
    SentryEnvironment:              "production",
    SentryRelease:                  fmt.Sprintf("imgproxy/%s", version),
    AirbrakeEnv:                    "production",
    ReportDownloadingErrors:        true,
    FreeMemoryInterval:             10,
    BufferPoolCalibrationThreshold: 1024,
}

func configure() error {
    keyPath := flag.String("keypath", "", "path of the file with hex-encoded key")
    saltPath := flag.String("saltpath", "", "path of the file with hex-encoded salt")
    presetsPath := flag.String("presets", "", "path of the file with presets")
    flag.Parse()

    if port := os.Getenv("PORT"); len(port) > 0 {
        conf.Bind = fmt.Sprintf(":%s", port)
    }

    strEnvConfig(&conf.Network, "IMGPROXY_NETWORK")
    strEnvConfig(&conf.Bind, "IMGPROXY_BIND")
    intEnvConfig(&conf.ReadTimeout, "IMGPROXY_READ_TIMEOUT")
    intEnvConfig(&conf.WriteTimeout, "IMGPROXY_WRITE_TIMEOUT")
    intEnvConfig(&conf.KeepAliveTimeout, "IMGPROXY_KEEP_ALIVE_TIMEOUT")
    intEnvConfig(&conf.DownloadTimeout, "IMGPROXY_DOWNLOAD_TIMEOUT")
    intEnvConfig(&conf.Concurrency, "IMGPROXY_CONCURRENCY")
    intEnvConfig(&conf.MaxClients, "IMGPROXY_MAX_CLIENTS")

    intEnvConfig(&conf.TTL, "IMGPROXY_TTL")
    boolEnvConfig(&conf.CacheControlPassthrough, "IMGPROXY_CACHE_CONTROL_PASSTHROUGH")
    boolEnvConfig(&conf.SetCanonicalHeader, "IMGPROXY_SET_CANONICAL_HEADER")

    boolEnvConfig(&conf.SoReuseport, "IMGPROXY_SO_REUSEPORT")

    strEnvConfig(&conf.PathPrefix, "IMGPROXY_PATH_PREFIX")

    intEnvConfig(&conf.MaxSrcDimension, "IMGPROXY_MAX_SRC_DIMENSION")
    megaIntEnvConfig(&conf.MaxSrcResolution, "IMGPROXY_MAX_SRC_RESOLUTION")
    intEnvConfig(&conf.MaxSrcFileSize, "IMGPROXY_MAX_SRC_FILE_SIZE")
    intEnvConfig(&conf.MaxSvgCheckBytes, "IMGPROXY_MAX_SVG_CHECK_BYTES")

    if _, ok := os.LookupEnv("IMGPROXY_MAX_GIF_FRAMES"); ok {
        logWarning("`IMGPROXY_MAX_GIF_FRAMES` is deprecated and will be removed in future versions. Use `IMGPROXY_MAX_ANIMATION_FRAMES` instead")
        intEnvConfig(&conf.MaxAnimationFrames, "IMGPROXY_MAX_GIF_FRAMES")
    }
    intEnvConfig(&conf.MaxAnimationFrames, "IMGPROXY_MAX_ANIMATION_FRAMES")

    patternsEnvConfig(&conf.AllowedSources, "IMGPROXY_ALLOWED_SOURCES")

    intEnvConfig(&conf.AvifSpeed, "IMGPROXY_AVIF_SPEED")
    boolEnvConfig(&conf.JpegProgressive, "IMGPROXY_JPEG_PROGRESSIVE")
    boolEnvConfig(&conf.PngInterlaced, "IMGPROXY_PNG_INTERLACED")
    boolEnvConfig(&conf.PngQuantize, "IMGPROXY_PNG_QUANTIZE")
    intEnvConfig(&conf.PngQuantizationColors, "IMGPROXY_PNG_QUANTIZATION_COLORS")
    intEnvConfig(&conf.Quality, "IMGPROXY_QUALITY")
    formatQualityEnvConfig(conf.FormatQuality, "IMGPROXY_FORMAT_QUALITY")
    intEnvConfig(&conf.GZipCompression, "IMGPROXY_GZIP_COMPRESSION")
    boolEnvConfig(&conf.StripMetadata, "IMGPROXY_STRIP_METADATA")
    boolEnvConfig(&conf.StripColorProfile, "IMGPROXY_STRIP_COLOR_PROFILE")
    boolEnvConfig(&conf.AutoRotate, "IMGPROXY_AUTO_ROTATE")

    boolEnvConfig(&conf.EnableWebpDetection, "IMGPROXY_ENABLE_WEBP_DETECTION")
    boolEnvConfig(&conf.EnforceWebp, "IMGPROXY_ENFORCE_WEBP")
    boolEnvConfig(&conf.EnableAvifDetection, "IMGPROXY_ENABLE_AVIF_DETECTION")
    boolEnvConfig(&conf.EnforceAvif, "IMGPROXY_ENFORCE_AVIF")
    boolEnvConfig(&conf.EnableClientHints, "IMGPROXY_ENABLE_CLIENT_HINTS")

    imageTypesEnvConfig(&conf.SkipProcessingFormats, "IMGPROXY_SKIP_PROCESSING_FORMATS")

    boolEnvConfig(&conf.UseLinearColorspace, "IMGPROXY_USE_LINEAR_COLORSPACE")
    boolEnvConfig(&conf.DisableShrinkOnLoad, "IMGPROXY_DISABLE_SHRINK_ON_LOAD")

    if err := hexEnvConfig(&conf.Keys, "IMGPROXY_KEY"); err != nil {
        return err
    }
    if err := hexEnvConfig(&conf.Salts, "IMGPROXY_SALT"); err != nil {
        return err
    }
    intEnvConfig(&conf.SignatureSize, "IMGPROXY_SIGNATURE_SIZE")

    if err := hexFileConfig(&conf.Keys, *keyPath); err != nil {
        return err
    }
    if err := hexFileConfig(&conf.Salts, *saltPath); err != nil {
        return err
    }

    strEnvConfig(&conf.Secret, "IMGPROXY_SECRET")

    strEnvConfig(&conf.AllowOrigin, "IMGPROXY_ALLOW_ORIGIN")

    strEnvConfig(&conf.UserAgent, "IMGPROXY_USER_AGENT")

    boolEnvConfig(&conf.IgnoreSslVerification, "IMGPROXY_IGNORE_SSL_VERIFICATION")
    boolEnvConfig(&conf.DevelopmentErrorsMode, "IMGPROXY_DEVELOPMENT_ERRORS_MODE")

    strEnvConfig(&conf.LocalFileSystemRoot, "IMGPROXY_LOCAL_FILESYSTEM_ROOT")

    boolEnvConfig(&conf.S3Enabled, "IMGPROXY_USE_S3")
    strEnvConfig(&conf.S3Region, "IMGPROXY_S3_REGION")
    strEnvConfig(&conf.S3Endpoint, "IMGPROXY_S3_ENDPOINT")

    boolEnvConfig(&conf.GCSEnabled, "IMGPROXY_USE_GCS")
    strEnvConfig(&conf.GCSKey, "IMGPROXY_GCS_KEY")

    boolEnvConfig(&conf.ABSEnabled, "IMGPROXY_USE_ABS")
    strEnvConfig(&conf.ABSName, "IMGPROXY_ABS_NAME")
    strEnvConfig(&conf.ABSKey, "IMGPROXY_ABS_KEY")
    strEnvConfig(&conf.ABSEndpoint, "IMGPROXY_ABS_ENDPOINT")

    boolEnvConfig(&conf.ETagEnabled, "IMGPROXY_USE_ETAG")

    strEnvConfig(&conf.BaseURL, "IMGPROXY_BASE_URL")

    if err := presetEnvConfig(conf.Presets, "IMGPROXY_PRESETS"); err != nil {
        return err
    }
    if err := presetFileConfig(conf.Presets, *presetsPath); err != nil {
        return err
    }
    boolEnvConfig(&conf.OnlyPresets, "IMGPROXY_ONLY_PRESETS")

    strEnvConfig(&conf.WatermarkData, "IMGPROXY_WATERMARK_DATA")
    strEnvConfig(&conf.WatermarkPath, "IMGPROXY_WATERMARK_PATH")
    strEnvConfig(&conf.WatermarkURL, "IMGPROXY_WATERMARK_URL")
    floatEnvConfig(&conf.WatermarkOpacity, "IMGPROXY_WATERMARK_OPACITY")

    strEnvConfig(&conf.FallbackImageData, "IMGPROXY_FALLBACK_IMAGE_DATA")
    strEnvConfig(&conf.FallbackImagePath, "IMGPROXY_FALLBACK_IMAGE_PATH")
    strEnvConfig(&conf.FallbackImageURL, "IMGPROXY_FALLBACK_IMAGE_URL")

    strEnvConfig(&conf.NewRelicAppName, "IMGPROXY_NEW_RELIC_APP_NAME")
    strEnvConfig(&conf.NewRelicKey, "IMGPROXY_NEW_RELIC_KEY")

    strEnvConfig(&conf.PrometheusBind, "IMGPROXY_PROMETHEUS_BIND")
    strEnvConfig(&conf.PrometheusNamespace, "IMGPROXY_PROMETHEUS_NAMESPACE")

    strEnvConfig(&conf.BugsnagKey, "IMGPROXY_BUGSNAG_KEY")
    strEnvConfig(&conf.BugsnagStage, "IMGPROXY_BUGSNAG_STAGE")
    strEnvConfig(&conf.HoneybadgerKey, "IMGPROXY_HONEYBADGER_KEY")
    strEnvConfig(&conf.HoneybadgerEnv, "IMGPROXY_HONEYBADGER_ENV")
    strEnvConfig(&conf.SentryDSN, "IMGPROXY_SENTRY_DSN")
    strEnvConfig(&conf.SentryEnvironment, "IMGPROXY_SENTRY_ENVIRONMENT")
    strEnvConfig(&conf.SentryRelease, "IMGPROXY_SENTRY_RELEASE")
    intEnvConfig(&conf.AirbrakeProjecID, "IMGPROXY_AIRBRAKE_PROJECT_ID")
    strEnvConfig(&conf.AirbrakeProjecKey, "IMGPROXY_AIRBRAKE_PROJECT_KEY")
    strEnvConfig(&conf.AirbrakeEnv, "IMGPROXY_AIRBRAKE_ENVIRONMENT")
    boolEnvConfig(&conf.ReportDownloadingErrors, "IMGPROXY_REPORT_DOWNLOADING_ERRORS")
    boolEnvConfig(&conf.EnableDebugHeaders, "IMGPROXY_ENABLE_DEBUG_HEADERS")

    intEnvConfig(&conf.FreeMemoryInterval, "IMGPROXY_FREE_MEMORY_INTERVAL")
    intEnvConfig(&conf.DownloadBufferSize, "IMGPROXY_DOWNLOAD_BUFFER_SIZE")
    intEnvConfig(&conf.GZipBufferSize, "IMGPROXY_GZIP_BUFFER_SIZE")
    intEnvConfig(&conf.BufferPoolCalibrationThreshold, "IMGPROXY_BUFFER_POOL_CALIBRATION_THRESHOLD")

    if len(conf.Keys) != len(conf.Salts) {
        return fmt.Errorf("Number of keys and number of salts should be equal. Keys: %d, salts: %d", len(conf.Keys), len(conf.Salts))
    }
    if len(conf.Keys) == 0 {
        logWarning("No keys defined, so signature checking is disabled")
        conf.AllowInsecure = true
    }
    if len(conf.Salts) == 0 {
        logWarning("No salts defined, so signature checking is disabled")
        conf.AllowInsecure = true
    }

    if conf.SignatureSize < 1 || conf.SignatureSize > 32 {
        return fmt.Errorf("Signature size should be within 1 and 32, now - %d\n", conf.SignatureSize)
    }

    if len(conf.Bind) == 0 {
        return fmt.Errorf("Bind address is not defined")
    }

    if conf.ReadTimeout <= 0 {
        return fmt.Errorf("Read timeout should be greater than 0, now - %d\n", conf.ReadTimeout)
    }

    if conf.WriteTimeout <= 0 {
        return fmt.Errorf("Write timeout should be greater than 0, now - %d\n", conf.WriteTimeout)
    }
    if conf.KeepAliveTimeout < 0 {
        return fmt.Errorf("KeepAlive timeout should be greater than or equal to 0, now - %d\n", conf.KeepAliveTimeout)
    }

    if conf.DownloadTimeout <= 0 {
        return fmt.Errorf("Download timeout should be greater than 0, now - %d\n", conf.DownloadTimeout)
    }

    if conf.Concurrency <= 0 {
        return fmt.Errorf("Concurrency should be greater than 0, now - %d\n", conf.Concurrency)
    }

    if conf.MaxClients <= 0 {
        conf.MaxClients = conf.Concurrency * 10
    }

    if conf.TTL <= 0 {
        return fmt.Errorf("TTL should be greater than 0, now - %d\n", conf.TTL)
    }

    if conf.MaxSrcDimension < 0 {
        return fmt.Errorf("Max src dimension should be greater than or equal to 0, now - %d\n", conf.MaxSrcDimension)
    } else if conf.MaxSrcDimension > 0 {
        logWarning("IMGPROXY_MAX_SRC_DIMENSION is deprecated and can be removed in future versions. Use IMGPROXY_MAX_SRC_RESOLUTION")
    }

    if conf.MaxSrcResolution <= 0 {
        return fmt.Errorf("Max src resolution should be greater than 0, now - %d\n", conf.MaxSrcResolution)
    }

    if conf.MaxSrcFileSize < 0 {
        return fmt.Errorf("Max src file size should be greater than or equal to 0, now - %d\n", conf.MaxSrcFileSize)
    }

    if conf.MaxAnimationFrames <= 0 {
        return fmt.Errorf("Max animation frames should be greater than 0, now - %d\n", conf.MaxAnimationFrames)
    }

    if conf.PngQuantizationColors < 2 {
        return fmt.Errorf("Png quantization colors should be greater than 1, now - %d\n", conf.PngQuantizationColors)
    } else if conf.PngQuantizationColors > 256 {
        return fmt.Errorf("Png quantization colors can't be greater than 256, now - %d\n", conf.PngQuantizationColors)
    }

    if conf.Quality <= 0 {
        return fmt.Errorf("Quality should be greater than 0, now - %d\n", conf.Quality)
    } else if conf.Quality > 100 {
        return fmt.Errorf("Quality can't be greater than 100, now - %d\n", conf.Quality)
    }

    if conf.AvifSpeed <= 0 {
        return fmt.Errorf("Avif speed should be greater than 0, now - %d\n", conf.AvifSpeed)
    } else if conf.AvifSpeed > 8 {
        return fmt.Errorf("Avif speed can't be greater than 8, now - %d\n", conf.AvifSpeed)
    }

    if conf.GZipCompression < 0 {
        return fmt.Errorf("GZip compression should be greater than or equal to 0, now - %d\n", conf.GZipCompression)
    } else if conf.GZipCompression > 9 {
        return fmt.Errorf("GZip compression can't be greater than 9, now - %d\n", conf.GZipCompression)
    }

    if conf.GZipCompression > 0 {
        logWarning("GZip compression is deprecated and can be removed in future versions")
    }

    if conf.IgnoreSslVerification {
        logWarning("Ignoring SSL verification is very unsafe")
    }

    if conf.LocalFileSystemRoot != "" {
        stat, err := os.Stat(conf.LocalFileSystemRoot)

        if err != nil {
            return fmt.Errorf("Cannot use local directory: %s", err)
        }

        if !stat.IsDir() {
            return fmt.Errorf("Cannot use local directory: not a directory")
        }

        if conf.LocalFileSystemRoot == "/" {
            logWarning("Exposing root via IMGPROXY_LOCAL_FILESYSTEM_ROOT is unsafe")
        }
    }

    if _, ok := os.LookupEnv("IMGPROXY_USE_GCS"); !ok && len(conf.GCSKey) > 0 {
        logWarning("Set IMGPROXY_USE_GCS to true since it may be required by future versions to enable GCS support")
        conf.GCSEnabled = true
    }

    if conf.WatermarkOpacity <= 0 {
        return fmt.Errorf("Watermark opacity should be greater than 0")
    } else if conf.WatermarkOpacity > 1 {
        return fmt.Errorf("Watermark opacity should be less than or equal to 1")
    }

    if len(conf.PrometheusBind) > 0 && conf.PrometheusBind == conf.Bind {
        return fmt.Errorf("Can't use the same binding for the main server and Prometheus")
    }

    if conf.FreeMemoryInterval <= 0 {
        return fmt.Errorf("Free memory interval should be greater than zero")
    }

    if conf.DownloadBufferSize < 0 {
        return fmt.Errorf("Download buffer size should be greater than or equal to 0")
    } else if conf.DownloadBufferSize > math.MaxInt32 {
        return fmt.Errorf("Download buffer size can't be greater than %d", math.MaxInt32)
    }

    if conf.GZipBufferSize < 0 {
        return fmt.Errorf("GZip buffer size should be greater than or equal to 0")
    } else if conf.GZipBufferSize > math.MaxInt32 {
        return fmt.Errorf("GZip buffer size can't be greater than %d", math.MaxInt32)
    }

    if conf.BufferPoolCalibrationThreshold < 64 {
        return fmt.Errorf("Buffer pool calibration threshold should be greater than or equal to 64")
    }

    return nil
}
config/config.go (new file, 545 lines)
@@ -0,0 +1,545 @@
package config

import (
    "flag"
    "fmt"
    "math"
    "os"
    "regexp"
    "runtime"

    log "github.com/sirupsen/logrus"

    "github.com/imgproxy/imgproxy/v3/config/configurators"
    "github.com/imgproxy/imgproxy/v3/imagetype"
    "github.com/imgproxy/imgproxy/v3/version"
)

var (
    Network          string
    Bind             string
    ReadTimeout      int
    WriteTimeout     int
    KeepAliveTimeout int
    DownloadTimeout  int
    Concurrency      int
    MaxClients       int

    TTL                     int
    CacheControlPassthrough bool
    SetCanonicalHeader      bool

    SoReuseport bool

    PathPrefix string

    MaxSrcResolution   int
    MaxSrcFileSize     int
    MaxAnimationFrames int
    MaxSvgCheckBytes   int

    JpegProgressive       bool
    PngInterlaced         bool
    PngQuantize           bool
    PngQuantizationColors int
    AvifSpeed             int
    Quality               int
    FormatQuality         map[imagetype.Type]int
    StripMetadata         bool
    StripColorProfile     bool
    AutoRotate            bool

    EnableWebpDetection bool
    EnforceWebp         bool
    EnableAvifDetection bool
    EnforceAvif         bool
    EnableClientHints   bool

    SkipProcessingFormats []imagetype.Type

    UseLinearColorspace bool
    DisableShrinkOnLoad bool

    Keys          [][]byte
    Salts         [][]byte
    SignatureSize int

    Secret string

    AllowOrigin string

    UserAgent string

    IgnoreSslVerification bool
    DevelopmentErrorsMode bool

    AllowedSources []*regexp.Regexp

    CookiePassthrough bool
    CookieBaseURL     string

    LocalFileSystemRoot string
    S3Enabled           bool
    S3Region            string
    S3Endpoint          string
    GCSEnabled          bool
    GCSKey              string
    ABSEnabled          bool
    ABSName             string
    ABSKey              string
    ABSEndpoint         string

    ETagEnabled bool

    BaseURL string

    Presets     []string
    OnlyPresets bool

    WatermarkData    string
    WatermarkPath    string
    WatermarkURL     string
    WatermarkOpacity float64

    FallbackImageData     string
    FallbackImagePath     string
    FallbackImageURL      string
    FallbackImageHTTPCode int

    DataDogEnable bool

    NewRelicAppName string
    NewRelicKey     string

    PrometheusBind      string
    PrometheusNamespace string

    BugsnagKey   string
    BugsnagStage string

    HoneybadgerKey string
    HoneybadgerEnv string

    SentryDSN         string
    SentryEnvironment string
    SentryRelease     string

    AirbrakeProjecID  int
    AirbrakeProjecKey string
    AirbrakeEnv       string

    ReportDownloadingErrors bool

    EnableDebugHeaders bool

    FreeMemoryInterval             int
    DownloadBufferSize             int
    BufferPoolCalibrationThreshold int
)

var (
    keyPath     = flag.String("keypath", "", "path of the file with hex-encoded key")
    saltPath    = flag.String("saltpath", "", "path of the file with hex-encoded salt")
    presetsPath = flag.String("presets", "", "path of the file with presets")
)

func init() {
    Reset()
}

func Reset() {
    Network = "tcp"
    Bind = ":8080"
    ReadTimeout = 10
    WriteTimeout = 10
    KeepAliveTimeout = 10
    DownloadTimeout = 5
    Concurrency = runtime.NumCPU() * 2
    MaxClients = 0

    TTL = 3600
    CacheControlPassthrough = false
    SetCanonicalHeader = false

    SoReuseport = false

    PathPrefix = ""

    MaxSrcResolution = 16800000
    MaxSrcFileSize = 0
    MaxAnimationFrames = 1
    MaxSvgCheckBytes = 32 * 1024

    JpegProgressive = false
    PngInterlaced = false
    PngQuantize = false
    PngQuantizationColors = 256
    AvifSpeed = 5
    Quality = 80
|
||||||
|
FormatQuality = map[imagetype.Type]int{imagetype.AVIF: 50}
|
||||||
|
StripMetadata = true
|
||||||
|
StripColorProfile = true
|
||||||
|
AutoRotate = true
|
||||||
|
|
||||||
|
EnableWebpDetection = false
|
||||||
|
EnforceWebp = false
|
||||||
|
EnableAvifDetection = false
|
||||||
|
EnforceAvif = false
|
||||||
|
EnableClientHints = false
|
||||||
|
|
||||||
|
SkipProcessingFormats = make([]imagetype.Type, 0)
|
||||||
|
|
||||||
|
UseLinearColorspace = false
|
||||||
|
DisableShrinkOnLoad = false
|
||||||
|
|
||||||
|
Keys = make([][]byte, 0)
|
||||||
|
Salts = make([][]byte, 0)
|
||||||
|
SignatureSize = 32
|
||||||
|
|
||||||
|
Secret = ""
|
||||||
|
|
||||||
|
AllowOrigin = ""
|
||||||
|
|
||||||
|
UserAgent = fmt.Sprintf("imgproxy/%s", version.Version())
|
||||||
|
|
||||||
|
IgnoreSslVerification = false
|
||||||
|
DevelopmentErrorsMode = false
|
||||||
|
|
||||||
|
AllowedSources = make([]*regexp.Regexp, 0)
|
||||||
|
|
||||||
|
CookiePassthrough = false
|
||||||
|
CookieBaseURL = ""
|
||||||
|
|
||||||
|
LocalFileSystemRoot = ""
|
||||||
|
S3Enabled = false
|
||||||
|
S3Region = ""
|
||||||
|
S3Endpoint = ""
|
||||||
|
GCSEnabled = false
|
||||||
|
GCSKey = ""
|
||||||
|
ABSEnabled = false
|
||||||
|
ABSName = ""
|
||||||
|
ABSKey = ""
|
||||||
|
ABSEndpoint = ""
|
||||||
|
|
||||||
|
ETagEnabled = false
|
||||||
|
|
||||||
|
BaseURL = ""
|
||||||
|
|
||||||
|
Presets = make([]string, 0)
|
||||||
|
OnlyPresets = false
|
||||||
|
|
||||||
|
WatermarkData = ""
|
||||||
|
WatermarkPath = ""
|
||||||
|
WatermarkURL = ""
|
||||||
|
WatermarkOpacity = 1
|
||||||
|
|
||||||
|
FallbackImageData = ""
|
||||||
|
FallbackImagePath = ""
|
||||||
|
FallbackImageURL = ""
|
||||||
|
FallbackImageHTTPCode = 200
|
||||||
|
|
||||||
|
DataDogEnable = false
|
||||||
|
|
||||||
|
NewRelicAppName = ""
|
||||||
|
NewRelicKey = ""
|
||||||
|
|
||||||
|
PrometheusBind = ""
|
||||||
|
PrometheusNamespace = ""
|
||||||
|
|
||||||
|
BugsnagKey = ""
|
||||||
|
BugsnagStage = "production"
|
||||||
|
|
||||||
|
HoneybadgerKey = ""
|
||||||
|
HoneybadgerEnv = "production"
|
||||||
|
|
||||||
|
SentryDSN = ""
|
||||||
|
SentryEnvironment = "production"
|
||||||
|
SentryRelease = fmt.Sprintf("imgproxy/%s", version.Version())
|
||||||
|
|
||||||
|
AirbrakeProjecID = 0
|
||||||
|
AirbrakeProjecKey = ""
|
||||||
|
AirbrakeEnv = "production"
|
||||||
|
|
||||||
|
ReportDownloadingErrors = true
|
||||||
|
|
||||||
|
EnableDebugHeaders = false
|
||||||
|
|
||||||
|
FreeMemoryInterval = 10
|
||||||
|
DownloadBufferSize = 0
|
||||||
|
BufferPoolCalibrationThreshold = 1024
|
||||||
|
}
|
||||||
|
|
||||||
|
func Configure() error {
|
||||||
|
flag.Parse()
|
||||||
|
|
||||||
|
if port := os.Getenv("PORT"); len(port) > 0 {
|
||||||
|
Bind = fmt.Sprintf(":%s", port)
|
||||||
|
}
|
||||||
|
|
||||||
|
configurators.String(&Network, "IMGPROXY_NETWORK")
|
||||||
|
configurators.String(&Bind, "IMGPROXY_BIND")
|
||||||
|
configurators.Int(&ReadTimeout, "IMGPROXY_READ_TIMEOUT")
|
||||||
|
configurators.Int(&WriteTimeout, "IMGPROXY_WRITE_TIMEOUT")
|
||||||
|
configurators.Int(&KeepAliveTimeout, "IMGPROXY_KEEP_ALIVE_TIMEOUT")
|
||||||
|
configurators.Int(&DownloadTimeout, "IMGPROXY_DOWNLOAD_TIMEOUT")
|
||||||
|
configurators.Int(&Concurrency, "IMGPROXY_CONCURRENCY")
|
||||||
|
configurators.Int(&MaxClients, "IMGPROXY_MAX_CLIENTS")
|
||||||
|
|
||||||
|
configurators.Int(&TTL, "IMGPROXY_TTL")
|
||||||
|
configurators.Bool(&CacheControlPassthrough, "IMGPROXY_CACHE_CONTROL_PASSTHROUGH")
|
||||||
|
configurators.Bool(&SetCanonicalHeader, "IMGPROXY_SET_CANONICAL_HEADER")
|
||||||
|
|
||||||
|
configurators.Bool(&SoReuseport, "IMGPROXY_SO_REUSEPORT")
|
||||||
|
|
||||||
|
configurators.String(&PathPrefix, "IMGPROXY_PATH_PREFIX")
|
||||||
|
|
||||||
|
configurators.MegaInt(&MaxSrcResolution, "IMGPROXY_MAX_SRC_RESOLUTION")
|
||||||
|
configurators.Int(&MaxSrcFileSize, "IMGPROXY_MAX_SRC_FILE_SIZE")
|
||||||
|
configurators.Int(&MaxSvgCheckBytes, "IMGPROXY_MAX_SVG_CHECK_BYTES")
|
||||||
|
|
||||||
|
configurators.Int(&MaxAnimationFrames, "IMGPROXY_MAX_ANIMATION_FRAMES")
|
||||||
|
|
||||||
|
configurators.Patterns(&AllowedSources, "IMGPROXY_ALLOWED_SOURCES")
|
||||||
|
|
||||||
|
configurators.Bool(&JpegProgressive, "IMGPROXY_JPEG_PROGRESSIVE")
|
||||||
|
configurators.Bool(&PngInterlaced, "IMGPROXY_PNG_INTERLACED")
|
||||||
|
configurators.Bool(&PngQuantize, "IMGPROXY_PNG_QUANTIZE")
|
||||||
|
configurators.Int(&PngQuantizationColors, "IMGPROXY_PNG_QUANTIZATION_COLORS")
|
||||||
|
configurators.Int(&AvifSpeed, "IMGPROXY_AVIF_SPEED")
|
||||||
|
configurators.Int(&Quality, "IMGPROXY_QUALITY")
|
||||||
|
if err := configurators.ImageTypesQuality(FormatQuality, "IMGPROXY_FORMAT_QUALITY"); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
configurators.Bool(&StripMetadata, "IMGPROXY_STRIP_METADATA")
|
||||||
|
configurators.Bool(&StripColorProfile, "IMGPROXY_STRIP_COLOR_PROFILE")
|
||||||
|
configurators.Bool(&AutoRotate, "IMGPROXY_AUTO_ROTATE")
|
||||||
|
|
||||||
|
configurators.Bool(&EnableWebpDetection, "IMGPROXY_ENABLE_WEBP_DETECTION")
|
||||||
|
configurators.Bool(&EnforceWebp, "IMGPROXY_ENFORCE_WEBP")
|
||||||
|
configurators.Bool(&EnableAvifDetection, "IMGPROXY_ENABLE_AVIF_DETECTION")
|
||||||
|
configurators.Bool(&EnforceAvif, "IMGPROXY_ENFORCE_AVIF")
|
||||||
|
configurators.Bool(&EnableClientHints, "IMGPROXY_ENABLE_CLIENT_HINTS")
|
||||||
|
|
||||||
|
if err := configurators.ImageTypes(&SkipProcessingFormats, "IMGPROXY_SKIP_PROCESSING_FORMATS"); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
configurators.Bool(&UseLinearColorspace, "IMGPROXY_USE_LINEAR_COLORSPACE")
|
||||||
|
configurators.Bool(&DisableShrinkOnLoad, "IMGPROXY_DISABLE_SHRINK_ON_LOAD")
|
||||||
|
|
||||||
|
if err := configurators.Hex(&Keys, "IMGPROXY_KEY"); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := configurators.Hex(&Salts, "IMGPROXY_SALT"); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
configurators.Int(&SignatureSize, "IMGPROXY_SIGNATURE_SIZE")
|
||||||
|
|
||||||
|
if err := configurators.HexFile(&Keys, *keyPath); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := configurators.HexFile(&Salts, *saltPath); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
configurators.String(&Secret, "IMGPROXY_SECRET")
|
||||||
|
|
||||||
|
configurators.String(&AllowOrigin, "IMGPROXY_ALLOW_ORIGIN")
|
||||||
|
|
||||||
|
configurators.String(&UserAgent, "IMGPROXY_USER_AGENT")
|
||||||
|
|
||||||
|
configurators.Bool(&IgnoreSslVerification, "IMGPROXY_IGNORE_SSL_VERIFICATION")
|
||||||
|
configurators.Bool(&DevelopmentErrorsMode, "IMGPROXY_DEVELOPMENT_ERRORS_MODE")
|
||||||
|
|
||||||
|
configurators.Bool(&CookiePassthrough, "IMGPROXY_COOKIE_PASSTHROUGH")
|
||||||
|
configurators.String(&CookieBaseURL, "IMGPROXY_COOKIE_BASE_URL")
|
||||||
|
|
||||||
|
configurators.String(&LocalFileSystemRoot, "IMGPROXY_LOCAL_FILESYSTEM_ROOT")
|
||||||
|
|
||||||
|
configurators.Bool(&S3Enabled, "IMGPROXY_USE_S3")
|
||||||
|
configurators.String(&S3Region, "IMGPROXY_S3_REGION")
|
||||||
|
configurators.String(&S3Endpoint, "IMGPROXY_S3_ENDPOINT")
|
||||||
|
|
||||||
|
configurators.Bool(&GCSEnabled, "IMGPROXY_USE_GCS")
|
||||||
|
configurators.String(&GCSKey, "IMGPROXY_GCS_KEY")
|
||||||
|
|
||||||
|
configurators.Bool(&ABSEnabled, "IMGPROXY_USE_ABS")
|
||||||
|
configurators.String(&ABSName, "IMGPROXY_ABS_NAME")
|
||||||
|
configurators.String(&ABSKey, "IMGPROXY_ABS_KEY")
|
||||||
|
configurators.String(&ABSEndpoint, "IMGPROXY_ABS_ENDPOINT")
|
||||||
|
|
||||||
|
configurators.Bool(&ETagEnabled, "IMGPROXY_USE_ETAG")
|
||||||
|
|
||||||
|
configurators.String(&BaseURL, "IMGPROXY_BASE_URL")
|
||||||
|
|
||||||
|
configurators.StringSlice(&Presets, "IMGPROXY_PRESETS")
|
||||||
|
if err := configurators.StringSliceFile(&Presets, *presetsPath); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
configurators.Bool(&OnlyPresets, "IMGPROXY_ONLY_PRESETS")
|
||||||
|
|
||||||
|
configurators.String(&WatermarkData, "IMGPROXY_WATERMARK_DATA")
|
||||||
|
configurators.String(&WatermarkPath, "IMGPROXY_WATERMARK_PATH")
|
||||||
|
configurators.String(&WatermarkURL, "IMGPROXY_WATERMARK_URL")
|
||||||
|
configurators.Float(&WatermarkOpacity, "IMGPROXY_WATERMARK_OPACITY")
|
||||||
|
|
||||||
|
configurators.String(&FallbackImageData, "IMGPROXY_FALLBACK_IMAGE_DATA")
|
||||||
|
configurators.String(&FallbackImagePath, "IMGPROXY_FALLBACK_IMAGE_PATH")
|
||||||
|
configurators.String(&FallbackImageURL, "IMGPROXY_FALLBACK_IMAGE_URL")
|
||||||
|
configurators.Int(&FallbackImageHTTPCode, "IMGPROXY_FALLBACK_IMAGE_HTTP_CODE")
|
||||||
|
|
||||||
|
configurators.Bool(&DataDogEnable, "IMGPROXY_DATADOG_ENABLE")
|
||||||
|
|
||||||
|
configurators.String(&NewRelicAppName, "IMGPROXY_NEW_RELIC_APP_NAME")
|
||||||
|
configurators.String(&NewRelicKey, "IMGPROXY_NEW_RELIC_KEY")
|
||||||
|
|
||||||
|
configurators.String(&PrometheusBind, "IMGPROXY_PROMETHEUS_BIND")
|
||||||
|
configurators.String(&PrometheusNamespace, "IMGPROXY_PROMETHEUS_NAMESPACE")
|
||||||
|
|
||||||
|
configurators.String(&BugsnagKey, "IMGPROXY_BUGSNAG_KEY")
|
||||||
|
configurators.String(&BugsnagStage, "IMGPROXY_BUGSNAG_STAGE")
|
||||||
|
configurators.String(&HoneybadgerKey, "IMGPROXY_HONEYBADGER_KEY")
|
||||||
|
configurators.String(&HoneybadgerEnv, "IMGPROXY_HONEYBADGER_ENV")
|
||||||
|
configurators.String(&SentryDSN, "IMGPROXY_SENTRY_DSN")
|
||||||
|
configurators.String(&SentryEnvironment, "IMGPROXY_SENTRY_ENVIRONMENT")
|
||||||
|
configurators.String(&SentryRelease, "IMGPROXY_SENTRY_RELEASE")
|
||||||
|
configurators.Int(&AirbrakeProjecID, "IMGPROXY_AIRBRAKE_PROJECT_ID")
|
||||||
|
configurators.String(&AirbrakeProjecKey, "IMGPROXY_AIRBRAKE_PROJECT_KEY")
|
||||||
|
configurators.String(&AirbrakeEnv, "IMGPROXY_AIRBRAKE_ENVIRONMENT")
|
||||||
|
configurators.Bool(&ReportDownloadingErrors, "IMGPROXY_REPORT_DOWNLOADING_ERRORS")
|
||||||
|
configurators.Bool(&EnableDebugHeaders, "IMGPROXY_ENABLE_DEBUG_HEADERS")
|
||||||
|
|
||||||
|
configurators.Int(&FreeMemoryInterval, "IMGPROXY_FREE_MEMORY_INTERVAL")
|
||||||
|
configurators.Int(&DownloadBufferSize, "IMGPROXY_DOWNLOAD_BUFFER_SIZE")
|
||||||
|
configurators.Int(&BufferPoolCalibrationThreshold, "IMGPROXY_BUFFER_POOL_CALIBRATION_THRESHOLD")
|
||||||
|
|
||||||
|
if len(Keys) != len(Salts) {
|
||||||
|
return fmt.Errorf("Number of keys and number of salts should be equal. Keys: %d, salts: %d", len(Keys), len(Salts))
|
||||||
|
}
|
||||||
|
if len(Keys) == 0 {
|
||||||
|
log.Warning("No keys defined, so signature checking is disabled")
|
||||||
|
}
|
||||||
|
if len(Salts) == 0 {
|
||||||
|
log.Warning("No salts defined, so signature checking is disabled")
|
||||||
|
}
|
||||||
|
|
||||||
|
if SignatureSize < 1 || SignatureSize > 32 {
|
||||||
|
return fmt.Errorf("Signature size should be within 1 and 32, now - %d\n", SignatureSize)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(Bind) == 0 {
|
||||||
|
return fmt.Errorf("Bind address is not defined")
|
||||||
|
}
|
||||||
|
|
||||||
|
if ReadTimeout <= 0 {
|
||||||
|
return fmt.Errorf("Read timeout should be greater than 0, now - %d\n", ReadTimeout)
|
||||||
|
}
|
||||||
|
|
||||||
|
if WriteTimeout <= 0 {
|
||||||
|
return fmt.Errorf("Write timeout should be greater than 0, now - %d\n", WriteTimeout)
|
||||||
|
}
|
||||||
|
if KeepAliveTimeout < 0 {
|
||||||
|
return fmt.Errorf("KeepAlive timeout should be greater than or equal to 0, now - %d\n", KeepAliveTimeout)
|
||||||
|
}
|
||||||
|
|
||||||
|
if DownloadTimeout <= 0 {
|
||||||
|
return fmt.Errorf("Download timeout should be greater than 0, now - %d\n", DownloadTimeout)
|
||||||
|
}
|
||||||
|
|
||||||
|
if Concurrency <= 0 {
|
||||||
|
return fmt.Errorf("Concurrency should be greater than 0, now - %d\n", Concurrency)
|
||||||
|
}
|
||||||
|
|
||||||
|
if MaxClients <= 0 {
|
||||||
|
MaxClients = Concurrency * 10
|
||||||
|
}
|
||||||
|
|
||||||
|
if TTL <= 0 {
|
||||||
|
return fmt.Errorf("TTL should be greater than 0, now - %d\n", TTL)
|
||||||
|
}
|
||||||
|
|
||||||
|
if MaxSrcResolution <= 0 {
|
||||||
|
return fmt.Errorf("Max src resolution should be greater than 0, now - %d\n", MaxSrcResolution)
|
||||||
|
}
|
||||||
|
|
||||||
|
if MaxSrcFileSize < 0 {
|
||||||
|
return fmt.Errorf("Max src file size should be greater than or equal to 0, now - %d\n", MaxSrcFileSize)
|
||||||
|
}
|
||||||
|
|
||||||
|
if MaxAnimationFrames <= 0 {
|
||||||
|
return fmt.Errorf("Max animation frames should be greater than 0, now - %d\n", MaxAnimationFrames)
|
||||||
|
}
|
||||||
|
|
||||||
|
if PngQuantizationColors < 2 {
|
||||||
|
return fmt.Errorf("Png quantization colors should be greater than 1, now - %d\n", PngQuantizationColors)
|
||||||
|
} else if PngQuantizationColors > 256 {
|
||||||
|
return fmt.Errorf("Png quantization colors can't be greater than 256, now - %d\n", PngQuantizationColors)
|
||||||
|
}
|
||||||
|
|
||||||
|
if AvifSpeed <= 0 {
|
||||||
|
return fmt.Errorf("Avif speed should be greater than 0, now - %d\n", AvifSpeed)
|
||||||
|
} else if AvifSpeed > 8 {
|
||||||
|
return fmt.Errorf("Avif speed can't be greater than 8, now - %d\n", AvifSpeed)
|
||||||
|
}
|
||||||
|
|
||||||
|
if Quality <= 0 {
|
||||||
|
return fmt.Errorf("Quality should be greater than 0, now - %d\n", Quality)
|
||||||
|
} else if Quality > 100 {
|
||||||
|
return fmt.Errorf("Quality can't be greater than 100, now - %d\n", Quality)
|
||||||
|
}
|
||||||
|
|
||||||
|
if IgnoreSslVerification {
|
||||||
|
log.Warning("Ignoring SSL verification is very unsafe")
|
||||||
|
}
|
||||||
|
|
||||||
|
if LocalFileSystemRoot != "" {
|
||||||
|
stat, err := os.Stat(LocalFileSystemRoot)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("Cannot use local directory: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !stat.IsDir() {
|
||||||
|
return fmt.Errorf("Cannot use local directory: not a directory")
|
||||||
|
}
|
||||||
|
|
||||||
|
if LocalFileSystemRoot == "/" {
|
||||||
|
log.Warning("Exposing root via IMGPROXY_LOCAL_FILESYSTEM_ROOT is unsafe")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok := os.LookupEnv("IMGPROXY_USE_GCS"); !ok && len(GCSKey) > 0 {
|
||||||
|
log.Warning("Set IMGPROXY_USE_GCS to true since it may be required by future versions to enable GCS support")
|
||||||
|
GCSEnabled = true
|
||||||
|
}
|
||||||
|
|
||||||
|
if WatermarkOpacity <= 0 {
|
||||||
|
return fmt.Errorf("Watermark opacity should be greater than 0")
|
||||||
|
} else if WatermarkOpacity > 1 {
|
||||||
|
return fmt.Errorf("Watermark opacity should be less than or equal to 1")
|
||||||
|
}
|
||||||
|
|
||||||
|
if FallbackImageHTTPCode < 100 || FallbackImageHTTPCode > 599 {
|
||||||
|
return fmt.Errorf("Fallback image HTTP code should be between 100 and 599")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(PrometheusBind) > 0 && PrometheusBind == Bind {
|
||||||
|
return fmt.Errorf("Can't use the same binding for the main server and Prometheus")
|
||||||
|
}
|
||||||
|
|
||||||
|
if FreeMemoryInterval <= 0 {
|
||||||
|
return fmt.Errorf("Free memory interval should be greater than zero")
|
||||||
|
}
|
||||||
|
|
||||||
|
if DownloadBufferSize < 0 {
|
||||||
|
return fmt.Errorf("Download buffer size should be greater than or equal to 0")
|
||||||
|
} else if DownloadBufferSize > math.MaxInt32 {
|
||||||
|
return fmt.Errorf("Download buffer size can't be greater than %d", math.MaxInt32)
|
||||||
|
}
|
||||||
|
|
||||||
|
if BufferPoolCalibrationThreshold < 64 {
|
||||||
|
return fmt.Errorf("Buffer pool calibration threshold should be greater than or equal to 64")
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
219
config/configurators/configurators.go
Normal file
219
config/configurators/configurators.go
Normal file
@ -0,0 +1,219 @@
|
|||||||
|
package configurators
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"encoding/hex"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"regexp"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/imgproxy/imgproxy/v3/imagetype"
|
||||||
|
)
|
||||||
|
|
||||||
|
func Int(i *int, name string) {
|
||||||
|
if env, err := strconv.Atoi(os.Getenv(name)); err == nil {
|
||||||
|
*i = env
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func Float(i *float64, name string) {
|
||||||
|
if env, err := strconv.ParseFloat(os.Getenv(name), 64); err == nil {
|
||||||
|
*i = env
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func MegaInt(f *int, name string) {
|
||||||
|
if env, err := strconv.ParseFloat(os.Getenv(name), 64); err == nil {
|
||||||
|
*f = int(env * 1000000)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func String(s *string, name string) {
|
||||||
|
if env := os.Getenv(name); len(env) > 0 {
|
||||||
|
*s = env
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func StringSlice(s *[]string, name string) {
|
||||||
|
if env := os.Getenv(name); len(env) > 0 {
|
||||||
|
parts := strings.Split(env, ",")
|
||||||
|
|
||||||
|
for i, p := range parts {
|
||||||
|
parts[i] = strings.TrimSpace(p)
|
||||||
|
}
|
||||||
|
|
||||||
|
*s = parts
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
*s = []string{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func StringSliceFile(s *[]string, filepath string) error {
|
||||||
|
if len(filepath) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
f, err := os.Open(filepath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("Can't open file %s\n", filepath)
|
||||||
|
}
|
||||||
|
|
||||||
|
scanner := bufio.NewScanner(f)
|
||||||
|
for scanner.Scan() {
|
||||||
|
if str := scanner.Text(); len(str) != 0 && !strings.HasPrefix(str, "#") {
|
||||||
|
*s = append(*s, str)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := scanner.Err(); err != nil {
|
||||||
|
return fmt.Errorf("Failed to read presets file: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func Bool(b *bool, name string) {
|
||||||
|
if env, err := strconv.ParseBool(os.Getenv(name)); err == nil {
|
||||||
|
*b = env
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func ImageTypes(it *[]imagetype.Type, name string) error {
|
||||||
|
*it = []imagetype.Type{}
|
||||||
|
|
||||||
|
if env := os.Getenv(name); len(env) > 0 {
|
||||||
|
parts := strings.Split(env, ",")
|
||||||
|
|
||||||
|
for _, p := range parts {
|
||||||
|
pt := strings.TrimSpace(p)
|
||||||
|
if t, ok := imagetype.Types[pt]; ok {
|
||||||
|
*it = append(*it, t)
|
||||||
|
} else {
|
||||||
|
return fmt.Errorf("Unknown image format to skip: %s", pt)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func ImageTypesQuality(m map[imagetype.Type]int, name string) error {
|
||||||
|
if env := os.Getenv(name); len(env) > 0 {
|
||||||
|
parts := strings.Split(env, ",")
|
||||||
|
|
||||||
|
for _, p := range parts {
|
||||||
|
i := strings.Index(p, "=")
|
||||||
|
if i < 0 {
|
||||||
|
return fmt.Errorf("Invalid format quality string: %s", p)
|
||||||
|
}
|
||||||
|
|
||||||
|
imgtypeStr, qStr := strings.TrimSpace(p[:i]), strings.TrimSpace(p[i+1:])
|
||||||
|
|
||||||
|
imgtype, ok := imagetype.Types[imgtypeStr]
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf("Invalid format: %s", p)
|
||||||
|
}
|
||||||
|
|
||||||
|
q, err := strconv.Atoi(qStr)
|
||||||
|
if err != nil || q <= 0 || q > 100 {
|
||||||
|
return fmt.Errorf("Invalid quality: %s", p)
|
||||||
|
}
|
||||||
|
|
||||||
|
m[imgtype] = q
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func Hex(b *[][]byte, name string) error {
|
||||||
|
var err error
|
||||||
|
|
||||||
|
if env := os.Getenv(name); len(env) > 0 {
|
||||||
|
parts := strings.Split(env, ",")
|
||||||
|
|
||||||
|
keys := make([][]byte, len(parts))
|
||||||
|
|
||||||
|
for i, part := range parts {
|
||||||
|
if keys[i], err = hex.DecodeString(part); err != nil {
|
||||||
|
return fmt.Errorf("%s expected to be hex-encoded strings. Invalid: %s\n", name, part)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
*b = keys
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func HexFile(b *[][]byte, filepath string) error {
|
||||||
|
if len(filepath) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
f, err := os.Open(filepath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("Can't open file %s\n", filepath)
|
||||||
|
}
|
||||||
|
|
||||||
|
keys := [][]byte{}
|
||||||
|
|
||||||
|
scanner := bufio.NewScanner(f)
|
||||||
|
for scanner.Scan() {
|
||||||
|
part := scanner.Text()
|
||||||
|
|
||||||
|
if len(part) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if key, err := hex.DecodeString(part); err == nil {
|
||||||
|
keys = append(keys, key)
|
||||||
|
} else {
|
||||||
|
return fmt.Errorf("%s expected to contain hex-encoded strings. Invalid: %s\n", filepath, part)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := scanner.Err(); err != nil {
|
||||||
|
return fmt.Errorf("Failed to read file %s: %s", filepath, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
*b = keys
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func Patterns(s *[]*regexp.Regexp, name string) {
|
||||||
|
if env := os.Getenv(name); len(env) > 0 {
|
||||||
|
parts := strings.Split(env, ",")
|
||||||
|
result := make([]*regexp.Regexp, len(parts))
|
||||||
|
|
||||||
|
for i, p := range parts {
|
||||||
|
result[i] = RegexpFromPattern(strings.TrimSpace(p))
|
||||||
|
}
|
||||||
|
|
||||||
|
*s = result
|
||||||
|
} else {
|
||||||
|
*s = []*regexp.Regexp{}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func RegexpFromPattern(pattern string) *regexp.Regexp {
|
||||||
|
var result strings.Builder
|
||||||
|
// Perform prefix matching
|
||||||
|
result.WriteString("^")
|
||||||
|
for i, part := range strings.Split(pattern, "*") {
|
||||||
|
// Add a regexp match all without slashes for each wildcard character
|
||||||
|
if i > 0 {
|
||||||
|
result.WriteString("[^/]*")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Quote other parts of the pattern
|
||||||
|
result.WriteString(regexp.QuoteMeta(part))
|
||||||
|
}
|
||||||
|
// It is safe to use regexp.MustCompile since the expression is always valid
|
||||||
|
return regexp.MustCompile(result.String())
|
||||||
|
}
|
61
cookies/cookies.go
Normal file
61
cookies/cookies.go
Normal file
@ -0,0 +1,61 @@
|
|||||||
|
package cookies
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"net/http/cookiejar"
|
||||||
|
"net/url"
|
||||||
|
|
||||||
|
"golang.org/x/net/publicsuffix"
|
||||||
|
|
||||||
|
"github.com/imgproxy/imgproxy/v3/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
func JarFromRequest(r *http.Request) (*cookiejar.Jar, error) {
|
||||||
|
jar, err := cookiejar.New(&cookiejar.Options{PublicSuffixList: publicsuffix.List})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if r == nil {
|
||||||
|
return jar, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var cookieBase *url.URL
|
||||||
|
|
||||||
|
if len(config.CookieBaseURL) > 0 {
|
||||||
|
if cookieBase, err = url.Parse(config.CookieBaseURL); err != nil {
|
||||||
|
return nil, fmt.Errorf("can't parse cookie base URL: %s", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if cookieBase == nil {
|
||||||
|
scheme := r.Header.Get("X-Forwarded-Proto")
|
||||||
|
if len(scheme) == 0 {
|
||||||
|
scheme = "http"
|
||||||
|
}
|
||||||
|
|
||||||
|
host := r.Header.Get("X-Forwarded-Host")
|
||||||
|
if len(host) == 0 {
|
||||||
|
host = r.Header.Get("Host")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(host) == 0 {
|
||||||
|
return jar, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
port := r.Header.Get("X-Forwarded-Port")
|
||||||
|
if len(port) > 0 {
|
||||||
|
host = host + ":" + port
|
||||||
|
}
|
||||||
|
|
||||||
|
cookieBase = &url.URL{
|
||||||
|
Scheme: scheme,
|
||||||
|
Host: host,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
jar.SetCookies(cookieBase, r.Cookies())
|
||||||
|
|
||||||
|
return jar, nil
|
||||||
|
}
|
41
crypt.go
41
crypt.go
@ -1,41 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"crypto/hmac"
|
|
||||||
"crypto/sha256"
|
|
||||||
"encoding/base64"
|
|
||||||
"errors"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
errInvalidSignature = errors.New("Invalid signature")
|
|
||||||
errInvalidSignatureEncoding = errors.New("Invalid signature encoding")
|
|
||||||
)
|
|
||||||
|
|
||||||
type securityKey []byte
|
|
||||||
|
|
||||||
func validatePath(signature, path string) error {
|
|
||||||
messageMAC, err := base64.RawURLEncoding.DecodeString(signature)
|
|
||||||
if err != nil {
|
|
||||||
return errInvalidSignatureEncoding
|
|
||||||
}
|
|
||||||
|
|
||||||
for i := 0; i < len(conf.Keys); i++ {
|
|
||||||
if hmac.Equal(messageMAC, signatureFor(path, i)) {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return errInvalidSignature
|
|
||||||
}
|
|
||||||
|
|
||||||
func signatureFor(str string, pairInd int) []byte {
|
|
||||||
mac := hmac.New(sha256.New, conf.Keys[pairInd])
|
|
||||||
mac.Write(conf.Salts[pairInd])
|
|
||||||
mac.Write([]byte(str))
|
|
||||||
expectedMAC := mac.Sum(nil)
|
|
||||||
if conf.SignatureSize < 32 {
|
|
||||||
return expectedMAC[:conf.SignatureSize]
|
|
||||||
}
|
|
||||||
return expectedMAC
|
|
||||||
}
|
|
@ -1,52 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"github.com/stretchr/testify/suite"
|
|
||||||
)
|
|
||||||
|
|
||||||
type CryptTestSuite struct{ MainTestSuite }
|
|
||||||
|
|
||||||
func (s *CryptTestSuite) SetupTest() {
|
|
||||||
s.MainTestSuite.SetupTest()
|
|
||||||
|
|
||||||
conf.Keys = []securityKey{securityKey("test-key")}
|
|
||||||
conf.Salts = []securityKey{securityKey("test-salt")}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *CryptTestSuite) TestValidatePath() {
|
|
||||||
err := validatePath("dtLwhdnPPiu_epMl1LrzheLpvHas-4mwvY6L3Z8WwlY", "asd")
|
|
||||||
assert.Nil(s.T(), err)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *CryptTestSuite) TestValidatePathTruncated() {
|
|
||||||
conf.SignatureSize = 8
|
|
||||||
|
|
||||||
err := validatePath("dtLwhdnPPis", "asd")
|
|
||||||
assert.Nil(s.T(), err)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *CryptTestSuite) TestValidatePathInvalid() {
|
|
||||||
err := validatePath("dtLwhdnPPis", "asd")
|
|
||||||
assert.Error(s.T(), err)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *CryptTestSuite) TestValidatePathMultiplePairs() {
|
|
||||||
conf.Keys = append(conf.Keys, securityKey("test-key2"))
|
|
||||||
conf.Salts = append(conf.Salts, securityKey("test-salt2"))
|
|
||||||
|
|
||||||
err := validatePath("dtLwhdnPPiu_epMl1LrzheLpvHas-4mwvY6L3Z8WwlY", "asd")
|
|
||||||
assert.Nil(s.T(), err)
|
|
||||||
|
|
||||||
err = validatePath("jbDffNPt1-XBgDccsaE-XJB9lx8JIJqdeYIZKgOqZpg", "asd")
|
|
||||||
assert.Nil(s.T(), err)
|
|
||||||
|
|
||||||
err = validatePath("dtLwhdnPPis", "asd")
|
|
||||||
assert.Error(s.T(), err)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCrypt(t *testing.T) {
|
|
||||||
suite.Run(t, new(CryptTestSuite))
|
|
||||||
}
|
|
@ -1,4 +1,4 @@
|
|||||||
ARG BASE_IMAGE_VERSION="v1.4.0"
|
ARG BASE_IMAGE_VERSION="v3.0.0"
|
||||||
|
|
||||||
FROM darthsim/imgproxy-base:${BASE_IMAGE_VERSION}
|
FROM darthsim/imgproxy-base:${BASE_IMAGE_VERSION}
|
||||||
LABEL maintainer="Sergey Alexandrovich <darthsim@gmail.com>"
|
LABEL maintainer="Sergey Alexandrovich <darthsim@gmail.com>"
|
||||||
@ -13,6 +13,7 @@ FROM debian:bullseye-slim
|
|||||||
LABEL maintainer="Sergey Alexandrovich <darthsim@gmail.com>"
|
LABEL maintainer="Sergey Alexandrovich <darthsim@gmail.com>"
|
||||||
|
|
||||||
RUN apt-get update \
|
RUN apt-get update \
|
||||||
|
&& apt-get upgrade -y \
|
||||||
&& apt-get install -y --no-install-recommends \
|
&& apt-get install -y --no-install-recommends \
|
||||||
bash \
|
bash \
|
||||||
ca-certificates \
|
ca-certificates \
|
||||||
|
@ -22,11 +22,12 @@ That's it! No further configuration is needed, but if you want to unleash the fu
|
|||||||
## Resize an image
|
## Resize an image
|
||||||
|
|
||||||
After you've successfully installed imgproxy, you might want to see if it is working correctly. To check that, you can use the following URL to get the resized image of Matt Damon from "The Martian" movie (replace `localhost:8080` with your domain if you installed imgproxy on a remote server):
|
After you've successfully installed imgproxy, you might want to see if it is working correctly. To check that, you can use the following URL to get the resized image of Matt Damon from "The Martian" movie (replace `localhost:8080` with your domain if you installed imgproxy on a remote server):
|
||||||
[http://localhost:8080/insecure/fill/300/400/sm/0/aHR0cHM6Ly9tLm1l/ZGlhLWFtYXpvbi5j/b20vaW1hZ2VzL00v/TVY1Qk1tUTNabVk0/TnpZdFkyVm1ZaTAw/WkRSbUxUZ3lPREF0/WldZelpqaGxOemsx/TnpVMlhrRXlYa0Zx/Y0dkZVFYVnlOVGMz/TWpVek5USUAuanBn.jpg](http://localhost:8080/insecure/fill/300/400/sm/0/aHR0cHM6Ly9tLm1l/ZGlhLWFtYXpvbi5j/b20vaW1hZ2VzL00v/TVY1Qk1tUTNabVk0/TnpZdFkyVm1ZaTAw/WkRSbUxUZ3lPREF0/WldZelpqaGxOemsx/TnpVMlhrRXlYa0Zx/Y0dkZVFYVnlOVGMz/TWpVek5USUAuanBn.jpg)
|
|
||||||
|
[http://localhost:8080/insecure/rs:fill:300:400/g:sm/aHR0cHM6Ly9tLm1l/ZGlhLWFtYXpvbi5j/b20vaW1hZ2VzL00v/TVY1Qk1tUTNabVk0/TnpZdFkyVm1ZaTAw/WkRSbUxUZ3lPREF0/WldZelpqaGxOemsx/TnpVMlhrRXlYa0Zx/Y0dkZVFYVnlOVGMz/TWpVek5USUAuanBn.jpg](http://localhost:8080/insecure/rs:fill:300:400/g:sm/aHR0cHM6Ly9tLm1l/ZGlhLWFtYXpvbi5j/b20vaW1hZ2VzL00v/TVY1Qk1tUTNabVk0/TnpZdFkyVm1ZaTAw/WkRSbUxUZ3lPREF0/WldZelpqaGxOemsx/TnpVMlhrRXlYa0Zx/Y0dkZVFYVnlOVGMz/TWpVek5USUAuanBn.jpg)
|
||||||
|
|
||||||
Here's [the original image](https://m.media-amazon.com/images/M/MV5BMmQ3ZmY4NzYtY2VmYi00ZDRmLTgyODAtZWYzZjhlNzk1NzU2XkEyXkFqcGdeQXVyNTc3MjUzNTI@.jpg), just for reference. Using the URL above, imgproxy is told to resize it to fill the area of `300x400` size with "smart" gravity. "Smart" means that the `libvips` library chooses the most "interesting" part of the image.
|
Here's [the original image](https://m.media-amazon.com/images/M/MV5BMmQ3ZmY4NzYtY2VmYi00ZDRmLTgyODAtZWYzZjhlNzk1NzU2XkEyXkFqcGdeQXVyNTc3MjUzNTI@.jpg), just for reference. Using the URL above, imgproxy is told to resize it to fill the area of `300x400` size with "smart" gravity. "Smart" means that the `libvips` library chooses the most "interesting" part of the image.
|
||||||
|
|
||||||
Learn more on how to generate imgproxy URLs in the [Generating the URL](generating_the_url_basic.md) guide.
|
Learn more on how to generate imgproxy URLs in the [Generating the URL](generating_the_url.md) guide.
|
||||||
|
|
||||||
## Security
|
## Security
|
||||||
|
|
||||||
|
@ -1,18 +1,21 @@
|
|||||||
* [Getting started](GETTING_STARTED)
|
* [Getting started](GETTING_STARTED)
|
||||||
* [Installation](installation)
|
* [Installation](installation)
|
||||||
* [Configuration](configuration)
|
* [Configuration](configuration)
|
||||||
* [Generating the URL (Basic)](generating_the_url_basic)
|
* [Generating the URL](generating_the_url)
|
||||||
* [Generating the URL (Advanced)](generating_the_url_advanced)
|
* [Getting the image info<i class='badge badge-pro'></i>](getting_the_image_info)
|
||||||
* [Getting the image info <img class='pro-badge' src='assets/pro.svg' alt='pro' />](getting_the_image_info)
|
|
||||||
* [Signing the URL](signing_the_url)
|
* [Signing the URL](signing_the_url)
|
||||||
* [Watermark](watermark)
|
* [Watermark](watermark)
|
||||||
* [Presets](presets)
|
* [Presets](presets)
|
||||||
|
* [Object detection<i class='badge badge-pro'></i><i class='badge badge-v3'></i>](object_detection)
|
||||||
|
* [Autoquality<i class='badge badge-pro'></i><i class='badge badge-v3'></i>](autoquality)
|
||||||
|
* [Chained pipelines<i class='badge badge-pro'></i><i class='badge badge-v3'></i>](chained_pipelines)
|
||||||
* [Serving local files](serving_local_files)
|
* [Serving local files](serving_local_files)
|
||||||
* [Serving files from Amazon S3](serving_files_from_s3)
|
* [Serving files from Amazon S3](serving_files_from_s3)
|
||||||
* [Serving files from Google Cloud Storage](serving_files_from_google_cloud_storage)
|
* [Serving files from Google Cloud Storage](serving_files_from_google_cloud_storage)
|
||||||
* [Serving files from Azure Blob Storage](serving_files_from_azure_blob_storage.md)
|
* [Serving files from Azure Blob Storage](serving_files_from_azure_blob_storage.md)
|
||||||
* [New Relic](new_relic)
|
* [New Relic](new_relic)
|
||||||
* [Prometheus](prometheus)
|
* [Prometheus](prometheus)
|
||||||
|
* [Datadog<i class='badge badge-v3'></i>](datadog)
|
||||||
* [Image formats support](image_formats_support)
|
* [Image formats support](image_formats_support)
|
||||||
* [About processing pipeline](about_processing_pipeline)
|
* [About processing pipeline](about_processing_pipeline)
|
||||||
* [Health check](healthcheck)
|
* [Health check](healthcheck)
|
||||||
|
@ -85,13 +85,34 @@ body::before {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
.pro-badge {
|
i.badge::before {
|
||||||
height: 1em;
|
display: inline-block;
|
||||||
|
height: 1.5em;
|
||||||
|
line-height: 1.5em;
|
||||||
vertical-align: middle;
|
vertical-align: middle;
|
||||||
|
border: 1px solid;
|
||||||
|
font-size: .6em;
|
||||||
|
font-weight: 700;
|
||||||
|
font-style: normal;
|
||||||
|
border-radius: 0.75em;
|
||||||
|
padding: 0 .5em;
|
||||||
}
|
}
|
||||||
|
|
||||||
h1 .pro-badge, h2 .pro-badge, h3 .pro-badge, h4 .pro-badge {
|
i.badge-pro::before {
|
||||||
margin-left: .25ch;
|
content: "pro";
|
||||||
|
color: #177bd3;
|
||||||
|
border-color: #177bd3;
|
||||||
|
text-transform: uppercase;
|
||||||
|
}
|
||||||
|
|
||||||
|
i.badge-v3::before {
|
||||||
|
content: "v3";
|
||||||
|
color: #ff7401;
|
||||||
|
border-color: #ff7401;
|
||||||
|
}
|
||||||
|
|
||||||
|
h1 i.badge, h2 i.badge, h3 i.badge, h4 i.badge, .sidebar-nav i.badge {
|
||||||
|
margin-left: .5ch;
|
||||||
}
|
}
|
||||||
|
|
||||||
.github-edit-btn {
|
.github-edit-btn {
|
||||||
|
127
docs/autoquality.md
Normal file
127
docs/autoquality.md
Normal file
@ -0,0 +1,127 @@
|
|||||||
|
# Autoquality<i class='badge badge-pro'></i><i class='badge badge-v3'></i>
|
||||||
|
|
||||||
|
imgproxy can calculate quality for your resulting images so they fit the selected metric the best. The supported methods are [none](#none), [size](#autoquality-by-file-size), [dssim](#autoquality-by-dssim), and [ml](#autoquality-with-ml).
|
||||||
|
|
||||||
|
**⚠️Warning:** Autoquality requires the image to be saved several times. Use it only when you prefer the resulting size and quality over the speed.
|
||||||
|
|
||||||
|
You can enable autoquality with [config](configuration.md#autoquality) (for all images) or with [processing options](generating_the_url.md#autoquality) (for each image individually).
|
||||||
|
|
||||||
|
## None
|
||||||
|
|
||||||
|
Disable the autoquality.
|
||||||
|
|
||||||
|
**Method name:** `none`
|
||||||
|
|
||||||
|
#### Config example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
IMGPROXY_AUTOQUALITY_METHOD="none"
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Processing options example
|
||||||
|
|
||||||
|
```
|
||||||
|
.../autoquality:none/...
|
||||||
|
```
|
||||||
|
|
||||||
|
## Autoquality by file size
|
||||||
|
|
||||||
|
With this method, imgproxy will try to degrade the quality so your image fit the desired file size.
|
||||||
|
|
||||||
|
**Method name:** `size`
|
||||||
|
|
||||||
|
**Target:** desired file size
|
||||||
|
|
||||||
|
#### Config example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
IMGPROXY_AUTOQUALITY_METHOD="size"
|
||||||
|
# Change value to the desired size in bytes
|
||||||
|
IMGPROXY_AUTOQUALITY_TARGET=10240
|
||||||
|
IMGPROXY_AUTOQUALITY_MIN=10
|
||||||
|
IMGPROXY_AUTOQUALITY_MAX=80
|
||||||
|
# Quality 50 for AVIF is pretty the same as 80 for JPEG
|
||||||
|
IMGPROXY_AUTOQUALITY_FORMAT_MAX="avif=50"
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Processing options example
|
||||||
|
|
||||||
|
```
|
||||||
|
.../autoquality:size:10240:10:80/...
|
||||||
|
```
|
||||||
|
|
||||||
|
## Autoquality by DSSIM
|
||||||
|
|
||||||
|
With this method imgproxy will try to select the quality so the saved image would have the desired [DSSIM](https://en.wikipedia.org/wiki/Structural_similarity#Structural_Dissimilarity) value.
|
||||||
|
|
||||||
|
**Method name:** `dssim`
|
||||||
|
|
||||||
|
**Target:** desired DSSIM value
|
||||||
|
|
||||||
|
#### Config example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
IMGPROXY_AUTOQUALITY_METHOD="dssim"
|
||||||
|
# Change value to the desired DSSIM
|
||||||
|
IMGPROXY_AUTOQUALITY_TARGET=0.02
|
||||||
|
# We're happy enough if the resulting DSSIM will differ from the desired by 0.001
|
||||||
|
IMGPROXY_AUTOQUALITY_ALLOWED_ERROR=0.001
|
||||||
|
IMGPROXY_AUTOQUALITY_MIN=70
|
||||||
|
IMGPROXY_AUTOQUALITY_MAX=80
|
||||||
|
# Quality 50 for AVIF is pretty the same as 80 for JPEG
|
||||||
|
IMGPROXY_AUTOQUALITY_FORMAT_MIN="avif=40"
|
||||||
|
IMGPROXY_AUTOQUALITY_FORMAT_MAX="avif=50"
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Processing options example
|
||||||
|
|
||||||
|
```
|
||||||
|
.../autoquality:dssim:0.02:70:80:0.001/...
|
||||||
|
```
|
||||||
|
|
||||||
|
## Autoquality with ML
|
||||||
|
|
||||||
|
This method is almost the same as [DSSIM](#autoquality-by-dssim) but imgproxy will try to predict the initial quality using neural networks. Requires neural networs to be configured (see the config examlpe or the config documentation). If a neural network for the resulting format is not provided, [DSSIM](#autoquality-by-dssim) method will be used instead.
|
||||||
|
|
||||||
|
**📝Note:** When this method is used, imgproxy will save JPEG images with the most optimal [advanced JPEG compression](configuration.md#advanced-jpeg-compression) settings ignoring config and processing options.
|
||||||
|
|
||||||
|
**Method name:** `ml`
|
||||||
|
|
||||||
|
**Target:** desired DSSIM value
|
||||||
|
|
||||||
|
#### Config example
|
||||||
|
|
||||||
|
```bash
|
||||||
|
IMGPROXY_AUTOQUALITY_METHOD="ml"
|
||||||
|
# Change value to the desired DSSIM
|
||||||
|
IMGPROXY_AUTOQUALITY_TARGET=0.02
|
||||||
|
# We're happy enough if the resulting DSSIM will differ from the desired by 0.001
|
||||||
|
IMGPROXY_AUTOQUALITY_ALLOWED_ERROR=0.001
|
||||||
|
IMGPROXY_AUTOQUALITY_MIN=70
|
||||||
|
IMGPROXY_AUTOQUALITY_MAX=80
|
||||||
|
# Quality 50 for AVIF is pretty the same as 80 for JPEG
|
||||||
|
IMGPROXY_AUTOQUALITY_FORMAT_MIN="avif=40"
|
||||||
|
IMGPROXY_AUTOQUALITY_FORMAT_MAX="avif=50"
|
||||||
|
# Neural networks paths for JPEG, WebP, and AVIF
|
||||||
|
IMGPROXY_AUTOQUALITY_JPEG_NET="/networks/autoquality-jpeg.pb"
|
||||||
|
IMGPROXY_AUTOQUALITY_WEBP_NET="/networks/autoquality-webp.pb"
|
||||||
|
IMGPROXY_AUTOQUALITY_AVIF_NET="/networks/autoquality-avif.pb"
|
||||||
|
```
|
||||||
|
|
||||||
|
**📝Note:** If you trust your autoquality neural network, you may want to set `IMGPROXY_AUTOQUALITY_ALLOWED_ERROR` to 1 (maximum possible DSSIM value). In this case, imgproxy will always use the quality predicted by the neural network.
|
||||||
|
|
||||||
|
#### Processing options example
|
||||||
|
|
||||||
|
```
|
||||||
|
.../autoquality:ml:0.02:70:80:0.001/...
|
||||||
|
```
|
||||||
|
|
||||||
|
### Neural networks format
|
||||||
|
|
||||||
|
Neural networs should fit the following requirements:
|
||||||
|
* Tensorflow frozen graph format.
|
||||||
|
* Input layer size is 416x416.
|
||||||
|
* Output layer size is 1x100
|
||||||
|
* Output layer values are logits of quality probabilities.
|
||||||
|
|
||||||
|
If you're an imgproxy Pro user and you want to train your own network but you don't know how, feel free to contact the imgproxy team for intructions.
|
58
docs/chained_pipelines.md
Normal file
58
docs/chained_pipelines.md
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
# Chained pipelines<i class='badge badge-pro'></i><i class='badge badge-v3'></i>
|
||||||
|
|
||||||
|
Though imgproxy's [processing pipeline](about_processing_pipeline.md) is suitable for most cases, sometimes it's handy to run multiple chained pipelines with different options.
|
||||||
|
|
||||||
|
imgproxy Pro allows you to start a new pipeline by inserting a section with a minus sign (`-`) to the URL path:
|
||||||
|
|
||||||
|
```
|
||||||
|
.../width:500/crop:1000/-/trim:10/...
|
||||||
|
^ the new pipeline starts here
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example 1: Multiple watermarks
|
||||||
|
|
||||||
|
If you need to place multiple watermarks on the same image, you can use chained pipelines for that:
|
||||||
|
|
||||||
|
```
|
||||||
|
.../rs:fit:500:500/wm:0.5:nowe/wmu:aW1hZ2UxCg/-/wm:0.7:soea/wmu:aW1hZ2UyCg/...
|
||||||
|
```
|
||||||
|
|
||||||
|
In this example, the first pipeline resizes the image and places the first watermark, and the second pipeline places the second watermark.
|
||||||
|
|
||||||
|
### Example 2: Fast trim
|
||||||
|
|
||||||
|
The `trim` operation is pretty heavy as it involves loading the whole image to the memory at the very start of processing. However, if you're going to scale down your image and the trim accuracy is not very important to you, it's better to move trimming to a separate pipeline.
|
||||||
|
|
||||||
|
```
|
||||||
|
.../rs:fit:500:500/-/trim:10/...
|
||||||
|
```
|
||||||
|
|
||||||
|
In this example, the first pipeline resizes the image, and the second pipeline trims the result. Since the result of the first pipeline is already resized and loaded to the memory, trimming will be done much faster.
|
||||||
|
|
||||||
|
## Using with presets
|
||||||
|
|
||||||
|
You can use presets in your chained pipelines, and you can use chained pipelines in your presets. However, the behaior may be not obvious. The rules are the following:
|
||||||
|
|
||||||
|
* Prest is applied to the pipeline where is was used.
|
||||||
|
* Preset may contain chained pipelined, and ones will be chained to the pipeline where the preset was used.
|
||||||
|
* Chained pipelines from the preset and from the URL are merged.
|
||||||
|
|
||||||
|
### Example
|
||||||
|
|
||||||
|
If we have the following preset
|
||||||
|
|
||||||
|
```
|
||||||
|
test=width:300/height:300/-/width:200/height:200/-/width:100/height:200
|
||||||
|
```
|
||||||
|
|
||||||
|
and the following URL
|
||||||
|
|
||||||
|
```
|
||||||
|
.../width:400/-/preset:test/width:500/-/width:600/...
|
||||||
|
```
|
||||||
|
|
||||||
|
The result will look like this:
|
||||||
|
|
||||||
|
```
|
||||||
|
.../width:400/-/width:500/height:300/-/width:600/height:200/-/width:100/height:200/...
|
||||||
|
```
|
58
docs/chaining_the_processing.md
Normal file
58
docs/chaining_the_processing.md
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
# Chaining the processing<i class='badge badge-pro'></i>
|
||||||
|
|
||||||
|
Though imgproxy's [processing pipeline](about_processing_pipeline.md) is suitable for most cases, sometimes it's handy to run multiple chained pipelines with different options.
|
||||||
|
|
||||||
|
imgproxy Pro allows you to start a new pipeline by inserting a section with a minus sign (`-`) to the URL path:
|
||||||
|
|
||||||
|
```
|
||||||
|
.../width:500/crop:1000/-/trim:10/...
|
||||||
|
^ the new pipeline starts here
|
||||||
|
```
|
||||||
|
|
||||||
|
### Example 1: Multiple watermarks
|
||||||
|
|
||||||
|
If you need to place multiple watermarks on the same image, you can use chained pipelines for that:
|
||||||
|
|
||||||
|
```
|
||||||
|
.../rs:fit:500:500/wm:0.5:nowe/wmu:aW1hZ2UxCg/-/wm:0.7:soea/wmu:aW1hZ2UyCg/...
|
||||||
|
```
|
||||||
|
|
||||||
|
In this example, the first pipeline resizes the image and places the first watermark, and the second pipeline places the second watermark.
|
||||||
|
|
||||||
|
### Example 2: Fast trim
|
||||||
|
|
||||||
|
The `trim` operation is pretty heavy as it involves loading the whole image to the memory at the very start of processing. However, if you're going to scale down your image and the trim accuracy is not very important to you, it's better to move trimming to a separate pipeline.
|
||||||
|
|
||||||
|
```
|
||||||
|
.../rs:fit:500:500/-/trim:10/...
|
||||||
|
```
|
||||||
|
|
||||||
|
In this example, the first pipeline resizes the image, and the second pipeline trims the result. Since the result of the first pipeline is already resized and loaded to the memory, trimming will be done much faster.
|
||||||
|
|
||||||
|
## Using with presets
|
||||||
|
|
||||||
|
You can use presets in your chained pipelines, and you can use chained pipelines in your presets. However, the behaior may be not obvious. The rules are the following:
|
||||||
|
|
||||||
|
* Prest is applied to the pipeline where is was used.
|
||||||
|
* Preset may contain chained pipelined, and ones will be chained to the pipeline where the preset was used.
|
||||||
|
* Chained pipelines from the preset and from the URL are merged.
|
||||||
|
|
||||||
|
### Example
|
||||||
|
|
||||||
|
If we have the following preset
|
||||||
|
|
||||||
|
```
|
||||||
|
test=width:300/height:300/-/width:200/height:200/-/width:100/height:200
|
||||||
|
```
|
||||||
|
|
||||||
|
and the following URL
|
||||||
|
|
||||||
|
```
|
||||||
|
.../width:400/-/preset:test/width:500/-/width:600/...
|
||||||
|
```
|
||||||
|
|
||||||
|
The result will look like this:
|
||||||
|
|
||||||
|
```
|
||||||
|
.../width:400/-/width:500/height:300/-/width:600/height:200/-/width:100/height:200/...
|
||||||
|
```
|
@ -41,10 +41,13 @@ echo $(xxd -g 2 -l 64 -p /dev/random | tr -d '\n')
|
|||||||
* `IMGPROXY_PATH_PREFIX`: URL path prefix. Example: when set to `/abc/def`, imgproxy URL will be `/abc/def/%signature/%processing_options/%source_url`. Default: blank.
|
* `IMGPROXY_PATH_PREFIX`: URL path prefix. Example: when set to `/abc/def`, imgproxy URL will be `/abc/def/%signature/%processing_options/%source_url`. Default: blank.
|
||||||
* `IMGPROXY_USER_AGENT`: User-Agent header that will be sent with source image request. Default: `imgproxy/%current_version`;
|
* `IMGPROXY_USER_AGENT`: User-Agent header that will be sent with source image request. Default: `imgproxy/%current_version`;
|
||||||
* `IMGPROXY_USE_ETAG`: when `true`, enables using [ETag](https://en.wikipedia.org/wiki/HTTP_ETag) HTTP header for HTTP cache control. Default: false;
|
* `IMGPROXY_USE_ETAG`: when `true`, enables using [ETag](https://en.wikipedia.org/wiki/HTTP_ETag) HTTP header for HTTP cache control. Default: false;
|
||||||
* `IMGPROXY_CUSTOM_REQUEST_HEADERS`: <img class='pro-badge' src='assets/pro.svg' alt='pro' /> list of custom headers that imgproxy will send while requesting the source image, divided by `\;` (can be redefined by `IMGPROXY_CUSTOM_HEADERS_SEPARATOR`). Example: `X-MyHeader1=Lorem\;X-MyHeader2=Ipsum`;
|
* `IMGPROXY_CUSTOM_REQUEST_HEADERS`: <i class='badge badge-pro'></i> list of custom headers that imgproxy will send while requesting the source image, divided by `\;` (can be redefined by `IMGPROXY_CUSTOM_HEADERS_SEPARATOR`). Example: `X-MyHeader1=Lorem\;X-MyHeader2=Ipsum`;
|
||||||
* `IMGPROXY_CUSTOM_RESPONSE_HEADERS`: <img class='pro-badge' src='assets/pro.svg' alt='pro' /> list of custom response headers, divided by `\;` (can be redefined by `IMGPROXY_CUSTOM_HEADERS_SEPARATOR`). Example: `X-MyHeader1=Lorem\;X-MyHeader2=Ipsum`;
|
* `IMGPROXY_CUSTOM_RESPONSE_HEADERS`: <i class='badge badge-pro'></i> list of custom response headers, divided by `\;` (can be redefined by `IMGPROXY_CUSTOM_HEADERS_SEPARATOR`). Example: `X-MyHeader1=Lorem\;X-MyHeader2=Ipsum`;
|
||||||
* `IMGPROXY_CUSTOM_HEADERS_SEPARATOR`: <img class='pro-badge' src='assets/pro.svg' alt='pro' /> string that will be used as a custom headers separator. Default: `\;`;
|
* `IMGPROXY_CUSTOM_HEADERS_SEPARATOR`: <i class='badge badge-pro'></i> string that will be used as a custom headers separator. Default: `\;`;
|
||||||
* `IMGPROXY_ENABLE_DEBUG_HEADERS`: when `true`, imgproxy will add `X-Origin-Content-Length` header with the value is size of the source image. Default: `false`.
|
* `IMGPROXY_ENABLE_DEBUG_HEADERS`: when `true`, imgproxy will add debug headers to the response. Default: `false`. The following headers will be added:
|
||||||
|
* `X-Origin-Content-Length`: size of the source image.
|
||||||
|
* `X-Origin-Width`: width of the source image.
|
||||||
|
* `X-Origin-Height`: height of the source image.
|
||||||
|
|
||||||
## Security
|
## Security
|
||||||
|
|
||||||
@ -85,20 +88,30 @@ Also you may want imgproxy to respond with the same error message that it writes
|
|||||||
|
|
||||||
* `IMGPROXY_DEVELOPMENT_ERRORS_MODE`: when true, imgproxy will respond with detailed error messages. Not recommended for production because some errors may contain stack trace.
|
* `IMGPROXY_DEVELOPMENT_ERRORS_MODE`: when true, imgproxy will respond with detailed error messages. Not recommended for production because some errors may contain stack trace.
|
||||||
|
|
||||||
|
## Cookies
|
||||||
|
|
||||||
|
imgproxy can pass through cookies in image requests. This can be activated with `IMGPROXY_COOKIE_PASSTHROUGH`. Unfortunately a `Cookie` header doesn't contain information for which URLs these cookies are applicable, so imgproxy can only assume (or must be told).
|
||||||
|
|
||||||
|
When cookie forwarding is activated, imgproxy by default assumes the scope of the cookies to be all URLs with the same hostname/port and request scheme as given by the headers `X-Forwarded-Host`, `X-Forwarded-Port`, `X-Forwarded-Scheme` or `Host`. To change that use `IMGPROXY_COOKIE_BASE_URL`.
|
||||||
|
|
||||||
|
* `IMGPROXY_COOKIE_PASSTHROUGH`: when `true`, incoming cookies will be passed through to the image request if they are applicable for the image URL. Default: false;
|
||||||
|
|
||||||
|
* `IMGPROXY_COOKIE_BASE_URL`: when set, assume that cookies have a scope of this URL for the incoming request (instead of using the request headers). If the cookies are applicable to the image URL too, they will be passed along in the image request.
|
||||||
|
|
||||||
|
|
||||||
## Compression
|
## Compression
|
||||||
|
|
||||||
* `IMGPROXY_QUALITY`: default quality of the resulting image, percentage. Default: `80`;
|
* `IMGPROXY_QUALITY`: default quality of the resulting image, percentage. Default: `80`;
|
||||||
* `IMGPROXY_FORMAT_QUALITY`: default quality of the resulting image per format, comma divided. Example: `jpeg=70,avif=40,webp=60`. When value for the resulting format is not set, `IMGPROXY_QUALITY` value is used. Default: `avif=50`.
|
* `IMGPROXY_FORMAT_QUALITY`: default quality of the resulting image per format, comma divided. Example: `jpeg=70,avif=40,webp=60`. When value for the resulting format is not set, `IMGPROXY_QUALITY` value is used. Default: `avif=50`.
|
||||||
* `IMGPROXY_GZIP_COMPRESSION`: GZip compression level. Default: `5`.
|
|
||||||
|
|
||||||
### Advanced JPEG compression
|
### Advanced JPEG compression
|
||||||
|
|
||||||
* `IMGPROXY_JPEG_PROGRESSIVE`: when true, enables progressive JPEG compression. Default: false;
* `IMGPROXY_JPEG_NO_SUBSAMPLE`: <i class='badge badge-pro'></i> when true, chrominance subsampling is disabled. This will improve quality at the cost of a larger file size. Default: false;
* `IMGPROXY_JPEG_TRELLIS_QUANT`: <i class='badge badge-pro'></i> when true, enables trellis quantisation for each 8x8 block. Reduces file size but increases compression time. Default: false;
* `IMGPROXY_JPEG_OVERSHOOT_DERINGING`: <i class='badge badge-pro'></i> when true, enables overshooting of samples with extreme values. Overshooting may reduce ringing artifacts from compression, in particular in areas where black text appears on a white background. Default: false;
* `IMGPROXY_JPEG_OPTIMIZE_SCANS`: <i class='badge badge-pro'></i> when true, splits the spectrum of DCT coefficients into separate scans. Reduces file size but increases compression time. Requires `IMGPROXY_JPEG_PROGRESSIVE` to be true. Default: false;
* `IMGPROXY_JPEG_QUANT_TABLE`: <i class='badge badge-pro'></i> quantization table to use. Supported values are:
* `0`: Table from JPEG Annex K (default);
* `1`: Flat table;
* `2`: Table tuned for MSSIM on Kodak image set;
@ -119,13 +132,31 @@ Also you may want imgproxy to respond with the same error message that it writes
### Advanced GIF compression
* `IMGPROXY_GIF_OPTIMIZE_FRAMES`: <i class='badge badge-pro'></i> when true, enables GIF frames optimization. This may produce a smaller result, but may increase compression time.
* `IMGPROXY_GIF_OPTIMIZE_TRANSPARENCY`: <i class='badge badge-pro'></i> when true, enables GIF transparency optimization. This may produce a smaller result, but may increase compression time.

### Advanced AVIF compression
* `IMGPROXY_AVIF_SPEED`: controls the CPU effort spent improving compression, from `0` (slowest) to `8` (fastest). Default: `5`.

### Autoquality

imgproxy can calculate the quality of the resulting image based on the selected metric. Read more in the [Autoquality](autoquality.md) guide.

**⚠️Warning:** Autoquality requires the image to be saved several times. Use it only when you prefer the resulting size and quality over speed.

* `IMGPROXY_AUTOQUALITY_METHOD`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> the method of quality calculation. Default: `none`.
* `IMGPROXY_AUTOQUALITY_TARGET`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> desired value of the autoquality method metric. Default: 0.02.
* `IMGPROXY_AUTOQUALITY_MIN`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> minimal quality imgproxy can use. Default: 70.
* `IMGPROXY_AUTOQUALITY_FORMAT_MIN`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> minimal quality imgproxy can use per format, comma divided. Example: `jpeg=70,avif=40,webp=60`. When a value for the resulting format is not set, the `IMGPROXY_AUTOQUALITY_MIN` value is used. Default: `avif=40`.
* `IMGPROXY_AUTOQUALITY_MAX`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> maximal quality imgproxy can use. Default: 80.
* `IMGPROXY_AUTOQUALITY_FORMAT_MAX`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> maximal quality imgproxy can use per format, comma divided. Example: `jpeg=70,avif=40,webp=60`. When a value for the resulting format is not set, the `IMGPROXY_AUTOQUALITY_MAX` value is used. Default: `avif=50`.
* `IMGPROXY_AUTOQUALITY_ALLOWED_ERROR`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> allowed `IMGPROXY_AUTOQUALITY_TARGET` error. Applicable only to the `dssim` and `ml` methods. Default: 0.001.
* `IMGPROXY_AUTOQUALITY_MAX_RESOLUTION`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> when the value is greater than zero and the result resolution exceeds it, autoquality won't be used. Default: 0.
* `IMGPROXY_AUTOQUALITY_JPEG_NET`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> path to the neural network for JPEG.
* `IMGPROXY_AUTOQUALITY_WEBP_NET`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> path to the neural network for WebP.
* `IMGPROXY_AUTOQUALITY_AVIF_NET`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> path to the neural network for AVIF.
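
For example, a DSSIM-based setup could look like this (values are illustrative, not recommendations):

```
IMGPROXY_AUTOQUALITY_METHOD=dssim
IMGPROXY_AUTOQUALITY_TARGET=0.02
IMGPROXY_AUTOQUALITY_MIN=60
IMGPROXY_AUTOQUALITY_MAX=80
```
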
## AVIF/WebP support detection

imgproxy can use the `Accept` HTTP header to detect if the browser supports AVIF or WebP and use it as the default format. This feature is disabled by default and can be enabled by the following options:
@ -155,10 +186,10 @@ imgproxy can use the `Width`, `Viewport-Width` or `DPR` HTTP headers to determin
imgproxy Pro can extract specific frames of videos to create thumbnails. The feature is disabled by default, but can be enabled with `IMGPROXY_ENABLE_VIDEO_THUMBNAILS`.

* `IMGPROXY_ENABLE_VIDEO_THUMBNAILS`: <i class='badge badge-pro'></i> when true, enables video thumbnails generation. Default: false;
* `IMGPROXY_VIDEO_THUMBNAIL_SECOND`: <i class='badge badge-pro'></i> the timestamp of the frame in seconds that will be used for a thumbnail. Default: 1.
* `IMGPROXY_VIDEO_THUMBNAIL_PROBE_SIZE`: <i class='badge badge-pro'></i> the maximum amount of bytes used to determine the format. Lower values can decrease memory usage but can produce inaccurate data or even lead to errors. Default: 5000000.
* `IMGPROXY_VIDEO_THUMBNAIL_MAX_ANALYZE_DURATION`: <i class='badge badge-pro'></i> the maximum number of milliseconds used to get the stream info. Low values can decrease memory usage but can produce inaccurate data or even lead to errors. When set to 0, the heuristic is used. Default: 0.

**⚠️Warning:** Though using `IMGPROXY_VIDEO_THUMBNAIL_PROBE_SIZE` and `IMGPROXY_VIDEO_THUMBNAIL_MAX_ANALYZE_DURATION` can lower the memory footprint of video thumbnails generation, you should use them in production only when you know what you are doing.
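
For example, enabling thumbnails and taking the frame at the 5-second mark could look like this (illustrative values):

```
IMGPROXY_ENABLE_VIDEO_THUMBNAILS=true
IMGPROXY_VIDEO_THUMBNAIL_SECOND=5
```
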
@ -168,7 +199,7 @@ imgproxy Pro can extract specific frames of videos to create thumbnails. The fea
* `IMGPROXY_WATERMARK_PATH`: path to the locally stored image;
* `IMGPROXY_WATERMARK_URL`: watermark image URL;
* `IMGPROXY_WATERMARK_OPACITY`: watermark base opacity;
* `IMGPROXY_WATERMARKS_CACHE_SIZE`: <i class='badge badge-pro'></i> size of custom watermarks cache. When set to `0`, watermarks cache is disabled. By default 256 watermarks are cached.

Read more about watermarks in the [Watermark](watermark.md) guide.

@ -176,12 +207,23 @@ Read more about watermarks in the [Watermark](watermark.md) guide.

imgproxy Pro can apply an unsharpening mask to your images.

* `IMGPROXY_UNSHARPENING_MODE`: <i class='badge badge-pro'></i> controls when the unsharpening mask should be applied. The following modes are supported:
* `auto`: _(default)_ apply the unsharpening mask only when the image is downscaled and the `sharpen` option is not set.
* `none`: don't apply the unsharpening mask.
* `always`: always apply the unsharpening mask.
* `IMGPROXY_UNSHARPENING_WEIGHT`: <i class='badge badge-pro'></i> a floating-point number that defines how neighbor pixels will affect the current pixel. The greater the value, the sharper the image. Should be greater than zero. Default: `1`.
* `IMGPROXY_UNSHARPENING_DIVIDOR`: <i class='badge badge-pro'></i> a floating-point number that defines the unsharpening strength. The lesser the value, the sharper the image. Should be greater than zero. Default: `24`.

## Object detection

imgproxy can detect objects on the image and use them for smart crop, blurring the detections, or drawing the detections.

* `IMGPROXY_OBJECT_DETECTION_CONFIG`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> path to the neural network config. Default: blank.
* `IMGPROXY_OBJECT_DETECTION_WEIGHTS`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> path to the neural network weights. Default: blank.
* `IMGPROXY_OBJECT_DETECTION_CLASSES`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> path to the text file with the class names, one per line. Default: blank.
* `IMGPROXY_OBJECT_DETECTION_NET_SIZE`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> the size of the neural network input. The width and the height of the input should be the same, so this config value should be a single number. Default: 416.
* `IMGPROXY_OBJECT_DETECTION_CONFIDENCE_THRESHOLD`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> detections with confidences below this value will be discarded. Default: 0.2.
* `IMGPROXY_OBJECT_DETECTION_NMS_THRESHOLD`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> non-max suppression threshold. Don't change this if you don't know what you're doing. Default: 0.4.
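
As an illustration, a YOLOv4-style setup could be configured like this (all paths are placeholders):

```
IMGPROXY_OBJECT_DETECTION_CONFIG=/opt/detection/yolov4.cfg
IMGPROXY_OBJECT_DETECTION_WEIGHTS=/opt/detection/yolov4.weights
IMGPROXY_OBJECT_DETECTION_CLASSES=/opt/detection/coco.names
IMGPROXY_OBJECT_DETECTION_NET_SIZE=416
```
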
## Fallback image
@ -190,6 +232,8 @@ You can set up a fallback image that will be used in case imgproxy can't fetch t
* `IMGPROXY_FALLBACK_IMAGE_DATA`: Base64-encoded image data. You can easily calculate it with `base64 tmp/fallback.png | tr -d '\n'`;
* `IMGPROXY_FALLBACK_IMAGE_PATH`: path to the locally stored image;
* `IMGPROXY_FALLBACK_IMAGE_URL`: fallback image URL.
* `IMGPROXY_FALLBACK_IMAGE_HTTP_CODE`: <i class='badge badge-v3'></i> HTTP code for the fallback image response. When set to zero, imgproxy will respond with the usual HTTP code. Default: `200`.
* `IMGPROXY_FALLBACK_IMAGES_CACHE_SIZE`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> size of custom fallback images cache. When set to `0`, fallback images cache is disabled. By default 256 fallback images are cached.
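
A typical setup might point at a locally stored placeholder and keep the usual HTTP code for errors (values are illustrative):

```
IMGPROXY_FALLBACK_IMAGE_PATH=/images/fallback.png
IMGPROXY_FALLBACK_IMAGE_HTTP_CODE=0
```
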
## Skip processing
@ -199,7 +243,7 @@ You can configure imgproxy to skip processing of some formats:

**📝Note:** Processing can be skipped only when the requested format is the same as the source format.

**📝Note:** Video thumbnail processing can't be skipped.

## Presets
@ -289,6 +333,14 @@ imgproxy can collect its metrics for Prometheus. Specify binding for Prometheus

Check out the [Prometheus](prometheus.md) guide to learn more.

## Datadog metrics

imgproxy can send its metrics to Datadog:

* `IMGPROXY_DATADOG_ENABLE`: <i class='badge badge-v3'></i> when `true`, enables sending metrics to Datadog. Default: false;

Check out the [Datadog](datadog.md) guide to learn more.

## Error reporting

imgproxy can report errors that occur to Bugsnag, Honeybadger, and Sentry:

27 docs/datadog.md Normal file
@ -0,0 +1,27 @@

# Datadog<i class='badge badge-v3'></i>

imgproxy can send its metrics to Datadog. To use this feature, do the following:

1. Install & configure the Datadog Trace Agent (>= 5.21.1);
2. Set the `IMGPROXY_DATADOG_ENABLE` environment variable to `true`;
3. Configure the Datadog tracer using `ENV` variables provided by [the package](https://github.com/DataDog/dd-trace-go):

* `DD_AGENT_HOST` – sets the address to connect to for sending metrics to the Datadog Agent. Default: `localhost`
* `DD_TRACE_AGENT_PORT` – sets the Datadog Agent Trace port. Default: `8126`
* `DD_DOGSTATSD_PORT` – sets the DogStatsD port. Default: `8125`
* `DD_SERVICE` – sets the desired application name. Default: `imgproxy`
* `DD_ENV` – sets the environment to which all traces will be submitted. Default: empty
* `DD_TRACE_SOURCE_HOSTNAME` – allows specifying the hostname with which to mark outgoing traces. Default: empty
* `DD_TRACE_REPORT_HOSTNAME` – when `true`, sets the hostname to `os.Hostname()` with which to mark outgoing traces. Default: `false`
* `DD_TAGS` – sets a key/value pair which will be set as a tag on all traces. Example: `DD_TAGS=datacenter:njc,key2:value2`. Default: empty
* `DD_TRACE_ANALYTICS_ENABLED` – allows specifying whether Trace Search & Analytics should be enabled for integrations. Default: `false`
* `DD_RUNTIME_METRICS_ENABLED` – enables automatic collection of runtime metrics every 10 seconds. Default: `false`
* `DD_TRACE_STARTUP_LOGS` – causes various startup info to be written when the tracer starts. Default: `true`
* `DD_TRACE_DEBUG` – enables detailed logs. Default: `false`

imgproxy will send the following info to Datadog:

* Response time;
* Image downloading time;
* Image processing time;
* Errors that occurred while downloading and processing the image.
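
A minimal, illustrative setup could export something like this before starting imgproxy (hostnames and values are placeholders):

```
IMGPROXY_DATADOG_ENABLE=true
DD_AGENT_HOST=datadog-agent.local
DD_SERVICE=imgproxy
DD_ENV=production
```
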
@ -1,10 +1,6 @@

# Generating the URL

The URL should contain the signature, processing options, and source URL, like this:

```
/%signature/%processing_options/plain/%source_url@%extension
@ -13,13 +9,13 @@ The advanced URL should contain the signature, processing options, and source UR

Check out the [example](#example) at the end of this guide.

## Signature

A signature protects your URL from being altered by an attacker. It is highly recommended to sign imgproxy URLs in production.

Once you set up your [URL signature](configuration.md#url-signature), check out the [Signing the URL](signing_the_url.md) guide to learn how to sign your URLs. Otherwise, use any string here.
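
As a reminder of what gets signed, here is a minimal Go sketch of the HMAC-SHA256 scheme described in the [Signing the URL](signing_the_url.md) guide; the key, salt, and path below are placeholders, not real credentials:

```go
package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
	"encoding/hex"
	"fmt"
)

func main() {
	// Placeholder hex-encoded key and salt (IMGPROXY_KEY / IMGPROXY_SALT).
	key, _ := hex.DecodeString("736563726574")
	salt, _ := hex.DecodeString("68656c6c6f")

	// The path being signed: processing options plus the source URL.
	path := "/rs:fill:300:400/plain/http://example.com/images/curiosity.jpg@png"

	// HMAC-SHA256 over salt + path, encoded with unpadded URL-safe Base64.
	mac := hmac.New(sha256.New, key)
	mac.Write(salt)
	mac.Write([]byte(path))
	signature := base64.RawURLEncoding.EncodeToString(mac.Sum(nil))

	fmt.Printf("/%s%s\n", signature, path)
}
```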

## Processing options

Processing options should be specified as URL parts divided by slashes (`/`). A processing option has the following format:
@ -31,7 +27,7 @@ The list of processing options does not define imgproxy's processing pipeline. I
|
|||||||
|
|
||||||
imgproxy supports the following processing options:
|
imgproxy supports the following processing options:
|
||||||
|
|
||||||
#### Resize
|
### Resize
|
||||||
|
|
||||||
```
|
```
|
||||||
resize:%resizing_type:%width:%height:%enlarge:%extend
|
resize:%resizing_type:%width:%height:%enlarge:%extend
|
||||||
@ -40,7 +36,7 @@ rs:%resizing_type:%width:%height:%enlarge:%extend
|
|||||||
|
|
||||||
Meta-option that defines the [resizing type](#resizing-type), [width](#width), [height](#height), [enlarge](#enlarge), and [extend](#extend). All arguments are optional and can be omitted to use their default values.
|
Meta-option that defines the [resizing type](#resizing-type), [width](#width), [height](#height), [enlarge](#enlarge), and [extend](#extend). All arguments are optional and can be omitted to use their default values.
|
||||||
|
|
||||||
#### Size
|
### Size
|
||||||
|
|
||||||
```
|
```
|
||||||
size:%width:%height:%enlarge:%extend
|
size:%width:%height:%enlarge:%extend
|
||||||
@ -49,7 +45,7 @@ s:%width:%height:%enlarge:%extend
|
|||||||
|
|
||||||
Meta-option that defines the [width](#width), [height](#height), [enlarge](#enlarge), and [extend](#extend). All arguments are optional and can be omitted to use their default values.
|
Meta-option that defines the [width](#width), [height](#height), [enlarge](#enlarge), and [extend](#extend). All arguments are optional and can be omitted to use their default values.
|
||||||
|
|
||||||
#### Resizing type
|
### Resizing type
|
||||||
|
|
||||||
```
|
```
|
||||||
resizing_type:%resizing_type
|
resizing_type:%resizing_type
|
||||||
@ -60,11 +56,13 @@ Defines how imgproxy will resize the source image. Supported resizing types are:
|
|||||||
|
|
||||||
* `fit`: resizes the image while keeping aspect ratio to fit given size;
|
* `fit`: resizes the image while keeping aspect ratio to fit given size;
|
||||||
* `fill`: resizes the image while keeping aspect ratio to fill given size and cropping projecting parts;
|
* `fill`: resizes the image while keeping aspect ratio to fill given size and cropping projecting parts;
|
||||||
|
* `fill-down`: <i class='badge badge-v3'></i> same as `fill`, but if the resized image is smaller than the requested size, imgproxy will crop the result to keep the requested aspect ratio;
|
||||||
|
* `force`: resizes the image without keeping aspect ratio;
|
||||||
* `auto`: if both source and resulting dimensions have the same orientation (portrait or landscape), imgproxy will use `fill`. Otherwise, it will use `fit`.
|
* `auto`: if both source and resulting dimensions have the same orientation (portrait or landscape), imgproxy will use `fill`. Otherwise, it will use `fit`.
|
||||||
|
|
||||||
Default: `fit`
|
Default: `fit`
|
||||||
|
|
||||||
#### Resizing algorithm<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=resizing-algorithm
|
### Resizing algorithm<i class='badge badge-pro'></i> :id=resizing-algorithm
|
||||||
|
|
||||||
```
|
```
|
||||||
resizing_algorithm:%algorithm
|
resizing_algorithm:%algorithm
|
||||||
@ -75,29 +73,55 @@ Defines the algorithm that imgproxy will use for resizing. Supported algorithms
|
|||||||
|
|
||||||
Default: `lanczos3`
|
Default: `lanczos3`
|
||||||
|
|
||||||
#### Width
|
### Width
|
||||||
|
|
||||||
```
|
```
|
||||||
width:%width
|
width:%width
|
||||||
w:%width
|
w:%width
|
||||||
```
|
```
|
||||||
|
|
||||||
Defines the width of the resulting image. When set to `0`, imgproxy will calculate the resulting width using the defined height and source aspect ratio. When set to `0` and the resizing type is `force`, imgproxy will keep the original width.
|
|
||||||
Default: `0`
|
Default: `0`
|
||||||
|
|
||||||
#### Height
|
### Height
|
||||||
|
|
||||||
```
|
```
|
||||||
height:%height
|
height:%height
|
||||||
h:%height
|
h:%height
|
||||||
```
|
```
|
||||||
|
|
||||||
Defines the height of the resulting image. When set to `0`, imgproxy will calculate the resulting height using the defined width and source aspect ratio. When set to `0` and the resizing type is `force`, imgproxy will keep the original height.
|
|
||||||
Default: `0`
|
Default: `0`
|
||||||
|
|
||||||
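
For instance, the following URL fragment (unsigned, illustrative) requests a 300 px wide `fit` resize and lets the height follow the source aspect ratio:

```
/%signature/rs:fit:300:0/plain/http://example.com/images/curiosity.jpg
```
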
#### Dpr
|
### Min width<i class='badge badge-v3'></i> :id=min-width
|
||||||
|
|
||||||
|
```
|
||||||
|
min-width:%width
|
||||||
|
mw:%width
|
||||||
|
```
|
||||||
|
|
||||||
|
Defines the minimum width of the resulting image.
|
||||||
|
|
||||||
|
**⚠️Warning:** When both `width` and `min-width` are set, the final image will be cropped according to `width`, so use this combination with care.
|
||||||
|
|
||||||
|
Default: `0`
|
||||||
|
|
||||||
|
### Min height<i class='badge badge-v3'></i> :id=min-height
|
||||||
|
|
||||||
|
```
|
||||||
|
min-height:%height
|
||||||
|
mh:%height
|
||||||
|
```
|
||||||
|
|
||||||
|
Defines the minimum height of the resulting image.
|
||||||
|
|
||||||
|
**⚠️Warning:** When both `height` and `min-height` are set, the final image will be cropped according to `height`, so use this combination with care.
|
||||||
|
|
||||||
|
Default: `0`
|
||||||
|
|
||||||
|
### Dpr
|
||||||
|
|
||||||
```
|
```
|
||||||
dpr:%dpr
|
dpr:%dpr
|
||||||
@ -107,7 +131,7 @@ When set, imgproxy will multiply the image dimensions according to this factor f
|
|||||||
|
|
||||||
Default: `1`
|
Default: `1`
|
||||||
|
|
||||||
#### Enlarge
|
### Enlarge
|
||||||
|
|
||||||
```
|
```
|
||||||
enlarge:%enlarge
|
enlarge:%enlarge
|
||||||
@ -118,7 +142,7 @@ When set to `1`, `t` or `true`, imgproxy will enlarge the image if it is smaller
|
|||||||
|
|
||||||
Default: false
|
Default: false
|
||||||
|
|
||||||
#### Extend
|
### Extend
|
||||||
|
|
||||||
```
|
```
|
||||||
extend:%extend:%gravity
|
extend:%extend:%gravity
|
||||||
@ -130,7 +154,7 @@ ex:%extend:%gravity
|
|||||||
|
|
||||||
Default: `false:ce:0:0`
|
Default: `false:ce:0:0`
|
||||||
|
|
||||||
#### Gravity
|
### Gravity
|
||||||
|
|
||||||
```
|
```
|
||||||
gravity:%type:%x_offset:%y_offset
|
gravity:%type:%x_offset:%y_offset
|
||||||
@ -155,10 +179,11 @@ Default: `ce:0:0`
|
|||||||
|
|
||||||
**Special gravities**:
|
**Special gravities**:
|
||||||
|
|
||||||
* `gravity:sm`: smart gravity. `libvips` detects the most "interesting" section of the image and considers it as the center of the resulting image. Offsets are not applicable here;
* `gravity:obj:%class_name1:%class_name2:...:%class_nameN`: <i class='badge badge-pro'></i> <i class='badge badge-v3'></i> object-oriented gravity. imgproxy [detects objects](object_detection.md) of the provided classes on the image and calculates the resulting image center using their positions. If class names are omitted, imgproxy will use all the detected objects.
* `gravity:fp:%x:%y`: focus point gravity. `x` and `y` are floating point numbers between 0 and 1 that define the coordinates of the center of the resulting image. Treat 0 and 1 as right/left for `x` and top/bottom for `y`.
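
For example, a focus point slightly right of and above the center could be requested like this (unsigned, illustrative):

```
/%signature/rs:fill:300:400/g:fp:0.65:0.35/plain/http://example.com/images/curiosity.jpg
```
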
|
|
||||||
#### Crop
|
### Crop
|
||||||
|
|
||||||
```
|
```
|
||||||
crop:%width:%height:%gravity
|
crop:%width:%height:%gravity
|
||||||
@ -173,25 +198,7 @@ Defines an area of the image to be processed (crop before resize).
|
|||||||
* When `width` or `height` is set to `0`, imgproxy will use the full width/height of the source image.
|
* When `width` or `height` is set to `0`, imgproxy will use the full width/height of the source image.
|
||||||
* `gravity` _(optional)_ accepts the same values as [gravity](#gravity) option. When `gravity` is not set, imgproxy will use the value of the [gravity](#gravity) option.
|
* `gravity` _(optional)_ accepts the same values as [gravity](#gravity) option. When `gravity` is not set, imgproxy will use the value of the [gravity](#gravity) option.
|
||||||
|
|
||||||
#### Padding
|
### Trim
|
||||||
|
|
||||||
```
|
|
||||||
padding:%top:%right:%bottom:%left
|
|
||||||
pd:%top:%right:%bottom:%left
|
|
||||||
```
|
|
||||||
|
|
||||||
Defines padding size in css manner. All arguments are optional but at least one dimension must be set. Padded space is filled according to [background](#background) option.
|
|
||||||
|
|
||||||
* `top` - top padding (and all other sides if they won't be set explicitly);
|
|
||||||
* `right` - right padding (and left if it won't be set explicitly);
|
|
||||||
* `bottom` - bottom padding;
|
|
||||||
* `left` - left padding.
|
|
||||||
|
|
||||||
**📝Note:** Padding is applied after all image transformations (except watermark) and enlarges generated image which means that if your resize dimensions were 100x200px and you applied `padding:10` option then you will get 120x220px image.
|
|
||||||
|
|
||||||
**📝Note:** Padding follows [dpr](#dpr) option so it will be scaled too if you set it.
|
|
||||||
|
|
||||||
#### Trim
|
|
||||||
|
|
||||||
```
|
```
|
||||||
trim:%threshold:%color:%equal_hor:%equal_ver
|
trim:%threshold:%color:%equal_hor:%equal_ver
|
||||||
@ -213,7 +220,34 @@ Removes surrounding background.
|
|||||||
|
|
||||||
**📝Note:** Trimming of animated images is not supported.
|
**📝Note:** Trimming of animated images is not supported.
|
||||||
|
|
||||||
#### Rotate
|
### Padding
|
||||||
|
|
||||||
|
```
|
||||||
|
padding:%top:%right:%bottom:%left
|
||||||
|
pd:%top:%right:%bottom:%left
|
||||||
|
```
|
||||||
|
|
||||||
|
Defines padding size in css manner. All arguments are optional but at least one dimension must be set. Padded space is filled according to [background](#background) option.
|
||||||
|
|
||||||
|
* `top` - top padding (and all other sides if they won't be set explicitly);
|
||||||
|
* `right` - right padding (and left if it won't be set explicitly);
|
||||||
|
* `bottom` - bottom padding;
|
||||||
|
* `left` - left padding.
|
||||||
|
|
||||||
|
**📝Note:** Padding is applied after all image transformations (except watermark) and enlarges generated image which means that if your resize dimensions were 100x200px and you applied `padding:10` option then you will get 120x220px image.
|
||||||
|
|
||||||
|
**📝Note:** Padding follows [dpr](#dpr) option so it will be scaled too if you set it.
|
||||||
|
|
||||||
|
### Auto Rotate
|
||||||
|
|
||||||
|
```
|
||||||
|
auto_rotate:%auto_rotate
|
||||||
|
ar:%auto_rotate
|
||||||
|
```
|
||||||
|
|
||||||
|
When set to `1`, `t` or `true`, imgproxy will automatically rotate images based on the EXIF Orientation parameter (if available in the image metadata). The orientation tag will be removed from the image anyway. Normally this is controlled by the [IMGPROXY_AUTO_ROTATE](configuration.md#miscellaneous) configuration but this processing option allows the configuration to be set for each request.
|
||||||
|
|
||||||
|
### Rotate
|
||||||
|
|
||||||
```
|
```
|
||||||
rotate:%angle
|
rotate:%angle
|
||||||
@ -224,35 +258,9 @@ Rotates the image on the specified angle. The orientation from the image metadat
|
|||||||
|
|
||||||
**📝Note:** Only 0/90/180/270/etc degrees angles are supported.
|
**📝Note:** Only 0/90/180/270/etc degrees angles are supported.
|
||||||
|
|
||||||
Default: 0.
|
|
||||||
|
|
||||||
#### Quality
|
|
||||||
|
|
||||||
```
|
|
||||||
quality:%quality
|
|
||||||
q:%quality
|
|
||||||
```
|
|
||||||
|
|
||||||
Redefines quality of the resulting image, percentage. When `0`, quality is assumed based on `IMGPROXY_QUALITY` and `IMGPROXY_FORMAT_QUALITY`.
|
|
||||||
|
|
||||||
Default: 0.
|
|
||||||
|
|
||||||
#### Max Bytes
|
|
||||||
|
|
||||||
```
|
|
||||||
max_bytes:%bytes
|
|
||||||
mb:%bytes
|
|
||||||
```
|
|
||||||
|
|
||||||
When set, imgproxy automatically degrades the quality of the image until the image is under the specified amount of bytes.
|
|
||||||
|
|
||||||
**📝Note:** Applicable only to `jpg`, `webp`, `heic`, and `tiff`.
|
|
||||||
|
|
||||||
**⚠️Warning:** When `max_bytes` is set, imgproxy saves image multiple times to achieve specified image size.
|
|
||||||
|
|
||||||
Default: 0
|
Default: 0
|
||||||
|
|
||||||
#### Background
|
### Background
|
||||||
|
|
||||||
```
|
```
|
||||||
background:%R:%G:%B
|
background:%R:%G:%B
|
||||||
@ -268,7 +276,7 @@ With no arguments provided, disables any background manipulations.
|
|||||||
|
|
||||||
Default: disabled
|
Default: disabled
|
||||||
|
|
||||||
#### Background alpha<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=background-alpha
|
### Background alpha<i class='badge badge-pro'></i> :id=background-alpha
|
||||||
|
|
||||||
```
|
```
|
||||||
background_alpha:%alpha
|
background_alpha:%alpha
|
||||||
@ -279,7 +287,7 @@ Adds alpha channel to `background`. `alpha` is a positive floating point number
|
|||||||
|
|
||||||
Default: 1
|
Default: 1
|
||||||
|
|
||||||
#### Adjust<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=adjust
|
### Adjust<i class='badge badge-pro'></i> :id=adjust
|
||||||
|
|
||||||
```
|
```
|
||||||
adjust:%brightness:%contrast:%saturation
|
adjust:%brightness:%contrast:%saturation
|
||||||
@ -288,7 +296,7 @@ a:%brightness:%contrast:%saturation
|
|||||||
|
|
||||||
Meta-option that defines the [brightness](#brightness), [contrast](#contrast), and [saturation](#saturation). All arguments are optional and can be omitted to use their default values.
|
Meta-option that defines the [brightness](#brightness), [contrast](#contrast), and [saturation](#saturation). All arguments are optional and can be omitted to use their default values.
|
||||||
|
|
||||||
#### Brightness<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=brightness
|
### Brightness<i class='badge badge-pro'></i> :id=brightness
|
||||||
|
|
||||||
```
|
```
|
||||||
brightness:%brightness
|
brightness:%brightness
|
||||||
@ -299,7 +307,7 @@ When set, imgproxy will adjust brightness of the resulting image. `brightness` i
|
|||||||
|
|
||||||
Default: 0
|
Default: 0
|
||||||
|
|
||||||
#### Contrast<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=contrast
|
### Contrast<i class='badge badge-pro'></i> :id=contrast
|
||||||
|
|
||||||
```
|
```
|
||||||
contrast:%contrast
|
contrast:%contrast
|
||||||
@ -310,7 +318,7 @@ When set, imgproxy will adjust contrast of the resulting image. `contrast` is a
|
|||||||
|
|
||||||
Default: 1
|
Default: 1
|
||||||
|
|
||||||
#### Saturation<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=saturation
|
### Saturation<i class='badge badge-pro'></i> :id=saturation
|
||||||
|
|
||||||
```
|
```
|
||||||
saturation:%saturation
|
saturation:%saturation
|
||||||
@ -321,7 +329,7 @@ When set, imgproxy will adjust saturation of the resulting image. `saturation` i
|
|||||||
|
|
||||||
Default: 1
|
Default: 1
|
||||||
|
|
||||||
#### Blur
|
### Blur
|
||||||
|
|
||||||
```
|
```
|
||||||
blur:%sigma
|
blur:%sigma
|
||||||
@ -332,7 +340,7 @@ When set, imgproxy will apply the gaussian blur filter to the resulting image. `
|
|||||||
|
|
||||||
Default: disabled
|
Default: disabled
|
||||||
|
|
||||||
#### Sharpen
|
### Sharpen
|
||||||
|
|
||||||
```
|
```
|
||||||
sharpen:%sigma
|
sharpen:%sigma
|
||||||
@ -345,7 +353,7 @@ As an approximate guideline, use 0.5 sigma for 4 pixels/mm (display resolution),
|
|||||||
|
|
||||||
Default: disabled
|
Default: disabled
|
||||||
|
|
||||||
#### Pixelate<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=pixelate
|
### Pixelate
|
||||||
|
|
||||||
```
|
```
|
||||||
pixelate:%size
|
pixelate:%size
|
||||||
@ -356,7 +364,7 @@ When set, imgproxy will apply the pixelate filter to the resulting image. `size`
|
|||||||
|
|
||||||
Default: disabled
|
Default: disabled
|
||||||
|
|
||||||
#### Unsharpening<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=unsharpening
|
### Unsharpening<i class='badge badge-pro'></i> :id=unsharpening
|
||||||
|
|
||||||
```
|
```
|
||||||
unsharpening:%mode:%weight:%dividor
|
unsharpening:%mode:%weight:%dividor
|
||||||
@ -365,7 +373,27 @@ ush:%mode:%weight:%dividor
|
|||||||
|
|
||||||
Allows redefining unsharpening options. All arguments have the same meaning as [Unsharpening](configuration.md#unsharpening) configs. All arguments are optional and can be omitted.
|
Allows redefining unsharpening options. All arguments have the same meaning as [Unsharpening](configuration.md#unsharpening) configs. All arguments are optional and can be omitted.
|
||||||
|
|
||||||
#### Watermark
|
### Blur detections<i class='badge badge-pro'></i><i class='badge badge-v3'></i> :id=blur-detections
|
||||||
|
|
||||||
|
```
|
||||||
|
blur_detections:%sigma:%class_name1:%class_name2:...:%class_nameN
|
||||||
|
bd:%sigma:%class_name1:%class_name2:...:%class_nameN
|
||||||
|
```
|
||||||
|
|
||||||
|
imgproxy [detects objects](object_detection.md) of the provided classes and blurs them. If class names are omitted, imgproxy blurs all the detected objects.
|
||||||
|
|
||||||
|
`sigma` defines the size of a mask imgproxy will use.
|
||||||
|
|
||||||
|
### Draw detections<i class='badge badge-pro'></i><i class='badge badge-v3'></i> :id=draw-detections
|
||||||
|
|
||||||
|
```
|
||||||
|
draw_detections:%draw:%class_name1:%class_name2:...:%class_nameN
|
||||||
|
dd:%draw:%class_name1:%class_name2:...:%class_nameN
|
||||||
|
```
|
||||||
|
|
||||||
|
When `draw` is set to `1`, `t` or `true`, imgproxy [detects objects](object_detection.md) of the provided classes and draws their bounding boxes. If class names are omitted, imgproxy draws the bounding boxes of all the detected objects.
|
||||||
|
|
||||||
|
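
For example, assuming your detection model defines a `face` class, the following fragment (unsigned, illustrative) blurs detected faces with a sigma of 5:

```
/%signature/bd:5:face/plain/http://example.com/images/people.jpg
```
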
### Watermark
|
||||||
|
|
||||||
```
|
```
|
||||||
watermark:%opacity:%position:%x_offset:%y_offset:%scale
|
watermark:%opacity:%position:%x_offset:%y_offset:%scale
|
||||||
@ -391,7 +419,7 @@ Puts watermark on the processed image.
|
|||||||
|
|
||||||
Default: disabled
|
Default: disabled
|
||||||
|
|
||||||
#### Watermark URL<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=watermark
|
### Watermark URL<i class='badge badge-pro'></i> :id=watermark-url
|
||||||
|
|
||||||
```
|
```
|
||||||
watermark_url:%url
|
watermark_url:%url
|
||||||
@ -402,7 +430,7 @@ When set, imgproxy will use the image from the specified URL as a watermark. `ur
|
|||||||
|
|
||||||
Default: blank
|
Default: blank
|
||||||
|
|
||||||
#### Style<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=style
|
### Style<i class='badge badge-pro'></i> :id=style
|
||||||
|
|
||||||
```
|
```
|
||||||
style:%style
|
style:%style
|
||||||
@ -413,7 +441,71 @@ When set, imgproxy will prepend `<style>` node with provided content to the `<sv
|
|||||||
|
|
||||||
Default: blank
|
Default: blank
|
||||||
|
|
||||||
#### JPEG options<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=jpeg-options
|
### Strip Metadata
|
||||||
|
|
||||||
|
```
|
||||||
|
strip_metadata:%strip_metadata
|
||||||
|
sm:%strip_metadata
|
||||||
|
```
|
||||||
|
|
||||||
|
When set to `1`, `t` or `true`, imgproxy will strip the metadata (EXIF, IPTC, etc.) on JPEG and WebP output images. Normally this is controlled by the [IMGPROXY_STRIP_METADATA](configuration.md#miscellaneous) configuration but this processing option allows the configuration to be set for each request.
|
||||||
|
|
||||||
|
### Strip Color Profile
|
||||||
|
|
||||||
|
```
|
||||||
|
strip_color_profile:%strip_color_profile
|
||||||
|
scp:%strip_color_profile
|
||||||
|
```
|
||||||
|
|
||||||
|
When set to `1`, `t` or `true`, imgproxy will transform the embedded color profile (ICC) to sRGB and remove it from the image. Otherwise, imgproxy will try to keep it as is. Normally this is controlled by the [IMGPROXY_STRIP_COLOR_PROFILE](configuration.md#miscellaneous) configuration but this processing option allows the configuration to be set for each request.
|
||||||
|
|
||||||
|
### Quality
|
||||||
|
|
||||||
|
```
|
||||||
|
quality:%quality
|
||||||
|
q:%quality
|
||||||
|
```
|
||||||
|
|
||||||
|
Redefines quality of the resulting image, percentage. When `0`, quality is assumed based on `IMGPROXY_QUALITY` and [format_quality](#format-quality).
|
||||||
|
|
||||||
|
Default: 0.
|
||||||
|
|
||||||
|
### Format quality<i class='badge badge-v3'></i> :id=format-quality
|
||||||
|
|
||||||
|
```
|
||||||
|
format_quality:%format1:%quality1:%format2:%quality2:...:%formatN:%qualityN
|
||||||
|
fq:%format1:%quality1:%format2:%quality2:...:%formatN:%qualityN
|
||||||
|
```
|
||||||
|
|
||||||
|
Adds or redefines `IMGPROXY_FORMAT_QUALITY` values.
|
||||||
|
|
||||||
|
### Autoquality<i class='badge badge-pro'></i><i class='badge badge-v3'></i> :id=autoquality
|
||||||
|
|
||||||
|
```
|
||||||
|
autoquality:%method:%target:%min_quality:%max_quality:%allowed_error
|
||||||
|
aq:%method:%target:%min_quality:%max_quality:%allowed_error
|
||||||
|
```
|
||||||
|
|
||||||
|
Redefines autoquality settings. All arguments have the same meaning as the [Autoquality](configuration.md#autoquality) configs. All arguments are optional and can be omitted.
|
||||||
|
|
||||||
|
**⚠️Warning:** Autoquality requires the image to be saved several times. Use it only when you prefer the resulting size and quality over the speed.
|
||||||
|
|
||||||
|
### Max Bytes
|
||||||
|
|
||||||
|
```
|
||||||
|
max_bytes:%bytes
|
||||||
|
mb:%bytes
|
||||||
|
```
|
||||||
|
|
||||||
|
When set, imgproxy automatically degrades the quality of the image until the image is under the specified amount of bytes.
|
||||||
|
|
||||||
|
**📝Note:** Applicable only to `jpg`, `webp`, `heic`, and `tiff`.
|
||||||
|
|
||||||
|
**⚠️Warning:** When `max_bytes` is set, imgproxy saves image multiple times to achieve specified image size.
|
||||||
|
|
||||||
|
Default: 0
|
||||||
|
|
||||||
|
### JPEG options<i class='badge badge-pro'></i> :id=jpeg-options
|
||||||
|
|
||||||
```
|
```
|
||||||
jpeg_options:%progressive:%no_subsample:%trellis_quant:%overshoot_deringing:%optimize_scans:%quant_table
|
jpeg_options:%progressive:%no_subsample:%trellis_quant:%overshoot_deringing:%optimize_scans:%quant_table
|
||||||
@ -422,7 +514,7 @@ jpgo:%progressive:%no_subsample:%trellis_quant:%overshoot_deringing:%optimize_sc
|
|||||||
|
|
||||||
Allows redefining JPEG saving options. All arguments have the same meaning as [Advanced JPEG compression](configuration.md#advanced-jpeg-compression) configs. All arguments are optional and can be omitted.
|
Allows redefining JPEG saving options. All arguments have the same meaning as [Advanced JPEG compression](configuration.md#advanced-jpeg-compression) configs. All arguments are optional and can be omitted.
|
||||||
|
|
||||||
#### PNG options<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=png-options
|
### PNG options<i class='badge badge-pro'></i> :id=png-options
|
||||||
|
|
||||||
```
|
```
|
||||||
png_options:%interlaced:%quantize:%quantization_colors
|
png_options:%interlaced:%quantize:%quantization_colors
|
||||||
@ -431,7 +523,7 @@ pngo:%interlaced:%quantize:%quantization_colors
|
|||||||
|
|
||||||
Allows redefining PNG saving options. All arguments have the same meaning as [Advanced PNG compression](configuration.md#advanced-png-compression) configs. All arguments are optional and can be omitted.
|
Allows redefining PNG saving options. All arguments have the same meaning as [Advanced PNG compression](configuration.md#advanced-png-compression) configs. All arguments are optional and can be omitted.
|
||||||
|
|
||||||
#### GIF options<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=gif-options
|
### GIF options<i class='badge badge-pro'></i> :id=gif-options
|
||||||
|
|
||||||
```
|
```
|
||||||
gif_options:%optimize_frames:%optimize_transparency
|
gif_options:%optimize_frames:%optimize_transparency
|
||||||
@ -440,7 +532,19 @@ gifo:%optimize_frames:%optimize_transparency
|
|||||||
|
|
||||||
Allows redefining GIF saving options. All arguments have the same meaning as [Advanced GIF compression](configuration.md#advanced-gif-compression) configs. All arguments are optional and can be omitted.
|
Allows redefining GIF saving options. All arguments have the same meaning as [Advanced GIF compression](configuration.md#advanced-gif-compression) configs. All arguments are optional and can be omitted.
|
||||||
|
|
||||||
#### Page<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=page
|
### Format
|
||||||
|
|
||||||
|
```
|
||||||
|
format:%extension
|
||||||
|
f:%extension
|
||||||
|
ext:%extension
|
||||||
|
```
|
||||||
|
|
||||||
|
Specifies the resulting image format. Alias for [extension](#extension) URL part.
|
||||||
|
|
||||||
|
Default: `jpg`
|
||||||
|
|
||||||
|
### Page<i class='badge badge-pro'></i> :id=page
|
||||||
|
|
||||||
```
|
```
|
||||||
page:%page
|
page:%page
|
||||||
@ -451,7 +555,7 @@ When source image supports pagination (PDF, TIFF) or animation (GIF, WebP), this
|
|||||||
|
|
||||||
Default: 0
|
Default: 0
|
||||||
|
|
||||||
#### Video thumbnail second<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=video-thumbnail-second
|
### Video thumbnail second<i class='badge badge-pro'></i> :id=video-thumbnail-second
|
||||||
|
|
||||||
```
|
```
|
||||||
video_thumbnail_second:%second
|
video_thumbnail_second:%second
|
||||||
@ -460,20 +564,35 @@ vts:%second
|
|||||||
|
|
||||||
Allows redefining `IMGPROXY_VIDEO_THUMBNAIL_SECOND` config.
|
Allows redefining `IMGPROXY_VIDEO_THUMBNAIL_SECOND` config.
|
||||||
|
|
||||||
#### Preset
|
### Fallback image URL<i class='badge badge-pro'></i><i class='badge badge-v3'></i> :id=fallback-image-url
|
||||||
|
|
||||||
|
You can use a custom fallback image by specifying its URL with the `fallback_image_url` processing option:
|
||||||
|
|
||||||
```
|
```
|
||||||
preset:%preset_name1:%preset_name2:...:%preset_nameN
|
fallback_image_url:%url
|
||||||
pr:%preset_name1:%preset_name2:...:%preset_nameN
|
fiu:%url
|
||||||
```
|
```
|
||||||
|
|
||||||
Defines a list of presets to be used by imgproxy. Feel free to use as many presets in a single URL as you need.
|
Where `url` is the Base64-encoded URL of the custom fallback image.
|
||||||
|
|
||||||
Read more about presets in the [Presets](presets.md) guide.
|
Default: blank
|
||||||
|
|
||||||
|
### Skip processing<i class='badge badge-v3'></i> :id=skip-processing
|
||||||
|
|
||||||
|
```
|
||||||
|
skip_processing:%extension1:%extension2:...:%extensionN
|
||||||
|
skp:%extension1:%extension2:...:%extensionN
|
||||||
|
```
|
||||||
|
|
||||||
|
When set, imgproxy will skip the processing of listed formats. Also available as [IMGPROXY_SKIP_PROCESSING_FORMATS](configuration.md#skip-processing) configuration.
|
||||||
|
|
||||||
|
**📝Note:** Processing can be skipped only when the requested format is the same as the source format.
|
||||||
|
|
||||||
|
**📝Note:** Video thumbnail processing can't be skipped.
|
||||||
|
|
||||||
Default: empty
|
Default: empty
|
||||||
|
|
||||||
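
For instance, to serve SVG sources untouched while still processing other formats (unsigned, illustrative):

```
/%signature/skp:svg/plain/http://example.com/images/logo.svg
```
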
#### Cache buster
|
### Cache buster
|
||||||
|
|
||||||
```
|
```
|
||||||
cachebuster:%string
|
cachebuster:%string
|
||||||
@ -486,34 +605,18 @@ It's highly recommended to prefer `cachebuster` option over URL query string bec
|
|||||||
|
|
||||||
Default: empty
|
Default: empty
|
||||||
|
|
||||||
### Expires<i class='badge badge-v3'></i> :id=expires

```
expires:%timestamp
exp:%timestamp
```

When set, imgproxy will check the provided unix timestamp and return 404 when it is expired.

Default: empty

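For example, a URL that expires at 2025-01-01 00:00:00 UTC would include the corresponding unix timestamp (unsigned, illustrative):

```
/%signature/exp:1735689600/rs:fill:300:400/plain/http://example.com/images/curiosity.jpg
```
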
### Filename

```
filename:%string
@ -524,23 +627,24 @@ Defines a filename for `Content-Disposition` header. When not specified, imgprox
|
|||||||
|
|
||||||
Default: empty
|
Default: empty
|
||||||
|
|
||||||
#### Format
|
### Preset
|
||||||
|
|
||||||
```
|
```
|
||||||
format:%extension
|
preset:%preset_name1:%preset_name2:...:%preset_nameN
|
||||||
f:%extension
|
pr:%preset_name1:%preset_name2:...:%preset_nameN
|
||||||
ext:%extension
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Specifies the resulting image format. Alias for [extension](#extension) URL part.
|
Defines a list of presets to be used by imgproxy. Feel free to use as many presets in a single URL as you need.
|
||||||
|
|
||||||
Default: `jpg`
|
Read more about presets in the [Presets](presets.md) guide.
|
||||||
|
|
||||||
### Source URL
|
Default: empty
|
||||||
|
|
||||||
|
## Source URL
|
||||||
|
|
||||||
There are two ways to specify source url:
|
There are two ways to specify source url:
|
||||||
|
|
||||||
#### Plain
|
### Plain
|
||||||
|
|
||||||
The source URL can be provided as is, prepended by the `/plain/` part:
|
|
||||||
@ -556,7 +660,7 @@ When using plain source URL, you can specify the [extension](#extension) after `
|
|||||||
/plain/http://example.com/images/curiosity.jpg@png
|
/plain/http://example.com/images/curiosity.jpg@png
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Base64 encoded
|
### Base64 encoded
|
||||||
|
|
||||||
The source URL can be encoded with URL-safe Base64. The encoded URL can be split with `/` for your needs:
|
The source URL can be encoded with URL-safe Base64. The encoded URL can be split with `/` for your needs:
|
||||||
|
|
||||||
@ -570,7 +674,7 @@ When using encoded source URL, you can specify the [extension](#extension) after
|
|||||||
/aHR0cDovL2V4YW1w/bGUuY29tL2ltYWdl/cy9jdXJpb3NpdHku/anBn.png
|
/aHR0cDovL2V4YW1w/bGUuY29tL2ltYWdl/cy9jdXJpb3NpdHku/anBn.png
|
||||||
```
|
```
|
||||||
|
|
||||||
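
If you build these URLs programmatically, the encoding is plain URL-safe Base64; here is a minimal Go sketch producing the unpadded form shown in the example above (the source URL is illustrative):

```go
package main

import (
	"encoding/base64"
	"fmt"
)

func main() {
	src := "http://example.com/images/curiosity.jpg"

	// URL-safe Base64 without padding, matching the example above.
	encoded := base64.RawURLEncoding.EncodeToString([]byte(src))

	fmt.Printf("/%s.png\n", encoded)
}
```
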
### Extension
|
## Extension
|
||||||
|
|
||||||
Extension specifies the format of the resulting image. Read about image formats support [here](image_formats_support.md).
|
Extension specifies the format of the resulting image. Read about image formats support [here](image_formats_support.md).
|
||||||
|
|
@ -1,106 +0,0 @@
|
|||||||
# Generating the URL (Basic)
|
|
||||||
|
|
||||||
This guide describes the simple URL format that is easy to use but doesn't support the whole range of imgproxy features. This URL format is mostly supported for backwards compatibility with imgproxy 1.x. Please read our [Generating the URL (Advanced)](generating_the_url_advanced.md) guide to learn about the advanced URL format.
|
|
||||||
|
|
||||||
## Format definition
|
|
||||||
|
|
||||||
The basic URL should contain the signature, resize parameters, and source URL, like this:
|
|
||||||
|
|
||||||
```
|
|
||||||
/%signature/%resizing_type/%width/%height/%gravity/%enlarge/plain/%source_url@%extension
|
|
||||||
/%signature/%resizing_type/%width/%height/%gravity/%enlarge/%encoded_source_url.%extension
|
|
||||||
```
|
|
||||||
|
|
||||||
Check out the [example](#example) at the end of this guide.
|
|
||||||
|
|
||||||
### Signature
|
|
||||||
|
|
||||||
Signature protects your URL from being modified by an attacker. It is highly recommended to sign imgproxy URLs in a production environment.
|
|
||||||
|
|
||||||
Once you set up your [URL signature](configuration.md#url-signature), check out the [Signing the URL](signing_the_url.md) guide to learn about how to sign your URLs. Otherwise, use any string here.
|
|
||||||
|
|
||||||
### Resizing types
|
|
||||||
|
|
||||||
imgproxy supports the following resizing types:
|
|
||||||
|
|
||||||
* `fit`: resizes the image while keeping aspect ratio to fit given size;
|
|
||||||
* `fill`: resizes the image while keeping aspect ratio to fill given size and cropping projecting parts;
|
|
||||||
* `auto`: if both source and resulting dimensions have the same orientation (portrait or landscape), imgproxy will use `fill`. Otherwise, it will use `fit`.
|
|
||||||
|
|
||||||
### Width and height
|
|
||||||
|
|
||||||
Width and height parameters define the size of the resulting image in pixels. Depending on the resizing type applied, the dimensions may differ from the requested ones.
|
|
||||||
|
|
||||||
### Gravity
|
|
||||||
|
|
||||||
When imgproxy needs to cut some parts of the image, it is guided by the gravity. The following values are supported:
|
|
||||||
|
|
||||||
* `no`: north (top edge);
|
|
||||||
* `so`: south (bottom edge);
|
|
||||||
* `ea`: east (right edge);
|
|
||||||
* `we`: west (left edge);
|
|
||||||
* `noea`: north-east (top-right corner);
|
|
||||||
* `nowe`: north-west (top-left corner);
|
|
||||||
* `soea`: south-east (bottom-right corner);
|
|
||||||
* `sowe`: south-west (bottom-left corner);
|
|
||||||
* `ce`: center;
|
|
||||||
* `sm`: smart. `libvips` detects the most "interesting" section of the image and considers it as the center of the resulting image;
|
|
||||||
* `fp:%x:%y` - focus point. `x` and `y` are floating point numbers between 0 and 1 that describe the coordinates of the center of the resulting image. Treat 0 and 1 as right/left for `x` and top/bottom for `y`.
|
|
||||||
|
|
||||||
### Enlarge
|
|
||||||
|
|
||||||
When set to `1`, `t` or `true`, imgproxy will enlarge the image if it is smaller than the given size.
|
|
||||||
|
|
||||||
### Source URL
|
|
||||||
|
|
||||||
There are two ways to specify source url:
|
|
||||||
|
|
||||||
#### Plain
|
|
||||||
|
|
||||||
The source URL can be provided as is, prepended by `/plain/` part:
|
|
||||||
|
|
||||||
```
|
|
||||||
/plain/http://example.com/images/curiosity.jpg
|
|
||||||
```
|
|
||||||
|
|
||||||
**📝Note:** If the source URL contains query string or `@`, you need to escape it.
|
|
||||||
|
|
||||||
When using plain source URL, you can specify the [extension](#extension) after `@`:
|
|
||||||
|
|
||||||
```
|
|
||||||
/plain/http://example.com/images/curiosity.jpg@png
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Base64 encoded
|
|
||||||
|
|
||||||
The source URL can be encoded with URL-safe Base64. The encoded URL can be split with `/` for your needs:
|
|
||||||
|
|
||||||
```
|
|
||||||
/aHR0cDovL2V4YW1w/bGUuY29tL2ltYWdl/cy9jdXJpb3NpdHku/anBn
|
|
||||||
```
|
|
||||||
|
|
||||||
When using encoded source URL, you can specify the [extension](#extension) after `.`:
|
|
||||||
|
|
||||||
```
|
|
||||||
/aHR0cDovL2V4YW1w/bGUuY29tL2ltYWdl/cy9jdXJpb3NpdHku/anBn.png
|
|
||||||
```
|
|
||||||
|
|
||||||
### Extension
|
|
||||||
|
|
||||||
Extension specifies the format of the resulting image. Read about image formats support [here](image_formats_support.md).
|
|
||||||
|
|
||||||
The extension part can be omitted. In this case, imgproxy will use source image format as resulting one. If source image format is not supported as resulting, imgproxy will use `jpg`. You also can [enable WebP support detection](configuration.md#webp-support-detection) to use it as default resulting format when possible.
|
|
||||||
|
|
||||||
## Example
|
|
||||||
|
|
||||||
Signed imgproxy URL that resizes `http://example.com/images/curiosity.jpg` to fill `300x400` area with smart gravity without enlarging, and converts the image to `png`:
|
|
||||||
|
|
||||||
```
|
|
||||||
http://imgproxy.example.com/AfrOrF3gWeDA6VOlDG4TzxMv39O7MXnF4CXpKUwGqRM/fill/300/400/sm/0/plain/http://example.com/images/curiosity.jpg@png
|
|
||||||
```
|
|
||||||
|
|
||||||
The same URL with Base64-encoded source URL will look like this:
|
|
||||||
|
|
||||||
```
|
|
||||||
http://imgproxy.example.com/AfrOrF3gWeDA6VOlDG4TzxMv39O7MXnF4CXpKUwGqRM/fill/300/400/sm/0/aHR0cDovL2V4YW1w/bGUuY29tL2ltYWdl/cy9jdXJpb3NpdHku/anBn.png
|
|
||||||
```
|
|
@ -1,4 +1,4 @@
|
|||||||
# Getting the image info<img class='pro-badge' src='assets/pro.svg' alt='pro' />
|
# Getting the image info<i class='badge badge-pro'></i>
|
||||||
|
|
||||||
imgproxy can fetch and return the source image info without downloading the whole image.
|
imgproxy can fetch and return the source image info without downloading the whole image.
|
||||||
|
|
||||||
@ -47,7 +47,10 @@ imgproxy responses with JSON body and returns the following info:
|
|||||||
* `width`: image/video width;
|
* `width`: image/video width;
|
||||||
* `height`: image/video height;
|
* `height`: image/video height;
|
||||||
* `size`: file size. Can be zero if the image source doesn't set the `Content-Length` header properly;
|
* `size`: file size. Can be zero if the image source doesn't set the `Content-Length` header properly;
|
||||||
* `exif`: JPEG exif data.
|
* `exif`: Exif data;
|
||||||
|
* `iptc`: IPTC data.
|
||||||
|
|
||||||
|
**📝Note:** There are lots of IPTC tags in the spec, but imgproxy supports only a few of them. If you need support for additional tags, just contact us.
|
||||||
|
|
||||||
#### Example (JPEG)
|
#### Example (JPEG)
|
||||||
|
|
||||||
@ -63,6 +66,12 @@ imgproxy responses with JSON body and returns the following info:
|
|||||||
"Date and Time": "2016:09:11 22:15:03",
|
"Date and Time": "2016:09:11 22:15:03",
|
||||||
"Model": "NIKON D810",
|
"Model": "NIKON D810",
|
||||||
"Software": "Adobe Photoshop Lightroom 6.1 (Windows)"
|
"Software": "Adobe Photoshop Lightroom 6.1 (Windows)"
|
||||||
|
},
|
||||||
|
"iptc": {
|
||||||
|
"Name": "Spider-Man",
|
||||||
|
"Caption": "Spider-Man swings on the web",
|
||||||
|
"Copyright Notice": "Daily Bugle",
|
||||||
|
"Keywords": ["spider-man", "menance", "offender"]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
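As a rough illustration (this is not part of imgproxy; the struct and its field set are assumptions derived from the JSON keys listed above and may differ between imgproxy versions), such an info response could be consumed from Go like this:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// ImageInfo mirrors the keys shown in the example response above.
// Treat it as a sketch rather than a stable contract.
type ImageInfo struct {
	Width  int                    `json:"width"`
	Height int                    `json:"height"`
	Size   int64                  `json:"size"`
	Exif   map[string]string      `json:"exif"`
	IPTC   map[string]interface{} `json:"iptc"` // values can be strings or arrays (e.g. "Keywords")
}

func main() {
	// Placeholder response body; in practice this comes from the info endpoint.
	raw := []byte(`{
		"width": 100, "height": 80, "size": 12345,
		"exif": {"Model": "NIKON D810"},
		"iptc": {"Name": "Spider-Man", "Keywords": ["spider-man"]}
	}`)

	var info ImageInfo
	if err := json.Unmarshal(raw, &info); err != nil {
		log.Fatal(err)
	}

	fmt.Println(info.Width, info.Height, info.Size, info.Exif["Model"], info.IPTC["Keywords"])
}
```
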
|
@ -14,9 +14,9 @@ At the moment, imgproxy supports only the most popular image formats:
|
|||||||
| HEIC | `heic` | Yes | No |
|
| HEIC | `heic` | Yes | No |
|
||||||
| BMP | `bmp` | Yes | Yes |
|
| BMP | `bmp` | Yes | Yes |
|
||||||
| TIFF | `tiff` | Yes | Yes |
|
| TIFF | `tiff` | Yes | Yes |
|
||||||
| PDF <img class='pro-badge' src='assets/pro.svg' alt='pro' /> | `pdf` | Yes | No |
|
| PDF<i class='badge badge-pro'></i> | `pdf` | Yes | No |
|
||||||
| MP4 (h264) <img class='pro-badge' src='assets/pro.svg' alt='pro' /> | `mp4` | [See notes](#video-thumbnails) | Yes |
|
| MP4 (h264)<i class='badge badge-pro'></i> | `mp4` | [See notes](#video-thumbnails) | Yes |
|
||||||
| Other video formats <img class='pro-badge' src='assets/pro.svg' alt='pro' /> | | [See notes](#video-thumbnails) | No |
|
| Other video formats<i class='badge badge-pro'></i> | | [See notes](#video-thumbnails) | No |
|
||||||
|
|
||||||
## GIF support
|
## GIF support
|
||||||
|
|
||||||
@ -58,13 +58,13 @@ Since processing of animated images is pretty heavy, only one frame is processed
|
|||||||
|
|
||||||
**📝Note:** imgproxy sums the resolutions of all frames when checking the source image resolution.
|
**📝Note:** imgproxy sums the resolutions of all frames when checking the source image resolution.
|
||||||
|
|
||||||
## Converting animated images to MP4<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=converting-animated-images-to-mp4
|
## Converting animated images to MP4<i class='badge badge-pro'></i> :id=converting-animated-images-to-mp4
|
||||||
|
|
||||||
Animated image results can be converted to MP4 by specifying the `mp4` extension.
|
Animated image results can be converted to MP4 by specifying the `mp4` extension.
|
||||||
|
|
||||||
Since MP4 requires usage of a `<video>` tag instead of `<img>`, automatic conversion to MP4 is not provided.
|
Since MP4 requires usage of a `<video>` tag instead of `<img>`, automatic conversion to MP4 is not provided.
|
||||||
|
|
||||||
## Video thumbnails<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=video-thumbnails
|
## Video thumbnails<i class='badge badge-pro'></i> :id=video-thumbnails
|
||||||
|
|
||||||
If you provide a video as a source, imgproxy takes a specific frame of it to create a thumbnail. While doing this, imgproxy downloads only the amount of data required to reach the needed frame.
|
If you provide a video as a source, imgproxy takes a specific frame of it to create a thumbnail. While doing this, imgproxy downloads only the amount of data required to reach the needed frame.
|
||||||
|
|
||||||
|
@ -58,6 +58,7 @@
|
|||||||
subMaxLevel: 2,
|
subMaxLevel: 2,
|
||||||
auto2top: true,
|
auto2top: true,
|
||||||
routerMode: window.DOCSIFY_ROUTER_MODE || "hash",
|
routerMode: window.DOCSIFY_ROUTER_MODE || "hash",
|
||||||
|
noEmoji: true,
|
||||||
search: {
|
search: {
|
||||||
namespace: 'docs-imgproxy',
|
namespace: 'docs-imgproxy',
|
||||||
depth: 6
|
depth: 6
|
||||||
@ -80,6 +81,7 @@
|
|||||||
<script src="//unpkg.com/docsify/lib/plugins/search.min.js"></script>
|
<script src="//unpkg.com/docsify/lib/plugins/search.min.js"></script>
|
||||||
<script src="//unpkg.com/docsify-pagination/dist/docsify-pagination.min.js"></script>
|
<script src="//unpkg.com/docsify-pagination/dist/docsify-pagination.min.js"></script>
|
||||||
<script src="https://unpkg.com/docsify-copy-code@2"></script>
|
<script src="https://unpkg.com/docsify-copy-code@2"></script>
|
||||||
|
<script src="//cdn.jsdelivr.net/npm/prismjs@1/components/prism-bash.min.js"></script>
|
||||||
<link href="https://fonts.googleapis.com/css?family=Fira+Mono|Roboto:400,700&display=swap" rel="stylesheet">
|
<link href="https://fonts.googleapis.com/css?family=Fira+Mono|Roboto:400,700&display=swap" rel="stylesheet">
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
@ -8,10 +8,6 @@ There are some imgproxy options that can help you to optimize memory usage and d
|
|||||||
|
|
||||||
imgproxy uses memory buffers to download source images. While these buffers are empty at the start by default, they grow to the required size when imgproxy downloads an image. Allocating new memory to grow the buffers can cause memory fragmentation. Allocating the required memory at the start eliminates much of this fragmentation since the buffers won't grow. Setting `IMGPROXY_DOWNLOAD_BUFFER_SIZE` tells imgproxy to initialize download buffers with _at least_ the specified size. It's recommended to use the estimated 95th percentile of your image sizes as the initial download buffer size.
|
imgproxy uses memory buffers to download source images. While these buffers are empty at the start by default, they grow to the required size when imgproxy downloads an image. Allocating new memory to grow the buffers can cause memory fragmentation. Allocating the required memory at the start eliminates much of this fragmentation since the buffers won't grow. Setting `IMGPROXY_DOWNLOAD_BUFFER_SIZE` tells imgproxy to initialize download buffers with _at least_ the specified size. It's recommended to use the estimated 95th percentile of your image sizes as the initial download buffer size.
|
||||||
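To make the idea concrete, here is a rough Go sketch of pre-sized download buffers. This is only an illustration of the approach; the helper name `newBufPool` and the sizes are assumptions, not imgproxy's actual bufpool implementation.

```go
package main

import (
	"bytes"
	"sync"
)

// newBufPool returns a pool of byte buffers pre-grown to initialSize,
// so most downloads never trigger a reallocation.
func newBufPool(initialSize int) *sync.Pool {
	return &sync.Pool{
		New: func() interface{} {
			buf := new(bytes.Buffer)
			buf.Grow(initialSize) // allocate up front to avoid later growth
			return buf
		},
	}
}

func main() {
	// e.g. 512 KiB if that's roughly the 95th percentile of your source image sizes
	pool := newBufPool(512 * 1024)

	buf := pool.Get().(*bytes.Buffer)
	defer func() {
		buf.Reset()
		pool.Put(buf)
	}()

	// ... io.Copy(buf, resp.Body) would go here ...
}
```
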
|
|
||||||
### IMGPROXY_GZIP_BUFFER_SIZE
|
|
||||||
|
|
||||||
The same as `IMGPROXY_DOWNLOAD_BUFFER_SIZE` but for GZip buffers. If you use GZip compression of the resulting images, you can reduce memory fragmentation by using the estimated maximum size of the GZipped resulting image as the initial size of GZip buffers.
|
|
||||||
|
|
||||||
### IMGPROXY_FREE_MEMORY_INTERVAL
|
### IMGPROXY_FREE_MEMORY_INTERVAL
|
||||||
|
|
||||||
Working with a large amount of data can leave some memory allocated that is not used most of the time. That's why imgproxy forces Go's garbage collector to free as much memory as possible and return it to the OS. The default interval of this action is 10 seconds, but you can change it by setting `IMGPROXY_FREE_MEMORY_INTERVAL`. Decreasing the interval can smooth the memory usage graph, but it can also slow down imgproxy a little. Increasing it has the opposite effect.
|
Working with a large amount of data can leave some memory allocated that is not used most of the time. That's why imgproxy forces Go's garbage collector to free as much memory as possible and return it to the OS. The default interval of this action is 10 seconds, but you can change it by setting `IMGPROXY_FREE_MEMORY_INTERVAL`. Decreasing the interval can smooth the memory usage graph, but it can also slow down imgproxy a little. Increasing it has the opposite effect.
|
||||||
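Under the hood this boils down to periodically asking the Go runtime to return memory to the OS. The following is a simplified sketch of that mechanism, not imgproxy's exact code:

```go
package main

import (
	"runtime/debug"
	"time"
)

// freeMemoryLoop forces a GC cycle and returns freed memory to the OS on
// every tick, similar in spirit to what IMGPROXY_FREE_MEMORY_INTERVAL controls.
func freeMemoryLoop(interval time.Duration, stop <-chan struct{}) {
	ticker := time.NewTicker(interval)
	defer ticker.Stop()

	for {
		select {
		case <-ticker.C:
			debug.FreeOSMemory()
		case <-stop:
			return
		}
	}
}

func main() {
	stop := make(chan struct{})
	go freeMemoryLoop(10*time.Second, stop) // 10s is the documented default interval

	// ... the rest of the application would run here ...
	time.Sleep(time.Second)
	close(stop)
}
```
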
|
41
docs/object_detection.md
Normal file
41
docs/object_detection.md
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
# Object detection<i class='badge badge-pro'></i><i class='badge badge-v3'></i>
|
||||||
|
|
||||||
|
imgproxy can detect objects in the image and use them for smart crop, blurring the detections, or drawing the detections.
|
||||||
|
|
||||||
|
For object detection purposes, imgproxy uses the [Darknet YOLO](https://github.com/AlexeyAB/darknet) model. We provide Docker images with a model trained for face detection, but you can use any Darknet YOLO model from the [zoo](https://github.com/AlexeyAB/darknet/wiki/YOLOv4-model-zoo), or you can train your own model following the [guide](https://github.com/AlexeyAB/darknet#how-to-train-to-detect-your-custom-objects).
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
You need to define four config variables to enable object detection:
|
||||||
|
|
||||||
|
* `IMGPROXY_OBJECT_DETECTION_CONFIG`: path to the neural network config.
|
||||||
|
* `IMGPROXY_OBJECT_DETECTION_WEIGHTS`: path to the neural network weights.
|
||||||
|
* `IMGPROXY_OBJECT_DETECTION_CLASSES`: path to a text file with the class names, one per line.
|
||||||
|
* `IMGPROXY_OBJECT_DETECTION_NET_SIZE`: the size of the neural network input. The width and the height of the input should be the same, so this config value should be a single number. Default: 416.
|
||||||
|
|
||||||
|
Read the [configuration](configuration.md#object-detection) guide for more info on the object detection config values.
|
||||||
|
|
||||||
|
## Usage examples
|
||||||
|
### Object-oriented crop
|
||||||
|
|
||||||
|
You can [crop](https://docs.imgproxy.net/generating_the_url?id=crop) your images and keep objects of desired classes in frame:
|
||||||
|
|
||||||
|
```
|
||||||
|
.../crop:256:256/g:obj:face/...
|
||||||
|
```
|
||||||
|
|
||||||
|
### Blurring detections
|
||||||
|
|
||||||
|
You can [blur objects](https://docs.imgproxy.net/generating_the_url?id=blur-detections) of desired classes for anonymization or hiding NSFW content:
|
||||||
|
|
||||||
|
```
|
||||||
|
.../blur_detections:7:face/...
|
||||||
|
```
|
||||||
|
|
||||||
|
### Draw detections
|
||||||
|
|
||||||
|
You can make imgproxy [draw bounding boxes](https://docs.imgproxy.net/generating_the_url?id=draw-detections) of detected objects of desired classes (handy for testing your models):
|
||||||
|
|
||||||
|
```
|
||||||
|
.../draw_detections:1:face/...
|
||||||
|
```
|
@ -1,6 +1,6 @@
|
|||||||
# Presets
|
# Presets
|
||||||
|
|
||||||
imgproxy preset is a named set of processing options. Presets can be used in [advanced URL format](generating_the_url_advanced.md#preset) to get shorter and somewhat readable URLs.
|
imgproxy preset is a named set of processing options. Presets can be used in [URLs](generating_the_url.md#preset) to make them shorter and somewhat readable.
|
||||||
|
|
||||||
## Presets definition
|
## Presets definition
|
||||||
|
|
||||||
@ -10,7 +10,7 @@ The preset definition looks like this:
|
|||||||
%preset_name=%processing_options
|
%preset_name=%processing_options
|
||||||
```
|
```
|
||||||
|
|
||||||
Processing options should be defined in the same way as you define them in the [advanced URL format](generating_the_url_advanced.md#preset). For example, here is a preset named `awesome` that sets the resizing type to `fill` and resulting format to `jpg`:
|
Processing options should be defined in the same way as you define them in the [URLs](generating_the_url.md#processing-options). For example, here is a preset named `awesome` that sets the resizing type to `fill` and resulting format to `jpg`:
|
||||||
|
|
||||||
```
|
```
|
||||||
awesome=resizing_type:fill/format:jpg
|
awesome=resizing_type:fill/format:jpg
|
||||||
|
@ -22,9 +22,8 @@ echo $(xxd -g 2 -l 64 -p /dev/random | tr -d '\n')
|
|||||||
The signature is a URL-safe Base64-encoded HMAC digest of the rest of the path, including the leading `/`. Here is how it is calculated (a Go sketch of these steps follows the list):
|
The signature is a URL-safe Base64-encoded HMAC digest of the rest of the path, including the leading `/`. Here is how it is calculated (a Go sketch of these steps follows the list):
|
||||||
|
|
||||||
* Take the path part after the signature:
|
* Take the path part after the signature:
|
||||||
* For [basic URL format](generating_the_url_basic.md): `/%resizing_type/%width/%height/%gravity/%enlarge/%encoded_url.%extension` or `/%resizing_type/%width/%height/%gravity/%enlarge/plain/%plain_url@%extension`;
|
* For [processing URLs](generating_the_url.md): `/%processing_options/%encoded_url.%extension` or `/%processing_options/plain/%plain_url@%extension`;
|
||||||
* For [advanced URL format](generating_the_url_advanced.md): `/%processing_options/%encoded_url.%extension` or `/%processing_options/plain/%plain_url@%extension`;
|
* For [info URLs](getting_the_image_info.md): `/%encoded_url` or `/plain/%plain_url`;
|
||||||
* For [info URL](getting_the_image_info.md): `/%encoded_url` or `/plain/%plain_url`;
|
|
||||||
* Add salt to the beginning;
|
* Add salt to the beginning;
|
||||||
* Calculate the HMAC digest using SHA256;
|
* Calculate the HMAC digest using SHA256;
|
||||||
* Encode the result with URL-safe Base64.
|
* Encode the result with URL-safe Base64.
|
||||||
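A minimal Go sketch of these steps. It assumes the default, untruncated signature size; the key/salt values are the hex-encoded `secret`/`hello` pair used in the example below, and the path is just one possible processing URL. See the `examples/` directory for complete, language-specific implementations.

```go
package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
	"encoding/hex"
	"fmt"
)

// signPath implements the steps above: prepend the salt to the path,
// take the HMAC-SHA256 digest with the key, then encode it with
// URL-safe Base64 without padding.
func signPath(keyHex, saltHex, path string) (string, error) {
	key, err := hex.DecodeString(keyHex)
	if err != nil {
		return "", err
	}
	salt, err := hex.DecodeString(saltHex)
	if err != nil {
		return "", err
	}

	mac := hmac.New(sha256.New, key)
	mac.Write(salt)
	mac.Write([]byte(path))

	return base64.RawURLEncoding.EncodeToString(mac.Sum(nil)), nil
}

func main() {
	path := "/rs:fill:300:400:0/g:sm/plain/http://example.com/images/curiosity.jpg@png"
	signature, err := signPath("736563726574", "68656C6C6F", path)
	if err != nil {
		panic(err)
	}
	fmt.Println("/" + signature + path)
}
```
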
@ -38,7 +37,7 @@ And here is a step-by-step example of calculating the URL signature:
|
|||||||
Assume that you have the following unsigned URL:
|
Assume that you have the following unsigned URL:
|
||||||
|
|
||||||
```
|
```
|
||||||
http://imgproxy.example.com/insecure/fill/300/400/sm/0/aHR0cDovL2V4YW1w/bGUuY29tL2ltYWdl/cy9jdXJpb3NpdHku/anBn.png
|
http://imgproxy.example.com/insecure/rs:fill:300:400:0/g:sm/aHR0cDovL2V4YW1w/bGUuY29tL2ltYWdl/cy9jdXJpb3NpdHku/anBn.png
|
||||||
```
|
```
|
||||||
|
|
||||||
To sign it, you need to configure imgproxy to use your key/salt pair. Let's say, your key and salt are `secret` and `hello` — that translates to `736563726574` and `68656C6C6F` in hex encoding. This key/salt pair is quite weak for production use but will do for this example. Run your imgproxy using this key/salt pair:
|
To sign it, you need to configure imgproxy to use your key/salt pair. Let's say, your key and salt are `secret` and `hello` — that translates to `736563726574` and `68656C6C6F` in hex encoding. This key/salt pair is quite weak for production use but will do for this example. Run your imgproxy using this key/salt pair:
|
||||||
@ -52,19 +51,19 @@ Note that all your unsigned URL will stop working since imgproxy now checks sign
|
|||||||
First, you need to take the path after the signature and add the salt to the beginning:
|
First, you need to take the path after the signature and add the salt to the beginning:
|
||||||
|
|
||||||
```
|
```
|
||||||
hello/fill/300/400/sm/0/aHR0cDovL2V4YW1w/bGUuY29tL2ltYWdl/cy9jdXJpb3NpdHku/anBn.png
|
hello/rs:fill:300:400:0/g:sm/aHR0cDovL2V4YW1w/bGUuY29tL2ltYWdl/cy9jdXJpb3NpdHku/anBn.png
|
||||||
```
|
```
|
||||||
|
|
||||||
Then calculate the HMAC digest of this string using SHA256 and encode it with URL-safe Base64:
|
Then calculate the HMAC digest of this string using SHA256 and encode it with URL-safe Base64:
|
||||||
|
|
||||||
```
|
```
|
||||||
AfrOrF3gWeDA6VOlDG4TzxMv39O7MXnF4CXpKUwGqRM
|
oKfUtW34Dvo2BGQehJFR4Nr0_rIjOtdtzJ3QFsUcXH8
|
||||||
```
|
```
|
||||||
|
|
||||||
And finally put the signature to your URL:
|
And finally put the signature to your URL:
|
||||||
|
|
||||||
```
|
```
|
||||||
http://imgproxy.example.com/AfrOrF3gWeDA6VOlDG4TzxMv39O7MXnF4CXpKUwGqRM/fill/300/400/sm/0/aHR0cDovL2V4YW1w/bGUuY29tL2ltYWdl/cy9jdXJpb3NpdHku/anBn.png
|
http://imgproxy.example.com/oKfUtW34Dvo2BGQehJFR4Nr0_rIjOtdtzJ3QFsUcXH8/rs:fill:300:400:0/g:sm/aHR0cDovL2V4YW1w/bGUuY29tL2ltYWdl/cy9jdXJpb3NpdHku/anBn.png
|
||||||
```
|
```
|
||||||
|
|
||||||
Now you've got a URL that you can use to resize the image securely.
|
Now you've got a URL that you can use to resize the image securely.
|
||||||
|
@ -19,7 +19,7 @@ You can also specify the base opacity of watermark with `IMGPROXY_WATERMARK_OPAC
|
|||||||
|
|
||||||
## Watermarking an image
|
## Watermarking an image
|
||||||
|
|
||||||
Watermarks are only available with [advanced URL format](generating_the_url_advanced.md). Use `watermark` processing option to put the watermark on the processed image:
|
Use the `watermark` processing option to put the watermark on the processed image:
|
||||||
|
|
||||||
```
|
```
|
||||||
watermark:%opacity:%position:%x_offset:%y_offset:%scale
|
watermark:%opacity:%position:%x_offset:%y_offset:%scale
|
||||||
@ -43,7 +43,7 @@ Where arguments are:
|
|||||||
* `x_offset`, `y_offset` - (optional) specify watermark offset by X and Y axes. Not applicable to `re` position;
|
* `x_offset`, `y_offset` - (optional) specify watermark offset by X and Y axes. Not applicable to `re` position;
|
||||||
* `scale` - (optional) floating point number that defines watermark size relative to the resulting image size. When set to `0` or omitted, watermark size won't be changed.
|
* `scale` - (optional) floating point number that defines watermark size relative to the resulting image size. When set to `0` or omitted, watermark size won't be changed.
|
||||||
|
|
||||||
## Custom watermarks<img class='pro-badge' src='assets/pro.svg' alt='pro' /> :id=custom-watermarks
|
## Custom watermarks<i class='badge badge-pro'></i> :id=custom-watermarks
|
||||||
|
|
||||||
You can use a custom watermark specifying its URL with `watermark_url` processing option:
|
You can use a custom watermark specifying its URL with `watermark_url` processing option:
|
||||||
|
|
||||||
|
255
download.go
255
download.go
@ -1,255 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"compress/gzip"
|
|
||||||
"context"
|
|
||||||
"crypto/tls"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"io/ioutil"
|
|
||||||
"net"
|
|
||||||
"net/http"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/imgproxy/imgproxy/v2/imagemeta"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
downloadClient *http.Client
|
|
||||||
|
|
||||||
imageDataCtxKey = ctxKey("imageData")
|
|
||||||
cacheControlHeaderCtxKey = ctxKey("cacheControlHeader")
|
|
||||||
expiresHeaderCtxKey = ctxKey("expiresHeader")
|
|
||||||
|
|
||||||
errSourceDimensionsTooBig = newError(422, "Source image dimensions are too big", "Invalid source image")
|
|
||||||
errSourceResolutionTooBig = newError(422, "Source image resolution is too big", "Invalid source image")
|
|
||||||
errSourceFileTooBig = newError(422, "Source image file is too big", "Invalid source image")
|
|
||||||
errSourceImageTypeNotSupported = newError(422, "Source image type not supported", "Invalid source image")
|
|
||||||
)
|
|
||||||
|
|
||||||
const msgSourceImageIsUnreachable = "Source image is unreachable"
|
|
||||||
|
|
||||||
var downloadBufPool *bufPool
|
|
||||||
|
|
||||||
type limitReader struct {
|
|
||||||
r io.Reader
|
|
||||||
left int
|
|
||||||
}
|
|
||||||
|
|
||||||
func (lr *limitReader) Read(p []byte) (n int, err error) {
|
|
||||||
n, err = lr.r.Read(p)
|
|
||||||
lr.left -= n
|
|
||||||
|
|
||||||
if err == nil && lr.left < 0 {
|
|
||||||
err = errSourceFileTooBig
|
|
||||||
}
|
|
||||||
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
func initDownloading() error {
|
|
||||||
transport := &http.Transport{
|
|
||||||
Proxy: http.ProxyFromEnvironment,
|
|
||||||
MaxIdleConns: conf.Concurrency,
|
|
||||||
MaxIdleConnsPerHost: conf.Concurrency,
|
|
||||||
DisableCompression: true,
|
|
||||||
DialContext: (&net.Dialer{KeepAlive: 600 * time.Second}).DialContext,
|
|
||||||
}
|
|
||||||
|
|
||||||
if conf.IgnoreSslVerification {
|
|
||||||
transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
|
|
||||||
}
|
|
||||||
|
|
||||||
if conf.LocalFileSystemRoot != "" {
|
|
||||||
transport.RegisterProtocol("local", newFsTransport())
|
|
||||||
}
|
|
||||||
|
|
||||||
if conf.S3Enabled {
|
|
||||||
if t, err := newS3Transport(); err != nil {
|
|
||||||
return err
|
|
||||||
} else {
|
|
||||||
transport.RegisterProtocol("s3", t)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if conf.GCSEnabled {
|
|
||||||
if t, err := newGCSTransport(); err != nil {
|
|
||||||
return err
|
|
||||||
} else {
|
|
||||||
transport.RegisterProtocol("gs", t)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if conf.ABSEnabled {
|
|
||||||
if t, err := newAzureTransport(); err != nil {
|
|
||||||
return err
|
|
||||||
} else {
|
|
||||||
transport.RegisterProtocol("abs", t)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
downloadClient = &http.Client{
|
|
||||||
Timeout: time.Duration(conf.DownloadTimeout) * time.Second,
|
|
||||||
Transport: transport,
|
|
||||||
}
|
|
||||||
|
|
||||||
downloadBufPool = newBufPool("download", conf.Concurrency, conf.DownloadBufferSize)
|
|
||||||
|
|
||||||
imagemeta.SetMaxSvgCheckRead(conf.MaxSvgCheckBytes)
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type httpError interface {
|
|
||||||
Timeout() bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func checkTimeoutErr(err error) error {
|
|
||||||
if httpErr, ok := err.(httpError); ok && httpErr.Timeout() {
|
|
||||||
return errors.New("The image request timed out")
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func checkDimensions(width, height int) error {
|
|
||||||
if conf.MaxSrcDimension > 0 && (width > conf.MaxSrcDimension || height > conf.MaxSrcDimension) {
|
|
||||||
return errSourceDimensionsTooBig
|
|
||||||
}
|
|
||||||
|
|
||||||
if width*height > conf.MaxSrcResolution {
|
|
||||||
return errSourceResolutionTooBig
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func checkTypeAndDimensions(r io.Reader) (imageType, error) {
|
|
||||||
meta, err := imagemeta.DecodeMeta(r)
|
|
||||||
if err == imagemeta.ErrFormat {
|
|
||||||
return imageTypeUnknown, errSourceImageTypeNotSupported
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
return imageTypeUnknown, newUnexpectedError(checkTimeoutErr(err).Error(), 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
imgtype, imgtypeOk := imageTypes[meta.Format()]
|
|
||||||
if !imgtypeOk || !imageTypeLoadSupport(imgtype) {
|
|
||||||
return imageTypeUnknown, errSourceImageTypeNotSupported
|
|
||||||
}
|
|
||||||
|
|
||||||
if err = checkDimensions(meta.Width(), meta.Height()); err != nil {
|
|
||||||
return imageTypeUnknown, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return imgtype, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func readAndCheckImage(r io.Reader, contentLength int) (*imageData, error) {
|
|
||||||
if conf.MaxSrcFileSize > 0 && contentLength > conf.MaxSrcFileSize {
|
|
||||||
return nil, errSourceFileTooBig
|
|
||||||
}
|
|
||||||
|
|
||||||
buf := downloadBufPool.Get(contentLength)
|
|
||||||
cancel := func() { downloadBufPool.Put(buf) }
|
|
||||||
|
|
||||||
if conf.MaxSrcFileSize > 0 {
|
|
||||||
r = &limitReader{r: r, left: conf.MaxSrcFileSize}
|
|
||||||
}
|
|
||||||
|
|
||||||
imgtype, err := checkTypeAndDimensions(io.TeeReader(r, buf))
|
|
||||||
if err != nil {
|
|
||||||
cancel()
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, err = buf.ReadFrom(r); err != nil {
|
|
||||||
cancel()
|
|
||||||
return nil, newError(404, checkTimeoutErr(err).Error(), msgSourceImageIsUnreachable)
|
|
||||||
}
|
|
||||||
|
|
||||||
return &imageData{buf.Bytes(), imgtype, cancel}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func requestImage(imageURL string) (*http.Response, error) {
|
|
||||||
req, err := http.NewRequest("GET", imageURL, nil)
|
|
||||||
if err != nil {
|
|
||||||
return nil, newError(404, err.Error(), msgSourceImageIsUnreachable).SetUnexpected(conf.ReportDownloadingErrors)
|
|
||||||
}
|
|
||||||
|
|
||||||
req.Header.Set("User-Agent", conf.UserAgent)
|
|
||||||
|
|
||||||
res, err := downloadClient.Do(req)
|
|
||||||
if err != nil {
|
|
||||||
return res, newError(404, checkTimeoutErr(err).Error(), msgSourceImageIsUnreachable).SetUnexpected(conf.ReportDownloadingErrors)
|
|
||||||
}
|
|
||||||
|
|
||||||
if res.StatusCode != 200 {
|
|
||||||
body, _ := ioutil.ReadAll(res.Body)
|
|
||||||
msg := fmt.Sprintf("Can't download image; Status: %d; %s", res.StatusCode, string(body))
|
|
||||||
return res, newError(404, msg, msgSourceImageIsUnreachable).SetUnexpected(conf.ReportDownloadingErrors)
|
|
||||||
}
|
|
||||||
|
|
||||||
return res, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func downloadImage(ctx context.Context) (context.Context, context.CancelFunc, error) {
|
|
||||||
imageURL := getImageURL(ctx)
|
|
||||||
|
|
||||||
if newRelicEnabled {
|
|
||||||
newRelicCancel := startNewRelicSegment(ctx, "Downloading image")
|
|
||||||
defer newRelicCancel()
|
|
||||||
}
|
|
||||||
|
|
||||||
if prometheusEnabled {
|
|
||||||
defer startPrometheusDuration(prometheusDownloadDuration)()
|
|
||||||
}
|
|
||||||
|
|
||||||
res, err := requestImage(imageURL)
|
|
||||||
if res != nil {
|
|
||||||
defer res.Body.Close()
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
return ctx, func() {}, err
|
|
||||||
}
|
|
||||||
|
|
||||||
body := res.Body
|
|
||||||
contentLength := int(res.ContentLength)
|
|
||||||
|
|
||||||
if res.Header.Get("Content-Encoding") == "gzip" {
|
|
||||||
gzipBody, errGzip := gzip.NewReader(res.Body)
|
|
||||||
if gzipBody != nil {
|
|
||||||
defer gzipBody.Close()
|
|
||||||
}
|
|
||||||
if errGzip != nil {
|
|
||||||
return ctx, func() {}, err
|
|
||||||
}
|
|
||||||
body = gzipBody
|
|
||||||
contentLength = 0
|
|
||||||
}
|
|
||||||
|
|
||||||
imgdata, err := readAndCheckImage(body, contentLength)
|
|
||||||
if err != nil {
|
|
||||||
return ctx, func() {}, err
|
|
||||||
}
|
|
||||||
|
|
||||||
ctx = context.WithValue(ctx, imageDataCtxKey, imgdata)
|
|
||||||
ctx = context.WithValue(ctx, cacheControlHeaderCtxKey, res.Header.Get("Cache-Control"))
|
|
||||||
ctx = context.WithValue(ctx, expiresHeaderCtxKey, res.Header.Get("Expires"))
|
|
||||||
|
|
||||||
return ctx, imgdata.Close, err
|
|
||||||
}
|
|
||||||
|
|
||||||
func getImageData(ctx context.Context) *imageData {
|
|
||||||
return ctx.Value(imageDataCtxKey).(*imageData)
|
|
||||||
}
|
|
||||||
|
|
||||||
func getCacheControlHeader(ctx context.Context) string {
|
|
||||||
str, _ := ctx.Value(cacheControlHeaderCtxKey).(string)
|
|
||||||
return str
|
|
||||||
}
|
|
||||||
|
|
||||||
func getExpiresHeader(ctx context.Context) string {
|
|
||||||
str, _ := ctx.Value(expiresHeaderCtxKey).(string)
|
|
||||||
return str
|
|
||||||
}
|
|
32
errorreport/airbrake/airbrake.go
Normal file
32
errorreport/airbrake/airbrake.go
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
package airbrake
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/airbrake/gobrake/v5"
|
||||||
|
"github.com/imgproxy/imgproxy/v3/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
var notifier *gobrake.Notifier
|
||||||
|
|
||||||
|
func Init() {
|
||||||
|
if len(config.AirbrakeProjecKey) > 0 {
|
||||||
|
notifier = gobrake.NewNotifierWithOptions(&gobrake.NotifierOptions{
|
||||||
|
ProjectId: int64(config.AirbrakeProjecID),
|
||||||
|
ProjectKey: config.AirbrakeProjecKey,
|
||||||
|
Environment: config.AirbrakeEnv,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func Report(err error, req *http.Request) {
|
||||||
|
if notifier != nil {
|
||||||
|
notifier.Notify(err, req)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func Close() {
|
||||||
|
if notifier != nil {
|
||||||
|
notifier.Close()
|
||||||
|
}
|
||||||
|
}
|
26
errorreport/bugsnag/bugsnag.go
Normal file
26
errorreport/bugsnag/bugsnag.go
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
package bugsnag
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/bugsnag/bugsnag-go/v2"
|
||||||
|
"github.com/imgproxy/imgproxy/v3/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
var enabled bool
|
||||||
|
|
||||||
|
func Init() {
|
||||||
|
if len(config.BugsnagKey) > 0 {
|
||||||
|
bugsnag.Configure(bugsnag.Configuration{
|
||||||
|
APIKey: config.BugsnagKey,
|
||||||
|
ReleaseStage: config.BugsnagStage,
|
||||||
|
})
|
||||||
|
enabled = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func Report(err error, req *http.Request) {
|
||||||
|
if enabled {
|
||||||
|
bugsnag.Notify(err, req)
|
||||||
|
}
|
||||||
|
}
|
28
errorreport/errorreport.go
Normal file
28
errorreport/errorreport.go
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
package errorreport
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/imgproxy/imgproxy/v3/errorreport/airbrake"
|
||||||
|
"github.com/imgproxy/imgproxy/v3/errorreport/bugsnag"
|
||||||
|
"github.com/imgproxy/imgproxy/v3/errorreport/honeybadger"
|
||||||
|
"github.com/imgproxy/imgproxy/v3/errorreport/sentry"
|
||||||
|
)
|
||||||
|
|
||||||
|
func Init() {
|
||||||
|
bugsnag.Init()
|
||||||
|
honeybadger.Init()
|
||||||
|
sentry.Init()
|
||||||
|
airbrake.Init()
|
||||||
|
}
|
||||||
|
|
||||||
|
func Report(err error, req *http.Request) {
|
||||||
|
bugsnag.Report(err, req)
|
||||||
|
honeybadger.Report(err, req)
|
||||||
|
sentry.Report(err, req)
|
||||||
|
airbrake.Report(err, req)
|
||||||
|
}
|
||||||
|
|
||||||
|
func Close() {
|
||||||
|
airbrake.Close()
|
||||||
|
}
|
38
errorreport/honeybadger/honeybadger.go
Normal file
38
errorreport/honeybadger/honeybadger.go
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
package honeybadger
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/honeybadger-io/honeybadger-go"
|
||||||
|
"github.com/imgproxy/imgproxy/v3/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
enabled bool
|
||||||
|
|
||||||
|
headersReplacer = strings.NewReplacer("-", "_")
|
||||||
|
)
|
||||||
|
|
||||||
|
func Init() {
|
||||||
|
if len(config.HoneybadgerKey) > 0 {
|
||||||
|
honeybadger.Configure(honeybadger.Configuration{
|
||||||
|
APIKey: config.HoneybadgerKey,
|
||||||
|
Env: config.HoneybadgerEnv,
|
||||||
|
})
|
||||||
|
enabled = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func Report(err error, req *http.Request) {
|
||||||
|
if enabled {
|
||||||
|
headers := make(honeybadger.CGIData)
|
||||||
|
|
||||||
|
for k, v := range req.Header {
|
||||||
|
key := "HTTP_" + headersReplacer.Replace(strings.ToUpper(k))
|
||||||
|
headers[key] = v[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
honeybadger.Notify(err, req.URL, headers)
|
||||||
|
}
|
||||||
|
}
|
39
errorreport/sentry/sentry.go
Normal file
39
errorreport/sentry/sentry.go
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
package sentry
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/getsentry/sentry-go"
|
||||||
|
"github.com/imgproxy/imgproxy/v3/config"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
enabled bool
|
||||||
|
|
||||||
|
timeout = 5 * time.Second
|
||||||
|
)
|
||||||
|
|
||||||
|
func Init() {
|
||||||
|
if len(config.SentryDSN) > 0 {
|
||||||
|
sentry.Init(sentry.ClientOptions{
|
||||||
|
Dsn: config.SentryDSN,
|
||||||
|
Release: config.SentryRelease,
|
||||||
|
Environment: config.SentryEnvironment,
|
||||||
|
})
|
||||||
|
|
||||||
|
enabled = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func Report(err error, req *http.Request) {
|
||||||
|
if enabled {
|
||||||
|
hub := sentry.CurrentHub().Clone()
|
||||||
|
hub.Scope().SetRequest(req)
|
||||||
|
hub.Scope().SetLevel(sentry.LevelError)
|
||||||
|
eventID := hub.CaptureException(err)
|
||||||
|
if eventID != nil {
|
||||||
|
hub.Flush(timeout)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -1,98 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net/http"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/airbrake/gobrake/v5"
|
|
||||||
"github.com/bugsnag/bugsnag-go/v2"
|
|
||||||
"github.com/getsentry/sentry-go"
|
|
||||||
"github.com/honeybadger-io/honeybadger-go"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
bugsnagEnabled bool
|
|
||||||
honeybadgerEnabled bool
|
|
||||||
sentryEnabled bool
|
|
||||||
airbrakeEnabled bool
|
|
||||||
airbrake *gobrake.Notifier
|
|
||||||
|
|
||||||
headersReplacer = strings.NewReplacer("-", "_")
|
|
||||||
sentryTimeout = 5 * time.Second
|
|
||||||
)
|
|
||||||
|
|
||||||
func initErrorsReporting() {
|
|
||||||
if len(conf.BugsnagKey) > 0 {
|
|
||||||
bugsnag.Configure(bugsnag.Configuration{
|
|
||||||
APIKey: conf.BugsnagKey,
|
|
||||||
ReleaseStage: conf.BugsnagStage,
|
|
||||||
})
|
|
||||||
bugsnagEnabled = true
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(conf.HoneybadgerKey) > 0 {
|
|
||||||
honeybadger.Configure(honeybadger.Configuration{
|
|
||||||
APIKey: conf.HoneybadgerKey,
|
|
||||||
Env: conf.HoneybadgerEnv,
|
|
||||||
})
|
|
||||||
honeybadgerEnabled = true
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(conf.SentryDSN) > 0 {
|
|
||||||
sentry.Init(sentry.ClientOptions{
|
|
||||||
Dsn: conf.SentryDSN,
|
|
||||||
Release: conf.SentryRelease,
|
|
||||||
Environment: conf.SentryEnvironment,
|
|
||||||
})
|
|
||||||
|
|
||||||
sentryEnabled = true
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(conf.AirbrakeProjecKey) > 0 {
|
|
||||||
airbrake = gobrake.NewNotifierWithOptions(&gobrake.NotifierOptions{
|
|
||||||
ProjectId: int64(conf.AirbrakeProjecID),
|
|
||||||
ProjectKey: conf.AirbrakeProjecKey,
|
|
||||||
Environment: conf.AirbrakeEnv,
|
|
||||||
})
|
|
||||||
|
|
||||||
airbrakeEnabled = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func closeErrorsReporting() {
|
|
||||||
if airbrake != nil {
|
|
||||||
airbrake.Close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func reportError(err error, req *http.Request) {
|
|
||||||
if bugsnagEnabled {
|
|
||||||
bugsnag.Notify(err, req)
|
|
||||||
}
|
|
||||||
|
|
||||||
if honeybadgerEnabled {
|
|
||||||
headers := make(honeybadger.CGIData)
|
|
||||||
|
|
||||||
for k, v := range req.Header {
|
|
||||||
key := "HTTP_" + headersReplacer.Replace(strings.ToUpper(k))
|
|
||||||
headers[key] = v[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
honeybadger.Notify(err, req.URL, headers)
|
|
||||||
}
|
|
||||||
|
|
||||||
if sentryEnabled {
|
|
||||||
hub := sentry.CurrentHub().Clone()
|
|
||||||
hub.Scope().SetRequest(req)
|
|
||||||
hub.Scope().SetLevel(sentry.LevelError)
|
|
||||||
eventID := hub.CaptureException(err)
|
|
||||||
if eventID != nil {
|
|
||||||
hub.Flush(sentryTimeout)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if airbrakeEnabled {
|
|
||||||
airbrake.Notify(err, req)
|
|
||||||
}
|
|
||||||
}
|
|
44
etag.go
44
etag.go
@ -1,44 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"crypto/sha256"
|
|
||||||
"encoding/hex"
|
|
||||||
"encoding/json"
|
|
||||||
"hash"
|
|
||||||
"sync"
|
|
||||||
)
|
|
||||||
|
|
||||||
type eTagCalc struct {
|
|
||||||
hash hash.Hash
|
|
||||||
enc *json.Encoder
|
|
||||||
}
|
|
||||||
|
|
||||||
var eTagCalcPool = sync.Pool{
|
|
||||||
New: func() interface{} {
|
|
||||||
h := sha256.New()
|
|
||||||
|
|
||||||
enc := json.NewEncoder(h)
|
|
||||||
enc.SetEscapeHTML(false)
|
|
||||||
enc.SetIndent("", "")
|
|
||||||
|
|
||||||
return &eTagCalc{h, enc}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
func calcETag(ctx context.Context) string {
|
|
||||||
c := eTagCalcPool.Get().(*eTagCalc)
|
|
||||||
defer eTagCalcPool.Put(c)
|
|
||||||
|
|
||||||
c.hash.Reset()
|
|
||||||
c.hash.Write(getImageData(ctx).Data)
|
|
||||||
footprint := c.hash.Sum(nil)
|
|
||||||
|
|
||||||
c.hash.Reset()
|
|
||||||
c.hash.Write(footprint)
|
|
||||||
c.hash.Write([]byte(version))
|
|
||||||
c.enc.Encode(conf)
|
|
||||||
c.enc.Encode(getProcessingOptions(ctx))
|
|
||||||
|
|
||||||
return hex.EncodeToString(c.hash.Sum(nil))
|
|
||||||
}
|
|
153
etag/etag.go
Normal file
153
etag/etag.go
Normal file
@ -0,0 +1,153 @@
|
|||||||
|
package etag
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/sha256"
|
||||||
|
"encoding/base64"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"hash"
|
||||||
|
"net/textproto"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"github.com/imgproxy/imgproxy/v3/imagedata"
|
||||||
|
"github.com/imgproxy/imgproxy/v3/options"
|
||||||
|
"github.com/imgproxy/imgproxy/v3/version"
|
||||||
|
)
|
||||||
|
|
||||||
|
type eTagCalc struct {
|
||||||
|
hash hash.Hash
|
||||||
|
enc *json.Encoder
|
||||||
|
}
|
||||||
|
|
||||||
|
var eTagCalcPool = sync.Pool{
|
||||||
|
New: func() interface{} {
|
||||||
|
h := sha256.New()
|
||||||
|
|
||||||
|
enc := json.NewEncoder(h)
|
||||||
|
enc.SetEscapeHTML(false)
|
||||||
|
enc.SetIndent("", "")
|
||||||
|
|
||||||
|
return &eTagCalc{h, enc}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
type Handler struct {
|
||||||
|
poHashActual, poHashExpected string
|
||||||
|
|
||||||
|
imgEtagActual, imgEtagExpected string
|
||||||
|
imgHashActual, imgHashExpected string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *Handler) ParseExpectedETag(etag string) {
|
||||||
|
// We support only a single ETag value
|
||||||
|
if i := strings.IndexByte(etag, ','); i >= 0 {
|
||||||
|
etag = textproto.TrimString(etag[:i])
|
||||||
|
}
|
||||||
|
|
||||||
|
etagLen := len(etag)
|
||||||
|
|
||||||
|
// ETag is empty or invalid
|
||||||
|
if etagLen < 2 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// We support strong ETags only
|
||||||
|
if etag[0] != '"' || etag[etagLen-1] != '"' {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove quotes
|
||||||
|
etag = etag[1 : etagLen-1]
|
||||||
|
|
||||||
|
i := strings.Index(etag, "/")
|
||||||
|
if i < 0 || i > etagLen-3 {
|
||||||
|
// Doesn't look like imgproxy ETag
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
poPart, imgPartMark, imgPart := etag[:i], etag[i+1], etag[i+2:]
|
||||||
|
|
||||||
|
switch imgPartMark {
|
||||||
|
case 'R':
|
||||||
|
imgPartDec, err := base64.RawStdEncoding.DecodeString(imgPart)
|
||||||
|
if err == nil {
|
||||||
|
h.imgEtagExpected = string(imgPartDec)
|
||||||
|
}
|
||||||
|
case 'D':
|
||||||
|
h.imgHashExpected = imgPart
|
||||||
|
default:
|
||||||
|
// Unknown image part mark
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
h.poHashExpected = poPart
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *Handler) ProcessingOptionsMatch() bool {
|
||||||
|
return h.poHashActual == h.poHashExpected
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *Handler) SetActualProcessingOptions(po *options.ProcessingOptions) bool {
|
||||||
|
c := eTagCalcPool.Get().(*eTagCalc)
|
||||||
|
defer eTagCalcPool.Put(c)
|
||||||
|
|
||||||
|
c.hash.Reset()
|
||||||
|
c.hash.Write([]byte(version.Version()))
|
||||||
|
c.enc.Encode(po)
|
||||||
|
|
||||||
|
h.poHashActual = base64.RawURLEncoding.EncodeToString(c.hash.Sum(nil))
|
||||||
|
|
||||||
|
return h.ProcessingOptionsMatch()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *Handler) ImageEtagExpected() string {
|
||||||
|
return h.imgEtagExpected
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *Handler) SetActualImageData(imgdata *imagedata.ImageData) bool {
|
||||||
|
var haveActualImgETag bool
|
||||||
|
h.imgEtagActual, haveActualImgETag = imgdata.Headers["ETag"]
|
||||||
|
haveActualImgETag = haveActualImgETag && len(h.imgEtagActual) > 0
|
||||||
|
|
||||||
|
// Just in case server didn't check ETag properly and returned the same one
|
||||||
|
// as we expected
|
||||||
|
if haveActualImgETag && h.imgEtagExpected == h.imgEtagActual {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
haveExpectedImgHash := len(h.imgHashExpected) != 0
|
||||||
|
|
||||||
|
if !haveActualImgETag || haveExpectedImgHash {
|
||||||
|
c := eTagCalcPool.Get().(*eTagCalc)
|
||||||
|
defer eTagCalcPool.Put(c)
|
||||||
|
|
||||||
|
c.hash.Reset()
|
||||||
|
c.hash.Write(imgdata.Data)
|
||||||
|
|
||||||
|
h.imgHashActual = base64.RawURLEncoding.EncodeToString(c.hash.Sum(nil))
|
||||||
|
|
||||||
|
return haveExpectedImgHash && h.imgHashActual == h.imgHashExpected
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *Handler) GenerateActualETag() string {
|
||||||
|
return h.generate(h.poHashActual, h.imgEtagActual, h.imgHashActual)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *Handler) GenerateExpectedETag() string {
|
||||||
|
return h.generate(h.poHashExpected, h.imgEtagExpected, h.imgHashExpected)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *Handler) generate(poHash, imgEtag, imgHash string) string {
|
||||||
|
imgPartMark := 'D'
|
||||||
|
imgPart := imgHash
|
||||||
|
if len(imgEtag) != 0 {
|
||||||
|
imgPartMark = 'R'
|
||||||
|
imgPart = base64.RawURLEncoding.EncodeToString([]byte(imgEtag))
|
||||||
|
}
|
||||||
|
|
||||||
|
return fmt.Sprintf(`"%s/%c%s"`, poHash, imgPartMark, imgPart)
|
||||||
|
}
|
142
etag/etag_test.go
Normal file
142
etag/etag_test.go
Normal file
@ -0,0 +1,142 @@
|
|||||||
|
package etag
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/imgproxy/imgproxy/v3/imagedata"
|
||||||
|
"github.com/imgproxy/imgproxy/v3/options"
|
||||||
|
"github.com/sirupsen/logrus"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/suite"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
po = options.NewProcessingOptions()
|
||||||
|
|
||||||
|
imgWithETag = imagedata.ImageData{
|
||||||
|
Data: []byte("Hello Test"),
|
||||||
|
Headers: map[string]string{"ETag": `"loremipsumdolor"`},
|
||||||
|
}
|
||||||
|
imgWithoutETag = imagedata.ImageData{
|
||||||
|
Data: []byte("Hello Test"),
|
||||||
|
}
|
||||||
|
|
||||||
|
etagReq string
|
||||||
|
etagData string
|
||||||
|
)
|
||||||
|
|
||||||
|
type EtagTestSuite struct {
|
||||||
|
suite.Suite
|
||||||
|
|
||||||
|
h Handler
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *EtagTestSuite) SetupSuite() {
|
||||||
|
logrus.SetOutput(ioutil.Discard)
|
||||||
|
|
||||||
|
s.h.SetActualProcessingOptions(po)
|
||||||
|
s.h.SetActualImageData(&imgWithETag)
|
||||||
|
etagReq = s.h.GenerateActualETag()
|
||||||
|
|
||||||
|
s.h.SetActualImageData(&imgWithoutETag)
|
||||||
|
etagData = s.h.GenerateActualETag()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *EtagTestSuite) TearDownSuite() {
|
||||||
|
logrus.SetOutput(os.Stdout)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *EtagTestSuite) SetupTest() {
|
||||||
|
s.h = Handler{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *EtagTestSuite) TestGenerateActualReq() {
|
||||||
|
s.h.SetActualProcessingOptions(po)
|
||||||
|
s.h.SetActualImageData(&imgWithETag)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), etagReq, s.h.GenerateActualETag())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *EtagTestSuite) TestGenerateActualData() {
|
||||||
|
s.h.SetActualProcessingOptions(po)
|
||||||
|
s.h.SetActualImageData(&imgWithoutETag)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), etagData, s.h.GenerateActualETag())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *EtagTestSuite) TestGenerateExpectedReq() {
|
||||||
|
s.h.ParseExpectedETag(etagReq)
|
||||||
|
assert.Equal(s.T(), etagReq, s.h.GenerateExpectedETag())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *EtagTestSuite) TestGenerateExpectedData() {
|
||||||
|
s.h.ParseExpectedETag(etagData)
|
||||||
|
assert.Equal(s.T(), etagData, s.h.GenerateExpectedETag())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *EtagTestSuite) TestProcessingOptionsCheckSuccess() {
|
||||||
|
s.h.ParseExpectedETag(etagReq)
|
||||||
|
|
||||||
|
assert.True(s.T(), s.h.SetActualProcessingOptions(po))
|
||||||
|
assert.True(s.T(), s.h.ProcessingOptionsMatch())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *EtagTestSuite) TestProcessingOptionsCheckFailure() {
|
||||||
|
i := strings.Index(etagReq, "/")
|
||||||
|
wrongEtag := `"wrongpohash` + etagReq[i:]
|
||||||
|
|
||||||
|
s.h.ParseExpectedETag(wrongEtag)
|
||||||
|
|
||||||
|
assert.False(s.T(), s.h.SetActualProcessingOptions(po))
|
||||||
|
assert.False(s.T(), s.h.ProcessingOptionsMatch())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *EtagTestSuite) TestImageETagExpectedPresent() {
|
||||||
|
s.h.ParseExpectedETag(etagReq)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), imgWithETag.Headers["ETag"], s.h.ImageEtagExpected())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *EtagTestSuite) TestImageETagExpectedBlank() {
|
||||||
|
s.h.ParseExpectedETag(etagData)
|
||||||
|
|
||||||
|
assert.Empty(s.T(), s.h.ImageEtagExpected())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *EtagTestSuite) TestImageDataCheckDataToDataSuccess() {
|
||||||
|
s.h.ParseExpectedETag(etagData)
|
||||||
|
assert.True(s.T(), s.h.SetActualImageData(&imgWithoutETag))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *EtagTestSuite) TestImageDataCheckDataToDataFailure() {
|
||||||
|
i := strings.Index(etagData, "/")
|
||||||
|
wrongEtag := etagData[:i] + `/Dwrongimghash"`
|
||||||
|
|
||||||
|
s.h.ParseExpectedETag(wrongEtag)
|
||||||
|
assert.False(s.T(), s.h.SetActualImageData(&imgWithoutETag))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *EtagTestSuite) TestImageDataCheckDataToReqSuccess() {
|
||||||
|
s.h.ParseExpectedETag(etagData)
|
||||||
|
assert.True(s.T(), s.h.SetActualImageData(&imgWithETag))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *EtagTestSuite) TestImageDataCheckDataToReqFailure() {
|
||||||
|
i := strings.Index(etagData, "/")
|
||||||
|
wrongEtag := etagData[:i] + `/Dwrongimghash"`
|
||||||
|
|
||||||
|
s.h.ParseExpectedETag(wrongEtag)
|
||||||
|
assert.False(s.T(), s.h.SetActualImageData(&imgWithETag))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *EtagTestSuite) TestImageDataCheckReqToDataFailure() {
|
||||||
|
s.h.ParseExpectedETag(etagReq)
|
||||||
|
assert.False(s.T(), s.h.SetActualImageData(&imgWithoutETag))
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestEtag(t *testing.T) {
|
||||||
|
suite.Run(t, new(EtagTestSuite))
|
||||||
|
}
|
@ -25,7 +25,7 @@ $extension = 'png';
|
|||||||
$url = 'http://img.example.com/pretty/image.jpg';
|
$url = 'http://img.example.com/pretty/image.jpg';
|
||||||
$encodedUrl = rtrim(strtr(base64_encode($url), '+/', '-_'), '=');
|
$encodedUrl = rtrim(strtr(base64_encode($url), '+/', '-_'), '=');
|
||||||
|
|
||||||
$path = "/{$resize}/{$width}/{$height}/{$gravity}/{$enlarge}/{$encodedUrl}.{$extension}";
|
$path = "/rs:{$resize}:{$width}:{$height}:{$enlarge}/g:{$gravity}/{$encodedUrl}.{$extension}";
|
||||||
$signature = hash_hmac('sha256', $saltBin.$path, $keyBin, true);
|
$signature = hash_hmac('sha256', $saltBin.$path, $keyBin, true);
|
||||||
$signature = pack('A'.IMGPROXY_SIGNATURE_SIZE, $signature);
|
$signature = pack('A'.IMGPROXY_SIGNATURE_SIZE, $signature);
|
||||||
$signature = rtrim(strtr(base64_encode($signature), '+/', '-_'), '=');
|
$signature = rtrim(strtr(base64_encode($signature), '+/', '-_'), '=');
|
||||||
|
@ -35,7 +35,7 @@ namespace ImgProxy.Examples
|
|||||||
var saltBin = HexadecimalStringToByteArray(salt);
|
var saltBin = HexadecimalStringToByteArray(salt);
|
||||||
|
|
||||||
var encodedUrl = EncodeBase64URLSafeString(url);
|
var encodedUrl = EncodeBase64URLSafeString(url);
|
||||||
var path = $"/{resize}/{width}/{height}/{gravity}/{enlarge}/{encodedUrl}.{extension}";
|
var path = $"/rs:{resize}:{width}:{height}:{enlarge}/g:{gravity}/{encodedUrl}.{extension}";
|
||||||
|
|
||||||
var passwordWithSaltBytes = new List<byte>();
|
var passwordWithSaltBytes = new List<byte>();
|
||||||
passwordWithSaltBytes.AddRange(saltBin);
|
passwordWithSaltBytes.AddRange(saltBin);
|
||||||
|
@ -3,30 +3,30 @@ import 'package:convert/convert.dart';
|
|||||||
import 'package:crypto/crypto.dart';
|
import 'package:crypto/crypto.dart';
|
||||||
|
|
||||||
void main() {
|
void main() {
|
||||||
var key = hex.decode("943b421c9eb07c830af81030552c86009268de4e532ba2ee2eab8247c6da0881");
|
var key = hex.decode("943b421c9eb07c830af81030552c86009268de4e532ba2ee2eab8247c6da0881");
|
||||||
var salt = hex.decode("520f986b998545b4785e0defbc4f3c1203f22de2374a3d53cb7a7fe9fea309c5");
|
var salt = hex.decode("520f986b998545b4785e0defbc4f3c1203f22de2374a3d53cb7a7fe9fea309c5");
|
||||||
var url = "http://img.example.com/pretty/image.jpg";
|
var url = "http://img.example.com/pretty/image.jpg";
|
||||||
var resizing_type = 'fill';
|
var resizing_type = 'fill';
|
||||||
var width = 300;
|
var width = 300;
|
||||||
var height = 300;
|
var height = 300;
|
||||||
var gravity = 'no';
|
var gravity = 'no';
|
||||||
var enlarge = 1;
|
var enlarge = 1;
|
||||||
var extension = 'png';
|
var extension = 'png';
|
||||||
|
|
||||||
var url_encoded = urlSafeBase64(utf8.encode(url));
|
|
||||||
|
|
||||||
var path = "/$resizing_type/$width/$height/$gravity/$enlarge/$url_encoded.$extension";
|
var url_encoded = urlSafeBase64(utf8.encode(url));
|
||||||
|
|
||||||
|
var path = "/rs:$resizing_type:$width:$height:$enlarge/g:$gravity/$url_encoded.$extension";
|
||||||
|
|
||||||
var signature = sign(salt, utf8.encode(path), key);
|
var signature = sign(salt, utf8.encode(path), key);
|
||||||
print("/$signature/$path");
|
print("/$signature/$path");
|
||||||
}
|
}
|
||||||
|
|
||||||
String urlSafeBase64(buffer) {
|
String urlSafeBase64(buffer) {
|
||||||
return base64.encode(buffer).replaceAll("=", "").replaceAll("+", "-").replaceAll("/", "_");
|
return base64.encode(buffer).replaceAll("=", "").replaceAll("+", "-").replaceAll("/", "_");
|
||||||
}
|
}
|
||||||
|
|
||||||
String sign(salt, path, key) {
|
String sign(salt, path, key) {
|
||||||
var hmac = Hmac(sha256, key);
|
var hmac = Hmac(sha256, key);
|
||||||
var digest = hmac.convert(salt + path);
|
var digest = hmac.convert(salt + path);
|
||||||
return urlSafeBase64(digest.bytes);
|
return urlSafeBase64(digest.bytes);
|
||||||
}
|
}
|
||||||
|
@ -13,11 +13,8 @@ defmodule App.Imgproxy do
|
|||||||
defp build_path(img_url, opts) do
|
defp build_path(img_url, opts) do
|
||||||
Path.join([
|
Path.join([
|
||||||
"/",
|
"/",
|
||||||
opts.resize,
|
"rs:#{opts.resize}:#{opts.width}:#{opts.height}:#{opts.enlarge}",
|
||||||
to_string(opts.width),
|
"g:#{opts.gravity}",
|
||||||
to_string(opts.height),
|
|
||||||
opts.gravity,
|
|
||||||
to_string(opts.enlarge),
|
|
||||||
Base.url_encode64(img_url, padding: false) <> "." <> opts.extension
|
Base.url_encode64(img_url, padding: false) <> "." <> opts.extension
|
||||||
])
|
])
|
||||||
end
|
end
|
||||||
|
@ -34,7 +34,7 @@ func main() {
|
|||||||
url := "http://img.example.com/pretty/image.jpg"
|
url := "http://img.example.com/pretty/image.jpg"
|
||||||
encodedURL := base64.RawURLEncoding.EncodeToString([]byte(url))
|
encodedURL := base64.RawURLEncoding.EncodeToString([]byte(url))
|
||||||
|
|
||||||
path := fmt.Sprintf("/%s/%d/%d/%s/%d/%s.%s", resize, width, height, gravity, enlarge, encodedURL, extension)
|
path := fmt.Sprintf("/rs:%s:%d:%d:%d/g:%s/%s.%s", resize, width, height, enlarge, gravity, encodedURL, extension)
|
||||||
|
|
||||||
mac := hmac.New(sha256.New, keyBin)
|
mac := hmac.New(sha256.New, keyBin)
|
||||||
mac.Write(saltBin)
|
mac.Write(saltBin)
|
||||||
|
@ -10,7 +10,7 @@ import javax.crypto.spec.SecretKeySpec;
|
|||||||
import static org.junit.jupiter.api.Assertions.*;
|
import static org.junit.jupiter.api.Assertions.*;
|
||||||
|
|
||||||
public class ImgProxy{
|
public class ImgProxy{
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testWithJavaHmacApacheBase64ImgProxyTest() throws Exception {
|
public void testWithJavaHmacApacheBase64ImgProxyTest() throws Exception {
|
||||||
byte[] key = hexStringToByteArray("943b421c9eb07c830af81030552c86009268de4e532ba2ee2eab8247c6da0881");
|
byte[] key = hexStringToByteArray("943b421c9eb07c830af81030552c86009268de4e532ba2ee2eab8247c6da0881");
|
||||||
@ -33,7 +33,7 @@ public class ImgProxy{
|
|||||||
|
|
||||||
String encodedUrl = Base64.getUrlEncoder().withoutPadding().encodeToString(url.getBytes());
|
String encodedUrl = Base64.getUrlEncoder().withoutPadding().encodeToString(url.getBytes());
|
||||||
|
|
||||||
String path = "/" + resize + "/" + width + "/" + height + "/" + gravity + "/" + enlarge + "/" + encodedUrl + "." + extension;
|
String path = "/rs:" + resize + ":" + width + ":" + height + ":" + enlarge + "/g:" + gravity + "/" + encodedUrl + "." + extension;
|
||||||
|
|
||||||
Mac sha256HMAC = Mac.getInstance(HMACSHA256);
|
Mac sha256HMAC = Mac.getInstance(HMACSHA256);
|
||||||
SecretKeySpec secretKey = new SecretKeySpec(key, HMACSHA256);
|
SecretKeySpec secretKey = new SecretKeySpec(key, HMACSHA256);
|
||||||
|
@ -24,7 +24,7 @@ const gravity = 'no'
|
|||||||
const enlarge = 1
|
const enlarge = 1
|
||||||
const extension = 'png'
|
const extension = 'png'
|
||||||
const encoded_url = urlSafeBase64(url)
|
const encoded_url = urlSafeBase64(url)
|
||||||
const path = `/${resizing_type}/${width}/${height}/${gravity}/${enlarge}/${encoded_url}.${extension}`
|
const path = `/rs:${resizing_type}:${width}:${height}:${enlarge}/g:${gravity}/${encoded_url}.${extension}`
|
||||||
|
|
||||||
const signature = sign(SALT, path, KEY)
|
const signature = sign(SALT, path, KEY)
|
||||||
const result = `/${signature}${path}`
|
const result = `/${signature}${path}`
|
||||||
|
@ -23,7 +23,7 @@ $extension = 'png';
|
|||||||
$url = 'http://img.example.com/pretty/image.jpg';
|
$url = 'http://img.example.com/pretty/image.jpg';
|
||||||
$encodedUrl = rtrim(strtr(base64_encode($url), '+/', '-_'), '=');
|
$encodedUrl = rtrim(strtr(base64_encode($url), '+/', '-_'), '=');
|
||||||
|
|
||||||
$path = "/{$resize}/{$width}/{$height}/{$gravity}/{$enlarge}/{$encodedUrl}.{$extension}";
|
$path = "/rs:{$resize}:{$width}:{$height}:{$enlarge}/g:{$gravity}/{$encodedUrl}.{$extension}";
|
||||||
|
|
||||||
$signature = rtrim(strtr(base64_encode(hash_hmac('sha256', $saltBin.$path, $keyBin, true)), '+/', '-_'), '=');
|
$signature = rtrim(strtr(base64_encode(hash_hmac('sha256', $saltBin.$path, $keyBin, true)), '+/', '-_'), '=');
|
||||||
|
|
||||||
|
@ -14,7 +14,7 @@ encoded_url = base64.urlsafe_b64encode(url).rstrip(b"=").decode()
|
|||||||
# You can trim padding spaces to get good-looking url
|
# You can trim padding spaces to get good-looking url
|
||||||
encoded_url = '/'.join(textwrap.wrap(encoded_url, 16))
|
encoded_url = '/'.join(textwrap.wrap(encoded_url, 16))
|
||||||
|
|
||||||
path = "/{resize}/{width}/{height}/{gravity}/{enlarge}/{encoded_url}.{extension}".format(
|
path = "/rs:{resize}:{width}:{height}:{enlarge}/g:{gravity}/{encoded_url}.{extension}".format(
|
||||||
encoded_url=encoded_url,
|
encoded_url=encoded_url,
|
||||||
resize="fill",
|
resize="fill",
|
||||||
width=300,
|
width=300,
|
||||||
|
@ -16,7 +16,7 @@ gravity = "no"
|
|||||||
enlarge = 1
|
enlarge = 1
|
||||||
extension = "png"
|
extension = "png"
|
||||||
|
|
||||||
path = "/#{resize}/#{width}/#{height}/#{gravity}/#{enlarge}/#{encoded_url}.#{extension}"
|
path = "/rs:#{resize}:#{width}:#{height}:#{enlarge}/g:#{gravity}/#{encoded_url}.#{extension}"
|
||||||
|
|
||||||
digest = OpenSSL::Digest.new("sha256")
|
digest = OpenSSL::Digest.new("sha256")
|
||||||
# You can trim padding spaces to get good-looking url
|
# You can trim padding spaces to get good-looking url
|
||||||
|
@ -56,7 +56,7 @@ let originalUrl = "http://img.example.com/pretty/image.jpg";
|
|||||||
let encodedUrl = customBase64(input: Data(originalUrl.utf8))
|
let encodedUrl = customBase64(input: Data(originalUrl.utf8))
|
||||||
let format = "png";
|
let format = "png";
|
||||||
|
|
||||||
let partialPath = "/\(resizing)/\(width)/\(height)/\(gravity)/\(enlarge)/\(encodedUrl).\(format)"
|
let partialPath = "/rs:\(resizing):\(width):\(height):\(enlarge)/g:\(gravity)/\(encodedUrl).\(format)"
|
||||||
let toSign = salt + partialPath.utf8
|
let toSign = salt + partialPath.utf8
|
||||||
|
|
||||||
let signature = toSign.hmac256(key: key)
|
let signature = toSign.hmac256(key: key)
|
||||||
|
@ -1,44 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"net/http"
|
|
||||||
)
|
|
||||||
|
|
||||||
type fsTransport struct {
|
|
||||||
fs http.Dir
|
|
||||||
}
|
|
||||||
|
|
||||||
func newFsTransport() fsTransport {
|
|
||||||
return fsTransport{fs: http.Dir(conf.LocalFileSystemRoot)}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t fsTransport) RoundTrip(req *http.Request) (resp *http.Response, err error) {
|
|
||||||
f, err := t.fs.Open(req.URL.Path)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
fi, err := f.Stat()
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if fi.IsDir() {
|
|
||||||
return nil, fmt.Errorf("%s is a directory", req.URL.Path)
|
|
||||||
}
|
|
||||||
|
|
||||||
return &http.Response{
|
|
||||||
Status: "200 OK",
|
|
||||||
StatusCode: 200,
|
|
||||||
Proto: "HTTP/1.0",
|
|
||||||
ProtoMajor: 1,
|
|
||||||
ProtoMinor: 0,
|
|
||||||
Header: make(http.Header),
|
|
||||||
ContentLength: fi.Size(),
|
|
||||||
Body: f,
|
|
||||||
Close: true,
|
|
||||||
Request: req,
|
|
||||||
}, nil
|
|
||||||
}
|
|
go.mod (9 changed lines)
@@ -1,10 +1,11 @@
-module github.com/imgproxy/imgproxy/v2
+module github.com/imgproxy/imgproxy/v3

-go 1.13
+go 1.15

 require (
 	cloud.google.com/go/storage v1.15.0
 	github.com/Azure/azure-storage-blob-go v0.13.0
+	github.com/Microsoft/go-winio v0.4.16 // indirect
 	github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d // indirect
 	github.com/airbrake/gobrake/v5 v5.0.3
 	github.com/aws/aws-sdk-go v1.38.65
@@ -16,14 +17,18 @@ require (
 	github.com/ianlancetaylor/cgosymbolizer v0.0.0-20201204192058-7acc97e53614 // indirect
 	github.com/matoous/go-nanoid/v2 v2.0.0
 	github.com/newrelic/go-agent/v3 v3.13.0
+	github.com/opentracing/opentracing-go v1.2.0 // indirect
+	github.com/philhofer/fwd v1.1.1 // indirect
 	github.com/prometheus/client_golang v1.11.0
 	github.com/sirupsen/logrus v1.8.1
 	github.com/stretchr/testify v1.7.0
+	go.uber.org/automaxprocs v1.4.0 // indirect
 	golang.org/x/image v0.0.0-20201208152932-35266b937fa6
 	golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420
 	golang.org/x/sys v0.0.0-20210603125802-9665404d3644
 	golang.org/x/text v0.3.6
 	google.golang.org/api v0.48.0
+	gopkg.in/DataDog/dd-trace-go.v1 v1.29.1
 )

 replace git.apache.org/thrift.git => github.com/apache/thrift v0.0.0-20180902110319-2566ecd5d999
21
go.sum
21
go.sum
@ -59,7 +59,11 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03
|
|||||||
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
|
||||||
github.com/CloudyKit/fastprinter v0.0.0-20200109182630-33d98a066a53/go.mod h1:+3IMCy2vIlbG1XG/0ggNQv0SvxCAIpPM5b1nCz56Xno=
|
github.com/CloudyKit/fastprinter v0.0.0-20200109182630-33d98a066a53/go.mod h1:+3IMCy2vIlbG1XG/0ggNQv0SvxCAIpPM5b1nCz56Xno=
|
||||||
github.com/CloudyKit/jet/v3 v3.0.0/go.mod h1:HKQPgSJmdK8hdoAbKUUWajkHyHo4RaU5rMdUywE7VMo=
|
github.com/CloudyKit/jet/v3 v3.0.0/go.mod h1:HKQPgSJmdK8hdoAbKUUWajkHyHo4RaU5rMdUywE7VMo=
|
||||||
|
github.com/DataDog/datadog-go v4.4.0+incompatible h1:R7WqXWP4fIOAqWJtUKmSfuc7eDsBT58k9AY5WSHVosk=
|
||||||
|
github.com/DataDog/datadog-go v4.4.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
|
||||||
github.com/Joker/hpp v1.0.0/go.mod h1:8x5n+M1Hp5hC0g8okX3sR3vFQwynaX/UgSOM9MeBKzY=
|
github.com/Joker/hpp v1.0.0/go.mod h1:8x5n+M1Hp5hC0g8okX3sR3vFQwynaX/UgSOM9MeBKzY=
|
||||||
|
github.com/Microsoft/go-winio v0.4.16 h1:FtSW/jqD+l4ba5iPBj9CODVtgfYAD8w2wS923g/cFDk=
|
||||||
|
github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0=
|
||||||
github.com/Shopify/goreferrer v0.0.0-20181106222321-ec9c9a553398/go.mod h1:a1uqRtAwp2Xwc6WNPJEufxJ7fx3npB4UV/JOLmbu5I0=
|
github.com/Shopify/goreferrer v0.0.0-20181106222321-ec9c9a553398/go.mod h1:a1uqRtAwp2Xwc6WNPJEufxJ7fx3npB4UV/JOLmbu5I0=
|
||||||
github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d h1:G0m3OIz70MZUWq3EgK3CesDbo8upS2Vm9/P3FtgI+Jk=
|
github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d h1:G0m3OIz70MZUWq3EgK3CesDbo8upS2Vm9/P3FtgI+Jk=
|
||||||
github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=
|
github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=
|
||||||
@ -225,6 +229,7 @@ github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hf
|
|||||||
github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||||
github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||||
github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||||
|
github.com/google/pprof v0.0.0-20210125172800-10e9aeb4a998/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||||
github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||||
github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
|
||||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||||
@ -341,9 +346,16 @@ github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9k
|
|||||||
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
|
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
|
||||||
github.com/onsi/gomega v1.10.1 h1:o0+MgICZLuZ7xjH7Vx6zS/zcu93/BEp1VwkIW1mEXCE=
|
github.com/onsi/gomega v1.10.1 h1:o0+MgICZLuZ7xjH7Vx6zS/zcu93/BEp1VwkIW1mEXCE=
|
||||||
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
|
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
|
||||||
|
github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs=
|
||||||
|
github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc=
|
||||||
|
github.com/pborman/uuid v1.2.0 h1:J7Q5mO4ysT1dv8hyrUGHb9+ooztCXu1D8MY8DZYsu3g=
|
||||||
github.com/pborman/uuid v1.2.0 h1:J7Q5mO4ysT1dv8hyrUGHb9+ooztCXu1D8MY8DZYsu3g=
|
github.com/pborman/uuid v1.2.0 h1:J7Q5mO4ysT1dv8hyrUGHb9+ooztCXu1D8MY8DZYsu3g=
|
||||||
github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
|
github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
|
||||||
|
github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
|
||||||
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
|
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
|
||||||
|
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
|
||||||
|
github.com/philhofer/fwd v1.1.1 h1:GdGcTjf5RNAxwS4QLsiMzJYj5KEvPJD3Abr261yRQXQ=
|
||||||
|
github.com/philhofer/fwd v1.1.1/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU=
|
||||||
github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4=
|
github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4=
|
||||||
github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8=
|
github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8=
|
||||||
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
@ -380,6 +392,7 @@ github.com/shirou/gopsutil v2.20.9+incompatible h1:msXs2frUV+O/JLva9EDLpuJ84PrFs
|
|||||||
github.com/shirou/gopsutil v2.20.9+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA=
|
github.com/shirou/gopsutil v2.20.9+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA=
|
||||||
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
||||||
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
||||||
|
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
|
||||||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||||
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
|
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
|
||||||
github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=
|
github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=
|
||||||
@ -402,6 +415,8 @@ github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5
|
|||||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
|
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
|
||||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||||
|
github.com/tinylib/msgp v1.1.2 h1:gWmO7n0Ys2RBEb7GPYB9Ujq8Mk5p2U08lRnmMcGy6BQ=
|
||||||
|
github.com/tinylib/msgp v1.1.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
|
||||||
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
|
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
|
||||||
github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw=
|
github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw=
|
||||||
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
|
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
|
||||||
@ -432,6 +447,8 @@ go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
|
|||||||
go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
|
go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
|
||||||
go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M=
|
go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M=
|
||||||
go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
|
go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
|
||||||
|
go.uber.org/automaxprocs v1.4.0 h1:CpDZl6aOlLhReez+8S3eEotD7Jx0Os++lemPlMULQP0=
|
||||||
|
go.uber.org/automaxprocs v1.4.0/go.mod h1:/mTEdr7LvHhs0v7mjdxDreTz1OG5zdZGqgOnhWiR/+Q=
|
||||||
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||||
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
@ -573,6 +590,7 @@ golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7w
|
|||||||
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
@ -629,6 +647,7 @@ golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
|
|||||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
|
golang.org/x/time v0.0.0-20191024005414-555d28b269f0 h1:/5xXl8Y5W96D+TtHSlonuFqGHIWVuyCkGJLwGh9JJFs=
|
||||||
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
@ -809,6 +828,8 @@ google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlba
|
|||||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||||
google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk=
|
google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk=
|
||||||
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||||
|
gopkg.in/DataDog/dd-trace-go.v1 v1.29.1 h1:XP/LXNz3F/CBYhcD+3J3bJ9TtISIOnewXt6oIEb6ezE=
|
||||||
|
gopkg.in/DataDog/dd-trace-go.v1 v1.29.1/go.mod h1:FLwUDeuH0z5hkvgvd04/M3MHQN4AF5pQDnedeWRWvok=
|
||||||
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
|
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gzippool.go (deleted, 70 lines)
@@ -1,70 +0,0 @@
package main

import (
	"compress/gzip"
	"fmt"
	"io"
	"io/ioutil"
	"sync"
)

type gzipPool struct {
	mutex sync.Mutex
	top   *gzipPoolEntry
}

type gzipPoolEntry struct {
	gz   *gzip.Writer
	next *gzipPoolEntry
}

func newGzipPool(n int) (*gzipPool, error) {
	pool := new(gzipPool)

	for i := 0; i < n; i++ {
		if err := pool.grow(); err != nil {
			return nil, err
		}
	}

	return pool, nil
}

func (p *gzipPool) grow() error {
	gz, err := gzip.NewWriterLevel(ioutil.Discard, conf.GZipCompression)
	if err != nil {
		return fmt.Errorf("Can't init GZip compression: %s", err)
	}

	p.top = &gzipPoolEntry{
		gz:   gz,
		next: p.top,
	}

	return nil
}

func (p *gzipPool) Get(w io.Writer) *gzip.Writer {
	p.mutex.Lock()
	defer p.mutex.Unlock()

	if p.top == nil {
		p.grow()
	}

	gz := p.top.gz
	gz.Reset(w)

	p.top = p.top.next

	return gz
}

func (p *gzipPool) Put(gz *gzip.Writer) {
	p.mutex.Lock()
	defer p.mutex.Unlock()

	gz.Reset(ioutil.Discard)

	p.top = &gzipPoolEntry{gz: gz, next: p.top}
}
@@ -7,14 +7,17 @@ import (
 	"net"
 	"net/http"
 	"os"
+
+	"github.com/imgproxy/imgproxy/v3/config"
+	"github.com/imgproxy/imgproxy/v3/config/configurators"
 )

 func healthcheck() int {
-	network := conf.Network
-	bind := conf.Bind
+	network := config.Network
+	bind := config.Bind

-	strEnvConfig(&network, "IMGPROXY_NETWORK")
-	strEnvConfig(&bind, "IMGPROXY_BIND")
+	configurators.String(&network, "IMGPROXY_NETWORK")
+	configurators.String(&bind, "IMGPROXY_BIND")

 	httpc := http.Client{
 		Transport: &http.Transport{
@@ -1,4 +1,4 @@
-package main
+package ierrors

 import (
 	"fmt"
@@ -6,7 +6,7 @@ import (
 	"strings"
 )

-type imgproxyError struct {
+type Error struct {
 	StatusCode    int
 	Message       string
 	PublicMessage string
@@ -15,11 +15,11 @@ type imgproxyError struct {
 	stack []uintptr
 }

-func (e *imgproxyError) Error() string {
+func (e *Error) Error() string {
 	return e.Message
 }

-func (e *imgproxyError) FormatStack() string {
+func (e *Error) FormatStack() string {
 	if e.stack == nil {
 		return ""
 	}
@@ -27,25 +27,20 @@ func (e *imgproxyError) FormatStack() string {
 	return formatStack(e.stack)
 }

-func (e *imgproxyError) StackTrace() []uintptr {
+func (e *Error) StackTrace() []uintptr {
 	return e.stack
 }

-func (e *imgproxyError) SetUnexpected(u bool) *imgproxyError {
-	e.Unexpected = u
-	return e
-}
-
-func newError(status int, msg string, pub string) *imgproxyError {
-	return &imgproxyError{
+func New(status int, msg string, pub string) *Error {
+	return &Error{
 		StatusCode:    status,
 		Message:       msg,
 		PublicMessage: pub,
 	}
 }

-func newUnexpectedError(msg string, skip int) *imgproxyError {
-	return &imgproxyError{
+func NewUnexpected(msg string, skip int) *Error {
+	return &Error{
 		StatusCode:    500,
 		Message:       msg,
 		PublicMessage: "Internal error",
@@ -55,6 +50,22 @@ func newUnexpectedError(msg string, skip int) *imgproxyError {
 	}
 }

+func Wrap(err error, skip int) *Error {
+	if ierr, ok := err.(*Error); ok {
+		return ierr
+	}
+	return NewUnexpected(err.Error(), skip+1)
+}
+
+func WrapWithPrefix(err error, skip int, prefix string) *Error {
+	if ierr, ok := err.(*Error); ok {
+		newErr := *ierr
+		newErr.Message = fmt.Sprintf("%s: %s", prefix, ierr.Message)
+		return &newErr
+	}
+	return NewUnexpected(fmt.Sprintf("%s: %s", prefix, err), skip+1)
+}
+
 func callers(skip int) []uintptr {
 	stack := make([]uintptr, 10)
 	n := runtime.Callers(skip, stack)
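The hunks above turn the old internal error helpers into the exported ierrors API (New, NewUnexpected, Wrap, WrapWithPrefix). A minimal sketch of how a caller might use it; the handle and loadImage functions below are hypothetical and only illustrate the wrapping behaviour shown in the diff:

package example

import (
	"fmt"
	"net/http"

	"github.com/imgproxy/imgproxy/v3/ierrors"
)

// loadImage is a hypothetical helper used only to show error wrapping.
func loadImage(url string) ([]byte, error) {
	return nil, fmt.Errorf("connection refused")
}

func handle(w http.ResponseWriter, url string) {
	data, err := loadImage(url)
	if err != nil {
		// An existing *ierrors.Error keeps its status and public message
		// (with a prefixed internal message); anything else becomes an
		// unexpected 500 with a generic public message.
		ierr := ierrors.WrapWithPrefix(err, 1, "Can't load image")
		http.Error(w, ierr.PublicMessage, ierr.StatusCode)
		return
	}
	w.Write(data)
}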
104
image_data.go
104
image_data.go
@ -1,104 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"context"
|
|
||||||
"encoding/base64"
|
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
)
|
|
||||||
|
|
||||||
type imageData struct {
|
|
||||||
Data []byte
|
|
||||||
Type imageType
|
|
||||||
|
|
||||||
cancel context.CancelFunc
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *imageData) Close() {
|
|
||||||
if d.cancel != nil {
|
|
||||||
d.cancel()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func getWatermarkData() (*imageData, error) {
|
|
||||||
if len(conf.WatermarkData) > 0 {
|
|
||||||
return base64ImageData(conf.WatermarkData, "watermark")
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(conf.WatermarkPath) > 0 {
|
|
||||||
return fileImageData(conf.WatermarkPath, "watermark")
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(conf.WatermarkURL) > 0 {
|
|
||||||
return remoteImageData(conf.WatermarkURL, "watermark")
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func getFallbackImageData() (*imageData, error) {
|
|
||||||
if len(conf.FallbackImageData) > 0 {
|
|
||||||
return base64ImageData(conf.FallbackImageData, "fallback image")
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(conf.FallbackImagePath) > 0 {
|
|
||||||
return fileImageData(conf.FallbackImagePath, "fallback image")
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(conf.FallbackImageURL) > 0 {
|
|
||||||
return remoteImageData(conf.FallbackImageURL, "fallback image")
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func base64ImageData(encoded, desc string) (*imageData, error) {
|
|
||||||
data, err := base64.StdEncoding.DecodeString(encoded)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("Can't decode %s data: %s", desc, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
imgtype, err := checkTypeAndDimensions(bytes.NewReader(data))
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("Can't decode %s: %s", desc, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return &imageData{Data: data, Type: imgtype}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func fileImageData(path, desc string) (*imageData, error) {
|
|
||||||
f, err := os.Open(path)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("Can't read %s: %s", desc, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
fi, err := f.Stat()
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("Can't read %s: %s", desc, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
imgdata, err := readAndCheckImage(f, int(fi.Size()))
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("Can't read %s: %s", desc, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return imgdata, err
|
|
||||||
}
|
|
||||||
|
|
||||||
func remoteImageData(imageURL, desc string) (*imageData, error) {
|
|
||||||
res, err := requestImage(imageURL)
|
|
||||||
if res != nil {
|
|
||||||
defer res.Body.Close()
|
|
||||||
}
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("Can't download %s: %s", desc, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
imgdata, err := readAndCheckImage(res.Body, int(res.ContentLength))
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("Can't download %s: %s", desc, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return imgdata, err
|
|
||||||
}
|
|
134
image_type.go
134
image_type.go
@ -1,134 +0,0 @@
|
|||||||
package main
|
|
||||||
|
|
||||||
/*
|
|
||||||
#cgo LDFLAGS: -s -w
|
|
||||||
#include "vips.h"
|
|
||||||
*/
|
|
||||||
import "C"
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"net/url"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
type imageType int
|
|
||||||
|
|
||||||
const (
|
|
||||||
imageTypeUnknown = imageType(C.UNKNOWN)
|
|
||||||
imageTypeJPEG = imageType(C.JPEG)
|
|
||||||
imageTypePNG = imageType(C.PNG)
|
|
||||||
imageTypeWEBP = imageType(C.WEBP)
|
|
||||||
imageTypeGIF = imageType(C.GIF)
|
|
||||||
imageTypeICO = imageType(C.ICO)
|
|
||||||
imageTypeSVG = imageType(C.SVG)
|
|
||||||
imageTypeHEIC = imageType(C.HEIC)
|
|
||||||
imageTypeAVIF = imageType(C.AVIF)
|
|
||||||
imageTypeBMP = imageType(C.BMP)
|
|
||||||
imageTypeTIFF = imageType(C.TIFF)
|
|
||||||
|
|
||||||
contentDispositionFilenameFallback = "image"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
imageTypes = map[string]imageType{
|
|
||||||
"jpeg": imageTypeJPEG,
|
|
||||||
"jpg": imageTypeJPEG,
|
|
||||||
"png": imageTypePNG,
|
|
||||||
"webp": imageTypeWEBP,
|
|
||||||
"gif": imageTypeGIF,
|
|
||||||
"ico": imageTypeICO,
|
|
||||||
"svg": imageTypeSVG,
|
|
||||||
"heic": imageTypeHEIC,
|
|
||||||
"avif": imageTypeAVIF,
|
|
||||||
"bmp": imageTypeBMP,
|
|
||||||
"tiff": imageTypeTIFF,
|
|
||||||
}
|
|
||||||
|
|
||||||
mimes = map[imageType]string{
|
|
||||||
imageTypeJPEG: "image/jpeg",
|
|
||||||
imageTypePNG: "image/png",
|
|
||||||
imageTypeWEBP: "image/webp",
|
|
||||||
imageTypeGIF: "image/gif",
|
|
||||||
imageTypeICO: "image/x-icon",
|
|
||||||
imageTypeSVG: "image/svg+xml",
|
|
||||||
imageTypeHEIC: "image/heif",
|
|
||||||
imageTypeAVIF: "image/avif",
|
|
||||||
imageTypeBMP: "image/bmp",
|
|
||||||
imageTypeTIFF: "image/tiff",
|
|
||||||
}
|
|
||||||
|
|
||||||
contentDispositionsFmt = map[imageType]string{
|
|
||||||
imageTypeJPEG: "inline; filename=\"%s.jpg\"",
|
|
||||||
imageTypePNG: "inline; filename=\"%s.png\"",
|
|
||||||
imageTypeWEBP: "inline; filename=\"%s.webp\"",
|
|
||||||
imageTypeGIF: "inline; filename=\"%s.gif\"",
|
|
||||||
imageTypeICO: "inline; filename=\"%s.ico\"",
|
|
||||||
imageTypeSVG: "inline; filename=\"%s.svg\"",
|
|
||||||
imageTypeHEIC: "inline; filename=\"%s.heic\"",
|
|
||||||
imageTypeAVIF: "inline; filename=\"%s.avif\"",
|
|
||||||
imageTypeBMP: "inline; filename=\"%s.bmp\"",
|
|
||||||
imageTypeTIFF: "inline; filename=\"%s.tiff\"",
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
func (it imageType) String() string {
|
|
||||||
for k, v := range imageTypes {
|
|
||||||
if v == it {
|
|
||||||
return k
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
func (it imageType) MarshalJSON() ([]byte, error) {
|
|
||||||
for k, v := range imageTypes {
|
|
||||||
if v == it {
|
|
||||||
return []byte(fmt.Sprintf("%q", k)), nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return []byte("null"), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (it imageType) Mime() string {
|
|
||||||
if mime, ok := mimes[it]; ok {
|
|
||||||
return mime
|
|
||||||
}
|
|
||||||
|
|
||||||
return "application/octet-stream"
|
|
||||||
}
|
|
||||||
|
|
||||||
func (it imageType) ContentDisposition(filename string) string {
|
|
||||||
format, ok := contentDispositionsFmt[it]
|
|
||||||
if !ok {
|
|
||||||
return "inline"
|
|
||||||
}
|
|
||||||
|
|
||||||
return fmt.Sprintf(format, filename)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (it imageType) ContentDispositionFromURL(imageURL string) string {
|
|
||||||
url, err := url.Parse(imageURL)
|
|
||||||
if err != nil {
|
|
||||||
return it.ContentDisposition(contentDispositionFilenameFallback)
|
|
||||||
}
|
|
||||||
|
|
||||||
_, filename := filepath.Split(url.Path)
|
|
||||||
if len(filename) == 0 {
|
|
||||||
return it.ContentDisposition(contentDispositionFilenameFallback)
|
|
||||||
}
|
|
||||||
|
|
||||||
return it.ContentDisposition(strings.TrimSuffix(filename, filepath.Ext(filename)))
|
|
||||||
}
|
|
||||||
|
|
||||||
func (it imageType) SupportsAlpha() bool {
|
|
||||||
return it != imageTypeJPEG && it != imageTypeBMP
|
|
||||||
}
|
|
||||||
|
|
||||||
func (it imageType) SupportsColourProfile() bool {
|
|
||||||
return it == imageTypeJPEG ||
|
|
||||||
it == imageTypePNG ||
|
|
||||||
it == imageTypeWEBP ||
|
|
||||||
it == imageTypeAVIF
|
|
||||||
}
|
|
imagedata/download.go (new file, 215 lines)
@@ -0,0 +1,215 @@
package imagedata

import (
	"compress/gzip"
	"crypto/tls"
	"fmt"
	"io/ioutil"
	"net"
	"net/http"
	"net/http/cookiejar"
	"time"

	"github.com/imgproxy/imgproxy/v3/config"
	"github.com/imgproxy/imgproxy/v3/ierrors"

	azureTransport "github.com/imgproxy/imgproxy/v3/transport/azure"
	fsTransport "github.com/imgproxy/imgproxy/v3/transport/fs"
	gcsTransport "github.com/imgproxy/imgproxy/v3/transport/gcs"
	s3Transport "github.com/imgproxy/imgproxy/v3/transport/s3"
)

var (
	downloadClient *http.Client

	enabledSchemes = map[string]struct{}{
		"http":  {},
		"https": {},
	}

	imageHeadersToStore = []string{
		"Cache-Control",
		"Expires",
		"ETag",
	}

	// For tests
	redirectAllRequestsTo string
)

const msgSourceImageIsUnreachable = "Source image is unreachable"

type ErrorNotModified struct {
	Message string
	Headers map[string]string
}

func (e *ErrorNotModified) Error() string {
	return e.Message
}

func initDownloading() error {
	transport := &http.Transport{
		Proxy:               http.ProxyFromEnvironment,
		MaxIdleConns:        config.Concurrency,
		MaxIdleConnsPerHost: config.Concurrency,
		DisableCompression:  true,
		DialContext:         (&net.Dialer{KeepAlive: 600 * time.Second}).DialContext,
	}

	if config.IgnoreSslVerification {
		transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
	}

	registerProtocol := func(scheme string, rt http.RoundTripper) {
		transport.RegisterProtocol(scheme, rt)
		enabledSchemes[scheme] = struct{}{}
	}

	if config.LocalFileSystemRoot != "" {
		registerProtocol("local", fsTransport.New())
	}

	if config.S3Enabled {
		if t, err := s3Transport.New(); err != nil {
			return err
		} else {
			registerProtocol("s3", t)
		}
	}

	if config.GCSEnabled {
		if t, err := gcsTransport.New(); err != nil {
			return err
		} else {
			registerProtocol("gs", t)
		}
	}

	if config.ABSEnabled {
		if t, err := azureTransport.New(); err != nil {
			return err
		} else {
			registerProtocol("abs", t)
		}
	}

	downloadClient = &http.Client{
		Timeout:   time.Duration(config.DownloadTimeout) * time.Second,
		Transport: transport,
	}

	return nil
}

func headersToStore(res *http.Response) map[string]string {
	m := make(map[string]string)

	for _, h := range imageHeadersToStore {
		if val := res.Header.Get(h); len(val) != 0 {
			m[h] = val
		}
	}

	return m
}

func requestImage(imageURL string, header http.Header, jar *cookiejar.Jar) (*http.Response, error) {
	req, err := http.NewRequest("GET", imageURL, nil)
	if err != nil {
		return nil, ierrors.New(404, err.Error(), msgSourceImageIsUnreachable)
	}

	if _, ok := enabledSchemes[req.URL.Scheme]; !ok {
		return nil, ierrors.New(
			404,
			fmt.Sprintf("Unknown sheme: %s", req.URL.Scheme),
			msgSourceImageIsUnreachable,
		)
	}

	if jar != nil {
		for _, cookie := range jar.Cookies(req.URL) {
			req.AddCookie(cookie)
		}
	}

	req.Header.Set("User-Agent", config.UserAgent)

	for k, v := range header {
		if len(v) > 0 {
			req.Header.Set(k, v[0])
		}
	}

	res, err := downloadClient.Do(req)
	if err != nil {
		return nil, ierrors.New(500, checkTimeoutErr(err).Error(), msgSourceImageIsUnreachable)
	}

	if res.StatusCode == http.StatusNotModified {
		return nil, &ErrorNotModified{Message: "Not Modified", Headers: headersToStore(res)}
	}

	if res.StatusCode != 200 {
		body, _ := ioutil.ReadAll(res.Body)
		res.Body.Close()

		status := 404
		if res.StatusCode >= 500 {
			status = 500
		}

		msg := fmt.Sprintf("Status: %d; %s", res.StatusCode, string(body))
		return nil, ierrors.New(status, msg, msgSourceImageIsUnreachable)
	}

	return res, nil
}

func download(imageURL string, header http.Header, jar *cookiejar.Jar) (*ImageData, error) {
	// We use this for testing
	if len(redirectAllRequestsTo) > 0 {
		imageURL = redirectAllRequestsTo
	}

	res, err := requestImage(imageURL, header, jar)
	if res != nil {
		defer res.Body.Close()
	}
	if err != nil {
		return nil, err
	}

	body := res.Body
	contentLength := int(res.ContentLength)

	if res.Header.Get("Content-Encoding") == "gzip" {
		gzipBody, errGzip := gzip.NewReader(res.Body)
		if gzipBody != nil {
			defer gzipBody.Close()
		}
		if errGzip != nil {
			return nil, err
		}
		body = gzipBody
		contentLength = 0
	}

	imgdata, err := readAndCheckImage(body, contentLength)
	if err != nil {
		return nil, ierrors.Wrap(err, 0)
	}

	imgdata.Headers = headersToStore(res)

	return imgdata, nil
}

func RedirectAllRequestsTo(u string) {
	redirectAllRequestsTo = u
}

func StopRedirectingRequests() {
	redirectAllRequestsTo = ""
}
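One design note on initDownloading above: it relies on http.Transport.RegisterProtocol so that local, s3, gs, and abs URLs are resolved by custom RoundTrippers through the same downloadClient. A self-contained sketch of that mechanism with a made-up echo scheme and round tripper (not imgproxy code):

package main

import (
	"fmt"
	"io"
	"net/http"
	"strings"
)

// echoTransport is a toy RoundTripper that "fetches" the request path itself.
type echoTransport struct{}

func (echoTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	payload := "echo:" + req.URL.Path
	return &http.Response{
		Status:        "200 OK",
		StatusCode:    200,
		Header:        make(http.Header),
		Body:          io.NopCloser(strings.NewReader(payload)),
		ContentLength: int64(len(payload)),
		Request:       req,
	}, nil
}

func main() {
	transport := &http.Transport{}
	// Any URL with the "echo" scheme is now handled by echoTransport,
	// the same way the downloader registers "local", "s3", "gs" and "abs".
	transport.RegisterProtocol("echo", echoTransport{})

	client := &http.Client{Transport: transport}
	res, err := client.Get("echo:///some/path")
	if err != nil {
		panic(err)
	}
	defer res.Body.Close()

	data, _ := io.ReadAll(res.Body)
	fmt.Println(string(data)) // echo:/some/path
}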
imagedata/image_data.go (new file, 142 lines)
@@ -0,0 +1,142 @@
package imagedata

import (
	"context"
	"encoding/base64"
	"fmt"
	"net/http"
	"net/http/cookiejar"
	"os"
	"strings"
	"sync"

	"github.com/imgproxy/imgproxy/v3/config"
	"github.com/imgproxy/imgproxy/v3/ierrors"
	"github.com/imgproxy/imgproxy/v3/imagetype"
)

var (
	Watermark     *ImageData
	FallbackImage *ImageData
)

type ImageData struct {
	Type    imagetype.Type
	Data    []byte
	Headers map[string]string

	cancel     context.CancelFunc
	cancelOnce sync.Once
}

func (d *ImageData) Close() {
	d.cancelOnce.Do(func() {
		if d.cancel != nil {
			d.cancel()
		}
	})
}

func (d *ImageData) SetCancel(cancel context.CancelFunc) {
	d.cancel = cancel
}

func Init() error {
	initRead()

	if err := initDownloading(); err != nil {
		return err
	}

	if err := loadWatermark(); err != nil {
		return err
	}

	if err := loadFallbackImage(); err != nil {
		return err
	}

	return nil
}

func loadWatermark() (err error) {
	if len(config.WatermarkData) > 0 {
		Watermark, err = FromBase64(config.WatermarkData, "watermark")
		return
	}

	if len(config.WatermarkPath) > 0 {
		Watermark, err = FromFile(config.WatermarkPath, "watermark")
		return
	}

	if len(config.WatermarkURL) > 0 {
		Watermark, err = Download(config.WatermarkURL, "watermark", nil, nil)
		return
	}

	return nil
}

func loadFallbackImage() (err error) {
	if len(config.FallbackImageData) > 0 {
		FallbackImage, err = FromBase64(config.FallbackImageData, "fallback image")
		return
	}

	if len(config.FallbackImagePath) > 0 {
		FallbackImage, err = FromFile(config.FallbackImagePath, "fallback image")
		return
	}

	if len(config.FallbackImageURL) > 0 {
		FallbackImage, err = Download(config.FallbackImageURL, "fallback image", nil, nil)
		return
	}

	return nil
}

func FromBase64(encoded, desc string) (*ImageData, error) {
	dec := base64.NewDecoder(base64.StdEncoding, strings.NewReader(encoded))
	size := 4 * (len(encoded)/3 + 1)

	imgdata, err := readAndCheckImage(dec, size)
	if err != nil {
		return nil, fmt.Errorf("Can't decode %s: %s", desc, err)
	}

	return imgdata, nil
}

func FromFile(path, desc string) (*ImageData, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, fmt.Errorf("Can't read %s: %s", desc, err)
	}

	fi, err := f.Stat()
	if err != nil {
		return nil, fmt.Errorf("Can't read %s: %s", desc, err)
	}

	imgdata, err := readAndCheckImage(f, int(fi.Size()))
	if err != nil {
		return nil, fmt.Errorf("Can't read %s: %s", desc, err)
	}

	return imgdata, nil
}

func Download(imageURL, desc string, header http.Header, jar *cookiejar.Jar) (*ImageData, error) {
	imgdata, err := download(imageURL, header, jar)
	if err != nil {
		if nmErr, ok := err.(*ErrorNotModified); ok {
			nmErr.Message = fmt.Sprintf("Can't download %s: %s", desc, nmErr.Message)
			return nil, nmErr
		}
		return nil, ierrors.WrapWithPrefix(err, 1, fmt.Sprintf("Can't download %s", desc))
	}

	return imgdata, nil
}
imagedata/read.go (new file, 78 lines)
@@ -0,0 +1,78 @@
package imagedata

import (
	"io"

	"github.com/imgproxy/imgproxy/v3/bufpool"
	"github.com/imgproxy/imgproxy/v3/bufreader"
	"github.com/imgproxy/imgproxy/v3/config"
	"github.com/imgproxy/imgproxy/v3/ierrors"
	"github.com/imgproxy/imgproxy/v3/imagemeta"
	"github.com/imgproxy/imgproxy/v3/security"
)

var (
	ErrSourceFileTooBig            = ierrors.New(422, "Source image file is too big", "Invalid source image")
	ErrSourceImageTypeNotSupported = ierrors.New(422, "Source image type not supported", "Invalid source image")
)

var downloadBufPool *bufpool.Pool

func initRead() {
	downloadBufPool = bufpool.New("download", config.Concurrency, config.DownloadBufferSize)
}

type hardLimitReader struct {
	r    io.Reader
	left int
}

func (lr *hardLimitReader) Read(p []byte) (n int, err error) {
	if lr.left <= 0 {
		return 0, ErrSourceFileTooBig
	}
	if len(p) > lr.left {
		p = p[0:lr.left]
	}
	n, err = lr.r.Read(p)
	lr.left -= n
	return
}

func readAndCheckImage(r io.Reader, contentLength int) (*ImageData, error) {
	if config.MaxSrcFileSize > 0 && contentLength > config.MaxSrcFileSize {
		return nil, ErrSourceFileTooBig
	}

	buf := downloadBufPool.Get(contentLength)
	cancel := func() { downloadBufPool.Put(buf) }

	if config.MaxSrcFileSize > 0 {
		r = &hardLimitReader{r: r, left: config.MaxSrcFileSize}
	}

	br := bufreader.New(r, buf)

	meta, err := imagemeta.DecodeMeta(br)
	if err == imagemeta.ErrFormat {
		return nil, ErrSourceImageTypeNotSupported
	}
	if err != nil {
		return nil, checkTimeoutErr(err)
	}

	if err = security.CheckDimensions(meta.Width(), meta.Height()); err != nil {
		return nil, err
	}

	if err = br.Flush(); err != nil {
		cancel()
		return nil, checkTimeoutErr(err)
	}

	return &ImageData{
		Data:   buf.Bytes(),
		Type:   meta.Format(),
		cancel: cancel,
	}, nil
}
imagedata/timeout.go (new file, 14 lines)
@@ -0,0 +1,14 @@
package imagedata

import "errors"

type httpError interface {
	Timeout() bool
}

func checkTimeoutErr(err error) error {
	if httpErr, ok := err.(httpError); ok && httpErr.Timeout() {
		return errors.New("The image request timed out")
	}
	return err
}
@@ -4,6 +4,8 @@ import (
 	"bytes"
 	"encoding/binary"
 	"io"
+
+	"github.com/imgproxy/imgproxy/v3/imagetype"
 )

 var bmpMagick = []byte("BM")
@@ -42,7 +44,7 @@ func DecodeBmpMeta(r io.Reader) (Meta, error) {
 	}

 	return &meta{
-		format: "bmp",
+		format: imagetype.BMP,
 		width:  width,
 		height: height,
 	}, nil
@@ -2,6 +2,8 @@ package imagemeta

 import (
 	"io"
+
+	"github.com/imgproxy/imgproxy/v3/imagetype"
 )

 func DecodeGifMeta(r io.Reader) (Meta, error) {
@@ -13,7 +15,7 @@ func DecodeGifMeta(r io.Reader) (Meta, error) {
 	}

 	return &meta{
-		format: "gif",
+		format: imagetype.GIF,
 		width:  int(tmp[6]) + int(tmp[7])<<8,
 		height: int(tmp[8]) + int(tmp[9])<<8,
 	}, nil
@@ -7,6 +7,8 @@ import (
 	"fmt"
 	"io"
 	"io/ioutil"
+
+	"github.com/imgproxy/imgproxy/v3/imagetype"
 )

 const heifBoxHeaderSize = int64(8)
@@ -20,12 +22,12 @@ type heifDiscarder interface {
 }

 type heifData struct {
-	Format        string
+	Format        imagetype.Type
 	Width, Height int64
 }

 func (d *heifData) IsFilled() bool {
-	return len(d.Format) > 0 && d.Width > 0 && d.Height > 0
+	return d.Format != imagetype.Unknown && d.Width > 0 && d.Height > 0
 }

 func heifReadN(r io.Reader, n int64) (b []byte, err error) {
@@ -73,12 +75,12 @@ func heifReadBoxHeader(r io.Reader) (boxType string, boxDataSize int64, err erro

 func heifAssignFormat(d *heifData, brand []byte) bool {
 	if bytes.Equal(brand, heicBrand) {
-		d.Format = "heic"
+		d.Format = imagetype.HEIC
 		return true
 	}

 	if bytes.Equal(brand, avifBrand) {
-		d.Format = "avif"
+		d.Format = imagetype.AVIF
 		return true
 	}

@@ -4,6 +4,8 @@ import (
 	"bytes"
 	"encoding/binary"
 	"io"
+
+	"github.com/imgproxy/imgproxy/v3/imagetype"
 )

 type IcoMeta struct {
@@ -69,7 +71,7 @@ func DecodeIcoMeta(r io.Reader) (*IcoMeta, error) {

 	return &IcoMeta{
 		Meta: &meta{
-			format: "ico",
+			format: imagetype.ICO,
 			width:  width,
 			height: height,
 		},
@@ -6,10 +6,12 @@ import (
 	"io"
 	"sync"
 	"sync/atomic"
+
+	"github.com/imgproxy/imgproxy/v3/imagetype"
 )

 type Meta interface {
-	Format() string
+	Format() imagetype.Type
 	Width() int
 	Height() int
 }
@@ -17,11 +19,11 @@ type Meta interface {
 type DecodeMetaFunc func(io.Reader) (Meta, error)

 type meta struct {
-	format        string
+	format        imagetype.Type
 	width, height int
 }

-func (m *meta) Format() string {
+func (m *meta) Format() imagetype.Type {
 	return m.format
 }

@@ -91,7 +93,7 @@ func DecodeMeta(r io.Reader) (Meta, error) {
 	if ok, err := IsSVG(rr); err != nil {
 		return nil, err
 	} else if ok {
-		return &meta{format: "svg", width: 1, height: 1}, nil
+		return &meta{format: imagetype.SVG, width: 1, height: 1}, nil
 	}

 	return nil, ErrFormat
@@ -3,6 +3,8 @@ package imagemeta
 import (
 	"bufio"
 	"io"
+
+	"github.com/imgproxy/imgproxy/v3/imagetype"
 )

 const (
@@ -101,7 +103,7 @@ func DecodeJpegMeta(rr io.Reader) (Meta, error) {
 	}

 	return &meta{
-		format: "jpeg",
+		format: imagetype.JPEG,
 		width:  int(tmp[3])<<8 + int(tmp[4]),
 		height: int(tmp[1])<<8 + int(tmp[2]),
 	}, nil
@@ -4,6 +4,8 @@ import (
 	"bytes"
 	"encoding/binary"
 	"io"
+
+	"github.com/imgproxy/imgproxy/v3/imagetype"
 )

 var pngMagick = []byte("\x89PNG\r\n\x1a\n")
@@ -28,7 +30,7 @@ func DecodePngMeta(r io.Reader) (Meta, error) {
 	}

 	return &meta{
-		format: "png",
+		format: imagetype.PNG,
 		width:  int(binary.BigEndian.Uint32(tmp[8:12])),
 		height: int(binary.BigEndian.Uint32(tmp[12:16])),
 	}, nil
@@ -6,13 +6,11 @@ import (
 	"fmt"
 	"io"
 	"strings"
-	"sync/atomic"

+	"github.com/imgproxy/imgproxy/v3/config"
 	"golang.org/x/text/encoding/charmap"
 )

-var maxSvgBytes int64 = 32 * 1024
-
 type svgHeader struct {
 	XMLName xml.Name
 }
@@ -24,12 +22,8 @@ func xmlCharsetReader(charset string, input io.Reader) (io.Reader, error) {
 	return nil, fmt.Errorf("Unknown SVG charset: %s", charset)
 }

-func SetMaxSvgCheckRead(n int) {
-	atomic.StoreInt64(&maxSvgBytes, int64(n))
-}
-
 func IsSVG(r io.Reader) (bool, error) {
-	maxBytes := int(atomic.LoadInt64(&maxSvgBytes))
+	maxBytes := config.MaxSvgCheckBytes

 	var h svgHeader

@@ -5,6 +5,8 @@ import (
 	"bytes"
 	"encoding/binary"
 	"io"
+
+	"github.com/imgproxy/imgproxy/v3/imagetype"
 )

 var (
@@ -105,7 +107,7 @@ func DecodeTiffMeta(rr io.Reader) (Meta, error) {

 	if width > 0 && height > 0 {
 		return &meta{
-			format: "tiff",
+			format: imagetype.TIFF,
 			width:  width,
 			height: height,
 		}, nil
@@ -10,6 +10,7 @@ import (
 	"errors"
 	"io"

+	"github.com/imgproxy/imgproxy/v3/imagetype"
 	"golang.org/x/image/riff"
 	"golang.org/x/image/vp8"
 	"golang.org/x/image/vp8l"
@@ -59,7 +60,7 @@ func DecodeWebpMeta(r io.Reader) (Meta, error) {
 	fh, err := d.DecodeFrameHeader()

 	return &meta{
-		format: "webp",
+		format: imagetype.WEBP,
 		width:  fh.Width,
 		height: fh.Height,
 	}, err
@@ -71,7 +72,7 @@ func DecodeWebpMeta(r io.Reader) (Meta, error) {
 	}

 	return &meta{
-		format: "webp",
+		format: imagetype.WEBP,
 		width:  conf.Width,
 		height: conf.Height,
 	}, nil
@@ -89,7 +90,7 @@ func DecodeWebpMeta(r io.Reader) (Meta, error) {
 	heightMinusOne := uint32(buf[7]) | uint32(buf[8])<<8 | uint32(buf[9])<<16

 	return &meta{
-		format: "webp",
+		format: imagetype.WEBP,
 		width:  int(widthMinusOne) + 1,
 		height: int(heightMinusOne) + 1,
 	}, nil
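Since the imagemeta decoders above now report imagetype.Type values instead of strings, callers can sniff the format and dimensions cheaply and compare them against typed constants. A minimal sketch assuming only the DecodeMeta and Meta signatures shown in these hunks; the file path is a placeholder:

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/imgproxy/imgproxy/v3/imagemeta"
	"github.com/imgproxy/imgproxy/v3/imagetype"
)

func main() {
	f, err := os.Open("test.png") // any local image; the path is a placeholder
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	meta, err := imagemeta.DecodeMeta(f)
	if err != nil {
		log.Fatal(err)
	}

	// Format() is now an imagetype.Type, so it can be compared directly
	// and converted to a MIME type.
	if meta.Format() == imagetype.SVG {
		fmt.Println("vector image")
	}
	fmt.Printf("%s (%s): %dx%d\n",
		meta.Format(), meta.Format().Mime(), meta.Width(), meta.Height())
}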
132
imagetype/imagetype.go
Normal file
132
imagetype/imagetype.go
Normal file
@ -0,0 +1,132 @@
package imagetype

import (
	"fmt"
	"net/url"
	"path/filepath"
	"strings"
)

type Type int

const (
	Unknown Type = iota
	JPEG
	PNG
	WEBP
	GIF
	ICO
	SVG
	HEIC
	AVIF
	BMP
	TIFF
)

const contentDispositionFilenameFallback = "image"

var (
	Types = map[string]Type{
		"jpeg": JPEG,
		"jpg":  JPEG,
		"png":  PNG,
		"webp": WEBP,
		"gif":  GIF,
		"ico":  ICO,
		"svg":  SVG,
		"heic": HEIC,
		"avif": AVIF,
		"bmp":  BMP,
		"tiff": TIFF,
	}

	mimes = map[Type]string{
		JPEG: "image/jpeg",
		PNG:  "image/png",
		WEBP: "image/webp",
		GIF:  "image/gif",
		ICO:  "image/x-icon",
		SVG:  "image/svg+xml",
		HEIC: "image/heif",
		AVIF: "image/avif",
		BMP:  "image/bmp",
		TIFF: "image/tiff",
	}

	contentDispositionsFmt = map[Type]string{
		JPEG: "inline; filename=\"%s.jpg\"",
		PNG:  "inline; filename=\"%s.png\"",
		WEBP: "inline; filename=\"%s.webp\"",
		GIF:  "inline; filename=\"%s.gif\"",
		ICO:  "inline; filename=\"%s.ico\"",
		SVG:  "inline; filename=\"%s.svg\"",
		HEIC: "inline; filename=\"%s.heic\"",
		AVIF: "inline; filename=\"%s.avif\"",
		BMP:  "inline; filename=\"%s.bmp\"",
		TIFF: "inline; filename=\"%s.tiff\"",
	}
)

func (it Type) String() string {
	for k, v := range Types {
		if v == it {
			return k
		}
	}
	return ""
}

func (it Type) MarshalJSON() ([]byte, error) {
	for k, v := range Types {
		if v == it {
			return []byte(fmt.Sprintf("%q", k)), nil
		}
	}
	return []byte("null"), nil
}

func (it Type) Mime() string {
	if mime, ok := mimes[it]; ok {
		return mime
	}

	return "application/octet-stream"
}

func (it Type) ContentDisposition(filename string) string {
	format, ok := contentDispositionsFmt[it]
	if !ok {
		return "inline"
	}

	return fmt.Sprintf(format, filename)
}

func (it Type) ContentDispositionFromURL(imageURL string) string {
	url, err := url.Parse(imageURL)
	if err != nil {
		return it.ContentDisposition(contentDispositionFilenameFallback)
	}

	_, filename := filepath.Split(url.Path)
	if len(filename) == 0 {
		return it.ContentDisposition(contentDispositionFilenameFallback)
	}

	return it.ContentDisposition(strings.TrimSuffix(filename, filepath.Ext(filename)))
}

func (it Type) SupportsAlpha() bool {
	return it != JPEG && it != BMP
}

func (it Type) SupportsAnimation() bool {
	return it == GIF || it == WEBP
}

func (it Type) SupportsColourProfile() bool {
	return it == JPEG ||
		it == PNG ||
		it == WEBP ||
		it == AVIF
}
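As a usage sketch (not part of this commit's diff): the new imagetype helpers can be exercised like this, assuming the package is imported from the imgproxy module; the URL is only an illustration.

// Hypothetical example program, not from the commit.
package main

import (
	"fmt"

	"github.com/imgproxy/imgproxy/v3/imagetype"
)

func main() {
	t := imagetype.Types["webp"] // look up a Type by its URL extension

	fmt.Println(t.Mime())              // "image/webp"
	fmt.Println(t.SupportsAnimation()) // true
	// Derive a Content-Disposition header from the source URL's filename.
	fmt.Println(t.ContentDispositionFromURL("http://example.com/pics/cat.jpg?v=1"))
}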
imath/imath.go (new file, 40 lines)
@ -0,0 +1,40 @@
package imath

import "math"

func Max(a, b int) int {
	if a > b {
		return a
	}
	return b
}

func Min(a, b int) int {
	if a < b {
		return a
	}
	return b
}

func MinNonZero(a, b int) int {
	switch {
	case a == 0:
		return b
	case b == 0:
		return a
	}

	return Min(a, b)
}

func Round(a float64) int {
	return int(math.Round(a))
}

func Scale(a int, scale float64) int {
	if a == 0 {
		return 0
	}

	return Round(float64(a) * scale)
}
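As a quick sketch (not part of the diff) of how these integer helpers behave, assuming the package path introduced above:

// Hypothetical example program, not from the commit.
package main

import (
	"fmt"

	"github.com/imgproxy/imgproxy/v3/imath"
)

func main() {
	// Scale rounds a*scale to the nearest int; Scale(0, x) is always 0.
	fmt.Println(imath.Scale(300, 0.5)) // 150

	// MinNonZero ignores zero values, handy for optional dimensions.
	fmt.Println(imath.MinNonZero(0, 1024)) // 1024

	fmt.Println(imath.Max(2, 7), imath.Min(2, 7)) // 7 2
}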
@ -1,15 +0,0 @@ (deleted file)
// +build !linux,!darwin !go1.11

package main

import (
	"net"
)

func listenReuseport(network, address string) (net.Listener, error) {
	if conf.SoReuseport {
		logWarning("SO_REUSEPORT support is not implemented for your OS or Go version")
	}

	return net.Listen(network, address)
}
log.go (deleted, 112 lines)
@ -1,112 +0,0 @@
package main

import (
	"fmt"
	"net/http"

	logrus "github.com/sirupsen/logrus"
)

func initLog() error {
	logFormat := "pretty"
	strEnvConfig(&logFormat, "IMGPROXY_LOG_FORMAT")

	switch logFormat {
	case "structured":
		logrus.SetFormatter(&logStructuredFormatter{})
	case "json":
		logrus.SetFormatter(&logrus.JSONFormatter{})
	default:
		logrus.SetFormatter(newLogPrettyFormatter())
	}

	logLevel := "info"
	strEnvConfig(&logLevel, "IMGPROXY_LOG_LEVEL")

	levelLogLevel, err := logrus.ParseLevel(logLevel)
	if err != nil {
		levelLogLevel = logrus.InfoLevel
	}

	logrus.SetLevel(levelLogLevel)

	if isSyslogEnabled() {
		slHook, err := newSyslogHook()
		if err != nil {
			return fmt.Errorf("Unable to connect to syslog daemon: %s", err)
		}

		logrus.AddHook(slHook)
	}

	return nil
}

func logRequest(reqID string, r *http.Request) {
	path := r.RequestURI

	logrus.WithFields(logrus.Fields{
		"request_id": reqID,
		"method":     r.Method,
	}).Infof("Started %s", path)
}

func logResponse(reqID string, r *http.Request, status int, err *imgproxyError, imageURL *string, po *processingOptions) {
	var level logrus.Level

	switch {
	case status >= 500:
		level = logrus.ErrorLevel
	case status >= 400:
		level = logrus.WarnLevel
	default:
		level = logrus.InfoLevel
	}

	fields := logrus.Fields{
		"request_id": reqID,
		"method":     r.Method,
		"status":     status,
	}

	if err != nil {
		fields["error"] = err

		if stack := err.FormatStack(); len(stack) > 0 {
			fields["stack"] = stack
		}
	}

	if imageURL != nil {
		fields["image_url"] = *imageURL
	}

	if po != nil {
		fields["processing_options"] = po
	}

	logrus.WithFields(fields).Logf(
		level,
		"Completed in %s %s", getTimerSince(r.Context()), r.RequestURI,
	)
}

func logNotice(f string, args ...interface{}) {
	logrus.Infof(f, args...)
}

func logWarning(f string, args ...interface{}) {
	logrus.Warnf(f, args...)
}

func logError(f string, args ...interface{}) {
	logrus.Errorf(f, args...)
}

func logFatal(f string, args ...interface{}) {
	logrus.Fatalf(f, args...)
}

func logDebug(f string, args ...interface{}) {
	logrus.Debugf(f, args...)
}
@ -1,4 +1,4 @@
-package main
+package logger

 import (
 	"bytes"
@ -30,12 +30,12 @@ func (p logKeys) Len() int { return len(p) }
 func (p logKeys) Less(i, j int) bool { return logKeysPriorities[p[i]] > logKeysPriorities[p[j]] }
 func (p logKeys) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }

-type logPrettyFormatter struct {
+type prettyFormatter struct {
 	levelFormat string
 }

-func newLogPrettyFormatter() *logPrettyFormatter {
-	f := new(logPrettyFormatter)
+func newPrettyFormatter() *prettyFormatter {
+	f := new(prettyFormatter)

 	levelLenMax := 0
 	for _, level := range logrus.AllLevels {
@ -50,7 +50,7 @@ func newLogPrettyFormatter() *logPrettyFormatter {
 	return f
 }

-func (f *logPrettyFormatter) Format(entry *logrus.Entry) ([]byte, error) {
+func (f *prettyFormatter) Format(entry *logrus.Entry) ([]byte, error) {
 	keys := make([]string, 0, len(entry.Data))
 	for k := range entry.Data {
 		if k != "stack" {
@ -97,7 +97,7 @@ func (f *logPrettyFormatter) Format(entry *logrus.Entry) ([]byte, error) {
 	return b.Bytes(), nil
 }

-func (f *logPrettyFormatter) appendValue(b *bytes.Buffer, value interface{}) {
+func (f *prettyFormatter) appendValue(b *bytes.Buffer, value interface{}) {
 	strValue, ok := value.(string)
 	if !ok {
 		strValue = fmt.Sprint(value)
@ -110,9 +110,9 @@ func (f *logPrettyFormatter) appendValue(b *bytes.Buffer, value interface{}) {
 	}
 }

-type logStructuredFormatter struct{}
+type structuredFormatter struct{}

-func (f *logStructuredFormatter) Format(entry *logrus.Entry) ([]byte, error) {
+func (f *structuredFormatter) Format(entry *logrus.Entry) ([]byte, error) {
 	keys := make([]string, 0, len(entry.Data))
 	for k := range entry.Data {
 		keys = append(keys, k)
@ -141,7 +141,7 @@ func (f *logStructuredFormatter) Format(entry *logrus.Entry) ([]byte, error) {
 	return b.Bytes(), nil
 }

-func (f *logStructuredFormatter) appendKeyValue(b *bytes.Buffer, key string, value interface{}) {
+func (f *structuredFormatter) appendKeyValue(b *bytes.Buffer, key string, value interface{}) {
 	if b.Len() != 0 {
 		b.WriteByte(' ')
 	}
logger/log.go (new file, 48 lines)
@ -0,0 +1,48 @@
package logger

import (
	"fmt"
	"log"
	"os"

	logrus "github.com/sirupsen/logrus"

	"github.com/imgproxy/imgproxy/v3/config/configurators"
)

func Init() error {
	log.SetOutput(os.Stdout)

	logFormat := "pretty"
	logLevel := "info"

	configurators.String(&logFormat, "IMGPROXY_LOG_FORMAT")
	configurators.String(&logLevel, "IMGPROXY_LOG_LEVEL")

	switch logFormat {
	case "structured":
		logrus.SetFormatter(&structuredFormatter{})
	case "json":
		logrus.SetFormatter(&logrus.JSONFormatter{})
	default:
		logrus.SetFormatter(newPrettyFormatter())
	}

	levelLogLevel, err := logrus.ParseLevel(logLevel)
	if err != nil {
		levelLogLevel = logrus.InfoLevel
	}

	logrus.SetLevel(levelLogLevel)

	if isSyslogEnabled() {
		slHook, err := newSyslogHook()
		if err != nil {
			return fmt.Errorf("Unable to connect to syslog daemon: %s", err)
		}

		logrus.AddHook(slHook)
	}

	return nil
}
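As a minimal sketch (not part of the diff) of how the new logger package is wired in, mirroring the call main.go now makes; format and level are read from IMGPROXY_LOG_FORMAT and IMGPROXY_LOG_LEVEL:

// Hypothetical example program, not from the commit.
package main

import (
	log "github.com/sirupsen/logrus"

	"github.com/imgproxy/imgproxy/v3/logger"
)

func main() {
	// Configures the logrus formatter, level, and optional syslog hook.
	if err := logger.Init(); err != nil {
		log.Fatal(err)
	}

	log.Info("logger configured")
}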
@ -1,10 +1,11 @@
-package main
+package logger

 import (
 	"fmt"
 	"log/syslog"
 	"os"

+	"github.com/imgproxy/imgproxy/v3/config/configurators"
 	"github.com/sirupsen/logrus"
 )
@ -24,7 +25,7 @@ type syslogHook struct {
 }

 func isSyslogEnabled() (enabled bool) {
-	boolEnvConfig(&enabled, "IMGPROXY_SYSLOG_ENABLE")
+	configurators.Bool(&enabled, "IMGPROXY_SYSLOG_ENABLE")
 	return
 }
@ -37,16 +38,16 @@ func newSyslogHook() (*syslogHook, error) {
 		levelStr = "notice"
 	)

-	strEnvConfig(&network, "IMGPROXY_SYSLOG_NETWORK")
-	strEnvConfig(&addr, "IMGPROXY_SYSLOG_ADDRESS")
-	strEnvConfig(&tag, "IMGPROXY_SYSLOG_TAG")
-	strEnvConfig(&levelStr, "IMGPROXY_SYSLOG_LEVEL")
+	configurators.String(&network, "IMGPROXY_SYSLOG_NETWORK")
+	configurators.String(&addr, "IMGPROXY_SYSLOG_ADDRESS")
+	configurators.String(&tag, "IMGPROXY_SYSLOG_TAG")
+	configurators.String(&levelStr, "IMGPROXY_SYSLOG_LEVEL")

 	if l, ok := syslogLevels[levelStr]; ok {
 		level = l
 	} else {
 		level = logrus.InfoLevel
-		logWarning("Syslog level '%s' is invalid, 'info' is used", levelStr)
+		logrus.Warningf("Syslog level '%s' is invalid, 'info' is used", levelStr)
 	}

 	w, err := syslog.Dial(network, addr, syslog.LOG_NOTICE, tag)
@ -54,7 +55,7 @@ func newSyslogHook() (*syslogHook, error) {
 	return &syslogHook{
 		writer:    w,
 		levels:    logrus.AllLevels[:int(level)+1],
-		formatter: &logStructuredFormatter{},
+		formatter: &structuredFormatter{},
 	}, err
 }
main.go (modified)
@ -3,81 +3,95 @@ package main
 import (
 	"context"
 	"fmt"
-	"log"
 	"os"
 	"os/signal"
-	"runtime"
 	"syscall"
 	"time"

+	log "github.com/sirupsen/logrus"
+	"go.uber.org/automaxprocs/maxprocs"
+
+	"github.com/imgproxy/imgproxy/v3/config"
+	"github.com/imgproxy/imgproxy/v3/errorreport"
+	"github.com/imgproxy/imgproxy/v3/imagedata"
+	"github.com/imgproxy/imgproxy/v3/logger"
+	"github.com/imgproxy/imgproxy/v3/memory"
+	"github.com/imgproxy/imgproxy/v3/metrics"
+	"github.com/imgproxy/imgproxy/v3/metrics/prometheus"
+	"github.com/imgproxy/imgproxy/v3/options"
+	"github.com/imgproxy/imgproxy/v3/version"
+	"github.com/imgproxy/imgproxy/v3/vips"
 )

-const version = "2.17.0"
-
-type ctxKey string
-
 func initialize() error {
-	log.SetOutput(os.Stdout)
-
-	if err := initLog(); err != nil {
+	if err := logger.Init(); err != nil {
 		return err
 	}

-	if err := configure(); err != nil {
+	maxprocs.Set(maxprocs.Logger(log.Debugf))
+
+	if err := config.Configure(); err != nil {
 		return err
 	}

-	if err := initNewrelic(); err != nil {
+	if err := metrics.Init(); err != nil {
 		return err
 	}

-	initPrometheus()
-
-	if err := initDownloading(); err != nil {
+	if err := imagedata.Init(); err != nil {
 		return err
 	}

-	initErrorsReporting()
+	initProcessingHandler()

-	if err := initVips(); err != nil {
+	errorreport.Init()
+
+	if err := vips.Init(); err != nil {
 		return err
 	}

-	if err := checkPresets(conf.Presets); err != nil {
-		shutdownVips()
+	if err := options.ParsePresets(config.Presets); err != nil {
+		vips.Shutdown()
+		return err
+	}
+
+	if err := options.ValidatePresets(); err != nil {
+		vips.Shutdown()
 		return err
 	}

 	return nil
 }

+func shutdown() {
+	vips.Shutdown()
+	metrics.Stop()
+	errorreport.Close()
+}
+
 func run() error {
 	if err := initialize(); err != nil {
 		return err
 	}

-	defer shutdownVips()
-	defer closeErrorsReporting()
+	defer shutdown()

 	go func() {
 		var logMemStats = len(os.Getenv("IMGPROXY_LOG_MEM_STATS")) > 0

-		for range time.Tick(time.Duration(conf.FreeMemoryInterval) * time.Second) {
-			freeMemory()
+		for range time.Tick(time.Duration(config.FreeMemoryInterval) * time.Second) {
+			memory.Free()

 			if logMemStats {
-				var m runtime.MemStats
-				runtime.ReadMemStats(&m)
-				logDebug("MEMORY USAGE: Sys=%d HeapIdle=%d HeapInuse=%d", m.Sys/1024/1024, m.HeapIdle/1024/1024, m.HeapInuse/1024/1024)
+				memory.LogStats()
 			}
 		}
 	}()

 	ctx, cancel := context.WithCancel(context.Background())

-	if prometheusEnabled {
-		if err := startPrometheusServer(cancel); err != nil {
-			return err
-		}
+	if err := prometheus.StartServer(cancel); err != nil {
+		return err
 	}

 	s, err := startServer(cancel)
@ -103,12 +117,12 @@ func main() {
 		case "health":
 			os.Exit(healthcheck())
 		case "version":
-			fmt.Println(version)
+			fmt.Println(version.Version())
 			os.Exit(0)
 		}
 	}

 	if err := run(); err != nil {
-		logFatal(err.Error())
+		log.Fatal(err)
 	}
 }
main_test.go (deleted, 27 lines)
@ -1,27 +0,0 @@
package main

import (
	"os"
	"testing"

	"github.com/stretchr/testify/suite"
)

type MainTestSuite struct {
	suite.Suite

	oldConf config
}

func TestMain(m *testing.M) {
	initialize()
	os.Exit(m.Run())
}

func (s *MainTestSuite) SetupTest() {
	s.oldConf = conf
}

func (s *MainTestSuite) TearDownTest() {
	conf = s.oldConf
}
@ -1,9 +1,9 @@
 // +build !linux

-package main
+package memory

 import "runtime/debug"

-func freeMemory() {
+func Free() {
 	debug.FreeOSMemory()
 }
@ -1,6 +1,6 @@
 // +build linux

-package main
+package memory

 /*
 #include <features.h>
@ -13,7 +13,7 @@ void malloc_trim(size_t pad){}
 import "C"
 import "runtime/debug"

-func freeMemory() {
+func Free() {
 	debug.FreeOSMemory()

 	C.malloc_trim(0)
memory/stats.go (new file, 23 lines)
@ -0,0 +1,23 @@
package memory

import (
	"runtime"

	log "github.com/sirupsen/logrus"

	"github.com/imgproxy/imgproxy/v3/vips"
)

func LogStats() {
	var m runtime.MemStats
	runtime.ReadMemStats(&m)
	log.Debugf(
		"GO MEMORY USAGE: Sys=%d HeapIdle=%d HeapInuse=%d",
		m.Sys/1024/1024, m.HeapIdle/1024/1024, m.HeapInuse/1024/1024,
	)

	log.Debugf(
		"VIPS MEMORY USAGE: Cur=%d Max=%d Allocs=%d",
		int(vips.GetMem())/1024/1024, int(vips.GetMemHighwater())/1024/1024, int(vips.GetAllocs()),
	)
}
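As a sketch (not part of the diff) of the background memory loop that main.go now builds from these helpers; the interval below is a stand-in for config.FreeMemoryInterval:

// Hypothetical example program, not from the commit.
package main

import (
	"time"

	"github.com/imgproxy/imgproxy/v3/memory"
)

func main() {
	interval := 10 * time.Second // assumption: stands in for config.FreeMemoryInterval seconds

	for range time.Tick(interval) {
		memory.Free()     // debug.FreeOSMemory (plus malloc_trim on Linux)
		memory.LogStats() // Go and VIPS memory usage at debug level
	}
}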
metrics/datadog/datadog.go (new file, 118 lines)
@ -0,0 +1,118 @@
package datadog

import (
	"context"
	"errors"
	"net/http"
	"os"
	"time"

	log "github.com/sirupsen/logrus"
	"gopkg.in/DataDog/dd-trace-go.v1/ddtrace/ext"
	"gopkg.in/DataDog/dd-trace-go.v1/ddtrace/tracer"

	"github.com/imgproxy/imgproxy/v3/config"
	"github.com/imgproxy/imgproxy/v3/version"
)

type spanCtxKey struct{}

var enabled bool

func Init() {
	if !config.DataDogEnable {
		return
	}

	name := os.Getenv("DD_SERVICE")
	if len(name) == 0 {
		name = "imgproxy"
	}

	tracer.Start(
		tracer.WithService(name),
		tracer.WithServiceVersion(version.Version()),
		tracer.WithLogger(dataDogLogger{}),
	)
}

func Stop() {
	if enabled {
		tracer.Stop()
	}
}

func StartRootSpan(ctx context.Context, rw http.ResponseWriter, r *http.Request) (context.Context, context.CancelFunc, http.ResponseWriter) {
	if !enabled {
		return ctx, func() {}, rw
	}

	span := tracer.StartSpan(
		"request",
		tracer.Measured(),
		tracer.SpanType("web"),
		tracer.Tag(ext.HTTPMethod, r.Method),
		tracer.Tag(ext.HTTPURL, r.RequestURI),
	)
	cancel := func() { span.Finish() }
	newRw := dataDogResponseWriter{rw, span}

	return context.WithValue(ctx, spanCtxKey{}, span), cancel, newRw
}

func StartSpan(ctx context.Context, name string) context.CancelFunc {
	if !enabled {
		return func() {}
	}

	if rootSpan, ok := ctx.Value(spanCtxKey{}).(tracer.Span); ok {
		span := tracer.StartSpan(name, tracer.Measured(), tracer.ChildOf(rootSpan.Context()))
		return func() { span.Finish() }
	}

	return func() {}
}

func SendError(ctx context.Context, err error) {
	if !enabled {
		return
	}

	if rootSpan, ok := ctx.Value(spanCtxKey{}).(tracer.Span); ok {
		rootSpan.Finish(tracer.WithError(err))
	}
}

func SendTimeout(ctx context.Context, d time.Duration) {
	if !enabled {
		return
	}

	if rootSpan, ok := ctx.Value(spanCtxKey{}).(tracer.Span); ok {
		rootSpan.SetTag("timeout_duration", d)
		rootSpan.Finish(tracer.WithError(errors.New("Timeout")))
	}
}

type dataDogLogger struct {
}

func (l dataDogLogger) Log(msg string) {
	log.Info(msg)
}

type dataDogResponseWriter struct {
	rw   http.ResponseWriter
	span tracer.Span
}

func (ddrw dataDogResponseWriter) Header() http.Header {
	return ddrw.rw.Header()
}

func (ddrw dataDogResponseWriter) Write(data []byte) (int, error) {
	return ddrw.rw.Write(data)
}

func (ddrw dataDogResponseWriter) WriteHeader(statusCode int) {
	ddrw.span.SetTag(ext.HTTPCode, statusCode)
	ddrw.rw.WriteHeader(statusCode)
}
metrics/metrics.go (new file, 81 lines)
@ -0,0 +1,81 @@
package metrics

import (
	"context"
	"net/http"
	"time"

	"github.com/imgproxy/imgproxy/v3/metrics/datadog"
	"github.com/imgproxy/imgproxy/v3/metrics/newrelic"
	"github.com/imgproxy/imgproxy/v3/metrics/prometheus"
)

func Init() error {
	prometheus.Init()

	if err := newrelic.Init(); err != nil {
		return nil
	}

	datadog.Init()

	return nil
}

func Stop() {
	datadog.Stop()
}

func StartRequest(ctx context.Context, rw http.ResponseWriter, r *http.Request) (context.Context, context.CancelFunc, http.ResponseWriter) {
	promCancel := prometheus.StartRequest()
	ctx, nrCancel, rw := newrelic.StartTransaction(ctx, rw, r)
	ctx, ddCancel, rw := datadog.StartRootSpan(ctx, rw, r)

	cancel := func() {
		promCancel()
		nrCancel()
		ddCancel()
	}

	return ctx, cancel, rw
}

func StartDownloadingSegment(ctx context.Context) context.CancelFunc {
	promCancel := prometheus.StartDownloadingSegment()
	nrCancel := newrelic.StartSegment(ctx, "Downloading image")
	ddCancel := datadog.StartSpan(ctx, "downloading_image")

	cancel := func() {
		promCancel()
		nrCancel()
		ddCancel()
	}

	return cancel
}

func StartProcessingSegment(ctx context.Context) context.CancelFunc {
	promCancel := prometheus.StartProcessingSegment()
	nrCancel := newrelic.StartSegment(ctx, "Processing image")
	ddCancel := datadog.StartSpan(ctx, "processing_image")

	cancel := func() {
		promCancel()
		nrCancel()
		ddCancel()
	}

	return cancel
}

func SendError(ctx context.Context, errType string, err error) {
	prometheus.IncrementErrorsTotal(errType)
	newrelic.SendError(ctx, err)
	datadog.SendError(ctx, err)
}

func SendTimeout(ctx context.Context, d time.Duration) {
	prometheus.IncrementErrorsTotal("timeout")
	newrelic.SendTimeout(ctx, d)
	datadog.SendTimeout(ctx, d)
}
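As a sketch (not part of the diff) of how a request handler could use this facade; the handler body and server setup are hypothetical, only the metrics calls come from the package above:

// Hypothetical example program, not from the commit.
package main

import (
	"net/http"

	"github.com/imgproxy/imgproxy/v3/metrics"
)

func handler(rw http.ResponseWriter, r *http.Request) {
	ctx, cancel, rw := metrics.StartRequest(r.Context(), rw, r)
	defer cancel()

	// Each returned cancel func closes the Prometheus timer, the New Relic
	// segment, and the Datadog span at once.
	stopDownload := metrics.StartDownloadingSegment(ctx)
	// ... download the source image ...
	stopDownload()

	stopProcessing := metrics.StartProcessingSegment(ctx)
	// ... process the image ...
	stopProcessing()

	rw.WriteHeader(http.StatusOK)
}

func main() {
	http.HandleFunc("/", handler)
	http.ListenAndServe(":8080", nil)
}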
metrics/newrelic/newrelic.go (new file, 96 lines)
@ -0,0 +1,96 @@
package newrelic

import (
	"context"
	"fmt"
	"net/http"
	"time"

	"github.com/imgproxy/imgproxy/v3/config"
	"github.com/newrelic/go-agent/v3/newrelic"
)

type transactionCtxKey struct{}

var (
	enabled = false

	newRelicApp *newrelic.Application
)

func Init() error {
	if len(config.NewRelicKey) == 0 {
		return nil
	}

	name := config.NewRelicAppName
	if len(name) == 0 {
		name = "imgproxy"
	}

	var err error

	newRelicApp, err = newrelic.NewApplication(
		newrelic.ConfigAppName(name),
		newrelic.ConfigLicense(config.NewRelicKey),
	)

	if err != nil {
		return fmt.Errorf("Can't init New Relic agent: %s", err)
	}

	enabled = true

	return nil
}

func StartTransaction(ctx context.Context, rw http.ResponseWriter, r *http.Request) (context.Context, context.CancelFunc, http.ResponseWriter) {
	if !enabled {
		return ctx, func() {}, rw
	}

	txn := newRelicApp.StartTransaction("request")
	txn.SetWebRequestHTTP(r)
	newRw := txn.SetWebResponse(rw)
	cancel := func() { txn.End() }
	return context.WithValue(ctx, transactionCtxKey{}, txn), cancel, newRw
}

func StartSegment(ctx context.Context, name string) context.CancelFunc {
	if !enabled {
		return func() {}
	}

	if txn, ok := ctx.Value(transactionCtxKey{}).(*newrelic.Transaction); ok {
		segment := txn.StartSegment(name)
		return func() { segment.End() }
	}

	return func() {}
}

func SendError(ctx context.Context, err error) {
	if !enabled {
		return
	}

	if txn, ok := ctx.Value(transactionCtxKey{}).(*newrelic.Transaction); ok {
		txn.NoticeError(err)
	}
}

func SendTimeout(ctx context.Context, d time.Duration) {
	if !enabled {
		return
	}

	if txn, ok := ctx.Value(transactionCtxKey{}).(*newrelic.Transaction); ok {
		txn.NoticeError(newrelic.Error{
			Message: "Timeout",
			Class:   "Timeout",
			Attributes: map[string]interface{}{
				"time": d.Seconds(),
			},
		})
	}
}
metrics/prometheus/prometheus.go (new file, 181 lines)
@ -0,0 +1,181 @@
package prometheus

import (
	"context"
	"fmt"
	"net/http"
	"time"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
	log "github.com/sirupsen/logrus"

	"github.com/imgproxy/imgproxy/v3/config"
	"github.com/imgproxy/imgproxy/v3/reuseport"
)

var (
	enabled = false

	requestsTotal      prometheus.Counter
	errorsTotal        *prometheus.CounterVec
	requestDuration    prometheus.Histogram
	downloadDuration   prometheus.Histogram
	processingDuration prometheus.Histogram
	bufferSize         *prometheus.HistogramVec
	bufferDefaultSize  *prometheus.GaugeVec
	bufferMaxSize      *prometheus.GaugeVec
)

func Init() {
	if len(config.PrometheusBind) == 0 {
		return
	}

	requestsTotal = prometheus.NewCounter(prometheus.CounterOpts{
		Namespace: config.PrometheusNamespace,
		Name:      "requests_total",
		Help:      "A counter of the total number of HTTP requests imgproxy processed.",
	})

	errorsTotal = prometheus.NewCounterVec(prometheus.CounterOpts{
		Namespace: config.PrometheusNamespace,
		Name:      "errors_total",
		Help:      "A counter of the occurred errors separated by type.",
	}, []string{"type"})

	requestDuration = prometheus.NewHistogram(prometheus.HistogramOpts{
		Namespace: config.PrometheusNamespace,
		Name:      "request_duration_seconds",
		Help:      "A histogram of the response latency.",
	})

	downloadDuration = prometheus.NewHistogram(prometheus.HistogramOpts{
		Namespace: config.PrometheusNamespace,
		Name:      "download_duration_seconds",
		Help:      "A histogram of the source image downloading latency.",
	})

	processingDuration = prometheus.NewHistogram(prometheus.HistogramOpts{
		Namespace: config.PrometheusNamespace,
		Name:      "processing_duration_seconds",
		Help:      "A histogram of the image processing latency.",
	})

	bufferSize = prometheus.NewHistogramVec(prometheus.HistogramOpts{
		Namespace: config.PrometheusNamespace,
		Name:      "buffer_size_bytes",
		Help:      "A histogram of the buffer size in bytes.",
		Buckets:   prometheus.ExponentialBuckets(1024, 2, 14),
	}, []string{"type"})

	bufferDefaultSize = prometheus.NewGaugeVec(prometheus.GaugeOpts{
		Namespace: config.PrometheusNamespace,
		Name:      "buffer_default_size_bytes",
		Help:      "A gauge of the buffer default size in bytes.",
	}, []string{"type"})

	bufferMaxSize = prometheus.NewGaugeVec(prometheus.GaugeOpts{
		Namespace: config.PrometheusNamespace,
		Name:      "buffer_max_size_bytes",
		Help:      "A gauge of the buffer max size in bytes.",
	}, []string{"type"})

	prometheus.MustRegister(
		requestsTotal,
		errorsTotal,
		requestDuration,
		downloadDuration,
		processingDuration,
		bufferSize,
		bufferDefaultSize,
		bufferMaxSize,
	)

	enabled = true
}

func StartServer(cancel context.CancelFunc) error {
	if !enabled {
		return nil
	}

	s := http.Server{Handler: promhttp.Handler()}

	l, err := reuseport.Listen("tcp", config.PrometheusBind)
	if err != nil {
		return fmt.Errorf("Can't start Prometheus metrics server: %s", err)
	}

	go func() {
		log.Infof("Starting Prometheus server at %s", config.PrometheusBind)
		if err := s.Serve(l); err != nil && err != http.ErrServerClosed {
			log.Error(err)
		}
		cancel()
	}()

	return nil
}

func StartRequest() context.CancelFunc {
	return startDuration(requestDuration)
}

func StartDownloadingSegment() context.CancelFunc {
	return startDuration(downloadDuration)
}

func StartProcessingSegment() context.CancelFunc {
	return startDuration(processingDuration)
}

func startDuration(m prometheus.Histogram) context.CancelFunc {
	if !enabled {
		return func() {}
	}

	t := time.Now()
	return func() {
		m.Observe(time.Since(t).Seconds())
	}
}

func IncrementErrorsTotal(t string) {
	if enabled {
		errorsTotal.With(prometheus.Labels{"type": t}).Inc()
	}
}

func IncrementRequestsTotal() {
	if enabled {
		requestsTotal.Inc()
	}
}

func ObserveBufferSize(t string, size int) {
	if enabled {
		bufferSize.With(prometheus.Labels{"type": t}).Observe(float64(size))
	}
}

func SetBufferDefaultSize(t string, size int) {
	if enabled {
		bufferDefaultSize.With(prometheus.Labels{"type": t}).Set(float64(size))
	}
}

func SetBufferMaxSize(t string, size int) {
	if enabled {
		bufferMaxSize.With(prometheus.Labels{"type": t}).Set(float64(size))
	}
}

func AddGaugeFunc(name, help string, f func() float64) {
	gauge := prometheus.NewGaugeFunc(prometheus.GaugeOpts{
		Namespace: config.PrometheusNamespace,
		Name:      name,
		Help:      help,
	}, f)
	prometheus.MustRegister(gauge)
}
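As a sketch (not part of the diff) of registering a custom collector through the new AddGaugeFunc helper; the gauge name and callback are made up, and the example assumes the Prometheus bind address has been configured so Init enables the other collectors:

// Hypothetical example program, not from the commit.
package main

import (
	"runtime"

	"github.com/imgproxy/imgproxy/v3/metrics/prometheus"
)

func main() {
	prometheus.Init()

	// Expose the current number of goroutines as an imgproxy-namespaced gauge.
	prometheus.AddGaugeFunc(
		"goroutines_total",
		"A gauge of the number of running goroutines.",
		func() float64 { return float64(runtime.NumGoroutine()) },
	)
}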
newrelic.go (deleted, 74 lines)
@ -1,74 +0,0 @@
package main

import (
	"context"
	"fmt"
	"net/http"
	"time"

	"github.com/newrelic/go-agent/v3/newrelic"
)

var (
	newRelicEnabled = false

	newRelicApp *newrelic.Application

	newRelicTransactionCtxKey = ctxKey("newRelicTransaction")
)

func initNewrelic() error {
	if len(conf.NewRelicKey) == 0 {
		return nil
	}

	name := conf.NewRelicAppName
	if len(name) == 0 {
		name = "imgproxy"
	}

	var err error

	newRelicApp, err = newrelic.NewApplication(
		newrelic.ConfigAppName(name),
		newrelic.ConfigLicense(conf.NewRelicKey),
	)

	if err != nil {
		return fmt.Errorf("Can't init New Relic agent: %s", err)
	}

	newRelicEnabled = true

	return nil
}

func startNewRelicTransaction(ctx context.Context, rw http.ResponseWriter, r *http.Request) (context.Context, context.CancelFunc, http.ResponseWriter) {
	txn := newRelicApp.StartTransaction("request")
	txn.SetWebRequestHTTP(r)
	newRw := txn.SetWebResponse(rw)
	cancel := func() { txn.End() }
	return context.WithValue(ctx, newRelicTransactionCtxKey, txn), cancel, newRw
}

func startNewRelicSegment(ctx context.Context, name string) context.CancelFunc {
	txn := ctx.Value(newRelicTransactionCtxKey).(*newrelic.Transaction)
	segment := txn.StartSegment(name)
	return func() { segment.End() }
}

func sendErrorToNewRelic(ctx context.Context, err error) {
	txn := ctx.Value(newRelicTransactionCtxKey).(*newrelic.Transaction)
	txn.NoticeError(err)
}

func sendTimeoutToNewRelic(ctx context.Context, d time.Duration) {
	txn := ctx.Value(newRelicTransactionCtxKey).(*newrelic.Transaction)
	txn.NoticeError(newrelic.Error{
		Message: "Timeout",
		Class:   "Timeout",
		Attributes: map[string]interface{}{
			"time": d.Seconds(),
		},
	})
}
options/gravity_type.go (new file, 52 lines)
@ -0,0 +1,52 @@
package options

import "fmt"

type GravityType int

const (
	GravityUnknown GravityType = iota
	GravityCenter
	GravityNorth
	GravityEast
	GravitySouth
	GravityWest
	GravityNorthWest
	GravityNorthEast
	GravitySouthWest
	GravitySouthEast
	GravitySmart
	GravityFocusPoint
)

var gravityTypes = map[string]GravityType{
	"ce":   GravityCenter,
	"no":   GravityNorth,
	"ea":   GravityEast,
	"so":   GravitySouth,
	"we":   GravityWest,
	"nowe": GravityNorthWest,
	"noea": GravityNorthEast,
	"sowe": GravitySouthWest,
	"soea": GravitySouthEast,
	"sm":   GravitySmart,
	"fp":   GravityFocusPoint,
}

func (gt GravityType) String() string {
	for k, v := range gravityTypes {
		if v == gt {
			return k
		}
	}
	return ""
}

func (gt GravityType) MarshalJSON() ([]byte, error) {
	for k, v := range gravityTypes {
		if v == gt {
			return []byte(fmt.Sprintf("%q", k)), nil
		}
	}
	return []byte("null"), nil
}
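As a sketch (not part of the diff): GravityType marshals to its short URL token, so a struct holding it serializes the same way it is written in processing-option URLs.

// Hypothetical example program, not from the commit.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/imgproxy/imgproxy/v3/options"
)

func main() {
	g := options.GravitySouthEast

	fmt.Println(g.String()) // soea

	b, _ := json.Marshal(struct {
		Gravity options.GravityType `json:"gravity"`
	}{g})
	fmt.Println(string(b)) // {"gravity":"soea"}
}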
@ -1,13 +1,23 @@
-package main
+package options

 import (
 	"fmt"
 	"strings"
 )

-type presets map[string]urlOptions
+var presets map[string]urlOptions

-func parsePreset(p presets, presetStr string) error {
+func ParsePresets(presetStrs []string) error {
+	for _, presetStr := range presetStrs {
+		if err := parsePreset(presetStr); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func parsePreset(presetStr string) error {
 	presetStr = strings.Trim(presetStr, " ")

 	if len(presetStr) == 0 || strings.HasPrefix(presetStr, "#") {
@ -38,16 +48,19 @@ func parsePreset(p presets, presetStr string) error {
 		return fmt.Errorf("Invalid preset value: %s", presetStr)
 	}

-	p[name] = opts
+	if presets == nil {
+		presets = make(map[string]urlOptions)
+	}
+	presets[name] = opts

 	return nil
 }

-func checkPresets(p presets) error {
-	var po processingOptions
+func ValidatePresets() error {
+	var po ProcessingOptions

-	for name, opts := range p {
-		if err := applyProcessingOptions(&po, opts); err != nil {
+	for name, opts := range presets {
+		if err := applyURLOptions(&po, opts); err != nil {
 			return fmt.Errorf("Error in preset `%s`: %s", name, err)
 		}
 	}
@ -1,109 +1,102 @@
-package main
+package options

 import (
 	"fmt"
 	"testing"

+	"github.com/imgproxy/imgproxy/v3/config"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/suite"
 )

-type PresetsTestSuite struct{ MainTestSuite }
+type PresetsTestSuite struct{ suite.Suite }
+
+func (s *PresetsTestSuite) SetupTest() {
+	config.Reset()
+	// Reset presets
+	presets = make(map[string]urlOptions)
+}

 func (s *PresetsTestSuite) TestParsePreset() {
-	p := make(presets)
-
-	err := parsePreset(p, "test=resize:fit:100:200/sharpen:2")
+	err := parsePreset("test=resize:fit:100:200/sharpen:2")

 	require.Nil(s.T(), err)

 	assert.Equal(s.T(), urlOptions{
 		urlOption{Name: "resize", Args: []string{"fit", "100", "200"}},
 		urlOption{Name: "sharpen", Args: []string{"2"}},
-	}, p["test"])
+	}, presets["test"])
 }

 func (s *PresetsTestSuite) TestParsePresetInvalidString() {
-	p := make(presets)
-
 	presetStr := "resize:fit:100:200/sharpen:2"
-	err := parsePreset(p, presetStr)
+	err := parsePreset(presetStr)

 	assert.Equal(s.T(), fmt.Errorf("Invalid preset string: %s", presetStr), err)
-	assert.Empty(s.T(), p)
+	assert.Empty(s.T(), presets)
 }

 func (s *PresetsTestSuite) TestParsePresetEmptyName() {
-	p := make(presets)
-
 	presetStr := "=resize:fit:100:200/sharpen:2"
-	err := parsePreset(p, presetStr)
+	err := parsePreset(presetStr)

 	assert.Equal(s.T(), fmt.Errorf("Empty preset name: %s", presetStr), err)
-	assert.Empty(s.T(), p)
+	assert.Empty(s.T(), presets)
 }

 func (s *PresetsTestSuite) TestParsePresetEmptyValue() {
-	p := make(presets)
-
 	presetStr := "test="
-	err := parsePreset(p, presetStr)
+	err := parsePreset(presetStr)

 	assert.Equal(s.T(), fmt.Errorf("Empty preset value: %s", presetStr), err)
-	assert.Empty(s.T(), p)
+	assert.Empty(s.T(), presets)
 }

 func (s *PresetsTestSuite) TestParsePresetInvalidValue() {
-	p := make(presets)
-
 	presetStr := "test=resize:fit:100:200/sharpen:2/blur"
-	err := parsePreset(p, presetStr)
+	err := parsePreset(presetStr)

 	assert.Equal(s.T(), fmt.Errorf("Invalid preset value: %s", presetStr), err)
-	assert.Empty(s.T(), p)
+	assert.Empty(s.T(), presets)
 }

 func (s *PresetsTestSuite) TestParsePresetEmptyString() {
-	p := make(presets)
-
-	err := parsePreset(p, " ")
+	err := parsePreset(" ")

 	assert.Nil(s.T(), err)
-	assert.Empty(s.T(), p)
+	assert.Empty(s.T(), presets)
 }

 func (s *PresetsTestSuite) TestParsePresetComment() {
-	p := make(presets)
-
-	err := parsePreset(p, "# test=resize:fit:100:200/sharpen:2")
+	err := parsePreset("# test=resize:fit:100:200/sharpen:2")

 	assert.Nil(s.T(), err)
-	assert.Empty(s.T(), p)
+	assert.Empty(s.T(), presets)
 }

-func (s *PresetsTestSuite) TestCheckPresets() {
-	p := presets{
+func (s *PresetsTestSuite) TestValidatePresets() {
+	presets = map[string]urlOptions{
 		"test": urlOptions{
 			urlOption{Name: "resize", Args: []string{"fit", "100", "200"}},
 			urlOption{Name: "sharpen", Args: []string{"2"}},
 		},
 	}

-	err := checkPresets(p)
+	err := ValidatePresets()

 	assert.Nil(s.T(), err)
 }

-func (s *PresetsTestSuite) TestCheckPresetsInvalid() {
-	p := presets{
+func (s *PresetsTestSuite) TestValidatePresetsInvalid() {
+	presets = map[string]urlOptions{
 		"test": urlOptions{
 			urlOption{Name: "resize", Args: []string{"fit", "-1", "-2"}},
 			urlOption{Name: "sharpen", Args: []string{"2"}},
 		},
 	}

-	err := checkPresets(p)
+	err := ValidatePresets()

 	assert.Error(s.T(), err)
 }
File diff suppressed because it is too large.
options/processing_options_test.go (new file, 596 lines)
@ -0,0 +1,596 @@
package options

import (
	"encoding/base64"
	"fmt"
	"net/http"
	"net/url"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"github.com/stretchr/testify/suite"

	"github.com/imgproxy/imgproxy/v3/config"
	"github.com/imgproxy/imgproxy/v3/imagetype"
)

type ProcessingOptionsTestSuite struct{ suite.Suite }

func (s *ProcessingOptionsTestSuite) SetupTest() {
	config.Reset()
	// Reset presets
	presets = make(map[string]urlOptions)
}

func (s *ProcessingOptionsTestSuite) TestParseBase64URL() {
	originURL := "http://images.dev/lorem/ipsum.jpg?param=value"
	path := fmt.Sprintf("/size:100:100/%s.png", base64.RawURLEncoding.EncodeToString([]byte(originURL)))
	po, imageURL, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)
	assert.Equal(s.T(), originURL, imageURL)
	assert.Equal(s.T(), imagetype.PNG, po.Format)
}

func (s *ProcessingOptionsTestSuite) TestParseBase64URLWithoutExtension() {
	originURL := "http://images.dev/lorem/ipsum.jpg?param=value"
	path := fmt.Sprintf("/size:100:100/%s", base64.RawURLEncoding.EncodeToString([]byte(originURL)))
	po, imageURL, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)
	assert.Equal(s.T(), originURL, imageURL)
	assert.Equal(s.T(), imagetype.Unknown, po.Format)
}

func (s *ProcessingOptionsTestSuite) TestParseBase64URLWithBase() {
	config.BaseURL = "http://images.dev/"

	originURL := "lorem/ipsum.jpg?param=value"
	path := fmt.Sprintf("/size:100:100/%s.png", base64.RawURLEncoding.EncodeToString([]byte(originURL)))
	po, imageURL, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)
	assert.Equal(s.T(), fmt.Sprintf("%s%s", config.BaseURL, originURL), imageURL)
	assert.Equal(s.T(), imagetype.PNG, po.Format)
}

func (s *ProcessingOptionsTestSuite) TestParsePlainURL() {
	originURL := "http://images.dev/lorem/ipsum.jpg"
	path := fmt.Sprintf("/size:100:100/plain/%s@png", originURL)
	po, imageURL, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)
	assert.Equal(s.T(), originURL, imageURL)
	assert.Equal(s.T(), imagetype.PNG, po.Format)
}

func (s *ProcessingOptionsTestSuite) TestParsePlainURLWithoutExtension() {
	originURL := "http://images.dev/lorem/ipsum.jpg"
	path := fmt.Sprintf("/size:100:100/plain/%s", originURL)

	po, imageURL, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)
	assert.Equal(s.T(), originURL, imageURL)
	assert.Equal(s.T(), imagetype.Unknown, po.Format)
}

func (s *ProcessingOptionsTestSuite) TestParsePlainURLEscaped() {
	originURL := "http://images.dev/lorem/ipsum.jpg?param=value"
	path := fmt.Sprintf("/size:100:100/plain/%s@png", url.PathEscape(originURL))
	po, imageURL, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)
	assert.Equal(s.T(), originURL, imageURL)
	assert.Equal(s.T(), imagetype.PNG, po.Format)
}

func (s *ProcessingOptionsTestSuite) TestParsePlainURLWithBase() {
	config.BaseURL = "http://images.dev/"

	originURL := "lorem/ipsum.jpg"
	path := fmt.Sprintf("/size:100:100/plain/%s@png", originURL)
	po, imageURL, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)
	assert.Equal(s.T(), fmt.Sprintf("%s%s", config.BaseURL, originURL), imageURL)
	assert.Equal(s.T(), imagetype.PNG, po.Format)
}

func (s *ProcessingOptionsTestSuite) TestParsePlainURLEscapedWithBase() {
	config.BaseURL = "http://images.dev/"

	originURL := "lorem/ipsum.jpg?param=value"
	path := fmt.Sprintf("/size:100:100/plain/%s@png", url.PathEscape(originURL))
	po, imageURL, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)
	assert.Equal(s.T(), fmt.Sprintf("%s%s", config.BaseURL, originURL), imageURL)
	assert.Equal(s.T(), imagetype.PNG, po.Format)
}

// func (s *ProcessingOptionsTestSuite) TestParseURLAllowedSource() {
// 	config.AllowedSources = []string{"local://", "http://images.dev/"}

// 	path := "/plain/http://images.dev/lorem/ipsum.jpg"
// 	_, _, err := ParsePath(path, make(http.Header))

// 	require.Nil(s.T(), err)
// }

// func (s *ProcessingOptionsTestSuite) TestParseURLNotAllowedSource() {
// 	config.AllowedSources = []string{"local://", "http://images.dev/"}

// 	path := "/plain/s3://images/lorem/ipsum.jpg"
// 	_, _, err := ParsePath(path, make(http.Header))

// 	require.Error(s.T(), err)
// }

func (s *ProcessingOptionsTestSuite) TestParsePathFormat() {
	path := "/format:webp/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.Equal(s.T(), imagetype.WEBP, po.Format)
}

func (s *ProcessingOptionsTestSuite) TestParsePathResize() {
	path := "/resize:fill:100:200:1/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.Equal(s.T(), ResizeFill, po.ResizingType)
	assert.Equal(s.T(), 100, po.Width)
	assert.Equal(s.T(), 200, po.Height)
	assert.True(s.T(), po.Enlarge)
}

func (s *ProcessingOptionsTestSuite) TestParsePathResizingType() {
	path := "/resizing_type:fill/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.Equal(s.T(), ResizeFill, po.ResizingType)
}

func (s *ProcessingOptionsTestSuite) TestParsePathSize() {
	path := "/size:100:200:1/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.Equal(s.T(), 100, po.Width)
	assert.Equal(s.T(), 200, po.Height)
	assert.True(s.T(), po.Enlarge)
}

func (s *ProcessingOptionsTestSuite) TestParsePathWidth() {
	path := "/width:100/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.Equal(s.T(), 100, po.Width)
}

func (s *ProcessingOptionsTestSuite) TestParsePathHeight() {
	path := "/height:100/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.Equal(s.T(), 100, po.Height)
}

func (s *ProcessingOptionsTestSuite) TestParsePathEnlarge() {
	path := "/enlarge:1/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.True(s.T(), po.Enlarge)
}

func (s *ProcessingOptionsTestSuite) TestParsePathExtend() {
	path := "/extend:1:so:10:20/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.Equal(s.T(), true, po.Extend.Enabled)
	assert.Equal(s.T(), GravitySouth, po.Extend.Gravity.Type)
	assert.Equal(s.T(), 10.0, po.Extend.Gravity.X)
	assert.Equal(s.T(), 20.0, po.Extend.Gravity.Y)
}

func (s *ProcessingOptionsTestSuite) TestParsePathGravity() {
	path := "/gravity:soea/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.Equal(s.T(), GravitySouthEast, po.Gravity.Type)
}

func (s *ProcessingOptionsTestSuite) TestParsePathGravityFocuspoint() {
	path := "/gravity:fp:0.5:0.75/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.Equal(s.T(), GravityFocusPoint, po.Gravity.Type)
	assert.Equal(s.T(), 0.5, po.Gravity.X)
	assert.Equal(s.T(), 0.75, po.Gravity.Y)
}

func (s *ProcessingOptionsTestSuite) TestParsePathQuality() {
	path := "/quality:55/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.Equal(s.T(), 55, po.Quality)
}

func (s *ProcessingOptionsTestSuite) TestParsePathBackground() {
	path := "/background:128:129:130/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.True(s.T(), po.Flatten)
	assert.Equal(s.T(), uint8(128), po.Background.R)
	assert.Equal(s.T(), uint8(129), po.Background.G)
	assert.Equal(s.T(), uint8(130), po.Background.B)
}

func (s *ProcessingOptionsTestSuite) TestParsePathBackgroundHex() {
	path := "/background:ffddee/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.True(s.T(), po.Flatten)
	assert.Equal(s.T(), uint8(0xff), po.Background.R)
	assert.Equal(s.T(), uint8(0xdd), po.Background.G)
	assert.Equal(s.T(), uint8(0xee), po.Background.B)
}

func (s *ProcessingOptionsTestSuite) TestParsePathBackgroundDisable() {
	path := "/background:fff/background:/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.False(s.T(), po.Flatten)
}

func (s *ProcessingOptionsTestSuite) TestParsePathBlur() {
	path := "/blur:0.2/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.Equal(s.T(), float32(0.2), po.Blur)
}

func (s *ProcessingOptionsTestSuite) TestParsePathSharpen() {
	path := "/sharpen:0.2/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.Equal(s.T(), float32(0.2), po.Sharpen)
}

func (s *ProcessingOptionsTestSuite) TestParsePathDpr() {
	path := "/dpr:2/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.Equal(s.T(), 2.0, po.Dpr)
}

func (s *ProcessingOptionsTestSuite) TestParsePathWatermark() {
	path := "/watermark:0.5:soea:10:20:0.6/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.True(s.T(), po.Watermark.Enabled)
	assert.Equal(s.T(), GravitySouthEast, po.Watermark.Gravity.Type)
	assert.Equal(s.T(), 10.0, po.Watermark.Gravity.X)
	assert.Equal(s.T(), 20.0, po.Watermark.Gravity.Y)
	assert.Equal(s.T(), 0.6, po.Watermark.Scale)
}

func (s *ProcessingOptionsTestSuite) TestParsePathPreset() {
	presets["test1"] = urlOptions{
		urlOption{Name: "resizing_type", Args: []string{"fill"}},
	}

	presets["test2"] = urlOptions{
		urlOption{Name: "blur", Args: []string{"0.2"}},
		urlOption{Name: "quality", Args: []string{"50"}},
	}

	path := "/preset:test1:test2/plain/http://images.dev/lorem/ipsum.jpg"
	po, _, err := ParsePath(path, make(http.Header))

	require.Nil(s.T(), err)

	assert.Equal(s.T(), ResizeFill, po.ResizingType)
	assert.Equal(s.T(), float32(0.2), po.Blur)
	assert.Equal(s.T(), 50, po.Quality)
}

func (s *ProcessingOptionsTestSuite) TestParsePathPresetDefault() {
|
||||||
|
presets["default"] = urlOptions{
|
||||||
|
urlOption{Name: "resizing_type", Args: []string{"fill"}},
|
||||||
|
urlOption{Name: "blur", Args: []string{"0.2"}},
|
||||||
|
urlOption{Name: "quality", Args: []string{"50"}},
|
||||||
|
}
|
||||||
|
|
||||||
|
path := "/quality:70/plain/http://images.dev/lorem/ipsum.jpg"
|
||||||
|
po, _, err := ParsePath(path, make(http.Header))
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), ResizeFill, po.ResizingType)
|
||||||
|
assert.Equal(s.T(), float32(0.2), po.Blur)
|
||||||
|
assert.Equal(s.T(), 70, po.Quality)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParsePathPresetLoopDetection() {
|
||||||
|
presets["test1"] = urlOptions{
|
||||||
|
urlOption{Name: "resizing_type", Args: []string{"fill"}},
|
||||||
|
}
|
||||||
|
|
||||||
|
presets["test2"] = urlOptions{
|
||||||
|
urlOption{Name: "blur", Args: []string{"0.2"}},
|
||||||
|
urlOption{Name: "quality", Args: []string{"50"}},
|
||||||
|
}
|
||||||
|
|
||||||
|
path := "/preset:test1:test2:test1/plain/http://images.dev/lorem/ipsum.jpg"
|
||||||
|
po, _, err := ParsePath(path, make(http.Header))
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
require.ElementsMatch(s.T(), po.UsedPresets, []string{"test1", "test2"})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParsePathCachebuster() {
|
||||||
|
path := "/cachebuster:123/plain/http://images.dev/lorem/ipsum.jpg"
|
||||||
|
po, _, err := ParsePath(path, make(http.Header))
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), "123", po.CacheBuster)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParsePathStripMetadata() {
|
||||||
|
path := "/strip_metadata:true/plain/http://images.dev/lorem/ipsum.jpg"
|
||||||
|
po, _, err := ParsePath(path, make(http.Header))
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
assert.True(s.T(), po.StripMetadata)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParsePathWebpDetection() {
|
||||||
|
config.EnableWebpDetection = true
|
||||||
|
|
||||||
|
path := "/plain/http://images.dev/lorem/ipsum.jpg"
|
||||||
|
headers := http.Header{"Accept": []string{"image/webp"}}
|
||||||
|
po, _, err := ParsePath(path, headers)
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), true, po.PreferWebP)
|
||||||
|
assert.Equal(s.T(), false, po.EnforceWebP)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParsePathWebpEnforce() {
|
||||||
|
config.EnforceWebp = true
|
||||||
|
|
||||||
|
path := "/plain/http://images.dev/lorem/ipsum.jpg@png"
|
||||||
|
headers := http.Header{"Accept": []string{"image/webp"}}
|
||||||
|
po, _, err := ParsePath(path, headers)
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), true, po.PreferWebP)
|
||||||
|
assert.Equal(s.T(), true, po.EnforceWebP)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParsePathWidthHeader() {
|
||||||
|
config.EnableClientHints = true
|
||||||
|
|
||||||
|
path := "/plain/http://images.dev/lorem/ipsum.jpg@png"
|
||||||
|
headers := http.Header{"Width": []string{"100"}}
|
||||||
|
po, _, err := ParsePath(path, headers)
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), 100, po.Width)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParsePathWidthHeaderDisabled() {
|
||||||
|
path := "/plain/http://images.dev/lorem/ipsum.jpg@png"
|
||||||
|
headers := http.Header{"Width": []string{"100"}}
|
||||||
|
po, _, err := ParsePath(path, headers)
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), 0, po.Width)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParsePathWidthHeaderRedefine() {
|
||||||
|
config.EnableClientHints = true
|
||||||
|
|
||||||
|
path := "/width:150/plain/http://images.dev/lorem/ipsum.jpg@png"
|
||||||
|
headers := http.Header{"Width": []string{"100"}}
|
||||||
|
po, _, err := ParsePath(path, headers)
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), 150, po.Width)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParsePathViewportWidthHeader() {
|
||||||
|
config.EnableClientHints = true
|
||||||
|
|
||||||
|
path := "/plain/http://images.dev/lorem/ipsum.jpg@png"
|
||||||
|
headers := http.Header{"Viewport-Width": []string{"100"}}
|
||||||
|
po, _, err := ParsePath(path, headers)
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), 100, po.Width)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParsePathViewportWidthHeaderDisabled() {
|
||||||
|
path := "/plain/http://images.dev/lorem/ipsum.jpg@png"
|
||||||
|
headers := http.Header{"Viewport-Width": []string{"100"}}
|
||||||
|
po, _, err := ParsePath(path, headers)
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), 0, po.Width)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParsePathViewportWidthHeaderRedefine() {
|
||||||
|
config.EnableClientHints = true
|
||||||
|
|
||||||
|
path := "/width:150/plain/http://images.dev/lorem/ipsum.jpg@png"
|
||||||
|
headers := http.Header{"Viewport-Width": []string{"100"}}
|
||||||
|
po, _, err := ParsePath(path, headers)
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), 150, po.Width)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParsePathDprHeader() {
|
||||||
|
config.EnableClientHints = true
|
||||||
|
|
||||||
|
path := "/plain/http://images.dev/lorem/ipsum.jpg@png"
|
||||||
|
headers := http.Header{"Dpr": []string{"2"}}
|
||||||
|
po, _, err := ParsePath(path, headers)
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), 2.0, po.Dpr)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParsePathDprHeaderDisabled() {
|
||||||
|
path := "/plain/http://images.dev/lorem/ipsum.jpg@png"
|
||||||
|
headers := http.Header{"Dpr": []string{"2"}}
|
||||||
|
po, _, err := ParsePath(path, headers)
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), 1.0, po.Dpr)
|
||||||
|
}
|
||||||
|
|
||||||
|
// func (s *ProcessingOptionsTestSuite) TestParsePathSigned() {
|
||||||
|
// config.Keys = [][]byte{[]byte("test-key")}
|
||||||
|
// config.Salts = [][]byte{[]byte("test-salt")}
|
||||||
|
|
||||||
|
// path := "/HcvNognEV1bW6f8zRqxNYuOkV0IUf1xloRb57CzbT4g/width:150/plain/http://images.dev/lorem/ipsum.jpg@png"
|
||||||
|
// _, _, err := ParsePath(path, make(http.Header))
|
||||||
|
|
||||||
|
// require.Nil(s.T(), err)
|
||||||
|
// }
|
||||||
|
|
||||||
|
// func (s *ProcessingOptionsTestSuite) TestParsePathSignedInvalid() {
|
||||||
|
// config.Keys = [][]byte{[]byte("test-key")}
|
||||||
|
// config.Salts = [][]byte{[]byte("test-salt")}
|
||||||
|
|
||||||
|
// path := "/unsafe/width:150/plain/http://images.dev/lorem/ipsum.jpg@png"
|
||||||
|
// _, _, err := ParsePath(path, make(http.Header))
|
||||||
|
|
||||||
|
// require.Error(s.T(), err)
|
||||||
|
// assert.Equal(s.T(), signature.ErrInvalidSignature.Error(), err.Error())
|
||||||
|
// }
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParsePathOnlyPresets() {
|
||||||
|
config.OnlyPresets = true
|
||||||
|
presets["test1"] = urlOptions{
|
||||||
|
urlOption{Name: "blur", Args: []string{"0.2"}},
|
||||||
|
}
|
||||||
|
presets["test2"] = urlOptions{
|
||||||
|
urlOption{Name: "quality", Args: []string{"50"}},
|
||||||
|
}
|
||||||
|
|
||||||
|
path := "/test1:test2/plain/http://images.dev/lorem/ipsum.jpg"
|
||||||
|
|
||||||
|
po, _, err := ParsePath(path, make(http.Header))
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), float32(0.2), po.Blur)
|
||||||
|
assert.Equal(s.T(), 50, po.Quality)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParseSkipProcessing() {
|
||||||
|
path := "/skp:jpg:png/plain/http://images.dev/lorem/ipsum.jpg"
|
||||||
|
|
||||||
|
po, _, err := ParsePath(path, make(http.Header))
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), []imagetype.Type{imagetype.JPEG, imagetype.PNG}, po.SkipProcessingFormats)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParseSkipProcessingInvalid() {
|
||||||
|
path := "/skp:jpg:png:bad_format/plain/http://images.dev/lorem/ipsum.jpg"
|
||||||
|
|
||||||
|
_, _, err := ParsePath(path, make(http.Header))
|
||||||
|
|
||||||
|
require.Error(s.T(), err)
|
||||||
|
assert.Equal(s.T(), "Invalid image format in skip processing: bad_format", err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParseExpires() {
|
||||||
|
path := "/exp:32503669200/plain/http://images.dev/lorem/ipsum.jpg"
|
||||||
|
_, _, err := ParsePath(path, make(http.Header))
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParseExpiresExpired() {
|
||||||
|
path := "/exp:1609448400/plain/http://images.dev/lorem/ipsum.jpg"
|
||||||
|
_, _, err := ParsePath(path, make(http.Header))
|
||||||
|
|
||||||
|
require.Error(s.T(), err)
|
||||||
|
assert.Equal(s.T(), errExpiredURL.Error(), err.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *ProcessingOptionsTestSuite) TestParseBase64URLOnlyPresets() {
|
||||||
|
config.OnlyPresets = true
|
||||||
|
presets["test1"] = urlOptions{
|
||||||
|
urlOption{Name: "blur", Args: []string{"0.2"}},
|
||||||
|
}
|
||||||
|
presets["test2"] = urlOptions{
|
||||||
|
urlOption{Name: "quality", Args: []string{"50"}},
|
||||||
|
}
|
||||||
|
|
||||||
|
originURL := "http://images.dev/lorem/ipsum.jpg?param=value"
|
||||||
|
path := fmt.Sprintf("/test1:test2/%s.png", base64.RawURLEncoding.EncodeToString([]byte(originURL)))
|
||||||
|
|
||||||
|
po, imageURL, err := ParsePath(path, make(http.Header))
|
||||||
|
|
||||||
|
require.Nil(s.T(), err)
|
||||||
|
|
||||||
|
assert.Equal(s.T(), float32(0.2), po.Blur)
|
||||||
|
assert.Equal(s.T(), 50, po.Quality)
|
||||||
|
assert.Equal(s.T(), originURL, imageURL)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestProcessingOptions(t *testing.T) {
|
||||||
|
suite.Run(t, new(ProcessingOptionsTestSuite))
|
||||||
|
}
|
options/resize_type.go (Normal file, 39 lines)
@ -0,0 +1,39 @@
package options

import "fmt"

type ResizeType int

const (
	ResizeFit ResizeType = iota
	ResizeFill
	ResizeFillDown
	ResizeForce
	ResizeAuto
)

var resizeTypes = map[string]ResizeType{
	"fit":       ResizeFit,
	"fill":      ResizeFill,
	"fill-down": ResizeFillDown,
	"force":     ResizeForce,
	"auto":      ResizeAuto,
}

func (rt ResizeType) String() string {
	for k, v := range resizeTypes {
		if v == rt {
			return k
		}
	}
	return ""
}

func (rt ResizeType) MarshalJSON() ([]byte, error) {
	for k, v := range resizeTypes {
		if v == rt {
			return []byte(fmt.Sprintf("%q", k)), nil
		}
	}
	return []byte("null"), nil
}
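The resizeTypes map is the single source of truth in both directions: the option parser is expected to look URL names up in it, while String and MarshalJSON walk it in reverse. A minimal sketch of that round trip (illustrative only; the exampleResizeType helper is hypothetical and not part of the diff):

package options

import "encoding/json"

// exampleResizeType is a hypothetical helper showing how the resizeTypes map,
// String, and MarshalJSON fit together.
func exampleResizeType() {
	if rt, ok := resizeTypes["fill"]; ok {
		_ = rt.String()          // "fill"
		b, _ := json.Marshal(rt) // `"fill"`, produced by MarshalJSON
		_ = b
	}
}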
options/url.go (Normal file, 85 lines)
@ -0,0 +1,85 @@
package options

import (
	"encoding/base64"
	"errors"
	"fmt"
	"net/url"
	"strings"

	"github.com/imgproxy/imgproxy/v3/config"
)

const urlTokenPlain = "plain"

func addBaseURL(u string) string {
	if len(config.BaseURL) == 0 || strings.HasPrefix(u, config.BaseURL) {
		return u
	}

	return fmt.Sprintf("%s%s", config.BaseURL, u)
}

func decodeBase64URL(parts []string) (string, string, error) {
	var format string

	encoded := strings.Join(parts, "")
	urlParts := strings.Split(encoded, ".")

	if len(urlParts[0]) == 0 {
		return "", "", errors.New("Image URL is empty")
	}

	if len(urlParts) > 2 {
		return "", "", fmt.Errorf("Multiple formats are specified: %s", encoded)
	}

	if len(urlParts) == 2 && len(urlParts[1]) > 0 {
		format = urlParts[1]
	}

	imageURL, err := base64.RawURLEncoding.DecodeString(strings.TrimRight(urlParts[0], "="))
	if err != nil {
		return "", "", fmt.Errorf("Invalid url encoding: %s", encoded)
	}

	return addBaseURL(string(imageURL)), format, nil
}

func decodePlainURL(parts []string) (string, string, error) {
	var format string

	encoded := strings.Join(parts, "/")
	urlParts := strings.Split(encoded, "@")

	if len(urlParts[0]) == 0 {
		return "", "", errors.New("Image URL is empty")
	}

	if len(urlParts) > 2 {
		return "", "", fmt.Errorf("Multiple formats are specified: %s", encoded)
	}

	if len(urlParts) == 2 && len(urlParts[1]) > 0 {
		format = urlParts[1]
	}

	unescaped, err := url.PathUnescape(urlParts[0])
	if err != nil {
		return "", "", fmt.Errorf("Invalid url encoding: %s", encoded)
	}

	return addBaseURL(unescaped), format, nil
}

func DecodeURL(parts []string) (string, string, error) {
	if len(parts) == 0 {
		return "", "", errors.New("Image URL is empty")
	}

	if parts[0] == urlTokenPlain && len(parts) > 1 {
		return decodePlainURL(parts[1:])
	}

	return decodeBase64URL(parts)
}
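DecodeURL takes the path segments that follow the processing options and returns the source URL plus an optional extension override. A small sketch of the two supported styles, matching the paths used in processing_options_test.go (illustrative; exampleDecodeURL is hypothetical and config.BaseURL is assumed to be empty):

package options

import (
	"encoding/base64"
	"fmt"
)

// exampleDecodeURL demonstrates both source-URL styles handled above.
func exampleDecodeURL() {
	// Plain style: /plain/<escaped source URL>[@<format>]
	src, ext, _ := DecodeURL([]string{"plain", "http:", "", "images.dev", "lorem", "ipsum.jpg@png"})
	fmt.Println(src, ext) // http://images.dev/lorem/ipsum.jpg png

	// Base64 style: /<base64url(source URL)>[.<format>]
	encoded := base64.RawURLEncoding.EncodeToString([]byte("http://images.dev/lorem/ipsum.jpg"))
	src, ext, _ = DecodeURL([]string{encoded + ".png"})
	fmt.Println(src, ext) // http://images.dev/lorem/ipsum.jpg png
}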
options/url_options.go (Normal file, 36 lines)
@ -0,0 +1,36 @@
package options

import "strings"

type urlOption struct {
	Name string
	Args []string
}

type urlOptions []urlOption

func parseURLOptions(opts []string) (urlOptions, []string) {
	parsed := make(urlOptions, 0, len(opts))
	urlStart := len(opts) + 1

	for i, opt := range opts {
		args := strings.Split(opt, ":")

		if len(args) == 1 {
			urlStart = i
			break
		}

		parsed = append(parsed, urlOption{Name: args[0], Args: args[1:]})
	}

	var rest []string

	if urlStart < len(opts) {
		rest = opts[urlStart:]
	} else {
		rest = []string{}
	}

	return parsed, rest
}
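parseURLOptions consumes path segments until it hits the first segment without a colon, which marks the start of the source URL. A short sketch (illustrative; the exampleParseURLOptions helper is hypothetical and the path mirrors the ones used in the tests):

package options

import (
	"fmt"
	"strings"
)

// exampleParseURLOptions shows how a request path splits into processing
// options and the remaining source-URL segments.
func exampleParseURLOptions() {
	path := "/width:100/quality:55/plain/http://images.dev/lorem/ipsum.jpg"
	segments := strings.Split(strings.TrimPrefix(path, "/"), "/")

	opts, rest := parseURLOptions(segments)
	fmt.Println(opts) // [{width [100]} {quality [55]}]
	fmt.Println(rest) // [plain http:  images.dev lorem ipsum.jpg] (empty segment comes from "//")
}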
process.go (873 lines)
@ -1,873 +0,0 @@
package main

import (
	"bytes"
	"context"
	"fmt"
	"math"
	"runtime"

	"github.com/imgproxy/imgproxy/v2/imagemeta"
)

const (
	msgSmartCropNotSupported = "Smart crop is not supported by used version of libvips"

	// https://chromium.googlesource.com/webm/libwebp/+/refs/heads/master/src/webp/encode.h#529
	webpMaxDimension = 16383.0
)

var errConvertingNonSvgToSvg = newError(422, "Converting non-SVG images to SVG is not supported", "Converting non-SVG images to SVG is not supported")

func imageTypeLoadSupport(imgtype imageType) bool {
	return imgtype == imageTypeSVG ||
		imgtype == imageTypeICO ||
		vipsTypeSupportLoad[imgtype]
}

func imageTypeSaveSupport(imgtype imageType) bool {
	return imgtype == imageTypeSVG || vipsTypeSupportSave[imgtype]
}

func imageTypeGoodForWeb(imgtype imageType) bool {
	return imgtype != imageTypeTIFF &&
		imgtype != imageTypeBMP
}

func canSwitchFormat(src, dst, want imageType) bool {
	return imageTypeSaveSupport(want) &&
		(!vipsSupportAnimation(src) ||
			(dst != imageTypeUnknown && !vipsSupportAnimation(dst)) ||
			vipsSupportAnimation(want))
}

func extractMeta(img *vipsImage, baseAngle int, useOrientation bool) (int, int, int, bool) {
	width := img.Width()
	height := img.Height()

	angle := 0
	flip := false

	if useOrientation {
		orientation := img.Orientation()

		if orientation == 3 || orientation == 4 {
			angle = 180
		}
		if orientation == 5 || orientation == 6 {
			angle = 90
		}
		if orientation == 7 || orientation == 8 {
			angle = 270
		}
		if orientation == 2 || orientation == 4 || orientation == 5 || orientation == 7 {
			flip = true
		}
	}

	if (angle+baseAngle)%180 != 0 {
		width, height = height, width
	}

	return width, height, angle, flip
}

func calcScale(width, height int, po *processingOptions, imgtype imageType) float64 {
	var shrink float64

	srcW, srcH := float64(width), float64(height)
	dstW, dstH := float64(po.Width), float64(po.Height)

	if po.Width == 0 {
		dstW = srcW
	}

	if po.Height == 0 {
		dstH = srcH
	}

	if dstW == srcW && dstH == srcH {
		shrink = 1
	} else {
		wshrink := srcW / dstW
		hshrink := srcH / dstH

		rt := po.ResizingType

		if rt == resizeAuto {
			srcD := width - height
			dstD := po.Width - po.Height

			if (srcD >= 0 && dstD >= 0) || (srcD < 0 && dstD < 0) {
				rt = resizeFill
			} else {
				rt = resizeFit
			}
		}

		switch {
		case po.Width == 0:
			shrink = hshrink
		case po.Height == 0:
			shrink = wshrink
		case rt == resizeFit:
			shrink = math.Max(wshrink, hshrink)
		default:
			shrink = math.Min(wshrink, hshrink)
		}
	}

	if !po.Enlarge && shrink < 1 && imgtype != imageTypeSVG {
		shrink = 1
	}

	shrink /= po.Dpr

	if shrink > srcW {
		shrink = srcW
	}

	if shrink > srcH {
		shrink = srcH
	}

	return 1.0 / shrink
}

func canScaleOnLoad(imgtype imageType, scale float64) bool {
	if imgtype == imageTypeSVG {
		return true
	}

	if conf.DisableShrinkOnLoad || scale >= 1 {
		return false
	}

	return imgtype == imageTypeJPEG || imgtype == imageTypeWEBP
}

func canFitToBytes(imgtype imageType) bool {
	switch imgtype {
	case imageTypeJPEG, imageTypeWEBP, imageTypeAVIF, imageTypeTIFF:
		return true
	default:
		return false
	}
}

func calcJpegShink(scale float64, imgtype imageType) int {
	shrink := int(1.0 / scale)

	switch {
	case shrink >= 8:
		return 8
	case shrink >= 4:
		return 4
	case shrink >= 2:
		return 2
	}

	return 1
}

func calcCropSize(orig int, crop float64) int {
	switch {
	case crop == 0.0:
		return 0
	case crop >= 1.0:
		return int(crop)
	default:
		return maxInt(1, scaleInt(orig, crop))
	}
}

func calcPosition(width, height, innerWidth, innerHeight int, gravity *gravityOptions, allowOverflow bool) (left, top int) {
	if gravity.Type == gravityFocusPoint {
		pointX := scaleInt(width, gravity.X)
		pointY := scaleInt(height, gravity.Y)

		left = pointX - innerWidth/2
		top = pointY - innerHeight/2
	} else {
		offX, offY := int(gravity.X), int(gravity.Y)

		left = (width-innerWidth+1)/2 + offX
		top = (height-innerHeight+1)/2 + offY

		if gravity.Type == gravityNorth || gravity.Type == gravityNorthEast || gravity.Type == gravityNorthWest {
			top = 0 + offY
		}

		if gravity.Type == gravityEast || gravity.Type == gravityNorthEast || gravity.Type == gravitySouthEast {
			left = width - innerWidth - offX
		}

		if gravity.Type == gravitySouth || gravity.Type == gravitySouthEast || gravity.Type == gravitySouthWest {
			top = height - innerHeight - offY
		}

		if gravity.Type == gravityWest || gravity.Type == gravityNorthWest || gravity.Type == gravitySouthWest {
			left = 0 + offX
		}
	}

	var minX, maxX, minY, maxY int

	if allowOverflow {
		minX, maxX = -innerWidth+1, width-1
		minY, maxY = -innerHeight+1, height-1
	} else {
		minX, maxX = 0, width-innerWidth
		minY, maxY = 0, height-innerHeight
	}

	left = maxInt(minX, minInt(left, maxX))
	top = maxInt(minY, minInt(top, maxY))

	return
}

func cropImage(img *vipsImage, cropWidth, cropHeight int, gravity *gravityOptions) error {
	if cropWidth == 0 && cropHeight == 0 {
		return nil
	}

	imgWidth, imgHeight := img.Width(), img.Height()

	cropWidth = minNonZeroInt(cropWidth, imgWidth)
	cropHeight = minNonZeroInt(cropHeight, imgHeight)

	if cropWidth >= imgWidth && cropHeight >= imgHeight {
		return nil
	}

	if gravity.Type == gravitySmart {
		if err := img.CopyMemory(); err != nil {
			return err
		}
		if err := img.SmartCrop(cropWidth, cropHeight); err != nil {
			return err
		}
		// Applying additional modifications after smart crop causes SIGSEGV on Alpine
		// so we have to copy memory after it
		return img.CopyMemory()
	}

	left, top := calcPosition(imgWidth, imgHeight, cropWidth, cropHeight, gravity, false)
	return img.Crop(left, top, cropWidth, cropHeight)
}

func prepareWatermark(wm *vipsImage, wmData *imageData, opts *watermarkOptions, imgWidth, imgHeight int) error {
	if err := wm.Load(wmData.Data, wmData.Type, 1, 1.0, 1); err != nil {
		return err
	}

	po := newProcessingOptions()
	po.ResizingType = resizeFit
	po.Dpr = 1
	po.Enlarge = true
	po.Format = wmData.Type

	if opts.Scale > 0 {
		po.Width = maxInt(scaleInt(imgWidth, opts.Scale), 1)
		po.Height = maxInt(scaleInt(imgHeight, opts.Scale), 1)
	}

	if err := transformImage(context.Background(), wm, wmData.Data, po, wmData.Type); err != nil {
		return err
	}

	if err := wm.EnsureAlpha(); err != nil {
		return nil
	}

	if opts.Replicate {
		return wm.Replicate(imgWidth, imgHeight)
	}

	left, top := calcPosition(imgWidth, imgHeight, wm.Width(), wm.Height(), &opts.Gravity, true)

	return wm.Embed(imgWidth, imgHeight, left, top, rgbColor{0, 0, 0}, true)
}

func applyWatermark(img *vipsImage, wmData *imageData, opts *watermarkOptions, framesCount int) error {
	if err := img.RgbColourspace(); err != nil {
		return err
	}

	if err := img.CopyMemory(); err != nil {
		return err
	}

	wm := new(vipsImage)
	defer wm.Clear()

	width := img.Width()
	height := img.Height()

	if err := prepareWatermark(wm, wmData, opts, width, height/framesCount); err != nil {
		return err
	}

	if framesCount > 1 {
		if err := wm.Replicate(width, height); err != nil {
			return err
		}
	}

	opacity := opts.Opacity * conf.WatermarkOpacity

	return img.ApplyWatermark(wm, opacity)
}

func copyMemoryAndCheckTimeout(ctx context.Context, img *vipsImage) error {
	err := img.CopyMemory()
	checkTimeout(ctx)
	return err
}

func transformImage(ctx context.Context, img *vipsImage, data []byte, po *processingOptions, imgtype imageType) error {
	var (
		err     error
		trimmed bool
	)

	if po.Trim.Enabled {
		if err = img.Trim(po.Trim.Threshold, po.Trim.Smart, po.Trim.Color, po.Trim.EqualHor, po.Trim.EqualVer); err != nil {
			return err
		}
		if err = copyMemoryAndCheckTimeout(ctx, img); err != nil {
			return err
		}
		trimmed = true
	}

	srcWidth, srcHeight, angle, flip := extractMeta(img, po.Rotate, po.AutoRotate)

	cropWidth := calcCropSize(srcWidth, po.Crop.Width)
	cropHeight := calcCropSize(srcHeight, po.Crop.Height)

	cropGravity := po.Crop.Gravity
	if cropGravity.Type == gravityUnknown {
		cropGravity = po.Gravity
	}

	widthToScale := minNonZeroInt(cropWidth, srcWidth)
	heightToScale := minNonZeroInt(cropHeight, srcHeight)

	scale := calcScale(widthToScale, heightToScale, po, imgtype)

	if cropWidth > 0 {
		cropWidth = maxInt(1, scaleInt(cropWidth, scale))
	}
	if cropHeight > 0 {
		cropHeight = maxInt(1, scaleInt(cropHeight, scale))
	}
	if cropGravity.Type != gravityFocusPoint {
		cropGravity.X *= scale
		cropGravity.Y *= scale
	}

	if !trimmed && scale != 1 && data != nil && canScaleOnLoad(imgtype, scale) {
		jpegShrink := calcJpegShink(scale, imgtype)

		if imgtype != imageTypeJPEG || jpegShrink != 1 {
			// Do some scale-on-load
			if err = img.Load(data, imgtype, jpegShrink, scale, 1); err != nil {
				return err
			}
		}

		// Update scale after scale-on-load
		newWidth, newHeight, _, _ := extractMeta(img, po.Rotate, po.AutoRotate)
		if srcWidth > srcHeight {
			scale = float64(srcWidth) * scale / float64(newWidth)
		} else {
			scale = float64(srcHeight) * scale / float64(newHeight)
		}
		if srcWidth == scaleInt(srcWidth, scale) && srcHeight == scaleInt(srcHeight, scale) {
			scale = 1.0
		}
	}

	if err = img.Rad2Float(); err != nil {
		return err
	}

	iccImported := false
	convertToLinear := conf.UseLinearColorspace && scale != 1

	if convertToLinear {
		if err = img.ImportColourProfile(); err != nil {
			return err
		}
		iccImported = true
	}

	if convertToLinear {
		if err = img.LinearColourspace(); err != nil {
			return err
		}
	} else {
		if err = img.RgbColourspace(); err != nil {
			return err
		}
	}

	hasAlpha := img.HasAlpha()

	if scale != 1 {
		if err = img.Resize(scale, hasAlpha); err != nil {
			return err
		}
	}

	if err = copyMemoryAndCheckTimeout(ctx, img); err != nil {
		return err
	}

	if err = img.Rotate(angle); err != nil {
		return err
	}

	if flip {
		if err = img.Flip(); err != nil {
			return err
		}
	}

	if err = img.Rotate(po.Rotate); err != nil {
		return err
	}

	dprWidth := scaleInt(po.Width, po.Dpr)
	dprHeight := scaleInt(po.Height, po.Dpr)

	if err = cropImage(img, cropWidth, cropHeight, &cropGravity); err != nil {
		return err
	}
	if err = cropImage(img, dprWidth, dprHeight, &po.Gravity); err != nil {
		return err
	}

	if po.Format == imageTypeWEBP {
		webpLimitShrink := float64(maxInt(img.Width(), img.Height())) / webpMaxDimension

		if webpLimitShrink > 1.0 {
			if err = img.Resize(1.0/webpLimitShrink, hasAlpha); err != nil {
				return err
			}
			logWarning("WebP dimension size is limited to %d. The image is rescaled to %dx%d", int(webpMaxDimension), img.Width(), img.Height())

			if err = copyMemoryAndCheckTimeout(ctx, img); err != nil {
				return err
			}
		}
	}

	keepProfile := !po.StripColorProfile && po.Format.SupportsColourProfile()

	if iccImported {
		if keepProfile {
			// We imported ICC profile and want to keep it,
			// so we need to export it
			if err = img.ExportColourProfile(); err != nil {
				return err
			}
		} else {
			// We imported ICC profile but don't want to keep it,
			// so we need to export image to sRGB for maximum compatibility
			if err = img.ExportColourProfileToSRGB(); err != nil {
				return err
			}
		}
	} else if !keepProfile {
		// We don't import ICC profile and don't want to keep it,
		// so we need to transform it to sRGB for maximum compatibility
		if err = img.TransformColourProfile(); err != nil {
			return err
		}
	}

	if err = img.RgbColourspace(); err != nil {
		return err
	}

	if !keepProfile {
		if err = img.RemoveColourProfile(); err != nil {
			return err
		}
	}

	transparentBg := po.Format.SupportsAlpha() && !po.Flatten

	if hasAlpha && !transparentBg {
		if err = img.Flatten(po.Background); err != nil {
			return err
		}
	}

	if err = copyMemoryAndCheckTimeout(ctx, img); err != nil {
		return err
	}

	if po.Blur > 0 {
		if err = img.Blur(po.Blur); err != nil {
			return err
		}
	}

	if po.Sharpen > 0 {
		if err = img.Sharpen(po.Sharpen); err != nil {
			return err
		}
	}

	if err = copyMemoryAndCheckTimeout(ctx, img); err != nil {
		return err
	}

	if po.Extend.Enabled && (dprWidth > img.Width() || dprHeight > img.Height()) {
		offX, offY := calcPosition(dprWidth, dprHeight, img.Width(), img.Height(), &po.Extend.Gravity, false)
		if err = img.Embed(dprWidth, dprHeight, offX, offY, po.Background, transparentBg); err != nil {
			return err
		}
	}

	if po.Padding.Enabled {
		paddingTop := scaleInt(po.Padding.Top, po.Dpr)
		paddingRight := scaleInt(po.Padding.Right, po.Dpr)
		paddingBottom := scaleInt(po.Padding.Bottom, po.Dpr)
		paddingLeft := scaleInt(po.Padding.Left, po.Dpr)
		if err = img.Embed(
			img.Width()+paddingLeft+paddingRight,
			img.Height()+paddingTop+paddingBottom,
			paddingLeft,
			paddingTop,
			po.Background,
			transparentBg,
		); err != nil {
			return err
		}
	}

	if po.Watermark.Enabled && watermark != nil {
		if err = applyWatermark(img, watermark, &po.Watermark, 1); err != nil {
			return err
		}
	}

	if err = img.RgbColourspace(); err != nil {
		return err
	}

	if err := img.CastUchar(); err != nil {
		return err
	}

	if po.StripMetadata {
		if err := img.Strip(); err != nil {
			return err
		}
	}

	return copyMemoryAndCheckTimeout(ctx, img)
}

func transformAnimated(ctx context.Context, img *vipsImage, data []byte, po *processingOptions, imgtype imageType) error {
	if po.Trim.Enabled {
		logWarning("Trim is not supported for animated images")
		po.Trim.Enabled = false
	}

	imgWidth := img.Width()

	frameHeight, err := img.GetInt("page-height")
	if err != nil {
		return err
	}

	framesCount := minInt(img.Height()/frameHeight, conf.MaxAnimationFrames)

	// Double check dimensions because animated image has many frames
	if err = checkDimensions(imgWidth, frameHeight*framesCount); err != nil {
		return err
	}

	// Vips 8.8+ supports n-pages and doesn't load the whole animated image on header access
	if nPages, _ := img.GetIntDefault("n-pages", 0); nPages > framesCount {
		// Load only the needed frames
		if err = img.Load(data, imgtype, 1, 1.0, framesCount); err != nil {
			return err
		}
	}

	delay, err := img.GetIntSliceDefault("delay", nil)
	if err != nil {
		return err
	}

	loop, err := img.GetIntDefault("loop", 0)
	if err != nil {
		return err
	}

	// Legacy fields
	// TODO: remove this in major update
	gifLoop, err := img.GetIntDefault("gif-loop", -1)
	if err != nil {
		return err
	}
	gifDelay, err := img.GetIntDefault("gif-delay", -1)
	if err != nil {
		return err
	}

	watermarkEnabled := po.Watermark.Enabled
	po.Watermark.Enabled = false
	defer func() { po.Watermark.Enabled = watermarkEnabled }()

	frames := make([]*vipsImage, framesCount)
	defer func() {
		for _, frame := range frames {
			if frame != nil {
				frame.Clear()
			}
		}
	}()

	for i := 0; i < framesCount; i++ {
		frame := new(vipsImage)

		if err = img.Extract(frame, 0, i*frameHeight, imgWidth, frameHeight); err != nil {
			return err
		}

		frames[i] = frame

		if err = transformImage(ctx, frame, nil, po, imgtype); err != nil {
			return err
		}

		if err = copyMemoryAndCheckTimeout(ctx, frame); err != nil {
			return err
		}
	}

	if err = img.Arrayjoin(frames); err != nil {
		return err
	}

	if watermarkEnabled && watermark != nil {
		if err = applyWatermark(img, watermark, &po.Watermark, framesCount); err != nil {
			return err
		}
	}

	if err = img.CastUchar(); err != nil {
		return err
	}

	if err = copyMemoryAndCheckTimeout(ctx, img); err != nil {
		return err
	}

	if len(delay) == 0 {
		delay = make([]int, framesCount)
		for i := range delay {
			delay[i] = 40
		}
	} else if len(delay) > framesCount {
		delay = delay[:framesCount]
	}

	img.SetInt("page-height", frames[0].Height())
	img.SetIntSlice("delay", delay)
	img.SetInt("loop", loop)
	img.SetInt("n-pages", framesCount)

	// Legacy fields
	// TODO: remove this in major update
	if gifLoop >= 0 {
		img.SetInt("gif-loop", gifLoop)
	}
	if gifDelay >= 0 {
		img.SetInt("gif-delay", gifDelay)
	}

	return nil
}

func getIcoData(imgdata *imageData) (*imageData, error) {
	icoMeta, err := imagemeta.DecodeIcoMeta(bytes.NewReader(imgdata.Data))
	if err != nil {
		return nil, err
	}

	offset := icoMeta.BestImageOffset()
	size := icoMeta.BestImageSize()

	data := imgdata.Data[offset : offset+size]

	var format string

	meta, err := imagemeta.DecodeMeta(bytes.NewReader(data))
	if err != nil {
		// Looks like it's BMP with an incomplete header
		if d, err := imagemeta.FixBmpHeader(data); err == nil {
			format = "bmp"
			data = d
		} else {
			return nil, err
		}
	} else {
		format = meta.Format()
	}

	if imgtype, ok := imageTypes[format]; ok && vipsTypeSupportLoad[imgtype] {
		return &imageData{
			Data: data,
			Type: imgtype,
		}, nil
	}

	return nil, fmt.Errorf("Can't load %s from ICO", meta.Format())
}

func saveImageToFitBytes(ctx context.Context, po *processingOptions, img *vipsImage) ([]byte, context.CancelFunc, error) {
	var diff float64
	quality := po.getQuality()

	for {
		result, cancel, err := img.Save(po.Format, quality)
		if len(result) <= po.MaxBytes || quality <= 10 || err != nil {
			return result, cancel, err
		}
		cancel()

		checkTimeout(ctx)

		delta := float64(len(result)) / float64(po.MaxBytes)
		switch {
		case delta > 3:
			diff = 0.25
		case delta > 1.5:
			diff = 0.5
		default:
			diff = 0.75
		}
		quality = int(float64(quality) * diff)
	}
}

func processImage(ctx context.Context) ([]byte, context.CancelFunc, error) {
	runtime.LockOSThread()
	defer runtime.UnlockOSThread()

	if newRelicEnabled {
		newRelicCancel := startNewRelicSegment(ctx, "Processing image")
		defer newRelicCancel()
	}

	if prometheusEnabled {
		defer startPrometheusDuration(prometheusProcessingDuration)()
	}

	defer vipsCleanup()

	po := getProcessingOptions(ctx)
	imgdata := getImageData(ctx)

	switch {
	case po.Format == imageTypeUnknown:
		switch {
		case po.PreferAvif && canSwitchFormat(imgdata.Type, imageTypeUnknown, imageTypeAVIF):
			po.Format = imageTypeAVIF
		case po.PreferWebP && canSwitchFormat(imgdata.Type, imageTypeUnknown, imageTypeWEBP):
			po.Format = imageTypeWEBP
		case imageTypeSaveSupport(imgdata.Type) && imageTypeGoodForWeb(imgdata.Type):
			po.Format = imgdata.Type
		default:
			po.Format = imageTypeJPEG
		}
	case po.EnforceAvif && canSwitchFormat(imgdata.Type, po.Format, imageTypeAVIF):
		po.Format = imageTypeAVIF
	case po.EnforceWebP && canSwitchFormat(imgdata.Type, po.Format, imageTypeWEBP):
		po.Format = imageTypeWEBP
	}

	if po.Format == imageTypeSVG {
		if imgdata.Type != imageTypeSVG {
			return []byte{}, func() {}, errConvertingNonSvgToSvg
		}

		return imgdata.Data, func() {}, nil
	}

	if imgdata.Type == imageTypeSVG && !vipsTypeSupportLoad[imageTypeSVG] {
		return []byte{}, func() {}, errSourceImageTypeNotSupported
	}

	if imgdata.Type == imageTypeICO {
		icodata, err := getIcoData(imgdata)
		if err != nil {
			return nil, func() {}, err
		}

		imgdata = icodata
	}

	if !vipsSupportSmartcrop {
		if po.Gravity.Type == gravitySmart {
			logWarning(msgSmartCropNotSupported)
			po.Gravity.Type = gravityCenter
		}
		if po.Crop.Gravity.Type == gravitySmart {
			logWarning(msgSmartCropNotSupported)
			po.Crop.Gravity.Type = gravityCenter
		}
	}

	if po.ResizingType == resizeCrop {
		logWarning("`crop` resizing type is deprecated and will be removed in future versions. Use `crop` processing option instead")

		po.Crop.Width, po.Crop.Height = float64(po.Width), float64(po.Height)

		po.ResizingType = resizeFit
		po.Width, po.Height = 0, 0
	}

	animationSupport := conf.MaxAnimationFrames > 1 && vipsSupportAnimation(imgdata.Type) && vipsSupportAnimation(po.Format)

	pages := 1
	if animationSupport {
		pages = -1
	}

	img := new(vipsImage)
	defer img.Clear()

	if err := img.Load(imgdata.Data, imgdata.Type, 1, 1.0, pages); err != nil {
		return nil, func() {}, err
	}

	if animationSupport && img.IsAnimated() {
		if err := transformAnimated(ctx, img, imgdata.Data, po, imgdata.Type); err != nil {
			return nil, func() {}, err
		}
	} else {
		if err := transformImage(ctx, img, imgdata.Data, po, imgdata.Type); err != nil {
			return nil, func() {}, err
		}
	}

	if err := copyMemoryAndCheckTimeout(ctx, img); err != nil {
		return nil, func() {}, err
	}

	if po.MaxBytes > 0 && canFitToBytes(po.Format) {
		return saveImageToFitBytes(ctx, po, img)
	}

	return img.Save(po.Format, po.getQuality())
}
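For readers skimming the removed file: saveImageToFitBytes re-encodes in a loop, cutting quality harder the further the result overshoots po.MaxBytes. A hypothetical standalone restatement of that back-off (the nextQuality helper is illustrative and not part of the diff):

package main

import "fmt"

// nextQuality mirrors the back-off in saveImageToFitBytes: the bigger the
// overshoot ratio, the harder the quality is cut before the next attempt.
func nextQuality(quality, resultLen, maxBytes int) int {
	delta := float64(resultLen) / float64(maxBytes)
	diff := 0.75
	switch {
	case delta > 3:
		diff = 0.25
	case delta > 1.5:
		diff = 0.5
	}
	return int(float64(quality) * diff)
}

func main() {
	// An 80-quality result that came out 4x over budget drops to quality 20.
	fmt.Println(nextQuality(80, 400_000, 100_000)) // 20
}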
Some files were not shown because too many files have changed in this diff.