Initial commit
2  .gitignore  vendored  Normal file
@@ -0,0 +1,2 @@
bin/
config.yml

21  LICENSE  Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2017 Sergey Alexandrovich

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

22  Makefile  Normal file
@@ -0,0 +1,22 @@
current_dir := $(abspath $(dir $(lastword $(MAKEFILE_LIST))))
vendor := $(current_dir)/_vendor
goenv := GOPATH="$(vendor):$(GOPATH)"

all: clean vendorize build

clean:
	rm -rf bin/

vendorize:
	cd $(current_dir)
	GOPATH=$(vendor) go get -d
	find $(vendor) -name ".git" -type d | xargs rm -rf

clean-vendor:
	rm -rf $(vendor)

hard-vendorize: clean-vendor vendorize

build:
	cd $(current_dir)
	$(goenv) go build -v -ldflags '-w -s' -o bin/server

12  _vendor/src/github.com/h2non/bimg/.editorconfig  Normal file
@@ -0,0 +1,12 @@
root = true

[*]
indent_style = tabs
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

[*.md]
trim_trailing_whitespace = false

8  _vendor/src/github.com/h2non/bimg/.gitignore  vendored  Normal file
@@ -0,0 +1,8 @@
/bimg
/bundle
bin
/*.jpg
/*.png
/*.webp
/fixtures/*_out.*
/.idea/

98  _vendor/src/github.com/h2non/bimg/.travis.yml  Normal file
@@ -0,0 +1,98 @@
language: go

dist: trusty
sudo: false

go:
  - 1.6
  - 1.7
  - 1.8
  - tip

env:
  - LIBVIPS=7.42.3
  - LIBVIPS=8.2.3
  - LIBVIPS=8.3.3
  - LIBVIPS=8.4.5
  - LIBVIPS=8.5.5
  - LIBVIPS=master

matrix:
  allow_failures:
    - env: LIBVIPS=7.42.3
    - env: LIBVIPS=8.2.3
    - env: LIBVIPS=8.3.3

cache:
  apt:
  directories:
    - $HOME/libvips

addons:
  apt:
    packages:
      - gobject-introspection
      - gtk-doc-tools
      - libcfitsio3-dev
      - libfftw3-dev
      - libgif-dev
      - libgs-dev
      - libgsf-1-dev
      - libmatio-dev
      - libopenslide-dev
      - liborc-0.4-dev
      - libpango1.0-dev
      - libpoppler-glib-dev
      - libwebp-dev

# VIPS 8.3.3 requires Poppler 0.30 which is not released on Trusty.
before_install:
  - >
    test "$LIBVIPS" != "master" -a "$LIBVIPS" \< "8.5" \
    && wget http://www.vips.ecs.soton.ac.uk/supported/${LIBVIPS%.*}/vips-${LIBVIPS}.tar.gz -O vips.tgz \
    || echo ":-)"
  - >
    test "$LIBVIPS" != "master" -a "$LIBVIPS" \> "8.5" \
    && wget https://github.com/jcupitt/libvips/releases/download/v${LIBVIPS}/vips-${LIBVIPS}.tar.gz -O vips.tgz \
    || echo ":-)"
  - >
    test $LIBVIPS == "master" \
    && wget https://github.com/jcupitt/libvips/archive/${LIBVIPS}.tar.gz -O vips.tgz \
    || echo ":-)"
  - mkdir libvips
  - tar xf vips.tgz -C libvips --strip-components 1
  - cd libvips
  - test -f autogen.sh && ./autogen.sh || ./bootstrap.sh
  - >
    CXXFLAGS=-D_GLIBCXX_USE_CXX11_ABI=0
    ./configure
    --disable-debug
    --disable-dependency-tracking
    --disable-introspection
    --disable-static
    --enable-gtk-doc-html=no
    --enable-gtk-doc=no
    --enable-pyvips8=no
    --without-orc
    --without-python
    --prefix=$HOME/libvips
    $1
  - make
  - make install
  - cd ..
  - export PATH=$PATH:$HOME/libvips/bin
  - export PKG_CONFIG_PATH=$PKG_CONFIG_PATH:$HOME/libvips/lib/pkgconfig
  - export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$HOME/libvips/lib
  - vips --vips-version

before_script:
  - go get -u github.com/golang/lint/golint

script:
  - diff -u <(echo -n) <(gofmt -s -d ./)
  - diff -u <(echo -n) <(go vet ./)
  - diff -u <(echo -n) <(golint ./)
  - go test -v -race -covermode=atomic -coverprofile=coverage.out

after_success:
  - goveralls -coverprofile=coverage.out -service=travis-ci

85  _vendor/src/github.com/h2non/bimg/History.md  Normal file
@@ -0,0 +1,85 @@

## v1.0.9 / 2017-05-25

* Merge pull request #156 from Dynom/SmartCropToGravity
* Adding a test, verifying both ways of enabling SmartCrop work
* Merge pull request #149 from waldophotos/master
* Replacing SmartCrop with a Gravity option
* refactor(docs): v8.4
* Change for older LIBVIPS versions. `vips_bandjoin_const1` is added in libvips 8.2.
* Second try, watermarking memory issue fix

## v1.0.8 / 2017-05-18

* Merge pull request #145 from greut/smartcrop
* Merge pull request #155 from greut/libvips8.5.5
* Update libvips to 8.5.5.
* Adding basic smartcrop support.
* Merge pull request #153 from abracadaber/master
* Added Linux Mint 17.3+ distro names
* feat(docs): add new maintainer notice (thanks to @kirillDanshin)
* Merge pull request #152 from greut/libvips85
* Download latest version of libvips from github.
* Merge pull request #147 from h2non/revert-143-master
* Revert "Fix for memory issue when watermarking images"
* Merge pull request #146 from greut/minor-major
* Merge pull request #143 from waldophotos/master
* Merge pull request #144 from greut/go18
* Fix tests where minor/major were mixed up
* Enabled go 1.8 builds.
* Fix the unref of images, when image isn't transparent
* Fix for memory issue when watermarking images
* feat(docs): add maintainers sections
* Merge pull request #132 from jaume-pinyol/WATERMARK_SUPPORT
* Add support for image watermarks
* Merge pull request #131 from greut/versions
* Running tests on more specific versions.
* refactor(preinstall.sh): remove deprecation notice
* Update preinstall.sh
* fix(requirements): required libvips 7.42
* fix(History): typo
* chore(History): add breaking change note

## v1.0.7 / 13-01-2017

- fix(#128): crop image calculation for missing width or height axis.
- feat: add TIFF save output format (**note**: this introduces a minor interface breaking change in `bimg.IsImageTypeSupportedByVips` auxiliary function).

## v1.0.6 / 12-11-2016

- feat(#118): handle 16-bit PNGs.
- feat(#119): adds JPEG2000 file for the type tests.
- feat(#121): test bimg against multiple libvips versions.

## v1.0.5 / 01-10-2016

- feat(#92): support Extend param with optional background.
- fix(#106): allow image area extraction without explicit x/y axis.
- feat(api): add Extend type with `libvips` enum alias.

## v1.0.4 / 29-09-2016

- fix(#111): safe check of magick image type support.

## v1.0.3 / 28-09-2016

- fix(#95): better image type inference and support check.
- fix(background): pass proper background RGB color for PNG image conversion.
- feat(types): validate supported image types by current `libvips` compilation.
- feat(types): consistent SVG image checking.
- feat(api): add public functions `VipsIsTypeSupported()`, `IsImageTypeSupportedByVips()` and `IsSVGImage()`.

## v1.0.2 / 27-09-2016

- feat(#95): support GIF, SVG and PDF formats.
- fix(#108): auto-width and height calculations now round instead of floor.

## v1.0.1 / 22-06-2016

- fix(#90): Do not not dereference the original image a second time.

## v1.0.0 / 21-04-2016

- refactor(api): breaking changes: normalize public members to follow Go naming idioms.
- feat(version): bump to major version. API contract won't be compromised in `v1`.
- feat(docs): add missing inline godoc documentation.

24  _vendor/src/github.com/h2non/bimg/LICENSE  Normal file
@@ -0,0 +1,24 @@
The MIT License

Copyright (c) Tomas Aparicio and contributors

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

347  _vendor/src/github.com/h2non/bimg/README.md  Normal file
@@ -0,0 +1,347 @@
# bimg [![Build Status](https://travis-ci.org/h2non/bimg.svg)](https://travis-ci.org/h2non/bimg) [![GoDoc](https://godoc.org/github.com/h2non/bimg?status.svg)](https://godoc.org/github.com/h2non/bimg) [![Go Report Card](http://goreportcard.com/badge/h2non/bimg)](http://goreportcard.com/report/h2non/bimg) [![Coverage Status](https://coveralls.io/repos/github/h2non/bimg/badge.svg?branch=master)](https://coveralls.io/github/h2non/bimg?branch=master) ![License](https://img.shields.io/badge/license-MIT-blue.svg)

Small [Go](http://golang.org) package for fast high-level image processing using [libvips](https://github.com/jcupitt/libvips) via C bindings, providing a simple, elegant and fluent [programmatic API](#examples).

bimg was designed to be a small and efficient library supporting a common set of [image operations](#supported-image-operations) such as crop, resize, rotate, zoom or watermark. It can read JPEG, PNG and WEBP natively, and optionally TIFF, PDF, GIF and SVG formats if `libvips@8.3+` is compiled with the proper library bindings.

bimg is able to output images as JPEG, PNG and WEBP formats, including transparent conversion across them.

Internally bimg uses libvips, a powerful image processing library written in C that has a [low memory footprint](http://www.vips.ecs.soton.ac.uk/index.php?title=Speed_and_Memory_Use) and is typically 4x faster than the quickest ImageMagick and GraphicsMagick settings or Go's native `image` package, and in some cases even 8x faster when processing JPEG images.

If you're looking for an HTTP-based image processing solution, see [imaginary](https://github.com/h2non/imaginary).

bimg was heavily inspired by [sharp](https://github.com/lovell/sharp), its counterpart package for [node.js](http://nodejs.org). bimg is used in production environments processing thousands of images per day.

**v1 notice**: `bimg` introduces some minor breaking changes in the `v1` release.
If you're using `gopkg.in`, you can still rely on the `v0` release without worrying about API breaking changes.

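For example, the `gopkg.in` import path encodes the major version line you depend on (the `v0` path below is shown purely to illustrate the pinning scheme):

```go
import (
  // Track the v1 API line, used throughout the examples below.
  bimg "gopkg.in/h2non/bimg.v1"

  // Or keep the pre-1.0 API instead:
  // bimg "gopkg.in/h2non/bimg.v0"
)
```
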
`bimg` is currently maintained by [Kirill Danshin](https://github.com/kirillDanshin).

## Contents

- [Supported image operations](#supported-image-operations)
- [Prerequisites](#prerequisites)
- [Installation](#installation)
- [Performance](#performance)
- [Benchmark](#benchmark)
- [Examples](#examples)
- [Debugging](#debugging)
- [API](#api)
- [Authors](#authors)
- [Credits](#credits)

## Supported image operations

- Resize
- Enlarge
- Crop (including smart crop support)
- Rotate (with auto-rotate based on EXIF orientation)
- Flip (with auto-flip based on EXIF metadata)
- Flop
- Zoom
- Thumbnail
- Extract area
- Watermark (using text or image)
- Gaussian blur effect
- Custom output color space (RGB, grayscale...)
- Format conversion (with additional quality/compression settings)
- EXIF metadata (size, alpha channel, profile, orientation...)

## Prerequisites

- [libvips](https://github.com/jcupitt/libvips) 7.42+ or 8+ (8.4+ recommended)
- A C-compatible compiler such as gcc 4.6+ or clang 3.0+
- Go 1.3+

**Note**: `libvips` v8.3+ is required for GIF, PDF and SVG support.

## Installation

```bash
go get -u gopkg.in/h2non/bimg.v1
```

### libvips

Run the following script with `sudo` (supports OSX, Debian/Ubuntu, Red Hat, Fedora and Amazon Linux):
```bash
curl -s https://raw.githubusercontent.com/h2non/bimg/master/preinstall.sh | sudo bash -
```

If you want to take advantage of [OpenSlide](http://openslide.org/), simply add `--with-openslide` to enable it:
```bash
curl -s https://raw.githubusercontent.com/h2non/bimg/master/preinstall.sh | sudo bash -s --with-openslide
```

The [install script](https://github.com/h2non/bimg/blob/master/preinstall.sh) requires `curl` and `pkg-config`.

## Performance

libvips is probably the fastest open source solution for image processing.
Here are some performance comparisons for multiple scenarios:

- [libvips speed and memory usage](http://www.vips.ecs.soton.ac.uk/index.php?title=Speed_and_Memory_Use)

## Benchmark

Tested using Go 1.5.1 and libvips 7.42.3 on OS X (i7, 2.7GHz):
```
BenchmarkRotateJpeg-8          20    64686945 ns/op
BenchmarkResizeLargeJpeg-8     20    63390416 ns/op
BenchmarkResizePng-8          100    18147294 ns/op
BenchmarkResizeWebP-8         100    20836741 ns/op
BenchmarkConvertToJpeg-8      100    12831812 ns/op
BenchmarkConvertToPng-8        10   128901422 ns/op
BenchmarkConvertToWebp-8       10   204027990 ns/op
BenchmarkCropJpeg-8            30    59068572 ns/op
BenchmarkCropPng-8             10   117303259 ns/op
BenchmarkCropWebP-8            10   107060659 ns/op
BenchmarkExtractJpeg-8         50    30708919 ns/op
BenchmarkExtractPng-8        3000      595546 ns/op
BenchmarkExtractWebp-8       3000      386379 ns/op
BenchmarkZoomJpeg-8            10   160005424 ns/op
BenchmarkZoomPng-8             30    44561047 ns/op
BenchmarkZoomWebp-8            10   126732678 ns/op
BenchmarkWatermarkJpeg-8       20    79006133 ns/op
BenchmarkWatermarPng-8        200     8197291 ns/op
BenchmarkWatermarWebp-8        30    49360369 ns/op
```

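These numbers come from standard Go benchmarks. A minimal sketch of how such a benchmark can be written against the public API (the fixture path and target size are illustrative, not taken from bimg's own benchmark suite):

```go
package bimg_test

import (
  "testing"

  bimg "gopkg.in/h2non/bimg.v1"
)

// BenchmarkResize measures a plain resize of a local fixture image.
// Run it with: go test -bench=. -benchmem
func BenchmarkResize(b *testing.B) {
  buf, err := bimg.Read("fixtures/test.jpg") // hypothetical local fixture
  if err != nil {
    b.Fatal(err)
  }
  b.ResetTimer()
  for n := 0; n < b.N; n++ {
    if _, err := bimg.NewImage(buf).Resize(800, 600); err != nil {
      b.Fatal(err)
    }
  }
}
```
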
## Examples

```go
import (
  "fmt"
  "os"

  "gopkg.in/h2non/bimg.v1"
)
```

#### Resize

```go
buffer, err := bimg.Read("image.jpg")
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

newImage, err := bimg.NewImage(buffer).Resize(800, 600)
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

size, err := bimg.NewImage(newImage).Size()
if size.Width == 800 && size.Height == 600 {
  fmt.Println("The image size is valid")
}

bimg.Write("new.jpg", newImage)
```

#### Rotate

```go
buffer, err := bimg.Read("image.jpg")
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

newImage, err := bimg.NewImage(buffer).Rotate(90)
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

bimg.Write("new.jpg", newImage)
```

#### Convert

```go
buffer, err := bimg.Read("image.jpg")
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

newImage, err := bimg.NewImage(buffer).Convert(bimg.PNG)
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

if bimg.NewImage(newImage).Type() == "png" {
  fmt.Fprintln(os.Stderr, "The image was converted into png")
}
```

#### Force resize

Force resize operation without preserving the aspect ratio:

```go
buffer, err := bimg.Read("image.jpg")
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

newImage, err := bimg.NewImage(buffer).ForceResize(1000, 500)
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

size, _ := bimg.Size(newImage)
if size.Width != 1000 || size.Height != 500 {
  fmt.Fprintln(os.Stderr, "Incorrect image size")
}
```

#### Custom colour space (black & white)

```go
buffer, err := bimg.Read("image.jpg")
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

newImage, err := bimg.NewImage(buffer).Colourspace(bimg.INTERPRETATION_B_W)
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

colourSpace, _ := bimg.ImageInterpretation(newImage)
if colourSpace != bimg.INTERPRETATION_B_W {
  fmt.Fprintln(os.Stderr, "Invalid colour space")
}
```

#### Custom options

See the [Options](https://godoc.org/github.com/h2non/bimg#Options) struct to discover all the available fields.

```go
options := bimg.Options{
  Width:     800,
  Height:    600,
  Crop:      true,
  Quality:   95,
  Rotate:    180,
  Interlace: true,
}

buffer, err := bimg.Read("image.jpg")
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

newImage, err := bimg.NewImage(buffer).Process(options)
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

bimg.Write("new.jpg", newImage)
```

#### Watermark

```go
buffer, err := bimg.Read("image.jpg")
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

watermark := bimg.Watermark{
  Text:       "Chuck Norris (c) 2315",
  Opacity:    0.25,
  Width:      200,
  DPI:        100,
  Margin:     150,
  Font:       "sans bold 12",
  Background: bimg.Color{255, 255, 255},
}

newImage, err := bimg.NewImage(buffer).Watermark(watermark)
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

bimg.Write("new.jpg", newImage)
```

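#### Watermark with an image

The watermark can also be another image instead of text. A minimal sketch, assuming the `WatermarkImage` type and method introduced in v1.0.8 (see History.md above); the watermark asset path is illustrative:

```go
buffer, err := bimg.Read("image.jpg")
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

watermarkBuf, err := bimg.Read("logo.png") // hypothetical watermark asset
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

newImage, err := bimg.NewImage(buffer).WatermarkImage(bimg.WatermarkImage{
  Left:    100,
  Top:     100,
  Buf:     watermarkBuf,
  Opacity: 0.5,
})
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

bimg.Write("new.jpg", newImage)
```
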
#### Fluent interface

```go
buffer, err := bimg.Read("image.jpg")
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

image := bimg.NewImage(buffer)

// first crop the image
_, err = image.CropByWidth(300)
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

// then flip it
newImage, err := image.Flip()
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

// save the cropped and flipped image
bimg.Write("new.jpg", newImage)
```

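#### Smart crop

Smart crop, listed under the supported operations above, can be sketched as follows, assuming the `SmartCrop` method added in v1.0.8 (per the v1.0.9 changelog, the same behaviour can also be requested via `Options` with a smart `Gravity`); it relies on a reasonably recent libvips (8.5+):

```go
buffer, err := bimg.Read("image.jpg")
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

// Crop to 250x250 around the most "interesting" region detected by libvips.
newImage, err := bimg.NewImage(buffer).SmartCrop(250, 250)
if err != nil {
  fmt.Fprintln(os.Stderr, err)
}

bimg.Write("new.jpg", newImage)
```
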
## Debugging

Run the process with the `DEBUG` environment variable set:
```
DEBUG=bimg ./app
```

Enable libvips traces (note that a lot of data will be written to stdout):
```
VIPS_TRACE=1 ./app
```

You can also dump a core on failure, as [John Cupitt](https://github.com/jcupitt) said:
```c
g_log_set_always_fatal(
  G_LOG_FLAG_RECURSION |
  G_LOG_FLAG_FATAL |
  G_LOG_LEVEL_ERROR |
  G_LOG_LEVEL_CRITICAL |
  G_LOG_LEVEL_WARNING );
```

Or set the `G_DEBUG` environment variable:
```
export G_DEBUG=fatal-warnings,fatal-criticals
```

## API

See the [godoc reference](https://godoc.org/github.com/h2non/bimg) for detailed API documentation.

## Authors

- [Tomás Aparicio](https://github.com/h2non) - Original author and architect.
- [Kirill Danshin](https://github.com/kirillDanshin) - Maintainer since April 2017.

## Credits

People who have regularly contributed to improving `bimg` in some way:

- [John Cupitt](https://github.com/jcupitt)
- [Yoan Blanc](https://github.com/greut)
- [Christophe Eblé](https://github.com/chreble)
- [Brant Fitzsimmons](https://github.com/bfitzsimmons)
- [Thomas Meson](https://github.com/zllak)

Thank you!

## License

MIT - Tomas Aparicio

[![views](https://sourcegraph.com/api/repos/github.com/h2non/bimg/.counters/views.svg)](https://sourcegraph.com/github.com/h2non/bimg)

15  _vendor/src/github.com/h2non/bimg/file.go  Normal file
@@ -0,0 +1,15 @@
package bimg

import "io/ioutil"

// Read reads all the content of the given file path
// and returns it as a byte buffer.
func Read(path string) ([]byte, error) {
	return ioutil.ReadFile(path)
}

// Write writes the given byte buffer to disk
// at the given file path.
func Write(path string, buf []byte) error {
	return ioutil.WriteFile(path, buf, 0644)
}

38  _vendor/src/github.com/h2non/bimg/file_test.go  Normal file
@@ -0,0 +1,38 @@
package bimg

import (
	"testing"
)

func TestRead(t *testing.T) {
	buf, err := Read("fixtures/test.jpg")

	if err != nil {
		t.Errorf("Cannot read the image: %#v", err)
	}

	if len(buf) == 0 {
		t.Fatal("Empty buffer")
	}

	if DetermineImageType(buf) != JPEG {
		t.Fatal("Image is not jpeg")
	}
}

func TestWrite(t *testing.T) {
	buf, err := Read("fixtures/test.jpg")

	if err != nil {
		t.Errorf("Cannot read the image: %#v", err)
	}

	if len(buf) == 0 {
		t.Fatal("Empty buffer")
	}

	err = Write("fixtures/test_write_out.jpg", buf)
	if err != nil {
		t.Fatalf("Cannot write the file: %#v", err)
	}
}

BIN  _vendor/src/github.com/h2non/bimg/fixtures/corrupt.jpg  Normal file  (Size: 46 KiB)
(Size: 810 KiB)
BIN  _vendor/src/github.com/h2non/bimg/fixtures/test.gif  Normal file  (Size: 635 KiB)
BIN  _vendor/src/github.com/h2non/bimg/fixtures/test.jp2  Normal file
BIN  _vendor/src/github.com/h2non/bimg/fixtures/test.jpg  Normal file  (Size: 52 KiB)
BIN  _vendor/src/github.com/h2non/bimg/fixtures/test.pdf  Normal file
BIN  _vendor/src/github.com/h2non/bimg/fixtures/test.png  Normal file  (Size: 604 KiB)
725  _vendor/src/github.com/h2non/bimg/fixtures/test.svg  Normal file
@@ -0,0 +1,725 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg id="svg2" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 900 900" version="1.1">
  <!-- … the remaining drawing groups and path data of the 725-line test.svg fixture … -->
|
||||
</g>
|
||||
<g id="g446" stroke-width="2" stroke="#4c0000">
|
||||
<path id="path448" d="m51.4,85s-15-16.8-23.4-19.4c0,0-13.4-6.8-38,1"/>
|
||||
</g>
|
||||
<g id="g450" stroke-width="2" stroke="#4c0000">
|
||||
<path id="path452" d="m24.8,64.2s-25.2-8-40.6-3.8c0,0-18.4,2-26.8,15.8"/>
|
||||
</g>
|
||||
<g id="g454" stroke-width="2" stroke="#4c0000">
|
||||
<path id="path456" d="m21.2,63s-17-7.2-31.8-9.4c0,0-16.6-2.6-33.2,4.6,0,0-12.2,6-17.6,16.2"/>
|
||||
</g>
|
||||
<g id="g458" stroke-width="2" stroke="#4c0000">
|
||||
<path id="path460" d="m22.2,63.4s-15.4-11-16.4-12.4c0,0-7-11-20-11.4,0,0-21.4,0.8-38.6,8.8"/>
|
||||
</g>
|
||||
<g id="g462" fill="#000">
|
||||
<path id="path464" d="M20.895,54.407c1.542,1.463,28.505,30.393,28.505,30.393,35.2,36.6,7.2,2.4,7.2,2.4-7.6-4.8-16.8-23.6-16.8-23.6-1.2-2.8,14,7.2,14,7.2,4,0.8,17.6,20,17.6,20-6.8-2.4-2,4.8-2,4.8,2.8,2,23.201,17.6,23.201,17.6,3.6,4,7.599,5.6,7.599,5.6,14-5.2,7.6,8,7.6,8,2.4,6.8,8-4.8,8-4.8,11.2-16.8-5.2-14.4-5.2-14.4-30,2.8-36.8-13.2-36.8-13.2-2.4-2.4,6.4,0,6.4,0,8.401,2-7.2-12.4-7.2-12.4,2.4,0,11.6,6.8,11.6,6.8,10.401,9.2,12.401,7.2,12.401,7.2,17.999-8.8,28.399-1.2,28.399-1.2,2,1.6-3.6,8.4-2,13.6s6.4,17.6,6.4,17.6c-2.4,1.6-2,12.4-2,12.4,16.8,23.2,7.2,21.2,7.2,21.2-15.6-0.4-0.8,7.2-0.8,7.2,3.2,2,12,9.2,12,9.2-2.8-1.2-4.4,4-4.4,4,4.8,4,2,8.8,2,8.8-6,1.2-7.2,5.2-7.2,5.2,6.8,8-3.2,8.4-3.2,8.4,3.6,4.4-1.2,16.4-1.2,16.4-4.8,0-11.2,5.6-11.2,5.6,2.4,4.8-8,10.4-8,10.4-8.4,1.6-5.6,8.4-5.6,8.4-7.999,6-10.399,22-10.399,22-0.8,10.4-3.2,13.6,2,11.6,5.199-2,4.399-14.4,4.399-14.4-4.799-15.6,38-31.6,38-31.6,4-1.6,4.8-6.8,4.8-6.8,2,0.4,10.8,8,10.8,8,7.6,11.2,8,2,8,2,1.2-3.6-0.4-9.6-0.4-9.6,6-21.6-8-28-8-28-10-33.6,4-25.2,4-25.2,2.8,5.6,13.6,10.8,13.6,10.8l3.6-2.4c-1.6-4.8,6.8-10.8,6.8-10.8,2.8,6.4,8.8-1.6,8.8-1.6,3.6-24.4,16-10,16-10,4,1.2,5.2-5.6,5.2-5.6,3.6-10.4,0-24,0-24,3.6-0.4,13.2,5.6,13.2,5.6,2.8-3.6-6.4-20.4-2.4-18s8.4,4,8.4,4c0.8-2-9.2-14.4-9.2-14.4-4.4-2.8-9.6-23.2-9.6-23.2,7.2,3.6-2.8-11.6-2.8-11.6,0-3.2,6-14.4,6-14.4-0.8-6.8,0-6.4,0-6.4,2.8,1.2,10.8,2.8,4-3.6s0.8-11.2,0.8-11.2c4.4-2.8-9.2-2.4-9.2-2.4-5.2-4.4-4.8-8.4-4.8-8.4,8,2-6.4-12.4-8.8-16s7.2-8.8,7.2-8.8c13.2-3.6,1.6-6.8,1.6-6.8-19.6,0.4-8.8-10.4-8.8-10.4,6,0.4,4.4-2,4.4-2-5.2-1.2-14.8-7.6-14.8-7.6-4-3.6-0.4-2.8-0.4-2.8,16.8,1.2-12-10-12-10,8,0-10-10.4-10-10.4-2-1.6-5.2-9.2-5.2-9.2-6-5.2-10.8-12-10.8-12-0.4-4.4-5.2-9.2-5.2-9.2-11.6-13.6-17.2-13.2-17.2-13.2-14.8-3.6-20-2.8-20-2.8l-52.8,4.4c-26.4,12.8-18.6,33.8-18.6,33.8,6.4,8.4,15.6,4.6,15.6,4.6,4.6-6.2,16.2-4,16.2-4,20.401,3.2,17.801-0.4,17.801-0.4-2.4-4.6-18.601-10.8-18.801-11.4s-9-4-9-4c-3-1.2-7.4-10.4-7.4-10.4-3.2-3.4,12.6,2.4,12.6,2.4-1.2,1,6.2,5,6.2,5,17.401-1,28.001,9.8,28.001,9.8,10.799,16.6,10.999,8.4,10.999,8.4,2.8-9.4-9-30.6-9-30.6,0.4-2,8.6,4.6,8.6,4.6,1.4-2,2.2,3.8,2.2,3.8,0.2,2.4,4,10.4,4,10.4,2.8,13,6.4,5.6,6.4,5.6l4.6,9.4c1.4,2.6-4.6,10.2-4.6,10.2-0.2,2.8,0.6,2.6-5,10.2s-2.2,12-2.2,12c-1.4,6.6,7.4,6.2,7.4,6.2,2.6,2.2,6,2.2,6,2.2,1.8,2,4.2,1.4,4.2,1.4,1.6-3.8,7.8-1.8,7.8-1.8,1.4-2.4,9.6-2.8,9.6-2.8,1-2.6,1.4-4.2,4.8-4.8s-21.2-43.6-21.2-43.6c6.4-0.8-1.8-13.2-1.8-13.2-2.2-6.6,9.2,8,11.4,9.4s3.2,3.6,1.6,3.4-3.4,2-2,2.2,14.4,15.2,17.8,25.4,9.4,14.2,15.6,20.2,5.4,30.2,5.4,30.2c-0.4,8.8,5.6,19.4,5.6,19.4,2,3.8-2.2,22-2.2,22-2,2.2-0.6,3-0.6,3,1,1.2,7.8,14.4,7.8,14.4-1.8-0.2,1.8,3.4,1.8,3.4,5.2,6-1.2,3-1.2,3-6-1.6,1,8.2,1,8.2,1.2,1.8-7.8-2.8-7.8-2.8-9.2-0.6,2.4,6.6,2.4,6.6,8.6,7.2-2.8,2.8-2.8,2.8-4.6-1.8-1.4,5-1.4,5,3.2,1.6,20.4,8.6,20.4,8.6,0.4,3.8-2.6,8.8-2.6,8.8,0.4,4-1.8,7.4-1.8,7.4-1.2,8.2-1.8,9-1.8,9-4.2,0.2-11.6,14-11.6,14-1.8,2.6-12,14.6-12,14.6-2,7-20-0.2-20-0.2-6.6,3.4-4.6,0-4.6,0-0.4-2.2,4.4-8.2,4.4-8.2,7-2.6,4.4-13.4,4.4-13.4,4-1.4-7.2-4.2-7-5.4s6-2.6,6-2.6c8-2,3.6-4.4,3.6-4.4-0.6-4,2.4-9.6,2.4-9.6,11.6-0.8,0-17,0-17-10.8-7.6-11.8-13.4-11.8-13.4,12.6-8.2,4.4-20.6,4.6-24.2s1.4-25.2,1.4-25.2c-2-6.2-5-19.8-5-19.8,2.2-5.2,9.6-17.8,9.6-17.8,2.8-4.2,11.6-9,9.4-12s-10-1.2-10-1.2c-7.8-1.4-7.2,3.8-7.2,3.8-1.6,1-2.4,6-2.4,6-0.72,7.933-9.6,14.2-9.6,14.2-11.2,6.2-2,10.2-2,10.2,6,6.6-3.8,6.8-3.8,6.8-11-1.8-2.8,8.4-2.8,8.4,10.8,12.8,7.8,15.6,7.8,15.6-10.2,1,2.4,10.2,2.4,10.2s-0.8-2-0.6-0.2,3.2,6,4,8-3.2,2.2-3.2,2.2c0.6,9.6-14.8,5.4-14.8,5.4l-1.6,0.2c-1.6,0.2-12.8-0.6-18.6-2.8s-12.599-2.2
-12.599-2.2-4,1.8-11.601,1.6c-7.6-0.2-15.6,2.6-15.6,2.6-4.4-0.4,4.2-4.8,4.4-4.6s5.8-5.4-2.2-4.8c-21.797,1.635-32.6-8.6-32.6-8.6-2-1.4-4.6-4.2-4.6-4.2-10-2,1.4,12.4,1.4,12.4,1.2,1.4-0.2,2.4-0.2,2.4-0.8-1.6-8.6-7-8.6-7-2.811-0.973-4.174-2.307-6.505-4.793z"/>
|
||||
</g>
|
||||
<g id="g466" fill="#4c0000">
|
||||
<path id="path468" d="m-3,42.8s11.6,5.6,14.2,8.4,16.6,14.2,16.6,14.2-5.4-2-8-3.8-13.4-10-13.4-10-3.8-6-9.4-8.8z"/>
|
||||
</g>
|
||||
<g id="g470" fill="#99cc32">
|
||||
<path id="path472" d="M-61.009,11.603c0.337-0.148-0.187-2.86-0.391-3.403-1.022-2.726-10-4.2-10-4.2-0.227,1.365-0.282,2.961-0.176,4.599,0,0,4.868,5.519,10.567,3.004z"/>
|
||||
</g>
|
||||
<g id="g474" fill="#659900">
|
||||
<path id="path476" d="M-61.009,11.403c-0.449,0.158-0.015-2.734-0.191-3.203-1.022-2.726-10.2-4.3-10.2-4.3-0.227,1.365-0.282,2.961-0.176,4.599,0,0,4.268,5.119,10.567,2.904z"/>
|
||||
</g>
|
||||
<g id="g478" fill="#000">
|
||||
<path id="path480" d="m-65.4,11.546c-0.625,0-1.131-1.14-1.131-2.546,0-1.405,0.506-2.545,1.131-2.545s1.132,1.14,1.132,2.545c0,1.406-0.507,2.546-1.132,2.546z"/>
|
||||
</g>
|
||||
<g id="g482" fill="#000">
|
||||
<path id="path484" d="M-65.4,9z"/>
|
||||
</g>
|
||||
<g id="g486" fill="#000">
|
||||
<path id="path488" d="m-111,109.6s-5.6,10,19.2,4c0,0,14-1.2,16.4-3.6,1.2,0.8,9.566,3.73,12.4,4.4,6.8,1.6,15.2-8.4,15.2-8.4s4.6-10.5,7.4-10.5-0.4,1.6-0.4,1.6-6.6,10.1-6.2,11.7c0,0-5.2,20-21.2,20.8,0,0-16.15,0.95-14.8,6.8,0,0,8.8-2.4,11.2,0,0,0,10.8-0.4,2.8,6l-6.8,11.6s0.14,3.92-10,0.4c-9.8-3.4-20.1-16.3-20.1-16.3s-15.95-14.55-5.1-28.5z"/>
|
||||
</g>
|
||||
<g id="g490" fill="#e59999">
|
||||
<path id="path492" d="m-112.2,113.6s-2,9.6,34.8-0.8l6.8,0.8c2.4,0.8,14.4,3.6,16.4,2.4,0,0-7.2,13.6-18.8,12,0,0-13.2,1.6-12.8,6.4,0,0,4,7.2,8.8,9.6,0,0,2.8,2.4,2.4,5.6s-3.2,4.8-5.2,5.6-5.2-2.4-6.8-2.4-10-6.4-14.4-11.2-12.8-16.8-12.4-19.6,1.2-8.4,1.2-8.4z"/>
|
||||
</g>
|
||||
<g id="g494" fill="#b26565">
|
||||
<path id="path496" d="m-109,131.05c2.6,3.95,5.8,8.15,8,10.55,4.4,4.8,12.8,11.2,14.4,11.2s4.8,3.2,6.8,2.4,4.8-2.4,5.2-5.6-2.4-5.6-2.4-5.6c-3.066-1.53-5.806-5.02-7.385-7.35,0,0,0.185,2.55-5.015,1.75s-10.4-3.6-12-6.8-4-5.6-2.4-2,4,7.2,5.6,7.6,1.2,1.6-1.2,1.2-5.2-0.8-9.6-6z"/>
|
||||
</g>
|
||||
<g id="g498" fill="#992600">
|
||||
<path id="path500" d="m-111.6,110s1.8-13.6,3-17.6c0,0-0.8-6.8,1.6-11s4.4-10.4,7.4-15.8,3.2-9.4,7.2-11,10-10.2,12.8-11.2,2.6-0.2,2.6-0.2,6.8-14.8,20.4-10.8c0,0-16.2-2.8-0.4-12.2,0,0-4.8,1.1-1.5-5.9,2.201-4.668,1.7,2.1-9.3,13.9,0,0-5,8.6-10.2,11.6s-17.2,10-18.4,13.8-4.4,9.6-6.4,11.2-4.8,5.8-5.2,9.2c0,0-1.2,4-2.6,5.2s-1.6,4.4-1.6,6.4-2,4.8-1.8,7.2c0,0,0.8,19,0.4,21l2-3.8z"/>
|
||||
</g>
|
||||
<g id="g502" fill="#FFF">
|
||||
<path id="path504" d="m-120.2,114.6s-2-1.4-6.4,4.6c0,0,7.3,33,7.3,34.4,0,0,1.1-2.1-0.2-9.3s-2.2-19.9-2.2-19.9l1.5-9.8z"/>
|
||||
</g>
|
||||
<g id="g506" fill="#992600">
|
||||
<path id="path508" d="m-98.6,54s-17.6,3.2-17.2,32.4l-0.8,24.8s-1.2-25.6-2.4-27.2,2.8-12.8-0.4-6.8c0,0-14,14-6,35.2,0,0,1.5,3.3-1.5-1.3,0,0-4.6-12.6-3.5-19,0,0,0.2-2.2,2.1-5,0,0,8.6-11.7,11.3-14,0,0,1.8-14.4,17.2-19.6,0,0,5.7-2.3,1.2,0.5z"/>
|
||||
</g>
|
||||
<g id="g510" fill="#000">
|
||||
<path id="path512" d="m40.8-12.2c0.66-0.354,0.651-1.324,1.231-1.497,1.149-0.344,1.313-1.411,1.831-2.195,0.873-1.319,1.066-2.852,1.648-4.343,0.272-0.7,0.299-1.655-0.014-2.315-1.174-2.481-1.876-4.93-3.318-7.356-0.268-0.45-0.53-1.244-0.731-1.842-0.463-1.384-1.72-2.375-2.58-3.695-0.288-0.441,0.237-1.366-0.479-1.45-0.897-0.105-2.346-0.685-2.579,0.341-0.588,2.587,0.423,5.11,1.391,7.552-0.782,0.692-0.448,1.613-0.296,2.38,0.71,3.606-0.488,6.958-1.249,10.432-0.023,0.104,0.319,0.302,0.291,0.364-1.222,2.686-2.674,5.131-4.493,7.512-0.758,0.992-1.63,1.908-2.127,2.971-0.368,0.787-0.776,1.753-0.526,2.741-3.435,2.78-5.685,6.625-8.296,10.471-0.462,0.68-0.171,1.889,0.38,2.158,0.813,0.398,1.769-0.626,2.239-1.472,0.389-0.698,0.742-1.348,1.233-1.991,0.133-0.175-0.046-0.594,0.089-0.715,2.633-2.347,4.302-5.283,6.755-7.651,1.95-0.329,3.487-1.327,5.235-2.34,0.308-0.179,0.832,0.07,1.122-0.125,1.753-1.177,1.751-3.213,1.857-5.123,0.05-0.884,0.246-2.201,1.386-2.812z"/>
|
||||
</g>
|
||||
<g id="g514" fill="#000">
|
||||
<path id="path516" d="m31.959-16.666c0.124-0.077-0.031-0.5,0.078-0.716,0.162-0.324,0.565-0.512,0.727-0.836,0.109-0.216-0.054-0.596,0.082-0.738,2.333-2.447,2.59-5.471,1.554-8.444,1.024-0.62,1.085-1.882,0.66-2.729-0.853-1.7-1.046-3.626-2.021-5.169-0.802-1.269-2.38-2.513-3.751-1.21-0.421,0.4-0.742,1.187-0.464,1.899,0.064,0.163,0.349,0.309,0.322,0.391-0.107,0.324-0.653,0.548-0.659,0.82-0.03,1.496-0.984,3.007-0.354,4.336,0.772,1.629,1.591,3.486,2.267,5.262-1.234,2.116-0.201,4.565-1.954,6.442-0.136,0.146-0.127,0.532-0.005,0.734,0.292,0.486,0.698,0.892,1.184,1.184,0.202,0.121,0.55,0.123,0.75-0.001,0.578-0.362,0.976-0.849,1.584-1.225z"/>
|
||||
</g>
|
||||
<g id="g518" fill="#000">
|
||||
<path id="path520" d="m94.771-26.977c1.389,1.792,1.679,4.587-0.37,5.977,0.55,3.309,3.901,1.33,5.999,0.8-0.11-0.388,0.12-0.732,0.4-0.737,1.06-0.015,1.74-1.047,2.8-0.863,0.44-1.557,2.07-2.259,2.72-3.639,1.72-3.695,1.13-7.968-1.45-11.214-0.2-0.254,0.01-0.771-0.11-1.133-0.76-2.211-2.82-2.526-4.76-3.214-1.176-3.875-1.837-7.906-3.599-11.6-1.614-0.25-2.312-1.989-3.649-2.709-1.333-0.719-1.901,0.86-1.86,1.906,0.007,0.205,0.459,0.429,0.289,0.794-0.076,0.164-0.336,0.275-0.336,0.409,0.001,0.135,0.222,0.266,0.356,0.4-0.918,0.82-2.341,1.297-2.636,2.442-0.954,3.71,1.619,6.835,3.287,10.036,0.591,1.135-0.145,2.406-0.905,3.614-0.438,0.695-0.33,1.822-0.054,2.678,0.752,2.331,2.343,4.07,3.878,6.053z"/>
|
||||
</g>
|
||||
<g id="g522" fill="#000">
|
||||
<path id="path524" d="m57.611-8.591c-1.487,1.851-4.899,4.42-1.982,6.348,0.194,0.129,0.564,0.133,0.737-0.001,2.021-1.565,4.024-2.468,6.46-3.05,0.124-0.029,0.398,0.438,0.767,0.277,1.613-0.703,3.623-0.645,4.807-1.983,3.767,0.224,7.332-0.892,10.723-2.2,1.161-0.448,2.431-1.007,3.632-1.509,1.376-0.576,2.58-1.504,3.692-2.645,0.133-0.136,0.487-0.046,0.754-0.046-0.04-0.863,0.922-0.99,1.169-1.612,0.092-0.232-0.058-0.628,0.075-0.73,2.138-1.63,3.058-3.648,1.889-6.025-0.285-0.578-0.534-1.196-1.1-1.672-1.085-0.911-2.187-0.057-3.234-0.361-0.159,0.628-0.888,0.456-1.274,0.654-0.859,0.439-2.192-0.146-3.051,0.292-1.362,0.695-2.603,0.864-4.025,1.241-0.312,0.082-1.09-0.014-1.25,0.613-0.134-0.134-0.282-0.368-0.388-0.346-1.908,0.396-3.168,0.61-4.469,2.302-0.103,0.133-0.545-0.046-0.704,0.089-0.957,0.808-1.362,2.042-2.463,2.714-0.201,0.123-0.553-0.045-0.747,0.084-0.646,0.431-1.013,1.072-1.655,1.519-0.329,0.229-0.729-0.096-0.697-0.352,0.245-1.947,0.898-3.734,0.323-5.61,2.077-2.52,4.594-4.469,6.4-7.2,0.015-2.166,0.707-4.312,0.594-6.389-0.01-0.193-0.298-0.926-0.424-1.273-0.312-0.854,0.594-1.92-0.25-2.644-1.404-1.203-2.696-0.327-3.52,1.106-1.838,0.39-3.904,1.083-5.482-0.151-1.007-0.787-1.585-1.693-2.384-2.749-0.985-1.302-0.65-2.738-0.58-4.302,0.006-0.128-0.309-0.264-0.309-0.398,0.001-0.135,0.221-0.266,0.355-0.4-0.706-0.626-0.981-1.684-2-2,0.305-1.092-0.371-1.976-1.242-2.278-1.995-0.691-3.672,1.221-5.564,1.294-0.514,0.019-0.981-1.019-1.63-1.344-0.432-0.216-1.136-0.249-1.498,0.017-0.688,0.504-1.277,0.618-2.035,0.823-1.617,0.436-2.895,1.53-4.375,2.385-1.485,0.857-2.44,2.294-3.52,3.614-0.941,1.152-1.077,3.566,0.343,4.066,1.843,0.65,3.147-2.053,5.113-1.727,0.312,0.051,0.518,0.362,0.408,0.75,0.389,0.109,0.607-0.12,0.8-0.4,0.858,1.019,2.022,1.356,2.96,2.229,0.97,0.904,2.716,0.486,3.731,1.483,1.529,1.502,0.97,4.183,2.909,5.488-0.586,1.313-1.193,2.59-1.528,4.017-0.282,1.206,0.712,2.403,1.923,2.312,1.258-0.094,1.52-0.853,2.005-1.929,0.267,0.267,0.736,0.564,0.695,0.78-0.457,2.387-1.484,4.38-1.942,6.811-0.059,0.317-0.364,0.519-0.753,0.409-0.468,4.149-4.52,6.543-7.065,9.708-0.403,0.502-0.407,1.751,0.002,2.154,1.403,1.387,3.363-0.159,5.063-0.662,0.213-1.206,1.072-2.148,2.404-2.092,0.256,0.01,0.491-0.532,0.815-0.662,0.348-0.138,0.85,0.086,1.136-0.112,1.729-1.195,3.137-2.301,4.875-3.49,0.192-0.131,0.536,0.028,0.752-0.08,0.325-0.162,0.512-0.549,0.835-0.734,0.348-0.2,0.59,0.09,0.783,0.37-0.646,0.349-0.65,1.306-1.232,1.508-0.775,0.268-1.336,0.781-2.01,1.228-0.292,0.193-0.951-0.055-1.055,0.124-0.598,1.028-1.782,1.466-2.492,2.349z"/>
|
||||
</g>
|
||||
<g id="g526" fill="#000">
|
||||
<path id="path528" d="m2.2-58s-9.238-2.872-20.4,22.8c0,0-2.4,5.2-4.8,7.2s-13.6,5.6-15.6,9.6l-10.4,16s14.8-16,18-18.4c0,0,8-8.4,4.8-1.6,0,0-14,10.8-12.8,20,0,0-5.6,14.4-6.4,16.4,0,0,16-32,18.4-33.2s3.6-1.2,2.4,2.4-1.6,20-4.4,22c0,0,8-20.4,7.2-23.6,0,0,3.2-3.6,5.6,1.6l-1.2,16,4.4,12s-2.4-11.2-0.8-26.8c0,0-2-10.4,2-4.8s13.6,11.6,13.6,16.4c0,0-5.2-17.6-14.4-22.4l-4,6-1.2-2s-3.6-0.8,0.8-7.6,4-7.6,4-7.6,6.4,7.2,8,7.2c0,0,13.2-7.6,14.4,16.8,0,0,6.8-14.4-2.4-21.2,0,0-14.8-2-13.6-7.2l7.2-12.4c3.6-5.2,2-2.4,2-2.4z"/>
|
||||
</g>
|
||||
<g id="g530" fill="#000">
|
||||
<path id="path532" d="m-17.8-41.6-16,5.2-7.2,9.6s17.2-10,21.2-11.2,2-3.6,2-3.6z"/>
|
||||
</g>
|
||||
<g id="g534" fill="#000">
|
||||
<path id="path536" d="m-57.8-35.2s-2,1.2-2.4,4-2.8,3.2-2,6,2.8,5.2,2.8,1.2,1.6-6,2.4-7.2,2.4-5.6-0.8-4z"/>
|
||||
</g>
|
||||
<g id="g538" fill="#000">
|
||||
<path id="path540" d="m-66.6,26s-8.4-4-11.6-7.6-2.748,1.566-7.6,1.2c-5.847-0.441-4.8-16.4-4.8-16.4l-4,7.6s-1.2,14.4,6.8,12c3.907-1.172,5.2,0.4,3.6,1.2s5.6,1.2,2.8,2.8,11.6-3.6,9.2,6.8l5.6-7.6z"/>
|
||||
</g>
|
||||
<g id="g542" fill="#000">
|
||||
<path id="path544" d="m-79.2,40.4s-15.4,4.4-19-5.2c0,0-4.8,2.4-2.6,5.4s3.4,3.4,3.4,3.4,5.4,1.2,4.8,2-3,4.2-3,4.2,10.2-6,16.4-9.8z"/>
|
||||
</g>
|
||||
<g id="g546" fill="#FFF">
|
||||
<path id="path548" d="m149.2,118.6c-0.43,2.14-2.1,2.94-4,3.6-1.92-0.96-4.51-4.06-6.4-2-0.47-0.48-1.25-0.54-1.6-1.2-0.46-0.9-0.19-1.94-0.53-2.74-0.55-1.28-1.25-2.64-1.07-4.06,1.81-0.71,2.4-2.62,1.93-4.38-0.07-0.26-0.5-0.45-0.3-0.8,0.19-0.33,0.5-0.55,0.77-0.82-0.13,0.14-0.28,0.37-0.39,0.35-0.61-0.11-0.49-0.75-0.36-1.13,0.59-1.75,2.6-2.01,3.95-0.82,0.26-0.56,0.77-0.37,1.2-0.4-0.05-0.58,0.36-1.11,0.56-1.53,0.52-1.09,2.14,0.01,2.94-0.6,1.08-0.83,2.14-1.52,3.22-0.92,1.81,1.01,3.52,2.22,4.72,3.97,0.57,0.83,0.81,2.11,0.75,3.07-0.04,0.65-1.42,0.29-1.76,1.22-0.65,1.75,1.19,2.27,1.94,3.61,0.2,0.35-0.06,0.65-0.38,0.75-0.41,0.13-1.19-0.06-1.06,0.39,0.98,3.19-1.78,3.87-4.13,4.44z"/>
|
||||
</g>
|
||||
<g id="g550" fill="#FFF">
|
||||
<path id="path552" d="m139.6,138.2c-0.01-1.74-1.61-3.49-0.4-5.2,0.14,0.14,0.27,0.36,0.4,0.36,0.14,0,0.27-0.22,0.4-0.36,1.5,2.22,5.15,3.14,5.01,5.99-0.03,0.45-1.11,1.37-0.21,2.01-1.81,1.35-1.87,3.72-2.8,5.6-1.24-0.28-2.45-0.65-3.6-1.2,0.35-1.48,0.24-3.17,1.06-4.49,0.43-0.7,0.14-1.78,0.14-2.71z"/>
|
||||
</g>
|
||||
<g id="g554" fill="#CCC">
|
||||
<path id="path556" d="m-26.6,129.2s-16.858,10.14-2.8-5.2c8.8-9.6,18.8-15.2,18.8-15.2s10.4-4.4,14-5.6,18.8-6.4,22-6.8,12.8-4.4,19.6-0.4,14.8,8.4,14.8,8.4-16.4-8.4-20-6-10.8,2-16.8,5.2c0,0-14.8,4.4-18,6.4s-13.6,13.6-15.2,12.8,0.4-1.2,1.6-4-0.8-4.4-8.8,2-9.2,8.4-9.2,8.4z"/>
|
||||
</g>
|
||||
<g id="g558" fill="#000">
|
||||
<path id="path560" d="m-19.195,123.23s1.41-13.04,9.888-11.37c0,0,8.226-4.17,10.948-6.14,0,0,8.139-1.7,9.449-2.32,18.479-8.698,33.198-4.179,33.745-5.299,0.546-1.119,20.171,5.999,23.78,10.079,0.391,0.45-10.231-5.59-19.929-7.48-8.273-1.617-29.875,0.24-40.781,5.78-2.973,1.51-11.918,7.29-14.449,7.18s-12.651,9.57-12.651,9.57z"/>
|
||||
</g>
|
||||
<g id="g562" fill="#CCC">
|
||||
<path id="path564" d="m-23,148.8s-15.2-2.4,1.6-4c0,0,18-2,22-7.2,0,0,13.6-9.2,16.4-9.6s32.8-7.6,33.2-10,6-2.4,7.6-1.6,0.8,2-2,2.8-34,17.2-40.4,18.4-18,8.8-22.8,10-15.6,1.2-15.6,1.2z"/>
|
||||
</g>
|
||||
<g id="g566" fill="#000">
|
||||
<path id="path568" d="m-3.48,141.4s-8.582-0.83,0.019-1.64c0,0,8.816-3.43,10.864-6.09,0,0,6.964-4.71,8.397-4.92,1.434-0.2,15.394-3.89,15.599-5.12s34.271-13.81,38.691-10.62c2.911,2.1-6.99,0.43-16.624,4.84-1.355,0.62-35.208,15.2-38.485,15.82-3.277,0.61-9.216,4.5-11.674,5.12-2.457,0.61-6.787,2.61-6.787,2.61z"/>
|
||||
</g>
|
||||
<g id="g570" fill="#000">
|
||||
<path id="path572" d="m-11.4,143.6s5.2-0.4,4,1.2-3.6,0.8-3.6,0.8l-0.4-2z"/>
|
||||
</g>
|
||||
<g id="g574" fill="#000">
|
||||
<path id="path576" d="m-18.6,145.2s5.2-0.4,4,1.2-3.6,0.8-3.6,0.8l-0.4-2z"/>
|
||||
</g>
|
||||
<g id="g578" fill="#000">
|
||||
<path id="path580" d="m-29,146.8s5.2-0.4,4,1.2-3.6,0.8-3.6,0.8l-0.4-2z"/>
|
||||
</g>
|
||||
<g id="g582" fill="#000">
|
||||
<path id="path584" d="m-36.6,147.6s5.2-0.4,4,1.2-3.6,0.8-3.6,0.8l-0.4-2z"/>
|
||||
</g>
|
||||
<g id="g586" fill="#000">
|
||||
<path id="path588" d="m1.8,108,3.2,1.6c-1.2,1.6-4.4,1.2-4.4,1.2l1.2-2.8z"/>
|
||||
</g>
|
||||
<g id="g590" fill="#000">
|
||||
<path id="path592" d="m-8.2,113.6s6.506-2.14,4,1.2c-1.2,1.6-3.6,0.8-3.6,0.8l-0.4-2z"/>
|
||||
</g>
|
||||
<g id="g594" fill="#000">
|
||||
<path id="path596" d="m-19.4,118.4s5.2-0.4,4,1.2-3.6,0.8-3.6,0.8l-0.4-2z"/>
|
||||
</g>
|
||||
<g id="g598" fill="#000">
|
||||
<path id="path600" d="m-27,124.4s5.2-0.4,4,1.2-3.6,0.8-3.6,0.8l-0.4-2z"/>
|
||||
</g>
|
||||
<g id="g602" fill="#000">
|
||||
<path id="path604" d="m-33.8,129.2s5.2-0.4,4,1.2-3.6,0.8-3.6,0.8l-0.4-2z"/>
|
||||
</g>
|
||||
<g id="g606" fill="#000">
|
||||
<path id="path608" d="m5.282,135.6s6.921-0.53,5.324,1.6c-1.597,2.12-4.792,1.06-4.792,1.06l-0.532-2.66z"/>
|
||||
</g>
|
||||
<g id="g610" fill="#000">
|
||||
<path id="path612" d="m15.682,130.8s6.921-0.53,5.324,1.6c-1.597,2.12-4.792,1.06-4.792,1.06l-0.532-2.66z"/>
|
||||
</g>
|
||||
<g id="g614" fill="#000">
|
||||
<path id="path616" d="m26.482,126.4s6.921-0.53,5.324,1.6c-1.597,2.12-4.792,1.06-4.792,1.06l-0.532-2.66z"/>
|
||||
</g>
|
||||
<g id="g618" fill="#000">
|
||||
<path id="path620" d="m36.882,121.6s6.921-0.53,5.324,1.6c-1.597,2.12-4.792,1.06-4.792,1.06l-0.532-2.66z"/>
|
||||
</g>
|
||||
<g id="g622" fill="#000">
|
||||
<path id="path624" d="m9.282,103.6s6.921-0.53,5.324,1.6c-1.597,2.12-5.592,1.86-5.592,1.86l0.268-3.46z"/>
|
||||
</g>
|
||||
<g id="g626" fill="#000">
|
||||
<path id="path628" d="m19.282,100.4s6.921-0.534,5.324,1.6c-1.597,2.12-5.992,1.86-5.992,1.86l0.668-3.46z"/>
|
||||
</g>
|
||||
<g id="g630" fill="#000">
|
||||
<path id="path632" d="m-3.4,140.4s5.2-0.4,4,1.2-3.6,0.8-3.6,0.8l-0.4-2z"/>
|
||||
</g>
|
||||
<g id="g634" fill="#992600">
|
||||
<path id="path636" d="m-76.6,41.2s-4.4,8.8-4.8,12c0,0,0.8-8.8,2-10.8s2.8-1.2,2.8-1.2z"/>
|
||||
</g>
|
||||
<g id="g638" fill="#992600">
|
||||
<path id="path640" d="m-95,55.2s-3.2,14.4-2.8,17.2c0,0-1.2-11.6-0.8-12.8s3.6-4.4,3.6-4.4z"/>
|
||||
</g>
|
||||
<g id="g642" fill="#CCC">
|
||||
<path id="path644" d="m-74.2-19.4-0.2,3.2-2.2,0.2s14.2,12.6,14.8,20.2c0,0,0.8-8.2-12.4-23.6z"/>
|
||||
</g>
|
||||
<g id="g646" fill="#000">
|
||||
<path id="path648" d="m-70.216-18.135c-0.431-0.416-0.212-1.161-0.62-1.421-0.809-0.516,1.298-0.573,1.07-1.289-0.383-1.206-0.196-1.227-0.318-2.503-0.057-0.598,0.531-2.138,0.916-2.578,1.446-1.652,0.122-4.584,1.762-6.135,0.304-0.289,0.68-0.841,0.965-1.259,0.659-0.963,1.843-1.451,2.793-2.279,0.318-0.276,0.117-1.103,0.686-1.011,0.714,0.115,1.955-0.015,1.91,0.826-0.113,2.12-1.442,3.84-2.722,5.508,0.451,0.704-0.007,1.339-0.291,1.896-1.335,2.62-1.146,5.461-1.32,8.301-0.005,0.085-0.312,0.163-0.304,0.216,0.353,2.335,0.937,4.534,1.816,6.763,0.366,0.93,0.837,1.825,0.987,2.752,0.111,0.686,0.214,1.519-0.194,2.224,2.035,2.89,0.726,5.541,1.895,9.072,0.207,0.625,1.899,2.539,1.436,2.378-2.513-0.871-2.625-1.269-2.802-2.022-0.146-0.623-0.476-2-0.713-2.602-0.064-0.164-0.235-2.048-0.313-2.17-1.513-2.382-0.155-2.206-1.525-4.564-1.428-0.68-2.394-1.784-3.517-2.946-0.198-0.204,0.945-0.928,0.764-1.141-1.092-1.289-2.245-2.056-1.909-3.549,0.155-0.69,0.292-1.747-0.452-2.467z"/>
|
||||
</g>
|
||||
<g id="g650" fill="#000">
|
||||
<path id="path652" d="m-73.8-16.4s0.4,6.8,2.8,8.4,1.2,0.8-2-0.4-2-2-2-2-2.8,0.4-0.4,2.4,6,4.4,4.4,4.4-9.2-4-9.2-6.8-1-6.9-1-6.9,1.1-0.8,5.9-0.7c0,0,1.4,0.7,1.5,1.6z"/>
|
||||
</g>
|
||||
<g id="g654" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path656" d="m-74.6,2.2s-8.52-2.791-27,0.6c0,0,9.031-2.078,27.8,0.2,10.3,1.25-0.8-0.8-0.8-0.8z"/>
|
||||
</g>
|
||||
<g id="g658" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path660" d="m-72.502,2.129s-8.246-3.518-26.951-1.737c0,0,9.178-1.289,27.679,2.603,10.154,2.136-0.728-0.866-0.728-0.866z"/>
|
||||
</g>
|
||||
<g id="g662" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path664" d="m-70.714,2.222s-7.962-4.121-26.747-3.736c0,0,9.248-0.604,27.409,4.654,9.966,2.885-0.662-0.918-0.662-0.918z"/>
|
||||
</g>
|
||||
<g id="g666" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path668" d="m-69.444,2.445s-6.824-4.307-23.698-5.405c0,0,8.339,0.17,24.22,6.279,8.716,3.353-0.522-0.874-0.522-0.874z"/>
|
||||
</g>
|
||||
<g id="g670" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path672" d="m45.84,12.961s-0.93,0.644-0.716-0.537c0.215-1.181,28.423-14.351,32.037-14.101,0,0-30.248,13.206-31.321,14.638z"/>
|
||||
</g>
|
||||
<g id="g674" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path676" d="m42.446,13.6s-0.876,0.715-0.755-0.479,27.208-16.539,30.83-16.573c0,0-29.117,15.541-30.075,17.052z"/>
|
||||
</g>
|
||||
<g id="g678" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path680" d="m39.16,14.975s-0.828,0.772-0.786-0.428c0.042-1.199,19.859-16.696,29.671-18.57,0,0-18.03,8.127-28.885,18.998z"/>
|
||||
</g>
|
||||
<g id="g682" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path684" d="m36.284,16.838s-0.745,0.694-0.707-0.385c0.038-1.08,17.872-15.027,26.703-16.713,0,0-16.226,7.314-25.996,17.098z"/>
|
||||
</g>
|
||||
<g id="g686" fill="#CCC">
|
||||
<path id="path688" d="m4.6,164.8s-15.2-2.4,1.6-4c0,0,18-2,22-7.2,0,0,13.6-9.2,16.4-9.6s19.2-4,19.6-6.4,6.4-4.8,8-4,1.6,10-1.2,10.8-21.6,8-28,9.2-18,8.8-22.8,10-15.6,1.2-15.6,1.2z"/>
|
||||
</g>
|
||||
<g id="g690" fill="#000">
|
||||
<path id="path692" d="m77.6,127.4s-3,1.6-4.2,4.2c0,0-6.4,10.6-20.6,13.8,0,0-23,9-30.8,11,0,0-13.4,5-20.8,4.2,0,0-7,0.2-0.8,1.8,0,0,20.2-2,23.6-3.8,0,0,15.6-5.2,18.6-7.8s21.2-7.6,23.4-9.6,12-10.4,11.6-13.8z"/>
|
||||
</g>
|
||||
<g id="g694" fill="#000">
|
||||
<path id="path696" d="m18.882,158.91s5.229-0.23,4.076,1.32-3.601,0.68-3.601,0.68l-0.475-2z"/>
|
||||
</g>
|
||||
<g id="g698" fill="#000">
|
||||
<path id="path700" d="m11.68,160.26s5.228-0.22,4.076,1.33c-1.153,1.55-3.601,0.67-3.601,0.67l-0.475-2z"/>
|
||||
</g>
|
||||
<g id="g702" fill="#000">
|
||||
<path id="path704" d="m1.251,161.51s5.229-0.23,4.076,1.32-3.601,0.68-3.601,0.68l-0.475-2z"/>
|
||||
</g>
|
||||
<g id="g706" fill="#000">
|
||||
<path id="path708" d="m-6.383,162.06s5.229-0.23,4.076,1.32-3.601,0.67-3.601,0.67l-0.475-1.99z"/>
|
||||
</g>
|
||||
<g id="g710" fill="#000">
|
||||
<path id="path712" d="m35.415,151.51s6.96-0.3,5.425,1.76c-1.534,2.07-4.793,0.9-4.793,0.9l-0.632-2.66z"/>
|
||||
</g>
|
||||
<g id="g714" fill="#000">
|
||||
<path id="path716" d="m45.73,147.09s5.959-3.3,5.425,1.76c-0.27,2.55-4.793,0.9-4.793,0.9l-0.632-2.66z"/>
|
||||
</g>
|
||||
<g id="g718" fill="#000">
|
||||
<path id="path720" d="m54.862,144.27s7.159-3.7,5.425,1.77c-0.778,2.44-4.794,0.9-4.794,0.9l-0.631-2.67z"/>
|
||||
</g>
|
||||
<g id="g722" fill="#000">
|
||||
<path id="path724" d="m64.376,139.45s4.359-4.9,5.425,1.76c0.406,2.54-4.793,0.9-4.793,0.9l-0.632-2.66z"/>
|
||||
</g>
|
||||
<g id="g726" fill="#000">
|
||||
<path id="path728" d="m26.834,156s5.228-0.23,4.076,1.32c-1.153,1.55-3.602,0.68-3.602,0.68l-0.474-2z"/>
|
||||
</g>
|
||||
<g id="g730" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path732" d="m62.434,34.603s-0.726,0.665-0.727-0.406c0-1.07,17.484-14.334,26.327-15.718,0,0-16.099,6.729-25.6,16.124z"/>
|
||||
</g>
|
||||
<g id="g734" fill="#000">
|
||||
<path id="path736" d="m65.4,98.4s22.001,22.4,31.201,26c0,0,9.199,11.2,5.199,37.2,0,0-3.199,7.6-6.399-13.2,0,0,3.2-25.2-8-9.2,0,0-8.401-9.9-2.001-9.6,0,0,3.201,2,3.601,0.4s-7.601-15.2-24.801-29.6,1.2-2,1.2-2z"/>
|
||||
</g>
|
||||
<g id="g738" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path740" d="m7,137.2s-0.2-1.8,1.6-1,96,7,127.6,31c0,0-45.199-23.2-129.2-30z"/>
|
||||
</g>
|
||||
<g id="g742" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path744" d="m17.4,132.8s-0.2-1.8,1.6-1,138.4-0.2,162,32.2c0,0-22-25.2-163.6-31.2z"/>
|
||||
</g>
|
||||
<g id="g746" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path748" d="m29,128.8s-0.2-1.8,1.6-1,175.2-12.2,198.8,20.2c0,0-9.6-25.6-200.4-19.2z"/>
|
||||
</g>
|
||||
<g id="g750" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path752" d="m39,124s-0.2-1.8,1.6-1,124-37.8,147.6-5.4c0,0-13.4-24.6-149.2,6.4z"/>
|
||||
</g>
|
||||
<g id="g754" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path756" d="m-19,146.8s-0.2-1.8,1.6-1,19.6,3,21.6,41.8c0,0-7.2-42-23.2-40.8z"/>
|
||||
</g>
|
||||
<g id="g758" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path760" d="m-27.8,148.4s-0.2-1.8,1.6-1,16-3.8,13.2,35c0,0,1.2-35.2-14.8-34z"/>
|
||||
</g>
|
||||
<g id="g762" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path764" d="m-35.8,148.8s-0.2-1.8,1.6-1,17.2,1.4,4.8,23.8c0,0,9.6-24-6.4-22.8z"/>
|
||||
</g>
|
||||
<g id="g766" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path768" d="m11.526,104.46s-0.444,2,1.105,0.79c16.068-12.628,48.51-71.53,104.2-77.164,0,0-38.312-12.11-105.3,76.374z"/>
|
||||
</g>
|
||||
<g id="g770" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path772" d="m22.726,102.66s-1.363-1.19,0.505-1.81c1.868-0.63,114.31-73.13,153.6-65.164,0,0-27.11-7.51-154.1,66.974z"/>
|
||||
</g>
|
||||
<g id="g774" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path776" d="m1.885,108.77s-0.509,1.6,1.202,0.62c8.975-5.12,12.59-62.331,56.167-63.586,0,0-32.411-14.714-57.369,62.966z"/>
|
||||
</g>
|
||||
<g id="g778" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path780" d="m-18.038,119.79s-1.077,1.29,0.876,1.03c10.246-1.33,31.651-42.598,76.09-37.519,0,0-31.966-14.346-76.966,36.489z"/>
|
||||
</g>
|
||||
<g id="g782" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path784" d="m-6.8,113.67s-0.811,1.47,1.058,0.84c9.799-3.27,22.883-47.885,67.471-51.432,0,0-34.126-7.943-68.529,50.592z"/>
|
||||
</g>
|
||||
<g id="g786" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path788" d="m-25.078,124.91s-0.873,1.04,0.709,0.84c8.299-1.08,25.637-34.51,61.633-30.396,0,0-25.893-11.62-62.342,29.556z"/>
|
||||
</g>
|
||||
<g id="g790" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path792" d="m-32.677,130.82s-1.005,1.05,0.586,0.93c4.168-0.31,34.806-33.39,53.274-17.89,0,0-12.015-18.721-53.86,16.96z"/>
|
||||
</g>
|
||||
<g id="g794" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path796" d="m36.855,98.898s-1.201-1.355,0.731-1.74c1.932-0.384,122.63-58.097,160.59-45.231,0,0-25.94-10.874-161.32,46.971z"/>
|
||||
</g>
|
||||
<g id="g798" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path800" d="m3.4,163.2s-0.2-1.8,1.6-1,17.2,1.4,4.8,23.8c0,0,9.6-24-6.4-22.8z"/>
|
||||
</g>
|
||||
<g id="g802" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path804" d="m13.8,161.6s-0.2-1.8,1.6-1,19.6,3,21.6,41.8c0,0-7.2-42-23.2-40.8z"/>
|
||||
</g>
|
||||
<g id="g806" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path808" d="m20.6,160s-0.2-1.8,1.6-1,26.4,4.2,50,36.6c0,0-35.6-36.8-51.6-35.6z"/>
|
||||
</g>
|
||||
<g id="g810" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path812" d="m28.225,157.97s-0.437-1.76,1.453-1.2c1.89,0.55,22.324-1.35,60.421,32.83,0,0-46.175-34.94-61.874-31.63z"/>
|
||||
</g>
|
||||
<g id="g814" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path816" d="m38.625,153.57s-0.437-1.76,1.453-1.2c1.89,0.55,36.724,5.05,88.422,40.03,0,0-74.176-42.14-89.875-38.83z"/>
|
||||
</g>
|
||||
<g id="g818" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path820" d="m-1.8,142s-0.2-1.8,1.6-1,55.2,3.4,85.6,30.2c0,0-34.901-24.77-87.2-29.2z"/>
|
||||
</g>
|
||||
<g id="g822" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path824" d="m-11.8,146s-0.2-1.8,1.6-1,26.4,4.2,50,36.6c0,0-35.6-36.8-51.6-35.6z"/>
|
||||
</g>
|
||||
<g id="g826" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path828" d="m49.503,148.96s-0.565-1.72,1.361-1.3c1.926,0.41,36.996,2.34,91.116,33.44,0,0-77.663-34.4-92.477-32.14z"/>
|
||||
</g>
|
||||
<g id="g830" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path832" d="m57.903,146.56s-0.565-1.72,1.361-1.3c1.926,0.41,36.996,2.34,91.116,33.44,0,0-77.063-34.8-92.477-32.14z"/>
|
||||
</g>
|
||||
<g id="g834" stroke-width="0.1" stroke="#000" fill="#FFF">
|
||||
<path id="path836" d="m67.503,141.56s-0.565-1.72,1.361-1.3c1.926,0.41,44.996,4.74,134.72,39.04,0,0-120.66-40.4-136.08-37.74z"/>
|
||||
</g>
|
||||
<g id="g838" fill="#000">
|
||||
<path id="path840" d="m-43.8,148.4s5.2-0.4,4,1.2-3.6,0.8-3.6,0.8l-0.4-2z"/>
|
||||
</g>
|
||||
<g id="g842" fill="#000">
|
||||
<path id="path844" d="m-13,162.4s5.2-0.4,4,1.2-3.6,0.8-3.6,0.8l-0.4-2z"/>
|
||||
</g>
|
||||
<g id="g846" fill="#000">
|
||||
<path id="path848" d="m-21.8,162s5.2-0.4,4,1.2-3.6,0.8-3.6,0.8l-0.4-2z"/>
|
||||
</g>
|
||||
<g id="g850" fill="#000">
|
||||
<path id="path852" d="m-117.17,150.18s5.05,1.32,3.39,2.44-3.67-0.42-3.67-0.42l0.28-2.02z"/>
|
||||
</g>
|
||||
<g id="g854" fill="#000">
|
||||
<path id="path856" d="m-115.17,140.58s5.05,1.32,3.39,2.44-3.67-0.42-3.67-0.42l0.28-2.02z"/>
|
||||
</g>
|
||||
<g id="g858" fill="#000">
|
||||
<path id="path860" d="m-122.37,136.18s5.05,1.32,3.39,2.44-3.67-0.42-3.67-0.42l0.28-2.02z"/>
|
||||
</g>
|
||||
<g id="g862" fill="#CCC">
|
||||
<path id="path864" d="m-42.6,211.2-5.6,2c-2,0-13.2,3.6-18.8,13.6,0,0,12.4-9.6,24.4-15.6z"/>
|
||||
</g>
|
||||
<g id="g866" fill="#CCC">
|
||||
<path id="path868" d="m45.116,303.85c0.141,0.25,0.196,0.67,0.488,0.69,0.658,0.04,1.891,0.34,1.766-0.29-0.848-4.31-1.722-9.25-5.855-11.05-0.639-0.28-2.081,0.13-2.155,1.02-0.127,1.52-0.244,2.87,0.065,4.33,0.3,1.43,2.458,1.43,3.375,0.05,0.936,1.67,1.368,3.52,2.316,5.25z"/>
|
||||
</g>
|
||||
<g id="g870" fill="#CCC">
|
||||
<path id="path872" d="m34.038,308.58c0.748,1.41,0.621,3.27,2.036,3.84,0.74,0.29,2.59-0.68,2.172-1.76-0.802-2.06-1.19-4.3-2.579-6.11-0.2-0.26,0.04-0.79-0.12-1.12-0.594-1.22-1.739-1.96-3.147-1.63-1.115,2.2,0.033,4.33,1.555,6.04,0.136,0.15-0.03,0.53,0.083,0.74z"/>
|
||||
</g>
|
||||
<g id="g874" fill="#CCC">
|
||||
<path id="path876" d="m-5.564,303.39c-0.108-0.38-0.146-0.84,0.019-1.16,0.531-1.03,1.324-2.15,0.987-3.18-0.348-1.05-1.464-0.87-2.114-0.3-1.135,0.99-1.184,2.82-1.875,4.18-0.196,0.38-0.145,0.96-0.586,1.35-0.474,0.42-0.914,1.94-0.818,2.51,0.053,0.32-0.13,10.22,0.092,9.96,0.619-0.73,3.669-10.47,3.738-11.36,0.057-0.73,0.789-1.19,0.557-2z"/>
|
||||
</g>
|
||||
<g id="g878" fill="#CCC">
|
||||
<path id="path880" d="m-31.202,296.6c2.634-2.5,5.424-5.46,4.982-9.17-0.116-0.98-1.891-0.45-2.078,0.39-0.802,3.63-2.841,6.29-5.409,8.68-2.196,2.05-4.058,8.39-4.293,8.9,3.697-5.26,5.954-8,6.798-8.8z"/>
|
||||
</g>
|
||||
<g id="g882" fill="#CCC">
|
||||
<path id="path884" d="m-44.776,290.64c0.523-0.38,0.221-0.87,0.438-1.2,0.953-1.46,2.254-2.7,2.272-4.44,0.003-0.28-0.375-0.59-0.71-0.36-0.277,0.18-0.619,0.31-0.727,0.44-2.03,2.45-3.43,5.12-4.873,7.93-0.183,0.36-1.327,4.85-1.014,4.96,0.239,0.09,1.959-4.09,2.169-4.21,1.263-0.68,1.275-2.3,2.445-3.12z"/>
|
||||
</g>
|
||||
<g id="g886" fill="#CCC">
|
||||
<path id="path888" d="m-28.043,310.18c0.444-0.87,2.02-2.07,1.907-2.96-0.118-0.93,0.35-2.37-0.562-1.68-1.257,0.94-4.706,2.29-4.976,8.1-0.026,0.57,2.948-2.12,3.631-3.46z"/>
|
||||
</g>
|
||||
<g id="g890" fill="#CCC">
|
||||
<path id="path892" d="m-13.6,293c0.4-0.67,1.108-0.19,1.567-0.46,0.648-0.37,1.259-0.93,1.551-1.58,0.97-2.14,2.739-3.96,2.882-6.36-1.491-1.4-2.17,0.64-2.8,1.6-1.323-1.65-2.322,0.23-3.622,0.75-0.07,0.03-0.283-0.32-0.358-0.29-1.177,0.44-1.857,1.52-2.855,2.3-0.171,0.13-0.576-0.05-0.723,0.09-0.652,0.6-1.625,0.93-1.905,1.61-1.11,2.7-4.25,4.8-6.137,12.34,0.381,0.91,4.512-6.64,4.999-7.34,0.836-1.2,0.954,1.66,2.23,1,0.051-0.03,0.237,0.21,0.371,0.34,0.194-0.28,0.412-0.51,0.8-0.4,0-0.4-0.134-0.96,0.067-1.11,1.237-0.98,1.153-2.05,1.933-3.29,0.458,0.79,1.519,0.07,2,0.8z"/>
|
||||
</g>
|
||||
<g id="g894" fill="#CCC">
|
||||
<path id="path896" d="m46.2,347.4s7.4-20.4,3-31.6c0,0,11.4,21.6,6.8,32.8,0,0-0.4-10.4-4.4-15.4,0,0-4,12.8-5.4,14.2z"/>
|
||||
</g>
|
||||
<g id="g898" fill="#CCC">
|
||||
<path id="path900" d="m31.4,344.8s5.4-8.8-2.6-27.2c0,0-0.8,20.4-7.6,31.4,0,0,14.2-20.2,10.2-4.2z"/>
|
||||
</g>
|
||||
<g id="g902" fill="#CCC">
|
||||
<path id="path904" d="m21.4,342.8s-0.2-20,0.2-23c0,0-3.8,16.6-14,26.2,0,0,14.4-12,13.8-3.2z"/>
|
||||
</g>
|
||||
<g id="g906" fill="#CCC">
|
||||
<path id="path908" d="m11.8,310.8s6,13.6-4,32c0,0,6.4-12.2,1.6-19.2,0,0,2.6-3.4,2.4-12.8z"/>
|
||||
</g>
|
||||
<g id="g910" fill="#CCC">
|
||||
<path id="path912" d="m-7.4,342.4s-1-15.6,0.8-17.8c0,0,0.2-6.4-0.2-7.4,0,0,4-6.2,4.2,1.2,0,0,1.4,7.8,4.2,12.4,0,0,3.6,5.4,3.4,11.8,0,0-10-30.2-12.4-0.2z"/>
|
||||
</g>
|
||||
<g id="g914" fill="#CCC">
|
||||
<path id="path916" d="m-11,314.8s-6.6,10.8-8.4,29.8c0,0-1.4-6.2,2.4-20.6,0,0,4.2-15.4,6-9.2z"/>
|
||||
</g>
|
||||
<g id="g918" fill="#CCC">
|
||||
<path id="path920" d="m-32.8,334.6s5-5.4,6.4-10.4c0,0,3.6-15.8-2.8-7.2,0,0,0.2,8-8,15.4,0,0,4.8-2.4,4.4,2.2z"/>
|
||||
</g>
|
||||
<g id="g922" fill="#CCC">
|
||||
<path id="path924" d="m-38.6,329.6s3.4-17.4,4.2-18.2c0,0,1.8-3.4-1-0.2,0,0-8.8,19.2-12.8,25.8,0,0,8-9.2,9.6-7.4z"/>
|
||||
</g>
|
||||
<g id="g926" fill="#CCC">
|
||||
<path id="path928" d="m-44.4,313s11.6-22.4-10.2,3.4c0,0,11-9.8,10.2-3.4z"/>
|
||||
</g>
|
||||
<g id="g930" fill="#CCC">
|
||||
<path id="path932" d="m-59.8,298.4s4.8-18.8,7.4-18.6l1.6,1.6s-6,9.6-5.4,19.4c0,0-0.6-9.6-3.6-2.4z"/>
|
||||
</g>
|
||||
<g id="g934" fill="#CCC">
|
||||
<path id="path936" d="m270.5,287s-12-10-14.5-13.5c0,0,13.5,18.5,13.5,25.5,0,0,2.5-7.5,1-12z"/>
|
||||
</g>
|
||||
<g id="g938" fill="#CCC">
|
||||
<path id="path940" d="m276,265s-21-15-24.5-22.5c0,0,26.5,29.5,26.5,34,0,0,0.5-9-2-11.5z"/>
|
||||
</g>
|
||||
<g id="g942" fill="#CCC">
|
||||
<path id="path944" d="m293,111s-12-8-13.5-6c0,0,10.5,6.5,13,15,0,0-1.5-9,0.5-9z"/>
|
||||
</g>
|
||||
<g id="g946" fill="#CCC">
|
||||
<path id="path948" d="m301.5,191.5-17.5-12s19,17,19.5,21l-2-9z"/>
|
||||
</g>
|
||||
<g id="g950" stroke="#000">
|
||||
<path id="path952" d="m-89.25,169,22,4.75"/>
|
||||
</g>
|
||||
<g id="g954" stroke="#000">
|
||||
<path id="path956" d="m-39,331s-0.5-3.5-9.5,7"/>
|
||||
</g>
|
||||
<g id="g958" stroke="#000">
|
||||
<path id="path960" d="m-33.5,336s2-6.5-4.5-2"/>
|
||||
</g>
|
||||
<g id="g962" stroke="#000">
|
||||
<path id="path964" d="m20.5,344.5s1.5-11-10,2"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 67 KiB |
BIN
_vendor/src/github.com/h2non/bimg/fixtures/test.webp
Normal file
After Width: | Height: | Size: 30 KiB |
BIN
_vendor/src/github.com/h2non/bimg/fixtures/test_icc_prophoto.jpg
Normal file
After Width: | Height: | Size: 142 KiB |
BIN
_vendor/src/github.com/h2non/bimg/fixtures/test_issue.jpg
Normal file
After Width: | Height: | Size: 44 KiB |
BIN
_vendor/src/github.com/h2non/bimg/fixtures/test_square.jpg
Normal file
After Width: | Height: | Size: 134 KiB |
BIN
_vendor/src/github.com/h2non/bimg/fixtures/transparent.png
Normal file
After Width: | Height: | Size: 57 KiB |
BIN
_vendor/src/github.com/h2non/bimg/fixtures/vertical.jpg
Normal file
After Width: | Height: | Size: 2.9 MiB |
223
_vendor/src/github.com/h2non/bimg/image.go
Normal file
@ -0,0 +1,223 @@
|
||||
package bimg
|
||||
|
||||
// Image provides a simple method DSL to transform a given image held as a byte buffer.
|
||||
type Image struct {
|
||||
buffer []byte
|
||||
}
|
||||
|
||||
// NewImage creates a new Image struct with method DSL.
|
||||
func NewImage(buf []byte) *Image {
|
||||
return &Image{buf}
|
||||
}
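// Editor's sketch (not part of the original commit): a minimal round trip
// with the method DSL, assuming the package-level Read and Write file
// helpers that the test file further below relies on.
func exampleResizeFile(in, out string) error {
	buf, err := Read(in)
	if err != nil {
		return err
	}
	// Resize to a 300x240 canvas; Resize enables Embed, so the aspect
	// ratio is preserved inside that canvas.
	resized, err := NewImage(buf).Resize(300, 240)
	if err != nil {
		return err
	}
	return Write(out, resized)
}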
|
||||
|
||||
// Resize resizes the image to fixed width and height.
|
||||
func (i *Image) Resize(width, height int) ([]byte, error) {
|
||||
options := Options{
|
||||
Width: width,
|
||||
Height: height,
|
||||
Embed: true,
|
||||
}
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// ForceResize resizes with custom size (aspect ratio won't be maintained).
|
||||
func (i *Image) ForceResize(width, height int) ([]byte, error) {
|
||||
options := Options{
|
||||
Width: width,
|
||||
Height: height,
|
||||
Force: true,
|
||||
}
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// ResizeAndCrop resizes the image to fixed width and height with additional crop transformation.
|
||||
func (i *Image) ResizeAndCrop(width, height int) ([]byte, error) {
|
||||
options := Options{
|
||||
Width: width,
|
||||
Height: height,
|
||||
Embed: true,
|
||||
Crop: true,
|
||||
}
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// SmartCrop produces a thumbnail that tries to focus on the most interesting area of the image.
|
||||
func (i *Image) SmartCrop(width, height int) ([]byte, error) {
|
||||
options := Options{
|
||||
Width: width,
|
||||
Height: height,
|
||||
Crop: true,
|
||||
Gravity: GravitySmart,
|
||||
}
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// Extract extracts an area from the image, starting at the given top/left offset, with the given width and height.
|
||||
func (i *Image) Extract(top, left, width, height int) ([]byte, error) {
|
||||
options := Options{
|
||||
Top: top,
|
||||
Left: left,
|
||||
AreaWidth: width,
|
||||
AreaHeight: height,
|
||||
}
|
||||
|
||||
if top == 0 && left == 0 {
|
||||
options.Top = -1
|
||||
}
|
||||
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// Enlarge enlarges the image by width and height. Aspect ratio is maintained.
|
||||
func (i *Image) Enlarge(width, height int) ([]byte, error) {
|
||||
options := Options{
|
||||
Width: width,
|
||||
Height: height,
|
||||
Enlarge: true,
|
||||
}
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// EnlargeAndCrop enlarges the image by width and height with additional crop transformation.
|
||||
func (i *Image) EnlargeAndCrop(width, height int) ([]byte, error) {
|
||||
options := Options{
|
||||
Width: width,
|
||||
Height: height,
|
||||
Enlarge: true,
|
||||
Crop: true,
|
||||
}
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// Crop crops the image to the exact size specified.
|
||||
func (i *Image) Crop(width, height int, gravity Gravity) ([]byte, error) {
|
||||
options := Options{
|
||||
Width: width,
|
||||
Height: height,
|
||||
Gravity: gravity,
|
||||
Crop: true,
|
||||
}
|
||||
return i.Process(options)
|
||||
}
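// Editor's sketch (not part of the original commit): Gravity selects which
// region survives the crop; GravityNorth is one of the package's gravity
// constants and is the one exercised by the tests below.
func exampleCropTop(buf []byte) ([]byte, error) {
	// Keep an 800x600 region anchored to the top edge of the image.
	return NewImage(buf).Crop(800, 600, GravityNorth)
}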
|
||||
|
||||
// CropByWidth crops the image to the given width, deriving the height automatically.
|
||||
func (i *Image) CropByWidth(width int) ([]byte, error) {
|
||||
options := Options{
|
||||
Width: width,
|
||||
Crop: true,
|
||||
}
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// CropByHeight crops the image to the given height, deriving the width automatically.
|
||||
func (i *Image) CropByHeight(height int) ([]byte, error) {
|
||||
options := Options{
|
||||
Height: height,
|
||||
Crop: true,
|
||||
}
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// Thumbnail creates a square thumbnail of the image with the given size in pixels (1:1 aspect ratio).
|
||||
func (i *Image) Thumbnail(pixels int) ([]byte, error) {
|
||||
options := Options{
|
||||
Width: pixels,
|
||||
Height: pixels,
|
||||
Crop: true,
|
||||
Quality: 95,
|
||||
}
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// Watermark adds a text watermark to the given image.
|
||||
func (i *Image) Watermark(w Watermark) ([]byte, error) {
|
||||
options := Options{Watermark: w}
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// WatermarkImage adds an image watermark on top of the given image.
|
||||
func (i *Image) WatermarkImage(w WatermarkImage) ([]byte, error) {
|
||||
options := Options{WatermarkImage: w}
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// Zoom zooms the image by the given factor.
|
||||
// You should normally call Extract() beforehand.
|
||||
func (i *Image) Zoom(factor int) ([]byte, error) {
|
||||
options := Options{Zoom: factor}
|
||||
return i.Process(options)
|
||||
}
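// Editor's sketch (not part of the original commit): as the comment above
// suggests, Zoom is usually paired with a prior Extract. Each call stores its
// result back into the Image buffer, so the zoom operates on the extracted
// area; the test file below expects Zoom(1) to double a 400x300 extract to
// 800x600.
func exampleExtractThenZoom(buf []byte) ([]byte, error) {
	img := NewImage(buf)
	// Take a 400x300 area starting at offset (100, 100).
	if _, err := img.Extract(100, 100, 400, 300); err != nil {
		return nil, err
	}
	// Zoom the extracted area by factor 1.
	return img.Zoom(1)
}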
|
||||
|
||||
// Rotate rotates the image by the given angle in degrees (0, 90, 180 or 270).
|
||||
func (i *Image) Rotate(a Angle) ([]byte, error) {
|
||||
options := Options{Rotate: a}
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// Flip flips the image about the vertical Y axis.
|
||||
func (i *Image) Flip() ([]byte, error) {
|
||||
options := Options{Flip: true}
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// Flop flops the image about the horizontal X axis.
|
||||
func (i *Image) Flop() ([]byte, error) {
|
||||
options := Options{Flop: true}
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// Convert converts the image to another format.
|
||||
func (i *Image) Convert(t ImageType) ([]byte, error) {
|
||||
options := Options{Type: t}
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// Colourspace performs a color space conversion based on the given interpretation.
|
||||
func (i *Image) Colourspace(c Interpretation) ([]byte, error) {
|
||||
options := Options{Interpretation: c}
|
||||
return i.Process(options)
|
||||
}
|
||||
|
||||
// Process processes the image based on the given transformation options,
|
||||
// calling the libvips bindings accordingly and returning the resulting
|
||||
// image buffer.
|
||||
func (i *Image) Process(o Options) ([]byte, error) {
|
||||
image, err := Resize(i.buffer, o)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
i.buffer = image
|
||||
return image, nil
|
||||
}
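// Editor's sketch (not part of the original commit): every helper above is a
// thin wrapper that fills an Options value and delegates to Process, so the
// fields can also be combined directly. The field and constant names used
// here are the same ones the wrappers above rely on.
func exampleProcessOptions(buf []byte) ([]byte, error) {
	opts := Options{
		Width:   300,
		Height:  300,
		Crop:    true,
		Gravity: GravitySmart, // the combination SmartCrop uses
		Quality: 95,
	}
	return NewImage(buf).Process(opts)
}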
|
||||
|
||||
// Metadata returns the image metadata (size, alpha channel, profile, EXIF rotation).
|
||||
func (i *Image) Metadata() (ImageMetadata, error) {
|
||||
return Metadata(i.buffer)
|
||||
}
|
||||
|
||||
// Interpretation gets the image interpretation type.
|
||||
// See: http://www.vips.ecs.soton.ac.uk/supported/current/doc/html/libvips/VipsImage.html#VipsInterpretation
|
||||
func (i *Image) Interpretation() (Interpretation, error) {
|
||||
return ImageInterpretation(i.buffer)
|
||||
}
|
||||
|
||||
// ColourspaceIsSupported checks if the current image
|
||||
// color space is supported.
|
||||
func (i *Image) ColourspaceIsSupported() (bool, error) {
|
||||
return ColourspaceIsSupported(i.buffer)
|
||||
}
|
||||
|
||||
// Type returns the image type format (jpeg, png, webp, tiff).
|
||||
func (i *Image) Type() string {
|
||||
return DetermineImageTypeName(i.buffer)
|
||||
}
|
||||
|
||||
// Size returns the image size as width and height in pixels.
|
||||
func (i *Image) Size() (ImageSize, error) {
|
||||
return Size(i.buffer)
|
||||
}
|
||||
|
||||
// Image returns the current resulting image buffer.
|
||||
func (i *Image) Image() []byte {
|
||||
return i.buffer
|
||||
}
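// Editor's sketch (not part of the original commit): because Process writes
// its result back into the buffer, operations can be chained on a single
// Image value, and Image() returns whatever the last successful call
// produced (compare TestFluentInterface in the test file below).
func exampleFluentChain(buf []byte) ([]byte, error) {
	img := NewImage(buf)
	if _, err := img.CropByWidth(300); err != nil {
		return nil, err
	}
	if _, err := img.Flip(); err != nil {
		return nil, err
	}
	if _, err := img.Convert(PNG); err != nil {
		return nil, err
	}
	return img.Image(), nil
}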
|
496
_vendor/src/github.com/h2non/bimg/image_test.go
Normal file
@ -0,0 +1,496 @@
|
||||
package bimg
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"path"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestImageResize(t *testing.T) {
|
||||
buf, err := initImage("test.jpg").Resize(300, 240)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
|
||||
err = assertSize(buf, 300, 240)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
Write("fixtures/test_resize_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageGifResize(t *testing.T) {
|
||||
_, err := initImage("test.gif").Resize(300, 240)
|
||||
if err == nil {
|
||||
t.Errorf("GIF shouldn't be saved within VIPS")
|
||||
}
|
||||
}
|
||||
|
||||
func TestImagePdfResize(t *testing.T) {
|
||||
_, err := initImage("test.pdf").Resize(300, 240)
|
||||
if err == nil {
|
||||
t.Errorf("PDF cannot be saved within VIPS")
|
||||
}
|
||||
}
|
||||
|
||||
func TestImageSvgResize(t *testing.T) {
|
||||
_, err := initImage("test.svg").Resize(300, 240)
|
||||
if err == nil {
|
||||
t.Errorf("SVG cannot be saved within VIPS")
|
||||
}
|
||||
}
|
||||
|
||||
func TestImageGifToJpeg(t *testing.T) {
|
||||
if VipsMajorVersion >= 8 && VipsMinorVersion > 2 {
|
||||
i := initImage("test.gif")
|
||||
options := Options{
|
||||
Type: JPEG,
|
||||
}
|
||||
buf, err := i.Process(options)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
|
||||
Write("fixtures/test_gif.jpg", buf)
|
||||
}
|
||||
}
|
||||
|
||||
func TestImagePdfToJpeg(t *testing.T) {
|
||||
if VipsMajorVersion >= 8 && VipsMinorVersion > 2 {
|
||||
i := initImage("test.pdf")
|
||||
options := Options{
|
||||
Type: JPEG,
|
||||
}
|
||||
buf, err := i.Process(options)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
|
||||
Write("fixtures/test_pdf.jpg", buf)
|
||||
}
|
||||
}
|
||||
|
||||
func TestImageSvgToJpeg(t *testing.T) {
|
||||
if VipsMajorVersion >= 8 && VipsMinorVersion > 2 {
|
||||
i := initImage("test.svg")
|
||||
options := Options{
|
||||
Type: JPEG,
|
||||
}
|
||||
buf, err := i.Process(options)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
|
||||
Write("fixtures/test_svg.jpg", buf)
|
||||
}
|
||||
}
|
||||
|
||||
func TestImageResizeAndCrop(t *testing.T) {
|
||||
buf, err := initImage("test.jpg").ResizeAndCrop(300, 200)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
|
||||
err = assertSize(buf, 300, 200)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
Write("fixtures/test_resize_crop_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageExtract(t *testing.T) {
|
||||
buf, err := initImage("test.jpg").Extract(100, 100, 300, 200)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %s", err)
|
||||
}
|
||||
|
||||
err = assertSize(buf, 300, 200)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
Write("fixtures/test_extract_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageExtractZero(t *testing.T) {
|
||||
buf, err := initImage("test.jpg").Extract(0, 0, 300, 200)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %s", err)
|
||||
}
|
||||
|
||||
err = assertSize(buf, 300, 200)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
Write("fixtures/test_extract_zero_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageEnlarge(t *testing.T) {
|
||||
buf, err := initImage("test.png").Enlarge(500, 375)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
|
||||
err = assertSize(buf, 500, 375)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
Write("fixtures/test_enlarge_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageEnlargeAndCrop(t *testing.T) {
|
||||
buf, err := initImage("test.png").EnlargeAndCrop(800, 480)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
|
||||
err = assertSize(buf, 800, 480)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
Write("fixtures/test_enlarge_crop_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageCrop(t *testing.T) {
|
||||
buf, err := initImage("test.jpg").Crop(800, 600, GravityNorth)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %s", err)
|
||||
}
|
||||
|
||||
err = assertSize(buf, 800, 600)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
Write("fixtures/test_crop_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageCropByWidth(t *testing.T) {
|
||||
buf, err := initImage("test.jpg").CropByWidth(600)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %s", err)
|
||||
}
|
||||
|
||||
err = assertSize(buf, 600, 1050)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
Write("fixtures/test_crop_width_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageCropByHeight(t *testing.T) {
|
||||
buf, err := initImage("test.jpg").CropByHeight(300)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %s", err)
|
||||
}
|
||||
|
||||
err = assertSize(buf, 1680, 300)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
Write("fixtures/test_crop_height_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageThumbnail(t *testing.T) {
|
||||
buf, err := initImage("test.jpg").Thumbnail(100)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %s", err)
|
||||
}
|
||||
|
||||
err = assertSize(buf, 100, 100)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
Write("fixtures/test_thumbnail_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageWatermark(t *testing.T) {
|
||||
image := initImage("test.jpg")
|
||||
_, err := image.Crop(800, 600, GravityNorth)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
|
||||
buf, err := image.Watermark(Watermark{
|
||||
Text: "Copy me if you can",
|
||||
Opacity: 0.5,
|
||||
Width: 200,
|
||||
DPI: 100,
|
||||
Background: Color{255, 255, 255},
|
||||
})
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
err = assertSize(buf, 800, 600)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
if DetermineImageType(buf) != JPEG {
|
||||
t.Fatal("Image is not jpeg")
|
||||
}
|
||||
|
||||
Write("fixtures/test_watermark_text_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageWatermarkWithImage(t *testing.T) {
|
||||
image := initImage("test.jpg")
|
||||
watermark, _ := imageBuf("transparent.png")
|
||||
|
||||
_, err := image.Crop(800, 600, GravityNorth)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
|
||||
buf, err := image.WatermarkImage(WatermarkImage{Left: 100, Top: 100, Buf: watermark})
|
||||
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
err = assertSize(buf, 800, 600)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
if DetermineImageType(buf) != JPEG {
|
||||
t.Fatal("Image is not jpeg")
|
||||
}
|
||||
|
||||
Write("fixtures/test_watermark_image_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageWatermarkNoReplicate(t *testing.T) {
|
||||
image := initImage("test.jpg")
|
||||
_, err := image.Crop(800, 600, GravityNorth)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %s", err)
|
||||
}
|
||||
|
||||
buf, err := image.Watermark(Watermark{
|
||||
Text: "Copy me if you can",
|
||||
Opacity: 0.5,
|
||||
Width: 200,
|
||||
DPI: 100,
|
||||
NoReplicate: true,
|
||||
Background: Color{255, 255, 255},
|
||||
})
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
err = assertSize(buf, 800, 600)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
if DetermineImageType(buf) != JPEG {
|
||||
t.Fatal("Image is not jpeg")
|
||||
}
|
||||
|
||||
Write("fixtures/test_watermark_replicate_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageZoom(t *testing.T) {
|
||||
image := initImage("test.jpg")
|
||||
|
||||
_, err := image.Extract(100, 100, 400, 300)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot extract the image: %s", err)
|
||||
}
|
||||
|
||||
buf, err := image.Zoom(1)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %s", err)
|
||||
}
|
||||
|
||||
err = assertSize(buf, 800, 600)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
Write("fixtures/test_zoom_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageFlip(t *testing.T) {
|
||||
buf, err := initImage("test.jpg").Flip()
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
Write("fixtures/test_flip_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageFlop(t *testing.T) {
|
||||
buf, err := initImage("test.jpg").Flop()
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
Write("fixtures/test_flop_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageRotate(t *testing.T) {
|
||||
buf, err := initImage("test_flip_out.jpg").Rotate(90)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
Write("fixtures/test_image_rotate_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageConvert(t *testing.T) {
|
||||
buf, err := initImage("test.jpg").Convert(PNG)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
Write("fixtures/test_image_convert_out.png", buf)
|
||||
}
|
||||
|
||||
func TestTransparentImageConvert(t *testing.T) {
|
||||
image := initImage("transparent.png")
|
||||
options := Options{
|
||||
Type: JPEG,
|
||||
Background: Color{255, 255, 255},
|
||||
}
|
||||
buf, err := image.Process(options)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
Write("fixtures/test_transparent_image_convert_out.jpg", buf)
|
||||
}
|
||||
|
||||
func TestImageMetadata(t *testing.T) {
|
||||
data, err := initImage("test.png").Metadata()
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
if data.Alpha != true {
|
||||
t.Fatal("Invalid alpha channel")
|
||||
}
|
||||
if data.Size.Width != 400 {
|
||||
t.Fatal("Invalid width size")
|
||||
}
|
||||
if data.Type != "png" {
|
||||
t.Fatal("Invalid image type")
|
||||
}
|
||||
}
|
||||
|
||||
func TestInterpretation(t *testing.T) {
|
||||
interpretation, err := initImage("test.jpg").Interpretation()
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
if interpretation != InterpretationSRGB {
|
||||
t.Errorf("Invalid interpretation: %d", interpretation)
|
||||
}
|
||||
}
|
||||
|
||||
func TestImageColourspace(t *testing.T) {
|
||||
tests := []struct {
|
||||
file string
|
||||
interpretation Interpretation
|
||||
}{
|
||||
{"test.jpg", InterpretationSRGB},
|
||||
{"test.jpg", InterpretationBW},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
buf, err := initImage(test.file).Colourspace(test.interpretation)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
|
||||
interpretation, err := ImageInterpretation(buf)
|
||||
if interpretation != test.interpretation {
|
||||
t.Errorf("Invalid colourspace")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestImageColourspaceIsSupported(t *testing.T) {
|
||||
supported, err := initImage("test.jpg").ColourspaceIsSupported()
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
if supported != true {
|
||||
t.Errorf("Non-supported colourspace")
|
||||
}
|
||||
}
|
||||
|
||||
func TestFluentInterface(t *testing.T) {
|
||||
image := initImage("test.jpg")
|
||||
_, err := image.CropByWidth(300)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
|
||||
_, err = image.Flip()
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
|
||||
_, err = image.Convert(PNG)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
|
||||
data, _ := image.Metadata()
|
||||
if data.Alpha != false {
|
||||
t.Fatal("Invalid alpha channel")
|
||||
}
|
||||
if data.Size.Width != 300 {
|
||||
t.Fatal("Invalid width size")
|
||||
}
|
||||
if data.Type != "png" {
|
||||
t.Fatal("Invalid image type")
|
||||
}
|
||||
|
||||
Write("fixtures/test_image_fluent_out.png", image.Image())
|
||||
}
|
||||
|
||||
func TestImageSmartCrop(t *testing.T) {
|
||||
|
||||
if !(VipsMajorVersion >= 8 && VipsMinorVersion > 4) {
|
||||
t.Skipf("Skipping this test, libvips doesn't meet version requirement %s > 8.4", VipsVersion)
|
||||
}
|
||||
|
||||
i := initImage("northern_cardinal_bird.jpg")
|
||||
buf, err := i.SmartCrop(300, 300)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
|
||||
err = assertSize(buf, 300, 300)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
Write("fixtures/test_smart_crop.jpg", buf)
|
||||
}
|
||||
|
||||
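// initImage loads a fixture image and wraps it in a new Image instance.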
func initImage(file string) *Image {
|
||||
buf, _ := imageBuf(file)
|
||||
return NewImage(buf)
|
||||
}
|
||||
|
||||
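// imageBuf reads a fixture image from the fixtures directory.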
func imageBuf(file string) ([]byte, error) {
|
||||
return Read(path.Join("fixtures", file))
|
||||
}
|
||||
|
||||
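// assertSize decodes the buffer and verifies the image matches the expected width and height.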
func assertSize(buf []byte, width, height int) error {
|
||||
size, err := NewImage(buf).Size()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if size.Width != width || size.Height != height {
|
||||
return fmt.Errorf("Invalid image size: %dx%d", size.Width, size.Height)
|
||||
}
|
||||
return nil
|
||||
}
|
77
_vendor/src/github.com/h2non/bimg/metadata.go
Normal file
@ -0,0 +1,77 @@
|
||||
package bimg
|
||||
|
||||
/*
|
||||
#cgo pkg-config: vips
|
||||
#include "vips/vips.h"
|
||||
*/
|
||||
import "C"
|
||||
|
||||
// ImageSize represents the image width and height values
|
||||
type ImageSize struct {
|
||||
Width int
|
||||
Height int
|
||||
}
|
||||
|
||||
// ImageMetadata represents the basic metadata fields
|
||||
type ImageMetadata struct {
|
||||
Orientation int
|
||||
Channels int
|
||||
Alpha bool
|
||||
Profile bool
|
||||
Type string
|
||||
Space string
|
||||
Colourspace string
|
||||
Size ImageSize
|
||||
}
|
||||
|
||||
// Size returns the image size by width and height pixels.
|
||||
func Size(buf []byte) (ImageSize, error) {
|
||||
metadata, err := Metadata(buf)
|
||||
if err != nil {
|
||||
return ImageSize{}, err
|
||||
}
|
||||
|
||||
return ImageSize{
|
||||
Width: int(metadata.Size.Width),
|
||||
Height: int(metadata.Size.Height),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// ColourspaceIsSupported checks if the image colourspace is supported by libvips.
|
||||
func ColourspaceIsSupported(buf []byte) (bool, error) {
|
||||
return vipsColourspaceIsSupportedBuffer(buf)
|
||||
}
|
||||
|
||||
// ImageInterpretation returns the image interpretation type.
|
||||
// See: http://www.vips.ecs.soton.ac.uk/supported/current/doc/html/libvips/VipsImage.html#VipsInterpretation
|
||||
func ImageInterpretation(buf []byte) (Interpretation, error) {
|
||||
return vipsInterpretationBuffer(buf)
|
||||
}
|
||||
|
||||
// Metadata returns the image metadata (size, type, alpha channel, profile, EXIF orientation...).
|
||||
func Metadata(buf []byte) (ImageMetadata, error) {
|
||||
defer C.vips_thread_shutdown()
|
||||
|
||||
image, imageType, err := vipsRead(buf)
|
||||
if err != nil {
|
||||
return ImageMetadata{}, err
|
||||
}
|
||||
defer C.g_object_unref(C.gpointer(image))
|
||||
|
||||
size := ImageSize{
|
||||
Width: int(image.Xsize),
|
||||
Height: int(image.Ysize),
|
||||
}
|
||||
|
||||
metadata := ImageMetadata{
|
||||
Size: size,
|
||||
Channels: int(image.Bands),
|
||||
Orientation: vipsExifOrientation(image),
|
||||
Alpha: vipsHasAlpha(image),
|
||||
Profile: vipsHasProfile(image),
|
||||
Space: vipsSpace(image),
|
||||
Type: ImageTypeName(imageType),
|
||||
}
|
||||
|
||||
return metadata, nil
|
||||
}
|
124
_vendor/src/github.com/h2non/bimg/metadata_test.go
Normal file
@ -0,0 +1,124 @@
|
||||
package bimg
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSize(t *testing.T) {
|
||||
files := []struct {
|
||||
name string
|
||||
width int
|
||||
height int
|
||||
}{
|
||||
{"test.jpg", 1680, 1050},
|
||||
{"test.png", 400, 300},
|
||||
{"test.webp", 550, 368},
|
||||
}
|
||||
for _, file := range files {
|
||||
size, err := Size(readFile(file.name))
|
||||
if err != nil {
|
||||
t.Fatalf("Cannot read the image: %#v", err)
|
||||
}
|
||||
|
||||
if size.Width != file.width || size.Height != file.height {
|
||||
t.Fatalf("Unexpected image size: %dx%d", size.Width, size.Height)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestMetadata(t *testing.T) {
|
||||
files := []struct {
|
||||
name string
|
||||
format string
|
||||
orientation int
|
||||
alpha bool
|
||||
profile bool
|
||||
space string
|
||||
}{
|
||||
{"test.jpg", "jpeg", 0, false, false, "srgb"},
|
||||
{"test_icc_prophoto.jpg", "jpeg", 0, false, true, "srgb"},
|
||||
{"test.png", "png", 0, true, false, "srgb"},
|
||||
{"test.webp", "webp", 0, false, false, "srgb"},
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
metadata, err := Metadata(readFile(file.name))
|
||||
if err != nil {
|
||||
t.Fatalf("Cannot read the image: %s -> %s", file.name, err)
|
||||
}
|
||||
|
||||
if metadata.Type != file.format {
|
||||
t.Fatalf("Unexpected image format: %s", file.format)
|
||||
}
|
||||
if metadata.Orientation != file.orientation {
|
||||
t.Fatalf("Unexpected image orientation: %d != %d", metadata.Orientation, file.orientation)
|
||||
}
|
||||
if metadata.Alpha != file.alpha {
|
||||
t.Fatalf("Unexpected image alpha: %t != %t", metadata.Alpha, file.alpha)
|
||||
}
|
||||
if metadata.Profile != file.profile {
|
||||
t.Fatalf("Unexpected image profile: %t != %t", metadata.Profile, file.profile)
|
||||
}
|
||||
if metadata.Space != file.space {
|
||||
t.Fatalf("Unexpected image profile: %t != %t", metadata.Profile, file.profile)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestImageInterpretation(t *testing.T) {
|
||||
files := []struct {
|
||||
name string
|
||||
interpretation Interpretation
|
||||
}{
|
||||
{"test.jpg", InterpretationSRGB},
|
||||
{"test.png", InterpretationSRGB},
|
||||
{"test.webp", InterpretationSRGB},
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
interpretation, err := ImageInterpretation(readFile(file.name))
|
||||
if err != nil {
|
||||
t.Fatalf("Cannot read the image: %s -> %s", file.name, err)
|
||||
}
|
||||
if interpretation != file.interpretation {
|
||||
t.Fatalf("Unexpected image interpretation")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestColourspaceIsSupported(t *testing.T) {
|
||||
files := []struct {
|
||||
name string
|
||||
}{
|
||||
{"test.jpg"},
|
||||
{"test.png"},
|
||||
{"test.webp"},
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
supported, err := ColourspaceIsSupported(readFile(file.name))
|
||||
if err != nil {
|
||||
t.Fatalf("Cannot read the image: %s -> %s", file.name, err)
|
||||
}
|
||||
if supported != true {
|
||||
t.Fatalf("Unsupported image colourspace")
|
||||
}
|
||||
}
|
||||
|
||||
supported, err := initImage("test.jpg").ColourspaceIsSupported()
|
||||
if err != nil {
|
||||
t.Errorf("Cannot process the image: %#v", err)
|
||||
}
|
||||
if supported != true {
|
||||
t.Errorf("Non-supported colourspace")
|
||||
}
|
||||
}
|
||||
|
||||
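// readFile loads a fixture file from the fixtures directory, ignoring any read errors.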
func readFile(file string) []byte {
|
||||
data, _ := os.Open(path.Join("fixtures", file))
|
||||
buf, _ := ioutil.ReadAll(data)
|
||||
return buf
|
||||
}
|
218
_vendor/src/github.com/h2non/bimg/options.go
Normal file
@ -0,0 +1,218 @@
|
||||
package bimg
|
||||
|
||||
/*
|
||||
#cgo pkg-config: vips
|
||||
#include "vips/vips.h"
|
||||
*/
|
||||
import "C"
|
||||
|
||||
const (
|
||||
// Quality defines the default JPEG quality to be used.
|
||||
Quality = 80
|
||||
// MaxSize defines the maximum pixels width or height supported.
|
||||
MaxSize = 16383
|
||||
)
|
||||
|
||||
// Gravity represents the image gravity value.
|
||||
type Gravity int
|
||||
|
||||
const (
|
||||
// GravityCentre represents the centre value used for image gravity orientation.
|
||||
GravityCentre Gravity = iota
|
||||
// GravityNorth represents the north value used for image gravity orientation.
|
||||
GravityNorth
|
||||
// GravityEast represents the east value used for image gravity orientation.
|
||||
GravityEast
|
||||
// GravitySouth represents the south value used for image gravity orientation.
|
||||
GravitySouth
|
||||
// GravityWest represents the west value used for image gravity orientation.
|
||||
GravityWest
|
||||
// GravitySmart enables libvips Smart Crop algorithm for image gravity orientation.
|
||||
GravitySmart
|
||||
)
|
||||
|
||||
// Interpolator represents the image interpolation value.
|
||||
type Interpolator int
|
||||
|
||||
const (
|
||||
// Bicubic interpolation value.
|
||||
Bicubic Interpolator = iota
|
||||
// Bilinear interpolation value.
|
||||
Bilinear
|
||||
// Nohalo interpolation value.
|
||||
Nohalo
|
||||
)
|
||||
|
||||
var interpolations = map[Interpolator]string{
|
||||
Bicubic: "bicubic",
|
||||
Bilinear: "bilinear",
|
||||
Nohalo: "nohalo",
|
||||
}
|
||||
|
||||
func (i Interpolator) String() string {
|
||||
return interpolations[i]
|
||||
}
|
||||
|
||||
// Angle represents the image rotation angle value.
|
||||
type Angle int
|
||||
|
||||
const (
|
||||
// D0 represents the rotation angle 0 degrees.
|
||||
D0 Angle = 0
|
||||
// D45 represents the rotation angle 45 degrees.
|
||||
D45 Angle = 45
|
||||
// D90 represents the rotation angle 90 degrees.
|
||||
D90 Angle = 90
|
||||
// D135 represents the rotation angle 135 degrees.
|
||||
D135 Angle = 135
|
||||
// D180 represents the rotation angle 180 degrees.
|
||||
D180 Angle = 180
|
||||
// D235 represents the rotation angle 235 degrees.
|
||||
D235 Angle = 235
|
||||
// D270 represents the rotation angle 270 degrees.
|
||||
D270 Angle = 270
|
||||
// D315 represents the rotation angle 315 degrees.
|
||||
D315 Angle = 315
|
||||
)
|
||||
|
||||
// Direction represents the image direction value.
|
||||
type Direction int
|
||||
|
||||
const (
|
||||
// Horizontal represents the horizontal image direction value.
|
||||
Horizontal Direction = C.VIPS_DIRECTION_HORIZONTAL
|
||||
// Vertical represents the vertical image direction value.
|
||||
Vertical Direction = C.VIPS_DIRECTION_VERTICAL
|
||||
)
|
||||
|
||||
// Interpretation represents the image interpretation type.
|
||||
// See: http://www.vips.ecs.soton.ac.uk/supported/current/doc/html/libvips/VipsImage.html#VipsInterpretation
|
||||
type Interpretation int
|
||||
|
||||
const (
|
||||
// InterpretationError points to the libvips interpretation error type.
|
||||
InterpretationError Interpretation = C.VIPS_INTERPRETATION_ERROR
|
||||
// InterpretationMultiband points to its libvips interpretation equivalent type.
|
||||
InterpretationMultiband Interpretation = C.VIPS_INTERPRETATION_MULTIBAND
|
||||
// InterpretationBW points to its libvips interpretation equivalent type.
|
||||
InterpretationBW Interpretation = C.VIPS_INTERPRETATION_B_W
|
||||
// InterpretationCMYK points to its libvips interpretation equivalent type.
|
||||
InterpretationCMYK Interpretation = C.VIPS_INTERPRETATION_CMYK
|
||||
// InterpretationRGB points to its libvips interpretation equivalent type.
|
||||
InterpretationRGB Interpretation = C.VIPS_INTERPRETATION_RGB
|
||||
// InterpretationSRGB points to its libvips interpretation equivalent type.
|
||||
InterpretationSRGB Interpretation = C.VIPS_INTERPRETATION_sRGB
|
||||
// InterpretationRGB16 points to its libvips interpretation equivalent type.
|
||||
InterpretationRGB16 Interpretation = C.VIPS_INTERPRETATION_RGB16
|
||||
// InterpretationGREY16 points to its libvips interpretation equivalent type.
|
||||
InterpretationGREY16 Interpretation = C.VIPS_INTERPRETATION_GREY16
|
||||
// InterpretationScRGB points to its libvips interpretation equivalent type.
|
||||
InterpretationScRGB Interpretation = C.VIPS_INTERPRETATION_scRGB
|
||||
// InterpretationLAB points to its libvips interpretation equivalent type.
|
||||
InterpretationLAB Interpretation = C.VIPS_INTERPRETATION_LAB
|
||||
// InterpretationXYZ points to its libvips interpretation equivalent type.
|
||||
InterpretationXYZ Interpretation = C.VIPS_INTERPRETATION_XYZ
|
||||
)
|
||||
|
||||
// Extend represents the image extend mode, used when the edges
|
||||
// of an image are extended, you can specify how you want the extension done.
|
||||
// See: http://www.vips.ecs.soton.ac.uk/supported/8.4/doc/html/libvips/libvips-conversion.html#VIPS-EXTEND-BACKGROUND:CAPS
|
||||
type Extend int
|
||||
|
||||
const (
|
||||
// ExtendBlack extends the image with black (all 0) pixels.
|
||||
ExtendBlack Extend = C.VIPS_EXTEND_BLACK
|
||||
// ExtendCopy copies the image edges.
|
||||
ExtendCopy Extend = C.VIPS_EXTEND_COPY
|
||||
// ExtendRepeat repeats the whole image.
|
||||
ExtendRepeat Extend = C.VIPS_EXTEND_REPEAT
|
||||
// ExtendMirror mirrors the whole image.
|
||||
ExtendMirror Extend = C.VIPS_EXTEND_MIRROR
|
||||
// ExtendWhite extends the image with white (all bits set) pixels.
|
||||
ExtendWhite Extend = C.VIPS_EXTEND_WHITE
|
||||
// ExtendBackground extends the image with the colour from the background property.
|
||||
ExtendBackground Extend = C.VIPS_EXTEND_BACKGROUND
|
||||
// ExtendLast extends the image with the last pixel.
|
||||
ExtendLast Extend = C.VIPS_EXTEND_LAST
|
||||
)
|
||||
|
||||
// WatermarkFont defines the default watermark font to be used.
|
||||
var WatermarkFont = "sans 10"
|
||||
|
||||
// Color represents a traditional RGB color scheme.
|
||||
type Color struct {
|
||||
R, G, B uint8
|
||||
}
|
||||
|
||||
// ColorBlack is a shortcut to black RGB color representation.
|
||||
var ColorBlack = Color{0, 0, 0}
|
||||
|
||||
// Watermark represents the text-based watermark supported options.
|
||||
type Watermark struct {
|
||||
Width int
|
||||
DPI int
|
||||
Margin int
|
||||
Opacity float32
|
||||
NoReplicate bool
|
||||
Text string
|
||||
Font string
|
||||
Background Color
|
||||
}
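// An illustrative text watermark configuration, mirroring the values used in
// the package tests:
//
//	Watermark{Text: "Copy me if you can", Opacity: 0.5, Width: 200, DPI: 100, Background: Color{255, 255, 255}}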
|
||||
|
||||
// WatermarkImage represents the image-based watermark supported options.
|
||||
type WatermarkImage struct {
|
||||
Left int
|
||||
Top int
|
||||
Buf []byte
|
||||
Opacity float32
|
||||
}
|
||||
|
||||
// GaussianBlur represents the gaussian image transformation values.
|
||||
type GaussianBlur struct {
|
||||
Sigma float64
|
||||
MinAmpl float64
|
||||
}
|
||||
|
||||
// Sharpen represents the image sharp transformation options.
|
||||
type Sharpen struct {
|
||||
Radius int
|
||||
X1 float64
|
||||
Y2 float64
|
||||
Y3 float64
|
||||
M1 float64
|
||||
M2 float64
|
||||
}
|
||||
|
||||
// Options represents the supported image transformation options.
|
||||
type Options struct {
|
||||
Height int
|
||||
Width int
|
||||
AreaHeight int
|
||||
AreaWidth int
|
||||
Top int
|
||||
Left int
|
||||
Quality int
|
||||
Compression int
|
||||
Zoom int
|
||||
Crop bool
|
||||
SmartCrop bool // Deprecated
|
||||
Enlarge bool
|
||||
Embed bool
|
||||
Flip bool
|
||||
Flop bool
|
||||
Force bool
|
||||
NoAutoRotate bool
|
||||
NoProfile bool
|
||||
Interlace bool
|
||||
Extend Extend
|
||||
Rotate Angle
|
||||
Background Color
|
||||
Gravity Gravity
|
||||
Watermark Watermark
|
||||
WatermarkImage WatermarkImage
|
||||
Type ImageType
|
||||
Interpolator Interpolator
|
||||
Interpretation Interpretation
|
||||
GaussianBlur GaussianBlur
|
||||
Sharpen Sharpen
|
||||
}
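// An illustrative use of these options, mirroring the package tests
// (Read and Resize are the package-level helpers defined elsewhere in this package):
//
//	buf, _ := Read("fixtures/test.jpg")
//	newImg, err := Resize(buf, Options{Width: 800, Height: 600})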
|
302
_vendor/src/github.com/h2non/bimg/preinstall.sh
Normal file
@ -0,0 +1,302 @@
|
||||
#!/bin/bash
|
||||
|
||||
vips_version_minimum=8.4.2
|
||||
vips_version_latest_major_minor=8.4
|
||||
vips_version_latest_patch=2
|
||||
|
||||
openslide_version_minimum=3.4.0
|
||||
openslide_version_latest_major_minor=3.4
|
||||
openslide_version_latest_patch=1
|
||||
|
||||
install_libvips_from_source() {
|
||||
echo "Compiling libvips $vips_version_latest_major_minor.$vips_version_latest_patch from source"
|
||||
curl -O http://www.vips.ecs.soton.ac.uk/supported/$vips_version_latest_major_minor/vips-$vips_version_latest_major_minor.$vips_version_latest_patch.tar.gz
|
||||
tar zvxf vips-$vips_version_latest_major_minor.$vips_version_latest_patch.tar.gz
|
||||
cd vips-$vips_version_latest_major_minor.$vips_version_latest_patch
|
||||
CXXFLAGS="-D_GLIBCXX_USE_CXX11_ABI=0" ./configure --disable-debug --disable-docs --disable-static --disable-introspection --disable-dependency-tracking --enable-cxx=yes --without-python --without-orc --without-fftw $1
|
||||
make
|
||||
make install
|
||||
cd ..
|
||||
rm -rf vips-$vips_version_latest_major_minor.$vips_version_latest_patch
|
||||
rm vips-$vips_version_latest_major_minor.$vips_version_latest_patch.tar.gz
|
||||
ldconfig
|
||||
echo "Installed libvips $(PKG_CONFIG_PATH=$PKG_CONFIG_PATH:/usr/local/lib/pkgconfig:/usr/lib/pkgconfig pkg-config --modversion vips)"
|
||||
}
|
||||
|
||||
install_libopenslide_from_source() {
|
||||
echo "Compiling openslide $openslide_version_latest_major_minor.$openslide_version_latest_patch from source"
|
||||
curl -O -L https://github.com/openslide/openslide/releases/download/v$openslide_version_latest_major_minor.$openslide_version_latest_patch/openslide-$openslide_version_latest_major_minor.$openslide_version_latest_patch.tar.gz
|
||||
tar xzvf openslide-$openslide_version_latest_major_minor.$openslide_version_latest_patch.tar.gz
|
||||
cd openslide-$openslide_version_latest_major_minor.$openslide_version_latest_patch
|
||||
PKG_CONFIG_PATH=$pkg_config_path ./configure $1
|
||||
make
|
||||
make install
|
||||
cd ..
|
||||
rm -rf openslide-$openslide_version_latest_major_minor.$openslide_version_latest_patch
|
||||
rm openslide-$openslide_version_latest_major_minor.$openslide_version_latest_patch.tar.gz
|
||||
ldconfig
|
||||
echo "Installed libopenslide $openslide_version_latest_major_minor.$openslide_version_latest_patch"
|
||||
}
|
||||
|
||||
sorry() {
|
||||
echo "Sorry, I don't yet know how to install lib$1 on $2"
|
||||
exit 1
|
||||
}
|
||||
|
||||
pkg_config_path="$PKG_CONFIG_PATH:/usr/local/lib/pkgconfig:/usr/lib/pkgconfig"
|
||||
|
||||
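# check_if_library_exists <name> <minimum-version>: returns exit status 1 when
# pkg-config finds lib<name> at or above the minimum version, 0 otherwise.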
check_if_library_exists() {
|
||||
PKG_CONFIG_PATH=$pkg_config_path pkg-config --exists $1
|
||||
if [ $? -eq 0 ]; then
|
||||
version_found=$(PKG_CONFIG_PATH=$pkg_config_path pkg-config --modversion $1)
|
||||
PKG_CONFIG_PATH=$pkg_config_path pkg-config --atleast-version=$2 $1
|
||||
if [ $? -eq 0 ]; then
|
||||
# Found a suitable version of lib$1
|
||||
echo "Found lib$1 $version_found"
|
||||
return 1
|
||||
fi
|
||||
echo "Found lib$1 $version_found but require $2"
|
||||
else
|
||||
echo "Could not find lib$1 using a PKG_CONFIG_PATH of '$pkg_config_path'"
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
enable_openslide=0
|
||||
# Is libvips already installed, and is it at least the minimum required version?
|
||||
if [ $# -eq 1 ]; then
|
||||
if [ "$1" = "--with-openslide" ]; then
|
||||
echo "Installing vips with openslide support"
|
||||
enable_openslide=1
|
||||
else
|
||||
echo "Sorry, $1 is not supported. Did you mean --with-openslide?"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if ! type pkg-config >/dev/null; then
|
||||
sorry "vips" "a system without pkg-config"
|
||||
fi
|
||||
|
||||
openslide_exists=0
|
||||
if [ $enable_openslide -eq 1 ]; then
|
||||
check_if_library_exists "openslide" "$openslide_version_minimum"
|
||||
openslide_exists=$?
|
||||
fi
|
||||
|
||||
check_if_library_exists "vips" "$vips_version_minimum"
|
||||
vips_exists=$?
|
||||
if [ $vips_exists -eq 1 ] && [ $enable_openslide -eq 1 ]; then
|
||||
if [ $openslide_exists -eq 1 ]; then
|
||||
# Check if vips compiled with openslide support
|
||||
vips_with_openslide=`vips list classes | grep -i opensli`
|
||||
if [ -z $vips_with_openslide ]; then
|
||||
echo "Vips compiled without openslide support."
|
||||
else
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
elif [ $vips_exists -eq 1 ] && [ $enable_openslide -eq 0 ]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Verify root/sudo access
|
||||
if [ "$(id -u)" -ne "0" ]; then
|
||||
echo "Sorry, I need root/sudo access to continue"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Deprecation warning
|
||||
if [ "$(arch)" == "x86_64" ]; then
|
||||
echo "This script is no longer required on most 64-bit Linux systems when using sharp v0.12.0+"
|
||||
fi
|
||||
|
||||
# OS-specific installations of libopenslide follows
|
||||
# Either openslide does not exist, or vips is installed without openslide support
|
||||
if [ $enable_openslide -eq 1 ] && [ -z $vips_with_openslide ] && [ $openslide_exists -eq 0 ]; then
|
||||
if [ -f /etc/debian_version ]; then
|
||||
# Debian Linux
|
||||
DISTRO=$(lsb_release -c -s)
|
||||
echo "Detected Debian Linux '$DISTRO'"
|
||||
case "$DISTRO" in
|
||||
jessie|vivid|wily|xenial)
|
||||
# Debian 8, Ubuntu 15
|
||||
echo "Installing libopenslide via apt-get"
|
||||
apt-get install -y libopenslide-dev
|
||||
;;
|
||||
trusty|utopic|qiana|rebecca|rafaela|freya|rosa|sarah|serena)
|
||||
# Ubuntu 14, Mint 17+
|
||||
echo "Installing libopenslide dependencies via apt-get"
|
||||
apt-get install -y automake build-essential curl zlib1g-dev libopenjpeg-dev libpng12-dev libjpeg-dev libtiff5-dev libgdk-pixbuf2.0-dev libxml2-dev libsqlite3-dev libcairo2-dev libglib2.0-dev sqlite3 libsqlite3-dev
|
||||
install_libopenslide_from_source
|
||||
;;
|
||||
precise|wheezy|maya)
|
||||
# Debian 7, Ubuntu 12.04, Mint 13
|
||||
echo "Installing libopenslide dependencies via apt-get"
|
||||
apt-get install -y automake build-essential curl zlib1g-dev libopenjpeg-dev libpng12-dev libjpeg-dev libtiff5-dev libgdk-pixbuf2.0-dev libxml2-dev libsqlite3-dev libcairo2-dev libglib2.0-dev sqlite3 libsqlite3-dev
|
||||
install_libopenslide_from_source
|
||||
;;
|
||||
*)
|
||||
# Unsupported Debian-based OS
|
||||
sorry "openslide" "Debian-based $DISTRO"
|
||||
;;
|
||||
esac
|
||||
elif [ -f /etc/redhat-release ]; then
|
||||
# Red Hat Linux
|
||||
RELEASE=$(cat /etc/redhat-release)
|
||||
echo "Detected Red Hat Linux '$RELEASE'"
|
||||
case $RELEASE in
|
||||
"Red Hat Enterprise Linux release 7."*|"CentOS Linux release 7."*|"Scientific Linux release 7."*)
|
||||
# RHEL/CentOS 7
|
||||
echo "Installing libopenslide dependencies via yum"
|
||||
yum groupinstall -y "Development Tools"
|
||||
yum install -y tar curl libpng-devel libjpeg-devel libxml2-devel zlib-devel openjpeg-devel libtiff-devel gdk-pixbuf2-devel sqlite-devel cairo-devel glib2-devel
|
||||
install_libopenslide_from_source "--prefix=/usr"
|
||||
;;
|
||||
"Red Hat Enterprise Linux release 6."*|"CentOS release 6."*|"Scientific Linux release 6."*)
|
||||
# RHEL/CentOS 6
|
||||
echo "Installing libopenslide dependencies via yum"
|
||||
yum groupinstall -y "Development Tools"
|
||||
yum install -y tar curl libpng-devel libjpeg-devel libxml2-devel zlib-devel openjpeg-devel libtiff-devel gdk-pixbuf2-devel sqlite-devel cairo-devel glib2-devel
|
||||
install_libopenslide_from_source "--prefix=/usr"
|
||||
;;
|
||||
"Fedora release 21 "*|"Fedora release 22 "*)
|
||||
# Fedora 21, 22
|
||||
echo "Installing libopenslide via yum"
|
||||
yum install -y openslide-devel
|
||||
;;
|
||||
*)
|
||||
# Unsupported RHEL-based OS
|
||||
sorry "openslide" "$RELEASE"
|
||||
;;
|
||||
esac
|
||||
elif [ -f /etc/os-release ]; then
|
||||
RELEASE=$(cat /etc/os-release | grep VERSION)
|
||||
echo "Detected OpenSuse Linux '$RELEASE'"
|
||||
case $RELEASE in
|
||||
*"13.2"*)
|
||||
echo "Installing libopenslide via zypper"
|
||||
zypper --gpg-auto-import-keys install -y libopenslide-devel
|
||||
;;
|
||||
esac
|
||||
elif [ -f /etc/SuSE-brand ]; then
|
||||
RELEASE=$(cat /etc/SuSE-brand | grep VERSION)
|
||||
echo "Detected OpenSuse Linux '$RELEASE'"
|
||||
case $RELEASE in
|
||||
*"13.1")
|
||||
echo "Installing libopenslide dependencies via zypper"
|
||||
zypper --gpg-auto-import-keys install -y --type pattern devel_basis
|
||||
zypper --gpg-auto-import-keys install -y tar curl libpng16-devel libjpeg-turbo libjpeg8-devel libxml2-devel zlib-devel openjpeg-devel libtiff-devel libgdk_pixbuf-2_0-0 sqlite3-devel cairo-devel glib2-devel
|
||||
install_libopenslide_from_source
|
||||
;;
|
||||
esac
|
||||
else
|
||||
# Unsupported OS
|
||||
sorry "openslide" "$(uname -a)"
|
||||
fi
|
||||
fi
|
||||
|
||||
# OS-specific installations of libvips follows
|
||||
|
||||
if [ -f /etc/debian_version ]; then
|
||||
# Debian Linux
|
||||
DISTRO=$(lsb_release -c -s)
|
||||
echo "Detected Debian Linux '$DISTRO'"
|
||||
case "$DISTRO" in
|
||||
jessie|trusty|utopic|vivid|wily|xenial|qiana|rebecca|rafaela|freya|rosa|sarah|serena)
|
||||
# Debian 8, Ubuntu 14.04+, Mint 17+
|
||||
echo "Installing libvips dependencies via apt-get"
|
||||
apt-get install -y automake build-essential gobject-introspection gtk-doc-tools libglib2.0-dev libjpeg-dev libpng12-dev libwebp-dev libtiff5-dev libexif-dev libgsf-1-dev liblcms2-dev libxml2-dev swig libmagickcore-dev curl
|
||||
install_libvips_from_source
|
||||
;;
|
||||
precise|wheezy|maya)
|
||||
# Debian 7, Ubuntu 12.04, Mint 13
|
||||
echo "Installing libvips dependencies via apt-get"
|
||||
add-apt-repository -y ppa:lyrasis/precise-backports
|
||||
apt-get update
|
||||
apt-get install -y automake build-essential gobject-introspection gtk-doc-tools libglib2.0-dev libjpeg-dev libpng12-dev libwebp-dev libtiff4-dev libexif-dev libgsf-1-dev liblcms2-dev libxml2-dev swig libmagickcore-dev curl
|
||||
install_libvips_from_source
|
||||
;;
|
||||
*)
|
||||
# Unsupported Debian-based OS
|
||||
sorry "vips" "Debian-based $DISTRO"
|
||||
;;
|
||||
esac
|
||||
elif [ -f /etc/redhat-release ]; then
|
||||
# Red Hat Linux
|
||||
RELEASE=$(cat /etc/redhat-release)
|
||||
echo "Detected Red Hat Linux '$RELEASE'"
|
||||
case $RELEASE in
|
||||
"Red Hat Enterprise Linux release 7."*|"CentOS Linux release 7."*|"Scientific Linux release 7."*)
|
||||
# RHEL/CentOS 7
|
||||
echo "Installing libvips dependencies via yum"
|
||||
yum groupinstall -y "Development Tools"
|
||||
yum install -y tar curl gtk-doc libxml2-devel libjpeg-turbo-devel libpng-devel libtiff-devel libexif-devel libgsf-devel lcms2-devel ImageMagick-devel gobject-introspection-devel libwebp-devel
|
||||
install_libvips_from_source "--prefix=/usr"
|
||||
;;
|
||||
"Red Hat Enterprise Linux release 6."*|"CentOS release 6."*|"Scientific Linux release 6."*)
|
||||
# RHEL/CentOS 6
|
||||
echo "Installing libvips dependencies via yum"
|
||||
yum groupinstall -y "Development Tools"
|
||||
yum install -y tar curl gtk-doc libxml2-devel libjpeg-turbo-devel libpng-devel libtiff-devel libexif-devel libgsf-devel lcms-devel ImageMagick-devel
|
||||
yum install -y http://li.nux.ro/download/nux/dextop/el6/x86_64/nux-dextop-release-0-2.el6.nux.noarch.rpm
|
||||
yum install -y --enablerepo=nux-dextop gobject-introspection-devel
|
||||
yum install -y http://rpms.famillecollet.com/enterprise/remi-release-6.rpm
|
||||
yum install -y --enablerepo=remi libwebp-devel
|
||||
install_libvips_from_source "--prefix=/usr"
|
||||
;;
|
||||
"Fedora"*)
|
||||
# Fedora 21, 22, 23
|
||||
echo "Installing libvips dependencies via yum"
|
||||
yum groupinstall -y "Development Tools"
|
||||
yum install -y gcc-c++ gtk-doc libxml2-devel libjpeg-turbo-devel libpng-devel libtiff-devel libexif-devel lcms-devel ImageMagick-devel gobject-introspection-devel libwebp-devel curl
|
||||
install_libvips_from_source "--prefix=/usr"
|
||||
;;
|
||||
*)
|
||||
# Unsupported RHEL-based OS
|
||||
sorry "vips" "$RELEASE"
|
||||
;;
|
||||
esac
|
||||
elif [ -f /etc/system-release ]; then
|
||||
# Probably Amazon Linux
|
||||
RELEASE=$(cat /etc/system-release)
|
||||
case $RELEASE in
|
||||
"Amazon Linux AMI release 2015.03"|"Amazon Linux AMI release 2015.09")
|
||||
# Amazon Linux
|
||||
echo "Detected '$RELEASE'"
|
||||
echo "Installing libvips dependencies via yum"
|
||||
yum groupinstall -y "Development Tools"
|
||||
yum install -y gtk-doc libxml2-devel libjpeg-turbo-devel libpng-devel libtiff-devel libexif-devel libgsf-devel lcms2-devel ImageMagick-devel gobject-introspection-devel libwebp-devel curl
|
||||
install_libvips_from_source "--prefix=/usr"
|
||||
;;
|
||||
*)
|
||||
# Unsupported Amazon Linux version
|
||||
sorry "vips" "$RELEASE"
|
||||
;;
|
||||
esac
|
||||
elif [ -f /etc/os-release ]; then
|
||||
RELEASE=$(cat /etc/os-release | grep VERSION)
|
||||
echo "Detected OpenSuse Linux '$RELEASE'"
|
||||
case $RELEASE in
|
||||
*"13.2"*)
|
||||
echo "Installing libvips dependencies via zypper"
|
||||
zypper --gpg-auto-import-keys install -y --type pattern devel_basis
|
||||
zypper --gpg-auto-import-keys install -y tar curl gtk-doc libxml2-devel libjpeg-turbo libjpeg8-devel libpng16-devel libtiff-devel libexif-devel liblcms2-devel ImageMagick-devel gobject-introspection-devel libwebp-devel
|
||||
install_libvips_from_source
|
||||
;;
|
||||
esac
|
||||
elif [ -f /etc/SuSE-brand ]; then
|
||||
RELEASE=$(cat /etc/SuSE-brand | grep VERSION)
|
||||
echo "Detected OpenSuse Linux '$RELEASE'"
|
||||
case $RELEASE in
|
||||
*"13.1")
|
||||
echo "Installing libvips dependencies via zypper"
|
||||
zypper --gpg-auto-import-keys install -y --type pattern devel_basis
|
||||
zypper --gpg-auto-import-keys install -y tar curl gtk-doc libxml2-devel libjpeg-turbo libjpeg8-devel libpng16-devel libtiff-devel libexif-devel liblcms2-devel ImageMagick-devel gobject-introspection-devel libwebp-devel
|
||||
install_libvips_from_source
|
||||
;;
|
||||
esac
|
||||
else
|
||||
# Unsupported OS
|
||||
sorry "vips" "$(uname -a)"
|
||||
fi
|
561
_vendor/src/github.com/h2non/bimg/resize.go
Normal file
@ -0,0 +1,561 @@
|
||||
package bimg
|
||||
|
||||
/*
|
||||
#cgo pkg-config: vips
|
||||
#include "vips/vips.h"
|
||||
*/
|
||||
import "C"
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"math"
|
||||
)
|
||||
|
||||
// Resize is used to transform a given image as byte buffer
|
||||
// with the passed options.
|
||||
func Resize(buf []byte, o Options) ([]byte, error) {
|
||||
defer C.vips_thread_shutdown()
|
||||
|
||||
image, imageType, err := loadImage(buf)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Clone and define default options
|
||||
o = applyDefaults(o, imageType)
|
||||
|
||||
if !IsTypeSupported(o.Type) {
|
||||
return nil, errors.New("Unsupported image output type")
|
||||
}
|
||||
|
||||
debug("Options: %#v", o)
|
||||
|
||||
// Auto rotate image based on EXIF orientation header
|
||||
image, rotated, err := rotateAndFlipImage(image, o)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// If the JPEG image was auto-rotated, retrieve the updated buffer
|
||||
if rotated && imageType == JPEG && !o.NoAutoRotate {
|
||||
buf, err = getImageBuffer(image)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
inWidth := int(image.Xsize)
|
||||
inHeight := int(image.Ysize)
|
||||
|
||||
// Infer the required operation based on the in/out image sizes for a coherent transformation
|
||||
normalizeOperation(&o, inWidth, inHeight)
|
||||
|
||||
// image calculations
|
||||
factor := imageCalculations(&o, inWidth, inHeight)
|
||||
shrink := calculateShrink(factor, o.Interpolator)
|
||||
residual := calculateResidual(factor, shrink)
|
||||
|
||||
// Do not enlarge the output if the input width and height
|
||||
// are already less than the required dimensions
|
||||
if !o.Enlarge && !o.Force {
|
||||
if inWidth < o.Width && inHeight < o.Height {
|
||||
factor = 1.0
|
||||
shrink = 1
|
||||
residual = 0
|
||||
o.Width = inWidth
|
||||
o.Height = inHeight
|
||||
}
|
||||
}
|
||||
|
||||
// Try to use libjpeg shrink-on-load
|
||||
if imageType == JPEG && shrink >= 2 {
|
||||
tmpImage, factor, err := shrinkJpegImage(buf, image, factor, shrink)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
image = tmpImage
|
||||
factor = math.Max(factor, 1.0)
|
||||
shrink = int(math.Floor(factor))
|
||||
residual = float64(shrink) / factor
|
||||
}
|
||||
|
||||
// Zoom image, if necessary
|
||||
image, err = zoomImage(image, o.Zoom)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Transform image, if necessary
|
||||
if shouldTransformImage(o, inWidth, inHeight) {
|
||||
image, err = transformImage(image, o, shrink, residual)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Apply effects, if necessary
|
||||
if shouldApplyEffects(o) {
|
||||
image, err = applyEffects(image, o)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Add text watermark, if necessary
|
||||
image, err = watermarkImageWithText(image, o.Watermark)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Add image watermark, if necessary
|
||||
image, err = watermarkImageWithAnotherImage(image, o.WatermarkImage)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Flatten image on a background, if necessary
|
||||
image, err = imageFlatten(image, imageType, o)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return saveImage(image, o)
|
||||
}
|
||||
|
||||
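// loadImage decodes the image buffer with libvips and returns the image together with its detected type.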
func loadImage(buf []byte) (*C.VipsImage, ImageType, error) {
|
||||
if len(buf) == 0 {
|
||||
return nil, JPEG, errors.New("Image buffer is empty")
|
||||
}
|
||||
|
||||
image, imageType, err := vipsRead(buf)
|
||||
if err != nil {
|
||||
return nil, JPEG, err
|
||||
}
|
||||
|
||||
return image, imageType, nil
|
||||
}
|
||||
|
||||
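// applyDefaults fills in the default quality, compression, output type and colour interpretation when they are not set.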
func applyDefaults(o Options, imageType ImageType) Options {
|
||||
if o.Quality == 0 {
|
||||
o.Quality = Quality
|
||||
}
|
||||
if o.Compression == 0 {
|
||||
o.Compression = 6
|
||||
}
|
||||
if o.Type == 0 {
|
||||
o.Type = imageType
|
||||
}
|
||||
if o.Interpretation == 0 {
|
||||
o.Interpretation = InterpretationSRGB
|
||||
}
|
||||
return o
|
||||
}
|
||||
|
||||
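// saveImage serializes the processed image into a byte buffer using the configured save options.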
func saveImage(image *C.VipsImage, o Options) ([]byte, error) {
|
||||
saveOptions := vipsSaveOptions{
|
||||
Quality: o.Quality,
|
||||
Type: o.Type,
|
||||
Compression: o.Compression,
|
||||
Interlace: o.Interlace,
|
||||
NoProfile: o.NoProfile,
|
||||
Interpretation: o.Interpretation,
|
||||
}
|
||||
// Finally get the resultant buffer
|
||||
return vipsSave(image, saveOptions)
|
||||
}
|
||||
|
||||
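// normalizeOperation enables Force mode when only a target width and/or height is given, so the resize is always applied.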
func normalizeOperation(o *Options, inWidth, inHeight int) {
|
||||
if !o.Force && !o.Crop && !o.Embed && !o.Enlarge && o.Rotate == 0 && (o.Width > 0 || o.Height > 0) {
|
||||
o.Force = true
|
||||
}
|
||||
}
|
||||
|
||||
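// shouldTransformImage reports whether a geometric transformation is needed for the given options and input dimensions.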
func shouldTransformImage(o Options, inWidth, inHeight int) bool {
|
||||
return o.Force || (o.Width > 0 && o.Width != inWidth) ||
|
||||
(o.Height > 0 && o.Height != inHeight) || o.AreaWidth > 0 || o.AreaHeight > 0
|
||||
}
|
||||
|
||||
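// shouldApplyEffects reports whether gaussian blur or sharpen parameters were provided.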
func shouldApplyEffects(o Options) bool {
|
||||
return o.GaussianBlur.Sigma > 0 || o.GaussianBlur.MinAmpl > 0 || o.Sharpen.Radius > 0 && o.Sharpen.Y2 > 0 || o.Sharpen.Y3 > 0
|
||||
}
|
||||
|
||||
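// transformImage shrinks, scales (affine) and then crops or embeds the image according to the computed shrink and residual factors.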
func transformImage(image *C.VipsImage, o Options, shrink int, residual float64) (*C.VipsImage, error) {
|
||||
var err error
|
||||
// Use vips_shrink with the integral reduction
|
||||
if shrink > 1 {
|
||||
image, residual, err = shrinkImage(image, o, residual, shrink)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
residualx, residualy := residual, residual
|
||||
if o.Force {
|
||||
residualx = float64(o.Width) / float64(image.Xsize)
|
||||
residualy = float64(o.Height) / float64(image.Ysize)
|
||||
}
|
||||
|
||||
if o.Force || residual != 0 {
|
||||
image, err = vipsAffine(image, residualx, residualy, o.Interpolator)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if o.Force {
|
||||
o.Crop = false
|
||||
o.Embed = false
|
||||
}
|
||||
|
||||
image, err = extractOrEmbedImage(image, o)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
debug("Transform: shrink=%v, residual=%v, interpolator=%v",
|
||||
shrink, residual, o.Interpolator.String())
|
||||
|
||||
return image, nil
|
||||
}
|
||||
|
||||
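// applyEffects applies the optional gaussian blur and sharpen operations.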
func applyEffects(image *C.VipsImage, o Options) (*C.VipsImage, error) {
|
||||
var err error
|
||||
|
||||
if o.GaussianBlur.Sigma > 0 || o.GaussianBlur.MinAmpl > 0 {
|
||||
image, err = vipsGaussianBlur(image, o.GaussianBlur)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if o.Sharpen.Radius > 0 && o.Sharpen.Y2 > 0 || o.Sharpen.Y3 > 0 {
|
||||
image, err = vipsSharpen(image, o.Sharpen)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
debug("Effects: gaussSigma=%v, gaussMinAmpl=%v, sharpenRadius=%v",
|
||||
o.GaussianBlur.Sigma, o.GaussianBlur.MinAmpl, o.Sharpen.Radius)
|
||||
|
||||
return image, nil
|
||||
}
|
||||
|
||||
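// extractOrEmbedImage smart-crops, crops, embeds or extracts an area of the image depending on the options.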
func extractOrEmbedImage(image *C.VipsImage, o Options) (*C.VipsImage, error) {
|
||||
var err error
|
||||
inWidth := int(image.Xsize)
|
||||
inHeight := int(image.Ysize)
|
||||
|
||||
switch {
|
||||
case o.Gravity == GravitySmart, o.SmartCrop:
|
||||
image, err = vipsSmartCrop(image, o.Width, o.Height)
|
||||
break
|
||||
case o.Crop:
|
||||
width := int(math.Min(float64(inWidth), float64(o.Width)))
|
||||
height := int(math.Min(float64(inHeight), float64(o.Height)))
|
||||
left, top := calculateCrop(inWidth, inHeight, o.Width, o.Height, o.Gravity)
|
||||
left, top = int(math.Max(float64(left), 0)), int(math.Max(float64(top), 0))
|
||||
image, err = vipsExtract(image, left, top, width, height)
|
||||
break
|
||||
case o.Embed:
|
||||
left, top := (o.Width-inWidth)/2, (o.Height-inHeight)/2
|
||||
image, err = vipsEmbed(image, left, top, o.Width, o.Height, o.Extend, o.Background)
|
||||
break
|
||||
|
||||
case o.Top != 0 || o.Left != 0 || o.AreaWidth != 0 || o.AreaHeight != 0:
|
||||
if o.AreaWidth == 0 {
|
||||
o.AreaWidth = o.Width
|
||||
}
|
||||
if o.AreaHeight == 0 {
|
||||
o.AreaHeight = o.Height
|
||||
}
|
||||
if o.AreaWidth == 0 || o.AreaHeight == 0 {
|
||||
return nil, errors.New("Extract area width/height params are required")
|
||||
}
|
||||
image, err = vipsExtract(image, o.Left, o.Top, o.AreaWidth, o.AreaHeight)
|
||||
break
|
||||
}
|
||||
|
||||
return image, err
|
||||
}
|
||||
|
||||
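// rotateAndFlipImage rotates and/or flips the image based on the EXIF orientation (unless NoAutoRotate is set) and the requested angle.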
func rotateAndFlipImage(image *C.VipsImage, o Options) (*C.VipsImage, bool, error) {
|
||||
var err error
|
||||
var rotated bool
|
||||
var direction Direction = -1
|
||||
|
||||
if o.NoAutoRotate == false {
|
||||
rotation, flip := calculateRotationAndFlip(image, o.Rotate)
|
||||
if flip {
|
||||
o.Flip = flip
|
||||
}
|
||||
if rotation > 0 && o.Rotate == 0 {
|
||||
o.Rotate = rotation
|
||||
}
|
||||
}
|
||||
|
||||
if o.Rotate > 0 {
|
||||
rotated = true
|
||||
image, err = vipsRotate(image, getAngle(o.Rotate))
|
||||
}
|
||||
|
||||
if o.Flip {
|
||||
direction = Horizontal
|
||||
} else if o.Flop {
|
||||
direction = Vertical
|
||||
}
|
||||
|
||||
if direction != -1 {
|
||||
rotated = true
|
||||
image, err = vipsFlip(image, direction)
|
||||
}
|
||||
|
||||
return image, rotated, err
|
||||
}
|
||||
|
||||
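// watermarkImageWithText overlays a text watermark, filling in default font, width, DPI, margin and opacity values.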
func watermarkImageWithText(image *C.VipsImage, w Watermark) (*C.VipsImage, error) {
|
||||
if w.Text == "" {
|
||||
return image, nil
|
||||
}
|
||||
|
||||
// Defaults
|
||||
if w.Font == "" {
|
||||
w.Font = WatermarkFont
|
||||
}
|
||||
if w.Width == 0 {
|
||||
w.Width = int(math.Floor(float64(image.Xsize / 6)))
|
||||
}
|
||||
if w.DPI == 0 {
|
||||
w.DPI = 150
|
||||
}
|
||||
if w.Margin == 0 {
|
||||
w.Margin = w.Width
|
||||
}
|
||||
if w.Opacity == 0 {
|
||||
w.Opacity = 0.25
|
||||
} else if w.Opacity > 1 {
|
||||
w.Opacity = 1
|
||||
}
|
||||
|
||||
image, err := vipsWatermark(image, w)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return image, nil
|
||||
}
|
||||
|
||||
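// watermarkImageWithAnotherImage overlays another image as a watermark at the given position, defaulting to full opacity.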
func watermarkImageWithAnotherImage(image *C.VipsImage, w WatermarkImage) (*C.VipsImage, error) {
|
||||
|
||||
if len(w.Buf) == 0 {
|
||||
return image, nil
|
||||
}
|
||||
|
||||
if w.Opacity == 0.0 {
|
||||
w.Opacity = 1.0
|
||||
}
|
||||
|
||||
image, err := vipsDrawWatermark(image, w)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return image, nil
|
||||
}
|
||||
|
||||
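// imageFlatten flattens a PNG with transparency onto the configured background colour; other cases are returned untouched.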
func imageFlatten(image *C.VipsImage, imageType ImageType, o Options) (*C.VipsImage, error) {
|
||||
// Only PNG images are supported for now
|
||||
if imageType != PNG || o.Background == ColorBlack {
|
||||
return image, nil
|
||||
}
|
||||
return vipsFlattenBackground(image, o.Background)
|
||||
}
|
||||
|
||||
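// zoomImage enlarges the image via vips_zoom; a zoom value of N scales the image by a factor of N+1.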
func zoomImage(image *C.VipsImage, zoom int) (*C.VipsImage, error) {
|
||||
if zoom == 0 {
|
||||
return image, nil
|
||||
}
|
||||
return vipsZoom(image, zoom+1)
|
||||
}
|
||||
|
||||
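// shrinkImage performs the integral shrink and recalculates the residual scale from the target dimensions.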
func shrinkImage(image *C.VipsImage, o Options, residual float64, shrink int) (*C.VipsImage, float64, error) {
|
||||
// Use vips_shrink with the integral reduction
|
||||
image, err := vipsShrink(image, shrink)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
// Recalculate residual float based on dimensions of required vs shrunk images
|
||||
residualx := float64(o.Width) / float64(image.Xsize)
|
||||
residualy := float64(o.Height) / float64(image.Ysize)
|
||||
|
||||
if o.Crop {
|
||||
residual = math.Max(residualx, residualy)
|
||||
} else {
|
||||
residual = math.Min(residualx, residualy)
|
||||
}
|
||||
|
||||
return image, residual, nil
|
||||
}
|
||||
|
||||
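// shrinkJpegImage reloads a JPEG buffer using libjpeg shrink-on-load for shrink factors of 2, 4 or 8.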
func shrinkJpegImage(buf []byte, input *C.VipsImage, factor float64, shrink int) (*C.VipsImage, float64, error) {
|
||||
var image *C.VipsImage
|
||||
var err error
|
||||
shrinkOnLoad := 1
|
||||
|
||||
// Recalculate integral shrink and double residual
|
||||
switch {
|
||||
case shrink >= 8:
|
||||
factor = factor / 8
|
||||
shrinkOnLoad = 8
|
||||
case shrink >= 4:
|
||||
factor = factor / 4
|
||||
shrinkOnLoad = 4
|
||||
case shrink >= 2:
|
||||
factor = factor / 2
|
||||
shrinkOnLoad = 2
|
||||
}
|
||||
|
||||
// Reload input using shrink-on-load
|
||||
if shrinkOnLoad > 1 {
|
||||
image, err = vipsShrinkJpeg(buf, input, shrinkOnLoad)
|
||||
}
|
||||
|
||||
return image, factor, err
|
||||
}
|
||||
|
||||
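// imageCalculations computes the scale factor and fills in any missing output dimension from the input size.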
func imageCalculations(o *Options, inWidth, inHeight int) float64 {
|
||||
factor := 1.0
|
||||
xfactor := float64(inWidth) / float64(o.Width)
|
||||
yfactor := float64(inHeight) / float64(o.Height)
|
||||
|
||||
switch {
|
||||
// Fixed width and height
|
||||
case o.Width > 0 && o.Height > 0:
|
||||
if o.Crop {
|
||||
factor = math.Min(xfactor, yfactor)
|
||||
} else {
|
||||
factor = math.Max(xfactor, yfactor)
|
||||
}
|
||||
// Fixed width, auto height
|
||||
case o.Width > 0:
|
||||
if o.Crop {
|
||||
o.Height = inHeight
|
||||
} else {
|
||||
factor = xfactor
|
||||
o.Height = roundFloat(float64(inHeight) / factor)
|
||||
}
|
||||
// Fixed height, auto width
|
||||
case o.Height > 0:
|
||||
if o.Crop {
|
||||
o.Width = inWidth
|
||||
} else {
|
||||
factor = yfactor
|
||||
o.Width = roundFloat(float64(inWidth) / factor)
|
||||
}
|
||||
// Identity transform
|
||||
default:
|
||||
o.Width = inWidth
|
||||
o.Height = inHeight
|
||||
break
|
||||
}
|
||||
|
||||
return factor
|
||||
}
|
||||
|
||||
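// roundFloat rounds half away from zero.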
func roundFloat(f float64) int {
|
||||
if f < 0 {
|
||||
return int(math.Ceil(f - 0.5))
|
||||
}
|
||||
return int(math.Floor(f + 0.5))
|
||||
}
|
||||
|
||||
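// calculateCrop returns the left and top offsets of the crop area for the given gravity.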
func calculateCrop(inWidth, inHeight, outWidth, outHeight int, gravity Gravity) (int, int) {
|
||||
left, top := 0, 0
|
||||
|
||||
switch gravity {
|
||||
case GravityNorth:
|
||||
left = (inWidth - outWidth + 1) / 2
|
||||
case GravityEast:
|
||||
left = inWidth - outWidth
|
||||
top = (inHeight - outHeight + 1) / 2
|
||||
case GravitySouth:
|
||||
left = (inWidth - outWidth + 1) / 2
|
||||
top = inHeight - outHeight
|
||||
case GravityWest:
|
||||
top = (inHeight - outHeight + 1) / 2
|
||||
default:
|
||||
left = (inWidth - outWidth + 1) / 2
|
||||
top = (inHeight - outHeight + 1) / 2
|
||||
}
|
||||
|
||||
return left, top
|
||||
}
|
||||
|
||||
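// calculateRotationAndFlip derives the rotation angle and flip flag from the EXIF orientation when no explicit angle was requested.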
func calculateRotationAndFlip(image *C.VipsImage, angle Angle) (Angle, bool) {
|
||||
rotate := D0
|
||||
flip := false
|
||||
|
||||
if angle > 0 {
|
||||
return rotate, flip
|
||||
}
|
||||
|
||||
switch vipsExifOrientation(image) {
|
||||
case 6:
|
||||
rotate = D90
|
||||
break
|
||||
case 3:
|
||||
rotate = D180
|
||||
break
|
||||
case 8:
|
||||
rotate = D270
|
||||
break
|
||||
case 2:
|
||||
flip = true
|
||||
break // flip 1
|
||||
case 7:
|
||||
flip = true
|
||||
rotate = D90
|
||||
break // flip 6
|
||||
case 4:
|
||||
flip = true
|
||||
rotate = D180
|
||||
break // flip 3
|
||||
case 5:
|
||||
flip = true
|
||||
rotate = D270
|
||||
break // flip 8
|
||||
}
|
||||
|
||||
return rotate, flip
|
||||
}
|
||||
|
||||
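// calculateShrink computes the integral box shrink from the scale factor and the interpolator window size.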
func calculateShrink(factor float64, i Interpolator) int {
|
||||
var shrink float64
|
||||
|
||||
// Calculate integral box shrink
|
||||
windowSize := vipsWindowSize(i.String())
|
||||
if factor >= 2 && windowSize > 3 {
|
||||
// Shrink less, affine more with interpolators that use at least 4x4 pixel window, e.g. bicubic
|
||||
shrink = float64(math.Floor(factor * 3.0 / windowSize))
|
||||
} else {
|
||||
shrink = math.Floor(factor)
|
||||
}
|
||||
|
||||
return int(math.Max(shrink, 1))
|
||||
}
|
||||
|
||||
func calculateResidual(factor float64, shrink int) float64 {
|
||||
return float64(shrink) / factor
|
||||
}
|
||||
|
||||
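// getAngle rounds the angle down to the nearest multiple of 90 degrees, capping the result at 270.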
func getAngle(angle Angle) Angle {
|
||||
divisor := angle % 90
|
||||
if divisor != 0 {
|
||||
angle = angle - divisor
|
||||
}
|
||||
return Angle(math.Min(float64(angle), 270))
|
||||
}
|
644
_vendor/src/github.com/h2non/bimg/resize_test.go
Normal file
@ -0,0 +1,644 @@
|
||||
package bimg
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/md5"
|
||||
"image"
|
||||
"image/jpeg"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path"
|
||||
"strconv"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestResize(t *testing.T) {
|
||||
options := Options{Width: 800, Height: 600}
|
||||
buf, _ := Read("fixtures/test.jpg")
|
||||
|
||||
newImg, err := Resize(buf, options)
|
||||
if err != nil {
|
||||
t.Errorf("Resize(imgData, %#v) error: %#v", options, err)
|
||||
}
|
||||
|
||||
if DetermineImageType(newImg) != JPEG {
|
||||
t.Fatal("Image is not jpeg")
|
||||
}
|
||||
|
||||
size, _ := Size(newImg)
|
||||
if size.Height != options.Height || size.Width != options.Width {
|
||||
t.Fatalf("Invalid image size: %dx%d", size.Width, size.Height)
|
||||
}
|
||||
|
||||
Write("fixtures/test_out.jpg", newImg)
|
||||
}
|
||||
|
||||
func TestResizeVerticalImage(t *testing.T) {
|
||||
tests := []struct {
|
||||
format ImageType
|
||||
options Options
|
||||
}{
|
||||
{JPEG, Options{Width: 800, Height: 600}},
|
||||
{JPEG, Options{Width: 1000, Height: 1000}},
|
||||
{JPEG, Options{Width: 1000, Height: 1500}},
|
||||
{JPEG, Options{Width: 1000}},
|
||||
{JPEG, Options{Height: 1500}},
|
||||
{JPEG, Options{Width: 100, Height: 50}},
|
||||
{JPEG, Options{Width: 2000, Height: 2000}},
|
||||
{JPEG, Options{Width: 500, Height: 1000}},
|
||||
{JPEG, Options{Width: 500}},
|
||||
{JPEG, Options{Height: 500}},
|
||||
{JPEG, Options{Crop: true, Width: 500, Height: 1000}},
|
||||
{JPEG, Options{Crop: true, Enlarge: true, Width: 2000, Height: 1400}},
|
||||
{JPEG, Options{Enlarge: true, Force: true, Width: 2000, Height: 2000}},
|
||||
{JPEG, Options{Force: true, Width: 2000, Height: 2000}},
|
||||
}
|
||||
|
||||
buf, _ := Read("fixtures/vertical.jpg")
|
||||
for _, test := range tests {
|
||||
image, err := Resize(buf, test.options)
|
||||
if err != nil {
|
||||
t.Errorf("Resize(imgData, %#v) error: %#v", test.options, err)
|
||||
}
|
||||
|
||||
if DetermineImageType(image) != test.format {
|
||||
t.Fatalf("Image format is invalid. Expected: %#v", test.format)
|
||||
}
|
||||
|
||||
size, _ := Size(image)
|
||||
if test.options.Height > 0 && size.Height != test.options.Height {
|
||||
t.Fatalf("Invalid height: %d", size.Height)
|
||||
}
|
||||
if test.options.Width > 0 && size.Width != test.options.Width {
|
||||
t.Fatalf("Invalid width: %d", size.Width)
|
||||
}
|
||||
|
||||
Write("fixtures/test_vertical_"+strconv.Itoa(test.options.Width)+"x"+strconv.Itoa(test.options.Height)+"_out.jpg", image)
|
||||
}
|
||||
}
|
||||
|
||||
func TestResizeCustomSizes(t *testing.T) {
|
||||
tests := []struct {
|
||||
format ImageType
|
||||
options Options
|
||||
}{
|
||||
{JPEG, Options{Width: 800, Height: 600}},
|
||||
{JPEG, Options{Width: 1000, Height: 1000}},
|
||||
{JPEG, Options{Width: 100, Height: 50}},
|
||||
{JPEG, Options{Width: 2000, Height: 2000}},
|
||||
{JPEG, Options{Width: 500, Height: 1000}},
|
||||
{JPEG, Options{Width: 500}},
|
||||
{JPEG, Options{Height: 500}},
|
||||
{JPEG, Options{Crop: true, Width: 500, Height: 1000}},
|
||||
{JPEG, Options{Crop: true, Enlarge: true, Width: 2000, Height: 1400}},
|
||||
{JPEG, Options{Enlarge: true, Force: true, Width: 2000, Height: 2000}},
|
||||
{JPEG, Options{Force: true, Width: 2000, Height: 2000}},
|
||||
}
|
||||
|
||||
buf, _ := Read("fixtures/test.jpg")
|
||||
for _, test := range tests {
|
||||
image, err := Resize(buf, test.options)
|
||||
if err != nil {
|
||||
t.Errorf("Resize(imgData, %#v) error: %#v", test.options, err)
|
||||
}
|
||||
|
||||
if DetermineImageType(image) != test.format {
|
||||
t.Fatalf("Image format is invalid. Expected: %#v", test.format)
|
||||
}
|
||||
|
||||
size, _ := Size(image)
|
||||
if test.options.Height > 0 && size.Height != test.options.Height {
|
||||
t.Fatalf("Invalid height: %d", size.Height)
|
||||
}
|
||||
if test.options.Width > 0 && size.Width != test.options.Width {
|
||||
t.Fatalf("Invalid width: %d", size.Width)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestResizePrecision(t *testing.T) {
|
||||
// see https://github.com/h2non/bimg/issues/99
|
||||
img := image.NewGray16(image.Rect(0, 0, 1920, 1080))
|
||||
input := &bytes.Buffer{}
|
||||
jpeg.Encode(input, img, nil)
|
||||
|
||||
opts := Options{Width: 300}
|
||||
newImg, err := Resize(input.Bytes(), opts)
|
||||
if err != nil {
|
||||
t.Fatalf("Resize(imgData, %#v) error: %#v", opts, err)
|
||||
}
|
||||
|
||||
size, _ := Size(newImg)
|
||||
if size.Width != opts.Width {
|
||||
t.Fatalf("Invalid width: %d", size.Width)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRotate(t *testing.T) {
|
||||
options := Options{Width: 800, Height: 600, Rotate: 270, Crop: true}
|
||||
buf, _ := Read("fixtures/test.jpg")
|
||||
|
||||
newImg, err := Resize(buf, options)
|
||||
if err != nil {
|
||||
t.Errorf("Resize(imgData, %#v) error: %#v", options, err)
|
||||
}
|
||||
|
||||
if DetermineImageType(newImg) != JPEG {
|
||||
t.Error("Image is not jpeg")
|
||||
}
|
||||
|
||||
size, _ := Size(newImg)
|
||||
if size.Width != options.Width || size.Height != options.Height {
|
||||
t.Errorf("Invalid image size: %dx%d", size.Width, size.Height)
|
||||
}
|
||||
|
||||
Write("fixtures/test_rotate_out.jpg", newImg)
|
||||
}
|
||||
|
||||
func TestInvalidRotateDegrees(t *testing.T) {
|
||||
options := Options{Width: 800, Height: 600, Rotate: 111, Crop: true}
|
||||
buf, _ := Read("fixtures/test.jpg")
|
||||
|
||||
newImg, err := Resize(buf, options)
|
||||
if err != nil {
|
||||
t.Errorf("Resize(imgData, %#v) error: %#v", options, err)
|
||||
}
|
||||
|
||||
if DetermineImageType(newImg) != JPEG {
|
||||
t.Errorf("Image is not jpeg")
|
||||
}
|
||||
|
||||
size, _ := Size(newImg)
|
||||
if size.Width != options.Width || size.Height != options.Height {
|
||||
t.Errorf("Invalid image size: %dx%d", size.Width, size.Height)
|
||||
}
|
||||
|
||||
Write("fixtures/test_rotate_invalid_out.jpg", newImg)
|
||||
}
|
||||
|
||||
func TestCorruptedImage(t *testing.T) {
|
||||
options := Options{Width: 800, Height: 600}
|
||||
buf, _ := Read("fixtures/corrupt.jpg")
|
||||
|
||||
newImg, err := Resize(buf, options)
|
||||
if err != nil {
|
||||
t.Errorf("Resize(imgData, %#v) error: %#v", options, err)
|
||||
}
|
||||
|
||||
if DetermineImageType(newImg) != JPEG {
|
||||
t.Fatal("Image is not jpeg")
|
||||
}
|
||||
|
||||
size, _ := Size(newImg)
|
||||
if size.Height != options.Height || size.Width != options.Width {
|
||||
t.Fatalf("Invalid image size: %dx%d", size.Width, size.Height)
|
||||
}
|
||||
|
||||
Write("fixtures/test_corrupt_out.jpg", newImg)
|
||||
}
|
||||
|
||||
func TestNoColorProfile(t *testing.T) {
|
||||
options := Options{Width: 800, Height: 600, NoProfile: true}
|
||||
buf, _ := Read("fixtures/test.jpg")
|
||||
|
||||
newImg, err := Resize(buf, options)
|
||||
if err != nil {
|
||||
t.Errorf("Resize(imgData, %#v) error: %#v", options, err)
|
||||
}
|
||||
|
||||
metadata, err := Metadata(newImg)
|
||||
if metadata.Profile == true {
|
||||
t.Fatal("Invalid profile data")
|
||||
}
|
||||
|
||||
size, _ := Size(newImg)
|
||||
if size.Height != options.Height || size.Width != options.Width {
|
||||
t.Fatalf("Invalid image size: %dx%d", size.Width, size.Height)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEmbedExtendColor(t *testing.T) {
|
||||
options := Options{Width: 400, Height: 600, Crop: false, Embed: true, Extend: ExtendWhite, Background: Color{255, 20, 10}}
|
||||
buf, _ := Read("fixtures/test_issue.jpg")
|
||||
|
||||
newImg, err := Resize(buf, options)
|
||||
if err != nil {
|
||||
t.Errorf("Resize(imgData, %#v) error: %#v", options, err)
|
||||
}
|
||||
|
||||
size, _ := Size(newImg)
|
||||
if size.Height != options.Height || size.Width != options.Width {
|
||||
t.Fatalf("Invalid image size: %dx%d", size.Width, size.Height)
|
||||
}
|
||||
|
||||
Write("fixtures/test_extend_white_out.jpg", newImg)
|
||||
}
|
||||
|
||||
func TestEmbedExtendWithCustomColor(t *testing.T) {
|
||||
options := Options{Width: 400, Height: 600, Crop: false, Embed: true, Extend: 5, Background: Color{255, 20, 10}}
|
||||
buf, _ := Read("fixtures/test_issue.jpg")
|
||||
|
||||
newImg, err := Resize(buf, options)
|
||||
if err != nil {
|
||||
t.Errorf("Resize(imgData, %#v) error: %#v", options, err)
|
||||
}
|
||||
|
||||
size, _ := Size(newImg)
|
||||
if size.Height != options.Height || size.Width != options.Width {
|
||||
t.Fatalf("Invalid image size: %dx%d", size.Width, size.Height)
|
||||
}
|
||||
|
||||
Write("fixtures/test_extend_background_out.jpg", newImg)
|
||||
}
|
||||
|
||||
func TestGaussianBlur(t *testing.T) {
|
||||
options := Options{Width: 800, Height: 600, GaussianBlur: GaussianBlur{Sigma: 5}}
|
||||
buf, _ := Read("fixtures/test.jpg")
|
||||
|
||||
newImg, err := Resize(buf, options)
|
||||
if err != nil {
|
||||
t.Errorf("Resize(imgData, %#v) error: %#v", options, err)
|
||||
}
|
||||
|
||||
size, _ := Size(newImg)
|
||||
if size.Height != options.Height || size.Width != options.Width {
|
||||
t.Fatalf("Invalid image size: %dx%d", size.Width, size.Height)
|
||||
}
|
||||
|
||||
Write("fixtures/test_gaussian_out.jpg", newImg)
|
||||
}
|
||||
|
||||
func TestSharpen(t *testing.T) {
|
||||
options := Options{Width: 800, Height: 600, Sharpen: Sharpen{Radius: 1, X1: 1.5, Y2: 20, Y3: 50, M1: 1, M2: 2}}
|
||||
buf, _ := Read("fixtures/test.jpg")
|
||||
|
||||
newImg, err := Resize(buf, options)
|
||||
if err != nil {
|
||||
t.Errorf("Resize(imgData, %#v) error: %#v", options, err)
|
||||
}
|
||||
|
||||
size, _ := Size(newImg)
|
||||
if size.Height != options.Height || size.Width != options.Width {
|
||||
t.Fatalf("Invalid image size: %dx%d", size.Width, size.Height)
|
||||
}
|
||||
|
||||
Write("fixtures/test_sharpen_out.jpg", newImg)
|
||||
}
|
||||
|
||||
func TestExtractWithDefaultAxis(t *testing.T) {
|
||||
options := Options{AreaWidth: 200, AreaHeight: 200}
|
||||
buf, _ := Read("fixtures/test.jpg")
|
||||
|
||||
newImg, err := Resize(buf, options)
|
||||
if err != nil {
|
||||
t.Errorf("Resize(imgData, %#v) error: %#v", options, err)
|
||||
}
|
||||
|
||||
size, _ := Size(newImg)
|
||||
if size.Height != options.AreaHeight || size.Width != options.AreaWidth {
|
||||
t.Fatalf("Invalid image size: %dx%d", size.Width, size.Height)
|
||||
}
|
||||
|
||||
Write("fixtures/test_extract_defaults_out.jpg", newImg)
|
||||
}
|
||||
|
||||
func TestExtractCustomAxis(t *testing.T) {
|
||||
options := Options{Top: 100, Left: 100, AreaWidth: 200, AreaHeight: 200}
|
||||
buf, _ := Read("fixtures/test.jpg")
|
||||
|
||||
newImg, err := Resize(buf, options)
|
||||
if err != nil {
|
||||
t.Errorf("Resize(imgData, %#v) error: %#v", options, err)
|
||||
}
|
||||
|
||||
size, _ := Size(newImg)
|
||||
if size.Height != options.AreaHeight || size.Width != options.AreaWidth {
|
||||
t.Fatalf("Invalid image size: %dx%d", size.Width, size.Height)
|
||||
}
|
||||
|
||||
Write("fixtures/test_extract_custom_axis_out.jpg", newImg)
|
||||
}
|
||||
|
||||
func TestConvert(t *testing.T) {
|
||||
width, height := 300, 240
|
||||
formats := [3]ImageType{PNG, WEBP, JPEG}
|
||||
|
||||
files := []string{
|
||||
"test.jpg",
|
||||
"test.png",
|
||||
"test.webp",
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
img, err := os.Open("fixtures/" + file)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
buf, err := ioutil.ReadAll(img)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
img.Close()
|
||||
|
||||
for _, format := range formats {
|
||||
options := Options{Width: width, Height: height, Crop: true, Type: format}
|
||||
|
||||
newImg, err := Resize(buf, options)
|
||||
if err != nil {
|
||||
t.Errorf("Resize(imgData, %#v) error: %#v", options, err)
|
||||
}
|
||||
|
||||
if DetermineImageType(newImg) != format {
|
||||
t.Fatal("Image is not png")
|
||||
}
|
||||
|
||||
size, _ := Size(newImg)
|
||||
if size.Height != height || size.Width != width {
|
||||
t.Fatalf("Invalid image size: %dx%d", size.Width, size.Height)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestResizePngWithTransparency(t *testing.T) {
|
||||
width, height := 300, 240
|
||||
|
||||
options := Options{Width: width, Height: height, Crop: true}
|
||||
img, err := os.Open("fixtures/transparent.png")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer img.Close()
|
||||
|
||||
buf, err := ioutil.ReadAll(img)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
newImg, err := Resize(buf, options)
|
||||
if err != nil {
|
||||
t.Errorf("Resize(imgData, %#v) error: %#v", options, err)
|
||||
}
|
||||
|
||||
if DetermineImageType(newImg) != PNG {
|
||||
t.Fatal("Image is not png")
|
||||
}
|
||||
|
||||
size, _ := Size(newImg)
|
||||
if size.Height != height || size.Width != width {
|
||||
t.Fatal("Invalid image size")
|
||||
}
|
||||
|
||||
Write("fixtures/transparent_out.png", newImg)
|
||||
}
|
||||
|
||||
func TestIfBothSmartCropOptionsAreIdentical(t *testing.T) {
|
||||
if !(VipsMajorVersion >= 8 && VipsMinorVersion > 4) {
|
||||
t.Skipf("Skipping this test, libvips doesn't meet version requirement %s > 8.4", VipsVersion)
|
||||
}
|
||||
|
||||
benchmarkOptions := Options{Width: 100, Height: 100, Crop: true}
|
||||
smartCropOptions := Options{Width: 100, Height: 100, Crop: true, SmartCrop: true}
|
||||
gravityOptions := Options{Width: 100, Height: 100, Crop: true, Gravity: GravitySmart}
|
||||
|
||||
testImg, err := os.Open("fixtures/northern_cardinal_bird.jpg")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer testImg.Close()
|
||||
|
||||
testImgByte, err := ioutil.ReadAll(testImg)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
scImg, err := Resize(testImgByte, smartCropOptions)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
gImg, err := Resize(testImgByte, gravityOptions)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
benchmarkImg, err := Resize(testImgByte, benchmarkOptions)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
sch, gh, bh := md5.Sum(scImg), md5.Sum(gImg), md5.Sum(benchmarkImg)
|
||||
if gh == bh || sch == bh {
|
||||
t.Error("Expected both options produce a different result from a standard crop.")
|
||||
}
|
||||
|
||||
if sch != gh {
|
||||
t.Errorf("Expected both options to result in the same output, %x != %x", sch, gh)
|
||||
}
|
||||
}
|
||||
|
||||
func runBenchmarkResize(file string, o Options, b *testing.B) {
|
||||
buf, _ := Read(path.Join("fixtures", file))
|
||||
|
||||
for n := 0; n < b.N; n++ {
|
||||
Resize(buf, o)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkRotateJpeg(b *testing.B) {
|
||||
options := Options{Rotate: 180}
|
||||
runBenchmarkResize("test.jpg", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkResizeLargeJpeg(b *testing.B) {
|
||||
options := Options{
|
||||
Width: 800,
|
||||
Height: 600,
|
||||
}
|
||||
runBenchmarkResize("test.jpg", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkResizePng(b *testing.B) {
|
||||
options := Options{
|
||||
Width: 200,
|
||||
Height: 200,
|
||||
}
|
||||
runBenchmarkResize("test.png", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkResizeWebP(b *testing.B) {
|
||||
options := Options{
|
||||
Width: 200,
|
||||
Height: 200,
|
||||
}
|
||||
runBenchmarkResize("test.webp", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkConvertToJpeg(b *testing.B) {
|
||||
options := Options{Type: JPEG}
|
||||
runBenchmarkResize("test.png", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkConvertToPng(b *testing.B) {
|
||||
options := Options{Type: PNG}
|
||||
runBenchmarkResize("test.jpg", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkConvertToWebp(b *testing.B) {
|
||||
options := Options{Type: WEBP}
|
||||
runBenchmarkResize("test.jpg", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkCropJpeg(b *testing.B) {
|
||||
options := Options{
|
||||
Width: 800,
|
||||
Height: 600,
|
||||
}
|
||||
runBenchmarkResize("test.jpg", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkCropPng(b *testing.B) {
|
||||
options := Options{
|
||||
Width: 800,
|
||||
Height: 600,
|
||||
}
|
||||
runBenchmarkResize("test.png", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkCropWebP(b *testing.B) {
|
||||
options := Options{
|
||||
Width: 800,
|
||||
Height: 600,
|
||||
}
|
||||
runBenchmarkResize("test.webp", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkExtractJpeg(b *testing.B) {
|
||||
options := Options{
|
||||
Top: 100,
|
||||
Left: 50,
|
||||
AreaWidth: 600,
|
||||
AreaHeight: 480,
|
||||
}
|
||||
runBenchmarkResize("test.jpg", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkExtractPng(b *testing.B) {
|
||||
options := Options{
|
||||
Top: 100,
|
||||
Left: 50,
|
||||
AreaWidth: 600,
|
||||
AreaHeight: 480,
|
||||
}
|
||||
runBenchmarkResize("test.png", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkExtractWebp(b *testing.B) {
|
||||
options := Options{
|
||||
Top: 100,
|
||||
Left: 50,
|
||||
AreaWidth: 600,
|
||||
AreaHeight: 480,
|
||||
}
|
||||
runBenchmarkResize("test.webp", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkZoomJpeg(b *testing.B) {
|
||||
options := Options{Zoom: 1}
|
||||
runBenchmarkResize("test.jpg", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkZoomPng(b *testing.B) {
|
||||
options := Options{Zoom: 1}
|
||||
runBenchmarkResize("test.png", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkZoomWebp(b *testing.B) {
|
||||
options := Options{Zoom: 1}
|
||||
runBenchmarkResize("test.webp", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkWatermarkJpeg(b *testing.B) {
|
||||
options := Options{
|
||||
Watermark: Watermark{
|
||||
Text: "Chuck Norris (c) 2315",
|
||||
Opacity: 0.25,
|
||||
Width: 200,
|
||||
DPI: 100,
|
||||
Margin: 150,
|
||||
Font: "sans bold 12",
|
||||
Background: Color{255, 255, 255},
|
||||
},
|
||||
}
|
||||
runBenchmarkResize("test.jpg", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkWatermarkPng(b *testing.B) {
|
||||
options := Options{
|
||||
Watermark: Watermark{
|
||||
Text: "Chuck Norris (c) 2315",
|
||||
Opacity: 0.25,
|
||||
Width: 200,
|
||||
DPI: 100,
|
||||
Margin: 150,
|
||||
Font: "sans bold 12",
|
||||
Background: Color{255, 255, 255},
|
||||
},
|
||||
}
|
||||
runBenchmarkResize("test.png", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkWatermarkWebp(b *testing.B) {
|
||||
options := Options{
|
||||
Watermark: Watermark{
|
||||
Text: "Chuck Norris (c) 2315",
|
||||
Opacity: 0.25,
|
||||
Width: 200,
|
||||
DPI: 100,
|
||||
Margin: 150,
|
||||
Font: "sans bold 12",
|
||||
Background: Color{255, 255, 255},
|
||||
},
|
||||
}
|
||||
runBenchmarkResize("test.webp", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkWatermarkImageJpeg(b *testing.B) {
|
||||
watermark := readFile("transparent.png")
|
||||
options := Options{
|
||||
WatermarkImage: WatermarkImage{
|
||||
Buf: watermark,
|
||||
Opacity: 0.25,
|
||||
Left: 100,
|
||||
Top: 100,
|
||||
},
|
||||
}
|
||||
runBenchmarkResize("test.jpg", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkWatermarkImagePng(b *testing.B) {
|
||||
watermark := readFile("transparent.png")
|
||||
options := Options{
|
||||
WatermarkImage: WatermarkImage{
|
||||
Buf: watermark,
|
||||
Opacity: 0.25,
|
||||
Left: 100,
|
||||
Top: 100,
|
||||
},
|
||||
}
|
||||
runBenchmarkResize("test.png", options, b)
|
||||
}
|
||||
|
||||
func BenchmarkWatermarkImageWebp(b *testing.B) {
|
||||
watermark := readFile("transparent.png")
|
||||
options := Options{
|
||||
WatermarkImage: WatermarkImage{
|
||||
Buf: watermark,
|
||||
Opacity: 0.25,
|
||||
Left: 100,
|
||||
Top: 100,
|
||||
},
|
||||
}
|
||||
runBenchmarkResize("test.webp", options, b)
|
||||
}
|
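All of the tests and benchmarks above drive the package through the same entry points: Read a buffer, Resize it with an Options value, then Write the result. For reference, a minimal, hedged sketch of the equivalent application-side call (file paths are illustrative; error handling is reduced to log.Fatal):

package main

import (
	"log"

	"github.com/h2non/bimg"
)

func main() {
	// Read the source image into memory; bimg operates on byte buffers.
	buf, err := bimg.Read("input.jpg")
	if err != nil {
		log.Fatal(err)
	}

	// Resize with cropping enabled, mirroring the options used in the tests.
	out, err := bimg.Resize(buf, bimg.Options{Width: 800, Height: 600, Crop: true})
	if err != nil {
		log.Fatal(err)
	}

	// Persist the processed buffer back to disk.
	if err := bimg.Write("output.jpg", out); err != nil {
		log.Fatal(err)
	}
}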
172
_vendor/src/github.com/h2non/bimg/type.go
Normal file
@ -0,0 +1,172 @@
|
||||
package bimg
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"sync"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
const (
|
||||
// UNKNOWN represents an unknown image type value.
|
||||
UNKNOWN ImageType = iota
|
||||
// JPEG represents the JPEG image type.
|
||||
JPEG
|
||||
// WEBP represents the WEBP image type.
|
||||
WEBP
|
||||
// PNG represents the PNG image type.
|
||||
PNG
|
||||
// TIFF represents the TIFF image type.
|
||||
TIFF
|
||||
// GIF represents the GIF image type.
|
||||
GIF
|
||||
// PDF represents the PDF type.
|
||||
PDF
|
||||
// SVG represents the SVG image type.
|
||||
SVG
|
||||
// MAGICK represents the libmagick-compatible generic image type.
|
||||
MAGICK
|
||||
)
|
||||
|
||||
// ImageType represents an image type value.
|
||||
type ImageType int
|
||||
|
||||
var (
|
||||
htmlCommentRegex = regexp.MustCompile("(?i)<!--([\\s\\S]*?)-->")
|
||||
svgRegex = regexp.MustCompile(`(?i)^\s*(?:<\?xml[^>]*>\s*)?(?:<!doctype svg[^>]*>\s*)?<svg[^>]*>[^*]*<\/svg>\s*$`)
|
||||
)
|
||||
|
||||
// ImageTypes maps the supported image types to their alias names.
|
||||
var ImageTypes = map[ImageType]string{
|
||||
JPEG: "jpeg",
|
||||
PNG: "png",
|
||||
WEBP: "webp",
|
||||
TIFF: "tiff",
|
||||
GIF: "gif",
|
||||
PDF: "pdf",
|
||||
SVG: "svg",
|
||||
MAGICK: "magick",
|
||||
}
|
||||
|
||||
// imageMutex is used to provide thread-safe synchronization
|
||||
// for the SupportedImageTypes map.
|
||||
var imageMutex = &sync.RWMutex{}
|
||||
|
||||
// SupportedImageType represents whether a type can be loaded and/or saved by
|
||||
// the current libvips compilation.
|
||||
type SupportedImageType struct {
|
||||
Load bool
|
||||
Save bool
|
||||
}
|
||||
|
||||
// SupportedImageTypes stores the optional image types supported
|
||||
// by the current libvips compilation.
|
||||
// Note: the map is evaluated lazily on demand, due
|
||||
// to bootstrap runtime limitations when interfacing with the C/libvips world.
|
||||
var SupportedImageTypes = map[ImageType]SupportedImageType{}
|
||||
|
||||
// discoverSupportedImageTypes is used to fill SupportedImageTypes map.
|
||||
func discoverSupportedImageTypes() {
|
||||
imageMutex.Lock()
|
||||
for imageType := range ImageTypes {
|
||||
SupportedImageTypes[imageType] = SupportedImageType{
|
||||
Load: VipsIsTypeSupported(imageType),
|
||||
Save: VipsIsTypeSupportedSave(imageType),
|
||||
}
|
||||
}
|
||||
imageMutex.Unlock()
|
||||
}
|
||||
|
||||
// isBinary checks if the given buffer looks like binary (non-text) data.
|
||||
func isBinary(buf []byte) bool {
|
||||
if len(buf) < 24 {
|
||||
return false
|
||||
}
|
||||
for i := 0; i < 24; i++ {
|
||||
charCode, _ := utf8.DecodeRuneInString(string(buf[i]))
|
||||
if charCode == 65533 || charCode <= 8 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// IsSVGImage returns true if the given buffer is a valid SVG image.
|
||||
func IsSVGImage(buf []byte) bool {
|
||||
return !isBinary(buf) && svgRegex.Match(htmlCommentRegex.ReplaceAll(buf, []byte{}))
|
||||
}
|
||||
|
||||
// DetermineImageType determines the image type format (jpeg, png, webp, tiff, gif, pdf, svg or magick)
|
||||
func DetermineImageType(buf []byte) ImageType {
|
||||
return vipsImageType(buf)
|
||||
}
|
||||
|
||||
// DetermineImageTypeName determines the image type format by name (jpeg, png, webp, tiff, gif, pdf, svg or magick)
|
||||
func DetermineImageTypeName(buf []byte) string {
|
||||
return ImageTypeName(vipsImageType(buf))
|
||||
}
|
||||
|
||||
// IsImageTypeSupportedByVips returns true if the given image type
|
||||
// is supported by current libvips compilation.
|
||||
func IsImageTypeSupportedByVips(t ImageType) SupportedImageType {
|
||||
imageMutex.RLock()
|
||||
|
||||
// Discover supported image types and cache the result
|
||||
itShouldDiscover := len(SupportedImageTypes) == 0
|
||||
if itShouldDiscover {
|
||||
imageMutex.RUnlock()
|
||||
discoverSupportedImageTypes()
|
||||
}
|
||||
|
||||
// Check if image type is actually supported
|
||||
supported, ok := SupportedImageTypes[t]
|
||||
if !itShouldDiscover {
|
||||
imageMutex.RUnlock()
|
||||
}
|
||||
|
||||
if ok {
|
||||
return supported
|
||||
}
|
||||
return SupportedImageType{Load: false, Save: false}
|
||||
}
|
||||
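// Usage sketch (illustrative, not part of the original source): the first
// support query triggers discoverSupportedImageTypes, so callers can simply
// ask and let the cache fill itself lazily:
//
//	if IsImageTypeSupportedByVips(PDF).Load {
//		// pdfload is compiled into this libvips build
//	}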
|
||||
// IsTypeSupported checks if a given image type is supported
|
||||
func IsTypeSupported(t ImageType) bool {
|
||||
_, ok := ImageTypes[t]
|
||||
return ok && IsImageTypeSupportedByVips(t).Load
|
||||
}
|
||||
|
||||
// IsTypeNameSupported checks if a given image type name is supported
|
||||
func IsTypeNameSupported(t string) bool {
|
||||
for imageType, name := range ImageTypes {
|
||||
if name == t {
|
||||
return IsImageTypeSupportedByVips(imageType).Load
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// IsTypeSupportedSave checks if a given image type is supported for saving
|
||||
func IsTypeSupportedSave(t ImageType) bool {
|
||||
_, ok := ImageTypes[t]
|
||||
return ok && IsImageTypeSupportedByVips(t).Save
|
||||
}
|
||||
|
||||
// IsTypeNameSupportedSave checks if a given image type name is supported for
|
||||
// saving
|
||||
func IsTypeNameSupportedSave(t string) bool {
|
||||
for imageType, name := range ImageTypes {
|
||||
if name == t {
|
||||
return IsImageTypeSupportedByVips(imageType).Save
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// ImageTypeName is used to get the human-friendly name of an image format.
|
||||
func ImageTypeName(t ImageType) string {
|
||||
imageType := ImageTypes[t]
|
||||
if imageType == "" {
|
||||
return "unknown"
|
||||
}
|
||||
return imageType
|
||||
}
|
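Taken together, the helpers in type.go give callers both detection and capability checks. A hedged sketch of how they might be combined from application code (the fixture path is illustrative):

package main

import (
	"fmt"
	"io/ioutil"

	"github.com/h2non/bimg"
)

func main() {
	// Load any image payload; detection below relies only on the bytes.
	buf, err := ioutil.ReadFile("fixtures/test.png")
	if err != nil {
		panic(err)
	}

	// Magic-byte based detection, falling back to "unknown".
	fmt.Println("detected:", bimg.DetermineImageTypeName(buf))

	// Capability checks; these populate SupportedImageTypes lazily.
	fmt.Println("can load png: ", bimg.IsTypeSupported(bimg.PNG))
	fmt.Println("can save webp:", bimg.IsTypeSupportedSave(bimg.WEBP))
}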
128
_vendor/src/github.com/h2non/bimg/type_test.go
Normal file
@ -0,0 +1,128 @@
|
||||
package bimg
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestDetermineImageType(t *testing.T) {
|
||||
files := []struct {
|
||||
name string
|
||||
expected ImageType
|
||||
}{
|
||||
{"test.jpg", JPEG},
|
||||
{"test.png", PNG},
|
||||
{"test.webp", WEBP},
|
||||
{"test.gif", GIF},
|
||||
{"test.pdf", PDF},
|
||||
{"test.svg", SVG},
|
||||
{"test.jp2", MAGICK},
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
img, _ := os.Open(path.Join("fixtures", file.name))
|
||||
buf, _ := ioutil.ReadAll(img)
|
||||
defer img.Close()
|
||||
|
||||
if DetermineImageType(buf) != file.expected {
|
||||
t.Fatal("Image type is not valid")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDetermineImageTypeName(t *testing.T) {
|
||||
files := []struct {
|
||||
name string
|
||||
expected string
|
||||
}{
|
||||
{"test.jpg", "jpeg"},
|
||||
{"test.png", "png"},
|
||||
{"test.webp", "webp"},
|
||||
{"test.gif", "gif"},
|
||||
{"test.pdf", "pdf"},
|
||||
{"test.svg", "svg"},
|
||||
{"test.jp2", "magick"},
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
img, _ := os.Open(path.Join("fixtures", file.name))
|
||||
buf, _ := ioutil.ReadAll(img)
|
||||
defer img.Close()
|
||||
|
||||
if DetermineImageTypeName(buf) != file.expected {
|
||||
t.Fatal("Image type is not valid")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsTypeSupported(t *testing.T) {
|
||||
types := []struct {
|
||||
name ImageType
|
||||
}{
|
||||
{JPEG}, {PNG}, {WEBP}, {GIF}, {PDF},
|
||||
}
|
||||
|
||||
for _, n := range types {
|
||||
if IsTypeSupported(n.name) == false {
|
||||
t.Fatalf("Image type %#v is not valid", ImageTypes[n.name])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsTypeNameSupported(t *testing.T) {
|
||||
types := []struct {
|
||||
name string
|
||||
expected bool
|
||||
}{
|
||||
{"jpeg", true},
|
||||
{"png", true},
|
||||
{"webp", true},
|
||||
{"gif", true},
|
||||
{"pdf", true},
|
||||
}
|
||||
|
||||
for _, n := range types {
|
||||
if IsTypeNameSupported(n.name) != n.expected {
|
||||
t.Fatalf("Image type %#v is not valid", n.name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsTypeSupportedSave(t *testing.T) {
|
||||
types := []struct {
|
||||
name ImageType
|
||||
}{
|
||||
{JPEG}, {PNG}, {WEBP},
|
||||
}
|
||||
if VipsVersion >= "8.5.0" {
|
||||
types = append(types, struct{ name ImageType }{TIFF})
|
||||
}
|
||||
|
||||
for _, n := range types {
|
||||
if IsTypeSupportedSave(n.name) == false {
|
||||
t.Fatalf("Image type %#v is not valid", ImageTypes[n.name])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsTypeNameSupportedSave(t *testing.T) {
|
||||
types := []struct {
|
||||
name string
|
||||
expected bool
|
||||
}{
|
||||
{"jpeg", true},
|
||||
{"png", true},
|
||||
{"webp", true},
|
||||
{"gif", false},
|
||||
{"pdf", false},
|
||||
{"tiff", VipsVersion >= "8.5.0"},
|
||||
}
|
||||
|
||||
for _, n := range types {
|
||||
if IsTypeNameSupportedSave(n.name) != n.expected {
|
||||
t.Fatalf("Image type %#v is not valid", n.name)
|
||||
}
|
||||
}
|
||||
}
|
4
_vendor/src/github.com/h2non/bimg/version.go
Normal file
@ -0,0 +1,4 @@
|
||||
package bimg
|
||||
|
||||
// Version represents the current package semantic version.
|
||||
const Version = "1.0.9"
|
632
_vendor/src/github.com/h2non/bimg/vips.go
Normal file
@ -0,0 +1,632 @@
|
||||
package bimg
|
||||
|
||||
/*
|
||||
#cgo pkg-config: vips
|
||||
#include "vips.h"
|
||||
*/
|
||||
import "C"
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"math"
|
||||
"os"
|
||||
"runtime"
|
||||
"strings"
|
||||
"sync"
|
||||
"unsafe"
|
||||
|
||||
d "github.com/tj/go-debug"
|
||||
)
|
||||
|
||||
// debug is internally used to emit debug log messages under the "bimg" namespace.
|
||||
var debug = d.Debug("bimg")
|
||||
|
||||
// VipsVersion exposes the current libvips semantic version
|
||||
const VipsVersion = string(C.VIPS_VERSION)
|
||||
|
||||
// VipsMajorVersion exposes the current libvips major version number
|
||||
const VipsMajorVersion = int(C.VIPS_MAJOR_VERSION)
|
||||
|
||||
// VipsMinorVersion exposes the current libvips minor version number
|
||||
const VipsMinorVersion = int(C.VIPS_MINOR_VERSION)
|
||||
|
||||
const (
|
||||
maxCacheMem = 100 * 1024 * 1024
|
||||
maxCacheSize = 500
|
||||
)
|
||||
|
||||
var (
|
||||
m sync.Mutex
|
||||
initialized bool
|
||||
)
|
||||
|
||||
// VipsMemoryInfo represents the memory stats provided by libvips.
|
||||
type VipsMemoryInfo struct {
|
||||
Memory int64
|
||||
MemoryHighwater int64
|
||||
Allocations int64
|
||||
}
|
||||
|
||||
// vipsSaveOptions represents the internal option used to talk with libvips.
|
||||
type vipsSaveOptions struct {
|
||||
Quality int
|
||||
Compression int
|
||||
Type ImageType
|
||||
Interlace bool
|
||||
NoProfile bool
|
||||
Interpretation Interpretation
|
||||
}
|
||||
|
||||
type vipsWatermarkOptions struct {
|
||||
Width C.int
|
||||
DPI C.int
|
||||
Margin C.int
|
||||
NoReplicate C.int
|
||||
Opacity C.float
|
||||
Background [3]C.double
|
||||
}
|
||||
|
||||
type vipsWatermarkImageOptions struct {
|
||||
Left C.int
|
||||
Top C.int
|
||||
Opacity C.float
|
||||
}
|
||||
|
||||
type vipsWatermarkTextOptions struct {
|
||||
Text *C.char
|
||||
Font *C.char
|
||||
}
|
||||
|
||||
func init() {
|
||||
Initialize()
|
||||
}
|
||||
|
||||
// Initialize is used to explicitly start libvips in a thread-safe way.
|
||||
// Only call this function if you have previously turned off libvips.
|
||||
func Initialize() {
|
||||
if C.VIPS_MAJOR_VERSION <= 7 && C.VIPS_MINOR_VERSION < 40 {
|
||||
panic("unsupported libvips version!")
|
||||
}
|
||||
|
||||
m.Lock()
|
||||
runtime.LockOSThread()
|
||||
defer m.Unlock()
|
||||
defer runtime.UnlockOSThread()
|
||||
|
||||
err := C.vips_init(C.CString("bimg"))
|
||||
if err != 0 {
|
||||
panic("unable to start vips!")
|
||||
}
|
||||
|
||||
// Set libvips cache params
|
||||
C.vips_cache_set_max_mem(maxCacheMem)
|
||||
C.vips_cache_set_max(maxCacheSize)
|
||||
|
||||
// Default the libvips thread concurrency limit to 1 (higher values may trigger thread-safety issues)
|
||||
// See: https://github.com/jcupitt/libvips/issues/261#issuecomment-92850414
|
||||
if os.Getenv("VIPS_CONCURRENCY") == "" {
|
||||
C.vips_concurrency_set(1)
|
||||
}
|
||||
|
||||
// Enable libvips cache tracing
|
||||
if os.Getenv("VIPS_TRACE") != "" {
|
||||
C.vips_enable_cache_set_trace()
|
||||
}
|
||||
|
||||
initialized = true
|
||||
}
|
||||
|
||||
// Shutdown is used to shut down libvips in a thread-safe way.
|
||||
// You can call this to drop caches as well.
|
||||
// If libvips was not previously initialized, the function is a no-op
|
||||
func Shutdown() {
|
||||
m.Lock()
|
||||
defer m.Unlock()
|
||||
|
||||
if initialized {
|
||||
C.vips_shutdown()
|
||||
initialized = false
|
||||
}
|
||||
}
|
||||
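// Lifecycle sketch (illustrative, not part of the original source): init()
// above already calls Initialize, so a caller outside the package typically
// only needs Shutdown, e.g. to drop libvips caches at program exit:
//
//	defer bimg.Shutdown()
//	// ... Resize / Metadata calls ...
//	// Call bimg.Initialize() again only if processing must resume afterwards.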
|
||||
// VipsDebugInfo outputs libvips collected data to stdout. Useful for debugging.
|
||||
func VipsDebugInfo() {
|
||||
C.im__print_all()
|
||||
}
|
||||
|
||||
// VipsMemory gets memory info stats from libvips (cache size, memory allocs...)
|
||||
func VipsMemory() VipsMemoryInfo {
|
||||
return VipsMemoryInfo{
|
||||
Memory: int64(C.vips_tracked_get_mem()),
|
||||
MemoryHighwater: int64(C.vips_tracked_get_mem_highwater()),
|
||||
Allocations: int64(C.vips_tracked_get_allocs()),
|
||||
}
|
||||
}
|
||||
|
||||
// VipsIsTypeSupported returns true if the given image type
|
||||
// is supported by the current libvips compilation.
|
||||
func VipsIsTypeSupported(t ImageType) bool {
|
||||
if t == JPEG {
|
||||
return int(C.vips_type_find_bridge(C.JPEG)) != 0
|
||||
}
|
||||
if t == WEBP {
|
||||
return int(C.vips_type_find_bridge(C.WEBP)) != 0
|
||||
}
|
||||
if t == PNG {
|
||||
return int(C.vips_type_find_bridge(C.PNG)) != 0
|
||||
}
|
||||
if t == GIF {
|
||||
return int(C.vips_type_find_bridge(C.GIF)) != 0
|
||||
}
|
||||
if t == PDF {
|
||||
return int(C.vips_type_find_bridge(C.PDF)) != 0
|
||||
}
|
||||
if t == SVG {
|
||||
return int(C.vips_type_find_bridge(C.SVG)) != 0
|
||||
}
|
||||
if t == TIFF {
|
||||
return int(C.vips_type_find_bridge(C.TIFF)) != 0
|
||||
}
|
||||
if t == MAGICK {
|
||||
return int(C.vips_type_find_bridge(C.MAGICK)) != 0
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// VipsIsTypeSupportedSave returns true if the given image type
|
||||
// is supported by the current libvips compilation for the
|
||||
// save operation.
|
||||
func VipsIsTypeSupportedSave(t ImageType) bool {
|
||||
if t == JPEG {
|
||||
return int(C.vips_type_find_save_bridge(C.JPEG)) != 0
|
||||
}
|
||||
if t == WEBP {
|
||||
return int(C.vips_type_find_save_bridge(C.WEBP)) != 0
|
||||
}
|
||||
if t == PNG {
|
||||
return int(C.vips_type_find_save_bridge(C.PNG)) != 0
|
||||
}
|
||||
if t == TIFF {
|
||||
return int(C.vips_type_find_save_bridge(C.TIFF)) != 0
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func vipsExifOrientation(image *C.VipsImage) int {
|
||||
return int(C.vips_exif_orientation(image))
|
||||
}
|
||||
|
||||
func vipsHasAlpha(image *C.VipsImage) bool {
|
||||
return int(C.has_alpha_channel(image)) > 0
|
||||
}
|
||||
|
||||
func vipsHasProfile(image *C.VipsImage) bool {
|
||||
return int(C.has_profile_embed(image)) > 0
|
||||
}
|
||||
|
||||
func vipsWindowSize(name string) float64 {
|
||||
cname := C.CString(name)
|
||||
defer C.free(unsafe.Pointer(cname))
|
||||
return float64(C.interpolator_window_size(cname))
|
||||
}
|
||||
|
||||
func vipsSpace(image *C.VipsImage) string {
|
||||
return C.GoString(C.vips_enum_nick_bridge(image))
|
||||
}
|
||||
|
||||
func vipsRotate(image *C.VipsImage, angle Angle) (*C.VipsImage, error) {
|
||||
var out *C.VipsImage
|
||||
defer C.g_object_unref(C.gpointer(image))
|
||||
|
||||
err := C.vips_rotate(image, &out, C.int(angle))
|
||||
if err != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func vipsFlip(image *C.VipsImage, direction Direction) (*C.VipsImage, error) {
|
||||
var out *C.VipsImage
|
||||
defer C.g_object_unref(C.gpointer(image))
|
||||
|
||||
err := C.vips_flip_bridge(image, &out, C.int(direction))
|
||||
if err != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func vipsZoom(image *C.VipsImage, zoom int) (*C.VipsImage, error) {
|
||||
var out *C.VipsImage
|
||||
defer C.g_object_unref(C.gpointer(image))
|
||||
|
||||
err := C.vips_zoom_bridge(image, &out, C.int(zoom), C.int(zoom))
|
||||
if err != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func vipsWatermark(image *C.VipsImage, w Watermark) (*C.VipsImage, error) {
|
||||
var out *C.VipsImage
|
||||
|
||||
// Defaults
|
||||
noReplicate := 0
|
||||
if w.NoReplicate {
|
||||
noReplicate = 1
|
||||
}
|
||||
|
||||
text := C.CString(w.Text)
|
||||
font := C.CString(w.Font)
|
||||
background := [3]C.double{C.double(w.Background.R), C.double(w.Background.G), C.double(w.Background.B)}
|
||||
|
||||
textOpts := vipsWatermarkTextOptions{text, font}
|
||||
opts := vipsWatermarkOptions{C.int(w.Width), C.int(w.DPI), C.int(w.Margin), C.int(noReplicate), C.float(w.Opacity), background}
|
||||
|
||||
defer C.free(unsafe.Pointer(text))
|
||||
defer C.free(unsafe.Pointer(font))
|
||||
|
||||
err := C.vips_watermark(image, &out, (*C.WatermarkTextOptions)(unsafe.Pointer(&textOpts)), (*C.WatermarkOptions)(unsafe.Pointer(&opts)))
|
||||
if err != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func vipsRead(buf []byte) (*C.VipsImage, ImageType, error) {
|
||||
var image *C.VipsImage
|
||||
imageType := vipsImageType(buf)
|
||||
|
||||
if imageType == UNKNOWN {
|
||||
return nil, UNKNOWN, errors.New("Unsupported image format")
|
||||
}
|
||||
|
||||
length := C.size_t(len(buf))
|
||||
imageBuf := unsafe.Pointer(&buf[0])
|
||||
|
||||
err := C.vips_init_image(imageBuf, length, C.int(imageType), &image)
|
||||
if err != 0 {
|
||||
return nil, UNKNOWN, catchVipsError()
|
||||
}
|
||||
|
||||
return image, imageType, nil
|
||||
}
|
||||
|
||||
func vipsColourspaceIsSupportedBuffer(buf []byte) (bool, error) {
|
||||
image, _, err := vipsRead(buf)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
C.g_object_unref(C.gpointer(image))
|
||||
return vipsColourspaceIsSupported(image), nil
|
||||
}
|
||||
|
||||
func vipsColourspaceIsSupported(image *C.VipsImage) bool {
|
||||
return int(C.vips_colourspace_issupported_bridge(image)) == 1
|
||||
}
|
||||
|
||||
func vipsInterpretationBuffer(buf []byte) (Interpretation, error) {
|
||||
image, _, err := vipsRead(buf)
|
||||
if err != nil {
|
||||
return InterpretationError, err
|
||||
}
|
||||
C.g_object_unref(C.gpointer(image))
|
||||
return vipsInterpretation(image), nil
|
||||
}
|
||||
|
||||
func vipsInterpretation(image *C.VipsImage) Interpretation {
|
||||
return Interpretation(C.vips_image_guess_interpretation_bridge(image))
|
||||
}
|
||||
|
||||
func vipsFlattenBackground(image *C.VipsImage, background Color) (*C.VipsImage, error) {
|
||||
var outImage *C.VipsImage
|
||||
|
||||
backgroundC := [3]C.double{
|
||||
C.double(background.R),
|
||||
C.double(background.G),
|
||||
C.double(background.B),
|
||||
}
|
||||
|
||||
if vipsHasAlpha(image) {
|
||||
err := C.vips_flatten_background_brigde(image, &outImage,
|
||||
backgroundC[0], backgroundC[1], backgroundC[2])
|
||||
if int(err) != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
C.g_object_unref(C.gpointer(image))
|
||||
image = outImage
|
||||
}
|
||||
|
||||
return image, nil
|
||||
}
|
||||
|
||||
func vipsPreSave(image *C.VipsImage, o *vipsSaveOptions) (*C.VipsImage, error) {
|
||||
// Remove ICC profile metadata
|
||||
if o.NoProfile {
|
||||
C.remove_profile(image)
|
||||
}
|
||||
|
||||
// Use a default interpretation and cast it to C type
|
||||
if o.Interpretation == 0 {
|
||||
o.Interpretation = InterpretationSRGB
|
||||
}
|
||||
interpretation := C.VipsInterpretation(o.Interpretation)
|
||||
|
||||
// Apply the proper colour space
|
||||
var outImage *C.VipsImage
|
||||
if vipsColourspaceIsSupported(image) {
|
||||
err := C.vips_colourspace_bridge(image, &outImage, interpretation)
|
||||
if int(err) != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
image = outImage
|
||||
}
|
||||
|
||||
return image, nil
|
||||
}
|
||||
|
||||
func vipsSave(image *C.VipsImage, o vipsSaveOptions) ([]byte, error) {
|
||||
defer C.g_object_unref(C.gpointer(image))
|
||||
|
||||
tmpImage, err := vipsPreSave(image, &o)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// When an image has an unsupported color space, vipsPreSave
|
||||
// returns the pointer of the image passed to it unmodified.
|
||||
// When this occurs, we must take care to not dereference the
|
||||
// original image a second time; we may otherwise erroneously
|
||||
// free the object twice.
|
||||
if tmpImage != image {
|
||||
defer C.g_object_unref(C.gpointer(tmpImage))
|
||||
}
|
||||
|
||||
length := C.size_t(0)
|
||||
saveErr := C.int(0)
|
||||
interlace := C.int(boolToInt(o.Interlace))
|
||||
quality := C.int(o.Quality)
|
||||
|
||||
if o.Type != 0 && !IsTypeSupportedSave(o.Type) {
|
||||
return nil, fmt.Errorf("VIPS cannot save to %#v", ImageTypes[o.Type])
|
||||
}
|
||||
var ptr unsafe.Pointer
|
||||
switch o.Type {
|
||||
case WEBP:
|
||||
saveErr = C.vips_webpsave_bridge(tmpImage, &ptr, &length, 1, quality)
|
||||
case PNG:
|
||||
saveErr = C.vips_pngsave_bridge(tmpImage, &ptr, &length, 1, C.int(o.Compression), quality, interlace)
|
||||
case TIFF:
|
||||
saveErr = C.vips_tiffsave_bridge(tmpImage, &ptr, &length)
|
||||
default:
|
||||
saveErr = C.vips_jpegsave_bridge(tmpImage, &ptr, &length, 1, quality, interlace)
|
||||
}
|
||||
|
||||
if int(saveErr) != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
|
||||
buf := C.GoBytes(ptr, C.int(length))
|
||||
|
||||
// Clean up
|
||||
C.g_free(C.gpointer(ptr))
|
||||
C.vips_error_clear()
|
||||
|
||||
return buf, nil
|
||||
}
|
||||
|
||||
func getImageBuffer(image *C.VipsImage) ([]byte, error) {
|
||||
var ptr unsafe.Pointer
|
||||
|
||||
length := C.size_t(0)
|
||||
interlace := C.int(0)
|
||||
quality := C.int(100)
|
||||
|
||||
err := C.int(0)
|
||||
err = C.vips_jpegsave_bridge(image, &ptr, &length, 1, quality, interlace)
|
||||
if int(err) != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
|
||||
defer C.g_free(C.gpointer(ptr))
|
||||
defer C.vips_error_clear()
|
||||
|
||||
return C.GoBytes(ptr, C.int(length)), nil
|
||||
}
|
||||
|
||||
func vipsExtract(image *C.VipsImage, left, top, width, height int) (*C.VipsImage, error) {
|
||||
var buf *C.VipsImage
|
||||
defer C.g_object_unref(C.gpointer(image))
|
||||
|
||||
if width > MaxSize || height > MaxSize {
|
||||
return nil, errors.New("Maximum image size exceeded")
|
||||
}
|
||||
|
||||
top, left = max(top), max(left)
|
||||
err := C.vips_extract_area_bridge(image, &buf, C.int(left), C.int(top), C.int(width), C.int(height))
|
||||
if err != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
|
||||
return buf, nil
|
||||
}
|
||||
|
||||
func vipsSmartCrop(image *C.VipsImage, width, height int) (*C.VipsImage, error) {
|
||||
var buf *C.VipsImage
|
||||
defer C.g_object_unref(C.gpointer(image))
|
||||
|
||||
if width > MaxSize || height > MaxSize {
|
||||
return nil, errors.New("Maximum image size exceeded")
|
||||
}
|
||||
|
||||
err := C.vips_smartcrop_bridge(image, &buf, C.int(width), C.int(height))
|
||||
if err != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
|
||||
return buf, nil
|
||||
}
|
||||
|
||||
func vipsShrinkJpeg(buf []byte, input *C.VipsImage, shrink int) (*C.VipsImage, error) {
|
||||
var image *C.VipsImage
|
||||
var ptr = unsafe.Pointer(&buf[0])
|
||||
defer C.g_object_unref(C.gpointer(input))
|
||||
|
||||
err := C.vips_jpegload_buffer_shrink(ptr, C.size_t(len(buf)), &image, C.int(shrink))
|
||||
if err != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
|
||||
return image, nil
|
||||
}
|
||||
|
||||
func vipsShrink(input *C.VipsImage, shrink int) (*C.VipsImage, error) {
|
||||
var image *C.VipsImage
|
||||
defer C.g_object_unref(C.gpointer(input))
|
||||
|
||||
err := C.vips_shrink_bridge(input, &image, C.double(float64(shrink)), C.double(float64(shrink)))
|
||||
if err != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
|
||||
return image, nil
|
||||
}
|
||||
|
||||
func vipsEmbed(input *C.VipsImage, left, top, width, height int, extend Extend, background Color) (*C.VipsImage, error) {
|
||||
var image *C.VipsImage
|
||||
|
||||
// Max extend value, see: http://www.vips.ecs.soton.ac.uk/supported/8.4/doc/html/libvips/libvips-conversion.html#VipsExtend
|
||||
if extend > 5 {
|
||||
extend = ExtendBackground
|
||||
}
|
||||
|
||||
defer C.g_object_unref(C.gpointer(input))
|
||||
err := C.vips_embed_bridge(input, &image, C.int(left), C.int(top), C.int(width),
|
||||
C.int(height), C.int(extend), C.double(background.R), C.double(background.G), C.double(background.B))
|
||||
if err != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
|
||||
return image, nil
|
||||
}
|
||||
|
||||
func vipsAffine(input *C.VipsImage, residualx, residualy float64, i Interpolator) (*C.VipsImage, error) {
|
||||
var image *C.VipsImage
|
||||
cstring := C.CString(i.String())
|
||||
interpolator := C.vips_interpolate_new(cstring)
|
||||
|
||||
defer C.free(unsafe.Pointer(cstring))
|
||||
defer C.g_object_unref(C.gpointer(input))
|
||||
defer C.g_object_unref(C.gpointer(interpolator))
|
||||
|
||||
err := C.vips_affine_interpolator(input, &image, C.double(residualx), 0, 0, C.double(residualy), interpolator)
|
||||
if err != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
|
||||
return image, nil
|
||||
}
|
||||
|
||||
func vipsImageType(buf []byte) ImageType {
|
||||
if len(buf) == 0 {
|
||||
return UNKNOWN
|
||||
}
|
||||
if buf[0] == 0x89 && buf[1] == 0x50 && buf[2] == 0x4E && buf[3] == 0x47 {
|
||||
return PNG
|
||||
}
|
||||
if buf[0] == 0xFF && buf[1] == 0xD8 && buf[2] == 0xFF {
|
||||
return JPEG
|
||||
}
|
||||
if IsTypeSupported(WEBP) && buf[8] == 0x57 && buf[9] == 0x45 && buf[10] == 0x42 && buf[11] == 0x50 {
|
||||
return WEBP
|
||||
}
|
||||
if IsTypeSupported(TIFF) &&
|
||||
((buf[0] == 0x49 && buf[1] == 0x49 && buf[2] == 0x2A && buf[3] == 0x0) ||
|
||||
(buf[0] == 0x4D && buf[1] == 0x4D && buf[2] == 0x0 && buf[3] == 0x2A)) {
|
||||
return TIFF
|
||||
}
|
||||
if IsTypeSupported(GIF) && buf[0] == 0x47 && buf[1] == 0x49 && buf[2] == 0x46 {
|
||||
return GIF
|
||||
}
|
||||
if IsTypeSupported(PDF) && buf[0] == 0x25 && buf[1] == 0x50 && buf[2] == 0x44 && buf[3] == 0x46 {
|
||||
return PDF
|
||||
}
|
||||
if IsTypeSupported(SVG) && IsSVGImage(buf) {
|
||||
return SVG
|
||||
}
|
||||
if IsTypeSupported(MAGICK) && strings.HasSuffix(readImageType(buf), "MagickBuffer") {
|
||||
return MAGICK
|
||||
}
|
||||
return UNKNOWN
|
||||
}
|
||||
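// Worked example (illustrative): detection is purely magic-byte based and is
// attempted in the order PNG, JPEG, WEBP, TIFF, GIF, PDF, SVG, MAGICK, so a
// buffer starting with 0x89 0x50 0x4E 0x47 ("\x89PNG") is classified as PNG
// before any of the later, libvips-availability-gated checks run.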
|
||||
func readImageType(buf []byte) string {
|
||||
length := C.size_t(len(buf))
|
||||
imageBuf := unsafe.Pointer(&buf[0])
|
||||
load := C.vips_foreign_find_load_buffer(imageBuf, length)
|
||||
return C.GoString(load)
|
||||
}
|
||||
|
||||
func catchVipsError() error {
|
||||
s := C.GoString(C.vips_error_buffer())
|
||||
C.vips_error_clear()
|
||||
C.vips_thread_shutdown()
|
||||
return errors.New(s)
|
||||
}
|
||||
|
||||
func boolToInt(b bool) int {
|
||||
if b {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func vipsGaussianBlur(image *C.VipsImage, o GaussianBlur) (*C.VipsImage, error) {
|
||||
var out *C.VipsImage
|
||||
defer C.g_object_unref(C.gpointer(image))
|
||||
|
||||
err := C.vips_gaussblur_bridge(image, &out, C.double(o.Sigma), C.double(o.MinAmpl))
|
||||
if err != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func vipsSharpen(image *C.VipsImage, o Sharpen) (*C.VipsImage, error) {
|
||||
var out *C.VipsImage
|
||||
defer C.g_object_unref(C.gpointer(image))
|
||||
|
||||
err := C.vips_sharpen_bridge(image, &out, C.int(o.Radius), C.double(o.X1), C.double(o.Y2), C.double(o.Y3), C.double(o.M1), C.double(o.M2))
|
||||
if err != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func max(x int) int {
|
||||
return int(math.Max(float64(x), 0))
|
||||
}
|
||||
|
||||
func vipsDrawWatermark(image *C.VipsImage, o WatermarkImage) (*C.VipsImage, error) {
|
||||
var out *C.VipsImage
|
||||
|
||||
watermark, _, e := vipsRead(o.Buf)
|
||||
if e != nil {
|
||||
return nil, e
|
||||
}
|
||||
|
||||
opts := vipsWatermarkImageOptions{C.int(o.Left), C.int(o.Top), C.float(o.Opacity)}
|
||||
|
||||
err := C.vips_watermark_image(image, watermark, &out, (*C.WatermarkImageOptions)(unsafe.Pointer(&opts)))
|
||||
|
||||
if err != 0 {
|
||||
return nil, catchVipsError()
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
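The unexported helpers above (vipsGaussianBlur, vipsSharpen, vipsDrawWatermark and friends) are reached from the public API through the matching fields on Options. A hedged sketch using GaussianBlur, with the sigma value borrowed from the tests earlier in this change (Sharpen is driven the same way through Options.Sharpen; paths are illustrative):

package main

import (
	"log"

	"github.com/h2non/bimg"
)

func main() {
	buf, err := bimg.Read("fixtures/test.jpg") // illustrative path
	if err != nil {
		log.Fatal(err)
	}

	// Resize and apply a Gaussian blur in the same operation.
	out, err := bimg.Resize(buf, bimg.Options{
		Width:        800,
		Height:       600,
		GaussianBlur: bimg.GaussianBlur{Sigma: 5},
	})
	if err != nil {
		log.Fatal(err)
	}

	if err := bimg.Write("fixtures/test_blur_out.jpg", out); err != nil {
		log.Fatal(err)
	}
}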
532
_vendor/src/github.com/h2non/bimg/vips.h
Normal file
@ -0,0 +1,532 @@
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <vips/vips.h>
|
||||
#include <vips/foreign.h>
|
||||
#include <vips/vips7compat.h>
|
||||
|
||||
/**
|
||||
* Starting with libvips 7.41, VIPS_ANGLE_x has been renamed to VIPS_ANGLE_Dx
|
||||
* "to help python". So we provide the macro to correctly build for versions
|
||||
* before 7.41.x.
|
||||
* https://github.com/jcupitt/libvips/blob/master/ChangeLog#L128
|
||||
*/
|
||||
|
||||
#if (VIPS_MAJOR_VERSION == 7 && VIPS_MINOR_VERSION < 41)
|
||||
#define VIPS_ANGLE_D0 VIPS_ANGLE_0
|
||||
#define VIPS_ANGLE_D90 VIPS_ANGLE_90
|
||||
#define VIPS_ANGLE_D180 VIPS_ANGLE_180
|
||||
#define VIPS_ANGLE_D270 VIPS_ANGLE_270
|
||||
#endif
|
||||
|
||||
#define EXIF_IFD0_ORIENTATION "exif-ifd0-Orientation"
|
||||
|
||||
enum types {
|
||||
UNKNOWN = 0,
|
||||
JPEG,
|
||||
WEBP,
|
||||
PNG,
|
||||
TIFF,
|
||||
GIF,
|
||||
PDF,
|
||||
SVG,
|
||||
MAGICK
|
||||
};
|
||||
|
||||
typedef struct {
|
||||
const char *Text;
|
||||
const char *Font;
|
||||
} WatermarkTextOptions;
|
||||
|
||||
typedef struct {
|
||||
int Width;
|
||||
int DPI;
|
||||
int Margin;
|
||||
int NoReplicate;
|
||||
float Opacity;
|
||||
double Background[3];
|
||||
} WatermarkOptions;
|
||||
|
||||
typedef struct {
|
||||
int Left;
|
||||
int Top;
|
||||
float Opacity;
|
||||
} WatermarkImageOptions;
|
||||
|
||||
static unsigned long
|
||||
has_profile_embed(VipsImage *image) {
|
||||
return vips_image_get_typeof(image, VIPS_META_ICC_NAME);
|
||||
}
|
||||
|
||||
static void
|
||||
remove_profile(VipsImage *image) {
|
||||
vips_image_remove(image, VIPS_META_ICC_NAME);
|
||||
}
|
||||
|
||||
static gboolean
|
||||
with_interlace(int interlace) {
|
||||
return interlace > 0 ? TRUE : FALSE;
|
||||
}
|
||||
|
||||
static int
|
||||
has_alpha_channel(VipsImage *image) {
|
||||
return (
|
||||
(image->Bands == 2 && image->Type == VIPS_INTERPRETATION_B_W) ||
|
||||
(image->Bands == 4 && image->Type != VIPS_INTERPRETATION_CMYK) ||
|
||||
(image->Bands == 5 && image->Type == VIPS_INTERPRETATION_CMYK)
|
||||
) ? 1 : 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* This function is here to handle the weird initialization of the vips lib.
|
||||
* libvips uses a macro VIPS_INIT() that calls vips__init() in versions < 7.41,
|
||||
* or calls vips_init() in versions >= 7.41.
|
||||
*
|
||||
* Anyway, it's not possible to build bimg on Debian Jessie with libvips 7.40.x,
|
||||
* as vips_init() is a macro to VIPS_INIT(), which is also a macro; hence, cgo
|
||||
* is unable to determine the return type of vips_init(), making the build impossible.
|
||||
* In order to correctly build bimg for versions < 7.41, we should undef vips_init and
|
||||
* create a vips_init() function that calls VIPS_INIT().
|
||||
*/
|
||||
|
||||
#if (VIPS_MAJOR_VERSION == 7 && VIPS_MINOR_VERSION < 41)
|
||||
#undef vips_init
|
||||
int
|
||||
vips_init(const char *argv0)
|
||||
{
|
||||
return VIPS_INIT(argv0);
|
||||
}
|
||||
#endif
|
||||
|
||||
void
|
||||
vips_enable_cache_set_trace() {
|
||||
vips_cache_set_trace(TRUE);
|
||||
}
|
||||
|
||||
int
|
||||
vips_affine_interpolator(VipsImage *in, VipsImage **out, double a, double b, double c, double d, VipsInterpolate *interpolator) {
|
||||
return vips_affine(in, out, a, b, c, d, "interpolate", interpolator, NULL);
|
||||
}
|
||||
|
||||
int
|
||||
vips_jpegload_buffer_shrink(void *buf, size_t len, VipsImage **out, int shrink) {
|
||||
return vips_jpegload_buffer(buf, len, out, "shrink", shrink, NULL);
|
||||
}
|
||||
|
||||
int
|
||||
vips_flip_bridge(VipsImage *in, VipsImage **out, int direction) {
|
||||
return vips_flip(in, out, direction, NULL);
|
||||
}
|
||||
|
||||
int
|
||||
vips_shrink_bridge(VipsImage *in, VipsImage **out, double xshrink, double yshrink) {
|
||||
return vips_shrink(in, out, xshrink, yshrink, NULL);
|
||||
}
|
||||
|
||||
int
|
||||
vips_type_find_bridge(int t) {
|
||||
if (t == GIF) {
|
||||
return vips_type_find("VipsOperation", "gifload");
|
||||
}
|
||||
if (t == PDF) {
|
||||
return vips_type_find("VipsOperation", "pdfload");
|
||||
}
|
||||
if (t == TIFF) {
|
||||
return vips_type_find("VipsOperation", "tiffload");
|
||||
}
|
||||
if (t == SVG) {
|
||||
return vips_type_find("VipsOperation", "svgload");
|
||||
}
|
||||
if (t == WEBP) {
|
||||
return vips_type_find("VipsOperation", "webpload");
|
||||
}
|
||||
if (t == PNG) {
|
||||
return vips_type_find("VipsOperation", "pngload");
|
||||
}
|
||||
if (t == JPEG) {
|
||||
return vips_type_find("VipsOperation", "jpegload");
|
||||
}
|
||||
if (t == MAGICK) {
|
||||
return vips_type_find("VipsOperation", "magickload");
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
int
|
||||
vips_type_find_save_bridge(int t) {
|
||||
if (t == TIFF) {
|
||||
return vips_type_find("VipsOperation", "tiffsave_buffer");
|
||||
}
|
||||
if (t == WEBP) {
|
||||
return vips_type_find("VipsOperation", "webpsave_buffer");
|
||||
}
|
||||
if (t == PNG) {
|
||||
return vips_type_find("VipsOperation", "pngsave_buffer");
|
||||
}
|
||||
if (t == JPEG) {
|
||||
return vips_type_find("VipsOperation", "jpegsave_buffer");
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
int
|
||||
vips_rotate(VipsImage *in, VipsImage **out, int angle) {
|
||||
int rotate = VIPS_ANGLE_D0;
|
||||
|
||||
angle %= 360;
|
||||
|
||||
if (angle == 45) {
|
||||
rotate = VIPS_ANGLE45_D45;
|
||||
} else if (angle == 90) {
|
||||
rotate = VIPS_ANGLE_D90;
|
||||
} else if (angle == 135) {
|
||||
rotate = VIPS_ANGLE45_D135;
|
||||
} else if (angle == 180) {
|
||||
rotate = VIPS_ANGLE_D180;
|
||||
} else if (angle == 225) {
|
||||
rotate = VIPS_ANGLE45_D225;
|
||||
} else if (angle == 270) {
|
||||
rotate = VIPS_ANGLE_D270;
|
||||
} else if (angle == 315) {
|
||||
rotate = VIPS_ANGLE45_D315;
|
||||
} else {
|
||||
angle = 0;
|
||||
}
|
||||
|
||||
if (angle > 0 && angle % 90 != 0) {
|
||||
return vips_rot45(in, out, "angle", rotate, NULL);
|
||||
} else {
|
||||
return vips_rot(in, out, rotate, NULL);
|
||||
}
|
||||
}
|
||||
|
||||
int
|
||||
vips_exif_orientation(VipsImage *image) {
|
||||
int orientation = 0;
|
||||
const char *exif;
|
||||
if (
|
||||
vips_image_get_typeof(image, EXIF_IFD0_ORIENTATION) != 0 &&
|
||||
!vips_image_get_string(image, EXIF_IFD0_ORIENTATION, &exif)
|
||||
) {
|
||||
orientation = atoi(&exif[0]);
|
||||
}
|
||||
return orientation;
|
||||
}
|
||||
|
||||
int
|
||||
interpolator_window_size(char const *name) {
|
||||
VipsInterpolate *interpolator = vips_interpolate_new(name);
|
||||
int window_size = vips_interpolate_get_window_size(interpolator);
|
||||
g_object_unref(interpolator);
|
||||
return window_size;
|
||||
}
|
||||
|
||||
const char *
|
||||
vips_enum_nick_bridge(VipsImage *image) {
|
||||
return vips_enum_nick(VIPS_TYPE_INTERPRETATION, image->Type);
|
||||
}
|
||||
|
||||
int
|
||||
vips_zoom_bridge(VipsImage *in, VipsImage **out, int xfac, int yfac) {
|
||||
return vips_zoom(in, out, xfac, yfac, NULL);
|
||||
}
|
||||
|
||||
int
|
||||
vips_embed_bridge(VipsImage *in, VipsImage **out, int left, int top, int width, int height, int extend, double r, double g, double b) {
|
||||
if (extend == VIPS_EXTEND_BACKGROUND) {
|
||||
double background[3] = {r, g, b};
|
||||
VipsArrayDouble *vipsBackground = vips_array_double_new(background, 3);
|
||||
return vips_embed(in, out, left, top, width, height, "extend", extend, "background", vipsBackground, NULL);
|
||||
}
|
||||
return vips_embed(in, out, left, top, width, height, "extend", extend, NULL);
|
||||
}
|
||||
|
||||
int
|
||||
vips_extract_area_bridge(VipsImage *in, VipsImage **out, int left, int top, int width, int height) {
|
||||
return vips_extract_area(in, out, left, top, width, height, NULL);
|
||||
}
|
||||
|
||||
int
|
||||
vips_colourspace_issupported_bridge(VipsImage *in) {
|
||||
return vips_colourspace_issupported(in) ? 1 : 0;
|
||||
}
|
||||
|
||||
VipsInterpretation
|
||||
vips_image_guess_interpretation_bridge(VipsImage *in) {
|
||||
return vips_image_guess_interpretation(in);
|
||||
}
|
||||
|
||||
int
|
||||
vips_colourspace_bridge(VipsImage *in, VipsImage **out, VipsInterpretation space) {
|
||||
return vips_colourspace(in, out, space, NULL);
|
||||
}
|
||||
|
||||
int
|
||||
vips_jpegsave_bridge(VipsImage *in, void **buf, size_t *len, int strip, int quality, int interlace) {
|
||||
return vips_jpegsave_buffer(in, buf, len,
|
||||
"strip", strip,
|
||||
"Q", quality,
|
||||
"optimize_coding", TRUE,
|
||||
"interlace", with_interlace(interlace),
|
||||
NULL
|
||||
);
|
||||
}
|
||||
|
||||
int
|
||||
vips_pngsave_bridge(VipsImage *in, void **buf, size_t *len, int strip, int compression, int quality, int interlace) {
|
||||
#if (VIPS_MAJOR_VERSION >= 8 || (VIPS_MAJOR_VERSION >= 7 && VIPS_MINOR_VERSION >= 42))
|
||||
return vips_pngsave_buffer(in, buf, len,
|
||||
"strip", FALSE,
|
||||
"compression", compression,
|
||||
"interlace", with_interlace(interlace),
|
||||
"filter", VIPS_FOREIGN_PNG_FILTER_NONE,
|
||||
NULL
|
||||
);
|
||||
#else
|
||||
return vips_pngsave_buffer(in, buf, len,
|
||||
"strip", FALSE,
|
||||
"compression", compression,
|
||||
"interlace", with_interlace(interlace),
|
||||
NULL
|
||||
);
|
||||
#endif
|
||||
}
|
||||
|
||||
int
|
||||
vips_webpsave_bridge(VipsImage *in, void **buf, size_t *len, int strip, int quality) {
|
||||
return vips_webpsave_buffer(in, buf, len,
|
||||
"strip", strip,
|
||||
"Q", quality,
|
||||
NULL
|
||||
);
|
||||
}
|
||||
|
||||
int
|
||||
vips_tiffsave_bridge(VipsImage *in, void **buf, size_t *len) {
|
||||
#if (VIPS_MAJOR_VERSION >= 8 && VIPS_MINOR_VERSION >= 5)
|
||||
return vips_tiffsave_buffer(in, buf, len, NULL);
|
||||
#else
|
||||
return 0;
|
||||
#endif
|
||||
}
|
||||
|
||||
int
|
||||
vips_is_16bit (VipsInterpretation interpretation) {
|
||||
return interpretation == VIPS_INTERPRETATION_RGB16 || interpretation == VIPS_INTERPRETATION_GREY16;
|
||||
}
|
||||
|
||||
int
|
||||
vips_flatten_background_brigde(VipsImage *in, VipsImage **out, double r, double g, double b) {
|
||||
if (vips_is_16bit(in->Type)) {
|
||||
r = 65535 * r / 255;
|
||||
g = 65535 * g / 255;
|
||||
b = 65535 * b / 255;
|
||||
}
|
||||
|
||||
double background[3] = {r, g, b};
|
||||
VipsArrayDouble *vipsBackground = vips_array_double_new(background, 3);
|
||||
|
||||
return vips_flatten(in, out,
|
||||
"background", vipsBackground,
|
||||
"max_alpha", vips_is_16bit(in->Type) ? 65535.0 : 255.0,
|
||||
NULL
|
||||
);
|
||||
}
|
||||
|
||||
int
|
||||
vips_init_image (void *buf, size_t len, int imageType, VipsImage **out) {
|
||||
int code = 1;
|
||||
|
||||
if (imageType == JPEG) {
|
||||
code = vips_jpegload_buffer(buf, len, out, "access", VIPS_ACCESS_RANDOM, NULL);
|
||||
} else if (imageType == PNG) {
|
||||
code = vips_pngload_buffer(buf, len, out, "access", VIPS_ACCESS_RANDOM, NULL);
|
||||
} else if (imageType == WEBP) {
|
||||
code = vips_webpload_buffer(buf, len, out, "access", VIPS_ACCESS_RANDOM, NULL);
|
||||
} else if (imageType == TIFF) {
|
||||
code = vips_tiffload_buffer(buf, len, out, "access", VIPS_ACCESS_RANDOM, NULL);
|
||||
#if (VIPS_MAJOR_VERSION >= 8)
|
||||
#if (VIPS_MINOR_VERSION >= 3)
|
||||
} else if (imageType == GIF) {
|
||||
code = vips_gifload_buffer(buf, len, out, "access", VIPS_ACCESS_RANDOM, NULL);
|
||||
} else if (imageType == PDF) {
|
||||
code = vips_pdfload_buffer(buf, len, out, "access", VIPS_ACCESS_RANDOM, NULL);
|
||||
} else if (imageType == SVG) {
|
||||
code = vips_svgload_buffer(buf, len, out, "access", VIPS_ACCESS_RANDOM, NULL);
|
||||
#endif
|
||||
} else if (imageType == MAGICK) {
|
||||
code = vips_magickload_buffer(buf, len, out, "access", VIPS_ACCESS_RANDOM, NULL);
|
||||
#endif
|
||||
}
|
||||
|
||||
return code;
|
||||
}
|
||||
|
||||
int
|
||||
vips_watermark_replicate (VipsImage *orig, VipsImage *in, VipsImage **out) {
|
||||
VipsImage *cache = vips_image_new();
|
||||
|
||||
if (
|
||||
vips_replicate(in, &cache,
|
||||
1 + orig->Xsize / in->Xsize,
|
||||
1 + orig->Ysize / in->Ysize, NULL) ||
|
||||
vips_crop(cache, out, 0, 0, orig->Xsize, orig->Ysize, NULL)
|
||||
) {
|
||||
g_object_unref(cache);
|
||||
return 1;
|
||||
}
|
||||
|
||||
g_object_unref(cache);
|
||||
return 0;
|
||||
}
|
||||
|
||||
int
|
||||
vips_watermark(VipsImage *in, VipsImage **out, WatermarkTextOptions *to, WatermarkOptions *o) {
|
||||
double ones[3] = { 1, 1, 1 };
|
||||
|
||||
VipsImage *base = vips_image_new();
|
||||
VipsImage **t = (VipsImage **) vips_object_local_array(VIPS_OBJECT(base), 10);
|
||||
t[0] = in;
|
||||
|
||||
// Make the mask.
|
||||
if (
|
||||
vips_text(&t[1], to->Text,
|
||||
"width", o->Width,
|
||||
"dpi", o->DPI,
|
||||
"font", to->Font,
|
||||
NULL) ||
|
||||
vips_linear1(t[1], &t[2], o->Opacity, 0.0, NULL) ||
|
||||
vips_cast(t[2], &t[3], VIPS_FORMAT_UCHAR, NULL) ||
|
||||
vips_embed(t[3], &t[4], 100, 100, t[3]->Xsize + o->Margin, t[3]->Ysize + o->Margin, NULL)
|
||||
) {
|
||||
g_object_unref(base);
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Replicate if necessary
|
||||
if (o->NoReplicate != 1) {
|
||||
VipsImage *cache = vips_image_new();
|
||||
if (vips_watermark_replicate(t[0], t[4], &cache)) {
|
||||
g_object_unref(cache);
|
||||
g_object_unref(base);
|
||||
return 1;
|
||||
}
|
||||
g_object_unref(t[4]);
|
||||
t[4] = cache;
|
||||
}
|
||||
|
||||
// Make the constant image to paint the text with.
|
||||
if (
|
||||
vips_black(&t[5], 1, 1, NULL) ||
|
||||
vips_linear(t[5], &t[6], ones, o->Background, 3, NULL) ||
|
||||
vips_cast(t[6], &t[7], VIPS_FORMAT_UCHAR, NULL) ||
|
||||
vips_copy(t[7], &t[8], "interpretation", t[0]->Type, NULL) ||
|
||||
vips_embed(t[8], &t[9], 0, 0, t[0]->Xsize, t[0]->Ysize, "extend", VIPS_EXTEND_COPY, NULL)
|
||||
) {
|
||||
g_object_unref(base);
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Blend the mask and text and write to output.
|
||||
if (vips_ifthenelse(t[4], t[9], t[0], out, "blend", TRUE, NULL)) {
|
||||
g_object_unref(base);
|
||||
return 1;
|
||||
}
|
||||
|
||||
g_object_unref(base);
|
||||
return 0;
|
||||
}
|
||||
|
||||
int
|
||||
vips_gaussblur_bridge(VipsImage *in, VipsImage **out, double sigma, double min_ampl) {
|
||||
#if (VIPS_MAJOR_VERSION == 7 && VIPS_MINOR_VERSION < 41)
|
||||
return vips_gaussblur(in, out, (int) sigma, NULL);
|
||||
#else
|
||||
return vips_gaussblur(in, out, sigma, NULL, "min_ampl", min_ampl, NULL);
|
||||
#endif
|
||||
}
|
||||
|
||||
int
|
||||
vips_sharpen_bridge(VipsImage *in, VipsImage **out, int radius, double x1, double y2, double y3, double m1, double m2) {
|
||||
#if (VIPS_MAJOR_VERSION == 7 && VIPS_MINOR_VERSION < 41)
|
||||
return vips_sharpen(in, out, radius, x1, y2, y3, m1, m2, NULL);
|
||||
#else
|
||||
return vips_sharpen(in, out, "radius", radius, "x1", x1, "y2", y2, "y3", y3, "m1", m1, "m2", m2, NULL);
|
||||
#endif
|
||||
}
|
||||
|
||||
int
|
||||
vips_add_band(VipsImage *in, VipsImage **out, double c) {
|
||||
#if (VIPS_MAJOR_VERSION > 8 || (VIPS_MAJOR_VERSION >= 8 && VIPS_MINOR_VERSION >= 2))
|
||||
return vips_bandjoin_const1(in, out, c, NULL);
|
||||
#else
|
||||
VipsImage *base = vips_image_new();
|
||||
if (
|
||||
vips_black(&base, in->Xsize, in->Ysize, NULL) ||
|
||||
vips_linear1(base, &base, 1, c, NULL)) {
|
||||
g_object_unref(base);
|
||||
return 1;
|
||||
}
|
||||
g_object_unref(base);
|
||||
return vips_bandjoin2(in, base, out, c, NULL);
|
||||
#endif
|
||||
}
|
||||
|
||||
int
|
||||
vips_watermark_image(VipsImage *in, VipsImage *sub, VipsImage **out, WatermarkImageOptions *o) {
|
||||
VipsImage *base = vips_image_new();
|
||||
VipsImage **t = (VipsImage **) vips_object_local_array(VIPS_OBJECT(base), 10);
|
||||
|
||||
// add in and sub for unreffing and later use
|
||||
t[0] = in;
|
||||
t[1] = sub;
|
||||
|
||||
if (has_alpha_channel(in) == 0) {
|
||||
vips_add_band(in, &t[0], 255.0);
|
||||
// in is no longer in the array and won't be unreffed, so add it at the end
|
||||
t[8] = in;
|
||||
}
|
||||
|
||||
if (has_alpha_channel(sub) == 0) {
|
||||
vips_add_band(sub, &t[1], 255.0);
|
||||
// sub is no longer in the array and won't be unreffed, so add it at the end
|
||||
t[9] = sub;
|
||||
}
|
||||
|
||||
// Place watermark image in the right place and size it to the size of the
|
||||
// image that should be watermarked
|
||||
if (
|
||||
vips_embed(t[1], &t[2], o->Left, o->Top, t[0]->Xsize, t[0]->Ysize, NULL)) {
|
||||
g_object_unref(base);
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Create a mask image based on the alpha band from the watermark image
|
||||
// and place it in the right position
|
||||
if (
|
||||
vips_extract_band(t[1], &t[3], t[1]->Bands - 1, "n", 1, NULL) ||
|
||||
vips_linear1(t[3], &t[4], o->Opacity, 0.0, NULL) ||
|
||||
vips_cast(t[4], &t[5], VIPS_FORMAT_UCHAR, NULL) ||
|
||||
vips_copy(t[5], &t[6], "interpretation", t[0]->Type, NULL) ||
|
||||
vips_embed(t[6], &t[7], o->Left, o->Top, t[0]->Xsize, t[0]->Ysize, NULL)) {
|
||||
g_object_unref(base);
|
||||
return 1;
|
||||
}
|
||||
|
||||
// Blend the mask and watermark image and write to output.
|
||||
if (vips_ifthenelse(t[7], t[2], t[0], out, "blend", TRUE, NULL)) {
|
||||
g_object_unref(base);
|
||||
return 1;
|
||||
}
|
||||
|
||||
g_object_unref(base);
|
||||
return 0;
|
||||
}
|
||||
|
||||
int
|
||||
vips_smartcrop_bridge(VipsImage *in, VipsImage **out, int width, int height) {
|
||||
#if (VIPS_MAJOR_VERSION >= 8 && VIPS_MINOR_VERSION >= 5)
|
||||
return vips_smartcrop(in, out, width, height, NULL);
|
||||
#else
|
||||
return 0;
|
||||
#endif
|
||||
}
|
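vips_rotate above reduces the requested angle modulo 360 and dispatches to vips_rot for right angles or vips_rot45 for 45-degree steps. From the Go side that path is reached through Options.Rotate; a hedged usage sketch (paths are illustrative):

package main

import (
	"log"

	"github.com/h2non/bimg"
)

func main() {
	buf, err := bimg.Read("fixtures/test.jpg") // illustrative path
	if err != nil {
		log.Fatal(err)
	}

	// 270 stays a right-angle rotation (vips_rot); 45 would go through vips_rot45.
	out, err := bimg.Resize(buf, bimg.Options{Rotate: 270})
	if err != nil {
		log.Fatal(err)
	}

	if err := bimg.Write("fixtures/test_rotated_out.jpg", out); err != nil {
		log.Fatal(err)
	}
}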
163
_vendor/src/github.com/h2non/bimg/vips_test.go
Normal file
@ -0,0 +1,163 @@
|
||||
package bimg
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestVipsRead(t *testing.T) {
|
||||
files := []struct {
|
||||
name string
|
||||
expected ImageType
|
||||
}{
|
||||
{"test.jpg", JPEG},
|
||||
{"test.png", PNG},
|
||||
{"test.webp", WEBP},
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
image, imageType, _ := vipsRead(readImage(file.name))
|
||||
if image == nil {
|
||||
t.Fatal("Empty image")
|
||||
}
|
||||
if imageType != file.expected {
|
||||
t.Fatal("Invalid image type")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestVipsSave(t *testing.T) {
|
||||
types := [...]ImageType{JPEG, PNG, WEBP}
|
||||
|
||||
for _, typ := range types {
|
||||
image, _, _ := vipsRead(readImage("test.jpg"))
|
||||
options := vipsSaveOptions{Quality: 95, Type: typ}
|
||||
|
||||
buf, err := vipsSave(image, options)
|
||||
if err != nil {
|
||||
t.Fatalf("Cannot save the image as '%v'", ImageTypes[typ])
|
||||
}
|
||||
if len(buf) == 0 {
|
||||
t.Fatalf("Empty saved '%v' image", ImageTypes[typ])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestVipsSaveTiff(t *testing.T) {
|
||||
if !IsTypeSupportedSave(TIFF) {
|
||||
t.Skipf("Format %#v is not supported", ImageTypes[TIFF])
|
||||
}
|
||||
image, _, _ := vipsRead(readImage("test.jpg"))
|
||||
options := vipsSaveOptions{Quality: 95, Type: TIFF}
|
||||
buf, _ := vipsSave(image, options)
|
||||
|
||||
if len(buf) == 0 {
|
||||
t.Fatalf("Empty saved '%v' image", ImageTypes[TIFF])
|
||||
}
|
||||
}
|
||||
|
||||
func TestVipsRotate(t *testing.T) {
|
||||
files := []struct {
|
||||
name string
|
||||
rotate Angle
|
||||
}{
|
||||
{"test.jpg", D90},
|
||||
{"test_square.jpg", D45},
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
image, _, _ := vipsRead(readImage(file.name))
|
||||
|
||||
newImg, err := vipsRotate(image, file.rotate)
|
||||
if err != nil {
|
||||
t.Fatal("Cannot rotate the image")
|
||||
}
|
||||
|
||||
buf, _ := vipsSave(newImg, vipsSaveOptions{Quality: 95})
|
||||
if len(buf) == 0 {
|
||||
t.Fatal("Empty image")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestVipsZoom(t *testing.T) {
|
||||
image, _, _ := vipsRead(readImage("test.jpg"))
|
||||
|
||||
newImg, err := vipsZoom(image, 1)
|
||||
if err != nil {
|
||||
t.Fatal("Cannot save the image")
|
||||
}
|
||||
|
||||
buf, _ := vipsSave(newImg, vipsSaveOptions{Quality: 95})
|
||||
if len(buf) == 0 {
|
||||
t.Fatal("Empty image")
|
||||
}
|
||||
}
|
||||
|
||||
func TestVipsWatermark(t *testing.T) {
|
||||
image, _, _ := vipsRead(readImage("test.jpg"))
|
||||
|
||||
watermark := Watermark{
|
||||
Text: "Copy me if you can",
|
||||
Font: "sans bold 12",
|
||||
Opacity: 0.5,
|
||||
Width: 200,
|
||||
DPI: 100,
|
||||
Margin: 100,
|
||||
Background: Color{255, 255, 255},
|
||||
}
|
||||
|
||||
newImg, err := vipsWatermark(image, watermark)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot add watermark: %s", err)
|
||||
}
|
||||
|
||||
buf, _ := vipsSave(newImg, vipsSaveOptions{Quality: 95})
|
||||
if len(buf) == 0 {
|
||||
t.Fatal("Empty image")
|
||||
}
|
||||
}
|
||||
|
||||
func TestVipsWatermarkWithImage(t *testing.T) {
|
||||
image, _, _ := vipsRead(readImage("test.jpg"))
|
||||
|
||||
watermark := readImage("transparent.png")
|
||||
|
||||
options := WatermarkImage{Left: 100, Top: 100, Opacity: 1.0, Buf: watermark}
|
||||
newImg, err := vipsDrawWatermark(image, options)
|
||||
if err != nil {
|
||||
t.Errorf("Cannot add watermark: %s", err)
|
||||
}
|
||||
|
||||
buf, _ := vipsSave(newImg, vipsSaveOptions{Quality: 95})
|
||||
if len(buf) == 0 {
|
||||
t.Fatal("Empty image")
|
||||
}
|
||||
}
|
||||
|
||||
func TestVipsImageType(t *testing.T) {
|
||||
imgType := vipsImageType(readImage("test.jpg"))
|
||||
if imgType != JPEG {
|
||||
t.Fatal("Invalid image type")
|
||||
}
|
||||
}
|
||||
|
||||
func TestVipsMemory(t *testing.T) {
|
||||
mem := VipsMemory()
|
||||
|
||||
if mem.Memory < 1024 {
|
||||
t.Fatal("Invalid memory")
|
||||
}
|
||||
if mem.Allocations == 0 {
|
||||
t.Fatal("Invalid memory allocations")
|
||||
}
|
||||
}
|
||||
|
||||
func readImage(file string) []byte {
|
||||
img, _ := os.Open(path.Join("fixtures", file))
|
||||
buf, _ := ioutil.ReadAll(img)
|
||||
defer img.Close()
|
||||
return buf
|
||||
}
|
21
_vendor/src/github.com/tj/go-debug/History.md
Normal file
@ -0,0 +1,21 @@
|
||||
|
||||
v2.0.0 / 2014-10-22
|
||||
==================
|
||||
|
||||
* remove live toggling feature. Closes #10
|
||||
|
||||
1.1.1 / 2014-07-07
|
||||
==================
|
||||
|
||||
* fix: dispose socket. Closes #1
|
||||
|
||||
1.1.0 / 2014-06-29
|
||||
==================
|
||||
|
||||
* add unix domain socket live debugging support
|
||||
* add support for enabling/disabling at runtime
|
||||
|
||||
0.1.0 / 2014-05-24
|
||||
==================
|
||||
|
||||
* add global and debug relative deltas
|
8
_vendor/src/github.com/tj/go-debug/Makefile
Normal file
@ -0,0 +1,8 @@
|
||||
|
||||
test:
|
||||
@go test
|
||||
|
||||
bench:
|
||||
@go test -bench=.
|
||||
|
||||
.PHONY: bench test
|
75
_vendor/src/github.com/tj/go-debug/Readme.md
Normal file
@ -0,0 +1,75 @@
|
||||
|
||||
# go-debug
|
||||
|
||||
Conditional debug logging for Go libraries.
|
||||
|
||||
View the [docs](http://godoc.org/github.com/tj/go-debug).
|
||||
|
||||
## Installation
|
||||
|
||||
```
|
||||
$ go get github.com/tj/go-debug
|
||||
```
|
||||
|
||||
## Example
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import . "github.com/tj/go-debug"
|
||||
import "time"
|
||||
|
||||
var debug = Debug("single")
|
||||
|
||||
func main() {
|
||||
for {
|
||||
debug("sending mail")
|
||||
debug("send email to %s", "tobi@segment.io")
|
||||
debug("send email to %s", "loki@segment.io")
|
||||
debug("send email to %s", "jane@segment.io")
|
||||
time.Sleep(500 * time.Millisecond)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
If you run the program with the `DEBUG=*` environment variable set, you will see:
|
||||
|
||||
```
|
||||
15:58:15.115 34us 33us single - sending mail
|
||||
15:58:15.116 3us 3us single - send email to tobi@segment.io
|
||||
15:58:15.116 1us 1us single - send email to loki@segment.io
|
||||
15:58:15.116 1us 1us single - send email to jane@segment.io
|
||||
15:58:15.620 504ms 504ms single - sending mail
|
||||
15:58:15.620 6us 6us single - send email to tobi@segment.io
|
||||
15:58:15.620 4us 4us single - send email to loki@segment.io
|
||||
15:58:15.620 4us 4us single - send email to jane@segment.io
|
||||
15:58:16.123 503ms 503ms single - sending mail
|
||||
15:58:16.123 7us 7us single - send email to tobi@segment.io
|
||||
15:58:16.123 4us 4us single - send email to loki@segment.io
|
||||
15:58:16.123 4us 4us single - send email to jane@segment.io
|
||||
15:58:16.625 501ms 501ms single - sending mail
|
||||
15:58:16.625 4us 4us single - send email to tobi@segment.io
|
||||
15:58:16.625 4us 4us single - send email to loki@segment.io
|
||||
15:58:16.625 5us 5us single - send email to jane@segment.io
|
||||
```
|
||||
|
||||
A timestamp and two deltas are displayed. The timestamp consists of hour, minute, second and microseconds. The left-most delta is relative to the previous debug call of any name, followed by a delta specific to that debug function. These may be useful to identify timing issues and potential bottlenecks.
|
||||
|
||||
## The DEBUG environment variable
|
||||
|
||||
Executables often support `--verbose` flags for conditional logging, however
|
||||
libraries typically either require altering your code to enable logging,
|
||||
or simply omit logging altogether. go-debug allows conditional logging
|
||||
to be enabled via the __DEBUG__ environment variable, where one or more
|
||||
patterns may be specified.
|
||||
|
||||
For example, suppose your application has several models and you want
|
||||
to output logs for users only; you might use `DEBUG=models:user`. In contrast,
|
||||
if you wanted to see all of the database activity you might use `DEBUG=models:*`,
|
||||
or, if you love being swamped with logs, `DEBUG=*`. You may also specify a comma-delimited list of names, for example `DEBUG=mongo,redis:*`.
|
||||
|
||||
The name given _should_ be the package name; however, you can use whatever you like.
|
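
As a minimal sketch of how the patterns above map onto the package API (the `models:user` and `models:post` names are invented for illustration):

```go
package main

import . "github.com/tj/go-debug"

// Two illustrative namespaces, mirroring the DEBUG=models:* example above.
var userLog = Debug("models:user")
var postLog = Debug("models:post")

func main() {
	// Equivalent to running the program with DEBUG=models:user set:
	// only the "models:user" logger matches the enabled pattern.
	Enable("models:user")

	userLog("loaded user %d", 42) // written to stderr
	postLog("loaded post %d", 7)  // suppressed, name does not match
}
```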
||||
|
||||
# License
|
||||
|
||||
MIT
|
128
_vendor/src/github.com/tj/go-debug/debug.go
Normal file
@ -0,0 +1,128 @@
|
||||
package debug
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"math/rand"
|
||||
"os"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
var (
|
||||
writer io.Writer = os.Stderr
|
||||
reg *regexp.Regexp
|
||||
m sync.Mutex
|
||||
enabled = false
|
||||
)
|
||||
|
||||
// Debugger function.
|
||||
type DebugFunction func(string, ...interface{})
|
||||
|
||||
// Terminal colors used at random.
|
||||
var colors []string = []string{
|
||||
"31",
|
||||
"32",
|
||||
"33",
|
||||
"34",
|
||||
"35",
|
||||
"36",
|
||||
}
|
||||
|
||||
// Initialize with DEBUG environment variable.
|
||||
func init() {
|
||||
env := os.Getenv("DEBUG")
|
||||
|
||||
if "" != env {
|
||||
Enable(env)
|
||||
}
|
||||
}
|
||||
|
||||
// SetWriter replaces the default of os.Stderr with `w`.
|
||||
func SetWriter(w io.Writer) {
|
||||
m.Lock()
|
||||
defer m.Unlock()
|
||||
writer = w
|
||||
}
|
||||
|
||||
// Disable all pattern matching. This function is thread-safe.
|
||||
func Disable() {
|
||||
m.Lock()
|
||||
defer m.Unlock()
|
||||
enabled = false
|
||||
}
|
||||
|
||||
// Enable the given debug `pattern`. Patterns take a glob-like form,
|
||||
// for example if you wanted to enable everything, just use "*", or
|
||||
// if you had a library named mongodb you could use "mongodb:connection",
|
||||
// or "mongodb:*". Multiple matches can be made with a comma, for
|
||||
// example "mongo*,redis*".
|
||||
//
|
||||
// This function is thread-safe.
|
||||
func Enable(pattern string) {
|
||||
m.Lock()
|
||||
defer m.Unlock()
|
||||
pattern = regexp.QuoteMeta(pattern)
|
||||
pattern = strings.Replace(pattern, "\\*", ".*?", -1)
|
||||
pattern = strings.Replace(pattern, ",", "|", -1)
|
||||
pattern = "^(" + pattern + ")$"
|
||||
reg = regexp.MustCompile(pattern)
|
||||
enabled = true
|
||||
}
|
||||
|
||||
// Debug creates a debug function for `name` which you call
|
||||
// with printf-style arguments in your application or library.
|
||||
func Debug(name string) DebugFunction {
|
||||
prevGlobal := time.Now()
|
||||
color := colors[rand.Intn(len(colors))]
|
||||
prev := time.Now()
|
||||
|
||||
return func(format string, args ...interface{}) {
|
||||
if !enabled {
|
||||
return
|
||||
}
|
||||
|
||||
if !reg.MatchString(name) {
|
||||
return
|
||||
}
|
||||
|
||||
d := deltas(prevGlobal, prev, color)
|
||||
fmt.Fprintf(writer, d+" \033["+color+"m"+name+"\033[0m - "+format+"\n", args...)
|
||||
prevGlobal = time.Now()
|
||||
prev = time.Now()
|
||||
}
|
||||
}
|
||||
|
||||
// Return formatting for deltas.
|
||||
func deltas(prevGlobal, prev time.Time, color string) string {
|
||||
now := time.Now()
|
||||
global := now.Sub(prevGlobal).Nanoseconds()
|
||||
delta := now.Sub(prev).Nanoseconds()
|
||||
ts := now.UTC().Format("15:04:05.000")
|
||||
deltas := fmt.Sprintf("%s %-6s \033["+color+"m%-6s", ts, humanizeNano(global), humanizeNano(delta))
|
||||
return deltas
|
||||
}
|
||||
|
||||
// Humanize nanoseconds to a string.
|
||||
func humanizeNano(n int64) string {
|
||||
var suffix string
|
||||
|
||||
switch {
|
||||
case n > 1e9:
|
||||
n /= 1e9
|
||||
suffix = "s"
|
||||
case n > 1e6:
|
||||
n /= 1e6
|
||||
suffix = "ms"
|
||||
case n > 1e3:
|
||||
n /= 1e3
|
||||
suffix = "us"
|
||||
default:
|
||||
suffix = "ns"
|
||||
}
|
||||
|
||||
return strconv.Itoa(int(n)) + suffix
|
||||
}
|
152
_vendor/src/github.com/tj/go-debug/debug_test.go
Normal file
@ -0,0 +1,152 @@
|
||||
package debug
|
||||
|
||||
import "testing"
|
||||
import "strings"
|
||||
import "bytes"
|
||||
import "time"
|
||||
|
||||
func assertContains(t *testing.T, str, substr string) {
|
||||
if !strings.Contains(str, substr) {
|
||||
t.Fatalf("expected %q to contain %q", str, substr)
|
||||
}
|
||||
}
|
||||
|
||||
func assertNotContains(t *testing.T, str, substr string) {
|
||||
if strings.Contains(str, substr) {
|
||||
t.Fatalf("expected %q to not contain %q", str, substr)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDefault(t *testing.T) {
|
||||
var b []byte
|
||||
buf := bytes.NewBuffer(b)
|
||||
SetWriter(buf)
|
||||
|
||||
debug := Debug("foo")
|
||||
debug("something")
|
||||
debug("here")
|
||||
debug("whoop")
|
||||
|
||||
if buf.Len() != 0 {
|
||||
t.Fatalf("buffer should be empty")
|
||||
}
|
||||
}
|
||||
|
||||
func TestEnable(t *testing.T) {
|
||||
var b []byte
|
||||
buf := bytes.NewBuffer(b)
|
||||
SetWriter(buf)
|
||||
|
||||
Enable("foo")
|
||||
|
||||
debug := Debug("foo")
|
||||
debug("something")
|
||||
debug("here")
|
||||
debug("whoop")
|
||||
|
||||
if buf.Len() == 0 {
|
||||
t.Fatalf("buffer should have output")
|
||||
}
|
||||
|
||||
str := string(buf.Bytes())
|
||||
assertContains(t, str, "something")
|
||||
assertContains(t, str, "here")
|
||||
assertContains(t, str, "whoop")
|
||||
}
|
||||
|
||||
func TestMultipleOneEnabled(t *testing.T) {
|
||||
var b []byte
|
||||
buf := bytes.NewBuffer(b)
|
||||
SetWriter(buf)
|
||||
|
||||
Enable("foo")
|
||||
|
||||
foo := Debug("foo")
|
||||
foo("foo")
|
||||
|
||||
bar := Debug("bar")
|
||||
bar("bar")
|
||||
|
||||
if buf.Len() == 0 {
|
||||
t.Fatalf("buffer should have output")
|
||||
}
|
||||
|
||||
str := string(buf.Bytes())
|
||||
assertContains(t, str, "foo")
|
||||
assertNotContains(t, str, "bar")
|
||||
}
|
||||
|
||||
func TestMultipleEnabled(t *testing.T) {
|
||||
var b []byte
|
||||
buf := bytes.NewBuffer(b)
|
||||
SetWriter(buf)
|
||||
|
||||
Enable("foo,bar")
|
||||
|
||||
foo := Debug("foo")
|
||||
foo("foo")
|
||||
|
||||
bar := Debug("bar")
|
||||
bar("bar")
|
||||
|
||||
if buf.Len() == 0 {
|
||||
t.Fatalf("buffer should have output")
|
||||
}
|
||||
|
||||
str := string(buf.Bytes())
|
||||
assertContains(t, str, "foo")
|
||||
assertContains(t, str, "bar")
|
||||
}
|
||||
|
||||
func TestEnableDisable(t *testing.T) {
|
||||
var b []byte
|
||||
buf := bytes.NewBuffer(b)
|
||||
SetWriter(buf)
|
||||
|
||||
Enable("foo,bar")
|
||||
Disable()
|
||||
|
||||
foo := Debug("foo")
|
||||
foo("foo")
|
||||
|
||||
bar := Debug("bar")
|
||||
bar("bar")
|
||||
|
||||
if buf.Len() != 0 {
|
||||
t.Fatalf("buffer should not have output")
|
||||
}
|
||||
}
|
||||
|
||||
func ExampleEnable() {
|
||||
Enable("mongo:connection")
|
||||
Enable("mongo:*")
|
||||
Enable("foo,bar,baz")
|
||||
Enable("*")
|
||||
}
|
||||
|
||||
func ExampleDebug() {
|
||||
var debug = Debug("single")
|
||||
|
||||
for {
|
||||
debug("sending mail")
|
||||
debug("send email to %s", "tobi@segment.io")
|
||||
debug("send email to %s", "loki@segment.io")
|
||||
debug("send email to %s", "jane@segment.io")
|
||||
time.Sleep(500 * time.Millisecond)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkDisabled(b *testing.B) {
|
||||
debug := Debug("something")
|
||||
for i := 0; i < b.N; i++ {
|
||||
debug("stuff")
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkNonMatch(b *testing.B) {
|
||||
debug := Debug("something")
|
||||
Enable("nonmatch")
|
||||
for i := 0; i < b.N; i++ {
|
||||
debug("stuff")
|
||||
}
|
||||
}
|
25
_vendor/src/github.com/tj/go-debug/example/multiple.go
Normal file
@ -0,0 +1,25 @@
|
||||
package main
|
||||
|
||||
import . "github.com/visionmedia/go-debug"
|
||||
import "time"
|
||||
|
||||
var a = Debug("multiple:a")
|
||||
var b = Debug("multiple:b")
|
||||
var c = Debug("multiple:c")
|
||||
|
||||
func work(debug DebugFunction, delay time.Duration) {
|
||||
for {
|
||||
debug("doing stuff")
|
||||
time.Sleep(delay)
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
q := make(chan bool)
|
||||
|
||||
go work(a, 1000*time.Millisecond)
|
||||
go work(b, 250*time.Millisecond)
|
||||
go work(c, 100*time.Millisecond)
|
||||
|
||||
<-q
|
||||
}
|
16
_vendor/src/github.com/tj/go-debug/example/single.go
Normal file
@ -0,0 +1,16 @@
|
||||
package main
|
||||
|
||||
import . "github.com/visionmedia/go-debug"
|
||||
import "time"
|
||||
|
||||
var debug = Debug("single")
|
||||
|
||||
func main() {
|
||||
for {
|
||||
debug("sending mail")
|
||||
debug("send email to %s", "tobi@segment.io")
|
||||
debug("send email to %s", "loki@segment.io")
|
||||
debug("send email to %s", "jane@segment.io")
|
||||
time.Sleep(500 * time.Millisecond)
|
||||
}
|
||||
}
|
9
_vendor/src/gopkg.in/yaml.v2/.travis.yml
Normal file
@ -0,0 +1,9 @@
|
||||
language: go
|
||||
|
||||
go:
|
||||
- 1.4
|
||||
- 1.5
|
||||
- 1.6
|
||||
- tip
|
||||
|
||||
go_import_path: gopkg.in/yaml.v2
|
13
_vendor/src/gopkg.in/yaml.v2/LICENSE
Normal file
@ -0,0 +1,13 @@
|
||||
Copyright 2011-2016 Canonical Ltd.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
31
_vendor/src/gopkg.in/yaml.v2/LICENSE.libyaml
Normal file
@ -0,0 +1,31 @@
|
||||
The following files were ported to Go from C files of libyaml, and thus
|
||||
are still covered by their original copyright and license:
|
||||
|
||||
apic.go
|
||||
emitterc.go
|
||||
parserc.go
|
||||
readerc.go
|
||||
scannerc.go
|
||||
writerc.go
|
||||
yamlh.go
|
||||
yamlprivateh.go
|
||||
|
||||
Copyright (c) 2006 Kirill Simonov
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
131
_vendor/src/gopkg.in/yaml.v2/README.md
Normal file
@ -0,0 +1,131 @@
|
||||
# YAML support for the Go language
|
||||
|
||||
Introduction
|
||||
------------
|
||||
|
||||
The yaml package enables Go programs to comfortably encode and decode YAML
|
||||
values. It was developed within [Canonical](https://www.canonical.com) as
|
||||
part of the [juju](https://juju.ubuntu.com) project, and is based on a
|
||||
pure Go port of the well-known [libyaml](http://pyyaml.org/wiki/LibYAML)
|
||||
C library to parse and generate YAML data quickly and reliably.
|
||||
|
||||
Compatibility
|
||||
-------------
|
||||
|
||||
The yaml package supports most of YAML 1.1 and 1.2, including support for
|
||||
anchors, tags, map merging, etc. Multi-document unmarshalling is not yet
|
||||
implemented, and base-60 floats from YAML 1.1 are purposefully not
|
||||
supported since they're a poor design and are gone in YAML 1.2.
|
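
For instance, anchors and map merging can be decoded as in this minimal sketch (the `defaults`/`development` keys are invented for the example):

```Go
package main

import (
	"fmt"
	"log"

	"gopkg.in/yaml.v2"
)

// A document using an anchor (&defaults), an alias (*defaults) and the
// merge key (<<); the key names are illustrative only.
var doc = `
defaults: &defaults
  adapter: postgres
  host: localhost
development:
  <<: *defaults
  database: dev_db
`

func main() {
	var cfg map[string]map[string]string
	if err := yaml.Unmarshal([]byte(doc), &cfg); err != nil {
		log.Fatalf("error: %v", err)
	}
	// adapter and host are merged into development from the anchored defaults.
	fmt.Println(cfg["development"]["adapter"], cfg["development"]["database"])
}
```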
||||
|
||||
Installation and usage
|
||||
----------------------
|
||||
|
||||
The import path for the package is *gopkg.in/yaml.v2*.
|
||||
|
||||
To install it, run:
|
||||
|
||||
go get gopkg.in/yaml.v2
|
||||
|
||||
API documentation
|
||||
-----------------
|
||||
|
||||
If opened in a browser, the import path itself leads to the API documentation:
|
||||
|
||||
* [https://gopkg.in/yaml.v2](https://gopkg.in/yaml.v2)
|
||||
|
||||
API stability
|
||||
-------------
|
||||
|
||||
The package API for yaml v2 will remain stable as described in [gopkg.in](https://gopkg.in).
|
||||
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
The yaml package is licensed under the Apache License 2.0. Please see the LICENSE file for details.
|
||||
|
||||
|
||||
Example
|
||||
-------
|
||||
|
||||
```Go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
var data = `
|
||||
a: Easy!
|
||||
b:
|
||||
c: 2
|
||||
d: [3, 4]
|
||||
`
|
||||
|
||||
type T struct {
|
||||
A string
|
||||
B struct {
|
||||
RenamedC int `yaml:"c"`
|
||||
D []int `yaml:",flow"`
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
t := T{}
|
||||
|
||||
err := yaml.Unmarshal([]byte(data), &t)
|
||||
if err != nil {
|
||||
log.Fatalf("error: %v", err)
|
||||
}
|
||||
fmt.Printf("--- t:\n%v\n\n", t)
|
||||
|
||||
d, err := yaml.Marshal(&t)
|
||||
if err != nil {
|
||||
log.Fatalf("error: %v", err)
|
||||
}
|
||||
fmt.Printf("--- t dump:\n%s\n\n", string(d))
|
||||
|
||||
m := make(map[interface{}]interface{})
|
||||
|
||||
err = yaml.Unmarshal([]byte(data), &m)
|
||||
if err != nil {
|
||||
log.Fatalf("error: %v", err)
|
||||
}
|
||||
fmt.Printf("--- m:\n%v\n\n", m)
|
||||
|
||||
d, err = yaml.Marshal(&m)
|
||||
if err != nil {
|
||||
log.Fatalf("error: %v", err)
|
||||
}
|
||||
fmt.Printf("--- m dump:\n%s\n\n", string(d))
|
||||
}
|
||||
```
|
||||
|
||||
This example will generate the following output:
|
||||
|
||||
```
|
||||
--- t:
|
||||
{Easy! {2 [3 4]}}
|
||||
|
||||
--- t dump:
|
||||
a: Easy!
|
||||
b:
|
||||
c: 2
|
||||
d: [3, 4]
|
||||
|
||||
|
||||
--- m:
|
||||
map[a:Easy! b:map[c:2 d:[3 4]]]
|
||||
|
||||
--- m dump:
|
||||
a: Easy!
|
||||
b:
|
||||
c: 2
|
||||
d:
|
||||
- 3
|
||||
- 4
|
||||
```
|
||||
|
742
_vendor/src/gopkg.in/yaml.v2/apic.go
Normal file
@ -0,0 +1,742 @@
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"io"
|
||||
"os"
|
||||
)
|
||||
|
||||
func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) {
|
||||
//fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens))
|
||||
|
||||
// Check if we can move the queue at the beginning of the buffer.
|
||||
if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) {
|
||||
if parser.tokens_head != len(parser.tokens) {
|
||||
copy(parser.tokens, parser.tokens[parser.tokens_head:])
|
||||
}
|
||||
parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head]
|
||||
parser.tokens_head = 0
|
||||
}
|
||||
parser.tokens = append(parser.tokens, *token)
|
||||
if pos < 0 {
|
||||
return
|
||||
}
|
||||
copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:])
|
||||
parser.tokens[parser.tokens_head+pos] = *token
|
||||
}
|
||||
|
||||
// Create a new parser object.
|
||||
func yaml_parser_initialize(parser *yaml_parser_t) bool {
|
||||
*parser = yaml_parser_t{
|
||||
raw_buffer: make([]byte, 0, input_raw_buffer_size),
|
||||
buffer: make([]byte, 0, input_buffer_size),
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Destroy a parser object.
|
||||
func yaml_parser_delete(parser *yaml_parser_t) {
|
||||
*parser = yaml_parser_t{}
|
||||
}
|
||||
|
||||
// String read handler.
|
||||
func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
|
||||
if parser.input_pos == len(parser.input) {
|
||||
return 0, io.EOF
|
||||
}
|
||||
n = copy(buffer, parser.input[parser.input_pos:])
|
||||
parser.input_pos += n
|
||||
return n, nil
|
||||
}
|
||||
|
||||
// File read handler.
|
||||
func yaml_file_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
|
||||
return parser.input_file.Read(buffer)
|
||||
}
|
||||
|
||||
// Set a string input.
|
||||
func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) {
|
||||
if parser.read_handler != nil {
|
||||
panic("must set the input source only once")
|
||||
}
|
||||
parser.read_handler = yaml_string_read_handler
|
||||
parser.input = input
|
||||
parser.input_pos = 0
|
||||
}
|
||||
|
||||
// Set a file input.
|
||||
func yaml_parser_set_input_file(parser *yaml_parser_t, file *os.File) {
|
||||
if parser.read_handler != nil {
|
||||
panic("must set the input source only once")
|
||||
}
|
||||
parser.read_handler = yaml_file_read_handler
|
||||
parser.input_file = file
|
||||
}
|
||||
|
||||
// Set the source encoding.
|
||||
func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) {
|
||||
if parser.encoding != yaml_ANY_ENCODING {
|
||||
panic("must set the encoding only once")
|
||||
}
|
||||
parser.encoding = encoding
|
||||
}
|
||||
|
||||
// Create a new emitter object.
|
||||
func yaml_emitter_initialize(emitter *yaml_emitter_t) bool {
|
||||
*emitter = yaml_emitter_t{
|
||||
buffer: make([]byte, output_buffer_size),
|
||||
raw_buffer: make([]byte, 0, output_raw_buffer_size),
|
||||
states: make([]yaml_emitter_state_t, 0, initial_stack_size),
|
||||
events: make([]yaml_event_t, 0, initial_queue_size),
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Destroy an emitter object.
|
||||
func yaml_emitter_delete(emitter *yaml_emitter_t) {
|
||||
*emitter = yaml_emitter_t{}
|
||||
}
|
||||
|
||||
// String write handler.
|
||||
func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
|
||||
*emitter.output_buffer = append(*emitter.output_buffer, buffer...)
|
||||
return nil
|
||||
}
|
||||
|
||||
// File write handler.
|
||||
func yaml_file_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
|
||||
_, err := emitter.output_file.Write(buffer)
|
||||
return err
|
||||
}
|
||||
|
||||
// Set a string output.
|
||||
func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) {
|
||||
if emitter.write_handler != nil {
|
||||
panic("must set the output target only once")
|
||||
}
|
||||
emitter.write_handler = yaml_string_write_handler
|
||||
emitter.output_buffer = output_buffer
|
||||
}
|
||||
|
||||
// Set a file output.
|
||||
func yaml_emitter_set_output_file(emitter *yaml_emitter_t, file io.Writer) {
|
||||
if emitter.write_handler != nil {
|
||||
panic("must set the output target only once")
|
||||
}
|
||||
emitter.write_handler = yaml_file_write_handler
|
||||
emitter.output_file = file
|
||||
}
|
||||
|
||||
// Set the output encoding.
|
||||
func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) {
|
||||
if emitter.encoding != yaml_ANY_ENCODING {
|
||||
panic("must set the output encoding only once")
|
||||
}
|
||||
emitter.encoding = encoding
|
||||
}
|
||||
|
||||
// Set the canonical output style.
|
||||
func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) {
|
||||
emitter.canonical = canonical
|
||||
}
|
||||
|
||||
// Set the indentation increment.
|
||||
func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) {
|
||||
if indent < 2 || indent > 9 {
|
||||
indent = 2
|
||||
}
|
||||
emitter.best_indent = indent
|
||||
}
|
||||
|
||||
// Set the preferred line width.
|
||||
func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) {
|
||||
if width < 0 {
|
||||
width = -1
|
||||
}
|
||||
emitter.best_width = width
|
||||
}
|
||||
|
||||
// Set if unescaped non-ASCII characters are allowed.
|
||||
func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) {
|
||||
emitter.unicode = unicode
|
||||
}
|
||||
|
||||
// Set the preferred line break character.
|
||||
func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) {
|
||||
emitter.line_break = line_break
|
||||
}
|
||||
|
||||
///*
|
||||
// * Destroy a token object.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(void)
|
||||
//yaml_token_delete(yaml_token_t *token)
|
||||
//{
|
||||
// assert(token); // Non-NULL token object expected.
|
||||
//
|
||||
// switch (token.type)
|
||||
// {
|
||||
// case YAML_TAG_DIRECTIVE_TOKEN:
|
||||
// yaml_free(token.data.tag_directive.handle);
|
||||
// yaml_free(token.data.tag_directive.prefix);
|
||||
// break;
|
||||
//
|
||||
// case YAML_ALIAS_TOKEN:
|
||||
// yaml_free(token.data.alias.value);
|
||||
// break;
|
||||
//
|
||||
// case YAML_ANCHOR_TOKEN:
|
||||
// yaml_free(token.data.anchor.value);
|
||||
// break;
|
||||
//
|
||||
// case YAML_TAG_TOKEN:
|
||||
// yaml_free(token.data.tag.handle);
|
||||
// yaml_free(token.data.tag.suffix);
|
||||
// break;
|
||||
//
|
||||
// case YAML_SCALAR_TOKEN:
|
||||
// yaml_free(token.data.scalar.value);
|
||||
// break;
|
||||
//
|
||||
// default:
|
||||
// break;
|
||||
// }
|
||||
//
|
||||
// memset(token, 0, sizeof(yaml_token_t));
|
||||
//}
|
||||
//
|
||||
///*
|
||||
// * Check if a string is a valid UTF-8 sequence.
|
||||
// *
|
||||
// * Check 'reader.c' for more details on UTF-8 encoding.
|
||||
// */
|
||||
//
|
||||
//static int
|
||||
//yaml_check_utf8(yaml_char_t *start, size_t length)
|
||||
//{
|
||||
// yaml_char_t *end = start+length;
|
||||
// yaml_char_t *pointer = start;
|
||||
//
|
||||
// while (pointer < end) {
|
||||
// unsigned char octet;
|
||||
// unsigned int width;
|
||||
// unsigned int value;
|
||||
// size_t k;
|
||||
//
|
||||
// octet = pointer[0];
|
||||
// width = (octet & 0x80) == 0x00 ? 1 :
|
||||
// (octet & 0xE0) == 0xC0 ? 2 :
|
||||
// (octet & 0xF0) == 0xE0 ? 3 :
|
||||
// (octet & 0xF8) == 0xF0 ? 4 : 0;
|
||||
// value = (octet & 0x80) == 0x00 ? octet & 0x7F :
|
||||
// (octet & 0xE0) == 0xC0 ? octet & 0x1F :
|
||||
// (octet & 0xF0) == 0xE0 ? octet & 0x0F :
|
||||
// (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0;
|
||||
// if (!width) return 0;
|
||||
// if (pointer+width > end) return 0;
|
||||
// for (k = 1; k < width; k ++) {
|
||||
// octet = pointer[k];
|
||||
// if ((octet & 0xC0) != 0x80) return 0;
|
||||
// value = (value << 6) + (octet & 0x3F);
|
||||
// }
|
||||
// if (!((width == 1) ||
|
||||
// (width == 2 && value >= 0x80) ||
|
||||
// (width == 3 && value >= 0x800) ||
|
||||
// (width == 4 && value >= 0x10000))) return 0;
|
||||
//
|
||||
// pointer += width;
|
||||
// }
|
||||
//
|
||||
// return 1;
|
||||
//}
|
||||
//
|
||||
|
||||
// Create STREAM-START.
|
||||
func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_STREAM_START_EVENT,
|
||||
encoding: encoding,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create STREAM-END.
|
||||
func yaml_stream_end_event_initialize(event *yaml_event_t) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_STREAM_END_EVENT,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create DOCUMENT-START.
|
||||
func yaml_document_start_event_initialize(event *yaml_event_t, version_directive *yaml_version_directive_t,
|
||||
tag_directives []yaml_tag_directive_t, implicit bool) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_DOCUMENT_START_EVENT,
|
||||
version_directive: version_directive,
|
||||
tag_directives: tag_directives,
|
||||
implicit: implicit,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create DOCUMENT-END.
|
||||
func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_DOCUMENT_END_EVENT,
|
||||
implicit: implicit,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
///*
|
||||
// * Create ALIAS.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(int)
|
||||
//yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t)
|
||||
//{
|
||||
// mark yaml_mark_t = { 0, 0, 0 }
|
||||
// anchor_copy *yaml_char_t = NULL
|
||||
//
|
||||
// assert(event) // Non-NULL event object is expected.
|
||||
// assert(anchor) // Non-NULL anchor is expected.
|
||||
//
|
||||
// if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0
|
||||
//
|
||||
// anchor_copy = yaml_strdup(anchor)
|
||||
// if (!anchor_copy)
|
||||
// return 0
|
||||
//
|
||||
// ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark)
|
||||
//
|
||||
// return 1
|
||||
//}
|
||||
|
||||
// Create SCALAR.
|
||||
func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_SCALAR_EVENT,
|
||||
anchor: anchor,
|
||||
tag: tag,
|
||||
value: value,
|
||||
implicit: plain_implicit,
|
||||
quoted_implicit: quoted_implicit,
|
||||
style: yaml_style_t(style),
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create SEQUENCE-START.
|
||||
func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_SEQUENCE_START_EVENT,
|
||||
anchor: anchor,
|
||||
tag: tag,
|
||||
implicit: implicit,
|
||||
style: yaml_style_t(style),
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create SEQUENCE-END.
|
||||
func yaml_sequence_end_event_initialize(event *yaml_event_t) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_SEQUENCE_END_EVENT,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create MAPPING-START.
|
||||
func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_MAPPING_START_EVENT,
|
||||
anchor: anchor,
|
||||
tag: tag,
|
||||
implicit: implicit,
|
||||
style: yaml_style_t(style),
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create MAPPING-END.
|
||||
func yaml_mapping_end_event_initialize(event *yaml_event_t) bool {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_MAPPING_END_EVENT,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Destroy an event object.
|
||||
func yaml_event_delete(event *yaml_event_t) {
|
||||
*event = yaml_event_t{}
|
||||
}
|
||||
|
||||
///*
|
||||
// * Create a document object.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(int)
|
||||
//yaml_document_initialize(document *yaml_document_t,
|
||||
// version_directive *yaml_version_directive_t,
|
||||
// tag_directives_start *yaml_tag_directive_t,
|
||||
// tag_directives_end *yaml_tag_directive_t,
|
||||
// start_implicit int, end_implicit int)
|
||||
//{
|
||||
// struct {
|
||||
// error yaml_error_type_t
|
||||
// } context
|
||||
// struct {
|
||||
// start *yaml_node_t
|
||||
// end *yaml_node_t
|
||||
// top *yaml_node_t
|
||||
// } nodes = { NULL, NULL, NULL }
|
||||
// version_directive_copy *yaml_version_directive_t = NULL
|
||||
// struct {
|
||||
// start *yaml_tag_directive_t
|
||||
// end *yaml_tag_directive_t
|
||||
// top *yaml_tag_directive_t
|
||||
// } tag_directives_copy = { NULL, NULL, NULL }
|
||||
// value yaml_tag_directive_t = { NULL, NULL }
|
||||
// mark yaml_mark_t = { 0, 0, 0 }
|
||||
//
|
||||
// assert(document) // Non-NULL document object is expected.
|
||||
// assert((tag_directives_start && tag_directives_end) ||
|
||||
// (tag_directives_start == tag_directives_end))
|
||||
// // Valid tag directives are expected.
|
||||
//
|
||||
// if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error
|
||||
//
|
||||
// if (version_directive) {
|
||||
// version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t))
|
||||
// if (!version_directive_copy) goto error
|
||||
// version_directive_copy.major = version_directive.major
|
||||
// version_directive_copy.minor = version_directive.minor
|
||||
// }
|
||||
//
|
||||
// if (tag_directives_start != tag_directives_end) {
|
||||
// tag_directive *yaml_tag_directive_t
|
||||
// if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE))
|
||||
// goto error
|
||||
// for (tag_directive = tag_directives_start
|
||||
// tag_directive != tag_directives_end; tag_directive ++) {
|
||||
// assert(tag_directive.handle)
|
||||
// assert(tag_directive.prefix)
|
||||
// if (!yaml_check_utf8(tag_directive.handle,
|
||||
// strlen((char *)tag_directive.handle)))
|
||||
// goto error
|
||||
// if (!yaml_check_utf8(tag_directive.prefix,
|
||||
// strlen((char *)tag_directive.prefix)))
|
||||
// goto error
|
||||
// value.handle = yaml_strdup(tag_directive.handle)
|
||||
// value.prefix = yaml_strdup(tag_directive.prefix)
|
||||
// if (!value.handle || !value.prefix) goto error
|
||||
// if (!PUSH(&context, tag_directives_copy, value))
|
||||
// goto error
|
||||
// value.handle = NULL
|
||||
// value.prefix = NULL
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy,
|
||||
// tag_directives_copy.start, tag_directives_copy.top,
|
||||
// start_implicit, end_implicit, mark, mark)
|
||||
//
|
||||
// return 1
|
||||
//
|
||||
//error:
|
||||
// STACK_DEL(&context, nodes)
|
||||
// yaml_free(version_directive_copy)
|
||||
// while (!STACK_EMPTY(&context, tag_directives_copy)) {
|
||||
// value yaml_tag_directive_t = POP(&context, tag_directives_copy)
|
||||
// yaml_free(value.handle)
|
||||
// yaml_free(value.prefix)
|
||||
// }
|
||||
// STACK_DEL(&context, tag_directives_copy)
|
||||
// yaml_free(value.handle)
|
||||
// yaml_free(value.prefix)
|
||||
//
|
||||
// return 0
|
||||
//}
|
||||
//
|
||||
///*
|
||||
// * Destroy a document object.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(void)
|
||||
//yaml_document_delete(document *yaml_document_t)
|
||||
//{
|
||||
// struct {
|
||||
// error yaml_error_type_t
|
||||
// } context
|
||||
// tag_directive *yaml_tag_directive_t
|
||||
//
|
||||
// context.error = YAML_NO_ERROR // Eliminate a compiler warning.
|
||||
//
|
||||
// assert(document) // Non-NULL document object is expected.
|
||||
//
|
||||
// while (!STACK_EMPTY(&context, document.nodes)) {
|
||||
// node yaml_node_t = POP(&context, document.nodes)
|
||||
// yaml_free(node.tag)
|
||||
// switch (node.type) {
|
||||
// case YAML_SCALAR_NODE:
|
||||
// yaml_free(node.data.scalar.value)
|
||||
// break
|
||||
// case YAML_SEQUENCE_NODE:
|
||||
// STACK_DEL(&context, node.data.sequence.items)
|
||||
// break
|
||||
// case YAML_MAPPING_NODE:
|
||||
// STACK_DEL(&context, node.data.mapping.pairs)
|
||||
// break
|
||||
// default:
|
||||
// assert(0) // Should not happen.
|
||||
// }
|
||||
// }
|
||||
// STACK_DEL(&context, document.nodes)
|
||||
//
|
||||
// yaml_free(document.version_directive)
|
||||
// for (tag_directive = document.tag_directives.start
|
||||
// tag_directive != document.tag_directives.end
|
||||
// tag_directive++) {
|
||||
// yaml_free(tag_directive.handle)
|
||||
// yaml_free(tag_directive.prefix)
|
||||
// }
|
||||
// yaml_free(document.tag_directives.start)
|
||||
//
|
||||
// memset(document, 0, sizeof(yaml_document_t))
|
||||
//}
|
||||
//
|
||||
///**
|
||||
// * Get a document node.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(yaml_node_t *)
|
||||
//yaml_document_get_node(document *yaml_document_t, index int)
|
||||
//{
|
||||
// assert(document) // Non-NULL document object is expected.
|
||||
//
|
||||
// if (index > 0 && document.nodes.start + index <= document.nodes.top) {
|
||||
// return document.nodes.start + index - 1
|
||||
// }
|
||||
// return NULL
|
||||
//}
|
||||
//
|
||||
///**
|
||||
// * Get the root object.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(yaml_node_t *)
|
||||
//yaml_document_get_root_node(document *yaml_document_t)
|
||||
//{
|
||||
// assert(document) // Non-NULL document object is expected.
|
||||
//
|
||||
// if (document.nodes.top != document.nodes.start) {
|
||||
// return document.nodes.start
|
||||
// }
|
||||
// return NULL
|
||||
//}
|
||||
//
|
||||
///*
|
||||
// * Add a scalar node to a document.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(int)
|
||||
//yaml_document_add_scalar(document *yaml_document_t,
|
||||
// tag *yaml_char_t, value *yaml_char_t, length int,
|
||||
// style yaml_scalar_style_t)
|
||||
//{
|
||||
// struct {
|
||||
// error yaml_error_type_t
|
||||
// } context
|
||||
// mark yaml_mark_t = { 0, 0, 0 }
|
||||
// tag_copy *yaml_char_t = NULL
|
||||
// value_copy *yaml_char_t = NULL
|
||||
// node yaml_node_t
|
||||
//
|
||||
// assert(document) // Non-NULL document object is expected.
|
||||
// assert(value) // Non-NULL value is expected.
|
||||
//
|
||||
// if (!tag) {
|
||||
// tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG
|
||||
// }
|
||||
//
|
||||
// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
|
||||
// tag_copy = yaml_strdup(tag)
|
||||
// if (!tag_copy) goto error
|
||||
//
|
||||
// if (length < 0) {
|
||||
// length = strlen((char *)value)
|
||||
// }
|
||||
//
|
||||
// if (!yaml_check_utf8(value, length)) goto error
|
||||
// value_copy = yaml_malloc(length+1)
|
||||
// if (!value_copy) goto error
|
||||
// memcpy(value_copy, value, length)
|
||||
// value_copy[length] = '\0'
|
||||
//
|
||||
// SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark)
|
||||
// if (!PUSH(&context, document.nodes, node)) goto error
|
||||
//
|
||||
// return document.nodes.top - document.nodes.start
|
||||
//
|
||||
//error:
|
||||
// yaml_free(tag_copy)
|
||||
// yaml_free(value_copy)
|
||||
//
|
||||
// return 0
|
||||
//}
|
||||
//
|
||||
///*
|
||||
// * Add a sequence node to a document.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(int)
|
||||
//yaml_document_add_sequence(document *yaml_document_t,
|
||||
// tag *yaml_char_t, style yaml_sequence_style_t)
|
||||
//{
|
||||
// struct {
|
||||
// error yaml_error_type_t
|
||||
// } context
|
||||
// mark yaml_mark_t = { 0, 0, 0 }
|
||||
// tag_copy *yaml_char_t = NULL
|
||||
// struct {
|
||||
// start *yaml_node_item_t
|
||||
// end *yaml_node_item_t
|
||||
// top *yaml_node_item_t
|
||||
// } items = { NULL, NULL, NULL }
|
||||
// node yaml_node_t
|
||||
//
|
||||
// assert(document) // Non-NULL document object is expected.
|
||||
//
|
||||
// if (!tag) {
|
||||
// tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG
|
||||
// }
|
||||
//
|
||||
// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
|
||||
// tag_copy = yaml_strdup(tag)
|
||||
// if (!tag_copy) goto error
|
||||
//
|
||||
// if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error
|
||||
//
|
||||
// SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end,
|
||||
// style, mark, mark)
|
||||
// if (!PUSH(&context, document.nodes, node)) goto error
|
||||
//
|
||||
// return document.nodes.top - document.nodes.start
|
||||
//
|
||||
//error:
|
||||
// STACK_DEL(&context, items)
|
||||
// yaml_free(tag_copy)
|
||||
//
|
||||
// return 0
|
||||
//}
|
||||
//
|
||||
///*
|
||||
// * Add a mapping node to a document.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(int)
|
||||
//yaml_document_add_mapping(document *yaml_document_t,
|
||||
// tag *yaml_char_t, style yaml_mapping_style_t)
|
||||
//{
|
||||
// struct {
|
||||
// error yaml_error_type_t
|
||||
// } context
|
||||
// mark yaml_mark_t = { 0, 0, 0 }
|
||||
// tag_copy *yaml_char_t = NULL
|
||||
// struct {
|
||||
// start *yaml_node_pair_t
|
||||
// end *yaml_node_pair_t
|
||||
// top *yaml_node_pair_t
|
||||
// } pairs = { NULL, NULL, NULL }
|
||||
// node yaml_node_t
|
||||
//
|
||||
// assert(document) // Non-NULL document object is expected.
|
||||
//
|
||||
// if (!tag) {
|
||||
// tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG
|
||||
// }
|
||||
//
|
||||
// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error
|
||||
// tag_copy = yaml_strdup(tag)
|
||||
// if (!tag_copy) goto error
|
||||
//
|
||||
// if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error
|
||||
//
|
||||
// MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end,
|
||||
// style, mark, mark)
|
||||
// if (!PUSH(&context, document.nodes, node)) goto error
|
||||
//
|
||||
// return document.nodes.top - document.nodes.start
|
||||
//
|
||||
//error:
|
||||
// STACK_DEL(&context, pairs)
|
||||
// yaml_free(tag_copy)
|
||||
//
|
||||
// return 0
|
||||
//}
|
||||
//
|
||||
///*
|
||||
// * Append an item to a sequence node.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(int)
|
||||
//yaml_document_append_sequence_item(document *yaml_document_t,
|
||||
// sequence int, item int)
|
||||
//{
|
||||
// struct {
|
||||
// error yaml_error_type_t
|
||||
// } context
|
||||
//
|
||||
// assert(document) // Non-NULL document is required.
|
||||
// assert(sequence > 0
|
||||
// && document.nodes.start + sequence <= document.nodes.top)
|
||||
// // Valid sequence id is required.
|
||||
// assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE)
|
||||
// // A sequence node is required.
|
||||
// assert(item > 0 && document.nodes.start + item <= document.nodes.top)
|
||||
// // Valid item id is required.
|
||||
//
|
||||
// if (!PUSH(&context,
|
||||
// document.nodes.start[sequence-1].data.sequence.items, item))
|
||||
// return 0
|
||||
//
|
||||
// return 1
|
||||
//}
|
||||
//
|
||||
///*
|
||||
// * Append a pair of a key and a value to a mapping node.
|
||||
// */
|
||||
//
|
||||
//YAML_DECLARE(int)
|
||||
//yaml_document_append_mapping_pair(document *yaml_document_t,
|
||||
// mapping int, key int, value int)
|
||||
//{
|
||||
// struct {
|
||||
// error yaml_error_type_t
|
||||
// } context
|
||||
//
|
||||
// pair yaml_node_pair_t
|
||||
//
|
||||
// assert(document) // Non-NULL document is required.
|
||||
// assert(mapping > 0
|
||||
// && document.nodes.start + mapping <= document.nodes.top)
|
||||
// // Valid mapping id is required.
|
||||
// assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE)
|
||||
// // A mapping node is required.
|
||||
// assert(key > 0 && document.nodes.start + key <= document.nodes.top)
|
||||
// // Valid key id is required.
|
||||
// assert(value > 0 && document.nodes.start + value <= document.nodes.top)
|
||||
// // Valid value id is required.
|
||||
//
|
||||
// pair.key = key
|
||||
// pair.value = value
|
||||
//
|
||||
// if (!PUSH(&context,
|
||||
// document.nodes.start[mapping-1].data.mapping.pairs, pair))
|
||||
// return 0
|
||||
//
|
||||
// return 1
|
||||
//}
|
||||
//
|
||||
//
|
682
_vendor/src/gopkg.in/yaml.v2/decode.go
Normal file
@ -0,0 +1,682 @@
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"encoding"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"math"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
|
||||
documentNode = 1 << iota
|
||||
mappingNode
|
||||
sequenceNode
|
||||
scalarNode
|
||||
aliasNode
|
||||
)
|
||||
|
||||
type node struct {
|
||||
kind int
|
||||
line, column int
|
||||
tag string
|
||||
value string
|
||||
implicit bool
|
||||
children []*node
|
||||
anchors map[string]*node
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Parser, produces a node tree out of a libyaml event stream.
|
||||
|
||||
type parser struct {
|
||||
parser yaml_parser_t
|
||||
event yaml_event_t
|
||||
doc *node
|
||||
}
|
||||
|
||||
func newParser(b []byte) *parser {
|
||||
p := parser{}
|
||||
if !yaml_parser_initialize(&p.parser) {
|
||||
panic("failed to initialize YAML emitter")
|
||||
}
|
||||
|
||||
if len(b) == 0 {
|
||||
b = []byte{'\n'}
|
||||
}
|
||||
|
||||
yaml_parser_set_input_string(&p.parser, b)
|
||||
|
||||
p.skip()
|
||||
if p.event.typ != yaml_STREAM_START_EVENT {
|
||||
panic("expected stream start event, got " + strconv.Itoa(int(p.event.typ)))
|
||||
}
|
||||
p.skip()
|
||||
return &p
|
||||
}
|
||||
|
||||
func (p *parser) destroy() {
|
||||
if p.event.typ != yaml_NO_EVENT {
|
||||
yaml_event_delete(&p.event)
|
||||
}
|
||||
yaml_parser_delete(&p.parser)
|
||||
}
|
||||
|
||||
func (p *parser) skip() {
|
||||
if p.event.typ != yaml_NO_EVENT {
|
||||
if p.event.typ == yaml_STREAM_END_EVENT {
|
||||
failf("attempted to go past the end of stream; corrupted value?")
|
||||
}
|
||||
yaml_event_delete(&p.event)
|
||||
}
|
||||
if !yaml_parser_parse(&p.parser, &p.event) {
|
||||
p.fail()
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) fail() {
|
||||
var where string
|
||||
var line int
|
||||
if p.parser.problem_mark.line != 0 {
|
||||
line = p.parser.problem_mark.line
|
||||
} else if p.parser.context_mark.line != 0 {
|
||||
line = p.parser.context_mark.line
|
||||
}
|
||||
if line != 0 {
|
||||
where = "line " + strconv.Itoa(line) + ": "
|
||||
}
|
||||
var msg string
|
||||
if len(p.parser.problem) > 0 {
|
||||
msg = p.parser.problem
|
||||
} else {
|
||||
msg = "unknown problem parsing YAML content"
|
||||
}
|
||||
failf("%s%s", where, msg)
|
||||
}
|
||||
|
||||
func (p *parser) anchor(n *node, anchor []byte) {
|
||||
if anchor != nil {
|
||||
p.doc.anchors[string(anchor)] = n
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) parse() *node {
|
||||
switch p.event.typ {
|
||||
case yaml_SCALAR_EVENT:
|
||||
return p.scalar()
|
||||
case yaml_ALIAS_EVENT:
|
||||
return p.alias()
|
||||
case yaml_MAPPING_START_EVENT:
|
||||
return p.mapping()
|
||||
case yaml_SEQUENCE_START_EVENT:
|
||||
return p.sequence()
|
||||
case yaml_DOCUMENT_START_EVENT:
|
||||
return p.document()
|
||||
case yaml_STREAM_END_EVENT:
|
||||
// Happens when attempting to decode an empty buffer.
|
||||
return nil
|
||||
default:
|
||||
panic("attempted to parse unknown event: " + strconv.Itoa(int(p.event.typ)))
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) node(kind int) *node {
|
||||
return &node{
|
||||
kind: kind,
|
||||
line: p.event.start_mark.line,
|
||||
column: p.event.start_mark.column,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) document() *node {
|
||||
n := p.node(documentNode)
|
||||
n.anchors = make(map[string]*node)
|
||||
p.doc = n
|
||||
p.skip()
|
||||
n.children = append(n.children, p.parse())
|
||||
if p.event.typ != yaml_DOCUMENT_END_EVENT {
|
||||
panic("expected end of document event but got " + strconv.Itoa(int(p.event.typ)))
|
||||
}
|
||||
p.skip()
|
||||
return n
|
||||
}
|
||||
|
||||
func (p *parser) alias() *node {
|
||||
n := p.node(aliasNode)
|
||||
n.value = string(p.event.anchor)
|
||||
p.skip()
|
||||
return n
|
||||
}
|
||||
|
||||
func (p *parser) scalar() *node {
|
||||
n := p.node(scalarNode)
|
||||
n.value = string(p.event.value)
|
||||
n.tag = string(p.event.tag)
|
||||
n.implicit = p.event.implicit
|
||||
p.anchor(n, p.event.anchor)
|
||||
p.skip()
|
||||
return n
|
||||
}
|
||||
|
||||
func (p *parser) sequence() *node {
|
||||
n := p.node(sequenceNode)
|
||||
p.anchor(n, p.event.anchor)
|
||||
p.skip()
|
||||
for p.event.typ != yaml_SEQUENCE_END_EVENT {
|
||||
n.children = append(n.children, p.parse())
|
||||
}
|
||||
p.skip()
|
||||
return n
|
||||
}
|
||||
|
||||
func (p *parser) mapping() *node {
|
||||
n := p.node(mappingNode)
|
||||
p.anchor(n, p.event.anchor)
|
||||
p.skip()
|
||||
for p.event.typ != yaml_MAPPING_END_EVENT {
|
||||
n.children = append(n.children, p.parse(), p.parse())
|
||||
}
|
||||
p.skip()
|
||||
return n
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Decoder, unmarshals a node into a provided value.
|
||||
|
||||
type decoder struct {
|
||||
doc *node
|
||||
aliases map[string]bool
|
||||
mapType reflect.Type
|
||||
terrors []string
|
||||
}
|
||||
|
||||
var (
|
||||
mapItemType = reflect.TypeOf(MapItem{})
|
||||
durationType = reflect.TypeOf(time.Duration(0))
|
||||
defaultMapType = reflect.TypeOf(map[interface{}]interface{}{})
|
||||
ifaceType = defaultMapType.Elem()
|
||||
)
|
||||
|
||||
func newDecoder() *decoder {
|
||||
d := &decoder{mapType: defaultMapType}
|
||||
d.aliases = make(map[string]bool)
|
||||
return d
|
||||
}
|
||||
|
||||
func (d *decoder) terror(n *node, tag string, out reflect.Value) {
|
||||
if n.tag != "" {
|
||||
tag = n.tag
|
||||
}
|
||||
value := n.value
|
||||
if tag != yaml_SEQ_TAG && tag != yaml_MAP_TAG {
|
||||
if len(value) > 10 {
|
||||
value = " `" + value[:7] + "...`"
|
||||
} else {
|
||||
value = " `" + value + "`"
|
||||
}
|
||||
}
|
||||
d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.line+1, shortTag(tag), value, out.Type()))
|
||||
}
|
||||
|
||||
func (d *decoder) callUnmarshaler(n *node, u Unmarshaler) (good bool) {
|
||||
terrlen := len(d.terrors)
|
||||
err := u.UnmarshalYAML(func(v interface{}) (err error) {
|
||||
defer handleErr(&err)
|
||||
d.unmarshal(n, reflect.ValueOf(v))
|
||||
if len(d.terrors) > terrlen {
|
||||
issues := d.terrors[terrlen:]
|
||||
d.terrors = d.terrors[:terrlen]
|
||||
return &TypeError{issues}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
if e, ok := err.(*TypeError); ok {
|
||||
d.terrors = append(d.terrors, e.Errors...)
|
||||
return false
|
||||
}
|
||||
if err != nil {
|
||||
fail(err)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// d.prepare initializes and dereferences pointers and calls UnmarshalYAML
|
||||
// if a value is found to implement it.
|
||||
// It returns the initialized and dereferenced out value, whether
|
||||
// unmarshalling was already done by UnmarshalYAML, and if so whether
|
||||
// its types unmarshalled appropriately.
|
||||
//
|
||||
// If n holds a null value, prepare returns before doing anything.
|
||||
func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) {
|
||||
if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "" && n.implicit) {
|
||||
return out, false, false
|
||||
}
|
||||
again := true
|
||||
for again {
|
||||
again = false
|
||||
if out.Kind() == reflect.Ptr {
|
||||
if out.IsNil() {
|
||||
out.Set(reflect.New(out.Type().Elem()))
|
||||
}
|
||||
out = out.Elem()
|
||||
again = true
|
||||
}
|
||||
if out.CanAddr() {
|
||||
if u, ok := out.Addr().Interface().(Unmarshaler); ok {
|
||||
good = d.callUnmarshaler(n, u)
|
||||
return out, true, good
|
||||
}
|
||||
}
|
||||
}
|
||||
return out, false, false
|
||||
}
|
||||
|
||||
func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) {
|
||||
switch n.kind {
|
||||
case documentNode:
|
||||
return d.document(n, out)
|
||||
case aliasNode:
|
||||
return d.alias(n, out)
|
||||
}
|
||||
out, unmarshaled, good := d.prepare(n, out)
|
||||
if unmarshaled {
|
||||
return good
|
||||
}
|
||||
switch n.kind {
|
||||
case scalarNode:
|
||||
good = d.scalar(n, out)
|
||||
case mappingNode:
|
||||
good = d.mapping(n, out)
|
||||
case sequenceNode:
|
||||
good = d.sequence(n, out)
|
||||
default:
|
||||
panic("internal error: unknown node kind: " + strconv.Itoa(n.kind))
|
||||
}
|
||||
return good
|
||||
}
|
||||
|
||||
func (d *decoder) document(n *node, out reflect.Value) (good bool) {
|
||||
if len(n.children) == 1 {
|
||||
d.doc = n
|
||||
d.unmarshal(n.children[0], out)
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (d *decoder) alias(n *node, out reflect.Value) (good bool) {
|
||||
an, ok := d.doc.anchors[n.value]
|
||||
if !ok {
|
||||
failf("unknown anchor '%s' referenced", n.value)
|
||||
}
|
||||
if d.aliases[n.value] {
|
||||
failf("anchor '%s' value contains itself", n.value)
|
||||
}
|
||||
d.aliases[n.value] = true
|
||||
good = d.unmarshal(an, out)
|
||||
delete(d.aliases, n.value)
|
||||
return good
|
||||
}
|
||||
|
||||
var zeroValue reflect.Value
|
||||
|
||||
func resetMap(out reflect.Value) {
|
||||
for _, k := range out.MapKeys() {
|
||||
out.SetMapIndex(k, zeroValue)
|
||||
}
|
||||
}
|
||||
|
||||
func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
|
||||
var tag string
|
||||
var resolved interface{}
|
||||
if n.tag == "" && !n.implicit {
|
||||
tag = yaml_STR_TAG
|
||||
resolved = n.value
|
||||
} else {
|
||||
tag, resolved = resolve(n.tag, n.value)
|
||||
if tag == yaml_BINARY_TAG {
|
||||
data, err := base64.StdEncoding.DecodeString(resolved.(string))
|
||||
if err != nil {
|
||||
failf("!!binary value contains invalid base64 data")
|
||||
}
|
||||
resolved = string(data)
|
||||
}
|
||||
}
|
||||
if resolved == nil {
|
||||
if out.Kind() == reflect.Map && !out.CanAddr() {
|
||||
resetMap(out)
|
||||
} else {
|
||||
out.Set(reflect.Zero(out.Type()))
|
||||
}
|
||||
return true
|
||||
}
|
||||
if s, ok := resolved.(string); ok && out.CanAddr() {
|
||||
if u, ok := out.Addr().Interface().(encoding.TextUnmarshaler); ok {
|
||||
err := u.UnmarshalText([]byte(s))
|
||||
if err != nil {
|
||||
fail(err)
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
switch out.Kind() {
|
||||
case reflect.String:
|
||||
if tag == yaml_BINARY_TAG {
|
||||
out.SetString(resolved.(string))
|
||||
good = true
|
||||
} else if resolved != nil {
|
||||
out.SetString(n.value)
|
||||
good = true
|
||||
}
|
||||
case reflect.Interface:
|
||||
if resolved == nil {
|
||||
out.Set(reflect.Zero(out.Type()))
|
||||
} else {
|
||||
out.Set(reflect.ValueOf(resolved))
|
||||
}
|
||||
good = true
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
switch resolved := resolved.(type) {
|
||||
case int:
|
||||
if !out.OverflowInt(int64(resolved)) {
|
||||
out.SetInt(int64(resolved))
|
||||
good = true
|
||||
}
|
||||
case int64:
|
||||
if !out.OverflowInt(resolved) {
|
||||
out.SetInt(resolved)
|
||||
good = true
|
||||
}
|
||||
case uint64:
|
||||
if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
|
||||
out.SetInt(int64(resolved))
|
||||
good = true
|
||||
}
|
||||
case float64:
|
||||
if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
|
||||
out.SetInt(int64(resolved))
|
||||
good = true
|
||||
}
|
||||
case string:
|
||||
if out.Type() == durationType {
|
||||
d, err := time.ParseDuration(resolved)
|
||||
if err == nil {
|
||||
out.SetInt(int64(d))
|
||||
good = true
|
||||
}
|
||||
}
|
||||
}
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
switch resolved := resolved.(type) {
|
||||
case int:
|
||||
if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
|
||||
out.SetUint(uint64(resolved))
|
||||
good = true
|
||||
}
|
||||
case int64:
|
||||
if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
|
||||
out.SetUint(uint64(resolved))
|
||||
good = true
|
||||
}
|
||||
case uint64:
|
||||
if !out.OverflowUint(uint64(resolved)) {
|
||||
out.SetUint(uint64(resolved))
|
||||
good = true
|
||||
}
|
||||
case float64:
|
||||
if resolved <= math.MaxUint64 && !out.OverflowUint(uint64(resolved)) {
|
||||
out.SetUint(uint64(resolved))
|
||||
good = true
|
||||
}
|
||||
}
|
||||
case reflect.Bool:
|
||||
switch resolved := resolved.(type) {
|
||||
case bool:
|
||||
out.SetBool(resolved)
|
||||
good = true
|
||||
}
|
||||
case reflect.Float32, reflect.Float64:
|
||||
switch resolved := resolved.(type) {
|
||||
case int:
|
||||
out.SetFloat(float64(resolved))
|
||||
good = true
|
||||
case int64:
|
||||
out.SetFloat(float64(resolved))
|
||||
good = true
|
||||
case uint64:
|
||||
out.SetFloat(float64(resolved))
|
||||
good = true
|
||||
case float64:
|
||||
out.SetFloat(resolved)
|
||||
good = true
|
||||
}
|
||||
case reflect.Ptr:
|
||||
if out.Type().Elem() == reflect.TypeOf(resolved) {
|
||||
// TODO Does this make sense? When is out a Ptr except when decoding a nil value?
|
||||
elem := reflect.New(out.Type().Elem())
|
||||
elem.Elem().Set(reflect.ValueOf(resolved))
|
||||
out.Set(elem)
|
||||
good = true
|
||||
}
|
||||
}
|
||||
if !good {
|
||||
d.terror(n, tag, out)
|
||||
}
|
||||
return good
|
||||
}
|
||||
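The encoding.TextUnmarshaler branch in scalar above is what drives fixtures such as the net.IP case in decode_test.go further down in this commit; a minimal caller-side sketch, assuming only the public yaml.Unmarshal API:

package main

import (
	"fmt"
	"net"

	"gopkg.in/yaml.v2"
)

func main() {
	// net.IP implements encoding.TextUnmarshaler, so the scalar "1.2.3.4"
	// is handed to UnmarshalText instead of the plain string path.
	var m map[string]net.IP
	if err := yaml.Unmarshal([]byte("a: 1.2.3.4\n"), &m); err != nil {
		panic(err)
	}
	fmt.Println(m["a"]) // 1.2.3.4
}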
|
||||
func settableValueOf(i interface{}) reflect.Value {
|
||||
v := reflect.ValueOf(i)
|
||||
sv := reflect.New(v.Type()).Elem()
|
||||
sv.Set(v)
|
||||
return sv
|
||||
}
|
||||
|
||||
func (d *decoder) sequence(n *node, out reflect.Value) (good bool) {
|
||||
l := len(n.children)
|
||||
|
||||
var iface reflect.Value
|
||||
switch out.Kind() {
|
||||
case reflect.Slice:
|
||||
out.Set(reflect.MakeSlice(out.Type(), l, l))
|
||||
case reflect.Interface:
|
||||
// No type hints. Will have to use a generic sequence.
|
||||
iface = out
|
||||
out = settableValueOf(make([]interface{}, l))
|
||||
default:
|
||||
d.terror(n, yaml_SEQ_TAG, out)
|
||||
return false
|
||||
}
|
||||
et := out.Type().Elem()
|
||||
|
||||
j := 0
|
||||
for i := 0; i < l; i++ {
|
||||
e := reflect.New(et).Elem()
|
||||
if ok := d.unmarshal(n.children[i], e); ok {
|
||||
out.Index(j).Set(e)
|
||||
j++
|
||||
}
|
||||
}
|
||||
out.Set(out.Slice(0, j))
|
||||
if iface.IsValid() {
|
||||
iface.Set(out)
|
||||
}
|
||||
return true
|
||||
}
|
||||
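The j counter in sequence above drops elements that fail to decode instead of aborting the whole slice; a sketch mirroring the "seq: [A,1,C]" fixture in decode_test.go below:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	// "A" and "C" cannot be decoded into int, so only the matching element
	// survives; the mismatches are reported as a *yaml.TypeError.
	var m map[string][]int
	err := yaml.Unmarshal([]byte("seq: [A,1,C]"), &m)
	fmt.Println(m["seq"], err != nil) // [1] true
}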
|
||||
func (d *decoder) mapping(n *node, out reflect.Value) (good bool) {
|
||||
switch out.Kind() {
|
||||
case reflect.Struct:
|
||||
return d.mappingStruct(n, out)
|
||||
case reflect.Slice:
|
||||
return d.mappingSlice(n, out)
|
||||
case reflect.Map:
|
||||
// okay
|
||||
case reflect.Interface:
|
||||
if d.mapType.Kind() == reflect.Map {
|
||||
iface := out
|
||||
out = reflect.MakeMap(d.mapType)
|
||||
iface.Set(out)
|
||||
} else {
|
||||
slicev := reflect.New(d.mapType).Elem()
|
||||
if !d.mappingSlice(n, slicev) {
|
||||
return false
|
||||
}
|
||||
out.Set(slicev)
|
||||
return true
|
||||
}
|
||||
default:
|
||||
d.terror(n, yaml_MAP_TAG, out)
|
||||
return false
|
||||
}
|
||||
outt := out.Type()
|
||||
kt := outt.Key()
|
||||
et := outt.Elem()
|
||||
|
||||
mapType := d.mapType
|
||||
if outt.Key() == ifaceType && outt.Elem() == ifaceType {
|
||||
d.mapType = outt
|
||||
}
|
||||
|
||||
if out.IsNil() {
|
||||
out.Set(reflect.MakeMap(outt))
|
||||
}
|
||||
l := len(n.children)
|
||||
for i := 0; i < l; i += 2 {
|
||||
if isMerge(n.children[i]) {
|
||||
d.merge(n.children[i+1], out)
|
||||
continue
|
||||
}
|
||||
k := reflect.New(kt).Elem()
|
||||
if d.unmarshal(n.children[i], k) {
|
||||
kkind := k.Kind()
|
||||
if kkind == reflect.Interface {
|
||||
kkind = k.Elem().Kind()
|
||||
}
|
||||
if kkind == reflect.Map || kkind == reflect.Slice {
|
||||
failf("invalid map key: %#v", k.Interface())
|
||||
}
|
||||
e := reflect.New(et).Elem()
|
||||
if d.unmarshal(n.children[i+1], e) {
|
||||
out.SetMapIndex(k, e)
|
||||
}
|
||||
}
|
||||
}
|
||||
d.mapType = mapType
|
||||
return true
|
||||
}
|
||||
|
||||
func (d *decoder) mappingSlice(n *node, out reflect.Value) (good bool) {
|
||||
outt := out.Type()
|
||||
if outt.Elem() != mapItemType {
|
||||
d.terror(n, yaml_MAP_TAG, out)
|
||||
return false
|
||||
}
|
||||
|
||||
mapType := d.mapType
|
||||
d.mapType = outt
|
||||
|
||||
var slice []MapItem
|
||||
var l = len(n.children)
|
||||
for i := 0; i < l; i += 2 {
|
||||
if isMerge(n.children[i]) {
|
||||
d.merge(n.children[i+1], out)
|
||||
continue
|
||||
}
|
||||
item := MapItem{}
|
||||
k := reflect.ValueOf(&item.Key).Elem()
|
||||
if d.unmarshal(n.children[i], k) {
|
||||
v := reflect.ValueOf(&item.Value).Elem()
|
||||
if d.unmarshal(n.children[i+1], v) {
|
||||
slice = append(slice, item)
|
||||
}
|
||||
}
|
||||
}
|
||||
out.Set(reflect.ValueOf(slice))
|
||||
d.mapType = mapType
|
||||
return true
|
||||
}
|
||||
|
||||
func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) {
|
||||
sinfo, err := getStructInfo(out.Type())
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
name := settableValueOf("")
|
||||
l := len(n.children)
|
||||
|
||||
var inlineMap reflect.Value
|
||||
var elemType reflect.Type
|
||||
if sinfo.InlineMap != -1 {
|
||||
inlineMap = out.Field(sinfo.InlineMap)
|
||||
inlineMap.Set(reflect.New(inlineMap.Type()).Elem())
|
||||
elemType = inlineMap.Type().Elem()
|
||||
}
|
||||
|
||||
for i := 0; i < l; i += 2 {
|
||||
ni := n.children[i]
|
||||
if isMerge(ni) {
|
||||
d.merge(n.children[i+1], out)
|
||||
continue
|
||||
}
|
||||
if !d.unmarshal(ni, name) {
|
||||
continue
|
||||
}
|
||||
if info, ok := sinfo.FieldsMap[name.String()]; ok {
|
||||
var field reflect.Value
|
||||
if info.Inline == nil {
|
||||
field = out.Field(info.Num)
|
||||
} else {
|
||||
field = out.FieldByIndex(info.Inline)
|
||||
}
|
||||
d.unmarshal(n.children[i+1], field)
|
||||
} else if sinfo.InlineMap != -1 {
|
||||
if inlineMap.IsNil() {
|
||||
inlineMap.Set(reflect.MakeMap(inlineMap.Type()))
|
||||
}
|
||||
value := reflect.New(elemType).Elem()
|
||||
d.unmarshal(n.children[i+1], value)
|
||||
inlineMap.SetMapIndex(name, value)
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func failWantMap() {
|
||||
failf("map merge requires map or sequence of maps as the value")
|
||||
}
|
||||
|
||||
func (d *decoder) merge(n *node, out reflect.Value) {
|
||||
switch n.kind {
|
||||
case mappingNode:
|
||||
d.unmarshal(n, out)
|
||||
case aliasNode:
|
||||
an, ok := d.doc.anchors[n.value]
|
||||
if ok && an.kind != mappingNode {
|
||||
failWantMap()
|
||||
}
|
||||
d.unmarshal(n, out)
|
||||
case sequenceNode:
|
||||
// Step backwards as earlier nodes take precedence.
|
||||
for i := len(n.children) - 1; i >= 0; i-- {
|
||||
ni := n.children[i]
|
||||
if ni.kind == aliasNode {
|
||||
an, ok := d.doc.anchors[ni.value]
|
||||
if ok && an.kind != mappingNode {
|
||||
failWantMap()
|
||||
}
|
||||
} else if ni.kind != mappingNode {
|
||||
failWantMap()
|
||||
}
|
||||
d.unmarshal(ni, out)
|
||||
}
|
||||
default:
|
||||
failWantMap()
|
||||
}
|
||||
}
|
||||
|
||||
func isMerge(n *node) bool {
|
||||
return n.kind == scalarNode && n.value == "<<" && (n.implicit == true || n.tag == yaml_MERGE_TAG)
|
||||
}
|
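The merge handling above implements http://yaml.org/type/merge.html; a minimal caller-side sketch of that behaviour, assuming only the public yaml.Unmarshal API (the key names are illustrative):

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	// "<<: *base" pulls in the anchored mapping; keys written out explicitly
	// afterwards override the merged ones.
	doc := "base: &base {x: 1, y: 2}\nmerged:\n  <<: *base\n  y: 3\n"
	var out map[string]map[string]int
	if err := yaml.Unmarshal([]byte(doc), &out); err != nil {
		panic(err)
	}
	fmt.Println(out["merged"]) // map[x:1 y:3]
}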
998
_vendor/src/gopkg.in/yaml.v2/decode_test.go
Normal file
@ -0,0 +1,998 @@
|
||||
package yaml_test
|
||||
|
||||
import (
|
||||
"errors"
|
||||
. "gopkg.in/check.v1"
|
||||
"gopkg.in/yaml.v2"
|
||||
"math"
|
||||
"net"
|
||||
"reflect"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var unmarshalIntTest = 123
|
||||
|
||||
var unmarshalTests = []struct {
|
||||
data string
|
||||
value interface{}
|
||||
}{
|
||||
{
|
||||
"",
|
||||
&struct{}{},
|
||||
}, {
|
||||
"{}", &struct{}{},
|
||||
}, {
|
||||
"v: hi",
|
||||
map[string]string{"v": "hi"},
|
||||
}, {
|
||||
"v: hi", map[string]interface{}{"v": "hi"},
|
||||
}, {
|
||||
"v: true",
|
||||
map[string]string{"v": "true"},
|
||||
}, {
|
||||
"v: true",
|
||||
map[string]interface{}{"v": true},
|
||||
}, {
|
||||
"v: 10",
|
||||
map[string]interface{}{"v": 10},
|
||||
}, {
|
||||
"v: 0b10",
|
||||
map[string]interface{}{"v": 2},
|
||||
}, {
|
||||
"v: 0xA",
|
||||
map[string]interface{}{"v": 10},
|
||||
}, {
|
||||
"v: 4294967296",
|
||||
map[string]int64{"v": 4294967296},
|
||||
}, {
|
||||
"v: 0.1",
|
||||
map[string]interface{}{"v": 0.1},
|
||||
}, {
|
||||
"v: .1",
|
||||
map[string]interface{}{"v": 0.1},
|
||||
}, {
|
||||
"v: .Inf",
|
||||
map[string]interface{}{"v": math.Inf(+1)},
|
||||
}, {
|
||||
"v: -.Inf",
|
||||
map[string]interface{}{"v": math.Inf(-1)},
|
||||
}, {
|
||||
"v: -10",
|
||||
map[string]interface{}{"v": -10},
|
||||
}, {
|
||||
"v: -.1",
|
||||
map[string]interface{}{"v": -0.1},
|
||||
},
|
||||
|
||||
// Simple values.
|
||||
{
|
||||
"123",
|
||||
&unmarshalIntTest,
|
||||
},
|
||||
|
||||
// Floats from spec
|
||||
{
|
||||
"canonical: 6.8523e+5",
|
||||
map[string]interface{}{"canonical": 6.8523e+5},
|
||||
}, {
|
||||
"expo: 685.230_15e+03",
|
||||
map[string]interface{}{"expo": 685.23015e+03},
|
||||
}, {
|
||||
"fixed: 685_230.15",
|
||||
map[string]interface{}{"fixed": 685230.15},
|
||||
}, {
|
||||
"neginf: -.inf",
|
||||
map[string]interface{}{"neginf": math.Inf(-1)},
|
||||
}, {
|
||||
"fixed: 685_230.15",
|
||||
map[string]float64{"fixed": 685230.15},
|
||||
},
|
||||
//{"sexa: 190:20:30.15", map[string]interface{}{"sexa": 0}}, // Unsupported
|
||||
//{"notanum: .NaN", map[string]interface{}{"notanum": math.NaN()}}, // Equality of NaN fails.
|
||||
|
||||
// Bools from spec
|
||||
{
|
||||
"canonical: y",
|
||||
map[string]interface{}{"canonical": true},
|
||||
}, {
|
||||
"answer: NO",
|
||||
map[string]interface{}{"answer": false},
|
||||
}, {
|
||||
"logical: True",
|
||||
map[string]interface{}{"logical": true},
|
||||
}, {
|
||||
"option: on",
|
||||
map[string]interface{}{"option": true},
|
||||
}, {
|
||||
"option: on",
|
||||
map[string]bool{"option": true},
|
||||
},
|
||||
// Ints from spec
|
||||
{
|
||||
"canonical: 685230",
|
||||
map[string]interface{}{"canonical": 685230},
|
||||
}, {
|
||||
"decimal: +685_230",
|
||||
map[string]interface{}{"decimal": 685230},
|
||||
}, {
|
||||
"octal: 02472256",
|
||||
map[string]interface{}{"octal": 685230},
|
||||
}, {
|
||||
"hexa: 0x_0A_74_AE",
|
||||
map[string]interface{}{"hexa": 685230},
|
||||
}, {
|
||||
"bin: 0b1010_0111_0100_1010_1110",
|
||||
map[string]interface{}{"bin": 685230},
|
||||
}, {
|
||||
"bin: -0b101010",
|
||||
map[string]interface{}{"bin": -42},
|
||||
}, {
|
||||
"decimal: +685_230",
|
||||
map[string]int{"decimal": 685230},
|
||||
},
|
||||
|
||||
//{"sexa: 190:20:30", map[string]interface{}{"sexa": 0}}, // Unsupported
|
||||
|
||||
// Nulls from spec
|
||||
{
|
||||
"empty:",
|
||||
map[string]interface{}{"empty": nil},
|
||||
}, {
|
||||
"canonical: ~",
|
||||
map[string]interface{}{"canonical": nil},
|
||||
}, {
|
||||
"english: null",
|
||||
map[string]interface{}{"english": nil},
|
||||
}, {
|
||||
"~: null key",
|
||||
map[interface{}]string{nil: "null key"},
|
||||
}, {
|
||||
"empty:",
|
||||
map[string]*bool{"empty": nil},
|
||||
},
|
||||
|
||||
// Flow sequence
|
||||
{
|
||||
"seq: [A,B]",
|
||||
map[string]interface{}{"seq": []interface{}{"A", "B"}},
|
||||
}, {
|
||||
"seq: [A,B,C,]",
|
||||
map[string][]string{"seq": []string{"A", "B", "C"}},
|
||||
}, {
|
||||
"seq: [A,1,C]",
|
||||
map[string][]string{"seq": []string{"A", "1", "C"}},
|
||||
}, {
|
||||
"seq: [A,1,C]",
|
||||
map[string][]int{"seq": []int{1}},
|
||||
}, {
|
||||
"seq: [A,1,C]",
|
||||
map[string]interface{}{"seq": []interface{}{"A", 1, "C"}},
|
||||
},
|
||||
// Block sequence
|
||||
{
|
||||
"seq:\n - A\n - B",
|
||||
map[string]interface{}{"seq": []interface{}{"A", "B"}},
|
||||
}, {
|
||||
"seq:\n - A\n - B\n - C",
|
||||
map[string][]string{"seq": []string{"A", "B", "C"}},
|
||||
}, {
|
||||
"seq:\n - A\n - 1\n - C",
|
||||
map[string][]string{"seq": []string{"A", "1", "C"}},
|
||||
}, {
|
||||
"seq:\n - A\n - 1\n - C",
|
||||
map[string][]int{"seq": []int{1}},
|
||||
}, {
|
||||
"seq:\n - A\n - 1\n - C",
|
||||
map[string]interface{}{"seq": []interface{}{"A", 1, "C"}},
|
||||
},
|
||||
|
||||
// Literal block scalar
|
||||
{
|
||||
"scalar: | # Comment\n\n literal\n\n \ttext\n\n",
|
||||
map[string]string{"scalar": "\nliteral\n\n\ttext\n"},
|
||||
},
|
||||
|
||||
// Folded block scalar
|
||||
{
|
||||
"scalar: > # Comment\n\n folded\n line\n \n next\n line\n * one\n * two\n\n last\n line\n\n",
|
||||
map[string]string{"scalar": "\nfolded line\nnext line\n * one\n * two\n\nlast line\n"},
|
||||
},
|
||||
|
||||
// Map inside interface with no type hints.
|
||||
{
|
||||
"a: {b: c}",
|
||||
map[interface{}]interface{}{"a": map[interface{}]interface{}{"b": "c"}},
|
||||
},
|
||||
|
||||
// Structs and type conversions.
|
||||
{
|
||||
"hello: world",
|
||||
&struct{ Hello string }{"world"},
|
||||
}, {
|
||||
"a: {b: c}",
|
||||
&struct{ A struct{ B string } }{struct{ B string }{"c"}},
|
||||
}, {
|
||||
"a: {b: c}",
|
||||
&struct{ A *struct{ B string } }{&struct{ B string }{"c"}},
|
||||
}, {
|
||||
"a: {b: c}",
|
||||
&struct{ A map[string]string }{map[string]string{"b": "c"}},
|
||||
}, {
|
||||
"a: {b: c}",
|
||||
&struct{ A *map[string]string }{&map[string]string{"b": "c"}},
|
||||
}, {
|
||||
"a:",
|
||||
&struct{ A map[string]string }{},
|
||||
}, {
|
||||
"a: 1",
|
||||
&struct{ A int }{1},
|
||||
}, {
|
||||
"a: 1",
|
||||
&struct{ A float64 }{1},
|
||||
}, {
|
||||
"a: 1.0",
|
||||
&struct{ A int }{1},
|
||||
}, {
|
||||
"a: 1.0",
|
||||
&struct{ A uint }{1},
|
||||
}, {
|
||||
"a: [1, 2]",
|
||||
&struct{ A []int }{[]int{1, 2}},
|
||||
}, {
|
||||
"a: 1",
|
||||
&struct{ B int }{0},
|
||||
}, {
|
||||
"a: 1",
|
||||
&struct {
|
||||
B int "a"
|
||||
}{1},
|
||||
}, {
|
||||
"a: y",
|
||||
&struct{ A bool }{true},
|
||||
},
|
||||
|
||||
// Some cross type conversions
|
||||
{
|
||||
"v: 42",
|
||||
map[string]uint{"v": 42},
|
||||
}, {
|
||||
"v: -42",
|
||||
map[string]uint{},
|
||||
}, {
|
||||
"v: 4294967296",
|
||||
map[string]uint64{"v": 4294967296},
|
||||
}, {
|
||||
"v: -4294967296",
|
||||
map[string]uint64{},
|
||||
},
|
||||
|
||||
// int
|
||||
{
|
||||
"int_max: 2147483647",
|
||||
map[string]int{"int_max": math.MaxInt32},
|
||||
},
|
||||
{
|
||||
"int_min: -2147483648",
|
||||
map[string]int{"int_min": math.MinInt32},
|
||||
},
|
||||
{
|
||||
"int_overflow: 9223372036854775808", // math.MaxInt64 + 1
|
||||
map[string]int{},
|
||||
},
|
||||
|
||||
// int64
|
||||
{
|
||||
"int64_max: 9223372036854775807",
|
||||
map[string]int64{"int64_max": math.MaxInt64},
|
||||
},
|
||||
{
|
||||
"int64_max_base2: 0b111111111111111111111111111111111111111111111111111111111111111",
|
||||
map[string]int64{"int64_max_base2": math.MaxInt64},
|
||||
},
|
||||
{
|
||||
"int64_min: -9223372036854775808",
|
||||
map[string]int64{"int64_min": math.MinInt64},
|
||||
},
|
||||
{
|
||||
"int64_neg_base2: -0b111111111111111111111111111111111111111111111111111111111111111",
|
||||
map[string]int64{"int64_neg_base2": -math.MaxInt64},
|
||||
},
|
||||
{
|
||||
"int64_overflow: 9223372036854775808", // math.MaxInt64 + 1
|
||||
map[string]int64{},
|
||||
},
|
||||
|
||||
// uint
|
||||
{
|
||||
"uint_min: 0",
|
||||
map[string]uint{"uint_min": 0},
|
||||
},
|
||||
{
|
||||
"uint_max: 4294967295",
|
||||
map[string]uint{"uint_max": math.MaxUint32},
|
||||
},
|
||||
{
|
||||
"uint_underflow: -1",
|
||||
map[string]uint{},
|
||||
},
|
||||
|
||||
// uint64
|
||||
{
|
||||
"uint64_min: 0",
|
||||
map[string]uint{"uint64_min": 0},
|
||||
},
|
||||
{
|
||||
"uint64_max: 18446744073709551615",
|
||||
map[string]uint64{"uint64_max": math.MaxUint64},
|
||||
},
|
||||
{
|
||||
"uint64_max_base2: 0b1111111111111111111111111111111111111111111111111111111111111111",
|
||||
map[string]uint64{"uint64_max_base2": math.MaxUint64},
|
||||
},
|
||||
{
|
||||
"uint64_maxint64: 9223372036854775807",
|
||||
map[string]uint64{"uint64_maxint64": math.MaxInt64},
|
||||
},
|
||||
{
|
||||
"uint64_underflow: -1",
|
||||
map[string]uint64{},
|
||||
},
|
||||
|
||||
// float32
|
||||
{
|
||||
"float32_max: 3.40282346638528859811704183484516925440e+38",
|
||||
map[string]float32{"float32_max": math.MaxFloat32},
|
||||
},
|
||||
{
|
||||
"float32_nonzero: 1.401298464324817070923729583289916131280e-45",
|
||||
map[string]float32{"float32_nonzero": math.SmallestNonzeroFloat32},
|
||||
},
|
||||
{
|
||||
"float32_maxuint64: 18446744073709551615",
|
||||
map[string]float32{"float32_maxuint64": float32(math.MaxUint64)},
|
||||
},
|
||||
{
|
||||
"float32_maxuint64+1: 18446744073709551616",
|
||||
map[string]float32{"float32_maxuint64+1": float32(math.MaxUint64 + 1)},
|
||||
},
|
||||
|
||||
// float64
|
||||
{
|
||||
"float64_max: 1.797693134862315708145274237317043567981e+308",
|
||||
map[string]float64{"float64_max": math.MaxFloat64},
|
||||
},
|
||||
{
|
||||
"float64_nonzero: 4.940656458412465441765687928682213723651e-324",
|
||||
map[string]float64{"float64_nonzero": math.SmallestNonzeroFloat64},
|
||||
},
|
||||
{
|
||||
"float64_maxuint64: 18446744073709551615",
|
||||
map[string]float64{"float64_maxuint64": float64(math.MaxUint64)},
|
||||
},
|
||||
{
|
||||
"float64_maxuint64+1: 18446744073709551616",
|
||||
map[string]float64{"float64_maxuint64+1": float64(math.MaxUint64 + 1)},
|
||||
},
|
||||
|
||||
// Overflow cases.
|
||||
{
|
||||
"v: 4294967297",
|
||||
map[string]int32{},
|
||||
}, {
|
||||
"v: 128",
|
||||
map[string]int8{},
|
||||
},
|
||||
|
||||
// Quoted values.
|
||||
{
|
||||
"'1': '\"2\"'",
|
||||
map[interface{}]interface{}{"1": "\"2\""},
|
||||
}, {
|
||||
"v:\n- A\n- 'B\n\n C'\n",
|
||||
map[string][]string{"v": []string{"A", "B\nC"}},
|
||||
},
|
||||
|
||||
// Explicit tags.
|
||||
{
|
||||
"v: !!float '1.1'",
|
||||
map[string]interface{}{"v": 1.1},
|
||||
}, {
|
||||
"v: !!null ''",
|
||||
map[string]interface{}{"v": nil},
|
||||
}, {
|
||||
"%TAG !y! tag:yaml.org,2002:\n---\nv: !y!int '1'",
|
||||
map[string]interface{}{"v": 1},
|
||||
},
|
||||
|
||||
// Anchors and aliases.
|
||||
{
|
||||
"a: &x 1\nb: &y 2\nc: *x\nd: *y\n",
|
||||
&struct{ A, B, C, D int }{1, 2, 1, 2},
|
||||
}, {
|
||||
"a: &a {c: 1}\nb: *a",
|
||||
&struct {
|
||||
A, B struct {
|
||||
C int
|
||||
}
|
||||
}{struct{ C int }{1}, struct{ C int }{1}},
|
||||
}, {
|
||||
"a: &a [1, 2]\nb: *a",
|
||||
&struct{ B []int }{[]int{1, 2}},
|
||||
}, {
|
||||
"b: *a\na: &a {c: 1}",
|
||||
&struct {
|
||||
A, B struct {
|
||||
C int
|
||||
}
|
||||
}{struct{ C int }{1}, struct{ C int }{1}},
|
||||
},
|
||||
|
||||
// Bug #1133337
|
||||
{
|
||||
"foo: ''",
|
||||
map[string]*string{"foo": new(string)},
|
||||
}, {
|
||||
"foo: null",
|
||||
map[string]string{"foo": ""},
|
||||
}, {
|
||||
"foo: null",
|
||||
map[string]interface{}{"foo": nil},
|
||||
},
|
||||
|
||||
// Ignored field
|
||||
{
|
||||
"a: 1\nb: 2\n",
|
||||
&struct {
|
||||
A int
|
||||
B int "-"
|
||||
}{1, 0},
|
||||
},
|
||||
|
||||
// Bug #1191981
|
||||
{
|
||||
"" +
|
||||
"%YAML 1.1\n" +
|
||||
"--- !!str\n" +
|
||||
`"Generic line break (no glyph)\n\` + "\n" +
|
||||
` Generic line break (glyphed)\n\` + "\n" +
|
||||
` Line separator\u2028\` + "\n" +
|
||||
` Paragraph separator\u2029"` + "\n",
|
||||
"" +
|
||||
"Generic line break (no glyph)\n" +
|
||||
"Generic line break (glyphed)\n" +
|
||||
"Line separator\u2028Paragraph separator\u2029",
|
||||
},
|
||||
|
||||
// Struct inlining
|
||||
{
|
||||
"a: 1\nb: 2\nc: 3\n",
|
||||
&struct {
|
||||
A int
|
||||
C inlineB `yaml:",inline"`
|
||||
}{1, inlineB{2, inlineC{3}}},
|
||||
},
|
||||
|
||||
// Map inlining
|
||||
{
|
||||
"a: 1\nb: 2\nc: 3\n",
|
||||
&struct {
|
||||
A int
|
||||
C map[string]int `yaml:",inline"`
|
||||
}{1, map[string]int{"b": 2, "c": 3}},
|
||||
},
|
||||
|
||||
// bug 1243827
|
||||
{
|
||||
"a: -b_c",
|
||||
map[string]interface{}{"a": "-b_c"},
|
||||
},
|
||||
{
|
||||
"a: +b_c",
|
||||
map[string]interface{}{"a": "+b_c"},
|
||||
},
|
||||
{
|
||||
"a: 50cent_of_dollar",
|
||||
map[string]interface{}{"a": "50cent_of_dollar"},
|
||||
},
|
||||
|
||||
// Duration
|
||||
{
|
||||
"a: 3s",
|
||||
map[string]time.Duration{"a": 3 * time.Second},
|
||||
},
|
||||
|
||||
// Issue #24.
|
||||
{
|
||||
"a: <foo>",
|
||||
map[string]string{"a": "<foo>"},
|
||||
},
|
||||
|
||||
// Base 60 floats are obsolete and unsupported.
|
||||
{
|
||||
"a: 1:1\n",
|
||||
map[string]string{"a": "1:1"},
|
||||
},
|
||||
|
||||
// Binary data.
|
||||
{
|
||||
"a: !!binary gIGC\n",
|
||||
map[string]string{"a": "\x80\x81\x82"},
|
||||
}, {
|
||||
"a: !!binary |\n " + strings.Repeat("kJCQ", 17) + "kJ\n CQ\n",
|
||||
map[string]string{"a": strings.Repeat("\x90", 54)},
|
||||
}, {
|
||||
"a: !!binary |\n " + strings.Repeat("A", 70) + "\n ==\n",
|
||||
map[string]string{"a": strings.Repeat("\x00", 52)},
|
||||
},
|
||||
|
||||
// Ordered maps.
|
||||
{
|
||||
"{b: 2, a: 1, d: 4, c: 3, sub: {e: 5}}",
|
||||
&yaml.MapSlice{{"b", 2}, {"a", 1}, {"d", 4}, {"c", 3}, {"sub", yaml.MapSlice{{"e", 5}}}},
|
||||
},
|
||||
|
||||
// Issue #39.
|
||||
{
|
||||
"a:\n b:\n c: d\n",
|
||||
map[string]struct{ B interface{} }{"a": {map[interface{}]interface{}{"c": "d"}}},
|
||||
},
|
||||
|
||||
// Custom map type.
|
||||
{
|
||||
"a: {b: c}",
|
||||
M{"a": M{"b": "c"}},
|
||||
},
|
||||
|
||||
// Support encoding.TextUnmarshaler.
|
||||
{
|
||||
"a: 1.2.3.4\n",
|
||||
map[string]net.IP{"a": net.IPv4(1, 2, 3, 4)},
|
||||
},
|
||||
{
|
||||
"a: 2015-02-24T18:19:39Z\n",
|
||||
map[string]time.Time{"a": time.Unix(1424801979, 0).In(time.UTC)},
|
||||
},
|
||||
|
||||
// Decode empty lists into zero-length slices.
|
||||
{
|
||||
"a: []",
|
||||
&struct{ A []int }{[]int{}},
|
||||
},
|
||||
|
||||
// UTF-16-LE
|
||||
{
|
||||
"\xff\xfe\xf1\x00o\x00\xf1\x00o\x00:\x00 \x00v\x00e\x00r\x00y\x00 \x00y\x00e\x00s\x00\n\x00",
|
||||
M{"ñoño": "very yes"},
|
||||
},
|
||||
// UTF-16-LE with surrogate.
|
||||
{
|
||||
"\xff\xfe\xf1\x00o\x00\xf1\x00o\x00:\x00 \x00v\x00e\x00r\x00y\x00 \x00y\x00e\x00s\x00 \x00=\xd8\xd4\xdf\n\x00",
|
||||
M{"ñoño": "very yes 🟔"},
|
||||
},
|
||||
|
||||
// UTF-16-BE
|
||||
{
|
||||
"\xfe\xff\x00\xf1\x00o\x00\xf1\x00o\x00:\x00 \x00v\x00e\x00r\x00y\x00 \x00y\x00e\x00s\x00\n",
|
||||
M{"ñoño": "very yes"},
|
||||
},
|
||||
// UTF-16-BE with surrogate.
|
||||
{
|
||||
"\xfe\xff\x00\xf1\x00o\x00\xf1\x00o\x00:\x00 \x00v\x00e\x00r\x00y\x00 \x00y\x00e\x00s\x00 \xd8=\xdf\xd4\x00\n",
|
||||
M{"ñoño": "very yes 🟔"},
|
||||
},
|
||||
|
||||
// YAML Float regex shouldn't match this
|
||||
{
|
||||
"a: 123456e1\n",
|
||||
M{"a": "123456e1"},
|
||||
}, {
|
||||
"a: 123456E1\n",
|
||||
M{"a": "123456E1"},
|
||||
},
|
||||
}
|
||||
|
||||
type M map[interface{}]interface{}
|
||||
|
||||
type inlineB struct {
|
||||
B int
|
||||
inlineC `yaml:",inline"`
|
||||
}
|
||||
|
||||
type inlineC struct {
|
||||
C int
|
||||
}
|
||||
|
||||
func (s *S) TestUnmarshal(c *C) {
|
||||
for _, item := range unmarshalTests {
|
||||
t := reflect.ValueOf(item.value).Type()
|
||||
var value interface{}
|
||||
switch t.Kind() {
|
||||
case reflect.Map:
|
||||
value = reflect.MakeMap(t).Interface()
|
||||
case reflect.String:
|
||||
value = reflect.New(t).Interface()
|
||||
case reflect.Ptr:
|
||||
value = reflect.New(t.Elem()).Interface()
|
||||
default:
|
||||
c.Fatalf("missing case for %s", t)
|
||||
}
|
||||
err := yaml.Unmarshal([]byte(item.data), value)
|
||||
if _, ok := err.(*yaml.TypeError); !ok {
|
||||
c.Assert(err, IsNil)
|
||||
}
|
||||
if t.Kind() == reflect.String {
|
||||
c.Assert(*value.(*string), Equals, item.value)
|
||||
} else {
|
||||
c.Assert(value, DeepEquals, item.value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s *S) TestUnmarshalNaN(c *C) {
|
||||
value := map[string]interface{}{}
|
||||
err := yaml.Unmarshal([]byte("notanum: .NaN"), &value)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(math.IsNaN(value["notanum"].(float64)), Equals, true)
|
||||
}
|
||||
|
||||
var unmarshalErrorTests = []struct {
|
||||
data, error string
|
||||
}{
|
||||
{"v: !!float 'error'", "yaml: cannot decode !!str `error` as a !!float"},
|
||||
{"v: [A,", "yaml: line 1: did not find expected node content"},
|
||||
{"v:\n- [A,", "yaml: line 2: did not find expected node content"},
|
||||
{"a: *b\n", "yaml: unknown anchor 'b' referenced"},
|
||||
{"a: &a\n b: *a\n", "yaml: anchor 'a' value contains itself"},
|
||||
{"value: -", "yaml: block sequence entries are not allowed in this context"},
|
||||
{"a: !!binary ==", "yaml: !!binary value contains invalid base64 data"},
|
||||
{"{[.]}", `yaml: invalid map key: \[\]interface \{\}\{"\."\}`},
|
||||
{"{{.}}", `yaml: invalid map key: map\[interface\ \{\}\]interface \{\}\{".":interface \{\}\(nil\)\}`},
|
||||
}
|
||||
|
||||
func (s *S) TestUnmarshalErrors(c *C) {
|
||||
for _, item := range unmarshalErrorTests {
|
||||
var value interface{}
|
||||
err := yaml.Unmarshal([]byte(item.data), &value)
|
||||
c.Assert(err, ErrorMatches, item.error, Commentf("Partial unmarshal: %#v", value))
|
||||
}
|
||||
}
|
||||
|
||||
var unmarshalerTests = []struct {
|
||||
data, tag string
|
||||
value interface{}
|
||||
}{
|
||||
{"_: {hi: there}", "!!map", map[interface{}]interface{}{"hi": "there"}},
|
||||
{"_: [1,A]", "!!seq", []interface{}{1, "A"}},
|
||||
{"_: 10", "!!int", 10},
|
||||
{"_: null", "!!null", nil},
|
||||
{`_: BAR!`, "!!str", "BAR!"},
|
||||
{`_: "BAR!"`, "!!str", "BAR!"},
|
||||
{"_: !!foo 'BAR!'", "!!foo", "BAR!"},
|
||||
{`_: ""`, "!!str", ""},
|
||||
}
|
||||
|
||||
var unmarshalerResult = map[int]error{}
|
||||
|
||||
type unmarshalerType struct {
|
||||
value interface{}
|
||||
}
|
||||
|
||||
func (o *unmarshalerType) UnmarshalYAML(unmarshal func(v interface{}) error) error {
|
||||
if err := unmarshal(&o.value); err != nil {
|
||||
return err
|
||||
}
|
||||
if i, ok := o.value.(int); ok {
|
||||
if result, ok := unmarshalerResult[i]; ok {
|
||||
return result
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type unmarshalerPointer struct {
|
||||
Field *unmarshalerType "_"
|
||||
}
|
||||
|
||||
type unmarshalerValue struct {
|
||||
Field unmarshalerType "_"
|
||||
}
|
||||
|
||||
func (s *S) TestUnmarshalerPointerField(c *C) {
|
||||
for _, item := range unmarshalerTests {
|
||||
obj := &unmarshalerPointer{}
|
||||
err := yaml.Unmarshal([]byte(item.data), obj)
|
||||
c.Assert(err, IsNil)
|
||||
if item.value == nil {
|
||||
c.Assert(obj.Field, IsNil)
|
||||
} else {
|
||||
c.Assert(obj.Field, NotNil, Commentf("Pointer not initialized (%#v)", item.value))
|
||||
c.Assert(obj.Field.value, DeepEquals, item.value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s *S) TestUnmarshalerValueField(c *C) {
|
||||
for _, item := range unmarshalerTests {
|
||||
obj := &unmarshalerValue{}
|
||||
err := yaml.Unmarshal([]byte(item.data), obj)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(obj.Field, NotNil, Commentf("Pointer not initialized (%#v)", item.value))
|
||||
c.Assert(obj.Field.value, DeepEquals, item.value)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *S) TestUnmarshalerWholeDocument(c *C) {
|
||||
obj := &unmarshalerType{}
|
||||
err := yaml.Unmarshal([]byte(unmarshalerTests[0].data), obj)
|
||||
c.Assert(err, IsNil)
|
||||
value, ok := obj.value.(map[interface{}]interface{})
|
||||
c.Assert(ok, Equals, true, Commentf("value: %#v", obj.value))
|
||||
c.Assert(value["_"], DeepEquals, unmarshalerTests[0].value)
|
||||
}
|
||||
|
||||
func (s *S) TestUnmarshalerTypeError(c *C) {
|
||||
unmarshalerResult[2] = &yaml.TypeError{[]string{"foo"}}
|
||||
unmarshalerResult[4] = &yaml.TypeError{[]string{"bar"}}
|
||||
defer func() {
|
||||
delete(unmarshalerResult, 2)
|
||||
delete(unmarshalerResult, 4)
|
||||
}()
|
||||
|
||||
type T struct {
|
||||
Before int
|
||||
After int
|
||||
M map[string]*unmarshalerType
|
||||
}
|
||||
var v T
|
||||
data := `{before: A, m: {abc: 1, def: 2, ghi: 3, jkl: 4}, after: B}`
|
||||
err := yaml.Unmarshal([]byte(data), &v)
|
||||
c.Assert(err, ErrorMatches, ""+
|
||||
"yaml: unmarshal errors:\n"+
|
||||
" line 1: cannot unmarshal !!str `A` into int\n"+
|
||||
" foo\n"+
|
||||
" bar\n"+
|
||||
" line 1: cannot unmarshal !!str `B` into int")
|
||||
c.Assert(v.M["abc"], NotNil)
|
||||
c.Assert(v.M["def"], IsNil)
|
||||
c.Assert(v.M["ghi"], NotNil)
|
||||
c.Assert(v.M["jkl"], IsNil)
|
||||
|
||||
c.Assert(v.M["abc"].value, Equals, 1)
|
||||
c.Assert(v.M["ghi"].value, Equals, 3)
|
||||
}
|
||||
|
||||
type proxyTypeError struct{}
|
||||
|
||||
func (v *proxyTypeError) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
||||
var s string
|
||||
var a int32
|
||||
var b int64
|
||||
if err := unmarshal(&s); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
if s == "a" {
|
||||
if err := unmarshal(&b); err == nil {
|
||||
panic("should have failed")
|
||||
}
|
||||
return unmarshal(&a)
|
||||
}
|
||||
if err := unmarshal(&a); err == nil {
|
||||
panic("should have failed")
|
||||
}
|
||||
return unmarshal(&b)
|
||||
}
|
||||
|
||||
func (s *S) TestUnmarshalerTypeErrorProxying(c *C) {
|
||||
type T struct {
|
||||
Before int
|
||||
After int
|
||||
M map[string]*proxyTypeError
|
||||
}
|
||||
var v T
|
||||
data := `{before: A, m: {abc: a, def: b}, after: B}`
|
||||
err := yaml.Unmarshal([]byte(data), &v)
|
||||
c.Assert(err, ErrorMatches, ""+
|
||||
"yaml: unmarshal errors:\n"+
|
||||
" line 1: cannot unmarshal !!str `A` into int\n"+
|
||||
" line 1: cannot unmarshal !!str `a` into int32\n"+
|
||||
" line 1: cannot unmarshal !!str `b` into int64\n"+
|
||||
" line 1: cannot unmarshal !!str `B` into int")
|
||||
}
|
||||
|
||||
type failingUnmarshaler struct{}
|
||||
|
||||
var failingErr = errors.New("failingErr")
|
||||
|
||||
func (ft *failingUnmarshaler) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
||||
return failingErr
|
||||
}
|
||||
|
||||
func (s *S) TestUnmarshalerError(c *C) {
|
||||
err := yaml.Unmarshal([]byte("a: b"), &failingUnmarshaler{})
|
||||
c.Assert(err, Equals, failingErr)
|
||||
}
|
||||
|
||||
type sliceUnmarshaler []int
|
||||
|
||||
func (su *sliceUnmarshaler) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
||||
var slice []int
|
||||
err := unmarshal(&slice)
|
||||
if err == nil {
|
||||
*su = slice
|
||||
return nil
|
||||
}
|
||||
|
||||
var intVal int
|
||||
err = unmarshal(&intVal)
|
||||
if err == nil {
|
||||
*su = []int{intVal}
|
||||
return nil
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *S) TestUnmarshalerRetry(c *C) {
|
||||
var su sliceUnmarshaler
|
||||
err := yaml.Unmarshal([]byte("[1, 2, 3]"), &su)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(su, DeepEquals, sliceUnmarshaler([]int{1, 2, 3}))
|
||||
|
||||
err = yaml.Unmarshal([]byte("1"), &su)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(su, DeepEquals, sliceUnmarshaler([]int{1}))
|
||||
}
|
||||
|
||||
// From http://yaml.org/type/merge.html
|
||||
var mergeTests = `
|
||||
anchors:
|
||||
list:
|
||||
- &CENTER { "x": 1, "y": 2 }
|
||||
- &LEFT { "x": 0, "y": 2 }
|
||||
- &BIG { "r": 10 }
|
||||
- &SMALL { "r": 1 }
|
||||
|
||||
# All the following maps are equal:
|
||||
|
||||
plain:
|
||||
# Explicit keys
|
||||
"x": 1
|
||||
"y": 2
|
||||
"r": 10
|
||||
label: center/big
|
||||
|
||||
mergeOne:
|
||||
# Merge one map
|
||||
<< : *CENTER
|
||||
"r": 10
|
||||
label: center/big
|
||||
|
||||
mergeMultiple:
|
||||
# Merge multiple maps
|
||||
<< : [ *CENTER, *BIG ]
|
||||
label: center/big
|
||||
|
||||
override:
|
||||
# Override
|
||||
<< : [ *BIG, *LEFT, *SMALL ]
|
||||
"x": 1
|
||||
label: center/big
|
||||
|
||||
shortTag:
|
||||
# Explicit short merge tag
|
||||
!!merge "<<" : [ *CENTER, *BIG ]
|
||||
label: center/big
|
||||
|
||||
longTag:
|
||||
# Explicit merge long tag
|
||||
!<tag:yaml.org,2002:merge> "<<" : [ *CENTER, *BIG ]
|
||||
label: center/big
|
||||
|
||||
inlineMap:
|
||||
# Inlined map
|
||||
<< : {"x": 1, "y": 2, "r": 10}
|
||||
label: center/big
|
||||
|
||||
inlineSequenceMap:
|
||||
# Inlined map in sequence
|
||||
<< : [ *CENTER, {"r": 10} ]
|
||||
label: center/big
|
||||
`
|
||||
|
||||
func (s *S) TestMerge(c *C) {
|
||||
var want = map[interface{}]interface{}{
|
||||
"x": 1,
|
||||
"y": 2,
|
||||
"r": 10,
|
||||
"label": "center/big",
|
||||
}
|
||||
|
||||
var m map[interface{}]interface{}
|
||||
err := yaml.Unmarshal([]byte(mergeTests), &m)
|
||||
c.Assert(err, IsNil)
|
||||
for name, test := range m {
|
||||
if name == "anchors" {
|
||||
continue
|
||||
}
|
||||
c.Assert(test, DeepEquals, want, Commentf("test %q failed", name))
|
||||
}
|
||||
}
|
||||
|
||||
func (s *S) TestMergeStruct(c *C) {
|
||||
type Data struct {
|
||||
X, Y, R int
|
||||
Label string
|
||||
}
|
||||
want := Data{1, 2, 10, "center/big"}
|
||||
|
||||
var m map[string]Data
|
||||
err := yaml.Unmarshal([]byte(mergeTests), &m)
|
||||
c.Assert(err, IsNil)
|
||||
for name, test := range m {
|
||||
if name == "anchors" {
|
||||
continue
|
||||
}
|
||||
c.Assert(test, Equals, want, Commentf("test %q failed", name))
|
||||
}
|
||||
}
|
||||
|
||||
var unmarshalNullTests = []func() interface{}{
|
||||
func() interface{} { var v interface{}; v = "v"; return &v },
|
||||
func() interface{} { var s = "s"; return &s },
|
||||
func() interface{} { var s = "s"; sptr := &s; return &sptr },
|
||||
func() interface{} { var i = 1; return &i },
|
||||
func() interface{} { var i = 1; iptr := &i; return &iptr },
|
||||
func() interface{} { m := map[string]int{"s": 1}; return &m },
|
||||
func() interface{} { m := map[string]int{"s": 1}; return m },
|
||||
}
|
||||
|
||||
func (s *S) TestUnmarshalNull(c *C) {
|
||||
for _, test := range unmarshalNullTests {
|
||||
item := test()
|
||||
zero := reflect.Zero(reflect.TypeOf(item).Elem()).Interface()
|
||||
err := yaml.Unmarshal([]byte("null"), item)
|
||||
c.Assert(err, IsNil)
|
||||
if reflect.TypeOf(item).Kind() == reflect.Map {
|
||||
c.Assert(reflect.ValueOf(item).Interface(), DeepEquals, reflect.MakeMap(reflect.TypeOf(item)).Interface())
|
||||
} else {
|
||||
c.Assert(reflect.ValueOf(item).Elem().Interface(), DeepEquals, zero)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s *S) TestUnmarshalSliceOnPreset(c *C) {
|
||||
// Issue #48.
|
||||
v := struct{ A []int }{[]int{1}}
|
||||
yaml.Unmarshal([]byte("a: [2]"), &v)
|
||||
c.Assert(v.A, DeepEquals, []int{2})
|
||||
}
|
||||
|
||||
//var data []byte
|
||||
//func init() {
|
||||
// var err error
|
||||
// data, err = ioutil.ReadFile("/tmp/file.yaml")
|
||||
// if err != nil {
|
||||
// panic(err)
|
||||
// }
|
||||
//}
|
||||
//
|
||||
//func (s *S) BenchmarkUnmarshal(c *C) {
|
||||
// var err error
|
||||
// for i := 0; i < c.N; i++ {
|
||||
// var v map[string]interface{}
|
||||
// err = yaml.Unmarshal(data, &v)
|
||||
// }
|
||||
// if err != nil {
|
||||
// panic(err)
|
||||
// }
|
||||
//}
|
||||
//
|
||||
//func (s *S) BenchmarkMarshal(c *C) {
|
||||
// var v map[string]interface{}
|
||||
// yaml.Unmarshal(data, &v)
|
||||
// c.ResetTimer()
|
||||
// for i := 0; i < c.N; i++ {
|
||||
// yaml.Marshal(&v)
|
||||
// }
|
||||
//}
|
1684
_vendor/src/gopkg.in/yaml.v2/emitterc.go
Normal file
306
_vendor/src/gopkg.in/yaml.v2/encode.go
Normal file
@ -0,0 +1,306 @@
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"encoding"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type encoder struct {
|
||||
emitter yaml_emitter_t
|
||||
event yaml_event_t
|
||||
out []byte
|
||||
flow bool
|
||||
}
|
||||
|
||||
func newEncoder() (e *encoder) {
|
||||
e = &encoder{}
|
||||
e.must(yaml_emitter_initialize(&e.emitter))
|
||||
yaml_emitter_set_output_string(&e.emitter, &e.out)
|
||||
yaml_emitter_set_unicode(&e.emitter, true)
|
||||
e.must(yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING))
|
||||
e.emit()
|
||||
e.must(yaml_document_start_event_initialize(&e.event, nil, nil, true))
|
||||
e.emit()
|
||||
return e
|
||||
}
|
||||
|
||||
func (e *encoder) finish() {
|
||||
e.must(yaml_document_end_event_initialize(&e.event, true))
|
||||
e.emit()
|
||||
e.emitter.open_ended = false
|
||||
e.must(yaml_stream_end_event_initialize(&e.event))
|
||||
e.emit()
|
||||
}
|
||||
|
||||
func (e *encoder) destroy() {
|
||||
yaml_emitter_delete(&e.emitter)
|
||||
}
|
||||
|
||||
func (e *encoder) emit() {
|
||||
// This will internally delete the e.event value.
|
||||
if !yaml_emitter_emit(&e.emitter, &e.event) && e.event.typ != yaml_DOCUMENT_END_EVENT && e.event.typ != yaml_STREAM_END_EVENT {
|
||||
e.must(false)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *encoder) must(ok bool) {
|
||||
if !ok {
|
||||
msg := e.emitter.problem
|
||||
if msg == "" {
|
||||
msg = "unknown problem generating YAML content"
|
||||
}
|
||||
failf("%s", msg)
|
||||
}
|
||||
}
|
||||
|
||||
func (e *encoder) marshal(tag string, in reflect.Value) {
|
||||
if !in.IsValid() {
|
||||
e.nilv()
|
||||
return
|
||||
}
|
||||
iface := in.Interface()
|
||||
if m, ok := iface.(Marshaler); ok {
|
||||
v, err := m.MarshalYAML()
|
||||
if err != nil {
|
||||
fail(err)
|
||||
}
|
||||
if v == nil {
|
||||
e.nilv()
|
||||
return
|
||||
}
|
||||
in = reflect.ValueOf(v)
|
||||
} else if m, ok := iface.(encoding.TextMarshaler); ok {
|
||||
text, err := m.MarshalText()
|
||||
if err != nil {
|
||||
fail(err)
|
||||
}
|
||||
in = reflect.ValueOf(string(text))
|
||||
}
|
||||
switch in.Kind() {
|
||||
case reflect.Interface:
|
||||
if in.IsNil() {
|
||||
e.nilv()
|
||||
} else {
|
||||
e.marshal(tag, in.Elem())
|
||||
}
|
||||
case reflect.Map:
|
||||
e.mapv(tag, in)
|
||||
case reflect.Ptr:
|
||||
if in.IsNil() {
|
||||
e.nilv()
|
||||
} else {
|
||||
e.marshal(tag, in.Elem())
|
||||
}
|
||||
case reflect.Struct:
|
||||
e.structv(tag, in)
|
||||
case reflect.Slice:
|
||||
if in.Type().Elem() == mapItemType {
|
||||
e.itemsv(tag, in)
|
||||
} else {
|
||||
e.slicev(tag, in)
|
||||
}
|
||||
case reflect.String:
|
||||
e.stringv(tag, in)
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
if in.Type() == durationType {
|
||||
e.stringv(tag, reflect.ValueOf(iface.(time.Duration).String()))
|
||||
} else {
|
||||
e.intv(tag, in)
|
||||
}
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
e.uintv(tag, in)
|
||||
case reflect.Float32, reflect.Float64:
|
||||
e.floatv(tag, in)
|
||||
case reflect.Bool:
|
||||
e.boolv(tag, in)
|
||||
default:
|
||||
panic("cannot marshal type: " + in.Type().String())
|
||||
}
|
||||
}
|
||||
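The time.Duration special case in marshal above pairs with the matching branch in the decoder's scalar handling; a small round-trip sketch, assuming only the public Marshal/Unmarshal API and mirroring the "a: 3s" fixtures in the tests:

package main

import (
	"fmt"
	"time"

	"gopkg.in/yaml.v2"
)

func main() {
	// Durations are marshalled via their String() form ("3s"), and parsed
	// back with time.ParseDuration on the decode side.
	out, err := yaml.Marshal(map[string]time.Duration{"a": 3 * time.Second})
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out)) // a: 3s

	var back map[string]time.Duration
	if err := yaml.Unmarshal(out, &back); err != nil {
		panic(err)
	}
	fmt.Println(back["a"] == 3*time.Second) // true
}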
|
||||
func (e *encoder) mapv(tag string, in reflect.Value) {
|
||||
e.mappingv(tag, func() {
|
||||
keys := keyList(in.MapKeys())
|
||||
sort.Sort(keys)
|
||||
for _, k := range keys {
|
||||
e.marshal("", k)
|
||||
e.marshal("", in.MapIndex(k))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (e *encoder) itemsv(tag string, in reflect.Value) {
|
||||
e.mappingv(tag, func() {
|
||||
slice := in.Convert(reflect.TypeOf([]MapItem{})).Interface().([]MapItem)
|
||||
for _, item := range slice {
|
||||
e.marshal("", reflect.ValueOf(item.Key))
|
||||
e.marshal("", reflect.ValueOf(item.Value))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (e *encoder) structv(tag string, in reflect.Value) {
|
||||
sinfo, err := getStructInfo(in.Type())
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
e.mappingv(tag, func() {
|
||||
for _, info := range sinfo.FieldsList {
|
||||
var value reflect.Value
|
||||
if info.Inline == nil {
|
||||
value = in.Field(info.Num)
|
||||
} else {
|
||||
value = in.FieldByIndex(info.Inline)
|
||||
}
|
||||
if info.OmitEmpty && isZero(value) {
|
||||
continue
|
||||
}
|
||||
e.marshal("", reflect.ValueOf(info.Key))
|
||||
e.flow = info.Flow
|
||||
e.marshal("", value)
|
||||
}
|
||||
if sinfo.InlineMap >= 0 {
|
||||
m := in.Field(sinfo.InlineMap)
|
||||
if m.Len() > 0 {
|
||||
e.flow = false
|
||||
keys := keyList(m.MapKeys())
|
||||
sort.Sort(keys)
|
||||
for _, k := range keys {
|
||||
if _, found := sinfo.FieldsMap[k.String()]; found {
|
||||
panic(fmt.Sprintf("Can't have key %q in inlined map; conflicts with struct field", k.String()))
|
||||
}
|
||||
e.marshal("", k)
|
||||
e.flow = false
|
||||
e.marshal("", m.MapIndex(k))
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (e *encoder) mappingv(tag string, f func()) {
|
||||
implicit := tag == ""
|
||||
style := yaml_BLOCK_MAPPING_STYLE
|
||||
if e.flow {
|
||||
e.flow = false
|
||||
style = yaml_FLOW_MAPPING_STYLE
|
||||
}
|
||||
e.must(yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
|
||||
e.emit()
|
||||
f()
|
||||
e.must(yaml_mapping_end_event_initialize(&e.event))
|
||||
e.emit()
|
||||
}
|
||||
|
||||
func (e *encoder) slicev(tag string, in reflect.Value) {
|
||||
implicit := tag == ""
|
||||
style := yaml_BLOCK_SEQUENCE_STYLE
|
||||
if e.flow {
|
||||
e.flow = false
|
||||
style = yaml_FLOW_SEQUENCE_STYLE
|
||||
}
|
||||
e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
|
||||
e.emit()
|
||||
n := in.Len()
|
||||
for i := 0; i < n; i++ {
|
||||
e.marshal("", in.Index(i))
|
||||
}
|
||||
e.must(yaml_sequence_end_event_initialize(&e.event))
|
||||
e.emit()
|
||||
}
|
||||
|
||||
// isBase60Float returns whether s is in base 60 notation as defined in YAML 1.1.
|
||||
//
|
||||
// The base 60 float notation in YAML 1.1 is a terrible idea and is unsupported
|
||||
// in YAML 1.2 and by this package, but these should be marshalled quoted for
|
||||
// the time being for compatibility with other parsers.
|
||||
func isBase60Float(s string) (result bool) {
|
||||
// Fast path.
|
||||
if s == "" {
|
||||
return false
|
||||
}
|
||||
c := s[0]
|
||||
if !(c == '+' || c == '-' || c >= '0' && c <= '9') || strings.IndexByte(s, ':') < 0 {
|
||||
return false
|
||||
}
|
||||
// Do the full match.
|
||||
return base60float.MatchString(s)
|
||||
}
|
||||
|
||||
// From http://yaml.org/type/float.html, except the regular expression there
|
||||
// is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix.
|
||||
var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`)
|
||||
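A small sketch of the quoting this check triggers, mirroring the "1:1" fixture in encode_test.go further down: base-60-looking strings are emitted double-quoted so YAML 1.1 parsers do not read them back as sexagesimal floats.

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	// "1:1" matches the base60float pattern, so the emitter forces the
	// double-quoted scalar style.
	out, err := yaml.Marshal(map[string]string{"a": "1:1"})
	if err != nil {
		panic(err)
	}
	fmt.Printf("%q\n", out) // "a: \"1:1\"\n"
}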
|
||||
func (e *encoder) stringv(tag string, in reflect.Value) {
|
||||
var style yaml_scalar_style_t
|
||||
s := in.String()
|
||||
rtag, rs := resolve("", s)
|
||||
if rtag == yaml_BINARY_TAG {
|
||||
if tag == "" || tag == yaml_STR_TAG {
|
||||
tag = rtag
|
||||
s = rs.(string)
|
||||
} else if tag == yaml_BINARY_TAG {
|
||||
failf("explicitly tagged !!binary data must be base64-encoded")
|
||||
} else {
|
||||
failf("cannot marshal invalid UTF-8 data as %s", shortTag(tag))
|
||||
}
|
||||
}
|
||||
if tag == "" && (rtag != yaml_STR_TAG || isBase60Float(s)) {
|
||||
style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
|
||||
} else if strings.Contains(s, "\n") {
|
||||
style = yaml_LITERAL_SCALAR_STYLE
|
||||
} else {
|
||||
style = yaml_PLAIN_SCALAR_STYLE
|
||||
}
|
||||
e.emitScalar(s, "", tag, style)
|
||||
}
|
||||
|
||||
func (e *encoder) boolv(tag string, in reflect.Value) {
|
||||
var s string
|
||||
if in.Bool() {
|
||||
s = "true"
|
||||
} else {
|
||||
s = "false"
|
||||
}
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) intv(tag string, in reflect.Value) {
|
||||
s := strconv.FormatInt(in.Int(), 10)
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) uintv(tag string, in reflect.Value) {
|
||||
s := strconv.FormatUint(in.Uint(), 10)
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) floatv(tag string, in reflect.Value) {
|
||||
// FIXME: Handle 64 bits here.
|
||||
s := strconv.FormatFloat(float64(in.Float()), 'g', -1, 32)
|
||||
switch s {
|
||||
case "+Inf":
|
||||
s = ".inf"
|
||||
case "-Inf":
|
||||
s = "-.inf"
|
||||
case "NaN":
|
||||
s = ".nan"
|
||||
}
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) nilv() {
|
||||
e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) {
|
||||
implicit := tag == ""
|
||||
e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style))
|
||||
e.emit()
|
||||
}
|
501
_vendor/src/gopkg.in/yaml.v2/encode_test.go
Normal file
@ -0,0 +1,501 @@
|
||||
package yaml_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
. "gopkg.in/check.v1"
|
||||
"gopkg.in/yaml.v2"
|
||||
"net"
|
||||
"os"
|
||||
)
|
||||
|
||||
var marshalIntTest = 123
|
||||
|
||||
var marshalTests = []struct {
|
||||
value interface{}
|
||||
data string
|
||||
}{
|
||||
{
|
||||
nil,
|
||||
"null\n",
|
||||
}, {
|
||||
&struct{}{},
|
||||
"{}\n",
|
||||
}, {
|
||||
map[string]string{"v": "hi"},
|
||||
"v: hi\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": "hi"},
|
||||
"v: hi\n",
|
||||
}, {
|
||||
map[string]string{"v": "true"},
|
||||
"v: \"true\"\n",
|
||||
}, {
|
||||
map[string]string{"v": "false"},
|
||||
"v: \"false\"\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": true},
|
||||
"v: true\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": false},
|
||||
"v: false\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": 10},
|
||||
"v: 10\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": -10},
|
||||
"v: -10\n",
|
||||
}, {
|
||||
map[string]uint{"v": 42},
|
||||
"v: 42\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": int64(4294967296)},
|
||||
"v: 4294967296\n",
|
||||
}, {
|
||||
map[string]int64{"v": int64(4294967296)},
|
||||
"v: 4294967296\n",
|
||||
}, {
|
||||
map[string]uint64{"v": 4294967296},
|
||||
"v: 4294967296\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": "10"},
|
||||
"v: \"10\"\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": 0.1},
|
||||
"v: 0.1\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": float64(0.1)},
|
||||
"v: 0.1\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": -0.1},
|
||||
"v: -0.1\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": math.Inf(+1)},
|
||||
"v: .inf\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": math.Inf(-1)},
|
||||
"v: -.inf\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": math.NaN()},
|
||||
"v: .nan\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": nil},
|
||||
"v: null\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": ""},
|
||||
"v: \"\"\n",
|
||||
}, {
|
||||
map[string][]string{"v": []string{"A", "B"}},
|
||||
"v:\n- A\n- B\n",
|
||||
}, {
|
||||
map[string][]string{"v": []string{"A", "B\nC"}},
|
||||
"v:\n- A\n- |-\n B\n C\n",
|
||||
}, {
|
||||
map[string][]interface{}{"v": []interface{}{"A", 1, map[string][]int{"B": []int{2, 3}}}},
|
||||
"v:\n- A\n- 1\n- B:\n - 2\n - 3\n",
|
||||
}, {
|
||||
map[string]interface{}{"a": map[interface{}]interface{}{"b": "c"}},
|
||||
"a:\n b: c\n",
|
||||
}, {
|
||||
map[string]interface{}{"a": "-"},
|
||||
"a: '-'\n",
|
||||
},
|
||||
|
||||
// Simple values.
|
||||
{
|
||||
&marshalIntTest,
|
||||
"123\n",
|
||||
},
|
||||
|
||||
// Structures
|
||||
{
|
||||
&struct{ Hello string }{"world"},
|
||||
"hello: world\n",
|
||||
}, {
|
||||
&struct {
|
||||
A struct {
|
||||
B string
|
||||
}
|
||||
}{struct{ B string }{"c"}},
|
||||
"a:\n b: c\n",
|
||||
}, {
|
||||
&struct {
|
||||
A *struct {
|
||||
B string
|
||||
}
|
||||
}{&struct{ B string }{"c"}},
|
||||
"a:\n b: c\n",
|
||||
}, {
|
||||
&struct {
|
||||
A *struct {
|
||||
B string
|
||||
}
|
||||
}{},
|
||||
"a: null\n",
|
||||
}, {
|
||||
&struct{ A int }{1},
|
||||
"a: 1\n",
|
||||
}, {
|
||||
&struct{ A []int }{[]int{1, 2}},
|
||||
"a:\n- 1\n- 2\n",
|
||||
}, {
|
||||
&struct {
|
||||
B int "a"
|
||||
}{1},
|
||||
"a: 1\n",
|
||||
}, {
|
||||
&struct{ A bool }{true},
|
||||
"a: true\n",
|
||||
},
|
||||
|
||||
// Conditional flag
|
||||
{
|
||||
&struct {
|
||||
A int "a,omitempty"
|
||||
B int "b,omitempty"
|
||||
}{1, 0},
|
||||
"a: 1\n",
|
||||
}, {
|
||||
&struct {
|
||||
A int "a,omitempty"
|
||||
B int "b,omitempty"
|
||||
}{0, 0},
|
||||
"{}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A *struct{ X, y int } "a,omitempty,flow"
|
||||
}{&struct{ X, y int }{1, 2}},
|
||||
"a: {x: 1}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A *struct{ X, y int } "a,omitempty,flow"
|
||||
}{nil},
|
||||
"{}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A *struct{ X, y int } "a,omitempty,flow"
|
||||
}{&struct{ X, y int }{}},
|
||||
"a: {x: 0}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A struct{ X, y int } "a,omitempty,flow"
|
||||
}{struct{ X, y int }{1, 2}},
|
||||
"a: {x: 1}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A struct{ X, y int } "a,omitempty,flow"
|
||||
}{struct{ X, y int }{0, 1}},
|
||||
"{}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A float64 "a,omitempty"
|
||||
B float64 "b,omitempty"
|
||||
}{1, 0},
|
||||
"a: 1\n",
|
||||
},
|
||||
|
||||
// Flow flag
|
||||
{
|
||||
&struct {
|
||||
A []int "a,flow"
|
||||
}{[]int{1, 2}},
|
||||
"a: [1, 2]\n",
|
||||
}, {
|
||||
&struct {
|
||||
A map[string]string "a,flow"
|
||||
}{map[string]string{"b": "c", "d": "e"}},
|
||||
"a: {b: c, d: e}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A struct {
|
||||
B, D string
|
||||
} "a,flow"
|
||||
}{struct{ B, D string }{"c", "e"}},
|
||||
"a: {b: c, d: e}\n",
|
||||
},
|
||||
|
||||
// Unexported field
|
||||
{
|
||||
&struct {
|
||||
u int
|
||||
A int
|
||||
}{0, 1},
|
||||
"a: 1\n",
|
||||
},
|
||||
|
||||
// Ignored field
|
||||
{
|
||||
&struct {
|
||||
A int
|
||||
B int "-"
|
||||
}{1, 2},
|
||||
"a: 1\n",
|
||||
},
|
||||
|
||||
// Struct inlining
|
||||
{
|
||||
&struct {
|
||||
A int
|
||||
C inlineB `yaml:",inline"`
|
||||
}{1, inlineB{2, inlineC{3}}},
|
||||
"a: 1\nb: 2\nc: 3\n",
|
||||
},
|
||||
|
||||
// Map inlining
|
||||
{
|
||||
&struct {
|
||||
A int
|
||||
C map[string]int `yaml:",inline"`
|
||||
}{1, map[string]int{"b": 2, "c": 3}},
|
||||
"a: 1\nb: 2\nc: 3\n",
|
||||
},
|
||||
|
||||
// Duration
|
||||
{
|
||||
map[string]time.Duration{"a": 3 * time.Second},
|
||||
"a: 3s\n",
|
||||
},
|
||||
|
||||
// Issue #24: bug in map merging logic.
|
||||
{
|
||||
map[string]string{"a": "<foo>"},
|
||||
"a: <foo>\n",
|
||||
},
|
||||
|
||||
// Issue #34: marshal unsupported base 60 floats quoted for compatibility
|
||||
// with old YAML 1.1 parsers.
|
||||
{
|
||||
map[string]string{"a": "1:1"},
|
||||
"a: \"1:1\"\n",
|
||||
},
|
||||
|
||||
// Binary data.
|
||||
{
|
||||
map[string]string{"a": "\x00"},
|
||||
"a: \"\\0\"\n",
|
||||
}, {
|
||||
map[string]string{"a": "\x80\x81\x82"},
|
||||
"a: !!binary gIGC\n",
|
||||
}, {
|
||||
map[string]string{"a": strings.Repeat("\x90", 54)},
|
||||
"a: !!binary |\n " + strings.Repeat("kJCQ", 17) + "kJ\n CQ\n",
|
||||
},
|
||||
|
||||
// Ordered maps.
|
||||
{
|
||||
&yaml.MapSlice{{"b", 2}, {"a", 1}, {"d", 4}, {"c", 3}, {"sub", yaml.MapSlice{{"e", 5}}}},
|
||||
"b: 2\na: 1\nd: 4\nc: 3\nsub:\n e: 5\n",
|
||||
},
|
||||
|
||||
// Encode unicode as utf-8 rather than in escaped form.
|
||||
{
|
||||
map[string]string{"a": "你好"},
|
||||
"a: 你好\n",
|
||||
},
|
||||
|
||||
// Support encoding.TextMarshaler.
|
||||
{
|
||||
map[string]net.IP{"a": net.IPv4(1, 2, 3, 4)},
|
||||
"a: 1.2.3.4\n",
|
||||
},
|
||||
{
|
||||
map[string]time.Time{"a": time.Unix(1424801979, 0)},
|
||||
"a: 2015-02-24T18:19:39Z\n",
|
||||
},
|
||||
|
||||
// Ensure strings containing ": " are quoted (reported as PR #43, but not reproducible).
|
||||
{
|
||||
map[string]string{"a": "b: c"},
|
||||
"a: 'b: c'\n",
|
||||
},
|
||||
|
||||
// Strings containing a hash mark ('#') should be quoted.
|
||||
{
|
||||
map[string]string{"a": "Hello #comment"},
|
||||
"a: 'Hello #comment'\n",
|
||||
},
|
||||
{
|
||||
map[string]string{"a": "你好 #comment"},
|
||||
"a: '你好 #comment'\n",
|
||||
},
|
||||
}
|
||||
|
||||
func (s *S) TestMarshal(c *C) {
|
||||
defer os.Setenv("TZ", os.Getenv("TZ"))
|
||||
os.Setenv("TZ", "UTC")
|
||||
for _, item := range marshalTests {
|
||||
data, err := yaml.Marshal(item.value)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(string(data), Equals, item.data)
|
||||
}
|
||||
}
|
||||
|
||||
var marshalErrorTests = []struct {
|
||||
value interface{}
|
||||
error string
|
||||
panic string
|
||||
}{{
|
||||
value: &struct {
|
||||
B int
|
||||
inlineB ",inline"
|
||||
}{1, inlineB{2, inlineC{3}}},
|
||||
panic: `Duplicated key 'b' in struct struct \{ B int; .*`,
|
||||
}, {
|
||||
value: &struct {
|
||||
A int
|
||||
B map[string]int ",inline"
|
||||
}{1, map[string]int{"a": 2}},
|
||||
panic: `Can't have key "a" in inlined map; conflicts with struct field`,
|
||||
}}
|
||||
|
||||
func (s *S) TestMarshalErrors(c *C) {
|
||||
for _, item := range marshalErrorTests {
|
||||
if item.panic != "" {
|
||||
c.Assert(func() { yaml.Marshal(item.value) }, PanicMatches, item.panic)
|
||||
} else {
|
||||
_, err := yaml.Marshal(item.value)
|
||||
c.Assert(err, ErrorMatches, item.error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s *S) TestMarshalTypeCache(c *C) {
|
||||
var data []byte
|
||||
var err error
|
||||
func() {
|
||||
type T struct{ A int }
|
||||
data, err = yaml.Marshal(&T{})
|
||||
c.Assert(err, IsNil)
|
||||
}()
|
||||
func() {
|
||||
type T struct{ B int }
|
||||
data, err = yaml.Marshal(&T{})
|
||||
c.Assert(err, IsNil)
|
||||
}()
|
||||
c.Assert(string(data), Equals, "b: 0\n")
|
||||
}
|
||||
|
||||
var marshalerTests = []struct {
|
||||
data string
|
||||
value interface{}
|
||||
}{
|
||||
{"_:\n hi: there\n", map[interface{}]interface{}{"hi": "there"}},
|
||||
{"_:\n- 1\n- A\n", []interface{}{1, "A"}},
|
||||
{"_: 10\n", 10},
|
||||
{"_: null\n", nil},
|
||||
{"_: BAR!\n", "BAR!"},
|
||||
}
|
||||
|
||||
type marshalerType struct {
|
||||
value interface{}
|
||||
}
|
||||
|
||||
func (o marshalerType) MarshalText() ([]byte, error) {
|
||||
panic("MarshalText called on type with MarshalYAML")
|
||||
}
|
||||
|
||||
func (o marshalerType) MarshalYAML() (interface{}, error) {
|
||||
return o.value, nil
|
||||
}
|
||||
|
||||
type marshalerValue struct {
|
||||
Field marshalerType "_"
|
||||
}
|
||||
|
||||
func (s *S) TestMarshaler(c *C) {
|
||||
for _, item := range marshalerTests {
|
||||
obj := &marshalerValue{}
|
||||
obj.Field.value = item.value
|
||||
data, err := yaml.Marshal(obj)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(string(data), Equals, string(item.data))
|
||||
}
|
||||
}
|
||||
|
||||
func (s *S) TestMarshalerWholeDocument(c *C) {
|
||||
obj := &marshalerType{}
|
||||
obj.value = map[string]string{"hello": "world!"}
|
||||
data, err := yaml.Marshal(obj)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(string(data), Equals, "hello: world!\n")
|
||||
}
|
||||
|
||||
type failingMarshaler struct{}
|
||||
|
||||
func (ft *failingMarshaler) MarshalYAML() (interface{}, error) {
|
||||
return nil, failingErr
|
||||
}
|
||||
|
||||
func (s *S) TestMarshalerError(c *C) {
|
||||
_, err := yaml.Marshal(&failingMarshaler{})
|
||||
c.Assert(err, Equals, failingErr)
|
||||
}
|
||||
|
||||
func (s *S) TestSortedOutput(c *C) {
|
||||
order := []interface{}{
|
||||
false,
|
||||
true,
|
||||
1,
|
||||
uint(1),
|
||||
1.0,
|
||||
1.1,
|
||||
1.2,
|
||||
2,
|
||||
uint(2),
|
||||
2.0,
|
||||
2.1,
|
||||
"",
|
||||
".1",
|
||||
".2",
|
||||
".a",
|
||||
"1",
|
||||
"2",
|
||||
"a!10",
|
||||
"a/2",
|
||||
"a/10",
|
||||
"a~10",
|
||||
"ab/1",
|
||||
"b/1",
|
||||
"b/01",
|
||||
"b/2",
|
||||
"b/02",
|
||||
"b/3",
|
||||
"b/03",
|
||||
"b1",
|
||||
"b01",
|
||||
"b3",
|
||||
"c2.10",
|
||||
"c10.2",
|
||||
"d1",
|
||||
"d12",
|
||||
"d12a",
|
||||
}
|
||||
m := make(map[interface{}]int)
|
||||
for _, k := range order {
|
||||
m[k] = 1
|
||||
}
|
||||
data, err := yaml.Marshal(m)
|
||||
c.Assert(err, IsNil)
|
||||
out := "\n" + string(data)
|
||||
last := 0
|
||||
for i, k := range order {
|
||||
repr := fmt.Sprint(k)
|
||||
if s, ok := k.(string); ok {
|
||||
if _, err = strconv.ParseFloat(repr, 32); s == "" || err == nil {
|
||||
repr = `"` + repr + `"`
|
||||
}
|
||||
}
|
||||
index := strings.Index(out, "\n"+repr+":")
|
||||
if index == -1 {
|
||||
c.Fatalf("%#v is not in the output: %#v", k, out)
|
||||
}
|
||||
if index < last {
|
||||
c.Fatalf("%#v was generated before %#v: %q", k, order[i-1], out)
|
||||
}
|
||||
last = index
|
||||
}
|
||||
}
|
1095
_vendor/src/gopkg.in/yaml.v2/parserc.go
Normal file
394
_vendor/src/gopkg.in/yaml.v2/readerc.go
Normal file
@ -0,0 +1,394 @@
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"io"
|
||||
)
|
||||
|
||||
// Set the reader error and return false.
|
||||
func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, offset int, value int) bool {
|
||||
parser.error = yaml_READER_ERROR
|
||||
parser.problem = problem
|
||||
parser.problem_offset = offset
|
||||
parser.problem_value = value
|
||||
return false
|
||||
}
|
||||
|
||||
// Byte order marks.
|
||||
const (
|
||||
bom_UTF8 = "\xef\xbb\xbf"
|
||||
bom_UTF16LE = "\xff\xfe"
|
||||
bom_UTF16BE = "\xfe\xff"
|
||||
)
|
||||
|
||||
// Determine the input stream encoding by checking the BOM symbol. If no BOM is
|
||||
// found, the UTF-8 encoding is assumed. Return true on success, false on failure.
|
||||
func yaml_parser_determine_encoding(parser *yaml_parser_t) bool {
|
||||
// Ensure that we had enough bytes in the raw buffer.
|
||||
for !parser.eof && len(parser.raw_buffer)-parser.raw_buffer_pos < 3 {
|
||||
if !yaml_parser_update_raw_buffer(parser) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// Determine the encoding.
|
||||
buf := parser.raw_buffer
|
||||
pos := parser.raw_buffer_pos
|
||||
avail := len(buf) - pos
|
||||
if avail >= 2 && buf[pos] == bom_UTF16LE[0] && buf[pos+1] == bom_UTF16LE[1] {
|
||||
parser.encoding = yaml_UTF16LE_ENCODING
|
||||
parser.raw_buffer_pos += 2
|
||||
parser.offset += 2
|
||||
} else if avail >= 2 && buf[pos] == bom_UTF16BE[0] && buf[pos+1] == bom_UTF16BE[1] {
|
||||
parser.encoding = yaml_UTF16BE_ENCODING
|
||||
parser.raw_buffer_pos += 2
|
||||
parser.offset += 2
|
||||
} else if avail >= 3 && buf[pos] == bom_UTF8[0] && buf[pos+1] == bom_UTF8[1] && buf[pos+2] == bom_UTF8[2] {
|
||||
parser.encoding = yaml_UTF8_ENCODING
|
||||
parser.raw_buffer_pos += 3
|
||||
parser.offset += 3
|
||||
} else {
|
||||
parser.encoding = yaml_UTF8_ENCODING
|
||||
}
|
||||
return true
|
||||
}
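// Illustrative sketch (not part of the original source): a minimal standalone
// restatement of the BOM check above, returning the detected encoding name and
// defaulting to UTF-8 when no BOM is present. The helper name detectBOM is
// hypothetical.
func detectBOM(raw []byte) string {
	switch {
	case len(raw) >= 2 && raw[0] == 0xFF && raw[1] == 0xFE:
		return "UTF-16LE"
	case len(raw) >= 2 && raw[0] == 0xFE && raw[1] == 0xFF:
		return "UTF-16BE"
	case len(raw) >= 3 && raw[0] == 0xEF && raw[1] == 0xBB && raw[2] == 0xBF:
		return "UTF-8 (with BOM)"
	default:
		return "UTF-8 (assumed, no BOM)"
	}
}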
|
||||
|
||||
// Update the raw buffer.
|
||||
func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool {
|
||||
size_read := 0
|
||||
|
||||
// Return if the raw buffer is full.
|
||||
if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) {
|
||||
return true
|
||||
}
|
||||
|
||||
// Return on EOF.
|
||||
if parser.eof {
|
||||
return true
|
||||
}
|
||||
|
||||
// Move the remaining bytes in the raw buffer to the beginning.
|
||||
if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) {
|
||||
copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:])
|
||||
}
|
||||
parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos]
|
||||
parser.raw_buffer_pos = 0
|
||||
|
||||
// Call the read handler to fill the buffer.
|
||||
size_read, err := parser.read_handler(parser, parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)])
|
||||
parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read]
|
||||
if err == io.EOF {
|
||||
parser.eof = true
|
||||
} else if err != nil {
|
||||
return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), parser.offset, -1)
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Ensure that the buffer contains at least `length` characters.
|
||||
// Return true on success, false on failure.
|
||||
//
|
||||
// The length is supposed to be significantly less than the buffer size.
|
||||
func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool {
|
||||
if parser.read_handler == nil {
|
||||
panic("read handler must be set")
|
||||
}
|
||||
|
||||
// If the EOF flag is set and the raw buffer is empty, do nothing.
|
||||
if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) {
|
||||
return true
|
||||
}
|
||||
|
||||
// Return if the buffer contains enough characters.
|
||||
if parser.unread >= length {
|
||||
return true
|
||||
}
|
||||
|
||||
// Determine the input encoding if it is not known yet.
|
||||
if parser.encoding == yaml_ANY_ENCODING {
|
||||
if !yaml_parser_determine_encoding(parser) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// Move the unread characters to the beginning of the buffer.
|
||||
buffer_len := len(parser.buffer)
|
||||
if parser.buffer_pos > 0 && parser.buffer_pos < buffer_len {
|
||||
copy(parser.buffer, parser.buffer[parser.buffer_pos:])
|
||||
buffer_len -= parser.buffer_pos
|
||||
parser.buffer_pos = 0
|
||||
} else if parser.buffer_pos == buffer_len {
|
||||
buffer_len = 0
|
||||
parser.buffer_pos = 0
|
||||
}
|
||||
|
||||
// Open the whole buffer for writing, and cut it before returning.
|
||||
parser.buffer = parser.buffer[:cap(parser.buffer)]
|
||||
|
||||
// Fill the buffer until it has enough characters.
|
||||
first := true
|
||||
for parser.unread < length {
|
||||
|
||||
// Fill the raw buffer if necessary.
|
||||
if !first || parser.raw_buffer_pos == len(parser.raw_buffer) {
|
||||
if !yaml_parser_update_raw_buffer(parser) {
|
||||
parser.buffer = parser.buffer[:buffer_len]
|
||||
return false
|
||||
}
|
||||
}
|
||||
first = false
|
||||
|
||||
// Decode the raw buffer.
|
||||
inner:
|
||||
for parser.raw_buffer_pos != len(parser.raw_buffer) {
|
||||
var value rune
|
||||
var width int
|
||||
|
||||
raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos
|
||||
|
||||
// Decode the next character.
|
||||
switch parser.encoding {
|
||||
case yaml_UTF8_ENCODING:
|
||||
// Decode a UTF-8 character. Check RFC 3629
|
||||
// (http://www.ietf.org/rfc/rfc3629.txt) for more details.
|
||||
//
|
||||
// The following table (taken from the RFC) is used for
|
||||
// decoding.
|
||||
//
|
||||
// Char. number range | UTF-8 octet sequence
|
||||
// (hexadecimal) | (binary)
|
||||
// --------------------+------------------------------------
|
||||
// 0000 0000-0000 007F | 0xxxxxxx
|
||||
// 0000 0080-0000 07FF | 110xxxxx 10xxxxxx
|
||||
// 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx
|
||||
// 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
|
||||
//
|
||||
// Additionally, the characters in the range 0xD800-0xDFFF
|
||||
// are prohibited as they are reserved for use with UTF-16
|
||||
// surrogate pairs.
|
||||
|
||||
// Determine the length of the UTF-8 sequence.
|
||||
octet := parser.raw_buffer[parser.raw_buffer_pos]
|
||||
switch {
|
||||
case octet&0x80 == 0x00:
|
||||
width = 1
|
||||
case octet&0xE0 == 0xC0:
|
||||
width = 2
|
||||
case octet&0xF0 == 0xE0:
|
||||
width = 3
|
||||
case octet&0xF8 == 0xF0:
|
||||
width = 4
|
||||
default:
|
||||
// The leading octet is invalid.
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"invalid leading UTF-8 octet",
|
||||
parser.offset, int(octet))
|
||||
}
|
||||
|
||||
// Check if the raw buffer contains an incomplete character.
|
||||
if width > raw_unread {
|
||||
if parser.eof {
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"incomplete UTF-8 octet sequence",
|
||||
parser.offset, -1)
|
||||
}
|
||||
break inner
|
||||
}
|
||||
|
||||
// Decode the leading octet.
|
||||
switch {
|
||||
case octet&0x80 == 0x00:
|
||||
value = rune(octet & 0x7F)
|
||||
case octet&0xE0 == 0xC0:
|
||||
value = rune(octet & 0x1F)
|
||||
case octet&0xF0 == 0xE0:
|
||||
value = rune(octet & 0x0F)
|
||||
case octet&0xF8 == 0xF0:
|
||||
value = rune(octet & 0x07)
|
||||
default:
|
||||
value = 0
|
||||
}
|
||||
|
||||
// Check and decode the trailing octets.
|
||||
for k := 1; k < width; k++ {
|
||||
octet = parser.raw_buffer[parser.raw_buffer_pos+k]
|
||||
|
||||
// Check if the octet is valid.
|
||||
if (octet & 0xC0) != 0x80 {
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"invalid trailing UTF-8 octet",
|
||||
parser.offset+k, int(octet))
|
||||
}
|
||||
|
||||
// Decode the octet.
|
||||
value = (value << 6) + rune(octet&0x3F)
|
||||
}
|
||||
|
||||
// Check the length of the sequence against the value.
|
||||
switch {
|
||||
case width == 1:
|
||||
case width == 2 && value >= 0x80:
|
||||
case width == 3 && value >= 0x800:
|
||||
case width == 4 && value >= 0x10000:
|
||||
default:
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"invalid length of a UTF-8 sequence",
|
||||
parser.offset, -1)
|
||||
}
|
||||
|
||||
// Check the range of the value.
|
||||
if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF {
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"invalid Unicode character",
|
||||
parser.offset, int(value))
|
||||
}
|
||||
|
||||
case yaml_UTF16LE_ENCODING, yaml_UTF16BE_ENCODING:
|
||||
var low, high int
|
||||
if parser.encoding == yaml_UTF16LE_ENCODING {
|
||||
low, high = 0, 1
|
||||
} else {
|
||||
low, high = 1, 0
|
||||
}
|
||||
|
||||
// The UTF-16 encoding is not as simple as one might
|
||||
// naively think. Check RFC 2781
|
||||
// (http://www.ietf.org/rfc/rfc2781.txt).
|
||||
//
|
||||
// Normally, two subsequent bytes describe a Unicode
|
||||
// character. However a special technique (called a
|
||||
// surrogate pair) is used for specifying character
|
||||
// values larger than 0xFFFF.
|
||||
//
|
||||
// A surrogate pair consists of two pseudo-characters:
|
||||
// high surrogate area (0xD800-0xDBFF)
|
||||
// low surrogate area (0xDC00-0xDFFF)
|
||||
//
|
||||
// The following formulas are used for decoding
|
||||
// and encoding characters using surrogate pairs:
|
||||
//
|
||||
// U = U' + 0x10000 (0x01 00 00 <= U <= 0x10 FF FF)
|
||||
// U' = yyyyyyyyyyxxxxxxxxxx (0 <= U' <= 0x0F FF FF)
|
||||
// W1 = 110110yyyyyyyyyy
|
||||
// W2 = 110111xxxxxxxxxx
|
||||
//
|
||||
// where U is the character value, W1 is the high surrogate
|
||||
// area, W2 is the low surrogate area.
|
||||
|
||||
// Check for incomplete UTF-16 character.
|
||||
if raw_unread < 2 {
|
||||
if parser.eof {
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"incomplete UTF-16 character",
|
||||
parser.offset, -1)
|
||||
}
|
||||
break inner
|
||||
}
|
||||
|
||||
// Get the character.
|
||||
value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) +
|
||||
(rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8)
|
||||
|
||||
// Check for unexpected low surrogate area.
|
||||
if value&0xFC00 == 0xDC00 {
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"unexpected low surrogate area",
|
||||
parser.offset, int(value))
|
||||
}
|
||||
|
||||
// Check for a high surrogate area.
|
||||
if value&0xFC00 == 0xD800 {
|
||||
width = 4
|
||||
|
||||
// Check for incomplete surrogate pair.
|
||||
if raw_unread < 4 {
|
||||
if parser.eof {
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"incomplete UTF-16 surrogate pair",
|
||||
parser.offset, -1)
|
||||
}
|
||||
break inner
|
||||
}
|
||||
|
||||
// Get the next character.
|
||||
value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) +
|
||||
(rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8)
|
||||
|
||||
// Check for a low surrogate area.
|
||||
if value2&0xFC00 != 0xDC00 {
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"expected low surrogate area",
|
||||
parser.offset+2, int(value2))
|
||||
}
|
||||
|
||||
// Generate the value of the surrogate pair.
|
||||
value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF)
|
||||
} else {
|
||||
width = 2
|
||||
}
|
||||
|
||||
default:
|
||||
panic("impossible")
|
||||
}
|
||||
|
||||
// Check if the character is in the allowed range:
|
||||
// #x9 | #xA | #xD | [#x20-#x7E] (8 bit)
|
||||
// | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD] (16 bit)
|
||||
// | [#x10000-#x10FFFF] (32 bit)
|
||||
switch {
|
||||
case value == 0x09:
|
||||
case value == 0x0A:
|
||||
case value == 0x0D:
|
||||
case value >= 0x20 && value <= 0x7E:
|
||||
case value == 0x85:
|
||||
case value >= 0xA0 && value <= 0xD7FF:
|
||||
case value >= 0xE000 && value <= 0xFFFD:
|
||||
case value >= 0x10000 && value <= 0x10FFFF:
|
||||
default:
|
||||
return yaml_parser_set_reader_error(parser,
|
||||
"control characters are not allowed",
|
||||
parser.offset, int(value))
|
||||
}
|
||||
|
||||
// Move the raw pointers.
|
||||
parser.raw_buffer_pos += width
|
||||
parser.offset += width
|
||||
|
||||
// Finally put the character into the buffer.
|
||||
if value <= 0x7F {
|
||||
// 0000 0000-0000 007F . 0xxxxxxx
|
||||
parser.buffer[buffer_len+0] = byte(value)
|
||||
buffer_len += 1
|
||||
} else if value <= 0x7FF {
|
||||
// 0000 0080-0000 07FF . 110xxxxx 10xxxxxx
|
||||
parser.buffer[buffer_len+0] = byte(0xC0 + (value >> 6))
|
||||
parser.buffer[buffer_len+1] = byte(0x80 + (value & 0x3F))
|
||||
buffer_len += 2
|
||||
} else if value <= 0xFFFF {
|
||||
// 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx
|
||||
parser.buffer[buffer_len+0] = byte(0xE0 + (value >> 12))
|
||||
parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 6) & 0x3F))
|
||||
parser.buffer[buffer_len+2] = byte(0x80 + (value & 0x3F))
|
||||
buffer_len += 3
|
||||
} else {
|
||||
// 0001 0000-0010 FFFF . 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
|
||||
parser.buffer[buffer_len+0] = byte(0xF0 + (value >> 18))
|
||||
parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 12) & 0x3F))
|
||||
parser.buffer[buffer_len+2] = byte(0x80 + ((value >> 6) & 0x3F))
|
||||
parser.buffer[buffer_len+3] = byte(0x80 + (value & 0x3F))
|
||||
buffer_len += 4
|
||||
}
|
||||
|
||||
parser.unread++
|
||||
}
|
||||
|
||||
// On EOF, put NUL into the buffer and return.
|
||||
if parser.eof {
|
||||
parser.buffer[buffer_len] = 0
|
||||
buffer_len++
|
||||
parser.unread++
|
||||
break
|
||||
}
|
||||
}
|
||||
parser.buffer = parser.buffer[:buffer_len]
|
||||
return true
|
||||
}
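// Illustrative sketch (not part of the original source): the UTF-8 decoding
// that the loop above performs, reduced to one standalone helper using the
// same octet masks as the RFC 3629 table quoted in the comments. It returns
// the decoded rune and its byte width, or (-1, 0) for the error cases the
// reader guards against (bad leading or trailing octet, incomplete sequence,
// overlong form, surrogate or out-of-range value). The name decodeUTF8 is
// hypothetical.
func decodeUTF8(b []byte) (value rune, width int) {
	if len(b) == 0 {
		return -1, 0
	}
	octet := b[0]
	switch {
	case octet&0x80 == 0x00:
		value, width = rune(octet&0x7F), 1
	case octet&0xE0 == 0xC0:
		value, width = rune(octet&0x1F), 2
	case octet&0xF0 == 0xE0:
		value, width = rune(octet&0x0F), 3
	case octet&0xF8 == 0xF0:
		value, width = rune(octet&0x07), 4
	default:
		return -1, 0 // invalid leading octet
	}
	if len(b) < width {
		return -1, 0 // incomplete sequence
	}
	for k := 1; k < width; k++ {
		if b[k]&0xC0 != 0x80 {
			return -1, 0 // invalid trailing octet
		}
		value = value<<6 + rune(b[k]&0x3F)
	}
	// Reject overlong encodings, surrogates, and values beyond U+10FFFF.
	switch {
	case width == 1:
	case width == 2 && value >= 0x80:
	case width == 3 && value >= 0x800:
	case width == 4 && value >= 0x10000:
	default:
		return -1, 0
	}
	if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF {
		return -1, 0
	}
	return value, width
}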
|
208
_vendor/src/gopkg.in/yaml.v2/resolve.go
Normal file
@ -0,0 +1,208 @@
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"math"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type resolveMapItem struct {
|
||||
value interface{}
|
||||
tag string
|
||||
}
|
||||
|
||||
var resolveTable = make([]byte, 256)
|
||||
var resolveMap = make(map[string]resolveMapItem)
|
||||
|
||||
func init() {
|
||||
t := resolveTable
|
||||
t[int('+')] = 'S' // Sign
|
||||
t[int('-')] = 'S'
|
||||
for _, c := range "0123456789" {
|
||||
t[int(c)] = 'D' // Digit
|
||||
}
|
||||
for _, c := range "yYnNtTfFoO~" {
|
||||
t[int(c)] = 'M' // In map
|
||||
}
|
||||
t[int('.')] = '.' // Float (potentially in map)
|
||||
|
||||
var resolveMapList = []struct {
|
||||
v interface{}
|
||||
tag string
|
||||
l []string
|
||||
}{
|
||||
{true, yaml_BOOL_TAG, []string{"y", "Y", "yes", "Yes", "YES"}},
|
||||
{true, yaml_BOOL_TAG, []string{"true", "True", "TRUE"}},
|
||||
{true, yaml_BOOL_TAG, []string{"on", "On", "ON"}},
|
||||
{false, yaml_BOOL_TAG, []string{"n", "N", "no", "No", "NO"}},
|
||||
{false, yaml_BOOL_TAG, []string{"false", "False", "FALSE"}},
|
||||
{false, yaml_BOOL_TAG, []string{"off", "Off", "OFF"}},
|
||||
{nil, yaml_NULL_TAG, []string{"", "~", "null", "Null", "NULL"}},
|
||||
{math.NaN(), yaml_FLOAT_TAG, []string{".nan", ".NaN", ".NAN"}},
|
||||
{math.Inf(+1), yaml_FLOAT_TAG, []string{".inf", ".Inf", ".INF"}},
|
||||
{math.Inf(+1), yaml_FLOAT_TAG, []string{"+.inf", "+.Inf", "+.INF"}},
|
||||
{math.Inf(-1), yaml_FLOAT_TAG, []string{"-.inf", "-.Inf", "-.INF"}},
|
||||
{"<<", yaml_MERGE_TAG, []string{"<<"}},
|
||||
}
|
||||
|
||||
m := resolveMap
|
||||
for _, item := range resolveMapList {
|
||||
for _, s := range item.l {
|
||||
m[s] = resolveMapItem{item.v, item.tag}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const longTagPrefix = "tag:yaml.org,2002:"
|
||||
|
||||
func shortTag(tag string) string {
|
||||
// TODO This can easily be made faster and produce less garbage.
|
||||
if strings.HasPrefix(tag, longTagPrefix) {
|
||||
return "!!" + tag[len(longTagPrefix):]
|
||||
}
|
||||
return tag
|
||||
}
|
||||
|
||||
func longTag(tag string) string {
|
||||
if strings.HasPrefix(tag, "!!") {
|
||||
return longTagPrefix + tag[2:]
|
||||
}
|
||||
return tag
|
||||
}
|
||||
|
||||
func resolvableTag(tag string) bool {
|
||||
switch tag {
|
||||
case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
var yamlStyleFloat = regexp.MustCompile(`^[-+]?[0-9]*\.?[0-9]+([eE][-+][0-9]+)?$`)
|
||||
|
||||
func resolve(tag string, in string) (rtag string, out interface{}) {
|
||||
if !resolvableTag(tag) {
|
||||
return tag, in
|
||||
}
|
||||
|
||||
defer func() {
|
||||
switch tag {
|
||||
case "", rtag, yaml_STR_TAG, yaml_BINARY_TAG:
|
||||
return
|
||||
}
|
||||
failf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag))
|
||||
}()
|
||||
|
||||
// Any data is accepted as a !!str or !!binary.
|
||||
// Otherwise, the prefix is enough of a hint about what it might be.
|
||||
hint := byte('N')
|
||||
if in != "" {
|
||||
hint = resolveTable[in[0]]
|
||||
}
|
||||
if hint != 0 && tag != yaml_STR_TAG && tag != yaml_BINARY_TAG {
|
||||
// Handle things we can lookup in a map.
|
||||
if item, ok := resolveMap[in]; ok {
|
||||
return item.tag, item.value
|
||||
}
|
||||
|
||||
// Base 60 floats are a bad idea, were dropped in YAML 1.2, and
|
||||
// are purposefully unsupported here. They're still quoted on
|
||||
// the way out for compatibility with other parsers, though.
|
||||
|
||||
switch hint {
|
||||
case 'M':
|
||||
// We've already checked the map above.
|
||||
|
||||
case '.':
|
||||
// Not in the map, so maybe a normal float.
|
||||
floatv, err := strconv.ParseFloat(in, 64)
|
||||
if err == nil {
|
||||
return yaml_FLOAT_TAG, floatv
|
||||
}
|
||||
|
||||
case 'D', 'S':
|
||||
// Int, float, or timestamp.
|
||||
plain := strings.Replace(in, "_", "", -1)
|
||||
intv, err := strconv.ParseInt(plain, 0, 64)
|
||||
if err == nil {
|
||||
if intv == int64(int(intv)) {
|
||||
return yaml_INT_TAG, int(intv)
|
||||
} else {
|
||||
return yaml_INT_TAG, intv
|
||||
}
|
||||
}
|
||||
uintv, err := strconv.ParseUint(plain, 0, 64)
|
||||
if err == nil {
|
||||
return yaml_INT_TAG, uintv
|
||||
}
|
||||
if yamlStyleFloat.MatchString(plain) {
|
||||
floatv, err := strconv.ParseFloat(plain, 64)
|
||||
if err == nil {
|
||||
return yaml_FLOAT_TAG, floatv
|
||||
}
|
||||
}
|
||||
if strings.HasPrefix(plain, "0b") {
|
||||
intv, err := strconv.ParseInt(plain[2:], 2, 64)
|
||||
if err == nil {
|
||||
if intv == int64(int(intv)) {
|
||||
return yaml_INT_TAG, int(intv)
|
||||
} else {
|
||||
return yaml_INT_TAG, intv
|
||||
}
|
||||
}
|
||||
uintv, err := strconv.ParseUint(plain[2:], 2, 64)
|
||||
if err == nil {
|
||||
return yaml_INT_TAG, uintv
|
||||
}
|
||||
} else if strings.HasPrefix(plain, "-0b") {
|
||||
intv, err := strconv.ParseInt(plain[3:], 2, 64)
|
||||
if err == nil {
|
||||
if intv == int64(int(intv)) {
|
||||
return yaml_INT_TAG, -int(intv)
|
||||
} else {
|
||||
return yaml_INT_TAG, -intv
|
||||
}
|
||||
}
|
||||
}
|
||||
// XXX Handle timestamps here.
|
||||
|
||||
default:
|
||||
panic("resolveTable item not yet handled: " + string(rune(hint)) + " (with " + in + ")")
|
||||
}
|
||||
}
|
||||
if tag == yaml_BINARY_TAG {
|
||||
return yaml_BINARY_TAG, in
|
||||
}
|
||||
if utf8.ValidString(in) {
|
||||
return yaml_STR_TAG, in
|
||||
}
|
||||
return yaml_BINARY_TAG, encodeBase64(in)
|
||||
}
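// Illustrative sketch (not part of the original source): what resolve does
// for a few untagged plain scalars, by calling it with an empty tag exactly
// as the decoder does. The helper name resolveExamples is hypothetical; the
// expected values in the comment follow from the resolveMap and hint table
// defined above.
func resolveExamples() map[string]interface{} {
	out := make(map[string]interface{})
	for _, s := range []string{"yes", "off", "~", "10", "0b1010", "1.5", ".inf", "BAR!"} {
		// "yes" -> true, "off" -> false, "~" -> nil, "10" and "0b1010" -> int 10,
		// "1.5" -> float64 1.5, ".inf" -> +Inf, "BAR!" -> string "BAR!"
		_, v := resolve("", s)
		out[s] = v
	}
	return out
}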
|
||||
|
||||
// encodeBase64 encodes s as base64 that is broken up into multiple lines
|
||||
// as appropriate for the resulting length.
|
||||
func encodeBase64(s string) string {
|
||||
const lineLen = 70
|
||||
encLen := base64.StdEncoding.EncodedLen(len(s))
|
||||
lines := encLen/lineLen + 1
|
||||
buf := make([]byte, encLen*2+lines)
|
||||
in := buf[0:encLen]
|
||||
out := buf[encLen:]
|
||||
base64.StdEncoding.Encode(in, []byte(s))
|
||||
k := 0
|
||||
for i := 0; i < len(in); i += lineLen {
|
||||
j := i + lineLen
|
||||
if j > len(in) {
|
||||
j = len(in)
|
||||
}
|
||||
k += copy(out[k:], in[i:j])
|
||||
if lines > 1 {
|
||||
out[k] = '\n'
|
||||
k++
|
||||
}
|
||||
}
|
||||
return string(out[:k])
|
||||
}
|
2710
_vendor/src/gopkg.in/yaml.v2/scannerc.go
Normal file
104
_vendor/src/gopkg.in/yaml.v2/sorter.go
Normal file
@ -0,0 +1,104 @@
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
type keyList []reflect.Value
|
||||
|
||||
func (l keyList) Len() int { return len(l) }
|
||||
func (l keyList) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
|
||||
func (l keyList) Less(i, j int) bool {
|
||||
a := l[i]
|
||||
b := l[j]
|
||||
ak := a.Kind()
|
||||
bk := b.Kind()
|
||||
for (ak == reflect.Interface || ak == reflect.Ptr) && !a.IsNil() {
|
||||
a = a.Elem()
|
||||
ak = a.Kind()
|
||||
}
|
||||
for (bk == reflect.Interface || bk == reflect.Ptr) && !b.IsNil() {
|
||||
b = b.Elem()
|
||||
bk = b.Kind()
|
||||
}
|
||||
af, aok := keyFloat(a)
|
||||
bf, bok := keyFloat(b)
|
||||
if aok && bok {
|
||||
if af != bf {
|
||||
return af < bf
|
||||
}
|
||||
if ak != bk {
|
||||
return ak < bk
|
||||
}
|
||||
return numLess(a, b)
|
||||
}
|
||||
if ak != reflect.String || bk != reflect.String {
|
||||
return ak < bk
|
||||
}
|
||||
ar, br := []rune(a.String()), []rune(b.String())
|
||||
for i := 0; i < len(ar) && i < len(br); i++ {
|
||||
if ar[i] == br[i] {
|
||||
continue
|
||||
}
|
||||
al := unicode.IsLetter(ar[i])
|
||||
bl := unicode.IsLetter(br[i])
|
||||
if al && bl {
|
||||
return ar[i] < br[i]
|
||||
}
|
||||
if al || bl {
|
||||
return bl
|
||||
}
|
||||
var ai, bi int
|
||||
var an, bn int64
|
||||
for ai = i; ai < len(ar) && unicode.IsDigit(ar[ai]); ai++ {
|
||||
an = an*10 + int64(ar[ai]-'0')
|
||||
}
|
||||
for bi = i; bi < len(br) && unicode.IsDigit(br[bi]); bi++ {
|
||||
bn = bn*10 + int64(br[bi]-'0')
|
||||
}
|
||||
if an != bn {
|
||||
return an < bn
|
||||
}
|
||||
if ai != bi {
|
||||
return ai < bi
|
||||
}
|
||||
return ar[i] < br[i]
|
||||
}
|
||||
return len(ar) < len(br)
|
||||
}
|
||||
|
||||
// keyFloat returns a float value for v if it is a number/bool
|
||||
// and whether it is a number/bool or not.
|
||||
func keyFloat(v reflect.Value) (f float64, ok bool) {
|
||||
switch v.Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return float64(v.Int()), true
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return v.Float(), true
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return float64(v.Uint()), true
|
||||
case reflect.Bool:
|
||||
if v.Bool() {
|
||||
return 1, true
|
||||
}
|
||||
return 0, true
|
||||
}
|
||||
return 0, false
|
||||
}
|
||||
|
||||
// numLess returns whether a < b.
|
||||
// a and b must necessarily have the same kind.
|
||||
func numLess(a, b reflect.Value) bool {
|
||||
switch a.Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return a.Int() < b.Int()
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return a.Float() < b.Float()
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return a.Uint() < b.Uint()
|
||||
case reflect.Bool:
|
||||
return !a.Bool() && b.Bool()
|
||||
}
|
||||
panic("not a number")
|
||||
}
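// Illustrative sketch (not part of the original source): the ordering that
// keyList.Less produces when the encoder sorts map keys. Numeric and bool
// keys come first (by value), then strings, with digit runs inside strings
// compared numerically ("b/2" before "b/10"). The helper name
// sortedKeysExample is hypothetical and assumes the standard "sort" package
// is imported alongside "reflect".
func sortedKeysExample() keyList {
	keys := []interface{}{"b/10", "b/2", "a", 2, true, 1.5}
	l := make(keyList, 0, len(keys))
	for _, k := range keys {
		l = append(l, reflect.ValueOf(k))
	}
	sort.Sort(l)
	// Resulting order: true, 1.5, 2, "a", "b/2", "b/10"
	return l
}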
|
12
_vendor/src/gopkg.in/yaml.v2/suite_test.go
Normal file
@ -0,0 +1,12 @@
|
||||
package yaml_test
|
||||
|
||||
import (
|
||||
. "gopkg.in/check.v1"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func Test(t *testing.T) { TestingT(t) }
|
||||
|
||||
type S struct{}
|
||||
|
||||
var _ = Suite(&S{})
|
89
_vendor/src/gopkg.in/yaml.v2/writerc.go
Normal file
@ -0,0 +1,89 @@
|
||||
package yaml
|
||||
|
||||
// Set the writer error and return false.
|
||||
func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool {
|
||||
emitter.error = yaml_WRITER_ERROR
|
||||
emitter.problem = problem
|
||||
return false
|
||||
}
|
||||
|
||||
// Flush the output buffer.
|
||||
func yaml_emitter_flush(emitter *yaml_emitter_t) bool {
|
||||
if emitter.write_handler == nil {
|
||||
panic("write handler not set")
|
||||
}
|
||||
|
||||
// Check if the buffer is empty.
|
||||
if emitter.buffer_pos == 0 {
|
||||
return true
|
||||
}
|
||||
|
||||
// If the output encoding is UTF-8, we don't need to recode the buffer.
|
||||
if emitter.encoding == yaml_UTF8_ENCODING {
|
||||
if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil {
|
||||
return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
|
||||
}
|
||||
emitter.buffer_pos = 0
|
||||
return true
|
||||
}
|
||||
|
||||
// Recode the buffer into the raw buffer.
|
||||
var low, high int
|
||||
if emitter.encoding == yaml_UTF16LE_ENCODING {
|
||||
low, high = 0, 1
|
||||
} else {
|
||||
high, low = 1, 0
|
||||
}
|
||||
|
||||
pos := 0
|
||||
for pos < emitter.buffer_pos {
|
||||
// See the "reader.c" code for more details on UTF-8 encoding. Note
|
||||
// that we assume that the buffer contains a valid UTF-8 sequence.
|
||||
|
||||
// Read the next UTF-8 character.
|
||||
octet := emitter.buffer[pos]
|
||||
|
||||
var w int
|
||||
var value rune
|
||||
switch {
|
||||
case octet&0x80 == 0x00:
|
||||
w, value = 1, rune(octet&0x7F)
|
||||
case octet&0xE0 == 0xC0:
|
||||
w, value = 2, rune(octet&0x1F)
|
||||
case octet&0xF0 == 0xE0:
|
||||
w, value = 3, rune(octet&0x0F)
|
||||
case octet&0xF8 == 0xF0:
|
||||
w, value = 4, rune(octet&0x07)
|
||||
}
|
||||
for k := 1; k < w; k++ {
|
||||
octet = emitter.buffer[pos+k]
|
||||
value = (value << 6) + (rune(octet) & 0x3F)
|
||||
}
|
||||
pos += w
|
||||
|
||||
// Write the character.
|
||||
if value < 0x10000 {
|
||||
var b [2]byte
|
||||
b[high] = byte(value >> 8)
|
||||
b[low] = byte(value & 0xFF)
|
||||
emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1])
|
||||
} else {
|
||||
// Write the character using a surrogate pair (check "reader.c").
|
||||
var b [4]byte
|
||||
value -= 0x10000
|
||||
b[high] = byte(0xD8 + (value >> 18))
|
||||
b[low] = byte((value >> 10) & 0xFF)
|
||||
b[high+2] = byte(0xDC + ((value >> 8) & 0xFF))
|
||||
b[low+2] = byte(value & 0xFF)
|
||||
emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1], b[2], b[3])
|
||||
}
|
||||
}
|
||||
|
||||
// Write the raw buffer.
|
||||
if err := emitter.write_handler(emitter, emitter.raw_buffer); err != nil {
|
||||
return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
|
||||
}
|
||||
emitter.buffer_pos = 0
|
||||
emitter.raw_buffer = emitter.raw_buffer[:0]
|
||||
return true
|
||||
}
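// Illustrative sketch (not part of the original source): the recoding step
// above reduced to a standalone helper that converts runes to UTF-16LE bytes,
// using the standard RFC 2781 surrogate-pair construction the comments refer
// to for values beyond the BMP. The name runesToUTF16LE is hypothetical.
func runesToUTF16LE(rs []rune) []byte {
	var out []byte
	for _, value := range rs {
		if value < 0x10000 {
			out = append(out, byte(value&0xFF), byte(value>>8))
			continue
		}
		value -= 0x10000
		hi := 0xD800 + (value >> 10)   // high surrogate: top 10 bits of U'
		lo := 0xDC00 + (value & 0x3FF) // low surrogate: bottom 10 bits of U'
		out = append(out, byte(hi&0xFF), byte(hi>>8), byte(lo&0xFF), byte(lo>>8))
	}
	return out
}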
|
346
_vendor/src/gopkg.in/yaml.v2/yaml.go
Normal file
@ -0,0 +1,346 @@
|
||||
// Package yaml implements YAML support for the Go language.
|
||||
//
|
||||
// Source code and other details for the project are available at GitHub:
|
||||
//
|
||||
// https://github.com/go-yaml/yaml
|
||||
//
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strings"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// MapSlice encodes and decodes as a YAML map.
|
||||
// The order of keys is preserved when encoding and decoding.
|
||||
type MapSlice []MapItem
|
||||
|
||||
// MapItem is an item in a MapSlice.
|
||||
type MapItem struct {
|
||||
Key, Value interface{}
|
||||
}
|
||||
|
||||
// The Unmarshaler interface may be implemented by types to customize their
|
||||
// behavior when being unmarshaled from a YAML document. The UnmarshalYAML
|
||||
// method receives a function that may be called to unmarshal the original
|
||||
// YAML value into a field or variable. It is safe to call the unmarshal
|
||||
// function parameter more than once if necessary.
|
||||
type Unmarshaler interface {
|
||||
UnmarshalYAML(unmarshal func(interface{}) error) error
|
||||
}
|
||||
|
||||
// The Marshaler interface may be implemented by types to customize their
|
||||
// behavior when being marshaled into a YAML document. The returned value
|
||||
// is marshaled in place of the original value implementing Marshaler.
|
||||
//
|
||||
// If an error is returned by MarshalYAML, the marshaling procedure stops
|
||||
// and returns with the provided error.
|
||||
type Marshaler interface {
|
||||
MarshalYAML() (interface{}, error)
|
||||
}
|
||||
|
||||
// Unmarshal decodes the first document found within the in byte slice
|
||||
// and assigns decoded values into the out value.
|
||||
//
|
||||
// Maps and pointers (to a struct, string, int, etc) are accepted as out
|
||||
// values. If an internal pointer within a struct is not initialized,
|
||||
// the yaml package will initialize it if necessary for unmarshalling
|
||||
// the provided data. The out parameter must not be nil.
|
||||
//
|
||||
// The type of the decoded values should be compatible with the respective
|
||||
// values in out. If one or more values cannot be decoded due to a type
|
||||
// mismatches, decoding continues partially until the end of the YAML
|
||||
// content, and a *yaml.TypeError is returned with details for all
|
||||
// missed values.
|
||||
//
|
||||
// Struct fields are only unmarshalled if they are exported (have an
|
||||
// upper case first letter), and are unmarshalled using the field name
|
||||
// lowercased as the default key. Custom keys may be defined via the
|
||||
// "yaml" name in the field tag: the content preceding the first comma
|
||||
// is used as the key, and the following comma-separated options are
|
||||
// used to tweak the marshalling process (see Marshal).
|
||||
// Conflicting names result in a runtime error.
|
||||
//
|
||||
// For example:
|
||||
//
|
||||
// type T struct {
|
||||
// F int `yaml:"a,omitempty"`
|
||||
// B int
|
||||
// }
|
||||
// var t T
|
||||
// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t)
|
||||
//
|
||||
// See the documentation of Marshal for the format of tags and a list of
|
||||
// supported tag options.
|
||||
//
|
||||
func Unmarshal(in []byte, out interface{}) (err error) {
|
||||
defer handleErr(&err)
|
||||
d := newDecoder()
|
||||
p := newParser(in)
|
||||
defer p.destroy()
|
||||
node := p.parse()
|
||||
if node != nil {
|
||||
v := reflect.ValueOf(out)
|
||||
if v.Kind() == reflect.Ptr && !v.IsNil() {
|
||||
v = v.Elem()
|
||||
}
|
||||
d.unmarshal(node, v)
|
||||
}
|
||||
if len(d.terrors) > 0 {
|
||||
return &TypeError{d.terrors}
|
||||
}
|
||||
return nil
|
||||
}
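// Illustrative sketch (not part of the original source): how a caller uses
// Unmarshal with the struct tags documented above, including the partial
// decoding described for type mismatches, where the remaining fields are
// still set and a *TypeError lists what could not be decoded. The function
// and field names here are hypothetical.
func exampleUnmarshal() {
	type T struct {
		Name  string `yaml:"name"`
		Port  int    `yaml:"port"`
		Debug bool   // default key is the lowercased field name, "debug"
	}
	var t T
	err := Unmarshal([]byte("name: server\nport: not-a-number\ndebug: true"), &t)
	if te, ok := err.(*TypeError); ok {
		_ = te.Errors // one entry about "not-a-number" not decoding into an int
	}
	// t.Name == "server" and t.Debug == true even though err != nil.
}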
|
||||
|
||||
// Marshal serializes the value provided into a YAML document. The structure
|
||||
// of the generated document will reflect the structure of the value itself.
|
||||
// Maps and pointers (to struct, string, int, etc) are accepted as the in value.
|
||||
//
|
||||
// Struct fields are only marshalled if they are exported (have an upper case
|
||||
// first letter), and are marshalled using the field name lowercased as the
|
||||
// default key. Custom keys may be defined via the "yaml" name in the field
|
||||
// tag: the content preceding the first comma is used as the key, and the
|
||||
// following comma-separated options are used to tweak the marshalling process.
|
||||
// Conflicting names result in a runtime error.
|
||||
//
|
||||
// The field tag format accepted is:
|
||||
//
|
||||
// `(...) yaml:"[<key>][,<flag1>[,<flag2>]]" (...)`
|
||||
//
|
||||
// The following flags are currently supported:
|
||||
//
|
||||
// omitempty Only include the field if it's not set to the zero
|
||||
// value for the type or to empty slices or maps.
|
||||
// Does not apply to zero valued structs.
|
||||
//
|
||||
// flow Marshal using a flow style (useful for structs,
|
||||
// sequences and maps).
|
||||
//
|
||||
// inline Inline the field, which must be a struct or a map,
|
||||
// causing all of its fields or keys to be processed as if
|
||||
// they were part of the outer struct. For maps, keys must
|
||||
// not conflict with the yaml keys of other struct fields.
|
||||
//
|
||||
// In addition, if the key is "-", the field is ignored.
|
||||
//
|
||||
// For example:
|
||||
//
|
||||
// type T struct {
|
||||
// F int "a,omitempty"
|
||||
// B int
|
||||
// }
|
||||
// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n"
|
||||
// yaml.Marshal(&T{F: 1}) // Returns "a: 1\nb: 0\n"
|
||||
//
|
||||
func Marshal(in interface{}) (out []byte, err error) {
|
||||
defer handleErr(&err)
|
||||
e := newEncoder()
|
||||
defer e.destroy()
|
||||
e.marshal("", reflect.ValueOf(in))
|
||||
e.finish()
|
||||
out = e.out
|
||||
return
|
||||
}
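// Illustrative sketch (not part of the original source): the tag flags
// documented above in one small struct. omitempty drops the zero-valued
// field, flow switches a map to inline {k: v} style, and a "-" key hides the
// field entirely. The function name exampleMarshal is hypothetical.
func exampleMarshal() []byte {
	type T struct {
		A int               `yaml:"a,omitempty"`
		B int               `yaml:"b"`
		M map[string]string `yaml:"m,flow"`
		S string            `yaml:"-"`
	}
	out, _ := Marshal(&T{B: 2, M: map[string]string{"k": "v"}, S: "hidden"})
	// out is "b: 2\nm: {k: v}\n": A is omitted, S is ignored.
	return out
}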
|
||||
|
||||
func handleErr(err *error) {
|
||||
if v := recover(); v != nil {
|
||||
if e, ok := v.(yamlError); ok {
|
||||
*err = e.err
|
||||
} else {
|
||||
panic(v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type yamlError struct {
|
||||
err error
|
||||
}
|
||||
|
||||
func fail(err error) {
|
||||
panic(yamlError{err})
|
||||
}
|
||||
|
||||
func failf(format string, args ...interface{}) {
|
||||
panic(yamlError{fmt.Errorf("yaml: "+format, args...)})
|
||||
}
|
||||
|
||||
// A TypeError is returned by Unmarshal when one or more fields in
|
||||
// the YAML document cannot be properly decoded into the requested
|
||||
// types. When this error is returned, the value is still
|
||||
// unmarshaled partially.
|
||||
type TypeError struct {
|
||||
Errors []string
|
||||
}
|
||||
|
||||
func (e *TypeError) Error() string {
|
||||
return fmt.Sprintf("yaml: unmarshal errors:\n %s", strings.Join(e.Errors, "\n "))
|
||||
}
|
||||
|
||||
// --------------------------------------------------------------------------
|
||||
// Maintain a mapping of keys to structure field indexes
|
||||
|
||||
// The code in this section was copied from mgo/bson.
|
||||
|
||||
// structInfo holds details for the serialization of fields of
|
||||
// a given struct.
|
||||
type structInfo struct {
|
||||
FieldsMap map[string]fieldInfo
|
||||
FieldsList []fieldInfo
|
||||
|
||||
// InlineMap is the number of the field in the struct that
|
||||
// contains an ,inline map, or -1 if there's none.
|
||||
InlineMap int
|
||||
}
|
||||
|
||||
type fieldInfo struct {
|
||||
Key string
|
||||
Num int
|
||||
OmitEmpty bool
|
||||
Flow bool
|
||||
|
||||
// Inline holds the field index if the field is part of an inlined struct.
|
||||
Inline []int
|
||||
}
|
||||
|
||||
var structMap = make(map[reflect.Type]*structInfo)
|
||||
var fieldMapMutex sync.RWMutex
|
||||
|
||||
func getStructInfo(st reflect.Type) (*structInfo, error) {
|
||||
fieldMapMutex.RLock()
|
||||
sinfo, found := structMap[st]
|
||||
fieldMapMutex.RUnlock()
|
||||
if found {
|
||||
return sinfo, nil
|
||||
}
|
||||
|
||||
n := st.NumField()
|
||||
fieldsMap := make(map[string]fieldInfo)
|
||||
fieldsList := make([]fieldInfo, 0, n)
|
||||
inlineMap := -1
|
||||
for i := 0; i != n; i++ {
|
||||
field := st.Field(i)
|
||||
if field.PkgPath != "" && !field.Anonymous {
|
||||
continue // Private field
|
||||
}
|
||||
|
||||
info := fieldInfo{Num: i}
|
||||
|
||||
tag := field.Tag.Get("yaml")
|
||||
if tag == "" && strings.Index(string(field.Tag), ":") < 0 {
|
||||
tag = string(field.Tag)
|
||||
}
|
||||
if tag == "-" {
|
||||
continue
|
||||
}
|
||||
|
||||
inline := false
|
||||
fields := strings.Split(tag, ",")
|
||||
if len(fields) > 1 {
|
||||
for _, flag := range fields[1:] {
|
||||
switch flag {
|
||||
case "omitempty":
|
||||
info.OmitEmpty = true
|
||||
case "flow":
|
||||
info.Flow = true
|
||||
case "inline":
|
||||
inline = true
|
||||
default:
|
||||
return nil, errors.New(fmt.Sprintf("Unsupported flag %q in tag %q of type %s", flag, tag, st))
|
||||
}
|
||||
}
|
||||
tag = fields[0]
|
||||
}
|
||||
|
||||
if inline {
|
||||
switch field.Type.Kind() {
|
||||
case reflect.Map:
|
||||
if inlineMap >= 0 {
|
||||
return nil, errors.New("Multiple ,inline maps in struct " + st.String())
|
||||
}
|
||||
if field.Type.Key() != reflect.TypeOf("") {
|
||||
return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String())
|
||||
}
|
||||
inlineMap = info.Num
|
||||
case reflect.Struct:
|
||||
sinfo, err := getStructInfo(field.Type)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, finfo := range sinfo.FieldsList {
|
||||
if _, found := fieldsMap[finfo.Key]; found {
|
||||
msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String()
|
||||
return nil, errors.New(msg)
|
||||
}
|
||||
if finfo.Inline == nil {
|
||||
finfo.Inline = []int{i, finfo.Num}
|
||||
} else {
|
||||
finfo.Inline = append([]int{i}, finfo.Inline...)
|
||||
}
|
||||
fieldsMap[finfo.Key] = finfo
|
||||
fieldsList = append(fieldsList, finfo)
|
||||
}
|
||||
default:
|
||||
//return nil, errors.New("Option ,inline needs a struct value or map field")
|
||||
return nil, errors.New("Option ,inline needs a struct value field")
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if tag != "" {
|
||||
info.Key = tag
|
||||
} else {
|
||||
info.Key = strings.ToLower(field.Name)
|
||||
}
|
||||
|
||||
if _, found = fieldsMap[info.Key]; found {
|
||||
msg := "Duplicated key '" + info.Key + "' in struct " + st.String()
|
||||
return nil, errors.New(msg)
|
||||
}
|
||||
|
||||
fieldsList = append(fieldsList, info)
|
||||
fieldsMap[info.Key] = info
|
||||
}
|
||||
|
||||
sinfo = &structInfo{fieldsMap, fieldsList, inlineMap}
|
||||
|
||||
fieldMapMutex.Lock()
|
||||
structMap[st] = sinfo
|
||||
fieldMapMutex.Unlock()
|
||||
return sinfo, nil
|
||||
}
|
||||
|
||||
func isZero(v reflect.Value) bool {
|
||||
switch v.Kind() {
|
||||
case reflect.String:
|
||||
return len(v.String()) == 0
|
||||
case reflect.Interface, reflect.Ptr:
|
||||
return v.IsNil()
|
||||
case reflect.Slice:
|
||||
return v.Len() == 0
|
||||
case reflect.Map:
|
||||
return v.Len() == 0
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return v.Int() == 0
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return v.Float() == 0
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return v.Uint() == 0
|
||||
case reflect.Bool:
|
||||
return !v.Bool()
|
||||
case reflect.Struct:
|
||||
vt := v.Type()
|
||||
for i := v.NumField() - 1; i >= 0; i-- {
|
||||
if vt.Field(i).PkgPath != "" {
|
||||
continue // Private field
|
||||
}
|
||||
if !isZero(v.Field(i)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
716
_vendor/src/gopkg.in/yaml.v2/yamlh.go
Normal file
@ -0,0 +1,716 @@
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"io"
|
||||
)
|
||||
|
||||
// The version directive data.
|
||||
type yaml_version_directive_t struct {
|
||||
major int8 // The major version number.
|
||||
minor int8 // The minor version number.
|
||||
}
|
||||
|
||||
// The tag directive data.
|
||||
type yaml_tag_directive_t struct {
|
||||
handle []byte // The tag handle.
|
||||
prefix []byte // The tag prefix.
|
||||
}
|
||||
|
||||
type yaml_encoding_t int
|
||||
|
||||
// The stream encoding.
|
||||
const (
|
||||
// Let the parser choose the encoding.
|
||||
yaml_ANY_ENCODING yaml_encoding_t = iota
|
||||
|
||||
yaml_UTF8_ENCODING // The default UTF-8 encoding.
|
||||
yaml_UTF16LE_ENCODING // The UTF-16-LE encoding with BOM.
|
||||
yaml_UTF16BE_ENCODING // The UTF-16-BE encoding with BOM.
|
||||
)
|
||||
|
||||
type yaml_break_t int
|
||||
|
||||
// Line break types.
|
||||
const (
|
||||
// Let the parser choose the break type.
|
||||
yaml_ANY_BREAK yaml_break_t = iota
|
||||
|
||||
yaml_CR_BREAK // Use CR for line breaks (Mac style).
|
||||
yaml_LN_BREAK // Use LN for line breaks (Unix style).
|
||||
yaml_CRLN_BREAK // Use CR LN for line breaks (DOS style).
|
||||
)
|
||||
|
||||
type yaml_error_type_t int
|
||||
|
||||
// Many bad things could happen with the parser and emitter.
|
||||
const (
|
||||
// No error is produced.
|
||||
yaml_NO_ERROR yaml_error_type_t = iota
|
||||
|
||||
yaml_MEMORY_ERROR // Cannot allocate or reallocate a block of memory.
|
||||
yaml_READER_ERROR // Cannot read or decode the input stream.
|
||||
yaml_SCANNER_ERROR // Cannot scan the input stream.
|
||||
yaml_PARSER_ERROR // Cannot parse the input stream.
|
||||
yaml_COMPOSER_ERROR // Cannot compose a YAML document.
|
||||
yaml_WRITER_ERROR // Cannot write to the output stream.
|
||||
yaml_EMITTER_ERROR // Cannot emit a YAML stream.
|
||||
)
|
||||
|
||||
// The pointer position.
|
||||
type yaml_mark_t struct {
|
||||
index int // The position index.
|
||||
line int // The position line.
|
||||
column int // The position column.
|
||||
}
|
||||
|
||||
// Node Styles
|
||||
|
||||
type yaml_style_t int8
|
||||
|
||||
type yaml_scalar_style_t yaml_style_t
|
||||
|
||||
// Scalar styles.
|
||||
const (
|
||||
// Let the emitter choose the style.
|
||||
yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = iota
|
||||
|
||||
yaml_PLAIN_SCALAR_STYLE // The plain scalar style.
|
||||
yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style.
|
||||
yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style.
|
||||
yaml_LITERAL_SCALAR_STYLE // The literal scalar style.
|
||||
yaml_FOLDED_SCALAR_STYLE // The folded scalar style.
|
||||
)
|
||||
|
||||
type yaml_sequence_style_t yaml_style_t
|
||||
|
||||
// Sequence styles.
|
||||
const (
|
||||
// Let the emitter choose the style.
|
||||
yaml_ANY_SEQUENCE_STYLE yaml_sequence_style_t = iota
|
||||
|
||||
yaml_BLOCK_SEQUENCE_STYLE // The block sequence style.
|
||||
yaml_FLOW_SEQUENCE_STYLE // The flow sequence style.
|
||||
)
|
||||
|
||||
type yaml_mapping_style_t yaml_style_t
|
||||
|
||||
// Mapping styles.
|
||||
const (
|
||||
// Let the emitter choose the style.
|
||||
yaml_ANY_MAPPING_STYLE yaml_mapping_style_t = iota
|
||||
|
||||
yaml_BLOCK_MAPPING_STYLE // The block mapping style.
|
||||
yaml_FLOW_MAPPING_STYLE // The flow mapping style.
|
||||
)
|
||||
|
||||
// Tokens
|
||||
|
||||
type yaml_token_type_t int
|
||||
|
||||
// Token types.
|
||||
const (
|
||||
// An empty token.
|
||||
yaml_NO_TOKEN yaml_token_type_t = iota
|
||||
|
||||
yaml_STREAM_START_TOKEN // A STREAM-START token.
|
||||
yaml_STREAM_END_TOKEN // A STREAM-END token.
|
||||
|
||||
yaml_VERSION_DIRECTIVE_TOKEN // A VERSION-DIRECTIVE token.
|
||||
yaml_TAG_DIRECTIVE_TOKEN // A TAG-DIRECTIVE token.
|
||||
yaml_DOCUMENT_START_TOKEN // A DOCUMENT-START token.
|
||||
yaml_DOCUMENT_END_TOKEN // A DOCUMENT-END token.
|
||||
|
||||
yaml_BLOCK_SEQUENCE_START_TOKEN // A BLOCK-SEQUENCE-START token.
|
||||
yaml_BLOCK_MAPPING_START_TOKEN  // A BLOCK-MAPPING-START token.
|
||||
yaml_BLOCK_END_TOKEN // A BLOCK-END token.
|
||||
|
||||
yaml_FLOW_SEQUENCE_START_TOKEN // A FLOW-SEQUENCE-START token.
|
||||
yaml_FLOW_SEQUENCE_END_TOKEN // A FLOW-SEQUENCE-END token.
|
||||
yaml_FLOW_MAPPING_START_TOKEN // A FLOW-MAPPING-START token.
|
||||
yaml_FLOW_MAPPING_END_TOKEN // A FLOW-MAPPING-END token.
|
||||
|
||||
yaml_BLOCK_ENTRY_TOKEN // A BLOCK-ENTRY token.
|
||||
yaml_FLOW_ENTRY_TOKEN // A FLOW-ENTRY token.
|
||||
yaml_KEY_TOKEN // A KEY token.
|
||||
yaml_VALUE_TOKEN // A VALUE token.
|
||||
|
||||
yaml_ALIAS_TOKEN // An ALIAS token.
|
||||
yaml_ANCHOR_TOKEN // An ANCHOR token.
|
||||
yaml_TAG_TOKEN // A TAG token.
|
||||
yaml_SCALAR_TOKEN // A SCALAR token.
|
||||
)
|
||||
|
||||
func (tt yaml_token_type_t) String() string {
|
||||
switch tt {
|
||||
case yaml_NO_TOKEN:
|
||||
return "yaml_NO_TOKEN"
|
||||
case yaml_STREAM_START_TOKEN:
|
||||
return "yaml_STREAM_START_TOKEN"
|
||||
case yaml_STREAM_END_TOKEN:
|
||||
return "yaml_STREAM_END_TOKEN"
|
||||
case yaml_VERSION_DIRECTIVE_TOKEN:
|
||||
return "yaml_VERSION_DIRECTIVE_TOKEN"
|
||||
case yaml_TAG_DIRECTIVE_TOKEN:
|
||||
return "yaml_TAG_DIRECTIVE_TOKEN"
|
||||
case yaml_DOCUMENT_START_TOKEN:
|
||||
return "yaml_DOCUMENT_START_TOKEN"
|
||||
case yaml_DOCUMENT_END_TOKEN:
|
||||
return "yaml_DOCUMENT_END_TOKEN"
|
||||
case yaml_BLOCK_SEQUENCE_START_TOKEN:
|
||||
return "yaml_BLOCK_SEQUENCE_START_TOKEN"
|
||||
case yaml_BLOCK_MAPPING_START_TOKEN:
|
||||
return "yaml_BLOCK_MAPPING_START_TOKEN"
|
||||
case yaml_BLOCK_END_TOKEN:
|
||||
return "yaml_BLOCK_END_TOKEN"
|
||||
case yaml_FLOW_SEQUENCE_START_TOKEN:
|
||||
return "yaml_FLOW_SEQUENCE_START_TOKEN"
|
||||
case yaml_FLOW_SEQUENCE_END_TOKEN:
|
||||
return "yaml_FLOW_SEQUENCE_END_TOKEN"
|
||||
case yaml_FLOW_MAPPING_START_TOKEN:
|
||||
return "yaml_FLOW_MAPPING_START_TOKEN"
|
||||
case yaml_FLOW_MAPPING_END_TOKEN:
|
||||
return "yaml_FLOW_MAPPING_END_TOKEN"
|
||||
case yaml_BLOCK_ENTRY_TOKEN:
|
||||
return "yaml_BLOCK_ENTRY_TOKEN"
|
||||
case yaml_FLOW_ENTRY_TOKEN:
|
||||
return "yaml_FLOW_ENTRY_TOKEN"
|
||||
case yaml_KEY_TOKEN:
|
||||
return "yaml_KEY_TOKEN"
|
||||
case yaml_VALUE_TOKEN:
|
||||
return "yaml_VALUE_TOKEN"
|
||||
case yaml_ALIAS_TOKEN:
|
||||
return "yaml_ALIAS_TOKEN"
|
||||
case yaml_ANCHOR_TOKEN:
|
||||
return "yaml_ANCHOR_TOKEN"
|
||||
case yaml_TAG_TOKEN:
|
||||
return "yaml_TAG_TOKEN"
|
||||
case yaml_SCALAR_TOKEN:
|
||||
return "yaml_SCALAR_TOKEN"
|
||||
}
|
||||
return "<unknown token>"
|
||||
}
|
||||
|
||||
// The token structure.
|
||||
type yaml_token_t struct {
|
||||
// The token type.
|
||||
typ yaml_token_type_t
|
||||
|
||||
// The start/end of the token.
|
||||
start_mark, end_mark yaml_mark_t
|
||||
|
||||
// The stream encoding (for yaml_STREAM_START_TOKEN).
|
||||
encoding yaml_encoding_t
|
||||
|
||||
// The alias/anchor/scalar value or tag/tag directive handle
|
||||
// (for yaml_ALIAS_TOKEN, yaml_ANCHOR_TOKEN, yaml_SCALAR_TOKEN, yaml_TAG_TOKEN, yaml_TAG_DIRECTIVE_TOKEN).
|
||||
value []byte
|
||||
|
||||
// The tag suffix (for yaml_TAG_TOKEN).
|
||||
suffix []byte
|
||||
|
||||
// The tag directive prefix (for yaml_TAG_DIRECTIVE_TOKEN).
|
||||
prefix []byte
|
||||
|
||||
// The scalar style (for yaml_SCALAR_TOKEN).
|
||||
style yaml_scalar_style_t
|
||||
|
||||
// The version directive major/minor (for yaml_VERSION_DIRECTIVE_TOKEN).
|
||||
major, minor int8
|
||||
}
|
||||
|
||||
// Events
|
||||
|
||||
type yaml_event_type_t int8
|
||||
|
||||
// Event types.
|
||||
const (
|
||||
// An empty event.
|
||||
yaml_NO_EVENT yaml_event_type_t = iota
|
||||
|
||||
yaml_STREAM_START_EVENT // A STREAM-START event.
|
||||
yaml_STREAM_END_EVENT // A STREAM-END event.
|
||||
yaml_DOCUMENT_START_EVENT // A DOCUMENT-START event.
|
||||
yaml_DOCUMENT_END_EVENT // A DOCUMENT-END event.
|
||||
yaml_ALIAS_EVENT // An ALIAS event.
|
||||
yaml_SCALAR_EVENT // A SCALAR event.
|
||||
yaml_SEQUENCE_START_EVENT // A SEQUENCE-START event.
|
||||
yaml_SEQUENCE_END_EVENT // A SEQUENCE-END event.
|
||||
yaml_MAPPING_START_EVENT // A MAPPING-START event.
|
||||
yaml_MAPPING_END_EVENT // A MAPPING-END event.
|
||||
)
|
||||
|
||||
// The event structure.
|
||||
type yaml_event_t struct {
|
||||
|
||||
// The event type.
|
||||
typ yaml_event_type_t
|
||||
|
||||
// The start and end of the event.
|
||||
start_mark, end_mark yaml_mark_t
|
||||
|
||||
// The document encoding (for yaml_STREAM_START_EVENT).
|
||||
encoding yaml_encoding_t
|
||||
|
||||
// The version directive (for yaml_DOCUMENT_START_EVENT).
|
||||
version_directive *yaml_version_directive_t
|
||||
|
||||
// The list of tag directives (for yaml_DOCUMENT_START_EVENT).
|
||||
tag_directives []yaml_tag_directive_t
|
||||
|
||||
// The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT).
|
||||
anchor []byte
|
||||
|
||||
// The tag (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT).
|
||||
tag []byte
|
||||
|
||||
// The scalar value (for yaml_SCALAR_EVENT).
|
||||
value []byte
|
||||
|
||||
// Is the document start/end indicator implicit, or the tag optional?
|
||||
// (for yaml_DOCUMENT_START_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_SCALAR_EVENT).
|
||||
implicit bool
|
||||
|
||||
// Is the tag optional for any non-plain style? (for yaml_SCALAR_EVENT).
|
||||
quoted_implicit bool
|
||||
|
||||
// The style (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT).
|
||||
style yaml_style_t
|
||||
}
|
||||
|
||||
func (e *yaml_event_t) scalar_style() yaml_scalar_style_t { return yaml_scalar_style_t(e.style) }
|
||||
func (e *yaml_event_t) sequence_style() yaml_sequence_style_t { return yaml_sequence_style_t(e.style) }
|
||||
func (e *yaml_event_t) mapping_style() yaml_mapping_style_t { return yaml_mapping_style_t(e.style) }
|
||||
|
||||
// Nodes
|
||||
|
||||
const (
|
||||
yaml_NULL_TAG = "tag:yaml.org,2002:null" // The tag !!null with the only possible value: null.
|
||||
yaml_BOOL_TAG = "tag:yaml.org,2002:bool" // The tag !!bool with the values: true and false.
|
||||
yaml_STR_TAG = "tag:yaml.org,2002:str" // The tag !!str for string values.
|
||||
yaml_INT_TAG = "tag:yaml.org,2002:int" // The tag !!int for integer values.
|
||||
yaml_FLOAT_TAG = "tag:yaml.org,2002:float" // The tag !!float for float values.
|
||||
yaml_TIMESTAMP_TAG = "tag:yaml.org,2002:timestamp" // The tag !!timestamp for date and time values.
|
||||
|
||||
yaml_SEQ_TAG = "tag:yaml.org,2002:seq" // The tag !!seq is used to denote sequences.
|
||||
yaml_MAP_TAG = "tag:yaml.org,2002:map" // The tag !!map is used to denote mapping.
|
||||
|
||||
// Not in original libyaml.
|
||||
yaml_BINARY_TAG = "tag:yaml.org,2002:binary"
|
||||
yaml_MERGE_TAG = "tag:yaml.org,2002:merge"
|
||||
|
||||
yaml_DEFAULT_SCALAR_TAG = yaml_STR_TAG // The default scalar tag is !!str.
|
||||
yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG // The default sequence tag is !!seq.
|
||||
yaml_DEFAULT_MAPPING_TAG = yaml_MAP_TAG // The default mapping tag is !!map.
|
||||
)
|
||||
|
||||
type yaml_node_type_t int
|
||||
|
||||
// Node types.
|
||||
const (
|
||||
// An empty node.
|
||||
yaml_NO_NODE yaml_node_type_t = iota
|
||||
|
||||
yaml_SCALAR_NODE // A scalar node.
|
||||
yaml_SEQUENCE_NODE // A sequence node.
|
||||
yaml_MAPPING_NODE // A mapping node.
|
||||
)
|
||||
|
||||
// An element of a sequence node.
|
||||
type yaml_node_item_t int
|
||||
|
||||
// An element of a mapping node.
|
||||
type yaml_node_pair_t struct {
|
||||
key int // The key of the element.
|
||||
value int // The value of the element.
|
||||
}
|
||||
|
||||
// The node structure.
|
||||
type yaml_node_t struct {
|
||||
typ yaml_node_type_t // The node type.
|
||||
tag []byte // The node tag.
|
||||
|
||||
// The node data.
|
||||
|
||||
// The scalar parameters (for yaml_SCALAR_NODE).
|
||||
scalar struct {
|
||||
value []byte // The scalar value.
|
||||
length int // The length of the scalar value.
|
||||
style yaml_scalar_style_t // The scalar style.
|
||||
}
|
||||
|
||||
// The sequence parameters (for YAML_SEQUENCE_NODE).
|
||||
sequence struct {
|
||||
items_data []yaml_node_item_t // The stack of sequence items.
|
||||
style yaml_sequence_style_t // The sequence style.
|
||||
}
|
||||
|
||||
// The mapping parameters (for yaml_MAPPING_NODE).
|
||||
mapping struct {
|
||||
pairs_data []yaml_node_pair_t // The stack of mapping pairs (key, value).
|
||||
pairs_start *yaml_node_pair_t // The beginning of the stack.
|
||||
pairs_end *yaml_node_pair_t // The end of the stack.
|
||||
pairs_top *yaml_node_pair_t // The top of the stack.
|
||||
style yaml_mapping_style_t // The mapping style.
|
||||
}
|
||||
|
||||
start_mark yaml_mark_t // The beginning of the node.
|
||||
end_mark yaml_mark_t // The end of the node.
|
||||
|
||||
}
|
||||
|
||||
// The document structure.
|
||||
type yaml_document_t struct {
|
||||
|
||||
// The document nodes.
|
||||
nodes []yaml_node_t
|
||||
|
||||
// The version directive.
|
||||
version_directive *yaml_version_directive_t
|
||||
|
||||
// The list of tag directives.
|
||||
tag_directives_data []yaml_tag_directive_t
|
||||
tag_directives_start int // The beginning of the tag directives list.
|
||||
tag_directives_end int // The end of the tag directives list.
|
||||
|
||||
start_implicit int // Is the document start indicator implicit?
|
||||
end_implicit int // Is the document end indicator implicit?
|
||||
|
||||
// The start/end of the document.
|
||||
start_mark, end_mark yaml_mark_t
|
||||
}
|
||||
|
||||
// The prototype of a read handler.
|
||||
//
|
||||
// The read handler is called when the parser needs to read more bytes from the
|
||||
// source. The handler should write not more than size bytes to the buffer.
|
||||
// The number of written bytes should be set to the size_read variable.
|
||||
//
|
||||
// [in,out] data A pointer to an application data specified by
|
||||
// yaml_parser_set_input().
|
||||
// [out] buffer The buffer to write the data from the source.
|
||||
// [in] size The size of the buffer.
|
||||
// [out] size_read The actual number of bytes read from the source.
|
||||
//
|
||||
// On success, the handler should return 1. If the handler failed,
|
||||
// the returned value should be 0. On EOF, the handler should set the
|
||||
// size_read to 0 and return 1.
|
||||
type yaml_read_handler_t func(parser *yaml_parser_t, buffer []byte) (n int, err error)
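
// Illustrative sketch (not part of the original source): a read handler
// matching the Go prototype above. It fills the buffer from an io.Reader and
// reports the byte count plus io.EOF at end of input, which the reader code
// translates into the eof flag. The constructor name newReaderHandler is
// hypothetical; the real constructors live in the parser setup code.
func newReaderHandler(r io.Reader) yaml_read_handler_t {
	return func(parser *yaml_parser_t, buffer []byte) (n int, err error) {
		return r.Read(buffer)
	}
}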
|
||||
|
||||
// This structure holds information about a potential simple key.
|
||||
type yaml_simple_key_t struct {
|
||||
possible bool // Is a simple key possible?
|
||||
required bool // Is a simple key required?
|
||||
token_number int // The number of the token.
|
||||
mark yaml_mark_t // The position mark.
|
||||
}
|
||||
|
||||
// The states of the parser.
|
||||
type yaml_parser_state_t int
|
||||
|
||||
const (
|
||||
yaml_PARSE_STREAM_START_STATE yaml_parser_state_t = iota
|
||||
|
||||
yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE // Expect the beginning of an implicit document.
|
||||
yaml_PARSE_DOCUMENT_START_STATE // Expect DOCUMENT-START.
|
||||
yaml_PARSE_DOCUMENT_CONTENT_STATE // Expect the content of a document.
|
||||
yaml_PARSE_DOCUMENT_END_STATE // Expect DOCUMENT-END.
|
||||
yaml_PARSE_BLOCK_NODE_STATE // Expect a block node.
|
||||
yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE // Expect a block node or indentless sequence.
|
||||
yaml_PARSE_FLOW_NODE_STATE // Expect a flow node.
|
||||
yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a block sequence.
|
||||
yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE // Expect an entry of a block sequence.
|
||||
yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE // Expect an entry of an indentless sequence.
|
||||
yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping.
|
||||
yaml_PARSE_BLOCK_MAPPING_KEY_STATE // Expect a block mapping key.
|
||||
yaml_PARSE_BLOCK_MAPPING_VALUE_STATE // Expect a block mapping value.
|
||||
yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a flow sequence.
|
||||
yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE // Expect an entry of a flow sequence.
|
||||
yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE // Expect a key of an ordered mapping.
|
||||
yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE // Expect a value of an ordered mapping.
|
||||
yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE   // Expect the end of an ordered mapping entry.
|
||||
yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping.
|
||||
yaml_PARSE_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping.
|
||||
yaml_PARSE_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping.
|
||||
yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE // Expect an empty value of a flow mapping.
|
||||
yaml_PARSE_END_STATE // Expect nothing.
|
||||
)
|
||||
|
||||
func (ps yaml_parser_state_t) String() string {
|
||||
switch ps {
|
||||
case yaml_PARSE_STREAM_START_STATE:
|
||||
return "yaml_PARSE_STREAM_START_STATE"
|
||||
case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE:
|
||||
return "yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE"
|
||||
case yaml_PARSE_DOCUMENT_START_STATE:
|
||||
return "yaml_PARSE_DOCUMENT_START_STATE"
|
||||
case yaml_PARSE_DOCUMENT_CONTENT_STATE:
|
||||
return "yaml_PARSE_DOCUMENT_CONTENT_STATE"
|
||||
case yaml_PARSE_DOCUMENT_END_STATE:
|
||||
return "yaml_PARSE_DOCUMENT_END_STATE"
|
||||
case yaml_PARSE_BLOCK_NODE_STATE:
|
||||
return "yaml_PARSE_BLOCK_NODE_STATE"
|
||||
case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE:
|
||||
return "yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE"
|
||||
case yaml_PARSE_FLOW_NODE_STATE:
|
||||
return "yaml_PARSE_FLOW_NODE_STATE"
|
||||
case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE:
|
||||
return "yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE"
|
||||
case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE:
|
||||
return "yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE"
|
||||
case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE:
|
||||
return "yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE"
|
||||
case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE:
|
||||
return "yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE"
|
||||
case yaml_PARSE_BLOCK_MAPPING_KEY_STATE:
|
||||
return "yaml_PARSE_BLOCK_MAPPING_KEY_STATE"
|
||||
case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE:
|
||||
return "yaml_PARSE_BLOCK_MAPPING_VALUE_STATE"
|
||||
case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE:
|
||||
return "yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE"
|
||||
case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE:
|
||||
return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE"
|
||||
case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE:
|
||||
return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE"
|
||||
case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE:
|
||||
return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE"
|
||||
case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE:
|
||||
return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE"
|
||||
case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE:
|
||||
return "yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE"
|
||||
case yaml_PARSE_FLOW_MAPPING_KEY_STATE:
|
||||
return "yaml_PARSE_FLOW_MAPPING_KEY_STATE"
|
||||
case yaml_PARSE_FLOW_MAPPING_VALUE_STATE:
|
||||
return "yaml_PARSE_FLOW_MAPPING_VALUE_STATE"
|
||||
case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE:
|
||||
return "yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE"
|
||||
case yaml_PARSE_END_STATE:
|
||||
return "yaml_PARSE_END_STATE"
|
||||
}
|
||||
return "<unknown parser state>"
|
||||
}
|
||||
|
||||
// This structure holds aliases data.
|
||||
type yaml_alias_data_t struct {
|
||||
anchor []byte // The anchor.
|
||||
index int // The node id.
|
||||
mark yaml_mark_t // The anchor mark.
|
||||
}
|
||||
|
||||
// The parser structure.
|
||||
//
|
||||
// All members are internal. Manage the structure using the
|
||||
// yaml_parser_ family of functions.
|
||||
type yaml_parser_t struct {
|
||||
|
||||
// Error handling
|
||||
|
||||
error yaml_error_type_t // Error type.
|
||||
|
||||
problem string // Error description.
|
||||
|
||||
// The byte about which the problem occurred.
|
||||
problem_offset int
|
||||
problem_value int
|
||||
problem_mark yaml_mark_t
|
||||
|
||||
// The error context.
|
||||
context string
|
||||
context_mark yaml_mark_t
|
||||
|
||||
// Reader stuff
|
||||
|
||||
read_handler yaml_read_handler_t // Read handler.
|
||||
|
||||
input_file io.Reader // File input data.
|
||||
input []byte // String input data.
|
||||
input_pos int
|
||||
|
||||
eof bool // EOF flag
|
||||
|
||||
buffer []byte // The working buffer.
|
||||
buffer_pos int // The current position of the buffer.
|
||||
|
||||
unread int // The number of unread characters in the buffer.
|
||||
|
||||
raw_buffer []byte // The raw buffer.
|
||||
raw_buffer_pos int // The current position of the buffer.
|
||||
|
||||
encoding yaml_encoding_t // The input encoding.
|
||||
|
||||
offset int // The offset of the current position (in bytes).
|
||||
mark yaml_mark_t // The mark of the current position.
|
||||
|
||||
// Scanner stuff
|
||||
|
||||
stream_start_produced bool // Have we started to scan the input stream?
|
||||
stream_end_produced bool // Have we reached the end of the input stream?
|
||||
|
||||
flow_level int // The number of unclosed '[' and '{' indicators.
|
||||
|
||||
tokens []yaml_token_t // The tokens queue.
|
||||
tokens_head int // The head of the tokens queue.
|
||||
tokens_parsed int // The number of tokens fetched from the queue.
|
||||
token_available bool // Does the tokens queue contain a token ready for dequeueing.
|
||||
|
||||
indent int // The current indentation level.
|
||||
indents []int // The indentation levels stack.
|
||||
|
||||
simple_key_allowed bool // May a simple key occur at the current position?
|
||||
simple_keys []yaml_simple_key_t // The stack of simple keys.
|
||||
|
||||
// Parser stuff
|
||||
|
||||
state yaml_parser_state_t // The current parser state.
|
||||
states []yaml_parser_state_t // The parser states stack.
|
||||
marks []yaml_mark_t // The stack of marks.
|
||||
tag_directives []yaml_tag_directive_t // The list of TAG directives.
|
||||
|
||||
// Dumper stuff
|
||||
|
||||
aliases []yaml_alias_data_t // The alias data.
|
||||
|
||||
document *yaml_document_t // The currently parsed document.
|
||||
}
|
||||
|
||||
// Emitter Definitions
|
||||
|
||||
// The prototype of a write handler.
|
||||
//
|
||||
// The write handler is called when the emitter needs to flush the accumulated
|
||||
// characters to the output. The handler should write the contents of
|
||||
// @a buffer to the output.
|
||||
//
|
||||
// @param[in,out] emitter The emitter requesting the flush.
|
||||
// @param[in] buffer The buffer with bytes to be written.
|
||||
//
|
||||
// @returns On success, the handler should return nil. If the write failed,
|
||||
// the handler should return an error.
|
||||
//
|
||||
type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error
|
||||
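// As an illustration only (not part of this file): a write handler in the
// style of this package's string writer simply appends the flushed bytes to
// the emitter's output buffer and reports failure through the returned error:
//
//	func example_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
//		*emitter.output_buffer = append(*emitter.output_buffer, buffer...)
//		return nil
//	}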
|
||||
type yaml_emitter_state_t int
|
||||
|
||||
// The emitter states.
|
||||
const (
|
||||
// Expect STREAM-START.
|
||||
yaml_EMIT_STREAM_START_STATE yaml_emitter_state_t = iota
|
||||
|
||||
yaml_EMIT_FIRST_DOCUMENT_START_STATE // Expect the first DOCUMENT-START or STREAM-END.
|
||||
yaml_EMIT_DOCUMENT_START_STATE // Expect DOCUMENT-START or STREAM-END.
|
||||
yaml_EMIT_DOCUMENT_CONTENT_STATE // Expect the content of a document.
|
||||
yaml_EMIT_DOCUMENT_END_STATE // Expect DOCUMENT-END.
|
||||
yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a flow sequence.
|
||||
yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE // Expect an item of a flow sequence.
|
||||
yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping.
|
||||
yaml_EMIT_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping.
|
||||
yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a flow mapping.
|
||||
yaml_EMIT_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping.
|
||||
yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a block sequence.
|
||||
yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE // Expect an item of a block sequence.
|
||||
yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping.
|
||||
yaml_EMIT_BLOCK_MAPPING_KEY_STATE // Expect the key of a block mapping.
|
||||
yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a block mapping.
|
||||
yaml_EMIT_BLOCK_MAPPING_VALUE_STATE // Expect a value of a block mapping.
|
||||
yaml_EMIT_END_STATE // Expect nothing.
|
||||
)
|
||||
|
||||
// The emitter structure.
|
||||
//
|
||||
// All members are internal. Manage the structure using the @c yaml_emitter_
|
||||
// family of functions.
|
||||
type yaml_emitter_t struct {
|
||||
|
||||
// Error handling
|
||||
|
||||
error yaml_error_type_t // Error type.
|
||||
problem string // Error description.
|
||||
|
||||
// Writer stuff
|
||||
|
||||
write_handler yaml_write_handler_t // Write handler.
|
||||
|
||||
output_buffer *[]byte // String output data.
|
||||
output_file io.Writer // File output data.
|
||||
|
||||
buffer []byte // The working buffer.
|
||||
buffer_pos int // The current position of the buffer.
|
||||
|
||||
raw_buffer []byte // The raw buffer.
|
||||
raw_buffer_pos int // The current position of the buffer.
|
||||
|
||||
encoding yaml_encoding_t // The stream encoding.
|
||||
|
||||
// Emitter stuff
|
||||
|
||||
canonical bool // Is the output in the canonical style?
|
||||
best_indent int // The number of indentation spaces.
|
||||
best_width int // The preferred width of the output lines.
|
||||
unicode bool // Allow unescaped non-ASCII characters?
|
||||
line_break yaml_break_t // The preferred line break.
|
||||
|
||||
state yaml_emitter_state_t // The current emitter state.
|
||||
states []yaml_emitter_state_t // The stack of states.
|
||||
|
||||
events []yaml_event_t // The event queue.
|
||||
events_head int // The head of the event queue.
|
||||
|
||||
indents []int // The stack of indentation levels.
|
||||
|
||||
tag_directives []yaml_tag_directive_t // The list of tag directives.
|
||||
|
||||
indent int // The current indentation level.
|
||||
|
||||
flow_level int // The current flow level.
|
||||
|
||||
root_context bool // Is it the document root context?
|
||||
sequence_context bool // Is it a sequence context?
|
||||
mapping_context bool // Is it a mapping context?
|
||||
simple_key_context bool // Is it a simple mapping key context?
|
||||
|
||||
line int // The current line.
|
||||
column int // The current column.
|
||||
whitespace bool // Was the last character written a whitespace?
|
||||
indention bool // Was the last character an indentation character (' ', '-', '?', ':')?
|
||||
open_ended bool // Is an explicit document end required?
|
||||
|
||||
// Anchor analysis.
|
||||
anchor_data struct {
|
||||
anchor []byte // The anchor value.
|
||||
alias bool // Is it an alias?
|
||||
}
|
||||
|
||||
// Tag analysis.
|
||||
tag_data struct {
|
||||
handle []byte // The tag handle.
|
||||
suffix []byte // The tag suffix.
|
||||
}
|
||||
|
||||
// Scalar analysis.
|
||||
scalar_data struct {
|
||||
value []byte // The scalar value.
|
||||
multiline bool // Does the scalar contain line breaks?
|
||||
flow_plain_allowed bool // Can the scalar be expressed in the flow plain style?
|
||||
block_plain_allowed bool // Can the scalar be expressed in the block plain style?
|
||||
single_quoted_allowed bool // Can the scalar be expressed in the single quoted style?
|
||||
block_allowed bool // Can the scalar be expressed in the literal or folded styles?
|
||||
style yaml_scalar_style_t // The output style.
|
||||
}
|
||||
|
||||
// Dumper stuff
|
||||
|
||||
opened bool // Has the stream been opened?
|
||||
closed bool // Has the stream been closed?
|
||||
|
||||
// The information associated with the document nodes.
|
||||
anchors *struct {
|
||||
references int // The number of references.
|
||||
anchor int // The anchor id.
|
||||
serialized bool // Has the node been emitted?
|
||||
}
|
||||
|
||||
last_anchor_id int // The last assigned anchor id.
|
||||
|
||||
document *yaml_document_t // The currently emitted document.
|
||||
}
|
173
_vendor/src/gopkg.in/yaml.v2/yamlprivateh.go
Normal file
@ -0,0 +1,173 @@
|
||||
package yaml
|
||||
|
||||
const (
|
||||
// The size of the input raw buffer.
|
||||
input_raw_buffer_size = 512
|
||||
|
||||
// The size of the input buffer.
|
||||
// It should be possible to decode the whole raw buffer.
|
||||
input_buffer_size = input_raw_buffer_size * 3
|
||||
|
||||
// The size of the output buffer.
|
||||
output_buffer_size = 128
|
||||
|
||||
// The size of the output raw buffer.
|
||||
// It should be possible to encode the whole output buffer.
|
||||
output_raw_buffer_size = (output_buffer_size*2 + 2)
|
||||
|
||||
// The size of other stacks and queues.
|
||||
initial_stack_size = 16
|
||||
initial_queue_size = 16
|
||||
initial_string_size = 16
|
||||
)
|
||||
|
||||
// Check if the character at the specified position is an alphabetical
|
||||
// character, a digit, '_', or '-'.
|
||||
func is_alpha(b []byte, i int) bool {
|
||||
return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'Z' || b[i] >= 'a' && b[i] <= 'z' || b[i] == '_' || b[i] == '-'
|
||||
}
|
||||
|
||||
// Check if the character at the specified position is a digit.
|
||||
func is_digit(b []byte, i int) bool {
|
||||
return b[i] >= '0' && b[i] <= '9'
|
||||
}
|
||||
|
||||
// Get the value of a digit.
|
||||
func as_digit(b []byte, i int) int {
|
||||
return int(b[i]) - '0'
|
||||
}
|
||||
|
||||
// Check if the character at the specified position is a hex-digit.
|
||||
func is_hex(b []byte, i int) bool {
|
||||
return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'F' || b[i] >= 'a' && b[i] <= 'f'
|
||||
}
|
||||
|
||||
// Get the value of a hex-digit.
|
||||
func as_hex(b []byte, i int) int {
|
||||
bi := b[i]
|
||||
if bi >= 'A' && bi <= 'F' {
|
||||
return int(bi) - 'A' + 10
|
||||
}
|
||||
if bi >= 'a' && bi <= 'f' {
|
||||
return int(bi) - 'a' + 10
|
||||
}
|
||||
return int(bi) - '0'
|
||||
}
|
||||
|
||||
// Check if the character is ASCII.
|
||||
func is_ascii(b []byte, i int) bool {
|
||||
return b[i] <= 0x7F
|
||||
}
|
||||
|
||||
// Check if the character at the start of the buffer can be printed unescaped.
|
||||
func is_printable(b []byte, i int) bool {
|
||||
return ((b[i] == 0x0A) || // . == #x0A
|
||||
(b[i] >= 0x20 && b[i] <= 0x7E) || // #x20 <= . <= #x7E
|
||||
(b[i] == 0xC2 && b[i+1] >= 0xA0) || // #xA0 <= . <= #xD7FF
|
||||
(b[i] > 0xC2 && b[i] < 0xED) ||
|
||||
(b[i] == 0xED && b[i+1] < 0xA0) ||
|
||||
(b[i] == 0xEE) ||
|
||||
(b[i] == 0xEF && // #xE000 <= . <= #xFFFD
|
||||
!(b[i+1] == 0xBB && b[i+2] == 0xBF) && // && . != #xFEFF
|
||||
!(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF))))
|
||||
}
|
||||
|
||||
// Check if the character at the specified position is NUL.
|
||||
func is_z(b []byte, i int) bool {
|
||||
return b[i] == 0x00
|
||||
}
|
||||
|
||||
// Check if the beginning of the buffer is a BOM.
|
||||
func is_bom(b []byte, i int) bool {
|
||||
return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF
|
||||
}
|
||||
|
||||
// Check if the character at the specified position is space.
|
||||
func is_space(b []byte, i int) bool {
|
||||
return b[i] == ' '
|
||||
}
|
||||
|
||||
// Check if the character at the specified position is tab.
|
||||
func is_tab(b []byte, i int) bool {
|
||||
return b[i] == '\t'
|
||||
}
|
||||
|
||||
// Check if the character at the specified position is blank (space or tab).
|
||||
func is_blank(b []byte, i int) bool {
|
||||
//return is_space(b, i) || is_tab(b, i)
|
||||
return b[i] == ' ' || b[i] == '\t'
|
||||
}
|
||||
|
||||
// Check if the character at the specified position is a line break.
|
||||
func is_break(b []byte, i int) bool {
|
||||
return (b[i] == '\r' || // CR (#xD)
|
||||
b[i] == '\n' || // LF (#xA)
|
||||
b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
|
||||
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
|
||||
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9) // PS (#x2029)
|
||||
}
|
||||
|
||||
func is_crlf(b []byte, i int) bool {
|
||||
return b[i] == '\r' && b[i+1] == '\n'
|
||||
}
|
||||
|
||||
// Check if the character is a line break or NUL.
|
||||
func is_breakz(b []byte, i int) bool {
|
||||
//return is_break(b, i) || is_z(b, i)
|
||||
return ( // is_break:
|
||||
b[i] == '\r' || // CR (#xD)
|
||||
b[i] == '\n' || // LF (#xA)
|
||||
b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
|
||||
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
|
||||
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
|
||||
// is_z:
|
||||
b[i] == 0)
|
||||
}
|
||||
|
||||
// Check if the character is a line break, space, or NUL.
|
||||
func is_spacez(b []byte, i int) bool {
|
||||
//return is_space(b, i) || is_breakz(b, i)
|
||||
return ( // is_space:
|
||||
b[i] == ' ' ||
|
||||
// is_breakz:
|
||||
b[i] == '\r' || // CR (#xD)
|
||||
b[i] == '\n' || // LF (#xA)
|
||||
b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
|
||||
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
|
||||
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
|
||||
b[i] == 0)
|
||||
}
|
||||
|
||||
// Check if the character is a line break, space, tab, or NUL.
|
||||
func is_blankz(b []byte, i int) bool {
|
||||
//return is_blank(b, i) || is_breakz(b, i)
|
||||
return ( // is_blank:
|
||||
b[i] == ' ' || b[i] == '\t' ||
|
||||
// is_breakz:
|
||||
b[i] == '\r' || // CR (#xD)
|
||||
b[i] == '\n' || // LF (#xA)
|
||||
b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
|
||||
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
|
||||
b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
|
||||
b[i] == 0)
|
||||
}
|
||||
|
||||
// Determine the width of the character.
|
||||
func width(b byte) int {
|
||||
// Don't replace these by a switch without first
|
||||
// confirming that it is being inlined.
|
||||
if b&0x80 == 0x00 {
|
||||
return 1
|
||||
}
|
||||
if b&0xE0 == 0xC0 {
|
||||
return 2
|
||||
}
|
||||
if b&0xF0 == 0xE0 {
|
||||
return 3
|
||||
}
|
||||
if b&0xF8 == 0xF0 {
|
||||
return 4
|
||||
}
|
||||
return 0
|
||||
|
||||
}
|
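As an editorial aside rather than part of the vendored file: the width helper above is what pairs with the is_* predicates when the scanner walks a buffer character by character. A minimal sketch of that walking pattern (it assumes it sits inside the yaml package, since these helpers are unexported):

// countChars counts the UTF-8 characters in b, advancing by the byte width
// that the width helper reports for each leading byte.
func countChars(b []byte) int {
	n := 0
	i := 0
	for i < len(b) {
		w := width(b[i])
		if w == 0 {
			w = 1 // invalid leading byte: step over it to avoid looping forever
		}
		i += w
		n++
	}
	return n
}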
93
config.go
Normal file
@ -0,0 +1,93 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"flag"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
type config struct {
|
||||
Bind string
|
||||
ReadTimeout int `yaml:"read_timeout"`
|
||||
WriteTimeout int `yaml:"write_timeout"`
|
||||
|
||||
Key string
|
||||
Salt string
|
||||
KeyBin []byte
|
||||
SaltBin []byte
|
||||
|
||||
MaxSrcDimension int `yaml:"max_src_dimension"`
|
||||
|
||||
Quality int
|
||||
Compression int
|
||||
}
|
||||
|
||||
var conf = config{
|
||||
Bind: ":8080",
|
||||
MaxSrcDimension: 4096,
|
||||
}
|
||||
|
||||
func absPathToFile(path string) string {
|
||||
if filepath.IsAbs(path) {
|
||||
return path
|
||||
}
|
||||
|
||||
appPath, _ := filepath.Abs(filepath.Dir(os.Args[0]))
|
||||
return filepath.Join(appPath, path)
|
||||
}
|
||||
|
||||
func init() {
|
||||
cpath := flag.String(
|
||||
"config", "../config.yml", "path to configuration file",
|
||||
)
|
||||
flag.Parse()
|
||||
|
||||
file, err := os.Open(absPathToFile(*cpath))
|
||||
if err != nil {
|
||||
log.Fatalln(err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
cdata, err := ioutil.ReadAll(file)
|
||||
if err != nil {
|
||||
log.Fatalln(err)
|
||||
}
|
||||
|
||||
err = yaml.Unmarshal(cdata, &conf)
|
||||
if err != nil {
|
||||
log.Fatalln(err)
|
||||
}
|
||||
|
||||
if len(conf.Bind) == 0 {
|
||||
conf.Bind = ":8080"
|
||||
}
|
||||
|
||||
if conf.MaxSrcDimension == 0 {
|
||||
conf.MaxSrcDimension = 4096
|
||||
}
|
||||
|
||||
if conf.KeyBin, err = hex.DecodeString(conf.Key); err != nil {
|
||||
log.Fatalln("Invalid key. Key should be encoded to hex")
|
||||
}
|
||||
|
||||
if conf.SaltBin, err = hex.DecodeString(conf.Salt); err != nil {
|
||||
log.Fatalln("Invalid salt. Salt should be encoded to hex")
|
||||
}
|
||||
|
||||
if conf.Quality == 0 {
|
||||
conf.Quality = 80
|
||||
}
|
||||
|
||||
if conf.Compression == 0 {
|
||||
conf.Compression = 6
|
||||
}
|
||||
}
|
12
config.yml.example
Normal file
@ -0,0 +1,12 @@
|
||||
bind: ":8080"
|
||||
read_timeout: 10
|
||||
write_timeout: 10
|
||||
|
||||
# key and salt are hex-encoded
|
||||
key: 943b421c9eb07c830af81030552c86009268de4e532ba2ee2eab8247c6da0881
|
||||
salt: 520f986b998545b4785e0defbc4f3c1203f22de2374a3d53cb7a7fe9fea309c5
|
||||
|
||||
max_src_dimension: 4096
|
||||
|
||||
quality: 80
|
||||
compression: 6
|
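The key and salt values above are examples only; any pair of random 32-byte values works. A minimal sketch of generating a matching hex-encoded pair (a standalone program, not part of this repository):

package main

import (
	"crypto/rand"
	"encoding/hex"
	"fmt"
	"log"
)

// Prints a random hex-encoded key and salt in the format config.yml expects.
func main() {
	buf := make([]byte, 64)
	if _, err := rand.Read(buf); err != nil {
		log.Fatalln(err)
	}
	fmt.Printf("key: %s\n", hex.EncodeToString(buf[:32]))
	fmt.Printf("salt: %s\n", hex.EncodeToString(buf[32:]))
}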
26
crypt.go
Normal file
@ -0,0 +1,26 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"crypto/hmac"
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
)
|
||||
|
||||
func validatePath(token, path string) error {
|
||||
messageMAC, err := base64.RawURLEncoding.DecodeString(token)
|
||||
if err != nil {
|
||||
return errors.New("Invalid token encoding")
|
||||
}
|
||||
|
||||
mac := hmac.New(sha256.New, conf.KeyBin)
|
||||
mac.Write(conf.SaltBin)
|
||||
mac.Write([]byte(path))
|
||||
expectedMAC := mac.Sum(nil)
|
||||
|
||||
if !hmac.Equal(messageMAC, expectedMAC) {
|
||||
return errors.New("Invalid token")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
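For reference, the client-side signing step is the mirror image of validatePath above. A minimal sketch (signPath is a hypothetical helper, not part of this repository; key and salt correspond to the decoded KeyBin and SaltBin):

package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
)

// signPath builds the token validatePath expects: an HMAC-SHA256 of the salt
// followed by the path, keyed with the binary key, then base64 URL-encoded
// without padding.
func signPath(key, salt []byte, path string) string {
	mac := hmac.New(sha256.New, key)
	mac.Write(salt)
	mac.Write([]byte(path))
	return base64.RawURLEncoding.EncodeToString(mac.Sum(nil))
}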
59
download.go
Normal file
@ -0,0 +1,59 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"image"
|
||||
"net/http"
|
||||
|
||||
_ "image/gif"
|
||||
_ "image/jpeg"
|
||||
_ "image/png"
|
||||
)
|
||||
|
||||
const chunkSize = 4096
|
||||
|
||||
func checkTypeAndDimensions(b []byte) error {
|
||||
imgconf, _, err := image.DecodeConfig(bytes.NewReader(b))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if imgconf.Width > conf.MaxSrcDimension || imgconf.Height > conf.MaxSrcDimension {
|
||||
return errors.New("File is too big")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func readAndCheckImage(res *http.Response) ([]byte, error) {
|
||||
b := make([]byte, chunkSize)
|
||||
n, err := res.Body.Read(b)
|
||||
if err != nil && err != io.EOF {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err = checkTypeAndDimensions(b[:n]); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
buf := bytes.NewBuffer(b[:n])
|
||||
|
||||
if res.ContentLength > 0 {
|
||||
buf.Grow(int(res.ContentLength))
|
||||
}
|
||||
|
||||
if _, err = buf.ReadFrom(res.Body); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
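// A note on the probe above: image.DecodeConfig only needs the header bytes
// to report the format and dimensions, so a single 4096-byte chunk is usually
// enough for GIF, PNG and JPEG. A JPEG that carries a large metadata block
// before its size markers may still fail the probe and be rejected; that is
// the trade-off for refusing oversized sources before downloading them fully.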
|
||||
func downloadImage(url string) ([]byte, error) {
|
||||
res, err := http.Get(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
return readAndCheckImage(res)
|
||||
}
|
147
main.go
Normal file
@ -0,0 +1,147 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"fmt"
|
||||
"image"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type httpHandler struct{}
|
||||
|
||||
func parsePath(r *http.Request) (string, processingOptions, error) {
|
||||
var po processingOptions
|
||||
var err error
|
||||
|
||||
path := r.URL.Path
|
||||
parts := strings.Split(strings.TrimPrefix(path, "/"), "/")
|
||||
|
||||
if len(parts) < 7 {
|
||||
return "", po, errors.New("Invalid path")
|
||||
}
|
||||
|
||||
token := parts[0]
|
||||
|
||||
if err = validatePath(token, strings.TrimPrefix(path, fmt.Sprintf("/%s", token))); err != nil {
|
||||
return "", po, err
|
||||
}
|
||||
|
||||
po.resize = parts[1]
|
||||
|
||||
if po.width, err = strconv.Atoi(parts[2]); err != nil {
|
||||
return "", po, fmt.Errorf("Invalid width: %s", parts[2])
|
||||
}
|
||||
|
||||
if po.height, err = strconv.Atoi(parts[3]); err != nil {
|
||||
return "", po, fmt.Errorf("Invalid height: %s", parts[3])
|
||||
}
|
||||
|
||||
if g, ok := gravityTypes[parts[4]]; ok {
|
||||
po.gravity = g
|
||||
} else {
|
||||
return "", po, fmt.Errorf("Invalid gravity: %s", parts[4])
|
||||
}
|
||||
|
||||
po.enlarge = parts[5] != "0"
|
||||
|
||||
filenameParts := strings.Split(strings.Join(parts[6:], ""), ".")
|
||||
|
||||
if len(filenameParts) < 2 {
|
||||
po.format = imageTypes["jpg"]
|
||||
} else if f, ok := imageTypes[filenameParts[1]]; ok {
|
||||
po.format = f
|
||||
} else {
|
||||
return "", po, fmt.Errorf("Invalid image format: %s", filenameParts[1])
|
||||
}
|
||||
|
||||
filename, err := base64.RawURLEncoding.DecodeString(filenameParts[0])
|
||||
if err != nil {
|
||||
return "", po, errors.New("Invalid filename encoding")
|
||||
}
|
||||
|
||||
return string(filename), po, nil
|
||||
}
|
||||
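// For reference, parsePath expects request paths of the form
//
//	/{token}/{resize}/{width}/{height}/{gravity}/{enlarge}/{base64(source URL)}.{extension}
//
// where the source URL is encoded with URL-safe base64 and no padding, for
// example (with a placeholder token; the real one is the HMAC from crypt.go
// computed over everything after it, including the leading slash):
//
//	/{token}/fill/300/400/sm/0/aHR0cDovL2V4YW1wbGUuY29tL2ltYWdlLmpwZw.png
//
// resize is fit, fill or crop (anything else forces the exact size), gravity
// is one of ce, no, ea, so, we, sm, enlarge is "0" to disable upscaling, and
// the extension defaults to jpg when omitted.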
|
||||
func imageContentType(b []byte) string {
|
||||
_, imgtype, _ := image.DecodeConfig(bytes.NewReader(b))
|
||||
return fmt.Sprintf("image/%s", imgtype)
|
||||
}
|
||||
|
||||
func logResponse(status int, msg string) {
|
||||
var color int
|
||||
|
||||
if status >= 500 {
|
||||
color = 31
|
||||
} else if status >= 400 {
|
||||
color = 33
|
||||
} else {
|
||||
color = 32
|
||||
}
|
||||
|
||||
log.Printf("|\033[7;%dm %d \033[0m| %s\n", color, status, msg)
|
||||
}
|
||||
|
||||
func respondWithImage(rw http.ResponseWriter, data []byte, imgURL string, po processingOptions) {
|
||||
logResponse(200, fmt.Sprintf("Processed: %s; %+v", imgURL, po))
|
||||
|
||||
rw.Header().Set("Content-Type", imageContentType(data))
|
||||
rw.WriteHeader(200)
|
||||
rw.Write(data)
|
||||
}
|
||||
|
||||
func respondWithError(rw http.ResponseWriter, status int, err error, msg string) {
|
||||
logResponse(status, err.Error())
|
||||
|
||||
rw.WriteHeader(status)
|
||||
rw.Write([]byte(msg))
|
||||
}
|
||||
|
||||
func (h httpHandler) ServeHTTP(rw http.ResponseWriter, r *http.Request) {
|
||||
log.Printf("GET: %s\n", r.URL.RequestURI())
|
||||
|
||||
imgURL, procOpt, err := parsePath(r)
|
||||
if err != nil {
|
||||
respondWithError(rw, 404, err, "Invalid image url")
|
||||
return
|
||||
}
|
||||
|
||||
if _, err = url.ParseRequestURI(imgURL); err != nil {
|
||||
respondWithError(rw, 404, err, "Invalid image url")
|
||||
return
|
||||
}
|
||||
|
||||
b, err := downloadImage(imgURL)
|
||||
if err != nil {
|
||||
respondWithError(rw, 404, err, "Image is unreacable")
|
||||
return
|
||||
}
|
||||
|
||||
b, err = processImage(b, procOpt)
|
||||
if err != nil {
|
||||
respondWithError(rw, 500, err, "Error occured while processing image")
|
||||
return
|
||||
}
|
||||
|
||||
respondWithImage(rw, b, imgURL, procOpt)
|
||||
}
|
||||
|
||||
func main() {
|
||||
s := &http.Server{
|
||||
Addr: conf.Bind,
|
||||
Handler: httpHandler{},
|
||||
ReadTimeout: time.Duration(conf.ReadTimeout) * time.Second,
|
||||
WriteTimeout: time.Duration(conf.WriteTimeout) * time.Second,
|
||||
MaxHeaderBytes: 1 << 20,
|
||||
}
|
||||
|
||||
log.Printf("Starting server at %s\n", conf.Bind)
|
||||
|
||||
log.Fatal(s.ListenAndServe())
|
||||
}
|
91
process.go
Normal file
@ -0,0 +1,91 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"math"
|
||||
|
||||
"github.com/h2non/bimg"
|
||||
)
|
||||
|
||||
type processingOptions struct {
|
||||
resize string
|
||||
width int
|
||||
height int
|
||||
gravity bimg.Gravity
|
||||
enlarge bool
|
||||
format bimg.ImageType
|
||||
}
|
||||
|
||||
var imageTypes = map[string]bimg.ImageType{
|
||||
"jpeg": bimg.JPEG,
|
||||
"jpg": bimg.JPEG,
|
||||
"png": bimg.PNG,
|
||||
}
|
||||
|
||||
var gravityTypes = map[string]bimg.Gravity{
|
||||
"ce": bimg.GravityCentre,
|
||||
"no": bimg.GravityNorth,
|
||||
"ea": bimg.GravityEast,
|
||||
"so": bimg.GravitySouth,
|
||||
"we": bimg.GravityWest,
|
||||
"sm": bimg.GravitySmart,
|
||||
}
|
||||
|
||||
func round(f float64) int {
|
||||
return int(f + .5)
|
||||
}
|
||||
func calcSize(size bimg.ImageSize, po processingOptions) (int, int) {
|
||||
if (po.width == size.Width && po.height == size.Height) || (po.resize != "fill" && po.resize != "fit") {
|
||||
return po.width, po.height
|
||||
}
|
||||
|
||||
fsw, fsh, fow, foh := float64(size.Width), float64(size.Height), float64(po.width), float64(po.height)
|
||||
|
||||
wr := fow / fsw
|
||||
hr := foh / fsh
|
||||
|
||||
var rate float64
|
||||
if po.resize == "fit" {
|
||||
rate = math.Min(wr, hr)
|
||||
} else {
|
||||
rate = math.Max(wr, hr)
|
||||
}
|
||||
|
||||
return round(math.Min(fsw*rate, fow)), round(math.Min(fsh*rate, foh))
|
||||
}
|
||||
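// Worked example: with a 4000x2000 source and a requested 800x600,
// wr = 800/4000 = 0.2 and hr = 600/2000 = 0.3. "fit" picks the smaller ratio
// (0.2) and yields 800x400, so the whole image fits inside the box; "fill"
// picks the larger ratio (0.3) and yields 1200x600, which the math.Min calls
// cap to 800x600 before bimg crops the scaled image to cover the box.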
|
||||
func processImage(p []byte, po processingOptions) ([]byte, error) {
|
||||
var err error
|
||||
|
||||
img := bimg.NewImage(p)
|
||||
|
||||
size, err := img.Size()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Default options
|
||||
opts := bimg.Options{
|
||||
Interpolator: bimg.Bicubic,
|
||||
Quality: conf.Quality,
|
||||
Compression: conf.Compression,
|
||||
Gravity: po.gravity,
|
||||
Enlarge: po.enlarge,
|
||||
Type: po.format,
|
||||
}
|
||||
|
||||
opts.Width, opts.Height = calcSize(size, po)
|
||||
|
||||
switch po.resize {
|
||||
case "fit":
|
||||
opts.Embed = true
|
||||
case "fill":
|
||||
opts.Embed = true
|
||||
opts.Crop = true
|
||||
case "crop":
|
||||
opts.Crop = true
|
||||
default:
|
||||
opts.Force = true
|
||||
}
|
||||
|
||||
return img.Process(opts)
|
||||
}
|