author     Paul Holzinger <pholzing@redhat.com>   2022-05-03 18:34:38 +0200
committer  Paul Holzinger <pholzing@redhat.com>   2022-05-03 18:43:18 +0200
commit     9166894c696582ee37893ce92a694ba227744fa0 (patch)
tree       8b4c9ffa20de8a4a484c0a427fc74b5c22634994
parent     1e0c50df38ff955011f7ebb83a0268f3f1cd2841 (diff)
vendor test dependencies instead of installing via network
We can vendor the test dependencies such as go-md2man, git-validation and
goimports. This allows us to always install the same version as specified in
go.mod. Also we do not rely on a network connection for this.

The advantage with this method is that dependabot will also update the
dependencies for us and we do not have to hardcode versions in the Makefile.

Signed-off-by: Paul Holzinger <pholzing@redhat.com>
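As a rough illustration of that pattern (the concrete file this commit adds is test/tools/tools.go, shown in the diff below), a build-tag-guarded Go file blank-imports the tool commands so that `go mod vendor` copies their sources into vendor/:

```go
//go:build tools
// +build tools

// Sketch of the "tools file" pattern: this file never builds as part of the
// normal package set (the tools tag is never enabled), but the blank imports
// record the tool commands in go.mod so `go mod vendor` vendors their source.
package tools

import (
	_ "github.com/cpuguy83/go-md2man/v2" // man page generator
	_ "github.com/vbatts/git-validation" // commit message / DCO checks
	_ "golang.org/x/tools/cmd/goimports" // import formatting
)
```

The Makefile then builds each tool from the vendored tree (for example `$(GO) install ./vendor/golang.org/x/tools/cmd/goimports`, as in the Makefile hunks below), so the installed binary always matches the version pinned in go.mod and no network access is required.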
-rw-r--r--  Makefile | 10
-rw-r--r--  go.mod | 3
-rw-r--r--  go.sum | 10
-rw-r--r--  test/tools/tools.go | 13
-rw-r--r--  vendor/github.com/cpuguy83/go-md2man/v2/.gitignore | 2
-rw-r--r--  vendor/github.com/cpuguy83/go-md2man/v2/Dockerfile | 20
-rw-r--r--  vendor/github.com/cpuguy83/go-md2man/v2/LICENSE.md | 21
-rw-r--r--  vendor/github.com/cpuguy83/go-md2man/v2/Makefile | 35
-rw-r--r--  vendor/github.com/cpuguy83/go-md2man/v2/README.md | 15
-rw-r--r--  vendor/github.com/cpuguy83/go-md2man/v2/go-md2man.1.md | 23
-rw-r--r--  vendor/github.com/cpuguy83/go-md2man/v2/go.mod | 5
-rw-r--r--  vendor/github.com/cpuguy83/go-md2man/v2/go.sum | 2
-rw-r--r--  vendor/github.com/cpuguy83/go-md2man/v2/md2man.go | 51
-rw-r--r--  vendor/github.com/cpuguy83/go-md2man/v2/md2man/md2man.go | 14
-rw-r--r--  vendor/github.com/cpuguy83/go-md2man/v2/md2man/roff.go | 336
-rw-r--r--  vendor/github.com/hashicorp/go-version/LICENSE | 354
-rw-r--r--  vendor/github.com/hashicorp/go-version/README.md | 66
-rw-r--r--  vendor/github.com/hashicorp/go-version/constraint.go | 204
-rw-r--r--  vendor/github.com/hashicorp/go-version/go.mod | 1
-rw-r--r--  vendor/github.com/hashicorp/go-version/version.go | 384
-rw-r--r--  vendor/github.com/hashicorp/go-version/version_collection.go | 17
-rw-r--r--  vendor/github.com/russross/blackfriday/v2/.gitignore | 8
-rw-r--r--  vendor/github.com/russross/blackfriday/v2/.travis.yml | 17
-rw-r--r--  vendor/github.com/russross/blackfriday/v2/LICENSE.txt | 29
-rw-r--r--  vendor/github.com/russross/blackfriday/v2/README.md | 335
-rw-r--r--  vendor/github.com/russross/blackfriday/v2/block.go | 1612
-rw-r--r--  vendor/github.com/russross/blackfriday/v2/doc.go | 46
-rw-r--r--  vendor/github.com/russross/blackfriday/v2/entities.go | 2236
-rw-r--r--  vendor/github.com/russross/blackfriday/v2/esc.go | 70
-rw-r--r--  vendor/github.com/russross/blackfriday/v2/go.mod | 1
-rw-r--r--  vendor/github.com/russross/blackfriday/v2/html.go | 952
-rw-r--r--  vendor/github.com/russross/blackfriday/v2/inline.go | 1228
-rw-r--r--  vendor/github.com/russross/blackfriday/v2/markdown.go | 950
-rw-r--r--  vendor/github.com/russross/blackfriday/v2/node.go | 360
-rw-r--r--  vendor/github.com/russross/blackfriday/v2/smartypants.go | 457
-rw-r--r--  vendor/github.com/vbatts/git-validation/.gitignore | 2
-rw-r--r--  vendor/github.com/vbatts/git-validation/.travis.yml | 37
-rw-r--r--  vendor/github.com/vbatts/git-validation/LICENSE | 21
-rw-r--r--  vendor/github.com/vbatts/git-validation/README.md | 106
-rw-r--r--  vendor/github.com/vbatts/git-validation/git/commits.go | 192
-rw-r--r--  vendor/github.com/vbatts/git-validation/go.mod | 8
-rw-r--r--  vendor/github.com/vbatts/git-validation/go.sum | 15
-rw-r--r--  vendor/github.com/vbatts/git-validation/main.go | 92
-rw-r--r--  vendor/github.com/vbatts/git-validation/rules/danglingwhitespace/rule.go | 39
-rw-r--r--  vendor/github.com/vbatts/git-validation/rules/dco/dco.go | 51
-rw-r--r--  vendor/github.com/vbatts/git-validation/rules/messageregexp/rule.go | 61
-rw-r--r--  vendor/github.com/vbatts/git-validation/rules/shortsubject/shortsubject.go | 44
-rw-r--r--  vendor/github.com/vbatts/git-validation/validate/rules.go | 134
-rw-r--r--  vendor/github.com/vbatts/git-validation/validate/runner.go | 109
-rw-r--r--  vendor/golang.org/x/mod/LICENSE | 27
-rw-r--r--  vendor/golang.org/x/mod/PATENTS | 22
-rw-r--r--  vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go | 78
-rw-r--r--  vendor/golang.org/x/mod/module/module.go | 841
-rw-r--r--  vendor/golang.org/x/mod/module/pseudo.go | 250
-rw-r--r--  vendor/golang.org/x/mod/semver/semver.go | 401
-rw-r--r--  vendor/golang.org/x/tools/cmd/goimports/doc.go | 47
-rw-r--r--  vendor/golang.org/x/tools/cmd/goimports/goimports.go | 380
-rw-r--r--  vendor/golang.org/x/tools/cmd/goimports/goimports_gc.go | 27
-rw-r--r--  vendor/golang.org/x/tools/cmd/goimports/goimports_not_gc.go | 12
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/enclosing.go | 639
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/imports.go | 482
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/rewrite.go | 483
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/util.go | 18
-rw-r--r--  vendor/golang.org/x/tools/internal/event/core/event.go | 85
-rw-r--r--  vendor/golang.org/x/tools/internal/event/core/export.go | 70
-rw-r--r--  vendor/golang.org/x/tools/internal/event/core/fast.go | 77
-rw-r--r--  vendor/golang.org/x/tools/internal/event/doc.go | 7
-rw-r--r--  vendor/golang.org/x/tools/internal/event/event.go | 127
-rw-r--r--  vendor/golang.org/x/tools/internal/event/keys/keys.go | 564
-rw-r--r--  vendor/golang.org/x/tools/internal/event/keys/standard.go | 22
-rw-r--r--  vendor/golang.org/x/tools/internal/event/label/label.go | 215
-rw-r--r--  vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go | 196
-rw-r--r--  vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go | 14
-rw-r--r--  vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go | 15
-rw-r--r--  vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go | 14
-rw-r--r--  vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go | 29
-rw-r--r--  vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go | 38
-rw-r--r--  vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go | 153
-rw-r--r--  vendor/golang.org/x/tools/internal/gocommand/invoke.go | 281
-rw-r--r--  vendor/golang.org/x/tools/internal/gocommand/vendor.go | 109
-rw-r--r--  vendor/golang.org/x/tools/internal/gocommand/version.go | 51
-rw-r--r--  vendor/golang.org/x/tools/internal/gopathwalk/walk.go | 264
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/fix.go | 1730
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/imports.go | 346
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/mod.go | 698
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/mod_cache.go | 236
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/sortimports.go | 291
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/zstdlib.go | 10756
-rw-r--r--  vendor/golang.org/x/xerrors/LICENSE | 27
-rw-r--r--  vendor/golang.org/x/xerrors/PATENTS | 22
-rw-r--r--  vendor/golang.org/x/xerrors/README | 2
-rw-r--r--  vendor/golang.org/x/xerrors/adaptor.go | 193
-rw-r--r--  vendor/golang.org/x/xerrors/codereview.cfg | 1
-rw-r--r--  vendor/golang.org/x/xerrors/doc.go | 22
-rw-r--r--  vendor/golang.org/x/xerrors/errors.go | 33
-rw-r--r--  vendor/golang.org/x/xerrors/fmt.go | 187
-rw-r--r--  vendor/golang.org/x/xerrors/format.go | 34
-rw-r--r--  vendor/golang.org/x/xerrors/frame.go | 56
-rw-r--r--  vendor/golang.org/x/xerrors/go.mod | 3
-rw-r--r--  vendor/golang.org/x/xerrors/internal/internal.go | 8
-rw-r--r--  vendor/golang.org/x/xerrors/wrap.go | 106
-rw-r--r--  vendor/modules.txt | 35
102 files changed, 31615 insertions(+), 7 deletions(-)
diff --git a/Makefile b/Makefile
index caa991b14..8cb6fed08 100644
--- a/Makefile
+++ b/Makefile
@@ -174,10 +174,6 @@ endif
# Necessary for nested-$(MAKE) calls and docs/remote-docs.sh
export GOOS GOARCH CGO_ENABLED BINSFX SRCBINDIR
-define go-install
- $(GO) install ${1}@latest
-endef
-
# Need to use CGO for mDNS resolution, but cross builds need CGO disabled
# See https://github.com/golang/go/issues/12524 for details
DARWIN_GCO := 0
@@ -864,7 +860,7 @@ install.tools: .install.goimports .install.gitvalidation .install.md2man .instal
.install.goimports: .gopathok
if [ ! -x "$(GOBIN)/goimports" ]; then \
- $(call go-install,golang.org/x/tools/cmd/goimports); \
+ $(GO) install ./vendor/golang.org/x/tools/cmd/goimports ; \
fi
touch .install.goimports
@@ -877,7 +873,7 @@ install.tools: .install.goimports .install.gitvalidation .install.md2man .instal
.PHONY: .install.gitvalidation
.install.gitvalidation: .gopathok
if [ ! -x "$(GOBIN)/git-validation" ]; then \
- $(call go-install,github.com/vbatts/git-validation); \
+ $(GO) install ./vendor/github.com/vbatts/git-validation ; \
fi
.PHONY: .install.golangci-lint
@@ -897,7 +893,7 @@ install.tools: .install.goimports .install.gitvalidation .install.md2man .instal
.PHONY: .install.md2man
.install.md2man: .gopathok
if [ ! -x "$(GOMD2MAN)" ]; then \
- $(call go-install,github.com/cpuguy83/go-md2man); \
+ $(GO) install ./vendor/github.com/cpuguy83/go-md2man/v2 ; \
fi
# $BUILD_TAGS variable is used in hack/golangci-lint.sh
diff --git a/go.mod b/go.mod
index 6f5941603..54c06e037 100644
--- a/go.mod
+++ b/go.mod
@@ -20,6 +20,7 @@ require (
github.com/containers/storage v1.40.0
github.com/coreos/go-systemd/v22 v22.3.2
github.com/coreos/stream-metadata-go v0.0.0-20210225230131-70edb9eb47b3
+ github.com/cpuguy83/go-md2man/v2 v2.0.2
github.com/cyphar/filepath-securejoin v0.2.3
github.com/davecgh/go-spew v1.1.1
github.com/digitalocean/go-qemu v0.0.0-20210326154740-ac9e0b687001
@@ -61,6 +62,7 @@ require (
github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635
github.com/uber/jaeger-client-go v2.30.0+incompatible
github.com/ulikunitz/xz v0.5.10
+ github.com/vbatts/git-validation v1.1.0
github.com/vbauerster/mpb/v7 v7.4.1
github.com/vishvananda/netlink v1.1.1-0.20220115184804-dd687eb2f2d4
go.etcd.io/bbolt v1.3.6
@@ -69,6 +71,7 @@ require (
golang.org/x/sys v0.0.0-20220422013727-9388b58f7150
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211
golang.org/x/text v0.3.7
+ golang.org/x/tools v0.1.10
google.golang.org/protobuf v1.28.0
gopkg.in/inf.v0 v0.9.1
gopkg.in/yaml.v2 v2.4.0
diff --git a/go.sum b/go.sum
index 5219dd50a..ee417adcf 100644
--- a/go.sum
+++ b/go.sum
@@ -411,10 +411,13 @@ github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfc
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/coreos/stream-metadata-go v0.0.0-20210225230131-70edb9eb47b3 h1:0JspqV66RwYqYfvi8lCUoL5zUZMh9uN4hx/J5+NRXIE=
github.com/coreos/stream-metadata-go v0.0.0-20210225230131-70edb9eb47b3/go.mod h1:RTjQyHgO/G37oJ3qnqYK6Z4TPZ5EsaabOtfMjVXmgko=
+github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w=
+github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/creack/pty v1.1.11 h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw=
@@ -792,6 +795,8 @@ github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerX
github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
+github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
+github.com/hashicorp/go-version v1.2.1 h1:zEfKbn2+PDgroKdiOzqiE8rsmLqU2uwi5PB5pBJ3TkI=
github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
@@ -1230,8 +1235,10 @@ github.com/rootless-containers/rootlesskit v1.0.1 h1:jepqW1txFSowKSMAEkVhWH3Oa1T
github.com/rootless-containers/rootlesskit v1.0.1/go.mod h1:t2UAiYagxrJ+wmpFAUIZPcqsm4k2B7ve6g7lILKbloc=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
+github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww=
github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryancurrah/gomodguard v1.2.3/go.mod h1:rYbA/4Tg5c54mV1sv4sQTP5WOPBcoLtnBZ7/TEhXAbg=
github.com/ryanrolds/sqlclosecheck v0.3.0/go.mod h1:1gREqxyTGR3lVtpngyFo3hZAgk0KCtEdgEkHwDbigdA=
@@ -1380,6 +1387,8 @@ github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyC
github.com/valyala/fasthttp v1.30.0/go.mod h1:2rsYD01CKFrjjsvFxx75KlEUNpWNBY9JWD3K/7o2Cus=
github.com/valyala/quicktemplate v1.7.0/go.mod h1:sqKJnoaOF88V07vkO+9FL8fb9uZg/VPSJnLYn+LmLk8=
github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc=
+github.com/vbatts/git-validation v1.1.0 h1:umNoFGOOQSpRdD2sfA9vW0o3+8VwMs/zePZ0pnDy2cs=
+github.com/vbatts/git-validation v1.1.0/go.mod h1:QyK3uQnRYWGt/5ezd8kcpwPrm6zn9tNM/KtozbpfU6k=
github.com/vbatts/tar-split v0.11.2 h1:Via6XqJr0hceW4wff3QRzD5gAk/tatMw/4ZA7cTlIME=
github.com/vbatts/tar-split v0.11.2/go.mod h1:vV3ZuO2yWSVsz+pfFzDG/upWH1JhjOiEaWq6kXyQ3VI=
github.com/vbauerster/mpb/v7 v7.3.2/go.mod h1:wfxIZcOJq/bG1/lAtfzMXcOiSvbqVi/5GX5WCSi+IsA=
@@ -1556,6 +1565,7 @@ golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
+golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 h1:kQgndtyPBW/JIYERgdxfwMYh3AVStj88WQTlNDi2a+o=
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
diff --git a/test/tools/tools.go b/test/tools/tools.go
new file mode 100644
index 000000000..5bd1183a5
--- /dev/null
+++ b/test/tools/tools.go
@@ -0,0 +1,13 @@
+//go:build tools
+// +build tools
+
+package tools
+
+// Importing the packages here will allow to vendor those via
+// `go mod vendor`.
+
+import (
+ _ "github.com/cpuguy83/go-md2man/v2"
+ _ "github.com/vbatts/git-validation"
+ _ "golang.org/x/tools/cmd/goimports"
+)
diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/.gitignore b/vendor/github.com/cpuguy83/go-md2man/v2/.gitignore
new file mode 100644
index 000000000..30f97c3d7
--- /dev/null
+++ b/vendor/github.com/cpuguy83/go-md2man/v2/.gitignore
@@ -0,0 +1,2 @@
+go-md2man
+bin
diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/Dockerfile b/vendor/github.com/cpuguy83/go-md2man/v2/Dockerfile
new file mode 100644
index 000000000..7181c5306
--- /dev/null
+++ b/vendor/github.com/cpuguy83/go-md2man/v2/Dockerfile
@@ -0,0 +1,20 @@
+ARG GO_VERSION=1.18
+ARG GO_IMAGE=golang:${GO_VERSION}
+
+FROM --platform=$BUILDPLATFORM $GO_IMAGE AS build
+COPY . /go/src/github.com/cpuguy83/go-md2man
+WORKDIR /go/src/github.com/cpuguy83/go-md2man
+ARG TARGETOS
+ARG TARGETARCH
+ARG TARGETVARIANT
+RUN \
+ export GOOS="${TARGETOS}"; \
+ export GOARCH="${TARGETARCH}"; \
+ if [ "${TARGETARCH}" = "arm" ] && [ "${TARGETVARIANT}" ]; then \
+ export GOARM="${TARGETVARIANT#v}"; \
+ fi; \
+ CGO_ENABLED=0 go build
+
+FROM scratch
+COPY --from=build /go/src/github.com/cpuguy83/go-md2man/go-md2man /go-md2man
+ENTRYPOINT ["/go-md2man"]
diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/LICENSE.md b/vendor/github.com/cpuguy83/go-md2man/v2/LICENSE.md
new file mode 100644
index 000000000..1cade6cef
--- /dev/null
+++ b/vendor/github.com/cpuguy83/go-md2man/v2/LICENSE.md
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Brian Goff
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/Makefile b/vendor/github.com/cpuguy83/go-md2man/v2/Makefile
new file mode 100644
index 000000000..437fc9997
--- /dev/null
+++ b/vendor/github.com/cpuguy83/go-md2man/v2/Makefile
@@ -0,0 +1,35 @@
+GO111MODULE ?= on
+LINTER_BIN ?= golangci-lint
+
+export GO111MODULE
+
+.PHONY:
+build: bin/go-md2man
+
+.PHONY: clean
+clean:
+ @rm -rf bin/*
+
+.PHONY: test
+test:
+ @go test $(TEST_FLAGS) ./...
+
+bin/go-md2man: actual_build_flags := $(BUILD_FLAGS) -o bin/go-md2man
+bin/go-md2man: bin
+ @CGO_ENABLED=0 go build $(actual_build_flags)
+
+bin:
+ @mkdir ./bin
+
+.PHONY: mod
+mod:
+ @go mod tidy
+
+.PHONY: check-mod
+check-mod: # verifies that module changes for go.mod and go.sum are checked in
+ @hack/ci/check_mods.sh
+
+.PHONY: vendor
+vendor: mod
+ @go mod vendor -v
+
diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/README.md b/vendor/github.com/cpuguy83/go-md2man/v2/README.md
new file mode 100644
index 000000000..0e30d3414
--- /dev/null
+++ b/vendor/github.com/cpuguy83/go-md2man/v2/README.md
@@ -0,0 +1,15 @@
+go-md2man
+=========
+
+Converts markdown into roff (man pages).
+
+Uses blackfriday to process markdown into man pages.
+
+### Usage
+
+./md2man -in /path/to/markdownfile.md -out /manfile/output/path
+
+### How to contribute
+
+We use go modules to manage dependencies.
+As such you must be using at least go1.11.
diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/go-md2man.1.md b/vendor/github.com/cpuguy83/go-md2man/v2/go-md2man.1.md
new file mode 100644
index 000000000..16d1133aa
--- /dev/null
+++ b/vendor/github.com/cpuguy83/go-md2man/v2/go-md2man.1.md
@@ -0,0 +1,23 @@
+go-md2man 1 "January 2015" go-md2man "User Manual"
+==================================================
+
+# NAME
+ go-md2man - Convert markdown files into manpages
+
+# SYNOPSIS
+ go-md2man -in=[/path/to/md/file] -out=[/path/to/output]
+
+# Description
+ go-md2man converts standard markdown formatted documents into manpages. It is
+ written purely in Go so as to reduce dependencies on 3rd party libs.
+
+ By default, the input is stdin and the output is stdout.
+
+# Example
+ Convert the markdown file "go-md2man.1.md" into a manpage.
+
+ go-md2man -in=go-md2man.1.md -out=go-md2man.1.out
+
+# HISTORY
+ January 2015, Originally compiled by Brian Goff( cpuguy83@gmail.com )
+
diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/go.mod b/vendor/github.com/cpuguy83/go-md2man/v2/go.mod
new file mode 100644
index 000000000..0bc888da0
--- /dev/null
+++ b/vendor/github.com/cpuguy83/go-md2man/v2/go.mod
@@ -0,0 +1,5 @@
+module github.com/cpuguy83/go-md2man/v2
+
+go 1.11
+
+require github.com/russross/blackfriday/v2 v2.1.0
diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/go.sum b/vendor/github.com/cpuguy83/go-md2man/v2/go.sum
new file mode 100644
index 000000000..502a072c0
--- /dev/null
+++ b/vendor/github.com/cpuguy83/go-md2man/v2/go.sum
@@ -0,0 +1,2 @@
+github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
+github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/md2man.go b/vendor/github.com/cpuguy83/go-md2man/v2/md2man.go
new file mode 100644
index 000000000..6078864a3
--- /dev/null
+++ b/vendor/github.com/cpuguy83/go-md2man/v2/md2man.go
@@ -0,0 +1,51 @@
+package main
+
+import (
+ "flag"
+ "fmt"
+ "io/ioutil"
+ "os"
+
+ "github.com/cpuguy83/go-md2man/v2/md2man"
+)
+
+var inFilePath = flag.String("in", "", "Path to file to be processed (default: stdin)")
+var outFilePath = flag.String("out", "", "Path to output processed file (default: stdout)")
+
+func main() {
+ var err error
+ flag.Parse()
+
+ inFile := os.Stdin
+ if *inFilePath != "" {
+ inFile, err = os.Open(*inFilePath)
+ if err != nil {
+ fmt.Println(err)
+ os.Exit(1)
+ }
+ }
+ defer inFile.Close() // nolint: errcheck
+
+ doc, err := ioutil.ReadAll(inFile)
+ if err != nil {
+ fmt.Println(err)
+ os.Exit(1)
+ }
+
+ out := md2man.Render(doc)
+
+ outFile := os.Stdout
+ if *outFilePath != "" {
+ outFile, err = os.Create(*outFilePath)
+ if err != nil {
+ fmt.Println(err)
+ os.Exit(1)
+ }
+ defer outFile.Close() // nolint: errcheck
+ }
+ _, err = outFile.Write(out)
+ if err != nil {
+ fmt.Println(err)
+ os.Exit(1)
+ }
+}
diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/md2man/md2man.go b/vendor/github.com/cpuguy83/go-md2man/v2/md2man/md2man.go
new file mode 100644
index 000000000..b48005673
--- /dev/null
+++ b/vendor/github.com/cpuguy83/go-md2man/v2/md2man/md2man.go
@@ -0,0 +1,14 @@
+package md2man
+
+import (
+ "github.com/russross/blackfriday/v2"
+)
+
+// Render converts a markdown document into a roff formatted document.
+func Render(doc []byte) []byte {
+ renderer := NewRoffRenderer()
+
+ return blackfriday.Run(doc,
+ []blackfriday.Option{blackfriday.WithRenderer(renderer),
+ blackfriday.WithExtensions(renderer.GetExtensions())}...)
+}
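For reference, a minimal, hypothetical caller of the vendored Render entry point defined above could look like the following sketch (the markdown input is made up for illustration):

```go
package main

import (
	"fmt"

	"github.com/cpuguy83/go-md2man/v2/md2man"
)

func main() {
	// Hypothetical markdown input; Render returns the roff (man page) output.
	doc := []byte("example 1 \"May 2022\" example \"User Manual\"\n" +
		"=====\n\n# NAME\nexample - print a greeting\n")
	fmt.Print(string(md2man.Render(doc)))
}
```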
diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/md2man/roff.go b/vendor/github.com/cpuguy83/go-md2man/v2/md2man/roff.go
new file mode 100644
index 000000000..be2b34360
--- /dev/null
+++ b/vendor/github.com/cpuguy83/go-md2man/v2/md2man/roff.go
@@ -0,0 +1,336 @@
+package md2man
+
+import (
+ "fmt"
+ "io"
+ "os"
+ "strings"
+
+ "github.com/russross/blackfriday/v2"
+)
+
+// roffRenderer implements the blackfriday.Renderer interface for creating
+// roff format (manpages) from markdown text
+type roffRenderer struct {
+ extensions blackfriday.Extensions
+ listCounters []int
+ firstHeader bool
+ firstDD bool
+ listDepth int
+}
+
+const (
+ titleHeader = ".TH "
+ topLevelHeader = "\n\n.SH "
+ secondLevelHdr = "\n.SH "
+ otherHeader = "\n.SS "
+ crTag = "\n"
+ emphTag = "\\fI"
+ emphCloseTag = "\\fP"
+ strongTag = "\\fB"
+ strongCloseTag = "\\fP"
+ breakTag = "\n.br\n"
+ paraTag = "\n.PP\n"
+ hruleTag = "\n.ti 0\n\\l'\\n(.lu'\n"
+ linkTag = "\n\\[la]"
+ linkCloseTag = "\\[ra]"
+ codespanTag = "\\fB\\fC"
+ codespanCloseTag = "\\fR"
+ codeTag = "\n.PP\n.RS\n\n.nf\n"
+ codeCloseTag = "\n.fi\n.RE\n"
+ quoteTag = "\n.PP\n.RS\n"
+ quoteCloseTag = "\n.RE\n"
+ listTag = "\n.RS\n"
+ listCloseTag = "\n.RE\n"
+ dtTag = "\n.TP\n"
+ dd2Tag = "\n"
+ tableStart = "\n.TS\nallbox;\n"
+ tableEnd = ".TE\n"
+ tableCellStart = "T{\n"
+ tableCellEnd = "\nT}\n"
+)
+
+// NewRoffRenderer creates a new blackfriday Renderer for generating roff documents
+// from markdown
+func NewRoffRenderer() *roffRenderer { // nolint: golint
+ var extensions blackfriday.Extensions
+
+ extensions |= blackfriday.NoIntraEmphasis
+ extensions |= blackfriday.Tables
+ extensions |= blackfriday.FencedCode
+ extensions |= blackfriday.SpaceHeadings
+ extensions |= blackfriday.Footnotes
+ extensions |= blackfriday.Titleblock
+ extensions |= blackfriday.DefinitionLists
+ return &roffRenderer{
+ extensions: extensions,
+ }
+}
+
+// GetExtensions returns the list of extensions used by this renderer implementation
+func (r *roffRenderer) GetExtensions() blackfriday.Extensions {
+ return r.extensions
+}
+
+// RenderHeader handles outputting the header at document start
+func (r *roffRenderer) RenderHeader(w io.Writer, ast *blackfriday.Node) {
+ // disable hyphenation
+ out(w, ".nh\n")
+}
+
+// RenderFooter handles outputting the footer at the document end; the roff
+// renderer has no footer information
+func (r *roffRenderer) RenderFooter(w io.Writer, ast *blackfriday.Node) {
+}
+
+// RenderNode is called for each node in a markdown document; based on the node
+// type the equivalent roff output is sent to the writer
+func (r *roffRenderer) RenderNode(w io.Writer, node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
+
+ var walkAction = blackfriday.GoToNext
+
+ switch node.Type {
+ case blackfriday.Text:
+ escapeSpecialChars(w, node.Literal)
+ case blackfriday.Softbreak:
+ out(w, crTag)
+ case blackfriday.Hardbreak:
+ out(w, breakTag)
+ case blackfriday.Emph:
+ if entering {
+ out(w, emphTag)
+ } else {
+ out(w, emphCloseTag)
+ }
+ case blackfriday.Strong:
+ if entering {
+ out(w, strongTag)
+ } else {
+ out(w, strongCloseTag)
+ }
+ case blackfriday.Link:
+ if !entering {
+ out(w, linkTag+string(node.LinkData.Destination)+linkCloseTag)
+ }
+ case blackfriday.Image:
+ // ignore images
+ walkAction = blackfriday.SkipChildren
+ case blackfriday.Code:
+ out(w, codespanTag)
+ escapeSpecialChars(w, node.Literal)
+ out(w, codespanCloseTag)
+ case blackfriday.Document:
+ break
+ case blackfriday.Paragraph:
+ // roff .PP markers break lists
+ if r.listDepth > 0 {
+ return blackfriday.GoToNext
+ }
+ if entering {
+ out(w, paraTag)
+ } else {
+ out(w, crTag)
+ }
+ case blackfriday.BlockQuote:
+ if entering {
+ out(w, quoteTag)
+ } else {
+ out(w, quoteCloseTag)
+ }
+ case blackfriday.Heading:
+ r.handleHeading(w, node, entering)
+ case blackfriday.HorizontalRule:
+ out(w, hruleTag)
+ case blackfriday.List:
+ r.handleList(w, node, entering)
+ case blackfriday.Item:
+ r.handleItem(w, node, entering)
+ case blackfriday.CodeBlock:
+ out(w, codeTag)
+ escapeSpecialChars(w, node.Literal)
+ out(w, codeCloseTag)
+ case blackfriday.Table:
+ r.handleTable(w, node, entering)
+ case blackfriday.TableHead:
+ case blackfriday.TableBody:
+ case blackfriday.TableRow:
+ // no action as cell entries do all the nroff formatting
+ return blackfriday.GoToNext
+ case blackfriday.TableCell:
+ r.handleTableCell(w, node, entering)
+ case blackfriday.HTMLSpan:
+ // ignore other HTML tags
+ default:
+ fmt.Fprintln(os.Stderr, "WARNING: go-md2man does not handle node type "+node.Type.String())
+ }
+ return walkAction
+}
+
+func (r *roffRenderer) handleHeading(w io.Writer, node *blackfriday.Node, entering bool) {
+ if entering {
+ switch node.Level {
+ case 1:
+ if !r.firstHeader {
+ out(w, titleHeader)
+ r.firstHeader = true
+ break
+ }
+ out(w, topLevelHeader)
+ case 2:
+ out(w, secondLevelHdr)
+ default:
+ out(w, otherHeader)
+ }
+ }
+}
+
+func (r *roffRenderer) handleList(w io.Writer, node *blackfriday.Node, entering bool) {
+ openTag := listTag
+ closeTag := listCloseTag
+ if node.ListFlags&blackfriday.ListTypeDefinition != 0 {
+ // tags for definition lists handled within Item node
+ openTag = ""
+ closeTag = ""
+ }
+ if entering {
+ r.listDepth++
+ if node.ListFlags&blackfriday.ListTypeOrdered != 0 {
+ r.listCounters = append(r.listCounters, 1)
+ }
+ out(w, openTag)
+ } else {
+ if node.ListFlags&blackfriday.ListTypeOrdered != 0 {
+ r.listCounters = r.listCounters[:len(r.listCounters)-1]
+ }
+ out(w, closeTag)
+ r.listDepth--
+ }
+}
+
+func (r *roffRenderer) handleItem(w io.Writer, node *blackfriday.Node, entering bool) {
+ if entering {
+ if node.ListFlags&blackfriday.ListTypeOrdered != 0 {
+ out(w, fmt.Sprintf(".IP \"%3d.\" 5\n", r.listCounters[len(r.listCounters)-1]))
+ r.listCounters[len(r.listCounters)-1]++
+ } else if node.ListFlags&blackfriday.ListTypeTerm != 0 {
+ // DT (definition term): line just before DD (see below).
+ out(w, dtTag)
+ r.firstDD = true
+ } else if node.ListFlags&blackfriday.ListTypeDefinition != 0 {
+ // DD (definition description): line that starts with ": ".
+ //
+ // We have to distinguish between the first DD and the
+ // subsequent ones, as there should be no vertical
+ // whitespace between the DT and the first DD.
+ if r.firstDD {
+ r.firstDD = false
+ } else {
+ out(w, dd2Tag)
+ }
+ } else {
+ out(w, ".IP \\(bu 2\n")
+ }
+ } else {
+ out(w, "\n")
+ }
+}
+
+func (r *roffRenderer) handleTable(w io.Writer, node *blackfriday.Node, entering bool) {
+ if entering {
+ out(w, tableStart)
+ // call walker to count cells (and rows?) so format section can be produced
+ columns := countColumns(node)
+ out(w, strings.Repeat("l ", columns)+"\n")
+ out(w, strings.Repeat("l ", columns)+".\n")
+ } else {
+ out(w, tableEnd)
+ }
+}
+
+func (r *roffRenderer) handleTableCell(w io.Writer, node *blackfriday.Node, entering bool) {
+ if entering {
+ var start string
+ if node.Prev != nil && node.Prev.Type == blackfriday.TableCell {
+ start = "\t"
+ }
+ if node.IsHeader {
+ start += codespanTag
+ } else if nodeLiteralSize(node) > 30 {
+ start += tableCellStart
+ }
+ out(w, start)
+ } else {
+ var end string
+ if node.IsHeader {
+ end = codespanCloseTag
+ } else if nodeLiteralSize(node) > 30 {
+ end = tableCellEnd
+ }
+ if node.Next == nil && end != tableCellEnd {
+ // Last cell: need to carriage return if we are at the end of the
+ // header row and content isn't wrapped in a "tablecell"
+ end += crTag
+ }
+ out(w, end)
+ }
+}
+
+func nodeLiteralSize(node *blackfriday.Node) int {
+ total := 0
+ for n := node.FirstChild; n != nil; n = n.FirstChild {
+ total += len(n.Literal)
+ }
+ return total
+}
+
+// because roff format requires knowing the column count before outputting any table
+// data we need to walk a table tree and count the columns
+func countColumns(node *blackfriday.Node) int {
+ var columns int
+
+ node.Walk(func(node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
+ switch node.Type {
+ case blackfriday.TableRow:
+ if !entering {
+ return blackfriday.Terminate
+ }
+ case blackfriday.TableCell:
+ if entering {
+ columns++
+ }
+ default:
+ }
+ return blackfriday.GoToNext
+ })
+ return columns
+}
+
+func out(w io.Writer, output string) {
+ io.WriteString(w, output) // nolint: errcheck
+}
+
+func escapeSpecialChars(w io.Writer, text []byte) {
+ for i := 0; i < len(text); i++ {
+ // escape initial apostrophe or period
+ if len(text) >= 1 && (text[0] == '\'' || text[0] == '.') {
+ out(w, "\\&")
+ }
+
+ // directly copy normal characters
+ org := i
+
+ for i < len(text) && text[i] != '\\' {
+ i++
+ }
+ if i > org {
+ w.Write(text[org:i]) // nolint: errcheck
+ }
+
+ // escape a character
+ if i >= len(text) {
+ break
+ }
+
+ w.Write([]byte{'\\', text[i]}) // nolint: errcheck
+ }
+}
diff --git a/vendor/github.com/hashicorp/go-version/LICENSE b/vendor/github.com/hashicorp/go-version/LICENSE
new file mode 100644
index 000000000..c33dcc7c9
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-version/LICENSE
@@ -0,0 +1,354 @@
+Mozilla Public License, version 2.0
+
+1. Definitions
+
+1.1. “Contributor”
+
+ means each individual or legal entity that creates, contributes to the
+ creation of, or owns Covered Software.
+
+1.2. “Contributor Version”
+
+ means the combination of the Contributions of others (if any) used by a
+ Contributor and that particular Contributor’s Contribution.
+
+1.3. “Contribution”
+
+ means Covered Software of a particular Contributor.
+
+1.4. “Covered Software”
+
+ means Source Code Form to which the initial Contributor has attached the
+ notice in Exhibit A, the Executable Form of such Source Code Form, and
+ Modifications of such Source Code Form, in each case including portions
+ thereof.
+
+1.5. “Incompatible With Secondary Licenses”
+ means
+
+ a. that the initial Contributor has attached the notice described in
+ Exhibit B to the Covered Software; or
+
+ b. that the Covered Software was made available under the terms of version
+ 1.1 or earlier of the License, but not also under the terms of a
+ Secondary License.
+
+1.6. “Executable Form”
+
+ means any form of the work other than Source Code Form.
+
+1.7. “Larger Work”
+
+ means a work that combines Covered Software with other material, in a separate
+ file or files, that is not Covered Software.
+
+1.8. “License”
+
+ means this document.
+
+1.9. “Licensable”
+
+ means having the right to grant, to the maximum extent possible, whether at the
+ time of the initial grant or subsequently, any and all of the rights conveyed by
+ this License.
+
+1.10. “Modifications”
+
+ means any of the following:
+
+ a. any file in Source Code Form that results from an addition to, deletion
+ from, or modification of the contents of Covered Software; or
+
+ b. any new file in Source Code Form that contains any Covered Software.
+
+1.11. “Patent Claims” of a Contributor
+
+ means any patent claim(s), including without limitation, method, process,
+ and apparatus claims, in any patent Licensable by such Contributor that
+ would be infringed, but for the grant of the License, by the making,
+ using, selling, offering for sale, having made, import, or transfer of
+ either its Contributions or its Contributor Version.
+
+1.12. “Secondary License”
+
+ means either the GNU General Public License, Version 2.0, the GNU Lesser
+ General Public License, Version 2.1, the GNU Affero General Public
+ License, Version 3.0, or any later versions of those licenses.
+
+1.13. “Source Code Form”
+
+ means the form of the work preferred for making modifications.
+
+1.14. “You” (or “Your”)
+
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, “You” includes any entity that controls, is
+ controlled by, or is under common control with You. For purposes of this
+ definition, “control” means (a) the power, direct or indirect, to cause
+ the direction or management of such entity, whether by contract or
+ otherwise, or (b) ownership of more than fifty percent (50%) of the
+ outstanding shares or beneficial ownership of such entity.
+
+
+2. License Grants and Conditions
+
+2.1. Grants
+
+ Each Contributor hereby grants You a world-wide, royalty-free,
+ non-exclusive license:
+
+ a. under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or as
+ part of a Larger Work; and
+
+ b. under Patent Claims of such Contributor to make, use, sell, offer for
+ sale, have made, import, and otherwise transfer either its Contributions
+ or its Contributor Version.
+
+2.2. Effective Date
+
+ The licenses granted in Section 2.1 with respect to any Contribution become
+ effective for each Contribution on the date the Contributor first distributes
+ such Contribution.
+
+2.3. Limitations on Grant Scope
+
+ The licenses granted in this Section 2 are the only rights granted under this
+ License. No additional rights or licenses will be implied from the distribution
+ or licensing of Covered Software under this License. Notwithstanding Section
+ 2.1(b) above, no patent license is granted by a Contributor:
+
+ a. for any code that a Contributor has removed from Covered Software; or
+
+ b. for infringements caused by: (i) Your and any other third party’s
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+ c. under Patent Claims infringed by Covered Software in the absence of its
+ Contributions.
+
+ This License does not grant any rights in the trademarks, service marks, or
+ logos of any Contributor (except as may be necessary to comply with the
+ notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+ No Contributor makes additional grants as a result of Your choice to
+ distribute the Covered Software under a subsequent version of this License
+ (see Section 10.2) or under the terms of a Secondary License (if permitted
+ under the terms of Section 3.3).
+
+2.5. Representation
+
+ Each Contributor represents that the Contributor believes its Contributions
+ are its original creation(s) or it has sufficient rights to grant the
+ rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+ This License is not intended to limit any rights You have under applicable
+ copyright doctrines of fair use, fair dealing, or other equivalents.
+
+2.7. Conditions
+
+ Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
+ Section 2.1.
+
+
+3. Responsibilities
+
+3.1. Distribution of Source Form
+
+ All distribution of Covered Software in Source Code Form, including any
+ Modifications that You create or to which You contribute, must be under the
+ terms of this License. You must inform recipients that the Source Code Form
+ of the Covered Software is governed by the terms of this License, and how
+ they can obtain a copy of this License. You may not attempt to alter or
+ restrict the recipients’ rights in the Source Code Form.
+
+3.2. Distribution of Executable Form
+
+ If You distribute Covered Software in Executable Form then:
+
+ a. such Covered Software must also be made available in Source Code Form,
+ as described in Section 3.1, and You must inform recipients of the
+ Executable Form how they can obtain a copy of such Source Code Form by
+ reasonable means in a timely manner, at a charge no more than the cost
+ of distribution to the recipient; and
+
+ b. You may distribute such Executable Form under the terms of this License,
+ or sublicense it under different terms, provided that the license for
+ the Executable Form does not attempt to limit or alter the recipients’
+ rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+ You may create and distribute a Larger Work under terms of Your choice,
+ provided that You also comply with the requirements of this License for the
+ Covered Software. If the Larger Work is a combination of Covered Software
+ with a work governed by one or more Secondary Licenses, and the Covered
+ Software is not Incompatible With Secondary Licenses, this License permits
+ You to additionally distribute such Covered Software under the terms of
+ such Secondary License(s), so that the recipient of the Larger Work may, at
+ their option, further distribute the Covered Software under the terms of
+ either this License or such Secondary License(s).
+
+3.4. Notices
+
+ You may not remove or alter the substance of any license notices (including
+ copyright notices, patent notices, disclaimers of warranty, or limitations
+ of liability) contained within the Source Code Form of the Covered
+ Software, except that You may alter any license notices to the extent
+ required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+ You may choose to offer, and to charge a fee for, warranty, support,
+ indemnity or liability obligations to one or more recipients of Covered
+ Software. However, You may do so only on Your own behalf, and not on behalf
+ of any Contributor. You must make it absolutely clear that any such
+ warranty, support, indemnity, or liability obligation is offered by You
+ alone, and You hereby agree to indemnify every Contributor for any
+ liability incurred by such Contributor as a result of warranty, support,
+ indemnity or liability terms You offer. You may include additional
+ disclaimers of warranty and limitations of liability specific to any
+ jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+
+ If it is impossible for You to comply with any of the terms of this License
+ with respect to some or all of the Covered Software due to statute, judicial
+ order, or regulation then You must: (a) comply with the terms of this License
+ to the maximum extent possible; and (b) describe the limitations and the code
+ they affect. Such description must be placed in a text file included with all
+ distributions of the Covered Software under this License. Except to the
+ extent prohibited by statute or regulation, such description must be
+ sufficiently detailed for a recipient of ordinary skill to be able to
+ understand it.
+
+5. Termination
+
+5.1. The rights granted under this License will terminate automatically if You
+ fail to comply with any of its terms. However, if You become compliant,
+ then the rights granted under this License from a particular Contributor
+ are reinstated (a) provisionally, unless and until such Contributor
+ explicitly and finally terminates Your grants, and (b) on an ongoing basis,
+ if such Contributor fails to notify You of the non-compliance by some
+ reasonable means prior to 60 days after You have come back into compliance.
+ Moreover, Your grants from a particular Contributor are reinstated on an
+ ongoing basis if such Contributor notifies You of the non-compliance by
+ some reasonable means, this is the first time You have received notice of
+ non-compliance with this License from such Contributor, and You become
+ compliant prior to 30 days after Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+ infringement claim (excluding declaratory judgment actions, counter-claims,
+ and cross-claims) alleging that a Contributor Version directly or
+ indirectly infringes any patent, then the rights granted to You by any and
+ all Contributors for the Covered Software under Section 2.1 of this License
+ shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
+ license agreements (excluding distributors and resellers) which have been
+ validly granted by You or Your distributors under this License prior to
+ termination shall survive termination.
+
+6. Disclaimer of Warranty
+
+ Covered Software is provided under this License on an “as is” basis, without
+ warranty of any kind, either expressed, implied, or statutory, including,
+ without limitation, warranties that the Covered Software is free of defects,
+ merchantable, fit for a particular purpose or non-infringing. The entire
+ risk as to the quality and performance of the Covered Software is with You.
+ Should any Covered Software prove defective in any respect, You (not any
+ Contributor) assume the cost of any necessary servicing, repair, or
+ correction. This disclaimer of warranty constitutes an essential part of this
+ License. No use of any Covered Software is authorized under this License
+ except under this disclaimer.
+
+7. Limitation of Liability
+
+ Under no circumstances and under no legal theory, whether tort (including
+ negligence), contract, or otherwise, shall any Contributor, or anyone who
+ distributes Covered Software as permitted above, be liable to You for any
+ direct, indirect, special, incidental, or consequential damages of any
+ character including, without limitation, damages for lost profits, loss of
+ goodwill, work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses, even if such party shall have been
+ informed of the possibility of such damages. This limitation of liability
+ shall not apply to liability for death or personal injury resulting from such
+ party’s negligence to the extent applicable law prohibits such limitation.
+ Some jurisdictions do not allow the exclusion or limitation of incidental or
+ consequential damages, so this exclusion and limitation may not apply to You.
+
+8. Litigation
+
+ Any litigation relating to this License may be brought only in the courts of
+ a jurisdiction where the defendant maintains its principal place of business
+ and such litigation shall be governed by laws of that jurisdiction, without
+ reference to its conflict-of-law provisions. Nothing in this Section shall
+ prevent a party’s ability to bring cross-claims or counter-claims.
+
+9. Miscellaneous
+
+ This License represents the complete agreement concerning the subject matter
+ hereof. If any provision of this License is held to be unenforceable, such
+ provision shall be reformed only to the extent necessary to make it
+ enforceable. Any law or regulation which provides that the language of a
+ contract shall be construed against the drafter shall not be used to construe
+ this License against a Contributor.
+
+
+10. Versions of the License
+
+10.1. New Versions
+
+ Mozilla Foundation is the license steward. Except as provided in Section
+ 10.3, no one other than the license steward has the right to modify or
+ publish new versions of this License. Each version will be given a
+ distinguishing version number.
+
+10.2. Effect of New Versions
+
+ You may distribute the Covered Software under the terms of the version of
+ the License under which You originally received the Covered Software, or
+ under the terms of any subsequent version published by the license
+ steward.
+
+10.3. Modified Versions
+
+ If you create software not governed by this License, and you want to
+ create a new license for such software, you may create and use a modified
+ version of this License if you rename the license and remove any
+ references to the name of the license steward (except to note that such
+ modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
+ If You choose to distribute Source Code Form that is Incompatible With
+ Secondary Licenses under the terms of this version of the License, the
+ notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+
+ This Source Code Form is subject to the
+ terms of the Mozilla Public License, v.
+ 2.0. If a copy of the MPL was not
+ distributed with this file, You can
+ obtain one at
+ http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular file, then
+You may include the notice in a location (such as a LICENSE file in a relevant
+directory) where a recipient would be likely to look for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - “Incompatible With Secondary Licenses” Notice
+
+ This Source Code Form is “Incompatible
+ With Secondary Licenses”, as defined by
+ the Mozilla Public License, v. 2.0.
+
diff --git a/vendor/github.com/hashicorp/go-version/README.md b/vendor/github.com/hashicorp/go-version/README.md
new file mode 100644
index 000000000..851a337be
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-version/README.md
@@ -0,0 +1,66 @@
+# Versioning Library for Go
+[![Build Status](https://circleci.com/gh/hashicorp/go-version/tree/master.svg?style=svg)](https://circleci.com/gh/hashicorp/go-version/tree/master)
+[![GoDoc](https://godoc.org/github.com/hashicorp/go-version?status.svg)](https://godoc.org/github.com/hashicorp/go-version)
+
+go-version is a library for parsing versions and version constraints,
+and verifying versions against a set of constraints. go-version
+can sort a collection of versions properly, handles prerelease/beta
+versions, can increment versions, etc.
+
+Versions used with go-version must follow [SemVer](http://semver.org/).
+
+## Installation and Usage
+
+Package documentation can be found on
+[GoDoc](http://godoc.org/github.com/hashicorp/go-version).
+
+Installation can be done with a normal `go get`:
+
+```
+$ go get github.com/hashicorp/go-version
+```
+
+#### Version Parsing and Comparison
+
+```go
+v1, err := version.NewVersion("1.2")
+v2, err := version.NewVersion("1.5+metadata")
+
+// Comparison example. There is also GreaterThan, Equal, and just
+// a simple Compare that returns an int allowing easy >=, <=, etc.
+if v1.LessThan(v2) {
+ fmt.Printf("%s is less than %s", v1, v2)
+}
+```
+
+#### Version Constraints
+
+```go
+v1, err := version.NewVersion("1.2")
+
+// Constraints example.
+constraints, err := version.NewConstraint(">= 1.0, < 1.4")
+if constraints.Check(v1) {
+ fmt.Printf("%s satisfies constraints %s", v1, constraints)
+}
+```
+
+#### Version Sorting
+
+```go
+versionsRaw := []string{"1.1", "0.7.1", "1.4-beta", "1.4", "2"}
+versions := make([]*version.Version, len(versionsRaw))
+for i, raw := range versionsRaw {
+ v, _ := version.NewVersion(raw)
+ versions[i] = v
+}
+
+// After this, the versions are properly sorted
+sort.Sort(version.Collection(versions))
+```
+
+## Issues and Contributing
+
+If you find an issue with this library, please report an issue. If you'd
+like, we welcome any contributions. Fork this library and submit a pull
+request.
diff --git a/vendor/github.com/hashicorp/go-version/constraint.go b/vendor/github.com/hashicorp/go-version/constraint.go
new file mode 100644
index 000000000..d05575961
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-version/constraint.go
@@ -0,0 +1,204 @@
+package version
+
+import (
+ "fmt"
+ "reflect"
+ "regexp"
+ "strings"
+)
+
+// Constraint represents a single constraint for a version, such as
+// ">= 1.0".
+type Constraint struct {
+ f constraintFunc
+ check *Version
+ original string
+}
+
+// Constraints is a slice of constraints. We make a custom type so that
+// we can add methods to it.
+type Constraints []*Constraint
+
+type constraintFunc func(v, c *Version) bool
+
+var constraintOperators map[string]constraintFunc
+
+var constraintRegexp *regexp.Regexp
+
+func init() {
+ constraintOperators = map[string]constraintFunc{
+ "": constraintEqual,
+ "=": constraintEqual,
+ "!=": constraintNotEqual,
+ ">": constraintGreaterThan,
+ "<": constraintLessThan,
+ ">=": constraintGreaterThanEqual,
+ "<=": constraintLessThanEqual,
+ "~>": constraintPessimistic,
+ }
+
+ ops := make([]string, 0, len(constraintOperators))
+ for k := range constraintOperators {
+ ops = append(ops, regexp.QuoteMeta(k))
+ }
+
+ constraintRegexp = regexp.MustCompile(fmt.Sprintf(
+ `^\s*(%s)\s*(%s)\s*$`,
+ strings.Join(ops, "|"),
+ VersionRegexpRaw))
+}
+
+// NewConstraint will parse one or more constraints from the given
+// constraint string. The string must be a comma-separated list of
+// constraints.
+func NewConstraint(v string) (Constraints, error) {
+ vs := strings.Split(v, ",")
+ result := make([]*Constraint, len(vs))
+ for i, single := range vs {
+ c, err := parseSingle(single)
+ if err != nil {
+ return nil, err
+ }
+
+ result[i] = c
+ }
+
+ return Constraints(result), nil
+}
+
+// Check tests if a version satisfies all the constraints.
+func (cs Constraints) Check(v *Version) bool {
+ for _, c := range cs {
+ if !c.Check(v) {
+ return false
+ }
+ }
+
+ return true
+}
+
+// Returns the string format of the constraints
+func (cs Constraints) String() string {
+ csStr := make([]string, len(cs))
+ for i, c := range cs {
+ csStr[i] = c.String()
+ }
+
+ return strings.Join(csStr, ",")
+}
+
+// Check tests if a constraint is validated by the given version.
+func (c *Constraint) Check(v *Version) bool {
+ return c.f(v, c.check)
+}
+
+func (c *Constraint) String() string {
+ return c.original
+}
+
+func parseSingle(v string) (*Constraint, error) {
+ matches := constraintRegexp.FindStringSubmatch(v)
+ if matches == nil {
+ return nil, fmt.Errorf("Malformed constraint: %s", v)
+ }
+
+ check, err := NewVersion(matches[2])
+ if err != nil {
+ return nil, err
+ }
+
+ return &Constraint{
+ f: constraintOperators[matches[1]],
+ check: check,
+ original: v,
+ }, nil
+}
+
+func prereleaseCheck(v, c *Version) bool {
+ switch vPre, cPre := v.Prerelease() != "", c.Prerelease() != ""; {
+ case cPre && vPre:
+ // A constraint with a pre-release can only match a pre-release version
+ // with the same base segments.
+ return reflect.DeepEqual(c.Segments64(), v.Segments64())
+
+ case !cPre && vPre:
+ // A constraint without a pre-release can only match a version without a
+ // pre-release.
+ return false
+
+ case cPre && !vPre:
+ // OK, except with the pessimistic operator
+ case !cPre && !vPre:
+ // OK
+ }
+ return true
+}
+
+//-------------------------------------------------------------------
+// Constraint functions
+//-------------------------------------------------------------------
+
+func constraintEqual(v, c *Version) bool {
+ return v.Equal(c)
+}
+
+func constraintNotEqual(v, c *Version) bool {
+ return !v.Equal(c)
+}
+
+func constraintGreaterThan(v, c *Version) bool {
+ return prereleaseCheck(v, c) && v.Compare(c) == 1
+}
+
+func constraintLessThan(v, c *Version) bool {
+ return prereleaseCheck(v, c) && v.Compare(c) == -1
+}
+
+func constraintGreaterThanEqual(v, c *Version) bool {
+ return prereleaseCheck(v, c) && v.Compare(c) >= 0
+}
+
+func constraintLessThanEqual(v, c *Version) bool {
+ return prereleaseCheck(v, c) && v.Compare(c) <= 0
+}
+
+func constraintPessimistic(v, c *Version) bool {
+ // Using a pessimistic constraint with a pre-release, restricts versions to pre-releases
+ if !prereleaseCheck(v, c) || (c.Prerelease() != "" && v.Prerelease() == "") {
+ return false
+ }
+
+ // If the version being checked is naturally less than the constraint, then there
+ // is no way for the version to be valid against the constraint
+ if v.LessThan(c) {
+ return false
+ }
+ // We'll use this more than once, so grab the length now so it's a little cleaner
+ // to write the later checks
+ cs := len(c.segments)
+
+ // If the version being checked has less specificity than the constraint, then there
+ // is no way for the version to be valid against the constraint
+ if cs > len(v.segments) {
+ return false
+ }
+
+ // Check the segments in the constraint against those in the version. If the version
+ // being checked, at any point, does not have the same values in each index of the
+ // constraints segments, then it cannot be valid against the constraint.
+ for i := 0; i < c.si-1; i++ {
+ if v.segments[i] != c.segments[i] {
+ return false
+ }
+ }
+
+ // Check the last part of the segment in the constraint. If the version segment at
+ // this index is less than the constraints segment at this index, then it cannot
+ // be valid against the constraint
+ if c.segments[cs-1] > v.segments[cs-1] {
+ return false
+ }
+
+ // If nothing has rejected the version by now, it's valid
+ return true
+}
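A short, hypothetical sketch of how the constraint API defined above is used (NewConstraint, Constraints.Check), including the pessimistic `~>` operator implemented by constraintPessimistic; the version and constraint strings are illustrative only:

```go
package main

import (
	"fmt"

	version "github.com/hashicorp/go-version"
)

func main() {
	v := version.Must(version.NewVersion("1.2.7"))

	// "~> 1.2" pins the leading segments and lets the last given one float:
	// it accepts 1.2.x and 1.9.0 but rejects 2.0.0 (see constraintPessimistic).
	c, err := version.NewConstraint(">= 1.0, ~> 1.2")
	if err != nil {
		panic(err)
	}
	fmt.Println(c.Check(v)) // true
}
```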
diff --git a/vendor/github.com/hashicorp/go-version/go.mod b/vendor/github.com/hashicorp/go-version/go.mod
new file mode 100644
index 000000000..f5285555f
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-version/go.mod
@@ -0,0 +1 @@
+module github.com/hashicorp/go-version
diff --git a/vendor/github.com/hashicorp/go-version/version.go b/vendor/github.com/hashicorp/go-version/version.go
new file mode 100644
index 000000000..09703e8e6
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-version/version.go
@@ -0,0 +1,384 @@
+package version
+
+import (
+ "bytes"
+ "fmt"
+ "reflect"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+// The compiled regular expression used to test the validity of a version.
+var (
+ versionRegexp *regexp.Regexp
+ semverRegexp *regexp.Regexp
+)
+
+// The raw regular expression string used for testing the validity
+// of a version.
+const (
+ VersionRegexpRaw string = `v?([0-9]+(\.[0-9]+)*?)` +
+ `(-([0-9]+[0-9A-Za-z\-~]*(\.[0-9A-Za-z\-~]+)*)|(-?([A-Za-z\-~]+[0-9A-Za-z\-~]*(\.[0-9A-Za-z\-~]+)*)))?` +
+ `(\+([0-9A-Za-z\-~]+(\.[0-9A-Za-z\-~]+)*))?` +
+ `?`
+
+ // SemverRegexpRaw requires a separator between version and prerelease
+ SemverRegexpRaw string = `v?([0-9]+(\.[0-9]+)*?)` +
+ `(-([0-9]+[0-9A-Za-z\-~]*(\.[0-9A-Za-z\-~]+)*)|(-([A-Za-z\-~]+[0-9A-Za-z\-~]*(\.[0-9A-Za-z\-~]+)*)))?` +
+ `(\+([0-9A-Za-z\-~]+(\.[0-9A-Za-z\-~]+)*))?` +
+ `?`
+)
+
+// Version represents a single version.
+type Version struct {
+ metadata string
+ pre string
+ segments []int64
+ si int
+ original string
+}
+
+func init() {
+ versionRegexp = regexp.MustCompile("^" + VersionRegexpRaw + "$")
+ semverRegexp = regexp.MustCompile("^" + SemverRegexpRaw + "$")
+}
+
+// NewVersion parses the given version and returns a new
+// Version.
+func NewVersion(v string) (*Version, error) {
+ return newVersion(v, versionRegexp)
+}
+
+// NewSemver parses the given version and returns a new
+// Version that adheres strictly to SemVer specs
+// https://semver.org/
+func NewSemver(v string) (*Version, error) {
+ return newVersion(v, semverRegexp)
+}
+
+func newVersion(v string, pattern *regexp.Regexp) (*Version, error) {
+ matches := pattern.FindStringSubmatch(v)
+ if matches == nil {
+ return nil, fmt.Errorf("Malformed version: %s", v)
+ }
+ segmentsStr := strings.Split(matches[1], ".")
+ segments := make([]int64, len(segmentsStr))
+ si := 0
+ for i, str := range segmentsStr {
+ val, err := strconv.ParseInt(str, 10, 64)
+ if err != nil {
+ return nil, fmt.Errorf(
+ "Error parsing version: %s", err)
+ }
+
+ segments[i] = int64(val)
+ si++
+ }
+
+ // Even though we could support more than three segments, if we
+ // got fewer than three, pad it with 0s. This is to cover the basic
+ // default use case of semver, which is MAJOR.MINOR.PATCH at the minimum
+ for i := len(segments); i < 3; i++ {
+ segments = append(segments, 0)
+ }
+
+ pre := matches[7]
+ if pre == "" {
+ pre = matches[4]
+ }
+
+ return &Version{
+ metadata: matches[10],
+ pre: pre,
+ segments: segments,
+ si: si,
+ original: v,
+ }, nil
+}
+
+// Must is a helper that wraps a call to a function returning (*Version, error)
+// and panics if error is non-nil.
+func Must(v *Version, err error) *Version {
+ if err != nil {
+ panic(err)
+ }
+
+ return v
+}
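+
+// A minimal usage sketch for the constructors above (fmt is assumed on the
+// caller's side):
+//
+//  v1 := version.Must(version.NewVersion("1.2.3"))
+//  v2 := version.Must(version.NewSemver("1.2.4-beta"))
+//  fmt.Println(v1.LessThan(v2)) // true: 1.2.3 sorts before 1.2.4-beta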
+
+// Compare compares this version to another version. This
+// returns -1, 0, or 1 if this version is smaller, equal,
+// or larger than the other version, respectively.
+//
+// If you want boolean results, use the LessThan, Equal,
+// GreaterThan, GreaterThanOrEqual or LessThanOrEqual methods.
+func (v *Version) Compare(other *Version) int {
+ // A quick, efficient equality check
+ if v.String() == other.String() {
+ return 0
+ }
+
+ segmentsSelf := v.Segments64()
+ segmentsOther := other.Segments64()
+
+ // If the segments are the same, we must compare on prerelease info
+ if reflect.DeepEqual(segmentsSelf, segmentsOther) {
+ preSelf := v.Prerelease()
+ preOther := other.Prerelease()
+ if preSelf == "" && preOther == "" {
+ return 0
+ }
+ if preSelf == "" {
+ return 1
+ }
+ if preOther == "" {
+ return -1
+ }
+
+ return comparePrereleases(preSelf, preOther)
+ }
+
+ // Get the highest specificity (hS), or if they're equal, just use segmentSelf length
+ lenSelf := len(segmentsSelf)
+ lenOther := len(segmentsOther)
+ hS := lenSelf
+ if lenSelf < lenOther {
+ hS = lenOther
+ }
+ // Compare the segments
+ // Because a constraint could have more/less specificity than the version it's
+ // checking, we need to account for a lopsided or jagged comparison
+ for i := 0; i < hS; i++ {
+ if i > lenSelf-1 {
+ // This means Self had the lower specificity
+ // Check to see if the remaining segments in Other are all zeros
+ if !allZero(segmentsOther[i:]) {
+ // if not, it means that Other has to be greater than Self
+ return -1
+ }
+ break
+ } else if i > lenOther-1 {
+ // This means Other had the lower specificity
+ // Check to see if the remaining segments in Self are all zeros
+ if !allZero(segmentsSelf[i:]) {
+ // if not, it means that Self has to be greater than Other
+ return 1
+ }
+ break
+ }
+ lhs := segmentsSelf[i]
+ rhs := segmentsOther[i]
+ if lhs == rhs {
+ continue
+ } else if lhs < rhs {
+ return -1
+ }
+ // Otherwise, lhs was greater than rhs; they're not equal
+ return 1
+ }
+
+ // if we got this far, they're equal
+ return 0
+}
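+
+// For instance (sketch): "1.2" and "1.2.0" compare as equal, while a
+// pre-release sorts before its corresponding release:
+//
+//  a := version.Must(version.NewVersion("1.2.3-beta"))
+//  b := version.Must(version.NewVersion("1.2.3"))
+//  a.Compare(b) // -1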
+
+func allZero(segs []int64) bool {
+ for _, s := range segs {
+ if s != 0 {
+ return false
+ }
+ }
+ return true
+}
+
+func comparePart(preSelf string, preOther string) int {
+ if preSelf == preOther {
+ return 0
+ }
+
+ var selfInt int64
+ selfNumeric := true
+ selfInt, err := strconv.ParseInt(preSelf, 10, 64)
+ if err != nil {
+ selfNumeric = false
+ }
+
+ var otherInt int64
+ otherNumeric := true
+ otherInt, err = strconv.ParseInt(preOther, 10, 64)
+ if err != nil {
+ otherNumeric = false
+ }
+
+ // if a part is empty, we use the other to decide
+ if preSelf == "" {
+ if otherNumeric {
+ return -1
+ }
+ return 1
+ }
+
+ if preOther == "" {
+ if selfNumeric {
+ return 1
+ }
+ return -1
+ }
+
+ if selfNumeric && !otherNumeric {
+ return -1
+ } else if !selfNumeric && otherNumeric {
+ return 1
+ } else if !selfNumeric && !otherNumeric && preSelf > preOther {
+ return 1
+ } else if selfInt > otherInt {
+ return 1
+ }
+
+ return -1
+}
+
+func comparePrereleases(v string, other string) int {
+ // the same pre release!
+ if v == other {
+ return 0
+ }
+
+ // split both pre-releases to analyse their parts
+ selfPreReleaseMeta := strings.Split(v, ".")
+ otherPreReleaseMeta := strings.Split(other, ".")
+
+ selfPreReleaseLen := len(selfPreReleaseMeta)
+ otherPreReleaseLen := len(otherPreReleaseMeta)
+
+ biggestLen := otherPreReleaseLen
+ if selfPreReleaseLen > otherPreReleaseLen {
+ biggestLen = selfPreReleaseLen
+ }
+
+ // loop over the parts to find the first difference
+ for i := 0; i < biggestLen; i = i + 1 {
+ partSelfPre := ""
+ if i < selfPreReleaseLen {
+ partSelfPre = selfPreReleaseMeta[i]
+ }
+
+ partOtherPre := ""
+ if i < otherPreReleaseLen {
+ partOtherPre = otherPreReleaseMeta[i]
+ }
+
+ compare := comparePart(partSelfPre, partOtherPre)
+ // if the parts are equal, continue the loop
+ if compare != 0 {
+ return compare
+ }
+ }
+
+ return 0
+}
+
+// Equal tests if two versions are equal.
+func (v *Version) Equal(o *Version) bool {
+ if v == nil || o == nil {
+ return v == o
+ }
+
+ return v.Compare(o) == 0
+}
+
+// GreaterThan tests if this version is greater than another version.
+func (v *Version) GreaterThan(o *Version) bool {
+ return v.Compare(o) > 0
+}
+
+// GreaterThanOrEqual tests if this version is greater than or equal to another version.
+func (v *Version) GreaterThanOrEqual(o *Version) bool {
+ return v.Compare(o) >= 0
+}
+
+// LessThan tests if this version is less than another version.
+func (v *Version) LessThan(o *Version) bool {
+ return v.Compare(o) < 0
+}
+
+// LessThanOrEqual tests if this version is less than or equal to another version.
+func (v *Version) LessThanOrEqual(o *Version) bool {
+ return v.Compare(o) <= 0
+}
+
+// Metadata returns any metadata that was part of the version
+// string.
+//
+// Metadata is anything that comes after the "+" in the version.
+// For example, with "1.2.3+beta", the metadata is "beta".
+func (v *Version) Metadata() string {
+ return v.metadata
+}
+
+// Prerelease returns any prerelease data that is part of the version,
+// or blank if there is no prerelease data.
+//
+// Prerelease information is anything that comes after the "-" in the
+// version (but before any metadata). For example, with "1.2.3-beta",
+// the prerelease information is "beta".
+func (v *Version) Prerelease() string {
+ return v.pre
+}
+
+// Segments returns the numeric segments of the version as a slice of ints.
+//
+// This excludes any metadata or pre-release information. For example,
+// for a version "1.2.3-beta", segments will return a slice of
+// 1, 2, 3.
+func (v *Version) Segments() []int {
+ segmentSlice := make([]int, len(v.segments))
+ for i, v := range v.segments {
+ segmentSlice[i] = int(v)
+ }
+ return segmentSlice
+}
+
+// Segments64 returns the numeric segments of the version as a slice of int64s.
+//
+// This excludes any metadata or pre-release information. For example,
+// for a version "1.2.3-beta", segments will return a slice of
+// 1, 2, 3.
+func (v *Version) Segments64() []int64 {
+ result := make([]int64, len(v.segments))
+ copy(result, v.segments)
+ return result
+}
+
+// String returns the full version string including pre-release
+// and metadata information.
+//
+// This value is rebuilt according to the parsed segments and other
+// information. Therefore, ambiguities in the version string such as
+// prefixed zeroes (1.04.0 => 1.4.0), `v` prefix (v1.0.0 => 1.0.0), and
+// missing parts (1.0 => 1.0.0) will be made into a canonicalized form
+// as shown in the parenthesized examples.
+func (v *Version) String() string {
+ var buf bytes.Buffer
+ fmtParts := make([]string, len(v.segments))
+ for i, s := range v.segments {
+ // Format each pre-parsed segment back into its decimal form
+ str := strconv.FormatInt(s, 10)
+ fmtParts[i] = str
+ }
+ fmt.Fprintf(&buf, strings.Join(fmtParts, "."))
+ if v.pre != "" {
+ fmt.Fprintf(&buf, "-%s", v.pre)
+ }
+ if v.metadata != "" {
+ fmt.Fprintf(&buf, "+%s", v.metadata)
+ }
+
+ return buf.String()
+}
+
+// Original returns the original parsed version as-is, including any
+// potential whitespace, `v` prefix, etc.
+func (v *Version) Original() string {
+ return v.original
+}
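+
+// A small sketch contrasting String and Original:
+//
+//  v := version.Must(version.NewVersion("v1.04.0"))
+//  v.String() // "1.4.0" (canonicalized)
+//  v.Original() // "v1.04.0" (as parsed)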
diff --git a/vendor/github.com/hashicorp/go-version/version_collection.go b/vendor/github.com/hashicorp/go-version/version_collection.go
new file mode 100644
index 000000000..cc888d43e
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-version/version_collection.go
@@ -0,0 +1,17 @@
+package version
+
+// Collection is a type that implements the sort.Interface interface
+// so that versions can be sorted.
+type Collection []*Version
+
+func (v Collection) Len() int {
+ return len(v)
+}
+
+func (v Collection) Less(i, j int) bool {
+ return v[i].LessThan(v[j])
+}
+
+func (v Collection) Swap(i, j int) {
+ v[i], v[j] = v[j], v[i]
+}
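+
+// A short sorting sketch (the standard library's sort package is assumed):
+//
+//  raw := []string{"1.10.0", "1.2.0", "1.2.0-beta"}
+//  versions := make(version.Collection, len(raw))
+//  for i, r := range raw {
+//      versions[i] = version.Must(version.NewVersion(r))
+//  }
+//  sort.Sort(versions) // 1.2.0-beta, 1.2.0, 1.10.0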
diff --git a/vendor/github.com/russross/blackfriday/v2/.gitignore b/vendor/github.com/russross/blackfriday/v2/.gitignore
new file mode 100644
index 000000000..75623dccc
--- /dev/null
+++ b/vendor/github.com/russross/blackfriday/v2/.gitignore
@@ -0,0 +1,8 @@
+*.out
+*.swp
+*.8
+*.6
+_obj
+_test*
+markdown
+tags
diff --git a/vendor/github.com/russross/blackfriday/v2/.travis.yml b/vendor/github.com/russross/blackfriday/v2/.travis.yml
new file mode 100644
index 000000000..b0b525a5a
--- /dev/null
+++ b/vendor/github.com/russross/blackfriday/v2/.travis.yml
@@ -0,0 +1,17 @@
+sudo: false
+language: go
+go:
+ - "1.10.x"
+ - "1.11.x"
+ - tip
+matrix:
+ fast_finish: true
+ allow_failures:
+ - go: tip
+install:
+ - # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step).
+script:
+ - go get -t -v ./...
+ - diff -u <(echo -n) <(gofmt -d -s .)
+ - go tool vet .
+ - go test -v ./...
diff --git a/vendor/github.com/russross/blackfriday/v2/LICENSE.txt b/vendor/github.com/russross/blackfriday/v2/LICENSE.txt
new file mode 100644
index 000000000..2885af360
--- /dev/null
+++ b/vendor/github.com/russross/blackfriday/v2/LICENSE.txt
@@ -0,0 +1,29 @@
+Blackfriday is distributed under the Simplified BSD License:
+
+> Copyright © 2011 Russ Ross
+> All rights reserved.
+>
+> Redistribution and use in source and binary forms, with or without
+> modification, are permitted provided that the following conditions
+> are met:
+>
+> 1. Redistributions of source code must retain the above copyright
+> notice, this list of conditions and the following disclaimer.
+>
+> 2. Redistributions in binary form must reproduce the above
+> copyright notice, this list of conditions and the following
+> disclaimer in the documentation and/or other materials provided with
+> the distribution.
+>
+> THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+> "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+> LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+> FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+> COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+> INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+> BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+> LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+> CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+> LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+> ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+> POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/russross/blackfriday/v2/README.md b/vendor/github.com/russross/blackfriday/v2/README.md
new file mode 100644
index 000000000..d9c08a22f
--- /dev/null
+++ b/vendor/github.com/russross/blackfriday/v2/README.md
@@ -0,0 +1,335 @@
+Blackfriday
+[![Build Status][BuildV2SVG]][BuildV2URL]
+[![PkgGoDev][PkgGoDevV2SVG]][PkgGoDevV2URL]
+===========
+
+Blackfriday is a [Markdown][1] processor implemented in [Go][2]. It
+is paranoid about its input (so you can safely feed it user-supplied
+data), it is fast, it supports common extensions (tables, smart
+punctuation substitutions, etc.), and it is safe for all utf-8
+(unicode) input.
+
+HTML output is currently supported, along with Smartypants
+extensions.
+
+It started as a translation from C of [Sundown][3].
+
+
+Installation
+------------
+
+Blackfriday is compatible with modern Go releases in module mode.
+With Go installed:
+
+ go get github.com/russross/blackfriday/v2
+
+will resolve and add the package to the current development module,
+then build and install it. Alternatively, you can achieve the same result
+by importing it in a package:
+
+ import "github.com/russross/blackfriday/v2"
+
+and then running `go get` without parameters.
+
+Legacy GOPATH mode is unsupported.
+
+
+Versions
+--------
+
+The currently maintained and recommended version of Blackfriday is `v2`. It is
+being developed on its own branch: https://github.com/russross/blackfriday/tree/v2, and the
+documentation is available at
+https://pkg.go.dev/github.com/russross/blackfriday/v2.
+
+It is `go get`-able in module mode at `github.com/russross/blackfriday/v2`.
+
+Version 2 offers a number of improvements over v1:
+
+* Cleaned up API
+* A separate call to [`Parse`][4], which produces an abstract syntax tree for
+ the document
+* Latest bug fixes
+* Flexibility to easily add your own rendering extensions
+
+Potential drawbacks:
+
+* Our benchmarks show v2 to be slightly slower than v1, currently in the
+ ballpark of 15%.
+* API breakage. If you can't afford to modify your code to adhere to the new API
+ and don't care too much about the new features, v2 is probably not for you.
+* Several bug fixes are trailing behind and still need to be forward-ported to
+ v2. See issue [#348](https://github.com/russross/blackfriday/issues/348) for
+ tracking.
+
+If you are still interested in the legacy `v1`, you can import it from
+`github.com/russross/blackfriday`. Documentation for the legacy v1 can be found
+here: https://pkg.go.dev/github.com/russross/blackfriday.
+
+
+Usage
+-----
+
+For the most sensible markdown processing, it is as simple as getting your input
+into a byte slice and calling:
+
+```go
+output := blackfriday.Run(input)
+```
+
+Your input will be parsed and the output rendered with a set of the most popular
+extensions enabled. If you want the most basic feature set, corresponding with
+the bare Markdown specification, use:
+
+```go
+output := blackfriday.Run(input, blackfriday.WithNoExtensions())
+```
+
+### Sanitize untrusted content
+
+Blackfriday itself does nothing to protect against malicious content. If you are
+dealing with user-supplied markdown, we recommend running Blackfriday's output
+through an HTML sanitizer such as [Bluemonday][5].
+
+Here's an example of simple usage of Blackfriday together with Bluemonday:
+
+```go
+import (
+ "github.com/microcosm-cc/bluemonday"
+ "github.com/russross/blackfriday/v2"
+)
+
+// ...
+unsafe := blackfriday.Run(input)
+html := bluemonday.UGCPolicy().SanitizeBytes(unsafe)
+```
+
+### Custom options
+
+If you want to customize the set of options, use `blackfriday.WithExtensions`,
+`blackfriday.WithRenderer` and `blackfriday.WithRefOverride`.
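+
+As a rough sketch of combining these options (the extension and renderer flags
+below are illustrative; see the package documentation for the full list):
+
+```go
+renderer := blackfriday.NewHTMLRenderer(blackfriday.HTMLRendererParameters{
+ Flags: blackfriday.CommonHTMLFlags,
+})
+output := blackfriday.Run(input,
+ blackfriday.WithExtensions(blackfriday.CommonExtensions|blackfriday.HardLineBreak),
+ blackfriday.WithRenderer(renderer))
+```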
+
+### `blackfriday-tool`
+
+You can also check out `blackfriday-tool` for a more complete example
+of how to use it. Download and install it using:
+
+ go get github.com/russross/blackfriday-tool
+
+This is a simple command-line tool that allows you to process a
+markdown file using a standalone program. You can also browse the
+source directly on github if you are just looking for some example
+code:
+
+* <https://github.com/russross/blackfriday-tool>
+
+Note that if you have not already done so, installing
+`blackfriday-tool` will be sufficient to download and install
+blackfriday in addition to the tool itself. The tool binary will be
+installed in `$GOPATH/bin`. This is a statically-linked binary that
+can be copied to wherever you need it without worrying about
+dependencies and library versions.
+
+### Sanitized anchor names
+
+Blackfriday includes an algorithm for creating sanitized anchor names
+corresponding to a given input text. This algorithm is used to create
+anchors for headings when the `AutoHeadingIDs` extension is enabled. The
+algorithm has a specification, so that other packages can create
+compatible anchor names and links to those anchors.
+
+The specification is located at https://pkg.go.dev/github.com/russross/blackfriday/v2#hdr-Sanitized_Anchor_Names.
+
+[`SanitizedAnchorName`](https://pkg.go.dev/github.com/russross/blackfriday/v2#SanitizedAnchorName) exposes this functionality, and can be used to
+create compatible links to the anchor names generated by blackfriday.
+This algorithm is also implemented in a small standalone package at
+[`github.com/shurcooL/sanitized_anchor_name`](https://pkg.go.dev/github.com/shurcooL/sanitized_anchor_name). It can be useful for clients
+that want a small package and don't need the full functionality of blackfriday.
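+
+A quick sketch of what the algorithm produces:
+
+```go
+name := blackfriday.SanitizedAnchorName("Hello, World!")
+// name == "hello-world"
+```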
+
+
+Features
+--------
+
+All features of Sundown are supported, including:
+
+* **Compatibility**. The Markdown v1.0.3 test suite passes with
+ the `--tidy` option. Without `--tidy`, the differences are
+ mostly in whitespace and entity escaping, where blackfriday is
+ more consistent and cleaner.
+
+* **Common extensions**, including table support, fenced code
+ blocks, autolinks, strikethroughs, non-strict emphasis, etc.
+
+* **Safety**. Blackfriday is paranoid when parsing, making it safe
+ to feed untrusted user input without fear of bad things
+ happening. The test suite stress tests this and there are no
+ known inputs that make it crash. If you find one, please let me
+ know and send me the input that does it.
+
+ NOTE: "safety" in this context means *runtime safety only*. In order to
+ protect yourself against JavaScript injection in untrusted content, see
+ [this example](https://github.com/russross/blackfriday#sanitize-untrusted-content).
+
+* **Fast processing**. It is fast enough to render on-demand in
+ most web applications without having to cache the output.
+
+* **Thread safety**. You can run multiple parsers in different
+ goroutines without ill effect. There is no dependence on global
+ shared state.
+
+* **Minimal dependencies**. Blackfriday only depends on standard
+ library packages in Go. The source code is pretty
+ self-contained, so it is easy to add to any project, including
+ Google App Engine projects.
+
+* **Standards compliant**. Output successfully validates using the
+ W3C validation tool for HTML 4.01 and XHTML 1.0 Transitional.
+
+
+Extensions
+----------
+
+In addition to the standard markdown syntax, this package
+implements the following extensions:
+
+* **Intra-word emphasis suppression**. The `_` character is
+ commonly used inside words when discussing code, so having
+ markdown interpret it as an emphasis command is usually the
+ wrong thing. Blackfriday lets you treat all emphasis markers as
+ normal characters when they occur inside a word.
+
+* **Tables**. Tables can be created by drawing them in the input
+ using a simple syntax:
+
+ ```
+ Name | Age
+ --------|------
+ Bob | 27
+ Alice | 23
+ ```
+
+* **Fenced code blocks**. In addition to the normal 4-space
+ indentation to mark code blocks, you can explicitly mark them
+ and supply a language (to make syntax highlighting simple). Just
+ mark it like this:
+
+ ```go
+ func getTrue() bool {
+ return true
+ }
+ ```
+
+ You can use 3 or more backticks to mark the beginning of the
+ block, and the same number to mark the end of the block.
+
+ To preserve classes of fenced code blocks while using the bluemonday
+ HTML sanitizer, use the following policy:
+
+ ```go
+ p := bluemonday.UGCPolicy()
+ p.AllowAttrs("class").Matching(regexp.MustCompile("^language-[a-zA-Z0-9]+$")).OnElements("code")
+ html := p.SanitizeBytes(unsafe)
+ ```
+
+* **Definition lists**. A simple definition list is made of a single-line
+ term followed by a colon and the definition for that term.
+
+ Cat
+ : Fluffy animal everyone likes
+
+ Internet
+ : Vector of transmission for pictures of cats
+
+ Terms must be separated from the previous definition by a blank line.
+
+* **Footnotes**. A marker in the text that will become a superscript number;
+ a footnote definition that will be placed in a list of footnotes at the
+ end of the document. A footnote looks like this:
+
+ This is a footnote.[^1]
+
+ [^1]: the footnote text.
+
+* **Autolinking**. Blackfriday can find URLs that have not been
+ explicitly marked as links and turn them into links.
+
+* **Strikethrough**. Use two tildes (`~~`) to mark text that
+ should be crossed out.
+
+* **Hard line breaks**. With this extension enabled newlines in the input
+ translate into line breaks in the output. This extension is off by default.
+
+* **Smart quotes**. Smartypants-style punctuation substitution is
+ supported, turning normal double- and single-quote marks into
+ curly quotes, etc.
+
+* **LaTeX-style dash parsing** is an additional option, where `--`
+ is translated into `&ndash;`, and `---` is translated into
+ `&mdash;`. This differs from most smartypants processors, which
+ turn a single hyphen into an ndash and a double hyphen into an
+ mdash.
+
+* **Smart fractions**, where anything that looks like a fraction
+ is translated into suitable HTML (instead of just a few special
+ cases like most smartypants processors). For example, `4/5`
+ becomes `<sup>4</sup>&frasl;<sub>5</sub>`, which renders as
+ <sup>4</sup>&frasl;<sub>5</sub>.
+
+
+Other renderers
+---------------
+
+Blackfriday is structured to allow alternative rendering engines. Here
+are a few of note:
+
+* [github_flavored_markdown](https://pkg.go.dev/github.com/shurcooL/github_flavored_markdown):
+ provides a GitHub Flavored Markdown renderer with fenced code block
+ highlighting and clickable heading anchor links.
+
+ It's not customizable, and its goal is to produce HTML output
+ equivalent to the [GitHub Markdown API endpoint](https://developer.github.com/v3/markdown/#render-a-markdown-document-in-raw-mode),
+ except the rendering is performed locally.
+
+* [markdownfmt](https://github.com/shurcooL/markdownfmt): like gofmt,
+ but for markdown.
+
+* [LaTeX output](https://gitlab.com/ambrevar/blackfriday-latex):
+ renders output as LaTeX.
+
+* [bfchroma](https://github.com/Depado/bfchroma/): provides convenience
+ integration with the [Chroma](https://github.com/alecthomas/chroma) code
+ highlighting library. bfchroma is only compatible with v2 of Blackfriday and
+ provides a drop-in renderer ready to use with Blackfriday, as well as
+ options and means for further customization.
+
+* [Blackfriday-Confluence](https://github.com/kentaro-m/blackfriday-confluence): provides a [Confluence Wiki Markup](https://confluence.atlassian.com/doc/confluence-wiki-markup-251003035.html) renderer.
+
+* [Blackfriday-Slack](https://github.com/karriereat/blackfriday-slack): converts markdown to Slack message style.
+
+
+TODO
+----
+
+* More unit testing
+* Improve Unicode support. It does not understand all Unicode
+ rules (about what constitutes a letter, a punctuation symbol,
+ etc.), so it may fail to detect word boundaries correctly in
+ some instances. It is safe on all UTF-8 input.
+
+
+License
+-------
+
+[Blackfriday is distributed under the Simplified BSD License](LICENSE.txt)
+
+
+ [1]: https://daringfireball.net/projects/markdown/ "Markdown"
+ [2]: https://golang.org/ "Go Language"
+ [3]: https://github.com/vmg/sundown "Sundown"
+ [4]: https://pkg.go.dev/github.com/russross/blackfriday/v2#Parse "Parse func"
+ [5]: https://github.com/microcosm-cc/bluemonday "Bluemonday"
+
+ [BuildV2SVG]: https://travis-ci.org/russross/blackfriday.svg?branch=v2
+ [BuildV2URL]: https://travis-ci.org/russross/blackfriday
+ [PkgGoDevV2SVG]: https://pkg.go.dev/badge/github.com/russross/blackfriday/v2
+ [PkgGoDevV2URL]: https://pkg.go.dev/github.com/russross/blackfriday/v2
diff --git a/vendor/github.com/russross/blackfriday/v2/block.go b/vendor/github.com/russross/blackfriday/v2/block.go
new file mode 100644
index 000000000..dcd61e6e3
--- /dev/null
+++ b/vendor/github.com/russross/blackfriday/v2/block.go
@@ -0,0 +1,1612 @@
+//
+// Blackfriday Markdown Processor
+// Available at http://github.com/russross/blackfriday
+//
+// Copyright © 2011 Russ Ross <russ@russross.com>.
+// Distributed under the Simplified BSD License.
+// See README.md for details.
+//
+
+//
+// Functions to parse block-level elements.
+//
+
+package blackfriday
+
+import (
+ "bytes"
+ "html"
+ "regexp"
+ "strings"
+ "unicode"
+)
+
+const (
+ charEntity = "&(?:#x[a-f0-9]{1,8}|#[0-9]{1,8}|[a-z][a-z0-9]{1,31});"
+ escapable = "[!\"#$%&'()*+,./:;<=>?@[\\\\\\]^_`{|}~-]"
+)
+
+var (
+ reBackslashOrAmp = regexp.MustCompile("[\\&]")
+ reEntityOrEscapedChar = regexp.MustCompile("(?i)\\\\" + escapable + "|" + charEntity)
+)
+
+// Parse block-level data.
+// Note: this function and many that it calls assume that
+// the input buffer ends with a newline.
+func (p *Markdown) block(data []byte) {
+ // this is called recursively: enforce a maximum depth
+ if p.nesting >= p.maxNesting {
+ return
+ }
+ p.nesting++
+
+ // parse out one block-level construct at a time
+ for len(data) > 0 {
+ // prefixed heading:
+ //
+ // # Heading 1
+ // ## Heading 2
+ // ...
+ // ###### Heading 6
+ if p.isPrefixHeading(data) {
+ data = data[p.prefixHeading(data):]
+ continue
+ }
+
+ // block of preformatted HTML:
+ //
+ // <div>
+ // ...
+ // </div>
+ if data[0] == '<' {
+ if i := p.html(data, true); i > 0 {
+ data = data[i:]
+ continue
+ }
+ }
+
+ // title block
+ //
+ // % stuff
+ // % more stuff
+ // % even more stuff
+ if p.extensions&Titleblock != 0 {
+ if data[0] == '%' {
+ if i := p.titleBlock(data, true); i > 0 {
+ data = data[i:]
+ continue
+ }
+ }
+ }
+
+ // blank lines. note: returns the # of bytes to skip
+ if i := p.isEmpty(data); i > 0 {
+ data = data[i:]
+ continue
+ }
+
+ // indented code block:
+ //
+ // func max(a, b int) int {
+ // if a > b {
+ // return a
+ // }
+ // return b
+ // }
+ if p.codePrefix(data) > 0 {
+ data = data[p.code(data):]
+ continue
+ }
+
+ // fenced code block:
+ //
+ // ``` go
+ // func fact(n int) int {
+ // if n <= 1 {
+ // return n
+ // }
+ // return n * fact(n-1)
+ // }
+ // ```
+ if p.extensions&FencedCode != 0 {
+ if i := p.fencedCodeBlock(data, true); i > 0 {
+ data = data[i:]
+ continue
+ }
+ }
+
+ // horizontal rule:
+ //
+ // ------
+ // or
+ // ******
+ // or
+ // ______
+ if p.isHRule(data) {
+ p.addBlock(HorizontalRule, nil)
+ var i int
+ for i = 0; i < len(data) && data[i] != '\n'; i++ {
+ }
+ data = data[i:]
+ continue
+ }
+
+ // block quote:
+ //
+ // > A big quote I found somewhere
+ // > on the web
+ if p.quotePrefix(data) > 0 {
+ data = data[p.quote(data):]
+ continue
+ }
+
+ // table:
+ //
+ // Name | Age | Phone
+ // ------|-----|---------
+ // Bob | 31 | 555-1234
+ // Alice | 27 | 555-4321
+ if p.extensions&Tables != 0 {
+ if i := p.table(data); i > 0 {
+ data = data[i:]
+ continue
+ }
+ }
+
+ // an itemized/unordered list:
+ //
+ // * Item 1
+ // * Item 2
+ //
+ // also works with + or -
+ if p.uliPrefix(data) > 0 {
+ data = data[p.list(data, 0):]
+ continue
+ }
+
+ // a numbered/ordered list:
+ //
+ // 1. Item 1
+ // 2. Item 2
+ if p.oliPrefix(data) > 0 {
+ data = data[p.list(data, ListTypeOrdered):]
+ continue
+ }
+
+ // definition lists:
+ //
+ // Term 1
+ // : Definition a
+ // : Definition b
+ //
+ // Term 2
+ // : Definition c
+ if p.extensions&DefinitionLists != 0 {
+ if p.dliPrefix(data) > 0 {
+ data = data[p.list(data, ListTypeDefinition):]
+ continue
+ }
+ }
+
+ // anything else must look like a normal paragraph
+ // note: this finds underlined headings, too
+ data = data[p.paragraph(data):]
+ }
+
+ p.nesting--
+}
+
+func (p *Markdown) addBlock(typ NodeType, content []byte) *Node {
+ p.closeUnmatchedBlocks()
+ container := p.addChild(typ, 0)
+ container.content = content
+ return container
+}
+
+func (p *Markdown) isPrefixHeading(data []byte) bool {
+ if data[0] != '#' {
+ return false
+ }
+
+ if p.extensions&SpaceHeadings != 0 {
+ level := 0
+ for level < 6 && level < len(data) && data[level] == '#' {
+ level++
+ }
+ if level == len(data) || data[level] != ' ' {
+ return false
+ }
+ }
+ return true
+}
+
+func (p *Markdown) prefixHeading(data []byte) int {
+ level := 0
+ for level < 6 && level < len(data) && data[level] == '#' {
+ level++
+ }
+ i := skipChar(data, level, ' ')
+ end := skipUntilChar(data, i, '\n')
+ skip := end
+ id := ""
+ if p.extensions&HeadingIDs != 0 {
+ j, k := 0, 0
+ // find start/end of heading id
+ for j = i; j < end-1 && (data[j] != '{' || data[j+1] != '#'); j++ {
+ }
+ for k = j + 1; k < end && data[k] != '}'; k++ {
+ }
+ // extract heading id iff found
+ if j < end && k < end {
+ id = string(data[j+2 : k])
+ end = j
+ skip = k + 1
+ for end > 0 && data[end-1] == ' ' {
+ end--
+ }
+ }
+ }
+ for end > 0 && data[end-1] == '#' {
+ if isBackslashEscaped(data, end-1) {
+ break
+ }
+ end--
+ }
+ for end > 0 && data[end-1] == ' ' {
+ end--
+ }
+ if end > i {
+ if id == "" && p.extensions&AutoHeadingIDs != 0 {
+ id = SanitizedAnchorName(string(data[i:end]))
+ }
+ block := p.addBlock(Heading, data[i:end])
+ block.HeadingID = id
+ block.Level = level
+ }
+ return skip
+}
+
+func (p *Markdown) isUnderlinedHeading(data []byte) int {
+ // test of level 1 heading
+ if data[0] == '=' {
+ i := skipChar(data, 1, '=')
+ i = skipChar(data, i, ' ')
+ if i < len(data) && data[i] == '\n' {
+ return 1
+ }
+ return 0
+ }
+
+ // test of level 2 heading
+ if data[0] == '-' {
+ i := skipChar(data, 1, '-')
+ i = skipChar(data, i, ' ')
+ if i < len(data) && data[i] == '\n' {
+ return 2
+ }
+ return 0
+ }
+
+ return 0
+}
+
+func (p *Markdown) titleBlock(data []byte, doRender bool) int {
+ if data[0] != '%' {
+ return 0
+ }
+ splitData := bytes.Split(data, []byte("\n"))
+ var i int
+ for idx, b := range splitData {
+ if !bytes.HasPrefix(b, []byte("%")) {
+ i = idx // - 1
+ break
+ }
+ }
+
+ data = bytes.Join(splitData[0:i], []byte("\n"))
+ consumed := len(data)
+ data = bytes.TrimPrefix(data, []byte("% "))
+ data = bytes.Replace(data, []byte("\n% "), []byte("\n"), -1)
+ block := p.addBlock(Heading, data)
+ block.Level = 1
+ block.IsTitleblock = true
+
+ return consumed
+}
+
+func (p *Markdown) html(data []byte, doRender bool) int {
+ var i, j int
+
+ // identify the opening tag
+ if data[0] != '<' {
+ return 0
+ }
+ curtag, tagfound := p.htmlFindTag(data[1:])
+
+ // handle special cases
+ if !tagfound {
+ // check for an HTML comment
+ if size := p.htmlComment(data, doRender); size > 0 {
+ return size
+ }
+
+ // check for an <hr> tag
+ if size := p.htmlHr(data, doRender); size > 0 {
+ return size
+ }
+
+ // no special case recognized
+ return 0
+ }
+
+ // look for an unindented matching closing tag
+ // followed by a blank line
+ found := false
+ /*
+ closetag := []byte("\n</" + curtag + ">")
+ j = len(curtag) + 1
+ for !found {
+ // scan for a closing tag at the beginning of a line
+ if skip := bytes.Index(data[j:], closetag); skip >= 0 {
+ j += skip + len(closetag)
+ } else {
+ break
+ }
+
+ // see if it is the only thing on the line
+ if skip := p.isEmpty(data[j:]); skip > 0 {
+ // see if it is followed by a blank line/eof
+ j += skip
+ if j >= len(data) {
+ found = true
+ i = j
+ } else {
+ if skip := p.isEmpty(data[j:]); skip > 0 {
+ j += skip
+ found = true
+ i = j
+ }
+ }
+ }
+ }
+ */
+
+ // if not found, try a second pass looking for indented match
+ // but not if tag is "ins" or "del" (following original Markdown.pl)
+ if !found && curtag != "ins" && curtag != "del" {
+ i = 1
+ for i < len(data) {
+ i++
+ for i < len(data) && !(data[i-1] == '<' && data[i] == '/') {
+ i++
+ }
+
+ if i+2+len(curtag) >= len(data) {
+ break
+ }
+
+ j = p.htmlFindEnd(curtag, data[i-1:])
+
+ if j > 0 {
+ i += j - 1
+ found = true
+ break
+ }
+ }
+ }
+
+ if !found {
+ return 0
+ }
+
+ // the end of the block has been found
+ if doRender {
+ // trim newlines
+ end := i
+ for end > 0 && data[end-1] == '\n' {
+ end--
+ }
+ finalizeHTMLBlock(p.addBlock(HTMLBlock, data[:end]))
+ }
+
+ return i
+}
+
+func finalizeHTMLBlock(block *Node) {
+ block.Literal = block.content
+ block.content = nil
+}
+
+// HTML comment, lax form
+func (p *Markdown) htmlComment(data []byte, doRender bool) int {
+ i := p.inlineHTMLComment(data)
+ // needs to end with a blank line
+ if j := p.isEmpty(data[i:]); j > 0 {
+ size := i + j
+ if doRender {
+ // trim trailing newlines
+ end := size
+ for end > 0 && data[end-1] == '\n' {
+ end--
+ }
+ block := p.addBlock(HTMLBlock, data[:end])
+ finalizeHTMLBlock(block)
+ }
+ return size
+ }
+ return 0
+}
+
+// HR, which is the only self-closing block tag considered
+func (p *Markdown) htmlHr(data []byte, doRender bool) int {
+ if len(data) < 4 {
+ return 0
+ }
+ if data[0] != '<' || (data[1] != 'h' && data[1] != 'H') || (data[2] != 'r' && data[2] != 'R') {
+ return 0
+ }
+ if data[3] != ' ' && data[3] != '/' && data[3] != '>' {
+ // not an <hr> tag after all; at least not a valid one
+ return 0
+ }
+ i := 3
+ for i < len(data) && data[i] != '>' && data[i] != '\n' {
+ i++
+ }
+ if i < len(data) && data[i] == '>' {
+ i++
+ if j := p.isEmpty(data[i:]); j > 0 {
+ size := i + j
+ if doRender {
+ // trim newlines
+ end := size
+ for end > 0 && data[end-1] == '\n' {
+ end--
+ }
+ finalizeHTMLBlock(p.addBlock(HTMLBlock, data[:end]))
+ }
+ return size
+ }
+ }
+ return 0
+}
+
+func (p *Markdown) htmlFindTag(data []byte) (string, bool) {
+ i := 0
+ for i < len(data) && isalnum(data[i]) {
+ i++
+ }
+ key := string(data[:i])
+ if _, ok := blockTags[key]; ok {
+ return key, true
+ }
+ return "", false
+}
+
+func (p *Markdown) htmlFindEnd(tag string, data []byte) int {
+ // assume data[0] == '<' && data[1] == '/' already tested
+ if tag == "hr" {
+ return 2
+ }
+ // check if tag is a match
+ closetag := []byte("</" + tag + ">")
+ if !bytes.HasPrefix(data, closetag) {
+ return 0
+ }
+ i := len(closetag)
+
+ // check that the rest of the line is blank
+ skip := 0
+ if skip = p.isEmpty(data[i:]); skip == 0 {
+ return 0
+ }
+ i += skip
+ skip = 0
+
+ if i >= len(data) {
+ return i
+ }
+
+ if p.extensions&LaxHTMLBlocks != 0 {
+ return i
+ }
+ if skip = p.isEmpty(data[i:]); skip == 0 {
+ // following line must be blank
+ return 0
+ }
+
+ return i + skip
+}
+
+func (*Markdown) isEmpty(data []byte) int {
+ // it is okay to call isEmpty on an empty buffer
+ if len(data) == 0 {
+ return 0
+ }
+
+ var i int
+ for i = 0; i < len(data) && data[i] != '\n'; i++ {
+ if data[i] != ' ' && data[i] != '\t' {
+ return 0
+ }
+ }
+ if i < len(data) && data[i] == '\n' {
+ i++
+ }
+ return i
+}
+
+func (*Markdown) isHRule(data []byte) bool {
+ i := 0
+
+ // skip up to three spaces
+ for i < 3 && data[i] == ' ' {
+ i++
+ }
+
+ // look at the hrule char
+ if data[i] != '*' && data[i] != '-' && data[i] != '_' {
+ return false
+ }
+ c := data[i]
+
+ // the whole line must be the char or whitespace
+ n := 0
+ for i < len(data) && data[i] != '\n' {
+ switch {
+ case data[i] == c:
+ n++
+ case data[i] != ' ':
+ return false
+ }
+ i++
+ }
+
+ return n >= 3
+}
+
+// isFenceLine checks if there's a fence line (e.g., ``` or ``` go) at the beginning of data,
+// and returns the end index if so, or 0 otherwise. It also returns the marker found.
+// If info is not nil, it gets set to the syntax specified in the fence line.
+func isFenceLine(data []byte, info *string, oldmarker string) (end int, marker string) {
+ i, size := 0, 0
+
+ // skip up to three spaces
+ for i < len(data) && i < 3 && data[i] == ' ' {
+ i++
+ }
+
+ // check for the marker characters: ~ or `
+ if i >= len(data) {
+ return 0, ""
+ }
+ if data[i] != '~' && data[i] != '`' {
+ return 0, ""
+ }
+
+ c := data[i]
+
+ // the whole line must be the same char or whitespace
+ for i < len(data) && data[i] == c {
+ size++
+ i++
+ }
+
+ // the marker char must occur at least 3 times
+ if size < 3 {
+ return 0, ""
+ }
+ marker = string(data[i-size : i])
+
+ // if this is the end marker, it must match the beginning marker
+ if oldmarker != "" && marker != oldmarker {
+ return 0, ""
+ }
+
+ // TODO(shurcooL): It's probably a good idea to simplify the 2 code paths here
+ // into one, always get the info string, and discard it if the caller doesn't care.
+ if info != nil {
+ infoLength := 0
+ i = skipChar(data, i, ' ')
+
+ if i >= len(data) {
+ if i == len(data) {
+ return i, marker
+ }
+ return 0, ""
+ }
+
+ infoStart := i
+
+ if data[i] == '{' {
+ i++
+ infoStart++
+
+ for i < len(data) && data[i] != '}' && data[i] != '\n' {
+ infoLength++
+ i++
+ }
+
+ if i >= len(data) || data[i] != '}' {
+ return 0, ""
+ }
+
+ // strip all whitespace at the beginning and the end
+ // of the {} block
+ for infoLength > 0 && isspace(data[infoStart]) {
+ infoStart++
+ infoLength--
+ }
+
+ for infoLength > 0 && isspace(data[infoStart+infoLength-1]) {
+ infoLength--
+ }
+ i++
+ i = skipChar(data, i, ' ')
+ } else {
+ for i < len(data) && !isverticalspace(data[i]) {
+ infoLength++
+ i++
+ }
+ }
+
+ *info = strings.TrimSpace(string(data[infoStart : infoStart+infoLength]))
+ }
+
+ if i == len(data) {
+ return i, marker
+ }
+ if i > len(data) || data[i] != '\n' {
+ return 0, ""
+ }
+ return i + 1, marker // Take newline into account.
+}
+
+// fencedCodeBlock returns the end index if data contains a fenced code block at the beginning,
+// or 0 otherwise. It writes to out if doRender is true, otherwise it has no side effects.
+// If doRender is true, a final newline is mandatory to recognize the fenced code block.
+func (p *Markdown) fencedCodeBlock(data []byte, doRender bool) int {
+ var info string
+ beg, marker := isFenceLine(data, &info, "")
+ if beg == 0 || beg >= len(data) {
+ return 0
+ }
+ fenceLength := beg - 1
+
+ var work bytes.Buffer
+ work.Write([]byte(info))
+ work.WriteByte('\n')
+
+ for {
+ // safe to assume beg < len(data)
+
+ // check for the end of the code block
+ fenceEnd, _ := isFenceLine(data[beg:], nil, marker)
+ if fenceEnd != 0 {
+ beg += fenceEnd
+ break
+ }
+
+ // copy the current line
+ end := skipUntilChar(data, beg, '\n') + 1
+
+ // did we reach the end of the buffer without a closing marker?
+ if end >= len(data) {
+ return 0
+ }
+
+ // verbatim copy to the working buffer
+ if doRender {
+ work.Write(data[beg:end])
+ }
+ beg = end
+ }
+
+ if doRender {
+ block := p.addBlock(CodeBlock, work.Bytes()) // TODO: get rid of temp buffer
+ block.IsFenced = true
+ block.FenceLength = fenceLength
+ finalizeCodeBlock(block)
+ }
+
+ return beg
+}
+
+func unescapeChar(str []byte) []byte {
+ if str[0] == '\\' {
+ return []byte{str[1]}
+ }
+ return []byte(html.UnescapeString(string(str)))
+}
+
+func unescapeString(str []byte) []byte {
+ if reBackslashOrAmp.Match(str) {
+ return reEntityOrEscapedChar.ReplaceAllFunc(str, unescapeChar)
+ }
+ return str
+}
+
+func finalizeCodeBlock(block *Node) {
+ if block.IsFenced {
+ newlinePos := bytes.IndexByte(block.content, '\n')
+ firstLine := block.content[:newlinePos]
+ rest := block.content[newlinePos+1:]
+ block.Info = unescapeString(bytes.Trim(firstLine, "\n"))
+ block.Literal = rest
+ } else {
+ block.Literal = block.content
+ }
+ block.content = nil
+}
+
+func (p *Markdown) table(data []byte) int {
+ table := p.addBlock(Table, nil)
+ i, columns := p.tableHeader(data)
+ if i == 0 {
+ p.tip = table.Parent
+ table.Unlink()
+ return 0
+ }
+
+ p.addBlock(TableBody, nil)
+
+ for i < len(data) {
+ pipes, rowStart := 0, i
+ for ; i < len(data) && data[i] != '\n'; i++ {
+ if data[i] == '|' {
+ pipes++
+ }
+ }
+
+ if pipes == 0 {
+ i = rowStart
+ break
+ }
+
+ // include the newline in data sent to tableRow
+ if i < len(data) && data[i] == '\n' {
+ i++
+ }
+ p.tableRow(data[rowStart:i], columns, false)
+ }
+
+ return i
+}
+
+// check if the specified position is preceded by an odd number of backslashes
+func isBackslashEscaped(data []byte, i int) bool {
+ backslashes := 0
+ for i-backslashes-1 >= 0 && data[i-backslashes-1] == '\\' {
+ backslashes++
+ }
+ return backslashes&1 == 1
+}
+
+func (p *Markdown) tableHeader(data []byte) (size int, columns []CellAlignFlags) {
+ i := 0
+ colCount := 1
+ for i = 0; i < len(data) && data[i] != '\n'; i++ {
+ if data[i] == '|' && !isBackslashEscaped(data, i) {
+ colCount++
+ }
+ }
+
+ // doesn't look like a table header
+ if colCount == 1 {
+ return
+ }
+
+ // include the newline in the data sent to tableRow
+ j := i
+ if j < len(data) && data[j] == '\n' {
+ j++
+ }
+ header := data[:j]
+
+ // column count ignores pipes at beginning or end of line
+ if data[0] == '|' {
+ colCount--
+ }
+ if i > 2 && data[i-1] == '|' && !isBackslashEscaped(data, i-1) {
+ colCount--
+ }
+
+ columns = make([]CellAlignFlags, colCount)
+
+ // move on to the header underline
+ i++
+ if i >= len(data) {
+ return
+ }
+
+ if data[i] == '|' && !isBackslashEscaped(data, i) {
+ i++
+ }
+ i = skipChar(data, i, ' ')
+
+ // each column header is of form: / *:?-+:? *|/ with # dashes + # colons >= 3
+ // and trailing | optional on last column
+ col := 0
+ for i < len(data) && data[i] != '\n' {
+ dashes := 0
+
+ if data[i] == ':' {
+ i++
+ columns[col] |= TableAlignmentLeft
+ dashes++
+ }
+ for i < len(data) && data[i] == '-' {
+ i++
+ dashes++
+ }
+ if i < len(data) && data[i] == ':' {
+ i++
+ columns[col] |= TableAlignmentRight
+ dashes++
+ }
+ for i < len(data) && data[i] == ' ' {
+ i++
+ }
+ if i == len(data) {
+ return
+ }
+ // end of column test is messy
+ switch {
+ case dashes < 3:
+ // not a valid column
+ return
+
+ case data[i] == '|' && !isBackslashEscaped(data, i):
+ // marker found, now skip past trailing whitespace
+ col++
+ i++
+ for i < len(data) && data[i] == ' ' {
+ i++
+ }
+
+ // trailing junk found after last column
+ if col >= colCount && i < len(data) && data[i] != '\n' {
+ return
+ }
+
+ case (data[i] != '|' || isBackslashEscaped(data, i)) && col+1 < colCount:
+ // something else found where marker was required
+ return
+
+ case data[i] == '\n':
+ // marker is optional for the last column
+ col++
+
+ default:
+ // trailing junk found after last column
+ return
+ }
+ }
+ if col != colCount {
+ return
+ }
+
+ p.addBlock(TableHead, nil)
+ p.tableRow(header, columns, true)
+ size = i
+ if size < len(data) && data[size] == '\n' {
+ size++
+ }
+ return
+}
+
+func (p *Markdown) tableRow(data []byte, columns []CellAlignFlags, header bool) {
+ p.addBlock(TableRow, nil)
+ i, col := 0, 0
+
+ if data[i] == '|' && !isBackslashEscaped(data, i) {
+ i++
+ }
+
+ for col = 0; col < len(columns) && i < len(data); col++ {
+ for i < len(data) && data[i] == ' ' {
+ i++
+ }
+
+ cellStart := i
+
+ for i < len(data) && (data[i] != '|' || isBackslashEscaped(data, i)) && data[i] != '\n' {
+ i++
+ }
+
+ cellEnd := i
+
+ // skip the end-of-cell marker, possibly taking us past end of buffer
+ i++
+
+ for cellEnd > cellStart && cellEnd-1 < len(data) && data[cellEnd-1] == ' ' {
+ cellEnd--
+ }
+
+ cell := p.addBlock(TableCell, data[cellStart:cellEnd])
+ cell.IsHeader = header
+ cell.Align = columns[col]
+ }
+
+ // pad it out with empty columns to get the right number
+ for ; col < len(columns); col++ {
+ cell := p.addBlock(TableCell, nil)
+ cell.IsHeader = header
+ cell.Align = columns[col]
+ }
+
+ // silently ignore rows with too many cells
+}
+
+// returns blockquote prefix length
+func (p *Markdown) quotePrefix(data []byte) int {
+ i := 0
+ for i < 3 && i < len(data) && data[i] == ' ' {
+ i++
+ }
+ if i < len(data) && data[i] == '>' {
+ if i+1 < len(data) && data[i+1] == ' ' {
+ return i + 2
+ }
+ return i + 1
+ }
+ return 0
+}
+
+// blockquote ends with at least one blank line
+// followed by something without a blockquote prefix
+func (p *Markdown) terminateBlockquote(data []byte, beg, end int) bool {
+ if p.isEmpty(data[beg:]) <= 0 {
+ return false
+ }
+ if end >= len(data) {
+ return true
+ }
+ return p.quotePrefix(data[end:]) == 0 && p.isEmpty(data[end:]) == 0
+}
+
+// parse a blockquote fragment
+func (p *Markdown) quote(data []byte) int {
+ block := p.addBlock(BlockQuote, nil)
+ var raw bytes.Buffer
+ beg, end := 0, 0
+ for beg < len(data) {
+ end = beg
+ // Step over whole lines, collecting them. While doing that, check for
+ // fenced code and if one's found, incorporate it altogether,
+ // regardless of any contents inside it
+ for end < len(data) && data[end] != '\n' {
+ if p.extensions&FencedCode != 0 {
+ if i := p.fencedCodeBlock(data[end:], false); i > 0 {
+ // -1 to compensate for the extra end++ after the loop:
+ end += i - 1
+ break
+ }
+ }
+ end++
+ }
+ if end < len(data) && data[end] == '\n' {
+ end++
+ }
+ if pre := p.quotePrefix(data[beg:]); pre > 0 {
+ // skip the prefix
+ beg += pre
+ } else if p.terminateBlockquote(data, beg, end) {
+ break
+ }
+ // this line is part of the blockquote
+ raw.Write(data[beg:end])
+ beg = end
+ }
+ p.block(raw.Bytes())
+ p.finalize(block)
+ return end
+}
+
+// returns prefix length for block code
+func (p *Markdown) codePrefix(data []byte) int {
+ if len(data) >= 1 && data[0] == '\t' {
+ return 1
+ }
+ if len(data) >= 4 && data[0] == ' ' && data[1] == ' ' && data[2] == ' ' && data[3] == ' ' {
+ return 4
+ }
+ return 0
+}
+
+func (p *Markdown) code(data []byte) int {
+ var work bytes.Buffer
+
+ i := 0
+ for i < len(data) {
+ beg := i
+ for i < len(data) && data[i] != '\n' {
+ i++
+ }
+ if i < len(data) && data[i] == '\n' {
+ i++
+ }
+
+ blankline := p.isEmpty(data[beg:i]) > 0
+ if pre := p.codePrefix(data[beg:i]); pre > 0 {
+ beg += pre
+ } else if !blankline {
+ // non-empty, non-prefixed line breaks the pre
+ i = beg
+ break
+ }
+
+ // verbatim copy to the working buffer
+ if blankline {
+ work.WriteByte('\n')
+ } else {
+ work.Write(data[beg:i])
+ }
+ }
+
+ // trim all the \n off the end of work
+ workbytes := work.Bytes()
+ eol := len(workbytes)
+ for eol > 0 && workbytes[eol-1] == '\n' {
+ eol--
+ }
+ if eol != len(workbytes) {
+ work.Truncate(eol)
+ }
+
+ work.WriteByte('\n')
+
+ block := p.addBlock(CodeBlock, work.Bytes()) // TODO: get rid of temp buffer
+ block.IsFenced = false
+ finalizeCodeBlock(block)
+
+ return i
+}
+
+// returns unordered list item prefix
+func (p *Markdown) uliPrefix(data []byte) int {
+ i := 0
+ // start with up to 3 spaces
+ for i < len(data) && i < 3 && data[i] == ' ' {
+ i++
+ }
+ if i >= len(data)-1 {
+ return 0
+ }
+ // need one of {'*', '+', '-'} followed by a space or a tab
+ if (data[i] != '*' && data[i] != '+' && data[i] != '-') ||
+ (data[i+1] != ' ' && data[i+1] != '\t') {
+ return 0
+ }
+ return i + 2
+}
+
+// returns ordered list item prefix
+func (p *Markdown) oliPrefix(data []byte) int {
+ i := 0
+
+ // start with up to 3 spaces
+ for i < 3 && i < len(data) && data[i] == ' ' {
+ i++
+ }
+
+ // count the digits
+ start := i
+ for i < len(data) && data[i] >= '0' && data[i] <= '9' {
+ i++
+ }
+ if start == i || i >= len(data)-1 {
+ return 0
+ }
+
+ // we need at least one digit followed by a dot and a space or a tab
+ if data[i] != '.' || !(data[i+1] == ' ' || data[i+1] == '\t') {
+ return 0
+ }
+ return i + 2
+}
+
+// returns definition list item prefix
+func (p *Markdown) dliPrefix(data []byte) int {
+ if len(data) < 2 {
+ return 0
+ }
+ i := 0
+ // need a ':' followed by a space or a tab
+ if data[i] != ':' || !(data[i+1] == ' ' || data[i+1] == '\t') {
+ return 0
+ }
+ for i < len(data) && data[i] == ' ' {
+ i++
+ }
+ return i + 2
+}
+
+// parse ordered or unordered list block
+func (p *Markdown) list(data []byte, flags ListType) int {
+ i := 0
+ flags |= ListItemBeginningOfList
+ block := p.addBlock(List, nil)
+ block.ListFlags = flags
+ block.Tight = true
+
+ for i < len(data) {
+ skip := p.listItem(data[i:], &flags)
+ if flags&ListItemContainsBlock != 0 {
+ block.ListData.Tight = false
+ }
+ i += skip
+ if skip == 0 || flags&ListItemEndOfList != 0 {
+ break
+ }
+ flags &= ^ListItemBeginningOfList
+ }
+
+ above := block.Parent
+ finalizeList(block)
+ p.tip = above
+ return i
+}
+
+// Returns true if the list item is not the same type as its parent list
+func (p *Markdown) listTypeChanged(data []byte, flags *ListType) bool {
+ if p.dliPrefix(data) > 0 && *flags&ListTypeDefinition == 0 {
+ return true
+ } else if p.oliPrefix(data) > 0 && *flags&ListTypeOrdered == 0 {
+ return true
+ } else if p.uliPrefix(data) > 0 && (*flags&ListTypeOrdered != 0 || *flags&ListTypeDefinition != 0) {
+ return true
+ }
+ return false
+}
+
+// Returns true if block ends with a blank line, descending if needed
+// into lists and sublists.
+func endsWithBlankLine(block *Node) bool {
+ // TODO: figure this out. Always false now.
+ for block != nil {
+ //if block.lastLineBlank {
+ //return true
+ //}
+ t := block.Type
+ if t == List || t == Item {
+ block = block.LastChild
+ } else {
+ break
+ }
+ }
+ return false
+}
+
+func finalizeList(block *Node) {
+ block.open = false
+ item := block.FirstChild
+ for item != nil {
+ // check for non-final list item ending with blank line:
+ if endsWithBlankLine(item) && item.Next != nil {
+ block.ListData.Tight = false
+ break
+ }
+ // recurse into children of list item, to see if there are spaces
+ // between any of them:
+ subItem := item.FirstChild
+ for subItem != nil {
+ if endsWithBlankLine(subItem) && (item.Next != nil || subItem.Next != nil) {
+ block.ListData.Tight = false
+ break
+ }
+ subItem = subItem.Next
+ }
+ item = item.Next
+ }
+}
+
+// Parse a single list item.
+// Assumes initial prefix is already removed if this is a sublist.
+func (p *Markdown) listItem(data []byte, flags *ListType) int {
+ // keep track of the indentation of the first line
+ itemIndent := 0
+ if data[0] == '\t' {
+ itemIndent += 4
+ } else {
+ for itemIndent < 3 && data[itemIndent] == ' ' {
+ itemIndent++
+ }
+ }
+
+ var bulletChar byte = '*'
+ i := p.uliPrefix(data)
+ if i == 0 {
+ i = p.oliPrefix(data)
+ } else {
+ bulletChar = data[i-2]
+ }
+ if i == 0 {
+ i = p.dliPrefix(data)
+ // reset definition term flag
+ if i > 0 {
+ *flags &= ^ListTypeTerm
+ }
+ }
+ if i == 0 {
+ // if in definition list, set term flag and continue
+ if *flags&ListTypeDefinition != 0 {
+ *flags |= ListTypeTerm
+ } else {
+ return 0
+ }
+ }
+
+ // skip leading whitespace on first line
+ for i < len(data) && data[i] == ' ' {
+ i++
+ }
+
+ // find the end of the line
+ line := i
+ for i > 0 && i < len(data) && data[i-1] != '\n' {
+ i++
+ }
+
+ // get working buffer
+ var raw bytes.Buffer
+
+ // put the first line into the working buffer
+ raw.Write(data[line:i])
+ line = i
+
+ // process the following lines
+ containsBlankLine := false
+ sublist := 0
+ codeBlockMarker := ""
+
+gatherlines:
+ for line < len(data) {
+ i++
+
+ // find the end of this line
+ for i < len(data) && data[i-1] != '\n' {
+ i++
+ }
+
+ // if it is an empty line, guess that it is part of this item
+ // and move on to the next line
+ if p.isEmpty(data[line:i]) > 0 {
+ containsBlankLine = true
+ line = i
+ continue
+ }
+
+ // calculate the indentation
+ indent := 0
+ indentIndex := 0
+ if data[line] == '\t' {
+ indentIndex++
+ indent += 4
+ } else {
+ for indent < 4 && line+indent < i && data[line+indent] == ' ' {
+ indent++
+ indentIndex++
+ }
+ }
+
+ chunk := data[line+indentIndex : i]
+
+ if p.extensions&FencedCode != 0 {
+ // determine if in or out of codeblock
+ // if in codeblock, ignore normal list processing
+ _, marker := isFenceLine(chunk, nil, codeBlockMarker)
+ if marker != "" {
+ if codeBlockMarker == "" {
+ // start of codeblock
+ codeBlockMarker = marker
+ } else {
+ // end of codeblock.
+ codeBlockMarker = ""
+ }
+ }
+ // we are in a codeblock, write line, and continue
+ if codeBlockMarker != "" || marker != "" {
+ raw.Write(data[line+indentIndex : i])
+ line = i
+ continue gatherlines
+ }
+ }
+
+ // evaluate how this line fits in
+ switch {
+ // is this a nested list item?
+ case (p.uliPrefix(chunk) > 0 && !p.isHRule(chunk)) ||
+ p.oliPrefix(chunk) > 0 ||
+ p.dliPrefix(chunk) > 0:
+
+ // to be a nested list, it must be indented more
+ // if not, it is either a different kind of list
+ // or the next item in the same list
+ if indent <= itemIndent {
+ if p.listTypeChanged(chunk, flags) {
+ *flags |= ListItemEndOfList
+ } else if containsBlankLine {
+ *flags |= ListItemContainsBlock
+ }
+
+ break gatherlines
+ }
+
+ if containsBlankLine {
+ *flags |= ListItemContainsBlock
+ }
+
+ // is this the first item in the nested list?
+ if sublist == 0 {
+ sublist = raw.Len()
+ }
+
+ // is this a nested prefix heading?
+ case p.isPrefixHeading(chunk):
+ // if the heading is not indented, it is not nested in the list
+ // and thus ends the list
+ if containsBlankLine && indent < 4 {
+ *flags |= ListItemEndOfList
+ break gatherlines
+ }
+ *flags |= ListItemContainsBlock
+
+ // anything following an empty line is only part
+ // of this item if it is indented 4 spaces
+ // (regardless of the indentation of the beginning of the item)
+ case containsBlankLine && indent < 4:
+ if *flags&ListTypeDefinition != 0 && i < len(data)-1 {
+ // is the next item still a part of this list?
+ next := i
+ for next < len(data) && data[next] != '\n' {
+ next++
+ }
+ for next < len(data)-1 && data[next] == '\n' {
+ next++
+ }
+ if i < len(data)-1 && data[i] != ':' && data[next] != ':' {
+ *flags |= ListItemEndOfList
+ }
+ } else {
+ *flags |= ListItemEndOfList
+ }
+ break gatherlines
+
+ // a blank line means this should be parsed as a block
+ case containsBlankLine:
+ raw.WriteByte('\n')
+ *flags |= ListItemContainsBlock
+ }
+
+ // if this line was preceded by one or more blanks,
+ // re-introduce the blank into the buffer
+ if containsBlankLine {
+ containsBlankLine = false
+ raw.WriteByte('\n')
+ }
+
+ // add the line into the working buffer without prefix
+ raw.Write(data[line+indentIndex : i])
+
+ line = i
+ }
+
+ rawBytes := raw.Bytes()
+
+ block := p.addBlock(Item, nil)
+ block.ListFlags = *flags
+ block.Tight = false
+ block.BulletChar = bulletChar
+ block.Delimiter = '.' // Only '.' is possible in Markdown, but ')' will also be possible in CommonMark
+
+ // render the contents of the list item
+ if *flags&ListItemContainsBlock != 0 && *flags&ListTypeTerm == 0 {
+ // intermediate render of block item, except for definition term
+ if sublist > 0 {
+ p.block(rawBytes[:sublist])
+ p.block(rawBytes[sublist:])
+ } else {
+ p.block(rawBytes)
+ }
+ } else {
+ // intermediate render of inline item
+ if sublist > 0 {
+ child := p.addChild(Paragraph, 0)
+ child.content = rawBytes[:sublist]
+ p.block(rawBytes[sublist:])
+ } else {
+ child := p.addChild(Paragraph, 0)
+ child.content = rawBytes
+ }
+ }
+ return line
+}
+
+// render a single paragraph that has already been parsed out
+func (p *Markdown) renderParagraph(data []byte) {
+ if len(data) == 0 {
+ return
+ }
+
+ // trim leading spaces
+ beg := 0
+ for data[beg] == ' ' {
+ beg++
+ }
+
+ end := len(data)
+ // trim trailing newline
+ if data[len(data)-1] == '\n' {
+ end--
+ }
+
+ // trim trailing spaces
+ for end > beg && data[end-1] == ' ' {
+ end--
+ }
+
+ p.addBlock(Paragraph, data[beg:end])
+}
+
+func (p *Markdown) paragraph(data []byte) int {
+ // prev: index of 1st char of previous line
+ // line: index of 1st char of current line
+ // i: index of cursor/end of current line
+ var prev, line, i int
+ tabSize := TabSizeDefault
+ if p.extensions&TabSizeEight != 0 {
+ tabSize = TabSizeDouble
+ }
+ // keep going until we find something to mark the end of the paragraph
+ for i < len(data) {
+ // mark the beginning of the current line
+ prev = line
+ current := data[i:]
+ line = i
+
+ // did we find a reference or a footnote? If so, end a paragraph
+ // preceding it and report that we have consumed up to the end of that
+ // reference:
+ if refEnd := isReference(p, current, tabSize); refEnd > 0 {
+ p.renderParagraph(data[:i])
+ return i + refEnd
+ }
+
+ // did we find a blank line marking the end of the paragraph?
+ if n := p.isEmpty(current); n > 0 {
+ // is this blank line followed by a definition list item?
+ if p.extensions&DefinitionLists != 0 {
+ if i < len(data)-1 && data[i+1] == ':' {
+ return p.list(data[prev:], ListTypeDefinition)
+ }
+ }
+
+ p.renderParagraph(data[:i])
+ return i + n
+ }
+
+ // an underline under some text marks a heading, so our paragraph ended on prev line
+ if i > 0 {
+ if level := p.isUnderlinedHeading(current); level > 0 {
+ // render the paragraph
+ p.renderParagraph(data[:prev])
+
+ // ignore leading and trailing whitespace
+ eol := i - 1
+ for prev < eol && data[prev] == ' ' {
+ prev++
+ }
+ for eol > prev && data[eol-1] == ' ' {
+ eol--
+ }
+
+ id := ""
+ if p.extensions&AutoHeadingIDs != 0 {
+ id = SanitizedAnchorName(string(data[prev:eol]))
+ }
+
+ block := p.addBlock(Heading, data[prev:eol])
+ block.Level = level
+ block.HeadingID = id
+
+ // find the end of the underline
+ for i < len(data) && data[i] != '\n' {
+ i++
+ }
+ return i
+ }
+ }
+
+ // if the next line starts a block of HTML, then the paragraph ends here
+ if p.extensions&LaxHTMLBlocks != 0 {
+ if data[i] == '<' && p.html(current, false) > 0 {
+ // rewind to before the HTML block
+ p.renderParagraph(data[:i])
+ return i
+ }
+ }
+
+ // if there's a prefixed heading or a horizontal rule after this, paragraph is over
+ if p.isPrefixHeading(current) || p.isHRule(current) {
+ p.renderParagraph(data[:i])
+ return i
+ }
+
+ // if there's a fenced code block, paragraph is over
+ if p.extensions&FencedCode != 0 {
+ if p.fencedCodeBlock(current, false) > 0 {
+ p.renderParagraph(data[:i])
+ return i
+ }
+ }
+
+ // if there's a definition list item, prev line is a definition term
+ if p.extensions&DefinitionLists != 0 {
+ if p.dliPrefix(current) != 0 {
+ ret := p.list(data[prev:], ListTypeDefinition)
+ return ret
+ }
+ }
+
+ // if there's a list after this, paragraph is over
+ if p.extensions&NoEmptyLineBeforeBlock != 0 {
+ if p.uliPrefix(current) != 0 ||
+ p.oliPrefix(current) != 0 ||
+ p.quotePrefix(current) != 0 ||
+ p.codePrefix(current) != 0 {
+ p.renderParagraph(data[:i])
+ return i
+ }
+ }
+
+ // otherwise, scan to the beginning of the next line
+ nl := bytes.IndexByte(data[i:], '\n')
+ if nl >= 0 {
+ i += nl + 1
+ } else {
+ i += len(data[i:])
+ }
+ }
+
+ p.renderParagraph(data[:i])
+ return i
+}
+
+func skipChar(data []byte, start int, char byte) int {
+ i := start
+ for i < len(data) && data[i] == char {
+ i++
+ }
+ return i
+}
+
+func skipUntilChar(text []byte, start int, char byte) int {
+ i := start
+ for i < len(text) && text[i] != char {
+ i++
+ }
+ return i
+}
+
+// SanitizedAnchorName returns a sanitized anchor name for the given text.
+//
+// It implements the algorithm specified in the package comment.
+func SanitizedAnchorName(text string) string {
+ var anchorName []rune
+ futureDash := false
+ for _, r := range text {
+ switch {
+ case unicode.IsLetter(r) || unicode.IsNumber(r):
+ if futureDash && len(anchorName) > 0 {
+ anchorName = append(anchorName, '-')
+ }
+ futureDash = false
+ anchorName = append(anchorName, unicode.ToLower(r))
+ default:
+ futureDash = true
+ }
+ }
+ return string(anchorName)
+}
diff --git a/vendor/github.com/russross/blackfriday/v2/doc.go b/vendor/github.com/russross/blackfriday/v2/doc.go
new file mode 100644
index 000000000..57ff152a0
--- /dev/null
+++ b/vendor/github.com/russross/blackfriday/v2/doc.go
@@ -0,0 +1,46 @@
+// Package blackfriday is a markdown processor.
+//
+// It translates plain text with simple formatting rules into an AST, which can
+// then be further processed to HTML (provided by Blackfriday itself) or other
+// formats (provided by the community).
+//
+// The simplest way to invoke Blackfriday is to call the Run function. It will
+// take a text input and produce a text output in HTML (or other format).
+//
+// A slightly more sophisticated way to use Blackfriday is to create a Markdown
+// processor and to call Parse, which returns a syntax tree for the input
+// document. You can leverage Blackfriday's parsing for content extraction from
+// markdown documents. You can assign a custom renderer and set various options
+// to the Markdown processor.
+//
+// If you're interested in calling Blackfriday from the command line, see
+// https://github.com/russross/blackfriday-tool.
+//
+// Sanitized Anchor Names
+//
+// Blackfriday includes an algorithm for creating sanitized anchor names
+// corresponding to a given input text. This algorithm is used to create
+// anchors for headings when the AutoHeadingIDs extension is enabled. The
+// algorithm is specified below, so that other packages can create
+// compatible anchor names and links to those anchors.
+//
+// The algorithm iterates over the input text, interpreted as UTF-8,
+// one Unicode code point (rune) at a time. All runes that are letters (category L)
+// or numbers (category N) are considered valid characters. They are mapped to
+// lower case, and included in the output. All other runes are considered
+// invalid characters. Invalid characters that precede the first valid character,
+// as well as invalid characters that follow the last valid character,
+// are dropped completely. All other sequences of invalid characters
+// between two valid characters are replaced with a single dash character '-'.
+//
+// SanitizedAnchorName exposes this functionality, and can be used to
+// create compatible links to the anchor names generated by blackfriday.
+// This algorithm is also implemented in a small standalone package at
+// github.com/shurcooL/sanitized_anchor_name. It can be useful for clients
+// that want a small package and don't need full functionality of blackfriday.
+package blackfriday
+
+// NOTE: Keep Sanitized Anchor Name algorithm in sync with package
+// github.com/shurcooL/sanitized_anchor_name.
+// Otherwise, users of sanitized_anchor_name will get anchor names
+// that are incompatible with those generated by blackfriday.
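A minimal sketch of the anchor-name algorithm described above, using the exported SanitizedAnchorName from this package (the inputs and expected outputs are illustrative and not taken from the vendored files):

package main

import (
	"fmt"

	"github.com/russross/blackfriday/v2"
)

func main() {
	// Letters and digits are lowercased; runs of anything else collapse to a single dash.
	fmt.Println(blackfriday.SanitizedAnchorName("This is a Heading!")) // "this-is-a-heading"
	// Leading and trailing invalid runes are dropped entirely.
	fmt.Println(blackfriday.SanitizedAnchorName("  Podman & Buildah  ")) // "podman-buildah"
}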
diff --git a/vendor/github.com/russross/blackfriday/v2/entities.go b/vendor/github.com/russross/blackfriday/v2/entities.go
new file mode 100644
index 000000000..a2c3edb69
--- /dev/null
+++ b/vendor/github.com/russross/blackfriday/v2/entities.go
@@ -0,0 +1,2236 @@
+package blackfriday
+
+// Extracted from https://html.spec.whatwg.org/multipage/entities.json
+var entities = map[string]bool{
+ "&AElig": true,
+ "&AElig;": true,
+ "&AMP": true,
+ "&AMP;": true,
+ "&Aacute": true,
+ "&Aacute;": true,
+ "&Abreve;": true,
+ "&Acirc": true,
+ "&Acirc;": true,
+ "&Acy;": true,
+ "&Afr;": true,
+ "&Agrave": true,
+ "&Agrave;": true,
+ "&Alpha;": true,
+ "&Amacr;": true,
+ "&And;": true,
+ "&Aogon;": true,
+ "&Aopf;": true,
+ "&ApplyFunction;": true,
+ "&Aring": true,
+ "&Aring;": true,
+ "&Ascr;": true,
+ "&Assign;": true,
+ "&Atilde": true,
+ "&Atilde;": true,
+ "&Auml": true,
+ "&Auml;": true,
+ "&Backslash;": true,
+ "&Barv;": true,
+ "&Barwed;": true,
+ "&Bcy;": true,
+ "&Because;": true,
+ "&Bernoullis;": true,
+ "&Beta;": true,
+ "&Bfr;": true,
+ "&Bopf;": true,
+ "&Breve;": true,
+ "&Bscr;": true,
+ "&Bumpeq;": true,
+ "&CHcy;": true,
+ "&COPY": true,
+ "&COPY;": true,
+ "&Cacute;": true,
+ "&Cap;": true,
+ "&CapitalDifferentialD;": true,
+ "&Cayleys;": true,
+ "&Ccaron;": true,
+ "&Ccedil": true,
+ "&Ccedil;": true,
+ "&Ccirc;": true,
+ "&Cconint;": true,
+ "&Cdot;": true,
+ "&Cedilla;": true,
+ "&CenterDot;": true,
+ "&Cfr;": true,
+ "&Chi;": true,
+ "&CircleDot;": true,
+ "&CircleMinus;": true,
+ "&CirclePlus;": true,
+ "&CircleTimes;": true,
+ "&ClockwiseContourIntegral;": true,
+ "&CloseCurlyDoubleQuote;": true,
+ "&CloseCurlyQuote;": true,
+ "&Colon;": true,
+ "&Colone;": true,
+ "&Congruent;": true,
+ "&Conint;": true,
+ "&ContourIntegral;": true,
+ "&Copf;": true,
+ "&Coproduct;": true,
+ "&CounterClockwiseContourIntegral;": true,
+ "&Cross;": true,
+ "&Cscr;": true,
+ "&Cup;": true,
+ "&CupCap;": true,
+ "&DD;": true,
+ "&DDotrahd;": true,
+ "&DJcy;": true,
+ "&DScy;": true,
+ "&DZcy;": true,
+ "&Dagger;": true,
+ "&Darr;": true,
+ "&Dashv;": true,
+ "&Dcaron;": true,
+ "&Dcy;": true,
+ "&Del;": true,
+ "&Delta;": true,
+ "&Dfr;": true,
+ "&DiacriticalAcute;": true,
+ "&DiacriticalDot;": true,
+ "&DiacriticalDoubleAcute;": true,
+ "&DiacriticalGrave;": true,
+ "&DiacriticalTilde;": true,
+ "&Diamond;": true,
+ "&DifferentialD;": true,
+ "&Dopf;": true,
+ "&Dot;": true,
+ "&DotDot;": true,
+ "&DotEqual;": true,
+ "&DoubleContourIntegral;": true,
+ "&DoubleDot;": true,
+ "&DoubleDownArrow;": true,
+ "&DoubleLeftArrow;": true,
+ "&DoubleLeftRightArrow;": true,
+ "&DoubleLeftTee;": true,
+ "&DoubleLongLeftArrow;": true,
+ "&DoubleLongLeftRightArrow;": true,
+ "&DoubleLongRightArrow;": true,
+ "&DoubleRightArrow;": true,
+ "&DoubleRightTee;": true,
+ "&DoubleUpArrow;": true,
+ "&DoubleUpDownArrow;": true,
+ "&DoubleVerticalBar;": true,
+ "&DownArrow;": true,
+ "&DownArrowBar;": true,
+ "&DownArrowUpArrow;": true,
+ "&DownBreve;": true,
+ "&DownLeftRightVector;": true,
+ "&DownLeftTeeVector;": true,
+ "&DownLeftVector;": true,
+ "&DownLeftVectorBar;": true,
+ "&DownRightTeeVector;": true,
+ "&DownRightVector;": true,
+ "&DownRightVectorBar;": true,
+ "&DownTee;": true,
+ "&DownTeeArrow;": true,
+ "&Downarrow;": true,
+ "&Dscr;": true,
+ "&Dstrok;": true,
+ "&ENG;": true,
+ "&ETH": true,
+ "&ETH;": true,
+ "&Eacute": true,
+ "&Eacute;": true,
+ "&Ecaron;": true,
+ "&Ecirc": true,
+ "&Ecirc;": true,
+ "&Ecy;": true,
+ "&Edot;": true,
+ "&Efr;": true,
+ "&Egrave": true,
+ "&Egrave;": true,
+ "&Element;": true,
+ "&Emacr;": true,
+ "&EmptySmallSquare;": true,
+ "&EmptyVerySmallSquare;": true,
+ "&Eogon;": true,
+ "&Eopf;": true,
+ "&Epsilon;": true,
+ "&Equal;": true,
+ "&EqualTilde;": true,
+ "&Equilibrium;": true,
+ "&Escr;": true,
+ "&Esim;": true,
+ "&Eta;": true,
+ "&Euml": true,
+ "&Euml;": true,
+ "&Exists;": true,
+ "&ExponentialE;": true,
+ "&Fcy;": true,
+ "&Ffr;": true,
+ "&FilledSmallSquare;": true,
+ "&FilledVerySmallSquare;": true,
+ "&Fopf;": true,
+ "&ForAll;": true,
+ "&Fouriertrf;": true,
+ "&Fscr;": true,
+ "&GJcy;": true,
+ "&GT": true,
+ "&GT;": true,
+ "&Gamma;": true,
+ "&Gammad;": true,
+ "&Gbreve;": true,
+ "&Gcedil;": true,
+ "&Gcirc;": true,
+ "&Gcy;": true,
+ "&Gdot;": true,
+ "&Gfr;": true,
+ "&Gg;": true,
+ "&Gopf;": true,
+ "&GreaterEqual;": true,
+ "&GreaterEqualLess;": true,
+ "&GreaterFullEqual;": true,
+ "&GreaterGreater;": true,
+ "&GreaterLess;": true,
+ "&GreaterSlantEqual;": true,
+ "&GreaterTilde;": true,
+ "&Gscr;": true,
+ "&Gt;": true,
+ "&HARDcy;": true,
+ "&Hacek;": true,
+ "&Hat;": true,
+ "&Hcirc;": true,
+ "&Hfr;": true,
+ "&HilbertSpace;": true,
+ "&Hopf;": true,
+ "&HorizontalLine;": true,
+ "&Hscr;": true,
+ "&Hstrok;": true,
+ "&HumpDownHump;": true,
+ "&HumpEqual;": true,
+ "&IEcy;": true,
+ "&IJlig;": true,
+ "&IOcy;": true,
+ "&Iacute": true,
+ "&Iacute;": true,
+ "&Icirc": true,
+ "&Icirc;": true,
+ "&Icy;": true,
+ "&Idot;": true,
+ "&Ifr;": true,
+ "&Igrave": true,
+ "&Igrave;": true,
+ "&Im;": true,
+ "&Imacr;": true,
+ "&ImaginaryI;": true,
+ "&Implies;": true,
+ "&Int;": true,
+ "&Integral;": true,
+ "&Intersection;": true,
+ "&InvisibleComma;": true,
+ "&InvisibleTimes;": true,
+ "&Iogon;": true,
+ "&Iopf;": true,
+ "&Iota;": true,
+ "&Iscr;": true,
+ "&Itilde;": true,
+ "&Iukcy;": true,
+ "&Iuml": true,
+ "&Iuml;": true,
+ "&Jcirc;": true,
+ "&Jcy;": true,
+ "&Jfr;": true,
+ "&Jopf;": true,
+ "&Jscr;": true,
+ "&Jsercy;": true,
+ "&Jukcy;": true,
+ "&KHcy;": true,
+ "&KJcy;": true,
+ "&Kappa;": true,
+ "&Kcedil;": true,
+ "&Kcy;": true,
+ "&Kfr;": true,
+ "&Kopf;": true,
+ "&Kscr;": true,
+ "&LJcy;": true,
+ "&LT": true,
+ "&LT;": true,
+ "&Lacute;": true,
+ "&Lambda;": true,
+ "&Lang;": true,
+ "&Laplacetrf;": true,
+ "&Larr;": true,
+ "&Lcaron;": true,
+ "&Lcedil;": true,
+ "&Lcy;": true,
+ "&LeftAngleBracket;": true,
+ "&LeftArrow;": true,
+ "&LeftArrowBar;": true,
+ "&LeftArrowRightArrow;": true,
+ "&LeftCeiling;": true,
+ "&LeftDoubleBracket;": true,
+ "&LeftDownTeeVector;": true,
+ "&LeftDownVector;": true,
+ "&LeftDownVectorBar;": true,
+ "&LeftFloor;": true,
+ "&LeftRightArrow;": true,
+ "&LeftRightVector;": true,
+ "&LeftTee;": true,
+ "&LeftTeeArrow;": true,
+ "&LeftTeeVector;": true,
+ "&LeftTriangle;": true,
+ "&LeftTriangleBar;": true,
+ "&LeftTriangleEqual;": true,
+ "&LeftUpDownVector;": true,
+ "&LeftUpTeeVector;": true,
+ "&LeftUpVector;": true,
+ "&LeftUpVectorBar;": true,
+ "&LeftVector;": true,
+ "&LeftVectorBar;": true,
+ "&Leftarrow;": true,
+ "&Leftrightarrow;": true,
+ "&LessEqualGreater;": true,
+ "&LessFullEqual;": true,
+ "&LessGreater;": true,
+ "&LessLess;": true,
+ "&LessSlantEqual;": true,
+ "&LessTilde;": true,
+ "&Lfr;": true,
+ "&Ll;": true,
+ "&Lleftarrow;": true,
+ "&Lmidot;": true,
+ "&LongLeftArrow;": true,
+ "&LongLeftRightArrow;": true,
+ "&LongRightArrow;": true,
+ "&Longleftarrow;": true,
+ "&Longleftrightarrow;": true,
+ "&Longrightarrow;": true,
+ "&Lopf;": true,
+ "&LowerLeftArrow;": true,
+ "&LowerRightArrow;": true,
+ "&Lscr;": true,
+ "&Lsh;": true,
+ "&Lstrok;": true,
+ "&Lt;": true,
+ "&Map;": true,
+ "&Mcy;": true,
+ "&MediumSpace;": true,
+ "&Mellintrf;": true,
+ "&Mfr;": true,
+ "&MinusPlus;": true,
+ "&Mopf;": true,
+ "&Mscr;": true,
+ "&Mu;": true,
+ "&NJcy;": true,
+ "&Nacute;": true,
+ "&Ncaron;": true,
+ "&Ncedil;": true,
+ "&Ncy;": true,
+ "&NegativeMediumSpace;": true,
+ "&NegativeThickSpace;": true,
+ "&NegativeThinSpace;": true,
+ "&NegativeVeryThinSpace;": true,
+ "&NestedGreaterGreater;": true,
+ "&NestedLessLess;": true,
+ "&NewLine;": true,
+ "&Nfr;": true,
+ "&NoBreak;": true,
+ "&NonBreakingSpace;": true,
+ "&Nopf;": true,
+ "&Not;": true,
+ "&NotCongruent;": true,
+ "&NotCupCap;": true,
+ "&NotDoubleVerticalBar;": true,
+ "&NotElement;": true,
+ "&NotEqual;": true,
+ "&NotEqualTilde;": true,
+ "&NotExists;": true,
+ "&NotGreater;": true,
+ "&NotGreaterEqual;": true,
+ "&NotGreaterFullEqual;": true,
+ "&NotGreaterGreater;": true,
+ "&NotGreaterLess;": true,
+ "&NotGreaterSlantEqual;": true,
+ "&NotGreaterTilde;": true,
+ "&NotHumpDownHump;": true,
+ "&NotHumpEqual;": true,
+ "&NotLeftTriangle;": true,
+ "&NotLeftTriangleBar;": true,
+ "&NotLeftTriangleEqual;": true,
+ "&NotLess;": true,
+ "&NotLessEqual;": true,
+ "&NotLessGreater;": true,
+ "&NotLessLess;": true,
+ "&NotLessSlantEqual;": true,
+ "&NotLessTilde;": true,
+ "&NotNestedGreaterGreater;": true,
+ "&NotNestedLessLess;": true,
+ "&NotPrecedes;": true,
+ "&NotPrecedesEqual;": true,
+ "&NotPrecedesSlantEqual;": true,
+ "&NotReverseElement;": true,
+ "&NotRightTriangle;": true,
+ "&NotRightTriangleBar;": true,
+ "&NotRightTriangleEqual;": true,
+ "&NotSquareSubset;": true,
+ "&NotSquareSubsetEqual;": true,
+ "&NotSquareSuperset;": true,
+ "&NotSquareSupersetEqual;": true,
+ "&NotSubset;": true,
+ "&NotSubsetEqual;": true,
+ "&NotSucceeds;": true,
+ "&NotSucceedsEqual;": true,
+ "&NotSucceedsSlantEqual;": true,
+ "&NotSucceedsTilde;": true,
+ "&NotSuperset;": true,
+ "&NotSupersetEqual;": true,
+ "&NotTilde;": true,
+ "&NotTildeEqual;": true,
+ "&NotTildeFullEqual;": true,
+ "&NotTildeTilde;": true,
+ "&NotVerticalBar;": true,
+ "&Nscr;": true,
+ "&Ntilde": true,
+ "&Ntilde;": true,
+ "&Nu;": true,
+ "&OElig;": true,
+ "&Oacute": true,
+ "&Oacute;": true,
+ "&Ocirc": true,
+ "&Ocirc;": true,
+ "&Ocy;": true,
+ "&Odblac;": true,
+ "&Ofr;": true,
+ "&Ograve": true,
+ "&Ograve;": true,
+ "&Omacr;": true,
+ "&Omega;": true,
+ "&Omicron;": true,
+ "&Oopf;": true,
+ "&OpenCurlyDoubleQuote;": true,
+ "&OpenCurlyQuote;": true,
+ "&Or;": true,
+ "&Oscr;": true,
+ "&Oslash": true,
+ "&Oslash;": true,
+ "&Otilde": true,
+ "&Otilde;": true,
+ "&Otimes;": true,
+ "&Ouml": true,
+ "&Ouml;": true,
+ "&OverBar;": true,
+ "&OverBrace;": true,
+ "&OverBracket;": true,
+ "&OverParenthesis;": true,
+ "&PartialD;": true,
+ "&Pcy;": true,
+ "&Pfr;": true,
+ "&Phi;": true,
+ "&Pi;": true,
+ "&PlusMinus;": true,
+ "&Poincareplane;": true,
+ "&Popf;": true,
+ "&Pr;": true,
+ "&Precedes;": true,
+ "&PrecedesEqual;": true,
+ "&PrecedesSlantEqual;": true,
+ "&PrecedesTilde;": true,
+ "&Prime;": true,
+ "&Product;": true,
+ "&Proportion;": true,
+ "&Proportional;": true,
+ "&Pscr;": true,
+ "&Psi;": true,
+ "&QUOT": true,
+ "&QUOT;": true,
+ "&Qfr;": true,
+ "&Qopf;": true,
+ "&Qscr;": true,
+ "&RBarr;": true,
+ "&REG": true,
+ "&REG;": true,
+ "&Racute;": true,
+ "&Rang;": true,
+ "&Rarr;": true,
+ "&Rarrtl;": true,
+ "&Rcaron;": true,
+ "&Rcedil;": true,
+ "&Rcy;": true,
+ "&Re;": true,
+ "&ReverseElement;": true,
+ "&ReverseEquilibrium;": true,
+ "&ReverseUpEquilibrium;": true,
+ "&Rfr;": true,
+ "&Rho;": true,
+ "&RightAngleBracket;": true,
+ "&RightArrow;": true,
+ "&RightArrowBar;": true,
+ "&RightArrowLeftArrow;": true,
+ "&RightCeiling;": true,
+ "&RightDoubleBracket;": true,
+ "&RightDownTeeVector;": true,
+ "&RightDownVector;": true,
+ "&RightDownVectorBar;": true,
+ "&RightFloor;": true,
+ "&RightTee;": true,
+ "&RightTeeArrow;": true,
+ "&RightTeeVector;": true,
+ "&RightTriangle;": true,
+ "&RightTriangleBar;": true,
+ "&RightTriangleEqual;": true,
+ "&RightUpDownVector;": true,
+ "&RightUpTeeVector;": true,
+ "&RightUpVector;": true,
+ "&RightUpVectorBar;": true,
+ "&RightVector;": true,
+ "&RightVectorBar;": true,
+ "&Rightarrow;": true,
+ "&Ropf;": true,
+ "&RoundImplies;": true,
+ "&Rrightarrow;": true,
+ "&Rscr;": true,
+ "&Rsh;": true,
+ "&RuleDelayed;": true,
+ "&SHCHcy;": true,
+ "&SHcy;": true,
+ "&SOFTcy;": true,
+ "&Sacute;": true,
+ "&Sc;": true,
+ "&Scaron;": true,
+ "&Scedil;": true,
+ "&Scirc;": true,
+ "&Scy;": true,
+ "&Sfr;": true,
+ "&ShortDownArrow;": true,
+ "&ShortLeftArrow;": true,
+ "&ShortRightArrow;": true,
+ "&ShortUpArrow;": true,
+ "&Sigma;": true,
+ "&SmallCircle;": true,
+ "&Sopf;": true,
+ "&Sqrt;": true,
+ "&Square;": true,
+ "&SquareIntersection;": true,
+ "&SquareSubset;": true,
+ "&SquareSubsetEqual;": true,
+ "&SquareSuperset;": true,
+ "&SquareSupersetEqual;": true,
+ "&SquareUnion;": true,
+ "&Sscr;": true,
+ "&Star;": true,
+ "&Sub;": true,
+ "&Subset;": true,
+ "&SubsetEqual;": true,
+ "&Succeeds;": true,
+ "&SucceedsEqual;": true,
+ "&SucceedsSlantEqual;": true,
+ "&SucceedsTilde;": true,
+ "&SuchThat;": true,
+ "&Sum;": true,
+ "&Sup;": true,
+ "&Superset;": true,
+ "&SupersetEqual;": true,
+ "&Supset;": true,
+ "&THORN": true,
+ "&THORN;": true,
+ "&TRADE;": true,
+ "&TSHcy;": true,
+ "&TScy;": true,
+ "&Tab;": true,
+ "&Tau;": true,
+ "&Tcaron;": true,
+ "&Tcedil;": true,
+ "&Tcy;": true,
+ "&Tfr;": true,
+ "&Therefore;": true,
+ "&Theta;": true,
+ "&ThickSpace;": true,
+ "&ThinSpace;": true,
+ "&Tilde;": true,
+ "&TildeEqual;": true,
+ "&TildeFullEqual;": true,
+ "&TildeTilde;": true,
+ "&Topf;": true,
+ "&TripleDot;": true,
+ "&Tscr;": true,
+ "&Tstrok;": true,
+ "&Uacute": true,
+ "&Uacute;": true,
+ "&Uarr;": true,
+ "&Uarrocir;": true,
+ "&Ubrcy;": true,
+ "&Ubreve;": true,
+ "&Ucirc": true,
+ "&Ucirc;": true,
+ "&Ucy;": true,
+ "&Udblac;": true,
+ "&Ufr;": true,
+ "&Ugrave": true,
+ "&Ugrave;": true,
+ "&Umacr;": true,
+ "&UnderBar;": true,
+ "&UnderBrace;": true,
+ "&UnderBracket;": true,
+ "&UnderParenthesis;": true,
+ "&Union;": true,
+ "&UnionPlus;": true,
+ "&Uogon;": true,
+ "&Uopf;": true,
+ "&UpArrow;": true,
+ "&UpArrowBar;": true,
+ "&UpArrowDownArrow;": true,
+ "&UpDownArrow;": true,
+ "&UpEquilibrium;": true,
+ "&UpTee;": true,
+ "&UpTeeArrow;": true,
+ "&Uparrow;": true,
+ "&Updownarrow;": true,
+ "&UpperLeftArrow;": true,
+ "&UpperRightArrow;": true,
+ "&Upsi;": true,
+ "&Upsilon;": true,
+ "&Uring;": true,
+ "&Uscr;": true,
+ "&Utilde;": true,
+ "&Uuml": true,
+ "&Uuml;": true,
+ "&VDash;": true,
+ "&Vbar;": true,
+ "&Vcy;": true,
+ "&Vdash;": true,
+ "&Vdashl;": true,
+ "&Vee;": true,
+ "&Verbar;": true,
+ "&Vert;": true,
+ "&VerticalBar;": true,
+ "&VerticalLine;": true,
+ "&VerticalSeparator;": true,
+ "&VerticalTilde;": true,
+ "&VeryThinSpace;": true,
+ "&Vfr;": true,
+ "&Vopf;": true,
+ "&Vscr;": true,
+ "&Vvdash;": true,
+ "&Wcirc;": true,
+ "&Wedge;": true,
+ "&Wfr;": true,
+ "&Wopf;": true,
+ "&Wscr;": true,
+ "&Xfr;": true,
+ "&Xi;": true,
+ "&Xopf;": true,
+ "&Xscr;": true,
+ "&YAcy;": true,
+ "&YIcy;": true,
+ "&YUcy;": true,
+ "&Yacute": true,
+ "&Yacute;": true,
+ "&Ycirc;": true,
+ "&Ycy;": true,
+ "&Yfr;": true,
+ "&Yopf;": true,
+ "&Yscr;": true,
+ "&Yuml;": true,
+ "&ZHcy;": true,
+ "&Zacute;": true,
+ "&Zcaron;": true,
+ "&Zcy;": true,
+ "&Zdot;": true,
+ "&ZeroWidthSpace;": true,
+ "&Zeta;": true,
+ "&Zfr;": true,
+ "&Zopf;": true,
+ "&Zscr;": true,
+ "&aacute": true,
+ "&aacute;": true,
+ "&abreve;": true,
+ "&ac;": true,
+ "&acE;": true,
+ "&acd;": true,
+ "&acirc": true,
+ "&acirc;": true,
+ "&acute": true,
+ "&acute;": true,
+ "&acy;": true,
+ "&aelig": true,
+ "&aelig;": true,
+ "&af;": true,
+ "&afr;": true,
+ "&agrave": true,
+ "&agrave;": true,
+ "&alefsym;": true,
+ "&aleph;": true,
+ "&alpha;": true,
+ "&amacr;": true,
+ "&amalg;": true,
+ "&amp": true,
+ "&amp;": true,
+ "&and;": true,
+ "&andand;": true,
+ "&andd;": true,
+ "&andslope;": true,
+ "&andv;": true,
+ "&ang;": true,
+ "&ange;": true,
+ "&angle;": true,
+ "&angmsd;": true,
+ "&angmsdaa;": true,
+ "&angmsdab;": true,
+ "&angmsdac;": true,
+ "&angmsdad;": true,
+ "&angmsdae;": true,
+ "&angmsdaf;": true,
+ "&angmsdag;": true,
+ "&angmsdah;": true,
+ "&angrt;": true,
+ "&angrtvb;": true,
+ "&angrtvbd;": true,
+ "&angsph;": true,
+ "&angst;": true,
+ "&angzarr;": true,
+ "&aogon;": true,
+ "&aopf;": true,
+ "&ap;": true,
+ "&apE;": true,
+ "&apacir;": true,
+ "&ape;": true,
+ "&apid;": true,
+ "&apos;": true,
+ "&approx;": true,
+ "&approxeq;": true,
+ "&aring": true,
+ "&aring;": true,
+ "&ascr;": true,
+ "&ast;": true,
+ "&asymp;": true,
+ "&asympeq;": true,
+ "&atilde": true,
+ "&atilde;": true,
+ "&auml": true,
+ "&auml;": true,
+ "&awconint;": true,
+ "&awint;": true,
+ "&bNot;": true,
+ "&backcong;": true,
+ "&backepsilon;": true,
+ "&backprime;": true,
+ "&backsim;": true,
+ "&backsimeq;": true,
+ "&barvee;": true,
+ "&barwed;": true,
+ "&barwedge;": true,
+ "&bbrk;": true,
+ "&bbrktbrk;": true,
+ "&bcong;": true,
+ "&bcy;": true,
+ "&bdquo;": true,
+ "&becaus;": true,
+ "&because;": true,
+ "&bemptyv;": true,
+ "&bepsi;": true,
+ "&bernou;": true,
+ "&beta;": true,
+ "&beth;": true,
+ "&between;": true,
+ "&bfr;": true,
+ "&bigcap;": true,
+ "&bigcirc;": true,
+ "&bigcup;": true,
+ "&bigodot;": true,
+ "&bigoplus;": true,
+ "&bigotimes;": true,
+ "&bigsqcup;": true,
+ "&bigstar;": true,
+ "&bigtriangledown;": true,
+ "&bigtriangleup;": true,
+ "&biguplus;": true,
+ "&bigvee;": true,
+ "&bigwedge;": true,
+ "&bkarow;": true,
+ "&blacklozenge;": true,
+ "&blacksquare;": true,
+ "&blacktriangle;": true,
+ "&blacktriangledown;": true,
+ "&blacktriangleleft;": true,
+ "&blacktriangleright;": true,
+ "&blank;": true,
+ "&blk12;": true,
+ "&blk14;": true,
+ "&blk34;": true,
+ "&block;": true,
+ "&bne;": true,
+ "&bnequiv;": true,
+ "&bnot;": true,
+ "&bopf;": true,
+ "&bot;": true,
+ "&bottom;": true,
+ "&bowtie;": true,
+ "&boxDL;": true,
+ "&boxDR;": true,
+ "&boxDl;": true,
+ "&boxDr;": true,
+ "&boxH;": true,
+ "&boxHD;": true,
+ "&boxHU;": true,
+ "&boxHd;": true,
+ "&boxHu;": true,
+ "&boxUL;": true,
+ "&boxUR;": true,
+ "&boxUl;": true,
+ "&boxUr;": true,
+ "&boxV;": true,
+ "&boxVH;": true,
+ "&boxVL;": true,
+ "&boxVR;": true,
+ "&boxVh;": true,
+ "&boxVl;": true,
+ "&boxVr;": true,
+ "&boxbox;": true,
+ "&boxdL;": true,
+ "&boxdR;": true,
+ "&boxdl;": true,
+ "&boxdr;": true,
+ "&boxh;": true,
+ "&boxhD;": true,
+ "&boxhU;": true,
+ "&boxhd;": true,
+ "&boxhu;": true,
+ "&boxminus;": true,
+ "&boxplus;": true,
+ "&boxtimes;": true,
+ "&boxuL;": true,
+ "&boxuR;": true,
+ "&boxul;": true,
+ "&boxur;": true,
+ "&boxv;": true,
+ "&boxvH;": true,
+ "&boxvL;": true,
+ "&boxvR;": true,
+ "&boxvh;": true,
+ "&boxvl;": true,
+ "&boxvr;": true,
+ "&bprime;": true,
+ "&breve;": true,
+ "&brvbar": true,
+ "&brvbar;": true,
+ "&bscr;": true,
+ "&bsemi;": true,
+ "&bsim;": true,
+ "&bsime;": true,
+ "&bsol;": true,
+ "&bsolb;": true,
+ "&bsolhsub;": true,
+ "&bull;": true,
+ "&bullet;": true,
+ "&bump;": true,
+ "&bumpE;": true,
+ "&bumpe;": true,
+ "&bumpeq;": true,
+ "&cacute;": true,
+ "&cap;": true,
+ "&capand;": true,
+ "&capbrcup;": true,
+ "&capcap;": true,
+ "&capcup;": true,
+ "&capdot;": true,
+ "&caps;": true,
+ "&caret;": true,
+ "&caron;": true,
+ "&ccaps;": true,
+ "&ccaron;": true,
+ "&ccedil": true,
+ "&ccedil;": true,
+ "&ccirc;": true,
+ "&ccups;": true,
+ "&ccupssm;": true,
+ "&cdot;": true,
+ "&cedil": true,
+ "&cedil;": true,
+ "&cemptyv;": true,
+ "&cent": true,
+ "&cent;": true,
+ "&centerdot;": true,
+ "&cfr;": true,
+ "&chcy;": true,
+ "&check;": true,
+ "&checkmark;": true,
+ "&chi;": true,
+ "&cir;": true,
+ "&cirE;": true,
+ "&circ;": true,
+ "&circeq;": true,
+ "&circlearrowleft;": true,
+ "&circlearrowright;": true,
+ "&circledR;": true,
+ "&circledS;": true,
+ "&circledast;": true,
+ "&circledcirc;": true,
+ "&circleddash;": true,
+ "&cire;": true,
+ "&cirfnint;": true,
+ "&cirmid;": true,
+ "&cirscir;": true,
+ "&clubs;": true,
+ "&clubsuit;": true,
+ "&colon;": true,
+ "&colone;": true,
+ "&coloneq;": true,
+ "&comma;": true,
+ "&commat;": true,
+ "&comp;": true,
+ "&compfn;": true,
+ "&complement;": true,
+ "&complexes;": true,
+ "&cong;": true,
+ "&congdot;": true,
+ "&conint;": true,
+ "&copf;": true,
+ "&coprod;": true,
+ "&copy": true,
+ "&copy;": true,
+ "&copysr;": true,
+ "&crarr;": true,
+ "&cross;": true,
+ "&cscr;": true,
+ "&csub;": true,
+ "&csube;": true,
+ "&csup;": true,
+ "&csupe;": true,
+ "&ctdot;": true,
+ "&cudarrl;": true,
+ "&cudarrr;": true,
+ "&cuepr;": true,
+ "&cuesc;": true,
+ "&cularr;": true,
+ "&cularrp;": true,
+ "&cup;": true,
+ "&cupbrcap;": true,
+ "&cupcap;": true,
+ "&cupcup;": true,
+ "&cupdot;": true,
+ "&cupor;": true,
+ "&cups;": true,
+ "&curarr;": true,
+ "&curarrm;": true,
+ "&curlyeqprec;": true,
+ "&curlyeqsucc;": true,
+ "&curlyvee;": true,
+ "&curlywedge;": true,
+ "&curren": true,
+ "&curren;": true,
+ "&curvearrowleft;": true,
+ "&curvearrowright;": true,
+ "&cuvee;": true,
+ "&cuwed;": true,
+ "&cwconint;": true,
+ "&cwint;": true,
+ "&cylcty;": true,
+ "&dArr;": true,
+ "&dHar;": true,
+ "&dagger;": true,
+ "&daleth;": true,
+ "&darr;": true,
+ "&dash;": true,
+ "&dashv;": true,
+ "&dbkarow;": true,
+ "&dblac;": true,
+ "&dcaron;": true,
+ "&dcy;": true,
+ "&dd;": true,
+ "&ddagger;": true,
+ "&ddarr;": true,
+ "&ddotseq;": true,
+ "&deg": true,
+ "&deg;": true,
+ "&delta;": true,
+ "&demptyv;": true,
+ "&dfisht;": true,
+ "&dfr;": true,
+ "&dharl;": true,
+ "&dharr;": true,
+ "&diam;": true,
+ "&diamond;": true,
+ "&diamondsuit;": true,
+ "&diams;": true,
+ "&die;": true,
+ "&digamma;": true,
+ "&disin;": true,
+ "&div;": true,
+ "&divide": true,
+ "&divide;": true,
+ "&divideontimes;": true,
+ "&divonx;": true,
+ "&djcy;": true,
+ "&dlcorn;": true,
+ "&dlcrop;": true,
+ "&dollar;": true,
+ "&dopf;": true,
+ "&dot;": true,
+ "&doteq;": true,
+ "&doteqdot;": true,
+ "&dotminus;": true,
+ "&dotplus;": true,
+ "&dotsquare;": true,
+ "&doublebarwedge;": true,
+ "&downarrow;": true,
+ "&downdownarrows;": true,
+ "&downharpoonleft;": true,
+ "&downharpoonright;": true,
+ "&drbkarow;": true,
+ "&drcorn;": true,
+ "&drcrop;": true,
+ "&dscr;": true,
+ "&dscy;": true,
+ "&dsol;": true,
+ "&dstrok;": true,
+ "&dtdot;": true,
+ "&dtri;": true,
+ "&dtrif;": true,
+ "&duarr;": true,
+ "&duhar;": true,
+ "&dwangle;": true,
+ "&dzcy;": true,
+ "&dzigrarr;": true,
+ "&eDDot;": true,
+ "&eDot;": true,
+ "&eacute": true,
+ "&eacute;": true,
+ "&easter;": true,
+ "&ecaron;": true,
+ "&ecir;": true,
+ "&ecirc": true,
+ "&ecirc;": true,
+ "&ecolon;": true,
+ "&ecy;": true,
+ "&edot;": true,
+ "&ee;": true,
+ "&efDot;": true,
+ "&efr;": true,
+ "&eg;": true,
+ "&egrave": true,
+ "&egrave;": true,
+ "&egs;": true,
+ "&egsdot;": true,
+ "&el;": true,
+ "&elinters;": true,
+ "&ell;": true,
+ "&els;": true,
+ "&elsdot;": true,
+ "&emacr;": true,
+ "&empty;": true,
+ "&emptyset;": true,
+ "&emptyv;": true,
+ "&emsp13;": true,
+ "&emsp14;": true,
+ "&emsp;": true,
+ "&eng;": true,
+ "&ensp;": true,
+ "&eogon;": true,
+ "&eopf;": true,
+ "&epar;": true,
+ "&eparsl;": true,
+ "&eplus;": true,
+ "&epsi;": true,
+ "&epsilon;": true,
+ "&epsiv;": true,
+ "&eqcirc;": true,
+ "&eqcolon;": true,
+ "&eqsim;": true,
+ "&eqslantgtr;": true,
+ "&eqslantless;": true,
+ "&equals;": true,
+ "&equest;": true,
+ "&equiv;": true,
+ "&equivDD;": true,
+ "&eqvparsl;": true,
+ "&erDot;": true,
+ "&erarr;": true,
+ "&escr;": true,
+ "&esdot;": true,
+ "&esim;": true,
+ "&eta;": true,
+ "&eth": true,
+ "&eth;": true,
+ "&euml": true,
+ "&euml;": true,
+ "&euro;": true,
+ "&excl;": true,
+ "&exist;": true,
+ "&expectation;": true,
+ "&exponentiale;": true,
+ "&fallingdotseq;": true,
+ "&fcy;": true,
+ "&female;": true,
+ "&ffilig;": true,
+ "&fflig;": true,
+ "&ffllig;": true,
+ "&ffr;": true,
+ "&filig;": true,
+ "&fjlig;": true,
+ "&flat;": true,
+ "&fllig;": true,
+ "&fltns;": true,
+ "&fnof;": true,
+ "&fopf;": true,
+ "&forall;": true,
+ "&fork;": true,
+ "&forkv;": true,
+ "&fpartint;": true,
+ "&frac12": true,
+ "&frac12;": true,
+ "&frac13;": true,
+ "&frac14": true,
+ "&frac14;": true,
+ "&frac15;": true,
+ "&frac16;": true,
+ "&frac18;": true,
+ "&frac23;": true,
+ "&frac25;": true,
+ "&frac34": true,
+ "&frac34;": true,
+ "&frac35;": true,
+ "&frac38;": true,
+ "&frac45;": true,
+ "&frac56;": true,
+ "&frac58;": true,
+ "&frac78;": true,
+ "&frasl;": true,
+ "&frown;": true,
+ "&fscr;": true,
+ "&gE;": true,
+ "&gEl;": true,
+ "&gacute;": true,
+ "&gamma;": true,
+ "&gammad;": true,
+ "&gap;": true,
+ "&gbreve;": true,
+ "&gcirc;": true,
+ "&gcy;": true,
+ "&gdot;": true,
+ "&ge;": true,
+ "&gel;": true,
+ "&geq;": true,
+ "&geqq;": true,
+ "&geqslant;": true,
+ "&ges;": true,
+ "&gescc;": true,
+ "&gesdot;": true,
+ "&gesdoto;": true,
+ "&gesdotol;": true,
+ "&gesl;": true,
+ "&gesles;": true,
+ "&gfr;": true,
+ "&gg;": true,
+ "&ggg;": true,
+ "&gimel;": true,
+ "&gjcy;": true,
+ "&gl;": true,
+ "&glE;": true,
+ "&gla;": true,
+ "&glj;": true,
+ "&gnE;": true,
+ "&gnap;": true,
+ "&gnapprox;": true,
+ "&gne;": true,
+ "&gneq;": true,
+ "&gneqq;": true,
+ "&gnsim;": true,
+ "&gopf;": true,
+ "&grave;": true,
+ "&gscr;": true,
+ "&gsim;": true,
+ "&gsime;": true,
+ "&gsiml;": true,
+ "&gt": true,
+ "&gt;": true,
+ "&gtcc;": true,
+ "&gtcir;": true,
+ "&gtdot;": true,
+ "&gtlPar;": true,
+ "&gtquest;": true,
+ "&gtrapprox;": true,
+ "&gtrarr;": true,
+ "&gtrdot;": true,
+ "&gtreqless;": true,
+ "&gtreqqless;": true,
+ "&gtrless;": true,
+ "&gtrsim;": true,
+ "&gvertneqq;": true,
+ "&gvnE;": true,
+ "&hArr;": true,
+ "&hairsp;": true,
+ "&half;": true,
+ "&hamilt;": true,
+ "&hardcy;": true,
+ "&harr;": true,
+ "&harrcir;": true,
+ "&harrw;": true,
+ "&hbar;": true,
+ "&hcirc;": true,
+ "&hearts;": true,
+ "&heartsuit;": true,
+ "&hellip;": true,
+ "&hercon;": true,
+ "&hfr;": true,
+ "&hksearow;": true,
+ "&hkswarow;": true,
+ "&hoarr;": true,
+ "&homtht;": true,
+ "&hookleftarrow;": true,
+ "&hookrightarrow;": true,
+ "&hopf;": true,
+ "&horbar;": true,
+ "&hscr;": true,
+ "&hslash;": true,
+ "&hstrok;": true,
+ "&hybull;": true,
+ "&hyphen;": true,
+ "&iacute": true,
+ "&iacute;": true,
+ "&ic;": true,
+ "&icirc": true,
+ "&icirc;": true,
+ "&icy;": true,
+ "&iecy;": true,
+ "&iexcl": true,
+ "&iexcl;": true,
+ "&iff;": true,
+ "&ifr;": true,
+ "&igrave": true,
+ "&igrave;": true,
+ "&ii;": true,
+ "&iiiint;": true,
+ "&iiint;": true,
+ "&iinfin;": true,
+ "&iiota;": true,
+ "&ijlig;": true,
+ "&imacr;": true,
+ "&image;": true,
+ "&imagline;": true,
+ "&imagpart;": true,
+ "&imath;": true,
+ "&imof;": true,
+ "&imped;": true,
+ "&in;": true,
+ "&incare;": true,
+ "&infin;": true,
+ "&infintie;": true,
+ "&inodot;": true,
+ "&int;": true,
+ "&intcal;": true,
+ "&integers;": true,
+ "&intercal;": true,
+ "&intlarhk;": true,
+ "&intprod;": true,
+ "&iocy;": true,
+ "&iogon;": true,
+ "&iopf;": true,
+ "&iota;": true,
+ "&iprod;": true,
+ "&iquest": true,
+ "&iquest;": true,
+ "&iscr;": true,
+ "&isin;": true,
+ "&isinE;": true,
+ "&isindot;": true,
+ "&isins;": true,
+ "&isinsv;": true,
+ "&isinv;": true,
+ "&it;": true,
+ "&itilde;": true,
+ "&iukcy;": true,
+ "&iuml": true,
+ "&iuml;": true,
+ "&jcirc;": true,
+ "&jcy;": true,
+ "&jfr;": true,
+ "&jmath;": true,
+ "&jopf;": true,
+ "&jscr;": true,
+ "&jsercy;": true,
+ "&jukcy;": true,
+ "&kappa;": true,
+ "&kappav;": true,
+ "&kcedil;": true,
+ "&kcy;": true,
+ "&kfr;": true,
+ "&kgreen;": true,
+ "&khcy;": true,
+ "&kjcy;": true,
+ "&kopf;": true,
+ "&kscr;": true,
+ "&lAarr;": true,
+ "&lArr;": true,
+ "&lAtail;": true,
+ "&lBarr;": true,
+ "&lE;": true,
+ "&lEg;": true,
+ "&lHar;": true,
+ "&lacute;": true,
+ "&laemptyv;": true,
+ "&lagran;": true,
+ "&lambda;": true,
+ "&lang;": true,
+ "&langd;": true,
+ "&langle;": true,
+ "&lap;": true,
+ "&laquo": true,
+ "&laquo;": true,
+ "&larr;": true,
+ "&larrb;": true,
+ "&larrbfs;": true,
+ "&larrfs;": true,
+ "&larrhk;": true,
+ "&larrlp;": true,
+ "&larrpl;": true,
+ "&larrsim;": true,
+ "&larrtl;": true,
+ "&lat;": true,
+ "&latail;": true,
+ "&late;": true,
+ "&lates;": true,
+ "&lbarr;": true,
+ "&lbbrk;": true,
+ "&lbrace;": true,
+ "&lbrack;": true,
+ "&lbrke;": true,
+ "&lbrksld;": true,
+ "&lbrkslu;": true,
+ "&lcaron;": true,
+ "&lcedil;": true,
+ "&lceil;": true,
+ "&lcub;": true,
+ "&lcy;": true,
+ "&ldca;": true,
+ "&ldquo;": true,
+ "&ldquor;": true,
+ "&ldrdhar;": true,
+ "&ldrushar;": true,
+ "&ldsh;": true,
+ "&le;": true,
+ "&leftarrow;": true,
+ "&leftarrowtail;": true,
+ "&leftharpoondown;": true,
+ "&leftharpoonup;": true,
+ "&leftleftarrows;": true,
+ "&leftrightarrow;": true,
+ "&leftrightarrows;": true,
+ "&leftrightharpoons;": true,
+ "&leftrightsquigarrow;": true,
+ "&leftthreetimes;": true,
+ "&leg;": true,
+ "&leq;": true,
+ "&leqq;": true,
+ "&leqslant;": true,
+ "&les;": true,
+ "&lescc;": true,
+ "&lesdot;": true,
+ "&lesdoto;": true,
+ "&lesdotor;": true,
+ "&lesg;": true,
+ "&lesges;": true,
+ "&lessapprox;": true,
+ "&lessdot;": true,
+ "&lesseqgtr;": true,
+ "&lesseqqgtr;": true,
+ "&lessgtr;": true,
+ "&lesssim;": true,
+ "&lfisht;": true,
+ "&lfloor;": true,
+ "&lfr;": true,
+ "&lg;": true,
+ "&lgE;": true,
+ "&lhard;": true,
+ "&lharu;": true,
+ "&lharul;": true,
+ "&lhblk;": true,
+ "&ljcy;": true,
+ "&ll;": true,
+ "&llarr;": true,
+ "&llcorner;": true,
+ "&llhard;": true,
+ "&lltri;": true,
+ "&lmidot;": true,
+ "&lmoust;": true,
+ "&lmoustache;": true,
+ "&lnE;": true,
+ "&lnap;": true,
+ "&lnapprox;": true,
+ "&lne;": true,
+ "&lneq;": true,
+ "&lneqq;": true,
+ "&lnsim;": true,
+ "&loang;": true,
+ "&loarr;": true,
+ "&lobrk;": true,
+ "&longleftarrow;": true,
+ "&longleftrightarrow;": true,
+ "&longmapsto;": true,
+ "&longrightarrow;": true,
+ "&looparrowleft;": true,
+ "&looparrowright;": true,
+ "&lopar;": true,
+ "&lopf;": true,
+ "&loplus;": true,
+ "&lotimes;": true,
+ "&lowast;": true,
+ "&lowbar;": true,
+ "&loz;": true,
+ "&lozenge;": true,
+ "&lozf;": true,
+ "&lpar;": true,
+ "&lparlt;": true,
+ "&lrarr;": true,
+ "&lrcorner;": true,
+ "&lrhar;": true,
+ "&lrhard;": true,
+ "&lrm;": true,
+ "&lrtri;": true,
+ "&lsaquo;": true,
+ "&lscr;": true,
+ "&lsh;": true,
+ "&lsim;": true,
+ "&lsime;": true,
+ "&lsimg;": true,
+ "&lsqb;": true,
+ "&lsquo;": true,
+ "&lsquor;": true,
+ "&lstrok;": true,
+ "&lt": true,
+ "&lt;": true,
+ "&ltcc;": true,
+ "&ltcir;": true,
+ "&ltdot;": true,
+ "&lthree;": true,
+ "&ltimes;": true,
+ "&ltlarr;": true,
+ "&ltquest;": true,
+ "&ltrPar;": true,
+ "&ltri;": true,
+ "&ltrie;": true,
+ "&ltrif;": true,
+ "&lurdshar;": true,
+ "&luruhar;": true,
+ "&lvertneqq;": true,
+ "&lvnE;": true,
+ "&mDDot;": true,
+ "&macr": true,
+ "&macr;": true,
+ "&male;": true,
+ "&malt;": true,
+ "&maltese;": true,
+ "&map;": true,
+ "&mapsto;": true,
+ "&mapstodown;": true,
+ "&mapstoleft;": true,
+ "&mapstoup;": true,
+ "&marker;": true,
+ "&mcomma;": true,
+ "&mcy;": true,
+ "&mdash;": true,
+ "&measuredangle;": true,
+ "&mfr;": true,
+ "&mho;": true,
+ "&micro": true,
+ "&micro;": true,
+ "&mid;": true,
+ "&midast;": true,
+ "&midcir;": true,
+ "&middot": true,
+ "&middot;": true,
+ "&minus;": true,
+ "&minusb;": true,
+ "&minusd;": true,
+ "&minusdu;": true,
+ "&mlcp;": true,
+ "&mldr;": true,
+ "&mnplus;": true,
+ "&models;": true,
+ "&mopf;": true,
+ "&mp;": true,
+ "&mscr;": true,
+ "&mstpos;": true,
+ "&mu;": true,
+ "&multimap;": true,
+ "&mumap;": true,
+ "&nGg;": true,
+ "&nGt;": true,
+ "&nGtv;": true,
+ "&nLeftarrow;": true,
+ "&nLeftrightarrow;": true,
+ "&nLl;": true,
+ "&nLt;": true,
+ "&nLtv;": true,
+ "&nRightarrow;": true,
+ "&nVDash;": true,
+ "&nVdash;": true,
+ "&nabla;": true,
+ "&nacute;": true,
+ "&nang;": true,
+ "&nap;": true,
+ "&napE;": true,
+ "&napid;": true,
+ "&napos;": true,
+ "&napprox;": true,
+ "&natur;": true,
+ "&natural;": true,
+ "&naturals;": true,
+ "&nbsp": true,
+ "&nbsp;": true,
+ "&nbump;": true,
+ "&nbumpe;": true,
+ "&ncap;": true,
+ "&ncaron;": true,
+ "&ncedil;": true,
+ "&ncong;": true,
+ "&ncongdot;": true,
+ "&ncup;": true,
+ "&ncy;": true,
+ "&ndash;": true,
+ "&ne;": true,
+ "&neArr;": true,
+ "&nearhk;": true,
+ "&nearr;": true,
+ "&nearrow;": true,
+ "&nedot;": true,
+ "&nequiv;": true,
+ "&nesear;": true,
+ "&nesim;": true,
+ "&nexist;": true,
+ "&nexists;": true,
+ "&nfr;": true,
+ "&ngE;": true,
+ "&nge;": true,
+ "&ngeq;": true,
+ "&ngeqq;": true,
+ "&ngeqslant;": true,
+ "&nges;": true,
+ "&ngsim;": true,
+ "&ngt;": true,
+ "&ngtr;": true,
+ "&nhArr;": true,
+ "&nharr;": true,
+ "&nhpar;": true,
+ "&ni;": true,
+ "&nis;": true,
+ "&nisd;": true,
+ "&niv;": true,
+ "&njcy;": true,
+ "&nlArr;": true,
+ "&nlE;": true,
+ "&nlarr;": true,
+ "&nldr;": true,
+ "&nle;": true,
+ "&nleftarrow;": true,
+ "&nleftrightarrow;": true,
+ "&nleq;": true,
+ "&nleqq;": true,
+ "&nleqslant;": true,
+ "&nles;": true,
+ "&nless;": true,
+ "&nlsim;": true,
+ "&nlt;": true,
+ "&nltri;": true,
+ "&nltrie;": true,
+ "&nmid;": true,
+ "&nopf;": true,
+ "&not": true,
+ "&not;": true,
+ "&notin;": true,
+ "&notinE;": true,
+ "&notindot;": true,
+ "&notinva;": true,
+ "&notinvb;": true,
+ "&notinvc;": true,
+ "&notni;": true,
+ "&notniva;": true,
+ "&notnivb;": true,
+ "&notnivc;": true,
+ "&npar;": true,
+ "&nparallel;": true,
+ "&nparsl;": true,
+ "&npart;": true,
+ "&npolint;": true,
+ "&npr;": true,
+ "&nprcue;": true,
+ "&npre;": true,
+ "&nprec;": true,
+ "&npreceq;": true,
+ "&nrArr;": true,
+ "&nrarr;": true,
+ "&nrarrc;": true,
+ "&nrarrw;": true,
+ "&nrightarrow;": true,
+ "&nrtri;": true,
+ "&nrtrie;": true,
+ "&nsc;": true,
+ "&nsccue;": true,
+ "&nsce;": true,
+ "&nscr;": true,
+ "&nshortmid;": true,
+ "&nshortparallel;": true,
+ "&nsim;": true,
+ "&nsime;": true,
+ "&nsimeq;": true,
+ "&nsmid;": true,
+ "&nspar;": true,
+ "&nsqsube;": true,
+ "&nsqsupe;": true,
+ "&nsub;": true,
+ "&nsubE;": true,
+ "&nsube;": true,
+ "&nsubset;": true,
+ "&nsubseteq;": true,
+ "&nsubseteqq;": true,
+ "&nsucc;": true,
+ "&nsucceq;": true,
+ "&nsup;": true,
+ "&nsupE;": true,
+ "&nsupe;": true,
+ "&nsupset;": true,
+ "&nsupseteq;": true,
+ "&nsupseteqq;": true,
+ "&ntgl;": true,
+ "&ntilde": true,
+ "&ntilde;": true,
+ "&ntlg;": true,
+ "&ntriangleleft;": true,
+ "&ntrianglelefteq;": true,
+ "&ntriangleright;": true,
+ "&ntrianglerighteq;": true,
+ "&nu;": true,
+ "&num;": true,
+ "&numero;": true,
+ "&numsp;": true,
+ "&nvDash;": true,
+ "&nvHarr;": true,
+ "&nvap;": true,
+ "&nvdash;": true,
+ "&nvge;": true,
+ "&nvgt;": true,
+ "&nvinfin;": true,
+ "&nvlArr;": true,
+ "&nvle;": true,
+ "&nvlt;": true,
+ "&nvltrie;": true,
+ "&nvrArr;": true,
+ "&nvrtrie;": true,
+ "&nvsim;": true,
+ "&nwArr;": true,
+ "&nwarhk;": true,
+ "&nwarr;": true,
+ "&nwarrow;": true,
+ "&nwnear;": true,
+ "&oS;": true,
+ "&oacute": true,
+ "&oacute;": true,
+ "&oast;": true,
+ "&ocir;": true,
+ "&ocirc": true,
+ "&ocirc;": true,
+ "&ocy;": true,
+ "&odash;": true,
+ "&odblac;": true,
+ "&odiv;": true,
+ "&odot;": true,
+ "&odsold;": true,
+ "&oelig;": true,
+ "&ofcir;": true,
+ "&ofr;": true,
+ "&ogon;": true,
+ "&ograve": true,
+ "&ograve;": true,
+ "&ogt;": true,
+ "&ohbar;": true,
+ "&ohm;": true,
+ "&oint;": true,
+ "&olarr;": true,
+ "&olcir;": true,
+ "&olcross;": true,
+ "&oline;": true,
+ "&olt;": true,
+ "&omacr;": true,
+ "&omega;": true,
+ "&omicron;": true,
+ "&omid;": true,
+ "&ominus;": true,
+ "&oopf;": true,
+ "&opar;": true,
+ "&operp;": true,
+ "&oplus;": true,
+ "&or;": true,
+ "&orarr;": true,
+ "&ord;": true,
+ "&order;": true,
+ "&orderof;": true,
+ "&ordf": true,
+ "&ordf;": true,
+ "&ordm": true,
+ "&ordm;": true,
+ "&origof;": true,
+ "&oror;": true,
+ "&orslope;": true,
+ "&orv;": true,
+ "&oscr;": true,
+ "&oslash": true,
+ "&oslash;": true,
+ "&osol;": true,
+ "&otilde": true,
+ "&otilde;": true,
+ "&otimes;": true,
+ "&otimesas;": true,
+ "&ouml": true,
+ "&ouml;": true,
+ "&ovbar;": true,
+ "&par;": true,
+ "&para": true,
+ "&para;": true,
+ "&parallel;": true,
+ "&parsim;": true,
+ "&parsl;": true,
+ "&part;": true,
+ "&pcy;": true,
+ "&percnt;": true,
+ "&period;": true,
+ "&permil;": true,
+ "&perp;": true,
+ "&pertenk;": true,
+ "&pfr;": true,
+ "&phi;": true,
+ "&phiv;": true,
+ "&phmmat;": true,
+ "&phone;": true,
+ "&pi;": true,
+ "&pitchfork;": true,
+ "&piv;": true,
+ "&planck;": true,
+ "&planckh;": true,
+ "&plankv;": true,
+ "&plus;": true,
+ "&plusacir;": true,
+ "&plusb;": true,
+ "&pluscir;": true,
+ "&plusdo;": true,
+ "&plusdu;": true,
+ "&pluse;": true,
+ "&plusmn": true,
+ "&plusmn;": true,
+ "&plussim;": true,
+ "&plustwo;": true,
+ "&pm;": true,
+ "&pointint;": true,
+ "&popf;": true,
+ "&pound": true,
+ "&pound;": true,
+ "&pr;": true,
+ "&prE;": true,
+ "&prap;": true,
+ "&prcue;": true,
+ "&pre;": true,
+ "&prec;": true,
+ "&precapprox;": true,
+ "&preccurlyeq;": true,
+ "&preceq;": true,
+ "&precnapprox;": true,
+ "&precneqq;": true,
+ "&precnsim;": true,
+ "&precsim;": true,
+ "&prime;": true,
+ "&primes;": true,
+ "&prnE;": true,
+ "&prnap;": true,
+ "&prnsim;": true,
+ "&prod;": true,
+ "&profalar;": true,
+ "&profline;": true,
+ "&profsurf;": true,
+ "&prop;": true,
+ "&propto;": true,
+ "&prsim;": true,
+ "&prurel;": true,
+ "&pscr;": true,
+ "&psi;": true,
+ "&puncsp;": true,
+ "&qfr;": true,
+ "&qint;": true,
+ "&qopf;": true,
+ "&qprime;": true,
+ "&qscr;": true,
+ "&quaternions;": true,
+ "&quatint;": true,
+ "&quest;": true,
+ "&questeq;": true,
+ "&quot": true,
+ "&quot;": true,
+ "&rAarr;": true,
+ "&rArr;": true,
+ "&rAtail;": true,
+ "&rBarr;": true,
+ "&rHar;": true,
+ "&race;": true,
+ "&racute;": true,
+ "&radic;": true,
+ "&raemptyv;": true,
+ "&rang;": true,
+ "&rangd;": true,
+ "&range;": true,
+ "&rangle;": true,
+ "&raquo": true,
+ "&raquo;": true,
+ "&rarr;": true,
+ "&rarrap;": true,
+ "&rarrb;": true,
+ "&rarrbfs;": true,
+ "&rarrc;": true,
+ "&rarrfs;": true,
+ "&rarrhk;": true,
+ "&rarrlp;": true,
+ "&rarrpl;": true,
+ "&rarrsim;": true,
+ "&rarrtl;": true,
+ "&rarrw;": true,
+ "&ratail;": true,
+ "&ratio;": true,
+ "&rationals;": true,
+ "&rbarr;": true,
+ "&rbbrk;": true,
+ "&rbrace;": true,
+ "&rbrack;": true,
+ "&rbrke;": true,
+ "&rbrksld;": true,
+ "&rbrkslu;": true,
+ "&rcaron;": true,
+ "&rcedil;": true,
+ "&rceil;": true,
+ "&rcub;": true,
+ "&rcy;": true,
+ "&rdca;": true,
+ "&rdldhar;": true,
+ "&rdquo;": true,
+ "&rdquor;": true,
+ "&rdsh;": true,
+ "&real;": true,
+ "&realine;": true,
+ "&realpart;": true,
+ "&reals;": true,
+ "&rect;": true,
+ "&reg": true,
+ "&reg;": true,
+ "&rfisht;": true,
+ "&rfloor;": true,
+ "&rfr;": true,
+ "&rhard;": true,
+ "&rharu;": true,
+ "&rharul;": true,
+ "&rho;": true,
+ "&rhov;": true,
+ "&rightarrow;": true,
+ "&rightarrowtail;": true,
+ "&rightharpoondown;": true,
+ "&rightharpoonup;": true,
+ "&rightleftarrows;": true,
+ "&rightleftharpoons;": true,
+ "&rightrightarrows;": true,
+ "&rightsquigarrow;": true,
+ "&rightthreetimes;": true,
+ "&ring;": true,
+ "&risingdotseq;": true,
+ "&rlarr;": true,
+ "&rlhar;": true,
+ "&rlm;": true,
+ "&rmoust;": true,
+ "&rmoustache;": true,
+ "&rnmid;": true,
+ "&roang;": true,
+ "&roarr;": true,
+ "&robrk;": true,
+ "&ropar;": true,
+ "&ropf;": true,
+ "&roplus;": true,
+ "&rotimes;": true,
+ "&rpar;": true,
+ "&rpargt;": true,
+ "&rppolint;": true,
+ "&rrarr;": true,
+ "&rsaquo;": true,
+ "&rscr;": true,
+ "&rsh;": true,
+ "&rsqb;": true,
+ "&rsquo;": true,
+ "&rsquor;": true,
+ "&rthree;": true,
+ "&rtimes;": true,
+ "&rtri;": true,
+ "&rtrie;": true,
+ "&rtrif;": true,
+ "&rtriltri;": true,
+ "&ruluhar;": true,
+ "&rx;": true,
+ "&sacute;": true,
+ "&sbquo;": true,
+ "&sc;": true,
+ "&scE;": true,
+ "&scap;": true,
+ "&scaron;": true,
+ "&sccue;": true,
+ "&sce;": true,
+ "&scedil;": true,
+ "&scirc;": true,
+ "&scnE;": true,
+ "&scnap;": true,
+ "&scnsim;": true,
+ "&scpolint;": true,
+ "&scsim;": true,
+ "&scy;": true,
+ "&sdot;": true,
+ "&sdotb;": true,
+ "&sdote;": true,
+ "&seArr;": true,
+ "&searhk;": true,
+ "&searr;": true,
+ "&searrow;": true,
+ "&sect": true,
+ "&sect;": true,
+ "&semi;": true,
+ "&seswar;": true,
+ "&setminus;": true,
+ "&setmn;": true,
+ "&sext;": true,
+ "&sfr;": true,
+ "&sfrown;": true,
+ "&sharp;": true,
+ "&shchcy;": true,
+ "&shcy;": true,
+ "&shortmid;": true,
+ "&shortparallel;": true,
+ "&shy": true,
+ "&shy;": true,
+ "&sigma;": true,
+ "&sigmaf;": true,
+ "&sigmav;": true,
+ "&sim;": true,
+ "&simdot;": true,
+ "&sime;": true,
+ "&simeq;": true,
+ "&simg;": true,
+ "&simgE;": true,
+ "&siml;": true,
+ "&simlE;": true,
+ "&simne;": true,
+ "&simplus;": true,
+ "&simrarr;": true,
+ "&slarr;": true,
+ "&smallsetminus;": true,
+ "&smashp;": true,
+ "&smeparsl;": true,
+ "&smid;": true,
+ "&smile;": true,
+ "&smt;": true,
+ "&smte;": true,
+ "&smtes;": true,
+ "&softcy;": true,
+ "&sol;": true,
+ "&solb;": true,
+ "&solbar;": true,
+ "&sopf;": true,
+ "&spades;": true,
+ "&spadesuit;": true,
+ "&spar;": true,
+ "&sqcap;": true,
+ "&sqcaps;": true,
+ "&sqcup;": true,
+ "&sqcups;": true,
+ "&sqsub;": true,
+ "&sqsube;": true,
+ "&sqsubset;": true,
+ "&sqsubseteq;": true,
+ "&sqsup;": true,
+ "&sqsupe;": true,
+ "&sqsupset;": true,
+ "&sqsupseteq;": true,
+ "&squ;": true,
+ "&square;": true,
+ "&squarf;": true,
+ "&squf;": true,
+ "&srarr;": true,
+ "&sscr;": true,
+ "&ssetmn;": true,
+ "&ssmile;": true,
+ "&sstarf;": true,
+ "&star;": true,
+ "&starf;": true,
+ "&straightepsilon;": true,
+ "&straightphi;": true,
+ "&strns;": true,
+ "&sub;": true,
+ "&subE;": true,
+ "&subdot;": true,
+ "&sube;": true,
+ "&subedot;": true,
+ "&submult;": true,
+ "&subnE;": true,
+ "&subne;": true,
+ "&subplus;": true,
+ "&subrarr;": true,
+ "&subset;": true,
+ "&subseteq;": true,
+ "&subseteqq;": true,
+ "&subsetneq;": true,
+ "&subsetneqq;": true,
+ "&subsim;": true,
+ "&subsub;": true,
+ "&subsup;": true,
+ "&succ;": true,
+ "&succapprox;": true,
+ "&succcurlyeq;": true,
+ "&succeq;": true,
+ "&succnapprox;": true,
+ "&succneqq;": true,
+ "&succnsim;": true,
+ "&succsim;": true,
+ "&sum;": true,
+ "&sung;": true,
+ "&sup1": true,
+ "&sup1;": true,
+ "&sup2": true,
+ "&sup2;": true,
+ "&sup3": true,
+ "&sup3;": true,
+ "&sup;": true,
+ "&supE;": true,
+ "&supdot;": true,
+ "&supdsub;": true,
+ "&supe;": true,
+ "&supedot;": true,
+ "&suphsol;": true,
+ "&suphsub;": true,
+ "&suplarr;": true,
+ "&supmult;": true,
+ "&supnE;": true,
+ "&supne;": true,
+ "&supplus;": true,
+ "&supset;": true,
+ "&supseteq;": true,
+ "&supseteqq;": true,
+ "&supsetneq;": true,
+ "&supsetneqq;": true,
+ "&supsim;": true,
+ "&supsub;": true,
+ "&supsup;": true,
+ "&swArr;": true,
+ "&swarhk;": true,
+ "&swarr;": true,
+ "&swarrow;": true,
+ "&swnwar;": true,
+ "&szlig": true,
+ "&szlig;": true,
+ "&target;": true,
+ "&tau;": true,
+ "&tbrk;": true,
+ "&tcaron;": true,
+ "&tcedil;": true,
+ "&tcy;": true,
+ "&tdot;": true,
+ "&telrec;": true,
+ "&tfr;": true,
+ "&there4;": true,
+ "&therefore;": true,
+ "&theta;": true,
+ "&thetasym;": true,
+ "&thetav;": true,
+ "&thickapprox;": true,
+ "&thicksim;": true,
+ "&thinsp;": true,
+ "&thkap;": true,
+ "&thksim;": true,
+ "&thorn": true,
+ "&thorn;": true,
+ "&tilde;": true,
+ "&times": true,
+ "&times;": true,
+ "&timesb;": true,
+ "&timesbar;": true,
+ "&timesd;": true,
+ "&tint;": true,
+ "&toea;": true,
+ "&top;": true,
+ "&topbot;": true,
+ "&topcir;": true,
+ "&topf;": true,
+ "&topfork;": true,
+ "&tosa;": true,
+ "&tprime;": true,
+ "&trade;": true,
+ "&triangle;": true,
+ "&triangledown;": true,
+ "&triangleleft;": true,
+ "&trianglelefteq;": true,
+ "&triangleq;": true,
+ "&triangleright;": true,
+ "&trianglerighteq;": true,
+ "&tridot;": true,
+ "&trie;": true,
+ "&triminus;": true,
+ "&triplus;": true,
+ "&trisb;": true,
+ "&tritime;": true,
+ "&trpezium;": true,
+ "&tscr;": true,
+ "&tscy;": true,
+ "&tshcy;": true,
+ "&tstrok;": true,
+ "&twixt;": true,
+ "&twoheadleftarrow;": true,
+ "&twoheadrightarrow;": true,
+ "&uArr;": true,
+ "&uHar;": true,
+ "&uacute": true,
+ "&uacute;": true,
+ "&uarr;": true,
+ "&ubrcy;": true,
+ "&ubreve;": true,
+ "&ucirc": true,
+ "&ucirc;": true,
+ "&ucy;": true,
+ "&udarr;": true,
+ "&udblac;": true,
+ "&udhar;": true,
+ "&ufisht;": true,
+ "&ufr;": true,
+ "&ugrave": true,
+ "&ugrave;": true,
+ "&uharl;": true,
+ "&uharr;": true,
+ "&uhblk;": true,
+ "&ulcorn;": true,
+ "&ulcorner;": true,
+ "&ulcrop;": true,
+ "&ultri;": true,
+ "&umacr;": true,
+ "&uml": true,
+ "&uml;": true,
+ "&uogon;": true,
+ "&uopf;": true,
+ "&uparrow;": true,
+ "&updownarrow;": true,
+ "&upharpoonleft;": true,
+ "&upharpoonright;": true,
+ "&uplus;": true,
+ "&upsi;": true,
+ "&upsih;": true,
+ "&upsilon;": true,
+ "&upuparrows;": true,
+ "&urcorn;": true,
+ "&urcorner;": true,
+ "&urcrop;": true,
+ "&uring;": true,
+ "&urtri;": true,
+ "&uscr;": true,
+ "&utdot;": true,
+ "&utilde;": true,
+ "&utri;": true,
+ "&utrif;": true,
+ "&uuarr;": true,
+ "&uuml": true,
+ "&uuml;": true,
+ "&uwangle;": true,
+ "&vArr;": true,
+ "&vBar;": true,
+ "&vBarv;": true,
+ "&vDash;": true,
+ "&vangrt;": true,
+ "&varepsilon;": true,
+ "&varkappa;": true,
+ "&varnothing;": true,
+ "&varphi;": true,
+ "&varpi;": true,
+ "&varpropto;": true,
+ "&varr;": true,
+ "&varrho;": true,
+ "&varsigma;": true,
+ "&varsubsetneq;": true,
+ "&varsubsetneqq;": true,
+ "&varsupsetneq;": true,
+ "&varsupsetneqq;": true,
+ "&vartheta;": true,
+ "&vartriangleleft;": true,
+ "&vartriangleright;": true,
+ "&vcy;": true,
+ "&vdash;": true,
+ "&vee;": true,
+ "&veebar;": true,
+ "&veeeq;": true,
+ "&vellip;": true,
+ "&verbar;": true,
+ "&vert;": true,
+ "&vfr;": true,
+ "&vltri;": true,
+ "&vnsub;": true,
+ "&vnsup;": true,
+ "&vopf;": true,
+ "&vprop;": true,
+ "&vrtri;": true,
+ "&vscr;": true,
+ "&vsubnE;": true,
+ "&vsubne;": true,
+ "&vsupnE;": true,
+ "&vsupne;": true,
+ "&vzigzag;": true,
+ "&wcirc;": true,
+ "&wedbar;": true,
+ "&wedge;": true,
+ "&wedgeq;": true,
+ "&weierp;": true,
+ "&wfr;": true,
+ "&wopf;": true,
+ "&wp;": true,
+ "&wr;": true,
+ "&wreath;": true,
+ "&wscr;": true,
+ "&xcap;": true,
+ "&xcirc;": true,
+ "&xcup;": true,
+ "&xdtri;": true,
+ "&xfr;": true,
+ "&xhArr;": true,
+ "&xharr;": true,
+ "&xi;": true,
+ "&xlArr;": true,
+ "&xlarr;": true,
+ "&xmap;": true,
+ "&xnis;": true,
+ "&xodot;": true,
+ "&xopf;": true,
+ "&xoplus;": true,
+ "&xotime;": true,
+ "&xrArr;": true,
+ "&xrarr;": true,
+ "&xscr;": true,
+ "&xsqcup;": true,
+ "&xuplus;": true,
+ "&xutri;": true,
+ "&xvee;": true,
+ "&xwedge;": true,
+ "&yacute": true,
+ "&yacute;": true,
+ "&yacy;": true,
+ "&ycirc;": true,
+ "&ycy;": true,
+ "&yen": true,
+ "&yen;": true,
+ "&yfr;": true,
+ "&yicy;": true,
+ "&yopf;": true,
+ "&yscr;": true,
+ "&yucy;": true,
+ "&yuml": true,
+ "&yuml;": true,
+ "&zacute;": true,
+ "&zcaron;": true,
+ "&zcy;": true,
+ "&zdot;": true,
+ "&zeetrf;": true,
+ "&zeta;": true,
+ "&zfr;": true,
+ "&zhcy;": true,
+ "&zigrarr;": true,
+ "&zopf;": true,
+ "&zscr;": true,
+ "&zwj;": true,
+ "&zwnj;": true,
+}
diff --git a/vendor/github.com/russross/blackfriday/v2/esc.go b/vendor/github.com/russross/blackfriday/v2/esc.go
new file mode 100644
index 000000000..6ab60102c
--- /dev/null
+++ b/vendor/github.com/russross/blackfriday/v2/esc.go
@@ -0,0 +1,70 @@
+package blackfriday
+
+import (
+ "html"
+ "io"
+)
+
+var htmlEscaper = [256][]byte{
+ '&': []byte("&amp;"),
+ '<': []byte("&lt;"),
+ '>': []byte("&gt;"),
+ '"': []byte("&quot;"),
+}
+
+func escapeHTML(w io.Writer, s []byte) {
+ escapeEntities(w, s, false)
+}
+
+func escapeAllHTML(w io.Writer, s []byte) {
+ escapeEntities(w, s, true)
+}
+
+func escapeEntities(w io.Writer, s []byte, escapeValidEntities bool) {
+ var start, end int
+ for end < len(s) {
+ escSeq := htmlEscaper[s[end]]
+ if escSeq != nil {
+ isEntity, entityEnd := nodeIsEntity(s, end)
+ if isEntity && !escapeValidEntities {
+ w.Write(s[start : entityEnd+1])
+ start = entityEnd + 1
+ } else {
+ w.Write(s[start:end])
+ w.Write(escSeq)
+ start = end + 1
+ }
+ }
+ end++
+ }
+ if start < len(s) && end <= len(s) {
+ w.Write(s[start:end])
+ }
+}
+
+func nodeIsEntity(s []byte, end int) (isEntity bool, endEntityPos int) {
+ isEntity = false
+ endEntityPos = end + 1
+
+ if s[end] == '&' {
+ for endEntityPos < len(s) {
+ if s[endEntityPos] == ';' {
+ if entities[string(s[end:endEntityPos+1])] {
+ isEntity = true
+ break
+ }
+ }
+ if !isalnum(s[endEntityPos]) && s[endEntityPos] != '&' && s[endEntityPos] != '#' {
+ break
+ }
+ endEntityPos++
+ }
+ }
+
+ return isEntity, endEntityPos
+}
+
+func escLink(w io.Writer, text []byte) {
+ unesc := html.UnescapeString(string(text))
+ escapeHTML(w, []byte(unesc))
+}
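The difference between the two escapers above only shows up around already-valid entities. A hypothetical in-package example (both functions are unexported, so this sketch would have to live inside package blackfriday and import "bytes" and "fmt"):

func exampleEscapers() {
	var kept, all bytes.Buffer
	src := []byte(`5 &lt; 6 & "x"`)
	escapeHTML(&kept, src)   // valid entity preserved: 5 &lt; 6 &amp; &quot;x&quot;
	escapeAllHTML(&all, src) // entity escaped as well: 5 &amp;lt; 6 &amp; &quot;x&quot;
	fmt.Println(kept.String())
	fmt.Println(all.String())
}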
diff --git a/vendor/github.com/russross/blackfriday/v2/go.mod b/vendor/github.com/russross/blackfriday/v2/go.mod
new file mode 100644
index 000000000..620b74e0a
--- /dev/null
+++ b/vendor/github.com/russross/blackfriday/v2/go.mod
@@ -0,0 +1 @@
+module github.com/russross/blackfriday/v2
diff --git a/vendor/github.com/russross/blackfriday/v2/html.go b/vendor/github.com/russross/blackfriday/v2/html.go
new file mode 100644
index 000000000..cb4f26e30
--- /dev/null
+++ b/vendor/github.com/russross/blackfriday/v2/html.go
@@ -0,0 +1,952 @@
+//
+// Blackfriday Markdown Processor
+// Available at http://github.com/russross/blackfriday
+//
+// Copyright © 2011 Russ Ross <russ@russross.com>.
+// Distributed under the Simplified BSD License.
+// See README.md for details.
+//
+
+//
+//
+// HTML rendering backend
+//
+//
+
+package blackfriday
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "regexp"
+ "strings"
+)
+
+// HTMLFlags control optional behavior of HTML renderer.
+type HTMLFlags int
+
+// HTML renderer configuration options.
+const (
+ HTMLFlagsNone HTMLFlags = 0
+ SkipHTML HTMLFlags = 1 << iota // Skip preformatted HTML blocks
+ SkipImages // Skip embedded images
+ SkipLinks // Skip all links
+ Safelink // Only link to trusted protocols
+ NofollowLinks // Only link with rel="nofollow"
+ NoreferrerLinks // Only link with rel="noreferrer"
+ NoopenerLinks // Only link with rel="noopener"
+ HrefTargetBlank // Add a blank target
+ CompletePage // Generate a complete HTML page
+ UseXHTML // Generate XHTML output instead of HTML
+ FootnoteReturnLinks // Generate a link at the end of a footnote to return to the source
+ Smartypants // Enable smart punctuation substitutions
+ SmartypantsFractions // Enable smart fractions (with Smartypants)
+ SmartypantsDashes // Enable smart dashes (with Smartypants)
+ SmartypantsLatexDashes // Enable LaTeX-style dashes (with Smartypants)
+ SmartypantsAngledQuotes // Enable angled double quotes (with Smartypants) for double quotes rendering
+ SmartypantsQuotesNBSP // Enable « French guillemets » (with Smartypants)
+ TOC // Generate a table of contents
+)
+
+var (
+ htmlTagRe = regexp.MustCompile("(?i)^" + htmlTag)
+)
+
+const (
+ htmlTag = "(?:" + openTag + "|" + closeTag + "|" + htmlComment + "|" +
+ processingInstruction + "|" + declaration + "|" + cdata + ")"
+ closeTag = "</" + tagName + "\\s*[>]"
+ openTag = "<" + tagName + attribute + "*" + "\\s*/?>"
+ attribute = "(?:" + "\\s+" + attributeName + attributeValueSpec + "?)"
+ attributeValue = "(?:" + unquotedValue + "|" + singleQuotedValue + "|" + doubleQuotedValue + ")"
+ attributeValueSpec = "(?:" + "\\s*=" + "\\s*" + attributeValue + ")"
+ attributeName = "[a-zA-Z_:][a-zA-Z0-9:._-]*"
+ cdata = "<!\\[CDATA\\[[\\s\\S]*?\\]\\]>"
+ declaration = "<![A-Z]+" + "\\s+[^>]*>"
+ doubleQuotedValue = "\"[^\"]*\""
+ htmlComment = "<!---->|<!--(?:-?[^>-])(?:-?[^-])*-->"
+ processingInstruction = "[<][?].*?[?][>]"
+ singleQuotedValue = "'[^']*'"
+ tagName = "[A-Za-z][A-Za-z0-9-]*"
+ unquotedValue = "[^\"'=<>`\\x00-\\x20]+"
+)
+
+// HTMLRendererParameters is a collection of supplementary parameters tweaking
+// the behavior of various parts of HTML renderer.
+type HTMLRendererParameters struct {
+ // Prepend this text to each relative URL.
+ AbsolutePrefix string
+ // Add this text to each footnote anchor, to ensure uniqueness.
+ FootnoteAnchorPrefix string
+ // Show this text inside the <a> tag for a footnote return link, if the
+ // FootnoteReturnLinks flag is enabled. If blank, the string
+ // <sup>[return]</sup> is used.
+ FootnoteReturnLinkContents string
+ // If set, add this text to the front of each Heading ID, to ensure
+ // uniqueness.
+ HeadingIDPrefix string
+ // If set, add this text to the back of each Heading ID, to ensure uniqueness.
+ HeadingIDSuffix string
+ // Increase heading levels: if the offset is 1, <h1> becomes <h2> etc.
+ // Negative offset is also valid.
+ // Resulting levels are clipped between 1 and 6.
+ HeadingLevelOffset int
+
+ Title string // Document title (used if CompletePage is set)
+ CSS string // Optional CSS file URL (used if CompletePage is set)
+ Icon string // Optional icon file URL (used if CompletePage is set)
+
+ Flags HTMLFlags // Flags allow customizing this renderer's behavior
+}
+
+// HTMLRenderer is a type that implements the Renderer interface for HTML output.
+//
+// Do not create this directly; instead, use the NewHTMLRenderer function.
+type HTMLRenderer struct {
+ HTMLRendererParameters
+
+ closeTag string // how to end singleton tags: either " />" or ">"
+
+ // Track heading IDs to prevent ID collision in a single generation.
+ headingIDs map[string]int
+
+ lastOutputLen int
+ disableTags int
+
+ sr *SPRenderer
+}
+
+const (
+ xhtmlClose = " />"
+ htmlClose = ">"
+)
+
+// NewHTMLRenderer creates and configures an HTMLRenderer object, which
+// satisfies the Renderer interface.
+func NewHTMLRenderer(params HTMLRendererParameters) *HTMLRenderer {
+ // configure the rendering engine
+ closeTag := htmlClose
+ if params.Flags&UseXHTML != 0 {
+ closeTag = xhtmlClose
+ }
+
+ if params.FootnoteReturnLinkContents == "" {
+ // U+FE0E is VARIATION SELECTOR-15.
+ // It suppresses automatic emoji presentation of the preceding
+ // U+21A9 LEFTWARDS ARROW WITH HOOK on iOS and iPadOS.
+ params.FootnoteReturnLinkContents = "<span aria-label='Return'>↩\ufe0e</span>"
+ }
+
+ return &HTMLRenderer{
+ HTMLRendererParameters: params,
+
+ closeTag: closeTag,
+ headingIDs: make(map[string]int),
+
+ sr: NewSmartypantsRenderer(params.Flags),
+ }
+}
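+
+// A minimal usage sketch, assuming the markdown source is available in a
+// []byte named input (CommonHTMLFlags, Run and WithRenderer are defined in
+// markdown.go in this package):
+//
+//	renderer := NewHTMLRenderer(HTMLRendererParameters{
+//		Title: "Example page",
+//		Flags: CommonHTMLFlags | CompletePage | TOC,
+//	})
+//	output := Run(input, WithRenderer(renderer))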
+
+func isHTMLTag(tag []byte, tagname string) bool {
+ found, _ := findHTMLTagPos(tag, tagname)
+ return found
+}
+
+// Look for a character, but ignore it when it appears inside any kind of
+// quotes, since it might be part of a JavaScript string.
+func skipUntilCharIgnoreQuotes(html []byte, start int, char byte) int {
+ inSingleQuote := false
+ inDoubleQuote := false
+ inGraveQuote := false
+ i := start
+ for i < len(html) {
+ switch {
+ case html[i] == char && !inSingleQuote && !inDoubleQuote && !inGraveQuote:
+ return i
+ case html[i] == '\'':
+ inSingleQuote = !inSingleQuote
+ case html[i] == '"':
+ inDoubleQuote = !inDoubleQuote
+ case html[i] == '`':
+ inGraveQuote = !inGraveQuote
+ }
+ i++
+ }
+ return start
+}
+
+func findHTMLTagPos(tag []byte, tagname string) (bool, int) {
+ i := 0
+ if i < len(tag) && tag[0] != '<' {
+ return false, -1
+ }
+ i++
+ i = skipSpace(tag, i)
+
+ if i < len(tag) && tag[i] == '/' {
+ i++
+ }
+
+ i = skipSpace(tag, i)
+ j := 0
+ for ; i < len(tag); i, j = i+1, j+1 {
+ if j >= len(tagname) {
+ break
+ }
+
+ if strings.ToLower(string(tag[i]))[0] != tagname[j] {
+ return false, -1
+ }
+ }
+
+ if i == len(tag) {
+ return false, -1
+ }
+
+ rightAngle := skipUntilCharIgnoreQuotes(tag, i, '>')
+ if rightAngle >= i {
+ return true, rightAngle
+ }
+
+ return false, -1
+}
+
+func skipSpace(tag []byte, i int) int {
+ for i < len(tag) && isspace(tag[i]) {
+ i++
+ }
+ return i
+}
+
+func isRelativeLink(link []byte) (yes bool) {
+ // an anchor link begins with '#'
+ if link[0] == '#' {
+ return true
+ }
+
+ // a link beginning with '/' but not '//' is relative ('//' may start a protocol-relative link)
+ if len(link) >= 2 && link[0] == '/' && link[1] != '/' {
+ return true
+ }
+
+ // only the root '/'
+ if len(link) == 1 && link[0] == '/' {
+ return true
+ }
+
+ // current directory: begins with "./"
+ if bytes.HasPrefix(link, []byte("./")) {
+ return true
+ }
+
+ // parent directory: begins with "../"
+ if bytes.HasPrefix(link, []byte("../")) {
+ return true
+ }
+
+ return false
+}
+
+func (r *HTMLRenderer) ensureUniqueHeadingID(id string) string {
+ for count, found := r.headingIDs[id]; found; count, found = r.headingIDs[id] {
+ tmp := fmt.Sprintf("%s-%d", id, count+1)
+
+ if _, tmpFound := r.headingIDs[tmp]; !tmpFound {
+ r.headingIDs[id] = count + 1
+ id = tmp
+ } else {
+ id = id + "-1"
+ }
+ }
+
+ if _, found := r.headingIDs[id]; !found {
+ r.headingIDs[id] = 0
+ }
+
+ return id
+}
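+
+// A behavioral sketch of the de-duplication above, on a freshly constructed
+// renderer r:
+//
+//	r.ensureUniqueHeadingID("intro") // "intro"
+//	r.ensureUniqueHeadingID("intro") // "intro-1"
+//	r.ensureUniqueHeadingID("intro") // "intro-2"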
+
+func (r *HTMLRenderer) addAbsPrefix(link []byte) []byte {
+ if r.AbsolutePrefix != "" && isRelativeLink(link) && link[0] != '.' {
+ newDest := r.AbsolutePrefix
+ if link[0] != '/' {
+ newDest += "/"
+ }
+ newDest += string(link)
+ return []byte(newDest)
+ }
+ return link
+}
+
+func appendLinkAttrs(attrs []string, flags HTMLFlags, link []byte) []string {
+ if isRelativeLink(link) {
+ return attrs
+ }
+ val := []string{}
+ if flags&NofollowLinks != 0 {
+ val = append(val, "nofollow")
+ }
+ if flags&NoreferrerLinks != 0 {
+ val = append(val, "noreferrer")
+ }
+ if flags&NoopenerLinks != 0 {
+ val = append(val, "noopener")
+ }
+ if flags&HrefTargetBlank != 0 {
+ attrs = append(attrs, "target=\"_blank\"")
+ }
+ if len(val) == 0 {
+ return attrs
+ }
+ attr := fmt.Sprintf("rel=%q", strings.Join(val, " "))
+ return append(attrs, attr)
+}
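+
+// A small behavioral sketch of the helper above (example.com is only a
+// placeholder destination; relative links are returned unchanged):
+//
+//	flags := NofollowLinks | NoreferrerLinks | HrefTargetBlank
+//	attrs := appendLinkAttrs(nil, flags, []byte("https://example.com"))
+//	// attrs: []string{`target="_blank"`, `rel="nofollow noreferrer"`}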
+
+func isMailto(link []byte) bool {
+ return bytes.HasPrefix(link, []byte("mailto:"))
+}
+
+func needSkipLink(flags HTMLFlags, dest []byte) bool {
+ if flags&SkipLinks != 0 {
+ return true
+ }
+ return flags&Safelink != 0 && !isSafeLink(dest) && !isMailto(dest)
+}
+
+func isSmartypantable(node *Node) bool {
+ pt := node.Parent.Type
+ return pt != Link && pt != CodeBlock && pt != Code
+}
+
+func appendLanguageAttr(attrs []string, info []byte) []string {
+ if len(info) == 0 {
+ return attrs
+ }
+ endOfLang := bytes.IndexAny(info, "\t ")
+ if endOfLang < 0 {
+ endOfLang = len(info)
+ }
+ return append(attrs, fmt.Sprintf("class=\"language-%s\"", info[:endOfLang]))
+}
+
+func (r *HTMLRenderer) tag(w io.Writer, name []byte, attrs []string) {
+ w.Write(name)
+ if len(attrs) > 0 {
+ w.Write(spaceBytes)
+ w.Write([]byte(strings.Join(attrs, " ")))
+ }
+ w.Write(gtBytes)
+ r.lastOutputLen = 1
+}
+
+func footnoteRef(prefix string, node *Node) []byte {
+ urlFrag := prefix + string(slugify(node.Destination))
+ anchor := fmt.Sprintf(`<a href="#fn:%s">%d</a>`, urlFrag, node.NoteID)
+ return []byte(fmt.Sprintf(`<sup class="footnote-ref" id="fnref:%s">%s</sup>`, urlFrag, anchor))
+}
+
+func footnoteItem(prefix string, slug []byte) []byte {
+ return []byte(fmt.Sprintf(`<li id="fn:%s%s">`, prefix, slug))
+}
+
+func footnoteReturnLink(prefix, returnLink string, slug []byte) []byte {
+ const format = ` <a class="footnote-return" href="#fnref:%s%s">%s</a>`
+ return []byte(fmt.Sprintf(format, prefix, slug, returnLink))
+}
+
+func itemOpenCR(node *Node) bool {
+ if node.Prev == nil {
+ return false
+ }
+ ld := node.Parent.ListData
+ return !ld.Tight && ld.ListFlags&ListTypeDefinition == 0
+}
+
+func skipParagraphTags(node *Node) bool {
+ grandparent := node.Parent.Parent
+ if grandparent == nil || grandparent.Type != List {
+ return false
+ }
+ tightOrTerm := grandparent.Tight || node.Parent.ListFlags&ListTypeTerm != 0
+ return grandparent.Type == List && tightOrTerm
+}
+
+func cellAlignment(align CellAlignFlags) string {
+ switch align {
+ case TableAlignmentLeft:
+ return "left"
+ case TableAlignmentRight:
+ return "right"
+ case TableAlignmentCenter:
+ return "center"
+ default:
+ return ""
+ }
+}
+
+func (r *HTMLRenderer) out(w io.Writer, text []byte) {
+ if r.disableTags > 0 {
+ w.Write(htmlTagRe.ReplaceAll(text, []byte{}))
+ } else {
+ w.Write(text)
+ }
+ r.lastOutputLen = len(text)
+}
+
+func (r *HTMLRenderer) cr(w io.Writer) {
+ if r.lastOutputLen > 0 {
+ r.out(w, nlBytes)
+ }
+}
+
+var (
+ nlBytes = []byte{'\n'}
+ gtBytes = []byte{'>'}
+ spaceBytes = []byte{' '}
+)
+
+var (
+ brTag = []byte("<br>")
+ brXHTMLTag = []byte("<br />")
+ emTag = []byte("<em>")
+ emCloseTag = []byte("</em>")
+ strongTag = []byte("<strong>")
+ strongCloseTag = []byte("</strong>")
+ delTag = []byte("<del>")
+ delCloseTag = []byte("</del>")
+ ttTag = []byte("<tt>")
+ ttCloseTag = []byte("</tt>")
+ aTag = []byte("<a")
+ aCloseTag = []byte("</a>")
+ preTag = []byte("<pre>")
+ preCloseTag = []byte("</pre>")
+ codeTag = []byte("<code>")
+ codeCloseTag = []byte("</code>")
+ pTag = []byte("<p>")
+ pCloseTag = []byte("</p>")
+ blockquoteTag = []byte("<blockquote>")
+ blockquoteCloseTag = []byte("</blockquote>")
+ hrTag = []byte("<hr>")
+ hrXHTMLTag = []byte("<hr />")
+ ulTag = []byte("<ul>")
+ ulCloseTag = []byte("</ul>")
+ olTag = []byte("<ol>")
+ olCloseTag = []byte("</ol>")
+ dlTag = []byte("<dl>")
+ dlCloseTag = []byte("</dl>")
+ liTag = []byte("<li>")
+ liCloseTag = []byte("</li>")
+ ddTag = []byte("<dd>")
+ ddCloseTag = []byte("</dd>")
+ dtTag = []byte("<dt>")
+ dtCloseTag = []byte("</dt>")
+ tableTag = []byte("<table>")
+ tableCloseTag = []byte("</table>")
+ tdTag = []byte("<td")
+ tdCloseTag = []byte("</td>")
+ thTag = []byte("<th")
+ thCloseTag = []byte("</th>")
+ theadTag = []byte("<thead>")
+ theadCloseTag = []byte("</thead>")
+ tbodyTag = []byte("<tbody>")
+ tbodyCloseTag = []byte("</tbody>")
+ trTag = []byte("<tr>")
+ trCloseTag = []byte("</tr>")
+ h1Tag = []byte("<h1")
+ h1CloseTag = []byte("</h1>")
+ h2Tag = []byte("<h2")
+ h2CloseTag = []byte("</h2>")
+ h3Tag = []byte("<h3")
+ h3CloseTag = []byte("</h3>")
+ h4Tag = []byte("<h4")
+ h4CloseTag = []byte("</h4>")
+ h5Tag = []byte("<h5")
+ h5CloseTag = []byte("</h5>")
+ h6Tag = []byte("<h6")
+ h6CloseTag = []byte("</h6>")
+
+ footnotesDivBytes = []byte("\n<div class=\"footnotes\">\n\n")
+ footnotesCloseDivBytes = []byte("\n</div>\n")
+)
+
+func headingTagsFromLevel(level int) ([]byte, []byte) {
+ if level <= 1 {
+ return h1Tag, h1CloseTag
+ }
+ switch level {
+ case 2:
+ return h2Tag, h2CloseTag
+ case 3:
+ return h3Tag, h3CloseTag
+ case 4:
+ return h4Tag, h4CloseTag
+ case 5:
+ return h5Tag, h5CloseTag
+ }
+ return h6Tag, h6CloseTag
+}
+
+func (r *HTMLRenderer) outHRTag(w io.Writer) {
+ if r.Flags&UseXHTML == 0 {
+ r.out(w, hrTag)
+ } else {
+ r.out(w, hrXHTMLTag)
+ }
+}
+
+// RenderNode is a default renderer of a single node of a syntax tree. For
+// block nodes it will be called twice: first time with entering=true, second
+// time with entering=false, so that it could know when it's working on an open
+// tag and when on close. It writes the result to w.
+//
+// The return value is a way to tell the calling walker to adjust its walk
+// pattern: e.g. it can terminate the traversal by returning Terminate. Or it
+// can ask the walker to skip a subtree of this node by returning SkipChildren.
+// The typical behavior is to return GoToNext, which asks for the usual
+// traversal to the next node.
+func (r *HTMLRenderer) RenderNode(w io.Writer, node *Node, entering bool) WalkStatus {
+ attrs := []string{}
+ switch node.Type {
+ case Text:
+ if r.Flags&Smartypants != 0 {
+ var tmp bytes.Buffer
+ escapeHTML(&tmp, node.Literal)
+ r.sr.Process(w, tmp.Bytes())
+ } else {
+ if node.Parent.Type == Link {
+ escLink(w, node.Literal)
+ } else {
+ escapeHTML(w, node.Literal)
+ }
+ }
+ case Softbreak:
+ r.cr(w)
+ // TODO: make it configurable via out(renderer.softbreak)
+ case Hardbreak:
+ if r.Flags&UseXHTML == 0 {
+ r.out(w, brTag)
+ } else {
+ r.out(w, brXHTMLTag)
+ }
+ r.cr(w)
+ case Emph:
+ if entering {
+ r.out(w, emTag)
+ } else {
+ r.out(w, emCloseTag)
+ }
+ case Strong:
+ if entering {
+ r.out(w, strongTag)
+ } else {
+ r.out(w, strongCloseTag)
+ }
+ case Del:
+ if entering {
+ r.out(w, delTag)
+ } else {
+ r.out(w, delCloseTag)
+ }
+ case HTMLSpan:
+ if r.Flags&SkipHTML != 0 {
+ break
+ }
+ r.out(w, node.Literal)
+ case Link:
+ // mark it but don't link it if it is not a safe link: no smartypants
+ dest := node.LinkData.Destination
+ if needSkipLink(r.Flags, dest) {
+ if entering {
+ r.out(w, ttTag)
+ } else {
+ r.out(w, ttCloseTag)
+ }
+ } else {
+ if entering {
+ dest = r.addAbsPrefix(dest)
+ var hrefBuf bytes.Buffer
+ hrefBuf.WriteString("href=\"")
+ escLink(&hrefBuf, dest)
+ hrefBuf.WriteByte('"')
+ attrs = append(attrs, hrefBuf.String())
+ if node.NoteID != 0 {
+ r.out(w, footnoteRef(r.FootnoteAnchorPrefix, node))
+ break
+ }
+ attrs = appendLinkAttrs(attrs, r.Flags, dest)
+ if len(node.LinkData.Title) > 0 {
+ var titleBuff bytes.Buffer
+ titleBuff.WriteString("title=\"")
+ escapeHTML(&titleBuff, node.LinkData.Title)
+ titleBuff.WriteByte('"')
+ attrs = append(attrs, titleBuff.String())
+ }
+ r.tag(w, aTag, attrs)
+ } else {
+ if node.NoteID != 0 {
+ break
+ }
+ r.out(w, aCloseTag)
+ }
+ }
+ case Image:
+ if r.Flags&SkipImages != 0 {
+ return SkipChildren
+ }
+ if entering {
+ dest := node.LinkData.Destination
+ dest = r.addAbsPrefix(dest)
+ if r.disableTags == 0 {
+ //if options.safe && potentiallyUnsafe(dest) {
+ //out(w, `<img src="" alt="`)
+ //} else {
+ r.out(w, []byte(`<img src="`))
+ escLink(w, dest)
+ r.out(w, []byte(`" alt="`))
+ //}
+ }
+ r.disableTags++
+ } else {
+ r.disableTags--
+ if r.disableTags == 0 {
+ if node.LinkData.Title != nil {
+ r.out(w, []byte(`" title="`))
+ escapeHTML(w, node.LinkData.Title)
+ }
+ r.out(w, []byte(`" />`))
+ }
+ }
+ case Code:
+ r.out(w, codeTag)
+ escapeAllHTML(w, node.Literal)
+ r.out(w, codeCloseTag)
+ case Document:
+ break
+ case Paragraph:
+ if skipParagraphTags(node) {
+ break
+ }
+ if entering {
+ // TODO: untangle the rules for when the newlines need to be
+ // added and when not.
+ if node.Prev != nil {
+ switch node.Prev.Type {
+ case HTMLBlock, List, Paragraph, Heading, CodeBlock, BlockQuote, HorizontalRule:
+ r.cr(w)
+ }
+ }
+ if node.Parent.Type == BlockQuote && node.Prev == nil {
+ r.cr(w)
+ }
+ r.out(w, pTag)
+ } else {
+ r.out(w, pCloseTag)
+ if !(node.Parent.Type == Item && node.Next == nil) {
+ r.cr(w)
+ }
+ }
+ case BlockQuote:
+ if entering {
+ r.cr(w)
+ r.out(w, blockquoteTag)
+ } else {
+ r.out(w, blockquoteCloseTag)
+ r.cr(w)
+ }
+ case HTMLBlock:
+ if r.Flags&SkipHTML != 0 {
+ break
+ }
+ r.cr(w)
+ r.out(w, node.Literal)
+ r.cr(w)
+ case Heading:
+ headingLevel := r.HTMLRendererParameters.HeadingLevelOffset + node.Level
+ openTag, closeTag := headingTagsFromLevel(headingLevel)
+ if entering {
+ if node.IsTitleblock {
+ attrs = append(attrs, `class="title"`)
+ }
+ if node.HeadingID != "" {
+ id := r.ensureUniqueHeadingID(node.HeadingID)
+ if r.HeadingIDPrefix != "" {
+ id = r.HeadingIDPrefix + id
+ }
+ if r.HeadingIDSuffix != "" {
+ id = id + r.HeadingIDSuffix
+ }
+ attrs = append(attrs, fmt.Sprintf(`id="%s"`, id))
+ }
+ r.cr(w)
+ r.tag(w, openTag, attrs)
+ } else {
+ r.out(w, closeTag)
+ if !(node.Parent.Type == Item && node.Next == nil) {
+ r.cr(w)
+ }
+ }
+ case HorizontalRule:
+ r.cr(w)
+ r.outHRTag(w)
+ r.cr(w)
+ case List:
+ openTag := ulTag
+ closeTag := ulCloseTag
+ if node.ListFlags&ListTypeOrdered != 0 {
+ openTag = olTag
+ closeTag = olCloseTag
+ }
+ if node.ListFlags&ListTypeDefinition != 0 {
+ openTag = dlTag
+ closeTag = dlCloseTag
+ }
+ if entering {
+ if node.IsFootnotesList {
+ r.out(w, footnotesDivBytes)
+ r.outHRTag(w)
+ r.cr(w)
+ }
+ r.cr(w)
+ if node.Parent.Type == Item && node.Parent.Parent.Tight {
+ r.cr(w)
+ }
+ r.tag(w, openTag[:len(openTag)-1], attrs)
+ r.cr(w)
+ } else {
+ r.out(w, closeTag)
+ //cr(w)
+ //if node.parent.Type != Item {
+ // cr(w)
+ //}
+ if node.Parent.Type == Item && node.Next != nil {
+ r.cr(w)
+ }
+ if node.Parent.Type == Document || node.Parent.Type == BlockQuote {
+ r.cr(w)
+ }
+ if node.IsFootnotesList {
+ r.out(w, footnotesCloseDivBytes)
+ }
+ }
+ case Item:
+ openTag := liTag
+ closeTag := liCloseTag
+ if node.ListFlags&ListTypeDefinition != 0 {
+ openTag = ddTag
+ closeTag = ddCloseTag
+ }
+ if node.ListFlags&ListTypeTerm != 0 {
+ openTag = dtTag
+ closeTag = dtCloseTag
+ }
+ if entering {
+ if itemOpenCR(node) {
+ r.cr(w)
+ }
+ if node.ListData.RefLink != nil {
+ slug := slugify(node.ListData.RefLink)
+ r.out(w, footnoteItem(r.FootnoteAnchorPrefix, slug))
+ break
+ }
+ r.out(w, openTag)
+ } else {
+ if node.ListData.RefLink != nil {
+ slug := slugify(node.ListData.RefLink)
+ if r.Flags&FootnoteReturnLinks != 0 {
+ r.out(w, footnoteReturnLink(r.FootnoteAnchorPrefix, r.FootnoteReturnLinkContents, slug))
+ }
+ }
+ r.out(w, closeTag)
+ r.cr(w)
+ }
+ case CodeBlock:
+ attrs = appendLanguageAttr(attrs, node.Info)
+ r.cr(w)
+ r.out(w, preTag)
+ r.tag(w, codeTag[:len(codeTag)-1], attrs)
+ escapeAllHTML(w, node.Literal)
+ r.out(w, codeCloseTag)
+ r.out(w, preCloseTag)
+ if node.Parent.Type != Item {
+ r.cr(w)
+ }
+ case Table:
+ if entering {
+ r.cr(w)
+ r.out(w, tableTag)
+ } else {
+ r.out(w, tableCloseTag)
+ r.cr(w)
+ }
+ case TableCell:
+ openTag := tdTag
+ closeTag := tdCloseTag
+ if node.IsHeader {
+ openTag = thTag
+ closeTag = thCloseTag
+ }
+ if entering {
+ align := cellAlignment(node.Align)
+ if align != "" {
+ attrs = append(attrs, fmt.Sprintf(`align="%s"`, align))
+ }
+ if node.Prev == nil {
+ r.cr(w)
+ }
+ r.tag(w, openTag, attrs)
+ } else {
+ r.out(w, closeTag)
+ r.cr(w)
+ }
+ case TableHead:
+ if entering {
+ r.cr(w)
+ r.out(w, theadTag)
+ } else {
+ r.out(w, theadCloseTag)
+ r.cr(w)
+ }
+ case TableBody:
+ if entering {
+ r.cr(w)
+ r.out(w, tbodyTag)
+ // XXX: this is to adhere to a rather silly test. Should fix test.
+ if node.FirstChild == nil {
+ r.cr(w)
+ }
+ } else {
+ r.out(w, tbodyCloseTag)
+ r.cr(w)
+ }
+ case TableRow:
+ if entering {
+ r.cr(w)
+ r.out(w, trTag)
+ } else {
+ r.out(w, trCloseTag)
+ r.cr(w)
+ }
+ default:
+ panic("Unknown node type " + node.Type.String())
+ }
+ return GoToNext
+}
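+
+// A sketch of driving RenderNode directly with Node.Walk, mirroring what Run
+// (in markdown.go) does internally; ast is assumed to come from
+// (*Markdown).Parse:
+//
+//	var buf bytes.Buffer
+//	r := NewHTMLRenderer(HTMLRendererParameters{Flags: CommonHTMLFlags})
+//	r.RenderHeader(&buf, ast)
+//	ast.Walk(func(node *Node, entering bool) WalkStatus {
+//		return r.RenderNode(&buf, node, entering)
+//	})
+//	r.RenderFooter(&buf, ast)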
+
+// RenderHeader writes HTML document preamble and TOC if requested.
+func (r *HTMLRenderer) RenderHeader(w io.Writer, ast *Node) {
+ r.writeDocumentHeader(w)
+ if r.Flags&TOC != 0 {
+ r.writeTOC(w, ast)
+ }
+}
+
+// RenderFooter writes HTML document footer.
+func (r *HTMLRenderer) RenderFooter(w io.Writer, ast *Node) {
+ if r.Flags&CompletePage == 0 {
+ return
+ }
+ io.WriteString(w, "\n</body>\n</html>\n")
+}
+
+func (r *HTMLRenderer) writeDocumentHeader(w io.Writer) {
+ if r.Flags&CompletePage == 0 {
+ return
+ }
+ ending := ""
+ if r.Flags&UseXHTML != 0 {
+ io.WriteString(w, "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" ")
+ io.WriteString(w, "\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n")
+ io.WriteString(w, "<html xmlns=\"http://www.w3.org/1999/xhtml\">\n")
+ ending = " /"
+ } else {
+ io.WriteString(w, "<!DOCTYPE html>\n")
+ io.WriteString(w, "<html>\n")
+ }
+ io.WriteString(w, "<head>\n")
+ io.WriteString(w, " <title>")
+ if r.Flags&Smartypants != 0 {
+ r.sr.Process(w, []byte(r.Title))
+ } else {
+ escapeHTML(w, []byte(r.Title))
+ }
+ io.WriteString(w, "</title>\n")
+ io.WriteString(w, " <meta name=\"GENERATOR\" content=\"Blackfriday Markdown Processor v")
+ io.WriteString(w, Version)
+ io.WriteString(w, "\"")
+ io.WriteString(w, ending)
+ io.WriteString(w, ">\n")
+ io.WriteString(w, " <meta charset=\"utf-8\"")
+ io.WriteString(w, ending)
+ io.WriteString(w, ">\n")
+ if r.CSS != "" {
+ io.WriteString(w, " <link rel=\"stylesheet\" type=\"text/css\" href=\"")
+ escapeHTML(w, []byte(r.CSS))
+ io.WriteString(w, "\"")
+ io.WriteString(w, ending)
+ io.WriteString(w, ">\n")
+ }
+ if r.Icon != "" {
+ io.WriteString(w, " <link rel=\"icon\" type=\"image/x-icon\" href=\"")
+ escapeHTML(w, []byte(r.Icon))
+ io.WriteString(w, "\"")
+ io.WriteString(w, ending)
+ io.WriteString(w, ">\n")
+ }
+ io.WriteString(w, "</head>\n")
+ io.WriteString(w, "<body>\n\n")
+}
+
+func (r *HTMLRenderer) writeTOC(w io.Writer, ast *Node) {
+ buf := bytes.Buffer{}
+
+ inHeading := false
+ tocLevel := 0
+ headingCount := 0
+
+ ast.Walk(func(node *Node, entering bool) WalkStatus {
+ if node.Type == Heading && !node.HeadingData.IsTitleblock {
+ inHeading = entering
+ if entering {
+ node.HeadingID = fmt.Sprintf("toc_%d", headingCount)
+ if node.Level == tocLevel {
+ buf.WriteString("</li>\n\n<li>")
+ } else if node.Level < tocLevel {
+ for node.Level < tocLevel {
+ tocLevel--
+ buf.WriteString("</li>\n</ul>")
+ }
+ buf.WriteString("</li>\n\n<li>")
+ } else {
+ for node.Level > tocLevel {
+ tocLevel++
+ buf.WriteString("\n<ul>\n<li>")
+ }
+ }
+
+ fmt.Fprintf(&buf, `<a href="#toc_%d">`, headingCount)
+ headingCount++
+ } else {
+ buf.WriteString("</a>")
+ }
+ return GoToNext
+ }
+
+ if inHeading {
+ return r.RenderNode(&buf, node, entering)
+ }
+
+ return GoToNext
+ })
+
+ for ; tocLevel > 0; tocLevel-- {
+ buf.WriteString("</li>\n</ul>")
+ }
+
+ if buf.Len() > 0 {
+ io.WriteString(w, "<nav>\n")
+ w.Write(buf.Bytes())
+ io.WriteString(w, "\n\n</nav>\n")
+ }
+ r.lastOutputLen = buf.Len()
+}
diff --git a/vendor/github.com/russross/blackfriday/v2/inline.go b/vendor/github.com/russross/blackfriday/v2/inline.go
new file mode 100644
index 000000000..d45bd9417
--- /dev/null
+++ b/vendor/github.com/russross/blackfriday/v2/inline.go
@@ -0,0 +1,1228 @@
+//
+// Blackfriday Markdown Processor
+// Available at http://github.com/russross/blackfriday
+//
+// Copyright © 2011 Russ Ross <russ@russross.com>.
+// Distributed under the Simplified BSD License.
+// See README.md for details.
+//
+
+//
+// Functions to parse inline elements.
+//
+
+package blackfriday
+
+import (
+ "bytes"
+ "regexp"
+ "strconv"
+)
+
+var (
+ urlRe = `((https?|ftp):\/\/|\/)[-A-Za-z0-9+&@#\/%?=~_|!:,.;\(\)]+`
+ anchorRe = regexp.MustCompile(`^(<a\shref="` + urlRe + `"(\stitle="[^"<>]+")?\s?>` + urlRe + `<\/a>)`)
+
+ // https://www.w3.org/TR/html5/syntax.html#character-references
+ // highest unicode code point in 17 planes (2^20): 1,114,112d =
+ // 7 dec digits or 6 hex digits
+ // named entity references can be 2-31 characters with stuff like &lt;
+ // at one end and &CounterClockwiseContourIntegral; at the other. There
+ // are also sometimes numbers at the end, although this isn't inherent
+ // in the specification; there are never numbers anywhere else in
+ // current character references, though; see &frac34; and &blk12;, etc.
+ // https://www.w3.org/TR/html5/syntax.html#named-character-references
+ //
+ // entity := "&" (named group | number ref) ";"
+ // named group := [a-zA-Z]{2,31}[0-9]{0,2}
+ // number ref := "#" (dec ref | hex ref)
+ // dec ref := [0-9]{1,7}
+ // hex ref := ("x" | "X") [0-9a-fA-F]{1,6}
+ htmlEntityRe = regexp.MustCompile(`&([a-zA-Z]{2,31}[0-9]{0,2}|#([0-9]{1,7}|[xX][0-9a-fA-F]{1,6}));`)
+)
+
+// Functions to parse text within a block
+// Each function returns the number of chars taken care of
+// data is the complete block being rendered
+// offset is the number of valid chars before the current cursor
+
+func (p *Markdown) inline(currBlock *Node, data []byte) {
+ // handlers might call us recursively: enforce a maximum depth
+ if p.nesting >= p.maxNesting || len(data) == 0 {
+ return
+ }
+ p.nesting++
+ beg, end := 0, 0
+ for end < len(data) {
+ handler := p.inlineCallback[data[end]]
+ if handler != nil {
+ if consumed, node := handler(p, data, end); consumed == 0 {
+ // No action from the callback.
+ end++
+ } else {
+ // Copy inactive chars into the output.
+ currBlock.AppendChild(text(data[beg:end]))
+ if node != nil {
+ currBlock.AppendChild(node)
+ }
+ // Skip past whatever the callback used.
+ beg = end + consumed
+ end = beg
+ }
+ } else {
+ end++
+ }
+ }
+ if beg < len(data) {
+ if data[end-1] == '\n' {
+ end--
+ }
+ currBlock.AppendChild(text(data[beg:end]))
+ }
+ p.nesting--
+}
+
+// single and double emphasis parsing
+func emphasis(p *Markdown, data []byte, offset int) (int, *Node) {
+ data = data[offset:]
+ c := data[0]
+
+ if len(data) > 2 && data[1] != c {
+ // whitespace cannot follow an opening emphasis;
+ // strikethrough only takes two characters '~~'
+ if c == '~' || isspace(data[1]) {
+ return 0, nil
+ }
+ ret, node := helperEmphasis(p, data[1:], c)
+ if ret == 0 {
+ return 0, nil
+ }
+
+ return ret + 1, node
+ }
+
+ if len(data) > 3 && data[1] == c && data[2] != c {
+ if isspace(data[2]) {
+ return 0, nil
+ }
+ ret, node := helperDoubleEmphasis(p, data[2:], c)
+ if ret == 0 {
+ return 0, nil
+ }
+
+ return ret + 2, node
+ }
+
+ if len(data) > 4 && data[1] == c && data[2] == c && data[3] != c {
+ if c == '~' || isspace(data[3]) {
+ return 0, nil
+ }
+ ret, node := helperTripleEmphasis(p, data, 3, c)
+ if ret == 0 {
+ return 0, nil
+ }
+
+ return ret + 3, node
+ }
+
+ return 0, nil
+}
+
+func codeSpan(p *Markdown, data []byte, offset int) (int, *Node) {
+ data = data[offset:]
+
+ nb := 0
+
+ // count the number of backticks in the delimiter
+ for nb < len(data) && data[nb] == '`' {
+ nb++
+ }
+
+ // find the next delimiter
+ i, end := 0, 0
+ for end = nb; end < len(data) && i < nb; end++ {
+ if data[end] == '`' {
+ i++
+ } else {
+ i = 0
+ }
+ }
+
+ // no matching delimiter?
+ if i < nb && end >= len(data) {
+ return 0, nil
+ }
+
+ // trim outside whitespace
+ fBegin := nb
+ for fBegin < end && data[fBegin] == ' ' {
+ fBegin++
+ }
+
+ fEnd := end - nb
+ for fEnd > fBegin && data[fEnd-1] == ' ' {
+ fEnd--
+ }
+
+ // render the code span
+ if fBegin != fEnd {
+ code := NewNode(Code)
+ code.Literal = data[fBegin:fEnd]
+ return end, code
+ }
+
+ return end, nil
+}
+
+// newline preceded by two spaces becomes <br>
+func maybeLineBreak(p *Markdown, data []byte, offset int) (int, *Node) {
+ origOffset := offset
+ for offset < len(data) && data[offset] == ' ' {
+ offset++
+ }
+
+ if offset < len(data) && data[offset] == '\n' {
+ if offset-origOffset >= 2 {
+ return offset - origOffset + 1, NewNode(Hardbreak)
+ }
+ return offset - origOffset, nil
+ }
+ return 0, nil
+}
+
+// newline without two spaces works when HardLineBreak is enabled
+func lineBreak(p *Markdown, data []byte, offset int) (int, *Node) {
+ if p.extensions&HardLineBreak != 0 {
+ return 1, NewNode(Hardbreak)
+ }
+ return 0, nil
+}
+
+type linkType int
+
+const (
+ linkNormal linkType = iota
+ linkImg
+ linkDeferredFootnote
+ linkInlineFootnote
+)
+
+func isReferenceStyleLink(data []byte, pos int, t linkType) bool {
+ if t == linkDeferredFootnote {
+ return false
+ }
+ return pos < len(data)-1 && data[pos] == '[' && data[pos+1] != '^'
+}
+
+func maybeImage(p *Markdown, data []byte, offset int) (int, *Node) {
+ if offset < len(data)-1 && data[offset+1] == '[' {
+ return link(p, data, offset)
+ }
+ return 0, nil
+}
+
+func maybeInlineFootnote(p *Markdown, data []byte, offset int) (int, *Node) {
+ if offset < len(data)-1 && data[offset+1] == '[' {
+ return link(p, data, offset)
+ }
+ return 0, nil
+}
+
+// '[': parse a link or an image or a footnote
+func link(p *Markdown, data []byte, offset int) (int, *Node) {
+ // no links allowed inside regular links, footnote, and deferred footnotes
+ if p.insideLink && (offset > 0 && data[offset-1] == '[' || len(data)-1 > offset && data[offset+1] == '^') {
+ return 0, nil
+ }
+
+ var t linkType
+ switch {
+ // special case: ![^text] == deferred footnote (that follows something with
+ // an exclamation point)
+ case p.extensions&Footnotes != 0 && len(data)-1 > offset && data[offset+1] == '^':
+ t = linkDeferredFootnote
+ // ![alt] == image
+ case offset >= 0 && data[offset] == '!':
+ t = linkImg
+ offset++
+ // ^[text] == inline footnote
+ // [^refId] == deferred footnote
+ case p.extensions&Footnotes != 0:
+ if offset >= 0 && data[offset] == '^' {
+ t = linkInlineFootnote
+ offset++
+ } else if len(data)-1 > offset && data[offset+1] == '^' {
+ t = linkDeferredFootnote
+ }
+ // [text] == regular link
+ default:
+ t = linkNormal
+ }
+
+ data = data[offset:]
+
+ var (
+ i = 1
+ noteID int
+ title, link, altContent []byte
+ textHasNl = false
+ )
+
+ if t == linkDeferredFootnote {
+ i++
+ }
+
+ // look for the matching closing bracket
+ for level := 1; level > 0 && i < len(data); i++ {
+ switch {
+ case data[i] == '\n':
+ textHasNl = true
+
+ case isBackslashEscaped(data, i):
+ continue
+
+ case data[i] == '[':
+ level++
+
+ case data[i] == ']':
+ level--
+ if level <= 0 {
+ i-- // compensate for extra i++ in for loop
+ }
+ }
+ }
+
+ if i >= len(data) {
+ return 0, nil
+ }
+
+ txtE := i
+ i++
+ var footnoteNode *Node
+
+ // skip any amount of whitespace or newline
+ // (this is much more lax than original markdown syntax)
+ for i < len(data) && isspace(data[i]) {
+ i++
+ }
+
+ // inline style link
+ switch {
+ case i < len(data) && data[i] == '(':
+ // skip initial whitespace
+ i++
+
+ for i < len(data) && isspace(data[i]) {
+ i++
+ }
+
+ linkB := i
+
+ // look for link end: ' " )
+ findlinkend:
+ for i < len(data) {
+ switch {
+ case data[i] == '\\':
+ i += 2
+
+ case data[i] == ')' || data[i] == '\'' || data[i] == '"':
+ break findlinkend
+
+ default:
+ i++
+ }
+ }
+
+ if i >= len(data) {
+ return 0, nil
+ }
+ linkE := i
+
+ // look for title end if present
+ titleB, titleE := 0, 0
+ if data[i] == '\'' || data[i] == '"' {
+ i++
+ titleB = i
+
+ findtitleend:
+ for i < len(data) {
+ switch {
+ case data[i] == '\\':
+ i += 2
+
+ case data[i] == ')':
+ break findtitleend
+
+ default:
+ i++
+ }
+ }
+
+ if i >= len(data) {
+ return 0, nil
+ }
+
+ // skip whitespace after title
+ titleE = i - 1
+ for titleE > titleB && isspace(data[titleE]) {
+ titleE--
+ }
+
+ // check for closing quote presence
+ if data[titleE] != '\'' && data[titleE] != '"' {
+ titleB, titleE = 0, 0
+ linkE = i
+ }
+ }
+
+ // remove whitespace at the end of the link
+ for linkE > linkB && isspace(data[linkE-1]) {
+ linkE--
+ }
+
+ // remove optional angle brackets around the link
+ if data[linkB] == '<' {
+ linkB++
+ }
+ if data[linkE-1] == '>' {
+ linkE--
+ }
+
+ // build escaped link and title
+ if linkE > linkB {
+ link = data[linkB:linkE]
+ }
+
+ if titleE > titleB {
+ title = data[titleB:titleE]
+ }
+
+ i++
+
+ // reference style link
+ case isReferenceStyleLink(data, i, t):
+ var id []byte
+ altContentConsidered := false
+
+ // look for the id
+ i++
+ linkB := i
+ for i < len(data) && data[i] != ']' {
+ i++
+ }
+ if i >= len(data) {
+ return 0, nil
+ }
+ linkE := i
+
+ // find the reference
+ if linkB == linkE {
+ if textHasNl {
+ var b bytes.Buffer
+
+ for j := 1; j < txtE; j++ {
+ switch {
+ case data[j] != '\n':
+ b.WriteByte(data[j])
+ case data[j-1] != ' ':
+ b.WriteByte(' ')
+ }
+ }
+
+ id = b.Bytes()
+ } else {
+ id = data[1:txtE]
+ altContentConsidered = true
+ }
+ } else {
+ id = data[linkB:linkE]
+ }
+
+ // find the reference with matching id
+ lr, ok := p.getRef(string(id))
+ if !ok {
+ return 0, nil
+ }
+
+ // keep link and title from reference
+ link = lr.link
+ title = lr.title
+ if altContentConsidered {
+ altContent = lr.text
+ }
+ i++
+
+ // shortcut reference style link or reference or inline footnote
+ default:
+ var id []byte
+
+ // craft the id
+ if textHasNl {
+ var b bytes.Buffer
+
+ for j := 1; j < txtE; j++ {
+ switch {
+ case data[j] != '\n':
+ b.WriteByte(data[j])
+ case data[j-1] != ' ':
+ b.WriteByte(' ')
+ }
+ }
+
+ id = b.Bytes()
+ } else {
+ if t == linkDeferredFootnote {
+ id = data[2:txtE] // get rid of the ^
+ } else {
+ id = data[1:txtE]
+ }
+ }
+
+ footnoteNode = NewNode(Item)
+ if t == linkInlineFootnote {
+ // create a new reference
+ noteID = len(p.notes) + 1
+
+ var fragment []byte
+ if len(id) > 0 {
+ if len(id) < 16 {
+ fragment = make([]byte, len(id))
+ } else {
+ fragment = make([]byte, 16)
+ }
+ copy(fragment, slugify(id))
+ } else {
+ fragment = append([]byte("footnote-"), []byte(strconv.Itoa(noteID))...)
+ }
+
+ ref := &reference{
+ noteID: noteID,
+ hasBlock: false,
+ link: fragment,
+ title: id,
+ footnote: footnoteNode,
+ }
+
+ p.notes = append(p.notes, ref)
+
+ link = ref.link
+ title = ref.title
+ } else {
+ // find the reference with matching id
+ lr, ok := p.getRef(string(id))
+ if !ok {
+ return 0, nil
+ }
+
+ if t == linkDeferredFootnote {
+ lr.noteID = len(p.notes) + 1
+ lr.footnote = footnoteNode
+ p.notes = append(p.notes, lr)
+ }
+
+ // keep link and title from reference
+ link = lr.link
+ // if inline footnote, title == footnote contents
+ title = lr.title
+ noteID = lr.noteID
+ }
+
+ // rewind the whitespace
+ i = txtE + 1
+ }
+
+ var uLink []byte
+ if t == linkNormal || t == linkImg {
+ if len(link) > 0 {
+ var uLinkBuf bytes.Buffer
+ unescapeText(&uLinkBuf, link)
+ uLink = uLinkBuf.Bytes()
+ }
+
+ // links need something to click on and somewhere to go
+ if len(uLink) == 0 || (t == linkNormal && txtE <= 1) {
+ return 0, nil
+ }
+ }
+
+ // call the relevant rendering function
+ var linkNode *Node
+ switch t {
+ case linkNormal:
+ linkNode = NewNode(Link)
+ linkNode.Destination = normalizeURI(uLink)
+ linkNode.Title = title
+ if len(altContent) > 0 {
+ linkNode.AppendChild(text(altContent))
+ } else {
+ // links cannot contain other links, so turn off link parsing
+ // temporarily and recurse
+ insideLink := p.insideLink
+ p.insideLink = true
+ p.inline(linkNode, data[1:txtE])
+ p.insideLink = insideLink
+ }
+
+ case linkImg:
+ linkNode = NewNode(Image)
+ linkNode.Destination = uLink
+ linkNode.Title = title
+ linkNode.AppendChild(text(data[1:txtE]))
+ i++
+
+ case linkInlineFootnote, linkDeferredFootnote:
+ linkNode = NewNode(Link)
+ linkNode.Destination = link
+ linkNode.Title = title
+ linkNode.NoteID = noteID
+ linkNode.Footnote = footnoteNode
+ if t == linkInlineFootnote {
+ i++
+ }
+
+ default:
+ return 0, nil
+ }
+
+ return i, linkNode
+}
+
+func (p *Markdown) inlineHTMLComment(data []byte) int {
+ if len(data) < 5 {
+ return 0
+ }
+ if data[0] != '<' || data[1] != '!' || data[2] != '-' || data[3] != '-' {
+ return 0
+ }
+ i := 5
+ // scan for an end-of-comment marker, across lines if necessary
+ for i < len(data) && !(data[i-2] == '-' && data[i-1] == '-' && data[i] == '>') {
+ i++
+ }
+ // no end-of-comment marker
+ if i >= len(data) {
+ return 0
+ }
+ return i + 1
+}
+
+func stripMailto(link []byte) []byte {
+ if bytes.HasPrefix(link, []byte("mailto://")) {
+ return link[9:]
+ } else if bytes.HasPrefix(link, []byte("mailto:")) {
+ return link[7:]
+ } else {
+ return link
+ }
+}
+
+// autolinkType specifies a kind of autolink that gets detected.
+type autolinkType int
+
+// These are the possible flag values for the autolink renderer.
+const (
+ notAutolink autolinkType = iota
+ normalAutolink
+ emailAutolink
+)
+
+// '<' when tags or autolinks are allowed
+func leftAngle(p *Markdown, data []byte, offset int) (int, *Node) {
+ data = data[offset:]
+ altype, end := tagLength(data)
+ if size := p.inlineHTMLComment(data); size > 0 {
+ end = size
+ }
+ if end > 2 {
+ if altype != notAutolink {
+ var uLink bytes.Buffer
+ unescapeText(&uLink, data[1:end+1-2])
+ if uLink.Len() > 0 {
+ link := uLink.Bytes()
+ node := NewNode(Link)
+ node.Destination = link
+ if altype == emailAutolink {
+ node.Destination = append([]byte("mailto:"), link...)
+ }
+ node.AppendChild(text(stripMailto(link)))
+ return end, node
+ }
+ } else {
+ htmlTag := NewNode(HTMLSpan)
+ htmlTag.Literal = data[:end]
+ return end, htmlTag
+ }
+ }
+
+ return end, nil
+}
+
+// '\\' backslash escape
+var escapeChars = []byte("\\`*_{}[]()#+-.!:|&<>~")
+
+func escape(p *Markdown, data []byte, offset int) (int, *Node) {
+ data = data[offset:]
+
+ if len(data) > 1 {
+ if p.extensions&BackslashLineBreak != 0 && data[1] == '\n' {
+ return 2, NewNode(Hardbreak)
+ }
+ if bytes.IndexByte(escapeChars, data[1]) < 0 {
+ return 0, nil
+ }
+
+ return 2, text(data[1:2])
+ }
+
+ return 2, nil
+}
+
+func unescapeText(ob *bytes.Buffer, src []byte) {
+ i := 0
+ for i < len(src) {
+ org := i
+ for i < len(src) && src[i] != '\\' {
+ i++
+ }
+
+ if i > org {
+ ob.Write(src[org:i])
+ }
+
+ if i+1 >= len(src) {
+ break
+ }
+
+ ob.WriteByte(src[i+1])
+ i += 2
+ }
+}
+
+// '&' escaped when it doesn't belong to an entity
+// valid entities are assumed to be anything matching &#?[A-Za-z0-9]+;
+func entity(p *Markdown, data []byte, offset int) (int, *Node) {
+ data = data[offset:]
+
+ end := 1
+
+ if end < len(data) && data[end] == '#' {
+ end++
+ }
+
+ for end < len(data) && isalnum(data[end]) {
+ end++
+ }
+
+ if end < len(data) && data[end] == ';' {
+ end++ // real entity
+ } else {
+ return 0, nil // lone '&'
+ }
+
+ ent := data[:end]
+ // undo &amp; escaping or it will be converted to &amp;amp; by another
+ // escaper in the renderer
+ if bytes.Equal(ent, []byte("&amp;")) {
+ ent = []byte{'&'}
+ }
+
+ return end, text(ent)
+}
+
+func linkEndsWithEntity(data []byte, linkEnd int) bool {
+ entityRanges := htmlEntityRe.FindAllIndex(data[:linkEnd], -1)
+ return entityRanges != nil && entityRanges[len(entityRanges)-1][1] == linkEnd
+}
+
+// hasPrefixCaseInsensitive is a custom implementation of
+// strings.HasPrefix(strings.ToLower(s), prefix)
+// we rolled our own because ToLower pulls in a huge machinery of lowercasing
+// anything from Unicode and that's very slow. Since this func will only be
+// used on ASCII protocol prefixes, we can take shortcuts.
+func hasPrefixCaseInsensitive(s, prefix []byte) bool {
+ if len(s) < len(prefix) {
+ return false
+ }
+ delta := byte('a' - 'A')
+ for i, b := range prefix {
+ if b != s[i] && b != s[i]+delta {
+ return false
+ }
+ }
+ return true
+}
+
+var protocolPrefixes = [][]byte{
+ []byte("http://"),
+ []byte("https://"),
+ []byte("ftp://"),
+ []byte("file://"),
+ []byte("mailto:"),
+}
+
+const shortestPrefix = 6 // len("ftp://"), the shortest of the above
+
+func maybeAutoLink(p *Markdown, data []byte, offset int) (int, *Node) {
+ // quick check to rule out most false hits
+ if p.insideLink || len(data) < offset+shortestPrefix {
+ return 0, nil
+ }
+ for _, prefix := range protocolPrefixes {
+ endOfHead := offset + 8 // 8 is the len() of the longest prefix
+ if endOfHead > len(data) {
+ endOfHead = len(data)
+ }
+ if hasPrefixCaseInsensitive(data[offset:endOfHead], prefix) {
+ return autoLink(p, data, offset)
+ }
+ }
+ return 0, nil
+}
+
+func autoLink(p *Markdown, data []byte, offset int) (int, *Node) {
+ // Now a more expensive check to see if we're not inside an anchor element
+ anchorStart := offset
+ offsetFromAnchor := 0
+ for anchorStart > 0 && data[anchorStart] != '<' {
+ anchorStart--
+ offsetFromAnchor++
+ }
+
+ anchorStr := anchorRe.Find(data[anchorStart:])
+ if anchorStr != nil {
+ anchorClose := NewNode(HTMLSpan)
+ anchorClose.Literal = anchorStr[offsetFromAnchor:]
+ return len(anchorStr) - offsetFromAnchor, anchorClose
+ }
+
+ // scan backward for a word boundary
+ rewind := 0
+ for offset-rewind > 0 && rewind <= 7 && isletter(data[offset-rewind-1]) {
+ rewind++
+ }
+ if rewind > 6 { // longest supported protocol is "mailto" which has 6 letters
+ return 0, nil
+ }
+
+ origData := data
+ data = data[offset-rewind:]
+
+ if !isSafeLink(data) {
+ return 0, nil
+ }
+
+ linkEnd := 0
+ for linkEnd < len(data) && !isEndOfLink(data[linkEnd]) {
+ linkEnd++
+ }
+
+ // Skip punctuation at the end of the link
+ if (data[linkEnd-1] == '.' || data[linkEnd-1] == ',') && data[linkEnd-2] != '\\' {
+ linkEnd--
+ }
+
+ // But don't skip semicolon if it's a part of escaped entity:
+ if data[linkEnd-1] == ';' && data[linkEnd-2] != '\\' && !linkEndsWithEntity(data, linkEnd) {
+ linkEnd--
+ }
+
+ // See if the link finishes with a punctuation sign that can be closed.
+ var copen byte
+ switch data[linkEnd-1] {
+ case '"':
+ copen = '"'
+ case '\'':
+ copen = '\''
+ case ')':
+ copen = '('
+ case ']':
+ copen = '['
+ case '}':
+ copen = '{'
+ default:
+ copen = 0
+ }
+
+ if copen != 0 {
+ bufEnd := offset - rewind + linkEnd - 2
+
+ openDelim := 1
+
+ /* Try to close the final punctuation sign in this same line;
+ * if we managed to close it outside of the URL, that means that it's
+ * not part of the URL. If it closes inside the URL, that means it
+ * is part of the URL.
+ *
+ * Examples:
+ *
+ * foo http://www.pokemon.com/Pikachu_(Electric) bar
+ * => http://www.pokemon.com/Pikachu_(Electric)
+ *
+ * foo (http://www.pokemon.com/Pikachu_(Electric)) bar
+ * => http://www.pokemon.com/Pikachu_(Electric)
+ *
+ * foo http://www.pokemon.com/Pikachu_(Electric)) bar
+ * => http://www.pokemon.com/Pikachu_(Electric))
+ *
+ * (foo http://www.pokemon.com/Pikachu_(Electric)) bar
+ * => foo http://www.pokemon.com/Pikachu_(Electric)
+ */
+
+ for bufEnd >= 0 && origData[bufEnd] != '\n' && openDelim != 0 {
+ if origData[bufEnd] == data[linkEnd-1] {
+ openDelim++
+ }
+
+ if origData[bufEnd] == copen {
+ openDelim--
+ }
+
+ bufEnd--
+ }
+
+ if openDelim == 0 {
+ linkEnd--
+ }
+ }
+
+ var uLink bytes.Buffer
+ unescapeText(&uLink, data[:linkEnd])
+
+ if uLink.Len() > 0 {
+ node := NewNode(Link)
+ node.Destination = uLink.Bytes()
+ node.AppendChild(text(uLink.Bytes()))
+ return linkEnd, node
+ }
+
+ return linkEnd, nil
+}
+
+func isEndOfLink(char byte) bool {
+ return isspace(char) || char == '<'
+}
+
+var validUris = [][]byte{[]byte("http://"), []byte("https://"), []byte("ftp://"), []byte("mailto://")}
+var validPaths = [][]byte{[]byte("/"), []byte("./"), []byte("../")}
+
+func isSafeLink(link []byte) bool {
+ for _, path := range validPaths {
+ if len(link) >= len(path) && bytes.Equal(link[:len(path)], path) {
+ if len(link) == len(path) {
+ return true
+ } else if isalnum(link[len(path)]) {
+ return true
+ }
+ }
+ }
+
+ for _, prefix := range validUris {
+ // TODO: handle unicode here
+ // case-insensitive prefix test
+ if len(link) > len(prefix) && bytes.Equal(bytes.ToLower(link[:len(prefix)]), prefix) && isalnum(link[len(prefix)]) {
+ return true
+ }
+ }
+
+ return false
+}
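+
+// A small sketch of the distinction made above (the URLs are placeholders):
+//
+//	isSafeLink([]byte("https://example.com/page")) // true: known scheme
+//	isSafeLink([]byte("./docs/readme.md"))         // true: relative path
+//	isSafeLink([]byte("javascript:alert(1)"))      // false: rejected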
+
+// return the length of the given tag, or 0 if it's not valid
+func tagLength(data []byte) (autolink autolinkType, end int) {
+ var i, j int
+
+ // a valid tag can't be shorter than 3 chars
+ if len(data) < 3 {
+ return notAutolink, 0
+ }
+
+ // begins with a '<' optionally followed by '/', followed by letter or number
+ if data[0] != '<' {
+ return notAutolink, 0
+ }
+ if data[1] == '/' {
+ i = 2
+ } else {
+ i = 1
+ }
+
+ if !isalnum(data[i]) {
+ return notAutolink, 0
+ }
+
+ // scheme test
+ autolink = notAutolink
+
+ // try to find the beginning of a URI
+ for i < len(data) && (isalnum(data[i]) || data[i] == '.' || data[i] == '+' || data[i] == '-') {
+ i++
+ }
+
+ if i > 1 && i < len(data) && data[i] == '@' {
+ if j = isMailtoAutoLink(data[i:]); j != 0 {
+ return emailAutolink, i + j
+ }
+ }
+
+ if i > 2 && i < len(data) && data[i] == ':' {
+ autolink = normalAutolink
+ i++
+ }
+
+ // complete autolink test: no whitespace or ' or "
+ switch {
+ case i >= len(data):
+ autolink = notAutolink
+ case autolink != notAutolink:
+ j = i
+
+ for i < len(data) {
+ if data[i] == '\\' {
+ i += 2
+ } else if data[i] == '>' || data[i] == '\'' || data[i] == '"' || isspace(data[i]) {
+ break
+ } else {
+ i++
+ }
+
+ }
+
+ if i >= len(data) {
+ return autolink, 0
+ }
+ if i > j && data[i] == '>' {
+ return autolink, i + 1
+ }
+
+ // one of the forbidden chars has been found
+ autolink = notAutolink
+ }
+ i += bytes.IndexByte(data[i:], '>')
+ if i < 0 {
+ return autolink, 0
+ }
+ return autolink, i + 1
+}
+
+// look for the address part of a mail autolink and '>'
+// this is less strict than the original markdown e-mail address matching
+func isMailtoAutoLink(data []byte) int {
+ nb := 0
+
+ // address is assumed to be: [-@._a-zA-Z0-9]+ with exactly one '@'
+ for i := 0; i < len(data); i++ {
+ if isalnum(data[i]) {
+ continue
+ }
+
+ switch data[i] {
+ case '@':
+ nb++
+
+ case '-', '.', '_':
+ break
+
+ case '>':
+ if nb == 1 {
+ return i + 1
+ }
+ return 0
+ default:
+ return 0
+ }
+ }
+
+ return 0
+}
+
+// look for the next emph char, skipping other constructs
+func helperFindEmphChar(data []byte, c byte) int {
+ i := 0
+
+ for i < len(data) {
+ for i < len(data) && data[i] != c && data[i] != '`' && data[i] != '[' {
+ i++
+ }
+ if i >= len(data) {
+ return 0
+ }
+ // do not count escaped chars
+ if i != 0 && data[i-1] == '\\' {
+ i++
+ continue
+ }
+ if data[i] == c {
+ return i
+ }
+
+ if data[i] == '`' {
+ // skip a code span
+ tmpI := 0
+ i++
+ for i < len(data) && data[i] != '`' {
+ if tmpI == 0 && data[i] == c {
+ tmpI = i
+ }
+ i++
+ }
+ if i >= len(data) {
+ return tmpI
+ }
+ i++
+ } else if data[i] == '[' {
+ // skip a link
+ tmpI := 0
+ i++
+ for i < len(data) && data[i] != ']' {
+ if tmpI == 0 && data[i] == c {
+ tmpI = i
+ }
+ i++
+ }
+ i++
+ for i < len(data) && (data[i] == ' ' || data[i] == '\n') {
+ i++
+ }
+ if i >= len(data) {
+ return tmpI
+ }
+ if data[i] != '[' && data[i] != '(' { // not a link
+ if tmpI > 0 {
+ return tmpI
+ }
+ continue
+ }
+ cc := data[i]
+ i++
+ for i < len(data) && data[i] != cc {
+ if tmpI == 0 && data[i] == c {
+ return i
+ }
+ i++
+ }
+ if i >= len(data) {
+ return tmpI
+ }
+ i++
+ }
+ }
+ return 0
+}
+
+func helperEmphasis(p *Markdown, data []byte, c byte) (int, *Node) {
+ i := 0
+
+ // skip one symbol if coming from emph3
+ if len(data) > 1 && data[0] == c && data[1] == c {
+ i = 1
+ }
+
+ for i < len(data) {
+ length := helperFindEmphChar(data[i:], c)
+ if length == 0 {
+ return 0, nil
+ }
+ i += length
+ if i >= len(data) {
+ return 0, nil
+ }
+
+ if i+1 < len(data) && data[i+1] == c {
+ i++
+ continue
+ }
+
+ if data[i] == c && !isspace(data[i-1]) {
+
+ if p.extensions&NoIntraEmphasis != 0 {
+ if !(i+1 == len(data) || isspace(data[i+1]) || ispunct(data[i+1])) {
+ continue
+ }
+ }
+
+ emph := NewNode(Emph)
+ p.inline(emph, data[:i])
+ return i + 1, emph
+ }
+ }
+
+ return 0, nil
+}
+
+func helperDoubleEmphasis(p *Markdown, data []byte, c byte) (int, *Node) {
+ i := 0
+
+ for i < len(data) {
+ length := helperFindEmphChar(data[i:], c)
+ if length == 0 {
+ return 0, nil
+ }
+ i += length
+
+ if i+1 < len(data) && data[i] == c && data[i+1] == c && i > 0 && !isspace(data[i-1]) {
+ nodeType := Strong
+ if c == '~' {
+ nodeType = Del
+ }
+ node := NewNode(nodeType)
+ p.inline(node, data[:i])
+ return i + 2, node
+ }
+ i++
+ }
+ return 0, nil
+}
+
+func helperTripleEmphasis(p *Markdown, data []byte, offset int, c byte) (int, *Node) {
+ i := 0
+ origData := data
+ data = data[offset:]
+
+ for i < len(data) {
+ length := helperFindEmphChar(data[i:], c)
+ if length == 0 {
+ return 0, nil
+ }
+ i += length
+
+ // skip symbols preceded by whitespace
+ if data[i] != c || isspace(data[i-1]) {
+ continue
+ }
+
+ switch {
+ case i+2 < len(data) && data[i+1] == c && data[i+2] == c:
+ // triple symbol found
+ strong := NewNode(Strong)
+ em := NewNode(Emph)
+ strong.AppendChild(em)
+ p.inline(em, data[:i])
+ return i + 3, strong
+ case (i+1 < len(data) && data[i+1] == c):
+ // double symbol found, hand over to emph1
+ length, node := helperEmphasis(p, origData[offset-2:], c)
+ if length == 0 {
+ return 0, nil
+ }
+ return length - 2, node
+ default:
+ // single symbol found, hand over to emph2
+ length, node := helperDoubleEmphasis(p, origData[offset-1:], c)
+ if length == 0 {
+ return 0, nil
+ }
+ return length - 1, node
+ }
+ }
+ return 0, nil
+}
+
+func text(s []byte) *Node {
+ node := NewNode(Text)
+ node.Literal = s
+ return node
+}
+
+func normalizeURI(s []byte) []byte {
+ return s // TODO: implement
+}
diff --git a/vendor/github.com/russross/blackfriday/v2/markdown.go b/vendor/github.com/russross/blackfriday/v2/markdown.go
new file mode 100644
index 000000000..58d2e4538
--- /dev/null
+++ b/vendor/github.com/russross/blackfriday/v2/markdown.go
@@ -0,0 +1,950 @@
+// Blackfriday Markdown Processor
+// Available at http://github.com/russross/blackfriday
+//
+// Copyright © 2011 Russ Ross <russ@russross.com>.
+// Distributed under the Simplified BSD License.
+// See README.md for details.
+
+package blackfriday
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "strings"
+ "unicode/utf8"
+)
+
+//
+// Markdown parsing and processing
+//
+
+// Version string of the package. Appears in the rendered document when
+// CompletePage flag is on.
+const Version = "2.0"
+
+// Extensions is a bitwise or'ed collection of enabled Blackfriday's
+// extensions.
+type Extensions int
+
+// These are the supported markdown parsing extensions.
+// OR these values together to select multiple extensions.
+const (
+ NoExtensions Extensions = 0
+ NoIntraEmphasis Extensions = 1 << iota // Ignore emphasis markers inside words
+ Tables // Render tables
+ FencedCode // Render fenced code blocks
+ Autolink // Detect embedded URLs that are not explicitly marked
+ Strikethrough // Strikethrough text using ~~test~~
+ LaxHTMLBlocks // Loosen up HTML block parsing rules
+ SpaceHeadings // Be strict about prefix heading rules
+ HardLineBreak // Translate newlines into line breaks
+ TabSizeEight // Expand tabs to eight spaces instead of four
+ Footnotes // Pandoc-style footnotes
+ NoEmptyLineBeforeBlock // No need to insert an empty line to start a (code, quote, ordered list, unordered list) block
+ HeadingIDs // specify heading IDs with {#id}
+ Titleblock // Titleblock ala pandoc
+ AutoHeadingIDs // Create the heading ID from the text
+ BackslashLineBreak // Translate trailing backslashes into line breaks
+ DefinitionLists // Render definition lists
+
+ CommonHTMLFlags HTMLFlags = UseXHTML | Smartypants |
+ SmartypantsFractions | SmartypantsDashes | SmartypantsLatexDashes
+
+ CommonExtensions Extensions = NoIntraEmphasis | Tables | FencedCode |
+ Autolink | Strikethrough | SpaceHeadings | HeadingIDs |
+ BackslashLineBreak | DefinitionLists
+)
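+
+// A minimal sketch of selecting extensions, assuming the markdown source is
+// held in a []byte named input; any of the flags above can be OR'ed together:
+//
+//	exts := CommonExtensions | Footnotes | HardLineBreak
+//	output := Run(input, WithExtensions(exts))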
+
+// ListType contains bitwise or'ed flags for list and list item objects.
+type ListType int
+
+// These are the possible flag values for the ListItem renderer.
+// Multiple flag values may be ORed together.
+// These are mostly of interest if you are writing a new output format.
+const (
+ ListTypeOrdered ListType = 1 << iota
+ ListTypeDefinition
+ ListTypeTerm
+
+ ListItemContainsBlock
+ ListItemBeginningOfList // TODO: figure out if this is of any use now
+ ListItemEndOfList
+)
+
+// CellAlignFlags holds a type of alignment in a table cell.
+type CellAlignFlags int
+
+// These are the possible flag values for the table cell renderer.
+// Only a single one of these values will be used; they are not ORed together.
+// These are mostly of interest if you are writing a new output format.
+const (
+ TableAlignmentLeft CellAlignFlags = 1 << iota
+ TableAlignmentRight
+ TableAlignmentCenter = (TableAlignmentLeft | TableAlignmentRight)
+)
+
+// The size of a tab stop.
+const (
+ TabSizeDefault = 4
+ TabSizeDouble = 8
+)
+
+// blockTags is a set of tags that are recognized as HTML block tags.
+// Any of these can be included in markdown text without special escaping.
+var blockTags = map[string]struct{}{
+ "blockquote": {},
+ "del": {},
+ "div": {},
+ "dl": {},
+ "fieldset": {},
+ "form": {},
+ "h1": {},
+ "h2": {},
+ "h3": {},
+ "h4": {},
+ "h5": {},
+ "h6": {},
+ "iframe": {},
+ "ins": {},
+ "math": {},
+ "noscript": {},
+ "ol": {},
+ "pre": {},
+ "p": {},
+ "script": {},
+ "style": {},
+ "table": {},
+ "ul": {},
+
+ // HTML5
+ "address": {},
+ "article": {},
+ "aside": {},
+ "canvas": {},
+ "figcaption": {},
+ "figure": {},
+ "footer": {},
+ "header": {},
+ "hgroup": {},
+ "main": {},
+ "nav": {},
+ "output": {},
+ "progress": {},
+ "section": {},
+ "video": {},
+}
+
+// Renderer is the rendering interface. This is mostly of interest if you are
+// implementing a new rendering format.
+//
+// Only an HTML implementation is provided in this repository, see the README
+// for external implementations.
+type Renderer interface {
+ // RenderNode is the main rendering method. It will be called once for
+ // every leaf node and twice for every non-leaf node (first with
+ // entering=true, then with entering=false). The method should write its
+ // rendition of the node to the supplied writer w.
+ RenderNode(w io.Writer, node *Node, entering bool) WalkStatus
+
+ // RenderHeader is a method that allows the renderer to produce some
+ // content preceding the main body of the output document. The header is
+ // understood in the broad sense here. For example, the default HTML
+ // renderer will write not only the HTML document preamble, but also the
+ // table of contents if it was requested.
+ //
+ // The method will be passed an entire document tree, in case a particular
+ // implementation needs to inspect it to produce output.
+ //
+ // The output should be written to the supplied writer w. If your
+ // implementation has no header to write, supply an empty implementation.
+ RenderHeader(w io.Writer, ast *Node)
+
+ // RenderFooter is a symmetric counterpart of RenderHeader.
+ RenderFooter(w io.Writer, ast *Node)
+}
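+
+// A rough sketch of a non-HTML implementation that merely counts nodes
+// (nodeCounter is a hypothetical type, not part of this package):
+//
+//	type nodeCounter struct{ n int }
+//
+//	func (c *nodeCounter) RenderNode(w io.Writer, node *Node, entering bool) WalkStatus {
+//		if entering {
+//			c.n++
+//		}
+//		return GoToNext
+//	}
+//
+//	func (c *nodeCounter) RenderHeader(w io.Writer, ast *Node) {}
+//	func (c *nodeCounter) RenderFooter(w io.Writer, ast *Node) {}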
+
+// Callback functions for inline parsing. One such function is defined
+// for each character that triggers a response when parsing inline data.
+type inlineParser func(p *Markdown, data []byte, offset int) (int, *Node)
+
+// Markdown is a type that holds extensions and the runtime state used by
+// Parse, and the renderer. You cannot use it directly; construct it with New.
+type Markdown struct {
+ renderer Renderer
+ referenceOverride ReferenceOverrideFunc
+ refs map[string]*reference
+ inlineCallback [256]inlineParser
+ extensions Extensions
+ nesting int
+ maxNesting int
+ insideLink bool
+
+ // Footnotes need to be ordered as well as available to quickly check for
+ // presence. If a ref is also a footnote, it's stored both in refs and here
+ // in notes. Slice is nil if footnotes not enabled.
+ notes []*reference
+
+ doc *Node
+ tip *Node // = doc
+ oldTip *Node
+ lastMatchedContainer *Node // = doc
+ allClosed bool
+}
+
+func (p *Markdown) getRef(refid string) (ref *reference, found bool) {
+ if p.referenceOverride != nil {
+ r, overridden := p.referenceOverride(refid)
+ if overridden {
+ if r == nil {
+ return nil, false
+ }
+ return &reference{
+ link: []byte(r.Link),
+ title: []byte(r.Title),
+ noteID: 0,
+ hasBlock: false,
+ text: []byte(r.Text)}, true
+ }
+ }
+ // refs are case insensitive
+ ref, found = p.refs[strings.ToLower(refid)]
+ return ref, found
+}
+
+func (p *Markdown) finalize(block *Node) {
+ above := block.Parent
+ block.open = false
+ p.tip = above
+}
+
+func (p *Markdown) addChild(node NodeType, offset uint32) *Node {
+ return p.addExistingChild(NewNode(node), offset)
+}
+
+func (p *Markdown) addExistingChild(node *Node, offset uint32) *Node {
+ for !p.tip.canContain(node.Type) {
+ p.finalize(p.tip)
+ }
+ p.tip.AppendChild(node)
+ p.tip = node
+ return node
+}
+
+func (p *Markdown) closeUnmatchedBlocks() {
+ if !p.allClosed {
+ for p.oldTip != p.lastMatchedContainer {
+ parent := p.oldTip.Parent
+ p.finalize(p.oldTip)
+ p.oldTip = parent
+ }
+ p.allClosed = true
+ }
+}
+
+//
+//
+// Public interface
+//
+//
+
+// Reference represents the details of a link.
+// See the documentation in Options for more details on use-case.
+type Reference struct {
+ // Link is usually the URL the reference points to.
+ Link string
+ // Title is the alternate text describing the link in more detail.
+ Title string
+ // Text is the optional text to override the ref with if the syntax used was
+ // [refid][]
+ Text string
+}
+
+// ReferenceOverrideFunc is expected to be called with a reference string and
+// return either a valid Reference type that the reference string maps to or
+// nil. If overridden is false, the default reference logic will be executed.
+// See the documentation in Options for more details on use-case.
+type ReferenceOverrideFunc func(reference string) (ref *Reference, overridden bool)
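+
+// A sketch of an override that resolves a single hypothetical refid, "godoc",
+// and defers everything else to the refids defined in the document (input is
+// an assumed []byte of markdown):
+//
+//	override := func(reference string) (*Reference, bool) {
+//		if reference == "godoc" {
+//			return &Reference{Link: "https://pkg.go.dev", Title: "Go documentation"}, true
+//		}
+//		return nil, false
+//	}
+//	output := Run(input, WithRefOverride(override))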
+
+// New constructs a Markdown processor. You can use the same With* functions
+// as for Run() to customize the parser's behavior and the renderer.
+func New(opts ...Option) *Markdown {
+ var p Markdown
+ for _, opt := range opts {
+ opt(&p)
+ }
+ p.refs = make(map[string]*reference)
+ p.maxNesting = 16
+ p.insideLink = false
+ docNode := NewNode(Document)
+ p.doc = docNode
+ p.tip = docNode
+ p.oldTip = docNode
+ p.lastMatchedContainer = docNode
+ p.allClosed = true
+ // register inline parsers
+ p.inlineCallback[' '] = maybeLineBreak
+ p.inlineCallback['*'] = emphasis
+ p.inlineCallback['_'] = emphasis
+ if p.extensions&Strikethrough != 0 {
+ p.inlineCallback['~'] = emphasis
+ }
+ p.inlineCallback['`'] = codeSpan
+ p.inlineCallback['\n'] = lineBreak
+ p.inlineCallback['['] = link
+ p.inlineCallback['<'] = leftAngle
+ p.inlineCallback['\\'] = escape
+ p.inlineCallback['&'] = entity
+ p.inlineCallback['!'] = maybeImage
+ p.inlineCallback['^'] = maybeInlineFootnote
+ if p.extensions&Autolink != 0 {
+ p.inlineCallback['h'] = maybeAutoLink
+ p.inlineCallback['m'] = maybeAutoLink
+ p.inlineCallback['f'] = maybeAutoLink
+ p.inlineCallback['H'] = maybeAutoLink
+ p.inlineCallback['M'] = maybeAutoLink
+ p.inlineCallback['F'] = maybeAutoLink
+ }
+ if p.extensions&Footnotes != 0 {
+ p.notes = make([]*reference, 0)
+ }
+ return &p
+}
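+
+// A usage sketch, assuming the markdown source is in a []byte named input;
+// Parse (defined below) turns it into a syntax tree:
+//
+//	md := New(WithExtensions(CommonExtensions))
+//	ast := md.Parse(input)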
+
+// Option customizes the Markdown processor's default behavior.
+type Option func(*Markdown)
+
+// WithRenderer allows you to override the default renderer.
+func WithRenderer(r Renderer) Option {
+ return func(p *Markdown) {
+ p.renderer = r
+ }
+}
+
+// WithExtensions allows you to pick some of the many extensions provided by
+// Blackfriday. You can bitwise OR them.
+func WithExtensions(e Extensions) Option {
+ return func(p *Markdown) {
+ p.extensions = e
+ }
+}
+
+// WithNoExtensions turns off all extensions and custom behavior.
+func WithNoExtensions() Option {
+ return func(p *Markdown) {
+ p.extensions = NoExtensions
+ p.renderer = NewHTMLRenderer(HTMLRendererParameters{
+ Flags: HTMLFlagsNone,
+ })
+ }
+}
+
+// WithRefOverride sets an optional function callback that is called every
+// time a reference is resolved.
+//
+// In Markdown, the link reference syntax can be made to resolve a link to
+// a reference instead of an inline URL, in one of the following ways:
+//
+// * [link text][refid]
+// * [refid][]
+//
+// Usually, the refid is defined at the bottom of the Markdown document. If
+// this override function is provided, the refid is passed to the override
+// function first, before consulting the defined refids at the bottom. If
+// the override function indicates an override did not occur, the refids at
+// the bottom will be used to fill in the link details.
+func WithRefOverride(o ReferenceOverrideFunc) Option {
+ return func(p *Markdown) {
+ p.referenceOverride = o
+ }
+}
+
+// Run is the main entry point to Blackfriday. It parses and renders a
+// block of markdown-encoded text.
+//
+// The simplest invocation of Run takes one argument, input:
+// output := Run(input)
+// This will parse the input with CommonExtensions enabled and render it with
+// the default HTMLRenderer (with CommonHTMLFlags).
+//
+// Variadic arguments opts can customize the default behavior. Since Markdown
+// type does not contain exported fields, you can not use it directly. Instead,
+// use the With* functions. For example, this will call the most basic
+// functionality, with no extensions:
+// output := Run(input, WithNoExtensions())
+//
+// You can use any number of With* arguments, even contradicting ones. They
+// will be applied in order of appearance and the latter will override the
+// former:
+// output := Run(input, WithNoExtensions(), WithExtensions(exts),
+// WithRenderer(yourRenderer))
+func Run(input []byte, opts ...Option) []byte {
+ r := NewHTMLRenderer(HTMLRendererParameters{
+ Flags: CommonHTMLFlags,
+ })
+ optList := []Option{WithRenderer(r), WithExtensions(CommonExtensions)}
+ optList = append(optList, opts...)
+ parser := New(optList...)
+ ast := parser.Parse(input)
+ var buf bytes.Buffer
+ parser.renderer.RenderHeader(&buf, ast)
+ ast.Walk(func(node *Node, entering bool) WalkStatus {
+ return parser.renderer.RenderNode(&buf, node, entering)
+ })
+ parser.renderer.RenderFooter(&buf, ast)
+ return buf.Bytes()
+}
+
+// Parse is an entry point to the parsing part of Blackfriday. It takes an
+// input markdown document and produces a syntax tree for its contents. This
+// tree can then be rendered with a default or custom renderer, or
+// analyzed/transformed by the caller to whatever non-standard needs they have.
+// The return value is the root node of the syntax tree.
+func (p *Markdown) Parse(input []byte) *Node {
+ p.block(input)
+ // Walk the tree and finish up any unfinished blocks
+ for p.tip != nil {
+ p.finalize(p.tip)
+ }
+ // Walk the tree again and process inline markdown in each block
+ p.doc.Walk(func(node *Node, entering bool) WalkStatus {
+ if node.Type == Paragraph || node.Type == Heading || node.Type == TableCell {
+ p.inline(node, node.content)
+ node.content = nil
+ }
+ return GoToNext
+ })
+ p.parseRefsToAST()
+ return p.doc
+}
+
+func (p *Markdown) parseRefsToAST() {
+ if p.extensions&Footnotes == 0 || len(p.notes) == 0 {
+ return
+ }
+ p.tip = p.doc
+ block := p.addBlock(List, nil)
+ block.IsFootnotesList = true
+ block.ListFlags = ListTypeOrdered
+ flags := ListItemBeginningOfList
+ // Note: this loop is intentionally explicit, not range-form. This is
+ // because the body of the loop will append nested footnotes to p.notes and
+ // we need to process those late additions. Range form would only walk over
+ // the fixed initial set.
+ for i := 0; i < len(p.notes); i++ {
+ ref := p.notes[i]
+ p.addExistingChild(ref.footnote, 0)
+ block := ref.footnote
+ block.ListFlags = flags | ListTypeOrdered
+ block.RefLink = ref.link
+ if ref.hasBlock {
+ flags |= ListItemContainsBlock
+ p.block(ref.title)
+ } else {
+ p.inline(block, ref.title)
+ }
+ flags &^= ListItemBeginningOfList | ListItemContainsBlock
+ }
+ above := block.Parent
+ finalizeList(block)
+ p.tip = above
+ block.Walk(func(node *Node, entering bool) WalkStatus {
+ if node.Type == Paragraph || node.Type == Heading {
+ p.inline(node, node.content)
+ node.content = nil
+ }
+ return GoToNext
+ })
+}
+
+//
+// Link references
+//
+// This section implements support for references that (usually) appear
+// as footnotes in a document, and can be referenced anywhere in the document.
+// The basic format is:
+//
+// [1]: http://www.google.com/ "Google"
+// [2]: http://www.github.com/ "Github"
+//
+// Anywhere in the document, the reference can be linked by referring to its
+// label, i.e., 1 and 2 in this example, as in:
+//
+// This library is hosted on [Github][2], a git hosting site.
+//
+// Actual footnotes as specified in Pandoc and supported by some other Markdown
+// libraries such as php-markdown are also taken care of. They look like this:
+//
+// This sentence needs a bit of further explanation.[^note]
+//
+// [^note]: This is the explanation.
+//
+// Footnotes should be placed at the end of the document in an ordered list.
+// Finally, there are inline footnotes such as:
+//
+// Inline footnotes^[Also supported.] provide a quick inline explanation,
+// but are rendered at the bottom of the document.
+//
+
+// reference holds all information necessary for reference-style links or
+// footnotes.
+//
+// Consider this markdown with reference-style links:
+//
+// [link][ref]
+//
+// [ref]: /url/ "tooltip title"
+//
+// It will be ultimately converted to this HTML:
+//
+// <p><a href="/url/" title="tooltip title">link</a></p>
+//
+// And a reference structure will be populated as follows:
+//
+// p.refs["ref"] = &reference{
+// link: "/url/",
+// title: "tooltip title",
+// }
+//
+// Alternatively, reference can contain information about a footnote. Consider
+// this markdown:
+//
+// Text needing a footnote.[^a]
+//
+// [^a]: This is the note
+//
+// A reference structure will be populated as follows:
+//
+// p.refs["a"] = &reference{
+// link: "a",
+// title: "This is the note",
+// noteID: <some positive int>,
+// }
+//
+// TODO: As you can see, it begs for splitting into two dedicated structures
+// for refs and for footnotes.
+type reference struct {
+ link []byte
+ title []byte
+ noteID int // 0 if not a footnote ref
+ hasBlock bool
+ footnote *Node // a link to the Item node within a list of footnotes
+
+ text []byte // only gets populated by refOverride feature with Reference.Text
+}
+
+func (r *reference) String() string {
+ return fmt.Sprintf("{link: %q, title: %q, text: %q, noteID: %d, hasBlock: %v}",
+ r.link, r.title, r.text, r.noteID, r.hasBlock)
+}
+
+// Check whether or not data starts with a reference link.
+// If so, it is parsed and stored in the list of references
+// (in the render struct).
+// Returns the number of bytes to skip to move past it,
+// or zero if the first line is not a reference.
+func isReference(p *Markdown, data []byte, tabSize int) int {
+ // up to 3 optional leading spaces
+ if len(data) < 4 {
+ return 0
+ }
+ i := 0
+ for i < 3 && data[i] == ' ' {
+ i++
+ }
+
+ noteID := 0
+
+ // id part: anything but a newline between brackets
+ if data[i] != '[' {
+ return 0
+ }
+ i++
+ if p.extensions&Footnotes != 0 {
+ if i < len(data) && data[i] == '^' {
+ // we can set it to anything here because the proper noteIds will
+ // be assigned later during the second pass. It just has to be != 0
+ noteID = 1
+ i++
+ }
+ }
+ idOffset := i
+ for i < len(data) && data[i] != '\n' && data[i] != '\r' && data[i] != ']' {
+ i++
+ }
+ if i >= len(data) || data[i] != ']' {
+ return 0
+ }
+ idEnd := i
+ // footnotes can have empty ID, like this: [^], but a reference can not be
+ // empty like this: []. Break early if it's not a footnote and there's no ID
+ if noteID == 0 && idOffset == idEnd {
+ return 0
+ }
+ // spacer: colon (space | tab)* newline? (space | tab)*
+ i++
+ if i >= len(data) || data[i] != ':' {
+ return 0
+ }
+ i++
+ for i < len(data) && (data[i] == ' ' || data[i] == '\t') {
+ i++
+ }
+ if i < len(data) && (data[i] == '\n' || data[i] == '\r') {
+ i++
+ if i < len(data) && data[i] == '\n' && data[i-1] == '\r' {
+ i++
+ }
+ }
+ for i < len(data) && (data[i] == ' ' || data[i] == '\t') {
+ i++
+ }
+ if i >= len(data) {
+ return 0
+ }
+
+ var (
+ linkOffset, linkEnd int
+ titleOffset, titleEnd int
+ lineEnd int
+ raw []byte
+ hasBlock bool
+ )
+
+ if p.extensions&Footnotes != 0 && noteID != 0 {
+ linkOffset, linkEnd, raw, hasBlock = scanFootnote(p, data, i, tabSize)
+ lineEnd = linkEnd
+ } else {
+ linkOffset, linkEnd, titleOffset, titleEnd, lineEnd = scanLinkRef(p, data, i)
+ }
+ if lineEnd == 0 {
+ return 0
+ }
+
+ // a valid ref has been found
+
+ ref := &reference{
+ noteID: noteID,
+ hasBlock: hasBlock,
+ }
+
+ if noteID > 0 {
+ // reusing the link field for the id since footnotes don't have links
+ ref.link = data[idOffset:idEnd]
+ // if footnote, it's not really a title, it's the contained text
+ ref.title = raw
+ } else {
+ ref.link = data[linkOffset:linkEnd]
+ ref.title = data[titleOffset:titleEnd]
+ }
+
+ // id matches are case-insensitive
+ id := string(bytes.ToLower(data[idOffset:idEnd]))
+
+ p.refs[id] = ref
+
+ return lineEnd
+}
+
+func scanLinkRef(p *Markdown, data []byte, i int) (linkOffset, linkEnd, titleOffset, titleEnd, lineEnd int) {
+ // link: whitespace-free sequence, optionally between angle brackets
+ if data[i] == '<' {
+ i++
+ }
+ linkOffset = i
+ for i < len(data) && data[i] != ' ' && data[i] != '\t' && data[i] != '\n' && data[i] != '\r' {
+ i++
+ }
+ linkEnd = i
+ if data[linkOffset] == '<' && data[linkEnd-1] == '>' {
+ linkOffset++
+ linkEnd--
+ }
+
+ // optional spacer: (space | tab)* (newline | '\'' | '"' | '(' )
+ for i < len(data) && (data[i] == ' ' || data[i] == '\t') {
+ i++
+ }
+ if i < len(data) && data[i] != '\n' && data[i] != '\r' && data[i] != '\'' && data[i] != '"' && data[i] != '(' {
+ return
+ }
+
+ // compute end-of-line
+ if i >= len(data) || data[i] == '\r' || data[i] == '\n' {
+ lineEnd = i
+ }
+ if i+1 < len(data) && data[i] == '\r' && data[i+1] == '\n' {
+ lineEnd++
+ }
+
+ // optional (space|tab)* spacer after a newline
+ if lineEnd > 0 {
+ i = lineEnd + 1
+ for i < len(data) && (data[i] == ' ' || data[i] == '\t') {
+ i++
+ }
+ }
+
+ // optional title: any non-newline sequence enclosed in '"() alone on its line
+ if i+1 < len(data) && (data[i] == '\'' || data[i] == '"' || data[i] == '(') {
+ i++
+ titleOffset = i
+
+ // look for EOL
+ for i < len(data) && data[i] != '\n' && data[i] != '\r' {
+ i++
+ }
+ if i+1 < len(data) && data[i] == '\n' && data[i+1] == '\r' {
+ titleEnd = i + 1
+ } else {
+ titleEnd = i
+ }
+
+ // step back
+ i--
+ for i > titleOffset && (data[i] == ' ' || data[i] == '\t') {
+ i--
+ }
+ if i > titleOffset && (data[i] == '\'' || data[i] == '"' || data[i] == ')') {
+ lineEnd = titleEnd
+ titleEnd = i
+ }
+ }
+
+ return
+}
+
+// The first bit of this logic is the same as Parser.listItem, but the rest
+// is much simpler. This function simply finds the entire block and shifts it
+// over by one tab if it is indeed a block (just returns the line if it's not).
+// blockEnd is the end of the section in the input buffer, and contents is the
+// extracted text that was shifted over one tab. It will need to be rendered at
+// the end of the document.
+func scanFootnote(p *Markdown, data []byte, i, indentSize int) (blockStart, blockEnd int, contents []byte, hasBlock bool) {
+ if i == 0 || len(data) == 0 {
+ return
+ }
+
+ // skip leading whitespace on first line
+ for i < len(data) && data[i] == ' ' {
+ i++
+ }
+
+ blockStart = i
+
+ // find the end of the line
+ blockEnd = i
+ for i < len(data) && data[i-1] != '\n' {
+ i++
+ }
+
+ // get working buffer
+ var raw bytes.Buffer
+
+ // put the first line into the working buffer
+ raw.Write(data[blockEnd:i])
+ blockEnd = i
+
+ // process the following lines
+ containsBlankLine := false
+
+gatherLines:
+ for blockEnd < len(data) {
+ i++
+
+ // find the end of this line
+ for i < len(data) && data[i-1] != '\n' {
+ i++
+ }
+
+ // if it is an empty line, guess that it is part of this item
+ // and move on to the next line
+ if p.isEmpty(data[blockEnd:i]) > 0 {
+ containsBlankLine = true
+ blockEnd = i
+ continue
+ }
+
+ n := 0
+ if n = isIndented(data[blockEnd:i], indentSize); n == 0 {
+ // this is the end of the block.
+ // we don't want to include this last line in the index.
+ break gatherLines
+ }
+
+ // if there were blank lines before this one, insert a new one now
+ if containsBlankLine {
+ raw.WriteByte('\n')
+ containsBlankLine = false
+ }
+
+ // get rid of that first tab, write to buffer
+ raw.Write(data[blockEnd+n : i])
+ hasBlock = true
+
+ blockEnd = i
+ }
+
+ if data[blockEnd-1] != '\n' {
+ raw.WriteByte('\n')
+ }
+
+ contents = raw.Bytes()
+
+ return
+}
+
+//
+//
+// Miscellaneous helper functions
+//
+//
+
+// Test if a character is a punctuation symbol.
+// Taken from a private function in regexp in the stdlib.
+func ispunct(c byte) bool {
+ for _, r := range []byte("!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~") {
+ if c == r {
+ return true
+ }
+ }
+ return false
+}
+
+// Test if a character is a whitespace character.
+func isspace(c byte) bool {
+ return ishorizontalspace(c) || isverticalspace(c)
+}
+
+// Test if a character is a horizontal whitespace character.
+func ishorizontalspace(c byte) bool {
+ return c == ' ' || c == '\t'
+}
+
+// Test if a character is a vertical whitespace character.
+func isverticalspace(c byte) bool {
+ return c == '\n' || c == '\r' || c == '\f' || c == '\v'
+}
+
+// Test if a character is a letter.
+func isletter(c byte) bool {
+ return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
+}
+
+// Test if a character is a letter or a digit.
+// TODO: check when this is looking for ASCII alnum and when it should use unicode
+func isalnum(c byte) bool {
+ return (c >= '0' && c <= '9') || isletter(c)
+}
+
+// Replace tab characters with spaces, aligning to the next TAB_SIZE column.
+// always ends output with a newline
+func expandTabs(out *bytes.Buffer, line []byte, tabSize int) {
+ // first, check for common cases: no tabs, or only tabs at beginning of line
+ i, prefix := 0, 0
+ slowcase := false
+ for i = 0; i < len(line); i++ {
+ if line[i] == '\t' {
+ if prefix == i {
+ prefix++
+ } else {
+ slowcase = true
+ break
+ }
+ }
+ }
+
+ // no need to decode runes if all tabs are at the beginning of the line
+ if !slowcase {
+ for i = 0; i < prefix*tabSize; i++ {
+ out.WriteByte(' ')
+ }
+ out.Write(line[prefix:])
+ return
+ }
+
+ // the slow case: we need to count runes to figure out how
+ // many spaces to insert for each tab
+ column := 0
+ i = 0
+ for i < len(line) {
+ start := i
+ for i < len(line) && line[i] != '\t' {
+ _, size := utf8.DecodeRune(line[i:])
+ i += size
+ column++
+ }
+
+ if i > start {
+ out.Write(line[start:i])
+ }
+
+ if i >= len(line) {
+ break
+ }
+
+ for {
+ out.WriteByte(' ')
+ column++
+ if column%tabSize == 0 {
+ break
+ }
+ }
+
+ i++
+ }
+}
+
+// Find out whether a line counts as indented or not.
+// Returns the number of characters in the indent (0 = not indented).
+func isIndented(data []byte, indentSize int) int {
+ if len(data) == 0 {
+ return 0
+ }
+ if data[0] == '\t' {
+ return 1
+ }
+ if len(data) < indentSize {
+ return 0
+ }
+ for i := 0; i < indentSize; i++ {
+ if data[i] != ' ' {
+ return 0
+ }
+ }
+ return indentSize
+}
+
+// Create a url-safe slug for fragments
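+// For example, slugify([]byte("Hello, World!")) yields "Hello-World": each run
+// of non-alphanumeric bytes collapses into a single '-', leading and trailing
+// dashes are trimmed, and letter case is preserved.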
+func slugify(in []byte) []byte {
+ if len(in) == 0 {
+ return in
+ }
+ out := make([]byte, 0, len(in))
+ sym := false
+
+ for _, ch := range in {
+ if isalnum(ch) {
+ sym = false
+ out = append(out, ch)
+ } else if sym {
+ continue
+ } else {
+ out = append(out, '-')
+ sym = true
+ }
+ }
+ var a, b int
+ var ch byte
+ for a, ch = range out {
+ if ch != '-' {
+ break
+ }
+ }
+ for b = len(out) - 1; b > 0; b-- {
+ if out[b] != '-' {
+ break
+ }
+ }
+ return out[a : b+1]
+}
diff --git a/vendor/github.com/russross/blackfriday/v2/node.go b/vendor/github.com/russross/blackfriday/v2/node.go
new file mode 100644
index 000000000..04e6050ce
--- /dev/null
+++ b/vendor/github.com/russross/blackfriday/v2/node.go
@@ -0,0 +1,360 @@
+package blackfriday
+
+import (
+ "bytes"
+ "fmt"
+)
+
+// NodeType specifies a type of a single node of a syntax tree. Usually one
+// node (and its type) corresponds to a single markdown feature, e.g. emphasis
+// or code block.
+type NodeType int
+
+// Constants for identifying different types of nodes. See NodeType.
+const (
+ Document NodeType = iota
+ BlockQuote
+ List
+ Item
+ Paragraph
+ Heading
+ HorizontalRule
+ Emph
+ Strong
+ Del
+ Link
+ Image
+ Text
+ HTMLBlock
+ CodeBlock
+ Softbreak
+ Hardbreak
+ Code
+ HTMLSpan
+ Table
+ TableCell
+ TableHead
+ TableBody
+ TableRow
+)
+
+var nodeTypeNames = []string{
+ Document: "Document",
+ BlockQuote: "BlockQuote",
+ List: "List",
+ Item: "Item",
+ Paragraph: "Paragraph",
+ Heading: "Heading",
+ HorizontalRule: "HorizontalRule",
+ Emph: "Emph",
+ Strong: "Strong",
+ Del: "Del",
+ Link: "Link",
+ Image: "Image",
+ Text: "Text",
+ HTMLBlock: "HTMLBlock",
+ CodeBlock: "CodeBlock",
+ Softbreak: "Softbreak",
+ Hardbreak: "Hardbreak",
+ Code: "Code",
+ HTMLSpan: "HTMLSpan",
+ Table: "Table",
+ TableCell: "TableCell",
+ TableHead: "TableHead",
+ TableBody: "TableBody",
+ TableRow: "TableRow",
+}
+
+func (t NodeType) String() string {
+ return nodeTypeNames[t]
+}
+
+// ListData contains fields relevant to a List and Item node type.
+type ListData struct {
+ ListFlags ListType
+ Tight bool // Skip <p>s around list item data if true
+ BulletChar byte // '*', '+' or '-' in bullet lists
+ Delimiter byte // '.' or ')' after the number in ordered lists
+ RefLink []byte // If not nil, turns this list item into a footnote item and triggers different rendering
+ IsFootnotesList bool // This is a list of footnotes
+}
+
+// LinkData contains fields relevant to a Link node type.
+type LinkData struct {
+ Destination []byte // Destination is what goes into a href
+ Title []byte // Title is the tooltip thing that goes in a title attribute
+ NoteID int // NoteID contains a serial number of a footnote, zero if it's not a footnote
+ Footnote *Node // If it's a footnote, this is a direct link to the footnote Node. Otherwise nil.
+}
+
+// CodeBlockData contains fields relevant to a CodeBlock node type.
+type CodeBlockData struct {
+ IsFenced bool // Specifies whether it's a fenced code block or an indented one
+ Info []byte // This holds the info string
+ FenceChar byte
+ FenceLength int
+ FenceOffset int
+}
+
+// TableCellData contains fields relevant to a TableCell node type.
+type TableCellData struct {
+ IsHeader bool // This tells if it's under the header row
+ Align CellAlignFlags // This holds the value for align attribute
+}
+
+// HeadingData contains fields relevant to a Heading node type.
+type HeadingData struct {
+ Level int // This holds the heading level number
+ HeadingID string // This might hold heading ID, if present
+ IsTitleblock bool // Specifies whether it's a title block
+}
+
+// Node is a single element in the abstract syntax tree of the parsed document.
+// It holds connections to the structurally neighboring nodes and, for certain
+// types of nodes, additional information that might be needed when rendering.
+type Node struct {
+ Type NodeType // Determines the type of the node
+ Parent *Node // Points to the parent
+ FirstChild *Node // Points to the first child, if any
+ LastChild *Node // Points to the last child, if any
+ Prev *Node // Previous sibling; nil if it's the first child
+ Next *Node // Next sibling; nil if it's the last child
+
+ Literal []byte // Text contents of the leaf nodes
+
+ HeadingData // Populated if Type is Heading
+ ListData // Populated if Type is List
+ CodeBlockData // Populated if Type is CodeBlock
+ LinkData // Populated if Type is Link
+ TableCellData // Populated if Type is TableCell
+
+ content []byte // Markdown content of the block nodes
+ open bool // Specifies an open block node that has not been finished to process yet
+}
+
+// NewNode allocates a node of a specified type.
+func NewNode(typ NodeType) *Node {
+ return &Node{
+ Type: typ,
+ open: true,
+ }
+}
+
+func (n *Node) String() string {
+ ellipsis := ""
+ snippet := n.Literal
+ if len(snippet) > 16 {
+ snippet = snippet[:16]
+ ellipsis = "..."
+ }
+ return fmt.Sprintf("%s: '%s%s'", n.Type, snippet, ellipsis)
+}
+
+// Unlink removes node 'n' from the tree.
+// It panics if the node is nil.
+func (n *Node) Unlink() {
+ if n.Prev != nil {
+ n.Prev.Next = n.Next
+ } else if n.Parent != nil {
+ n.Parent.FirstChild = n.Next
+ }
+ if n.Next != nil {
+ n.Next.Prev = n.Prev
+ } else if n.Parent != nil {
+ n.Parent.LastChild = n.Prev
+ }
+ n.Parent = nil
+ n.Next = nil
+ n.Prev = nil
+}
+
+// AppendChild adds a node 'child' as a child of 'n'.
+// It panics if either node is nil.
+func (n *Node) AppendChild(child *Node) {
+ child.Unlink()
+ child.Parent = n
+ if n.LastChild != nil {
+ n.LastChild.Next = child
+ child.Prev = n.LastChild
+ n.LastChild = child
+ } else {
+ n.FirstChild = child
+ n.LastChild = child
+ }
+}
+
+// InsertBefore inserts 'sibling' immediately before 'n'.
+// It panics if either node is nil.
+func (n *Node) InsertBefore(sibling *Node) {
+ sibling.Unlink()
+ sibling.Prev = n.Prev
+ if sibling.Prev != nil {
+ sibling.Prev.Next = sibling
+ }
+ sibling.Next = n
+ n.Prev = sibling
+ sibling.Parent = n.Parent
+ if sibling.Prev == nil {
+ sibling.Parent.FirstChild = sibling
+ }
+}
+
+// IsContainer returns true if 'n' can contain children.
+func (n *Node) IsContainer() bool {
+ switch n.Type {
+ case Document:
+ fallthrough
+ case BlockQuote:
+ fallthrough
+ case List:
+ fallthrough
+ case Item:
+ fallthrough
+ case Paragraph:
+ fallthrough
+ case Heading:
+ fallthrough
+ case Emph:
+ fallthrough
+ case Strong:
+ fallthrough
+ case Del:
+ fallthrough
+ case Link:
+ fallthrough
+ case Image:
+ fallthrough
+ case Table:
+ fallthrough
+ case TableHead:
+ fallthrough
+ case TableBody:
+ fallthrough
+ case TableRow:
+ fallthrough
+ case TableCell:
+ return true
+ default:
+ return false
+ }
+}
+
+// IsLeaf returns true if 'n' is a leaf node.
+func (n *Node) IsLeaf() bool {
+ return !n.IsContainer()
+}
+
+func (n *Node) canContain(t NodeType) bool {
+ if n.Type == List {
+ return t == Item
+ }
+ if n.Type == Document || n.Type == BlockQuote || n.Type == Item {
+ return t != Item
+ }
+ if n.Type == Table {
+ return t == TableHead || t == TableBody
+ }
+ if n.Type == TableHead || n.Type == TableBody {
+ return t == TableRow
+ }
+ if n.Type == TableRow {
+ return t == TableCell
+ }
+ return false
+}
+
+// WalkStatus allows NodeVisitor to have some control over the tree traversal.
+// It is returned from NodeVisitor and different values allow Node.Walk to
+// decide which node to go to next.
+type WalkStatus int
+
+const (
+ // GoToNext is the default traversal of every node.
+ GoToNext WalkStatus = iota
+ // SkipChildren tells walker to skip all children of current node.
+ SkipChildren
+ // Terminate tells walker to terminate the traversal.
+ Terminate
+)
+
+// NodeVisitor is a callback to be called when traversing the syntax tree.
+// Called twice for every node: once with entering=true when the branch is
+// first visited, then with entering=false after all the children are done.
+type NodeVisitor func(node *Node, entering bool) WalkStatus
+
+// Walk is a convenience method that instantiates a walker and starts a
+// traversal of subtree rooted at n.
+func (n *Node) Walk(visitor NodeVisitor) {
+ w := newNodeWalker(n)
+ for w.current != nil {
+ status := visitor(w.current, w.entering)
+ switch status {
+ case GoToNext:
+ w.next()
+ case SkipChildren:
+ w.entering = false
+ w.next()
+ case Terminate:
+ return
+ }
+ }
+}
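+
+// A minimal usage sketch: collecting every link destination in a parsed
+// document (the doc and links names below are illustrative):
+//
+//	var links [][]byte
+//	doc.Walk(func(node *Node, entering bool) WalkStatus {
+//		if entering && node.Type == Link {
+//			links = append(links, node.Destination)
+//		}
+//		return GoToNext
+//	})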
+
+type nodeWalker struct {
+ current *Node
+ root *Node
+ entering bool
+}
+
+func newNodeWalker(root *Node) *nodeWalker {
+ return &nodeWalker{
+ current: root,
+ root: root,
+ entering: true,
+ }
+}
+
+func (nw *nodeWalker) next() {
+ if (!nw.current.IsContainer() || !nw.entering) && nw.current == nw.root {
+ nw.current = nil
+ return
+ }
+ if nw.entering && nw.current.IsContainer() {
+ if nw.current.FirstChild != nil {
+ nw.current = nw.current.FirstChild
+ nw.entering = true
+ } else {
+ nw.entering = false
+ }
+ } else if nw.current.Next == nil {
+ nw.current = nw.current.Parent
+ nw.entering = false
+ } else {
+ nw.current = nw.current.Next
+ nw.entering = true
+ }
+}
+
+func dump(ast *Node) {
+ fmt.Println(dumpString(ast))
+}
+
+func dumpR(ast *Node, depth int) string {
+ if ast == nil {
+ return ""
+ }
+ indent := bytes.Repeat([]byte("\t"), depth)
+ content := ast.Literal
+ if content == nil {
+ content = ast.content
+ }
+ result := fmt.Sprintf("%s%s(%q)\n", indent, ast.Type, content)
+ for n := ast.FirstChild; n != nil; n = n.Next {
+ result += dumpR(n, depth+1)
+ }
+ return result
+}
+
+func dumpString(ast *Node) string {
+ return dumpR(ast, 0)
+}
diff --git a/vendor/github.com/russross/blackfriday/v2/smartypants.go b/vendor/github.com/russross/blackfriday/v2/smartypants.go
new file mode 100644
index 000000000..3a220e942
--- /dev/null
+++ b/vendor/github.com/russross/blackfriday/v2/smartypants.go
@@ -0,0 +1,457 @@
+//
+// Blackfriday Markdown Processor
+// Available at http://github.com/russross/blackfriday
+//
+// Copyright © 2011 Russ Ross <russ@russross.com>.
+// Distributed under the Simplified BSD License.
+// See README.md for details.
+//
+
+//
+//
+// SmartyPants rendering
+//
+//
+
+package blackfriday
+
+import (
+ "bytes"
+ "io"
+)
+
+// SPRenderer is a struct containing state of a Smartypants renderer.
+type SPRenderer struct {
+ inSingleQuote bool
+ inDoubleQuote bool
+ callbacks [256]smartCallback
+}
+
+func wordBoundary(c byte) bool {
+ return c == 0 || isspace(c) || ispunct(c)
+}
+
+func tolower(c byte) byte {
+ if c >= 'A' && c <= 'Z' {
+ return c - 'A' + 'a'
+ }
+ return c
+}
+
+func isdigit(c byte) bool {
+ return c >= '0' && c <= '9'
+}
+
+func smartQuoteHelper(out *bytes.Buffer, previousChar byte, nextChar byte, quote byte, isOpen *bool, addNBSP bool) bool {
+ // edge of the buffer is likely to be a tag that we don't get to see,
+ // so we treat it like text sometimes
+
+ // enumerate all sixteen possibilities for (previousChar, nextChar)
+ // each can be one of {0, space, punct, other}
+ switch {
+ case previousChar == 0 && nextChar == 0:
+ // context is not any help here, so toggle
+ *isOpen = !*isOpen
+ case isspace(previousChar) && nextChar == 0:
+ // [ "] might be [ "<code>foo...]
+ *isOpen = true
+ case ispunct(previousChar) && nextChar == 0:
+ // [!"] hmm... could be [Run!"] or [("<code>...]
+ *isOpen = false
+ case /* isnormal(previousChar) && */ nextChar == 0:
+ // [a"] is probably a close
+ *isOpen = false
+ case previousChar == 0 && isspace(nextChar):
+ // [" ] might be [...foo</code>" ]
+ *isOpen = false
+ case isspace(previousChar) && isspace(nextChar):
+ // [ " ] context is not any help here, so toggle
+ *isOpen = !*isOpen
+ case ispunct(previousChar) && isspace(nextChar):
+ // [!" ] is probably a close
+ *isOpen = false
+ case /* isnormal(previousChar) && */ isspace(nextChar):
+ // [a" ] this is one of the easy cases
+ *isOpen = false
+ case previousChar == 0 && ispunct(nextChar):
+ // ["!] hmm... could be ["$1.95] or [</code>"!...]
+ *isOpen = false
+ case isspace(previousChar) && ispunct(nextChar):
+ // [ "!] looks more like [ "$1.95]
+ *isOpen = true
+ case ispunct(previousChar) && ispunct(nextChar):
+ // [!"!] context is not any help here, so toggle
+ *isOpen = !*isOpen
+ case /* isnormal(previousChar) && */ ispunct(nextChar):
+ // [a"!] is probably a close
+ *isOpen = false
+ case previousChar == 0 /* && isnormal(nextChar) */ :
+ // ["a] is probably an open
+ *isOpen = true
+ case isspace(previousChar) /* && isnormal(nextChar) */ :
+ // [ "a] this is one of the easy cases
+ *isOpen = true
+ case ispunct(previousChar) /* && isnormal(nextChar) */ :
+ // [!"a] is probably an open
+ *isOpen = true
+ default:
+ // [a'b] maybe a contraction?
+ *isOpen = false
+ }
+
+ // Note that with the limited lookahead, this non-breaking
+ // space will also be appended to single double quotes.
+ if addNBSP && !*isOpen {
+ out.WriteString("&nbsp;")
+ }
+
+ out.WriteByte('&')
+ if *isOpen {
+ out.WriteByte('l')
+ } else {
+ out.WriteByte('r')
+ }
+ out.WriteByte(quote)
+ out.WriteString("quo;")
+
+ if addNBSP && *isOpen {
+ out.WriteString("&nbsp;")
+ }
+
+ return true
+}
+
+func (r *SPRenderer) smartSingleQuote(out *bytes.Buffer, previousChar byte, text []byte) int {
+ if len(text) >= 2 {
+ t1 := tolower(text[1])
+
+ if t1 == '\'' {
+ nextChar := byte(0)
+ if len(text) >= 3 {
+ nextChar = text[2]
+ }
+ if smartQuoteHelper(out, previousChar, nextChar, 'd', &r.inDoubleQuote, false) {
+ return 1
+ }
+ }
+
+ if (t1 == 's' || t1 == 't' || t1 == 'm' || t1 == 'd') && (len(text) < 3 || wordBoundary(text[2])) {
+ out.WriteString("&rsquo;")
+ return 0
+ }
+
+ if len(text) >= 3 {
+ t2 := tolower(text[2])
+
+ if ((t1 == 'r' && t2 == 'e') || (t1 == 'l' && t2 == 'l') || (t1 == 'v' && t2 == 'e')) &&
+ (len(text) < 4 || wordBoundary(text[3])) {
+ out.WriteString("&rsquo;")
+ return 0
+ }
+ }
+ }
+
+ nextChar := byte(0)
+ if len(text) > 1 {
+ nextChar = text[1]
+ }
+ if smartQuoteHelper(out, previousChar, nextChar, 's', &r.inSingleQuote, false) {
+ return 0
+ }
+
+ out.WriteByte(text[0])
+ return 0
+}
+
+func (r *SPRenderer) smartParens(out *bytes.Buffer, previousChar byte, text []byte) int {
+ if len(text) >= 3 {
+ t1 := tolower(text[1])
+ t2 := tolower(text[2])
+
+ if t1 == 'c' && t2 == ')' {
+ out.WriteString("&copy;")
+ return 2
+ }
+
+ if t1 == 'r' && t2 == ')' {
+ out.WriteString("&reg;")
+ return 2
+ }
+
+ if len(text) >= 4 && t1 == 't' && t2 == 'm' && text[3] == ')' {
+ out.WriteString("&trade;")
+ return 3
+ }
+ }
+
+ out.WriteByte(text[0])
+ return 0
+}
+
+func (r *SPRenderer) smartDash(out *bytes.Buffer, previousChar byte, text []byte) int {
+ if len(text) >= 2 {
+ if text[1] == '-' {
+ out.WriteString("&mdash;")
+ return 1
+ }
+
+ if wordBoundary(previousChar) && wordBoundary(text[1]) {
+ out.WriteString("&ndash;")
+ return 0
+ }
+ }
+
+ out.WriteByte(text[0])
+ return 0
+}
+
+func (r *SPRenderer) smartDashLatex(out *bytes.Buffer, previousChar byte, text []byte) int {
+ if len(text) >= 3 && text[1] == '-' && text[2] == '-' {
+ out.WriteString("&mdash;")
+ return 2
+ }
+ if len(text) >= 2 && text[1] == '-' {
+ out.WriteString("&ndash;")
+ return 1
+ }
+
+ out.WriteByte(text[0])
+ return 0
+}
+
+func (r *SPRenderer) smartAmpVariant(out *bytes.Buffer, previousChar byte, text []byte, quote byte, addNBSP bool) int {
+ if bytes.HasPrefix(text, []byte("&quot;")) {
+ nextChar := byte(0)
+ if len(text) >= 7 {
+ nextChar = text[6]
+ }
+ if smartQuoteHelper(out, previousChar, nextChar, quote, &r.inDoubleQuote, addNBSP) {
+ return 5
+ }
+ }
+
+ if bytes.HasPrefix(text, []byte("&#0;")) {
+ return 3
+ }
+
+ out.WriteByte('&')
+ return 0
+}
+
+func (r *SPRenderer) smartAmp(angledQuotes, addNBSP bool) func(*bytes.Buffer, byte, []byte) int {
+ var quote byte = 'd'
+ if angledQuotes {
+ quote = 'a'
+ }
+
+ return func(out *bytes.Buffer, previousChar byte, text []byte) int {
+ return r.smartAmpVariant(out, previousChar, text, quote, addNBSP)
+ }
+}
+
+func (r *SPRenderer) smartPeriod(out *bytes.Buffer, previousChar byte, text []byte) int {
+ if len(text) >= 3 && text[1] == '.' && text[2] == '.' {
+ out.WriteString("&hellip;")
+ return 2
+ }
+
+ if len(text) >= 5 && text[1] == ' ' && text[2] == '.' && text[3] == ' ' && text[4] == '.' {
+ out.WriteString("&hellip;")
+ return 4
+ }
+
+ out.WriteByte(text[0])
+ return 0
+}
+
+func (r *SPRenderer) smartBacktick(out *bytes.Buffer, previousChar byte, text []byte) int {
+ if len(text) >= 2 && text[1] == '`' {
+ nextChar := byte(0)
+ if len(text) >= 3 {
+ nextChar = text[2]
+ }
+ if smartQuoteHelper(out, previousChar, nextChar, 'd', &r.inDoubleQuote, false) {
+ return 1
+ }
+ }
+
+ out.WriteByte(text[0])
+ return 0
+}
+
+func (r *SPRenderer) smartNumberGeneric(out *bytes.Buffer, previousChar byte, text []byte) int {
+ if wordBoundary(previousChar) && previousChar != '/' && len(text) >= 3 {
+ // is it of the form digits/digits(word boundary)?, i.e., \d+/\d+\b
+ // note: check for regular slash (/) or fraction slash (⁄, 0x2044, or 0xe2 81 84 in utf-8)
+ // and avoid changing dates like 1/23/2005 into fractions.
+ numEnd := 0
+ for len(text) > numEnd && isdigit(text[numEnd]) {
+ numEnd++
+ }
+ if numEnd == 0 {
+ out.WriteByte(text[0])
+ return 0
+ }
+ denStart := numEnd + 1
+ if len(text) > numEnd+3 && text[numEnd] == 0xe2 && text[numEnd+1] == 0x81 && text[numEnd+2] == 0x84 {
+ denStart = numEnd + 3
+ } else if len(text) < numEnd+2 || text[numEnd] != '/' {
+ out.WriteByte(text[0])
+ return 0
+ }
+ denEnd := denStart
+ for len(text) > denEnd && isdigit(text[denEnd]) {
+ denEnd++
+ }
+ if denEnd == denStart {
+ out.WriteByte(text[0])
+ return 0
+ }
+ if len(text) == denEnd || wordBoundary(text[denEnd]) && text[denEnd] != '/' {
+ out.WriteString("<sup>")
+ out.Write(text[:numEnd])
+ out.WriteString("</sup>&frasl;<sub>")
+ out.Write(text[denStart:denEnd])
+ out.WriteString("</sub>")
+ return denEnd - 1
+ }
+ }
+
+ out.WriteByte(text[0])
+ return 0
+}
+
+func (r *SPRenderer) smartNumber(out *bytes.Buffer, previousChar byte, text []byte) int {
+ if wordBoundary(previousChar) && previousChar != '/' && len(text) >= 3 {
+ if text[0] == '1' && text[1] == '/' && text[2] == '2' {
+ if len(text) < 4 || wordBoundary(text[3]) && text[3] != '/' {
+ out.WriteString("&frac12;")
+ return 2
+ }
+ }
+
+ if text[0] == '1' && text[1] == '/' && text[2] == '4' {
+ if len(text) < 4 || wordBoundary(text[3]) && text[3] != '/' || (len(text) >= 5 && tolower(text[3]) == 't' && tolower(text[4]) == 'h') {
+ out.WriteString("&frac14;")
+ return 2
+ }
+ }
+
+ if text[0] == '3' && text[1] == '/' && text[2] == '4' {
+ if len(text) < 4 || wordBoundary(text[3]) && text[3] != '/' || (len(text) >= 6 && tolower(text[3]) == 't' && tolower(text[4]) == 'h' && tolower(text[5]) == 's') {
+ out.WriteString("&frac34;")
+ return 2
+ }
+ }
+ }
+
+ out.WriteByte(text[0])
+ return 0
+}
+
+func (r *SPRenderer) smartDoubleQuoteVariant(out *bytes.Buffer, previousChar byte, text []byte, quote byte) int {
+ nextChar := byte(0)
+ if len(text) > 1 {
+ nextChar = text[1]
+ }
+ if !smartQuoteHelper(out, previousChar, nextChar, quote, &r.inDoubleQuote, false) {
+ out.WriteString("&quot;")
+ }
+
+ return 0
+}
+
+func (r *SPRenderer) smartDoubleQuote(out *bytes.Buffer, previousChar byte, text []byte) int {
+ return r.smartDoubleQuoteVariant(out, previousChar, text, 'd')
+}
+
+func (r *SPRenderer) smartAngledDoubleQuote(out *bytes.Buffer, previousChar byte, text []byte) int {
+ return r.smartDoubleQuoteVariant(out, previousChar, text, 'a')
+}
+
+func (r *SPRenderer) smartLeftAngle(out *bytes.Buffer, previousChar byte, text []byte) int {
+ i := 0
+
+ for i < len(text) && text[i] != '>' {
+ i++
+ }
+
+ out.Write(text[:i+1])
+ return i
+}
+
+type smartCallback func(out *bytes.Buffer, previousChar byte, text []byte) int
+
+// NewSmartypantsRenderer constructs a Smartypants renderer object.
+func NewSmartypantsRenderer(flags HTMLFlags) *SPRenderer {
+ var (
+ r SPRenderer
+
+ smartAmpAngled = r.smartAmp(true, false)
+ smartAmpAngledNBSP = r.smartAmp(true, true)
+ smartAmpRegular = r.smartAmp(false, false)
+ smartAmpRegularNBSP = r.smartAmp(false, true)
+
+ addNBSP = flags&SmartypantsQuotesNBSP != 0
+ )
+
+ if flags&SmartypantsAngledQuotes == 0 {
+ r.callbacks['"'] = r.smartDoubleQuote
+ if !addNBSP {
+ r.callbacks['&'] = smartAmpRegular
+ } else {
+ r.callbacks['&'] = smartAmpRegularNBSP
+ }
+ } else {
+ r.callbacks['"'] = r.smartAngledDoubleQuote
+ if !addNBSP {
+ r.callbacks['&'] = smartAmpAngled
+ } else {
+ r.callbacks['&'] = smartAmpAngledNBSP
+ }
+ }
+ r.callbacks['\''] = r.smartSingleQuote
+ r.callbacks['('] = r.smartParens
+ if flags&SmartypantsDashes != 0 {
+ if flags&SmartypantsLatexDashes == 0 {
+ r.callbacks['-'] = r.smartDash
+ } else {
+ r.callbacks['-'] = r.smartDashLatex
+ }
+ }
+ r.callbacks['.'] = r.smartPeriod
+ if flags&SmartypantsFractions == 0 {
+ r.callbacks['1'] = r.smartNumber
+ r.callbacks['3'] = r.smartNumber
+ } else {
+ for ch := '1'; ch <= '9'; ch++ {
+ r.callbacks[ch] = r.smartNumberGeneric
+ }
+ }
+ r.callbacks['<'] = r.smartLeftAngle
+ r.callbacks['`'] = r.smartBacktick
+ return &r
+}
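+
+// A minimal usage sketch of the exported SmartyPants API. The flag combination
+// is chosen only for illustration; normally the HTML renderer drives this
+// through its Smartypants HTMLFlags:
+//
+//	var buf bytes.Buffer
+//	sp := NewSmartypantsRenderer(SmartypantsFractions | SmartypantsDashes)
+//	sp.Process(&buf, []byte(`"half" is 1/2 -- roughly`))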
+
+// Process is the entry point of the Smartypants renderer.
+func (r *SPRenderer) Process(w io.Writer, text []byte) {
+ mark := 0
+ for i := 0; i < len(text); i++ {
+ if action := r.callbacks[text[i]]; action != nil {
+ if i > mark {
+ w.Write(text[mark:i])
+ }
+ previousChar := byte(0)
+ if i > 0 {
+ previousChar = text[i-1]
+ }
+ var tmp bytes.Buffer
+ i += action(&tmp, previousChar, text[i:])
+ w.Write(tmp.Bytes())
+ mark = i + 1
+ }
+ }
+ if mark < len(text) {
+ w.Write(text[mark:])
+ }
+}
diff --git a/vendor/github.com/vbatts/git-validation/.gitignore b/vendor/github.com/vbatts/git-validation/.gitignore
new file mode 100644
index 000000000..265db6acb
--- /dev/null
+++ b/vendor/github.com/vbatts/git-validation/.gitignore
@@ -0,0 +1,2 @@
+*~
+git-validation
diff --git a/vendor/github.com/vbatts/git-validation/.travis.yml b/vendor/github.com/vbatts/git-validation/.travis.yml
new file mode 100644
index 000000000..27cd83c47
--- /dev/null
+++ b/vendor/github.com/vbatts/git-validation/.travis.yml
@@ -0,0 +1,37 @@
+language: go
+
+go_import_path: github.com/vbatts/git-validation
+
+go:
+ - "tip"
+ - "1.x"
+ - "1.11.x"
+ - "1.10.x"
+ - "1.9.x"
+
+env:
+
+matrix:
+
+sudo: false
+
+install: true
+
+notifications:
+ email:
+ on_success: change
+ on_failure: always
+
+before_script:
+ - env
+
+before_install:
+ - go get ./...
+ - if [[ "$(go version |awk '{ print $3 }')" =~ ^go1\.11\. ]] ; then go get -u golang.org/x/lint/golint ; fi
+
+script:
+ - if [[ "$(go version |awk '{ print $3 }')" =~ ^go1\.11\. ]] ; then golint -set_exit_status ./... ; fi
+ - go vet -x ./...
+ - go build .
+ - go test -v ./...
+ - ./git-validation -run DCO,short-subject,dangling-whitespace -v
diff --git a/vendor/github.com/vbatts/git-validation/LICENSE b/vendor/github.com/vbatts/git-validation/LICENSE
new file mode 100644
index 000000000..8efd59c71
--- /dev/null
+++ b/vendor/github.com/vbatts/git-validation/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Vincent Batts
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/vbatts/git-validation/README.md b/vendor/github.com/vbatts/git-validation/README.md
new file mode 100644
index 000000000..354276e02
--- /dev/null
+++ b/vendor/github.com/vbatts/git-validation/README.md
@@ -0,0 +1,106 @@
+# git-validation
+
+A way to do validation on git commits.
+[![Build Status](https://travis-ci.org/vbatts/git-validation.svg?branch=master)](https://travis-ci.org/vbatts/git-validation)
+
+## install
+
+```console
+vbatts@valse ~ (master) $ go get -u github.com/vbatts/git-validation
+```
+
+## usage
+
+The flags:
+```console
+vbatts@valse ~/src/vb/git-validation (master *) $ git-validation -h
+Usage of git-validation:
+ -D debug output
+ -d string
+ git directory to validate from (default ".")
+ -list-rules
+ list the rules registered
+ -range string
+ use this commit range instead
+ -run string
+ comma delimited list of rules to run. Defaults to all.
+ -v verbose
+```
+
+The entire default rule set is run by default:
+```console
+vbatts@valse ~/src/vb/git-validation (master) $ git-validation -list-rules
+"dangling-whitespace" -- checking the presence of dangling whitespaces on line endings
+"DCO" -- makes sure the commits are signed
+"message_regexp" -- checks the commit message for a user provided regular expression
+"short-subject" -- commit subjects are strictly less than 90 (github ellipsis length)
+```
+
+Or, specify comma-delimited rules to run:
+```console
+vbatts@valse ~/src/vb/git-validation (master) $ git-validation -run DCO,short-subject
+ * b243ca4 "README: adding install and usage" ... PASS
+ * d614ccf "*: run tests in a runner" ... PASS
+ * b9413c6 "shortsubject: add a subject length check" ... PASS
+ * 5e74abd "*: comments and golint" ... PASS
+ * 07a982f "git: add verbose output of the commands run" ... PASS
+ * 03bda4b "main: add filtering of rules to run" ... PASS
+ * c10ba9c "Initial commit" ... PASS
+```
+
+Verbosity shows each rule's output:
+```console
+vbatts@valse ~/src/vb/git-validation (master) $ git-validation -v
+ * d614ccf "*: run tests in a runner" ... PASS
+ - PASS - has a valid DCO
+ - PASS - commit subject is 72 characters or less! *yay*
+ * b9413c6 "shortsubject: add a subject length check" ... PASS
+ - PASS - has a valid DCO
+ - PASS - commit subject is 72 characters or less! *yay*
+ * 5e74abd "*: comments and golint" ... PASS
+ - PASS - has a valid DCO
+ - PASS - commit subject is 72 characters or less! *yay*
+ * 07a982f "git: add verbose output of the commands run" ... PASS
+ - PASS - has a valid DCO
+ - PASS - commit subject is 72 characters or less! *yay*
+ * 03bda4b "main: add filtering of rules to run" ... PASS
+ - PASS - has a valid DCO
+ - PASS - commit subject is 72 characters or less! *yay*
+ * c10ba9c "Initial commit" ... PASS
+ - PASS - has a valid DCO
+ - PASS - commit subject is 72 characters or less! *yay*
+```
+
+Here's a failure:
+```console
+vbatts@valse ~/src/vb/git-validation (master) $ git-validation
+ * 49f51a8 "README: adding install and usage" ... FAIL
+ - FAIL - does not have a valid DCO
+ * d614ccf "*: run tests in a runner" ... PASS
+ * b9413c6 "shortsubject: add a subject length check" ... PASS
+ * 5e74abd "*: comments and golint" ... PASS
+ * 07a982f "git: add verbose output of the commands run" ... PASS
+ * 03bda4b "main: add filtering of rules to run" ... PASS
+ * c10ba9c "Initial commit" ... PASS
+1 issues to fix
+vbatts@valse ~/src/vb/git-validation (master) $ echo $?
+1
+```
+
+Excluding paths that are out of the scope of your project:
+```console
+vbatts@valse ~/src/vb/git-validation (master) $ GIT_CHECK_EXCLUDE="./vendor:./git/testdata" git-validation -q -run dangling-whitespace
+...
+```
+using the `GIT_CHECK_EXCLUDE` environment variable. Multiple paths should be separated by a colon (`:`).
+
+
+## Rules
+
+Default rules are added by registering them with the `validate` package,
+usually from a dedicated package per rule.
+See [`./rules/`](./rules/).
+Feel free to contribute more.
+
+Otherwise, by using `validate` package API directly, rules can be handed directly to the `validate.Runner`.
+
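+A minimal sketch of a custom rule, built on the `validate.Rule` and
+`git.CommitEntry` types from this repository (the rule name, package path and
+check itself are only illustrative):
+
+```go
+package nofixup
+
+import (
+	"strings"
+
+	"github.com/vbatts/git-validation/git"
+	"github.com/vbatts/git-validation/validate"
+)
+
+// NoFixup fails commits whose subject still starts with "fixup!".
+var NoFixup = validate.Rule{
+	Name:        "no-fixup",
+	Description: "commit subjects must not be unsquashed fixup! commits",
+	Default:     false,
+	Run: func(r validate.Rule, c git.CommitEntry) (vr validate.Result) {
+		vr.CommitEntry = c
+		vr.Pass = !strings.HasPrefix(c["subject"], "fixup!")
+		if vr.Pass {
+			vr.Msg = "subject is not a fixup! commit"
+		} else {
+			vr.Msg = "subject starts with fixup!; squash before merging"
+		}
+		return vr
+	},
+}
+
+func init() {
+	// Registering makes the rule selectable via `-run no-fixup`.
+	validate.RegisterRule(NoFixup)
+}
+```
+
+Such a rule can also be passed in the rule slice handed to `validate.NewRunner`
+instead of being registered globally.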
diff --git a/vendor/github.com/vbatts/git-validation/git/commits.go b/vendor/github.com/vbatts/git-validation/git/commits.go
new file mode 100644
index 000000000..52af99789
--- /dev/null
+++ b/vendor/github.com/vbatts/git-validation/git/commits.go
@@ -0,0 +1,192 @@
+package git
+
+import (
+ "fmt"
+ "os"
+ "os/exec"
+ "strings"
+
+ version "github.com/hashicorp/go-version"
+ "github.com/sirupsen/logrus"
+)
+
+// Commits returns a set of commits.
+// If commitrange is a git commit range like 12345...54321, then that isolated set of commits is returned.
+// If commitrange is a single commit, all ancestor commits up through that hash are returned.
+// If commitrange is empty, nil is returned.
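+//
+// For example (the commit range below is illustrative):
+//
+//	entries, err := Commits("origin/master..HEAD")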
+func Commits(commitrange string) ([]CommitEntry, error) {
+ cmdArgs := []string{"git", "--no-pager", "log", `--pretty=format:%H`, commitrange}
+ if debug() {
+ logrus.Infof("[git] cmd: %q", strings.Join(cmdArgs, " "))
+ }
+ output, err := exec.Command(cmdArgs[0], cmdArgs[1:]...).Output()
+ if err != nil {
+ logrus.Errorf("mm[git] cmd: %q", strings.Join(cmdArgs, " "))
+ return nil, err
+ }
+ if len(output) == 0 {
+ return nil, nil
+ }
+ commitHashes := strings.Split(strings.TrimSpace(string(output)), "\n")
+ commits := make([]CommitEntry, len(commitHashes))
+ for i, commitHash := range commitHashes {
+ c, err := LogCommit(commitHash)
+ if err != nil {
+ return commits, err
+ }
+ commits[i] = *c
+ }
+ return commits, nil
+}
+
+// FieldNames are for the formatting and rendering of the CommitEntry structs.
+// Keys here are from git log pretty format "format:..."
+var FieldNames = map[string]string{
+ "%h": "abbreviated_commit",
+ "%p": "abbreviated_parent",
+ "%t": "abbreviated_tree",
+ "%aD": "author_date",
+ "%aE": "author_email",
+ "%aN": "author_name",
+ "%b": "body",
+ "%H": "commit",
+ "%N": "commit_notes",
+ "%cD": "committer_date",
+ "%cE": "committer_email",
+ "%cN": "committer_name",
+ "%e": "encoding",
+ "%P": "parent",
+ "%D": "refs",
+ "%f": "sanitized_subject_line",
+ "%GS": "signer",
+ "%GK": "signer_key",
+ "%s": "subject",
+ "%G?": "verification_flag",
+}
+
+func gitVersion() (string, error) {
+ cmd := exec.Command("git", "version")
+ cmd.Stderr = os.Stderr
+ buf, err := cmd.Output()
+ if err != nil {
+ return "", err
+ }
+ return strings.Fields(string(buf))[2], nil
+}
+
+// https://github.com/vbatts/git-validation/issues/37
+var versionWithExcludes = "1.9.5"
+
+func gitVersionNewerThan(otherV string) (bool, error) {
+ gv, err := gitVersion()
+ if err != nil {
+ return false, err
+ }
+ v1, err := version.NewVersion(gv)
+ if err != nil {
+ return false, err
+ }
+ v2, err := version.NewVersion(otherV)
+ if err != nil {
+ return false, err
+ }
+ return v2.Equal(v1) || v2.LessThan(v1), nil
+}
+
+// Check warns if changes introduce whitespace errors.
+// The underlying git command exits non-zero (surfaced here as an error) if any issues are found.
+func Check(commit string) ([]byte, error) {
+ args := []string{
+ "--no-pager", "log", "--check",
+ fmt.Sprintf("%s^..%s", commit, commit),
+ }
+ if excludeEnvList := os.Getenv("GIT_CHECK_EXCLUDE"); excludeEnvList != "" {
+ gitNewEnough, err := gitVersionNewerThan(versionWithExcludes)
+ if err != nil {
+ return nil, err
+ }
+ if gitNewEnough {
+ excludeList := strings.Split(excludeEnvList, ":")
+ for _, exclude := range excludeList {
+ if exclude == "" {
+ continue
+ }
+ args = append(args, "--", ".", fmt.Sprintf(":(exclude)%s", exclude))
+ }
+ }
+ }
+ cmd := exec.Command("git", args...)
+ if debug() {
+ logrus.Infof("[git] cmd: %q", strings.Join(cmd.Args, " "))
+ }
+ cmd.Stderr = os.Stderr
+ return cmd.Output()
+}
+
+// Show returns the diff of a commit.
+//
+// NOTE: This could be expensive for very large commits.
+func Show(commit string) ([]byte, error) {
+ cmd := exec.Command("git", "--no-pager", "show", commit)
+ if debug() {
+ logrus.Infof("[git] cmd: %q", strings.Join(cmd.Args, " "))
+ }
+ cmd.Stderr = os.Stderr
+ return cmd.Output()
+}
+
+// CommitEntry represents a single commit's information from `git`.
+// See also FieldNames
+type CommitEntry map[string]string
+
+// LogCommit assembles the full information on a commit from its commit hash
+func LogCommit(commit string) (*CommitEntry, error) {
+ c := CommitEntry{}
+ for k, v := range FieldNames {
+ cmd := exec.Command("git", "--no-pager", "log", "-1", `--pretty=format:`+k+``, commit)
+ if debug() {
+ logrus.Infof("[git] cmd: %q", strings.Join(cmd.Args, " "))
+ }
+ cmd.Stderr = os.Stderr
+ out, err := cmd.Output()
+ if err != nil {
+ logrus.Errorf("[git] cmd: %q", strings.Join(cmd.Args, " "))
+ return nil, err
+ }
+ c[v] = strings.TrimSpace(string(out))
+ }
+
+ return &c, nil
+}
+
+func debug() bool {
+ return len(os.Getenv("DEBUG")) > 0
+}
+
+// FetchHeadCommit returns the hash of FETCH_HEAD
+func FetchHeadCommit() (string, error) {
+ cmdArgs := []string{"git", "--no-pager", "rev-parse", "--verify", "FETCH_HEAD"}
+ if debug() {
+ logrus.Infof("[git] cmd: %q", strings.Join(cmdArgs, " "))
+ }
+ output, err := exec.Command(cmdArgs[0], cmdArgs[1:]...).Output()
+ if err != nil {
+ logrus.Errorf("[git] cmd: %q", strings.Join(cmdArgs, " "))
+ return "", err
+ }
+ return strings.TrimSpace(string(output)), nil
+}
+
+// HeadCommit returns the hash of HEAD
+func HeadCommit() (string, error) {
+ cmdArgs := []string{"git", "--no-pager", "rev-parse", "--verify", "HEAD"}
+ if debug() {
+ logrus.Infof("[git] cmd: %q", strings.Join(cmdArgs, " "))
+ }
+ output, err := exec.Command(cmdArgs[0], cmdArgs[1:]...).Output()
+ if err != nil {
+ logrus.Errorf("[git] cmd: %q", strings.Join(cmdArgs, " "))
+ return "", err
+ }
+ return strings.TrimSpace(string(output)), nil
+}
diff --git a/vendor/github.com/vbatts/git-validation/go.mod b/vendor/github.com/vbatts/git-validation/go.mod
new file mode 100644
index 000000000..14b7069e7
--- /dev/null
+++ b/vendor/github.com/vbatts/git-validation/go.mod
@@ -0,0 +1,8 @@
+module github.com/vbatts/git-validation
+
+go 1.12
+
+require (
+ github.com/hashicorp/go-version v1.2.0
+ github.com/sirupsen/logrus v1.4.1
+)
diff --git a/vendor/github.com/vbatts/git-validation/go.sum b/vendor/github.com/vbatts/git-validation/go.sum
new file mode 100644
index 000000000..265941fe3
--- /dev/null
+++ b/vendor/github.com/vbatts/git-validation/go.sum
@@ -0,0 +1,15 @@
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/hashicorp/go-version v1.2.0 h1:3vNe/fWF5CBgRIguda1meWhsZHy3m8gCJ5wx+dIzX/E=
+github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
+github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk=
+github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/sirupsen/logrus v1.4.1 h1:GL2rEmy6nsikmW0r8opw9JIRScdMF5hA8cOYLH7In1k=
+github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
+github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33 h1:I6FyU15t786LL7oL/hn43zqTuEGr4PN7F4XJ1p4E3Y8=
+golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
diff --git a/vendor/github.com/vbatts/git-validation/main.go b/vendor/github.com/vbatts/git-validation/main.go
new file mode 100644
index 000000000..cd5f271a4
--- /dev/null
+++ b/vendor/github.com/vbatts/git-validation/main.go
@@ -0,0 +1,92 @@
+package main
+
+import (
+ "flag"
+ "fmt"
+ "log"
+ "os"
+ "strings"
+
+ _ "github.com/vbatts/git-validation/rules/danglingwhitespace"
+ _ "github.com/vbatts/git-validation/rules/dco"
+ _ "github.com/vbatts/git-validation/rules/messageregexp"
+ _ "github.com/vbatts/git-validation/rules/shortsubject"
+ "github.com/vbatts/git-validation/validate"
+)
+
+var (
+ flCommitRange = flag.String("range", "", "use this commit range instead (implies -no-travis)")
+ flListRules = flag.Bool("list-rules", false, "list the rules registered")
+ flRun = flag.String("run", "", "comma delimited list of rules to run. Defaults to all.")
+ flVerbose = flag.Bool("v", false, "verbose")
+ flDebug = flag.Bool("D", false, "debug output")
+ flQuiet = flag.Bool("q", false, "less output")
+ flDir = flag.String("d", ".", "git directory to validate from")
+ flNoTravis = flag.Bool("no-travis", false, "disables travis environment checks (when env TRAVIS=true is set)")
+ flTravisPROnly = flag.Bool("travis-pr-only", true, "when on travis, only run validations if the CI-Build is checking pull-request build")
+)
+
+func main() {
+ flag.Parse()
+
+ if *flDebug {
+ os.Setenv("DEBUG", "1")
+ }
+ if *flQuiet {
+ os.Setenv("QUIET", "1")
+ }
+
+ if *flListRules {
+ for _, r := range validate.RegisteredRules {
+ fmt.Printf("%q -- %s\n", r.Name, r.Description)
+ }
+ return
+ }
+
+ if *flTravisPROnly && strings.ToLower(os.Getenv("TRAVIS_PULL_REQUEST")) == "false" {
+ fmt.Printf("only to check travis PR builds and this not a PR build. yielding.\n")
+ return
+ }
+
+ // rules to be used
+ var rules []validate.Rule
+ for _, r := range validate.RegisteredRules {
+ // only those that are Default
+ if r.Default {
+ rules = append(rules, r)
+ }
+ }
+ // or reduce the set being run to what the user provided
+ if *flRun != "" {
+ rules = validate.FilterRules(validate.RegisteredRules, validate.SanitizeFilters(*flRun))
+ }
+ if os.Getenv("DEBUG") != "" {
+ log.Printf("%#v", rules) // XXX maybe reduce this list
+ }
+
+ var commitRange = *flCommitRange
+ if commitRange == "" {
+ if strings.ToLower(os.Getenv("TRAVIS")) == "true" && !*flNoTravis {
+ if os.Getenv("TRAVIS_COMMIT_RANGE") != "" {
+ commitRange = strings.Replace(os.Getenv("TRAVIS_COMMIT_RANGE"), "...", "..", 1)
+ } else if os.Getenv("TRAVIS_COMMIT") != "" {
+ commitRange = os.Getenv("TRAVIS_COMMIT")
+ }
+ }
+ }
+
+ runner, err := validate.NewRunner(*flDir, rules, commitRange, *flVerbose)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ if err := runner.Run(); err != nil {
+ log.Fatal(err)
+ }
+ _, fail := runner.Results.PassFail()
+ if fail > 0 {
+ fmt.Printf("%d commits to fix\n", fail)
+ os.Exit(1)
+ }
+
+}
diff --git a/vendor/github.com/vbatts/git-validation/rules/danglingwhitespace/rule.go b/vendor/github.com/vbatts/git-validation/rules/danglingwhitespace/rule.go
new file mode 100644
index 000000000..dab3a984b
--- /dev/null
+++ b/vendor/github.com/vbatts/git-validation/rules/danglingwhitespace/rule.go
@@ -0,0 +1,39 @@
+package danglingwhitespace
+
+import (
+ "github.com/vbatts/git-validation/git"
+ "github.com/vbatts/git-validation/validate"
+)
+
+var (
+ // DanglingWhitespace is the rule for checking the presence of dangling
+ // whitespaces on line endings.
+ DanglingWhitespace = validate.Rule{
+ Name: "dangling-whitespace",
+ Description: "checking the presence of dangling whitespaces on line endings",
+ Run: ValidateDanglingWhitespace,
+ Default: true,
+ }
+)
+
+func init() {
+ validate.RegisterRule(DanglingWhitespace)
+}
+
+// ValidateDanglingWhitespace runs Git's check to look for whitespace errors.
+func ValidateDanglingWhitespace(r validate.Rule, c git.CommitEntry) (vr validate.Result) {
+ vr.CommitEntry = c
+ vr.Msg = "commit does not have any whitespace errors"
+ vr.Pass = true
+
+ _, err := git.Check(c["commit"])
+ if err != nil {
+ vr.Pass = false
+ if err.Error() == "exit status 2" {
+ vr.Msg = "has whitespace errors. See `git show --check " + c["commit"] + "`."
+ } else {
+ vr.Msg = "errored with: " + err.Error()
+ }
+ }
+ return
+}
diff --git a/vendor/github.com/vbatts/git-validation/rules/dco/dco.go b/vendor/github.com/vbatts/git-validation/rules/dco/dco.go
new file mode 100644
index 000000000..a42ea06ac
--- /dev/null
+++ b/vendor/github.com/vbatts/git-validation/rules/dco/dco.go
@@ -0,0 +1,51 @@
+package dco
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/vbatts/git-validation/git"
+ "github.com/vbatts/git-validation/validate"
+)
+
+func init() {
+ validate.RegisterRule(DcoRule)
+}
+
+var (
+ // ValidDCO is the regexp for signed off DCO
+ ValidDCO = regexp.MustCompile(`^Signed-off-by: ([^<]+) <([^<>@]+@[^<>]+)>$`)
+ // DcoRule is the rule being registered
+ DcoRule = validate.Rule{
+ Name: "DCO",
+ Description: "makes sure the commits are signed",
+ Run: ValidateDCO,
+ Default: true,
+ }
+)
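+
+// For reference, a trailer that satisfies ValidDCO looks like the following
+// (the name and address are illustrative):
+//
+//	Signed-off-by: Jane Doe <jane@example.com>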
+
+// ValidateDCO checks that the commit has been signed off, per the DCO process
+func ValidateDCO(r validate.Rule, c git.CommitEntry) (vr validate.Result) {
+ vr.CommitEntry = c
+ if len(strings.Split(c["parent"], " ")) > 1 {
+ vr.Pass = true
+ vr.Msg = "merge commits do not require DCO"
+ return vr
+ }
+
+ hasValid := false
+ for _, line := range strings.Split(c["body"], "\n") {
+ if ValidDCO.MatchString(line) {
+ hasValid = true
+ }
+ }
+ if !hasValid {
+ vr.Pass = false
+ vr.Msg = "does not have a valid DCO"
+ } else {
+ vr.Pass = true
+ vr.Msg = "has a valid DCO"
+ }
+
+ return vr
+}
diff --git a/vendor/github.com/vbatts/git-validation/rules/messageregexp/rule.go b/vendor/github.com/vbatts/git-validation/rules/messageregexp/rule.go
new file mode 100644
index 000000000..98587f262
--- /dev/null
+++ b/vendor/github.com/vbatts/git-validation/rules/messageregexp/rule.go
@@ -0,0 +1,61 @@
+package messageregexp
+
+import (
+ "fmt"
+ "regexp"
+ "strings"
+
+ "github.com/vbatts/git-validation/git"
+ "github.com/vbatts/git-validation/validate"
+)
+
+func init() {
+ validate.RegisterRule(RegexpRule)
+}
+
+var (
+ // RegexpRule for validating a user provided regex on the commit messages
+ RegexpRule = validate.Rule{
+ Name: "message_regexp",
+ Description: "checks the commit message for a user provided regular expression",
+ Run: ValidateMessageRegexp,
+ Default: false, // only for users specifically calling it through -run ...
+ }
+)
+
+// ValidateMessageRegexp is the message regex func to run
+func ValidateMessageRegexp(r validate.Rule, c git.CommitEntry) (vr validate.Result) {
+ if r.Value == "" {
+ vr.Pass = true
+ vr.Msg = "noop: message_regexp value is blank"
+ return vr
+ }
+
+ re := regexp.MustCompile(r.Value)
+ vr.CommitEntry = c
+ if len(strings.Split(c["parent"], " ")) > 1 {
+ vr.Pass = true
+ vr.Msg = "merge commits are not checked for message_regexp"
+ return vr
+ }
+
+ hasValid := false
+ for _, line := range strings.Split(c["subject"], "\n") {
+ if re.MatchString(line) {
+ hasValid = true
+ }
+ }
+ for _, line := range strings.Split(c["body"], "\n") {
+ if re.MatchString(line) {
+ hasValid = true
+ }
+ }
+ if !hasValid {
+ vr.Pass = false
+ vr.Msg = fmt.Sprintf("commit message does not match %q", r.Value)
+ } else {
+ vr.Pass = true
+ vr.Msg = fmt.Sprintf("commit message matches %q", r.Value)
+ }
+ return vr
+}
diff --git a/vendor/github.com/vbatts/git-validation/rules/shortsubject/shortsubject.go b/vendor/github.com/vbatts/git-validation/rules/shortsubject/shortsubject.go
new file mode 100644
index 000000000..8fd033601
--- /dev/null
+++ b/vendor/github.com/vbatts/git-validation/rules/shortsubject/shortsubject.go
@@ -0,0 +1,44 @@
+package shortsubject
+
+import (
+ "strings"
+
+ "github.com/vbatts/git-validation/git"
+ "github.com/vbatts/git-validation/validate"
+)
+
+var (
+ // ShortSubjectRule is the rule being registered
+ ShortSubjectRule = validate.Rule{
+ Name: "short-subject",
+ Description: "commit subjects are strictly less than 90 (github ellipsis length)",
+ Run: ValidateShortSubject,
+ Default: true,
+ }
+)
+
+func init() {
+ validate.RegisterRule(ShortSubjectRule)
+}
+
+// ValidateShortSubject checks that the commit's subject is strictly less than
+// 90 characters (preferably not more than 72 chars).
+func ValidateShortSubject(r validate.Rule, c git.CommitEntry) (vr validate.Result) {
+ if len(strings.Split(c["parent"], " ")) > 1 {
+ vr.Pass = true
+ vr.Msg = "merge commits do not require length check"
+ return vr
+ }
+ if len(c["subject"]) >= 90 {
+ vr.Pass = false
+ vr.Msg = "commit subject exceeds 90 characters"
+ return
+ }
+ vr.Pass = true
+ if len(c["subject"]) > 72 {
+ vr.Msg = "commit subject is under 90 characters, but is still more than 72 chars"
+ } else {
+ vr.Msg = "commit subject is 72 characters or less! *yay*"
+ }
+ return
+}
diff --git a/vendor/github.com/vbatts/git-validation/validate/rules.go b/vendor/github.com/vbatts/git-validation/validate/rules.go
new file mode 100644
index 000000000..38126a4f0
--- /dev/null
+++ b/vendor/github.com/vbatts/git-validation/validate/rules.go
@@ -0,0 +1,134 @@
+package validate
+
+import (
+ "sort"
+ "strings"
+ "sync"
+
+ "github.com/vbatts/git-validation/git"
+)
+
+var (
+ // RegisteredRules are the available validations to perform on git commits
+ RegisteredRules = []Rule{}
+ registerRuleLock = sync.Mutex{}
+)
+
+// RegisterRule includes the Rule in the available set to use
+func RegisterRule(vr Rule) {
+ registerRuleLock.Lock()
+ defer registerRuleLock.Unlock()
+ RegisteredRules = append(RegisteredRules, vr)
+}
+
+// Rule will operate over a provided git.CommitEntry, and return a result.
+type Rule struct {
+ Name string // short name for reference in the `-run=...` flag
+ Value string // value to configure for the rule (i.e. a regexp to check for in the commit message)
+ Description string // longer Description for readability
+ Run func(Rule, git.CommitEntry) Result
+ Default bool // whether the registered rule is run by default
+}
+
+// Commit processes the given rules on the provided commit, and returns the result set.
+func Commit(c git.CommitEntry, rules []Rule) Results {
+ results := Results{}
+ for _, r := range rules {
+ results = append(results, r.Run(r, c))
+ }
+ return results
+}
+
+// Result is the result for a single validation of a commit.
+type Result struct {
+ CommitEntry git.CommitEntry
+ Pass bool
+ Msg string
+}
+
+// Results is a set of results. This type makes it easy to define helpers like PassFail below.
+type Results []Result
+
+// PassFail gives a quick over/under of passes and failures of the results in this set
+func (vr Results) PassFail() (pass int, fail int) {
+ for _, res := range vr {
+ if res.Pass {
+ pass++
+ } else {
+ fail++
+ }
+ }
+ return pass, fail
+}
+
+// SanitizeFilters takes a comma-delimited list and returns the items in the
+// list, split on "," and trimmed of surrounding whitespace.
+func SanitizeFilters(filtStr string) (filters []string) {
+ for _, item := range strings.Split(filtStr, ",") {
+ filters = append(filters, strings.TrimSpace(item))
+ }
+ return
+}
+
+// FilterRules takes a set of rules and a list of short names to include, and
+// returns the reduced set. The comparison is case insensitive.
+//
+// Some `includes` rules have values assigned to them.
+// e.g. -run "dco,message_regexp='^JIRA-[0-9]+ [A-Z].*$'"
+//
+func FilterRules(rules []Rule, includes []string) []Rule {
+ ret := []Rule{}
+
+ for _, r := range rules {
+ for i := range includes {
+ if strings.Contains(includes[i], "=") {
+ chunks := strings.SplitN(includes[i], "=", 2)
+ if strings.ToLower(r.Name) == strings.ToLower(chunks[0]) {
+ // for these rules, the Name won't be unique per se. There may be
+ // multiple "regexp=" with different values. We'll need to set the
+ // .Value = chunk[1] and ensure r is dup'ed so they don't clobber
+ // each other.
+ newR := Rule(r)
+ newR.Value = chunks[1]
+ ret = append(ret, newR)
+ }
+ } else {
+ if strings.ToLower(r.Name) == strings.ToLower(includes[i]) {
+ ret = append(ret, r)
+ }
+ }
+ }
+ }
+
+ return ret
+}
+
+// StringsSliceEqual compares two string slices for equality
+func StringsSliceEqual(a, b []string) bool {
+ if !sort.StringsAreSorted(a) {
+ sort.Strings(a)
+ }
+ if !sort.StringsAreSorted(b) {
+ sort.Strings(b)
+ }
+ for i := range b {
+ if !StringsSliceContains(a, b[i]) {
+ return false
+ }
+ }
+ for i := range a {
+ if !StringsSliceContains(b, a[i]) {
+ return false
+ }
+ }
+ return true
+}
+
+// StringsSliceContains checks for the presence of a word in a string slice
+func StringsSliceContains(a []string, b string) bool {
+ if !sort.StringsAreSorted(a) {
+ sort.Strings(a)
+ }
+ i := sort.SearchStrings(a, b)
+ return i < len(a) && a[i] == b
+}
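
As a usage sketch (outside the vendored tree), the two helpers above are typically driven from a -run flag value; the package import paths match the vendored code, while the flag string below is illustrative.

package main

import (
	"fmt"

	_ "github.com/vbatts/git-validation/rules/dco"
	_ "github.com/vbatts/git-validation/rules/messageregexp"
	"github.com/vbatts/git-validation/validate"
)

func main() {
	runFlag := "DCO, message_regexp=^vendor: .*$"

	// SanitizeFilters splits on "," and trims surrounding whitespace.
	filters := validate.SanitizeFilters(runFlag)

	// FilterRules matches rule names case-insensitively and copies anything
	// after "=" into Rule.Value (used by message_regexp).
	for _, r := range validate.FilterRules(validate.RegisteredRules, filters) {
		fmt.Printf("%s (value=%q)\n", r.Name, r.Value)
	}
}
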
diff --git a/vendor/github.com/vbatts/git-validation/validate/runner.go b/vendor/github.com/vbatts/git-validation/validate/runner.go
new file mode 100644
index 000000000..eea61fba1
--- /dev/null
+++ b/vendor/github.com/vbatts/git-validation/validate/runner.go
@@ -0,0 +1,109 @@
+package validate
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+
+ "github.com/vbatts/git-validation/git"
+)
+
+// Runner is the type for processing a set of rules against a range of commits
+type Runner struct {
+ Root string
+ Rules []Rule
+ Results Results
+ Verbose bool
+ CommitRange string // if this is empty, then it will default to FETCH_HEAD, then HEAD
+}
+
+// NewRunner returns an initialized Runner.
+func NewRunner(root string, rules []Rule, commitrange string, verbose bool) (*Runner, error) {
+ newroot, err := filepath.Abs(root)
+ if err != nil {
+ return nil, fmt.Errorf("failed to get absolute path of %q: %s", root, err)
+ }
+ if commitrange == "" {
+ var err error
+ cwd, err := os.Getwd()
+ if err != nil {
+ return nil, err
+ }
+ defer os.Chdir(cwd)
+
+ if err := os.Chdir(newroot); err != nil {
+ return nil, err
+ }
+ commitrange, err = git.FetchHeadCommit()
+ if err != nil {
+ commitrange, err = git.HeadCommit()
+ if err != nil {
+ return nil, err
+ }
+ }
+ }
+ return &Runner{
+ Root: newroot,
+ Rules: rules,
+ CommitRange: commitrange,
+ Verbose: verbose,
+ }, nil
+}
+
+// Run processes the rules for each commit in the range provided
+func (r *Runner) Run() error {
+ cwd, err := os.Getwd()
+ if err != nil {
+ return err
+ }
+ defer os.Chdir(cwd)
+
+ if err := os.Chdir(r.Root); err != nil {
+ return err
+ }
+
+ // collect the entries
+ c, err := git.Commits(r.CommitRange)
+ if err != nil {
+ return err
+ }
+
+ // run them and show results
+ for _, commit := range c {
+ if os.Getenv("QUIET") == "" {
+ fmt.Printf(" * %s %q ... ", commit["abbreviated_commit"], commit["subject"])
+ }
+ vr := Commit(commit, r.Rules)
+ r.Results = append(r.Results, vr...)
+ _, fail := vr.PassFail()
+ if os.Getenv("QUIET") != "" {
+ if fail != 0 {
+ for _, res := range vr {
+ if !res.Pass {
+ fmt.Printf(" %s - FAIL - %s\n", commit["abbreviated_commit"], res.Msg)
+ }
+ }
+ }
+ // everything else in the loop is printing output.
+ // If we're quiet, then just continue
+ continue
+ }
+ if fail == 0 {
+ fmt.Println("PASS")
+ } else {
+ fmt.Println("FAIL")
+ }
+ for _, res := range vr {
+ if r.Verbose {
+ if res.Pass {
+ fmt.Printf(" - PASS - %s\n", res.Msg)
+ } else {
+ fmt.Printf(" - FAIL - %s\n", res.Msg)
+ }
+ } else if !res.Pass {
+ fmt.Printf(" - FAIL - %s\n", res.Msg)
+ }
+ }
+ }
+ return nil
+}
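
Tying the pieces together, the following is a hedged sketch of how a caller (mirroring main.go above) drives the Runner; the commit range "HEAD~5..HEAD" is illustrative, and an empty range would fall back to FETCH_HEAD and then HEAD as documented on the struct.

package main

import (
	"log"

	_ "github.com/vbatts/git-validation/rules/dco"
	_ "github.com/vbatts/git-validation/rules/shortsubject"
	"github.com/vbatts/git-validation/validate"
)

func main() {
	// Keep only the rules that are enabled by default.
	var rules []validate.Rule
	for _, r := range validate.RegisteredRules {
		if r.Default {
			rules = append(rules, r)
		}
	}

	runner, err := validate.NewRunner(".", rules, "HEAD~5..HEAD", false)
	if err != nil {
		log.Fatal(err)
	}
	if err := runner.Run(); err != nil {
		log.Fatal(err)
	}
	if _, fail := runner.Results.PassFail(); fail > 0 {
		log.Fatalf("%d validation failure(s)", fail)
	}
}
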
diff --git a/vendor/golang.org/x/mod/LICENSE b/vendor/golang.org/x/mod/LICENSE
new file mode 100644
index 000000000..6a66aea5e
--- /dev/null
+++ b/vendor/golang.org/x/mod/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/golang.org/x/mod/PATENTS b/vendor/golang.org/x/mod/PATENTS
new file mode 100644
index 000000000..733099041
--- /dev/null
+++ b/vendor/golang.org/x/mod/PATENTS
@@ -0,0 +1,22 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the Go project.
+
+Google hereby grants to You a perpetual, worldwide, non-exclusive,
+no-charge, royalty-free, irrevocable (except as stated in this section)
+patent license to make, have made, use, offer to sell, sell, import,
+transfer and otherwise run, modify and propagate the contents of this
+implementation of Go, where such license applies only to those patent
+claims, both currently owned or controlled by Google and acquired in
+the future, licensable by Google that are necessarily infringed by this
+implementation of Go. This grant does not include claims that would be
+infringed only as a consequence of further modification of this
+implementation. If you or your agent or exclusive licensee institute or
+order or agree to the institution of patent litigation against any
+entity (including a cross-claim or counterclaim in a lawsuit) alleging
+that this implementation of Go or any code incorporated within this
+implementation of Go constitutes direct or contributory patent
+infringement, or inducement of patent infringement, then any patent
+rights granted to you under this License for this implementation of Go
+shall terminate as of the date such litigation is filed.
diff --git a/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go b/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go
new file mode 100644
index 000000000..2681af35a
--- /dev/null
+++ b/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go
@@ -0,0 +1,78 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package lazyregexp is a thin wrapper over regexp, allowing the use of global
+// regexp variables without forcing them to be compiled at init.
+package lazyregexp
+
+import (
+ "os"
+ "regexp"
+ "strings"
+ "sync"
+)
+
+// Regexp is a wrapper around regexp.Regexp, where the underlying regexp will be
+// compiled the first time it is needed.
+type Regexp struct {
+ str string
+ once sync.Once
+ rx *regexp.Regexp
+}
+
+func (r *Regexp) re() *regexp.Regexp {
+ r.once.Do(r.build)
+ return r.rx
+}
+
+func (r *Regexp) build() {
+ r.rx = regexp.MustCompile(r.str)
+ r.str = ""
+}
+
+func (r *Regexp) FindSubmatch(s []byte) [][]byte {
+ return r.re().FindSubmatch(s)
+}
+
+func (r *Regexp) FindStringSubmatch(s string) []string {
+ return r.re().FindStringSubmatch(s)
+}
+
+func (r *Regexp) FindStringSubmatchIndex(s string) []int {
+ return r.re().FindStringSubmatchIndex(s)
+}
+
+func (r *Regexp) ReplaceAllString(src, repl string) string {
+ return r.re().ReplaceAllString(src, repl)
+}
+
+func (r *Regexp) FindString(s string) string {
+ return r.re().FindString(s)
+}
+
+func (r *Regexp) FindAllString(s string, n int) []string {
+ return r.re().FindAllString(s, n)
+}
+
+func (r *Regexp) MatchString(s string) bool {
+ return r.re().MatchString(s)
+}
+
+func (r *Regexp) SubexpNames() []string {
+ return r.re().SubexpNames()
+}
+
+var inTest = len(os.Args) > 0 && strings.HasSuffix(strings.TrimSuffix(os.Args[0], ".exe"), ".test")
+
+// New creates a new lazy regexp, delaying the compiling work until it is first
+// needed. If the code is being run as part of tests, the regexp compiling will
+// happen immediately.
+func New(str string) *Regexp {
+ lr := &Regexp{str: str}
+ if inTest {
+ // In tests, always compile the regexps early.
+ lr.re()
+ }
+ return lr
+}
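
Since lazyregexp sits under internal/ in golang.org/x/mod, code outside that module cannot import it; the sketch below re-creates the same deferred-compilation idea with only the standard library, so the pattern is compiled on first use rather than at init.

package main

import (
	"fmt"
	"regexp"
	"sync"
)

type lazyRegexp struct {
	str  string
	once sync.Once
	rx   *regexp.Regexp
}

func (l *lazyRegexp) re() *regexp.Regexp {
	l.once.Do(func() { l.rx = regexp.MustCompile(l.str) })
	return l.rx
}

// Illustrative global: declaring it costs nothing until it is first matched.
var semverish = &lazyRegexp{str: `^v[0-9]+\.[0-9]+\.[0-9]+$`}

func main() {
	fmt.Println(semverish.re().MatchString("v1.2.3")) // true; the regexp is compiled here
	fmt.Println(semverish.re().MatchString("1.2.3"))  // false; reuses the compiled regexp
}
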
diff --git a/vendor/golang.org/x/mod/module/module.go b/vendor/golang.org/x/mod/module/module.go
new file mode 100644
index 000000000..355b5a456
--- /dev/null
+++ b/vendor/golang.org/x/mod/module/module.go
@@ -0,0 +1,841 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package module defines the module.Version type along with support code.
+//
+// The module.Version type is a simple Path, Version pair:
+//
+// type Version struct {
+// Path string
+// Version string
+// }
+//
+// There are no restrictions imposed directly by use of this structure,
+// but additional checking functions, most notably Check, verify that
+// a particular path, version pair is valid.
+//
+// Escaped Paths
+//
+// Module paths appear as substrings of file system paths
+// (in the download cache) and of web server URLs in the proxy protocol.
+// In general we cannot rely on file systems to be case-sensitive,
+// nor can we rely on web servers, since they read from file systems.
+// That is, we cannot rely on the file system to keep rsc.io/QUOTE
+// and rsc.io/quote separate. Windows and macOS don't.
+// Instead, we must never require two different casings of a file path.
+// Because we want the download cache to match the proxy protocol,
+// and because we want the proxy protocol to be possible to serve
+// from a tree of static files (which might be stored on a case-insensitive
+// file system), the proxy protocol must never require two different casings
+// of a URL path either.
+//
+// One possibility would be to make the escaped form be the lowercase
+// hexadecimal encoding of the actual path bytes. This would avoid ever
+// needing different casings of a file path, but it would be fairly illegible
+// to most programmers when those paths appeared in the file system
+// (including in file paths in compiler errors and stack traces)
+// in web server logs, and so on. Instead, we want a safe escaped form that
+// leaves most paths unaltered.
+//
+// The safe escaped form is to replace every uppercase letter
+// with an exclamation mark followed by the letter's lowercase equivalent.
+//
+// For example,
+//
+// github.com/Azure/azure-sdk-for-go -> github.com/!azure/azure-sdk-for-go.
+// github.com/GoogleCloudPlatform/cloudsql-proxy -> github.com/!google!cloud!platform/cloudsql-proxy
+// github.com/Sirupsen/logrus -> github.com/!sirupsen/logrus.
+//
+// Import paths that avoid upper-case letters are left unchanged.
+// Note that because import paths are ASCII-only and avoid various
+// problematic punctuation (like : < and >), the escaped form is also ASCII-only
+// and avoids the same problematic punctuation.
+//
+// Import paths have never allowed exclamation marks, so there is no
+// need to define how to escape a literal !.
+//
+// Unicode Restrictions
+//
+// Today, paths are disallowed from using Unicode.
+//
+// Although paths are currently disallowed from using Unicode,
+// we would like at some point to allow Unicode letters as well, to assume that
+// file systems and URLs are Unicode-safe (storing UTF-8), and apply
+// the !-for-uppercase convention for escaping them in the file system.
+// But there are at least two subtle considerations.
+//
+// First, note that not all case-fold equivalent distinct runes
+// form an upper/lower pair.
+// For example, U+004B ('K'), U+006B ('k'), and U+212A ('K' for Kelvin)
+// are three distinct runes that case-fold to each other.
+// When we do add Unicode letters, we must not assume that upper/lower
+// are the only case-equivalent pairs.
+// Perhaps the Kelvin symbol would be disallowed entirely, for example.
+// Or perhaps it would escape as "!!k", or perhaps as "(212A)".
+//
+// Second, it would be nice to allow Unicode marks as well as letters,
+// but marks include combining marks, and then we must deal not
+// only with case folding but also normalization: both U+00E9 ('é')
+// and U+0065 U+0301 ('e' followed by combining acute accent)
+// look the same on the page and are treated by some file systems
+// as the same path. If we do allow Unicode marks in paths, there
+// must be some kind of normalization to allow only one canonical
+// encoding of any character used in an import path.
+package module
+
+// IMPORTANT NOTE
+//
+// This file essentially defines the set of valid import paths for the go command.
+// There are many subtle considerations, including Unicode ambiguity,
+// security, network, and file system representations.
+//
+// This file also defines the set of valid module path and version combinations,
+// another topic with many subtle considerations.
+//
+// Changes to the semantics in this file require approval from rsc.
+
+import (
+ "fmt"
+ "path"
+ "sort"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+
+ "golang.org/x/mod/semver"
+ errors "golang.org/x/xerrors"
+)
+
+// A Version (for clients, a module.Version) is defined by a module path and version pair.
+// These are stored in their plain (unescaped) form.
+type Version struct {
+ // Path is a module path, like "golang.org/x/text" or "rsc.io/quote/v2".
+ Path string
+
+ // Version is usually a semantic version in canonical form.
+ // There are three exceptions to this general rule.
+ // First, the top-level target of a build has no specific version
+ // and uses Version = "".
+ // Second, during MVS calculations the version "none" is used
+ // to represent the decision to take no version of a given module.
+ // Third, filesystem paths found in "replace" directives are
+ // represented by a path with an empty version.
+ Version string `json:",omitempty"`
+}
+
+// String returns a representation of the Version suitable for logging
+// (Path@Version, or just Path if Version is empty).
+func (m Version) String() string {
+ if m.Version == "" {
+ return m.Path
+ }
+ return m.Path + "@" + m.Version
+}
+
+// A ModuleError indicates an error specific to a module.
+type ModuleError struct {
+ Path string
+ Version string
+ Err error
+}
+
+// VersionError returns a ModuleError derived from a Version and error,
+// or err itself if it is already such an error.
+func VersionError(v Version, err error) error {
+ var mErr *ModuleError
+ if errors.As(err, &mErr) && mErr.Path == v.Path && mErr.Version == v.Version {
+ return err
+ }
+ return &ModuleError{
+ Path: v.Path,
+ Version: v.Version,
+ Err: err,
+ }
+}
+
+func (e *ModuleError) Error() string {
+ if v, ok := e.Err.(*InvalidVersionError); ok {
+ return fmt.Sprintf("%s@%s: invalid %s: %v", e.Path, v.Version, v.noun(), v.Err)
+ }
+ if e.Version != "" {
+ return fmt.Sprintf("%s@%s: %v", e.Path, e.Version, e.Err)
+ }
+ return fmt.Sprintf("module %s: %v", e.Path, e.Err)
+}
+
+func (e *ModuleError) Unwrap() error { return e.Err }
+
+// An InvalidVersionError indicates an error specific to a version, with the
+// module path unknown or specified externally.
+//
+// A ModuleError may wrap an InvalidVersionError, but an InvalidVersionError
+// must not wrap a ModuleError.
+type InvalidVersionError struct {
+ Version string
+ Pseudo bool
+ Err error
+}
+
+// noun returns either "version" or "pseudo-version", depending on whether
+// e.Version is a pseudo-version.
+func (e *InvalidVersionError) noun() string {
+ if e.Pseudo {
+ return "pseudo-version"
+ }
+ return "version"
+}
+
+func (e *InvalidVersionError) Error() string {
+ return fmt.Sprintf("%s %q invalid: %s", e.noun(), e.Version, e.Err)
+}
+
+func (e *InvalidVersionError) Unwrap() error { return e.Err }
+
+// An InvalidPathError indicates a module, import, or file path doesn't
+// satisfy all naming constraints. See CheckPath, CheckImportPath,
+// and CheckFilePath for specific restrictions.
+type InvalidPathError struct {
+ Kind string // "module", "import", or "file"
+ Path string
+ Err error
+}
+
+func (e *InvalidPathError) Error() string {
+ return fmt.Sprintf("malformed %s path %q: %v", e.Kind, e.Path, e.Err)
+}
+
+func (e *InvalidPathError) Unwrap() error { return e.Err }
+
+// Check checks that a given module path, version pair is valid.
+// In addition to the path being a valid module path
+// and the version being a valid semantic version,
+// the two must correspond.
+// For example, the path "yaml/v2" only corresponds to
+// semantic versions beginning with "v2.".
+func Check(path, version string) error {
+ if err := CheckPath(path); err != nil {
+ return err
+ }
+ if !semver.IsValid(version) {
+ return &ModuleError{
+ Path: path,
+ Err: &InvalidVersionError{Version: version, Err: errors.New("not a semantic version")},
+ }
+ }
+ _, pathMajor, _ := SplitPathVersion(path)
+ if err := CheckPathMajor(version, pathMajor); err != nil {
+ return &ModuleError{Path: path, Err: err}
+ }
+ return nil
+}
+
+// firstPathOK reports whether r can appear in the first element of a module path.
+// The first element of the path must be an LDH domain name, at least for now.
+// To avoid case ambiguity, the domain name must be entirely lower case.
+func firstPathOK(r rune) bool {
+ return r == '-' || r == '.' ||
+ '0' <= r && r <= '9' ||
+ 'a' <= r && r <= 'z'
+}
+
+// modPathOK reports whether r can appear in a module path element.
+// Paths can be ASCII letters, ASCII digits, and limited ASCII punctuation: - . _ and ~.
+//
+// This matches what "go get" has historically recognized in import paths,
+// and avoids confusing sequences like '%20' or '+' that would change meaning
+// if used in a URL.
+//
+// TODO(rsc): We would like to allow Unicode letters, but that requires additional
+// care in the safe encoding (see "escaped paths" above).
+func modPathOK(r rune) bool {
+ if r < utf8.RuneSelf {
+ return r == '-' || r == '.' || r == '_' || r == '~' ||
+ '0' <= r && r <= '9' ||
+ 'A' <= r && r <= 'Z' ||
+ 'a' <= r && r <= 'z'
+ }
+ return false
+}
+
+// importPathOK reports whether r can appear in a package import path element.
+//
+// Import paths are intermediate between module paths and file paths: we
+// disallow characters that would be confusing or ambiguous as arguments to
+// 'go get' (such as '@' and ' ' ), but allow certain characters that are
+// otherwise-unambiguous on the command line and historically used for some
+// binary names (such as '++' as a suffix for compiler binaries and wrappers).
+func importPathOK(r rune) bool {
+ return modPathOK(r) || r == '+'
+}
+
+// fileNameOK reports whether r can appear in a file name.
+// For now we allow all Unicode letters but otherwise limit to pathOK plus a few more punctuation characters.
+// If we expand the set of allowed characters here, we have to
+// work harder at detecting potential case-folding and normalization collisions.
+// See note about "escaped paths" above.
+func fileNameOK(r rune) bool {
+ if r < utf8.RuneSelf {
+ // Entire set of ASCII punctuation, from which we remove characters:
+ // ! " # $ % & ' ( ) * + , - . / : ; < = > ? @ [ \ ] ^ _ ` { | } ~
+ // We disallow some shell special characters: " ' * < > ? ` |
+ // (Note that some of those are disallowed by the Windows file system as well.)
+ // We also disallow path separators / : and \ (fileNameOK is only called on path element characters).
+ // We allow spaces (U+0020) in file names.
+ const allowed = "!#$%&()+,-.=@[]^_{}~ "
+ if '0' <= r && r <= '9' || 'A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' {
+ return true
+ }
+ return strings.ContainsRune(allowed, r)
+ }
+ // It may be OK to add more ASCII punctuation here, but only carefully.
+ // For example Windows disallows < > \, and macOS disallows :, so we must not allow those.
+ return unicode.IsLetter(r)
+}
+
+// CheckPath checks that a module path is valid.
+// A valid module path is a valid import path, as checked by CheckImportPath,
+// with three additional constraints.
+// First, the leading path element (up to the first slash, if any),
+// by convention a domain name, must contain only lower-case ASCII letters,
+// ASCII digits, dots (U+002E), and dashes (U+002D);
+// it must contain at least one dot and cannot start with a dash.
+// Second, for a final path element of the form /vN, where N looks numeric
+// (ASCII digits and dots), N must not begin with a leading zero, must not be /v1,
+// and must not contain any dots. For paths beginning with "gopkg.in/",
+// this second requirement is replaced by a requirement that the path
+// follow the gopkg.in server's conventions.
+// Third, no path element may begin with a dot.
+func CheckPath(path string) (err error) {
+ defer func() {
+ if err != nil {
+ err = &InvalidPathError{Kind: "module", Path: path, Err: err}
+ }
+ }()
+
+ if err := checkPath(path, modulePath); err != nil {
+ return err
+ }
+ i := strings.Index(path, "/")
+ if i < 0 {
+ i = len(path)
+ }
+ if i == 0 {
+ return fmt.Errorf("leading slash")
+ }
+ if !strings.Contains(path[:i], ".") {
+ return fmt.Errorf("missing dot in first path element")
+ }
+ if path[0] == '-' {
+ return fmt.Errorf("leading dash in first path element")
+ }
+ for _, r := range path[:i] {
+ if !firstPathOK(r) {
+ return fmt.Errorf("invalid char %q in first path element", r)
+ }
+ }
+ if _, _, ok := SplitPathVersion(path); !ok {
+ return fmt.Errorf("invalid version")
+ }
+ return nil
+}
+
+// CheckImportPath checks that an import path is valid.
+//
+// A valid import path consists of one or more valid path elements
+// separated by slashes (U+002F). (It must not begin with nor end in a slash.)
+//
+// A valid path element is a non-empty string made up of
+// ASCII letters, ASCII digits, and limited ASCII punctuation: - . _ and ~.
+// It must not end with a dot (U+002E), nor contain two dots in a row.
+//
+// The element prefix up to the first dot must not be a reserved file name
+// on Windows, regardless of case (CON, com1, NuL, and so on). The element
+// must not have a suffix of a tilde followed by one or more ASCII digits
+// (to exclude path elements that look like Windows short-names).
+//
+// CheckImportPath may be less restrictive in the future, but see the
+// top-level package documentation for additional information about
+// subtleties of Unicode.
+func CheckImportPath(path string) error {
+ if err := checkPath(path, importPath); err != nil {
+ return &InvalidPathError{Kind: "import", Path: path, Err: err}
+ }
+ return nil
+}
+
+// pathKind indicates what kind of path we're checking. Module paths,
+// import paths, and file paths have different restrictions.
+type pathKind int
+
+const (
+ modulePath pathKind = iota
+ importPath
+ filePath
+)
+
+// checkPath checks that a general path is valid. kind indicates what
+// specific constraints should be applied.
+//
+// checkPath returns an error describing why the path is not valid.
+// Because these checks apply to module, import, and file paths,
+// and because other checks may be applied, the caller is expected to wrap
+// this error with InvalidPathError.
+func checkPath(path string, kind pathKind) error {
+ if !utf8.ValidString(path) {
+ return fmt.Errorf("invalid UTF-8")
+ }
+ if path == "" {
+ return fmt.Errorf("empty string")
+ }
+ if path[0] == '-' && kind != filePath {
+ return fmt.Errorf("leading dash")
+ }
+ if strings.Contains(path, "//") {
+ return fmt.Errorf("double slash")
+ }
+ if path[len(path)-1] == '/' {
+ return fmt.Errorf("trailing slash")
+ }
+ elemStart := 0
+ for i, r := range path {
+ if r == '/' {
+ if err := checkElem(path[elemStart:i], kind); err != nil {
+ return err
+ }
+ elemStart = i + 1
+ }
+ }
+ if err := checkElem(path[elemStart:], kind); err != nil {
+ return err
+ }
+ return nil
+}
+
+// checkElem checks whether an individual path element is valid.
+func checkElem(elem string, kind pathKind) error {
+ if elem == "" {
+ return fmt.Errorf("empty path element")
+ }
+ if strings.Count(elem, ".") == len(elem) {
+ return fmt.Errorf("invalid path element %q", elem)
+ }
+ if elem[0] == '.' && kind == modulePath {
+ return fmt.Errorf("leading dot in path element")
+ }
+ if elem[len(elem)-1] == '.' {
+ return fmt.Errorf("trailing dot in path element")
+ }
+ for _, r := range elem {
+ ok := false
+ switch kind {
+ case modulePath:
+ ok = modPathOK(r)
+ case importPath:
+ ok = importPathOK(r)
+ case filePath:
+ ok = fileNameOK(r)
+ default:
+ panic(fmt.Sprintf("internal error: invalid kind %v", kind))
+ }
+ if !ok {
+ return fmt.Errorf("invalid char %q", r)
+ }
+ }
+
+ // Windows disallows a bunch of path elements, sadly.
+ // See https://docs.microsoft.com/en-us/windows/desktop/fileio/naming-a-file
+ short := elem
+ if i := strings.Index(short, "."); i >= 0 {
+ short = short[:i]
+ }
+ for _, bad := range badWindowsNames {
+ if strings.EqualFold(bad, short) {
+ return fmt.Errorf("%q disallowed as path element component on Windows", short)
+ }
+ }
+
+ if kind == filePath {
+ // don't check for Windows short-names in file names. They're
+ // only an issue for import paths.
+ return nil
+ }
+
+ // Reject path components that look like Windows short-names.
+ // Those usually end in a tilde followed by one or more ASCII digits.
+ if tilde := strings.LastIndexByte(short, '~'); tilde >= 0 && tilde < len(short)-1 {
+ suffix := short[tilde+1:]
+ suffixIsDigits := true
+ for _, r := range suffix {
+ if r < '0' || r > '9' {
+ suffixIsDigits = false
+ break
+ }
+ }
+ if suffixIsDigits {
+ return fmt.Errorf("trailing tilde and digits in path element")
+ }
+ }
+
+ return nil
+}
+
+// CheckFilePath checks that a slash-separated file path is valid.
+// The definition of a valid file path is the same as the definition
+// of a valid import path except that the set of allowed characters is larger:
+// all Unicode letters, ASCII digits, the ASCII space character (U+0020),
+// and the ASCII punctuation characters
+// “!#$%&()+,-.=@[]^_{}~”.
+// (The excluded punctuation characters, " * < > ? ` ' | / \ and :,
+// have special meanings in certain shells or operating systems.)
+//
+// CheckFilePath may be less restrictive in the future, but see the
+// top-level package documentation for additional information about
+// subtleties of Unicode.
+func CheckFilePath(path string) error {
+ if err := checkPath(path, filePath); err != nil {
+ return &InvalidPathError{Kind: "file", Path: path, Err: err}
+ }
+ return nil
+}
+
+// badWindowsNames are the reserved file path elements on Windows.
+// See https://docs.microsoft.com/en-us/windows/desktop/fileio/naming-a-file
+var badWindowsNames = []string{
+ "CON",
+ "PRN",
+ "AUX",
+ "NUL",
+ "COM1",
+ "COM2",
+ "COM3",
+ "COM4",
+ "COM5",
+ "COM6",
+ "COM7",
+ "COM8",
+ "COM9",
+ "LPT1",
+ "LPT2",
+ "LPT3",
+ "LPT4",
+ "LPT5",
+ "LPT6",
+ "LPT7",
+ "LPT8",
+ "LPT9",
+}
+
+// SplitPathVersion returns prefix and major version such that prefix+pathMajor == path
+// and pathMajor is either empty or "/vN" for N >= 2.
+// As a special case, gopkg.in paths are recognized directly;
+// they require ".vN" instead of "/vN", and for all N, not just N >= 2.
+// SplitPathVersion returns with ok = false when presented with
+// a path whose last path element does not satisfy the constraints
+// applied by CheckPath, such as "example.com/pkg/v1" or "example.com/pkg/v1.2".
+func SplitPathVersion(path string) (prefix, pathMajor string, ok bool) {
+ if strings.HasPrefix(path, "gopkg.in/") {
+ return splitGopkgIn(path)
+ }
+
+ i := len(path)
+ dot := false
+ for i > 0 && ('0' <= path[i-1] && path[i-1] <= '9' || path[i-1] == '.') {
+ if path[i-1] == '.' {
+ dot = true
+ }
+ i--
+ }
+ if i <= 1 || i == len(path) || path[i-1] != 'v' || path[i-2] != '/' {
+ return path, "", true
+ }
+ prefix, pathMajor = path[:i-2], path[i-2:]
+ if dot || len(pathMajor) <= 2 || pathMajor[2] == '0' || pathMajor == "/v1" {
+ return path, "", false
+ }
+ return prefix, pathMajor, true
+}
+
+// splitGopkgIn is like SplitPathVersion but only for gopkg.in paths.
+func splitGopkgIn(path string) (prefix, pathMajor string, ok bool) {
+ if !strings.HasPrefix(path, "gopkg.in/") {
+ return path, "", false
+ }
+ i := len(path)
+ if strings.HasSuffix(path, "-unstable") {
+ i -= len("-unstable")
+ }
+ for i > 0 && ('0' <= path[i-1] && path[i-1] <= '9') {
+ i--
+ }
+ if i <= 1 || path[i-1] != 'v' || path[i-2] != '.' {
+ // All gopkg.in paths must end in vN for some N.
+ return path, "", false
+ }
+ prefix, pathMajor = path[:i-2], path[i-2:]
+ if len(pathMajor) <= 2 || pathMajor[2] == '0' && pathMajor != ".v0" {
+ return path, "", false
+ }
+ return prefix, pathMajor, true
+}
+
+// MatchPathMajor reports whether the semantic version v
+// matches the path major version pathMajor.
+//
+// MatchPathMajor returns true if and only if CheckPathMajor returns nil.
+func MatchPathMajor(v, pathMajor string) bool {
+ return CheckPathMajor(v, pathMajor) == nil
+}
+
+// CheckPathMajor returns a non-nil error if the semantic version v
+// does not match the path major version pathMajor.
+func CheckPathMajor(v, pathMajor string) error {
+ // TODO(jayconrod): return errors or panic for invalid inputs. This function
+ // (and others) was covered by integration tests for cmd/go, and surrounding
+ // code protected against invalid inputs like non-canonical versions.
+ if strings.HasPrefix(pathMajor, ".v") && strings.HasSuffix(pathMajor, "-unstable") {
+ pathMajor = strings.TrimSuffix(pathMajor, "-unstable")
+ }
+ if strings.HasPrefix(v, "v0.0.0-") && pathMajor == ".v1" {
+ // Allow an old bug in pseudo-versions that generated a v0.0.0- pseudo-version for gopkg.in ".v1" paths.
+ // For example, gopkg.in/yaml.v2@v2.2.1's go.mod requires gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405.
+ return nil
+ }
+ m := semver.Major(v)
+ if pathMajor == "" {
+ if m == "v0" || m == "v1" || semver.Build(v) == "+incompatible" {
+ return nil
+ }
+ pathMajor = "v0 or v1"
+ } else if pathMajor[0] == '/' || pathMajor[0] == '.' {
+ if m == pathMajor[1:] {
+ return nil
+ }
+ pathMajor = pathMajor[1:]
+ }
+ return &InvalidVersionError{
+ Version: v,
+ Err: fmt.Errorf("should be %s, not %s", pathMajor, semver.Major(v)),
+ }
+}
+
+// PathMajorPrefix returns the major-version tag prefix implied by pathMajor.
+// An empty PathMajorPrefix allows either v0 or v1.
+//
+// Note that MatchPathMajor may accept some versions that do not actually begin
+// with this prefix: namely, it accepts a 'v0.0.0-' prefix for a '.v1'
+// pathMajor, even though that pathMajor implies 'v1' tagging.
+func PathMajorPrefix(pathMajor string) string {
+ if pathMajor == "" {
+ return ""
+ }
+ if pathMajor[0] != '/' && pathMajor[0] != '.' {
+ panic("pathMajor suffix " + pathMajor + " passed to PathMajorPrefix lacks separator")
+ }
+ if strings.HasPrefix(pathMajor, ".v") && strings.HasSuffix(pathMajor, "-unstable") {
+ pathMajor = strings.TrimSuffix(pathMajor, "-unstable")
+ }
+ m := pathMajor[1:]
+ if m != semver.Major(m) {
+ panic("pathMajor suffix " + pathMajor + "passed to PathMajorPrefix is not a valid major version")
+ }
+ return m
+}
+
+// CanonicalVersion returns the canonical form of the version string v.
+// It is the same as semver.Canonical(v) except that it preserves the special build suffix "+incompatible".
+func CanonicalVersion(v string) string {
+ cv := semver.Canonical(v)
+ if semver.Build(v) == "+incompatible" {
+ cv += "+incompatible"
+ }
+ return cv
+}
+
+// Sort sorts the list by Path, breaking ties by comparing Version fields.
+// The Version fields are interpreted as semantic versions (using semver.Compare)
+// optionally followed by a tie-breaking suffix introduced by a slash character,
+// like in "v0.0.1/go.mod".
+func Sort(list []Version) {
+ sort.Slice(list, func(i, j int) bool {
+ mi := list[i]
+ mj := list[j]
+ if mi.Path != mj.Path {
+ return mi.Path < mj.Path
+ }
+ // To help go.sum formatting, allow version/file.
+ // Compare semver prefix by semver rules,
+ // file by string order.
+ vi := mi.Version
+ vj := mj.Version
+ var fi, fj string
+ if k := strings.Index(vi, "/"); k >= 0 {
+ vi, fi = vi[:k], vi[k:]
+ }
+ if k := strings.Index(vj, "/"); k >= 0 {
+ vj, fj = vj[:k], vj[k:]
+ }
+ if vi != vj {
+ return semver.Compare(vi, vj) < 0
+ }
+ return fi < fj
+ })
+}
+
+// EscapePath returns the escaped form of the given module path.
+// It fails if the module path is invalid.
+func EscapePath(path string) (escaped string, err error) {
+ if err := CheckPath(path); err != nil {
+ return "", err
+ }
+
+ return escapeString(path)
+}
+
+// EscapeVersion returns the escaped form of the given module version.
+// Versions are allowed to be in non-semver form but must be valid file names
+// and not contain exclamation marks.
+func EscapeVersion(v string) (escaped string, err error) {
+ if err := checkElem(v, filePath); err != nil || strings.Contains(v, "!") {
+ return "", &InvalidVersionError{
+ Version: v,
+ Err: fmt.Errorf("disallowed version string"),
+ }
+ }
+ return escapeString(v)
+}
+
+func escapeString(s string) (escaped string, err error) {
+ haveUpper := false
+ for _, r := range s {
+ if r == '!' || r >= utf8.RuneSelf {
+ // This should be disallowed by CheckPath, but diagnose anyway.
+ // The correctness of the escaping loop below depends on it.
+ return "", fmt.Errorf("internal error: inconsistency in EscapePath")
+ }
+ if 'A' <= r && r <= 'Z' {
+ haveUpper = true
+ }
+ }
+
+ if !haveUpper {
+ return s, nil
+ }
+
+ var buf []byte
+ for _, r := range s {
+ if 'A' <= r && r <= 'Z' {
+ buf = append(buf, '!', byte(r+'a'-'A'))
+ } else {
+ buf = append(buf, byte(r))
+ }
+ }
+ return string(buf), nil
+}
+
+// UnescapePath returns the module path for the given escaped path.
+// It fails if the escaped path is invalid or describes an invalid path.
+func UnescapePath(escaped string) (path string, err error) {
+ path, ok := unescapeString(escaped)
+ if !ok {
+ return "", fmt.Errorf("invalid escaped module path %q", escaped)
+ }
+ if err := CheckPath(path); err != nil {
+ return "", fmt.Errorf("invalid escaped module path %q: %v", escaped, err)
+ }
+ return path, nil
+}
+
+// UnescapeVersion returns the version string for the given escaped version.
+// It fails if the escaped form is invalid or describes an invalid version.
+// Versions are allowed to be in non-semver form but must be valid file names
+// and not contain exclamation marks.
+func UnescapeVersion(escaped string) (v string, err error) {
+ v, ok := unescapeString(escaped)
+ if !ok {
+ return "", fmt.Errorf("invalid escaped version %q", escaped)
+ }
+ if err := checkElem(v, filePath); err != nil {
+ return "", fmt.Errorf("invalid escaped version %q: %v", v, err)
+ }
+ return v, nil
+}
+
+func unescapeString(escaped string) (string, bool) {
+ var buf []byte
+
+ bang := false
+ for _, r := range escaped {
+ if r >= utf8.RuneSelf {
+ return "", false
+ }
+ if bang {
+ bang = false
+ if r < 'a' || 'z' < r {
+ return "", false
+ }
+ buf = append(buf, byte(r+'A'-'a'))
+ continue
+ }
+ if r == '!' {
+ bang = true
+ continue
+ }
+ if 'A' <= r && r <= 'Z' {
+ return "", false
+ }
+ buf = append(buf, byte(r))
+ }
+ if bang {
+ return "", false
+ }
+ return string(buf), true
+}
+
+// MatchPrefixPatterns reports whether any path prefix of target matches one of
+// the glob patterns (as defined by path.Match) in the comma-separated globs
+// list. This implements the algorithm used when matching a module path to the
+// GOPRIVATE environment variable, as described by 'go help module-private'.
+//
+// It ignores any empty or malformed patterns in the list.
+// Trailing slashes on patterns are ignored.
+func MatchPrefixPatterns(globs, target string) bool {
+ for globs != "" {
+ // Extract next non-empty glob in comma-separated list.
+ var glob string
+ if i := strings.Index(globs, ","); i >= 0 {
+ glob, globs = globs[:i], globs[i+1:]
+ } else {
+ glob, globs = globs, ""
+ }
+ glob = strings.TrimSuffix(glob, "/")
+ if glob == "" {
+ continue
+ }
+
+ // A glob with N+1 path elements (N slashes) needs to be matched
+ // against the first N+1 path elements of target,
+ // which end just before the N+1'th slash.
+ n := strings.Count(glob, "/")
+ prefix := target
+ // Walk target, counting slashes, truncating at the N+1'th slash.
+ for i := 0; i < len(target); i++ {
+ if target[i] == '/' {
+ if n == 0 {
+ prefix = target[:i]
+ break
+ }
+ n--
+ }
+ }
+ if n > 0 {
+ // Not enough prefix elements.
+ continue
+ }
+ matched, _ := path.Match(glob, prefix)
+ if matched {
+ return true
+ }
+ }
+ return false
+}
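
A brief usage sketch of the escaping scheme and path/version checks documented above, using the exported helpers from golang.org/x/mod/module; the example paths are illustrative.

package main

import (
	"fmt"
	"log"

	"golang.org/x/mod/module"
)

func main() {
	// Uppercase letters become "!" followed by the lowercase letter, so the
	// escaped path is safe on case-insensitive file systems.
	esc, err := module.EscapePath("github.com/Azure/azure-sdk-for-go")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(esc) // github.com/!azure/azure-sdk-for-go

	back, err := module.UnescapePath(esc)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(back) // github.com/Azure/azure-sdk-for-go

	// Check verifies that a path and version correspond: a /v2 module path
	// requires a v2.x.y version.
	fmt.Println(module.Check("rsc.io/quote/v2", "v2.0.1")) // <nil>
	fmt.Println(module.Check("rsc.io/quote/v2", "v1.5.2")) // non-nil error
}
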
diff --git a/vendor/golang.org/x/mod/module/pseudo.go b/vendor/golang.org/x/mod/module/pseudo.go
new file mode 100644
index 000000000..f04ad3788
--- /dev/null
+++ b/vendor/golang.org/x/mod/module/pseudo.go
@@ -0,0 +1,250 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Pseudo-versions
+//
+// Code authors are expected to tag the revisions they want users to use,
+// including prereleases. However, not all authors tag versions at all,
+// and not all commits a user might want to try will have tags.
+// A pseudo-version is a version with a special form that allows us to
+// address an untagged commit and order that version with respect to
+// other versions we might encounter.
+//
+// A pseudo-version takes one of the general forms:
+//
+// (1) vX.0.0-yyyymmddhhmmss-abcdef123456
+// (2) vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdef123456
+// (3) vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdef123456+incompatible
+// (4) vX.Y.Z-pre.0.yyyymmddhhmmss-abcdef123456
+// (5) vX.Y.Z-pre.0.yyyymmddhhmmss-abcdef123456+incompatible
+//
+// If there is no recently tagged version with the right major version vX,
+// then form (1) is used, creating a space of pseudo-versions at the bottom
+// of the vX version range, less than any tagged version, including the unlikely v0.0.0.
+//
+// If the most recent tagged version before the target commit is vX.Y.Z or vX.Y.Z+incompatible,
+// then the pseudo-version uses form (2) or (3), making it a prerelease for the next
+// possible semantic version after vX.Y.Z. The leading 0 segment in the prerelease string
+// ensures that the pseudo-version compares less than possible future explicit prereleases
+// like vX.Y.(Z+1)-rc1 or vX.Y.(Z+1)-1.
+//
+// If the most recent tagged version before the target commit is vX.Y.Z-pre or vX.Y.Z-pre+incompatible,
+// then the pseudo-version uses form (4) or (5), making it a slightly later prerelease.
+
+package module
+
+import (
+ "errors"
+ "fmt"
+ "strings"
+ "time"
+
+ "golang.org/x/mod/internal/lazyregexp"
+ "golang.org/x/mod/semver"
+)
+
+var pseudoVersionRE = lazyregexp.New(`^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)\d{14}-[A-Za-z0-9]+(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$`)
+
+const PseudoVersionTimestampFormat = "20060102150405"
+
+// PseudoVersion returns a pseudo-version for the given major version ("v1")
+// preexisting older tagged version ("" or "v1.2.3" or "v1.2.3-pre"), revision time,
+// and revision identifier (usually a 12-byte commit hash prefix).
+func PseudoVersion(major, older string, t time.Time, rev string) string {
+ if major == "" {
+ major = "v0"
+ }
+ segment := fmt.Sprintf("%s-%s", t.UTC().Format(PseudoVersionTimestampFormat), rev)
+ build := semver.Build(older)
+ older = semver.Canonical(older)
+ if older == "" {
+ return major + ".0.0-" + segment // form (1)
+ }
+ if semver.Prerelease(older) != "" {
+ return older + ".0." + segment + build // form (4), (5)
+ }
+
+ // Form (2), (3).
+ // Extract patch from vMAJOR.MINOR.PATCH
+ i := strings.LastIndex(older, ".") + 1
+ v, patch := older[:i], older[i:]
+
+ // Reassemble.
+ return v + incDecimal(patch) + "-0." + segment + build
+}
+
+// ZeroPseudoVersion returns a pseudo-version with a zero timestamp and
+// revision, which may be used as a placeholder.
+func ZeroPseudoVersion(major string) string {
+ return PseudoVersion(major, "", time.Time{}, "000000000000")
+}
+
+// incDecimal returns the decimal string incremented by 1.
+func incDecimal(decimal string) string {
+ // Scan right to left turning 9s to 0s until you find a digit to increment.
+ digits := []byte(decimal)
+ i := len(digits) - 1
+ for ; i >= 0 && digits[i] == '9'; i-- {
+ digits[i] = '0'
+ }
+ if i >= 0 {
+ digits[i]++
+ } else {
+ // digits is all zeros
+ digits[0] = '1'
+ digits = append(digits, '0')
+ }
+ return string(digits)
+}
+
+// decDecimal returns the decimal string decremented by 1, or the empty string
+// if the decimal is all zeroes.
+func decDecimal(decimal string) string {
+ // Scan right to left turning 0s to 9s until you find a digit to decrement.
+ digits := []byte(decimal)
+ i := len(digits) - 1
+ for ; i >= 0 && digits[i] == '0'; i-- {
+ digits[i] = '9'
+ }
+ if i < 0 {
+ // decimal is all zeros
+ return ""
+ }
+ if i == 0 && digits[i] == '1' && len(digits) > 1 {
+ digits = digits[1:]
+ } else {
+ digits[i]--
+ }
+ return string(digits)
+}
+
+// IsPseudoVersion reports whether v is a pseudo-version.
+func IsPseudoVersion(v string) bool {
+ return strings.Count(v, "-") >= 2 && semver.IsValid(v) && pseudoVersionRE.MatchString(v)
+}
+
+// IsZeroPseudoVersion returns whether v is a pseudo-version with a zero base,
+// timestamp, and revision, as returned by ZeroPseudoVersion.
+func IsZeroPseudoVersion(v string) bool {
+ return v == ZeroPseudoVersion(semver.Major(v))
+}
+
+// PseudoVersionTime returns the time stamp of the pseudo-version v.
+// It returns an error if v is not a pseudo-version or if the time stamp
+// embedded in the pseudo-version is not a valid time.
+func PseudoVersionTime(v string) (time.Time, error) {
+ _, timestamp, _, _, err := parsePseudoVersion(v)
+ if err != nil {
+ return time.Time{}, err
+ }
+ t, err := time.Parse("20060102150405", timestamp)
+ if err != nil {
+ return time.Time{}, &InvalidVersionError{
+ Version: v,
+ Pseudo: true,
+ Err: fmt.Errorf("malformed time %q", timestamp),
+ }
+ }
+ return t, nil
+}
+
+// PseudoVersionRev returns the revision identifier of the pseudo-version v.
+// It returns an error if v is not a pseudo-version.
+func PseudoVersionRev(v string) (rev string, err error) {
+ _, _, rev, _, err = parsePseudoVersion(v)
+ return
+}
+
+// PseudoVersionBase returns the canonical parent version, if any, upon which
+// the pseudo-version v is based.
+//
+// If v has no parent version (that is, if it is "vX.0.0-[…]"),
+// PseudoVersionBase returns the empty string and a nil error.
+func PseudoVersionBase(v string) (string, error) {
+ base, _, _, build, err := parsePseudoVersion(v)
+ if err != nil {
+ return "", err
+ }
+
+ switch pre := semver.Prerelease(base); pre {
+ case "":
+ // vX.0.0-yyyymmddhhmmss-abcdef123456 → ""
+ if build != "" {
+ // Pseudo-versions of the form vX.0.0-yyyymmddhhmmss-abcdef123456+incompatible
+ // are nonsensical: the "vX.0.0-" prefix implies that there is no parent tag,
+ // but the "+incompatible" suffix implies that the major version of
+ // the parent tag is not compatible with the module's import path.
+ //
+ // There are a few such entries in the index generated by proxy.golang.org,
+ // but we believe those entries were generated by the proxy itself.
+ return "", &InvalidVersionError{
+ Version: v,
+ Pseudo: true,
+ Err: fmt.Errorf("lacks base version, but has build metadata %q", build),
+ }
+ }
+ return "", nil
+
+ case "-0":
+ // vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdef123456 → vX.Y.Z
+ // vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdef123456+incompatible → vX.Y.Z+incompatible
+ base = strings.TrimSuffix(base, pre)
+ i := strings.LastIndexByte(base, '.')
+ if i < 0 {
+ panic("base from parsePseudoVersion missing patch number: " + base)
+ }
+ patch := decDecimal(base[i+1:])
+ if patch == "" {
+ // vX.0.0-0 is invalid, but has been observed in the wild in the index
+ // generated by requests to proxy.golang.org.
+ //
+ // NOTE(bcmills): I cannot find a historical bug that accounts for
+ // pseudo-versions of this form, nor have I seen such versions in any
+ // actual go.mod files. If we find actual examples of this form and a
+ // reasonable theory of how they came into existence, it seems fine to
+ // treat them as equivalent to vX.0.0 (especially since the invalid
+ // pseudo-versions have lower precedence than the real ones). For now, we
+ // reject them.
+ return "", &InvalidVersionError{
+ Version: v,
+ Pseudo: true,
+ Err: fmt.Errorf("version before %s would have negative patch number", base),
+ }
+ }
+ return base[:i+1] + patch + build, nil
+
+ default:
+ // vX.Y.Z-pre.0.yyyymmddhhmmss-abcdef123456 → vX.Y.Z-pre
+ // vX.Y.Z-pre.0.yyyymmddhhmmss-abcdef123456+incompatible → vX.Y.Z-pre+incompatible
+ if !strings.HasSuffix(base, ".0") {
+ panic(`base from parsePseudoVersion missing ".0" before date: ` + base)
+ }
+ return strings.TrimSuffix(base, ".0") + build, nil
+ }
+}
+
+var errPseudoSyntax = errors.New("syntax error")
+
+func parsePseudoVersion(v string) (base, timestamp, rev, build string, err error) {
+ if !IsPseudoVersion(v) {
+ return "", "", "", "", &InvalidVersionError{
+ Version: v,
+ Pseudo: true,
+ Err: errPseudoSyntax,
+ }
+ }
+ build = semver.Build(v)
+ v = strings.TrimSuffix(v, build)
+ j := strings.LastIndex(v, "-")
+ v, rev = v[:j], v[j+1:]
+ i := strings.LastIndex(v, "-")
+ if j := strings.LastIndex(v, "."); j > i {
+ base = v[:j] // "vX.Y.Z-pre.0" or "vX.Y.(Z+1)-0"
+ timestamp = v[j+1:]
+ } else {
+ base = v[:i] // "vX.0.0"
+ timestamp = v[i+1:]
+ }
+ return base, timestamp, rev, build, nil
+}
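
As a short, hedged sketch of the helpers above, the snippet below builds pseudo-versions of forms (1) and (2); the timestamp and the abcdef123456 revision are illustrative placeholders matching the notation used in the comments.

package main

import (
	"fmt"
	"time"

	"golang.org/x/mod/module"
)

func main() {
	t := time.Date(2021, 4, 5, 12, 0, 0, 0, time.UTC)

	// No tagged release yet: form (1), vX.0.0-yyyymmddhhmmss-rev.
	fmt.Println(module.PseudoVersion("v0", "", t, "abcdef123456"))
	// v0.0.0-20210405120000-abcdef123456

	// Previous tag v1.2.3: form (2), a "-0." prerelease of the next patch.
	v := module.PseudoVersion("v1", "v1.2.3", t, "abcdef123456")
	fmt.Println(v) // v1.2.4-0.20210405120000-abcdef123456

	fmt.Println(module.IsPseudoVersion(v)) // true
	if ts, err := module.PseudoVersionTime(v); err == nil {
		fmt.Println(ts.Equal(t)) // true
	}
}
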
diff --git a/vendor/golang.org/x/mod/semver/semver.go b/vendor/golang.org/x/mod/semver/semver.go
new file mode 100644
index 000000000..a30a22bf2
--- /dev/null
+++ b/vendor/golang.org/x/mod/semver/semver.go
@@ -0,0 +1,401 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package semver implements comparison of semantic version strings.
+// In this package, semantic version strings must begin with a leading "v",
+// as in "v1.0.0".
+//
+// The general form of a semantic version string accepted by this package is
+//
+// vMAJOR[.MINOR[.PATCH[-PRERELEASE][+BUILD]]]
+//
+// where square brackets indicate optional parts of the syntax;
+// MAJOR, MINOR, and PATCH are decimal integers without extra leading zeros;
+// PRERELEASE and BUILD are each a series of non-empty dot-separated identifiers
+// using only alphanumeric characters and hyphens; and
+// all-numeric PRERELEASE identifiers must not have leading zeros.
+//
+// This package follows Semantic Versioning 2.0.0 (see semver.org)
+// with two exceptions. First, it requires the "v" prefix. Second, it recognizes
+// vMAJOR and vMAJOR.MINOR (with no prerelease or build suffixes)
+// as shorthands for vMAJOR.0.0 and vMAJOR.MINOR.0.
+package semver
+
+import "sort"
+
+// parsed returns the parsed form of a semantic version string.
+type parsed struct {
+ major string
+ minor string
+ patch string
+ short string
+ prerelease string
+ build string
+}
+
+// IsValid reports whether v is a valid semantic version string.
+func IsValid(v string) bool {
+ _, ok := parse(v)
+ return ok
+}
+
+// Canonical returns the canonical formatting of the semantic version v.
+// It fills in any missing .MINOR or .PATCH and discards build metadata.
+// Two semantic versions compare equal only if their canonical formattings
+// are identical strings.
+// The canonical invalid semantic version is the empty string.
+func Canonical(v string) string {
+ p, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ if p.build != "" {
+ return v[:len(v)-len(p.build)]
+ }
+ if p.short != "" {
+ return v + p.short
+ }
+ return v
+}
+
+// Major returns the major version prefix of the semantic version v.
+// For example, Major("v2.1.0") == "v2".
+// If v is an invalid semantic version string, Major returns the empty string.
+func Major(v string) string {
+ pv, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ return v[:1+len(pv.major)]
+}
+
+// MajorMinor returns the major.minor version prefix of the semantic version v.
+// For example, MajorMinor("v2.1.0") == "v2.1".
+// If v is an invalid semantic version string, MajorMinor returns the empty string.
+func MajorMinor(v string) string {
+ pv, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ i := 1 + len(pv.major)
+ if j := i + 1 + len(pv.minor); j <= len(v) && v[i] == '.' && v[i+1:j] == pv.minor {
+ return v[:j]
+ }
+ return v[:i] + "." + pv.minor
+}
+
+// Prerelease returns the prerelease suffix of the semantic version v.
+// For example, Prerelease("v2.1.0-pre+meta") == "-pre".
+// If v is an invalid semantic version string, Prerelease returns the empty string.
+func Prerelease(v string) string {
+ pv, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ return pv.prerelease
+}
+
+// Build returns the build suffix of the semantic version v.
+// For example, Build("v2.1.0+meta") == "+meta".
+// If v is an invalid semantic version string, Build returns the empty string.
+func Build(v string) string {
+ pv, ok := parse(v)
+ if !ok {
+ return ""
+ }
+ return pv.build
+}
+
+// Compare returns an integer comparing two versions according to
+// semantic version precedence.
+// The result will be 0 if v == w, -1 if v < w, or +1 if v > w.
+//
+// An invalid semantic version string is considered less than a valid one.
+// All invalid semantic version strings compare equal to each other.
+func Compare(v, w string) int {
+ pv, ok1 := parse(v)
+ pw, ok2 := parse(w)
+ if !ok1 && !ok2 {
+ return 0
+ }
+ if !ok1 {
+ return -1
+ }
+ if !ok2 {
+ return +1
+ }
+ if c := compareInt(pv.major, pw.major); c != 0 {
+ return c
+ }
+ if c := compareInt(pv.minor, pw.minor); c != 0 {
+ return c
+ }
+ if c := compareInt(pv.patch, pw.patch); c != 0 {
+ return c
+ }
+ return comparePrerelease(pv.prerelease, pw.prerelease)
+}
+
+// Max canonicalizes its arguments and then returns the version string
+// that compares greater.
+//
+// Deprecated: use Compare instead. In most cases, returning a canonicalized
+// version is not expected or desired.
+func Max(v, w string) string {
+ v = Canonical(v)
+ w = Canonical(w)
+ if Compare(v, w) > 0 {
+ return v
+ }
+ return w
+}
+
+// ByVersion implements sort.Interface for sorting semantic version strings.
+type ByVersion []string
+
+func (vs ByVersion) Len() int { return len(vs) }
+func (vs ByVersion) Swap(i, j int) { vs[i], vs[j] = vs[j], vs[i] }
+func (vs ByVersion) Less(i, j int) bool {
+ cmp := Compare(vs[i], vs[j])
+ if cmp != 0 {
+ return cmp < 0
+ }
+ return vs[i] < vs[j]
+}
+
+// Sort sorts a list of semantic version strings using ByVersion.
+func Sort(list []string) {
+ sort.Sort(ByVersion(list))
+}
+
+func parse(v string) (p parsed, ok bool) {
+ if v == "" || v[0] != 'v' {
+ return
+ }
+ p.major, v, ok = parseInt(v[1:])
+ if !ok {
+ return
+ }
+ if v == "" {
+ p.minor = "0"
+ p.patch = "0"
+ p.short = ".0.0"
+ return
+ }
+ if v[0] != '.' {
+ ok = false
+ return
+ }
+ p.minor, v, ok = parseInt(v[1:])
+ if !ok {
+ return
+ }
+ if v == "" {
+ p.patch = "0"
+ p.short = ".0"
+ return
+ }
+ if v[0] != '.' {
+ ok = false
+ return
+ }
+ p.patch, v, ok = parseInt(v[1:])
+ if !ok {
+ return
+ }
+ if len(v) > 0 && v[0] == '-' {
+ p.prerelease, v, ok = parsePrerelease(v)
+ if !ok {
+ return
+ }
+ }
+ if len(v) > 0 && v[0] == '+' {
+ p.build, v, ok = parseBuild(v)
+ if !ok {
+ return
+ }
+ }
+ if v != "" {
+ ok = false
+ return
+ }
+ ok = true
+ return
+}
+
+func parseInt(v string) (t, rest string, ok bool) {
+ if v == "" {
+ return
+ }
+ if v[0] < '0' || '9' < v[0] {
+ return
+ }
+ i := 1
+ for i < len(v) && '0' <= v[i] && v[i] <= '9' {
+ i++
+ }
+ if v[0] == '0' && i != 1 {
+ return
+ }
+ return v[:i], v[i:], true
+}
+
+func parsePrerelease(v string) (t, rest string, ok bool) {
+ // "A pre-release version MAY be denoted by appending a hyphen and
+ // a series of dot separated identifiers immediately following the patch version.
+ // Identifiers MUST comprise only ASCII alphanumerics and hyphen [0-9A-Za-z-].
+ // Identifiers MUST NOT be empty. Numeric identifiers MUST NOT include leading zeroes."
+ if v == "" || v[0] != '-' {
+ return
+ }
+ i := 1
+ start := 1
+ for i < len(v) && v[i] != '+' {
+ if !isIdentChar(v[i]) && v[i] != '.' {
+ return
+ }
+ if v[i] == '.' {
+ if start == i || isBadNum(v[start:i]) {
+ return
+ }
+ start = i + 1
+ }
+ i++
+ }
+ if start == i || isBadNum(v[start:i]) {
+ return
+ }
+ return v[:i], v[i:], true
+}
+
+func parseBuild(v string) (t, rest string, ok bool) {
+ if v == "" || v[0] != '+' {
+ return
+ }
+ i := 1
+ start := 1
+ for i < len(v) {
+ if !isIdentChar(v[i]) && v[i] != '.' {
+ return
+ }
+ if v[i] == '.' {
+ if start == i {
+ return
+ }
+ start = i + 1
+ }
+ i++
+ }
+ if start == i {
+ return
+ }
+ return v[:i], v[i:], true
+}
+
+func isIdentChar(c byte) bool {
+ return 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' || c == '-'
+}
+
+func isBadNum(v string) bool {
+ i := 0
+ for i < len(v) && '0' <= v[i] && v[i] <= '9' {
+ i++
+ }
+ return i == len(v) && i > 1 && v[0] == '0'
+}
+
+func isNum(v string) bool {
+ i := 0
+ for i < len(v) && '0' <= v[i] && v[i] <= '9' {
+ i++
+ }
+ return i == len(v)
+}
+
+func compareInt(x, y string) int {
+ if x == y {
+ return 0
+ }
+ if len(x) < len(y) {
+ return -1
+ }
+ if len(x) > len(y) {
+ return +1
+ }
+ if x < y {
+ return -1
+ } else {
+ return +1
+ }
+}
+
+func comparePrerelease(x, y string) int {
+ // "When major, minor, and patch are equal, a pre-release version has
+ // lower precedence than a normal version.
+ // Example: 1.0.0-alpha < 1.0.0.
+ // Precedence for two pre-release versions with the same major, minor,
+ // and patch version MUST be determined by comparing each dot separated
+ // identifier from left to right until a difference is found as follows:
+ // identifiers consisting of only digits are compared numerically and
+ // identifiers with letters or hyphens are compared lexically in ASCII
+ // sort order. Numeric identifiers always have lower precedence than
+ // non-numeric identifiers. A larger set of pre-release fields has a
+ // higher precedence than a smaller set, if all of the preceding
+ // identifiers are equal.
+ // Example: 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-alpha.beta <
+ // 1.0.0-beta < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0."
+ if x == y {
+ return 0
+ }
+ if x == "" {
+ return +1
+ }
+ if y == "" {
+ return -1
+ }
+ for x != "" && y != "" {
+ x = x[1:] // skip - or .
+ y = y[1:] // skip - or .
+ var dx, dy string
+ dx, x = nextIdent(x)
+ dy, y = nextIdent(y)
+ if dx != dy {
+ ix := isNum(dx)
+ iy := isNum(dy)
+ if ix != iy {
+ if ix {
+ return -1
+ } else {
+ return +1
+ }
+ }
+ if ix {
+ if len(dx) < len(dy) {
+ return -1
+ }
+ if len(dx) > len(dy) {
+ return +1
+ }
+ }
+ if dx < dy {
+ return -1
+ } else {
+ return +1
+ }
+ }
+ }
+ if x == "" {
+ return -1
+ } else {
+ return +1
+ }
+}
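The spec text quoted above implies, for instance, the following orderings; a small fragment assuming the same semver import as the earlier sketch:

    fmt.Println(semver.Compare("v1.0.0-alpha", "v1.0.0"))              // -1: a pre-release ranks below the release
    fmt.Println(semver.Compare("v1.0.0-alpha", "v1.0.0-alpha.1"))      // -1: fewer identifiers rank lower when the prefix matches
    fmt.Println(semver.Compare("v1.0.0-alpha.1", "v1.0.0-alpha.beta")) // -1: numeric identifiers rank below alphanumeric ones
    fmt.Println(semver.Compare("v1.0.0-beta.2", "v1.0.0-beta.11"))     // -1: numeric identifiers compare numerically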
+
+func nextIdent(x string) (dx, rest string) {
+ i := 0
+ for i < len(x) && x[i] != '.' {
+ i++
+ }
+ return x[:i], x[i:]
+}
diff --git a/vendor/golang.org/x/tools/cmd/goimports/doc.go b/vendor/golang.org/x/tools/cmd/goimports/doc.go
new file mode 100644
index 000000000..5a5b9005f
--- /dev/null
+++ b/vendor/golang.org/x/tools/cmd/goimports/doc.go
@@ -0,0 +1,47 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+
+Command goimports updates your Go import lines,
+adding missing ones and removing unreferenced ones.
+
+ $ go install golang.org/x/tools/cmd/goimports@latest
+
+In addition to fixing imports, goimports also formats
+your code in the same style as gofmt so it can be used
+as a replacement for your editor's gofmt-on-save hook.
+
+For emacs, make sure you have the latest go-mode.el:
+ https://github.com/dominikh/go-mode.el
+Then in your .emacs file:
+ (setq gofmt-command "goimports")
+ (add-hook 'before-save-hook 'gofmt-before-save)
+
+For vim, set "gofmt_command" to "goimports":
+ https://golang.org/change/39c724dd7f252
+ https://golang.org/wiki/IDEsAndTextEditorPlugins
+ etc
+
+For GoSublime, follow the steps described here:
+ http://michaelwhatcott.com/gosublime-goimports/
+
+For other editors, you probably know what to do.
+
+To exclude directories in your $GOPATH from being scanned for Go
+files, goimports respects a configuration file at
+$GOPATH/src/.goimportsignore which may contain blank lines, comment
+lines (beginning with '#'), or lines naming a directory relative to
+the configuration file to ignore when scanning. No globbing or regex
+patterns are allowed. Use the "-v" verbose flag to verify it's
+working and see what goimports is doing.
+
+File bugs or feature requests at:
+
+ https://golang.org/issues/new?title=x/tools/cmd/goimports:+
+
+Happy hacking!
+
+*/
+package main // import "golang.org/x/tools/cmd/goimports"
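As a concrete, purely hypothetical illustration of the ignore file described above, $GOPATH/src/.goimportsignore may hold blank lines, '#' comments, and directory names relative to that file:

    # Skip these while scanning $GOPATH/src for Go files.
    github.com/example/hugerepo
    gopkg.in/example/generated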
diff --git a/vendor/golang.org/x/tools/cmd/goimports/goimports.go b/vendor/golang.org/x/tools/cmd/goimports/goimports.go
new file mode 100644
index 000000000..b354c9e82
--- /dev/null
+++ b/vendor/golang.org/x/tools/cmd/goimports/goimports.go
@@ -0,0 +1,380 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "flag"
+ "fmt"
+ "go/scanner"
+ exec "golang.org/x/sys/execabs"
+ "io"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+ "runtime"
+ "runtime/pprof"
+ "strings"
+
+ "golang.org/x/tools/internal/gocommand"
+ "golang.org/x/tools/internal/imports"
+)
+
+var (
+ // main operation modes
+ list = flag.Bool("l", false, "list files whose formatting differs from goimports'")
+ write = flag.Bool("w", false, "write result to (source) file instead of stdout")
+ doDiff = flag.Bool("d", false, "display diffs instead of rewriting files")
+ srcdir = flag.String("srcdir", "", "choose imports as if source code is from `dir`. When operating on a single file, dir may instead be the complete file name.")
+
+ verbose bool // verbose logging
+
+ cpuProfile = flag.String("cpuprofile", "", "CPU profile output")
+ memProfile = flag.String("memprofile", "", "memory profile output")
+ memProfileRate = flag.Int("memrate", 0, "if > 0, sets runtime.MemProfileRate")
+
+ options = &imports.Options{
+ TabWidth: 8,
+ TabIndent: true,
+ Comments: true,
+ Fragment: true,
+ Env: &imports.ProcessEnv{
+ GocmdRunner: &gocommand.Runner{},
+ },
+ }
+ exitCode = 0
+)
+
+func init() {
+ flag.BoolVar(&options.AllErrors, "e", false, "report all errors (not just the first 10 on different lines)")
+ flag.StringVar(&options.LocalPrefix, "local", "", "put imports beginning with this string after 3rd-party packages; comma-separated list")
+ flag.BoolVar(&options.FormatOnly, "format-only", false, "if true, don't fix imports and only format. In this mode, goimports is effectively gofmt, with the addition that imports are grouped into sections.")
+}
+
+func report(err error) {
+ scanner.PrintError(os.Stderr, err)
+ exitCode = 2
+}
+
+func usage() {
+ fmt.Fprintf(os.Stderr, "usage: goimports [flags] [path ...]\n")
+ flag.PrintDefaults()
+ os.Exit(2)
+}
+
+func isGoFile(f os.FileInfo) bool {
+ // ignore non-Go files
+ name := f.Name()
+ return !f.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".go")
+}
+
+// argumentType is which mode goimports was invoked as.
+type argumentType int
+
+const (
+ // fromStdin means the user is piping their source into goimports.
+ fromStdin argumentType = iota
+
+ // singleArg is the common case from editors, when goimports is run on
+ // a single file.
+ singleArg
+
+ // multipleArg is when the user ran "goimports file1.go file2.go"
+ // or ran goimports on a directory tree.
+ multipleArg
+)
+
+func processFile(filename string, in io.Reader, out io.Writer, argType argumentType) error {
+ opt := options
+ if argType == fromStdin {
+ nopt := *options
+ nopt.Fragment = true
+ opt = &nopt
+ }
+
+ if in == nil {
+ f, err := os.Open(filename)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+ in = f
+ }
+
+ src, err := ioutil.ReadAll(in)
+ if err != nil {
+ return err
+ }
+
+ target := filename
+ if *srcdir != "" {
+ // Determine whether the provided -srcdir is a directory or file
+ // and then use it to override the target.
+ //
+ // See https://github.com/dominikh/go-mode.el/issues/146
+ if isFile(*srcdir) {
+ if argType == multipleArg {
+ return errors.New("-srcdir value can't be a file when passing multiple arguments or when walking directories")
+ }
+ target = *srcdir
+ } else if argType == singleArg && strings.HasSuffix(*srcdir, ".go") && !isDir(*srcdir) {
+ // For a file which doesn't exist on disk yet, but might shortly.
+ // e.g. user in editor opens $DIR/newfile.go and newfile.go doesn't yet exist on disk.
+ // The goimports on-save hook writes the buffer to a temp file
+ // first and runs goimports before the actual save to newfile.go.
+ // The editor's buffer is named "newfile.go" so that is passed to goimports as:
+ // goimports -srcdir=/gopath/src/pkg/newfile.go /tmp/gofmtXXXXXXXX.go
+ // and then the editor reloads the result from the tmp file and writes
+ // it to newfile.go.
+ target = *srcdir
+ } else {
+ // Pretend that file is from *srcdir in order to decide
+ // visible imports correctly.
+ target = filepath.Join(*srcdir, filepath.Base(filename))
+ }
+ }
+
+ res, err := imports.Process(target, src, opt)
+ if err != nil {
+ return err
+ }
+
+ if !bytes.Equal(src, res) {
+ // formatting has changed
+ if *list {
+ fmt.Fprintln(out, filename)
+ }
+ if *write {
+ if argType == fromStdin {
+ // filename is "<standard input>"
+ return errors.New("can't use -w on stdin")
+ }
+ // On Windows, we need to re-set the permissions from the file. See golang/go#38225.
+ var perms os.FileMode
+ if fi, err := os.Stat(filename); err == nil {
+ perms = fi.Mode() & os.ModePerm
+ }
+ err = ioutil.WriteFile(filename, res, perms)
+ if err != nil {
+ return err
+ }
+ }
+ if *doDiff {
+ if argType == fromStdin {
+ filename = "stdin.go" // because <standard input>.orig looks silly
+ }
+ data, err := diff(src, res, filename)
+ if err != nil {
+ return fmt.Errorf("computing diff: %s", err)
+ }
+ fmt.Printf("diff -u %s %s\n", filepath.ToSlash(filename+".orig"), filepath.ToSlash(filename))
+ out.Write(data)
+ }
+ }
+
+ if !*list && !*write && !*doDiff {
+ _, err = out.Write(res)
+ }
+
+ return err
+}
+
+func visitFile(path string, f os.FileInfo, err error) error {
+ if err == nil && isGoFile(f) {
+ err = processFile(path, nil, os.Stdout, multipleArg)
+ }
+ if err != nil {
+ report(err)
+ }
+ return nil
+}
+
+func walkDir(path string) {
+ filepath.Walk(path, visitFile)
+}
+
+func main() {
+ runtime.GOMAXPROCS(runtime.NumCPU())
+
+ // call gofmtMain in a separate function
+ // so that it can use defer and have them
+ // run before the exit.
+ gofmtMain()
+ os.Exit(exitCode)
+}
+
+// parseFlags parses command line flags and returns the paths to process.
+// It's a var so that custom implementations can replace it in other files.
+var parseFlags = func() []string {
+ flag.BoolVar(&verbose, "v", false, "verbose logging")
+
+ flag.Parse()
+ return flag.Args()
+}
+
+func bufferedFileWriter(dest string) (w io.Writer, close func()) {
+ f, err := os.Create(dest)
+ if err != nil {
+ log.Fatal(err)
+ }
+ bw := bufio.NewWriter(f)
+ return bw, func() {
+ if err := bw.Flush(); err != nil {
+ log.Fatalf("error flushing %v: %v", dest, err)
+ }
+ if err := f.Close(); err != nil {
+ log.Fatal(err)
+ }
+ }
+}
+
+func gofmtMain() {
+ flag.Usage = usage
+ paths := parseFlags()
+
+ if *cpuProfile != "" {
+ bw, flush := bufferedFileWriter(*cpuProfile)
+ pprof.StartCPUProfile(bw)
+ defer flush()
+ defer pprof.StopCPUProfile()
+ }
+ // doTrace is a conditionally compiled wrapper around runtime/trace. It is
+ // used to allow goimports to compile under gccgo, which does not support
+ // runtime/trace. See https://golang.org/issue/15544.
+ defer doTrace()()
+ if *memProfileRate > 0 {
+ runtime.MemProfileRate = *memProfileRate
+ bw, flush := bufferedFileWriter(*memProfile)
+ defer func() {
+ runtime.GC() // materialize all statistics
+ if err := pprof.WriteHeapProfile(bw); err != nil {
+ log.Fatal(err)
+ }
+ flush()
+ }()
+ }
+
+ if verbose {
+ log.SetFlags(log.LstdFlags | log.Lmicroseconds)
+ options.Env.Logf = log.Printf
+ }
+ if options.TabWidth < 0 {
+ fmt.Fprintf(os.Stderr, "negative tabwidth %d\n", options.TabWidth)
+ exitCode = 2
+ return
+ }
+
+ if len(paths) == 0 {
+ if err := processFile("<standard input>", os.Stdin, os.Stdout, fromStdin); err != nil {
+ report(err)
+ }
+ return
+ }
+
+ argType := singleArg
+ if len(paths) > 1 {
+ argType = multipleArg
+ }
+
+ for _, path := range paths {
+ switch dir, err := os.Stat(path); {
+ case err != nil:
+ report(err)
+ case dir.IsDir():
+ walkDir(path)
+ default:
+ if err := processFile(path, nil, os.Stdout, argType); err != nil {
+ report(err)
+ }
+ }
+ }
+}
+
+func writeTempFile(dir, prefix string, data []byte) (string, error) {
+ file, err := ioutil.TempFile(dir, prefix)
+ if err != nil {
+ return "", err
+ }
+ _, err = file.Write(data)
+ if err1 := file.Close(); err == nil {
+ err = err1
+ }
+ if err != nil {
+ os.Remove(file.Name())
+ return "", err
+ }
+ return file.Name(), nil
+}
+
+func diff(b1, b2 []byte, filename string) (data []byte, err error) {
+ f1, err := writeTempFile("", "gofmt", b1)
+ if err != nil {
+ return
+ }
+ defer os.Remove(f1)
+
+ f2, err := writeTempFile("", "gofmt", b2)
+ if err != nil {
+ return
+ }
+ defer os.Remove(f2)
+
+ cmd := "diff"
+ if runtime.GOOS == "plan9" {
+ cmd = "/bin/ape/diff"
+ }
+
+ data, err = exec.Command(cmd, "-u", f1, f2).CombinedOutput()
+ if len(data) > 0 {
+ // diff exits with a non-zero status when the files don't match.
+ // Ignore that failure as long as we get output.
+ return replaceTempFilename(data, filename)
+ }
+ return
+}
+
+// replaceTempFilename replaces the temporary filenames in diff with the actual filename.
+//
+// --- /tmp/gofmt316145376 2017-02-03 19:13:00.280468375 -0500
+// +++ /tmp/gofmt617882815 2017-02-03 19:13:00.280468375 -0500
+// ...
+// ->
+// --- path/to/file.go.orig 2017-02-03 19:13:00.280468375 -0500
+// +++ path/to/file.go 2017-02-03 19:13:00.280468375 -0500
+// ...
+func replaceTempFilename(diff []byte, filename string) ([]byte, error) {
+ bs := bytes.SplitN(diff, []byte{'\n'}, 3)
+ if len(bs) < 3 {
+ return nil, fmt.Errorf("got unexpected diff for %s", filename)
+ }
+ // Preserve timestamps.
+ var t0, t1 []byte
+ if i := bytes.LastIndexByte(bs[0], '\t'); i != -1 {
+ t0 = bs[0][i:]
+ }
+ if i := bytes.LastIndexByte(bs[1], '\t'); i != -1 {
+ t1 = bs[1][i:]
+ }
+ // Always print filepath with slash separator.
+ f := filepath.ToSlash(filename)
+ bs[0] = []byte(fmt.Sprintf("--- %s%s", f+".orig", t0))
+ bs[1] = []byte(fmt.Sprintf("+++ %s%s", f, t1))
+ return bytes.Join(bs, []byte{'\n'}), nil
+}
+
+// isFile reports whether name is a file.
+func isFile(name string) bool {
+ fi, err := os.Stat(name)
+ return err == nil && fi.Mode().IsRegular()
+}
+
+// isDir reports whether name is a directory.
+func isDir(name string) bool {
+ fi, err := os.Stat(name)
+ return err == nil && fi.IsDir()
+}
diff --git a/vendor/golang.org/x/tools/cmd/goimports/goimports_gc.go b/vendor/golang.org/x/tools/cmd/goimports/goimports_gc.go
new file mode 100644
index 000000000..190a56535
--- /dev/null
+++ b/vendor/golang.org/x/tools/cmd/goimports/goimports_gc.go
@@ -0,0 +1,27 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build gc
+// +build gc
+
+package main
+
+import (
+ "flag"
+ "runtime/trace"
+)
+
+var traceProfile = flag.String("trace", "", "trace profile output")
+
+func doTrace() func() {
+ if *traceProfile != "" {
+ bw, flush := bufferedFileWriter(*traceProfile)
+ trace.Start(bw)
+ return func() {
+ flush()
+ trace.Stop()
+ }
+ }
+ return func() {}
+}
diff --git a/vendor/golang.org/x/tools/cmd/goimports/goimports_not_gc.go b/vendor/golang.org/x/tools/cmd/goimports/goimports_not_gc.go
new file mode 100644
index 000000000..344fe7576
--- /dev/null
+++ b/vendor/golang.org/x/tools/cmd/goimports/goimports_not_gc.go
@@ -0,0 +1,12 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !gc
+// +build !gc
+
+package main
+
+func doTrace() func() {
+ return func() {}
+}
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
new file mode 100644
index 000000000..a5c6d6d4f
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
@@ -0,0 +1,639 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+// This file defines utilities for working with source positions.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "sort"
+
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// PathEnclosingInterval returns the node that encloses the source
+// interval [start, end), and all its ancestors up to the AST root.
+//
+// The definition of "enclosing" used by this function considers
+// additional whitespace abutting a node to be enclosed by it.
+// In this example:
+//
+// z := x + y // add them
+// <-A->
+// <----B----->
+//
+// the ast.BinaryExpr(+) node is considered to enclose interval B
+// even though its [Pos()..End()) is actually only interval A.
+// This behaviour makes user interfaces more tolerant of imperfect
+// input.
+//
+// This function treats tokens as nodes, though they are not included
+// in the result. e.g. PathEnclosingInterval("+") returns the
+// enclosing ast.BinaryExpr("x + y").
+//
+// If start==end, the 1-char interval following start is used instead.
+//
+// The 'exact' result is true if the interval contains only path[0]
+// and perhaps some adjacent whitespace. It is false if the interval
+// overlaps multiple children of path[0], or if it contains only
+// interior whitespace of path[0].
+// In this example:
+//
+// z := x + y // add them
+// <--C--> <---E-->
+// ^
+// D
+//
+// intervals C, D and E are inexact. C is contained by the
+// z-assignment statement, because it spans three of its children (:=,
+// x, +). So too is the 1-char interval D, because it contains only
+// interior whitespace of the assignment. E is considered interior
+// whitespace of the BlockStmt containing the assignment.
+//
+// Precondition: [start, end) both lie within the same file as root.
+// TODO(adonovan): return (nil, false) in this case and remove precond.
+// Requires FileSet; see loader.tokenFileContainsPos.
+//
+// Postcondition: path is never nil; it always contains at least 'root'.
+//
+func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
+ // fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
+
+ // Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
+ var visit func(node ast.Node) bool
+ visit = func(node ast.Node) bool {
+ path = append(path, node)
+
+ nodePos := node.Pos()
+ nodeEnd := node.End()
+
+ // fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging
+
+ // Intersect [start, end) with interval of node.
+ if start < nodePos {
+ start = nodePos
+ }
+ if end > nodeEnd {
+ end = nodeEnd
+ }
+
+ // Find sole child that contains [start, end).
+ children := childrenOf(node)
+ l := len(children)
+ for i, child := range children {
+ // [childPos, childEnd) is unaugmented interval of child.
+ childPos := child.Pos()
+ childEnd := child.End()
+
+ // [augPos, augEnd) is whitespace-augmented interval of child.
+ augPos := childPos
+ augEnd := childEnd
+ if i > 0 {
+ augPos = children[i-1].End() // start of preceding whitespace
+ }
+ if i < l-1 {
+ nextChildPos := children[i+1].Pos()
+ // Does [start, end) lie between child and next child?
+ if start >= augEnd && end <= nextChildPos {
+ return false // inexact match
+ }
+ augEnd = nextChildPos // end of following whitespace
+ }
+
+ // fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
+ // i, augPos, augEnd, start, end) // debugging
+
+ // Does augmented child strictly contain [start, end)?
+ if augPos <= start && end <= augEnd {
+ _, isToken := child.(tokenNode)
+ return isToken || visit(child)
+ }
+
+ // Does [start, end) overlap multiple children?
+ // i.e. left-augmented child contains start
+ // but LR-augmented child does not contain end.
+ if start < childEnd && end > augEnd {
+ break
+ }
+ }
+
+ // No single child contained [start, end),
+ // so node is the result. Is it exact?
+
+ // (It's tempting to put this condition before the
+ // child loop, but it gives the wrong result in the
+ // case where a node (e.g. ExprStmt) and its sole
+ // child have equal intervals.)
+ if start == nodePos && end == nodeEnd {
+ return true // exact match
+ }
+
+ return false // inexact: overlaps multiple children
+ }
+
+ if start > end {
+ start, end = end, start
+ }
+
+ if start < root.End() && end > root.Pos() {
+ if start == end {
+ end = start + 1 // empty interval => interval of size 1
+ }
+ exact = visit(root)
+
+ // Reverse the path:
+ for i, l := 0, len(path); i < l/2; i++ {
+ path[i], path[l-1-i] = path[l-1-i], path[i]
+ }
+ } else {
+ // Selection lies within whitespace preceding the
+ // first (or following the last) declaration in the file.
+ // The result nonetheless always includes the ast.File.
+ path = append(path, root)
+ }
+
+ return
+}
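A self-contained sketch (not part of the vendored file) of typical client use: parse a file, turn the byte offset of a token into a token.Pos, and walk the returned path from the innermost node out to the *ast.File:

    package main

    import (
        "fmt"
        "go/parser"
        "go/token"
        "strings"

        "golang.org/x/tools/go/ast/astutil"
    )

    func main() {
        src := "package p\n\nfunc add(x, y int) int { return x + y }\n"
        fset := token.NewFileSet()
        f, err := parser.ParseFile(fset, "p.go", src, 0)
        if err != nil {
            panic(err)
        }
        // Convert the byte offset of the "+" token into a token.Pos in this file.
        pos := fset.File(f.Pos()).Pos(strings.Index(src, "+"))
        path, exact := astutil.PathEnclosingInterval(f, pos, pos+1)
        for _, n := range path {
            fmt.Printf("%T\n", n) // *ast.BinaryExpr, *ast.ReturnStmt, ..., *ast.File (innermost first)
        }
        fmt.Println("exact:", exact)
    }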
+
+// tokenNode is a dummy implementation of ast.Node for a single token.
+// Such nodes are used transiently by PathEnclosingInterval but never
+// escape this package.
+//
+type tokenNode struct {
+ pos token.Pos
+ end token.Pos
+}
+
+func (n tokenNode) Pos() token.Pos {
+ return n.pos
+}
+
+func (n tokenNode) End() token.Pos {
+ return n.end
+}
+
+func tok(pos token.Pos, len int) ast.Node {
+ return tokenNode{pos, pos + token.Pos(len)}
+}
+
+// childrenOf returns the direct non-nil children of ast.Node n.
+// It may include fake ast.Node implementations for bare tokens.
+// It is not safe to call (e.g.) ast.Walk on such nodes.
+//
+func childrenOf(n ast.Node) []ast.Node {
+ var children []ast.Node
+
+ // First add nodes for all true subtrees.
+ ast.Inspect(n, func(node ast.Node) bool {
+ if node == n { // push n
+ return true // recur
+ }
+ if node != nil { // push child
+ children = append(children, node)
+ }
+ return false // no recursion
+ })
+
+ // Then add fake Nodes for bare tokens.
+ switch n := n.(type) {
+ case *ast.ArrayType:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Elt.End(), len("]")))
+
+ case *ast.AssignStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.BasicLit:
+ children = append(children,
+ tok(n.ValuePos, len(n.Value)))
+
+ case *ast.BinaryExpr:
+ children = append(children, tok(n.OpPos, len(n.Op.String())))
+
+ case *ast.BlockStmt:
+ children = append(children,
+ tok(n.Lbrace, len("{")),
+ tok(n.Rbrace, len("}")))
+
+ case *ast.BranchStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.CallExpr:
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+ if n.Ellipsis != 0 {
+ children = append(children, tok(n.Ellipsis, len("...")))
+ }
+
+ case *ast.CaseClause:
+ if n.List == nil {
+ children = append(children,
+ tok(n.Case, len("default")))
+ } else {
+ children = append(children,
+ tok(n.Case, len("case")))
+ }
+ children = append(children, tok(n.Colon, len(":")))
+
+ case *ast.ChanType:
+ switch n.Dir {
+ case ast.RECV:
+ children = append(children, tok(n.Begin, len("<-chan")))
+ case ast.SEND:
+ children = append(children, tok(n.Begin, len("chan<-")))
+ case ast.RECV | ast.SEND:
+ children = append(children, tok(n.Begin, len("chan")))
+ }
+
+ case *ast.CommClause:
+ if n.Comm == nil {
+ children = append(children,
+ tok(n.Case, len("default")))
+ } else {
+ children = append(children,
+ tok(n.Case, len("case")))
+ }
+ children = append(children, tok(n.Colon, len(":")))
+
+ case *ast.Comment:
+ // nop
+
+ case *ast.CommentGroup:
+ // nop
+
+ case *ast.CompositeLit:
+ children = append(children,
+ tok(n.Lbrace, len("{")),
+ tok(n.Rbrace, len("}")))
+
+ case *ast.DeclStmt:
+ // nop
+
+ case *ast.DeferStmt:
+ children = append(children,
+ tok(n.Defer, len("defer")))
+
+ case *ast.Ellipsis:
+ children = append(children,
+ tok(n.Ellipsis, len("...")))
+
+ case *ast.EmptyStmt:
+ // nop
+
+ case *ast.ExprStmt:
+ // nop
+
+ case *ast.Field:
+ // TODO(adonovan): Field.{Doc,Comment,Tag}?
+
+ case *ast.FieldList:
+ children = append(children,
+ tok(n.Opening, len("(")), // or len("[")
+ tok(n.Closing, len(")"))) // or len("]")
+
+ case *ast.File:
+ // TODO test: Doc
+ children = append(children,
+ tok(n.Package, len("package")))
+
+ case *ast.ForStmt:
+ children = append(children,
+ tok(n.For, len("for")))
+
+ case *ast.FuncDecl:
+ // TODO(adonovan): FuncDecl.Comment?
+
+ // Uniquely, FuncDecl breaks the invariant that
+ // preorder traversal yields tokens in lexical order:
+ // in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
+ //
+ // As a workaround, we inline the case for FuncType
+ // here and order things correctly.
+ //
+ children = nil // discard ast.Walk(FuncDecl) info subtrees
+ children = append(children, tok(n.Type.Func, len("func")))
+ if n.Recv != nil {
+ children = append(children, n.Recv)
+ }
+ children = append(children, n.Name)
+ if tparams := typeparams.ForFuncType(n.Type); tparams != nil {
+ children = append(children, tparams)
+ }
+ if n.Type.Params != nil {
+ children = append(children, n.Type.Params)
+ }
+ if n.Type.Results != nil {
+ children = append(children, n.Type.Results)
+ }
+ if n.Body != nil {
+ children = append(children, n.Body)
+ }
+
+ case *ast.FuncLit:
+ // nop
+
+ case *ast.FuncType:
+ if n.Func != 0 {
+ children = append(children,
+ tok(n.Func, len("func")))
+ }
+
+ case *ast.GenDecl:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+ if n.Lparen != 0 {
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+ }
+
+ case *ast.GoStmt:
+ children = append(children,
+ tok(n.Go, len("go")))
+
+ case *ast.Ident:
+ children = append(children,
+ tok(n.NamePos, len(n.Name)))
+
+ case *ast.IfStmt:
+ children = append(children,
+ tok(n.If, len("if")))
+
+ case *ast.ImportSpec:
+ // TODO(adonovan): ImportSpec.{Doc,EndPos}?
+
+ case *ast.IncDecStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.IndexExpr:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Rbrack, len("]")))
+
+ case *typeparams.IndexListExpr:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Rbrack, len("]")))
+
+ case *ast.InterfaceType:
+ children = append(children,
+ tok(n.Interface, len("interface")))
+
+ case *ast.KeyValueExpr:
+ children = append(children,
+ tok(n.Colon, len(":")))
+
+ case *ast.LabeledStmt:
+ children = append(children,
+ tok(n.Colon, len(":")))
+
+ case *ast.MapType:
+ children = append(children,
+ tok(n.Map, len("map")))
+
+ case *ast.ParenExpr:
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+
+ case *ast.RangeStmt:
+ children = append(children,
+ tok(n.For, len("for")),
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.ReturnStmt:
+ children = append(children,
+ tok(n.Return, len("return")))
+
+ case *ast.SelectStmt:
+ children = append(children,
+ tok(n.Select, len("select")))
+
+ case *ast.SelectorExpr:
+ // nop
+
+ case *ast.SendStmt:
+ children = append(children,
+ tok(n.Arrow, len("<-")))
+
+ case *ast.SliceExpr:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Rbrack, len("]")))
+
+ case *ast.StarExpr:
+ children = append(children, tok(n.Star, len("*")))
+
+ case *ast.StructType:
+ children = append(children, tok(n.Struct, len("struct")))
+
+ case *ast.SwitchStmt:
+ children = append(children, tok(n.Switch, len("switch")))
+
+ case *ast.TypeAssertExpr:
+ children = append(children,
+ tok(n.Lparen-1, len(".")),
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+
+ case *ast.TypeSpec:
+ // TODO(adonovan): TypeSpec.{Doc,Comment}?
+
+ case *ast.TypeSwitchStmt:
+ children = append(children, tok(n.Switch, len("switch")))
+
+ case *ast.UnaryExpr:
+ children = append(children, tok(n.OpPos, len(n.Op.String())))
+
+ case *ast.ValueSpec:
+ // TODO(adonovan): ValueSpec.{Doc,Comment}?
+
+ case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
+ // nop
+ }
+
+ // TODO(adonovan): opt: merge the logic of ast.Inspect() into
+ // the switch above so we can make interleaved callbacks for
+ // both Nodes and Tokens in the right order and avoid the need
+ // to sort.
+ sort.Sort(byPos(children))
+
+ return children
+}
+
+type byPos []ast.Node
+
+func (sl byPos) Len() int {
+ return len(sl)
+}
+func (sl byPos) Less(i, j int) bool {
+ return sl[i].Pos() < sl[j].Pos()
+}
+func (sl byPos) Swap(i, j int) {
+ sl[i], sl[j] = sl[j], sl[i]
+}
+
+// NodeDescription returns a description of the concrete type of n suitable
+// for a user interface.
+//
+// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
+// StarExpr) we could be much more specific given the path to the AST
+// root. Perhaps we should do that.
+//
+func NodeDescription(n ast.Node) string {
+ switch n := n.(type) {
+ case *ast.ArrayType:
+ return "array type"
+ case *ast.AssignStmt:
+ return "assignment"
+ case *ast.BadDecl:
+ return "bad declaration"
+ case *ast.BadExpr:
+ return "bad expression"
+ case *ast.BadStmt:
+ return "bad statement"
+ case *ast.BasicLit:
+ return "basic literal"
+ case *ast.BinaryExpr:
+ return fmt.Sprintf("binary %s operation", n.Op)
+ case *ast.BlockStmt:
+ return "block"
+ case *ast.BranchStmt:
+ switch n.Tok {
+ case token.BREAK:
+ return "break statement"
+ case token.CONTINUE:
+ return "continue statement"
+ case token.GOTO:
+ return "goto statement"
+ case token.FALLTHROUGH:
+ return "fall-through statement"
+ }
+ case *ast.CallExpr:
+ if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
+ return "function call (or conversion)"
+ }
+ return "function call"
+ case *ast.CaseClause:
+ return "case clause"
+ case *ast.ChanType:
+ return "channel type"
+ case *ast.CommClause:
+ return "communication clause"
+ case *ast.Comment:
+ return "comment"
+ case *ast.CommentGroup:
+ return "comment group"
+ case *ast.CompositeLit:
+ return "composite literal"
+ case *ast.DeclStmt:
+ return NodeDescription(n.Decl) + " statement"
+ case *ast.DeferStmt:
+ return "defer statement"
+ case *ast.Ellipsis:
+ return "ellipsis"
+ case *ast.EmptyStmt:
+ return "empty statement"
+ case *ast.ExprStmt:
+ return "expression statement"
+ case *ast.Field:
+ // Can be any of these:
+ // struct {x, y int} -- struct field(s)
+ // struct {T} -- anon struct field
+ // interface {I} -- interface embedding
+ // interface {f()} -- interface method
+ // func (A) func(B) C -- receiver, param(s), result(s)
+ return "field/method/parameter"
+ case *ast.FieldList:
+ return "field/method/parameter list"
+ case *ast.File:
+ return "source file"
+ case *ast.ForStmt:
+ return "for loop"
+ case *ast.FuncDecl:
+ return "function declaration"
+ case *ast.FuncLit:
+ return "function literal"
+ case *ast.FuncType:
+ return "function type"
+ case *ast.GenDecl:
+ switch n.Tok {
+ case token.IMPORT:
+ return "import declaration"
+ case token.CONST:
+ return "constant declaration"
+ case token.TYPE:
+ return "type declaration"
+ case token.VAR:
+ return "variable declaration"
+ }
+ case *ast.GoStmt:
+ return "go statement"
+ case *ast.Ident:
+ return "identifier"
+ case *ast.IfStmt:
+ return "if statement"
+ case *ast.ImportSpec:
+ return "import specification"
+ case *ast.IncDecStmt:
+ if n.Tok == token.INC {
+ return "increment statement"
+ }
+ return "decrement statement"
+ case *ast.IndexExpr:
+ return "index expression"
+ case *typeparams.IndexListExpr:
+ return "index list expression"
+ case *ast.InterfaceType:
+ return "interface type"
+ case *ast.KeyValueExpr:
+ return "key/value association"
+ case *ast.LabeledStmt:
+ return "statement label"
+ case *ast.MapType:
+ return "map type"
+ case *ast.Package:
+ return "package"
+ case *ast.ParenExpr:
+ return "parenthesized " + NodeDescription(n.X)
+ case *ast.RangeStmt:
+ return "range loop"
+ case *ast.ReturnStmt:
+ return "return statement"
+ case *ast.SelectStmt:
+ return "select statement"
+ case *ast.SelectorExpr:
+ return "selector"
+ case *ast.SendStmt:
+ return "channel send"
+ case *ast.SliceExpr:
+ return "slice expression"
+ case *ast.StarExpr:
+ return "*-operation" // load/store expr or pointer type
+ case *ast.StructType:
+ return "struct type"
+ case *ast.SwitchStmt:
+ return "switch statement"
+ case *ast.TypeAssertExpr:
+ return "type assertion"
+ case *ast.TypeSpec:
+ return "type specification"
+ case *ast.TypeSwitchStmt:
+ return "type switch"
+ case *ast.UnaryExpr:
+ return fmt.Sprintf("unary %s operation", n.Op)
+ case *ast.ValueSpec:
+ return "value specification"
+
+ }
+ panic(fmt.Sprintf("unexpected node type: %T", n))
+}
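NodeDescription pairs naturally with PathEnclosingInterval; a fragment (reusing path from the sketch above) that prints a label for every node on the path:

    for _, n := range path {
        fmt.Println(astutil.NodeDescription(n)) // "binary + operation", "return statement", "block", "function declaration", "source file"
    }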
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
new file mode 100644
index 000000000..2087ceec9
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
@@ -0,0 +1,482 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package astutil contains common utilities for working with the Go AST.
+package astutil // import "golang.org/x/tools/go/ast/astutil"
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "strconv"
+ "strings"
+)
+
+// AddImport adds the import path to the file f, if absent.
+func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) {
+ return AddNamedImport(fset, f, "", path)
+}
+
+// AddNamedImport adds the import with the given name and path to the file f, if absent.
+// If name is not empty, it is used to rename the import.
+//
+// For example, calling
+// AddNamedImport(fset, f, "pathpkg", "path")
+// adds
+// import pathpkg "path"
+func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) {
+ if imports(f, name, path) {
+ return false
+ }
+
+ newImport := &ast.ImportSpec{
+ Path: &ast.BasicLit{
+ Kind: token.STRING,
+ Value: strconv.Quote(path),
+ },
+ }
+ if name != "" {
+ newImport.Name = &ast.Ident{Name: name}
+ }
+
+ // Find an import decl to add to.
+ // The goal is to find an existing import
+ // whose import path has the longest shared
+ // prefix with path.
+ var (
+ bestMatch = -1 // length of longest shared prefix
+ lastImport = -1 // index in f.Decls of the file's final import decl
+ impDecl *ast.GenDecl // import decl containing the best match
+ impIndex = -1 // spec index in impDecl containing the best match
+
+ isThirdPartyPath = isThirdParty(path)
+ )
+ for i, decl := range f.Decls {
+ gen, ok := decl.(*ast.GenDecl)
+ if ok && gen.Tok == token.IMPORT {
+ lastImport = i
+ // Do not add to import "C", to avoid disrupting the
+ // association with its doc comment, breaking cgo.
+ if declImports(gen, "C") {
+ continue
+ }
+
+ // Match an empty import decl if that's all that is available.
+ if len(gen.Specs) == 0 && bestMatch == -1 {
+ impDecl = gen
+ }
+
+ // Compute longest shared prefix with imports in this group and find best
+ // matched import spec.
+ // 1. Always prefer import spec with longest shared prefix.
+ // 2. While match length is 0,
+ // - for stdlib package: prefer first import spec.
+ // - for third party package: prefer first third party import spec.
+ // We cannot use last import spec as best match for third party package
+ // because grouped imports are usually placed last by goimports -local
+ // flag.
+ // See issue #19190.
+ seenAnyThirdParty := false
+ for j, spec := range gen.Specs {
+ impspec := spec.(*ast.ImportSpec)
+ p := importPath(impspec)
+ n := matchLen(p, path)
+ if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) {
+ bestMatch = n
+ impDecl = gen
+ impIndex = j
+ }
+ seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p)
+ }
+ }
+ }
+
+ // If no import decl found, add one after the last import.
+ if impDecl == nil {
+ impDecl = &ast.GenDecl{
+ Tok: token.IMPORT,
+ }
+ if lastImport >= 0 {
+ impDecl.TokPos = f.Decls[lastImport].End()
+ } else {
+ // There are no existing imports.
+ // Our new import, preceded by a blank line, goes after the package declaration
+ // and after the comment, if any, that starts on the same line as the
+ // package declaration.
+ impDecl.TokPos = f.Package
+
+ file := fset.File(f.Package)
+ pkgLine := file.Line(f.Package)
+ for _, c := range f.Comments {
+ if file.Line(c.Pos()) > pkgLine {
+ break
+ }
+ // +2 for a blank line
+ impDecl.TokPos = c.End() + 2
+ }
+ }
+ f.Decls = append(f.Decls, nil)
+ copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
+ f.Decls[lastImport+1] = impDecl
+ }
+
+ // Insert new import at insertAt.
+ insertAt := 0
+ if impIndex >= 0 {
+ // insert after the found import
+ insertAt = impIndex + 1
+ }
+ impDecl.Specs = append(impDecl.Specs, nil)
+ copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
+ impDecl.Specs[insertAt] = newImport
+ pos := impDecl.Pos()
+ if insertAt > 0 {
+ // If there is a comment after an existing import, preserve the comment
+ // position by adding the new import after the comment.
+ if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil {
+ pos = spec.Comment.End()
+ } else {
+ // Assign same position as the previous import,
+ // so that the sorter sees it as being in the same block.
+ pos = impDecl.Specs[insertAt-1].Pos()
+ }
+ }
+ if newImport.Name != nil {
+ newImport.Name.NamePos = pos
+ }
+ newImport.Path.ValuePos = pos
+ newImport.EndPos = pos
+
+ // Clean up parens. impDecl contains at least one spec.
+ if len(impDecl.Specs) == 1 {
+ // Remove unneeded parens.
+ impDecl.Lparen = token.NoPos
+ } else if !impDecl.Lparen.IsValid() {
+ // impDecl needs parens added.
+ impDecl.Lparen = impDecl.Specs[0].Pos()
+ }
+
+ f.Imports = append(f.Imports, newImport)
+
+ if len(f.Decls) <= 1 {
+ return true
+ }
+
+ // Merge all the import declarations into the first one.
+ var first *ast.GenDecl
+ for i := 0; i < len(f.Decls); i++ {
+ decl := f.Decls[i]
+ gen, ok := decl.(*ast.GenDecl)
+ if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
+ continue
+ }
+ if first == nil {
+ first = gen
+ continue // Don't touch the first one.
+ }
+ // We now know there is more than one package in this import
+ // declaration. Ensure that it ends up parenthesized.
+ first.Lparen = first.Pos()
+ // Move the imports of the other import declaration to the first one.
+ for _, spec := range gen.Specs {
+ spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
+ first.Specs = append(first.Specs, spec)
+ }
+ f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
+ i--
+ }
+
+ return true
+}
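A self-contained sketch (not part of the vendored file) of the add-then-print round trip: add a named import to a parsed file and render the result with go/format:

    package main

    import (
        "go/format"
        "go/parser"
        "go/token"
        "os"

        "golang.org/x/tools/go/ast/astutil"
    )

    func main() {
        src := "package p\n\nvar _ = pathpkg.Ext(\"a.go\")\n"
        fset := token.NewFileSet()
        f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
        if err != nil {
            panic(err)
        }
        astutil.AddNamedImport(fset, f, "pathpkg", "path") // adds: import pathpkg "path"
        if err := format.Node(os.Stdout, fset, f); err != nil {
            panic(err)
        }
    }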
+
+func isThirdParty(importPath string) bool {
+ // Third party package import path usually contains "." (".com", ".org", ...)
+ // This logic is taken from golang.org/x/tools/imports package.
+ return strings.Contains(importPath, ".")
+}
+
+// DeleteImport deletes the import path from the file f, if present.
+// If there are duplicate import declarations, all matching ones are deleted.
+func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
+ return DeleteNamedImport(fset, f, "", path)
+}
+
+// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
+// If there are duplicate import declarations, all matching ones are deleted.
+func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
+ var delspecs []*ast.ImportSpec
+ var delcomments []*ast.CommentGroup
+
+ // Find the import nodes that import path, if any.
+ for i := 0; i < len(f.Decls); i++ {
+ decl := f.Decls[i]
+ gen, ok := decl.(*ast.GenDecl)
+ if !ok || gen.Tok != token.IMPORT {
+ continue
+ }
+ for j := 0; j < len(gen.Specs); j++ {
+ spec := gen.Specs[j]
+ impspec := spec.(*ast.ImportSpec)
+ if importName(impspec) != name || importPath(impspec) != path {
+ continue
+ }
+
+ // We found an import spec that imports path.
+ // Delete it.
+ delspecs = append(delspecs, impspec)
+ deleted = true
+ copy(gen.Specs[j:], gen.Specs[j+1:])
+ gen.Specs = gen.Specs[:len(gen.Specs)-1]
+
+ // If this was the last import spec in this decl,
+ // delete the decl, too.
+ if len(gen.Specs) == 0 {
+ copy(f.Decls[i:], f.Decls[i+1:])
+ f.Decls = f.Decls[:len(f.Decls)-1]
+ i--
+ break
+ } else if len(gen.Specs) == 1 {
+ if impspec.Doc != nil {
+ delcomments = append(delcomments, impspec.Doc)
+ }
+ if impspec.Comment != nil {
+ delcomments = append(delcomments, impspec.Comment)
+ }
+ for _, cg := range f.Comments {
+ // Found comment on the same line as the import spec.
+ if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line {
+ delcomments = append(delcomments, cg)
+ break
+ }
+ }
+
+ spec := gen.Specs[0].(*ast.ImportSpec)
+
+ // Move the documentation right after the import decl.
+ if spec.Doc != nil {
+ for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line {
+ fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
+ }
+ }
+ for _, cg := range f.Comments {
+ if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line {
+ for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line {
+ fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
+ }
+ break
+ }
+ }
+ }
+ if j > 0 {
+ lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
+ lastLine := fset.Position(lastImpspec.Path.ValuePos).Line
+ line := fset.Position(impspec.Path.ValuePos).Line
+
+ // We deleted an entry but now there may be
+ // a blank line-sized hole where the import was.
+ if line-lastLine > 1 || !gen.Rparen.IsValid() {
+ // There was a blank line immediately preceding the deleted import,
+ // so there's no need to close the hole. The right parenthesis is
+ // invalid after AddImport to an import statement without parenthesis.
+ // Do nothing.
+ } else if line != fset.File(gen.Rparen).LineCount() {
+ // There was no blank line. Close the hole.
+ fset.File(gen.Rparen).MergeLine(line)
+ }
+ }
+ j--
+ }
+ }
+
+ // Delete imports from f.Imports.
+ for i := 0; i < len(f.Imports); i++ {
+ imp := f.Imports[i]
+ for j, del := range delspecs {
+ if imp == del {
+ copy(f.Imports[i:], f.Imports[i+1:])
+ f.Imports = f.Imports[:len(f.Imports)-1]
+ copy(delspecs[j:], delspecs[j+1:])
+ delspecs = delspecs[:len(delspecs)-1]
+ i--
+ break
+ }
+ }
+ }
+
+ // Delete comments from f.Comments.
+ for i := 0; i < len(f.Comments); i++ {
+ cg := f.Comments[i]
+ for j, del := range delcomments {
+ if cg == del {
+ copy(f.Comments[i:], f.Comments[i+1:])
+ f.Comments = f.Comments[:len(f.Comments)-1]
+ copy(delcomments[j:], delcomments[j+1:])
+ delcomments = delcomments[:len(delcomments)-1]
+ i--
+ break
+ }
+ }
+ }
+
+ if len(delspecs) > 0 {
+ panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
+ }
+
+ return
+}
+
+// RewriteImport rewrites any import of path oldPath to path newPath.
+func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) {
+ for _, imp := range f.Imports {
+ if importPath(imp) == oldPath {
+ rewrote = true
+ // record old End, because the default is to compute
+ // it using the length of imp.Path.Value.
+ imp.EndPos = imp.End()
+ imp.Path.Value = strconv.Quote(newPath)
+ }
+ }
+ return
+}
+
+// UsesImport reports whether a given import is used.
+func UsesImport(f *ast.File, path string) (used bool) {
+ spec := importSpec(f, path)
+ if spec == nil {
+ return
+ }
+
+ name := spec.Name.String()
+ switch name {
+ case "<nil>":
+ // If the package name is not explicitly specified,
+ // make an educated guess. This is not guaranteed to be correct.
+ lastSlash := strings.LastIndex(path, "/")
+ if lastSlash == -1 {
+ name = path
+ } else {
+ name = path[lastSlash+1:]
+ }
+ case "_", ".":
+ // Not sure if this import is used - err on the side of caution.
+ return true
+ }
+
+ ast.Walk(visitFn(func(n ast.Node) {
+ sel, ok := n.(*ast.SelectorExpr)
+ if ok && isTopName(sel.X, name) {
+ used = true
+ }
+ }), f)
+
+ return
+}
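UsesImport and DeleteNamedImport are typically combined to drop an import only when nothing in the file still refers to it; a fragment assuming the same fset and f as the sketch above:

    if !astutil.UsesImport(f, "path") {
        astutil.DeleteNamedImport(fset, f, "pathpkg", "path")
    }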
+
+type visitFn func(node ast.Node)
+
+func (fn visitFn) Visit(node ast.Node) ast.Visitor {
+ fn(node)
+ return fn
+}
+
+// imports reports whether f has an import with the specified name and path.
+func imports(f *ast.File, name, path string) bool {
+ for _, s := range f.Imports {
+ if importName(s) == name && importPath(s) == path {
+ return true
+ }
+ }
+ return false
+}
+
+// importSpec returns the import spec if f imports path,
+// or nil otherwise.
+func importSpec(f *ast.File, path string) *ast.ImportSpec {
+ for _, s := range f.Imports {
+ if importPath(s) == path {
+ return s
+ }
+ }
+ return nil
+}
+
+// importName returns the name of s,
+// or "" if the import is not named.
+func importName(s *ast.ImportSpec) string {
+ if s.Name == nil {
+ return ""
+ }
+ return s.Name.Name
+}
+
+// importPath returns the unquoted import path of s,
+// or "" if the path is not properly quoted.
+func importPath(s *ast.ImportSpec) string {
+ t, err := strconv.Unquote(s.Path.Value)
+ if err != nil {
+ return ""
+ }
+ return t
+}
+
+// declImports reports whether gen contains an import of path.
+func declImports(gen *ast.GenDecl, path string) bool {
+ if gen.Tok != token.IMPORT {
+ return false
+ }
+ for _, spec := range gen.Specs {
+ impspec := spec.(*ast.ImportSpec)
+ if importPath(impspec) == path {
+ return true
+ }
+ }
+ return false
+}
+
+// matchLen returns the length of the longest path segment prefix shared by x and y.
+func matchLen(x, y string) int {
+ n := 0
+ for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ {
+ if x[i] == '/' {
+ n++
+ }
+ }
+ return n
+}
+
+// isTopName returns true if n is a top-level unresolved identifier with the given name.
+func isTopName(n ast.Expr, name string) bool {
+ id, ok := n.(*ast.Ident)
+ return ok && id.Name == name && id.Obj == nil
+}
+
+// Imports returns the file imports grouped by paragraph.
+func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec {
+ var groups [][]*ast.ImportSpec
+
+ for _, decl := range f.Decls {
+ genDecl, ok := decl.(*ast.GenDecl)
+ if !ok || genDecl.Tok != token.IMPORT {
+ break
+ }
+
+ group := []*ast.ImportSpec{}
+
+ var lastLine int
+ for _, spec := range genDecl.Specs {
+ importSpec := spec.(*ast.ImportSpec)
+ pos := importSpec.Path.ValuePos
+ line := fset.Position(pos).Line
+ if lastLine > 0 && pos > 0 && line-lastLine > 1 {
+ groups = append(groups, group)
+ group = []*ast.ImportSpec{}
+ }
+ group = append(group, importSpec)
+ lastLine = line
+ }
+ groups = append(groups, group)
+ }
+
+ return groups
+}
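A fragment (same fset and f as above) showing how the blank-line-separated groups reported by Imports can be inspected, for example to audit an import-ordering policy:

    for i, group := range astutil.Imports(fset, f) {
        for _, spec := range group {
            fmt.Printf("group %d: %s\n", i, spec.Path.Value)
        }
    }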
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
new file mode 100644
index 000000000..6d9ca23e2
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
@@ -0,0 +1,483 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+import (
+ "fmt"
+ "go/ast"
+ "reflect"
+ "sort"
+
+ "golang.org/x/tools/internal/typeparams"
+)
+
+// An ApplyFunc is invoked by Apply for each node n, even if n is nil,
+// before and/or after the node's children, using a Cursor describing
+// the current node and providing operations on it.
+//
+// The return value of ApplyFunc controls the syntax tree traversal.
+// See Apply for details.
+type ApplyFunc func(*Cursor) bool
+
+// Apply traverses a syntax tree recursively, starting with root,
+// and calling pre and post for each node as described below.
+// Apply returns the syntax tree, possibly modified.
+//
+// If pre is not nil, it is called for each node before the node's
+// children are traversed (pre-order). If pre returns false, no
+// children are traversed, and post is not called for that node.
+//
+// If post is not nil, and a prior call of pre didn't return false,
+// post is called for each node after its children are traversed
+// (post-order). If post returns false, traversal is terminated and
+// Apply returns immediately.
+//
+// Only fields that refer to AST nodes are considered children;
+// i.e., token.Pos, Scopes, Objects, and fields of basic types
+// (strings, etc.) are ignored.
+//
+// Children are traversed in the order in which they appear in the
+// respective node's struct definition. A package's files are
+// traversed in the filenames' alphabetical order.
+//
+func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) {
+ parent := &struct{ ast.Node }{root}
+ defer func() {
+ if r := recover(); r != nil && r != abort {
+ panic(r)
+ }
+ result = parent.Node
+ }()
+ a := &application{pre: pre, post: post}
+ a.apply(parent, "Node", nil, root)
+ return
+}
+
+var abort = new(int) // singleton, to signal termination of Apply
+
+// A Cursor describes a node encountered during Apply.
+// Information about the node and its parent is available
+// from the Node, Parent, Name, and Index methods.
+//
+// If p is a variable of type and value of the current parent node
+// c.Parent(), and f is the field identifier with name c.Name(),
+// the following invariants hold:
+//
+// p.f == c.Node() if c.Index() < 0
+// p.f[c.Index()] == c.Node() if c.Index() >= 0
+//
+// The methods Replace, Delete, InsertBefore, and InsertAfter
+// can be used to change the AST without disrupting Apply.
+type Cursor struct {
+ parent ast.Node
+ name string
+ iter *iterator // valid if non-nil
+ node ast.Node
+}
+
+// Node returns the current Node.
+func (c *Cursor) Node() ast.Node { return c.node }
+
+// Parent returns the parent of the current Node.
+func (c *Cursor) Parent() ast.Node { return c.parent }
+
+// Name returns the name of the parent Node field that contains the current Node.
+// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns
+// the filename for the current Node.
+func (c *Cursor) Name() string { return c.name }
+
+// Index reports the index >= 0 of the current Node in the slice of Nodes that
+// contains it, or a value < 0 if the current Node is not part of a slice.
+// The index of the current node changes if InsertBefore is called while
+// processing the current node.
+func (c *Cursor) Index() int {
+ if c.iter != nil {
+ return c.iter.index
+ }
+ return -1
+}
+
+// field returns the current node's parent field value.
+func (c *Cursor) field() reflect.Value {
+ return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name)
+}
+
+// Replace replaces the current Node with n.
+// The replacement node is not walked by Apply.
+func (c *Cursor) Replace(n ast.Node) {
+ if _, ok := c.node.(*ast.File); ok {
+ file, ok := n.(*ast.File)
+ if !ok {
+ panic("attempt to replace *ast.File with non-*ast.File")
+ }
+ c.parent.(*ast.Package).Files[c.name] = file
+ return
+ }
+
+ v := c.field()
+ if i := c.Index(); i >= 0 {
+ v = v.Index(i)
+ }
+ v.Set(reflect.ValueOf(n))
+}
+
+// Delete deletes the current Node from its containing slice.
+// If the current Node is not part of a slice, Delete panics.
+// As a special case, if the current node is a package file,
+// Delete removes it from the package's Files map.
+func (c *Cursor) Delete() {
+ if _, ok := c.node.(*ast.File); ok {
+ delete(c.parent.(*ast.Package).Files, c.name)
+ return
+ }
+
+ i := c.Index()
+ if i < 0 {
+ panic("Delete node not contained in slice")
+ }
+ v := c.field()
+ l := v.Len()
+ reflect.Copy(v.Slice(i, l), v.Slice(i+1, l))
+ v.Index(l - 1).Set(reflect.Zero(v.Type().Elem()))
+ v.SetLen(l - 1)
+ c.iter.step--
+}
+
+// InsertAfter inserts n after the current Node in its containing slice.
+// If the current Node is not part of a slice, InsertAfter panics.
+// Apply does not walk n.
+func (c *Cursor) InsertAfter(n ast.Node) {
+ i := c.Index()
+ if i < 0 {
+ panic("InsertAfter node not contained in slice")
+ }
+ v := c.field()
+ v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
+ l := v.Len()
+ reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l))
+ v.Index(i + 1).Set(reflect.ValueOf(n))
+ c.iter.step++
+}
+
+// InsertBefore inserts n before the current Node in its containing slice.
+// If the current Node is not part of a slice, InsertBefore panics.
+// Apply will not walk n.
+func (c *Cursor) InsertBefore(n ast.Node) {
+ i := c.Index()
+ if i < 0 {
+ panic("InsertBefore node not contained in slice")
+ }
+ v := c.field()
+ v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
+ l := v.Len()
+ reflect.Copy(v.Slice(i+1, l), v.Slice(i, l))
+ v.Index(i).Set(reflect.ValueOf(n))
+ c.iter.index++
+}
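A compact sketch (not part of the vendored file) of Apply with a Cursor: rename every identifier oldName to newName in a previously parsed *ast.File f. The identifier names are made up for the example, and the go/ast import is assumed.

    f = astutil.Apply(f, func(c *astutil.Cursor) bool {
        if id, ok := c.Node().(*ast.Ident); ok && id.Name == "oldName" {
            c.Replace(&ast.Ident{Name: "newName", NamePos: id.NamePos})
        }
        return true // keep descending into children
    }, nil).(*ast.File)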
+
+// application carries all the shared data so we can pass it around cheaply.
+type application struct {
+ pre, post ApplyFunc
+ cursor Cursor
+ iter iterator
+}
+
+func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) {
+ // convert typed nil into untyped nil
+ if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() {
+ n = nil
+ }
+
+ // avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead
+ saved := a.cursor
+ a.cursor.parent = parent
+ a.cursor.name = name
+ a.cursor.iter = iter
+ a.cursor.node = n
+
+ if a.pre != nil && !a.pre(&a.cursor) {
+ a.cursor = saved
+ return
+ }
+
+ // walk children
+ // (the order of the cases matches the order of the corresponding node types in go/ast)
+ switch n := n.(type) {
+ case nil:
+ // nothing to do
+
+ // Comments and fields
+ case *ast.Comment:
+ // nothing to do
+
+ case *ast.CommentGroup:
+ if n != nil {
+ a.applyList(n, "List")
+ }
+
+ case *ast.Field:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Names")
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Tag", nil, n.Tag)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.FieldList:
+ a.applyList(n, "List")
+
+ // Expressions
+ case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
+ // nothing to do
+
+ case *ast.Ellipsis:
+ a.apply(n, "Elt", nil, n.Elt)
+
+ case *ast.FuncLit:
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.CompositeLit:
+ a.apply(n, "Type", nil, n.Type)
+ a.applyList(n, "Elts")
+
+ case *ast.ParenExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.SelectorExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Sel", nil, n.Sel)
+
+ case *ast.IndexExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Index", nil, n.Index)
+
+ case *typeparams.IndexListExpr:
+ a.apply(n, "X", nil, n.X)
+ a.applyList(n, "Indices")
+
+ case *ast.SliceExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Low", nil, n.Low)
+ a.apply(n, "High", nil, n.High)
+ a.apply(n, "Max", nil, n.Max)
+
+ case *ast.TypeAssertExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Type", nil, n.Type)
+
+ case *ast.CallExpr:
+ a.apply(n, "Fun", nil, n.Fun)
+ a.applyList(n, "Args")
+
+ case *ast.StarExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.UnaryExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.BinaryExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Y", nil, n.Y)
+
+ case *ast.KeyValueExpr:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+
+ // Types
+ case *ast.ArrayType:
+ a.apply(n, "Len", nil, n.Len)
+ a.apply(n, "Elt", nil, n.Elt)
+
+ case *ast.StructType:
+ a.apply(n, "Fields", nil, n.Fields)
+
+ case *ast.FuncType:
+ a.apply(n, "Params", nil, n.Params)
+ a.apply(n, "Results", nil, n.Results)
+
+ case *ast.InterfaceType:
+ a.apply(n, "Methods", nil, n.Methods)
+
+ case *ast.MapType:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+
+ case *ast.ChanType:
+ a.apply(n, "Value", nil, n.Value)
+
+ // Statements
+ case *ast.BadStmt:
+ // nothing to do
+
+ case *ast.DeclStmt:
+ a.apply(n, "Decl", nil, n.Decl)
+
+ case *ast.EmptyStmt:
+ // nothing to do
+
+ case *ast.LabeledStmt:
+ a.apply(n, "Label", nil, n.Label)
+ a.apply(n, "Stmt", nil, n.Stmt)
+
+ case *ast.ExprStmt:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.SendStmt:
+ a.apply(n, "Chan", nil, n.Chan)
+ a.apply(n, "Value", nil, n.Value)
+
+ case *ast.IncDecStmt:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.AssignStmt:
+ a.applyList(n, "Lhs")
+ a.applyList(n, "Rhs")
+
+ case *ast.GoStmt:
+ a.apply(n, "Call", nil, n.Call)
+
+ case *ast.DeferStmt:
+ a.apply(n, "Call", nil, n.Call)
+
+ case *ast.ReturnStmt:
+ a.applyList(n, "Results")
+
+ case *ast.BranchStmt:
+ a.apply(n, "Label", nil, n.Label)
+
+ case *ast.BlockStmt:
+ a.applyList(n, "List")
+
+ case *ast.IfStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Cond", nil, n.Cond)
+ a.apply(n, "Body", nil, n.Body)
+ a.apply(n, "Else", nil, n.Else)
+
+ case *ast.CaseClause:
+ a.applyList(n, "List")
+ a.applyList(n, "Body")
+
+ case *ast.SwitchStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Tag", nil, n.Tag)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.TypeSwitchStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Assign", nil, n.Assign)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.CommClause:
+ a.apply(n, "Comm", nil, n.Comm)
+ a.applyList(n, "Body")
+
+ case *ast.SelectStmt:
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.ForStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Cond", nil, n.Cond)
+ a.apply(n, "Post", nil, n.Post)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.RangeStmt:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Body", nil, n.Body)
+
+ // Declarations
+ case *ast.ImportSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ a.apply(n, "Path", nil, n.Path)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.ValueSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Names")
+ a.apply(n, "Type", nil, n.Type)
+ a.applyList(n, "Values")
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.TypeSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.BadDecl:
+ // nothing to do
+
+ case *ast.GenDecl:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Specs")
+
+ case *ast.FuncDecl:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Recv", nil, n.Recv)
+ a.apply(n, "Name", nil, n.Name)
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Body", nil, n.Body)
+
+ // Files and packages
+ case *ast.File:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ a.applyList(n, "Decls")
+ // Don't walk n.Comments; they have either been walked already if
+ // they are Doc comments, or they can be easily walked explicitly.
+
+ case *ast.Package:
+ // collect and sort names for reproducible behavior
+ var names []string
+ for name := range n.Files {
+ names = append(names, name)
+ }
+ sort.Strings(names)
+ for _, name := range names {
+ a.apply(n, name, nil, n.Files[name])
+ }
+
+ default:
+ panic(fmt.Sprintf("Apply: unexpected node type %T", n))
+ }
+
+ if a.post != nil && !a.post(&a.cursor) {
+ panic(abort)
+ }
+
+ a.cursor = saved
+}
+
+// An iterator controls iteration over a slice of nodes.
+type iterator struct {
+ index, step int
+}
+
+func (a *application) applyList(parent ast.Node, name string) {
+ // avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead
+ saved := a.iter
+ a.iter.index = 0
+ for {
+ // must reload parent.name each time, since cursor modifications might change it
+ v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name)
+ if a.iter.index >= v.Len() {
+ break
+ }
+
+ // element x may be nil in a bad AST - be cautious
+ var x ast.Node
+ if e := v.Index(a.iter.index); e.IsValid() {
+ x = e.Interface().(ast.Node)
+ }
+
+ a.iter.step = 1
+ a.apply(parent, name, &a.iter, x)
+ a.iter.index += a.iter.step
+ }
+ a.iter = saved
+}
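
For context on how this rewrite machinery is driven: astutil.Apply walks the tree and hands every node to the callbacks through a Cursor, and the Cursor methods above (Replace, Delete, InsertBefore, InsertAfter) edit the enclosing field or slice in place. A minimal sketch using the public golang.org/x/tools/go/ast/astutil package; the source text and identifier names are made up for illustration:

package main

import (
	"go/ast"
	"go/parser"
	"go/printer"
	"go/token"
	"os"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", `package p; func foo() { foo() }`, 0)
	if err != nil {
		panic(err)
	}
	// The pre callback runs before a node's children are walked; returning
	// true continues the traversal. Replace swaps the current node within
	// its parent, which is exactly what the Cursor machinery above enables.
	astutil.Apply(f, func(c *astutil.Cursor) bool {
		if id, ok := c.Node().(*ast.Ident); ok && id.Name == "foo" {
			c.Replace(ast.NewIdent("bar"))
		}
		return true
	}, nil)
	printer.Fprint(os.Stdout, fset, f) // prints the rewritten file
}
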
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/util.go b/vendor/golang.org/x/tools/go/ast/astutil/util.go
new file mode 100644
index 000000000..919d5305a
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/util.go
@@ -0,0 +1,18 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+import "go/ast"
+
+// Unparen returns e with any enclosing parentheses stripped.
+func Unparen(e ast.Expr) ast.Expr {
+ for {
+ p, ok := e.(*ast.ParenExpr)
+ if !ok {
+ return e
+ }
+ e = p.X
+ }
+}
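
Unparen simply peels off every enclosing *ast.ParenExpr and returns the underlying expression. A short sketch (the expression text is arbitrary):

package main

import (
	"fmt"
	"go/parser"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	expr, err := parser.ParseExpr("((x + 1))")
	if err != nil {
		panic(err)
	}
	// Both layers of parentheses are stripped, leaving the binary expression.
	fmt.Printf("%T\n", astutil.Unparen(expr)) // *ast.BinaryExpr
}
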
diff --git a/vendor/golang.org/x/tools/internal/event/core/event.go b/vendor/golang.org/x/tools/internal/event/core/event.go
new file mode 100644
index 000000000..a6cf0e64a
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/core/event.go
@@ -0,0 +1,85 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package core provides support for event based telemetry.
+package core
+
+import (
+ "fmt"
+ "time"
+
+ "golang.org/x/tools/internal/event/label"
+)
+
+// Event holds the information about an event of note that occurred.
+type Event struct {
+ at time.Time
+
+ // As events are often on the stack, storing the first few labels directly
+ // in the event can avoid an allocation at all for the very common cases of
+ // simple events.
+ // The length needs to be large enough to cope with the majority of events
+ // but not so large as to cause undue stack pressure.
+ // A log message with two values will use 3 labels (one for each value and
+ // one for the message itself).
+
+ static [3]label.Label // inline storage for the first few labels
+ dynamic []label.Label // dynamically sized storage for remaining labels
+}
+
+// eventLabelMap implements label.Map for the labels of an Event.
+type eventLabelMap struct {
+ event Event
+}
+
+func (ev Event) At() time.Time { return ev.at }
+
+func (ev Event) Format(f fmt.State, r rune) {
+ if !ev.at.IsZero() {
+ fmt.Fprint(f, ev.at.Format("2006/01/02 15:04:05 "))
+ }
+ for index := 0; ev.Valid(index); index++ {
+ if l := ev.Label(index); l.Valid() {
+ fmt.Fprintf(f, "\n\t%v", l)
+ }
+ }
+}
+
+func (ev Event) Valid(index int) bool {
+ return index >= 0 && index < len(ev.static)+len(ev.dynamic)
+}
+
+func (ev Event) Label(index int) label.Label {
+ if index < len(ev.static) {
+ return ev.static[index]
+ }
+ return ev.dynamic[index-len(ev.static)]
+}
+
+func (ev Event) Find(key label.Key) label.Label {
+ for _, l := range ev.static {
+ if l.Key() == key {
+ return l
+ }
+ }
+ for _, l := range ev.dynamic {
+ if l.Key() == key {
+ return l
+ }
+ }
+ return label.Label{}
+}
+
+func MakeEvent(static [3]label.Label, labels []label.Label) Event {
+ return Event{
+ static: static,
+ dynamic: labels,
+ }
+}
+
+// CloneEvent returns a copy of the event with the time adjusted to at.
+func CloneEvent(ev Event, at time.Time) Event {
+ ev.at = at
+ return ev
+}
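
The Event layout above is a small-buffer optimization: the first three labels live in a fixed-size array (usually on the stack) and only the overflow goes into a slice, with Valid and Label presenting the two as one logical list. Since internal/event/core cannot be imported from outside the x/tools module, the standalone sketch below (hypothetical names) only mirrors that indexing scheme:

package main

import "fmt"

type kv struct{ key, value string }

type event struct {
	static  [3]kv // inline storage for the first few labels
	dynamic []kv  // overflow storage for the rest
}

func (e event) valid(i int) bool { return i >= 0 && i < len(e.static)+len(e.dynamic) }

func (e event) label(i int) kv {
	if i < len(e.static) {
		return e.static[i]
	}
	return e.dynamic[i-len(e.static)]
}

func main() {
	ev := event{
		static:  [3]kv{{"message", "loading"}, {"package", "fmt"}},
		dynamic: []kv{{"attempt", "2"}},
	}
	for i := 0; ev.valid(i); i++ {
		if l := ev.label(i); l.key != "" { // unused static slots stay zero-valued
			fmt.Printf("%s=%s\n", l.key, l.value)
		}
	}
}
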
diff --git a/vendor/golang.org/x/tools/internal/event/core/export.go b/vendor/golang.org/x/tools/internal/event/core/export.go
new file mode 100644
index 000000000..05f3a9a57
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/core/export.go
@@ -0,0 +1,70 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package core
+
+import (
+ "context"
+ "sync/atomic"
+ "time"
+ "unsafe"
+
+ "golang.org/x/tools/internal/event/label"
+)
+
+// Exporter is a function that handles events.
+// It may return a modified context and event.
+type Exporter func(context.Context, Event, label.Map) context.Context
+
+var (
+ exporter unsafe.Pointer
+)
+
+// SetExporter sets the global exporter function that handles all events.
+// The exporter is called synchronously from the event call site, so it should
+// return quickly so as not to hold up user code.
+func SetExporter(e Exporter) {
+ p := unsafe.Pointer(&e)
+ if e == nil {
+ // &e is always valid, and so p is always valid, but for the early abort
+ // of Export to be efficient it needs to make the nil check on the
+ // pointer without having to dereference it, so we make the nil function
+ // also a nil pointer
+ p = nil
+ }
+ atomic.StorePointer(&exporter, p)
+}
+
+// deliver is called to deliver an event to the supplied exporter.
+// It will fill in the time.
+func deliver(ctx context.Context, exporter Exporter, ev Event) context.Context {
+ // add the current time to the event
+ ev.at = time.Now()
+ // hand the event off to the current exporter
+ return exporter(ctx, ev, ev)
+}
+
+// Export is called to deliver an event to the global exporter if set.
+func Export(ctx context.Context, ev Event) context.Context {
+ // get the global exporter and abort early if there is not one
+ exporterPtr := (*Exporter)(atomic.LoadPointer(&exporter))
+ if exporterPtr == nil {
+ return ctx
+ }
+ return deliver(ctx, *exporterPtr, ev)
+}
+
+// ExportPair is called to deliver a start event to the supplied exporter.
+// It also returns a function that will deliver the end event to the same
+// exporter.
+// It will fill in the time.
+func ExportPair(ctx context.Context, begin, end Event) (context.Context, func()) {
+ // get the global exporter and abort early if there is not one
+ exporterPtr := (*Exporter)(atomic.LoadPointer(&exporter))
+ if exporterPtr == nil {
+ return ctx, func() {}
+ }
+ ctx = deliver(ctx, *exporterPtr, begin)
+ return ctx, func() { deliver(ctx, *exporterPtr, end) }
+}
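
SetExporter and Export above implement a lock-free "maybe installed" hook: the exporter function is stored behind an unsafe.Pointer with atomic.StorePointer, and a nil exporter is stored as a nil pointer so the hot path is just one atomic load plus a nil check. A standalone sketch of the same pattern, with hypothetical names (not the x/tools code itself):

package main

import (
	"fmt"
	"sync/atomic"
	"unsafe"
)

type handler func(msg string)

var current unsafe.Pointer // holds a *handler; nil when no handler is set

func setHandler(h handler) {
	p := unsafe.Pointer(&h)
	if h == nil {
		p = nil // keep the fast path a plain nil-pointer check
	}
	atomic.StorePointer(&current, p)
}

func emit(msg string) {
	hp := (*handler)(atomic.LoadPointer(&current))
	if hp == nil {
		return // nothing installed; cheap early return
	}
	(*hp)(msg)
}

func main() {
	emit("dropped silently") // no handler installed yet
	setHandler(func(msg string) { fmt.Println("got:", msg) })
	emit("delivered")
}
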
diff --git a/vendor/golang.org/x/tools/internal/event/core/fast.go b/vendor/golang.org/x/tools/internal/event/core/fast.go
new file mode 100644
index 000000000..06c1d4615
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/core/fast.go
@@ -0,0 +1,77 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package core
+
+import (
+ "context"
+
+ "golang.org/x/tools/internal/event/keys"
+ "golang.org/x/tools/internal/event/label"
+)
+
+// Log1 takes a message and one label and delivers a log event to the exporter.
+// It is a customized version of Print that is faster and does no allocation.
+func Log1(ctx context.Context, message string, t1 label.Label) {
+ Export(ctx, MakeEvent([3]label.Label{
+ keys.Msg.Of(message),
+ t1,
+ }, nil))
+}
+
+// Log2 takes a message and two labels and delivers a log event to the exporter.
+// It is a customized version of Print that is faster and does no allocation.
+func Log2(ctx context.Context, message string, t1 label.Label, t2 label.Label) {
+ Export(ctx, MakeEvent([3]label.Label{
+ keys.Msg.Of(message),
+ t1,
+ t2,
+ }, nil))
+}
+
+// Metric1 sends a metric event to the exporter with the supplied labels.
+func Metric1(ctx context.Context, t1 label.Label) context.Context {
+ return Export(ctx, MakeEvent([3]label.Label{
+ keys.Metric.New(),
+ t1,
+ }, nil))
+}
+
+// Metric2 sends a metric event to the exporter with the supplied labels.
+func Metric2(ctx context.Context, t1, t2 label.Label) context.Context {
+ return Export(ctx, MakeEvent([3]label.Label{
+ keys.Metric.New(),
+ t1,
+ t2,
+ }, nil))
+}
+
+// Start1 sends a span start event with the supplied label list to the exporter.
+// It also returns a function that will end the span, which should normally be
+// deferred.
+func Start1(ctx context.Context, name string, t1 label.Label) (context.Context, func()) {
+ return ExportPair(ctx,
+ MakeEvent([3]label.Label{
+ keys.Start.Of(name),
+ t1,
+ }, nil),
+ MakeEvent([3]label.Label{
+ keys.End.New(),
+ }, nil))
+}
+
+// Start2 sends a span start event with the supplied label list to the exporter.
+// It also returns a function that will end the span, which should normally be
+// deferred.
+func Start2(ctx context.Context, name string, t1, t2 label.Label) (context.Context, func()) {
+ return ExportPair(ctx,
+ MakeEvent([3]label.Label{
+ keys.Start.Of(name),
+ t1,
+ t2,
+ }, nil),
+ MakeEvent([3]label.Label{
+ keys.End.New(),
+ }, nil))
+}
diff --git a/vendor/golang.org/x/tools/internal/event/doc.go b/vendor/golang.org/x/tools/internal/event/doc.go
new file mode 100644
index 000000000..5dc6e6bab
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/doc.go
@@ -0,0 +1,7 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package event provides a set of packages that cover the main
+// concepts of telemetry in an implementation agnostic way.
+package event
diff --git a/vendor/golang.org/x/tools/internal/event/event.go b/vendor/golang.org/x/tools/internal/event/event.go
new file mode 100644
index 000000000..4d55e577d
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/event.go
@@ -0,0 +1,127 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package event
+
+import (
+ "context"
+
+ "golang.org/x/tools/internal/event/core"
+ "golang.org/x/tools/internal/event/keys"
+ "golang.org/x/tools/internal/event/label"
+)
+
+// Exporter is a function that handles events.
+// It may return a modified context and event.
+type Exporter func(context.Context, core.Event, label.Map) context.Context
+
+// SetExporter sets the global exporter function that handles all events.
+// The exporter is called synchronously from the event call site, so it should
+// return quickly so as not to hold up user code.
+func SetExporter(e Exporter) {
+ core.SetExporter(core.Exporter(e))
+}
+
+// Log takes a message and a label list and combines them into a single event
+// before delivering them to the exporter.
+func Log(ctx context.Context, message string, labels ...label.Label) {
+ core.Export(ctx, core.MakeEvent([3]label.Label{
+ keys.Msg.Of(message),
+ }, labels))
+}
+
+// IsLog returns true if the event was built by the Log function.
+// It is intended to be used in exporters to identify the semantics of the
+// event when deciding what to do with it.
+func IsLog(ev core.Event) bool {
+ return ev.Label(0).Key() == keys.Msg
+}
+
+// Error takes a message and a label list and combines them into a single event
+// before delivering them to the exporter. It captures the error in the
+// delivered event.
+func Error(ctx context.Context, message string, err error, labels ...label.Label) {
+ core.Export(ctx, core.MakeEvent([3]label.Label{
+ keys.Msg.Of(message),
+ keys.Err.Of(err),
+ }, labels))
+}
+
+// IsError returns true if the event was built by the Error function.
+// It is intended to be used in exporters to identify the semantics of the
+// event when deciding what to do with it.
+func IsError(ev core.Event) bool {
+ return ev.Label(0).Key() == keys.Msg &&
+ ev.Label(1).Key() == keys.Err
+}
+
+// Metric sends a metric event to the exporter with the supplied labels.
+func Metric(ctx context.Context, labels ...label.Label) {
+ core.Export(ctx, core.MakeEvent([3]label.Label{
+ keys.Metric.New(),
+ }, labels))
+}
+
+// IsMetric returns true if the event was built by the Metric function.
+// It is intended to be used in exporters to identify the semantics of the
+// event when deciding what to do with it.
+func IsMetric(ev core.Event) bool {
+ return ev.Label(0).Key() == keys.Metric
+}
+
+// Label sends a label event to the exporter with the supplied labels.
+func Label(ctx context.Context, labels ...label.Label) context.Context {
+ return core.Export(ctx, core.MakeEvent([3]label.Label{
+ keys.Label.New(),
+ }, labels))
+}
+
+// IsLabel returns true if the event was built by the Label function.
+// It is intended to be used in exporters to identify the semantics of the
+// event when deciding what to do with it.
+func IsLabel(ev core.Event) bool {
+ return ev.Label(0).Key() == keys.Label
+}
+
+// Start sends a span start event with the supplied label list to the exporter.
+// It also returns a function that will end the span, which should normally be
+// deferred.
+func Start(ctx context.Context, name string, labels ...label.Label) (context.Context, func()) {
+ return core.ExportPair(ctx,
+ core.MakeEvent([3]label.Label{
+ keys.Start.Of(name),
+ }, labels),
+ core.MakeEvent([3]label.Label{
+ keys.End.New(),
+ }, nil))
+}
+
+// IsStart returns true if the event was built by the Start function.
+// It is intended to be used in exporters to identify the semantics of the
+// event when deciding what to do with it.
+func IsStart(ev core.Event) bool {
+ return ev.Label(0).Key() == keys.Start
+}
+
+// IsEnd returns true if the event was built by the end function returned by Start.
+// It is intended to be used in exporters to identify the semantics of the
+// event when deciding what to do with it.
+func IsEnd(ev core.Event) bool {
+ return ev.Label(0).Key() == keys.End
+}
+
+// Detach returns a context without an associated span.
+// This allows the creation of spans that are not children of the current span.
+func Detach(ctx context.Context) context.Context {
+ return core.Export(ctx, core.MakeEvent([3]label.Label{
+ keys.Detach.New(),
+ }, nil))
+}
+
+// IsDetach returns true if the event was built by the Detach function.
+// It is intended to be used in exporters to identify the semantics of the
+// event when deciding what to do with it.
+func IsDetach(ev core.Event) bool {
+ return ev.Label(0).Key() == keys.Detach
+}
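
Putting the pieces together, code inside x/tools logs through this package and an installed exporter classifies events with the Is* predicates. The sketch below assumes it compiles inside the x/tools module itself (these event packages are internal, so other modules cannot import them), and the key name is made up for illustration:

package example

import (
	"context"
	"fmt"

	"golang.org/x/tools/internal/event"
	"golang.org/x/tools/internal/event/core"
	"golang.org/x/tools/internal/event/keys"
	"golang.org/x/tools/internal/event/label"
)

// pkgKey is a hypothetical label key used only by this sketch.
var pkgKey = keys.NewString("package", "the package being processed")

func init() {
	event.SetExporter(func(ctx context.Context, ev core.Event, lm label.Map) context.Context {
		switch {
		case event.IsError(ev): // check IsError first; error events also carry a message label
			fmt.Printf("ERROR %v\n", ev)
		case event.IsLog(ev):
			fmt.Printf("LOG   %v\n", ev)
		}
		return ctx
	})
}

func demo(ctx context.Context) {
	event.Log(ctx, "loading", pkgKey.Of("fmt"))
	event.Error(ctx, "load failed", fmt.Errorf("boom"), pkgKey.Of("fmt"))
}
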
diff --git a/vendor/golang.org/x/tools/internal/event/keys/keys.go b/vendor/golang.org/x/tools/internal/event/keys/keys.go
new file mode 100644
index 000000000..a02206e30
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/keys/keys.go
@@ -0,0 +1,564 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package keys
+
+import (
+ "fmt"
+ "io"
+ "math"
+ "strconv"
+
+ "golang.org/x/tools/internal/event/label"
+)
+
+// Value represents a key for untyped values.
+type Value struct {
+ name string
+ description string
+}
+
+// New creates a new Key for untyped values.
+func New(name, description string) *Value {
+ return &Value{name: name, description: description}
+}
+
+func (k *Value) Name() string { return k.name }
+func (k *Value) Description() string { return k.description }
+
+func (k *Value) Format(w io.Writer, buf []byte, l label.Label) {
+ fmt.Fprint(w, k.From(l))
+}
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Value) Get(lm label.Map) interface{} {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return nil
+}
+
+// From can be used to get a value from a Label.
+func (k *Value) From(t label.Label) interface{} { return t.UnpackValue() }
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Value) Of(value interface{}) label.Label { return label.OfValue(k, value) }
+
+// Tag represents a key for tagging labels that have no value.
+// These are used when the existence of the label is the entire information it
+// carries, such as marking events to be of a specific kind, or from a specific
+// package.
+type Tag struct {
+ name string
+ description string
+}
+
+// NewTag creates a new Key for tagging labels.
+func NewTag(name, description string) *Tag {
+ return &Tag{name: name, description: description}
+}
+
+func (k *Tag) Name() string { return k.name }
+func (k *Tag) Description() string { return k.description }
+
+func (k *Tag) Format(w io.Writer, buf []byte, l label.Label) {}
+
+// New creates a new Label with this key.
+func (k *Tag) New() label.Label { return label.OfValue(k, nil) }
+
+// Int represents a key for int values.
+type Int struct {
+ name string
+ description string
+}
+
+// NewInt creates a new Key for int values.
+func NewInt(name, description string) *Int {
+ return &Int{name: name, description: description}
+}
+
+func (k *Int) Name() string { return k.name }
+func (k *Int) Description() string { return k.description }
+
+func (k *Int) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendInt(buf, int64(k.From(l)), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Int) Of(v int) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Int) Get(lm label.Map) int {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *Int) From(t label.Label) int { return int(t.Unpack64()) }
+
+// Int8 represents a key for int8 values.
+type Int8 struct {
+ name string
+ description string
+}
+
+// NewInt8 creates a new Key for int8 values.
+func NewInt8(name, description string) *Int8 {
+ return &Int8{name: name, description: description}
+}
+
+func (k *Int8) Name() string { return k.name }
+func (k *Int8) Description() string { return k.description }
+
+func (k *Int8) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendInt(buf, int64(k.From(l)), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Int8) Of(v int8) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Int8) Get(lm label.Map) int8 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *Int8) From(t label.Label) int8 { return int8(t.Unpack64()) }
+
+// Int16 represents a key for int16 values.
+type Int16 struct {
+ name string
+ description string
+}
+
+// NewInt16 creates a new Key for int16 values.
+func NewInt16(name, description string) *Int16 {
+ return &Int16{name: name, description: description}
+}
+
+func (k *Int16) Name() string { return k.name }
+func (k *Int16) Description() string { return k.description }
+
+func (k *Int16) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendInt(buf, int64(k.From(l)), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Int16) Of(v int16) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Int16) Get(lm label.Map) int16 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *Int16) From(t label.Label) int16 { return int16(t.Unpack64()) }
+
+// Int32 represents a key for int32 values.
+type Int32 struct {
+ name string
+ description string
+}
+
+// NewInt32 creates a new Key for int32 values.
+func NewInt32(name, description string) *Int32 {
+ return &Int32{name: name, description: description}
+}
+
+func (k *Int32) Name() string { return k.name }
+func (k *Int32) Description() string { return k.description }
+
+func (k *Int32) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendInt(buf, int64(k.From(l)), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Int32) Of(v int32) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Int32) Get(lm label.Map) int32 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *Int32) From(t label.Label) int32 { return int32(t.Unpack64()) }
+
+// Int64 represents a key for int64 values.
+type Int64 struct {
+ name string
+ description string
+}
+
+// NewInt64 creates a new Key for int64 values.
+func NewInt64(name, description string) *Int64 {
+ return &Int64{name: name, description: description}
+}
+
+func (k *Int64) Name() string { return k.name }
+func (k *Int64) Description() string { return k.description }
+
+func (k *Int64) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendInt(buf, k.From(l), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Int64) Of(v int64) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Int64) Get(lm label.Map) int64 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *Int64) From(t label.Label) int64 { return int64(t.Unpack64()) }
+
+// UInt represents a key for uint values.
+type UInt struct {
+ name string
+ description string
+}
+
+// NewUInt creates a new Key for uint values.
+func NewUInt(name, description string) *UInt {
+ return &UInt{name: name, description: description}
+}
+
+func (k *UInt) Name() string { return k.name }
+func (k *UInt) Description() string { return k.description }
+
+func (k *UInt) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendUint(buf, uint64(k.From(l)), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *UInt) Of(v uint) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *UInt) Get(lm label.Map) uint {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *UInt) From(t label.Label) uint { return uint(t.Unpack64()) }
+
+// UInt8 represents a key for uint8 values.
+type UInt8 struct {
+ name string
+ description string
+}
+
+// NewUInt8 creates a new Key for uint8 values.
+func NewUInt8(name, description string) *UInt8 {
+ return &UInt8{name: name, description: description}
+}
+
+func (k *UInt8) Name() string { return k.name }
+func (k *UInt8) Description() string { return k.description }
+
+func (k *UInt8) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendUint(buf, uint64(k.From(l)), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *UInt8) Of(v uint8) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *UInt8) Get(lm label.Map) uint8 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *UInt8) From(t label.Label) uint8 { return uint8(t.Unpack64()) }
+
+// UInt16 represents a key for uint16 values.
+type UInt16 struct {
+ name string
+ description string
+}
+
+// NewUInt16 creates a new Key for uint16 values.
+func NewUInt16(name, description string) *UInt16 {
+ return &UInt16{name: name, description: description}
+}
+
+func (k *UInt16) Name() string { return k.name }
+func (k *UInt16) Description() string { return k.description }
+
+func (k *UInt16) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendUint(buf, uint64(k.From(l)), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *UInt16) Of(v uint16) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *UInt16) Get(lm label.Map) uint16 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *UInt16) From(t label.Label) uint16 { return uint16(t.Unpack64()) }
+
+// UInt32 represents a key for uint32 values.
+type UInt32 struct {
+ name string
+ description string
+}
+
+// NewUInt32 creates a new Key for uint32 values.
+func NewUInt32(name, description string) *UInt32 {
+ return &UInt32{name: name, description: description}
+}
+
+func (k *UInt32) Name() string { return k.name }
+func (k *UInt32) Description() string { return k.description }
+
+func (k *UInt32) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendUint(buf, uint64(k.From(l)), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *UInt32) Of(v uint32) label.Label { return label.Of64(k, uint64(v)) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *UInt32) Get(lm label.Map) uint32 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *UInt32) From(t label.Label) uint32 { return uint32(t.Unpack64()) }
+
+// UInt64 represents a key for uint64 values.
+type UInt64 struct {
+ name string
+ description string
+}
+
+// NewUInt64 creates a new Key for uint64 values.
+func NewUInt64(name, description string) *UInt64 {
+ return &UInt64{name: name, description: description}
+}
+
+func (k *UInt64) Name() string { return k.name }
+func (k *UInt64) Description() string { return k.description }
+
+func (k *UInt64) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendUint(buf, k.From(l), 10))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *UInt64) Of(v uint64) label.Label { return label.Of64(k, v) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *UInt64) Get(lm label.Map) uint64 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *UInt64) From(t label.Label) uint64 { return t.Unpack64() }
+
+// Float32 represents a key for float32 values.
+type Float32 struct {
+ name string
+ description string
+}
+
+// NewFloat32 creates a new Key for float32 values.
+func NewFloat32(name, description string) *Float32 {
+ return &Float32{name: name, description: description}
+}
+
+func (k *Float32) Name() string { return k.name }
+func (k *Float32) Description() string { return k.description }
+
+func (k *Float32) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendFloat(buf, float64(k.From(l)), 'E', -1, 32))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Float32) Of(v float32) label.Label {
+ return label.Of64(k, uint64(math.Float32bits(v)))
+}
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Float32) Get(lm label.Map) float32 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *Float32) From(t label.Label) float32 {
+ return math.Float32frombits(uint32(t.Unpack64()))
+}
+
+// Float64 represents a key for float64 values.
+type Float64 struct {
+ name string
+ description string
+}
+
+// NewFloat64 creates a new Key for float64 values.
+func NewFloat64(name, description string) *Float64 {
+ return &Float64{name: name, description: description}
+}
+
+func (k *Float64) Name() string { return k.name }
+func (k *Float64) Description() string { return k.description }
+
+func (k *Float64) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendFloat(buf, k.From(l), 'E', -1, 64))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Float64) Of(v float64) label.Label {
+ return label.Of64(k, math.Float64bits(v))
+}
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Float64) Get(lm label.Map) float64 {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *Float64) From(t label.Label) float64 {
+ return math.Float64frombits(t.Unpack64())
+}
+
+// String represents a key for string values.
+type String struct {
+ name string
+ description string
+}
+
+// NewString creates a new Key for string values.
+func NewString(name, description string) *String {
+ return &String{name: name, description: description}
+}
+
+func (k *String) Name() string { return k.name }
+func (k *String) Description() string { return k.description }
+
+func (k *String) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendQuote(buf, k.From(l)))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *String) Of(v string) label.Label { return label.OfString(k, v) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *String) Get(lm label.Map) string {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return ""
+}
+
+// From can be used to get a value from a Label.
+func (k *String) From(t label.Label) string { return t.UnpackString() }
+
+// Boolean represents a key for bool values.
+type Boolean struct {
+ name string
+ description string
+}
+
+// NewBoolean creates a new Key for bool values.
+func NewBoolean(name, description string) *Boolean {
+ return &Boolean{name: name, description: description}
+}
+
+func (k *Boolean) Name() string { return k.name }
+func (k *Boolean) Description() string { return k.description }
+
+func (k *Boolean) Format(w io.Writer, buf []byte, l label.Label) {
+ w.Write(strconv.AppendBool(buf, k.From(l)))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Boolean) Of(v bool) label.Label {
+ if v {
+ return label.Of64(k, 1)
+ }
+ return label.Of64(k, 0)
+}
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Boolean) Get(lm label.Map) bool {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return false
+}
+
+// From can be used to get a value from a Label.
+func (k *Boolean) From(t label.Label) bool { return t.Unpack64() > 0 }
+
+// Error represents a key for error values.
+type Error struct {
+ name string
+ description string
+}
+
+// NewError creates a new Key for error values.
+func NewError(name, description string) *Error {
+ return &Error{name: name, description: description}
+}
+
+func (k *Error) Name() string { return k.name }
+func (k *Error) Description() string { return k.description }
+
+func (k *Error) Format(w io.Writer, buf []byte, l label.Label) {
+ io.WriteString(w, k.From(l).Error())
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Error) Of(v error) label.Label { return label.OfValue(k, v) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Error) Get(lm label.Map) error {
+ if t := lm.Find(k); t.Valid() {
+ return k.From(t)
+ }
+ return nil
+}
+
+// From can be used to get a value from a Label.
+func (k *Error) From(t label.Label) error {
+ err, _ := t.UnpackValue().(error)
+ return err
+}
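
Each typed key packs its value into a Label, either directly into the uint64 field (Of64) or as an untyped interface value, and unpacks it again with From/Get. Assuming code inside x/tools (the package is internal) and made-up key names, usage looks roughly like this:

package example

import (
	"fmt"

	"golang.org/x/tools/internal/event/keys"
	"golang.org/x/tools/internal/event/label"
)

// Hypothetical keys, defined once and reused for every label they produce.
var (
	attempts = keys.NewInt("attempts", "number of retries so far")
	target   = keys.NewString("target", "the file being processed")
)

func demo() {
	lm := label.NewMap(attempts.Of(3), target.Of("go.mod"))
	fmt.Println(attempts.Get(lm)) // 3
	fmt.Println(target.Get(lm))   // "go.mod"; missing keys return the zero value
}
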
diff --git a/vendor/golang.org/x/tools/internal/event/keys/standard.go b/vendor/golang.org/x/tools/internal/event/keys/standard.go
new file mode 100644
index 000000000..7e9586659
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/keys/standard.go
@@ -0,0 +1,22 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package keys
+
+var (
+ // Msg is a key used to add message strings to label lists.
+ Msg = NewString("message", "a readable message")
+ // Label is a key used to indicate an event adds labels to the context.
+ Label = NewTag("label", "a label context marker")
+ // Start is used for things like traces that have a name.
+ Start = NewString("start", "span start")
+ // End is a key used to mark the end of a span.
+ End = NewTag("end", "a span end marker")
+ // Detach is a key used to indicate an event detaches from the current span.
+ Detach = NewTag("detach", "a span detach marker")
+ // Err is a key used to add error values to label lists.
+ Err = NewError("error", "an error that occurred")
+ // Metric is a key used to indicate an event records metrics.
+ Metric = NewTag("metric", "a metric event marker")
+)
diff --git a/vendor/golang.org/x/tools/internal/event/label/label.go b/vendor/golang.org/x/tools/internal/event/label/label.go
new file mode 100644
index 000000000..0f526e1f9
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/label/label.go
@@ -0,0 +1,215 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package label
+
+import (
+ "fmt"
+ "io"
+ "reflect"
+ "unsafe"
+)
+
+// Key is used as the identity of a Label.
+// Keys are intended to be compared by pointer only; the name should be unique
+// for communicating with external systems, but that is not required or enforced.
+type Key interface {
+ // Name returns the key name.
+ Name() string
+ // Description returns a string that can be used to describe the value.
+ Description() string
+
+ // Format is used in formatting to append the value of the label to the
+ // supplied buffer.
+ // The formatter may use the supplied buf as a scratch area to avoid
+ // allocations.
+ Format(w io.Writer, buf []byte, l Label)
+}
+
+// Label holds a key and value pair.
+// It is normally used when passing around lists of labels.
+type Label struct {
+ key Key
+ packed uint64
+ untyped interface{}
+}
+
+// Map is the interface to a collection of Labels indexed by key.
+type Map interface {
+ // Find returns the label that matches the supplied key.
+ Find(key Key) Label
+}
+
+// List is the interface to something that provides an iterable
+// list of labels.
+// Iteration should start from 0 and continue until Valid returns false.
+type List interface {
+ // Valid returns true if the index is within range for the list.
+ // It does not imply the label at that index will itself be valid.
+ Valid(index int) bool
+ // Label returns the label at the given index.
+ Label(index int) Label
+}
+
+// list implements List for a list of Labels.
+type list struct {
+ labels []Label
+}
+
+// filter wraps a List, filtering out specific labels.
+type filter struct {
+ keys []Key
+ underlying List
+}
+
+// listMap implements Map for a simple list of labels.
+type listMap struct {
+ labels []Label
+}
+
+// mapChain implements Map for a list of underlying Maps.
+type mapChain struct {
+ maps []Map
+}
+
+// OfValue creates a new label from the key and value.
+// This method is for implementing new key types, label creation should
+// normally be done with the Of method of the key.
+func OfValue(k Key, value interface{}) Label { return Label{key: k, untyped: value} }
+
+// UnpackValue assumes the label was built using OfValue and returns the value
+// that was passed to that constructor.
+// This method is for implementing new key types, for type safety normal
+// access should be done with the From method of the key.
+func (t Label) UnpackValue() interface{} { return t.untyped }
+
+// Of64 creates a new label from a key and a uint64. This is often
+// used for non uint64 values that can be packed into a uint64.
+// This method is for implementing new key types, label creation should
+// normally be done with the Of method of the key.
+func Of64(k Key, v uint64) Label { return Label{key: k, packed: v} }
+
+// Unpack64 assumes the label was built using Of64 and returns the value that
+// was passed to that constructor.
+// This method is for implementing new key types, for type safety normal
+// access should be done with the From method of the key.
+func (t Label) Unpack64() uint64 { return t.packed }
+
+type stringptr unsafe.Pointer
+
+// OfString creates a new label from a key and a string.
+// This method is for implementing new key types, label creation should
+// normally be done with the Of method of the key.
+func OfString(k Key, v string) Label {
+ hdr := (*reflect.StringHeader)(unsafe.Pointer(&v))
+ return Label{
+ key: k,
+ packed: uint64(hdr.Len),
+ untyped: stringptr(hdr.Data),
+ }
+}
+
+// UnpackString assumes the label was built using OfString and returns the
+// value that was passed to that constructor.
+// This method is for implementing new key types, for type safety normal
+// access should be done with the From method of the key.
+func (t Label) UnpackString() string {
+ var v string
+ hdr := (*reflect.StringHeader)(unsafe.Pointer(&v))
+ hdr.Data = uintptr(t.untyped.(stringptr))
+ hdr.Len = int(t.packed)
+ return v
+}
+
+// Valid returns true if the Label is a valid one (it has a key).
+func (t Label) Valid() bool { return t.key != nil }
+
+// Key returns the key of this Label.
+func (t Label) Key() Key { return t.key }
+
+// Format is used for debug printing of labels.
+func (t Label) Format(f fmt.State, r rune) {
+ if !t.Valid() {
+ io.WriteString(f, `nil`)
+ return
+ }
+ io.WriteString(f, t.Key().Name())
+ io.WriteString(f, "=")
+ var buf [128]byte
+ t.Key().Format(f, buf[:0], t)
+}
+
+func (l *list) Valid(index int) bool {
+ return index >= 0 && index < len(l.labels)
+}
+
+func (l *list) Label(index int) Label {
+ return l.labels[index]
+}
+
+func (f *filter) Valid(index int) bool {
+ return f.underlying.Valid(index)
+}
+
+func (f *filter) Label(index int) Label {
+ l := f.underlying.Label(index)
+ for _, f := range f.keys {
+ if l.Key() == f {
+ return Label{}
+ }
+ }
+ return l
+}
+
+func (lm listMap) Find(key Key) Label {
+ for _, l := range lm.labels {
+ if l.Key() == key {
+ return l
+ }
+ }
+ return Label{}
+}
+
+func (c mapChain) Find(key Key) Label {
+ for _, src := range c.maps {
+ l := src.Find(key)
+ if l.Valid() {
+ return l
+ }
+ }
+ return Label{}
+}
+
+var emptyList = &list{}
+
+func NewList(labels ...Label) List {
+ if len(labels) == 0 {
+ return emptyList
+ }
+ return &list{labels: labels}
+}
+
+func Filter(l List, keys ...Key) List {
+ if len(keys) == 0 {
+ return l
+ }
+ return &filter{keys: keys, underlying: l}
+}
+
+func NewMap(labels ...Label) Map {
+ return listMap{labels: labels}
+}
+
+func MergeMaps(srcs ...Map) Map {
+ var nonNil []Map
+ for _, src := range srcs {
+ if src != nil {
+ nonNil = append(nonNil, src)
+ }
+ }
+ if len(nonNil) == 1 {
+ return nonNil[0]
+ }
+ return mapChain{maps: nonNil}
+}
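
Beyond single labels, the package composes collections: NewMap builds a Map from a label list, MergeMaps chains Maps with first-match-wins lookup, and Filter hides chosen keys from a List without copying it. A sketch under the same inside-x/tools assumption, with a hypothetical key:

package example

import (
	"fmt"

	"golang.org/x/tools/internal/event/keys"
	"golang.org/x/tools/internal/event/label"
)

// verb is a hypothetical key used only for this sketch.
var verb = keys.NewString("verb", "the go subcommand being run")

func demo() {
	base := label.NewMap(verb.Of("list"))
	override := label.NewMap(verb.Of("build"))
	// MergeMaps searches its arguments in order, so the first Map that
	// carries the key wins.
	merged := label.MergeMaps(override, base)
	fmt.Println(verb.Get(merged)) // "build"

	// Filter leaves the underlying List in place but reports filtered
	// labels as invalid.
	filtered := label.Filter(label.NewList(verb.Of("vet")), verb)
	fmt.Println(filtered.Label(0).Valid()) // false
}
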
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go
new file mode 100644
index 000000000..9887f7e7a
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go
@@ -0,0 +1,196 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package fastwalk provides a faster version of filepath.Walk for file system
+// scanning tools.
+package fastwalk
+
+import (
+ "errors"
+ "os"
+ "path/filepath"
+ "runtime"
+ "sync"
+)
+
+// ErrTraverseLink is used as a return value from WalkFuncs to indicate that the
+// symlink named in the call may be traversed.
+var ErrTraverseLink = errors.New("fastwalk: traverse symlink, assuming target is a directory")
+
+// ErrSkipFiles is used as a return value from WalkFuncs to indicate that the
+// callback should not be called for any other files in the current directory.
+// Child directories will still be traversed.
+var ErrSkipFiles = errors.New("fastwalk: skip remaining files in directory")
+
+// Walk is a faster implementation of filepath.Walk.
+//
+// filepath.Walk's design necessarily calls os.Lstat on each file,
+// even if the caller needs less info.
+// Many tools need only the type of each file.
+// On some platforms, this information is provided directly by the readdir
+// system call, avoiding the need to stat each file individually.
+// fastwalk_unix.go contains a fork of the syscall routines.
+//
+// See golang.org/issue/16399
+//
+// Walk walks the file tree rooted at root, calling walkFn for
+// each file or directory in the tree, including root.
+//
+// If walkFn returns filepath.SkipDir, the directory is skipped.
+//
+// Unlike filepath.Walk:
+// * file stat calls must be done by the user.
+// The only provided metadata is the file type, which does not include
+// any permission bits.
+// * multiple goroutines stat the filesystem concurrently. The provided
+// walkFn must be safe for concurrent use.
+// * fastWalk can follow symlinks if walkFn returns the ErrTraverseLink
+// sentinel error. It is the walkFn's responsibility to prevent
+// fastWalk from going into symlink cycles.
+func Walk(root string, walkFn func(path string, typ os.FileMode) error) error {
+ // TODO(bradfitz): make numWorkers configurable? We used a
+ // minimum of 4 to give the kernel more info about multiple
+ // things we want, in hopes its I/O scheduling can take
+ // advantage of that. Hopefully most are in cache. Maybe 4 is
+ // even too low of a minimum. Profile more.
+ numWorkers := 4
+ if n := runtime.NumCPU(); n > numWorkers {
+ numWorkers = n
+ }
+
+ // Make sure to wait for all workers to finish, otherwise
+ // walkFn could still be called after returning. This Wait call
+ // runs after close(e.donec) below.
+ var wg sync.WaitGroup
+ defer wg.Wait()
+
+ w := &walker{
+ fn: walkFn,
+ enqueuec: make(chan walkItem, numWorkers), // buffered for performance
+ workc: make(chan walkItem, numWorkers), // buffered for performance
+ donec: make(chan struct{}),
+
+ // buffered for correctness & not leaking goroutines:
+ resc: make(chan error, numWorkers),
+ }
+ defer close(w.donec)
+
+ for i := 0; i < numWorkers; i++ {
+ wg.Add(1)
+ go w.doWork(&wg)
+ }
+ todo := []walkItem{{dir: root}}
+ out := 0
+ for {
+ workc := w.workc
+ var workItem walkItem
+ if len(todo) == 0 {
+ workc = nil
+ } else {
+ workItem = todo[len(todo)-1]
+ }
+ select {
+ case workc <- workItem:
+ todo = todo[:len(todo)-1]
+ out++
+ case it := <-w.enqueuec:
+ todo = append(todo, it)
+ case err := <-w.resc:
+ out--
+ if err != nil {
+ return err
+ }
+ if out == 0 && len(todo) == 0 {
+ // It's safe to quit here, as long as the buffered
+ // enqueue channel isn't also readable, which might
+ // happen if the worker sends both another unit of
+ // work and its result before the other select was
+ // scheduled and both w.resc and w.enqueuec were
+ // readable.
+ select {
+ case it := <-w.enqueuec:
+ todo = append(todo, it)
+ default:
+ return nil
+ }
+ }
+ }
+ }
+}
+
+// doWork reads directories as instructed (via workc) and runs the
+// user's callback function.
+func (w *walker) doWork(wg *sync.WaitGroup) {
+ defer wg.Done()
+ for {
+ select {
+ case <-w.donec:
+ return
+ case it := <-w.workc:
+ select {
+ case <-w.donec:
+ return
+ case w.resc <- w.walk(it.dir, !it.callbackDone):
+ }
+ }
+ }
+}
+
+type walker struct {
+ fn func(path string, typ os.FileMode) error
+
+ donec chan struct{} // closed on fastWalk's return
+ workc chan walkItem // to workers
+ enqueuec chan walkItem // from workers
+ resc chan error // from workers
+}
+
+type walkItem struct {
+ dir string
+ callbackDone bool // callback already called; don't do it again
+}
+
+func (w *walker) enqueue(it walkItem) {
+ select {
+ case w.enqueuec <- it:
+ case <-w.donec:
+ }
+}
+
+func (w *walker) onDirEnt(dirName, baseName string, typ os.FileMode) error {
+ joined := dirName + string(os.PathSeparator) + baseName
+ if typ == os.ModeDir {
+ w.enqueue(walkItem{dir: joined})
+ return nil
+ }
+
+ err := w.fn(joined, typ)
+ if typ == os.ModeSymlink {
+ if err == ErrTraverseLink {
+ // Set callbackDone so we don't call it twice for both the
+ // symlink-as-symlink and the symlink-as-directory later:
+ w.enqueue(walkItem{dir: joined, callbackDone: true})
+ return nil
+ }
+ if err == filepath.SkipDir {
+ // Permit SkipDir on symlinks too.
+ return nil
+ }
+ }
+ return err
+}
+
+func (w *walker) walk(root string, runUserCallback bool) error {
+ if runUserCallback {
+ err := w.fn(root, os.ModeDir)
+ if err == filepath.SkipDir {
+ return nil
+ }
+ if err != nil {
+ return err
+ }
+ }
+
+ return readDir(root, w.onDirEnt)
+}
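
A sketch of how fastwalk.Walk is driven from inside x/tools (the package is internal, and the helper below is made up): the callback only ever sees the entry's type bits, runs concurrently from several goroutines, and steers traversal through the ErrTraverseLink and ErrSkipFiles sentinels.

package example

import (
	"os"
	"strings"
	"sync"

	"golang.org/x/tools/internal/fastwalk"
)

// listGoFiles collects .go files under root. It is a hypothetical helper,
// not part of this change.
func listGoFiles(root string) ([]string, error) {
	var (
		mu    sync.Mutex // walkFn runs concurrently, so guard shared state
		files []string
	)
	err := fastwalk.Walk(root, func(path string, typ os.FileMode) error {
		if typ == os.ModeSymlink {
			if fi, err := os.Stat(path); err == nil && fi.IsDir() {
				// Descend into symlinked directories; a real caller must
				// also guard against symlink cycles.
				return fastwalk.ErrTraverseLink
			}
			return nil
		}
		if typ.IsRegular() && strings.HasSuffix(path, ".go") {
			mu.Lock()
			files = append(files, path)
			mu.Unlock()
		}
		return nil
	})
	return files, err
}
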
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go
new file mode 100644
index 000000000..d58595dbd
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go
@@ -0,0 +1,14 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build freebsd || openbsd || netbsd
+// +build freebsd openbsd netbsd
+
+package fastwalk
+
+import "syscall"
+
+func direntInode(dirent *syscall.Dirent) uint64 {
+ return uint64(dirent.Fileno)
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go
new file mode 100644
index 000000000..ea02b9ebf
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go
@@ -0,0 +1,15 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build (linux || darwin) && !appengine
+// +build linux darwin
+// +build !appengine
+
+package fastwalk
+
+import "syscall"
+
+func direntInode(dirent *syscall.Dirent) uint64 {
+ return uint64(dirent.Ino)
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go
new file mode 100644
index 000000000..d5c9c321e
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go
@@ -0,0 +1,14 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build darwin || freebsd || openbsd || netbsd
+// +build darwin freebsd openbsd netbsd
+
+package fastwalk
+
+import "syscall"
+
+func direntNamlen(dirent *syscall.Dirent) uint64 {
+ return uint64(dirent.Namlen)
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go
new file mode 100644
index 000000000..c82e57df8
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go
@@ -0,0 +1,29 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build linux && !appengine
+// +build linux,!appengine
+
+package fastwalk
+
+import (
+ "bytes"
+ "syscall"
+ "unsafe"
+)
+
+func direntNamlen(dirent *syscall.Dirent) uint64 {
+ const fixedHdr = uint16(unsafe.Offsetof(syscall.Dirent{}.Name))
+ nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0]))
+ const nameBufLen = uint16(len(nameBuf))
+ limit := dirent.Reclen - fixedHdr
+ if limit > nameBufLen {
+ limit = nameBufLen
+ }
+ nameLen := bytes.IndexByte(nameBuf[:limit], 0)
+ if nameLen < 0 {
+ panic("failed to find terminating 0 byte in dirent")
+ }
+ return uint64(nameLen)
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go
new file mode 100644
index 000000000..085d31160
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go
@@ -0,0 +1,38 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build appengine || (!linux && !darwin && !freebsd && !openbsd && !netbsd)
+// +build appengine !linux,!darwin,!freebsd,!openbsd,!netbsd
+
+package fastwalk
+
+import (
+ "io/ioutil"
+ "os"
+)
+
+// readDir calls fn for each directory entry in dirName.
+// It does not descend into directories or follow symlinks.
+// If fn returns a non-nil error, readDir returns with that error
+// immediately.
+func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error {
+ fis, err := ioutil.ReadDir(dirName)
+ if err != nil {
+ return err
+ }
+ skipFiles := false
+ for _, fi := range fis {
+ if fi.Mode().IsRegular() && skipFiles {
+ continue
+ }
+ if err := fn(dirName, fi.Name(), fi.Mode()&os.ModeType); err != nil {
+ if err == ErrSkipFiles {
+ skipFiles = true
+ continue
+ }
+ return err
+ }
+ }
+ return nil
+}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go
new file mode 100644
index 000000000..58bd87841
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go
@@ -0,0 +1,153 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build (linux || darwin || freebsd || openbsd || netbsd) && !appengine
+// +build linux darwin freebsd openbsd netbsd
+// +build !appengine
+
+package fastwalk
+
+import (
+ "fmt"
+ "os"
+ "syscall"
+ "unsafe"
+)
+
+const blockSize = 8 << 10
+
+// unknownFileMode is a sentinel (and bogus) os.FileMode
+// value used to represent a syscall.DT_UNKNOWN Dirent.Type.
+const unknownFileMode os.FileMode = os.ModeNamedPipe | os.ModeSocket | os.ModeDevice
+
+func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error {
+ fd, err := open(dirName, 0, 0)
+ if err != nil {
+ return &os.PathError{Op: "open", Path: dirName, Err: err}
+ }
+ defer syscall.Close(fd)
+
+ // The buffer must be at least a block long.
+ buf := make([]byte, blockSize) // stack-allocated; doesn't escape
+ bufp := 0 // starting read position in buf
+ nbuf := 0 // end valid data in buf
+ skipFiles := false
+ for {
+ if bufp >= nbuf {
+ bufp = 0
+ nbuf, err = readDirent(fd, buf)
+ if err != nil {
+ return os.NewSyscallError("readdirent", err)
+ }
+ if nbuf <= 0 {
+ return nil
+ }
+ }
+ consumed, name, typ := parseDirEnt(buf[bufp:nbuf])
+ bufp += consumed
+ if name == "" || name == "." || name == ".." {
+ continue
+ }
+ // Fallback for filesystems (like old XFS) that don't
+ // support Dirent.Type and have DT_UNKNOWN (0) there
+ // instead.
+ if typ == unknownFileMode {
+ fi, err := os.Lstat(dirName + "/" + name)
+ if err != nil {
+ // It got deleted in the meantime.
+ if os.IsNotExist(err) {
+ continue
+ }
+ return err
+ }
+ typ = fi.Mode() & os.ModeType
+ }
+ if skipFiles && typ.IsRegular() {
+ continue
+ }
+ if err := fn(dirName, name, typ); err != nil {
+ if err == ErrSkipFiles {
+ skipFiles = true
+ continue
+ }
+ return err
+ }
+ }
+}
+
+func parseDirEnt(buf []byte) (consumed int, name string, typ os.FileMode) {
+ // golang.org/issue/37269
+ dirent := &syscall.Dirent{}
+ copy((*[unsafe.Sizeof(syscall.Dirent{})]byte)(unsafe.Pointer(dirent))[:], buf)
+ if v := unsafe.Offsetof(dirent.Reclen) + unsafe.Sizeof(dirent.Reclen); uintptr(len(buf)) < v {
+ panic(fmt.Sprintf("buf size of %d smaller than dirent header size %d", len(buf), v))
+ }
+ if len(buf) < int(dirent.Reclen) {
+ panic(fmt.Sprintf("buf size %d < record length %d", len(buf), dirent.Reclen))
+ }
+ consumed = int(dirent.Reclen)
+ if direntInode(dirent) == 0 { // File absent in directory.
+ return
+ }
+ switch dirent.Type {
+ case syscall.DT_REG:
+ typ = 0
+ case syscall.DT_DIR:
+ typ = os.ModeDir
+ case syscall.DT_LNK:
+ typ = os.ModeSymlink
+ case syscall.DT_BLK:
+ typ = os.ModeDevice
+ case syscall.DT_FIFO:
+ typ = os.ModeNamedPipe
+ case syscall.DT_SOCK:
+ typ = os.ModeSocket
+ case syscall.DT_UNKNOWN:
+ typ = unknownFileMode
+ default:
+ // Skip weird things.
+ // It's probably a DT_WHT (http://lwn.net/Articles/325369/)
+ // or something. Revisit if/when this package is moved outside
+ // of goimports. goimports only cares about regular files,
+ // symlinks, and directories.
+ return
+ }
+
+ nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0]))
+ nameLen := direntNamlen(dirent)
+
+ // Special cases for common things:
+ if nameLen == 1 && nameBuf[0] == '.' {
+ name = "."
+ } else if nameLen == 2 && nameBuf[0] == '.' && nameBuf[1] == '.' {
+ name = ".."
+ } else {
+ name = string(nameBuf[:nameLen])
+ }
+ return
+}
+
+// According to https://golang.org/doc/go1.14#runtime
+// A consequence of the implementation of preemption is that on Unix systems, including Linux and macOS
+// systems, programs built with Go 1.14 will receive more signals than programs built with earlier releases.
+//
+// This can cause syscall.Open and syscall.ReadDirent to fail with EINTR errors.
+// We need to retry in this case.
+func open(path string, mode int, perm uint32) (fd int, err error) {
+ for {
+ fd, err := syscall.Open(path, mode, perm)
+ if err != syscall.EINTR {
+ return fd, err
+ }
+ }
+}
+
+func readDirent(fd int, buf []byte) (n int, err error) {
+ for {
+ nbuf, err := syscall.ReadDirent(fd, buf)
+ if err != syscall.EINTR {
+ return nbuf, err
+ }
+ }
+}
diff --git a/vendor/golang.org/x/tools/internal/gocommand/invoke.go b/vendor/golang.org/x/tools/internal/gocommand/invoke.go
new file mode 100644
index 000000000..f75336834
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gocommand/invoke.go
@@ -0,0 +1,281 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package gocommand is a helper for calling the go command.
+package gocommand
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "regexp"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ exec "golang.org/x/sys/execabs"
+
+ "golang.org/x/tools/internal/event"
+)
+
+// A Runner will run go command invocations and serialize
+// them if it sees a concurrency error.
+type Runner struct {
+ // once guards the runner initialization.
+ once sync.Once
+
+ // inFlight tracks available workers.
+ inFlight chan struct{}
+
+ // serialized guards the ability to run a go command serially,
+ // to avoid deadlocks when claiming workers.
+ serialized chan struct{}
+}
+
+const maxInFlight = 10
+
+func (runner *Runner) initialize() {
+ runner.once.Do(func() {
+ runner.inFlight = make(chan struct{}, maxInFlight)
+ runner.serialized = make(chan struct{}, 1)
+ })
+}
+
+// 1.13: go: updates to go.mod needed, but contents have changed
+// 1.14: go: updating go.mod: existing contents have changed since last read
+var modConcurrencyError = regexp.MustCompile(`go:.*go.mod.*contents have changed`)
+
+// Run is a convenience wrapper around RunRaw.
+// It returns only stdout and a "friendly" error.
+func (runner *Runner) Run(ctx context.Context, inv Invocation) (*bytes.Buffer, error) {
+ stdout, _, friendly, _ := runner.RunRaw(ctx, inv)
+ return stdout, friendly
+}
+
+// RunPiped runs the invocation serially, always waiting for any concurrent
+// invocations to complete first.
+func (runner *Runner) RunPiped(ctx context.Context, inv Invocation, stdout, stderr io.Writer) error {
+ _, err := runner.runPiped(ctx, inv, stdout, stderr)
+ return err
+}
+
+// RunRaw runs the invocation, serializing requests only if they fight over
+// go.mod changes.
+func (runner *Runner) RunRaw(ctx context.Context, inv Invocation) (*bytes.Buffer, *bytes.Buffer, error, error) {
+ // Make sure the runner is always initialized.
+ runner.initialize()
+
+ // First, try to run the go command concurrently.
+ stdout, stderr, friendlyErr, err := runner.runConcurrent(ctx, inv)
+
+ // If we encounter a load concurrency error, we need to retry serially.
+ if friendlyErr == nil || !modConcurrencyError.MatchString(friendlyErr.Error()) {
+ return stdout, stderr, friendlyErr, err
+ }
+ event.Error(ctx, "Load concurrency error, will retry serially", err)
+
+ // Run serially by calling runPiped.
+ stdout.Reset()
+ stderr.Reset()
+ friendlyErr, err = runner.runPiped(ctx, inv, stdout, stderr)
+ return stdout, stderr, friendlyErr, err
+}
+
+func (runner *Runner) runConcurrent(ctx context.Context, inv Invocation) (*bytes.Buffer, *bytes.Buffer, error, error) {
+ // Wait for 1 worker to become available.
+ select {
+ case <-ctx.Done():
+ return nil, nil, nil, ctx.Err()
+ case runner.inFlight <- struct{}{}:
+ defer func() { <-runner.inFlight }()
+ }
+
+ stdout, stderr := &bytes.Buffer{}, &bytes.Buffer{}
+ friendlyErr, err := inv.runWithFriendlyError(ctx, stdout, stderr)
+ return stdout, stderr, friendlyErr, err
+}
+
+func (runner *Runner) runPiped(ctx context.Context, inv Invocation, stdout, stderr io.Writer) (error, error) {
+ // Make sure the runner is always initialized.
+ runner.initialize()
+
+ // Acquire the serialization lock. This avoids deadlocks between two
+ // runPiped commands.
+ select {
+ case <-ctx.Done():
+ return nil, ctx.Err()
+ case runner.serialized <- struct{}{}:
+ defer func() { <-runner.serialized }()
+ }
+
+ // Wait for all in-progress go commands to return before proceeding,
+ // to avoid load concurrency errors.
+ for i := 0; i < maxInFlight; i++ {
+ select {
+ case <-ctx.Done():
+ return nil, ctx.Err()
+ case runner.inFlight <- struct{}{}:
+ // Make sure we always "return" any workers we took.
+ defer func() { <-runner.inFlight }()
+ }
+ }
+
+ return inv.runWithFriendlyError(ctx, stdout, stderr)
+}
+
+// An Invocation represents a call to the go command.
+type Invocation struct {
+ Verb string
+ Args []string
+ BuildFlags []string
+
+ // If ModFlag is set, the go command is invoked with -mod=ModFlag.
+ ModFlag string
+
+ // If ModFile is set, the go command is invoked with -modfile=ModFile.
+ ModFile string
+
+ // If Overlay is set, the go command is invoked with -overlay=Overlay.
+ Overlay string
+
+ // If CleanEnv is set, the invocation will run only with the environment
+ // in Env, not starting with os.Environ.
+ CleanEnv bool
+ Env []string
+ WorkingDir string
+ Logf func(format string, args ...interface{})
+}
+
+func (i *Invocation) runWithFriendlyError(ctx context.Context, stdout, stderr io.Writer) (friendlyError error, rawError error) {
+ rawError = i.run(ctx, stdout, stderr)
+ if rawError != nil {
+ friendlyError = rawError
+ // Check for 'go' executable not being found.
+ if ee, ok := rawError.(*exec.Error); ok && ee.Err == exec.ErrNotFound {
+ friendlyError = fmt.Errorf("go command required, not found: %v", ee)
+ }
+ if ctx.Err() != nil {
+ friendlyError = ctx.Err()
+ }
+ friendlyError = fmt.Errorf("err: %v: stderr: %s", friendlyError, stderr)
+ }
+ return
+}
+
+func (i *Invocation) run(ctx context.Context, stdout, stderr io.Writer) error {
+ log := i.Logf
+ if log == nil {
+ log = func(string, ...interface{}) {}
+ }
+
+ goArgs := []string{i.Verb}
+
+ appendModFile := func() {
+ if i.ModFile != "" {
+ goArgs = append(goArgs, "-modfile="+i.ModFile)
+ }
+ }
+ appendModFlag := func() {
+ if i.ModFlag != "" {
+ goArgs = append(goArgs, "-mod="+i.ModFlag)
+ }
+ }
+ appendOverlayFlag := func() {
+ if i.Overlay != "" {
+ goArgs = append(goArgs, "-overlay="+i.Overlay)
+ }
+ }
+
+ switch i.Verb {
+ case "env", "version":
+ goArgs = append(goArgs, i.Args...)
+ case "mod":
+ // mod needs the sub-verb before flags.
+ goArgs = append(goArgs, i.Args[0])
+ appendModFile()
+ goArgs = append(goArgs, i.Args[1:]...)
+ case "get":
+ goArgs = append(goArgs, i.BuildFlags...)
+ appendModFile()
+ goArgs = append(goArgs, i.Args...)
+
+ default: // notably list and build.
+ goArgs = append(goArgs, i.BuildFlags...)
+ appendModFile()
+ appendModFlag()
+ appendOverlayFlag()
+ goArgs = append(goArgs, i.Args...)
+ }
+ cmd := exec.Command("go", goArgs...)
+ cmd.Stdout = stdout
+ cmd.Stderr = stderr
+ // On darwin the cwd gets resolved to the real path, which breaks anything that
+ // expects the working directory to keep the original path, including the
+ // go command when dealing with modules.
+ // The Go stdlib has a special feature where if the cwd and the PWD are the
+ // same node then it trusts the PWD, so by setting it in the env for the child
+ // process we fix up all the paths returned by the go command.
+ if !i.CleanEnv {
+ cmd.Env = os.Environ()
+ }
+ cmd.Env = append(cmd.Env, i.Env...)
+ if i.WorkingDir != "" {
+ cmd.Env = append(cmd.Env, "PWD="+i.WorkingDir)
+ cmd.Dir = i.WorkingDir
+ }
+ defer func(start time.Time) { log("%s for %v", time.Since(start), cmdDebugStr(cmd)) }(time.Now())
+
+ return runCmdContext(ctx, cmd)
+}
+
+// runCmdContext is like exec.CommandContext except it sends os.Interrupt
+// before os.Kill.
+func runCmdContext(ctx context.Context, cmd *exec.Cmd) error {
+ if err := cmd.Start(); err != nil {
+ return err
+ }
+ resChan := make(chan error, 1)
+ go func() {
+ resChan <- cmd.Wait()
+ }()
+
+ select {
+ case err := <-resChan:
+ return err
+ case <-ctx.Done():
+ }
+ // Cancelled. Interrupt and see if it ends voluntarily.
+ cmd.Process.Signal(os.Interrupt)
+ select {
+ case err := <-resChan:
+ return err
+ case <-time.After(time.Second):
+ }
+ // Didn't shut down in response to interrupt. Kill it hard.
+ cmd.Process.Kill()
+ return <-resChan
+}
+
+func cmdDebugStr(cmd *exec.Cmd) string {
+ env := make(map[string]string)
+ for _, kv := range cmd.Env {
+ split := strings.SplitN(kv, "=", 2)
+ k, v := split[0], split[1]
+ env[k] = v
+ }
+
+ var args []string
+ for _, arg := range cmd.Args {
+ quoted := strconv.Quote(arg)
+ if quoted[1:len(quoted)-1] != arg || strings.Contains(arg, " ") {
+ args = append(args, quoted)
+ } else {
+ args = append(args, arg)
+ }
+ }
+ return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v GOPROXY=%v PWD=%v %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["GOPROXY"], env["PWD"], strings.Join(args, " "))
+}
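
For orientation, a minimal sketch of driving the Runner and Invocation types defined above to run `go env GOMOD`; note the package is internal to golang.org/x/tools, so the import path only resolves from inside that module:

package main

import (
	"context"
	"fmt"

	"golang.org/x/tools/internal/gocommand"
)

func main() {
	// The zero-value Runner is usable; it initializes itself on first use.
	runner := &gocommand.Runner{}
	inv := gocommand.Invocation{
		Verb: "env",
		Args: []string{"GOMOD"},
	}
	// Run returns only stdout and a "friendly" error.
	stdout, err := runner.Run(context.Background(), inv)
	if err != nil {
		fmt.Println("go env failed:", err)
		return
	}
	fmt.Print("GOMOD: ", stdout.String())
}
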
diff --git a/vendor/golang.org/x/tools/internal/gocommand/vendor.go b/vendor/golang.org/x/tools/internal/gocommand/vendor.go
new file mode 100644
index 000000000..2d3d408c0
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gocommand/vendor.go
@@ -0,0 +1,109 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gocommand
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strings"
+ "time"
+
+ "golang.org/x/mod/semver"
+)
+
+// ModuleJSON holds information about a module.
+type ModuleJSON struct {
+ Path string // module path
+ Version string // module version
+ Versions []string // available module versions (with -versions)
+ Replace *ModuleJSON // replaced by this module
+ Time *time.Time // time version was created
+ Update *ModuleJSON // available update, if any (with -u)
+ Main bool // is this the main module?
+ Indirect bool // is this module only an indirect dependency of main module?
+ Dir string // directory holding files for this module, if any
+ GoMod string // path to go.mod file used when loading this module, if any
+ GoVersion string // go version used in module
+}
+
+var modFlagRegexp = regexp.MustCompile(`-mod[ =](\w+)`)
+
+// VendorEnabled reports whether vendoring is enabled. It takes a *Runner to execute Go commands
+// with the supplied context.Context and Invocation. The Invocation can contain pre-defined fields,
+// of which only Verb and Args are modified to run the appropriate Go command.
+// Inspired by setDefaultBuildMod in modload/init.go
+func VendorEnabled(ctx context.Context, inv Invocation, r *Runner) (bool, *ModuleJSON, error) {
+ mainMod, go114, err := getMainModuleAnd114(ctx, inv, r)
+ if err != nil {
+ return false, nil, err
+ }
+
+ // We check the GOFLAGS to see if there is anything overridden or not.
+ inv.Verb = "env"
+ inv.Args = []string{"GOFLAGS"}
+ stdout, err := r.Run(ctx, inv)
+ if err != nil {
+ return false, nil, err
+ }
+ goflags := string(bytes.TrimSpace(stdout.Bytes()))
+ matches := modFlagRegexp.FindStringSubmatch(goflags)
+ var modFlag string
+ if len(matches) != 0 {
+ modFlag = matches[1]
+ }
+ // Don't override an explicit '-mod=' argument.
+ if modFlag == "vendor" {
+ return true, mainMod, nil
+ } else if modFlag != "" {
+ return false, nil, nil
+ }
+ if mainMod == nil || !go114 {
+ return false, nil, nil
+ }
+ // Check 1.14's automatic vendor mode.
+ if fi, err := os.Stat(filepath.Join(mainMod.Dir, "vendor")); err == nil && fi.IsDir() {
+ if mainMod.GoVersion != "" && semver.Compare("v"+mainMod.GoVersion, "v1.14") >= 0 {
+ // The Go version is at least 1.14, and a vendor directory exists.
+ // Set -mod=vendor by default.
+ return true, mainMod, nil
+ }
+ }
+ return false, nil, nil
+}
+
+// getMainModuleAnd114 gets one of the main modules' information and whether the
+// go command in use is 1.14+. This is the information needed to figure out
+// if vendoring should be enabled.
+func getMainModuleAnd114(ctx context.Context, inv Invocation, r *Runner) (*ModuleJSON, bool, error) {
+ const format = `{{.Path}}
+{{.Dir}}
+{{.GoMod}}
+{{.GoVersion}}
+{{range context.ReleaseTags}}{{if eq . "go1.14"}}{{.}}{{end}}{{end}}
+`
+ inv.Verb = "list"
+ inv.Args = []string{"-m", "-f", format}
+ stdout, err := r.Run(ctx, inv)
+ if err != nil {
+ return nil, false, err
+ }
+
+ lines := strings.Split(stdout.String(), "\n")
+ if len(lines) < 5 {
+ return nil, false, fmt.Errorf("unexpected stdout: %q", stdout.String())
+ }
+ mod := &ModuleJSON{
+ Path: lines[0],
+ Dir: lines[1],
+ GoMod: lines[2],
+ GoVersion: lines[3],
+ Main: true,
+ }
+ return mod, lines[4] == "go1.14", nil
+}
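
A small standalone illustration of how modFlagRegexp picks an explicit -mod flag out of GOFLAGS; the GOFLAGS values are made up for the example:

package main

import (
	"fmt"
	"regexp"
)

var modFlagRegexp = regexp.MustCompile(`-mod[ =](\w+)`)

func main() {
	for _, goflags := range []string{
		"-mod=vendor -trimpath", // explicit -mod flag set in GOFLAGS
		"-count=1",              // no -mod flag
	} {
		if m := modFlagRegexp.FindStringSubmatch(goflags); len(m) != 0 {
			fmt.Printf("%q -> -mod=%s\n", goflags, m[1])
		} else {
			fmt.Printf("%q -> no explicit -mod flag\n", goflags)
		}
	}
}
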
diff --git a/vendor/golang.org/x/tools/internal/gocommand/version.go b/vendor/golang.org/x/tools/internal/gocommand/version.go
new file mode 100644
index 000000000..713043680
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gocommand/version.go
@@ -0,0 +1,51 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gocommand
+
+import (
+ "context"
+ "fmt"
+ "strings"
+)
+
+// GoVersion checks the go version by running "go list" with modules off.
+// It returns the X in Go 1.X.
+func GoVersion(ctx context.Context, inv Invocation, r *Runner) (int, error) {
+ inv.Verb = "list"
+ inv.Args = []string{"-e", "-f", `{{context.ReleaseTags}}`, `--`, `unsafe`}
+ inv.Env = append(append([]string{}, inv.Env...), "GO111MODULE=off")
+ // Unset any unneeded flags, and remove them from BuildFlags, if they're
+ // present.
+ inv.ModFile = ""
+ inv.ModFlag = ""
+ var buildFlags []string
+ for _, flag := range inv.BuildFlags {
+ // Flags can be prefixed by one or two dashes.
+ f := strings.TrimPrefix(strings.TrimPrefix(flag, "-"), "-")
+ if strings.HasPrefix(f, "mod=") || strings.HasPrefix(f, "modfile=") {
+ continue
+ }
+ buildFlags = append(buildFlags, flag)
+ }
+ inv.BuildFlags = buildFlags
+ stdoutBytes, err := r.Run(ctx, inv)
+ if err != nil {
+ return 0, err
+ }
+ stdout := stdoutBytes.String()
+ if len(stdout) < 3 {
+ return 0, fmt.Errorf("bad ReleaseTags output: %q", stdout)
+ }
+ // Split up "[go1.1 go1.15]"
+ tags := strings.Fields(stdout[1 : len(stdout)-2])
+ for i := len(tags) - 1; i >= 0; i-- {
+ var version int
+ if _, err := fmt.Sscanf(tags[i], "go1.%d", &version); err != nil {
+ continue
+ }
+ return version, nil
+ }
+ return 0, fmt.Errorf("no parseable ReleaseTags in %v", tags)
+}
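
A standalone sketch of the ReleaseTags parsing that GoVersion performs above; the input string is a made-up example of what `go list -e -f '{{context.ReleaseTags}}' -- unsafe` prints:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Example output of `go list -e -f '{{context.ReleaseTags}}' -- unsafe`.
	stdout := "[go1.1 go1.2 go1.13 go1.14 go1.15]\n"
	// Strip the leading "[" and trailing "]\n", then walk the tags from
	// newest to oldest, as GoVersion does.
	tags := strings.Fields(stdout[1 : len(stdout)-2])
	for i := len(tags) - 1; i >= 0; i-- {
		var version int
		if _, err := fmt.Sscanf(tags[i], "go1.%d", &version); err != nil {
			continue
		}
		fmt.Println("go minor version:", version) // prints 15
		return
	}
	fmt.Println("no parseable ReleaseTags")
}
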
diff --git a/vendor/golang.org/x/tools/internal/gopathwalk/walk.go b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go
new file mode 100644
index 000000000..925ff5356
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go
@@ -0,0 +1,264 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package gopathwalk is like filepath.Walk but specialized for finding Go
+// packages, particularly in $GOPATH and $GOROOT.
+package gopathwalk
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "io/ioutil"
+ "log"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "golang.org/x/tools/internal/fastwalk"
+)
+
+// Options controls the behavior of a Walk call.
+type Options struct {
+ // If Logf is non-nil, debug logging is enabled through this function.
+ Logf func(format string, args ...interface{})
+ // Search module caches. Also disables legacy goimports ignore rules.
+ ModulesEnabled bool
+}
+
+// RootType indicates the type of a Root.
+type RootType int
+
+const (
+ RootUnknown RootType = iota
+ RootGOROOT
+ RootGOPATH
+ RootCurrentModule
+ RootModuleCache
+ RootOther
+)
+
+// A Root is a starting point for a Walk.
+type Root struct {
+ Path string
+ Type RootType
+}
+
+// Walk walks Go source directories ($GOROOT, $GOPATH, etc) to find packages.
+// For each package found, add will be called (concurrently) with the absolute
+// paths of the containing source directory and the package directory.
+// add will be called concurrently.
+func Walk(roots []Root, add func(root Root, dir string), opts Options) {
+ WalkSkip(roots, add, func(Root, string) bool { return false }, opts)
+}
+
+// WalkSkip walks Go source directories ($GOROOT, $GOPATH, etc) to find packages.
+// For each package found, add will be called (concurrently) with the absolute
+// paths of the containing source directory and the package directory.
+// For each directory that will be scanned, skip will be called (concurrently)
+// with the absolute paths of the containing source directory and the directory.
+// If skip returns false on a directory it will be processed.
+// add will be called concurrently.
+// skip will be called concurrently.
+func WalkSkip(roots []Root, add func(root Root, dir string), skip func(root Root, dir string) bool, opts Options) {
+ for _, root := range roots {
+ walkDir(root, add, skip, opts)
+ }
+}
+
+// walkDir creates a walker and starts fastwalk with this walker.
+func walkDir(root Root, add func(Root, string), skip func(root Root, dir string) bool, opts Options) {
+ if _, err := os.Stat(root.Path); os.IsNotExist(err) {
+ if opts.Logf != nil {
+ opts.Logf("skipping nonexistent directory: %v", root.Path)
+ }
+ return
+ }
+ start := time.Now()
+ if opts.Logf != nil {
+ opts.Logf("gopathwalk: scanning %s", root.Path)
+ }
+ w := &walker{
+ root: root,
+ add: add,
+ skip: skip,
+ opts: opts,
+ }
+ w.init()
+ if err := fastwalk.Walk(root.Path, w.walk); err != nil {
+ log.Printf("gopathwalk: scanning directory %v: %v", root.Path, err)
+ }
+
+ if opts.Logf != nil {
+ opts.Logf("gopathwalk: scanned %s in %v", root.Path, time.Since(start))
+ }
+}
+
+// walker is the callback for fastwalk.Walk.
+type walker struct {
+ root Root // The source directory to scan.
+ add func(Root, string) // The callback that will be invoked for every possible Go package dir.
+ skip func(Root, string) bool // The callback that will be invoked for every dir. dir is skipped if it returns true.
+ opts Options // Options passed to Walk by the user.
+
+ ignoredDirs []os.FileInfo // The ignored directories, loaded from .goimportsignore files.
+}
+
+// init initializes the walker based on its Options
+func (w *walker) init() {
+ var ignoredPaths []string
+ if w.root.Type == RootModuleCache {
+ ignoredPaths = []string{"cache"}
+ }
+ if !w.opts.ModulesEnabled && w.root.Type == RootGOPATH {
+ ignoredPaths = w.getIgnoredDirs(w.root.Path)
+ ignoredPaths = append(ignoredPaths, "v", "mod")
+ }
+
+ for _, p := range ignoredPaths {
+ full := filepath.Join(w.root.Path, p)
+ if fi, err := os.Stat(full); err == nil {
+ w.ignoredDirs = append(w.ignoredDirs, fi)
+ if w.opts.Logf != nil {
+ w.opts.Logf("Directory added to ignore list: %s", full)
+ }
+ } else if w.opts.Logf != nil {
+ w.opts.Logf("Error statting ignored directory: %v", err)
+ }
+ }
+}
+
+// getIgnoredDirs reads an optional config file at <path>/.goimportsignore
+// of relative directories to ignore when scanning for go files.
+// The provided path is one of the $GOPATH entries with "src" appended.
+func (w *walker) getIgnoredDirs(path string) []string {
+ file := filepath.Join(path, ".goimportsignore")
+ slurp, err := ioutil.ReadFile(file)
+ if w.opts.Logf != nil {
+ if err != nil {
+ w.opts.Logf("%v", err)
+ } else {
+ w.opts.Logf("Read %s", file)
+ }
+ }
+ if err != nil {
+ return nil
+ }
+
+ var ignoredDirs []string
+ bs := bufio.NewScanner(bytes.NewReader(slurp))
+ for bs.Scan() {
+ line := strings.TrimSpace(bs.Text())
+ if line == "" || strings.HasPrefix(line, "#") {
+ continue
+ }
+ ignoredDirs = append(ignoredDirs, line)
+ }
+ return ignoredDirs
+}
+
+// shouldSkipDir reports whether the directory should be skipped.
+func (w *walker) shouldSkipDir(fi os.FileInfo, dir string) bool {
+ for _, ignoredDir := range w.ignoredDirs {
+ if os.SameFile(fi, ignoredDir) {
+ return true
+ }
+ }
+ if w.skip != nil {
+ // Check with the user specified callback.
+ return w.skip(w.root, dir)
+ }
+ return false
+}
+
+// walk walks through the given path.
+func (w *walker) walk(path string, typ os.FileMode) error {
+ dir := filepath.Dir(path)
+ if typ.IsRegular() {
+ if dir == w.root.Path && (w.root.Type == RootGOROOT || w.root.Type == RootGOPATH) {
+ // Doesn't make sense to have regular files
+ // directly in your $GOPATH/src or $GOROOT/src.
+ return fastwalk.ErrSkipFiles
+ }
+ if !strings.HasSuffix(path, ".go") {
+ return nil
+ }
+
+ w.add(w.root, dir)
+ return fastwalk.ErrSkipFiles
+ }
+ if typ == os.ModeDir {
+ base := filepath.Base(path)
+ if base == "" || base[0] == '.' || base[0] == '_' ||
+ base == "testdata" ||
+ (w.root.Type == RootGOROOT && w.opts.ModulesEnabled && base == "vendor") ||
+ (!w.opts.ModulesEnabled && base == "node_modules") {
+ return filepath.SkipDir
+ }
+ fi, err := os.Lstat(path)
+ if err == nil && w.shouldSkipDir(fi, path) {
+ return filepath.SkipDir
+ }
+ return nil
+ }
+ if typ == os.ModeSymlink {
+ base := filepath.Base(path)
+ if strings.HasPrefix(base, ".#") {
+ // Emacs noise.
+ return nil
+ }
+ fi, err := os.Lstat(path)
+ if err != nil {
+ // Just ignore it.
+ return nil
+ }
+ if w.shouldTraverse(dir, fi) {
+ return fastwalk.ErrTraverseLink
+ }
+ }
+ return nil
+}
+
+// shouldTraverse reports whether the symlink fi, found in dir,
+// should be followed. It makes sure symlinks were never visited
+// before to avoid symlink loops.
+func (w *walker) shouldTraverse(dir string, fi os.FileInfo) bool {
+ path := filepath.Join(dir, fi.Name())
+ target, err := filepath.EvalSymlinks(path)
+ if err != nil {
+ return false
+ }
+ ts, err := os.Stat(target)
+ if err != nil {
+ fmt.Fprintln(os.Stderr, err)
+ return false
+ }
+ if !ts.IsDir() {
+ return false
+ }
+ if w.shouldSkipDir(ts, dir) {
+ return false
+ }
+ // Check for symlink loops by statting each directory component
+ // and seeing if any are the same file as ts.
+ for {
+ parent := filepath.Dir(path)
+ if parent == path {
+ // Made it to the root without seeing a cycle.
+ // Use this symlink.
+ return true
+ }
+ parentInfo, err := os.Stat(parent)
+ if err != nil {
+ return false
+ }
+ if os.SameFile(ts, parentInfo) {
+ // Cycle. Don't traverse.
+ return false
+ }
+ path = parent
+ }
+
+}
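
A minimal sketch of calling the Walk API defined above to enumerate candidate package directories under $GOPATH/src; as with gocommand, the package is internal to golang.org/x/tools, so the import path is illustrative only:

package main

import (
	"fmt"
	"go/build"
	"path/filepath"

	"golang.org/x/tools/internal/gopathwalk"
)

func main() {
	root := gopathwalk.Root{
		Path: filepath.Join(build.Default.GOPATH, "src"),
		Type: gopathwalk.RootGOPATH,
	}
	// add is invoked (possibly concurrently) for every candidate package dir.
	add := func(root gopathwalk.Root, dir string) {
		fmt.Println("candidate package dir:", dir)
	}
	gopathwalk.Walk([]gopathwalk.Root{root}, add, gopathwalk.Options{
		ModulesEnabled: false, // legacy GOPATH mode, honors .goimportsignore
	})
}
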
diff --git a/vendor/golang.org/x/tools/internal/imports/fix.go b/vendor/golang.org/x/tools/internal/imports/fix.go
new file mode 100644
index 000000000..d859617b7
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/imports/fix.go
@@ -0,0 +1,1730 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package imports
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "io/ioutil"
+ "os"
+ "path"
+ "path/filepath"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "unicode"
+ "unicode/utf8"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/internal/gocommand"
+ "golang.org/x/tools/internal/gopathwalk"
+)
+
+// importToGroup is a list of functions which map from an import path to
+// a group number.
+var importToGroup = []func(localPrefix, importPath string) (num int, ok bool){
+ func(localPrefix, importPath string) (num int, ok bool) {
+ if localPrefix == "" {
+ return
+ }
+ for _, p := range strings.Split(localPrefix, ",") {
+ if strings.HasPrefix(importPath, p) || strings.TrimSuffix(p, "/") == importPath {
+ return 3, true
+ }
+ }
+ return
+ },
+ func(_, importPath string) (num int, ok bool) {
+ if strings.HasPrefix(importPath, "appengine") {
+ return 2, true
+ }
+ return
+ },
+ func(_, importPath string) (num int, ok bool) {
+ firstComponent := strings.Split(importPath, "/")[0]
+ if strings.Contains(firstComponent, ".") {
+ return 1, true
+ }
+ return
+ },
+}
+
+func importGroup(localPrefix, importPath string) int {
+ for _, fn := range importToGroup {
+ if n, ok := fn(localPrefix, importPath); ok {
+ return n
+ }
+ }
+ return 0
+}
+
+type ImportFixType int
+
+const (
+ AddImport ImportFixType = iota
+ DeleteImport
+ SetImportName
+)
+
+type ImportFix struct {
+ // StmtInfo represents the import statement this fix will add, remove, or change.
+ StmtInfo ImportInfo
+ // IdentName is the identifier that this fix will add or remove.
+ IdentName string
+ // FixType is the type of fix this is (AddImport, DeleteImport, SetImportName).
+ FixType ImportFixType
+ Relevance float64 // see pkg
+}
+
+// An ImportInfo represents a single import statement.
+type ImportInfo struct {
+ ImportPath string // import path, e.g. "crypto/rand".
+ Name string // import name, e.g. "crand", or "" if none.
+}
+
+// A packageInfo represents what's known about a package.
+type packageInfo struct {
+ name string // real package name, if known.
+ exports map[string]bool // known exports.
+}
+
+// parseOtherFiles parses all the Go files in srcDir except filename, including
+// test files if filename looks like a test.
+func parseOtherFiles(fset *token.FileSet, srcDir, filename string) []*ast.File {
+ // This could use go/packages but it doesn't buy much, and it fails
+ // with https://golang.org/issue/26296 in LoadFiles mode in some cases.
+ considerTests := strings.HasSuffix(filename, "_test.go")
+
+ fileBase := filepath.Base(filename)
+ packageFileInfos, err := ioutil.ReadDir(srcDir)
+ if err != nil {
+ return nil
+ }
+
+ var files []*ast.File
+ for _, fi := range packageFileInfos {
+ if fi.Name() == fileBase || !strings.HasSuffix(fi.Name(), ".go") {
+ continue
+ }
+ if !considerTests && strings.HasSuffix(fi.Name(), "_test.go") {
+ continue
+ }
+
+ f, err := parser.ParseFile(fset, filepath.Join(srcDir, fi.Name()), nil, 0)
+ if err != nil {
+ continue
+ }
+
+ files = append(files, f)
+ }
+
+ return files
+}
+
+// addGlobals puts the names of package vars into the provided map.
+func addGlobals(f *ast.File, globals map[string]bool) {
+ for _, decl := range f.Decls {
+ genDecl, ok := decl.(*ast.GenDecl)
+ if !ok {
+ continue
+ }
+
+ for _, spec := range genDecl.Specs {
+ valueSpec, ok := spec.(*ast.ValueSpec)
+ if !ok {
+ continue
+ }
+ globals[valueSpec.Names[0].Name] = true
+ }
+ }
+}
+
+// collectReferences builds a map of selector expressions, from
+// left hand side (X) to a set of right hand sides (Sel).
+func collectReferences(f *ast.File) references {
+ refs := references{}
+
+ var visitor visitFn
+ visitor = func(node ast.Node) ast.Visitor {
+ if node == nil {
+ return visitor
+ }
+ switch v := node.(type) {
+ case *ast.SelectorExpr:
+ xident, ok := v.X.(*ast.Ident)
+ if !ok {
+ break
+ }
+ if xident.Obj != nil {
+ // If the parser can resolve it, it's not a package ref.
+ break
+ }
+ if !ast.IsExported(v.Sel.Name) {
+ // Whatever this is, it's not exported from a package.
+ break
+ }
+ pkgName := xident.Name
+ r := refs[pkgName]
+ if r == nil {
+ r = make(map[string]bool)
+ refs[pkgName] = r
+ }
+ r[v.Sel.Name] = true
+ }
+ return visitor
+ }
+ ast.Walk(visitor, f)
+ return refs
+}
+
+// collectImports returns all the imports in f.
+// Unnamed imports (., _) and "C" are ignored.
+func collectImports(f *ast.File) []*ImportInfo {
+ var imports []*ImportInfo
+ for _, imp := range f.Imports {
+ var name string
+ if imp.Name != nil {
+ name = imp.Name.Name
+ }
+ if imp.Path.Value == `"C"` || name == "_" || name == "." {
+ continue
+ }
+ path := strings.Trim(imp.Path.Value, `"`)
+ imports = append(imports, &ImportInfo{
+ Name: name,
+ ImportPath: path,
+ })
+ }
+ return imports
+}
+
+// findMissingImport searches pass's candidates for an import that provides
+// pkg, containing all of syms.
+func (p *pass) findMissingImport(pkg string, syms map[string]bool) *ImportInfo {
+ for _, candidate := range p.candidates {
+ pkgInfo, ok := p.knownPackages[candidate.ImportPath]
+ if !ok {
+ continue
+ }
+ if p.importIdentifier(candidate) != pkg {
+ continue
+ }
+
+ allFound := true
+ for right := range syms {
+ if !pkgInfo.exports[right] {
+ allFound = false
+ break
+ }
+ }
+
+ if allFound {
+ return candidate
+ }
+ }
+ return nil
+}
+
+// references is a set of references found in a Go file. The first map key is the
+// left hand side of a selector expression, the second key is the right hand
+// side, and the value should always be true.
+type references map[string]map[string]bool
+
+// A pass contains all the inputs and state necessary to fix a file's imports.
+// It can be modified in some ways during use; see comments below.
+type pass struct {
+ // Inputs. These must be set before a call to load, and not modified after.
+ fset *token.FileSet // fset used to parse f and its siblings.
+ f *ast.File // the file being fixed.
+ srcDir string // the directory containing f.
+ env *ProcessEnv // the environment to use for go commands, etc.
+ loadRealPackageNames bool // if true, load package names from disk rather than guessing them.
+ otherFiles []*ast.File // sibling files.
+
+ // Intermediate state, generated by load.
+ existingImports map[string]*ImportInfo
+ allRefs references
+ missingRefs references
+
+ // Inputs to fix. These can be augmented between successive fix calls.
+ lastTry bool // indicates that this is the last call and fix should clean up as best it can.
+ candidates []*ImportInfo // candidate imports in priority order.
+ knownPackages map[string]*packageInfo // information about all known packages.
+}
+
+// loadPackageNames saves the package names for everything referenced by imports.
+func (p *pass) loadPackageNames(imports []*ImportInfo) error {
+ if p.env.Logf != nil {
+ p.env.Logf("loading package names for %v packages", len(imports))
+ defer func() {
+ p.env.Logf("done loading package names for %v packages", len(imports))
+ }()
+ }
+ var unknown []string
+ for _, imp := range imports {
+ if _, ok := p.knownPackages[imp.ImportPath]; ok {
+ continue
+ }
+ unknown = append(unknown, imp.ImportPath)
+ }
+
+ resolver, err := p.env.GetResolver()
+ if err != nil {
+ return err
+ }
+
+ names, err := resolver.loadPackageNames(unknown, p.srcDir)
+ if err != nil {
+ return err
+ }
+
+ for path, name := range names {
+ p.knownPackages[path] = &packageInfo{
+ name: name,
+ exports: map[string]bool{},
+ }
+ }
+ return nil
+}
+
+// importIdentifier returns the identifier that imp will introduce. It will
+// guess if the package name has not been loaded, e.g. because the source
+// is not available.
+func (p *pass) importIdentifier(imp *ImportInfo) string {
+ if imp.Name != "" {
+ return imp.Name
+ }
+ known := p.knownPackages[imp.ImportPath]
+ if known != nil && known.name != "" {
+ return known.name
+ }
+ return ImportPathToAssumedName(imp.ImportPath)
+}
+
+// load reads in everything necessary to run a pass, and reports whether the
+// file already has all the imports it needs. It fills in p.missingRefs with the
+// file's missing symbols, if any, or removes unused imports if not.
+func (p *pass) load() ([]*ImportFix, bool) {
+ p.knownPackages = map[string]*packageInfo{}
+ p.missingRefs = references{}
+ p.existingImports = map[string]*ImportInfo{}
+
+ // Load basic information about the file in question.
+ p.allRefs = collectReferences(p.f)
+
+ // Load stuff from other files in the same package:
+ // global variables so we know they don't need resolving, and imports
+ // that we might want to mimic.
+ globals := map[string]bool{}
+ for _, otherFile := range p.otherFiles {
+ // Don't load globals from files that are in the same directory
+ // but a different package. Using them to suggest imports is OK.
+ if p.f.Name.Name == otherFile.Name.Name {
+ addGlobals(otherFile, globals)
+ }
+ p.candidates = append(p.candidates, collectImports(otherFile)...)
+ }
+
+ // Resolve all the import paths we've seen to package names, and store
+ // f's imports by the identifier they introduce.
+ imports := collectImports(p.f)
+ if p.loadRealPackageNames {
+ err := p.loadPackageNames(append(imports, p.candidates...))
+ if err != nil {
+ if p.env.Logf != nil {
+ p.env.Logf("loading package names: %v", err)
+ }
+ return nil, false
+ }
+ }
+ for _, imp := range imports {
+ p.existingImports[p.importIdentifier(imp)] = imp
+ }
+
+ // Find missing references.
+ for left, rights := range p.allRefs {
+ if globals[left] {
+ continue
+ }
+ _, ok := p.existingImports[left]
+ if !ok {
+ p.missingRefs[left] = rights
+ continue
+ }
+ }
+ if len(p.missingRefs) != 0 {
+ return nil, false
+ }
+
+ return p.fix()
+}
+
+// fix attempts to satisfy missing imports using p.candidates. If it finds
+// everything, or if p.lastTry is true, it updates fixes to add the imports it found,
+// delete anything unused, and update import names, and returns true.
+func (p *pass) fix() ([]*ImportFix, bool) {
+ // Find missing imports.
+ var selected []*ImportInfo
+ for left, rights := range p.missingRefs {
+ if imp := p.findMissingImport(left, rights); imp != nil {
+ selected = append(selected, imp)
+ }
+ }
+
+ if !p.lastTry && len(selected) != len(p.missingRefs) {
+ return nil, false
+ }
+
+ // Found everything, or giving up. Add the new imports and remove any unused.
+ var fixes []*ImportFix
+ for _, imp := range p.existingImports {
+ // We deliberately ignore globals here, because we can't be sure
+ // they're in the same package. People do things like put multiple
+ // main packages in the same directory, and we don't want to
+ // remove imports if they happen to have the same name as a var in
+ // a different package.
+ if _, ok := p.allRefs[p.importIdentifier(imp)]; !ok {
+ fixes = append(fixes, &ImportFix{
+ StmtInfo: *imp,
+ IdentName: p.importIdentifier(imp),
+ FixType: DeleteImport,
+ })
+ continue
+ }
+
+ // An existing import may need to update its import name to be correct.
+ if name := p.importSpecName(imp); name != imp.Name {
+ fixes = append(fixes, &ImportFix{
+ StmtInfo: ImportInfo{
+ Name: name,
+ ImportPath: imp.ImportPath,
+ },
+ IdentName: p.importIdentifier(imp),
+ FixType: SetImportName,
+ })
+ }
+ }
+
+ for _, imp := range selected {
+ fixes = append(fixes, &ImportFix{
+ StmtInfo: ImportInfo{
+ Name: p.importSpecName(imp),
+ ImportPath: imp.ImportPath,
+ },
+ IdentName: p.importIdentifier(imp),
+ FixType: AddImport,
+ })
+ }
+
+ return fixes, true
+}
+
+// importSpecName gets the import name of imp in the import spec.
+//
+// When the import identifier matches the assumed import name, the import name does
+// not appear in the import spec.
+func (p *pass) importSpecName(imp *ImportInfo) string {
+ // If we did not load the real package names, or the name is already set,
+ // we just return the existing name.
+ if !p.loadRealPackageNames || imp.Name != "" {
+ return imp.Name
+ }
+
+ ident := p.importIdentifier(imp)
+ if ident == ImportPathToAssumedName(imp.ImportPath) {
+ return "" // ident not needed since the assumed and real names are the same.
+ }
+ return ident
+}
+
+// apply will perform the fixes on f in order.
+func apply(fset *token.FileSet, f *ast.File, fixes []*ImportFix) {
+ for _, fix := range fixes {
+ switch fix.FixType {
+ case DeleteImport:
+ astutil.DeleteNamedImport(fset, f, fix.StmtInfo.Name, fix.StmtInfo.ImportPath)
+ case AddImport:
+ astutil.AddNamedImport(fset, f, fix.StmtInfo.Name, fix.StmtInfo.ImportPath)
+ case SetImportName:
+ // Find the matching import path and change the name.
+ for _, spec := range f.Imports {
+ path := strings.Trim(spec.Path.Value, `"`)
+ if path == fix.StmtInfo.ImportPath {
+ spec.Name = &ast.Ident{
+ Name: fix.StmtInfo.Name,
+ NamePos: spec.Pos(),
+ }
+ }
+ }
+ }
+ }
+}
+
+// assumeSiblingImportsValid assumes that siblings' use of packages is valid,
+// adding the exports they use.
+func (p *pass) assumeSiblingImportsValid() {
+ for _, f := range p.otherFiles {
+ refs := collectReferences(f)
+ imports := collectImports(f)
+ importsByName := map[string]*ImportInfo{}
+ for _, imp := range imports {
+ importsByName[p.importIdentifier(imp)] = imp
+ }
+ for left, rights := range refs {
+ if imp, ok := importsByName[left]; ok {
+ if m, ok := stdlib[imp.ImportPath]; ok {
+ // We have the stdlib in memory; no need to guess.
+ rights = copyExports(m)
+ }
+ p.addCandidate(imp, &packageInfo{
+ // no name; we already know it.
+ exports: rights,
+ })
+ }
+ }
+ }
+}
+
+// addCandidate adds a candidate import to p, and merges in the information
+// in pkg.
+func (p *pass) addCandidate(imp *ImportInfo, pkg *packageInfo) {
+ p.candidates = append(p.candidates, imp)
+ if existing, ok := p.knownPackages[imp.ImportPath]; ok {
+ if existing.name == "" {
+ existing.name = pkg.name
+ }
+ for export := range pkg.exports {
+ existing.exports[export] = true
+ }
+ } else {
+ p.knownPackages[imp.ImportPath] = pkg
+ }
+}
+
+// fixImports adds and removes imports from f so that all its references are
+// satisfied and there are no unused imports.
+//
+// This is declared as a variable rather than a function so goimports can
+// easily be extended by adding a file with an init function.
+var fixImports = fixImportsDefault
+
+func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) error {
+ fixes, err := getFixes(fset, f, filename, env)
+ if err != nil {
+ return err
+ }
+ apply(fset, f, fixes)
+ return err
+}
+
+// getFixes gets the import fixes that need to be made to f in order to fix the imports.
+// It does not modify the ast.
+func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) ([]*ImportFix, error) {
+ abs, err := filepath.Abs(filename)
+ if err != nil {
+ return nil, err
+ }
+ srcDir := filepath.Dir(abs)
+ if env.Logf != nil {
+ env.Logf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir)
+ }
+
+ // First pass: looking only at f, and using the naive algorithm to
+ // derive package names from import paths, see if the file is already
+ // complete. We can't add any imports yet, because we don't know
+ // if missing references are actually package vars.
+ p := &pass{fset: fset, f: f, srcDir: srcDir, env: env}
+ if fixes, done := p.load(); done {
+ return fixes, nil
+ }
+
+ otherFiles := parseOtherFiles(fset, srcDir, filename)
+
+ // Second pass: add information from other files in the same package,
+ // like their package vars and imports.
+ p.otherFiles = otherFiles
+ if fixes, done := p.load(); done {
+ return fixes, nil
+ }
+
+ // Now we can try adding imports from the stdlib.
+ p.assumeSiblingImportsValid()
+ addStdlibCandidates(p, p.missingRefs)
+ if fixes, done := p.fix(); done {
+ return fixes, nil
+ }
+
+ // Third pass: get real package names where we had previously used
+ // the naive algorithm.
+ p = &pass{fset: fset, f: f, srcDir: srcDir, env: env}
+ p.loadRealPackageNames = true
+ p.otherFiles = otherFiles
+ if fixes, done := p.load(); done {
+ return fixes, nil
+ }
+
+ if err := addStdlibCandidates(p, p.missingRefs); err != nil {
+ return nil, err
+ }
+ p.assumeSiblingImportsValid()
+ if fixes, done := p.fix(); done {
+ return fixes, nil
+ }
+
+ // Go look for candidates in $GOPATH, etc. We don't necessarily load
+ // the real exports of sibling imports, so keep assuming their contents.
+ if err := addExternalCandidates(p, p.missingRefs, filename); err != nil {
+ return nil, err
+ }
+
+ p.lastTry = true
+ fixes, _ := p.fix()
+ return fixes, nil
+}
+
+// MaxRelevance is the highest relevance, used for the standard library.
+// Chosen arbitrarily to match pre-existing gopls code.
+const MaxRelevance = 7.0
+
+// getCandidatePkgs works with the passed callback to find all acceptable packages.
+// It deduplicates by import path, and uses a cached stdlib rather than reading
+// from disk.
+func getCandidatePkgs(ctx context.Context, wrappedCallback *scanCallback, filename, filePkg string, env *ProcessEnv) error {
+ notSelf := func(p *pkg) bool {
+ return p.packageName != filePkg || p.dir != filepath.Dir(filename)
+ }
+ goenv, err := env.goEnv()
+ if err != nil {
+ return err
+ }
+
+ var mu sync.Mutex // to guard asynchronous access to dupCheck
+ dupCheck := map[string]struct{}{}
+
+ // Start off with the standard library.
+ for importPath, exports := range stdlib {
+ p := &pkg{
+ dir: filepath.Join(goenv["GOROOT"], "src", importPath),
+ importPathShort: importPath,
+ packageName: path.Base(importPath),
+ relevance: MaxRelevance,
+ }
+ dupCheck[importPath] = struct{}{}
+ if notSelf(p) && wrappedCallback.dirFound(p) && wrappedCallback.packageNameLoaded(p) {
+ wrappedCallback.exportsLoaded(p, exports)
+ }
+ }
+
+ scanFilter := &scanCallback{
+ rootFound: func(root gopathwalk.Root) bool {
+ // Exclude goroot results -- getting them is relatively expensive, not cached,
+ // and generally redundant with the in-memory version.
+ return root.Type != gopathwalk.RootGOROOT && wrappedCallback.rootFound(root)
+ },
+ dirFound: wrappedCallback.dirFound,
+ packageNameLoaded: func(pkg *pkg) bool {
+ mu.Lock()
+ defer mu.Unlock()
+ if _, ok := dupCheck[pkg.importPathShort]; ok {
+ return false
+ }
+ dupCheck[pkg.importPathShort] = struct{}{}
+ return notSelf(pkg) && wrappedCallback.packageNameLoaded(pkg)
+ },
+ exportsLoaded: func(pkg *pkg, exports []string) {
+ // If we're an x_test, load the package under test's test variant.
+ if strings.HasSuffix(filePkg, "_test") && pkg.dir == filepath.Dir(filename) {
+ var err error
+ _, exports, err = loadExportsFromFiles(ctx, env, pkg.dir, true)
+ if err != nil {
+ return
+ }
+ }
+ wrappedCallback.exportsLoaded(pkg, exports)
+ },
+ }
+ resolver, err := env.GetResolver()
+ if err != nil {
+ return err
+ }
+ return resolver.scan(ctx, scanFilter)
+}
+
+func ScoreImportPaths(ctx context.Context, env *ProcessEnv, paths []string) (map[string]float64, error) {
+ result := make(map[string]float64)
+ resolver, err := env.GetResolver()
+ if err != nil {
+ return nil, err
+ }
+ for _, path := range paths {
+ result[path] = resolver.scoreImportPath(ctx, path)
+ }
+ return result, nil
+}
+
+func PrimeCache(ctx context.Context, env *ProcessEnv) error {
+ // Fully scan the disk for directories, but don't actually read any Go files.
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true
+ },
+ dirFound: func(pkg *pkg) bool {
+ return false
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ return false
+ },
+ }
+ return getCandidatePkgs(ctx, callback, "", "", env)
+}
+
+func candidateImportName(pkg *pkg) string {
+ if ImportPathToAssumedName(pkg.importPathShort) != pkg.packageName {
+ return pkg.packageName
+ }
+ return ""
+}
+
+// GetAllCandidates calls wrapped for each package whose name starts with
+// searchPrefix, and can be imported from filename with the package name filePkg.
+func GetAllCandidates(ctx context.Context, wrapped func(ImportFix), searchPrefix, filename, filePkg string, env *ProcessEnv) error {
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true
+ },
+ dirFound: func(pkg *pkg) bool {
+ if !canUse(filename, pkg.dir) {
+ return false
+ }
+ // Try the assumed package name first, then a simpler path match
+ // in case of packages named vN, which are not uncommon.
+ return strings.HasPrefix(ImportPathToAssumedName(pkg.importPathShort), searchPrefix) ||
+ strings.HasPrefix(path.Base(pkg.importPathShort), searchPrefix)
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ if !strings.HasPrefix(pkg.packageName, searchPrefix) {
+ return false
+ }
+ wrapped(ImportFix{
+ StmtInfo: ImportInfo{
+ ImportPath: pkg.importPathShort,
+ Name: candidateImportName(pkg),
+ },
+ IdentName: pkg.packageName,
+ FixType: AddImport,
+ Relevance: pkg.relevance,
+ })
+ return false
+ },
+ }
+ return getCandidatePkgs(ctx, callback, filename, filePkg, env)
+}
+
+// GetImportPaths calls wrapped for each package whose import path starts with
+// searchPrefix, and can be imported from filename with the package name filePkg.
+func GetImportPaths(ctx context.Context, wrapped func(ImportFix), searchPrefix, filename, filePkg string, env *ProcessEnv) error {
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true
+ },
+ dirFound: func(pkg *pkg) bool {
+ if !canUse(filename, pkg.dir) {
+ return false
+ }
+ return strings.HasPrefix(pkg.importPathShort, searchPrefix)
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ wrapped(ImportFix{
+ StmtInfo: ImportInfo{
+ ImportPath: pkg.importPathShort,
+ Name: candidateImportName(pkg),
+ },
+ IdentName: pkg.packageName,
+ FixType: AddImport,
+ Relevance: pkg.relevance,
+ })
+ return false
+ },
+ }
+ return getCandidatePkgs(ctx, callback, filename, filePkg, env)
+}
+
+// A PackageExport is a package and its exports.
+type PackageExport struct {
+ Fix *ImportFix
+ Exports []string
+}
+
+// GetPackageExports returns all known packages with name pkg and their exports.
+func GetPackageExports(ctx context.Context, wrapped func(PackageExport), searchPkg, filename, filePkg string, env *ProcessEnv) error {
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true
+ },
+ dirFound: func(pkg *pkg) bool {
+ return pkgIsCandidate(filename, references{searchPkg: nil}, pkg)
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ return pkg.packageName == searchPkg
+ },
+ exportsLoaded: func(pkg *pkg, exports []string) {
+ sort.Strings(exports)
+ wrapped(PackageExport{
+ Fix: &ImportFix{
+ StmtInfo: ImportInfo{
+ ImportPath: pkg.importPathShort,
+ Name: candidateImportName(pkg),
+ },
+ IdentName: pkg.packageName,
+ FixType: AddImport,
+ Relevance: pkg.relevance,
+ },
+ Exports: exports,
+ })
+ },
+ }
+ return getCandidatePkgs(ctx, callback, filename, filePkg, env)
+}
+
+var RequiredGoEnvVars = []string{"GO111MODULE", "GOFLAGS", "GOINSECURE", "GOMOD", "GOMODCACHE", "GONOPROXY", "GONOSUMDB", "GOPATH", "GOPROXY", "GOROOT", "GOSUMDB"}
+
+// ProcessEnv contains environment variables and settings that affect the use of
+// the go command, the go/build package, etc.
+type ProcessEnv struct {
+ GocmdRunner *gocommand.Runner
+
+ BuildFlags []string
+ ModFlag string
+ ModFile string
+
+ // Env overrides the OS environment, and can be used to specify
+ // GOPROXY, GO111MODULE, etc. PATH cannot be set here, because
+ // exec.Command will not honor it.
+ // Specifying all of RequiredGoEnvVars avoids a call to `go env`.
+ Env map[string]string
+
+ WorkingDir string
+
+ // If Logf is non-nil, debug logging is enabled through this function.
+ Logf func(format string, args ...interface{})
+
+ initialized bool
+
+ resolver Resolver
+}
+
+func (e *ProcessEnv) goEnv() (map[string]string, error) {
+ if err := e.init(); err != nil {
+ return nil, err
+ }
+ return e.Env, nil
+}
+
+func (e *ProcessEnv) matchFile(dir, name string) (bool, error) {
+ bctx, err := e.buildContext()
+ if err != nil {
+ return false, err
+ }
+ return bctx.MatchFile(dir, name)
+}
+
+// CopyConfig copies the env's configuration into a new env.
+func (e *ProcessEnv) CopyConfig() *ProcessEnv {
+ copy := &ProcessEnv{
+ GocmdRunner: e.GocmdRunner,
+ initialized: e.initialized,
+ BuildFlags: e.BuildFlags,
+ Logf: e.Logf,
+ WorkingDir: e.WorkingDir,
+ resolver: nil,
+ Env: map[string]string{},
+ }
+ for k, v := range e.Env {
+ copy.Env[k] = v
+ }
+ return copy
+}
+
+func (e *ProcessEnv) init() error {
+ if e.initialized {
+ return nil
+ }
+
+ foundAllRequired := true
+ for _, k := range RequiredGoEnvVars {
+ if _, ok := e.Env[k]; !ok {
+ foundAllRequired = false
+ break
+ }
+ }
+ if foundAllRequired {
+ e.initialized = true
+ return nil
+ }
+
+ if e.Env == nil {
+ e.Env = map[string]string{}
+ }
+
+ goEnv := map[string]string{}
+ stdout, err := e.invokeGo(context.TODO(), "env", append([]string{"-json"}, RequiredGoEnvVars...)...)
+ if err != nil {
+ return err
+ }
+ if err := json.Unmarshal(stdout.Bytes(), &goEnv); err != nil {
+ return err
+ }
+ for k, v := range goEnv {
+ e.Env[k] = v
+ }
+ e.initialized = true
+ return nil
+}
+
+func (e *ProcessEnv) env() []string {
+ var env []string // the gocommand package will prepend os.Environ.
+ for k, v := range e.Env {
+ env = append(env, k+"="+v)
+ }
+ return env
+}
+
+func (e *ProcessEnv) GetResolver() (Resolver, error) {
+ if e.resolver != nil {
+ return e.resolver, nil
+ }
+ if err := e.init(); err != nil {
+ return nil, err
+ }
+ if len(e.Env["GOMOD"]) == 0 {
+ e.resolver = newGopathResolver(e)
+ return e.resolver, nil
+ }
+ e.resolver = newModuleResolver(e)
+ return e.resolver, nil
+}
+
+func (e *ProcessEnv) buildContext() (*build.Context, error) {
+ ctx := build.Default
+ goenv, err := e.goEnv()
+ if err != nil {
+ return nil, err
+ }
+ ctx.GOROOT = goenv["GOROOT"]
+ ctx.GOPATH = goenv["GOPATH"]
+
+ // As of Go 1.14, build.Context has a Dir field
+ // (see golang.org/issue/34860).
+ // Populate it only if present.
+ rc := reflect.ValueOf(&ctx).Elem()
+ dir := rc.FieldByName("Dir")
+ if dir.IsValid() && dir.Kind() == reflect.String {
+ dir.SetString(e.WorkingDir)
+ }
+
+ // Since Go 1.11, go/build.Context.Import may invoke 'go list' depending on
+ // the value in GO111MODULE in the process's environment. We always want to
+ // run in GOPATH mode when calling Import, so we need to prevent this from
+ // happening. In Go 1.16, GO111MODULE defaults to "on", so this problem comes
+ // up more frequently.
+ //
+ // HACK: setting any of the Context I/O hooks prevents Import from invoking
+ // 'go list', regardless of GO111MODULE. This is undocumented, but it's
+ // unlikely to change before GOPATH support is removed.
+ ctx.ReadDir = ioutil.ReadDir
+
+ return &ctx, nil
+}
+
+func (e *ProcessEnv) invokeGo(ctx context.Context, verb string, args ...string) (*bytes.Buffer, error) {
+ inv := gocommand.Invocation{
+ Verb: verb,
+ Args: args,
+ BuildFlags: e.BuildFlags,
+ Env: e.env(),
+ Logf: e.Logf,
+ WorkingDir: e.WorkingDir,
+ }
+ return e.GocmdRunner.Run(ctx, inv)
+}
+
+func addStdlibCandidates(pass *pass, refs references) error {
+ goenv, err := pass.env.goEnv()
+ if err != nil {
+ return err
+ }
+ add := func(pkg string) {
+ // Prevent self-imports.
+ if path.Base(pkg) == pass.f.Name.Name && filepath.Join(goenv["GOROOT"], "src", pkg) == pass.srcDir {
+ return
+ }
+ exports := copyExports(stdlib[pkg])
+ pass.addCandidate(
+ &ImportInfo{ImportPath: pkg},
+ &packageInfo{name: path.Base(pkg), exports: exports})
+ }
+ for left := range refs {
+ if left == "rand" {
+ // Make sure we try crypto/rand before math/rand.
+ add("crypto/rand")
+ add("math/rand")
+ continue
+ }
+ for importPath := range stdlib {
+ if path.Base(importPath) == left {
+ add(importPath)
+ }
+ }
+ }
+ return nil
+}
+
+// A Resolver does the build-system-specific parts of goimports.
+type Resolver interface {
+ // loadPackageNames loads the package names in importPaths.
+ loadPackageNames(importPaths []string, srcDir string) (map[string]string, error)
+ // scan works with callback to search for packages. See scanCallback for details.
+ scan(ctx context.Context, callback *scanCallback) error
+ // loadExports returns the set of exported symbols in the package at dir.
+ // loadExports may be called concurrently.
+ loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error)
+ // scoreImportPath returns the relevance for an import path.
+ scoreImportPath(ctx context.Context, path string) float64
+
+ ClearForNewScan()
+}
+
+// A scanCallback controls a call to scan and receives its results.
+// In general, minor errors will be silently discarded; a user should not
+// expect to receive a full series of calls for everything.
+type scanCallback struct {
+ // rootFound is called before scanning a new root dir. If it returns true,
+ // the root will be scanned. Returning false will not necessarily prevent
+ // directories from that root making it to dirFound.
+ rootFound func(gopathwalk.Root) bool
+ // dirFound is called when a directory is found that is possibly a Go package.
+ // pkg will be populated with everything except packageName.
+ // If it returns true, the package's name will be loaded.
+ dirFound func(pkg *pkg) bool
+ // packageNameLoaded is called when a package is found and its name is loaded.
+ // If it returns true, the package's exports will be loaded.
+ packageNameLoaded func(pkg *pkg) bool
+ // exportsLoaded is called when a package's exports have been loaded.
+ exportsLoaded func(pkg *pkg, exports []string)
+}
+
+func addExternalCandidates(pass *pass, refs references, filename string) error {
+ var mu sync.Mutex
+ found := make(map[string][]pkgDistance)
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true // We want everything.
+ },
+ dirFound: func(pkg *pkg) bool {
+ return pkgIsCandidate(filename, refs, pkg)
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ if _, want := refs[pkg.packageName]; !want {
+ return false
+ }
+ if pkg.dir == pass.srcDir && pass.f.Name.Name == pkg.packageName {
+ // The candidate is in the same directory and has the
+ // same package name. Don't try to import ourselves.
+ return false
+ }
+ if !canUse(filename, pkg.dir) {
+ return false
+ }
+ mu.Lock()
+ defer mu.Unlock()
+ found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(pass.srcDir, pkg.dir)})
+ return false // We'll do our own loading after we sort.
+ },
+ }
+ resolver, err := pass.env.GetResolver()
+ if err != nil {
+ return err
+ }
+ if err = resolver.scan(context.Background(), callback); err != nil {
+ return err
+ }
+
+ // Search for imports matching potential package references.
+ type result struct {
+ imp *ImportInfo
+ pkg *packageInfo
+ }
+ results := make(chan result, len(refs))
+
+ ctx, cancel := context.WithCancel(context.TODO())
+ var wg sync.WaitGroup
+ defer func() {
+ cancel()
+ wg.Wait()
+ }()
+ var (
+ firstErr error
+ firstErrOnce sync.Once
+ )
+ for pkgName, symbols := range refs {
+ wg.Add(1)
+ go func(pkgName string, symbols map[string]bool) {
+ defer wg.Done()
+
+ found, err := findImport(ctx, pass, found[pkgName], pkgName, symbols, filename)
+
+ if err != nil {
+ firstErrOnce.Do(func() {
+ firstErr = err
+ cancel()
+ })
+ return
+ }
+
+ if found == nil {
+ return // No matching package.
+ }
+
+ imp := &ImportInfo{
+ ImportPath: found.importPathShort,
+ }
+
+ pkg := &packageInfo{
+ name: pkgName,
+ exports: symbols,
+ }
+ results <- result{imp, pkg}
+ }(pkgName, symbols)
+ }
+ go func() {
+ wg.Wait()
+ close(results)
+ }()
+
+ for result := range results {
+ pass.addCandidate(result.imp, result.pkg)
+ }
+ return firstErr
+}
+
+// notIdentifier reports whether ch is an invalid identifier character.
+func notIdentifier(ch rune) bool {
+ return !('a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' ||
+ '0' <= ch && ch <= '9' ||
+ ch == '_' ||
+ ch >= utf8.RuneSelf && (unicode.IsLetter(ch) || unicode.IsDigit(ch)))
+}
+
+// ImportPathToAssumedName returns the assumed package name of an import path.
+// It does this using only string parsing of the import path.
+// It picks the last element of the path that does not look like a major
+// version, and then picks the valid identifier off the start of that element.
+// It is used to determine if a local rename should be added to an import for
+// clarity.
+// This function could be moved to a standard package and exported if we want
+// it for use in other tools.
+func ImportPathToAssumedName(importPath string) string {
+ base := path.Base(importPath)
+ if strings.HasPrefix(base, "v") {
+ if _, err := strconv.Atoi(base[1:]); err == nil {
+ dir := path.Dir(importPath)
+ if dir != "." {
+ base = path.Base(dir)
+ }
+ }
+ }
+ base = strings.TrimPrefix(base, "go-")
+ if i := strings.IndexFunc(base, notIdentifier); i >= 0 {
+ base = base[:i]
+ }
+ return base
+}
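// A few worked examples of the assumed-name rule implemented above
// (illustrative only, not part of the vendored file):
//
//   "crypto/rand"                      -> "rand"
//   "github.com/cpuguy83/go-md2man/v2" -> "md2man" (major-version element and "go-" prefix dropped)
//   "gopkg.in/yaml.v2"                 -> "yaml"   (cut at the first non-identifier rune)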
+
+// gopathResolver implements resolver for GOPATH workspaces.
+type gopathResolver struct {
+ env *ProcessEnv
+ walked bool
+ cache *dirInfoCache
+ scanSema chan struct{} // scanSema prevents concurrent scans.
+}
+
+func newGopathResolver(env *ProcessEnv) *gopathResolver {
+ r := &gopathResolver{
+ env: env,
+ cache: &dirInfoCache{
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
+ },
+ scanSema: make(chan struct{}, 1),
+ }
+ r.scanSema <- struct{}{}
+ return r
+}
+
+func (r *gopathResolver) ClearForNewScan() {
+ <-r.scanSema
+ r.cache = &dirInfoCache{
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
+ }
+ r.walked = false
+ r.scanSema <- struct{}{}
+}
+
+func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
+ names := map[string]string{}
+ bctx, err := r.env.buildContext()
+ if err != nil {
+ return nil, err
+ }
+ for _, path := range importPaths {
+ names[path] = importPathToName(bctx, path, srcDir)
+ }
+ return names, nil
+}
+
+// importPathToName finds out the actual package name, as declared in its .go files.
+func importPathToName(bctx *build.Context, importPath, srcDir string) string {
+ // Fast path for standard library without going to disk.
+ if _, ok := stdlib[importPath]; ok {
+ return path.Base(importPath) // stdlib packages always match their paths.
+ }
+
+ buildPkg, err := bctx.Import(importPath, srcDir, build.FindOnly)
+ if err != nil {
+ return ""
+ }
+ pkgName, err := packageDirToName(buildPkg.Dir)
+ if err != nil {
+ return ""
+ }
+ return pkgName
+}
+
+// packageDirToName is a faster version of build.Import if
+// the only thing desired is the package name. Given a directory,
+// packageDirToName then only parses one file in the package,
+// trusting that the files in the directory are consistent.
+func packageDirToName(dir string) (packageName string, err error) {
+ d, err := os.Open(dir)
+ if err != nil {
+ return "", err
+ }
+ names, err := d.Readdirnames(-1)
+ d.Close()
+ if err != nil {
+ return "", err
+ }
+ sort.Strings(names) // to have predictable behavior
+ var lastErr error
+ var nfile int
+ for _, name := range names {
+ if !strings.HasSuffix(name, ".go") {
+ continue
+ }
+ if strings.HasSuffix(name, "_test.go") {
+ continue
+ }
+ nfile++
+ fullFile := filepath.Join(dir, name)
+
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, fullFile, nil, parser.PackageClauseOnly)
+ if err != nil {
+ lastErr = err
+ continue
+ }
+ pkgName := f.Name.Name
+ if pkgName == "documentation" {
+ // Special case from go/build.ImportDir, not
+ // handled by ctx.MatchFile.
+ continue
+ }
+ if pkgName == "main" {
+ // Also skip package main, assuming it's a +build ignore generator or example.
+ // Since you can't import a package main anyway, there's no harm here.
+ continue
+ }
+ return pkgName, nil
+ }
+ if lastErr != nil {
+ return "", lastErr
+ }
+ return "", fmt.Errorf("no importable package found in %d Go files", nfile)
+}
+
+type pkg struct {
+ dir string // absolute file path to pkg directory ("/usr/lib/go/src/net/http")
+ importPathShort string // vendorless import path ("net/http", "a/b")
+ packageName string // package name loaded from source if requested
+ relevance float64 // a weakly-defined score of how relevant a package is. 0 is most relevant.
+}
+
+type pkgDistance struct {
+ pkg *pkg
+ distance int // relative distance to target
+}
+
+// byDistanceOrImportPathShortLength sorts by relative distance, breaking ties
+// on the short import path length and then the import string itself.
+type byDistanceOrImportPathShortLength []pkgDistance
+
+func (s byDistanceOrImportPathShortLength) Len() int { return len(s) }
+func (s byDistanceOrImportPathShortLength) Less(i, j int) bool {
+ di, dj := s[i].distance, s[j].distance
+ if di == -1 {
+ return false
+ }
+ if dj == -1 {
+ return true
+ }
+ if di != dj {
+ return di < dj
+ }
+
+ vi, vj := s[i].pkg.importPathShort, s[j].pkg.importPathShort
+ if len(vi) != len(vj) {
+ return len(vi) < len(vj)
+ }
+ return vi < vj
+}
+func (s byDistanceOrImportPathShortLength) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
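+
+// Example ordering (illustrative, not part of the upstream source): a
+// candidate in the target directory (distance 0) sorts before one two
+// directories away (distance 2); a candidate whose distance could not be
+// computed (-1) sorts last; equal distances fall back to the shorter, then
+// lexicographically smaller, vendorless import path.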
+
+func distance(basepath, targetpath string) int {
+ p, err := filepath.Rel(basepath, targetpath)
+ if err != nil {
+ return -1
+ }
+ if p == "." {
+ return 0
+ }
+ return strings.Count(p, string(filepath.Separator)) + 1
+}
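+
+// For example (illustrative, assuming Unix-style paths):
+// distance("/gopath/src/a/b", "/gopath/src/a/b") is 0,
+// distance("/gopath/src/a/b", "/gopath/src/a/b/c/d") is 2, and
+// distance("/gopath/src/a/b", "/gopath/src/x") is 3 ("../../x").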
+
+func (r *gopathResolver) scan(ctx context.Context, callback *scanCallback) error {
+ add := func(root gopathwalk.Root, dir string) {
+ // We assume cached directories have not changed. We can skip them and their
+ // children.
+ if _, ok := r.cache.Load(dir); ok {
+ return
+ }
+
+ importpath := filepath.ToSlash(dir[len(root.Path)+len("/"):])
+ info := directoryPackageInfo{
+ status: directoryScanned,
+ dir: dir,
+ rootType: root.Type,
+ nonCanonicalImportPath: VendorlessPath(importpath),
+ }
+ r.cache.Store(dir, info)
+ }
+ processDir := func(info directoryPackageInfo) {
+ // Skip this directory if we were not able to get the package information successfully.
+ if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
+ return
+ }
+
+ p := &pkg{
+ importPathShort: info.nonCanonicalImportPath,
+ dir: info.dir,
+ relevance: MaxRelevance - 1,
+ }
+ if info.rootType == gopathwalk.RootGOROOT {
+ p.relevance = MaxRelevance
+ }
+
+ if !callback.dirFound(p) {
+ return
+ }
+ var err error
+ p.packageName, err = r.cache.CachePackageName(info)
+ if err != nil {
+ return
+ }
+
+ if !callback.packageNameLoaded(p) {
+ return
+ }
+ if _, exports, err := r.loadExports(ctx, p, false); err == nil {
+ callback.exportsLoaded(p, exports)
+ }
+ }
+ stop := r.cache.ScanAndListen(ctx, processDir)
+ defer stop()
+
+ goenv, err := r.env.goEnv()
+ if err != nil {
+ return err
+ }
+ var roots []gopathwalk.Root
+ roots = append(roots, gopathwalk.Root{filepath.Join(goenv["GOROOT"], "src"), gopathwalk.RootGOROOT})
+ for _, p := range filepath.SplitList(goenv["GOPATH"]) {
+ roots = append(roots, gopathwalk.Root{filepath.Join(p, "src"), gopathwalk.RootGOPATH})
+ }
+ // The callback is not necessarily safe to use in the goroutine below. Process roots eagerly.
+ roots = filterRoots(roots, callback.rootFound)
+ // We can't cancel walks, because we need them to finish to have a usable
+ // cache. Instead, run them in a separate goroutine and detach.
+ scanDone := make(chan struct{})
+ go func() {
+ select {
+ case <-ctx.Done():
+ return
+ case <-r.scanSema:
+ }
+ defer func() { r.scanSema <- struct{}{} }()
+ gopathwalk.Walk(roots, add, gopathwalk.Options{Logf: r.env.Logf, ModulesEnabled: false})
+ close(scanDone)
+ }()
+ select {
+ case <-ctx.Done():
+ case <-scanDone:
+ }
+ return nil
+}
+
+func (r *gopathResolver) scoreImportPath(ctx context.Context, path string) float64 {
+ if _, ok := stdlib[path]; ok {
+ return MaxRelevance
+ }
+ return MaxRelevance - 1
+}
+
+func filterRoots(roots []gopathwalk.Root, include func(gopathwalk.Root) bool) []gopathwalk.Root {
+ var result []gopathwalk.Root
+ for _, root := range roots {
+ if !include(root) {
+ continue
+ }
+ result = append(result, root)
+ }
+ return result
+}
+
+func (r *gopathResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) {
+ if info, ok := r.cache.Load(pkg.dir); ok && !includeTest {
+ return r.cache.CacheExports(ctx, r.env, info)
+ }
+ return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest)
+}
+
+// VendorlessPath returns the devendorized version of the import path ipath.
+// For example, VendorlessPath("foo/bar/vendor/a/b") returns "a/b".
+func VendorlessPath(ipath string) string {
+ // Devendorize for use in import statement.
+ if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 {
+ return ipath[i+len("/vendor/"):]
+ }
+ if strings.HasPrefix(ipath, "vendor/") {
+ return ipath[len("vendor/"):]
+ }
+ return ipath
+}
+
+func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, includeTest bool) (string, []string, error) {
+ // Look for non-test, buildable .go files which could provide exports.
+ all, err := ioutil.ReadDir(dir)
+ if err != nil {
+ return "", nil, err
+ }
+ var files []os.FileInfo
+ for _, fi := range all {
+ name := fi.Name()
+ if !strings.HasSuffix(name, ".go") || (!includeTest && strings.HasSuffix(name, "_test.go")) {
+ continue
+ }
+ match, err := env.matchFile(dir, fi.Name())
+ if err != nil || !match {
+ continue
+ }
+ files = append(files, fi)
+ }
+
+ if len(files) == 0 {
+ return "", nil, fmt.Errorf("dir %v contains no buildable, non-test .go files", dir)
+ }
+
+ var pkgName string
+ var exports []string
+ fset := token.NewFileSet()
+ for _, fi := range files {
+ select {
+ case <-ctx.Done():
+ return "", nil, ctx.Err()
+ default:
+ }
+
+ fullFile := filepath.Join(dir, fi.Name())
+ f, err := parser.ParseFile(fset, fullFile, nil, 0)
+ if err != nil {
+ if env.Logf != nil {
+ env.Logf("error parsing %v: %v", fullFile, err)
+ }
+ continue
+ }
+ if f.Name.Name == "documentation" {
+ // Special case from go/build.ImportDir, not
+ // handled by MatchFile above.
+ continue
+ }
+ if includeTest && strings.HasSuffix(f.Name.Name, "_test") {
+ // x_test package. We want internal test files only.
+ continue
+ }
+ pkgName = f.Name.Name
+ for name := range f.Scope.Objects {
+ if ast.IsExported(name) {
+ exports = append(exports, name)
+ }
+ }
+ }
+
+ if env.Logf != nil {
+ sortedExports := append([]string(nil), exports...)
+ sort.Strings(sortedExports)
+ env.Logf("loaded exports in dir %v (package %v): %v", dir, pkgName, strings.Join(sortedExports, ", "))
+ }
+ return pkgName, exports, nil
+}
+
+// findImport searches for a package with the given symbols.
+// If no package is found, findImport returns (nil, nil).
+func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgName string, symbols map[string]bool, filename string) (*pkg, error) {
+ // Sort the candidates by their import package length,
+ // assuming that shorter package names are better than long
+ // ones. Note that this sorts by the de-vendored name, so
+ // there's no "penalty" for vendoring.
+ sort.Sort(byDistanceOrImportPathShortLength(candidates))
+ if pass.env.Logf != nil {
+ for i, c := range candidates {
+ pass.env.Logf("%s candidate %d/%d: %v in %v", pkgName, i+1, len(candidates), c.pkg.importPathShort, c.pkg.dir)
+ }
+ }
+ resolver, err := pass.env.GetResolver()
+ if err != nil {
+ return nil, err
+ }
+
+ // Collect exports for packages with matching names.
+ rescv := make([]chan *pkg, len(candidates))
+ for i := range candidates {
+ rescv[i] = make(chan *pkg, 1)
+ }
+ const maxConcurrentPackageImport = 4
+ loadExportsSem := make(chan struct{}, maxConcurrentPackageImport)
+
+ ctx, cancel := context.WithCancel(ctx)
+ var wg sync.WaitGroup
+ defer func() {
+ cancel()
+ wg.Wait()
+ }()
+
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ for i, c := range candidates {
+ select {
+ case loadExportsSem <- struct{}{}:
+ case <-ctx.Done():
+ return
+ }
+
+ wg.Add(1)
+ go func(c pkgDistance, resc chan<- *pkg) {
+ defer func() {
+ <-loadExportsSem
+ wg.Done()
+ }()
+
+ if pass.env.Logf != nil {
+ pass.env.Logf("loading exports in dir %s (seeking package %s)", c.pkg.dir, pkgName)
+ }
+ // If we're an x_test, load the package under test's test variant.
+ includeTest := strings.HasSuffix(pass.f.Name.Name, "_test") && c.pkg.dir == pass.srcDir
+ _, exports, err := resolver.loadExports(ctx, c.pkg, includeTest)
+ if err != nil {
+ if pass.env.Logf != nil {
+ pass.env.Logf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err)
+ }
+ resc <- nil
+ return
+ }
+
+ exportsMap := make(map[string]bool, len(exports))
+ for _, sym := range exports {
+ exportsMap[sym] = true
+ }
+
+ // If it doesn't have the right
+ // symbols, send nil to mean no match.
+ for symbol := range symbols {
+ if !exportsMap[symbol] {
+ resc <- nil
+ return
+ }
+ }
+ resc <- c.pkg
+ }(c, rescv[i])
+ }
+ }()
+
+ for _, resc := range rescv {
+ pkg := <-resc
+ if pkg == nil {
+ continue
+ }
+ return pkg, nil
+ }
+ return nil, nil
+}
+
+// pkgIsCandidate reports whether pkg is a candidate to satisfy a reference
+// to one of the package identifiers in refs from the file named by filename.
+//
+// This check is purely lexical and is meant to be as fast as possible
+// because it's run over all $GOPATH directories to filter out poor
+// candidates in order to limit the CPU and I/O later parsing the
+// exports in candidate packages.
+//
+// filename is the file being formatted.
+// refs are the package identifiers being searched for, like "client" (when
+// resolving "client.New").
+func pkgIsCandidate(filename string, refs references, pkg *pkg) bool {
+ // Check "internal" and "vendor" visibility:
+ if !canUse(filename, pkg.dir) {
+ return false
+ }
+
+ // Speed optimization to minimize disk I/O:
+ // the last two components on disk must contain the
+ // package name somewhere.
+ //
+ // This permits mismatch naming like directory
+ // "go-foo" being package "foo", or "pkg.v3" being "pkg",
+ // or directory "google.golang.org/api/cloudbilling/v1"
+ // being package "cloudbilling", but doesn't
+ // permit a directory "foo" to be package
+ // "bar", which is strongly discouraged
+ // anyway. There's no reason goimports needs
+ // to be slow just to accommodate that.
+ for pkgIdent := range refs {
+ lastTwo := lastTwoComponents(pkg.importPathShort)
+ if strings.Contains(lastTwo, pkgIdent) {
+ return true
+ }
+ if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(pkgIdent) {
+ lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
+ if strings.Contains(lastTwo, pkgIdent) {
+ return true
+ }
+ }
+ }
+ return false
+}
+
+func hasHyphenOrUpperASCII(s string) bool {
+ for i := 0; i < len(s); i++ {
+ b := s[i]
+ if b == '-' || ('A' <= b && b <= 'Z') {
+ return true
+ }
+ }
+ return false
+}
+
+func lowerASCIIAndRemoveHyphen(s string) (ret string) {
+ buf := make([]byte, 0, len(s))
+ for i := 0; i < len(s); i++ {
+ b := s[i]
+ switch {
+ case b == '-':
+ continue
+ case 'A' <= b && b <= 'Z':
+ buf = append(buf, b+('a'-'A'))
+ default:
+ buf = append(buf, b)
+ }
+ }
+ return string(buf)
+}
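+
+// For example (illustrative, not part of the upstream source):
+// lowerASCIIAndRemoveHyphen("Go-Redis") returns "goredis", letting
+// pkgIsCandidate match a directory like "go-redis" against the identifier
+// "goredis".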
+
+// canUse reports whether the package in dir is usable from filename,
+// respecting the Go "internal" and "vendor" visibility rules.
+func canUse(filename, dir string) bool {
+ // Fast path check, before any allocations. If it doesn't contain vendor
+ // or internal, it's not tricky:
+ // Note that this can false-negative on directories like "notinternal",
+ // but we check it correctly below. This is just a fast path.
+ if !strings.Contains(dir, "vendor") && !strings.Contains(dir, "internal") {
+ return true
+ }
+
+ dirSlash := filepath.ToSlash(dir)
+ if !strings.Contains(dirSlash, "/vendor/") && !strings.Contains(dirSlash, "/internal/") && !strings.HasSuffix(dirSlash, "/internal") {
+ return true
+ }
+ // Vendor or internal directory only visible from children of parent.
+ // That means the path from the current directory to the target directory
+ // can contain ../vendor or ../internal but not ../foo/vendor or ../foo/internal
+ // or bar/vendor or bar/internal.
+ // After stripping all the leading ../, the only okay place to see vendor or internal
+ // is at the very beginning of the path.
+ absfile, err := filepath.Abs(filename)
+ if err != nil {
+ return false
+ }
+ absdir, err := filepath.Abs(dir)
+ if err != nil {
+ return false
+ }
+ rel, err := filepath.Rel(absfile, absdir)
+ if err != nil {
+ return false
+ }
+ relSlash := filepath.ToSlash(rel)
+ if i := strings.LastIndex(relSlash, "../"); i >= 0 {
+ relSlash = relSlash[i+len("../"):]
+ }
+ return !strings.Contains(relSlash, "/vendor/") && !strings.Contains(relSlash, "/internal/") && !strings.HasSuffix(relSlash, "/internal")
+}
+
+// lastTwoComponents returns at most the last two path components
+// of v, using either / or \ as the path separator.
+func lastTwoComponents(v string) string {
+ nslash := 0
+ for i := len(v) - 1; i >= 0; i-- {
+ if v[i] == '/' || v[i] == '\\' {
+ nslash++
+ if nslash == 2 {
+ return v[i:]
+ }
+ }
+ }
+ return v
+}
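+
+// For example (illustrative): lastTwoComponents("golang.org/x/tools") returns
+// "/x/tools", and lastTwoComponents("fmt") returns "fmt" unchanged.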
+
+type visitFn func(node ast.Node) ast.Visitor
+
+func (fn visitFn) Visit(node ast.Node) ast.Visitor {
+ return fn(node)
+}
+
+func copyExports(pkg []string) map[string]bool {
+ m := make(map[string]bool, len(pkg))
+ for _, v := range pkg {
+ m[v] = true
+ }
+ return m
+}
diff --git a/vendor/golang.org/x/tools/internal/imports/imports.go b/vendor/golang.org/x/tools/internal/imports/imports.go
new file mode 100644
index 000000000..25973989e
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/imports/imports.go
@@ -0,0 +1,346 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:generate go run mkstdlib.go
+
+// Package imports implements a Go pretty-printer (like package "go/format")
+// that also adds or removes import statements as necessary.
+package imports
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/format"
+ "go/parser"
+ "go/printer"
+ "go/token"
+ "io"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/go/ast/astutil"
+)
+
+// Options is golang.org/x/tools/imports.Options with extra internal-only options.
+type Options struct {
+ Env *ProcessEnv // The environment to use. Note: this contains the cached module and filesystem state.
+
+ // LocalPrefix is a comma-separated string of import path prefixes, which, if
+ // set, instructs Process to sort the import paths with the given prefixes
+ // into another group after 3rd-party packages.
+ LocalPrefix string
+
+ Fragment bool // Accept fragment of a source file (no package statement)
+ AllErrors bool // Report all errors (not just the first 10 on different lines)
+
+ Comments bool // Print comments (true if nil *Options provided)
+ TabIndent bool // Use tabs for indent (true if nil *Options provided)
+ TabWidth int // Tab width (8 if nil *Options provided)
+
+ FormatOnly bool // Disable the insertion and deletion of imports
+}
+
+// Process implements golang.org/x/tools/imports.Process with explicit context in opt.Env.
+func Process(filename string, src []byte, opt *Options) (formatted []byte, err error) {
+ fileSet := token.NewFileSet()
+ file, adjust, err := parse(fileSet, filename, src, opt)
+ if err != nil {
+ return nil, err
+ }
+
+ if !opt.FormatOnly {
+ if err := fixImports(fileSet, file, filename, opt.Env); err != nil {
+ return nil, err
+ }
+ }
+ return formatFile(fileSet, file, src, adjust, opt)
+}
+
+// FixImports returns a list of fixes to the imports that, when applied,
+// will leave the imports in the same state as Process. src and opt must
+// be specified.
+//
+// Note that filename's directory influences which imports can be chosen,
+// so it is important that filename be accurate.
+func FixImports(filename string, src []byte, opt *Options) (fixes []*ImportFix, err error) {
+ fileSet := token.NewFileSet()
+ file, _, err := parse(fileSet, filename, src, opt)
+ if err != nil {
+ return nil, err
+ }
+
+ return getFixes(fileSet, file, filename, opt.Env)
+}
+
+// ApplyFixes applies all of the fixes to the file and formats it. extraMode
+// is added in when parsing the file. src and opts must be specified, but no
+// env is needed.
+func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, extraMode parser.Mode) (formatted []byte, err error) {
+ // Don't use parse() -- we don't care about fragments or statement lists
+ // here, and we need to work with unparseable files.
+ fileSet := token.NewFileSet()
+ parserMode := parser.Mode(0)
+ if opt.Comments {
+ parserMode |= parser.ParseComments
+ }
+ if opt.AllErrors {
+ parserMode |= parser.AllErrors
+ }
+ parserMode |= extraMode
+
+ file, err := parser.ParseFile(fileSet, filename, src, parserMode)
+ if file == nil {
+ return nil, err
+ }
+
+ // Apply the fixes to the file.
+ apply(fileSet, file, fixes)
+
+ return formatFile(fileSet, file, src, nil, opt)
+}
+
+func formatFile(fileSet *token.FileSet, file *ast.File, src []byte, adjust func(orig []byte, src []byte) []byte, opt *Options) ([]byte, error) {
+ mergeImports(fileSet, file)
+ sortImports(opt.LocalPrefix, fileSet, file)
+ imps := astutil.Imports(fileSet, file)
+ var spacesBefore []string // import paths we need spaces before
+ for _, impSection := range imps {
+ // Within each block of contiguous imports, see if any
+ // import lines are in different group numbers. If so,
+ // we'll need to put a space between them so it's
+ // compatible with gofmt.
+ lastGroup := -1
+ for _, importSpec := range impSection {
+ importPath, _ := strconv.Unquote(importSpec.Path.Value)
+ groupNum := importGroup(opt.LocalPrefix, importPath)
+ if groupNum != lastGroup && lastGroup != -1 {
+ spacesBefore = append(spacesBefore, importPath)
+ }
+ lastGroup = groupNum
+ }
+
+ }
+
+ printerMode := printer.UseSpaces
+ if opt.TabIndent {
+ printerMode |= printer.TabIndent
+ }
+ printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth}
+
+ var buf bytes.Buffer
+ err := printConfig.Fprint(&buf, fileSet, file)
+ if err != nil {
+ return nil, err
+ }
+ out := buf.Bytes()
+ if adjust != nil {
+ out = adjust(src, out)
+ }
+ if len(spacesBefore) > 0 {
+ out, err = addImportSpaces(bytes.NewReader(out), spacesBefore)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ out, err = format.Source(out)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+// parse parses src, which was read from filename,
+// as a Go source file or statement list.
+func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast.File, func(orig, src []byte) []byte, error) {
+ parserMode := parser.Mode(0)
+ if opt.Comments {
+ parserMode |= parser.ParseComments
+ }
+ if opt.AllErrors {
+ parserMode |= parser.AllErrors
+ }
+
+ // Try as whole source file.
+ file, err := parser.ParseFile(fset, filename, src, parserMode)
+ if err == nil {
+ return file, nil, nil
+ }
+ // If the error is that the source file didn't begin with a
+ // package line and we accept fragmented input, fall through to
+ // try as a source fragment. Stop and return on any other error.
+ if !opt.Fragment || !strings.Contains(err.Error(), "expected 'package'") {
+ return nil, nil, err
+ }
+
+ // If this is a declaration list, make it a source file
+ // by inserting a package clause.
+ // Insert using a ;, not a newline, so that parse errors are on
+ // the correct line.
+ const prefix = "package main;"
+ psrc := append([]byte(prefix), src...)
+ file, err = parser.ParseFile(fset, filename, psrc, parserMode)
+ if err == nil {
+ // Gofmt will turn the ; into a \n.
+ // Do that ourselves now and update the file contents,
+ // so that positions and line numbers are correct going forward.
+ psrc[len(prefix)-1] = '\n'
+ fset.File(file.Package).SetLinesForContent(psrc)
+
+ // If a main function exists, we will assume this is a main
+ // package and leave the file.
+ if containsMainFunc(file) {
+ return file, nil, nil
+ }
+
+ adjust := func(orig, src []byte) []byte {
+ // Remove the package clause.
+ src = src[len(prefix):]
+ return matchSpace(orig, src)
+ }
+ return file, adjust, nil
+ }
+ // If the error is that the source file didn't begin with a
+ // declaration, fall through to try as a statement list.
+ // Stop and return on any other error.
+ if !strings.Contains(err.Error(), "expected declaration") {
+ return nil, nil, err
+ }
+
+ // If this is a statement list, make it a source file
+ // by inserting a package clause and turning the list
+ // into a function body. This handles expressions too.
+ // Insert using a ;, not a newline, so that the line numbers
+ // in fsrc match the ones in src.
+ fsrc := append(append([]byte("package p; func _() {"), src...), '}')
+ file, err = parser.ParseFile(fset, filename, fsrc, parserMode)
+ if err == nil {
+ adjust := func(orig, src []byte) []byte {
+ // Remove the wrapping.
+ // Gofmt has turned the ; into a \n\n.
+ src = src[len("package p\n\nfunc _() {"):]
+ src = src[:len(src)-len("}\n")]
+ // Gofmt has also indented the function body one level.
+ // Remove that indent.
+ src = bytes.Replace(src, []byte("\n\t"), []byte("\n"), -1)
+ return matchSpace(orig, src)
+ }
+ return file, adjust, nil
+ }
+
+ // Failed, and out of options.
+ return nil, nil, err
+}
+
+// containsMainFunc reports whether the file contains a function declaration
+// with the signature 'func main()'.
+func containsMainFunc(file *ast.File) bool {
+ for _, decl := range file.Decls {
+ if f, ok := decl.(*ast.FuncDecl); ok {
+ if f.Name.Name != "main" {
+ continue
+ }
+
+ if len(f.Type.Params.List) != 0 {
+ continue
+ }
+
+ if f.Type.Results != nil && len(f.Type.Results.List) != 0 {
+ continue
+ }
+
+ return true
+ }
+ }
+
+ return false
+}
+
+func cutSpace(b []byte) (before, middle, after []byte) {
+ i := 0
+ for i < len(b) && (b[i] == ' ' || b[i] == '\t' || b[i] == '\n') {
+ i++
+ }
+ j := len(b)
+ for j > 0 && (b[j-1] == ' ' || b[j-1] == '\t' || b[j-1] == '\n') {
+ j--
+ }
+ if i <= j {
+ return b[:i], b[i:j], b[j:]
+ }
+ return nil, nil, b[j:]
+}
+
+// matchSpace reformats src to use the same space context as orig.
+// 1) If orig begins with blank lines, matchSpace inserts them at the beginning of src.
+// 2) matchSpace copies the indentation of the first non-blank line in orig
+// to every non-blank line in src.
+// 3) matchSpace copies the trailing space from orig and uses it in place
+// of src's trailing space.
+func matchSpace(orig []byte, src []byte) []byte {
+ before, _, after := cutSpace(orig)
+ i := bytes.LastIndex(before, []byte{'\n'})
+ before, indent := before[:i+1], before[i+1:]
+
+ _, src, _ = cutSpace(src)
+
+ var b bytes.Buffer
+ b.Write(before)
+ for len(src) > 0 {
+ line := src
+ if i := bytes.IndexByte(line, '\n'); i >= 0 {
+ line, src = line[:i+1], line[i+1:]
+ } else {
+ src = nil
+ }
+ if len(line) > 0 && line[0] != '\n' { // not blank
+ b.Write(indent)
+ }
+ b.Write(line)
+ }
+ b.Write(after)
+ return b.Bytes()
+}
+
+var impLine = regexp.MustCompile(`^\s+(?:[\w\.]+\s+)?"(.+?)"`)
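+
+// Illustrative match (not part of the upstream source): for an indented
+// import line such as `mypkg "example.com/x/mypkg"`, impLine captures
+// "example.com/x/mypkg", which addImportSpaces compares against the pending
+// entries in breaks.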
+
+func addImportSpaces(r io.Reader, breaks []string) ([]byte, error) {
+ var out bytes.Buffer
+ in := bufio.NewReader(r)
+ inImports := false
+ done := false
+ for {
+ s, err := in.ReadString('\n')
+ if err == io.EOF {
+ break
+ } else if err != nil {
+ return nil, err
+ }
+
+ if !inImports && !done && strings.HasPrefix(s, "import") {
+ inImports = true
+ }
+ if inImports && (strings.HasPrefix(s, "var") ||
+ strings.HasPrefix(s, "func") ||
+ strings.HasPrefix(s, "const") ||
+ strings.HasPrefix(s, "type")) {
+ done = true
+ inImports = false
+ }
+ if inImports && len(breaks) > 0 {
+ if m := impLine.FindStringSubmatch(s); m != nil {
+ if m[1] == breaks[0] {
+ out.WriteByte('\n')
+ breaks = breaks[1:]
+ }
+ }
+ }
+
+ fmt.Fprint(&out, s)
+ }
+ return out.Bytes(), nil
+}
diff --git a/vendor/golang.org/x/tools/internal/imports/mod.go b/vendor/golang.org/x/tools/internal/imports/mod.go
new file mode 100644
index 000000000..2bcf41f5f
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/imports/mod.go
@@ -0,0 +1,698 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package imports
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+
+ "golang.org/x/mod/module"
+ "golang.org/x/tools/internal/gocommand"
+ "golang.org/x/tools/internal/gopathwalk"
+)
+
+// ModuleResolver implements resolver for modules using the go command as little
+// as feasible.
+type ModuleResolver struct {
+ env *ProcessEnv
+ moduleCacheDir string
+ dummyVendorMod *gocommand.ModuleJSON // If vendoring is enabled, the pseudo-module that represents the /vendor directory.
+ roots []gopathwalk.Root
+ scanSema chan struct{} // scanSema prevents concurrent scans and guards scannedRoots.
+ scannedRoots map[gopathwalk.Root]bool
+
+ initialized bool
+ mains []*gocommand.ModuleJSON
+ mainByDir map[string]*gocommand.ModuleJSON
+ modsByModPath []*gocommand.ModuleJSON // All modules, ordered by # of path components in module Path...
+ modsByDir []*gocommand.ModuleJSON // ...or Dir.
+
+ // moduleCacheCache stores information about the module cache.
+ moduleCacheCache *dirInfoCache
+ otherCache *dirInfoCache
+}
+
+func newModuleResolver(e *ProcessEnv) *ModuleResolver {
+ r := &ModuleResolver{
+ env: e,
+ scanSema: make(chan struct{}, 1),
+ }
+ r.scanSema <- struct{}{}
+ return r
+}
+
+func (r *ModuleResolver) init() error {
+ if r.initialized {
+ return nil
+ }
+
+ goenv, err := r.env.goEnv()
+ if err != nil {
+ return err
+ }
+ inv := gocommand.Invocation{
+ BuildFlags: r.env.BuildFlags,
+ ModFlag: r.env.ModFlag,
+ ModFile: r.env.ModFile,
+ Env: r.env.env(),
+ Logf: r.env.Logf,
+ WorkingDir: r.env.WorkingDir,
+ }
+ vendorEnabled, mainModVendor, err := gocommand.VendorEnabled(context.TODO(), inv, r.env.GocmdRunner)
+ if err != nil {
+ return err
+ }
+
+ if mainModVendor != nil && vendorEnabled {
+ // Vendor mode is on, so all the non-Main modules are irrelevant,
+ // and we need to search /vendor for everything.
+ r.mains = []*gocommand.ModuleJSON{mainModVendor}
+ r.dummyVendorMod = &gocommand.ModuleJSON{
+ Path: "",
+ Dir: filepath.Join(mainModVendor.Dir, "vendor"),
+ }
+ r.modsByModPath = []*gocommand.ModuleJSON{mainModVendor, r.dummyVendorMod}
+ r.modsByDir = []*gocommand.ModuleJSON{mainModVendor, r.dummyVendorMod}
+ } else {
+ // Vendor mode is off, so run go list -m ... to find everything.
+ err := r.initAllMods()
+ // We expect an error when running outside of a module with
+ // GO111MODULE=on. Other errors are fatal.
+ if err != nil {
+ if errMsg := err.Error(); !strings.Contains(errMsg, "working directory is not part of a module") && !strings.Contains(errMsg, "go.mod file not found") {
+ return err
+ }
+ }
+ }
+
+ if gmc := r.env.Env["GOMODCACHE"]; gmc != "" {
+ r.moduleCacheDir = gmc
+ } else {
+ gopaths := filepath.SplitList(goenv["GOPATH"])
+ if len(gopaths) == 0 {
+ return fmt.Errorf("empty GOPATH")
+ }
+ r.moduleCacheDir = filepath.Join(gopaths[0], "/pkg/mod")
+ }
+
+ sort.Slice(r.modsByModPath, func(i, j int) bool {
+ count := func(x int) int {
+ return strings.Count(r.modsByModPath[x].Path, "/")
+ }
+ return count(j) < count(i) // descending order
+ })
+ sort.Slice(r.modsByDir, func(i, j int) bool {
+ count := func(x int) int {
+ return strings.Count(r.modsByDir[x].Dir, "/")
+ }
+ return count(j) < count(i) // descending order
+ })
+
+ r.roots = []gopathwalk.Root{
+ {filepath.Join(goenv["GOROOT"], "/src"), gopathwalk.RootGOROOT},
+ }
+ r.mainByDir = make(map[string]*gocommand.ModuleJSON)
+ for _, main := range r.mains {
+ r.roots = append(r.roots, gopathwalk.Root{main.Dir, gopathwalk.RootCurrentModule})
+ r.mainByDir[main.Dir] = main
+ }
+ if vendorEnabled {
+ r.roots = append(r.roots, gopathwalk.Root{r.dummyVendorMod.Dir, gopathwalk.RootOther})
+ } else {
+ addDep := func(mod *gocommand.ModuleJSON) {
+ if mod.Replace == nil {
+ // This is redundant with the cache, but we'll skip it cheaply enough.
+ r.roots = append(r.roots, gopathwalk.Root{mod.Dir, gopathwalk.RootModuleCache})
+ } else {
+ r.roots = append(r.roots, gopathwalk.Root{mod.Dir, gopathwalk.RootOther})
+ }
+ }
+ // Walk dependent modules before scanning the full mod cache, direct deps first.
+ for _, mod := range r.modsByModPath {
+ if !mod.Indirect && !mod.Main {
+ addDep(mod)
+ }
+ }
+ for _, mod := range r.modsByModPath {
+ if mod.Indirect && !mod.Main {
+ addDep(mod)
+ }
+ }
+ r.roots = append(r.roots, gopathwalk.Root{r.moduleCacheDir, gopathwalk.RootModuleCache})
+ }
+
+ r.scannedRoots = map[gopathwalk.Root]bool{}
+ if r.moduleCacheCache == nil {
+ r.moduleCacheCache = &dirInfoCache{
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
+ }
+ }
+ if r.otherCache == nil {
+ r.otherCache = &dirInfoCache{
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
+ }
+ }
+ r.initialized = true
+ return nil
+}
+
+func (r *ModuleResolver) initAllMods() error {
+ stdout, err := r.env.invokeGo(context.TODO(), "list", "-m", "-e", "-json", "...")
+ if err != nil {
+ return err
+ }
+ for dec := json.NewDecoder(stdout); dec.More(); {
+ mod := &gocommand.ModuleJSON{}
+ if err := dec.Decode(mod); err != nil {
+ return err
+ }
+ if mod.Dir == "" {
+ if r.env.Logf != nil {
+ r.env.Logf("module %v has not been downloaded and will be ignored", mod.Path)
+ }
+ // Can't do anything with a module that's not downloaded.
+ continue
+ }
+ // golang/go#36193: the go command doesn't always clean paths.
+ mod.Dir = filepath.Clean(mod.Dir)
+ r.modsByModPath = append(r.modsByModPath, mod)
+ r.modsByDir = append(r.modsByDir, mod)
+ if mod.Main {
+ r.mains = append(r.mains, mod)
+ }
+ }
+ return nil
+}
+
+func (r *ModuleResolver) ClearForNewScan() {
+ <-r.scanSema
+ r.scannedRoots = map[gopathwalk.Root]bool{}
+ r.otherCache = &dirInfoCache{
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
+ }
+ r.scanSema <- struct{}{}
+}
+
+func (r *ModuleResolver) ClearForNewMod() {
+ <-r.scanSema
+ *r = ModuleResolver{
+ env: r.env,
+ moduleCacheCache: r.moduleCacheCache,
+ otherCache: r.otherCache,
+ scanSema: r.scanSema,
+ }
+ r.init()
+ r.scanSema <- struct{}{}
+}
+
+// findPackage returns the module and directory that contains the package at
+// the given import path, or returns nil, "" if no module is in scope.
+func (r *ModuleResolver) findPackage(importPath string) (*gocommand.ModuleJSON, string) {
+ // This can't find packages in the stdlib, but that's harmless for all
+ // the existing code paths.
+ for _, m := range r.modsByModPath {
+ if !strings.HasPrefix(importPath, m.Path) {
+ continue
+ }
+ pathInModule := importPath[len(m.Path):]
+ pkgDir := filepath.Join(m.Dir, pathInModule)
+ if r.dirIsNestedModule(pkgDir, m) {
+ continue
+ }
+
+ if info, ok := r.cacheLoad(pkgDir); ok {
+ if loaded, err := info.reachedStatus(nameLoaded); loaded {
+ if err != nil {
+ continue // No package in this dir.
+ }
+ return m, pkgDir
+ }
+ if scanned, err := info.reachedStatus(directoryScanned); scanned && err != nil {
+ continue // Dir is unreadable, etc.
+ }
+ // This is slightly wrong: a directory doesn't have to have an
+ // importable package to count as a package for package-to-module
+ // resolution. package main or _test files should count but
+ // don't.
+ // TODO(heschi): fix this.
+ if _, err := r.cachePackageName(info); err == nil {
+ return m, pkgDir
+ }
+ }
+
+ // Not cached. Read the filesystem.
+ pkgFiles, err := ioutil.ReadDir(pkgDir)
+ if err != nil {
+ continue
+ }
+ // A module only contains a package if it has buildable go
+ // files in that directory. If not, it could be provided by an
+ // outer module. See #29736.
+ for _, fi := range pkgFiles {
+ if ok, _ := r.env.matchFile(pkgDir, fi.Name()); ok {
+ return m, pkgDir
+ }
+ }
+ }
+ return nil, ""
+}
+
+func (r *ModuleResolver) cacheLoad(dir string) (directoryPackageInfo, bool) {
+ if info, ok := r.moduleCacheCache.Load(dir); ok {
+ return info, ok
+ }
+ return r.otherCache.Load(dir)
+}
+
+func (r *ModuleResolver) cacheStore(info directoryPackageInfo) {
+ if info.rootType == gopathwalk.RootModuleCache {
+ r.moduleCacheCache.Store(info.dir, info)
+ } else {
+ r.otherCache.Store(info.dir, info)
+ }
+}
+
+func (r *ModuleResolver) cacheKeys() []string {
+ return append(r.moduleCacheCache.Keys(), r.otherCache.Keys()...)
+}
+
+// cachePackageName caches the package name for a dir already in the cache.
+func (r *ModuleResolver) cachePackageName(info directoryPackageInfo) (string, error) {
+ if info.rootType == gopathwalk.RootModuleCache {
+ return r.moduleCacheCache.CachePackageName(info)
+ }
+ return r.otherCache.CachePackageName(info)
+}
+
+func (r *ModuleResolver) cacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) {
+ if info.rootType == gopathwalk.RootModuleCache {
+ return r.moduleCacheCache.CacheExports(ctx, env, info)
+ }
+ return r.otherCache.CacheExports(ctx, env, info)
+}
+
+// findModuleByDir returns the module that contains dir, or nil if no such
+// module is in scope.
+func (r *ModuleResolver) findModuleByDir(dir string) *gocommand.ModuleJSON {
+ // This is quite tricky and may not be correct. dir could be:
+ // - a package in the main module.
+ // - a replace target underneath the main module's directory.
+ // - a nested module in the above.
+ // - a replace target somewhere totally random.
+ // - a nested module in the above.
+ // - in the mod cache.
+ // - in /vendor/ in -mod=vendor mode.
+ // - nested module? Dunno.
+ // Rumor has it that replace targets cannot contain other replace targets.
+ for _, m := range r.modsByDir {
+ if !strings.HasPrefix(dir, m.Dir) {
+ continue
+ }
+
+ if r.dirIsNestedModule(dir, m) {
+ continue
+ }
+
+ return m
+ }
+ return nil
+}
+
+// dirIsNestedModule reports if dir is contained in a nested module underneath
+// mod, not actually in mod.
+func (r *ModuleResolver) dirIsNestedModule(dir string, mod *gocommand.ModuleJSON) bool {
+ if !strings.HasPrefix(dir, mod.Dir) {
+ return false
+ }
+ if r.dirInModuleCache(dir) {
+ // Nested modules in the module cache are pruned,
+ // so it cannot be a nested module.
+ return false
+ }
+ if mod != nil && mod == r.dummyVendorMod {
+ // The /vendor pseudomodule is flattened and doesn't actually count.
+ return false
+ }
+ modDir, _ := r.modInfo(dir)
+ if modDir == "" {
+ return false
+ }
+ return modDir != mod.Dir
+}
+
+func (r *ModuleResolver) modInfo(dir string) (modDir string, modName string) {
+ readModName := func(modFile string) string {
+ modBytes, err := ioutil.ReadFile(modFile)
+ if err != nil {
+ return ""
+ }
+ return modulePath(modBytes)
+ }
+
+ if r.dirInModuleCache(dir) {
+ if matches := modCacheRegexp.FindStringSubmatch(dir); len(matches) == 3 {
+ index := strings.Index(dir, matches[1]+"@"+matches[2])
+ modDir := filepath.Join(dir[:index], matches[1]+"@"+matches[2])
+ return modDir, readModName(filepath.Join(modDir, "go.mod"))
+ }
+ }
+ for {
+ if info, ok := r.cacheLoad(dir); ok {
+ return info.moduleDir, info.moduleName
+ }
+ f := filepath.Join(dir, "go.mod")
+ info, err := os.Stat(f)
+ if err == nil && !info.IsDir() {
+ return dir, readModName(f)
+ }
+
+ d := filepath.Dir(dir)
+ if len(d) >= len(dir) {
+ return "", "" // reached top of file system, no go.mod
+ }
+ dir = d
+ }
+}
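+
+// For example (illustrative, with a hypothetical cache path): for a directory
+// such as "/go/pkg/mod/example.com/foo@v1.0.0/bar", modInfo returns modDir
+// "/go/pkg/mod/example.com/foo@v1.0.0" and the module name read from the
+// go.mod file in that directory.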
+
+func (r *ModuleResolver) dirInModuleCache(dir string) bool {
+ if r.moduleCacheDir == "" {
+ return false
+ }
+ return strings.HasPrefix(dir, r.moduleCacheDir)
+}
+
+func (r *ModuleResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
+ if err := r.init(); err != nil {
+ return nil, err
+ }
+ names := map[string]string{}
+ for _, path := range importPaths {
+ _, packageDir := r.findPackage(path)
+ if packageDir == "" {
+ continue
+ }
+ name, err := packageDirToName(packageDir)
+ if err != nil {
+ continue
+ }
+ names[path] = name
+ }
+ return names, nil
+}
+
+func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error {
+ if err := r.init(); err != nil {
+ return err
+ }
+
+ processDir := func(info directoryPackageInfo) {
+ // Skip this directory if we were not able to get the package information successfully.
+ if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
+ return
+ }
+ pkg, err := r.canonicalize(info)
+ if err != nil {
+ return
+ }
+
+ if !callback.dirFound(pkg) {
+ return
+ }
+ pkg.packageName, err = r.cachePackageName(info)
+ if err != nil {
+ return
+ }
+
+ if !callback.packageNameLoaded(pkg) {
+ return
+ }
+ _, exports, err := r.loadExports(ctx, pkg, false)
+ if err != nil {
+ return
+ }
+ callback.exportsLoaded(pkg, exports)
+ }
+
+ // Start processing everything in the cache, and listen for the new stuff
+ // we discover in the walk below.
+ stop1 := r.moduleCacheCache.ScanAndListen(ctx, processDir)
+ defer stop1()
+ stop2 := r.otherCache.ScanAndListen(ctx, processDir)
+ defer stop2()
+
+ // We assume cached directories are fully cached, including all their
+ // children, and have not changed. We can skip them.
+ skip := func(root gopathwalk.Root, dir string) bool {
+ info, ok := r.cacheLoad(dir)
+ if !ok {
+ return false
+ }
+ // This directory can be skipped as long as we have already scanned it.
+ // Packages with errors will continue to have errors, so there is no need
+ // to rescan them.
+ packageScanned, _ := info.reachedStatus(directoryScanned)
+ return packageScanned
+ }
+
+ // Add anything new to the cache, and process it if we're still listening.
+ add := func(root gopathwalk.Root, dir string) {
+ r.cacheStore(r.scanDirForPackage(root, dir))
+ }
+
+ // r.roots and the callback are not necessarily safe to use in the
+ // goroutine below. Process them eagerly.
+ roots := filterRoots(r.roots, callback.rootFound)
+ // We can't cancel walks, because we need them to finish to have a usable
+ // cache. Instead, run them in a separate goroutine and detach.
+ scanDone := make(chan struct{})
+ go func() {
+ select {
+ case <-ctx.Done():
+ return
+ case <-r.scanSema:
+ }
+ defer func() { r.scanSema <- struct{}{} }()
+ // We have the lock on r.scannedRoots, and no other scans can run.
+ for _, root := range roots {
+ if ctx.Err() != nil {
+ return
+ }
+
+ if r.scannedRoots[root] {
+ continue
+ }
+ gopathwalk.WalkSkip([]gopathwalk.Root{root}, add, skip, gopathwalk.Options{Logf: r.env.Logf, ModulesEnabled: true})
+ r.scannedRoots[root] = true
+ }
+ close(scanDone)
+ }()
+ select {
+ case <-ctx.Done():
+ case <-scanDone:
+ }
+ return nil
+}
+
+func (r *ModuleResolver) scoreImportPath(ctx context.Context, path string) float64 {
+ if _, ok := stdlib[path]; ok {
+ return MaxRelevance
+ }
+ mod, _ := r.findPackage(path)
+ return modRelevance(mod)
+}
+
+func modRelevance(mod *gocommand.ModuleJSON) float64 {
+ var relevance float64
+ switch {
+ case mod == nil: // out of scope
+ return MaxRelevance - 4
+ case mod.Indirect:
+ relevance = MaxRelevance - 3
+ case !mod.Main:
+ relevance = MaxRelevance - 2
+ default:
+ relevance = MaxRelevance - 1 // main module ties with stdlib
+ }
+
+ _, versionString, ok := module.SplitPathVersion(mod.Path)
+ if ok {
+ index := strings.Index(versionString, "v")
+ if index == -1 {
+ return relevance
+ }
+ if versionNumber, err := strconv.ParseFloat(versionString[index+1:], 64); err == nil {
+ relevance += versionNumber / 1000
+ }
+ }
+
+ return relevance
+}
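+
+// Resulting order (illustrative, derived from the switch above): out-of-scope
+// modules score lowest, then indirect dependencies, then other in-scope
+// modules, with the main module highest; a module path ending in a major
+// version such as /v3 additionally gains 3/1000, so newer majors win ties.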
+
+// canonicalize returns the canonical form of the package described by info,
+// using the module information gathered when the resolver was initialized
+// from 'go list -m'.
+func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) {
+ // Packages in GOROOT are already canonical, regardless of the std/cmd modules.
+ if info.rootType == gopathwalk.RootGOROOT {
+ return &pkg{
+ importPathShort: info.nonCanonicalImportPath,
+ dir: info.dir,
+ packageName: path.Base(info.nonCanonicalImportPath),
+ relevance: MaxRelevance,
+ }, nil
+ }
+
+ importPath := info.nonCanonicalImportPath
+ mod := r.findModuleByDir(info.dir)
+ // Check if the directory is underneath a module that's in scope.
+ if mod != nil {
+ // It is. If dir is the target of a replace directive,
+ // our guessed import path is wrong. Use the real one.
+ if mod.Dir == info.dir {
+ importPath = mod.Path
+ } else {
+ dirInMod := info.dir[len(mod.Dir)+len("/"):]
+ importPath = path.Join(mod.Path, filepath.ToSlash(dirInMod))
+ }
+ } else if !strings.HasPrefix(importPath, info.moduleName) {
+ // The module's name doesn't match the package's import path. It
+ // probably needs a replace directive we don't have.
+ return nil, fmt.Errorf("package in %q is not valid without a replace statement", info.dir)
+ }
+
+ res := &pkg{
+ importPathShort: importPath,
+ dir: info.dir,
+ relevance: modRelevance(mod),
+ }
+ // We may have discovered a package that has a different version
+ // in scope already. Canonicalize to that one if possible.
+ if _, canonicalDir := r.findPackage(importPath); canonicalDir != "" {
+ res.dir = canonicalDir
+ }
+ return res, nil
+}
+
+func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) {
+ if err := r.init(); err != nil {
+ return "", nil, err
+ }
+ if info, ok := r.cacheLoad(pkg.dir); ok && !includeTest {
+ return r.cacheExports(ctx, r.env, info)
+ }
+ return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest)
+}
+
+func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) directoryPackageInfo {
+ subdir := ""
+ if dir != root.Path {
+ subdir = dir[len(root.Path)+len("/"):]
+ }
+ importPath := filepath.ToSlash(subdir)
+ if strings.HasPrefix(importPath, "vendor/") {
+ // Only enter vendor directories if they're explicitly requested as a root.
+ return directoryPackageInfo{
+ status: directoryScanned,
+ err: fmt.Errorf("unwanted vendor directory"),
+ }
+ }
+ switch root.Type {
+ case gopathwalk.RootCurrentModule:
+ importPath = path.Join(r.mainByDir[root.Path].Path, filepath.ToSlash(subdir))
+ case gopathwalk.RootModuleCache:
+ matches := modCacheRegexp.FindStringSubmatch(subdir)
+ if len(matches) == 0 {
+ return directoryPackageInfo{
+ status: directoryScanned,
+ err: fmt.Errorf("invalid module cache path: %v", subdir),
+ }
+ }
+ modPath, err := module.UnescapePath(filepath.ToSlash(matches[1]))
+ if err != nil {
+ if r.env.Logf != nil {
+ r.env.Logf("decoding module cache path %q: %v", subdir, err)
+ }
+ return directoryPackageInfo{
+ status: directoryScanned,
+ err: fmt.Errorf("decoding module cache path %q: %v", subdir, err),
+ }
+ }
+ importPath = path.Join(modPath, filepath.ToSlash(matches[3]))
+ }
+
+ modDir, modName := r.modInfo(dir)
+ result := directoryPackageInfo{
+ status: directoryScanned,
+ dir: dir,
+ rootType: root.Type,
+ nonCanonicalImportPath: importPath,
+ moduleDir: modDir,
+ moduleName: modName,
+ }
+ if root.Type == gopathwalk.RootGOROOT {
+ // stdlib packages are always in scope, despite the confusing go.mod
+ return result
+ }
+ return result
+}
+
+// modCacheRegexp splits a path in a module cache into module, module version, and package.
+var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`)
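+
+// For example (illustrative): "example.com/foo@v1.2.3/bar" splits into module
+// "example.com/foo", version "v1.2.3", and package subpath "/bar".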
+
+var (
+ slashSlash = []byte("//")
+ moduleStr = []byte("module")
+)
+
+// modulePath returns the module path from the gomod file text.
+// If it cannot find a module path, it returns an empty string.
+// It is tolerant of unrelated problems in the go.mod file.
+//
+// Copied from cmd/go/internal/modfile.
+func modulePath(mod []byte) string {
+ for len(mod) > 0 {
+ line := mod
+ mod = nil
+ if i := bytes.IndexByte(line, '\n'); i >= 0 {
+ line, mod = line[:i], line[i+1:]
+ }
+ if i := bytes.Index(line, slashSlash); i >= 0 {
+ line = line[:i]
+ }
+ line = bytes.TrimSpace(line)
+ if !bytes.HasPrefix(line, moduleStr) {
+ continue
+ }
+ line = line[len(moduleStr):]
+ n := len(line)
+ line = bytes.TrimSpace(line)
+ if len(line) == n || len(line) == 0 {
+ continue
+ }
+
+ if line[0] == '"' || line[0] == '`' {
+ p, err := strconv.Unquote(string(line))
+ if err != nil {
+ return "" // malformed quoted string or multiline module path
+ }
+ return p
+ }
+
+ return string(line)
+ }
+ return "" // missing module path
+}
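+
+// For example (illustrative, not part of the upstream source):
+// modulePath([]byte("module example.com/foo // comment\n")) returns
+// "example.com/foo"; a go.mod without a module directive yields "".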
diff --git a/vendor/golang.org/x/tools/internal/imports/mod_cache.go b/vendor/golang.org/x/tools/internal/imports/mod_cache.go
new file mode 100644
index 000000000..18dada495
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/imports/mod_cache.go
@@ -0,0 +1,236 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package imports
+
+import (
+ "context"
+ "fmt"
+ "sync"
+
+ "golang.org/x/tools/internal/gopathwalk"
+)
+
+// To find packages to import, the resolver needs to know about all of
+// the packages that could be imported. This includes packages that are
+// already in modules that are in (1) the current module, (2) replace targets,
+// and (3) packages in the module cache. Packages in (1) and (2) may change over
+// time, as the client may edit the current module and locally replaced modules.
+// The module cache (which includes all of the packages in (3)) can only
+// ever be added to.
+//
+// The resolver can thus save state about packages in the module cache
+// and guarantee that this will not change over time. To obtain information
+// about new modules added to the module cache, the module cache should be
+// rescanned.
+//
+// It is OK to serve information about modules that have been deleted,
+// as they do still exist.
+// TODO(suzmue): can we share information with the caller about
+// what module needs to be downloaded to import this package?
+
+type directoryPackageStatus int
+
+const (
+ _ directoryPackageStatus = iota
+ directoryScanned
+ nameLoaded
+ exportsLoaded
+)
+
+type directoryPackageInfo struct {
+ // status indicates the extent to which this struct has been filled in.
+ status directoryPackageStatus
+ // err is non-nil when there was an error trying to reach status.
+ err error
+
+ // Set when status >= directoryScanned.
+
+ // dir is the absolute directory of this package.
+ dir string
+ rootType gopathwalk.RootType
+ // nonCanonicalImportPath is the package's expected import path. It may
+ // not actually be importable at that path.
+ nonCanonicalImportPath string
+
+ // Module-related information.
+ moduleDir string // The directory that is the module root of this dir.
+ moduleName string // The module name that contains this dir.
+
+ // Set when status >= nameLoaded.
+
+ packageName string // the package name, as declared in the source.
+
+ // Set when status >= exportsLoaded.
+
+ exports []string
+}
+
+// reachedStatus reports whether info has a status of at least target, along
+// with any error recorded while trying to reach target.
+func (info *directoryPackageInfo) reachedStatus(target directoryPackageStatus) (bool, error) {
+ if info.err == nil {
+ return info.status >= target, nil
+ }
+ if info.status == target {
+ return true, info.err
+ }
+ return true, nil
+}
+
+// dirInfoCache is a concurrency safe map for storing information about
+// directories that may contain packages.
+//
+// The information in this cache is built incrementally. Entries are initialized in scan.
+// No new keys should be added in any other functions, as all directories containing
+// packages are identified in scan.
+//
+// Other functions, including loadExports and findPackage, may update entries in this cache
+// as they discover new things about the directory.
+//
+// The information in the cache is not expected to change for the cache's
+// lifetime, so there is no protection against competing writes. Users should
+// take care not to hold the cache across changes to the underlying files.
+//
+// TODO(suzmue): consider other concurrency strategies and data structures (RWLocks, sync.Map, etc)
+type dirInfoCache struct {
+ mu sync.Mutex
+ // dirs stores information about packages in directories, keyed by absolute path.
+ dirs map[string]*directoryPackageInfo
+ listeners map[*int]cacheListener
+}
+
+type cacheListener func(directoryPackageInfo)
+
+// ScanAndListen calls listener on all the items in the cache, and on anything
+// newly added. The returned stop function waits for all in-flight callbacks to
+// finish and blocks new ones.
+func (d *dirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener) func() {
+ ctx, cancel := context.WithCancel(ctx)
+
+ // Flushing out all the callbacks is tricky without knowing how many there
+ // are going to be. Setting an arbitrary limit makes it much easier.
+ const maxInFlight = 10
+ sema := make(chan struct{}, maxInFlight)
+ for i := 0; i < maxInFlight; i++ {
+ sema <- struct{}{}
+ }
+
+ cookie := new(int) // A unique ID we can use for the listener.
+
+ // We can't hold mu while calling the listener.
+ d.mu.Lock()
+ var keys []string
+ for key := range d.dirs {
+ keys = append(keys, key)
+ }
+ d.listeners[cookie] = func(info directoryPackageInfo) {
+ select {
+ case <-ctx.Done():
+ return
+ case <-sema:
+ }
+ listener(info)
+ sema <- struct{}{}
+ }
+ d.mu.Unlock()
+
+ stop := func() {
+ cancel()
+ d.mu.Lock()
+ delete(d.listeners, cookie)
+ d.mu.Unlock()
+ for i := 0; i < maxInFlight; i++ {
+ <-sema
+ }
+ }
+
+ // Process the pre-existing keys.
+ for _, k := range keys {
+ select {
+ case <-ctx.Done():
+ return stop
+ default:
+ }
+ if v, ok := d.Load(k); ok {
+ listener(v)
+ }
+ }
+
+ return stop
+}
+
+// Store stores the package info for dir.
+func (d *dirInfoCache) Store(dir string, info directoryPackageInfo) {
+ d.mu.Lock()
+ _, old := d.dirs[dir]
+ d.dirs[dir] = &info
+ var listeners []cacheListener
+ for _, l := range d.listeners {
+ listeners = append(listeners, l)
+ }
+ d.mu.Unlock()
+
+ if !old {
+ for _, l := range listeners {
+ l(info)
+ }
+ }
+}
+
+// Load returns a copy of the directoryPackageInfo for absolute directory dir.
+func (d *dirInfoCache) Load(dir string) (directoryPackageInfo, bool) {
+ d.mu.Lock()
+ defer d.mu.Unlock()
+ info, ok := d.dirs[dir]
+ if !ok {
+ return directoryPackageInfo{}, false
+ }
+ return *info, true
+}
+
+// Keys returns the keys currently present in d.
+func (d *dirInfoCache) Keys() (keys []string) {
+ d.mu.Lock()
+ defer d.mu.Unlock()
+ for key := range d.dirs {
+ keys = append(keys, key)
+ }
+ return keys
+}
+
+func (d *dirInfoCache) CachePackageName(info directoryPackageInfo) (string, error) {
+ if loaded, err := info.reachedStatus(nameLoaded); loaded {
+ return info.packageName, err
+ }
+ if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
+ return "", fmt.Errorf("cannot read package name, scan error: %v", err)
+ }
+ info.packageName, info.err = packageDirToName(info.dir)
+ info.status = nameLoaded
+ d.Store(info.dir, info)
+ return info.packageName, info.err
+}
+
+func (d *dirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) {
+ if reached, _ := info.reachedStatus(exportsLoaded); reached {
+ return info.packageName, info.exports, info.err
+ }
+ if reached, err := info.reachedStatus(nameLoaded); reached && err != nil {
+ return "", nil, err
+ }
+ info.packageName, info.exports, info.err = loadExportsFromFiles(ctx, env, info.dir, false)
+ if info.err == context.Canceled || info.err == context.DeadlineExceeded {
+ return info.packageName, info.exports, info.err
+ }
+ // The cache structure wants things to proceed linearly. We can skip a
+ // step here, but only if we succeed.
+ if info.status == nameLoaded || info.err == nil {
+ info.status = exportsLoaded
+ } else {
+ info.status = nameLoaded
+ }
+ d.Store(info.dir, info)
+ return info.packageName, info.exports, info.err
+}
diff --git a/vendor/golang.org/x/tools/internal/imports/sortimports.go b/vendor/golang.org/x/tools/internal/imports/sortimports.go
new file mode 100644
index 000000000..dc52372e4
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/imports/sortimports.go
@@ -0,0 +1,291 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Hacked up copy of go/ast/import.go
+
+package imports
+
+import (
+ "go/ast"
+ "go/token"
+ "log"
+ "sort"
+ "strconv"
+)
+
+// sortImports sorts runs of consecutive import lines in import blocks in f.
+// It also removes duplicate imports when it is possible to do so without data loss.
+func sortImports(localPrefix string, fset *token.FileSet, f *ast.File) {
+ for i, d := range f.Decls {
+ d, ok := d.(*ast.GenDecl)
+ if !ok || d.Tok != token.IMPORT {
+ // Not an import declaration, so we're done.
+ // Imports are always first.
+ break
+ }
+
+ if len(d.Specs) == 0 {
+ // Empty import block, remove it.
+ f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
+ }
+
+ if !d.Lparen.IsValid() {
+ // Not a block: sorted by default.
+ continue
+ }
+
+ // Identify and sort runs of specs on successive lines.
+ i := 0
+ specs := d.Specs[:0]
+ for j, s := range d.Specs {
+ if j > i && fset.Position(s.Pos()).Line > 1+fset.Position(d.Specs[j-1].End()).Line {
+ // j begins a new run. End this one.
+ specs = append(specs, sortSpecs(localPrefix, fset, f, d.Specs[i:j])...)
+ i = j
+ }
+ }
+ specs = append(specs, sortSpecs(localPrefix, fset, f, d.Specs[i:])...)
+ d.Specs = specs
+
+ // Deduping can leave a blank line before the rparen; clean that up.
+ if len(d.Specs) > 0 {
+ lastSpec := d.Specs[len(d.Specs)-1]
+ lastLine := fset.Position(lastSpec.Pos()).Line
+ if rParenLine := fset.Position(d.Rparen).Line; rParenLine > lastLine+1 {
+ fset.File(d.Rparen).MergeLine(rParenLine - 1)
+ }
+ }
+ }
+}
+
+// mergeImports merges all the import declarations into the first one.
+// Taken from golang.org/x/tools/ast/astutil.
+// This does not adjust line numbers properly.
+func mergeImports(fset *token.FileSet, f *ast.File) {
+ if len(f.Decls) <= 1 {
+ return
+ }
+
+ // Merge all the import declarations into the first one.
+ var first *ast.GenDecl
+ for i := 0; i < len(f.Decls); i++ {
+ decl := f.Decls[i]
+ gen, ok := decl.(*ast.GenDecl)
+ if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
+ continue
+ }
+ if first == nil {
+ first = gen
+ continue // Don't touch the first one.
+ }
+ // We now know there is more than one package in this import
+ // declaration. Ensure that it ends up parenthesized.
+ first.Lparen = first.Pos()
+ // Move the imports of the other import declaration to the first one.
+ for _, spec := range gen.Specs {
+ spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
+ first.Specs = append(first.Specs, spec)
+ }
+ f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
+ i--
+ }
+}
+
+// declImports reports whether gen contains an import of path.
+// Taken from golang.org/x/tools/ast/astutil.
+func declImports(gen *ast.GenDecl, path string) bool {
+ if gen.Tok != token.IMPORT {
+ return false
+ }
+ for _, spec := range gen.Specs {
+ impspec := spec.(*ast.ImportSpec)
+ if importPath(impspec) == path {
+ return true
+ }
+ }
+ return false
+}
+
+func importPath(s ast.Spec) string {
+ t, err := strconv.Unquote(s.(*ast.ImportSpec).Path.Value)
+ if err == nil {
+ return t
+ }
+ return ""
+}
+
+func importName(s ast.Spec) string {
+ n := s.(*ast.ImportSpec).Name
+ if n == nil {
+ return ""
+ }
+ return n.Name
+}
+
+func importComment(s ast.Spec) string {
+ c := s.(*ast.ImportSpec).Comment
+ if c == nil {
+ return ""
+ }
+ return c.Text()
+}
+
+// collapse indicates whether prev may be removed, leaving only next.
+func collapse(prev, next ast.Spec) bool {
+ if importPath(next) != importPath(prev) || importName(next) != importName(prev) {
+ return false
+ }
+ return prev.(*ast.ImportSpec).Comment == nil
+}
+
+type posSpan struct {
+ Start token.Pos
+ End token.Pos
+}
+
+func sortSpecs(localPrefix string, fset *token.FileSet, f *ast.File, specs []ast.Spec) []ast.Spec {
+ // Can't short-circuit here even if specs are already sorted,
+ // since they might yet need deduplication.
+ // A lone import, however, may be safely ignored.
+ if len(specs) <= 1 {
+ return specs
+ }
+
+ // Record positions for specs.
+ pos := make([]posSpan, len(specs))
+ for i, s := range specs {
+ pos[i] = posSpan{s.Pos(), s.End()}
+ }
+
+ // Identify comments in this range.
+ // Any comment from pos[0].Start to the final line counts.
+ lastLine := fset.Position(pos[len(pos)-1].End).Line
+ cstart := len(f.Comments)
+ cend := len(f.Comments)
+ for i, g := range f.Comments {
+ if g.Pos() < pos[0].Start {
+ continue
+ }
+ if i < cstart {
+ cstart = i
+ }
+ if fset.Position(g.End()).Line > lastLine {
+ cend = i
+ break
+ }
+ }
+ comments := f.Comments[cstart:cend]
+
+ // Assign each comment to the import spec preceding it.
+ importComment := map[*ast.ImportSpec][]*ast.CommentGroup{}
+ specIndex := 0
+ for _, g := range comments {
+ for specIndex+1 < len(specs) && pos[specIndex+1].Start <= g.Pos() {
+ specIndex++
+ }
+ s := specs[specIndex].(*ast.ImportSpec)
+ importComment[s] = append(importComment[s], g)
+ }
+
+ // Sort the import specs by import path.
+ // Remove duplicates, when possible without data loss.
+ // Reassign the import paths to have the same position sequence.
+ // Reassign each comment to abut the end of its spec.
+ // Sort the comments by new position.
+ sort.Sort(byImportSpec{localPrefix, specs})
+
+ // Dedup. Thanks to our sorting, we can just consider
+ // adjacent pairs of imports.
+ deduped := specs[:0]
+ for i, s := range specs {
+ if i == len(specs)-1 || !collapse(s, specs[i+1]) {
+ deduped = append(deduped, s)
+ } else {
+ p := s.Pos()
+ fset.File(p).MergeLine(fset.Position(p).Line)
+ }
+ }
+ specs = deduped
+
+ // Fix up comment positions
+ for i, s := range specs {
+ s := s.(*ast.ImportSpec)
+ if s.Name != nil {
+ s.Name.NamePos = pos[i].Start
+ }
+ s.Path.ValuePos = pos[i].Start
+ s.EndPos = pos[i].End
+ nextSpecPos := pos[i].End
+
+ for _, g := range importComment[s] {
+ for _, c := range g.List {
+ c.Slash = pos[i].End
+ nextSpecPos = c.End()
+ }
+ }
+ if i < len(specs)-1 {
+ pos[i+1].Start = nextSpecPos
+ pos[i+1].End = nextSpecPos
+ }
+ }
+
+ sort.Sort(byCommentPos(comments))
+
+ // Fixing up comment positions can introduce blank lines, because the import specs end up on different lines.
+ // We remove those blank lines here by merging each import spec's line into the first import spec's line.
+ firstSpecLine := fset.Position(specs[0].Pos()).Line
+ for _, s := range specs[1:] {
+ p := s.Pos()
+ line := fset.File(p).Line(p)
+ for previousLine := line - 1; previousLine >= firstSpecLine; {
+ // MergeLine can panic. Avoid the panic at the cost of not removing the blank line
+ // golang/go#50329
+ if previousLine > 0 && previousLine < fset.File(p).LineCount() {
+ fset.File(p).MergeLine(previousLine)
+ previousLine--
+ } else {
+ // try to gather some data to diagnose how this could happen
+ req := "Please report what the imports section of your go file looked like."
+ log.Printf("panic avoided: first:%d line:%d previous:%d max:%d. %s",
+ firstSpecLine, line, previousLine, fset.File(p).LineCount(), req)
+ }
+ }
+ }
+ return specs
+}
+
+type byImportSpec struct {
+ localPrefix string
+ specs []ast.Spec // slice of *ast.ImportSpec
+}
+
+func (x byImportSpec) Len() int { return len(x.specs) }
+func (x byImportSpec) Swap(i, j int) { x.specs[i], x.specs[j] = x.specs[j], x.specs[i] }
+func (x byImportSpec) Less(i, j int) bool {
+ ipath := importPath(x.specs[i])
+ jpath := importPath(x.specs[j])
+
+ igroup := importGroup(x.localPrefix, ipath)
+ jgroup := importGroup(x.localPrefix, jpath)
+ if igroup != jgroup {
+ return igroup < jgroup
+ }
+
+ if ipath != jpath {
+ return ipath < jpath
+ }
+ iname := importName(x.specs[i])
+ jname := importName(x.specs[j])
+
+ if iname != jname {
+ return iname < jname
+ }
+ return importComment(x.specs[i]) < importComment(x.specs[j])
+}
+
+type byCommentPos []*ast.CommentGroup
+
+func (x byCommentPos) Len() int { return len(x) }
+func (x byCommentPos) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
+func (x byCommentPos) Less(i, j int) bool { return x[i].Pos() < x[j].Pos() }
diff --git a/vendor/golang.org/x/tools/internal/imports/zstdlib.go b/vendor/golang.org/x/tools/internal/imports/zstdlib.go
new file mode 100644
index 000000000..7de2be9b4
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/imports/zstdlib.go
@@ -0,0 +1,10756 @@
+// Code generated by mkstdlib.go. DO NOT EDIT.
+
+package imports
+
+var stdlib = map[string][]string{
+ "archive/tar": []string{
+ "ErrFieldTooLong",
+ "ErrHeader",
+ "ErrWriteAfterClose",
+ "ErrWriteTooLong",
+ "FileInfoHeader",
+ "Format",
+ "FormatGNU",
+ "FormatPAX",
+ "FormatUSTAR",
+ "FormatUnknown",
+ "Header",
+ "NewReader",
+ "NewWriter",
+ "Reader",
+ "TypeBlock",
+ "TypeChar",
+ "TypeCont",
+ "TypeDir",
+ "TypeFifo",
+ "TypeGNULongLink",
+ "TypeGNULongName",
+ "TypeGNUSparse",
+ "TypeLink",
+ "TypeReg",
+ "TypeRegA",
+ "TypeSymlink",
+ "TypeXGlobalHeader",
+ "TypeXHeader",
+ "Writer",
+ },
+ "archive/zip": []string{
+ "Compressor",
+ "Decompressor",
+ "Deflate",
+ "ErrAlgorithm",
+ "ErrChecksum",
+ "ErrFormat",
+ "File",
+ "FileHeader",
+ "FileInfoHeader",
+ "NewReader",
+ "NewWriter",
+ "OpenReader",
+ "ReadCloser",
+ "Reader",
+ "RegisterCompressor",
+ "RegisterDecompressor",
+ "Store",
+ "Writer",
+ },
+ "bufio": []string{
+ "ErrAdvanceTooFar",
+ "ErrBadReadCount",
+ "ErrBufferFull",
+ "ErrFinalToken",
+ "ErrInvalidUnreadByte",
+ "ErrInvalidUnreadRune",
+ "ErrNegativeAdvance",
+ "ErrNegativeCount",
+ "ErrTooLong",
+ "MaxScanTokenSize",
+ "NewReadWriter",
+ "NewReader",
+ "NewReaderSize",
+ "NewScanner",
+ "NewWriter",
+ "NewWriterSize",
+ "ReadWriter",
+ "Reader",
+ "ScanBytes",
+ "ScanLines",
+ "ScanRunes",
+ "ScanWords",
+ "Scanner",
+ "SplitFunc",
+ "Writer",
+ },
+ "bytes": []string{
+ "Buffer",
+ "Compare",
+ "Contains",
+ "ContainsAny",
+ "ContainsRune",
+ "Count",
+ "Equal",
+ "EqualFold",
+ "ErrTooLarge",
+ "Fields",
+ "FieldsFunc",
+ "HasPrefix",
+ "HasSuffix",
+ "Index",
+ "IndexAny",
+ "IndexByte",
+ "IndexFunc",
+ "IndexRune",
+ "Join",
+ "LastIndex",
+ "LastIndexAny",
+ "LastIndexByte",
+ "LastIndexFunc",
+ "Map",
+ "MinRead",
+ "NewBuffer",
+ "NewBufferString",
+ "NewReader",
+ "Reader",
+ "Repeat",
+ "Replace",
+ "ReplaceAll",
+ "Runes",
+ "Split",
+ "SplitAfter",
+ "SplitAfterN",
+ "SplitN",
+ "Title",
+ "ToLower",
+ "ToLowerSpecial",
+ "ToTitle",
+ "ToTitleSpecial",
+ "ToUpper",
+ "ToUpperSpecial",
+ "ToValidUTF8",
+ "Trim",
+ "TrimFunc",
+ "TrimLeft",
+ "TrimLeftFunc",
+ "TrimPrefix",
+ "TrimRight",
+ "TrimRightFunc",
+ "TrimSpace",
+ "TrimSuffix",
+ },
+ "compress/bzip2": []string{
+ "NewReader",
+ "StructuralError",
+ },
+ "compress/flate": []string{
+ "BestCompression",
+ "BestSpeed",
+ "CorruptInputError",
+ "DefaultCompression",
+ "HuffmanOnly",
+ "InternalError",
+ "NewReader",
+ "NewReaderDict",
+ "NewWriter",
+ "NewWriterDict",
+ "NoCompression",
+ "ReadError",
+ "Reader",
+ "Resetter",
+ "WriteError",
+ "Writer",
+ },
+ "compress/gzip": []string{
+ "BestCompression",
+ "BestSpeed",
+ "DefaultCompression",
+ "ErrChecksum",
+ "ErrHeader",
+ "Header",
+ "HuffmanOnly",
+ "NewReader",
+ "NewWriter",
+ "NewWriterLevel",
+ "NoCompression",
+ "Reader",
+ "Writer",
+ },
+ "compress/lzw": []string{
+ "LSB",
+ "MSB",
+ "NewReader",
+ "NewWriter",
+ "Order",
+ "Reader",
+ "Writer",
+ },
+ "compress/zlib": []string{
+ "BestCompression",
+ "BestSpeed",
+ "DefaultCompression",
+ "ErrChecksum",
+ "ErrDictionary",
+ "ErrHeader",
+ "HuffmanOnly",
+ "NewReader",
+ "NewReaderDict",
+ "NewWriter",
+ "NewWriterLevel",
+ "NewWriterLevelDict",
+ "NoCompression",
+ "Resetter",
+ "Writer",
+ },
+ "container/heap": []string{
+ "Fix",
+ "Init",
+ "Interface",
+ "Pop",
+ "Push",
+ "Remove",
+ },
+ "container/list": []string{
+ "Element",
+ "List",
+ "New",
+ },
+ "container/ring": []string{
+ "New",
+ "Ring",
+ },
+ "context": []string{
+ "Background",
+ "CancelFunc",
+ "Canceled",
+ "Context",
+ "DeadlineExceeded",
+ "TODO",
+ "WithCancel",
+ "WithDeadline",
+ "WithTimeout",
+ "WithValue",
+ },
+ "crypto": []string{
+ "BLAKE2b_256",
+ "BLAKE2b_384",
+ "BLAKE2b_512",
+ "BLAKE2s_256",
+ "Decrypter",
+ "DecrypterOpts",
+ "Hash",
+ "MD4",
+ "MD5",
+ "MD5SHA1",
+ "PrivateKey",
+ "PublicKey",
+ "RIPEMD160",
+ "RegisterHash",
+ "SHA1",
+ "SHA224",
+ "SHA256",
+ "SHA384",
+ "SHA3_224",
+ "SHA3_256",
+ "SHA3_384",
+ "SHA3_512",
+ "SHA512",
+ "SHA512_224",
+ "SHA512_256",
+ "Signer",
+ "SignerOpts",
+ },
+ "crypto/aes": []string{
+ "BlockSize",
+ "KeySizeError",
+ "NewCipher",
+ },
+ "crypto/cipher": []string{
+ "AEAD",
+ "Block",
+ "BlockMode",
+ "NewCBCDecrypter",
+ "NewCBCEncrypter",
+ "NewCFBDecrypter",
+ "NewCFBEncrypter",
+ "NewCTR",
+ "NewGCM",
+ "NewGCMWithNonceSize",
+ "NewGCMWithTagSize",
+ "NewOFB",
+ "Stream",
+ "StreamReader",
+ "StreamWriter",
+ },
+ "crypto/des": []string{
+ "BlockSize",
+ "KeySizeError",
+ "NewCipher",
+ "NewTripleDESCipher",
+ },
+ "crypto/dsa": []string{
+ "ErrInvalidPublicKey",
+ "GenerateKey",
+ "GenerateParameters",
+ "L1024N160",
+ "L2048N224",
+ "L2048N256",
+ "L3072N256",
+ "ParameterSizes",
+ "Parameters",
+ "PrivateKey",
+ "PublicKey",
+ "Sign",
+ "Verify",
+ },
+ "crypto/ecdsa": []string{
+ "GenerateKey",
+ "PrivateKey",
+ "PublicKey",
+ "Sign",
+ "SignASN1",
+ "Verify",
+ "VerifyASN1",
+ },
+ "crypto/ed25519": []string{
+ "GenerateKey",
+ "NewKeyFromSeed",
+ "PrivateKey",
+ "PrivateKeySize",
+ "PublicKey",
+ "PublicKeySize",
+ "SeedSize",
+ "Sign",
+ "SignatureSize",
+ "Verify",
+ },
+ "crypto/elliptic": []string{
+ "Curve",
+ "CurveParams",
+ "GenerateKey",
+ "Marshal",
+ "MarshalCompressed",
+ "P224",
+ "P256",
+ "P384",
+ "P521",
+ "Unmarshal",
+ "UnmarshalCompressed",
+ },
+ "crypto/hmac": []string{
+ "Equal",
+ "New",
+ },
+ "crypto/md5": []string{
+ "BlockSize",
+ "New",
+ "Size",
+ "Sum",
+ },
+ "crypto/rand": []string{
+ "Int",
+ "Prime",
+ "Read",
+ "Reader",
+ },
+ "crypto/rc4": []string{
+ "Cipher",
+ "KeySizeError",
+ "NewCipher",
+ },
+ "crypto/rsa": []string{
+ "CRTValue",
+ "DecryptOAEP",
+ "DecryptPKCS1v15",
+ "DecryptPKCS1v15SessionKey",
+ "EncryptOAEP",
+ "EncryptPKCS1v15",
+ "ErrDecryption",
+ "ErrMessageTooLong",
+ "ErrVerification",
+ "GenerateKey",
+ "GenerateMultiPrimeKey",
+ "OAEPOptions",
+ "PKCS1v15DecryptOptions",
+ "PSSOptions",
+ "PSSSaltLengthAuto",
+ "PSSSaltLengthEqualsHash",
+ "PrecomputedValues",
+ "PrivateKey",
+ "PublicKey",
+ "SignPKCS1v15",
+ "SignPSS",
+ "VerifyPKCS1v15",
+ "VerifyPSS",
+ },
+ "crypto/sha1": []string{
+ "BlockSize",
+ "New",
+ "Size",
+ "Sum",
+ },
+ "crypto/sha256": []string{
+ "BlockSize",
+ "New",
+ "New224",
+ "Size",
+ "Size224",
+ "Sum224",
+ "Sum256",
+ },
+ "crypto/sha512": []string{
+ "BlockSize",
+ "New",
+ "New384",
+ "New512_224",
+ "New512_256",
+ "Size",
+ "Size224",
+ "Size256",
+ "Size384",
+ "Sum384",
+ "Sum512",
+ "Sum512_224",
+ "Sum512_256",
+ },
+ "crypto/subtle": []string{
+ "ConstantTimeByteEq",
+ "ConstantTimeCompare",
+ "ConstantTimeCopy",
+ "ConstantTimeEq",
+ "ConstantTimeLessOrEq",
+ "ConstantTimeSelect",
+ },
+ "crypto/tls": []string{
+ "Certificate",
+ "CertificateRequestInfo",
+ "CipherSuite",
+ "CipherSuiteName",
+ "CipherSuites",
+ "Client",
+ "ClientAuthType",
+ "ClientHelloInfo",
+ "ClientSessionCache",
+ "ClientSessionState",
+ "Config",
+ "Conn",
+ "ConnectionState",
+ "CurveID",
+ "CurveP256",
+ "CurveP384",
+ "CurveP521",
+ "Dial",
+ "DialWithDialer",
+ "Dialer",
+ "ECDSAWithP256AndSHA256",
+ "ECDSAWithP384AndSHA384",
+ "ECDSAWithP521AndSHA512",
+ "ECDSAWithSHA1",
+ "Ed25519",
+ "InsecureCipherSuites",
+ "Listen",
+ "LoadX509KeyPair",
+ "NewLRUClientSessionCache",
+ "NewListener",
+ "NoClientCert",
+ "PKCS1WithSHA1",
+ "PKCS1WithSHA256",
+ "PKCS1WithSHA384",
+ "PKCS1WithSHA512",
+ "PSSWithSHA256",
+ "PSSWithSHA384",
+ "PSSWithSHA512",
+ "RecordHeaderError",
+ "RenegotiateFreelyAsClient",
+ "RenegotiateNever",
+ "RenegotiateOnceAsClient",
+ "RenegotiationSupport",
+ "RequestClientCert",
+ "RequireAndVerifyClientCert",
+ "RequireAnyClientCert",
+ "Server",
+ "SignatureScheme",
+ "TLS_AES_128_GCM_SHA256",
+ "TLS_AES_256_GCM_SHA384",
+ "TLS_CHACHA20_POLY1305_SHA256",
+ "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA",
+ "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256",
+ "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
+ "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA",
+ "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
+ "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305",
+ "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256",
+ "TLS_ECDHE_ECDSA_WITH_RC4_128_SHA",
+ "TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA",
+ "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA",
+ "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256",
+ "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
+ "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA",
+ "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
+ "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305",
+ "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256",
+ "TLS_ECDHE_RSA_WITH_RC4_128_SHA",
+ "TLS_FALLBACK_SCSV",
+ "TLS_RSA_WITH_3DES_EDE_CBC_SHA",
+ "TLS_RSA_WITH_AES_128_CBC_SHA",
+ "TLS_RSA_WITH_AES_128_CBC_SHA256",
+ "TLS_RSA_WITH_AES_128_GCM_SHA256",
+ "TLS_RSA_WITH_AES_256_CBC_SHA",
+ "TLS_RSA_WITH_AES_256_GCM_SHA384",
+ "TLS_RSA_WITH_RC4_128_SHA",
+ "VerifyClientCertIfGiven",
+ "VersionSSL30",
+ "VersionTLS10",
+ "VersionTLS11",
+ "VersionTLS12",
+ "VersionTLS13",
+ "X25519",
+ "X509KeyPair",
+ },
+ "crypto/x509": []string{
+ "CANotAuthorizedForExtKeyUsage",
+ "CANotAuthorizedForThisName",
+ "CertPool",
+ "Certificate",
+ "CertificateInvalidError",
+ "CertificateRequest",
+ "ConstraintViolationError",
+ "CreateCertificate",
+ "CreateCertificateRequest",
+ "CreateRevocationList",
+ "DSA",
+ "DSAWithSHA1",
+ "DSAWithSHA256",
+ "DecryptPEMBlock",
+ "ECDSA",
+ "ECDSAWithSHA1",
+ "ECDSAWithSHA256",
+ "ECDSAWithSHA384",
+ "ECDSAWithSHA512",
+ "Ed25519",
+ "EncryptPEMBlock",
+ "ErrUnsupportedAlgorithm",
+ "Expired",
+ "ExtKeyUsage",
+ "ExtKeyUsageAny",
+ "ExtKeyUsageClientAuth",
+ "ExtKeyUsageCodeSigning",
+ "ExtKeyUsageEmailProtection",
+ "ExtKeyUsageIPSECEndSystem",
+ "ExtKeyUsageIPSECTunnel",
+ "ExtKeyUsageIPSECUser",
+ "ExtKeyUsageMicrosoftCommercialCodeSigning",
+ "ExtKeyUsageMicrosoftKernelCodeSigning",
+ "ExtKeyUsageMicrosoftServerGatedCrypto",
+ "ExtKeyUsageNetscapeServerGatedCrypto",
+ "ExtKeyUsageOCSPSigning",
+ "ExtKeyUsageServerAuth",
+ "ExtKeyUsageTimeStamping",
+ "HostnameError",
+ "IncompatibleUsage",
+ "IncorrectPasswordError",
+ "InsecureAlgorithmError",
+ "InvalidReason",
+ "IsEncryptedPEMBlock",
+ "KeyUsage",
+ "KeyUsageCRLSign",
+ "KeyUsageCertSign",
+ "KeyUsageContentCommitment",
+ "KeyUsageDataEncipherment",
+ "KeyUsageDecipherOnly",
+ "KeyUsageDigitalSignature",
+ "KeyUsageEncipherOnly",
+ "KeyUsageKeyAgreement",
+ "KeyUsageKeyEncipherment",
+ "MD2WithRSA",
+ "MD5WithRSA",
+ "MarshalECPrivateKey",
+ "MarshalPKCS1PrivateKey",
+ "MarshalPKCS1PublicKey",
+ "MarshalPKCS8PrivateKey",
+ "MarshalPKIXPublicKey",
+ "NameConstraintsWithoutSANs",
+ "NameMismatch",
+ "NewCertPool",
+ "NotAuthorizedToSign",
+ "PEMCipher",
+ "PEMCipher3DES",
+ "PEMCipherAES128",
+ "PEMCipherAES192",
+ "PEMCipherAES256",
+ "PEMCipherDES",
+ "ParseCRL",
+ "ParseCertificate",
+ "ParseCertificateRequest",
+ "ParseCertificates",
+ "ParseDERCRL",
+ "ParseECPrivateKey",
+ "ParsePKCS1PrivateKey",
+ "ParsePKCS1PublicKey",
+ "ParsePKCS8PrivateKey",
+ "ParsePKIXPublicKey",
+ "PublicKeyAlgorithm",
+ "PureEd25519",
+ "RSA",
+ "RevocationList",
+ "SHA1WithRSA",
+ "SHA256WithRSA",
+ "SHA256WithRSAPSS",
+ "SHA384WithRSA",
+ "SHA384WithRSAPSS",
+ "SHA512WithRSA",
+ "SHA512WithRSAPSS",
+ "SignatureAlgorithm",
+ "SystemCertPool",
+ "SystemRootsError",
+ "TooManyConstraints",
+ "TooManyIntermediates",
+ "UnconstrainedName",
+ "UnhandledCriticalExtension",
+ "UnknownAuthorityError",
+ "UnknownPublicKeyAlgorithm",
+ "UnknownSignatureAlgorithm",
+ "VerifyOptions",
+ },
+ "crypto/x509/pkix": []string{
+ "AlgorithmIdentifier",
+ "AttributeTypeAndValue",
+ "AttributeTypeAndValueSET",
+ "CertificateList",
+ "Extension",
+ "Name",
+ "RDNSequence",
+ "RelativeDistinguishedNameSET",
+ "RevokedCertificate",
+ "TBSCertificateList",
+ },
+ "database/sql": []string{
+ "ColumnType",
+ "Conn",
+ "DB",
+ "DBStats",
+ "Drivers",
+ "ErrConnDone",
+ "ErrNoRows",
+ "ErrTxDone",
+ "IsolationLevel",
+ "LevelDefault",
+ "LevelLinearizable",
+ "LevelReadCommitted",
+ "LevelReadUncommitted",
+ "LevelRepeatableRead",
+ "LevelSerializable",
+ "LevelSnapshot",
+ "LevelWriteCommitted",
+ "Named",
+ "NamedArg",
+ "NullBool",
+ "NullByte",
+ "NullFloat64",
+ "NullInt16",
+ "NullInt32",
+ "NullInt64",
+ "NullString",
+ "NullTime",
+ "Open",
+ "OpenDB",
+ "Out",
+ "RawBytes",
+ "Register",
+ "Result",
+ "Row",
+ "Rows",
+ "Scanner",
+ "Stmt",
+ "Tx",
+ "TxOptions",
+ },
+ "database/sql/driver": []string{
+ "Bool",
+ "ColumnConverter",
+ "Conn",
+ "ConnBeginTx",
+ "ConnPrepareContext",
+ "Connector",
+ "DefaultParameterConverter",
+ "Driver",
+ "DriverContext",
+ "ErrBadConn",
+ "ErrRemoveArgument",
+ "ErrSkip",
+ "Execer",
+ "ExecerContext",
+ "Int32",
+ "IsScanValue",
+ "IsValue",
+ "IsolationLevel",
+ "NamedValue",
+ "NamedValueChecker",
+ "NotNull",
+ "Null",
+ "Pinger",
+ "Queryer",
+ "QueryerContext",
+ "Result",
+ "ResultNoRows",
+ "Rows",
+ "RowsAffected",
+ "RowsColumnTypeDatabaseTypeName",
+ "RowsColumnTypeLength",
+ "RowsColumnTypeNullable",
+ "RowsColumnTypePrecisionScale",
+ "RowsColumnTypeScanType",
+ "RowsNextResultSet",
+ "SessionResetter",
+ "Stmt",
+ "StmtExecContext",
+ "StmtQueryContext",
+ "String",
+ "Tx",
+ "TxOptions",
+ "Validator",
+ "Value",
+ "ValueConverter",
+ "Valuer",
+ },
+ "debug/dwarf": []string{
+ "AddrType",
+ "ArrayType",
+ "Attr",
+ "AttrAbstractOrigin",
+ "AttrAccessibility",
+ "AttrAddrBase",
+ "AttrAddrClass",
+ "AttrAlignment",
+ "AttrAllocated",
+ "AttrArtificial",
+ "AttrAssociated",
+ "AttrBaseTypes",
+ "AttrBinaryScale",
+ "AttrBitOffset",
+ "AttrBitSize",
+ "AttrByteSize",
+ "AttrCallAllCalls",
+ "AttrCallAllSourceCalls",
+ "AttrCallAllTailCalls",
+ "AttrCallColumn",
+ "AttrCallDataLocation",
+ "AttrCallDataValue",
+ "AttrCallFile",
+ "AttrCallLine",
+ "AttrCallOrigin",
+ "AttrCallPC",
+ "AttrCallParameter",
+ "AttrCallReturnPC",
+ "AttrCallTailCall",
+ "AttrCallTarget",
+ "AttrCallTargetClobbered",
+ "AttrCallValue",
+ "AttrCalling",
+ "AttrCommonRef",
+ "AttrCompDir",
+ "AttrConstExpr",
+ "AttrConstValue",
+ "AttrContainingType",
+ "AttrCount",
+ "AttrDataBitOffset",
+ "AttrDataLocation",
+ "AttrDataMemberLoc",
+ "AttrDecimalScale",
+ "AttrDecimalSign",
+ "AttrDeclColumn",
+ "AttrDeclFile",
+ "AttrDeclLine",
+ "AttrDeclaration",
+ "AttrDefaultValue",
+ "AttrDefaulted",
+ "AttrDeleted",
+ "AttrDescription",
+ "AttrDigitCount",
+ "AttrDiscr",
+ "AttrDiscrList",
+ "AttrDiscrValue",
+ "AttrDwoName",
+ "AttrElemental",
+ "AttrEncoding",
+ "AttrEndianity",
+ "AttrEntrypc",
+ "AttrEnumClass",
+ "AttrExplicit",
+ "AttrExportSymbols",
+ "AttrExtension",
+ "AttrExternal",
+ "AttrFrameBase",
+ "AttrFriend",
+ "AttrHighpc",
+ "AttrIdentifierCase",
+ "AttrImport",
+ "AttrInline",
+ "AttrIsOptional",
+ "AttrLanguage",
+ "AttrLinkageName",
+ "AttrLocation",
+ "AttrLoclistsBase",
+ "AttrLowerBound",
+ "AttrLowpc",
+ "AttrMacroInfo",
+ "AttrMacros",
+ "AttrMainSubprogram",
+ "AttrMutable",
+ "AttrName",
+ "AttrNamelistItem",
+ "AttrNoreturn",
+ "AttrObjectPointer",
+ "AttrOrdering",
+ "AttrPictureString",
+ "AttrPriority",
+ "AttrProducer",
+ "AttrPrototyped",
+ "AttrPure",
+ "AttrRanges",
+ "AttrRank",
+ "AttrRecursive",
+ "AttrReference",
+ "AttrReturnAddr",
+ "AttrRnglistsBase",
+ "AttrRvalueReference",
+ "AttrSegment",
+ "AttrSibling",
+ "AttrSignature",
+ "AttrSmall",
+ "AttrSpecification",
+ "AttrStartScope",
+ "AttrStaticLink",
+ "AttrStmtList",
+ "AttrStrOffsetsBase",
+ "AttrStride",
+ "AttrStrideSize",
+ "AttrStringLength",
+ "AttrStringLengthBitSize",
+ "AttrStringLengthByteSize",
+ "AttrThreadsScaled",
+ "AttrTrampoline",
+ "AttrType",
+ "AttrUpperBound",
+ "AttrUseLocation",
+ "AttrUseUTF8",
+ "AttrVarParam",
+ "AttrVirtuality",
+ "AttrVisibility",
+ "AttrVtableElemLoc",
+ "BasicType",
+ "BoolType",
+ "CharType",
+ "Class",
+ "ClassAddrPtr",
+ "ClassAddress",
+ "ClassBlock",
+ "ClassConstant",
+ "ClassExprLoc",
+ "ClassFlag",
+ "ClassLinePtr",
+ "ClassLocList",
+ "ClassLocListPtr",
+ "ClassMacPtr",
+ "ClassRangeListPtr",
+ "ClassReference",
+ "ClassReferenceAlt",
+ "ClassReferenceSig",
+ "ClassRngList",
+ "ClassRngListsPtr",
+ "ClassStrOffsetsPtr",
+ "ClassString",
+ "ClassStringAlt",
+ "ClassUnknown",
+ "CommonType",
+ "ComplexType",
+ "Data",
+ "DecodeError",
+ "DotDotDotType",
+ "Entry",
+ "EnumType",
+ "EnumValue",
+ "ErrUnknownPC",
+ "Field",
+ "FloatType",
+ "FuncType",
+ "IntType",
+ "LineEntry",
+ "LineFile",
+ "LineReader",
+ "LineReaderPos",
+ "New",
+ "Offset",
+ "PtrType",
+ "QualType",
+ "Reader",
+ "StructField",
+ "StructType",
+ "Tag",
+ "TagAccessDeclaration",
+ "TagArrayType",
+ "TagAtomicType",
+ "TagBaseType",
+ "TagCallSite",
+ "TagCallSiteParameter",
+ "TagCatchDwarfBlock",
+ "TagClassType",
+ "TagCoarrayType",
+ "TagCommonDwarfBlock",
+ "TagCommonInclusion",
+ "TagCompileUnit",
+ "TagCondition",
+ "TagConstType",
+ "TagConstant",
+ "TagDwarfProcedure",
+ "TagDynamicType",
+ "TagEntryPoint",
+ "TagEnumerationType",
+ "TagEnumerator",
+ "TagFileType",
+ "TagFormalParameter",
+ "TagFriend",
+ "TagGenericSubrange",
+ "TagImmutableType",
+ "TagImportedDeclaration",
+ "TagImportedModule",
+ "TagImportedUnit",
+ "TagInheritance",
+ "TagInlinedSubroutine",
+ "TagInterfaceType",
+ "TagLabel",
+ "TagLexDwarfBlock",
+ "TagMember",
+ "TagModule",
+ "TagMutableType",
+ "TagNamelist",
+ "TagNamelistItem",
+ "TagNamespace",
+ "TagPackedType",
+ "TagPartialUnit",
+ "TagPointerType",
+ "TagPtrToMemberType",
+ "TagReferenceType",
+ "TagRestrictType",
+ "TagRvalueReferenceType",
+ "TagSetType",
+ "TagSharedType",
+ "TagSkeletonUnit",
+ "TagStringType",
+ "TagStructType",
+ "TagSubprogram",
+ "TagSubrangeType",
+ "TagSubroutineType",
+ "TagTemplateAlias",
+ "TagTemplateTypeParameter",
+ "TagTemplateValueParameter",
+ "TagThrownType",
+ "TagTryDwarfBlock",
+ "TagTypeUnit",
+ "TagTypedef",
+ "TagUnionType",
+ "TagUnspecifiedParameters",
+ "TagUnspecifiedType",
+ "TagVariable",
+ "TagVariant",
+ "TagVariantPart",
+ "TagVolatileType",
+ "TagWithStmt",
+ "Type",
+ "TypedefType",
+ "UcharType",
+ "UintType",
+ "UnspecifiedType",
+ "UnsupportedType",
+ "VoidType",
+ },
+ "debug/elf": []string{
+ "ARM_MAGIC_TRAMP_NUMBER",
+ "COMPRESS_HIOS",
+ "COMPRESS_HIPROC",
+ "COMPRESS_LOOS",
+ "COMPRESS_LOPROC",
+ "COMPRESS_ZLIB",
+ "Chdr32",
+ "Chdr64",
+ "Class",
+ "CompressionType",
+ "DF_BIND_NOW",
+ "DF_ORIGIN",
+ "DF_STATIC_TLS",
+ "DF_SYMBOLIC",
+ "DF_TEXTREL",
+ "DT_ADDRRNGHI",
+ "DT_ADDRRNGLO",
+ "DT_AUDIT",
+ "DT_AUXILIARY",
+ "DT_BIND_NOW",
+ "DT_CHECKSUM",
+ "DT_CONFIG",
+ "DT_DEBUG",
+ "DT_DEPAUDIT",
+ "DT_ENCODING",
+ "DT_FEATURE",
+ "DT_FILTER",
+ "DT_FINI",
+ "DT_FINI_ARRAY",
+ "DT_FINI_ARRAYSZ",
+ "DT_FLAGS",
+ "DT_FLAGS_1",
+ "DT_GNU_CONFLICT",
+ "DT_GNU_CONFLICTSZ",
+ "DT_GNU_HASH",
+ "DT_GNU_LIBLIST",
+ "DT_GNU_LIBLISTSZ",
+ "DT_GNU_PRELINKED",
+ "DT_HASH",
+ "DT_HIOS",
+ "DT_HIPROC",
+ "DT_INIT",
+ "DT_INIT_ARRAY",
+ "DT_INIT_ARRAYSZ",
+ "DT_JMPREL",
+ "DT_LOOS",
+ "DT_LOPROC",
+ "DT_MIPS_AUX_DYNAMIC",
+ "DT_MIPS_BASE_ADDRESS",
+ "DT_MIPS_COMPACT_SIZE",
+ "DT_MIPS_CONFLICT",
+ "DT_MIPS_CONFLICTNO",
+ "DT_MIPS_CXX_FLAGS",
+ "DT_MIPS_DELTA_CLASS",
+ "DT_MIPS_DELTA_CLASSSYM",
+ "DT_MIPS_DELTA_CLASSSYM_NO",
+ "DT_MIPS_DELTA_CLASS_NO",
+ "DT_MIPS_DELTA_INSTANCE",
+ "DT_MIPS_DELTA_INSTANCE_NO",
+ "DT_MIPS_DELTA_RELOC",
+ "DT_MIPS_DELTA_RELOC_NO",
+ "DT_MIPS_DELTA_SYM",
+ "DT_MIPS_DELTA_SYM_NO",
+ "DT_MIPS_DYNSTR_ALIGN",
+ "DT_MIPS_FLAGS",
+ "DT_MIPS_GOTSYM",
+ "DT_MIPS_GP_VALUE",
+ "DT_MIPS_HIDDEN_GOTIDX",
+ "DT_MIPS_HIPAGENO",
+ "DT_MIPS_ICHECKSUM",
+ "DT_MIPS_INTERFACE",
+ "DT_MIPS_INTERFACE_SIZE",
+ "DT_MIPS_IVERSION",
+ "DT_MIPS_LIBLIST",
+ "DT_MIPS_LIBLISTNO",
+ "DT_MIPS_LOCALPAGE_GOTIDX",
+ "DT_MIPS_LOCAL_GOTIDX",
+ "DT_MIPS_LOCAL_GOTNO",
+ "DT_MIPS_MSYM",
+ "DT_MIPS_OPTIONS",
+ "DT_MIPS_PERF_SUFFIX",
+ "DT_MIPS_PIXIE_INIT",
+ "DT_MIPS_PLTGOT",
+ "DT_MIPS_PROTECTED_GOTIDX",
+ "DT_MIPS_RLD_MAP",
+ "DT_MIPS_RLD_MAP_REL",
+ "DT_MIPS_RLD_TEXT_RESOLVE_ADDR",
+ "DT_MIPS_RLD_VERSION",
+ "DT_MIPS_RWPLT",
+ "DT_MIPS_SYMBOL_LIB",
+ "DT_MIPS_SYMTABNO",
+ "DT_MIPS_TIME_STAMP",
+ "DT_MIPS_UNREFEXTNO",
+ "DT_MOVEENT",
+ "DT_MOVESZ",
+ "DT_MOVETAB",
+ "DT_NEEDED",
+ "DT_NULL",
+ "DT_PLTGOT",
+ "DT_PLTPAD",
+ "DT_PLTPADSZ",
+ "DT_PLTREL",
+ "DT_PLTRELSZ",
+ "DT_POSFLAG_1",
+ "DT_PPC64_GLINK",
+ "DT_PPC64_OPD",
+ "DT_PPC64_OPDSZ",
+ "DT_PPC64_OPT",
+ "DT_PPC_GOT",
+ "DT_PPC_OPT",
+ "DT_PREINIT_ARRAY",
+ "DT_PREINIT_ARRAYSZ",
+ "DT_REL",
+ "DT_RELA",
+ "DT_RELACOUNT",
+ "DT_RELAENT",
+ "DT_RELASZ",
+ "DT_RELCOUNT",
+ "DT_RELENT",
+ "DT_RELSZ",
+ "DT_RPATH",
+ "DT_RUNPATH",
+ "DT_SONAME",
+ "DT_SPARC_REGISTER",
+ "DT_STRSZ",
+ "DT_STRTAB",
+ "DT_SYMBOLIC",
+ "DT_SYMENT",
+ "DT_SYMINENT",
+ "DT_SYMINFO",
+ "DT_SYMINSZ",
+ "DT_SYMTAB",
+ "DT_SYMTAB_SHNDX",
+ "DT_TEXTREL",
+ "DT_TLSDESC_GOT",
+ "DT_TLSDESC_PLT",
+ "DT_USED",
+ "DT_VALRNGHI",
+ "DT_VALRNGLO",
+ "DT_VERDEF",
+ "DT_VERDEFNUM",
+ "DT_VERNEED",
+ "DT_VERNEEDNUM",
+ "DT_VERSYM",
+ "Data",
+ "Dyn32",
+ "Dyn64",
+ "DynFlag",
+ "DynTag",
+ "EI_ABIVERSION",
+ "EI_CLASS",
+ "EI_DATA",
+ "EI_NIDENT",
+ "EI_OSABI",
+ "EI_PAD",
+ "EI_VERSION",
+ "ELFCLASS32",
+ "ELFCLASS64",
+ "ELFCLASSNONE",
+ "ELFDATA2LSB",
+ "ELFDATA2MSB",
+ "ELFDATANONE",
+ "ELFMAG",
+ "ELFOSABI_86OPEN",
+ "ELFOSABI_AIX",
+ "ELFOSABI_ARM",
+ "ELFOSABI_AROS",
+ "ELFOSABI_CLOUDABI",
+ "ELFOSABI_FENIXOS",
+ "ELFOSABI_FREEBSD",
+ "ELFOSABI_HPUX",
+ "ELFOSABI_HURD",
+ "ELFOSABI_IRIX",
+ "ELFOSABI_LINUX",
+ "ELFOSABI_MODESTO",
+ "ELFOSABI_NETBSD",
+ "ELFOSABI_NONE",
+ "ELFOSABI_NSK",
+ "ELFOSABI_OPENBSD",
+ "ELFOSABI_OPENVMS",
+ "ELFOSABI_SOLARIS",
+ "ELFOSABI_STANDALONE",
+ "ELFOSABI_TRU64",
+ "EM_386",
+ "EM_486",
+ "EM_56800EX",
+ "EM_68HC05",
+ "EM_68HC08",
+ "EM_68HC11",
+ "EM_68HC12",
+ "EM_68HC16",
+ "EM_68K",
+ "EM_78KOR",
+ "EM_8051",
+ "EM_860",
+ "EM_88K",
+ "EM_960",
+ "EM_AARCH64",
+ "EM_ALPHA",
+ "EM_ALPHA_STD",
+ "EM_ALTERA_NIOS2",
+ "EM_AMDGPU",
+ "EM_ARC",
+ "EM_ARCA",
+ "EM_ARC_COMPACT",
+ "EM_ARC_COMPACT2",
+ "EM_ARM",
+ "EM_AVR",
+ "EM_AVR32",
+ "EM_BA1",
+ "EM_BA2",
+ "EM_BLACKFIN",
+ "EM_BPF",
+ "EM_C166",
+ "EM_CDP",
+ "EM_CE",
+ "EM_CLOUDSHIELD",
+ "EM_COGE",
+ "EM_COLDFIRE",
+ "EM_COOL",
+ "EM_COREA_1ST",
+ "EM_COREA_2ND",
+ "EM_CR",
+ "EM_CR16",
+ "EM_CRAYNV2",
+ "EM_CRIS",
+ "EM_CRX",
+ "EM_CSR_KALIMBA",
+ "EM_CUDA",
+ "EM_CYPRESS_M8C",
+ "EM_D10V",
+ "EM_D30V",
+ "EM_DSP24",
+ "EM_DSPIC30F",
+ "EM_DXP",
+ "EM_ECOG1",
+ "EM_ECOG16",
+ "EM_ECOG1X",
+ "EM_ECOG2",
+ "EM_ETPU",
+ "EM_EXCESS",
+ "EM_F2MC16",
+ "EM_FIREPATH",
+ "EM_FR20",
+ "EM_FR30",
+ "EM_FT32",
+ "EM_FX66",
+ "EM_H8S",
+ "EM_H8_300",
+ "EM_H8_300H",
+ "EM_H8_500",
+ "EM_HUANY",
+ "EM_IA_64",
+ "EM_INTEL205",
+ "EM_INTEL206",
+ "EM_INTEL207",
+ "EM_INTEL208",
+ "EM_INTEL209",
+ "EM_IP2K",
+ "EM_JAVELIN",
+ "EM_K10M",
+ "EM_KM32",
+ "EM_KMX16",
+ "EM_KMX32",
+ "EM_KMX8",
+ "EM_KVARC",
+ "EM_L10M",
+ "EM_LANAI",
+ "EM_LATTICEMICO32",
+ "EM_M16C",
+ "EM_M32",
+ "EM_M32C",
+ "EM_M32R",
+ "EM_MANIK",
+ "EM_MAX",
+ "EM_MAXQ30",
+ "EM_MCHP_PIC",
+ "EM_MCST_ELBRUS",
+ "EM_ME16",
+ "EM_METAG",
+ "EM_MICROBLAZE",
+ "EM_MIPS",
+ "EM_MIPS_RS3_LE",
+ "EM_MIPS_RS4_BE",
+ "EM_MIPS_X",
+ "EM_MMA",
+ "EM_MMDSP_PLUS",
+ "EM_MMIX",
+ "EM_MN10200",
+ "EM_MN10300",
+ "EM_MOXIE",
+ "EM_MSP430",
+ "EM_NCPU",
+ "EM_NDR1",
+ "EM_NDS32",
+ "EM_NONE",
+ "EM_NORC",
+ "EM_NS32K",
+ "EM_OPEN8",
+ "EM_OPENRISC",
+ "EM_PARISC",
+ "EM_PCP",
+ "EM_PDP10",
+ "EM_PDP11",
+ "EM_PDSP",
+ "EM_PJ",
+ "EM_PPC",
+ "EM_PPC64",
+ "EM_PRISM",
+ "EM_QDSP6",
+ "EM_R32C",
+ "EM_RCE",
+ "EM_RH32",
+ "EM_RISCV",
+ "EM_RL78",
+ "EM_RS08",
+ "EM_RX",
+ "EM_S370",
+ "EM_S390",
+ "EM_SCORE7",
+ "EM_SEP",
+ "EM_SE_C17",
+ "EM_SE_C33",
+ "EM_SH",
+ "EM_SHARC",
+ "EM_SLE9X",
+ "EM_SNP1K",
+ "EM_SPARC",
+ "EM_SPARC32PLUS",
+ "EM_SPARCV9",
+ "EM_ST100",
+ "EM_ST19",
+ "EM_ST200",
+ "EM_ST7",
+ "EM_ST9PLUS",
+ "EM_STARCORE",
+ "EM_STM8",
+ "EM_STXP7X",
+ "EM_SVX",
+ "EM_TILE64",
+ "EM_TILEGX",
+ "EM_TILEPRO",
+ "EM_TINYJ",
+ "EM_TI_ARP32",
+ "EM_TI_C2000",
+ "EM_TI_C5500",
+ "EM_TI_C6000",
+ "EM_TI_PRU",
+ "EM_TMM_GPP",
+ "EM_TPC",
+ "EM_TRICORE",
+ "EM_TRIMEDIA",
+ "EM_TSK3000",
+ "EM_UNICORE",
+ "EM_V800",
+ "EM_V850",
+ "EM_VAX",
+ "EM_VIDEOCORE",
+ "EM_VIDEOCORE3",
+ "EM_VIDEOCORE5",
+ "EM_VISIUM",
+ "EM_VPP500",
+ "EM_X86_64",
+ "EM_XCORE",
+ "EM_XGATE",
+ "EM_XIMO16",
+ "EM_XTENSA",
+ "EM_Z80",
+ "EM_ZSP",
+ "ET_CORE",
+ "ET_DYN",
+ "ET_EXEC",
+ "ET_HIOS",
+ "ET_HIPROC",
+ "ET_LOOS",
+ "ET_LOPROC",
+ "ET_NONE",
+ "ET_REL",
+ "EV_CURRENT",
+ "EV_NONE",
+ "ErrNoSymbols",
+ "File",
+ "FileHeader",
+ "FormatError",
+ "Header32",
+ "Header64",
+ "ImportedSymbol",
+ "Machine",
+ "NT_FPREGSET",
+ "NT_PRPSINFO",
+ "NT_PRSTATUS",
+ "NType",
+ "NewFile",
+ "OSABI",
+ "Open",
+ "PF_MASKOS",
+ "PF_MASKPROC",
+ "PF_R",
+ "PF_W",
+ "PF_X",
+ "PT_AARCH64_ARCHEXT",
+ "PT_AARCH64_UNWIND",
+ "PT_ARM_ARCHEXT",
+ "PT_ARM_EXIDX",
+ "PT_DYNAMIC",
+ "PT_GNU_EH_FRAME",
+ "PT_GNU_MBIND_HI",
+ "PT_GNU_MBIND_LO",
+ "PT_GNU_PROPERTY",
+ "PT_GNU_RELRO",
+ "PT_GNU_STACK",
+ "PT_HIOS",
+ "PT_HIPROC",
+ "PT_INTERP",
+ "PT_LOAD",
+ "PT_LOOS",
+ "PT_LOPROC",
+ "PT_MIPS_ABIFLAGS",
+ "PT_MIPS_OPTIONS",
+ "PT_MIPS_REGINFO",
+ "PT_MIPS_RTPROC",
+ "PT_NOTE",
+ "PT_NULL",
+ "PT_OPENBSD_BOOTDATA",
+ "PT_OPENBSD_RANDOMIZE",
+ "PT_OPENBSD_WXNEEDED",
+ "PT_PAX_FLAGS",
+ "PT_PHDR",
+ "PT_S390_PGSTE",
+ "PT_SHLIB",
+ "PT_SUNWSTACK",
+ "PT_SUNW_EH_FRAME",
+ "PT_TLS",
+ "Prog",
+ "Prog32",
+ "Prog64",
+ "ProgFlag",
+ "ProgHeader",
+ "ProgType",
+ "R_386",
+ "R_386_16",
+ "R_386_32",
+ "R_386_32PLT",
+ "R_386_8",
+ "R_386_COPY",
+ "R_386_GLOB_DAT",
+ "R_386_GOT32",
+ "R_386_GOT32X",
+ "R_386_GOTOFF",
+ "R_386_GOTPC",
+ "R_386_IRELATIVE",
+ "R_386_JMP_SLOT",
+ "R_386_NONE",
+ "R_386_PC16",
+ "R_386_PC32",
+ "R_386_PC8",
+ "R_386_PLT32",
+ "R_386_RELATIVE",
+ "R_386_SIZE32",
+ "R_386_TLS_DESC",
+ "R_386_TLS_DESC_CALL",
+ "R_386_TLS_DTPMOD32",
+ "R_386_TLS_DTPOFF32",
+ "R_386_TLS_GD",
+ "R_386_TLS_GD_32",
+ "R_386_TLS_GD_CALL",
+ "R_386_TLS_GD_POP",
+ "R_386_TLS_GD_PUSH",
+ "R_386_TLS_GOTDESC",
+ "R_386_TLS_GOTIE",
+ "R_386_TLS_IE",
+ "R_386_TLS_IE_32",
+ "R_386_TLS_LDM",
+ "R_386_TLS_LDM_32",
+ "R_386_TLS_LDM_CALL",
+ "R_386_TLS_LDM_POP",
+ "R_386_TLS_LDM_PUSH",
+ "R_386_TLS_LDO_32",
+ "R_386_TLS_LE",
+ "R_386_TLS_LE_32",
+ "R_386_TLS_TPOFF",
+ "R_386_TLS_TPOFF32",
+ "R_390",
+ "R_390_12",
+ "R_390_16",
+ "R_390_20",
+ "R_390_32",
+ "R_390_64",
+ "R_390_8",
+ "R_390_COPY",
+ "R_390_GLOB_DAT",
+ "R_390_GOT12",
+ "R_390_GOT16",
+ "R_390_GOT20",
+ "R_390_GOT32",
+ "R_390_GOT64",
+ "R_390_GOTENT",
+ "R_390_GOTOFF",
+ "R_390_GOTOFF16",
+ "R_390_GOTOFF64",
+ "R_390_GOTPC",
+ "R_390_GOTPCDBL",
+ "R_390_GOTPLT12",
+ "R_390_GOTPLT16",
+ "R_390_GOTPLT20",
+ "R_390_GOTPLT32",
+ "R_390_GOTPLT64",
+ "R_390_GOTPLTENT",
+ "R_390_GOTPLTOFF16",
+ "R_390_GOTPLTOFF32",
+ "R_390_GOTPLTOFF64",
+ "R_390_JMP_SLOT",
+ "R_390_NONE",
+ "R_390_PC16",
+ "R_390_PC16DBL",
+ "R_390_PC32",
+ "R_390_PC32DBL",
+ "R_390_PC64",
+ "R_390_PLT16DBL",
+ "R_390_PLT32",
+ "R_390_PLT32DBL",
+ "R_390_PLT64",
+ "R_390_RELATIVE",
+ "R_390_TLS_DTPMOD",
+ "R_390_TLS_DTPOFF",
+ "R_390_TLS_GD32",
+ "R_390_TLS_GD64",
+ "R_390_TLS_GDCALL",
+ "R_390_TLS_GOTIE12",
+ "R_390_TLS_GOTIE20",
+ "R_390_TLS_GOTIE32",
+ "R_390_TLS_GOTIE64",
+ "R_390_TLS_IE32",
+ "R_390_TLS_IE64",
+ "R_390_TLS_IEENT",
+ "R_390_TLS_LDCALL",
+ "R_390_TLS_LDM32",
+ "R_390_TLS_LDM64",
+ "R_390_TLS_LDO32",
+ "R_390_TLS_LDO64",
+ "R_390_TLS_LE32",
+ "R_390_TLS_LE64",
+ "R_390_TLS_LOAD",
+ "R_390_TLS_TPOFF",
+ "R_AARCH64",
+ "R_AARCH64_ABS16",
+ "R_AARCH64_ABS32",
+ "R_AARCH64_ABS64",
+ "R_AARCH64_ADD_ABS_LO12_NC",
+ "R_AARCH64_ADR_GOT_PAGE",
+ "R_AARCH64_ADR_PREL_LO21",
+ "R_AARCH64_ADR_PREL_PG_HI21",
+ "R_AARCH64_ADR_PREL_PG_HI21_NC",
+ "R_AARCH64_CALL26",
+ "R_AARCH64_CONDBR19",
+ "R_AARCH64_COPY",
+ "R_AARCH64_GLOB_DAT",
+ "R_AARCH64_GOT_LD_PREL19",
+ "R_AARCH64_IRELATIVE",
+ "R_AARCH64_JUMP26",
+ "R_AARCH64_JUMP_SLOT",
+ "R_AARCH64_LD64_GOTOFF_LO15",
+ "R_AARCH64_LD64_GOTPAGE_LO15",
+ "R_AARCH64_LD64_GOT_LO12_NC",
+ "R_AARCH64_LDST128_ABS_LO12_NC",
+ "R_AARCH64_LDST16_ABS_LO12_NC",
+ "R_AARCH64_LDST32_ABS_LO12_NC",
+ "R_AARCH64_LDST64_ABS_LO12_NC",
+ "R_AARCH64_LDST8_ABS_LO12_NC",
+ "R_AARCH64_LD_PREL_LO19",
+ "R_AARCH64_MOVW_SABS_G0",
+ "R_AARCH64_MOVW_SABS_G1",
+ "R_AARCH64_MOVW_SABS_G2",
+ "R_AARCH64_MOVW_UABS_G0",
+ "R_AARCH64_MOVW_UABS_G0_NC",
+ "R_AARCH64_MOVW_UABS_G1",
+ "R_AARCH64_MOVW_UABS_G1_NC",
+ "R_AARCH64_MOVW_UABS_G2",
+ "R_AARCH64_MOVW_UABS_G2_NC",
+ "R_AARCH64_MOVW_UABS_G3",
+ "R_AARCH64_NONE",
+ "R_AARCH64_NULL",
+ "R_AARCH64_P32_ABS16",
+ "R_AARCH64_P32_ABS32",
+ "R_AARCH64_P32_ADD_ABS_LO12_NC",
+ "R_AARCH64_P32_ADR_GOT_PAGE",
+ "R_AARCH64_P32_ADR_PREL_LO21",
+ "R_AARCH64_P32_ADR_PREL_PG_HI21",
+ "R_AARCH64_P32_CALL26",
+ "R_AARCH64_P32_CONDBR19",
+ "R_AARCH64_P32_COPY",
+ "R_AARCH64_P32_GLOB_DAT",
+ "R_AARCH64_P32_GOT_LD_PREL19",
+ "R_AARCH64_P32_IRELATIVE",
+ "R_AARCH64_P32_JUMP26",
+ "R_AARCH64_P32_JUMP_SLOT",
+ "R_AARCH64_P32_LD32_GOT_LO12_NC",
+ "R_AARCH64_P32_LDST128_ABS_LO12_NC",
+ "R_AARCH64_P32_LDST16_ABS_LO12_NC",
+ "R_AARCH64_P32_LDST32_ABS_LO12_NC",
+ "R_AARCH64_P32_LDST64_ABS_LO12_NC",
+ "R_AARCH64_P32_LDST8_ABS_LO12_NC",
+ "R_AARCH64_P32_LD_PREL_LO19",
+ "R_AARCH64_P32_MOVW_SABS_G0",
+ "R_AARCH64_P32_MOVW_UABS_G0",
+ "R_AARCH64_P32_MOVW_UABS_G0_NC",
+ "R_AARCH64_P32_MOVW_UABS_G1",
+ "R_AARCH64_P32_PREL16",
+ "R_AARCH64_P32_PREL32",
+ "R_AARCH64_P32_RELATIVE",
+ "R_AARCH64_P32_TLSDESC",
+ "R_AARCH64_P32_TLSDESC_ADD_LO12_NC",
+ "R_AARCH64_P32_TLSDESC_ADR_PAGE21",
+ "R_AARCH64_P32_TLSDESC_ADR_PREL21",
+ "R_AARCH64_P32_TLSDESC_CALL",
+ "R_AARCH64_P32_TLSDESC_LD32_LO12_NC",
+ "R_AARCH64_P32_TLSDESC_LD_PREL19",
+ "R_AARCH64_P32_TLSGD_ADD_LO12_NC",
+ "R_AARCH64_P32_TLSGD_ADR_PAGE21",
+ "R_AARCH64_P32_TLSIE_ADR_GOTTPREL_PAGE21",
+ "R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC",
+ "R_AARCH64_P32_TLSIE_LD_GOTTPREL_PREL19",
+ "R_AARCH64_P32_TLSLE_ADD_TPREL_HI12",
+ "R_AARCH64_P32_TLSLE_ADD_TPREL_LO12",
+ "R_AARCH64_P32_TLSLE_ADD_TPREL_LO12_NC",
+ "R_AARCH64_P32_TLSLE_MOVW_TPREL_G0",
+ "R_AARCH64_P32_TLSLE_MOVW_TPREL_G0_NC",
+ "R_AARCH64_P32_TLSLE_MOVW_TPREL_G1",
+ "R_AARCH64_P32_TLS_DTPMOD",
+ "R_AARCH64_P32_TLS_DTPREL",
+ "R_AARCH64_P32_TLS_TPREL",
+ "R_AARCH64_P32_TSTBR14",
+ "R_AARCH64_PREL16",
+ "R_AARCH64_PREL32",
+ "R_AARCH64_PREL64",
+ "R_AARCH64_RELATIVE",
+ "R_AARCH64_TLSDESC",
+ "R_AARCH64_TLSDESC_ADD",
+ "R_AARCH64_TLSDESC_ADD_LO12_NC",
+ "R_AARCH64_TLSDESC_ADR_PAGE21",
+ "R_AARCH64_TLSDESC_ADR_PREL21",
+ "R_AARCH64_TLSDESC_CALL",
+ "R_AARCH64_TLSDESC_LD64_LO12_NC",
+ "R_AARCH64_TLSDESC_LDR",
+ "R_AARCH64_TLSDESC_LD_PREL19",
+ "R_AARCH64_TLSDESC_OFF_G0_NC",
+ "R_AARCH64_TLSDESC_OFF_G1",
+ "R_AARCH64_TLSGD_ADD_LO12_NC",
+ "R_AARCH64_TLSGD_ADR_PAGE21",
+ "R_AARCH64_TLSGD_ADR_PREL21",
+ "R_AARCH64_TLSGD_MOVW_G0_NC",
+ "R_AARCH64_TLSGD_MOVW_G1",
+ "R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21",
+ "R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC",
+ "R_AARCH64_TLSIE_LD_GOTTPREL_PREL19",
+ "R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC",
+ "R_AARCH64_TLSIE_MOVW_GOTTPREL_G1",
+ "R_AARCH64_TLSLD_ADR_PAGE21",
+ "R_AARCH64_TLSLD_ADR_PREL21",
+ "R_AARCH64_TLSLD_LDST128_DTPREL_LO12",
+ "R_AARCH64_TLSLD_LDST128_DTPREL_LO12_NC",
+ "R_AARCH64_TLSLE_ADD_TPREL_HI12",
+ "R_AARCH64_TLSLE_ADD_TPREL_LO12",
+ "R_AARCH64_TLSLE_ADD_TPREL_LO12_NC",
+ "R_AARCH64_TLSLE_LDST128_TPREL_LO12",
+ "R_AARCH64_TLSLE_LDST128_TPREL_LO12_NC",
+ "R_AARCH64_TLSLE_MOVW_TPREL_G0",
+ "R_AARCH64_TLSLE_MOVW_TPREL_G0_NC",
+ "R_AARCH64_TLSLE_MOVW_TPREL_G1",
+ "R_AARCH64_TLSLE_MOVW_TPREL_G1_NC",
+ "R_AARCH64_TLSLE_MOVW_TPREL_G2",
+ "R_AARCH64_TLS_DTPMOD64",
+ "R_AARCH64_TLS_DTPREL64",
+ "R_AARCH64_TLS_TPREL64",
+ "R_AARCH64_TSTBR14",
+ "R_ALPHA",
+ "R_ALPHA_BRADDR",
+ "R_ALPHA_COPY",
+ "R_ALPHA_GLOB_DAT",
+ "R_ALPHA_GPDISP",
+ "R_ALPHA_GPREL32",
+ "R_ALPHA_GPRELHIGH",
+ "R_ALPHA_GPRELLOW",
+ "R_ALPHA_GPVALUE",
+ "R_ALPHA_HINT",
+ "R_ALPHA_IMMED_BR_HI32",
+ "R_ALPHA_IMMED_GP_16",
+ "R_ALPHA_IMMED_GP_HI32",
+ "R_ALPHA_IMMED_LO32",
+ "R_ALPHA_IMMED_SCN_HI32",
+ "R_ALPHA_JMP_SLOT",
+ "R_ALPHA_LITERAL",
+ "R_ALPHA_LITUSE",
+ "R_ALPHA_NONE",
+ "R_ALPHA_OP_PRSHIFT",
+ "R_ALPHA_OP_PSUB",
+ "R_ALPHA_OP_PUSH",
+ "R_ALPHA_OP_STORE",
+ "R_ALPHA_REFLONG",
+ "R_ALPHA_REFQUAD",
+ "R_ALPHA_RELATIVE",
+ "R_ALPHA_SREL16",
+ "R_ALPHA_SREL32",
+ "R_ALPHA_SREL64",
+ "R_ARM",
+ "R_ARM_ABS12",
+ "R_ARM_ABS16",
+ "R_ARM_ABS32",
+ "R_ARM_ABS32_NOI",
+ "R_ARM_ABS8",
+ "R_ARM_ALU_PCREL_15_8",
+ "R_ARM_ALU_PCREL_23_15",
+ "R_ARM_ALU_PCREL_7_0",
+ "R_ARM_ALU_PC_G0",
+ "R_ARM_ALU_PC_G0_NC",
+ "R_ARM_ALU_PC_G1",
+ "R_ARM_ALU_PC_G1_NC",
+ "R_ARM_ALU_PC_G2",
+ "R_ARM_ALU_SBREL_19_12_NC",
+ "R_ARM_ALU_SBREL_27_20_CK",
+ "R_ARM_ALU_SB_G0",
+ "R_ARM_ALU_SB_G0_NC",
+ "R_ARM_ALU_SB_G1",
+ "R_ARM_ALU_SB_G1_NC",
+ "R_ARM_ALU_SB_G2",
+ "R_ARM_AMP_VCALL9",
+ "R_ARM_BASE_ABS",
+ "R_ARM_CALL",
+ "R_ARM_COPY",
+ "R_ARM_GLOB_DAT",
+ "R_ARM_GNU_VTENTRY",
+ "R_ARM_GNU_VTINHERIT",
+ "R_ARM_GOT32",
+ "R_ARM_GOTOFF",
+ "R_ARM_GOTOFF12",
+ "R_ARM_GOTPC",
+ "R_ARM_GOTRELAX",
+ "R_ARM_GOT_ABS",
+ "R_ARM_GOT_BREL12",
+ "R_ARM_GOT_PREL",
+ "R_ARM_IRELATIVE",
+ "R_ARM_JUMP24",
+ "R_ARM_JUMP_SLOT",
+ "R_ARM_LDC_PC_G0",
+ "R_ARM_LDC_PC_G1",
+ "R_ARM_LDC_PC_G2",
+ "R_ARM_LDC_SB_G0",
+ "R_ARM_LDC_SB_G1",
+ "R_ARM_LDC_SB_G2",
+ "R_ARM_LDRS_PC_G0",
+ "R_ARM_LDRS_PC_G1",
+ "R_ARM_LDRS_PC_G2",
+ "R_ARM_LDRS_SB_G0",
+ "R_ARM_LDRS_SB_G1",
+ "R_ARM_LDRS_SB_G2",
+ "R_ARM_LDR_PC_G1",
+ "R_ARM_LDR_PC_G2",
+ "R_ARM_LDR_SBREL_11_10_NC",
+ "R_ARM_LDR_SB_G0",
+ "R_ARM_LDR_SB_G1",
+ "R_ARM_LDR_SB_G2",
+ "R_ARM_ME_TOO",
+ "R_ARM_MOVT_ABS",
+ "R_ARM_MOVT_BREL",
+ "R_ARM_MOVT_PREL",
+ "R_ARM_MOVW_ABS_NC",
+ "R_ARM_MOVW_BREL",
+ "R_ARM_MOVW_BREL_NC",
+ "R_ARM_MOVW_PREL_NC",
+ "R_ARM_NONE",
+ "R_ARM_PC13",
+ "R_ARM_PC24",
+ "R_ARM_PLT32",
+ "R_ARM_PLT32_ABS",
+ "R_ARM_PREL31",
+ "R_ARM_PRIVATE_0",
+ "R_ARM_PRIVATE_1",
+ "R_ARM_PRIVATE_10",
+ "R_ARM_PRIVATE_11",
+ "R_ARM_PRIVATE_12",
+ "R_ARM_PRIVATE_13",
+ "R_ARM_PRIVATE_14",
+ "R_ARM_PRIVATE_15",
+ "R_ARM_PRIVATE_2",
+ "R_ARM_PRIVATE_3",
+ "R_ARM_PRIVATE_4",
+ "R_ARM_PRIVATE_5",
+ "R_ARM_PRIVATE_6",
+ "R_ARM_PRIVATE_7",
+ "R_ARM_PRIVATE_8",
+ "R_ARM_PRIVATE_9",
+ "R_ARM_RABS32",
+ "R_ARM_RBASE",
+ "R_ARM_REL32",
+ "R_ARM_REL32_NOI",
+ "R_ARM_RELATIVE",
+ "R_ARM_RPC24",
+ "R_ARM_RREL32",
+ "R_ARM_RSBREL32",
+ "R_ARM_RXPC25",
+ "R_ARM_SBREL31",
+ "R_ARM_SBREL32",
+ "R_ARM_SWI24",
+ "R_ARM_TARGET1",
+ "R_ARM_TARGET2",
+ "R_ARM_THM_ABS5",
+ "R_ARM_THM_ALU_ABS_G0_NC",
+ "R_ARM_THM_ALU_ABS_G1_NC",
+ "R_ARM_THM_ALU_ABS_G2_NC",
+ "R_ARM_THM_ALU_ABS_G3",
+ "R_ARM_THM_ALU_PREL_11_0",
+ "R_ARM_THM_GOT_BREL12",
+ "R_ARM_THM_JUMP11",
+ "R_ARM_THM_JUMP19",
+ "R_ARM_THM_JUMP24",
+ "R_ARM_THM_JUMP6",
+ "R_ARM_THM_JUMP8",
+ "R_ARM_THM_MOVT_ABS",
+ "R_ARM_THM_MOVT_BREL",
+ "R_ARM_THM_MOVT_PREL",
+ "R_ARM_THM_MOVW_ABS_NC",
+ "R_ARM_THM_MOVW_BREL",
+ "R_ARM_THM_MOVW_BREL_NC",
+ "R_ARM_THM_MOVW_PREL_NC",
+ "R_ARM_THM_PC12",
+ "R_ARM_THM_PC22",
+ "R_ARM_THM_PC8",
+ "R_ARM_THM_RPC22",
+ "R_ARM_THM_SWI8",
+ "R_ARM_THM_TLS_CALL",
+ "R_ARM_THM_TLS_DESCSEQ16",
+ "R_ARM_THM_TLS_DESCSEQ32",
+ "R_ARM_THM_XPC22",
+ "R_ARM_TLS_CALL",
+ "R_ARM_TLS_DESCSEQ",
+ "R_ARM_TLS_DTPMOD32",
+ "R_ARM_TLS_DTPOFF32",
+ "R_ARM_TLS_GD32",
+ "R_ARM_TLS_GOTDESC",
+ "R_ARM_TLS_IE12GP",
+ "R_ARM_TLS_IE32",
+ "R_ARM_TLS_LDM32",
+ "R_ARM_TLS_LDO12",
+ "R_ARM_TLS_LDO32",
+ "R_ARM_TLS_LE12",
+ "R_ARM_TLS_LE32",
+ "R_ARM_TLS_TPOFF32",
+ "R_ARM_V4BX",
+ "R_ARM_XPC25",
+ "R_INFO",
+ "R_INFO32",
+ "R_MIPS",
+ "R_MIPS_16",
+ "R_MIPS_26",
+ "R_MIPS_32",
+ "R_MIPS_64",
+ "R_MIPS_ADD_IMMEDIATE",
+ "R_MIPS_CALL16",
+ "R_MIPS_CALL_HI16",
+ "R_MIPS_CALL_LO16",
+ "R_MIPS_DELETE",
+ "R_MIPS_GOT16",
+ "R_MIPS_GOT_DISP",
+ "R_MIPS_GOT_HI16",
+ "R_MIPS_GOT_LO16",
+ "R_MIPS_GOT_OFST",
+ "R_MIPS_GOT_PAGE",
+ "R_MIPS_GPREL16",
+ "R_MIPS_GPREL32",
+ "R_MIPS_HI16",
+ "R_MIPS_HIGHER",
+ "R_MIPS_HIGHEST",
+ "R_MIPS_INSERT_A",
+ "R_MIPS_INSERT_B",
+ "R_MIPS_JALR",
+ "R_MIPS_LITERAL",
+ "R_MIPS_LO16",
+ "R_MIPS_NONE",
+ "R_MIPS_PC16",
+ "R_MIPS_PJUMP",
+ "R_MIPS_REL16",
+ "R_MIPS_REL32",
+ "R_MIPS_RELGOT",
+ "R_MIPS_SCN_DISP",
+ "R_MIPS_SHIFT5",
+ "R_MIPS_SHIFT6",
+ "R_MIPS_SUB",
+ "R_MIPS_TLS_DTPMOD32",
+ "R_MIPS_TLS_DTPMOD64",
+ "R_MIPS_TLS_DTPREL32",
+ "R_MIPS_TLS_DTPREL64",
+ "R_MIPS_TLS_DTPREL_HI16",
+ "R_MIPS_TLS_DTPREL_LO16",
+ "R_MIPS_TLS_GD",
+ "R_MIPS_TLS_GOTTPREL",
+ "R_MIPS_TLS_LDM",
+ "R_MIPS_TLS_TPREL32",
+ "R_MIPS_TLS_TPREL64",
+ "R_MIPS_TLS_TPREL_HI16",
+ "R_MIPS_TLS_TPREL_LO16",
+ "R_PPC",
+ "R_PPC64",
+ "R_PPC64_ADDR14",
+ "R_PPC64_ADDR14_BRNTAKEN",
+ "R_PPC64_ADDR14_BRTAKEN",
+ "R_PPC64_ADDR16",
+ "R_PPC64_ADDR16_DS",
+ "R_PPC64_ADDR16_HA",
+ "R_PPC64_ADDR16_HI",
+ "R_PPC64_ADDR16_HIGH",
+ "R_PPC64_ADDR16_HIGHA",
+ "R_PPC64_ADDR16_HIGHER",
+ "R_PPC64_ADDR16_HIGHERA",
+ "R_PPC64_ADDR16_HIGHEST",
+ "R_PPC64_ADDR16_HIGHESTA",
+ "R_PPC64_ADDR16_LO",
+ "R_PPC64_ADDR16_LO_DS",
+ "R_PPC64_ADDR24",
+ "R_PPC64_ADDR32",
+ "R_PPC64_ADDR64",
+ "R_PPC64_ADDR64_LOCAL",
+ "R_PPC64_DTPMOD64",
+ "R_PPC64_DTPREL16",
+ "R_PPC64_DTPREL16_DS",
+ "R_PPC64_DTPREL16_HA",
+ "R_PPC64_DTPREL16_HI",
+ "R_PPC64_DTPREL16_HIGH",
+ "R_PPC64_DTPREL16_HIGHA",
+ "R_PPC64_DTPREL16_HIGHER",
+ "R_PPC64_DTPREL16_HIGHERA",
+ "R_PPC64_DTPREL16_HIGHEST",
+ "R_PPC64_DTPREL16_HIGHESTA",
+ "R_PPC64_DTPREL16_LO",
+ "R_PPC64_DTPREL16_LO_DS",
+ "R_PPC64_DTPREL64",
+ "R_PPC64_ENTRY",
+ "R_PPC64_GOT16",
+ "R_PPC64_GOT16_DS",
+ "R_PPC64_GOT16_HA",
+ "R_PPC64_GOT16_HI",
+ "R_PPC64_GOT16_LO",
+ "R_PPC64_GOT16_LO_DS",
+ "R_PPC64_GOT_DTPREL16_DS",
+ "R_PPC64_GOT_DTPREL16_HA",
+ "R_PPC64_GOT_DTPREL16_HI",
+ "R_PPC64_GOT_DTPREL16_LO_DS",
+ "R_PPC64_GOT_TLSGD16",
+ "R_PPC64_GOT_TLSGD16_HA",
+ "R_PPC64_GOT_TLSGD16_HI",
+ "R_PPC64_GOT_TLSGD16_LO",
+ "R_PPC64_GOT_TLSLD16",
+ "R_PPC64_GOT_TLSLD16_HA",
+ "R_PPC64_GOT_TLSLD16_HI",
+ "R_PPC64_GOT_TLSLD16_LO",
+ "R_PPC64_GOT_TPREL16_DS",
+ "R_PPC64_GOT_TPREL16_HA",
+ "R_PPC64_GOT_TPREL16_HI",
+ "R_PPC64_GOT_TPREL16_LO_DS",
+ "R_PPC64_IRELATIVE",
+ "R_PPC64_JMP_IREL",
+ "R_PPC64_JMP_SLOT",
+ "R_PPC64_NONE",
+ "R_PPC64_PLT16_LO_DS",
+ "R_PPC64_PLTGOT16",
+ "R_PPC64_PLTGOT16_DS",
+ "R_PPC64_PLTGOT16_HA",
+ "R_PPC64_PLTGOT16_HI",
+ "R_PPC64_PLTGOT16_LO",
+ "R_PPC64_PLTGOT_LO_DS",
+ "R_PPC64_REL14",
+ "R_PPC64_REL14_BRNTAKEN",
+ "R_PPC64_REL14_BRTAKEN",
+ "R_PPC64_REL16",
+ "R_PPC64_REL16DX_HA",
+ "R_PPC64_REL16_HA",
+ "R_PPC64_REL16_HI",
+ "R_PPC64_REL16_LO",
+ "R_PPC64_REL24",
+ "R_PPC64_REL24_NOTOC",
+ "R_PPC64_REL32",
+ "R_PPC64_REL64",
+ "R_PPC64_SECTOFF_DS",
+ "R_PPC64_SECTOFF_LO_DS",
+ "R_PPC64_TLS",
+ "R_PPC64_TLSGD",
+ "R_PPC64_TLSLD",
+ "R_PPC64_TOC",
+ "R_PPC64_TOC16",
+ "R_PPC64_TOC16_DS",
+ "R_PPC64_TOC16_HA",
+ "R_PPC64_TOC16_HI",
+ "R_PPC64_TOC16_LO",
+ "R_PPC64_TOC16_LO_DS",
+ "R_PPC64_TOCSAVE",
+ "R_PPC64_TPREL16",
+ "R_PPC64_TPREL16_DS",
+ "R_PPC64_TPREL16_HA",
+ "R_PPC64_TPREL16_HI",
+ "R_PPC64_TPREL16_HIGH",
+ "R_PPC64_TPREL16_HIGHA",
+ "R_PPC64_TPREL16_HIGHER",
+ "R_PPC64_TPREL16_HIGHERA",
+ "R_PPC64_TPREL16_HIGHEST",
+ "R_PPC64_TPREL16_HIGHESTA",
+ "R_PPC64_TPREL16_LO",
+ "R_PPC64_TPREL16_LO_DS",
+ "R_PPC64_TPREL64",
+ "R_PPC_ADDR14",
+ "R_PPC_ADDR14_BRNTAKEN",
+ "R_PPC_ADDR14_BRTAKEN",
+ "R_PPC_ADDR16",
+ "R_PPC_ADDR16_HA",
+ "R_PPC_ADDR16_HI",
+ "R_PPC_ADDR16_LO",
+ "R_PPC_ADDR24",
+ "R_PPC_ADDR32",
+ "R_PPC_COPY",
+ "R_PPC_DTPMOD32",
+ "R_PPC_DTPREL16",
+ "R_PPC_DTPREL16_HA",
+ "R_PPC_DTPREL16_HI",
+ "R_PPC_DTPREL16_LO",
+ "R_PPC_DTPREL32",
+ "R_PPC_EMB_BIT_FLD",
+ "R_PPC_EMB_MRKREF",
+ "R_PPC_EMB_NADDR16",
+ "R_PPC_EMB_NADDR16_HA",
+ "R_PPC_EMB_NADDR16_HI",
+ "R_PPC_EMB_NADDR16_LO",
+ "R_PPC_EMB_NADDR32",
+ "R_PPC_EMB_RELSDA",
+ "R_PPC_EMB_RELSEC16",
+ "R_PPC_EMB_RELST_HA",
+ "R_PPC_EMB_RELST_HI",
+ "R_PPC_EMB_RELST_LO",
+ "R_PPC_EMB_SDA21",
+ "R_PPC_EMB_SDA2I16",
+ "R_PPC_EMB_SDA2REL",
+ "R_PPC_EMB_SDAI16",
+ "R_PPC_GLOB_DAT",
+ "R_PPC_GOT16",
+ "R_PPC_GOT16_HA",
+ "R_PPC_GOT16_HI",
+ "R_PPC_GOT16_LO",
+ "R_PPC_GOT_TLSGD16",
+ "R_PPC_GOT_TLSGD16_HA",
+ "R_PPC_GOT_TLSGD16_HI",
+ "R_PPC_GOT_TLSGD16_LO",
+ "R_PPC_GOT_TLSLD16",
+ "R_PPC_GOT_TLSLD16_HA",
+ "R_PPC_GOT_TLSLD16_HI",
+ "R_PPC_GOT_TLSLD16_LO",
+ "R_PPC_GOT_TPREL16",
+ "R_PPC_GOT_TPREL16_HA",
+ "R_PPC_GOT_TPREL16_HI",
+ "R_PPC_GOT_TPREL16_LO",
+ "R_PPC_JMP_SLOT",
+ "R_PPC_LOCAL24PC",
+ "R_PPC_NONE",
+ "R_PPC_PLT16_HA",
+ "R_PPC_PLT16_HI",
+ "R_PPC_PLT16_LO",
+ "R_PPC_PLT32",
+ "R_PPC_PLTREL24",
+ "R_PPC_PLTREL32",
+ "R_PPC_REL14",
+ "R_PPC_REL14_BRNTAKEN",
+ "R_PPC_REL14_BRTAKEN",
+ "R_PPC_REL24",
+ "R_PPC_REL32",
+ "R_PPC_RELATIVE",
+ "R_PPC_SDAREL16",
+ "R_PPC_SECTOFF",
+ "R_PPC_SECTOFF_HA",
+ "R_PPC_SECTOFF_HI",
+ "R_PPC_SECTOFF_LO",
+ "R_PPC_TLS",
+ "R_PPC_TPREL16",
+ "R_PPC_TPREL16_HA",
+ "R_PPC_TPREL16_HI",
+ "R_PPC_TPREL16_LO",
+ "R_PPC_TPREL32",
+ "R_PPC_UADDR16",
+ "R_PPC_UADDR32",
+ "R_RISCV",
+ "R_RISCV_32",
+ "R_RISCV_32_PCREL",
+ "R_RISCV_64",
+ "R_RISCV_ADD16",
+ "R_RISCV_ADD32",
+ "R_RISCV_ADD64",
+ "R_RISCV_ADD8",
+ "R_RISCV_ALIGN",
+ "R_RISCV_BRANCH",
+ "R_RISCV_CALL",
+ "R_RISCV_CALL_PLT",
+ "R_RISCV_COPY",
+ "R_RISCV_GNU_VTENTRY",
+ "R_RISCV_GNU_VTINHERIT",
+ "R_RISCV_GOT_HI20",
+ "R_RISCV_GPREL_I",
+ "R_RISCV_GPREL_S",
+ "R_RISCV_HI20",
+ "R_RISCV_JAL",
+ "R_RISCV_JUMP_SLOT",
+ "R_RISCV_LO12_I",
+ "R_RISCV_LO12_S",
+ "R_RISCV_NONE",
+ "R_RISCV_PCREL_HI20",
+ "R_RISCV_PCREL_LO12_I",
+ "R_RISCV_PCREL_LO12_S",
+ "R_RISCV_RELATIVE",
+ "R_RISCV_RELAX",
+ "R_RISCV_RVC_BRANCH",
+ "R_RISCV_RVC_JUMP",
+ "R_RISCV_RVC_LUI",
+ "R_RISCV_SET16",
+ "R_RISCV_SET32",
+ "R_RISCV_SET6",
+ "R_RISCV_SET8",
+ "R_RISCV_SUB16",
+ "R_RISCV_SUB32",
+ "R_RISCV_SUB6",
+ "R_RISCV_SUB64",
+ "R_RISCV_SUB8",
+ "R_RISCV_TLS_DTPMOD32",
+ "R_RISCV_TLS_DTPMOD64",
+ "R_RISCV_TLS_DTPREL32",
+ "R_RISCV_TLS_DTPREL64",
+ "R_RISCV_TLS_GD_HI20",
+ "R_RISCV_TLS_GOT_HI20",
+ "R_RISCV_TLS_TPREL32",
+ "R_RISCV_TLS_TPREL64",
+ "R_RISCV_TPREL_ADD",
+ "R_RISCV_TPREL_HI20",
+ "R_RISCV_TPREL_I",
+ "R_RISCV_TPREL_LO12_I",
+ "R_RISCV_TPREL_LO12_S",
+ "R_RISCV_TPREL_S",
+ "R_SPARC",
+ "R_SPARC_10",
+ "R_SPARC_11",
+ "R_SPARC_13",
+ "R_SPARC_16",
+ "R_SPARC_22",
+ "R_SPARC_32",
+ "R_SPARC_5",
+ "R_SPARC_6",
+ "R_SPARC_64",
+ "R_SPARC_7",
+ "R_SPARC_8",
+ "R_SPARC_COPY",
+ "R_SPARC_DISP16",
+ "R_SPARC_DISP32",
+ "R_SPARC_DISP64",
+ "R_SPARC_DISP8",
+ "R_SPARC_GLOB_DAT",
+ "R_SPARC_GLOB_JMP",
+ "R_SPARC_GOT10",
+ "R_SPARC_GOT13",
+ "R_SPARC_GOT22",
+ "R_SPARC_H44",
+ "R_SPARC_HH22",
+ "R_SPARC_HI22",
+ "R_SPARC_HIPLT22",
+ "R_SPARC_HIX22",
+ "R_SPARC_HM10",
+ "R_SPARC_JMP_SLOT",
+ "R_SPARC_L44",
+ "R_SPARC_LM22",
+ "R_SPARC_LO10",
+ "R_SPARC_LOPLT10",
+ "R_SPARC_LOX10",
+ "R_SPARC_M44",
+ "R_SPARC_NONE",
+ "R_SPARC_OLO10",
+ "R_SPARC_PC10",
+ "R_SPARC_PC22",
+ "R_SPARC_PCPLT10",
+ "R_SPARC_PCPLT22",
+ "R_SPARC_PCPLT32",
+ "R_SPARC_PC_HH22",
+ "R_SPARC_PC_HM10",
+ "R_SPARC_PC_LM22",
+ "R_SPARC_PLT32",
+ "R_SPARC_PLT64",
+ "R_SPARC_REGISTER",
+ "R_SPARC_RELATIVE",
+ "R_SPARC_UA16",
+ "R_SPARC_UA32",
+ "R_SPARC_UA64",
+ "R_SPARC_WDISP16",
+ "R_SPARC_WDISP19",
+ "R_SPARC_WDISP22",
+ "R_SPARC_WDISP30",
+ "R_SPARC_WPLT30",
+ "R_SYM32",
+ "R_SYM64",
+ "R_TYPE32",
+ "R_TYPE64",
+ "R_X86_64",
+ "R_X86_64_16",
+ "R_X86_64_32",
+ "R_X86_64_32S",
+ "R_X86_64_64",
+ "R_X86_64_8",
+ "R_X86_64_COPY",
+ "R_X86_64_DTPMOD64",
+ "R_X86_64_DTPOFF32",
+ "R_X86_64_DTPOFF64",
+ "R_X86_64_GLOB_DAT",
+ "R_X86_64_GOT32",
+ "R_X86_64_GOT64",
+ "R_X86_64_GOTOFF64",
+ "R_X86_64_GOTPC32",
+ "R_X86_64_GOTPC32_TLSDESC",
+ "R_X86_64_GOTPC64",
+ "R_X86_64_GOTPCREL",
+ "R_X86_64_GOTPCREL64",
+ "R_X86_64_GOTPCRELX",
+ "R_X86_64_GOTPLT64",
+ "R_X86_64_GOTTPOFF",
+ "R_X86_64_IRELATIVE",
+ "R_X86_64_JMP_SLOT",
+ "R_X86_64_NONE",
+ "R_X86_64_PC16",
+ "R_X86_64_PC32",
+ "R_X86_64_PC32_BND",
+ "R_X86_64_PC64",
+ "R_X86_64_PC8",
+ "R_X86_64_PLT32",
+ "R_X86_64_PLT32_BND",
+ "R_X86_64_PLTOFF64",
+ "R_X86_64_RELATIVE",
+ "R_X86_64_RELATIVE64",
+ "R_X86_64_REX_GOTPCRELX",
+ "R_X86_64_SIZE32",
+ "R_X86_64_SIZE64",
+ "R_X86_64_TLSDESC",
+ "R_X86_64_TLSDESC_CALL",
+ "R_X86_64_TLSGD",
+ "R_X86_64_TLSLD",
+ "R_X86_64_TPOFF32",
+ "R_X86_64_TPOFF64",
+ "Rel32",
+ "Rel64",
+ "Rela32",
+ "Rela64",
+ "SHF_ALLOC",
+ "SHF_COMPRESSED",
+ "SHF_EXECINSTR",
+ "SHF_GROUP",
+ "SHF_INFO_LINK",
+ "SHF_LINK_ORDER",
+ "SHF_MASKOS",
+ "SHF_MASKPROC",
+ "SHF_MERGE",
+ "SHF_OS_NONCONFORMING",
+ "SHF_STRINGS",
+ "SHF_TLS",
+ "SHF_WRITE",
+ "SHN_ABS",
+ "SHN_COMMON",
+ "SHN_HIOS",
+ "SHN_HIPROC",
+ "SHN_HIRESERVE",
+ "SHN_LOOS",
+ "SHN_LOPROC",
+ "SHN_LORESERVE",
+ "SHN_UNDEF",
+ "SHN_XINDEX",
+ "SHT_DYNAMIC",
+ "SHT_DYNSYM",
+ "SHT_FINI_ARRAY",
+ "SHT_GNU_ATTRIBUTES",
+ "SHT_GNU_HASH",
+ "SHT_GNU_LIBLIST",
+ "SHT_GNU_VERDEF",
+ "SHT_GNU_VERNEED",
+ "SHT_GNU_VERSYM",
+ "SHT_GROUP",
+ "SHT_HASH",
+ "SHT_HIOS",
+ "SHT_HIPROC",
+ "SHT_HIUSER",
+ "SHT_INIT_ARRAY",
+ "SHT_LOOS",
+ "SHT_LOPROC",
+ "SHT_LOUSER",
+ "SHT_MIPS_ABIFLAGS",
+ "SHT_NOBITS",
+ "SHT_NOTE",
+ "SHT_NULL",
+ "SHT_PREINIT_ARRAY",
+ "SHT_PROGBITS",
+ "SHT_REL",
+ "SHT_RELA",
+ "SHT_SHLIB",
+ "SHT_STRTAB",
+ "SHT_SYMTAB",
+ "SHT_SYMTAB_SHNDX",
+ "STB_GLOBAL",
+ "STB_HIOS",
+ "STB_HIPROC",
+ "STB_LOCAL",
+ "STB_LOOS",
+ "STB_LOPROC",
+ "STB_WEAK",
+ "STT_COMMON",
+ "STT_FILE",
+ "STT_FUNC",
+ "STT_HIOS",
+ "STT_HIPROC",
+ "STT_LOOS",
+ "STT_LOPROC",
+ "STT_NOTYPE",
+ "STT_OBJECT",
+ "STT_SECTION",
+ "STT_TLS",
+ "STV_DEFAULT",
+ "STV_HIDDEN",
+ "STV_INTERNAL",
+ "STV_PROTECTED",
+ "ST_BIND",
+ "ST_INFO",
+ "ST_TYPE",
+ "ST_VISIBILITY",
+ "Section",
+ "Section32",
+ "Section64",
+ "SectionFlag",
+ "SectionHeader",
+ "SectionIndex",
+ "SectionType",
+ "Sym32",
+ "Sym32Size",
+ "Sym64",
+ "Sym64Size",
+ "SymBind",
+ "SymType",
+ "SymVis",
+ "Symbol",
+ "Type",
+ "Version",
+ },
+ "debug/gosym": []string{
+ "DecodingError",
+ "Func",
+ "LineTable",
+ "NewLineTable",
+ "NewTable",
+ "Obj",
+ "Sym",
+ "Table",
+ "UnknownFileError",
+ "UnknownLineError",
+ },
+ "debug/macho": []string{
+ "ARM64_RELOC_ADDEND",
+ "ARM64_RELOC_BRANCH26",
+ "ARM64_RELOC_GOT_LOAD_PAGE21",
+ "ARM64_RELOC_GOT_LOAD_PAGEOFF12",
+ "ARM64_RELOC_PAGE21",
+ "ARM64_RELOC_PAGEOFF12",
+ "ARM64_RELOC_POINTER_TO_GOT",
+ "ARM64_RELOC_SUBTRACTOR",
+ "ARM64_RELOC_TLVP_LOAD_PAGE21",
+ "ARM64_RELOC_TLVP_LOAD_PAGEOFF12",
+ "ARM64_RELOC_UNSIGNED",
+ "ARM_RELOC_BR24",
+ "ARM_RELOC_HALF",
+ "ARM_RELOC_HALF_SECTDIFF",
+ "ARM_RELOC_LOCAL_SECTDIFF",
+ "ARM_RELOC_PAIR",
+ "ARM_RELOC_PB_LA_PTR",
+ "ARM_RELOC_SECTDIFF",
+ "ARM_RELOC_VANILLA",
+ "ARM_THUMB_32BIT_BRANCH",
+ "ARM_THUMB_RELOC_BR22",
+ "Cpu",
+ "Cpu386",
+ "CpuAmd64",
+ "CpuArm",
+ "CpuArm64",
+ "CpuPpc",
+ "CpuPpc64",
+ "Dylib",
+ "DylibCmd",
+ "Dysymtab",
+ "DysymtabCmd",
+ "ErrNotFat",
+ "FatArch",
+ "FatArchHeader",
+ "FatFile",
+ "File",
+ "FileHeader",
+ "FlagAllModsBound",
+ "FlagAllowStackExecution",
+ "FlagAppExtensionSafe",
+ "FlagBindAtLoad",
+ "FlagBindsToWeak",
+ "FlagCanonical",
+ "FlagDeadStrippableDylib",
+ "FlagDyldLink",
+ "FlagForceFlat",
+ "FlagHasTLVDescriptors",
+ "FlagIncrLink",
+ "FlagLazyInit",
+ "FlagNoFixPrebinding",
+ "FlagNoHeapExecution",
+ "FlagNoMultiDefs",
+ "FlagNoReexportedDylibs",
+ "FlagNoUndefs",
+ "FlagPIE",
+ "FlagPrebindable",
+ "FlagPrebound",
+ "FlagRootSafe",
+ "FlagSetuidSafe",
+ "FlagSplitSegs",
+ "FlagSubsectionsViaSymbols",
+ "FlagTwoLevel",
+ "FlagWeakDefines",
+ "FormatError",
+ "GENERIC_RELOC_LOCAL_SECTDIFF",
+ "GENERIC_RELOC_PAIR",
+ "GENERIC_RELOC_PB_LA_PTR",
+ "GENERIC_RELOC_SECTDIFF",
+ "GENERIC_RELOC_TLV",
+ "GENERIC_RELOC_VANILLA",
+ "Load",
+ "LoadBytes",
+ "LoadCmd",
+ "LoadCmdDylib",
+ "LoadCmdDylinker",
+ "LoadCmdDysymtab",
+ "LoadCmdRpath",
+ "LoadCmdSegment",
+ "LoadCmdSegment64",
+ "LoadCmdSymtab",
+ "LoadCmdThread",
+ "LoadCmdUnixThread",
+ "Magic32",
+ "Magic64",
+ "MagicFat",
+ "NewFatFile",
+ "NewFile",
+ "Nlist32",
+ "Nlist64",
+ "Open",
+ "OpenFat",
+ "Regs386",
+ "RegsAMD64",
+ "Reloc",
+ "RelocTypeARM",
+ "RelocTypeARM64",
+ "RelocTypeGeneric",
+ "RelocTypeX86_64",
+ "Rpath",
+ "RpathCmd",
+ "Section",
+ "Section32",
+ "Section64",
+ "SectionHeader",
+ "Segment",
+ "Segment32",
+ "Segment64",
+ "SegmentHeader",
+ "Symbol",
+ "Symtab",
+ "SymtabCmd",
+ "Thread",
+ "Type",
+ "TypeBundle",
+ "TypeDylib",
+ "TypeExec",
+ "TypeObj",
+ "X86_64_RELOC_BRANCH",
+ "X86_64_RELOC_GOT",
+ "X86_64_RELOC_GOT_LOAD",
+ "X86_64_RELOC_SIGNED",
+ "X86_64_RELOC_SIGNED_1",
+ "X86_64_RELOC_SIGNED_2",
+ "X86_64_RELOC_SIGNED_4",
+ "X86_64_RELOC_SUBTRACTOR",
+ "X86_64_RELOC_TLV",
+ "X86_64_RELOC_UNSIGNED",
+ },
+ "debug/pe": []string{
+ "COFFSymbol",
+ "COFFSymbolSize",
+ "DataDirectory",
+ "File",
+ "FileHeader",
+ "FormatError",
+ "IMAGE_DIRECTORY_ENTRY_ARCHITECTURE",
+ "IMAGE_DIRECTORY_ENTRY_BASERELOC",
+ "IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT",
+ "IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR",
+ "IMAGE_DIRECTORY_ENTRY_DEBUG",
+ "IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT",
+ "IMAGE_DIRECTORY_ENTRY_EXCEPTION",
+ "IMAGE_DIRECTORY_ENTRY_EXPORT",
+ "IMAGE_DIRECTORY_ENTRY_GLOBALPTR",
+ "IMAGE_DIRECTORY_ENTRY_IAT",
+ "IMAGE_DIRECTORY_ENTRY_IMPORT",
+ "IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG",
+ "IMAGE_DIRECTORY_ENTRY_RESOURCE",
+ "IMAGE_DIRECTORY_ENTRY_SECURITY",
+ "IMAGE_DIRECTORY_ENTRY_TLS",
+ "IMAGE_DLLCHARACTERISTICS_APPCONTAINER",
+ "IMAGE_DLLCHARACTERISTICS_DYNAMIC_BASE",
+ "IMAGE_DLLCHARACTERISTICS_FORCE_INTEGRITY",
+ "IMAGE_DLLCHARACTERISTICS_GUARD_CF",
+ "IMAGE_DLLCHARACTERISTICS_HIGH_ENTROPY_VA",
+ "IMAGE_DLLCHARACTERISTICS_NO_BIND",
+ "IMAGE_DLLCHARACTERISTICS_NO_ISOLATION",
+ "IMAGE_DLLCHARACTERISTICS_NO_SEH",
+ "IMAGE_DLLCHARACTERISTICS_NX_COMPAT",
+ "IMAGE_DLLCHARACTERISTICS_TERMINAL_SERVER_AWARE",
+ "IMAGE_DLLCHARACTERISTICS_WDM_DRIVER",
+ "IMAGE_FILE_32BIT_MACHINE",
+ "IMAGE_FILE_AGGRESIVE_WS_TRIM",
+ "IMAGE_FILE_BYTES_REVERSED_HI",
+ "IMAGE_FILE_BYTES_REVERSED_LO",
+ "IMAGE_FILE_DEBUG_STRIPPED",
+ "IMAGE_FILE_DLL",
+ "IMAGE_FILE_EXECUTABLE_IMAGE",
+ "IMAGE_FILE_LARGE_ADDRESS_AWARE",
+ "IMAGE_FILE_LINE_NUMS_STRIPPED",
+ "IMAGE_FILE_LOCAL_SYMS_STRIPPED",
+ "IMAGE_FILE_MACHINE_AM33",
+ "IMAGE_FILE_MACHINE_AMD64",
+ "IMAGE_FILE_MACHINE_ARM",
+ "IMAGE_FILE_MACHINE_ARM64",
+ "IMAGE_FILE_MACHINE_ARMNT",
+ "IMAGE_FILE_MACHINE_EBC",
+ "IMAGE_FILE_MACHINE_I386",
+ "IMAGE_FILE_MACHINE_IA64",
+ "IMAGE_FILE_MACHINE_M32R",
+ "IMAGE_FILE_MACHINE_MIPS16",
+ "IMAGE_FILE_MACHINE_MIPSFPU",
+ "IMAGE_FILE_MACHINE_MIPSFPU16",
+ "IMAGE_FILE_MACHINE_POWERPC",
+ "IMAGE_FILE_MACHINE_POWERPCFP",
+ "IMAGE_FILE_MACHINE_R4000",
+ "IMAGE_FILE_MACHINE_SH3",
+ "IMAGE_FILE_MACHINE_SH3DSP",
+ "IMAGE_FILE_MACHINE_SH4",
+ "IMAGE_FILE_MACHINE_SH5",
+ "IMAGE_FILE_MACHINE_THUMB",
+ "IMAGE_FILE_MACHINE_UNKNOWN",
+ "IMAGE_FILE_MACHINE_WCEMIPSV2",
+ "IMAGE_FILE_NET_RUN_FROM_SWAP",
+ "IMAGE_FILE_RELOCS_STRIPPED",
+ "IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP",
+ "IMAGE_FILE_SYSTEM",
+ "IMAGE_FILE_UP_SYSTEM_ONLY",
+ "IMAGE_SUBSYSTEM_EFI_APPLICATION",
+ "IMAGE_SUBSYSTEM_EFI_BOOT_SERVICE_DRIVER",
+ "IMAGE_SUBSYSTEM_EFI_ROM",
+ "IMAGE_SUBSYSTEM_EFI_RUNTIME_DRIVER",
+ "IMAGE_SUBSYSTEM_NATIVE",
+ "IMAGE_SUBSYSTEM_NATIVE_WINDOWS",
+ "IMAGE_SUBSYSTEM_OS2_CUI",
+ "IMAGE_SUBSYSTEM_POSIX_CUI",
+ "IMAGE_SUBSYSTEM_UNKNOWN",
+ "IMAGE_SUBSYSTEM_WINDOWS_BOOT_APPLICATION",
+ "IMAGE_SUBSYSTEM_WINDOWS_CE_GUI",
+ "IMAGE_SUBSYSTEM_WINDOWS_CUI",
+ "IMAGE_SUBSYSTEM_WINDOWS_GUI",
+ "IMAGE_SUBSYSTEM_XBOX",
+ "ImportDirectory",
+ "NewFile",
+ "Open",
+ "OptionalHeader32",
+ "OptionalHeader64",
+ "Reloc",
+ "Section",
+ "SectionHeader",
+ "SectionHeader32",
+ "StringTable",
+ "Symbol",
+ },
+ "debug/plan9obj": []string{
+ "File",
+ "FileHeader",
+ "Magic386",
+ "Magic64",
+ "MagicAMD64",
+ "MagicARM",
+ "NewFile",
+ "Open",
+ "Section",
+ "SectionHeader",
+ "Sym",
+ },
+ "embed": []string{
+ "FS",
+ },
+ "encoding": []string{
+ "BinaryMarshaler",
+ "BinaryUnmarshaler",
+ "TextMarshaler",
+ "TextUnmarshaler",
+ },
+ "encoding/ascii85": []string{
+ "CorruptInputError",
+ "Decode",
+ "Encode",
+ "MaxEncodedLen",
+ "NewDecoder",
+ "NewEncoder",
+ },
+ "encoding/asn1": []string{
+ "BitString",
+ "ClassApplication",
+ "ClassContextSpecific",
+ "ClassPrivate",
+ "ClassUniversal",
+ "Enumerated",
+ "Flag",
+ "Marshal",
+ "MarshalWithParams",
+ "NullBytes",
+ "NullRawValue",
+ "ObjectIdentifier",
+ "RawContent",
+ "RawValue",
+ "StructuralError",
+ "SyntaxError",
+ "TagBMPString",
+ "TagBitString",
+ "TagBoolean",
+ "TagEnum",
+ "TagGeneralString",
+ "TagGeneralizedTime",
+ "TagIA5String",
+ "TagInteger",
+ "TagNull",
+ "TagNumericString",
+ "TagOID",
+ "TagOctetString",
+ "TagPrintableString",
+ "TagSequence",
+ "TagSet",
+ "TagT61String",
+ "TagUTCTime",
+ "TagUTF8String",
+ "Unmarshal",
+ "UnmarshalWithParams",
+ },
+ "encoding/base32": []string{
+ "CorruptInputError",
+ "Encoding",
+ "HexEncoding",
+ "NewDecoder",
+ "NewEncoder",
+ "NewEncoding",
+ "NoPadding",
+ "StdEncoding",
+ "StdPadding",
+ },
+ "encoding/base64": []string{
+ "CorruptInputError",
+ "Encoding",
+ "NewDecoder",
+ "NewEncoder",
+ "NewEncoding",
+ "NoPadding",
+ "RawStdEncoding",
+ "RawURLEncoding",
+ "StdEncoding",
+ "StdPadding",
+ "URLEncoding",
+ },
+ "encoding/binary": []string{
+ "BigEndian",
+ "ByteOrder",
+ "LittleEndian",
+ "MaxVarintLen16",
+ "MaxVarintLen32",
+ "MaxVarintLen64",
+ "PutUvarint",
+ "PutVarint",
+ "Read",
+ "ReadUvarint",
+ "ReadVarint",
+ "Size",
+ "Uvarint",
+ "Varint",
+ "Write",
+ },
+ "encoding/csv": []string{
+ "ErrBareQuote",
+ "ErrFieldCount",
+ "ErrQuote",
+ "ErrTrailingComma",
+ "NewReader",
+ "NewWriter",
+ "ParseError",
+ "Reader",
+ "Writer",
+ },
+ "encoding/gob": []string{
+ "CommonType",
+ "Decoder",
+ "Encoder",
+ "GobDecoder",
+ "GobEncoder",
+ "NewDecoder",
+ "NewEncoder",
+ "Register",
+ "RegisterName",
+ },
+ "encoding/hex": []string{
+ "Decode",
+ "DecodeString",
+ "DecodedLen",
+ "Dump",
+ "Dumper",
+ "Encode",
+ "EncodeToString",
+ "EncodedLen",
+ "ErrLength",
+ "InvalidByteError",
+ "NewDecoder",
+ "NewEncoder",
+ },
+ "encoding/json": []string{
+ "Compact",
+ "Decoder",
+ "Delim",
+ "Encoder",
+ "HTMLEscape",
+ "Indent",
+ "InvalidUTF8Error",
+ "InvalidUnmarshalError",
+ "Marshal",
+ "MarshalIndent",
+ "Marshaler",
+ "MarshalerError",
+ "NewDecoder",
+ "NewEncoder",
+ "Number",
+ "RawMessage",
+ "SyntaxError",
+ "Token",
+ "Unmarshal",
+ "UnmarshalFieldError",
+ "UnmarshalTypeError",
+ "Unmarshaler",
+ "UnsupportedTypeError",
+ "UnsupportedValueError",
+ "Valid",
+ },
+ "encoding/pem": []string{
+ "Block",
+ "Decode",
+ "Encode",
+ "EncodeToMemory",
+ },
+ "encoding/xml": []string{
+ "Attr",
+ "CharData",
+ "Comment",
+ "CopyToken",
+ "Decoder",
+ "Directive",
+ "Encoder",
+ "EndElement",
+ "Escape",
+ "EscapeText",
+ "HTMLAutoClose",
+ "HTMLEntity",
+ "Header",
+ "Marshal",
+ "MarshalIndent",
+ "Marshaler",
+ "MarshalerAttr",
+ "Name",
+ "NewDecoder",
+ "NewEncoder",
+ "NewTokenDecoder",
+ "ProcInst",
+ "StartElement",
+ "SyntaxError",
+ "TagPathError",
+ "Token",
+ "TokenReader",
+ "Unmarshal",
+ "UnmarshalError",
+ "Unmarshaler",
+ "UnmarshalerAttr",
+ "UnsupportedTypeError",
+ },
+ "errors": []string{
+ "As",
+ "Is",
+ "New",
+ "Unwrap",
+ },
+ "expvar": []string{
+ "Do",
+ "Float",
+ "Func",
+ "Get",
+ "Handler",
+ "Int",
+ "KeyValue",
+ "Map",
+ "NewFloat",
+ "NewInt",
+ "NewMap",
+ "NewString",
+ "Publish",
+ "String",
+ "Var",
+ },
+ "flag": []string{
+ "Arg",
+ "Args",
+ "Bool",
+ "BoolVar",
+ "CommandLine",
+ "ContinueOnError",
+ "Duration",
+ "DurationVar",
+ "ErrHelp",
+ "ErrorHandling",
+ "ExitOnError",
+ "Flag",
+ "FlagSet",
+ "Float64",
+ "Float64Var",
+ "Func",
+ "Getter",
+ "Int",
+ "Int64",
+ "Int64Var",
+ "IntVar",
+ "Lookup",
+ "NArg",
+ "NFlag",
+ "NewFlagSet",
+ "PanicOnError",
+ "Parse",
+ "Parsed",
+ "PrintDefaults",
+ "Set",
+ "String",
+ "StringVar",
+ "Uint",
+ "Uint64",
+ "Uint64Var",
+ "UintVar",
+ "UnquoteUsage",
+ "Usage",
+ "Value",
+ "Var",
+ "Visit",
+ "VisitAll",
+ },
+ "fmt": []string{
+ "Errorf",
+ "Formatter",
+ "Fprint",
+ "Fprintf",
+ "Fprintln",
+ "Fscan",
+ "Fscanf",
+ "Fscanln",
+ "GoStringer",
+ "Print",
+ "Printf",
+ "Println",
+ "Scan",
+ "ScanState",
+ "Scanf",
+ "Scanln",
+ "Scanner",
+ "Sprint",
+ "Sprintf",
+ "Sprintln",
+ "Sscan",
+ "Sscanf",
+ "Sscanln",
+ "State",
+ "Stringer",
+ },
+ "go/ast": []string{
+ "ArrayType",
+ "AssignStmt",
+ "Bad",
+ "BadDecl",
+ "BadExpr",
+ "BadStmt",
+ "BasicLit",
+ "BinaryExpr",
+ "BlockStmt",
+ "BranchStmt",
+ "CallExpr",
+ "CaseClause",
+ "ChanDir",
+ "ChanType",
+ "CommClause",
+ "Comment",
+ "CommentGroup",
+ "CommentMap",
+ "CompositeLit",
+ "Con",
+ "Decl",
+ "DeclStmt",
+ "DeferStmt",
+ "Ellipsis",
+ "EmptyStmt",
+ "Expr",
+ "ExprStmt",
+ "Field",
+ "FieldFilter",
+ "FieldList",
+ "File",
+ "FileExports",
+ "Filter",
+ "FilterDecl",
+ "FilterFile",
+ "FilterFuncDuplicates",
+ "FilterImportDuplicates",
+ "FilterPackage",
+ "FilterUnassociatedComments",
+ "ForStmt",
+ "Fprint",
+ "Fun",
+ "FuncDecl",
+ "FuncLit",
+ "FuncType",
+ "GenDecl",
+ "GoStmt",
+ "Ident",
+ "IfStmt",
+ "ImportSpec",
+ "Importer",
+ "IncDecStmt",
+ "IndexExpr",
+ "Inspect",
+ "InterfaceType",
+ "IsExported",
+ "KeyValueExpr",
+ "LabeledStmt",
+ "Lbl",
+ "MapType",
+ "MergeMode",
+ "MergePackageFiles",
+ "NewCommentMap",
+ "NewIdent",
+ "NewObj",
+ "NewPackage",
+ "NewScope",
+ "Node",
+ "NotNilFilter",
+ "ObjKind",
+ "Object",
+ "Package",
+ "PackageExports",
+ "ParenExpr",
+ "Pkg",
+ "Print",
+ "RECV",
+ "RangeStmt",
+ "ReturnStmt",
+ "SEND",
+ "Scope",
+ "SelectStmt",
+ "SelectorExpr",
+ "SendStmt",
+ "SliceExpr",
+ "SortImports",
+ "Spec",
+ "StarExpr",
+ "Stmt",
+ "StructType",
+ "SwitchStmt",
+ "Typ",
+ "TypeAssertExpr",
+ "TypeSpec",
+ "TypeSwitchStmt",
+ "UnaryExpr",
+ "ValueSpec",
+ "Var",
+ "Visitor",
+ "Walk",
+ },
+ "go/build": []string{
+ "AllowBinary",
+ "ArchChar",
+ "Context",
+ "Default",
+ "FindOnly",
+ "IgnoreVendor",
+ "Import",
+ "ImportComment",
+ "ImportDir",
+ "ImportMode",
+ "IsLocalImport",
+ "MultiplePackageError",
+ "NoGoError",
+ "Package",
+ "ToolDir",
+ },
+ "go/build/constraint": []string{
+ "AndExpr",
+ "Expr",
+ "IsGoBuild",
+ "IsPlusBuild",
+ "NotExpr",
+ "OrExpr",
+ "Parse",
+ "PlusBuildLines",
+ "SyntaxError",
+ "TagExpr",
+ },
+ "go/constant": []string{
+ "BinaryOp",
+ "BitLen",
+ "Bool",
+ "BoolVal",
+ "Bytes",
+ "Compare",
+ "Complex",
+ "Denom",
+ "Float",
+ "Float32Val",
+ "Float64Val",
+ "Imag",
+ "Int",
+ "Int64Val",
+ "Kind",
+ "Make",
+ "MakeBool",
+ "MakeFloat64",
+ "MakeFromBytes",
+ "MakeFromLiteral",
+ "MakeImag",
+ "MakeInt64",
+ "MakeString",
+ "MakeUint64",
+ "MakeUnknown",
+ "Num",
+ "Real",
+ "Shift",
+ "Sign",
+ "String",
+ "StringVal",
+ "ToComplex",
+ "ToFloat",
+ "ToInt",
+ "Uint64Val",
+ "UnaryOp",
+ "Unknown",
+ "Val",
+ "Value",
+ },
+ "go/doc": []string{
+ "AllDecls",
+ "AllMethods",
+ "Example",
+ "Examples",
+ "Filter",
+ "Func",
+ "IllegalPrefixes",
+ "IsPredeclared",
+ "Mode",
+ "New",
+ "NewFromFiles",
+ "Note",
+ "Package",
+ "PreserveAST",
+ "Synopsis",
+ "ToHTML",
+ "ToText",
+ "Type",
+ "Value",
+ },
+ "go/format": []string{
+ "Node",
+ "Source",
+ },
+ "go/importer": []string{
+ "Default",
+ "For",
+ "ForCompiler",
+ "Lookup",
+ },
+ "go/parser": []string{
+ "AllErrors",
+ "DeclarationErrors",
+ "ImportsOnly",
+ "Mode",
+ "PackageClauseOnly",
+ "ParseComments",
+ "ParseDir",
+ "ParseExpr",
+ "ParseExprFrom",
+ "ParseFile",
+ "SkipObjectResolution",
+ "SpuriousErrors",
+ "Trace",
+ },
+ "go/printer": []string{
+ "CommentedNode",
+ "Config",
+ "Fprint",
+ "Mode",
+ "RawFormat",
+ "SourcePos",
+ "TabIndent",
+ "UseSpaces",
+ },
+ "go/scanner": []string{
+ "Error",
+ "ErrorHandler",
+ "ErrorList",
+ "Mode",
+ "PrintError",
+ "ScanComments",
+ "Scanner",
+ },
+ "go/token": []string{
+ "ADD",
+ "ADD_ASSIGN",
+ "AND",
+ "AND_ASSIGN",
+ "AND_NOT",
+ "AND_NOT_ASSIGN",
+ "ARROW",
+ "ASSIGN",
+ "BREAK",
+ "CASE",
+ "CHAN",
+ "CHAR",
+ "COLON",
+ "COMMA",
+ "COMMENT",
+ "CONST",
+ "CONTINUE",
+ "DEC",
+ "DEFAULT",
+ "DEFER",
+ "DEFINE",
+ "ELLIPSIS",
+ "ELSE",
+ "EOF",
+ "EQL",
+ "FALLTHROUGH",
+ "FLOAT",
+ "FOR",
+ "FUNC",
+ "File",
+ "FileSet",
+ "GEQ",
+ "GO",
+ "GOTO",
+ "GTR",
+ "HighestPrec",
+ "IDENT",
+ "IF",
+ "ILLEGAL",
+ "IMAG",
+ "IMPORT",
+ "INC",
+ "INT",
+ "INTERFACE",
+ "IsExported",
+ "IsIdentifier",
+ "IsKeyword",
+ "LAND",
+ "LBRACE",
+ "LBRACK",
+ "LEQ",
+ "LOR",
+ "LPAREN",
+ "LSS",
+ "Lookup",
+ "LowestPrec",
+ "MAP",
+ "MUL",
+ "MUL_ASSIGN",
+ "NEQ",
+ "NOT",
+ "NewFileSet",
+ "NoPos",
+ "OR",
+ "OR_ASSIGN",
+ "PACKAGE",
+ "PERIOD",
+ "Pos",
+ "Position",
+ "QUO",
+ "QUO_ASSIGN",
+ "RANGE",
+ "RBRACE",
+ "RBRACK",
+ "REM",
+ "REM_ASSIGN",
+ "RETURN",
+ "RPAREN",
+ "SELECT",
+ "SEMICOLON",
+ "SHL",
+ "SHL_ASSIGN",
+ "SHR",
+ "SHR_ASSIGN",
+ "STRING",
+ "STRUCT",
+ "SUB",
+ "SUB_ASSIGN",
+ "SWITCH",
+ "TYPE",
+ "Token",
+ "UnaryPrec",
+ "VAR",
+ "XOR",
+ "XOR_ASSIGN",
+ },
+ "go/types": []string{
+ "Array",
+ "AssertableTo",
+ "AssignableTo",
+ "Basic",
+ "BasicInfo",
+ "BasicKind",
+ "Bool",
+ "Builtin",
+ "Byte",
+ "Chan",
+ "ChanDir",
+ "CheckExpr",
+ "Checker",
+ "Comparable",
+ "Complex128",
+ "Complex64",
+ "Config",
+ "Const",
+ "ConvertibleTo",
+ "DefPredeclaredTestFuncs",
+ "Default",
+ "Error",
+ "Eval",
+ "ExprString",
+ "FieldVal",
+ "Float32",
+ "Float64",
+ "Func",
+ "Id",
+ "Identical",
+ "IdenticalIgnoreTags",
+ "Implements",
+ "ImportMode",
+ "Importer",
+ "ImporterFrom",
+ "Info",
+ "Initializer",
+ "Int",
+ "Int16",
+ "Int32",
+ "Int64",
+ "Int8",
+ "Interface",
+ "Invalid",
+ "IsBoolean",
+ "IsComplex",
+ "IsConstType",
+ "IsFloat",
+ "IsInteger",
+ "IsInterface",
+ "IsNumeric",
+ "IsOrdered",
+ "IsString",
+ "IsUnsigned",
+ "IsUntyped",
+ "Label",
+ "LookupFieldOrMethod",
+ "Map",
+ "MethodExpr",
+ "MethodSet",
+ "MethodVal",
+ "MissingMethod",
+ "Named",
+ "NewArray",
+ "NewChan",
+ "NewChecker",
+ "NewConst",
+ "NewField",
+ "NewFunc",
+ "NewInterface",
+ "NewInterfaceType",
+ "NewLabel",
+ "NewMap",
+ "NewMethodSet",
+ "NewNamed",
+ "NewPackage",
+ "NewParam",
+ "NewPkgName",
+ "NewPointer",
+ "NewScope",
+ "NewSignature",
+ "NewSlice",
+ "NewStruct",
+ "NewTuple",
+ "NewTypeName",
+ "NewVar",
+ "Nil",
+ "Object",
+ "ObjectString",
+ "Package",
+ "PkgName",
+ "Pointer",
+ "Qualifier",
+ "RecvOnly",
+ "RelativeTo",
+ "Rune",
+ "Scope",
+ "Selection",
+ "SelectionKind",
+ "SelectionString",
+ "SendOnly",
+ "SendRecv",
+ "Signature",
+ "Sizes",
+ "SizesFor",
+ "Slice",
+ "StdSizes",
+ "String",
+ "Struct",
+ "Tuple",
+ "Typ",
+ "Type",
+ "TypeAndValue",
+ "TypeName",
+ "TypeString",
+ "Uint",
+ "Uint16",
+ "Uint32",
+ "Uint64",
+ "Uint8",
+ "Uintptr",
+ "Universe",
+ "Unsafe",
+ "UnsafePointer",
+ "UntypedBool",
+ "UntypedComplex",
+ "UntypedFloat",
+ "UntypedInt",
+ "UntypedNil",
+ "UntypedRune",
+ "UntypedString",
+ "Var",
+ "WriteExpr",
+ "WriteSignature",
+ "WriteType",
+ },
+ "hash": []string{
+ "Hash",
+ "Hash32",
+ "Hash64",
+ },
+ "hash/adler32": []string{
+ "Checksum",
+ "New",
+ "Size",
+ },
+ "hash/crc32": []string{
+ "Castagnoli",
+ "Checksum",
+ "ChecksumIEEE",
+ "IEEE",
+ "IEEETable",
+ "Koopman",
+ "MakeTable",
+ "New",
+ "NewIEEE",
+ "Size",
+ "Table",
+ "Update",
+ },
+ "hash/crc64": []string{
+ "Checksum",
+ "ECMA",
+ "ISO",
+ "MakeTable",
+ "New",
+ "Size",
+ "Table",
+ "Update",
+ },
+ "hash/fnv": []string{
+ "New128",
+ "New128a",
+ "New32",
+ "New32a",
+ "New64",
+ "New64a",
+ },
+ "hash/maphash": []string{
+ "Hash",
+ "MakeSeed",
+ "Seed",
+ },
+ "html": []string{
+ "EscapeString",
+ "UnescapeString",
+ },
+ "html/template": []string{
+ "CSS",
+ "ErrAmbigContext",
+ "ErrBadHTML",
+ "ErrBranchEnd",
+ "ErrEndContext",
+ "ErrNoSuchTemplate",
+ "ErrOutputContext",
+ "ErrPartialCharset",
+ "ErrPartialEscape",
+ "ErrPredefinedEscaper",
+ "ErrRangeLoopReentry",
+ "ErrSlashAmbig",
+ "Error",
+ "ErrorCode",
+ "FuncMap",
+ "HTML",
+ "HTMLAttr",
+ "HTMLEscape",
+ "HTMLEscapeString",
+ "HTMLEscaper",
+ "IsTrue",
+ "JS",
+ "JSEscape",
+ "JSEscapeString",
+ "JSEscaper",
+ "JSStr",
+ "Must",
+ "New",
+ "OK",
+ "ParseFS",
+ "ParseFiles",
+ "ParseGlob",
+ "Srcset",
+ "Template",
+ "URL",
+ "URLQueryEscaper",
+ },
+ "image": []string{
+ "Alpha",
+ "Alpha16",
+ "Black",
+ "CMYK",
+ "Config",
+ "Decode",
+ "DecodeConfig",
+ "ErrFormat",
+ "Gray",
+ "Gray16",
+ "Image",
+ "NRGBA",
+ "NRGBA64",
+ "NYCbCrA",
+ "NewAlpha",
+ "NewAlpha16",
+ "NewCMYK",
+ "NewGray",
+ "NewGray16",
+ "NewNRGBA",
+ "NewNRGBA64",
+ "NewNYCbCrA",
+ "NewPaletted",
+ "NewRGBA",
+ "NewRGBA64",
+ "NewUniform",
+ "NewYCbCr",
+ "Opaque",
+ "Paletted",
+ "PalettedImage",
+ "Point",
+ "Pt",
+ "RGBA",
+ "RGBA64",
+ "RGBA64Image",
+ "Rect",
+ "Rectangle",
+ "RegisterFormat",
+ "Transparent",
+ "Uniform",
+ "White",
+ "YCbCr",
+ "YCbCrSubsampleRatio",
+ "YCbCrSubsampleRatio410",
+ "YCbCrSubsampleRatio411",
+ "YCbCrSubsampleRatio420",
+ "YCbCrSubsampleRatio422",
+ "YCbCrSubsampleRatio440",
+ "YCbCrSubsampleRatio444",
+ "ZP",
+ "ZR",
+ },
+ "image/color": []string{
+ "Alpha",
+ "Alpha16",
+ "Alpha16Model",
+ "AlphaModel",
+ "Black",
+ "CMYK",
+ "CMYKModel",
+ "CMYKToRGB",
+ "Color",
+ "Gray",
+ "Gray16",
+ "Gray16Model",
+ "GrayModel",
+ "Model",
+ "ModelFunc",
+ "NRGBA",
+ "NRGBA64",
+ "NRGBA64Model",
+ "NRGBAModel",
+ "NYCbCrA",
+ "NYCbCrAModel",
+ "Opaque",
+ "Palette",
+ "RGBA",
+ "RGBA64",
+ "RGBA64Model",
+ "RGBAModel",
+ "RGBToCMYK",
+ "RGBToYCbCr",
+ "Transparent",
+ "White",
+ "YCbCr",
+ "YCbCrModel",
+ "YCbCrToRGB",
+ },
+ "image/color/palette": []string{
+ "Plan9",
+ "WebSafe",
+ },
+ "image/draw": []string{
+ "Draw",
+ "DrawMask",
+ "Drawer",
+ "FloydSteinberg",
+ "Image",
+ "Op",
+ "Over",
+ "Quantizer",
+ "RGBA64Image",
+ "Src",
+ },
+ "image/gif": []string{
+ "Decode",
+ "DecodeAll",
+ "DecodeConfig",
+ "DisposalBackground",
+ "DisposalNone",
+ "DisposalPrevious",
+ "Encode",
+ "EncodeAll",
+ "GIF",
+ "Options",
+ },
+ "image/jpeg": []string{
+ "Decode",
+ "DecodeConfig",
+ "DefaultQuality",
+ "Encode",
+ "FormatError",
+ "Options",
+ "Reader",
+ "UnsupportedError",
+ },
+ "image/png": []string{
+ "BestCompression",
+ "BestSpeed",
+ "CompressionLevel",
+ "Decode",
+ "DecodeConfig",
+ "DefaultCompression",
+ "Encode",
+ "Encoder",
+ "EncoderBuffer",
+ "EncoderBufferPool",
+ "FormatError",
+ "NoCompression",
+ "UnsupportedError",
+ },
+ "index/suffixarray": []string{
+ "Index",
+ "New",
+ },
+ "io": []string{
+ "ByteReader",
+ "ByteScanner",
+ "ByteWriter",
+ "Closer",
+ "Copy",
+ "CopyBuffer",
+ "CopyN",
+ "Discard",
+ "EOF",
+ "ErrClosedPipe",
+ "ErrNoProgress",
+ "ErrShortBuffer",
+ "ErrShortWrite",
+ "ErrUnexpectedEOF",
+ "LimitReader",
+ "LimitedReader",
+ "MultiReader",
+ "MultiWriter",
+ "NewSectionReader",
+ "NopCloser",
+ "Pipe",
+ "PipeReader",
+ "PipeWriter",
+ "ReadAll",
+ "ReadAtLeast",
+ "ReadCloser",
+ "ReadFull",
+ "ReadSeekCloser",
+ "ReadSeeker",
+ "ReadWriteCloser",
+ "ReadWriteSeeker",
+ "ReadWriter",
+ "Reader",
+ "ReaderAt",
+ "ReaderFrom",
+ "RuneReader",
+ "RuneScanner",
+ "SectionReader",
+ "SeekCurrent",
+ "SeekEnd",
+ "SeekStart",
+ "Seeker",
+ "StringWriter",
+ "TeeReader",
+ "WriteCloser",
+ "WriteSeeker",
+ "WriteString",
+ "Writer",
+ "WriterAt",
+ "WriterTo",
+ },
+ "io/fs": []string{
+ "DirEntry",
+ "ErrClosed",
+ "ErrExist",
+ "ErrInvalid",
+ "ErrNotExist",
+ "ErrPermission",
+ "FS",
+ "File",
+ "FileInfo",
+ "FileInfoToDirEntry",
+ "FileMode",
+ "Glob",
+ "GlobFS",
+ "ModeAppend",
+ "ModeCharDevice",
+ "ModeDevice",
+ "ModeDir",
+ "ModeExclusive",
+ "ModeIrregular",
+ "ModeNamedPipe",
+ "ModePerm",
+ "ModeSetgid",
+ "ModeSetuid",
+ "ModeSocket",
+ "ModeSticky",
+ "ModeSymlink",
+ "ModeTemporary",
+ "ModeType",
+ "PathError",
+ "ReadDir",
+ "ReadDirFS",
+ "ReadDirFile",
+ "ReadFile",
+ "ReadFileFS",
+ "SkipDir",
+ "Stat",
+ "StatFS",
+ "Sub",
+ "SubFS",
+ "ValidPath",
+ "WalkDir",
+ "WalkDirFunc",
+ },
+ "io/ioutil": []string{
+ "Discard",
+ "NopCloser",
+ "ReadAll",
+ "ReadDir",
+ "ReadFile",
+ "TempDir",
+ "TempFile",
+ "WriteFile",
+ },
+ "log": []string{
+ "Default",
+ "Fatal",
+ "Fatalf",
+ "Fatalln",
+ "Flags",
+ "LUTC",
+ "Ldate",
+ "Llongfile",
+ "Lmicroseconds",
+ "Lmsgprefix",
+ "Logger",
+ "Lshortfile",
+ "LstdFlags",
+ "Ltime",
+ "New",
+ "Output",
+ "Panic",
+ "Panicf",
+ "Panicln",
+ "Prefix",
+ "Print",
+ "Printf",
+ "Println",
+ "SetFlags",
+ "SetOutput",
+ "SetPrefix",
+ "Writer",
+ },
+ "log/syslog": []string{
+ "Dial",
+ "LOG_ALERT",
+ "LOG_AUTH",
+ "LOG_AUTHPRIV",
+ "LOG_CRIT",
+ "LOG_CRON",
+ "LOG_DAEMON",
+ "LOG_DEBUG",
+ "LOG_EMERG",
+ "LOG_ERR",
+ "LOG_FTP",
+ "LOG_INFO",
+ "LOG_KERN",
+ "LOG_LOCAL0",
+ "LOG_LOCAL1",
+ "LOG_LOCAL2",
+ "LOG_LOCAL3",
+ "LOG_LOCAL4",
+ "LOG_LOCAL5",
+ "LOG_LOCAL6",
+ "LOG_LOCAL7",
+ "LOG_LPR",
+ "LOG_MAIL",
+ "LOG_NEWS",
+ "LOG_NOTICE",
+ "LOG_SYSLOG",
+ "LOG_USER",
+ "LOG_UUCP",
+ "LOG_WARNING",
+ "New",
+ "NewLogger",
+ "Priority",
+ "Writer",
+ },
+ "math": []string{
+ "Abs",
+ "Acos",
+ "Acosh",
+ "Asin",
+ "Asinh",
+ "Atan",
+ "Atan2",
+ "Atanh",
+ "Cbrt",
+ "Ceil",
+ "Copysign",
+ "Cos",
+ "Cosh",
+ "Dim",
+ "E",
+ "Erf",
+ "Erfc",
+ "Erfcinv",
+ "Erfinv",
+ "Exp",
+ "Exp2",
+ "Expm1",
+ "FMA",
+ "Float32bits",
+ "Float32frombits",
+ "Float64bits",
+ "Float64frombits",
+ "Floor",
+ "Frexp",
+ "Gamma",
+ "Hypot",
+ "Ilogb",
+ "Inf",
+ "IsInf",
+ "IsNaN",
+ "J0",
+ "J1",
+ "Jn",
+ "Ldexp",
+ "Lgamma",
+ "Ln10",
+ "Ln2",
+ "Log",
+ "Log10",
+ "Log10E",
+ "Log1p",
+ "Log2",
+ "Log2E",
+ "Logb",
+ "Max",
+ "MaxFloat32",
+ "MaxFloat64",
+ "MaxInt",
+ "MaxInt16",
+ "MaxInt32",
+ "MaxInt64",
+ "MaxInt8",
+ "MaxUint",
+ "MaxUint16",
+ "MaxUint32",
+ "MaxUint64",
+ "MaxUint8",
+ "Min",
+ "MinInt",
+ "MinInt16",
+ "MinInt32",
+ "MinInt64",
+ "MinInt8",
+ "Mod",
+ "Modf",
+ "NaN",
+ "Nextafter",
+ "Nextafter32",
+ "Phi",
+ "Pi",
+ "Pow",
+ "Pow10",
+ "Remainder",
+ "Round",
+ "RoundToEven",
+ "Signbit",
+ "Sin",
+ "Sincos",
+ "Sinh",
+ "SmallestNonzeroFloat32",
+ "SmallestNonzeroFloat64",
+ "Sqrt",
+ "Sqrt2",
+ "SqrtE",
+ "SqrtPhi",
+ "SqrtPi",
+ "Tan",
+ "Tanh",
+ "Trunc",
+ "Y0",
+ "Y1",
+ "Yn",
+ },
+ "math/big": []string{
+ "Above",
+ "Accuracy",
+ "AwayFromZero",
+ "Below",
+ "ErrNaN",
+ "Exact",
+ "Float",
+ "Int",
+ "Jacobi",
+ "MaxBase",
+ "MaxExp",
+ "MaxPrec",
+ "MinExp",
+ "NewFloat",
+ "NewInt",
+ "NewRat",
+ "ParseFloat",
+ "Rat",
+ "RoundingMode",
+ "ToNearestAway",
+ "ToNearestEven",
+ "ToNegativeInf",
+ "ToPositiveInf",
+ "ToZero",
+ "Word",
+ },
+ "math/bits": []string{
+ "Add",
+ "Add32",
+ "Add64",
+ "Div",
+ "Div32",
+ "Div64",
+ "LeadingZeros",
+ "LeadingZeros16",
+ "LeadingZeros32",
+ "LeadingZeros64",
+ "LeadingZeros8",
+ "Len",
+ "Len16",
+ "Len32",
+ "Len64",
+ "Len8",
+ "Mul",
+ "Mul32",
+ "Mul64",
+ "OnesCount",
+ "OnesCount16",
+ "OnesCount32",
+ "OnesCount64",
+ "OnesCount8",
+ "Rem",
+ "Rem32",
+ "Rem64",
+ "Reverse",
+ "Reverse16",
+ "Reverse32",
+ "Reverse64",
+ "Reverse8",
+ "ReverseBytes",
+ "ReverseBytes16",
+ "ReverseBytes32",
+ "ReverseBytes64",
+ "RotateLeft",
+ "RotateLeft16",
+ "RotateLeft32",
+ "RotateLeft64",
+ "RotateLeft8",
+ "Sub",
+ "Sub32",
+ "Sub64",
+ "TrailingZeros",
+ "TrailingZeros16",
+ "TrailingZeros32",
+ "TrailingZeros64",
+ "TrailingZeros8",
+ "UintSize",
+ },
+ "math/cmplx": []string{
+ "Abs",
+ "Acos",
+ "Acosh",
+ "Asin",
+ "Asinh",
+ "Atan",
+ "Atanh",
+ "Conj",
+ "Cos",
+ "Cosh",
+ "Cot",
+ "Exp",
+ "Inf",
+ "IsInf",
+ "IsNaN",
+ "Log",
+ "Log10",
+ "NaN",
+ "Phase",
+ "Polar",
+ "Pow",
+ "Rect",
+ "Sin",
+ "Sinh",
+ "Sqrt",
+ "Tan",
+ "Tanh",
+ },
+ "math/rand": []string{
+ "ExpFloat64",
+ "Float32",
+ "Float64",
+ "Int",
+ "Int31",
+ "Int31n",
+ "Int63",
+ "Int63n",
+ "Intn",
+ "New",
+ "NewSource",
+ "NewZipf",
+ "NormFloat64",
+ "Perm",
+ "Rand",
+ "Read",
+ "Seed",
+ "Shuffle",
+ "Source",
+ "Source64",
+ "Uint32",
+ "Uint64",
+ "Zipf",
+ },
+ "mime": []string{
+ "AddExtensionType",
+ "BEncoding",
+ "ErrInvalidMediaParameter",
+ "ExtensionsByType",
+ "FormatMediaType",
+ "ParseMediaType",
+ "QEncoding",
+ "TypeByExtension",
+ "WordDecoder",
+ "WordEncoder",
+ },
+ "mime/multipart": []string{
+ "ErrMessageTooLarge",
+ "File",
+ "FileHeader",
+ "Form",
+ "NewReader",
+ "NewWriter",
+ "Part",
+ "Reader",
+ "Writer",
+ },
+ "mime/quotedprintable": []string{
+ "NewReader",
+ "NewWriter",
+ "Reader",
+ "Writer",
+ },
+ "net": []string{
+ "Addr",
+ "AddrError",
+ "Buffers",
+ "CIDRMask",
+ "Conn",
+ "DNSConfigError",
+ "DNSError",
+ "DefaultResolver",
+ "Dial",
+ "DialIP",
+ "DialTCP",
+ "DialTimeout",
+ "DialUDP",
+ "DialUnix",
+ "Dialer",
+ "ErrClosed",
+ "ErrWriteToConnected",
+ "Error",
+ "FileConn",
+ "FileListener",
+ "FilePacketConn",
+ "FlagBroadcast",
+ "FlagLoopback",
+ "FlagMulticast",
+ "FlagPointToPoint",
+ "FlagUp",
+ "Flags",
+ "HardwareAddr",
+ "IP",
+ "IPAddr",
+ "IPConn",
+ "IPMask",
+ "IPNet",
+ "IPv4",
+ "IPv4Mask",
+ "IPv4allrouter",
+ "IPv4allsys",
+ "IPv4bcast",
+ "IPv4len",
+ "IPv4zero",
+ "IPv6interfacelocalallnodes",
+ "IPv6len",
+ "IPv6linklocalallnodes",
+ "IPv6linklocalallrouters",
+ "IPv6loopback",
+ "IPv6unspecified",
+ "IPv6zero",
+ "Interface",
+ "InterfaceAddrs",
+ "InterfaceByIndex",
+ "InterfaceByName",
+ "Interfaces",
+ "InvalidAddrError",
+ "JoinHostPort",
+ "Listen",
+ "ListenConfig",
+ "ListenIP",
+ "ListenMulticastUDP",
+ "ListenPacket",
+ "ListenTCP",
+ "ListenUDP",
+ "ListenUnix",
+ "ListenUnixgram",
+ "Listener",
+ "LookupAddr",
+ "LookupCNAME",
+ "LookupHost",
+ "LookupIP",
+ "LookupMX",
+ "LookupNS",
+ "LookupPort",
+ "LookupSRV",
+ "LookupTXT",
+ "MX",
+ "NS",
+ "OpError",
+ "PacketConn",
+ "ParseCIDR",
+ "ParseError",
+ "ParseIP",
+ "ParseMAC",
+ "Pipe",
+ "ResolveIPAddr",
+ "ResolveTCPAddr",
+ "ResolveUDPAddr",
+ "ResolveUnixAddr",
+ "Resolver",
+ "SRV",
+ "SplitHostPort",
+ "TCPAddr",
+ "TCPConn",
+ "TCPListener",
+ "UDPAddr",
+ "UDPConn",
+ "UnixAddr",
+ "UnixConn",
+ "UnixListener",
+ "UnknownNetworkError",
+ },
+ "net/http": []string{
+ "AllowQuerySemicolons",
+ "CanonicalHeaderKey",
+ "Client",
+ "CloseNotifier",
+ "ConnState",
+ "Cookie",
+ "CookieJar",
+ "DefaultClient",
+ "DefaultMaxHeaderBytes",
+ "DefaultMaxIdleConnsPerHost",
+ "DefaultServeMux",
+ "DefaultTransport",
+ "DetectContentType",
+ "Dir",
+ "ErrAbortHandler",
+ "ErrBodyNotAllowed",
+ "ErrBodyReadAfterClose",
+ "ErrContentLength",
+ "ErrHandlerTimeout",
+ "ErrHeaderTooLong",
+ "ErrHijacked",
+ "ErrLineTooLong",
+ "ErrMissingBoundary",
+ "ErrMissingContentLength",
+ "ErrMissingFile",
+ "ErrNoCookie",
+ "ErrNoLocation",
+ "ErrNotMultipart",
+ "ErrNotSupported",
+ "ErrServerClosed",
+ "ErrShortBody",
+ "ErrSkipAltProtocol",
+ "ErrUnexpectedTrailer",
+ "ErrUseLastResponse",
+ "ErrWriteAfterFlush",
+ "Error",
+ "FS",
+ "File",
+ "FileServer",
+ "FileSystem",
+ "Flusher",
+ "Get",
+ "Handle",
+ "HandleFunc",
+ "Handler",
+ "HandlerFunc",
+ "Head",
+ "Header",
+ "Hijacker",
+ "ListenAndServe",
+ "ListenAndServeTLS",
+ "LocalAddrContextKey",
+ "MaxBytesReader",
+ "MethodConnect",
+ "MethodDelete",
+ "MethodGet",
+ "MethodHead",
+ "MethodOptions",
+ "MethodPatch",
+ "MethodPost",
+ "MethodPut",
+ "MethodTrace",
+ "NewFileTransport",
+ "NewRequest",
+ "NewRequestWithContext",
+ "NewServeMux",
+ "NoBody",
+ "NotFound",
+ "NotFoundHandler",
+ "ParseHTTPVersion",
+ "ParseTime",
+ "Post",
+ "PostForm",
+ "ProtocolError",
+ "ProxyFromEnvironment",
+ "ProxyURL",
+ "PushOptions",
+ "Pusher",
+ "ReadRequest",
+ "ReadResponse",
+ "Redirect",
+ "RedirectHandler",
+ "Request",
+ "Response",
+ "ResponseWriter",
+ "RoundTripper",
+ "SameSite",
+ "SameSiteDefaultMode",
+ "SameSiteLaxMode",
+ "SameSiteNoneMode",
+ "SameSiteStrictMode",
+ "Serve",
+ "ServeContent",
+ "ServeFile",
+ "ServeMux",
+ "ServeTLS",
+ "Server",
+ "ServerContextKey",
+ "SetCookie",
+ "StateActive",
+ "StateClosed",
+ "StateHijacked",
+ "StateIdle",
+ "StateNew",
+ "StatusAccepted",
+ "StatusAlreadyReported",
+ "StatusBadGateway",
+ "StatusBadRequest",
+ "StatusConflict",
+ "StatusContinue",
+ "StatusCreated",
+ "StatusEarlyHints",
+ "StatusExpectationFailed",
+ "StatusFailedDependency",
+ "StatusForbidden",
+ "StatusFound",
+ "StatusGatewayTimeout",
+ "StatusGone",
+ "StatusHTTPVersionNotSupported",
+ "StatusIMUsed",
+ "StatusInsufficientStorage",
+ "StatusInternalServerError",
+ "StatusLengthRequired",
+ "StatusLocked",
+ "StatusLoopDetected",
+ "StatusMethodNotAllowed",
+ "StatusMisdirectedRequest",
+ "StatusMovedPermanently",
+ "StatusMultiStatus",
+ "StatusMultipleChoices",
+ "StatusNetworkAuthenticationRequired",
+ "StatusNoContent",
+ "StatusNonAuthoritativeInfo",
+ "StatusNotAcceptable",
+ "StatusNotExtended",
+ "StatusNotFound",
+ "StatusNotImplemented",
+ "StatusNotModified",
+ "StatusOK",
+ "StatusPartialContent",
+ "StatusPaymentRequired",
+ "StatusPermanentRedirect",
+ "StatusPreconditionFailed",
+ "StatusPreconditionRequired",
+ "StatusProcessing",
+ "StatusProxyAuthRequired",
+ "StatusRequestEntityTooLarge",
+ "StatusRequestHeaderFieldsTooLarge",
+ "StatusRequestTimeout",
+ "StatusRequestURITooLong",
+ "StatusRequestedRangeNotSatisfiable",
+ "StatusResetContent",
+ "StatusSeeOther",
+ "StatusServiceUnavailable",
+ "StatusSwitchingProtocols",
+ "StatusTeapot",
+ "StatusTemporaryRedirect",
+ "StatusText",
+ "StatusTooEarly",
+ "StatusTooManyRequests",
+ "StatusUnauthorized",
+ "StatusUnavailableForLegalReasons",
+ "StatusUnprocessableEntity",
+ "StatusUnsupportedMediaType",
+ "StatusUpgradeRequired",
+ "StatusUseProxy",
+ "StatusVariantAlsoNegotiates",
+ "StripPrefix",
+ "TimeFormat",
+ "TimeoutHandler",
+ "TrailerPrefix",
+ "Transport",
+ },
+ "net/http/cgi": []string{
+ "Handler",
+ "Request",
+ "RequestFromMap",
+ "Serve",
+ },
+ "net/http/cookiejar": []string{
+ "Jar",
+ "New",
+ "Options",
+ "PublicSuffixList",
+ },
+ "net/http/fcgi": []string{
+ "ErrConnClosed",
+ "ErrRequestAborted",
+ "ProcessEnv",
+ "Serve",
+ },
+ "net/http/httptest": []string{
+ "DefaultRemoteAddr",
+ "NewRecorder",
+ "NewRequest",
+ "NewServer",
+ "NewTLSServer",
+ "NewUnstartedServer",
+ "ResponseRecorder",
+ "Server",
+ },
+ "net/http/httptrace": []string{
+ "ClientTrace",
+ "ContextClientTrace",
+ "DNSDoneInfo",
+ "DNSStartInfo",
+ "GotConnInfo",
+ "WithClientTrace",
+ "WroteRequestInfo",
+ },
+ "net/http/httputil": []string{
+ "BufferPool",
+ "ClientConn",
+ "DumpRequest",
+ "DumpRequestOut",
+ "DumpResponse",
+ "ErrClosed",
+ "ErrLineTooLong",
+ "ErrPersistEOF",
+ "ErrPipeline",
+ "NewChunkedReader",
+ "NewChunkedWriter",
+ "NewClientConn",
+ "NewProxyClientConn",
+ "NewServerConn",
+ "NewSingleHostReverseProxy",
+ "ReverseProxy",
+ "ServerConn",
+ },
+ "net/http/pprof": []string{
+ "Cmdline",
+ "Handler",
+ "Index",
+ "Profile",
+ "Symbol",
+ "Trace",
+ },
+ "net/mail": []string{
+ "Address",
+ "AddressParser",
+ "ErrHeaderNotPresent",
+ "Header",
+ "Message",
+ "ParseAddress",
+ "ParseAddressList",
+ "ParseDate",
+ "ReadMessage",
+ },
+ "net/rpc": []string{
+ "Accept",
+ "Call",
+ "Client",
+ "ClientCodec",
+ "DefaultDebugPath",
+ "DefaultRPCPath",
+ "DefaultServer",
+ "Dial",
+ "DialHTTP",
+ "DialHTTPPath",
+ "ErrShutdown",
+ "HandleHTTP",
+ "NewClient",
+ "NewClientWithCodec",
+ "NewServer",
+ "Register",
+ "RegisterName",
+ "Request",
+ "Response",
+ "ServeCodec",
+ "ServeConn",
+ "ServeRequest",
+ "Server",
+ "ServerCodec",
+ "ServerError",
+ },
+ "net/rpc/jsonrpc": []string{
+ "Dial",
+ "NewClient",
+ "NewClientCodec",
+ "NewServerCodec",
+ "ServeConn",
+ },
+ "net/smtp": []string{
+ "Auth",
+ "CRAMMD5Auth",
+ "Client",
+ "Dial",
+ "NewClient",
+ "PlainAuth",
+ "SendMail",
+ "ServerInfo",
+ },
+ "net/textproto": []string{
+ "CanonicalMIMEHeaderKey",
+ "Conn",
+ "Dial",
+ "Error",
+ "MIMEHeader",
+ "NewConn",
+ "NewReader",
+ "NewWriter",
+ "Pipeline",
+ "ProtocolError",
+ "Reader",
+ "TrimBytes",
+ "TrimString",
+ "Writer",
+ },
+ "net/url": []string{
+ "Error",
+ "EscapeError",
+ "InvalidHostError",
+ "Parse",
+ "ParseQuery",
+ "ParseRequestURI",
+ "PathEscape",
+ "PathUnescape",
+ "QueryEscape",
+ "QueryUnescape",
+ "URL",
+ "User",
+ "UserPassword",
+ "Userinfo",
+ "Values",
+ },
+ "os": []string{
+ "Args",
+ "Chdir",
+ "Chmod",
+ "Chown",
+ "Chtimes",
+ "Clearenv",
+ "Create",
+ "CreateTemp",
+ "DevNull",
+ "DirEntry",
+ "DirFS",
+ "Environ",
+ "ErrClosed",
+ "ErrDeadlineExceeded",
+ "ErrExist",
+ "ErrInvalid",
+ "ErrNoDeadline",
+ "ErrNotExist",
+ "ErrPermission",
+ "ErrProcessDone",
+ "Executable",
+ "Exit",
+ "Expand",
+ "ExpandEnv",
+ "File",
+ "FileInfo",
+ "FileMode",
+ "FindProcess",
+ "Getegid",
+ "Getenv",
+ "Geteuid",
+ "Getgid",
+ "Getgroups",
+ "Getpagesize",
+ "Getpid",
+ "Getppid",
+ "Getuid",
+ "Getwd",
+ "Hostname",
+ "Interrupt",
+ "IsExist",
+ "IsNotExist",
+ "IsPathSeparator",
+ "IsPermission",
+ "IsTimeout",
+ "Kill",
+ "Lchown",
+ "Link",
+ "LinkError",
+ "LookupEnv",
+ "Lstat",
+ "Mkdir",
+ "MkdirAll",
+ "MkdirTemp",
+ "ModeAppend",
+ "ModeCharDevice",
+ "ModeDevice",
+ "ModeDir",
+ "ModeExclusive",
+ "ModeIrregular",
+ "ModeNamedPipe",
+ "ModePerm",
+ "ModeSetgid",
+ "ModeSetuid",
+ "ModeSocket",
+ "ModeSticky",
+ "ModeSymlink",
+ "ModeTemporary",
+ "ModeType",
+ "NewFile",
+ "NewSyscallError",
+ "O_APPEND",
+ "O_CREATE",
+ "O_EXCL",
+ "O_RDONLY",
+ "O_RDWR",
+ "O_SYNC",
+ "O_TRUNC",
+ "O_WRONLY",
+ "Open",
+ "OpenFile",
+ "PathError",
+ "PathListSeparator",
+ "PathSeparator",
+ "Pipe",
+ "ProcAttr",
+ "Process",
+ "ProcessState",
+ "ReadDir",
+ "ReadFile",
+ "Readlink",
+ "Remove",
+ "RemoveAll",
+ "Rename",
+ "SEEK_CUR",
+ "SEEK_END",
+ "SEEK_SET",
+ "SameFile",
+ "Setenv",
+ "Signal",
+ "StartProcess",
+ "Stat",
+ "Stderr",
+ "Stdin",
+ "Stdout",
+ "Symlink",
+ "SyscallError",
+ "TempDir",
+ "Truncate",
+ "Unsetenv",
+ "UserCacheDir",
+ "UserConfigDir",
+ "UserHomeDir",
+ "WriteFile",
+ },
+ "os/exec": []string{
+ "Cmd",
+ "Command",
+ "CommandContext",
+ "ErrNotFound",
+ "Error",
+ "ExitError",
+ "LookPath",
+ },
+ "os/signal": []string{
+ "Ignore",
+ "Ignored",
+ "Notify",
+ "NotifyContext",
+ "Reset",
+ "Stop",
+ },
+ "os/user": []string{
+ "Current",
+ "Group",
+ "Lookup",
+ "LookupGroup",
+ "LookupGroupId",
+ "LookupId",
+ "UnknownGroupError",
+ "UnknownGroupIdError",
+ "UnknownUserError",
+ "UnknownUserIdError",
+ "User",
+ },
+ "path": []string{
+ "Base",
+ "Clean",
+ "Dir",
+ "ErrBadPattern",
+ "Ext",
+ "IsAbs",
+ "Join",
+ "Match",
+ "Split",
+ },
+ "path/filepath": []string{
+ "Abs",
+ "Base",
+ "Clean",
+ "Dir",
+ "ErrBadPattern",
+ "EvalSymlinks",
+ "Ext",
+ "FromSlash",
+ "Glob",
+ "HasPrefix",
+ "IsAbs",
+ "Join",
+ "ListSeparator",
+ "Match",
+ "Rel",
+ "Separator",
+ "SkipDir",
+ "Split",
+ "SplitList",
+ "ToSlash",
+ "VolumeName",
+ "Walk",
+ "WalkDir",
+ "WalkFunc",
+ },
+ "plugin": []string{
+ "Open",
+ "Plugin",
+ "Symbol",
+ },
+ "reflect": []string{
+ "Append",
+ "AppendSlice",
+ "Array",
+ "ArrayOf",
+ "Bool",
+ "BothDir",
+ "Chan",
+ "ChanDir",
+ "ChanOf",
+ "Complex128",
+ "Complex64",
+ "Copy",
+ "DeepEqual",
+ "Float32",
+ "Float64",
+ "Func",
+ "FuncOf",
+ "Indirect",
+ "Int",
+ "Int16",
+ "Int32",
+ "Int64",
+ "Int8",
+ "Interface",
+ "Invalid",
+ "Kind",
+ "MakeChan",
+ "MakeFunc",
+ "MakeMap",
+ "MakeMapWithSize",
+ "MakeSlice",
+ "Map",
+ "MapIter",
+ "MapOf",
+ "Method",
+ "New",
+ "NewAt",
+ "Ptr",
+ "PtrTo",
+ "RecvDir",
+ "Select",
+ "SelectCase",
+ "SelectDefault",
+ "SelectDir",
+ "SelectRecv",
+ "SelectSend",
+ "SendDir",
+ "Slice",
+ "SliceHeader",
+ "SliceOf",
+ "String",
+ "StringHeader",
+ "Struct",
+ "StructField",
+ "StructOf",
+ "StructTag",
+ "Swapper",
+ "Type",
+ "TypeOf",
+ "Uint",
+ "Uint16",
+ "Uint32",
+ "Uint64",
+ "Uint8",
+ "Uintptr",
+ "UnsafePointer",
+ "Value",
+ "ValueError",
+ "ValueOf",
+ "VisibleFields",
+ "Zero",
+ },
+ "regexp": []string{
+ "Compile",
+ "CompilePOSIX",
+ "Match",
+ "MatchReader",
+ "MatchString",
+ "MustCompile",
+ "MustCompilePOSIX",
+ "QuoteMeta",
+ "Regexp",
+ },
+ "regexp/syntax": []string{
+ "ClassNL",
+ "Compile",
+ "DotNL",
+ "EmptyBeginLine",
+ "EmptyBeginText",
+ "EmptyEndLine",
+ "EmptyEndText",
+ "EmptyNoWordBoundary",
+ "EmptyOp",
+ "EmptyOpContext",
+ "EmptyWordBoundary",
+ "ErrInternalError",
+ "ErrInvalidCharClass",
+ "ErrInvalidCharRange",
+ "ErrInvalidEscape",
+ "ErrInvalidNamedCapture",
+ "ErrInvalidPerlOp",
+ "ErrInvalidRepeatOp",
+ "ErrInvalidRepeatSize",
+ "ErrInvalidUTF8",
+ "ErrMissingBracket",
+ "ErrMissingParen",
+ "ErrMissingRepeatArgument",
+ "ErrTrailingBackslash",
+ "ErrUnexpectedParen",
+ "Error",
+ "ErrorCode",
+ "Flags",
+ "FoldCase",
+ "Inst",
+ "InstAlt",
+ "InstAltMatch",
+ "InstCapture",
+ "InstEmptyWidth",
+ "InstFail",
+ "InstMatch",
+ "InstNop",
+ "InstOp",
+ "InstRune",
+ "InstRune1",
+ "InstRuneAny",
+ "InstRuneAnyNotNL",
+ "IsWordChar",
+ "Literal",
+ "MatchNL",
+ "NonGreedy",
+ "OneLine",
+ "Op",
+ "OpAlternate",
+ "OpAnyChar",
+ "OpAnyCharNotNL",
+ "OpBeginLine",
+ "OpBeginText",
+ "OpCapture",
+ "OpCharClass",
+ "OpConcat",
+ "OpEmptyMatch",
+ "OpEndLine",
+ "OpEndText",
+ "OpLiteral",
+ "OpNoMatch",
+ "OpNoWordBoundary",
+ "OpPlus",
+ "OpQuest",
+ "OpRepeat",
+ "OpStar",
+ "OpWordBoundary",
+ "POSIX",
+ "Parse",
+ "Perl",
+ "PerlX",
+ "Prog",
+ "Regexp",
+ "Simple",
+ "UnicodeGroups",
+ "WasDollar",
+ },
+ "runtime": []string{
+ "BlockProfile",
+ "BlockProfileRecord",
+ "Breakpoint",
+ "CPUProfile",
+ "Caller",
+ "Callers",
+ "CallersFrames",
+ "Compiler",
+ "Error",
+ "Frame",
+ "Frames",
+ "Func",
+ "FuncForPC",
+ "GC",
+ "GOARCH",
+ "GOMAXPROCS",
+ "GOOS",
+ "GOROOT",
+ "Goexit",
+ "GoroutineProfile",
+ "Gosched",
+ "KeepAlive",
+ "LockOSThread",
+ "MemProfile",
+ "MemProfileRate",
+ "MemProfileRecord",
+ "MemStats",
+ "MutexProfile",
+ "NumCPU",
+ "NumCgoCall",
+ "NumGoroutine",
+ "ReadMemStats",
+ "ReadTrace",
+ "SetBlockProfileRate",
+ "SetCPUProfileRate",
+ "SetCgoTraceback",
+ "SetFinalizer",
+ "SetMutexProfileFraction",
+ "Stack",
+ "StackRecord",
+ "StartTrace",
+ "StopTrace",
+ "ThreadCreateProfile",
+ "TypeAssertionError",
+ "UnlockOSThread",
+ "Version",
+ },
+ "runtime/cgo": []string{
+ "Handle",
+ "NewHandle",
+ },
+ "runtime/debug": []string{
+ "BuildInfo",
+ "FreeOSMemory",
+ "GCStats",
+ "Module",
+ "PrintStack",
+ "ReadBuildInfo",
+ "ReadGCStats",
+ "SetGCPercent",
+ "SetMaxStack",
+ "SetMaxThreads",
+ "SetPanicOnFault",
+ "SetTraceback",
+ "Stack",
+ "WriteHeapDump",
+ },
+ "runtime/metrics": []string{
+ "All",
+ "Description",
+ "Float64Histogram",
+ "KindBad",
+ "KindFloat64",
+ "KindFloat64Histogram",
+ "KindUint64",
+ "Read",
+ "Sample",
+ "Value",
+ "ValueKind",
+ },
+ "runtime/pprof": []string{
+ "Do",
+ "ForLabels",
+ "Label",
+ "LabelSet",
+ "Labels",
+ "Lookup",
+ "NewProfile",
+ "Profile",
+ "Profiles",
+ "SetGoroutineLabels",
+ "StartCPUProfile",
+ "StopCPUProfile",
+ "WithLabels",
+ "WriteHeapProfile",
+ },
+ "runtime/trace": []string{
+ "IsEnabled",
+ "Log",
+ "Logf",
+ "NewTask",
+ "Region",
+ "Start",
+ "StartRegion",
+ "Stop",
+ "Task",
+ "WithRegion",
+ },
+ "sort": []string{
+ "Float64Slice",
+ "Float64s",
+ "Float64sAreSorted",
+ "IntSlice",
+ "Interface",
+ "Ints",
+ "IntsAreSorted",
+ "IsSorted",
+ "Reverse",
+ "Search",
+ "SearchFloat64s",
+ "SearchInts",
+ "SearchStrings",
+ "Slice",
+ "SliceIsSorted",
+ "SliceStable",
+ "Sort",
+ "Stable",
+ "StringSlice",
+ "Strings",
+ "StringsAreSorted",
+ },
+ "strconv": []string{
+ "AppendBool",
+ "AppendFloat",
+ "AppendInt",
+ "AppendQuote",
+ "AppendQuoteRune",
+ "AppendQuoteRuneToASCII",
+ "AppendQuoteRuneToGraphic",
+ "AppendQuoteToASCII",
+ "AppendQuoteToGraphic",
+ "AppendUint",
+ "Atoi",
+ "CanBackquote",
+ "ErrRange",
+ "ErrSyntax",
+ "FormatBool",
+ "FormatComplex",
+ "FormatFloat",
+ "FormatInt",
+ "FormatUint",
+ "IntSize",
+ "IsGraphic",
+ "IsPrint",
+ "Itoa",
+ "NumError",
+ "ParseBool",
+ "ParseComplex",
+ "ParseFloat",
+ "ParseInt",
+ "ParseUint",
+ "Quote",
+ "QuoteRune",
+ "QuoteRuneToASCII",
+ "QuoteRuneToGraphic",
+ "QuoteToASCII",
+ "QuoteToGraphic",
+ "QuotedPrefix",
+ "Unquote",
+ "UnquoteChar",
+ },
+ "strings": []string{
+ "Builder",
+ "Compare",
+ "Contains",
+ "ContainsAny",
+ "ContainsRune",
+ "Count",
+ "EqualFold",
+ "Fields",
+ "FieldsFunc",
+ "HasPrefix",
+ "HasSuffix",
+ "Index",
+ "IndexAny",
+ "IndexByte",
+ "IndexFunc",
+ "IndexRune",
+ "Join",
+ "LastIndex",
+ "LastIndexAny",
+ "LastIndexByte",
+ "LastIndexFunc",
+ "Map",
+ "NewReader",
+ "NewReplacer",
+ "Reader",
+ "Repeat",
+ "Replace",
+ "ReplaceAll",
+ "Replacer",
+ "Split",
+ "SplitAfter",
+ "SplitAfterN",
+ "SplitN",
+ "Title",
+ "ToLower",
+ "ToLowerSpecial",
+ "ToTitle",
+ "ToTitleSpecial",
+ "ToUpper",
+ "ToUpperSpecial",
+ "ToValidUTF8",
+ "Trim",
+ "TrimFunc",
+ "TrimLeft",
+ "TrimLeftFunc",
+ "TrimPrefix",
+ "TrimRight",
+ "TrimRightFunc",
+ "TrimSpace",
+ "TrimSuffix",
+ },
+ "sync": []string{
+ "Cond",
+ "Locker",
+ "Map",
+ "Mutex",
+ "NewCond",
+ "Once",
+ "Pool",
+ "RWMutex",
+ "WaitGroup",
+ },
+ "sync/atomic": []string{
+ "AddInt32",
+ "AddInt64",
+ "AddUint32",
+ "AddUint64",
+ "AddUintptr",
+ "CompareAndSwapInt32",
+ "CompareAndSwapInt64",
+ "CompareAndSwapPointer",
+ "CompareAndSwapUint32",
+ "CompareAndSwapUint64",
+ "CompareAndSwapUintptr",
+ "LoadInt32",
+ "LoadInt64",
+ "LoadPointer",
+ "LoadUint32",
+ "LoadUint64",
+ "LoadUintptr",
+ "StoreInt32",
+ "StoreInt64",
+ "StorePointer",
+ "StoreUint32",
+ "StoreUint64",
+ "StoreUintptr",
+ "SwapInt32",
+ "SwapInt64",
+ "SwapPointer",
+ "SwapUint32",
+ "SwapUint64",
+ "SwapUintptr",
+ "Value",
+ },
+ "syscall": []string{
+ "AF_ALG",
+ "AF_APPLETALK",
+ "AF_ARP",
+ "AF_ASH",
+ "AF_ATM",
+ "AF_ATMPVC",
+ "AF_ATMSVC",
+ "AF_AX25",
+ "AF_BLUETOOTH",
+ "AF_BRIDGE",
+ "AF_CAIF",
+ "AF_CAN",
+ "AF_CCITT",
+ "AF_CHAOS",
+ "AF_CNT",
+ "AF_COIP",
+ "AF_DATAKIT",
+ "AF_DECnet",
+ "AF_DLI",
+ "AF_E164",
+ "AF_ECMA",
+ "AF_ECONET",
+ "AF_ENCAP",
+ "AF_FILE",
+ "AF_HYLINK",
+ "AF_IEEE80211",
+ "AF_IEEE802154",
+ "AF_IMPLINK",
+ "AF_INET",
+ "AF_INET6",
+ "AF_INET6_SDP",
+ "AF_INET_SDP",
+ "AF_IPX",
+ "AF_IRDA",
+ "AF_ISDN",
+ "AF_ISO",
+ "AF_IUCV",
+ "AF_KEY",
+ "AF_LAT",
+ "AF_LINK",
+ "AF_LLC",
+ "AF_LOCAL",
+ "AF_MAX",
+ "AF_MPLS",
+ "AF_NATM",
+ "AF_NDRV",
+ "AF_NETBEUI",
+ "AF_NETBIOS",
+ "AF_NETGRAPH",
+ "AF_NETLINK",
+ "AF_NETROM",
+ "AF_NS",
+ "AF_OROUTE",
+ "AF_OSI",
+ "AF_PACKET",
+ "AF_PHONET",
+ "AF_PPP",
+ "AF_PPPOX",
+ "AF_PUP",
+ "AF_RDS",
+ "AF_RESERVED_36",
+ "AF_ROSE",
+ "AF_ROUTE",
+ "AF_RXRPC",
+ "AF_SCLUSTER",
+ "AF_SECURITY",
+ "AF_SIP",
+ "AF_SLOW",
+ "AF_SNA",
+ "AF_SYSTEM",
+ "AF_TIPC",
+ "AF_UNIX",
+ "AF_UNSPEC",
+ "AF_VENDOR00",
+ "AF_VENDOR01",
+ "AF_VENDOR02",
+ "AF_VENDOR03",
+ "AF_VENDOR04",
+ "AF_VENDOR05",
+ "AF_VENDOR06",
+ "AF_VENDOR07",
+ "AF_VENDOR08",
+ "AF_VENDOR09",
+ "AF_VENDOR10",
+ "AF_VENDOR11",
+ "AF_VENDOR12",
+ "AF_VENDOR13",
+ "AF_VENDOR14",
+ "AF_VENDOR15",
+ "AF_VENDOR16",
+ "AF_VENDOR17",
+ "AF_VENDOR18",
+ "AF_VENDOR19",
+ "AF_VENDOR20",
+ "AF_VENDOR21",
+ "AF_VENDOR22",
+ "AF_VENDOR23",
+ "AF_VENDOR24",
+ "AF_VENDOR25",
+ "AF_VENDOR26",
+ "AF_VENDOR27",
+ "AF_VENDOR28",
+ "AF_VENDOR29",
+ "AF_VENDOR30",
+ "AF_VENDOR31",
+ "AF_VENDOR32",
+ "AF_VENDOR33",
+ "AF_VENDOR34",
+ "AF_VENDOR35",
+ "AF_VENDOR36",
+ "AF_VENDOR37",
+ "AF_VENDOR38",
+ "AF_VENDOR39",
+ "AF_VENDOR40",
+ "AF_VENDOR41",
+ "AF_VENDOR42",
+ "AF_VENDOR43",
+ "AF_VENDOR44",
+ "AF_VENDOR45",
+ "AF_VENDOR46",
+ "AF_VENDOR47",
+ "AF_WANPIPE",
+ "AF_X25",
+ "AI_CANONNAME",
+ "AI_NUMERICHOST",
+ "AI_PASSIVE",
+ "APPLICATION_ERROR",
+ "ARPHRD_ADAPT",
+ "ARPHRD_APPLETLK",
+ "ARPHRD_ARCNET",
+ "ARPHRD_ASH",
+ "ARPHRD_ATM",
+ "ARPHRD_AX25",
+ "ARPHRD_BIF",
+ "ARPHRD_CHAOS",
+ "ARPHRD_CISCO",
+ "ARPHRD_CSLIP",
+ "ARPHRD_CSLIP6",
+ "ARPHRD_DDCMP",
+ "ARPHRD_DLCI",
+ "ARPHRD_ECONET",
+ "ARPHRD_EETHER",
+ "ARPHRD_ETHER",
+ "ARPHRD_EUI64",
+ "ARPHRD_FCAL",
+ "ARPHRD_FCFABRIC",
+ "ARPHRD_FCPL",
+ "ARPHRD_FCPP",
+ "ARPHRD_FDDI",
+ "ARPHRD_FRAD",
+ "ARPHRD_FRELAY",
+ "ARPHRD_HDLC",
+ "ARPHRD_HIPPI",
+ "ARPHRD_HWX25",
+ "ARPHRD_IEEE1394",
+ "ARPHRD_IEEE802",
+ "ARPHRD_IEEE80211",
+ "ARPHRD_IEEE80211_PRISM",
+ "ARPHRD_IEEE80211_RADIOTAP",
+ "ARPHRD_IEEE802154",
+ "ARPHRD_IEEE802154_PHY",
+ "ARPHRD_IEEE802_TR",
+ "ARPHRD_INFINIBAND",
+ "ARPHRD_IPDDP",
+ "ARPHRD_IPGRE",
+ "ARPHRD_IRDA",
+ "ARPHRD_LAPB",
+ "ARPHRD_LOCALTLK",
+ "ARPHRD_LOOPBACK",
+ "ARPHRD_METRICOM",
+ "ARPHRD_NETROM",
+ "ARPHRD_NONE",
+ "ARPHRD_PIMREG",
+ "ARPHRD_PPP",
+ "ARPHRD_PRONET",
+ "ARPHRD_RAWHDLC",
+ "ARPHRD_ROSE",
+ "ARPHRD_RSRVD",
+ "ARPHRD_SIT",
+ "ARPHRD_SKIP",
+ "ARPHRD_SLIP",
+ "ARPHRD_SLIP6",
+ "ARPHRD_STRIP",
+ "ARPHRD_TUNNEL",
+ "ARPHRD_TUNNEL6",
+ "ARPHRD_VOID",
+ "ARPHRD_X25",
+ "AUTHTYPE_CLIENT",
+ "AUTHTYPE_SERVER",
+ "Accept",
+ "Accept4",
+ "AcceptEx",
+ "Access",
+ "Acct",
+ "AddrinfoW",
+ "Adjtime",
+ "Adjtimex",
+ "AllThreadsSyscall",
+ "AllThreadsSyscall6",
+ "AttachLsf",
+ "B0",
+ "B1000000",
+ "B110",
+ "B115200",
+ "B1152000",
+ "B1200",
+ "B134",
+ "B14400",
+ "B150",
+ "B1500000",
+ "B1800",
+ "B19200",
+ "B200",
+ "B2000000",
+ "B230400",
+ "B2400",
+ "B2500000",
+ "B28800",
+ "B300",
+ "B3000000",
+ "B3500000",
+ "B38400",
+ "B4000000",
+ "B460800",
+ "B4800",
+ "B50",
+ "B500000",
+ "B57600",
+ "B576000",
+ "B600",
+ "B7200",
+ "B75",
+ "B76800",
+ "B921600",
+ "B9600",
+ "BASE_PROTOCOL",
+ "BIOCFEEDBACK",
+ "BIOCFLUSH",
+ "BIOCGBLEN",
+ "BIOCGDIRECTION",
+ "BIOCGDIRFILT",
+ "BIOCGDLT",
+ "BIOCGDLTLIST",
+ "BIOCGETBUFMODE",
+ "BIOCGETIF",
+ "BIOCGETZMAX",
+ "BIOCGFEEDBACK",
+ "BIOCGFILDROP",
+ "BIOCGHDRCMPLT",
+ "BIOCGRSIG",
+ "BIOCGRTIMEOUT",
+ "BIOCGSEESENT",
+ "BIOCGSTATS",
+ "BIOCGSTATSOLD",
+ "BIOCGTSTAMP",
+ "BIOCIMMEDIATE",
+ "BIOCLOCK",
+ "BIOCPROMISC",
+ "BIOCROTZBUF",
+ "BIOCSBLEN",
+ "BIOCSDIRECTION",
+ "BIOCSDIRFILT",
+ "BIOCSDLT",
+ "BIOCSETBUFMODE",
+ "BIOCSETF",
+ "BIOCSETFNR",
+ "BIOCSETIF",
+ "BIOCSETWF",
+ "BIOCSETZBUF",
+ "BIOCSFEEDBACK",
+ "BIOCSFILDROP",
+ "BIOCSHDRCMPLT",
+ "BIOCSRSIG",
+ "BIOCSRTIMEOUT",
+ "BIOCSSEESENT",
+ "BIOCSTCPF",
+ "BIOCSTSTAMP",
+ "BIOCSUDPF",
+ "BIOCVERSION",
+ "BPF_A",
+ "BPF_ABS",
+ "BPF_ADD",
+ "BPF_ALIGNMENT",
+ "BPF_ALIGNMENT32",
+ "BPF_ALU",
+ "BPF_AND",
+ "BPF_B",
+ "BPF_BUFMODE_BUFFER",
+ "BPF_BUFMODE_ZBUF",
+ "BPF_DFLTBUFSIZE",
+ "BPF_DIRECTION_IN",
+ "BPF_DIRECTION_OUT",
+ "BPF_DIV",
+ "BPF_H",
+ "BPF_IMM",
+ "BPF_IND",
+ "BPF_JA",
+ "BPF_JEQ",
+ "BPF_JGE",
+ "BPF_JGT",
+ "BPF_JMP",
+ "BPF_JSET",
+ "BPF_K",
+ "BPF_LD",
+ "BPF_LDX",
+ "BPF_LEN",
+ "BPF_LSH",
+ "BPF_MAJOR_VERSION",
+ "BPF_MAXBUFSIZE",
+ "BPF_MAXINSNS",
+ "BPF_MEM",
+ "BPF_MEMWORDS",
+ "BPF_MINBUFSIZE",
+ "BPF_MINOR_VERSION",
+ "BPF_MISC",
+ "BPF_MSH",
+ "BPF_MUL",
+ "BPF_NEG",
+ "BPF_OR",
+ "BPF_RELEASE",
+ "BPF_RET",
+ "BPF_RSH",
+ "BPF_ST",
+ "BPF_STX",
+ "BPF_SUB",
+ "BPF_TAX",
+ "BPF_TXA",
+ "BPF_T_BINTIME",
+ "BPF_T_BINTIME_FAST",
+ "BPF_T_BINTIME_MONOTONIC",
+ "BPF_T_BINTIME_MONOTONIC_FAST",
+ "BPF_T_FAST",
+ "BPF_T_FLAG_MASK",
+ "BPF_T_FORMAT_MASK",
+ "BPF_T_MICROTIME",
+ "BPF_T_MICROTIME_FAST",
+ "BPF_T_MICROTIME_MONOTONIC",
+ "BPF_T_MICROTIME_MONOTONIC_FAST",
+ "BPF_T_MONOTONIC",
+ "BPF_T_MONOTONIC_FAST",
+ "BPF_T_NANOTIME",
+ "BPF_T_NANOTIME_FAST",
+ "BPF_T_NANOTIME_MONOTONIC",
+ "BPF_T_NANOTIME_MONOTONIC_FAST",
+ "BPF_T_NONE",
+ "BPF_T_NORMAL",
+ "BPF_W",
+ "BPF_X",
+ "BRKINT",
+ "Bind",
+ "BindToDevice",
+ "BpfBuflen",
+ "BpfDatalink",
+ "BpfHdr",
+ "BpfHeadercmpl",
+ "BpfInsn",
+ "BpfInterface",
+ "BpfJump",
+ "BpfProgram",
+ "BpfStat",
+ "BpfStats",
+ "BpfStmt",
+ "BpfTimeout",
+ "BpfTimeval",
+ "BpfVersion",
+ "BpfZbuf",
+ "BpfZbufHeader",
+ "ByHandleFileInformation",
+ "BytePtrFromString",
+ "ByteSliceFromString",
+ "CCR0_FLUSH",
+ "CERT_CHAIN_POLICY_AUTHENTICODE",
+ "CERT_CHAIN_POLICY_AUTHENTICODE_TS",
+ "CERT_CHAIN_POLICY_BASE",
+ "CERT_CHAIN_POLICY_BASIC_CONSTRAINTS",
+ "CERT_CHAIN_POLICY_EV",
+ "CERT_CHAIN_POLICY_MICROSOFT_ROOT",
+ "CERT_CHAIN_POLICY_NT_AUTH",
+ "CERT_CHAIN_POLICY_SSL",
+ "CERT_E_CN_NO_MATCH",
+ "CERT_E_EXPIRED",
+ "CERT_E_PURPOSE",
+ "CERT_E_ROLE",
+ "CERT_E_UNTRUSTEDROOT",
+ "CERT_STORE_ADD_ALWAYS",
+ "CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG",
+ "CERT_STORE_PROV_MEMORY",
+ "CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT",
+ "CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT",
+ "CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT",
+ "CERT_TRUST_HAS_NOT_SUPPORTED_CRITICAL_EXT",
+ "CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT",
+ "CERT_TRUST_INVALID_BASIC_CONSTRAINTS",
+ "CERT_TRUST_INVALID_EXTENSION",
+ "CERT_TRUST_INVALID_NAME_CONSTRAINTS",
+ "CERT_TRUST_INVALID_POLICY_CONSTRAINTS",
+ "CERT_TRUST_IS_CYCLIC",
+ "CERT_TRUST_IS_EXPLICIT_DISTRUST",
+ "CERT_TRUST_IS_NOT_SIGNATURE_VALID",
+ "CERT_TRUST_IS_NOT_TIME_VALID",
+ "CERT_TRUST_IS_NOT_VALID_FOR_USAGE",
+ "CERT_TRUST_IS_OFFLINE_REVOCATION",
+ "CERT_TRUST_IS_REVOKED",
+ "CERT_TRUST_IS_UNTRUSTED_ROOT",
+ "CERT_TRUST_NO_ERROR",
+ "CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY",
+ "CERT_TRUST_REVOCATION_STATUS_UNKNOWN",
+ "CFLUSH",
+ "CLOCAL",
+ "CLONE_CHILD_CLEARTID",
+ "CLONE_CHILD_SETTID",
+ "CLONE_CSIGNAL",
+ "CLONE_DETACHED",
+ "CLONE_FILES",
+ "CLONE_FS",
+ "CLONE_IO",
+ "CLONE_NEWIPC",
+ "CLONE_NEWNET",
+ "CLONE_NEWNS",
+ "CLONE_NEWPID",
+ "CLONE_NEWUSER",
+ "CLONE_NEWUTS",
+ "CLONE_PARENT",
+ "CLONE_PARENT_SETTID",
+ "CLONE_PID",
+ "CLONE_PTRACE",
+ "CLONE_SETTLS",
+ "CLONE_SIGHAND",
+ "CLONE_SYSVSEM",
+ "CLONE_THREAD",
+ "CLONE_UNTRACED",
+ "CLONE_VFORK",
+ "CLONE_VM",
+ "CPUID_CFLUSH",
+ "CREAD",
+ "CREATE_ALWAYS",
+ "CREATE_NEW",
+ "CREATE_NEW_PROCESS_GROUP",
+ "CREATE_UNICODE_ENVIRONMENT",
+ "CRYPT_DEFAULT_CONTAINER_OPTIONAL",
+ "CRYPT_DELETEKEYSET",
+ "CRYPT_MACHINE_KEYSET",
+ "CRYPT_NEWKEYSET",
+ "CRYPT_SILENT",
+ "CRYPT_VERIFYCONTEXT",
+ "CS5",
+ "CS6",
+ "CS7",
+ "CS8",
+ "CSIZE",
+ "CSTART",
+ "CSTATUS",
+ "CSTOP",
+ "CSTOPB",
+ "CSUSP",
+ "CTL_MAXNAME",
+ "CTL_NET",
+ "CTL_QUERY",
+ "CTRL_BREAK_EVENT",
+ "CTRL_CLOSE_EVENT",
+ "CTRL_C_EVENT",
+ "CTRL_LOGOFF_EVENT",
+ "CTRL_SHUTDOWN_EVENT",
+ "CancelIo",
+ "CancelIoEx",
+ "CertAddCertificateContextToStore",
+ "CertChainContext",
+ "CertChainElement",
+ "CertChainPara",
+ "CertChainPolicyPara",
+ "CertChainPolicyStatus",
+ "CertCloseStore",
+ "CertContext",
+ "CertCreateCertificateContext",
+ "CertEnhKeyUsage",
+ "CertEnumCertificatesInStore",
+ "CertFreeCertificateChain",
+ "CertFreeCertificateContext",
+ "CertGetCertificateChain",
+ "CertInfo",
+ "CertOpenStore",
+ "CertOpenSystemStore",
+ "CertRevocationCrlInfo",
+ "CertRevocationInfo",
+ "CertSimpleChain",
+ "CertTrustListInfo",
+ "CertTrustStatus",
+ "CertUsageMatch",
+ "CertVerifyCertificateChainPolicy",
+ "Chdir",
+ "CheckBpfVersion",
+ "Chflags",
+ "Chmod",
+ "Chown",
+ "Chroot",
+ "Clearenv",
+ "Close",
+ "CloseHandle",
+ "CloseOnExec",
+ "Closesocket",
+ "CmsgLen",
+ "CmsgSpace",
+ "Cmsghdr",
+ "CommandLineToArgv",
+ "ComputerName",
+ "Conn",
+ "Connect",
+ "ConnectEx",
+ "ConvertSidToStringSid",
+ "ConvertStringSidToSid",
+ "CopySid",
+ "Creat",
+ "CreateDirectory",
+ "CreateFile",
+ "CreateFileMapping",
+ "CreateHardLink",
+ "CreateIoCompletionPort",
+ "CreatePipe",
+ "CreateProcess",
+ "CreateProcessAsUser",
+ "CreateSymbolicLink",
+ "CreateToolhelp32Snapshot",
+ "Credential",
+ "CryptAcquireContext",
+ "CryptGenRandom",
+ "CryptReleaseContext",
+ "DIOCBSFLUSH",
+ "DIOCOSFPFLUSH",
+ "DLL",
+ "DLLError",
+ "DLT_A429",
+ "DLT_A653_ICM",
+ "DLT_AIRONET_HEADER",
+ "DLT_AOS",
+ "DLT_APPLE_IP_OVER_IEEE1394",
+ "DLT_ARCNET",
+ "DLT_ARCNET_LINUX",
+ "DLT_ATM_CLIP",
+ "DLT_ATM_RFC1483",
+ "DLT_AURORA",
+ "DLT_AX25",
+ "DLT_AX25_KISS",
+ "DLT_BACNET_MS_TP",
+ "DLT_BLUETOOTH_HCI_H4",
+ "DLT_BLUETOOTH_HCI_H4_WITH_PHDR",
+ "DLT_CAN20B",
+ "DLT_CAN_SOCKETCAN",
+ "DLT_CHAOS",
+ "DLT_CHDLC",
+ "DLT_CISCO_IOS",
+ "DLT_C_HDLC",
+ "DLT_C_HDLC_WITH_DIR",
+ "DLT_DBUS",
+ "DLT_DECT",
+ "DLT_DOCSIS",
+ "DLT_DVB_CI",
+ "DLT_ECONET",
+ "DLT_EN10MB",
+ "DLT_EN3MB",
+ "DLT_ENC",
+ "DLT_ERF",
+ "DLT_ERF_ETH",
+ "DLT_ERF_POS",
+ "DLT_FC_2",
+ "DLT_FC_2_WITH_FRAME_DELIMS",
+ "DLT_FDDI",
+ "DLT_FLEXRAY",
+ "DLT_FRELAY",
+ "DLT_FRELAY_WITH_DIR",
+ "DLT_GCOM_SERIAL",
+ "DLT_GCOM_T1E1",
+ "DLT_GPF_F",
+ "DLT_GPF_T",
+ "DLT_GPRS_LLC",
+ "DLT_GSMTAP_ABIS",
+ "DLT_GSMTAP_UM",
+ "DLT_HDLC",
+ "DLT_HHDLC",
+ "DLT_HIPPI",
+ "DLT_IBM_SN",
+ "DLT_IBM_SP",
+ "DLT_IEEE802",
+ "DLT_IEEE802_11",
+ "DLT_IEEE802_11_RADIO",
+ "DLT_IEEE802_11_RADIO_AVS",
+ "DLT_IEEE802_15_4",
+ "DLT_IEEE802_15_4_LINUX",
+ "DLT_IEEE802_15_4_NOFCS",
+ "DLT_IEEE802_15_4_NONASK_PHY",
+ "DLT_IEEE802_16_MAC_CPS",
+ "DLT_IEEE802_16_MAC_CPS_RADIO",
+ "DLT_IPFILTER",
+ "DLT_IPMB",
+ "DLT_IPMB_LINUX",
+ "DLT_IPNET",
+ "DLT_IPOIB",
+ "DLT_IPV4",
+ "DLT_IPV6",
+ "DLT_IP_OVER_FC",
+ "DLT_JUNIPER_ATM1",
+ "DLT_JUNIPER_ATM2",
+ "DLT_JUNIPER_ATM_CEMIC",
+ "DLT_JUNIPER_CHDLC",
+ "DLT_JUNIPER_ES",
+ "DLT_JUNIPER_ETHER",
+ "DLT_JUNIPER_FIBRECHANNEL",
+ "DLT_JUNIPER_FRELAY",
+ "DLT_JUNIPER_GGSN",
+ "DLT_JUNIPER_ISM",
+ "DLT_JUNIPER_MFR",
+ "DLT_JUNIPER_MLFR",
+ "DLT_JUNIPER_MLPPP",
+ "DLT_JUNIPER_MONITOR",
+ "DLT_JUNIPER_PIC_PEER",
+ "DLT_JUNIPER_PPP",
+ "DLT_JUNIPER_PPPOE",
+ "DLT_JUNIPER_PPPOE_ATM",
+ "DLT_JUNIPER_SERVICES",
+ "DLT_JUNIPER_SRX_E2E",
+ "DLT_JUNIPER_ST",
+ "DLT_JUNIPER_VP",
+ "DLT_JUNIPER_VS",
+ "DLT_LAPB_WITH_DIR",
+ "DLT_LAPD",
+ "DLT_LIN",
+ "DLT_LINUX_EVDEV",
+ "DLT_LINUX_IRDA",
+ "DLT_LINUX_LAPD",
+ "DLT_LINUX_PPP_WITHDIRECTION",
+ "DLT_LINUX_SLL",
+ "DLT_LOOP",
+ "DLT_LTALK",
+ "DLT_MATCHING_MAX",
+ "DLT_MATCHING_MIN",
+ "DLT_MFR",
+ "DLT_MOST",
+ "DLT_MPEG_2_TS",
+ "DLT_MPLS",
+ "DLT_MTP2",
+ "DLT_MTP2_WITH_PHDR",
+ "DLT_MTP3",
+ "DLT_MUX27010",
+ "DLT_NETANALYZER",
+ "DLT_NETANALYZER_TRANSPARENT",
+ "DLT_NFC_LLCP",
+ "DLT_NFLOG",
+ "DLT_NG40",
+ "DLT_NULL",
+ "DLT_PCI_EXP",
+ "DLT_PFLOG",
+ "DLT_PFSYNC",
+ "DLT_PPI",
+ "DLT_PPP",
+ "DLT_PPP_BSDOS",
+ "DLT_PPP_ETHER",
+ "DLT_PPP_PPPD",
+ "DLT_PPP_SERIAL",
+ "DLT_PPP_WITH_DIR",
+ "DLT_PPP_WITH_DIRECTION",
+ "DLT_PRISM_HEADER",
+ "DLT_PRONET",
+ "DLT_RAIF1",
+ "DLT_RAW",
+ "DLT_RAWAF_MASK",
+ "DLT_RIO",
+ "DLT_SCCP",
+ "DLT_SITA",
+ "DLT_SLIP",
+ "DLT_SLIP_BSDOS",
+ "DLT_STANAG_5066_D_PDU",
+ "DLT_SUNATM",
+ "DLT_SYMANTEC_FIREWALL",
+ "DLT_TZSP",
+ "DLT_USB",
+ "DLT_USB_LINUX",
+ "DLT_USB_LINUX_MMAPPED",
+ "DLT_USER0",
+ "DLT_USER1",
+ "DLT_USER10",
+ "DLT_USER11",
+ "DLT_USER12",
+ "DLT_USER13",
+ "DLT_USER14",
+ "DLT_USER15",
+ "DLT_USER2",
+ "DLT_USER3",
+ "DLT_USER4",
+ "DLT_USER5",
+ "DLT_USER6",
+ "DLT_USER7",
+ "DLT_USER8",
+ "DLT_USER9",
+ "DLT_WIHART",
+ "DLT_X2E_SERIAL",
+ "DLT_X2E_XORAYA",
+ "DNSMXData",
+ "DNSPTRData",
+ "DNSRecord",
+ "DNSSRVData",
+ "DNSTXTData",
+ "DNS_INFO_NO_RECORDS",
+ "DNS_TYPE_A",
+ "DNS_TYPE_A6",
+ "DNS_TYPE_AAAA",
+ "DNS_TYPE_ADDRS",
+ "DNS_TYPE_AFSDB",
+ "DNS_TYPE_ALL",
+ "DNS_TYPE_ANY",
+ "DNS_TYPE_ATMA",
+ "DNS_TYPE_AXFR",
+ "DNS_TYPE_CERT",
+ "DNS_TYPE_CNAME",
+ "DNS_TYPE_DHCID",
+ "DNS_TYPE_DNAME",
+ "DNS_TYPE_DNSKEY",
+ "DNS_TYPE_DS",
+ "DNS_TYPE_EID",
+ "DNS_TYPE_GID",
+ "DNS_TYPE_GPOS",
+ "DNS_TYPE_HINFO",
+ "DNS_TYPE_ISDN",
+ "DNS_TYPE_IXFR",
+ "DNS_TYPE_KEY",
+ "DNS_TYPE_KX",
+ "DNS_TYPE_LOC",
+ "DNS_TYPE_MAILA",
+ "DNS_TYPE_MAILB",
+ "DNS_TYPE_MB",
+ "DNS_TYPE_MD",
+ "DNS_TYPE_MF",
+ "DNS_TYPE_MG",
+ "DNS_TYPE_MINFO",
+ "DNS_TYPE_MR",
+ "DNS_TYPE_MX",
+ "DNS_TYPE_NAPTR",
+ "DNS_TYPE_NBSTAT",
+ "DNS_TYPE_NIMLOC",
+ "DNS_TYPE_NS",
+ "DNS_TYPE_NSAP",
+ "DNS_TYPE_NSAPPTR",
+ "DNS_TYPE_NSEC",
+ "DNS_TYPE_NULL",
+ "DNS_TYPE_NXT",
+ "DNS_TYPE_OPT",
+ "DNS_TYPE_PTR",
+ "DNS_TYPE_PX",
+ "DNS_TYPE_RP",
+ "DNS_TYPE_RRSIG",
+ "DNS_TYPE_RT",
+ "DNS_TYPE_SIG",
+ "DNS_TYPE_SINK",
+ "DNS_TYPE_SOA",
+ "DNS_TYPE_SRV",
+ "DNS_TYPE_TEXT",
+ "DNS_TYPE_TKEY",
+ "DNS_TYPE_TSIG",
+ "DNS_TYPE_UID",
+ "DNS_TYPE_UINFO",
+ "DNS_TYPE_UNSPEC",
+ "DNS_TYPE_WINS",
+ "DNS_TYPE_WINSR",
+ "DNS_TYPE_WKS",
+ "DNS_TYPE_X25",
+ "DT_BLK",
+ "DT_CHR",
+ "DT_DIR",
+ "DT_FIFO",
+ "DT_LNK",
+ "DT_REG",
+ "DT_SOCK",
+ "DT_UNKNOWN",
+ "DT_WHT",
+ "DUPLICATE_CLOSE_SOURCE",
+ "DUPLICATE_SAME_ACCESS",
+ "DeleteFile",
+ "DetachLsf",
+ "DeviceIoControl",
+ "Dirent",
+ "DnsNameCompare",
+ "DnsQuery",
+ "DnsRecordListFree",
+ "DnsSectionAdditional",
+ "DnsSectionAnswer",
+ "DnsSectionAuthority",
+ "DnsSectionQuestion",
+ "Dup",
+ "Dup2",
+ "Dup3",
+ "DuplicateHandle",
+ "E2BIG",
+ "EACCES",
+ "EADDRINUSE",
+ "EADDRNOTAVAIL",
+ "EADV",
+ "EAFNOSUPPORT",
+ "EAGAIN",
+ "EALREADY",
+ "EAUTH",
+ "EBADARCH",
+ "EBADE",
+ "EBADEXEC",
+ "EBADF",
+ "EBADFD",
+ "EBADMACHO",
+ "EBADMSG",
+ "EBADR",
+ "EBADRPC",
+ "EBADRQC",
+ "EBADSLT",
+ "EBFONT",
+ "EBUSY",
+ "ECANCELED",
+ "ECAPMODE",
+ "ECHILD",
+ "ECHO",
+ "ECHOCTL",
+ "ECHOE",
+ "ECHOK",
+ "ECHOKE",
+ "ECHONL",
+ "ECHOPRT",
+ "ECHRNG",
+ "ECOMM",
+ "ECONNABORTED",
+ "ECONNREFUSED",
+ "ECONNRESET",
+ "EDEADLK",
+ "EDEADLOCK",
+ "EDESTADDRREQ",
+ "EDEVERR",
+ "EDOM",
+ "EDOOFUS",
+ "EDOTDOT",
+ "EDQUOT",
+ "EEXIST",
+ "EFAULT",
+ "EFBIG",
+ "EFER_LMA",
+ "EFER_LME",
+ "EFER_NXE",
+ "EFER_SCE",
+ "EFTYPE",
+ "EHOSTDOWN",
+ "EHOSTUNREACH",
+ "EHWPOISON",
+ "EIDRM",
+ "EILSEQ",
+ "EINPROGRESS",
+ "EINTR",
+ "EINVAL",
+ "EIO",
+ "EIPSEC",
+ "EISCONN",
+ "EISDIR",
+ "EISNAM",
+ "EKEYEXPIRED",
+ "EKEYREJECTED",
+ "EKEYREVOKED",
+ "EL2HLT",
+ "EL2NSYNC",
+ "EL3HLT",
+ "EL3RST",
+ "ELAST",
+ "ELF_NGREG",
+ "ELF_PRARGSZ",
+ "ELIBACC",
+ "ELIBBAD",
+ "ELIBEXEC",
+ "ELIBMAX",
+ "ELIBSCN",
+ "ELNRNG",
+ "ELOOP",
+ "EMEDIUMTYPE",
+ "EMFILE",
+ "EMLINK",
+ "EMSGSIZE",
+ "EMT_TAGOVF",
+ "EMULTIHOP",
+ "EMUL_ENABLED",
+ "EMUL_LINUX",
+ "EMUL_LINUX32",
+ "EMUL_MAXID",
+ "EMUL_NATIVE",
+ "ENAMETOOLONG",
+ "ENAVAIL",
+ "ENDRUNDISC",
+ "ENEEDAUTH",
+ "ENETDOWN",
+ "ENETRESET",
+ "ENETUNREACH",
+ "ENFILE",
+ "ENOANO",
+ "ENOATTR",
+ "ENOBUFS",
+ "ENOCSI",
+ "ENODATA",
+ "ENODEV",
+ "ENOENT",
+ "ENOEXEC",
+ "ENOKEY",
+ "ENOLCK",
+ "ENOLINK",
+ "ENOMEDIUM",
+ "ENOMEM",
+ "ENOMSG",
+ "ENONET",
+ "ENOPKG",
+ "ENOPOLICY",
+ "ENOPROTOOPT",
+ "ENOSPC",
+ "ENOSR",
+ "ENOSTR",
+ "ENOSYS",
+ "ENOTBLK",
+ "ENOTCAPABLE",
+ "ENOTCONN",
+ "ENOTDIR",
+ "ENOTEMPTY",
+ "ENOTNAM",
+ "ENOTRECOVERABLE",
+ "ENOTSOCK",
+ "ENOTSUP",
+ "ENOTTY",
+ "ENOTUNIQ",
+ "ENXIO",
+ "EN_SW_CTL_INF",
+ "EN_SW_CTL_PREC",
+ "EN_SW_CTL_ROUND",
+ "EN_SW_DATACHAIN",
+ "EN_SW_DENORM",
+ "EN_SW_INVOP",
+ "EN_SW_OVERFLOW",
+ "EN_SW_PRECLOSS",
+ "EN_SW_UNDERFLOW",
+ "EN_SW_ZERODIV",
+ "EOPNOTSUPP",
+ "EOVERFLOW",
+ "EOWNERDEAD",
+ "EPERM",
+ "EPFNOSUPPORT",
+ "EPIPE",
+ "EPOLLERR",
+ "EPOLLET",
+ "EPOLLHUP",
+ "EPOLLIN",
+ "EPOLLMSG",
+ "EPOLLONESHOT",
+ "EPOLLOUT",
+ "EPOLLPRI",
+ "EPOLLRDBAND",
+ "EPOLLRDHUP",
+ "EPOLLRDNORM",
+ "EPOLLWRBAND",
+ "EPOLLWRNORM",
+ "EPOLL_CLOEXEC",
+ "EPOLL_CTL_ADD",
+ "EPOLL_CTL_DEL",
+ "EPOLL_CTL_MOD",
+ "EPOLL_NONBLOCK",
+ "EPROCLIM",
+ "EPROCUNAVAIL",
+ "EPROGMISMATCH",
+ "EPROGUNAVAIL",
+ "EPROTO",
+ "EPROTONOSUPPORT",
+ "EPROTOTYPE",
+ "EPWROFF",
+ "ERANGE",
+ "EREMCHG",
+ "EREMOTE",
+ "EREMOTEIO",
+ "ERESTART",
+ "ERFKILL",
+ "EROFS",
+ "ERPCMISMATCH",
+ "ERROR_ACCESS_DENIED",
+ "ERROR_ALREADY_EXISTS",
+ "ERROR_BROKEN_PIPE",
+ "ERROR_BUFFER_OVERFLOW",
+ "ERROR_DIR_NOT_EMPTY",
+ "ERROR_ENVVAR_NOT_FOUND",
+ "ERROR_FILE_EXISTS",
+ "ERROR_FILE_NOT_FOUND",
+ "ERROR_HANDLE_EOF",
+ "ERROR_INSUFFICIENT_BUFFER",
+ "ERROR_IO_PENDING",
+ "ERROR_MOD_NOT_FOUND",
+ "ERROR_MORE_DATA",
+ "ERROR_NETNAME_DELETED",
+ "ERROR_NOT_FOUND",
+ "ERROR_NO_MORE_FILES",
+ "ERROR_OPERATION_ABORTED",
+ "ERROR_PATH_NOT_FOUND",
+ "ERROR_PRIVILEGE_NOT_HELD",
+ "ERROR_PROC_NOT_FOUND",
+ "ESHLIBVERS",
+ "ESHUTDOWN",
+ "ESOCKTNOSUPPORT",
+ "ESPIPE",
+ "ESRCH",
+ "ESRMNT",
+ "ESTALE",
+ "ESTRPIPE",
+ "ETHERCAP_JUMBO_MTU",
+ "ETHERCAP_VLAN_HWTAGGING",
+ "ETHERCAP_VLAN_MTU",
+ "ETHERMIN",
+ "ETHERMTU",
+ "ETHERMTU_JUMBO",
+ "ETHERTYPE_8023",
+ "ETHERTYPE_AARP",
+ "ETHERTYPE_ACCTON",
+ "ETHERTYPE_AEONIC",
+ "ETHERTYPE_ALPHA",
+ "ETHERTYPE_AMBER",
+ "ETHERTYPE_AMOEBA",
+ "ETHERTYPE_AOE",
+ "ETHERTYPE_APOLLO",
+ "ETHERTYPE_APOLLODOMAIN",
+ "ETHERTYPE_APPLETALK",
+ "ETHERTYPE_APPLITEK",
+ "ETHERTYPE_ARGONAUT",
+ "ETHERTYPE_ARP",
+ "ETHERTYPE_AT",
+ "ETHERTYPE_ATALK",
+ "ETHERTYPE_ATOMIC",
+ "ETHERTYPE_ATT",
+ "ETHERTYPE_ATTSTANFORD",
+ "ETHERTYPE_AUTOPHON",
+ "ETHERTYPE_AXIS",
+ "ETHERTYPE_BCLOOP",
+ "ETHERTYPE_BOFL",
+ "ETHERTYPE_CABLETRON",
+ "ETHERTYPE_CHAOS",
+ "ETHERTYPE_COMDESIGN",
+ "ETHERTYPE_COMPUGRAPHIC",
+ "ETHERTYPE_COUNTERPOINT",
+ "ETHERTYPE_CRONUS",
+ "ETHERTYPE_CRONUSVLN",
+ "ETHERTYPE_DCA",
+ "ETHERTYPE_DDE",
+ "ETHERTYPE_DEBNI",
+ "ETHERTYPE_DECAM",
+ "ETHERTYPE_DECCUST",
+ "ETHERTYPE_DECDIAG",
+ "ETHERTYPE_DECDNS",
+ "ETHERTYPE_DECDTS",
+ "ETHERTYPE_DECEXPER",
+ "ETHERTYPE_DECLAST",
+ "ETHERTYPE_DECLTM",
+ "ETHERTYPE_DECMUMPS",
+ "ETHERTYPE_DECNETBIOS",
+ "ETHERTYPE_DELTACON",
+ "ETHERTYPE_DIDDLE",
+ "ETHERTYPE_DLOG1",
+ "ETHERTYPE_DLOG2",
+ "ETHERTYPE_DN",
+ "ETHERTYPE_DOGFIGHT",
+ "ETHERTYPE_DSMD",
+ "ETHERTYPE_ECMA",
+ "ETHERTYPE_ENCRYPT",
+ "ETHERTYPE_ES",
+ "ETHERTYPE_EXCELAN",
+ "ETHERTYPE_EXPERDATA",
+ "ETHERTYPE_FLIP",
+ "ETHERTYPE_FLOWCONTROL",
+ "ETHERTYPE_FRARP",
+ "ETHERTYPE_GENDYN",
+ "ETHERTYPE_HAYES",
+ "ETHERTYPE_HIPPI_FP",
+ "ETHERTYPE_HITACHI",
+ "ETHERTYPE_HP",
+ "ETHERTYPE_IEEEPUP",
+ "ETHERTYPE_IEEEPUPAT",
+ "ETHERTYPE_IMLBL",
+ "ETHERTYPE_IMLBLDIAG",
+ "ETHERTYPE_IP",
+ "ETHERTYPE_IPAS",
+ "ETHERTYPE_IPV6",
+ "ETHERTYPE_IPX",
+ "ETHERTYPE_IPXNEW",
+ "ETHERTYPE_KALPANA",
+ "ETHERTYPE_LANBRIDGE",
+ "ETHERTYPE_LANPROBE",
+ "ETHERTYPE_LAT",
+ "ETHERTYPE_LBACK",
+ "ETHERTYPE_LITTLE",
+ "ETHERTYPE_LLDP",
+ "ETHERTYPE_LOGICRAFT",
+ "ETHERTYPE_LOOPBACK",
+ "ETHERTYPE_MATRA",
+ "ETHERTYPE_MAX",
+ "ETHERTYPE_MERIT",
+ "ETHERTYPE_MICP",
+ "ETHERTYPE_MOPDL",
+ "ETHERTYPE_MOPRC",
+ "ETHERTYPE_MOTOROLA",
+ "ETHERTYPE_MPLS",
+ "ETHERTYPE_MPLS_MCAST",
+ "ETHERTYPE_MUMPS",
+ "ETHERTYPE_NBPCC",
+ "ETHERTYPE_NBPCLAIM",
+ "ETHERTYPE_NBPCLREQ",
+ "ETHERTYPE_NBPCLRSP",
+ "ETHERTYPE_NBPCREQ",
+ "ETHERTYPE_NBPCRSP",
+ "ETHERTYPE_NBPDG",
+ "ETHERTYPE_NBPDGB",
+ "ETHERTYPE_NBPDLTE",
+ "ETHERTYPE_NBPRAR",
+ "ETHERTYPE_NBPRAS",
+ "ETHERTYPE_NBPRST",
+ "ETHERTYPE_NBPSCD",
+ "ETHERTYPE_NBPVCD",
+ "ETHERTYPE_NBS",
+ "ETHERTYPE_NCD",
+ "ETHERTYPE_NESTAR",
+ "ETHERTYPE_NETBEUI",
+ "ETHERTYPE_NOVELL",
+ "ETHERTYPE_NS",
+ "ETHERTYPE_NSAT",
+ "ETHERTYPE_NSCOMPAT",
+ "ETHERTYPE_NTRAILER",
+ "ETHERTYPE_OS9",
+ "ETHERTYPE_OS9NET",
+ "ETHERTYPE_PACER",
+ "ETHERTYPE_PAE",
+ "ETHERTYPE_PCS",
+ "ETHERTYPE_PLANNING",
+ "ETHERTYPE_PPP",
+ "ETHERTYPE_PPPOE",
+ "ETHERTYPE_PPPOEDISC",
+ "ETHERTYPE_PRIMENTS",
+ "ETHERTYPE_PUP",
+ "ETHERTYPE_PUPAT",
+ "ETHERTYPE_QINQ",
+ "ETHERTYPE_RACAL",
+ "ETHERTYPE_RATIONAL",
+ "ETHERTYPE_RAWFR",
+ "ETHERTYPE_RCL",
+ "ETHERTYPE_RDP",
+ "ETHERTYPE_RETIX",
+ "ETHERTYPE_REVARP",
+ "ETHERTYPE_SCA",
+ "ETHERTYPE_SECTRA",
+ "ETHERTYPE_SECUREDATA",
+ "ETHERTYPE_SGITW",
+ "ETHERTYPE_SG_BOUNCE",
+ "ETHERTYPE_SG_DIAG",
+ "ETHERTYPE_SG_NETGAMES",
+ "ETHERTYPE_SG_RESV",
+ "ETHERTYPE_SIMNET",
+ "ETHERTYPE_SLOW",
+ "ETHERTYPE_SLOWPROTOCOLS",
+ "ETHERTYPE_SNA",
+ "ETHERTYPE_SNMP",
+ "ETHERTYPE_SONIX",
+ "ETHERTYPE_SPIDER",
+ "ETHERTYPE_SPRITE",
+ "ETHERTYPE_STP",
+ "ETHERTYPE_TALARIS",
+ "ETHERTYPE_TALARISMC",
+ "ETHERTYPE_TCPCOMP",
+ "ETHERTYPE_TCPSM",
+ "ETHERTYPE_TEC",
+ "ETHERTYPE_TIGAN",
+ "ETHERTYPE_TRAIL",
+ "ETHERTYPE_TRANSETHER",
+ "ETHERTYPE_TYMSHARE",
+ "ETHERTYPE_UBBST",
+ "ETHERTYPE_UBDEBUG",
+ "ETHERTYPE_UBDIAGLOOP",
+ "ETHERTYPE_UBDL",
+ "ETHERTYPE_UBNIU",
+ "ETHERTYPE_UBNMC",
+ "ETHERTYPE_VALID",
+ "ETHERTYPE_VARIAN",
+ "ETHERTYPE_VAXELN",
+ "ETHERTYPE_VEECO",
+ "ETHERTYPE_VEXP",
+ "ETHERTYPE_VGLAB",
+ "ETHERTYPE_VINES",
+ "ETHERTYPE_VINESECHO",
+ "ETHERTYPE_VINESLOOP",
+ "ETHERTYPE_VITAL",
+ "ETHERTYPE_VLAN",
+ "ETHERTYPE_VLTLMAN",
+ "ETHERTYPE_VPROD",
+ "ETHERTYPE_VURESERVED",
+ "ETHERTYPE_WATERLOO",
+ "ETHERTYPE_WELLFLEET",
+ "ETHERTYPE_X25",
+ "ETHERTYPE_X75",
+ "ETHERTYPE_XNSSM",
+ "ETHERTYPE_XTP",
+ "ETHER_ADDR_LEN",
+ "ETHER_ALIGN",
+ "ETHER_CRC_LEN",
+ "ETHER_CRC_POLY_BE",
+ "ETHER_CRC_POLY_LE",
+ "ETHER_HDR_LEN",
+ "ETHER_MAX_DIX_LEN",
+ "ETHER_MAX_LEN",
+ "ETHER_MAX_LEN_JUMBO",
+ "ETHER_MIN_LEN",
+ "ETHER_PPPOE_ENCAP_LEN",
+ "ETHER_TYPE_LEN",
+ "ETHER_VLAN_ENCAP_LEN",
+ "ETH_P_1588",
+ "ETH_P_8021Q",
+ "ETH_P_802_2",
+ "ETH_P_802_3",
+ "ETH_P_AARP",
+ "ETH_P_ALL",
+ "ETH_P_AOE",
+ "ETH_P_ARCNET",
+ "ETH_P_ARP",
+ "ETH_P_ATALK",
+ "ETH_P_ATMFATE",
+ "ETH_P_ATMMPOA",
+ "ETH_P_AX25",
+ "ETH_P_BPQ",
+ "ETH_P_CAIF",
+ "ETH_P_CAN",
+ "ETH_P_CONTROL",
+ "ETH_P_CUST",
+ "ETH_P_DDCMP",
+ "ETH_P_DEC",
+ "ETH_P_DIAG",
+ "ETH_P_DNA_DL",
+ "ETH_P_DNA_RC",
+ "ETH_P_DNA_RT",
+ "ETH_P_DSA",
+ "ETH_P_ECONET",
+ "ETH_P_EDSA",
+ "ETH_P_FCOE",
+ "ETH_P_FIP",
+ "ETH_P_HDLC",
+ "ETH_P_IEEE802154",
+ "ETH_P_IEEEPUP",
+ "ETH_P_IEEEPUPAT",
+ "ETH_P_IP",
+ "ETH_P_IPV6",
+ "ETH_P_IPX",
+ "ETH_P_IRDA",
+ "ETH_P_LAT",
+ "ETH_P_LINK_CTL",
+ "ETH_P_LOCALTALK",
+ "ETH_P_LOOP",
+ "ETH_P_MOBITEX",
+ "ETH_P_MPLS_MC",
+ "ETH_P_MPLS_UC",
+ "ETH_P_PAE",
+ "ETH_P_PAUSE",
+ "ETH_P_PHONET",
+ "ETH_P_PPPTALK",
+ "ETH_P_PPP_DISC",
+ "ETH_P_PPP_MP",
+ "ETH_P_PPP_SES",
+ "ETH_P_PUP",
+ "ETH_P_PUPAT",
+ "ETH_P_RARP",
+ "ETH_P_SCA",
+ "ETH_P_SLOW",
+ "ETH_P_SNAP",
+ "ETH_P_TEB",
+ "ETH_P_TIPC",
+ "ETH_P_TRAILER",
+ "ETH_P_TR_802_2",
+ "ETH_P_WAN_PPP",
+ "ETH_P_WCCP",
+ "ETH_P_X25",
+ "ETIME",
+ "ETIMEDOUT",
+ "ETOOMANYREFS",
+ "ETXTBSY",
+ "EUCLEAN",
+ "EUNATCH",
+ "EUSERS",
+ "EVFILT_AIO",
+ "EVFILT_FS",
+ "EVFILT_LIO",
+ "EVFILT_MACHPORT",
+ "EVFILT_PROC",
+ "EVFILT_READ",
+ "EVFILT_SIGNAL",
+ "EVFILT_SYSCOUNT",
+ "EVFILT_THREADMARKER",
+ "EVFILT_TIMER",
+ "EVFILT_USER",
+ "EVFILT_VM",
+ "EVFILT_VNODE",
+ "EVFILT_WRITE",
+ "EV_ADD",
+ "EV_CLEAR",
+ "EV_DELETE",
+ "EV_DISABLE",
+ "EV_DISPATCH",
+ "EV_DROP",
+ "EV_ENABLE",
+ "EV_EOF",
+ "EV_ERROR",
+ "EV_FLAG0",
+ "EV_FLAG1",
+ "EV_ONESHOT",
+ "EV_OOBAND",
+ "EV_POLL",
+ "EV_RECEIPT",
+ "EV_SYSFLAGS",
+ "EWINDOWS",
+ "EWOULDBLOCK",
+ "EXDEV",
+ "EXFULL",
+ "EXTA",
+ "EXTB",
+ "EXTPROC",
+ "Environ",
+ "EpollCreate",
+ "EpollCreate1",
+ "EpollCtl",
+ "EpollEvent",
+ "EpollWait",
+ "Errno",
+ "EscapeArg",
+ "Exchangedata",
+ "Exec",
+ "Exit",
+ "ExitProcess",
+ "FD_CLOEXEC",
+ "FD_SETSIZE",
+ "FILE_ACTION_ADDED",
+ "FILE_ACTION_MODIFIED",
+ "FILE_ACTION_REMOVED",
+ "FILE_ACTION_RENAMED_NEW_NAME",
+ "FILE_ACTION_RENAMED_OLD_NAME",
+ "FILE_APPEND_DATA",
+ "FILE_ATTRIBUTE_ARCHIVE",
+ "FILE_ATTRIBUTE_DIRECTORY",
+ "FILE_ATTRIBUTE_HIDDEN",
+ "FILE_ATTRIBUTE_NORMAL",
+ "FILE_ATTRIBUTE_READONLY",
+ "FILE_ATTRIBUTE_REPARSE_POINT",
+ "FILE_ATTRIBUTE_SYSTEM",
+ "FILE_BEGIN",
+ "FILE_CURRENT",
+ "FILE_END",
+ "FILE_FLAG_BACKUP_SEMANTICS",
+ "FILE_FLAG_OPEN_REPARSE_POINT",
+ "FILE_FLAG_OVERLAPPED",
+ "FILE_LIST_DIRECTORY",
+ "FILE_MAP_COPY",
+ "FILE_MAP_EXECUTE",
+ "FILE_MAP_READ",
+ "FILE_MAP_WRITE",
+ "FILE_NOTIFY_CHANGE_ATTRIBUTES",
+ "FILE_NOTIFY_CHANGE_CREATION",
+ "FILE_NOTIFY_CHANGE_DIR_NAME",
+ "FILE_NOTIFY_CHANGE_FILE_NAME",
+ "FILE_NOTIFY_CHANGE_LAST_ACCESS",
+ "FILE_NOTIFY_CHANGE_LAST_WRITE",
+ "FILE_NOTIFY_CHANGE_SIZE",
+ "FILE_SHARE_DELETE",
+ "FILE_SHARE_READ",
+ "FILE_SHARE_WRITE",
+ "FILE_SKIP_COMPLETION_PORT_ON_SUCCESS",
+ "FILE_SKIP_SET_EVENT_ON_HANDLE",
+ "FILE_TYPE_CHAR",
+ "FILE_TYPE_DISK",
+ "FILE_TYPE_PIPE",
+ "FILE_TYPE_REMOTE",
+ "FILE_TYPE_UNKNOWN",
+ "FILE_WRITE_ATTRIBUTES",
+ "FLUSHO",
+ "FORMAT_MESSAGE_ALLOCATE_BUFFER",
+ "FORMAT_MESSAGE_ARGUMENT_ARRAY",
+ "FORMAT_MESSAGE_FROM_HMODULE",
+ "FORMAT_MESSAGE_FROM_STRING",
+ "FORMAT_MESSAGE_FROM_SYSTEM",
+ "FORMAT_MESSAGE_IGNORE_INSERTS",
+ "FORMAT_MESSAGE_MAX_WIDTH_MASK",
+ "FSCTL_GET_REPARSE_POINT",
+ "F_ADDFILESIGS",
+ "F_ADDSIGS",
+ "F_ALLOCATEALL",
+ "F_ALLOCATECONTIG",
+ "F_CANCEL",
+ "F_CHKCLEAN",
+ "F_CLOSEM",
+ "F_DUP2FD",
+ "F_DUP2FD_CLOEXEC",
+ "F_DUPFD",
+ "F_DUPFD_CLOEXEC",
+ "F_EXLCK",
+ "F_FLUSH_DATA",
+ "F_FREEZE_FS",
+ "F_FSCTL",
+ "F_FSDIRMASK",
+ "F_FSIN",
+ "F_FSINOUT",
+ "F_FSOUT",
+ "F_FSPRIV",
+ "F_FSVOID",
+ "F_FULLFSYNC",
+ "F_GETFD",
+ "F_GETFL",
+ "F_GETLEASE",
+ "F_GETLK",
+ "F_GETLK64",
+ "F_GETLKPID",
+ "F_GETNOSIGPIPE",
+ "F_GETOWN",
+ "F_GETOWN_EX",
+ "F_GETPATH",
+ "F_GETPATH_MTMINFO",
+ "F_GETPIPE_SZ",
+ "F_GETPROTECTIONCLASS",
+ "F_GETSIG",
+ "F_GLOBAL_NOCACHE",
+ "F_LOCK",
+ "F_LOG2PHYS",
+ "F_LOG2PHYS_EXT",
+ "F_MARKDEPENDENCY",
+ "F_MAXFD",
+ "F_NOCACHE",
+ "F_NODIRECT",
+ "F_NOTIFY",
+ "F_OGETLK",
+ "F_OK",
+ "F_OSETLK",
+ "F_OSETLKW",
+ "F_PARAM_MASK",
+ "F_PARAM_MAX",
+ "F_PATHPKG_CHECK",
+ "F_PEOFPOSMODE",
+ "F_PREALLOCATE",
+ "F_RDADVISE",
+ "F_RDAHEAD",
+ "F_RDLCK",
+ "F_READAHEAD",
+ "F_READBOOTSTRAP",
+ "F_SETBACKINGSTORE",
+ "F_SETFD",
+ "F_SETFL",
+ "F_SETLEASE",
+ "F_SETLK",
+ "F_SETLK64",
+ "F_SETLKW",
+ "F_SETLKW64",
+ "F_SETLK_REMOTE",
+ "F_SETNOSIGPIPE",
+ "F_SETOWN",
+ "F_SETOWN_EX",
+ "F_SETPIPE_SZ",
+ "F_SETPROTECTIONCLASS",
+ "F_SETSIG",
+ "F_SETSIZE",
+ "F_SHLCK",
+ "F_TEST",
+ "F_THAW_FS",
+ "F_TLOCK",
+ "F_ULOCK",
+ "F_UNLCK",
+ "F_UNLCKSYS",
+ "F_VOLPOSMODE",
+ "F_WRITEBOOTSTRAP",
+ "F_WRLCK",
+ "Faccessat",
+ "Fallocate",
+ "Fbootstraptransfer_t",
+ "Fchdir",
+ "Fchflags",
+ "Fchmod",
+ "Fchmodat",
+ "Fchown",
+ "Fchownat",
+ "FcntlFlock",
+ "FdSet",
+ "Fdatasync",
+ "FileNotifyInformation",
+ "Filetime",
+ "FindClose",
+ "FindFirstFile",
+ "FindNextFile",
+ "Flock",
+ "Flock_t",
+ "FlushBpf",
+ "FlushFileBuffers",
+ "FlushViewOfFile",
+ "ForkExec",
+ "ForkLock",
+ "FormatMessage",
+ "Fpathconf",
+ "FreeAddrInfoW",
+ "FreeEnvironmentStrings",
+ "FreeLibrary",
+ "Fsid",
+ "Fstat",
+ "Fstatat",
+ "Fstatfs",
+ "Fstore_t",
+ "Fsync",
+ "Ftruncate",
+ "FullPath",
+ "Futimes",
+ "Futimesat",
+ "GENERIC_ALL",
+ "GENERIC_EXECUTE",
+ "GENERIC_READ",
+ "GENERIC_WRITE",
+ "GUID",
+ "GetAcceptExSockaddrs",
+ "GetAdaptersInfo",
+ "GetAddrInfoW",
+ "GetCommandLine",
+ "GetComputerName",
+ "GetConsoleMode",
+ "GetCurrentDirectory",
+ "GetCurrentProcess",
+ "GetEnvironmentStrings",
+ "GetEnvironmentVariable",
+ "GetExitCodeProcess",
+ "GetFileAttributes",
+ "GetFileAttributesEx",
+ "GetFileExInfoStandard",
+ "GetFileExMaxInfoLevel",
+ "GetFileInformationByHandle",
+ "GetFileType",
+ "GetFullPathName",
+ "GetHostByName",
+ "GetIfEntry",
+ "GetLastError",
+ "GetLengthSid",
+ "GetLongPathName",
+ "GetProcAddress",
+ "GetProcessTimes",
+ "GetProtoByName",
+ "GetQueuedCompletionStatus",
+ "GetServByName",
+ "GetShortPathName",
+ "GetStartupInfo",
+ "GetStdHandle",
+ "GetSystemTimeAsFileTime",
+ "GetTempPath",
+ "GetTimeZoneInformation",
+ "GetTokenInformation",
+ "GetUserNameEx",
+ "GetUserProfileDirectory",
+ "GetVersion",
+ "Getcwd",
+ "Getdents",
+ "Getdirentries",
+ "Getdtablesize",
+ "Getegid",
+ "Getenv",
+ "Geteuid",
+ "Getfsstat",
+ "Getgid",
+ "Getgroups",
+ "Getpagesize",
+ "Getpeername",
+ "Getpgid",
+ "Getpgrp",
+ "Getpid",
+ "Getppid",
+ "Getpriority",
+ "Getrlimit",
+ "Getrusage",
+ "Getsid",
+ "Getsockname",
+ "Getsockopt",
+ "GetsockoptByte",
+ "GetsockoptICMPv6Filter",
+ "GetsockoptIPMreq",
+ "GetsockoptIPMreqn",
+ "GetsockoptIPv6MTUInfo",
+ "GetsockoptIPv6Mreq",
+ "GetsockoptInet4Addr",
+ "GetsockoptInt",
+ "GetsockoptUcred",
+ "Gettid",
+ "Gettimeofday",
+ "Getuid",
+ "Getwd",
+ "Getxattr",
+ "HANDLE_FLAG_INHERIT",
+ "HKEY_CLASSES_ROOT",
+ "HKEY_CURRENT_CONFIG",
+ "HKEY_CURRENT_USER",
+ "HKEY_DYN_DATA",
+ "HKEY_LOCAL_MACHINE",
+ "HKEY_PERFORMANCE_DATA",
+ "HKEY_USERS",
+ "HUPCL",
+ "Handle",
+ "Hostent",
+ "ICANON",
+ "ICMP6_FILTER",
+ "ICMPV6_FILTER",
+ "ICMPv6Filter",
+ "ICRNL",
+ "IEXTEN",
+ "IFAN_ARRIVAL",
+ "IFAN_DEPARTURE",
+ "IFA_ADDRESS",
+ "IFA_ANYCAST",
+ "IFA_BROADCAST",
+ "IFA_CACHEINFO",
+ "IFA_F_DADFAILED",
+ "IFA_F_DEPRECATED",
+ "IFA_F_HOMEADDRESS",
+ "IFA_F_NODAD",
+ "IFA_F_OPTIMISTIC",
+ "IFA_F_PERMANENT",
+ "IFA_F_SECONDARY",
+ "IFA_F_TEMPORARY",
+ "IFA_F_TENTATIVE",
+ "IFA_LABEL",
+ "IFA_LOCAL",
+ "IFA_MAX",
+ "IFA_MULTICAST",
+ "IFA_ROUTE",
+ "IFA_UNSPEC",
+ "IFF_ALLMULTI",
+ "IFF_ALTPHYS",
+ "IFF_AUTOMEDIA",
+ "IFF_BROADCAST",
+ "IFF_CANTCHANGE",
+ "IFF_CANTCONFIG",
+ "IFF_DEBUG",
+ "IFF_DRV_OACTIVE",
+ "IFF_DRV_RUNNING",
+ "IFF_DYING",
+ "IFF_DYNAMIC",
+ "IFF_LINK0",
+ "IFF_LINK1",
+ "IFF_LINK2",
+ "IFF_LOOPBACK",
+ "IFF_MASTER",
+ "IFF_MONITOR",
+ "IFF_MULTICAST",
+ "IFF_NOARP",
+ "IFF_NOTRAILERS",
+ "IFF_NO_PI",
+ "IFF_OACTIVE",
+ "IFF_ONE_QUEUE",
+ "IFF_POINTOPOINT",
+ "IFF_POINTTOPOINT",
+ "IFF_PORTSEL",
+ "IFF_PPROMISC",
+ "IFF_PROMISC",
+ "IFF_RENAMING",
+ "IFF_RUNNING",
+ "IFF_SIMPLEX",
+ "IFF_SLAVE",
+ "IFF_SMART",
+ "IFF_STATICARP",
+ "IFF_TAP",
+ "IFF_TUN",
+ "IFF_TUN_EXCL",
+ "IFF_UP",
+ "IFF_VNET_HDR",
+ "IFLA_ADDRESS",
+ "IFLA_BROADCAST",
+ "IFLA_COST",
+ "IFLA_IFALIAS",
+ "IFLA_IFNAME",
+ "IFLA_LINK",
+ "IFLA_LINKINFO",
+ "IFLA_LINKMODE",
+ "IFLA_MAP",
+ "IFLA_MASTER",
+ "IFLA_MAX",
+ "IFLA_MTU",
+ "IFLA_NET_NS_PID",
+ "IFLA_OPERSTATE",
+ "IFLA_PRIORITY",
+ "IFLA_PROTINFO",
+ "IFLA_QDISC",
+ "IFLA_STATS",
+ "IFLA_TXQLEN",
+ "IFLA_UNSPEC",
+ "IFLA_WEIGHT",
+ "IFLA_WIRELESS",
+ "IFNAMSIZ",
+ "IFT_1822",
+ "IFT_A12MPPSWITCH",
+ "IFT_AAL2",
+ "IFT_AAL5",
+ "IFT_ADSL",
+ "IFT_AFLANE8023",
+ "IFT_AFLANE8025",
+ "IFT_ARAP",
+ "IFT_ARCNET",
+ "IFT_ARCNETPLUS",
+ "IFT_ASYNC",
+ "IFT_ATM",
+ "IFT_ATMDXI",
+ "IFT_ATMFUNI",
+ "IFT_ATMIMA",
+ "IFT_ATMLOGICAL",
+ "IFT_ATMRADIO",
+ "IFT_ATMSUBINTERFACE",
+ "IFT_ATMVCIENDPT",
+ "IFT_ATMVIRTUAL",
+ "IFT_BGPPOLICYACCOUNTING",
+ "IFT_BLUETOOTH",
+ "IFT_BRIDGE",
+ "IFT_BSC",
+ "IFT_CARP",
+ "IFT_CCTEMUL",
+ "IFT_CELLULAR",
+ "IFT_CEPT",
+ "IFT_CES",
+ "IFT_CHANNEL",
+ "IFT_CNR",
+ "IFT_COFFEE",
+ "IFT_COMPOSITELINK",
+ "IFT_DCN",
+ "IFT_DIGITALPOWERLINE",
+ "IFT_DIGITALWRAPPEROVERHEADCHANNEL",
+ "IFT_DLSW",
+ "IFT_DOCSCABLEDOWNSTREAM",
+ "IFT_DOCSCABLEMACLAYER",
+ "IFT_DOCSCABLEUPSTREAM",
+ "IFT_DOCSCABLEUPSTREAMCHANNEL",
+ "IFT_DS0",
+ "IFT_DS0BUNDLE",
+ "IFT_DS1FDL",
+ "IFT_DS3",
+ "IFT_DTM",
+ "IFT_DUMMY",
+ "IFT_DVBASILN",
+ "IFT_DVBASIOUT",
+ "IFT_DVBRCCDOWNSTREAM",
+ "IFT_DVBRCCMACLAYER",
+ "IFT_DVBRCCUPSTREAM",
+ "IFT_ECONET",
+ "IFT_ENC",
+ "IFT_EON",
+ "IFT_EPLRS",
+ "IFT_ESCON",
+ "IFT_ETHER",
+ "IFT_FAITH",
+ "IFT_FAST",
+ "IFT_FASTETHER",
+ "IFT_FASTETHERFX",
+ "IFT_FDDI",
+ "IFT_FIBRECHANNEL",
+ "IFT_FRAMERELAYINTERCONNECT",
+ "IFT_FRAMERELAYMPI",
+ "IFT_FRDLCIENDPT",
+ "IFT_FRELAY",
+ "IFT_FRELAYDCE",
+ "IFT_FRF16MFRBUNDLE",
+ "IFT_FRFORWARD",
+ "IFT_G703AT2MB",
+ "IFT_G703AT64K",
+ "IFT_GIF",
+ "IFT_GIGABITETHERNET",
+ "IFT_GR303IDT",
+ "IFT_GR303RDT",
+ "IFT_H323GATEKEEPER",
+ "IFT_H323PROXY",
+ "IFT_HDH1822",
+ "IFT_HDLC",
+ "IFT_HDSL2",
+ "IFT_HIPERLAN2",
+ "IFT_HIPPI",
+ "IFT_HIPPIINTERFACE",
+ "IFT_HOSTPAD",
+ "IFT_HSSI",
+ "IFT_HY",
+ "IFT_IBM370PARCHAN",
+ "IFT_IDSL",
+ "IFT_IEEE1394",
+ "IFT_IEEE80211",
+ "IFT_IEEE80212",
+ "IFT_IEEE8023ADLAG",
+ "IFT_IFGSN",
+ "IFT_IMT",
+ "IFT_INFINIBAND",
+ "IFT_INTERLEAVE",
+ "IFT_IP",
+ "IFT_IPFORWARD",
+ "IFT_IPOVERATM",
+ "IFT_IPOVERCDLC",
+ "IFT_IPOVERCLAW",
+ "IFT_IPSWITCH",
+ "IFT_IPXIP",
+ "IFT_ISDN",
+ "IFT_ISDNBASIC",
+ "IFT_ISDNPRIMARY",
+ "IFT_ISDNS",
+ "IFT_ISDNU",
+ "IFT_ISO88022LLC",
+ "IFT_ISO88023",
+ "IFT_ISO88024",
+ "IFT_ISO88025",
+ "IFT_ISO88025CRFPINT",
+ "IFT_ISO88025DTR",
+ "IFT_ISO88025FIBER",
+ "IFT_ISO88026",
+ "IFT_ISUP",
+ "IFT_L2VLAN",
+ "IFT_L3IPVLAN",
+ "IFT_L3IPXVLAN",
+ "IFT_LAPB",
+ "IFT_LAPD",
+ "IFT_LAPF",
+ "IFT_LINEGROUP",
+ "IFT_LOCALTALK",
+ "IFT_LOOP",
+ "IFT_MEDIAMAILOVERIP",
+ "IFT_MFSIGLINK",
+ "IFT_MIOX25",
+ "IFT_MODEM",
+ "IFT_MPC",
+ "IFT_MPLS",
+ "IFT_MPLSTUNNEL",
+ "IFT_MSDSL",
+ "IFT_MVL",
+ "IFT_MYRINET",
+ "IFT_NFAS",
+ "IFT_NSIP",
+ "IFT_OPTICALCHANNEL",
+ "IFT_OPTICALTRANSPORT",
+ "IFT_OTHER",
+ "IFT_P10",
+ "IFT_P80",
+ "IFT_PARA",
+ "IFT_PDP",
+ "IFT_PFLOG",
+ "IFT_PFLOW",
+ "IFT_PFSYNC",
+ "IFT_PLC",
+ "IFT_PON155",
+ "IFT_PON622",
+ "IFT_POS",
+ "IFT_PPP",
+ "IFT_PPPMULTILINKBUNDLE",
+ "IFT_PROPATM",
+ "IFT_PROPBWAP2MP",
+ "IFT_PROPCNLS",
+ "IFT_PROPDOCSWIRELESSDOWNSTREAM",
+ "IFT_PROPDOCSWIRELESSMACLAYER",
+ "IFT_PROPDOCSWIRELESSUPSTREAM",
+ "IFT_PROPMUX",
+ "IFT_PROPVIRTUAL",
+ "IFT_PROPWIRELESSP2P",
+ "IFT_PTPSERIAL",
+ "IFT_PVC",
+ "IFT_Q2931",
+ "IFT_QLLC",
+ "IFT_RADIOMAC",
+ "IFT_RADSL",
+ "IFT_REACHDSL",
+ "IFT_RFC1483",
+ "IFT_RS232",
+ "IFT_RSRB",
+ "IFT_SDLC",
+ "IFT_SDSL",
+ "IFT_SHDSL",
+ "IFT_SIP",
+ "IFT_SIPSIG",
+ "IFT_SIPTG",
+ "IFT_SLIP",
+ "IFT_SMDSDXI",
+ "IFT_SMDSICIP",
+ "IFT_SONET",
+ "IFT_SONETOVERHEADCHANNEL",
+ "IFT_SONETPATH",
+ "IFT_SONETVT",
+ "IFT_SRP",
+ "IFT_SS7SIGLINK",
+ "IFT_STACKTOSTACK",
+ "IFT_STARLAN",
+ "IFT_STF",
+ "IFT_T1",
+ "IFT_TDLC",
+ "IFT_TELINK",
+ "IFT_TERMPAD",
+ "IFT_TR008",
+ "IFT_TRANSPHDLC",
+ "IFT_TUNNEL",
+ "IFT_ULTRA",
+ "IFT_USB",
+ "IFT_V11",
+ "IFT_V35",
+ "IFT_V36",
+ "IFT_V37",
+ "IFT_VDSL",
+ "IFT_VIRTUALIPADDRESS",
+ "IFT_VIRTUALTG",
+ "IFT_VOICEDID",
+ "IFT_VOICEEM",
+ "IFT_VOICEEMFGD",
+ "IFT_VOICEENCAP",
+ "IFT_VOICEFGDEANA",
+ "IFT_VOICEFXO",
+ "IFT_VOICEFXS",
+ "IFT_VOICEOVERATM",
+ "IFT_VOICEOVERCABLE",
+ "IFT_VOICEOVERFRAMERELAY",
+ "IFT_VOICEOVERIP",
+ "IFT_X213",
+ "IFT_X25",
+ "IFT_X25DDN",
+ "IFT_X25HUNTGROUP",
+ "IFT_X25MLP",
+ "IFT_X25PLE",
+ "IFT_XETHER",
+ "IGNBRK",
+ "IGNCR",
+ "IGNORE",
+ "IGNPAR",
+ "IMAXBEL",
+ "INFINITE",
+ "INLCR",
+ "INPCK",
+ "INVALID_FILE_ATTRIBUTES",
+ "IN_ACCESS",
+ "IN_ALL_EVENTS",
+ "IN_ATTRIB",
+ "IN_CLASSA_HOST",
+ "IN_CLASSA_MAX",
+ "IN_CLASSA_NET",
+ "IN_CLASSA_NSHIFT",
+ "IN_CLASSB_HOST",
+ "IN_CLASSB_MAX",
+ "IN_CLASSB_NET",
+ "IN_CLASSB_NSHIFT",
+ "IN_CLASSC_HOST",
+ "IN_CLASSC_NET",
+ "IN_CLASSC_NSHIFT",
+ "IN_CLASSD_HOST",
+ "IN_CLASSD_NET",
+ "IN_CLASSD_NSHIFT",
+ "IN_CLOEXEC",
+ "IN_CLOSE",
+ "IN_CLOSE_NOWRITE",
+ "IN_CLOSE_WRITE",
+ "IN_CREATE",
+ "IN_DELETE",
+ "IN_DELETE_SELF",
+ "IN_DONT_FOLLOW",
+ "IN_EXCL_UNLINK",
+ "IN_IGNORED",
+ "IN_ISDIR",
+ "IN_LINKLOCALNETNUM",
+ "IN_LOOPBACKNET",
+ "IN_MASK_ADD",
+ "IN_MODIFY",
+ "IN_MOVE",
+ "IN_MOVED_FROM",
+ "IN_MOVED_TO",
+ "IN_MOVE_SELF",
+ "IN_NONBLOCK",
+ "IN_ONESHOT",
+ "IN_ONLYDIR",
+ "IN_OPEN",
+ "IN_Q_OVERFLOW",
+ "IN_RFC3021_HOST",
+ "IN_RFC3021_MASK",
+ "IN_RFC3021_NET",
+ "IN_RFC3021_NSHIFT",
+ "IN_UNMOUNT",
+ "IOC_IN",
+ "IOC_INOUT",
+ "IOC_OUT",
+ "IOC_VENDOR",
+ "IOC_WS2",
+ "IO_REPARSE_TAG_SYMLINK",
+ "IPMreq",
+ "IPMreqn",
+ "IPPROTO_3PC",
+ "IPPROTO_ADFS",
+ "IPPROTO_AH",
+ "IPPROTO_AHIP",
+ "IPPROTO_APES",
+ "IPPROTO_ARGUS",
+ "IPPROTO_AX25",
+ "IPPROTO_BHA",
+ "IPPROTO_BLT",
+ "IPPROTO_BRSATMON",
+ "IPPROTO_CARP",
+ "IPPROTO_CFTP",
+ "IPPROTO_CHAOS",
+ "IPPROTO_CMTP",
+ "IPPROTO_COMP",
+ "IPPROTO_CPHB",
+ "IPPROTO_CPNX",
+ "IPPROTO_DCCP",
+ "IPPROTO_DDP",
+ "IPPROTO_DGP",
+ "IPPROTO_DIVERT",
+ "IPPROTO_DIVERT_INIT",
+ "IPPROTO_DIVERT_RESP",
+ "IPPROTO_DONE",
+ "IPPROTO_DSTOPTS",
+ "IPPROTO_EGP",
+ "IPPROTO_EMCON",
+ "IPPROTO_ENCAP",
+ "IPPROTO_EON",
+ "IPPROTO_ESP",
+ "IPPROTO_ETHERIP",
+ "IPPROTO_FRAGMENT",
+ "IPPROTO_GGP",
+ "IPPROTO_GMTP",
+ "IPPROTO_GRE",
+ "IPPROTO_HELLO",
+ "IPPROTO_HMP",
+ "IPPROTO_HOPOPTS",
+ "IPPROTO_ICMP",
+ "IPPROTO_ICMPV6",
+ "IPPROTO_IDP",
+ "IPPROTO_IDPR",
+ "IPPROTO_IDRP",
+ "IPPROTO_IGMP",
+ "IPPROTO_IGP",
+ "IPPROTO_IGRP",
+ "IPPROTO_IL",
+ "IPPROTO_INLSP",
+ "IPPROTO_INP",
+ "IPPROTO_IP",
+ "IPPROTO_IPCOMP",
+ "IPPROTO_IPCV",
+ "IPPROTO_IPEIP",
+ "IPPROTO_IPIP",
+ "IPPROTO_IPPC",
+ "IPPROTO_IPV4",
+ "IPPROTO_IPV6",
+ "IPPROTO_IPV6_ICMP",
+ "IPPROTO_IRTP",
+ "IPPROTO_KRYPTOLAN",
+ "IPPROTO_LARP",
+ "IPPROTO_LEAF1",
+ "IPPROTO_LEAF2",
+ "IPPROTO_MAX",
+ "IPPROTO_MAXID",
+ "IPPROTO_MEAS",
+ "IPPROTO_MH",
+ "IPPROTO_MHRP",
+ "IPPROTO_MICP",
+ "IPPROTO_MOBILE",
+ "IPPROTO_MPLS",
+ "IPPROTO_MTP",
+ "IPPROTO_MUX",
+ "IPPROTO_ND",
+ "IPPROTO_NHRP",
+ "IPPROTO_NONE",
+ "IPPROTO_NSP",
+ "IPPROTO_NVPII",
+ "IPPROTO_OLD_DIVERT",
+ "IPPROTO_OSPFIGP",
+ "IPPROTO_PFSYNC",
+ "IPPROTO_PGM",
+ "IPPROTO_PIGP",
+ "IPPROTO_PIM",
+ "IPPROTO_PRM",
+ "IPPROTO_PUP",
+ "IPPROTO_PVP",
+ "IPPROTO_RAW",
+ "IPPROTO_RCCMON",
+ "IPPROTO_RDP",
+ "IPPROTO_ROUTING",
+ "IPPROTO_RSVP",
+ "IPPROTO_RVD",
+ "IPPROTO_SATEXPAK",
+ "IPPROTO_SATMON",
+ "IPPROTO_SCCSP",
+ "IPPROTO_SCTP",
+ "IPPROTO_SDRP",
+ "IPPROTO_SEND",
+ "IPPROTO_SEP",
+ "IPPROTO_SKIP",
+ "IPPROTO_SPACER",
+ "IPPROTO_SRPC",
+ "IPPROTO_ST",
+ "IPPROTO_SVMTP",
+ "IPPROTO_SWIPE",
+ "IPPROTO_TCF",
+ "IPPROTO_TCP",
+ "IPPROTO_TLSP",
+ "IPPROTO_TP",
+ "IPPROTO_TPXX",
+ "IPPROTO_TRUNK1",
+ "IPPROTO_TRUNK2",
+ "IPPROTO_TTP",
+ "IPPROTO_UDP",
+ "IPPROTO_UDPLITE",
+ "IPPROTO_VINES",
+ "IPPROTO_VISA",
+ "IPPROTO_VMTP",
+ "IPPROTO_VRRP",
+ "IPPROTO_WBEXPAK",
+ "IPPROTO_WBMON",
+ "IPPROTO_WSN",
+ "IPPROTO_XNET",
+ "IPPROTO_XTP",
+ "IPV6_2292DSTOPTS",
+ "IPV6_2292HOPLIMIT",
+ "IPV6_2292HOPOPTS",
+ "IPV6_2292NEXTHOP",
+ "IPV6_2292PKTINFO",
+ "IPV6_2292PKTOPTIONS",
+ "IPV6_2292RTHDR",
+ "IPV6_ADDRFORM",
+ "IPV6_ADD_MEMBERSHIP",
+ "IPV6_AUTHHDR",
+ "IPV6_AUTH_LEVEL",
+ "IPV6_AUTOFLOWLABEL",
+ "IPV6_BINDANY",
+ "IPV6_BINDV6ONLY",
+ "IPV6_BOUND_IF",
+ "IPV6_CHECKSUM",
+ "IPV6_DEFAULT_MULTICAST_HOPS",
+ "IPV6_DEFAULT_MULTICAST_LOOP",
+ "IPV6_DEFHLIM",
+ "IPV6_DONTFRAG",
+ "IPV6_DROP_MEMBERSHIP",
+ "IPV6_DSTOPTS",
+ "IPV6_ESP_NETWORK_LEVEL",
+ "IPV6_ESP_TRANS_LEVEL",
+ "IPV6_FAITH",
+ "IPV6_FLOWINFO_MASK",
+ "IPV6_FLOWLABEL_MASK",
+ "IPV6_FRAGTTL",
+ "IPV6_FW_ADD",
+ "IPV6_FW_DEL",
+ "IPV6_FW_FLUSH",
+ "IPV6_FW_GET",
+ "IPV6_FW_ZERO",
+ "IPV6_HLIMDEC",
+ "IPV6_HOPLIMIT",
+ "IPV6_HOPOPTS",
+ "IPV6_IPCOMP_LEVEL",
+ "IPV6_IPSEC_POLICY",
+ "IPV6_JOIN_ANYCAST",
+ "IPV6_JOIN_GROUP",
+ "IPV6_LEAVE_ANYCAST",
+ "IPV6_LEAVE_GROUP",
+ "IPV6_MAXHLIM",
+ "IPV6_MAXOPTHDR",
+ "IPV6_MAXPACKET",
+ "IPV6_MAX_GROUP_SRC_FILTER",
+ "IPV6_MAX_MEMBERSHIPS",
+ "IPV6_MAX_SOCK_SRC_FILTER",
+ "IPV6_MIN_MEMBERSHIPS",
+ "IPV6_MMTU",
+ "IPV6_MSFILTER",
+ "IPV6_MTU",
+ "IPV6_MTU_DISCOVER",
+ "IPV6_MULTICAST_HOPS",
+ "IPV6_MULTICAST_IF",
+ "IPV6_MULTICAST_LOOP",
+ "IPV6_NEXTHOP",
+ "IPV6_OPTIONS",
+ "IPV6_PATHMTU",
+ "IPV6_PIPEX",
+ "IPV6_PKTINFO",
+ "IPV6_PMTUDISC_DO",
+ "IPV6_PMTUDISC_DONT",
+ "IPV6_PMTUDISC_PROBE",
+ "IPV6_PMTUDISC_WANT",
+ "IPV6_PORTRANGE",
+ "IPV6_PORTRANGE_DEFAULT",
+ "IPV6_PORTRANGE_HIGH",
+ "IPV6_PORTRANGE_LOW",
+ "IPV6_PREFER_TEMPADDR",
+ "IPV6_RECVDSTOPTS",
+ "IPV6_RECVDSTPORT",
+ "IPV6_RECVERR",
+ "IPV6_RECVHOPLIMIT",
+ "IPV6_RECVHOPOPTS",
+ "IPV6_RECVPATHMTU",
+ "IPV6_RECVPKTINFO",
+ "IPV6_RECVRTHDR",
+ "IPV6_RECVTCLASS",
+ "IPV6_ROUTER_ALERT",
+ "IPV6_RTABLE",
+ "IPV6_RTHDR",
+ "IPV6_RTHDRDSTOPTS",
+ "IPV6_RTHDR_LOOSE",
+ "IPV6_RTHDR_STRICT",
+ "IPV6_RTHDR_TYPE_0",
+ "IPV6_RXDSTOPTS",
+ "IPV6_RXHOPOPTS",
+ "IPV6_SOCKOPT_RESERVED1",
+ "IPV6_TCLASS",
+ "IPV6_UNICAST_HOPS",
+ "IPV6_USE_MIN_MTU",
+ "IPV6_V6ONLY",
+ "IPV6_VERSION",
+ "IPV6_VERSION_MASK",
+ "IPV6_XFRM_POLICY",
+ "IP_ADD_MEMBERSHIP",
+ "IP_ADD_SOURCE_MEMBERSHIP",
+ "IP_AUTH_LEVEL",
+ "IP_BINDANY",
+ "IP_BLOCK_SOURCE",
+ "IP_BOUND_IF",
+ "IP_DEFAULT_MULTICAST_LOOP",
+ "IP_DEFAULT_MULTICAST_TTL",
+ "IP_DF",
+ "IP_DIVERTFL",
+ "IP_DONTFRAG",
+ "IP_DROP_MEMBERSHIP",
+ "IP_DROP_SOURCE_MEMBERSHIP",
+ "IP_DUMMYNET3",
+ "IP_DUMMYNET_CONFIGURE",
+ "IP_DUMMYNET_DEL",
+ "IP_DUMMYNET_FLUSH",
+ "IP_DUMMYNET_GET",
+ "IP_EF",
+ "IP_ERRORMTU",
+ "IP_ESP_NETWORK_LEVEL",
+ "IP_ESP_TRANS_LEVEL",
+ "IP_FAITH",
+ "IP_FREEBIND",
+ "IP_FW3",
+ "IP_FW_ADD",
+ "IP_FW_DEL",
+ "IP_FW_FLUSH",
+ "IP_FW_GET",
+ "IP_FW_NAT_CFG",
+ "IP_FW_NAT_DEL",
+ "IP_FW_NAT_GET_CONFIG",
+ "IP_FW_NAT_GET_LOG",
+ "IP_FW_RESETLOG",
+ "IP_FW_TABLE_ADD",
+ "IP_FW_TABLE_DEL",
+ "IP_FW_TABLE_FLUSH",
+ "IP_FW_TABLE_GETSIZE",
+ "IP_FW_TABLE_LIST",
+ "IP_FW_ZERO",
+ "IP_HDRINCL",
+ "IP_IPCOMP_LEVEL",
+ "IP_IPSECFLOWINFO",
+ "IP_IPSEC_LOCAL_AUTH",
+ "IP_IPSEC_LOCAL_CRED",
+ "IP_IPSEC_LOCAL_ID",
+ "IP_IPSEC_POLICY",
+ "IP_IPSEC_REMOTE_AUTH",
+ "IP_IPSEC_REMOTE_CRED",
+ "IP_IPSEC_REMOTE_ID",
+ "IP_MAXPACKET",
+ "IP_MAX_GROUP_SRC_FILTER",
+ "IP_MAX_MEMBERSHIPS",
+ "IP_MAX_SOCK_MUTE_FILTER",
+ "IP_MAX_SOCK_SRC_FILTER",
+ "IP_MAX_SOURCE_FILTER",
+ "IP_MF",
+ "IP_MINFRAGSIZE",
+ "IP_MINTTL",
+ "IP_MIN_MEMBERSHIPS",
+ "IP_MSFILTER",
+ "IP_MSS",
+ "IP_MTU",
+ "IP_MTU_DISCOVER",
+ "IP_MULTICAST_IF",
+ "IP_MULTICAST_IFINDEX",
+ "IP_MULTICAST_LOOP",
+ "IP_MULTICAST_TTL",
+ "IP_MULTICAST_VIF",
+ "IP_NAT__XXX",
+ "IP_OFFMASK",
+ "IP_OLD_FW_ADD",
+ "IP_OLD_FW_DEL",
+ "IP_OLD_FW_FLUSH",
+ "IP_OLD_FW_GET",
+ "IP_OLD_FW_RESETLOG",
+ "IP_OLD_FW_ZERO",
+ "IP_ONESBCAST",
+ "IP_OPTIONS",
+ "IP_ORIGDSTADDR",
+ "IP_PASSSEC",
+ "IP_PIPEX",
+ "IP_PKTINFO",
+ "IP_PKTOPTIONS",
+ "IP_PMTUDISC",
+ "IP_PMTUDISC_DO",
+ "IP_PMTUDISC_DONT",
+ "IP_PMTUDISC_PROBE",
+ "IP_PMTUDISC_WANT",
+ "IP_PORTRANGE",
+ "IP_PORTRANGE_DEFAULT",
+ "IP_PORTRANGE_HIGH",
+ "IP_PORTRANGE_LOW",
+ "IP_RECVDSTADDR",
+ "IP_RECVDSTPORT",
+ "IP_RECVERR",
+ "IP_RECVIF",
+ "IP_RECVOPTS",
+ "IP_RECVORIGDSTADDR",
+ "IP_RECVPKTINFO",
+ "IP_RECVRETOPTS",
+ "IP_RECVRTABLE",
+ "IP_RECVTOS",
+ "IP_RECVTTL",
+ "IP_RETOPTS",
+ "IP_RF",
+ "IP_ROUTER_ALERT",
+ "IP_RSVP_OFF",
+ "IP_RSVP_ON",
+ "IP_RSVP_VIF_OFF",
+ "IP_RSVP_VIF_ON",
+ "IP_RTABLE",
+ "IP_SENDSRCADDR",
+ "IP_STRIPHDR",
+ "IP_TOS",
+ "IP_TRAFFIC_MGT_BACKGROUND",
+ "IP_TRANSPARENT",
+ "IP_TTL",
+ "IP_UNBLOCK_SOURCE",
+ "IP_XFRM_POLICY",
+ "IPv6MTUInfo",
+ "IPv6Mreq",
+ "ISIG",
+ "ISTRIP",
+ "IUCLC",
+ "IUTF8",
+ "IXANY",
+ "IXOFF",
+ "IXON",
+ "IfAddrmsg",
+ "IfAnnounceMsghdr",
+ "IfData",
+ "IfInfomsg",
+ "IfMsghdr",
+ "IfaMsghdr",
+ "IfmaMsghdr",
+ "IfmaMsghdr2",
+ "ImplementsGetwd",
+ "Inet4Pktinfo",
+ "Inet6Pktinfo",
+ "InotifyAddWatch",
+ "InotifyEvent",
+ "InotifyInit",
+ "InotifyInit1",
+ "InotifyRmWatch",
+ "InterfaceAddrMessage",
+ "InterfaceAnnounceMessage",
+ "InterfaceInfo",
+ "InterfaceMessage",
+ "InterfaceMulticastAddrMessage",
+ "InvalidHandle",
+ "Ioperm",
+ "Iopl",
+ "Iovec",
+ "IpAdapterInfo",
+ "IpAddrString",
+ "IpAddressString",
+ "IpMaskString",
+ "Issetugid",
+ "KEY_ALL_ACCESS",
+ "KEY_CREATE_LINK",
+ "KEY_CREATE_SUB_KEY",
+ "KEY_ENUMERATE_SUB_KEYS",
+ "KEY_EXECUTE",
+ "KEY_NOTIFY",
+ "KEY_QUERY_VALUE",
+ "KEY_READ",
+ "KEY_SET_VALUE",
+ "KEY_WOW64_32KEY",
+ "KEY_WOW64_64KEY",
+ "KEY_WRITE",
+ "Kevent",
+ "Kevent_t",
+ "Kill",
+ "Klogctl",
+ "Kqueue",
+ "LANG_ENGLISH",
+ "LAYERED_PROTOCOL",
+ "LCNT_OVERLOAD_FLUSH",
+ "LINUX_REBOOT_CMD_CAD_OFF",
+ "LINUX_REBOOT_CMD_CAD_ON",
+ "LINUX_REBOOT_CMD_HALT",
+ "LINUX_REBOOT_CMD_KEXEC",
+ "LINUX_REBOOT_CMD_POWER_OFF",
+ "LINUX_REBOOT_CMD_RESTART",
+ "LINUX_REBOOT_CMD_RESTART2",
+ "LINUX_REBOOT_CMD_SW_SUSPEND",
+ "LINUX_REBOOT_MAGIC1",
+ "LINUX_REBOOT_MAGIC2",
+ "LOCK_EX",
+ "LOCK_NB",
+ "LOCK_SH",
+ "LOCK_UN",
+ "LazyDLL",
+ "LazyProc",
+ "Lchown",
+ "Linger",
+ "Link",
+ "Listen",
+ "Listxattr",
+ "LoadCancelIoEx",
+ "LoadConnectEx",
+ "LoadCreateSymbolicLink",
+ "LoadDLL",
+ "LoadGetAddrInfo",
+ "LoadLibrary",
+ "LoadSetFileCompletionNotificationModes",
+ "LocalFree",
+ "Log2phys_t",
+ "LookupAccountName",
+ "LookupAccountSid",
+ "LookupSID",
+ "LsfJump",
+ "LsfSocket",
+ "LsfStmt",
+ "Lstat",
+ "MADV_AUTOSYNC",
+ "MADV_CAN_REUSE",
+ "MADV_CORE",
+ "MADV_DOFORK",
+ "MADV_DONTFORK",
+ "MADV_DONTNEED",
+ "MADV_FREE",
+ "MADV_FREE_REUSABLE",
+ "MADV_FREE_REUSE",
+ "MADV_HUGEPAGE",
+ "MADV_HWPOISON",
+ "MADV_MERGEABLE",
+ "MADV_NOCORE",
+ "MADV_NOHUGEPAGE",
+ "MADV_NORMAL",
+ "MADV_NOSYNC",
+ "MADV_PROTECT",
+ "MADV_RANDOM",
+ "MADV_REMOVE",
+ "MADV_SEQUENTIAL",
+ "MADV_SPACEAVAIL",
+ "MADV_UNMERGEABLE",
+ "MADV_WILLNEED",
+ "MADV_ZERO_WIRED_PAGES",
+ "MAP_32BIT",
+ "MAP_ALIGNED_SUPER",
+ "MAP_ALIGNMENT_16MB",
+ "MAP_ALIGNMENT_1TB",
+ "MAP_ALIGNMENT_256TB",
+ "MAP_ALIGNMENT_4GB",
+ "MAP_ALIGNMENT_64KB",
+ "MAP_ALIGNMENT_64PB",
+ "MAP_ALIGNMENT_MASK",
+ "MAP_ALIGNMENT_SHIFT",
+ "MAP_ANON",
+ "MAP_ANONYMOUS",
+ "MAP_COPY",
+ "MAP_DENYWRITE",
+ "MAP_EXECUTABLE",
+ "MAP_FILE",
+ "MAP_FIXED",
+ "MAP_FLAGMASK",
+ "MAP_GROWSDOWN",
+ "MAP_HASSEMAPHORE",
+ "MAP_HUGETLB",
+ "MAP_INHERIT",
+ "MAP_INHERIT_COPY",
+ "MAP_INHERIT_DEFAULT",
+ "MAP_INHERIT_DONATE_COPY",
+ "MAP_INHERIT_NONE",
+ "MAP_INHERIT_SHARE",
+ "MAP_JIT",
+ "MAP_LOCKED",
+ "MAP_NOCACHE",
+ "MAP_NOCORE",
+ "MAP_NOEXTEND",
+ "MAP_NONBLOCK",
+ "MAP_NORESERVE",
+ "MAP_NOSYNC",
+ "MAP_POPULATE",
+ "MAP_PREFAULT_READ",
+ "MAP_PRIVATE",
+ "MAP_RENAME",
+ "MAP_RESERVED0080",
+ "MAP_RESERVED0100",
+ "MAP_SHARED",
+ "MAP_STACK",
+ "MAP_TRYFIXED",
+ "MAP_TYPE",
+ "MAP_WIRED",
+ "MAXIMUM_REPARSE_DATA_BUFFER_SIZE",
+ "MAXLEN_IFDESCR",
+ "MAXLEN_PHYSADDR",
+ "MAX_ADAPTER_ADDRESS_LENGTH",
+ "MAX_ADAPTER_DESCRIPTION_LENGTH",
+ "MAX_ADAPTER_NAME_LENGTH",
+ "MAX_COMPUTERNAME_LENGTH",
+ "MAX_INTERFACE_NAME_LEN",
+ "MAX_LONG_PATH",
+ "MAX_PATH",
+ "MAX_PROTOCOL_CHAIN",
+ "MCL_CURRENT",
+ "MCL_FUTURE",
+ "MNT_DETACH",
+ "MNT_EXPIRE",
+ "MNT_FORCE",
+ "MSG_BCAST",
+ "MSG_CMSG_CLOEXEC",
+ "MSG_COMPAT",
+ "MSG_CONFIRM",
+ "MSG_CONTROLMBUF",
+ "MSG_CTRUNC",
+ "MSG_DONTROUTE",
+ "MSG_DONTWAIT",
+ "MSG_EOF",
+ "MSG_EOR",
+ "MSG_ERRQUEUE",
+ "MSG_FASTOPEN",
+ "MSG_FIN",
+ "MSG_FLUSH",
+ "MSG_HAVEMORE",
+ "MSG_HOLD",
+ "MSG_IOVUSRSPACE",
+ "MSG_LENUSRSPACE",
+ "MSG_MCAST",
+ "MSG_MORE",
+ "MSG_NAMEMBUF",
+ "MSG_NBIO",
+ "MSG_NEEDSA",
+ "MSG_NOSIGNAL",
+ "MSG_NOTIFICATION",
+ "MSG_OOB",
+ "MSG_PEEK",
+ "MSG_PROXY",
+ "MSG_RCVMORE",
+ "MSG_RST",
+ "MSG_SEND",
+ "MSG_SYN",
+ "MSG_TRUNC",
+ "MSG_TRYHARD",
+ "MSG_USERFLAGS",
+ "MSG_WAITALL",
+ "MSG_WAITFORONE",
+ "MSG_WAITSTREAM",
+ "MS_ACTIVE",
+ "MS_ASYNC",
+ "MS_BIND",
+ "MS_DEACTIVATE",
+ "MS_DIRSYNC",
+ "MS_INVALIDATE",
+ "MS_I_VERSION",
+ "MS_KERNMOUNT",
+ "MS_KILLPAGES",
+ "MS_MANDLOCK",
+ "MS_MGC_MSK",
+ "MS_MGC_VAL",
+ "MS_MOVE",
+ "MS_NOATIME",
+ "MS_NODEV",
+ "MS_NODIRATIME",
+ "MS_NOEXEC",
+ "MS_NOSUID",
+ "MS_NOUSER",
+ "MS_POSIXACL",
+ "MS_PRIVATE",
+ "MS_RDONLY",
+ "MS_REC",
+ "MS_RELATIME",
+ "MS_REMOUNT",
+ "MS_RMT_MASK",
+ "MS_SHARED",
+ "MS_SILENT",
+ "MS_SLAVE",
+ "MS_STRICTATIME",
+ "MS_SYNC",
+ "MS_SYNCHRONOUS",
+ "MS_UNBINDABLE",
+ "Madvise",
+ "MapViewOfFile",
+ "MaxTokenInfoClass",
+ "Mclpool",
+ "MibIfRow",
+ "Mkdir",
+ "Mkdirat",
+ "Mkfifo",
+ "Mknod",
+ "Mknodat",
+ "Mlock",
+ "Mlockall",
+ "Mmap",
+ "Mount",
+ "MoveFile",
+ "Mprotect",
+ "Msghdr",
+ "Munlock",
+ "Munlockall",
+ "Munmap",
+ "MustLoadDLL",
+ "NAME_MAX",
+ "NETLINK_ADD_MEMBERSHIP",
+ "NETLINK_AUDIT",
+ "NETLINK_BROADCAST_ERROR",
+ "NETLINK_CONNECTOR",
+ "NETLINK_DNRTMSG",
+ "NETLINK_DROP_MEMBERSHIP",
+ "NETLINK_ECRYPTFS",
+ "NETLINK_FIB_LOOKUP",
+ "NETLINK_FIREWALL",
+ "NETLINK_GENERIC",
+ "NETLINK_INET_DIAG",
+ "NETLINK_IP6_FW",
+ "NETLINK_ISCSI",
+ "NETLINK_KOBJECT_UEVENT",
+ "NETLINK_NETFILTER",
+ "NETLINK_NFLOG",
+ "NETLINK_NO_ENOBUFS",
+ "NETLINK_PKTINFO",
+ "NETLINK_RDMA",
+ "NETLINK_ROUTE",
+ "NETLINK_SCSITRANSPORT",
+ "NETLINK_SELINUX",
+ "NETLINK_UNUSED",
+ "NETLINK_USERSOCK",
+ "NETLINK_XFRM",
+ "NET_RT_DUMP",
+ "NET_RT_DUMP2",
+ "NET_RT_FLAGS",
+ "NET_RT_IFLIST",
+ "NET_RT_IFLIST2",
+ "NET_RT_IFLISTL",
+ "NET_RT_IFMALIST",
+ "NET_RT_MAXID",
+ "NET_RT_OIFLIST",
+ "NET_RT_OOIFLIST",
+ "NET_RT_STAT",
+ "NET_RT_STATS",
+ "NET_RT_TABLE",
+ "NET_RT_TRASH",
+ "NLA_ALIGNTO",
+ "NLA_F_NESTED",
+ "NLA_F_NET_BYTEORDER",
+ "NLA_HDRLEN",
+ "NLMSG_ALIGNTO",
+ "NLMSG_DONE",
+ "NLMSG_ERROR",
+ "NLMSG_HDRLEN",
+ "NLMSG_MIN_TYPE",
+ "NLMSG_NOOP",
+ "NLMSG_OVERRUN",
+ "NLM_F_ACK",
+ "NLM_F_APPEND",
+ "NLM_F_ATOMIC",
+ "NLM_F_CREATE",
+ "NLM_F_DUMP",
+ "NLM_F_ECHO",
+ "NLM_F_EXCL",
+ "NLM_F_MATCH",
+ "NLM_F_MULTI",
+ "NLM_F_REPLACE",
+ "NLM_F_REQUEST",
+ "NLM_F_ROOT",
+ "NOFLSH",
+ "NOTE_ABSOLUTE",
+ "NOTE_ATTRIB",
+ "NOTE_CHILD",
+ "NOTE_DELETE",
+ "NOTE_EOF",
+ "NOTE_EXEC",
+ "NOTE_EXIT",
+ "NOTE_EXITSTATUS",
+ "NOTE_EXTEND",
+ "NOTE_FFAND",
+ "NOTE_FFCOPY",
+ "NOTE_FFCTRLMASK",
+ "NOTE_FFLAGSMASK",
+ "NOTE_FFNOP",
+ "NOTE_FFOR",
+ "NOTE_FORK",
+ "NOTE_LINK",
+ "NOTE_LOWAT",
+ "NOTE_NONE",
+ "NOTE_NSECONDS",
+ "NOTE_PCTRLMASK",
+ "NOTE_PDATAMASK",
+ "NOTE_REAP",
+ "NOTE_RENAME",
+ "NOTE_RESOURCEEND",
+ "NOTE_REVOKE",
+ "NOTE_SECONDS",
+ "NOTE_SIGNAL",
+ "NOTE_TRACK",
+ "NOTE_TRACKERR",
+ "NOTE_TRIGGER",
+ "NOTE_TRUNCATE",
+ "NOTE_USECONDS",
+ "NOTE_VM_ERROR",
+ "NOTE_VM_PRESSURE",
+ "NOTE_VM_PRESSURE_SUDDEN_TERMINATE",
+ "NOTE_VM_PRESSURE_TERMINATE",
+ "NOTE_WRITE",
+ "NameCanonical",
+ "NameCanonicalEx",
+ "NameDisplay",
+ "NameDnsDomain",
+ "NameFullyQualifiedDN",
+ "NameSamCompatible",
+ "NameServicePrincipal",
+ "NameUniqueId",
+ "NameUnknown",
+ "NameUserPrincipal",
+ "Nanosleep",
+ "NetApiBufferFree",
+ "NetGetJoinInformation",
+ "NetSetupDomainName",
+ "NetSetupUnjoined",
+ "NetSetupUnknownStatus",
+ "NetSetupWorkgroupName",
+ "NetUserGetInfo",
+ "NetlinkMessage",
+ "NetlinkRIB",
+ "NetlinkRouteAttr",
+ "NetlinkRouteRequest",
+ "NewCallback",
+ "NewCallbackCDecl",
+ "NewLazyDLL",
+ "NlAttr",
+ "NlMsgerr",
+ "NlMsghdr",
+ "NsecToFiletime",
+ "NsecToTimespec",
+ "NsecToTimeval",
+ "Ntohs",
+ "OCRNL",
+ "OFDEL",
+ "OFILL",
+ "OFIOGETBMAP",
+ "OID_PKIX_KP_SERVER_AUTH",
+ "OID_SERVER_GATED_CRYPTO",
+ "OID_SGC_NETSCAPE",
+ "OLCUC",
+ "ONLCR",
+ "ONLRET",
+ "ONOCR",
+ "ONOEOT",
+ "OPEN_ALWAYS",
+ "OPEN_EXISTING",
+ "OPOST",
+ "O_ACCMODE",
+ "O_ALERT",
+ "O_ALT_IO",
+ "O_APPEND",
+ "O_ASYNC",
+ "O_CLOEXEC",
+ "O_CREAT",
+ "O_DIRECT",
+ "O_DIRECTORY",
+ "O_DSYNC",
+ "O_EVTONLY",
+ "O_EXCL",
+ "O_EXEC",
+ "O_EXLOCK",
+ "O_FSYNC",
+ "O_LARGEFILE",
+ "O_NDELAY",
+ "O_NOATIME",
+ "O_NOCTTY",
+ "O_NOFOLLOW",
+ "O_NONBLOCK",
+ "O_NOSIGPIPE",
+ "O_POPUP",
+ "O_RDONLY",
+ "O_RDWR",
+ "O_RSYNC",
+ "O_SHLOCK",
+ "O_SYMLINK",
+ "O_SYNC",
+ "O_TRUNC",
+ "O_TTY_INIT",
+ "O_WRONLY",
+ "Open",
+ "OpenCurrentProcessToken",
+ "OpenProcess",
+ "OpenProcessToken",
+ "Openat",
+ "Overlapped",
+ "PACKET_ADD_MEMBERSHIP",
+ "PACKET_BROADCAST",
+ "PACKET_DROP_MEMBERSHIP",
+ "PACKET_FASTROUTE",
+ "PACKET_HOST",
+ "PACKET_LOOPBACK",
+ "PACKET_MR_ALLMULTI",
+ "PACKET_MR_MULTICAST",
+ "PACKET_MR_PROMISC",
+ "PACKET_MULTICAST",
+ "PACKET_OTHERHOST",
+ "PACKET_OUTGOING",
+ "PACKET_RECV_OUTPUT",
+ "PACKET_RX_RING",
+ "PACKET_STATISTICS",
+ "PAGE_EXECUTE_READ",
+ "PAGE_EXECUTE_READWRITE",
+ "PAGE_EXECUTE_WRITECOPY",
+ "PAGE_READONLY",
+ "PAGE_READWRITE",
+ "PAGE_WRITECOPY",
+ "PARENB",
+ "PARMRK",
+ "PARODD",
+ "PENDIN",
+ "PFL_HIDDEN",
+ "PFL_MATCHES_PROTOCOL_ZERO",
+ "PFL_MULTIPLE_PROTO_ENTRIES",
+ "PFL_NETWORKDIRECT_PROVIDER",
+ "PFL_RECOMMENDED_PROTO_ENTRY",
+ "PF_FLUSH",
+ "PKCS_7_ASN_ENCODING",
+ "PMC5_PIPELINE_FLUSH",
+ "PRIO_PGRP",
+ "PRIO_PROCESS",
+ "PRIO_USER",
+ "PRI_IOFLUSH",
+ "PROCESS_QUERY_INFORMATION",
+ "PROCESS_TERMINATE",
+ "PROT_EXEC",
+ "PROT_GROWSDOWN",
+ "PROT_GROWSUP",
+ "PROT_NONE",
+ "PROT_READ",
+ "PROT_WRITE",
+ "PROV_DH_SCHANNEL",
+ "PROV_DSS",
+ "PROV_DSS_DH",
+ "PROV_EC_ECDSA_FULL",
+ "PROV_EC_ECDSA_SIG",
+ "PROV_EC_ECNRA_FULL",
+ "PROV_EC_ECNRA_SIG",
+ "PROV_FORTEZZA",
+ "PROV_INTEL_SEC",
+ "PROV_MS_EXCHANGE",
+ "PROV_REPLACE_OWF",
+ "PROV_RNG",
+ "PROV_RSA_AES",
+ "PROV_RSA_FULL",
+ "PROV_RSA_SCHANNEL",
+ "PROV_RSA_SIG",
+ "PROV_SPYRUS_LYNKS",
+ "PROV_SSL",
+ "PR_CAPBSET_DROP",
+ "PR_CAPBSET_READ",
+ "PR_CLEAR_SECCOMP_FILTER",
+ "PR_ENDIAN_BIG",
+ "PR_ENDIAN_LITTLE",
+ "PR_ENDIAN_PPC_LITTLE",
+ "PR_FPEMU_NOPRINT",
+ "PR_FPEMU_SIGFPE",
+ "PR_FP_EXC_ASYNC",
+ "PR_FP_EXC_DISABLED",
+ "PR_FP_EXC_DIV",
+ "PR_FP_EXC_INV",
+ "PR_FP_EXC_NONRECOV",
+ "PR_FP_EXC_OVF",
+ "PR_FP_EXC_PRECISE",
+ "PR_FP_EXC_RES",
+ "PR_FP_EXC_SW_ENABLE",
+ "PR_FP_EXC_UND",
+ "PR_GET_DUMPABLE",
+ "PR_GET_ENDIAN",
+ "PR_GET_FPEMU",
+ "PR_GET_FPEXC",
+ "PR_GET_KEEPCAPS",
+ "PR_GET_NAME",
+ "PR_GET_PDEATHSIG",
+ "PR_GET_SECCOMP",
+ "PR_GET_SECCOMP_FILTER",
+ "PR_GET_SECUREBITS",
+ "PR_GET_TIMERSLACK",
+ "PR_GET_TIMING",
+ "PR_GET_TSC",
+ "PR_GET_UNALIGN",
+ "PR_MCE_KILL",
+ "PR_MCE_KILL_CLEAR",
+ "PR_MCE_KILL_DEFAULT",
+ "PR_MCE_KILL_EARLY",
+ "PR_MCE_KILL_GET",
+ "PR_MCE_KILL_LATE",
+ "PR_MCE_KILL_SET",
+ "PR_SECCOMP_FILTER_EVENT",
+ "PR_SECCOMP_FILTER_SYSCALL",
+ "PR_SET_DUMPABLE",
+ "PR_SET_ENDIAN",
+ "PR_SET_FPEMU",
+ "PR_SET_FPEXC",
+ "PR_SET_KEEPCAPS",
+ "PR_SET_NAME",
+ "PR_SET_PDEATHSIG",
+ "PR_SET_PTRACER",
+ "PR_SET_SECCOMP",
+ "PR_SET_SECCOMP_FILTER",
+ "PR_SET_SECUREBITS",
+ "PR_SET_TIMERSLACK",
+ "PR_SET_TIMING",
+ "PR_SET_TSC",
+ "PR_SET_UNALIGN",
+ "PR_TASK_PERF_EVENTS_DISABLE",
+ "PR_TASK_PERF_EVENTS_ENABLE",
+ "PR_TIMING_STATISTICAL",
+ "PR_TIMING_TIMESTAMP",
+ "PR_TSC_ENABLE",
+ "PR_TSC_SIGSEGV",
+ "PR_UNALIGN_NOPRINT",
+ "PR_UNALIGN_SIGBUS",
+ "PTRACE_ARCH_PRCTL",
+ "PTRACE_ATTACH",
+ "PTRACE_CONT",
+ "PTRACE_DETACH",
+ "PTRACE_EVENT_CLONE",
+ "PTRACE_EVENT_EXEC",
+ "PTRACE_EVENT_EXIT",
+ "PTRACE_EVENT_FORK",
+ "PTRACE_EVENT_VFORK",
+ "PTRACE_EVENT_VFORK_DONE",
+ "PTRACE_GETCRUNCHREGS",
+ "PTRACE_GETEVENTMSG",
+ "PTRACE_GETFPREGS",
+ "PTRACE_GETFPXREGS",
+ "PTRACE_GETHBPREGS",
+ "PTRACE_GETREGS",
+ "PTRACE_GETREGSET",
+ "PTRACE_GETSIGINFO",
+ "PTRACE_GETVFPREGS",
+ "PTRACE_GETWMMXREGS",
+ "PTRACE_GET_THREAD_AREA",
+ "PTRACE_KILL",
+ "PTRACE_OLDSETOPTIONS",
+ "PTRACE_O_MASK",
+ "PTRACE_O_TRACECLONE",
+ "PTRACE_O_TRACEEXEC",
+ "PTRACE_O_TRACEEXIT",
+ "PTRACE_O_TRACEFORK",
+ "PTRACE_O_TRACESYSGOOD",
+ "PTRACE_O_TRACEVFORK",
+ "PTRACE_O_TRACEVFORKDONE",
+ "PTRACE_PEEKDATA",
+ "PTRACE_PEEKTEXT",
+ "PTRACE_PEEKUSR",
+ "PTRACE_POKEDATA",
+ "PTRACE_POKETEXT",
+ "PTRACE_POKEUSR",
+ "PTRACE_SETCRUNCHREGS",
+ "PTRACE_SETFPREGS",
+ "PTRACE_SETFPXREGS",
+ "PTRACE_SETHBPREGS",
+ "PTRACE_SETOPTIONS",
+ "PTRACE_SETREGS",
+ "PTRACE_SETREGSET",
+ "PTRACE_SETSIGINFO",
+ "PTRACE_SETVFPREGS",
+ "PTRACE_SETWMMXREGS",
+ "PTRACE_SET_SYSCALL",
+ "PTRACE_SET_THREAD_AREA",
+ "PTRACE_SINGLEBLOCK",
+ "PTRACE_SINGLESTEP",
+ "PTRACE_SYSCALL",
+ "PTRACE_SYSEMU",
+ "PTRACE_SYSEMU_SINGLESTEP",
+ "PTRACE_TRACEME",
+ "PT_ATTACH",
+ "PT_ATTACHEXC",
+ "PT_CONTINUE",
+ "PT_DATA_ADDR",
+ "PT_DENY_ATTACH",
+ "PT_DETACH",
+ "PT_FIRSTMACH",
+ "PT_FORCEQUOTA",
+ "PT_KILL",
+ "PT_MASK",
+ "PT_READ_D",
+ "PT_READ_I",
+ "PT_READ_U",
+ "PT_SIGEXC",
+ "PT_STEP",
+ "PT_TEXT_ADDR",
+ "PT_TEXT_END_ADDR",
+ "PT_THUPDATE",
+ "PT_TRACE_ME",
+ "PT_WRITE_D",
+ "PT_WRITE_I",
+ "PT_WRITE_U",
+ "ParseDirent",
+ "ParseNetlinkMessage",
+ "ParseNetlinkRouteAttr",
+ "ParseRoutingMessage",
+ "ParseRoutingSockaddr",
+ "ParseSocketControlMessage",
+ "ParseUnixCredentials",
+ "ParseUnixRights",
+ "PathMax",
+ "Pathconf",
+ "Pause",
+ "Pipe",
+ "Pipe2",
+ "PivotRoot",
+ "Pointer",
+ "PostQueuedCompletionStatus",
+ "Pread",
+ "Proc",
+ "ProcAttr",
+ "Process32First",
+ "Process32Next",
+ "ProcessEntry32",
+ "ProcessInformation",
+ "Protoent",
+ "PtraceAttach",
+ "PtraceCont",
+ "PtraceDetach",
+ "PtraceGetEventMsg",
+ "PtraceGetRegs",
+ "PtracePeekData",
+ "PtracePeekText",
+ "PtracePokeData",
+ "PtracePokeText",
+ "PtraceRegs",
+ "PtraceSetOptions",
+ "PtraceSetRegs",
+ "PtraceSingleStep",
+ "PtraceSyscall",
+ "Pwrite",
+ "REG_BINARY",
+ "REG_DWORD",
+ "REG_DWORD_BIG_ENDIAN",
+ "REG_DWORD_LITTLE_ENDIAN",
+ "REG_EXPAND_SZ",
+ "REG_FULL_RESOURCE_DESCRIPTOR",
+ "REG_LINK",
+ "REG_MULTI_SZ",
+ "REG_NONE",
+ "REG_QWORD",
+ "REG_QWORD_LITTLE_ENDIAN",
+ "REG_RESOURCE_LIST",
+ "REG_RESOURCE_REQUIREMENTS_LIST",
+ "REG_SZ",
+ "RLIMIT_AS",
+ "RLIMIT_CORE",
+ "RLIMIT_CPU",
+ "RLIMIT_DATA",
+ "RLIMIT_FSIZE",
+ "RLIMIT_NOFILE",
+ "RLIMIT_STACK",
+ "RLIM_INFINITY",
+ "RTAX_ADVMSS",
+ "RTAX_AUTHOR",
+ "RTAX_BRD",
+ "RTAX_CWND",
+ "RTAX_DST",
+ "RTAX_FEATURES",
+ "RTAX_FEATURE_ALLFRAG",
+ "RTAX_FEATURE_ECN",
+ "RTAX_FEATURE_SACK",
+ "RTAX_FEATURE_TIMESTAMP",
+ "RTAX_GATEWAY",
+ "RTAX_GENMASK",
+ "RTAX_HOPLIMIT",
+ "RTAX_IFA",
+ "RTAX_IFP",
+ "RTAX_INITCWND",
+ "RTAX_INITRWND",
+ "RTAX_LABEL",
+ "RTAX_LOCK",
+ "RTAX_MAX",
+ "RTAX_MTU",
+ "RTAX_NETMASK",
+ "RTAX_REORDERING",
+ "RTAX_RTO_MIN",
+ "RTAX_RTT",
+ "RTAX_RTTVAR",
+ "RTAX_SRC",
+ "RTAX_SRCMASK",
+ "RTAX_SSTHRESH",
+ "RTAX_TAG",
+ "RTAX_UNSPEC",
+ "RTAX_WINDOW",
+ "RTA_ALIGNTO",
+ "RTA_AUTHOR",
+ "RTA_BRD",
+ "RTA_CACHEINFO",
+ "RTA_DST",
+ "RTA_FLOW",
+ "RTA_GATEWAY",
+ "RTA_GENMASK",
+ "RTA_IFA",
+ "RTA_IFP",
+ "RTA_IIF",
+ "RTA_LABEL",
+ "RTA_MAX",
+ "RTA_METRICS",
+ "RTA_MULTIPATH",
+ "RTA_NETMASK",
+ "RTA_OIF",
+ "RTA_PREFSRC",
+ "RTA_PRIORITY",
+ "RTA_SRC",
+ "RTA_SRCMASK",
+ "RTA_TABLE",
+ "RTA_TAG",
+ "RTA_UNSPEC",
+ "RTCF_DIRECTSRC",
+ "RTCF_DOREDIRECT",
+ "RTCF_LOG",
+ "RTCF_MASQ",
+ "RTCF_NAT",
+ "RTCF_VALVE",
+ "RTF_ADDRCLASSMASK",
+ "RTF_ADDRCONF",
+ "RTF_ALLONLINK",
+ "RTF_ANNOUNCE",
+ "RTF_BLACKHOLE",
+ "RTF_BROADCAST",
+ "RTF_CACHE",
+ "RTF_CLONED",
+ "RTF_CLONING",
+ "RTF_CONDEMNED",
+ "RTF_DEFAULT",
+ "RTF_DELCLONE",
+ "RTF_DONE",
+ "RTF_DYNAMIC",
+ "RTF_FLOW",
+ "RTF_FMASK",
+ "RTF_GATEWAY",
+ "RTF_GWFLAG_COMPAT",
+ "RTF_HOST",
+ "RTF_IFREF",
+ "RTF_IFSCOPE",
+ "RTF_INTERFACE",
+ "RTF_IRTT",
+ "RTF_LINKRT",
+ "RTF_LLDATA",
+ "RTF_LLINFO",
+ "RTF_LOCAL",
+ "RTF_MASK",
+ "RTF_MODIFIED",
+ "RTF_MPATH",
+ "RTF_MPLS",
+ "RTF_MSS",
+ "RTF_MTU",
+ "RTF_MULTICAST",
+ "RTF_NAT",
+ "RTF_NOFORWARD",
+ "RTF_NONEXTHOP",
+ "RTF_NOPMTUDISC",
+ "RTF_PERMANENT_ARP",
+ "RTF_PINNED",
+ "RTF_POLICY",
+ "RTF_PRCLONING",
+ "RTF_PROTO1",
+ "RTF_PROTO2",
+ "RTF_PROTO3",
+ "RTF_REINSTATE",
+ "RTF_REJECT",
+ "RTF_RNH_LOCKED",
+ "RTF_SOURCE",
+ "RTF_SRC",
+ "RTF_STATIC",
+ "RTF_STICKY",
+ "RTF_THROW",
+ "RTF_TUNNEL",
+ "RTF_UP",
+ "RTF_USETRAILERS",
+ "RTF_WASCLONED",
+ "RTF_WINDOW",
+ "RTF_XRESOLVE",
+ "RTM_ADD",
+ "RTM_BASE",
+ "RTM_CHANGE",
+ "RTM_CHGADDR",
+ "RTM_DELACTION",
+ "RTM_DELADDR",
+ "RTM_DELADDRLABEL",
+ "RTM_DELETE",
+ "RTM_DELLINK",
+ "RTM_DELMADDR",
+ "RTM_DELNEIGH",
+ "RTM_DELQDISC",
+ "RTM_DELROUTE",
+ "RTM_DELRULE",
+ "RTM_DELTCLASS",
+ "RTM_DELTFILTER",
+ "RTM_DESYNC",
+ "RTM_F_CLONED",
+ "RTM_F_EQUALIZE",
+ "RTM_F_NOTIFY",
+ "RTM_F_PREFIX",
+ "RTM_GET",
+ "RTM_GET2",
+ "RTM_GETACTION",
+ "RTM_GETADDR",
+ "RTM_GETADDRLABEL",
+ "RTM_GETANYCAST",
+ "RTM_GETDCB",
+ "RTM_GETLINK",
+ "RTM_GETMULTICAST",
+ "RTM_GETNEIGH",
+ "RTM_GETNEIGHTBL",
+ "RTM_GETQDISC",
+ "RTM_GETROUTE",
+ "RTM_GETRULE",
+ "RTM_GETTCLASS",
+ "RTM_GETTFILTER",
+ "RTM_IEEE80211",
+ "RTM_IFANNOUNCE",
+ "RTM_IFINFO",
+ "RTM_IFINFO2",
+ "RTM_LLINFO_UPD",
+ "RTM_LOCK",
+ "RTM_LOSING",
+ "RTM_MAX",
+ "RTM_MAXSIZE",
+ "RTM_MISS",
+ "RTM_NEWACTION",
+ "RTM_NEWADDR",
+ "RTM_NEWADDRLABEL",
+ "RTM_NEWLINK",
+ "RTM_NEWMADDR",
+ "RTM_NEWMADDR2",
+ "RTM_NEWNDUSEROPT",
+ "RTM_NEWNEIGH",
+ "RTM_NEWNEIGHTBL",
+ "RTM_NEWPREFIX",
+ "RTM_NEWQDISC",
+ "RTM_NEWROUTE",
+ "RTM_NEWRULE",
+ "RTM_NEWTCLASS",
+ "RTM_NEWTFILTER",
+ "RTM_NR_FAMILIES",
+ "RTM_NR_MSGTYPES",
+ "RTM_OIFINFO",
+ "RTM_OLDADD",
+ "RTM_OLDDEL",
+ "RTM_OOIFINFO",
+ "RTM_REDIRECT",
+ "RTM_RESOLVE",
+ "RTM_RTTUNIT",
+ "RTM_SETDCB",
+ "RTM_SETGATE",
+ "RTM_SETLINK",
+ "RTM_SETNEIGHTBL",
+ "RTM_VERSION",
+ "RTNH_ALIGNTO",
+ "RTNH_F_DEAD",
+ "RTNH_F_ONLINK",
+ "RTNH_F_PERVASIVE",
+ "RTNLGRP_IPV4_IFADDR",
+ "RTNLGRP_IPV4_MROUTE",
+ "RTNLGRP_IPV4_ROUTE",
+ "RTNLGRP_IPV4_RULE",
+ "RTNLGRP_IPV6_IFADDR",
+ "RTNLGRP_IPV6_IFINFO",
+ "RTNLGRP_IPV6_MROUTE",
+ "RTNLGRP_IPV6_PREFIX",
+ "RTNLGRP_IPV6_ROUTE",
+ "RTNLGRP_IPV6_RULE",
+ "RTNLGRP_LINK",
+ "RTNLGRP_ND_USEROPT",
+ "RTNLGRP_NEIGH",
+ "RTNLGRP_NONE",
+ "RTNLGRP_NOTIFY",
+ "RTNLGRP_TC",
+ "RTN_ANYCAST",
+ "RTN_BLACKHOLE",
+ "RTN_BROADCAST",
+ "RTN_LOCAL",
+ "RTN_MAX",
+ "RTN_MULTICAST",
+ "RTN_NAT",
+ "RTN_PROHIBIT",
+ "RTN_THROW",
+ "RTN_UNICAST",
+ "RTN_UNREACHABLE",
+ "RTN_UNSPEC",
+ "RTN_XRESOLVE",
+ "RTPROT_BIRD",
+ "RTPROT_BOOT",
+ "RTPROT_DHCP",
+ "RTPROT_DNROUTED",
+ "RTPROT_GATED",
+ "RTPROT_KERNEL",
+ "RTPROT_MRT",
+ "RTPROT_NTK",
+ "RTPROT_RA",
+ "RTPROT_REDIRECT",
+ "RTPROT_STATIC",
+ "RTPROT_UNSPEC",
+ "RTPROT_XORP",
+ "RTPROT_ZEBRA",
+ "RTV_EXPIRE",
+ "RTV_HOPCOUNT",
+ "RTV_MTU",
+ "RTV_RPIPE",
+ "RTV_RTT",
+ "RTV_RTTVAR",
+ "RTV_SPIPE",
+ "RTV_SSTHRESH",
+ "RTV_WEIGHT",
+ "RT_CACHING_CONTEXT",
+ "RT_CLASS_DEFAULT",
+ "RT_CLASS_LOCAL",
+ "RT_CLASS_MAIN",
+ "RT_CLASS_MAX",
+ "RT_CLASS_UNSPEC",
+ "RT_DEFAULT_FIB",
+ "RT_NORTREF",
+ "RT_SCOPE_HOST",
+ "RT_SCOPE_LINK",
+ "RT_SCOPE_NOWHERE",
+ "RT_SCOPE_SITE",
+ "RT_SCOPE_UNIVERSE",
+ "RT_TABLEID_MAX",
+ "RT_TABLE_COMPAT",
+ "RT_TABLE_DEFAULT",
+ "RT_TABLE_LOCAL",
+ "RT_TABLE_MAIN",
+ "RT_TABLE_MAX",
+ "RT_TABLE_UNSPEC",
+ "RUSAGE_CHILDREN",
+ "RUSAGE_SELF",
+ "RUSAGE_THREAD",
+ "Radvisory_t",
+ "RawConn",
+ "RawSockaddr",
+ "RawSockaddrAny",
+ "RawSockaddrDatalink",
+ "RawSockaddrInet4",
+ "RawSockaddrInet6",
+ "RawSockaddrLinklayer",
+ "RawSockaddrNetlink",
+ "RawSockaddrUnix",
+ "RawSyscall",
+ "RawSyscall6",
+ "Read",
+ "ReadConsole",
+ "ReadDirectoryChanges",
+ "ReadDirent",
+ "ReadFile",
+ "Readlink",
+ "Reboot",
+ "Recvfrom",
+ "Recvmsg",
+ "RegCloseKey",
+ "RegEnumKeyEx",
+ "RegOpenKeyEx",
+ "RegQueryInfoKey",
+ "RegQueryValueEx",
+ "RemoveDirectory",
+ "Removexattr",
+ "Rename",
+ "Renameat",
+ "Revoke",
+ "Rlimit",
+ "Rmdir",
+ "RouteMessage",
+ "RouteRIB",
+ "RoutingMessage",
+ "RtAttr",
+ "RtGenmsg",
+ "RtMetrics",
+ "RtMsg",
+ "RtMsghdr",
+ "RtNexthop",
+ "Rusage",
+ "SCM_BINTIME",
+ "SCM_CREDENTIALS",
+ "SCM_CREDS",
+ "SCM_RIGHTS",
+ "SCM_TIMESTAMP",
+ "SCM_TIMESTAMPING",
+ "SCM_TIMESTAMPNS",
+ "SCM_TIMESTAMP_MONOTONIC",
+ "SHUT_RD",
+ "SHUT_RDWR",
+ "SHUT_WR",
+ "SID",
+ "SIDAndAttributes",
+ "SIGABRT",
+ "SIGALRM",
+ "SIGBUS",
+ "SIGCHLD",
+ "SIGCLD",
+ "SIGCONT",
+ "SIGEMT",
+ "SIGFPE",
+ "SIGHUP",
+ "SIGILL",
+ "SIGINFO",
+ "SIGINT",
+ "SIGIO",
+ "SIGIOT",
+ "SIGKILL",
+ "SIGLIBRT",
+ "SIGLWP",
+ "SIGPIPE",
+ "SIGPOLL",
+ "SIGPROF",
+ "SIGPWR",
+ "SIGQUIT",
+ "SIGSEGV",
+ "SIGSTKFLT",
+ "SIGSTOP",
+ "SIGSYS",
+ "SIGTERM",
+ "SIGTHR",
+ "SIGTRAP",
+ "SIGTSTP",
+ "SIGTTIN",
+ "SIGTTOU",
+ "SIGUNUSED",
+ "SIGURG",
+ "SIGUSR1",
+ "SIGUSR2",
+ "SIGVTALRM",
+ "SIGWINCH",
+ "SIGXCPU",
+ "SIGXFSZ",
+ "SIOCADDDLCI",
+ "SIOCADDMULTI",
+ "SIOCADDRT",
+ "SIOCAIFADDR",
+ "SIOCAIFGROUP",
+ "SIOCALIFADDR",
+ "SIOCARPIPLL",
+ "SIOCATMARK",
+ "SIOCAUTOADDR",
+ "SIOCAUTONETMASK",
+ "SIOCBRDGADD",
+ "SIOCBRDGADDS",
+ "SIOCBRDGARL",
+ "SIOCBRDGDADDR",
+ "SIOCBRDGDEL",
+ "SIOCBRDGDELS",
+ "SIOCBRDGFLUSH",
+ "SIOCBRDGFRL",
+ "SIOCBRDGGCACHE",
+ "SIOCBRDGGFD",
+ "SIOCBRDGGHT",
+ "SIOCBRDGGIFFLGS",
+ "SIOCBRDGGMA",
+ "SIOCBRDGGPARAM",
+ "SIOCBRDGGPRI",
+ "SIOCBRDGGRL",
+ "SIOCBRDGGSIFS",
+ "SIOCBRDGGTO",
+ "SIOCBRDGIFS",
+ "SIOCBRDGRTS",
+ "SIOCBRDGSADDR",
+ "SIOCBRDGSCACHE",
+ "SIOCBRDGSFD",
+ "SIOCBRDGSHT",
+ "SIOCBRDGSIFCOST",
+ "SIOCBRDGSIFFLGS",
+ "SIOCBRDGSIFPRIO",
+ "SIOCBRDGSMA",
+ "SIOCBRDGSPRI",
+ "SIOCBRDGSPROTO",
+ "SIOCBRDGSTO",
+ "SIOCBRDGSTXHC",
+ "SIOCDARP",
+ "SIOCDELDLCI",
+ "SIOCDELMULTI",
+ "SIOCDELRT",
+ "SIOCDEVPRIVATE",
+ "SIOCDIFADDR",
+ "SIOCDIFGROUP",
+ "SIOCDIFPHYADDR",
+ "SIOCDLIFADDR",
+ "SIOCDRARP",
+ "SIOCGARP",
+ "SIOCGDRVSPEC",
+ "SIOCGETKALIVE",
+ "SIOCGETLABEL",
+ "SIOCGETPFLOW",
+ "SIOCGETPFSYNC",
+ "SIOCGETSGCNT",
+ "SIOCGETVIFCNT",
+ "SIOCGETVLAN",
+ "SIOCGHIWAT",
+ "SIOCGIFADDR",
+ "SIOCGIFADDRPREF",
+ "SIOCGIFALIAS",
+ "SIOCGIFALTMTU",
+ "SIOCGIFASYNCMAP",
+ "SIOCGIFBOND",
+ "SIOCGIFBR",
+ "SIOCGIFBRDADDR",
+ "SIOCGIFCAP",
+ "SIOCGIFCONF",
+ "SIOCGIFCOUNT",
+ "SIOCGIFDATA",
+ "SIOCGIFDESCR",
+ "SIOCGIFDEVMTU",
+ "SIOCGIFDLT",
+ "SIOCGIFDSTADDR",
+ "SIOCGIFENCAP",
+ "SIOCGIFFIB",
+ "SIOCGIFFLAGS",
+ "SIOCGIFGATTR",
+ "SIOCGIFGENERIC",
+ "SIOCGIFGMEMB",
+ "SIOCGIFGROUP",
+ "SIOCGIFHARDMTU",
+ "SIOCGIFHWADDR",
+ "SIOCGIFINDEX",
+ "SIOCGIFKPI",
+ "SIOCGIFMAC",
+ "SIOCGIFMAP",
+ "SIOCGIFMEDIA",
+ "SIOCGIFMEM",
+ "SIOCGIFMETRIC",
+ "SIOCGIFMTU",
+ "SIOCGIFNAME",
+ "SIOCGIFNETMASK",
+ "SIOCGIFPDSTADDR",
+ "SIOCGIFPFLAGS",
+ "SIOCGIFPHYS",
+ "SIOCGIFPRIORITY",
+ "SIOCGIFPSRCADDR",
+ "SIOCGIFRDOMAIN",
+ "SIOCGIFRTLABEL",
+ "SIOCGIFSLAVE",
+ "SIOCGIFSTATUS",
+ "SIOCGIFTIMESLOT",
+ "SIOCGIFTXQLEN",
+ "SIOCGIFVLAN",
+ "SIOCGIFWAKEFLAGS",
+ "SIOCGIFXFLAGS",
+ "SIOCGLIFADDR",
+ "SIOCGLIFPHYADDR",
+ "SIOCGLIFPHYRTABLE",
+ "SIOCGLIFPHYTTL",
+ "SIOCGLINKSTR",
+ "SIOCGLOWAT",
+ "SIOCGPGRP",
+ "SIOCGPRIVATE_0",
+ "SIOCGPRIVATE_1",
+ "SIOCGRARP",
+ "SIOCGSPPPPARAMS",
+ "SIOCGSTAMP",
+ "SIOCGSTAMPNS",
+ "SIOCGVH",
+ "SIOCGVNETID",
+ "SIOCIFCREATE",
+ "SIOCIFCREATE2",
+ "SIOCIFDESTROY",
+ "SIOCIFGCLONERS",
+ "SIOCINITIFADDR",
+ "SIOCPROTOPRIVATE",
+ "SIOCRSLVMULTI",
+ "SIOCRTMSG",
+ "SIOCSARP",
+ "SIOCSDRVSPEC",
+ "SIOCSETKALIVE",
+ "SIOCSETLABEL",
+ "SIOCSETPFLOW",
+ "SIOCSETPFSYNC",
+ "SIOCSETVLAN",
+ "SIOCSHIWAT",
+ "SIOCSIFADDR",
+ "SIOCSIFADDRPREF",
+ "SIOCSIFALTMTU",
+ "SIOCSIFASYNCMAP",
+ "SIOCSIFBOND",
+ "SIOCSIFBR",
+ "SIOCSIFBRDADDR",
+ "SIOCSIFCAP",
+ "SIOCSIFDESCR",
+ "SIOCSIFDSTADDR",
+ "SIOCSIFENCAP",
+ "SIOCSIFFIB",
+ "SIOCSIFFLAGS",
+ "SIOCSIFGATTR",
+ "SIOCSIFGENERIC",
+ "SIOCSIFHWADDR",
+ "SIOCSIFHWBROADCAST",
+ "SIOCSIFKPI",
+ "SIOCSIFLINK",
+ "SIOCSIFLLADDR",
+ "SIOCSIFMAC",
+ "SIOCSIFMAP",
+ "SIOCSIFMEDIA",
+ "SIOCSIFMEM",
+ "SIOCSIFMETRIC",
+ "SIOCSIFMTU",
+ "SIOCSIFNAME",
+ "SIOCSIFNETMASK",
+ "SIOCSIFPFLAGS",
+ "SIOCSIFPHYADDR",
+ "SIOCSIFPHYS",
+ "SIOCSIFPRIORITY",
+ "SIOCSIFRDOMAIN",
+ "SIOCSIFRTLABEL",
+ "SIOCSIFRVNET",
+ "SIOCSIFSLAVE",
+ "SIOCSIFTIMESLOT",
+ "SIOCSIFTXQLEN",
+ "SIOCSIFVLAN",
+ "SIOCSIFVNET",
+ "SIOCSIFXFLAGS",
+ "SIOCSLIFPHYADDR",
+ "SIOCSLIFPHYRTABLE",
+ "SIOCSLIFPHYTTL",
+ "SIOCSLINKSTR",
+ "SIOCSLOWAT",
+ "SIOCSPGRP",
+ "SIOCSRARP",
+ "SIOCSSPPPPARAMS",
+ "SIOCSVH",
+ "SIOCSVNETID",
+ "SIOCZIFDATA",
+ "SIO_GET_EXTENSION_FUNCTION_POINTER",
+ "SIO_GET_INTERFACE_LIST",
+ "SIO_KEEPALIVE_VALS",
+ "SIO_UDP_CONNRESET",
+ "SOCK_CLOEXEC",
+ "SOCK_DCCP",
+ "SOCK_DGRAM",
+ "SOCK_FLAGS_MASK",
+ "SOCK_MAXADDRLEN",
+ "SOCK_NONBLOCK",
+ "SOCK_NOSIGPIPE",
+ "SOCK_PACKET",
+ "SOCK_RAW",
+ "SOCK_RDM",
+ "SOCK_SEQPACKET",
+ "SOCK_STREAM",
+ "SOL_AAL",
+ "SOL_ATM",
+ "SOL_DECNET",
+ "SOL_ICMPV6",
+ "SOL_IP",
+ "SOL_IPV6",
+ "SOL_IRDA",
+ "SOL_PACKET",
+ "SOL_RAW",
+ "SOL_SOCKET",
+ "SOL_TCP",
+ "SOL_X25",
+ "SOMAXCONN",
+ "SO_ACCEPTCONN",
+ "SO_ACCEPTFILTER",
+ "SO_ATTACH_FILTER",
+ "SO_BINDANY",
+ "SO_BINDTODEVICE",
+ "SO_BINTIME",
+ "SO_BROADCAST",
+ "SO_BSDCOMPAT",
+ "SO_DEBUG",
+ "SO_DETACH_FILTER",
+ "SO_DOMAIN",
+ "SO_DONTROUTE",
+ "SO_DONTTRUNC",
+ "SO_ERROR",
+ "SO_KEEPALIVE",
+ "SO_LABEL",
+ "SO_LINGER",
+ "SO_LINGER_SEC",
+ "SO_LISTENINCQLEN",
+ "SO_LISTENQLEN",
+ "SO_LISTENQLIMIT",
+ "SO_MARK",
+ "SO_NETPROC",
+ "SO_NKE",
+ "SO_NOADDRERR",
+ "SO_NOHEADER",
+ "SO_NOSIGPIPE",
+ "SO_NOTIFYCONFLICT",
+ "SO_NO_CHECK",
+ "SO_NO_DDP",
+ "SO_NO_OFFLOAD",
+ "SO_NP_EXTENSIONS",
+ "SO_NREAD",
+ "SO_NWRITE",
+ "SO_OOBINLINE",
+ "SO_OVERFLOWED",
+ "SO_PASSCRED",
+ "SO_PASSSEC",
+ "SO_PEERCRED",
+ "SO_PEERLABEL",
+ "SO_PEERNAME",
+ "SO_PEERSEC",
+ "SO_PRIORITY",
+ "SO_PROTOCOL",
+ "SO_PROTOTYPE",
+ "SO_RANDOMPORT",
+ "SO_RCVBUF",
+ "SO_RCVBUFFORCE",
+ "SO_RCVLOWAT",
+ "SO_RCVTIMEO",
+ "SO_RESTRICTIONS",
+ "SO_RESTRICT_DENYIN",
+ "SO_RESTRICT_DENYOUT",
+ "SO_RESTRICT_DENYSET",
+ "SO_REUSEADDR",
+ "SO_REUSEPORT",
+ "SO_REUSESHAREUID",
+ "SO_RTABLE",
+ "SO_RXQ_OVFL",
+ "SO_SECURITY_AUTHENTICATION",
+ "SO_SECURITY_ENCRYPTION_NETWORK",
+ "SO_SECURITY_ENCRYPTION_TRANSPORT",
+ "SO_SETFIB",
+ "SO_SNDBUF",
+ "SO_SNDBUFFORCE",
+ "SO_SNDLOWAT",
+ "SO_SNDTIMEO",
+ "SO_SPLICE",
+ "SO_TIMESTAMP",
+ "SO_TIMESTAMPING",
+ "SO_TIMESTAMPNS",
+ "SO_TIMESTAMP_MONOTONIC",
+ "SO_TYPE",
+ "SO_UPCALLCLOSEWAIT",
+ "SO_UPDATE_ACCEPT_CONTEXT",
+ "SO_UPDATE_CONNECT_CONTEXT",
+ "SO_USELOOPBACK",
+ "SO_USER_COOKIE",
+ "SO_VENDOR",
+ "SO_WANTMORE",
+ "SO_WANTOOBFLAG",
+ "SSLExtraCertChainPolicyPara",
+ "STANDARD_RIGHTS_ALL",
+ "STANDARD_RIGHTS_EXECUTE",
+ "STANDARD_RIGHTS_READ",
+ "STANDARD_RIGHTS_REQUIRED",
+ "STANDARD_RIGHTS_WRITE",
+ "STARTF_USESHOWWINDOW",
+ "STARTF_USESTDHANDLES",
+ "STD_ERROR_HANDLE",
+ "STD_INPUT_HANDLE",
+ "STD_OUTPUT_HANDLE",
+ "SUBLANG_ENGLISH_US",
+ "SW_FORCEMINIMIZE",
+ "SW_HIDE",
+ "SW_MAXIMIZE",
+ "SW_MINIMIZE",
+ "SW_NORMAL",
+ "SW_RESTORE",
+ "SW_SHOW",
+ "SW_SHOWDEFAULT",
+ "SW_SHOWMAXIMIZED",
+ "SW_SHOWMINIMIZED",
+ "SW_SHOWMINNOACTIVE",
+ "SW_SHOWNA",
+ "SW_SHOWNOACTIVATE",
+ "SW_SHOWNORMAL",
+ "SYMBOLIC_LINK_FLAG_DIRECTORY",
+ "SYNCHRONIZE",
+ "SYSCTL_VERSION",
+ "SYSCTL_VERS_0",
+ "SYSCTL_VERS_1",
+ "SYSCTL_VERS_MASK",
+ "SYS_ABORT2",
+ "SYS_ACCEPT",
+ "SYS_ACCEPT4",
+ "SYS_ACCEPT_NOCANCEL",
+ "SYS_ACCESS",
+ "SYS_ACCESS_EXTENDED",
+ "SYS_ACCT",
+ "SYS_ADD_KEY",
+ "SYS_ADD_PROFIL",
+ "SYS_ADJFREQ",
+ "SYS_ADJTIME",
+ "SYS_ADJTIMEX",
+ "SYS_AFS_SYSCALL",
+ "SYS_AIO_CANCEL",
+ "SYS_AIO_ERROR",
+ "SYS_AIO_FSYNC",
+ "SYS_AIO_READ",
+ "SYS_AIO_RETURN",
+ "SYS_AIO_SUSPEND",
+ "SYS_AIO_SUSPEND_NOCANCEL",
+ "SYS_AIO_WRITE",
+ "SYS_ALARM",
+ "SYS_ARCH_PRCTL",
+ "SYS_ARM_FADVISE64_64",
+ "SYS_ARM_SYNC_FILE_RANGE",
+ "SYS_ATGETMSG",
+ "SYS_ATPGETREQ",
+ "SYS_ATPGETRSP",
+ "SYS_ATPSNDREQ",
+ "SYS_ATPSNDRSP",
+ "SYS_ATPUTMSG",
+ "SYS_ATSOCKET",
+ "SYS_AUDIT",
+ "SYS_AUDITCTL",
+ "SYS_AUDITON",
+ "SYS_AUDIT_SESSION_JOIN",
+ "SYS_AUDIT_SESSION_PORT",
+ "SYS_AUDIT_SESSION_SELF",
+ "SYS_BDFLUSH",
+ "SYS_BIND",
+ "SYS_BINDAT",
+ "SYS_BREAK",
+ "SYS_BRK",
+ "SYS_BSDTHREAD_CREATE",
+ "SYS_BSDTHREAD_REGISTER",
+ "SYS_BSDTHREAD_TERMINATE",
+ "SYS_CAPGET",
+ "SYS_CAPSET",
+ "SYS_CAP_ENTER",
+ "SYS_CAP_FCNTLS_GET",
+ "SYS_CAP_FCNTLS_LIMIT",
+ "SYS_CAP_GETMODE",
+ "SYS_CAP_GETRIGHTS",
+ "SYS_CAP_IOCTLS_GET",
+ "SYS_CAP_IOCTLS_LIMIT",
+ "SYS_CAP_NEW",
+ "SYS_CAP_RIGHTS_GET",
+ "SYS_CAP_RIGHTS_LIMIT",
+ "SYS_CHDIR",
+ "SYS_CHFLAGS",
+ "SYS_CHFLAGSAT",
+ "SYS_CHMOD",
+ "SYS_CHMOD_EXTENDED",
+ "SYS_CHOWN",
+ "SYS_CHOWN32",
+ "SYS_CHROOT",
+ "SYS_CHUD",
+ "SYS_CLOCK_ADJTIME",
+ "SYS_CLOCK_GETCPUCLOCKID2",
+ "SYS_CLOCK_GETRES",
+ "SYS_CLOCK_GETTIME",
+ "SYS_CLOCK_NANOSLEEP",
+ "SYS_CLOCK_SETTIME",
+ "SYS_CLONE",
+ "SYS_CLOSE",
+ "SYS_CLOSEFROM",
+ "SYS_CLOSE_NOCANCEL",
+ "SYS_CONNECT",
+ "SYS_CONNECTAT",
+ "SYS_CONNECT_NOCANCEL",
+ "SYS_COPYFILE",
+ "SYS_CPUSET",
+ "SYS_CPUSET_GETAFFINITY",
+ "SYS_CPUSET_GETID",
+ "SYS_CPUSET_SETAFFINITY",
+ "SYS_CPUSET_SETID",
+ "SYS_CREAT",
+ "SYS_CREATE_MODULE",
+ "SYS_CSOPS",
+ "SYS_DELETE",
+ "SYS_DELETE_MODULE",
+ "SYS_DUP",
+ "SYS_DUP2",
+ "SYS_DUP3",
+ "SYS_EACCESS",
+ "SYS_EPOLL_CREATE",
+ "SYS_EPOLL_CREATE1",
+ "SYS_EPOLL_CTL",
+ "SYS_EPOLL_CTL_OLD",
+ "SYS_EPOLL_PWAIT",
+ "SYS_EPOLL_WAIT",
+ "SYS_EPOLL_WAIT_OLD",
+ "SYS_EVENTFD",
+ "SYS_EVENTFD2",
+ "SYS_EXCHANGEDATA",
+ "SYS_EXECVE",
+ "SYS_EXIT",
+ "SYS_EXIT_GROUP",
+ "SYS_EXTATTRCTL",
+ "SYS_EXTATTR_DELETE_FD",
+ "SYS_EXTATTR_DELETE_FILE",
+ "SYS_EXTATTR_DELETE_LINK",
+ "SYS_EXTATTR_GET_FD",
+ "SYS_EXTATTR_GET_FILE",
+ "SYS_EXTATTR_GET_LINK",
+ "SYS_EXTATTR_LIST_FD",
+ "SYS_EXTATTR_LIST_FILE",
+ "SYS_EXTATTR_LIST_LINK",
+ "SYS_EXTATTR_SET_FD",
+ "SYS_EXTATTR_SET_FILE",
+ "SYS_EXTATTR_SET_LINK",
+ "SYS_FACCESSAT",
+ "SYS_FADVISE64",
+ "SYS_FADVISE64_64",
+ "SYS_FALLOCATE",
+ "SYS_FANOTIFY_INIT",
+ "SYS_FANOTIFY_MARK",
+ "SYS_FCHDIR",
+ "SYS_FCHFLAGS",
+ "SYS_FCHMOD",
+ "SYS_FCHMODAT",
+ "SYS_FCHMOD_EXTENDED",
+ "SYS_FCHOWN",
+ "SYS_FCHOWN32",
+ "SYS_FCHOWNAT",
+ "SYS_FCHROOT",
+ "SYS_FCNTL",
+ "SYS_FCNTL64",
+ "SYS_FCNTL_NOCANCEL",
+ "SYS_FDATASYNC",
+ "SYS_FEXECVE",
+ "SYS_FFCLOCK_GETCOUNTER",
+ "SYS_FFCLOCK_GETESTIMATE",
+ "SYS_FFCLOCK_SETESTIMATE",
+ "SYS_FFSCTL",
+ "SYS_FGETATTRLIST",
+ "SYS_FGETXATTR",
+ "SYS_FHOPEN",
+ "SYS_FHSTAT",
+ "SYS_FHSTATFS",
+ "SYS_FILEPORT_MAKEFD",
+ "SYS_FILEPORT_MAKEPORT",
+ "SYS_FKTRACE",
+ "SYS_FLISTXATTR",
+ "SYS_FLOCK",
+ "SYS_FORK",
+ "SYS_FPATHCONF",
+ "SYS_FREEBSD6_FTRUNCATE",
+ "SYS_FREEBSD6_LSEEK",
+ "SYS_FREEBSD6_MMAP",
+ "SYS_FREEBSD6_PREAD",
+ "SYS_FREEBSD6_PWRITE",
+ "SYS_FREEBSD6_TRUNCATE",
+ "SYS_FREMOVEXATTR",
+ "SYS_FSCTL",
+ "SYS_FSETATTRLIST",
+ "SYS_FSETXATTR",
+ "SYS_FSGETPATH",
+ "SYS_FSTAT",
+ "SYS_FSTAT64",
+ "SYS_FSTAT64_EXTENDED",
+ "SYS_FSTATAT",
+ "SYS_FSTATAT64",
+ "SYS_FSTATFS",
+ "SYS_FSTATFS64",
+ "SYS_FSTATV",
+ "SYS_FSTATVFS1",
+ "SYS_FSTAT_EXTENDED",
+ "SYS_FSYNC",
+ "SYS_FSYNC_NOCANCEL",
+ "SYS_FSYNC_RANGE",
+ "SYS_FTIME",
+ "SYS_FTRUNCATE",
+ "SYS_FTRUNCATE64",
+ "SYS_FUTEX",
+ "SYS_FUTIMENS",
+ "SYS_FUTIMES",
+ "SYS_FUTIMESAT",
+ "SYS_GETATTRLIST",
+ "SYS_GETAUDIT",
+ "SYS_GETAUDIT_ADDR",
+ "SYS_GETAUID",
+ "SYS_GETCONTEXT",
+ "SYS_GETCPU",
+ "SYS_GETCWD",
+ "SYS_GETDENTS",
+ "SYS_GETDENTS64",
+ "SYS_GETDIRENTRIES",
+ "SYS_GETDIRENTRIES64",
+ "SYS_GETDIRENTRIESATTR",
+ "SYS_GETDTABLECOUNT",
+ "SYS_GETDTABLESIZE",
+ "SYS_GETEGID",
+ "SYS_GETEGID32",
+ "SYS_GETEUID",
+ "SYS_GETEUID32",
+ "SYS_GETFH",
+ "SYS_GETFSSTAT",
+ "SYS_GETFSSTAT64",
+ "SYS_GETGID",
+ "SYS_GETGID32",
+ "SYS_GETGROUPS",
+ "SYS_GETGROUPS32",
+ "SYS_GETHOSTUUID",
+ "SYS_GETITIMER",
+ "SYS_GETLCID",
+ "SYS_GETLOGIN",
+ "SYS_GETLOGINCLASS",
+ "SYS_GETPEERNAME",
+ "SYS_GETPGID",
+ "SYS_GETPGRP",
+ "SYS_GETPID",
+ "SYS_GETPMSG",
+ "SYS_GETPPID",
+ "SYS_GETPRIORITY",
+ "SYS_GETRESGID",
+ "SYS_GETRESGID32",
+ "SYS_GETRESUID",
+ "SYS_GETRESUID32",
+ "SYS_GETRLIMIT",
+ "SYS_GETRTABLE",
+ "SYS_GETRUSAGE",
+ "SYS_GETSGROUPS",
+ "SYS_GETSID",
+ "SYS_GETSOCKNAME",
+ "SYS_GETSOCKOPT",
+ "SYS_GETTHRID",
+ "SYS_GETTID",
+ "SYS_GETTIMEOFDAY",
+ "SYS_GETUID",
+ "SYS_GETUID32",
+ "SYS_GETVFSSTAT",
+ "SYS_GETWGROUPS",
+ "SYS_GETXATTR",
+ "SYS_GET_KERNEL_SYMS",
+ "SYS_GET_MEMPOLICY",
+ "SYS_GET_ROBUST_LIST",
+ "SYS_GET_THREAD_AREA",
+ "SYS_GTTY",
+ "SYS_IDENTITYSVC",
+ "SYS_IDLE",
+ "SYS_INITGROUPS",
+ "SYS_INIT_MODULE",
+ "SYS_INOTIFY_ADD_WATCH",
+ "SYS_INOTIFY_INIT",
+ "SYS_INOTIFY_INIT1",
+ "SYS_INOTIFY_RM_WATCH",
+ "SYS_IOCTL",
+ "SYS_IOPERM",
+ "SYS_IOPL",
+ "SYS_IOPOLICYSYS",
+ "SYS_IOPRIO_GET",
+ "SYS_IOPRIO_SET",
+ "SYS_IO_CANCEL",
+ "SYS_IO_DESTROY",
+ "SYS_IO_GETEVENTS",
+ "SYS_IO_SETUP",
+ "SYS_IO_SUBMIT",
+ "SYS_IPC",
+ "SYS_ISSETUGID",
+ "SYS_JAIL",
+ "SYS_JAIL_ATTACH",
+ "SYS_JAIL_GET",
+ "SYS_JAIL_REMOVE",
+ "SYS_JAIL_SET",
+ "SYS_KDEBUG_TRACE",
+ "SYS_KENV",
+ "SYS_KEVENT",
+ "SYS_KEVENT64",
+ "SYS_KEXEC_LOAD",
+ "SYS_KEYCTL",
+ "SYS_KILL",
+ "SYS_KLDFIND",
+ "SYS_KLDFIRSTMOD",
+ "SYS_KLDLOAD",
+ "SYS_KLDNEXT",
+ "SYS_KLDSTAT",
+ "SYS_KLDSYM",
+ "SYS_KLDUNLOAD",
+ "SYS_KLDUNLOADF",
+ "SYS_KQUEUE",
+ "SYS_KQUEUE1",
+ "SYS_KTIMER_CREATE",
+ "SYS_KTIMER_DELETE",
+ "SYS_KTIMER_GETOVERRUN",
+ "SYS_KTIMER_GETTIME",
+ "SYS_KTIMER_SETTIME",
+ "SYS_KTRACE",
+ "SYS_LCHFLAGS",
+ "SYS_LCHMOD",
+ "SYS_LCHOWN",
+ "SYS_LCHOWN32",
+ "SYS_LGETFH",
+ "SYS_LGETXATTR",
+ "SYS_LINK",
+ "SYS_LINKAT",
+ "SYS_LIO_LISTIO",
+ "SYS_LISTEN",
+ "SYS_LISTXATTR",
+ "SYS_LLISTXATTR",
+ "SYS_LOCK",
+ "SYS_LOOKUP_DCOOKIE",
+ "SYS_LPATHCONF",
+ "SYS_LREMOVEXATTR",
+ "SYS_LSEEK",
+ "SYS_LSETXATTR",
+ "SYS_LSTAT",
+ "SYS_LSTAT64",
+ "SYS_LSTAT64_EXTENDED",
+ "SYS_LSTATV",
+ "SYS_LSTAT_EXTENDED",
+ "SYS_LUTIMES",
+ "SYS_MAC_SYSCALL",
+ "SYS_MADVISE",
+ "SYS_MADVISE1",
+ "SYS_MAXSYSCALL",
+ "SYS_MBIND",
+ "SYS_MIGRATE_PAGES",
+ "SYS_MINCORE",
+ "SYS_MINHERIT",
+ "SYS_MKCOMPLEX",
+ "SYS_MKDIR",
+ "SYS_MKDIRAT",
+ "SYS_MKDIR_EXTENDED",
+ "SYS_MKFIFO",
+ "SYS_MKFIFOAT",
+ "SYS_MKFIFO_EXTENDED",
+ "SYS_MKNOD",
+ "SYS_MKNODAT",
+ "SYS_MLOCK",
+ "SYS_MLOCKALL",
+ "SYS_MMAP",
+ "SYS_MMAP2",
+ "SYS_MODCTL",
+ "SYS_MODFIND",
+ "SYS_MODFNEXT",
+ "SYS_MODIFY_LDT",
+ "SYS_MODNEXT",
+ "SYS_MODSTAT",
+ "SYS_MODWATCH",
+ "SYS_MOUNT",
+ "SYS_MOVE_PAGES",
+ "SYS_MPROTECT",
+ "SYS_MPX",
+ "SYS_MQUERY",
+ "SYS_MQ_GETSETATTR",
+ "SYS_MQ_NOTIFY",
+ "SYS_MQ_OPEN",
+ "SYS_MQ_TIMEDRECEIVE",
+ "SYS_MQ_TIMEDSEND",
+ "SYS_MQ_UNLINK",
+ "SYS_MREMAP",
+ "SYS_MSGCTL",
+ "SYS_MSGGET",
+ "SYS_MSGRCV",
+ "SYS_MSGRCV_NOCANCEL",
+ "SYS_MSGSND",
+ "SYS_MSGSND_NOCANCEL",
+ "SYS_MSGSYS",
+ "SYS_MSYNC",
+ "SYS_MSYNC_NOCANCEL",
+ "SYS_MUNLOCK",
+ "SYS_MUNLOCKALL",
+ "SYS_MUNMAP",
+ "SYS_NAME_TO_HANDLE_AT",
+ "SYS_NANOSLEEP",
+ "SYS_NEWFSTATAT",
+ "SYS_NFSCLNT",
+ "SYS_NFSSERVCTL",
+ "SYS_NFSSVC",
+ "SYS_NFSTAT",
+ "SYS_NICE",
+ "SYS_NLSTAT",
+ "SYS_NMOUNT",
+ "SYS_NSTAT",
+ "SYS_NTP_ADJTIME",
+ "SYS_NTP_GETTIME",
+ "SYS_OABI_SYSCALL_BASE",
+ "SYS_OBREAK",
+ "SYS_OLDFSTAT",
+ "SYS_OLDLSTAT",
+ "SYS_OLDOLDUNAME",
+ "SYS_OLDSTAT",
+ "SYS_OLDUNAME",
+ "SYS_OPEN",
+ "SYS_OPENAT",
+ "SYS_OPENBSD_POLL",
+ "SYS_OPEN_BY_HANDLE_AT",
+ "SYS_OPEN_EXTENDED",
+ "SYS_OPEN_NOCANCEL",
+ "SYS_OVADVISE",
+ "SYS_PACCEPT",
+ "SYS_PATHCONF",
+ "SYS_PAUSE",
+ "SYS_PCICONFIG_IOBASE",
+ "SYS_PCICONFIG_READ",
+ "SYS_PCICONFIG_WRITE",
+ "SYS_PDFORK",
+ "SYS_PDGETPID",
+ "SYS_PDKILL",
+ "SYS_PERF_EVENT_OPEN",
+ "SYS_PERSONALITY",
+ "SYS_PID_HIBERNATE",
+ "SYS_PID_RESUME",
+ "SYS_PID_SHUTDOWN_SOCKETS",
+ "SYS_PID_SUSPEND",
+ "SYS_PIPE",
+ "SYS_PIPE2",
+ "SYS_PIVOT_ROOT",
+ "SYS_PMC_CONTROL",
+ "SYS_PMC_GET_INFO",
+ "SYS_POLL",
+ "SYS_POLLTS",
+ "SYS_POLL_NOCANCEL",
+ "SYS_POSIX_FADVISE",
+ "SYS_POSIX_FALLOCATE",
+ "SYS_POSIX_OPENPT",
+ "SYS_POSIX_SPAWN",
+ "SYS_PPOLL",
+ "SYS_PRCTL",
+ "SYS_PREAD",
+ "SYS_PREAD64",
+ "SYS_PREADV",
+ "SYS_PREAD_NOCANCEL",
+ "SYS_PRLIMIT64",
+ "SYS_PROCCTL",
+ "SYS_PROCESS_POLICY",
+ "SYS_PROCESS_VM_READV",
+ "SYS_PROCESS_VM_WRITEV",
+ "SYS_PROC_INFO",
+ "SYS_PROF",
+ "SYS_PROFIL",
+ "SYS_PSELECT",
+ "SYS_PSELECT6",
+ "SYS_PSET_ASSIGN",
+ "SYS_PSET_CREATE",
+ "SYS_PSET_DESTROY",
+ "SYS_PSYNCH_CVBROAD",
+ "SYS_PSYNCH_CVCLRPREPOST",
+ "SYS_PSYNCH_CVSIGNAL",
+ "SYS_PSYNCH_CVWAIT",
+ "SYS_PSYNCH_MUTEXDROP",
+ "SYS_PSYNCH_MUTEXWAIT",
+ "SYS_PSYNCH_RW_DOWNGRADE",
+ "SYS_PSYNCH_RW_LONGRDLOCK",
+ "SYS_PSYNCH_RW_RDLOCK",
+ "SYS_PSYNCH_RW_UNLOCK",
+ "SYS_PSYNCH_RW_UNLOCK2",
+ "SYS_PSYNCH_RW_UPGRADE",
+ "SYS_PSYNCH_RW_WRLOCK",
+ "SYS_PSYNCH_RW_YIELDWRLOCK",
+ "SYS_PTRACE",
+ "SYS_PUTPMSG",
+ "SYS_PWRITE",
+ "SYS_PWRITE64",
+ "SYS_PWRITEV",
+ "SYS_PWRITE_NOCANCEL",
+ "SYS_QUERY_MODULE",
+ "SYS_QUOTACTL",
+ "SYS_RASCTL",
+ "SYS_RCTL_ADD_RULE",
+ "SYS_RCTL_GET_LIMITS",
+ "SYS_RCTL_GET_RACCT",
+ "SYS_RCTL_GET_RULES",
+ "SYS_RCTL_REMOVE_RULE",
+ "SYS_READ",
+ "SYS_READAHEAD",
+ "SYS_READDIR",
+ "SYS_READLINK",
+ "SYS_READLINKAT",
+ "SYS_READV",
+ "SYS_READV_NOCANCEL",
+ "SYS_READ_NOCANCEL",
+ "SYS_REBOOT",
+ "SYS_RECV",
+ "SYS_RECVFROM",
+ "SYS_RECVFROM_NOCANCEL",
+ "SYS_RECVMMSG",
+ "SYS_RECVMSG",
+ "SYS_RECVMSG_NOCANCEL",
+ "SYS_REMAP_FILE_PAGES",
+ "SYS_REMOVEXATTR",
+ "SYS_RENAME",
+ "SYS_RENAMEAT",
+ "SYS_REQUEST_KEY",
+ "SYS_RESTART_SYSCALL",
+ "SYS_REVOKE",
+ "SYS_RFORK",
+ "SYS_RMDIR",
+ "SYS_RTPRIO",
+ "SYS_RTPRIO_THREAD",
+ "SYS_RT_SIGACTION",
+ "SYS_RT_SIGPENDING",
+ "SYS_RT_SIGPROCMASK",
+ "SYS_RT_SIGQUEUEINFO",
+ "SYS_RT_SIGRETURN",
+ "SYS_RT_SIGSUSPEND",
+ "SYS_RT_SIGTIMEDWAIT",
+ "SYS_RT_TGSIGQUEUEINFO",
+ "SYS_SBRK",
+ "SYS_SCHED_GETAFFINITY",
+ "SYS_SCHED_GETPARAM",
+ "SYS_SCHED_GETSCHEDULER",
+ "SYS_SCHED_GET_PRIORITY_MAX",
+ "SYS_SCHED_GET_PRIORITY_MIN",
+ "SYS_SCHED_RR_GET_INTERVAL",
+ "SYS_SCHED_SETAFFINITY",
+ "SYS_SCHED_SETPARAM",
+ "SYS_SCHED_SETSCHEDULER",
+ "SYS_SCHED_YIELD",
+ "SYS_SCTP_GENERIC_RECVMSG",
+ "SYS_SCTP_GENERIC_SENDMSG",
+ "SYS_SCTP_GENERIC_SENDMSG_IOV",
+ "SYS_SCTP_PEELOFF",
+ "SYS_SEARCHFS",
+ "SYS_SECURITY",
+ "SYS_SELECT",
+ "SYS_SELECT_NOCANCEL",
+ "SYS_SEMCONFIG",
+ "SYS_SEMCTL",
+ "SYS_SEMGET",
+ "SYS_SEMOP",
+ "SYS_SEMSYS",
+ "SYS_SEMTIMEDOP",
+ "SYS_SEM_CLOSE",
+ "SYS_SEM_DESTROY",
+ "SYS_SEM_GETVALUE",
+ "SYS_SEM_INIT",
+ "SYS_SEM_OPEN",
+ "SYS_SEM_POST",
+ "SYS_SEM_TRYWAIT",
+ "SYS_SEM_UNLINK",
+ "SYS_SEM_WAIT",
+ "SYS_SEM_WAIT_NOCANCEL",
+ "SYS_SEND",
+ "SYS_SENDFILE",
+ "SYS_SENDFILE64",
+ "SYS_SENDMMSG",
+ "SYS_SENDMSG",
+ "SYS_SENDMSG_NOCANCEL",
+ "SYS_SENDTO",
+ "SYS_SENDTO_NOCANCEL",
+ "SYS_SETATTRLIST",
+ "SYS_SETAUDIT",
+ "SYS_SETAUDIT_ADDR",
+ "SYS_SETAUID",
+ "SYS_SETCONTEXT",
+ "SYS_SETDOMAINNAME",
+ "SYS_SETEGID",
+ "SYS_SETEUID",
+ "SYS_SETFIB",
+ "SYS_SETFSGID",
+ "SYS_SETFSGID32",
+ "SYS_SETFSUID",
+ "SYS_SETFSUID32",
+ "SYS_SETGID",
+ "SYS_SETGID32",
+ "SYS_SETGROUPS",
+ "SYS_SETGROUPS32",
+ "SYS_SETHOSTNAME",
+ "SYS_SETITIMER",
+ "SYS_SETLCID",
+ "SYS_SETLOGIN",
+ "SYS_SETLOGINCLASS",
+ "SYS_SETNS",
+ "SYS_SETPGID",
+ "SYS_SETPRIORITY",
+ "SYS_SETPRIVEXEC",
+ "SYS_SETREGID",
+ "SYS_SETREGID32",
+ "SYS_SETRESGID",
+ "SYS_SETRESGID32",
+ "SYS_SETRESUID",
+ "SYS_SETRESUID32",
+ "SYS_SETREUID",
+ "SYS_SETREUID32",
+ "SYS_SETRLIMIT",
+ "SYS_SETRTABLE",
+ "SYS_SETSGROUPS",
+ "SYS_SETSID",
+ "SYS_SETSOCKOPT",
+ "SYS_SETTID",
+ "SYS_SETTID_WITH_PID",
+ "SYS_SETTIMEOFDAY",
+ "SYS_SETUID",
+ "SYS_SETUID32",
+ "SYS_SETWGROUPS",
+ "SYS_SETXATTR",
+ "SYS_SET_MEMPOLICY",
+ "SYS_SET_ROBUST_LIST",
+ "SYS_SET_THREAD_AREA",
+ "SYS_SET_TID_ADDRESS",
+ "SYS_SGETMASK",
+ "SYS_SHARED_REGION_CHECK_NP",
+ "SYS_SHARED_REGION_MAP_AND_SLIDE_NP",
+ "SYS_SHMAT",
+ "SYS_SHMCTL",
+ "SYS_SHMDT",
+ "SYS_SHMGET",
+ "SYS_SHMSYS",
+ "SYS_SHM_OPEN",
+ "SYS_SHM_UNLINK",
+ "SYS_SHUTDOWN",
+ "SYS_SIGACTION",
+ "SYS_SIGALTSTACK",
+ "SYS_SIGNAL",
+ "SYS_SIGNALFD",
+ "SYS_SIGNALFD4",
+ "SYS_SIGPENDING",
+ "SYS_SIGPROCMASK",
+ "SYS_SIGQUEUE",
+ "SYS_SIGQUEUEINFO",
+ "SYS_SIGRETURN",
+ "SYS_SIGSUSPEND",
+ "SYS_SIGSUSPEND_NOCANCEL",
+ "SYS_SIGTIMEDWAIT",
+ "SYS_SIGWAIT",
+ "SYS_SIGWAITINFO",
+ "SYS_SOCKET",
+ "SYS_SOCKETCALL",
+ "SYS_SOCKETPAIR",
+ "SYS_SPLICE",
+ "SYS_SSETMASK",
+ "SYS_SSTK",
+ "SYS_STACK_SNAPSHOT",
+ "SYS_STAT",
+ "SYS_STAT64",
+ "SYS_STAT64_EXTENDED",
+ "SYS_STATFS",
+ "SYS_STATFS64",
+ "SYS_STATV",
+ "SYS_STATVFS1",
+ "SYS_STAT_EXTENDED",
+ "SYS_STIME",
+ "SYS_STTY",
+ "SYS_SWAPCONTEXT",
+ "SYS_SWAPCTL",
+ "SYS_SWAPOFF",
+ "SYS_SWAPON",
+ "SYS_SYMLINK",
+ "SYS_SYMLINKAT",
+ "SYS_SYNC",
+ "SYS_SYNCFS",
+ "SYS_SYNC_FILE_RANGE",
+ "SYS_SYSARCH",
+ "SYS_SYSCALL",
+ "SYS_SYSCALL_BASE",
+ "SYS_SYSFS",
+ "SYS_SYSINFO",
+ "SYS_SYSLOG",
+ "SYS_TEE",
+ "SYS_TGKILL",
+ "SYS_THREAD_SELFID",
+ "SYS_THR_CREATE",
+ "SYS_THR_EXIT",
+ "SYS_THR_KILL",
+ "SYS_THR_KILL2",
+ "SYS_THR_NEW",
+ "SYS_THR_SELF",
+ "SYS_THR_SET_NAME",
+ "SYS_THR_SUSPEND",
+ "SYS_THR_WAKE",
+ "SYS_TIME",
+ "SYS_TIMERFD_CREATE",
+ "SYS_TIMERFD_GETTIME",
+ "SYS_TIMERFD_SETTIME",
+ "SYS_TIMER_CREATE",
+ "SYS_TIMER_DELETE",
+ "SYS_TIMER_GETOVERRUN",
+ "SYS_TIMER_GETTIME",
+ "SYS_TIMER_SETTIME",
+ "SYS_TIMES",
+ "SYS_TKILL",
+ "SYS_TRUNCATE",
+ "SYS_TRUNCATE64",
+ "SYS_TUXCALL",
+ "SYS_UGETRLIMIT",
+ "SYS_ULIMIT",
+ "SYS_UMASK",
+ "SYS_UMASK_EXTENDED",
+ "SYS_UMOUNT",
+ "SYS_UMOUNT2",
+ "SYS_UNAME",
+ "SYS_UNDELETE",
+ "SYS_UNLINK",
+ "SYS_UNLINKAT",
+ "SYS_UNMOUNT",
+ "SYS_UNSHARE",
+ "SYS_USELIB",
+ "SYS_USTAT",
+ "SYS_UTIME",
+ "SYS_UTIMENSAT",
+ "SYS_UTIMES",
+ "SYS_UTRACE",
+ "SYS_UUIDGEN",
+ "SYS_VADVISE",
+ "SYS_VFORK",
+ "SYS_VHANGUP",
+ "SYS_VM86",
+ "SYS_VM86OLD",
+ "SYS_VMSPLICE",
+ "SYS_VM_PRESSURE_MONITOR",
+ "SYS_VSERVER",
+ "SYS_WAIT4",
+ "SYS_WAIT4_NOCANCEL",
+ "SYS_WAIT6",
+ "SYS_WAITEVENT",
+ "SYS_WAITID",
+ "SYS_WAITID_NOCANCEL",
+ "SYS_WAITPID",
+ "SYS_WATCHEVENT",
+ "SYS_WORKQ_KERNRETURN",
+ "SYS_WORKQ_OPEN",
+ "SYS_WRITE",
+ "SYS_WRITEV",
+ "SYS_WRITEV_NOCANCEL",
+ "SYS_WRITE_NOCANCEL",
+ "SYS_YIELD",
+ "SYS__LLSEEK",
+ "SYS__LWP_CONTINUE",
+ "SYS__LWP_CREATE",
+ "SYS__LWP_CTL",
+ "SYS__LWP_DETACH",
+ "SYS__LWP_EXIT",
+ "SYS__LWP_GETNAME",
+ "SYS__LWP_GETPRIVATE",
+ "SYS__LWP_KILL",
+ "SYS__LWP_PARK",
+ "SYS__LWP_SELF",
+ "SYS__LWP_SETNAME",
+ "SYS__LWP_SETPRIVATE",
+ "SYS__LWP_SUSPEND",
+ "SYS__LWP_UNPARK",
+ "SYS__LWP_UNPARK_ALL",
+ "SYS__LWP_WAIT",
+ "SYS__LWP_WAKEUP",
+ "SYS__NEWSELECT",
+ "SYS__PSET_BIND",
+ "SYS__SCHED_GETAFFINITY",
+ "SYS__SCHED_GETPARAM",
+ "SYS__SCHED_SETAFFINITY",
+ "SYS__SCHED_SETPARAM",
+ "SYS__SYSCTL",
+ "SYS__UMTX_LOCK",
+ "SYS__UMTX_OP",
+ "SYS__UMTX_UNLOCK",
+ "SYS___ACL_ACLCHECK_FD",
+ "SYS___ACL_ACLCHECK_FILE",
+ "SYS___ACL_ACLCHECK_LINK",
+ "SYS___ACL_DELETE_FD",
+ "SYS___ACL_DELETE_FILE",
+ "SYS___ACL_DELETE_LINK",
+ "SYS___ACL_GET_FD",
+ "SYS___ACL_GET_FILE",
+ "SYS___ACL_GET_LINK",
+ "SYS___ACL_SET_FD",
+ "SYS___ACL_SET_FILE",
+ "SYS___ACL_SET_LINK",
+ "SYS___CLONE",
+ "SYS___DISABLE_THREADSIGNAL",
+ "SYS___GETCWD",
+ "SYS___GETLOGIN",
+ "SYS___GET_TCB",
+ "SYS___MAC_EXECVE",
+ "SYS___MAC_GETFSSTAT",
+ "SYS___MAC_GET_FD",
+ "SYS___MAC_GET_FILE",
+ "SYS___MAC_GET_LCID",
+ "SYS___MAC_GET_LCTX",
+ "SYS___MAC_GET_LINK",
+ "SYS___MAC_GET_MOUNT",
+ "SYS___MAC_GET_PID",
+ "SYS___MAC_GET_PROC",
+ "SYS___MAC_MOUNT",
+ "SYS___MAC_SET_FD",
+ "SYS___MAC_SET_FILE",
+ "SYS___MAC_SET_LCTX",
+ "SYS___MAC_SET_LINK",
+ "SYS___MAC_SET_PROC",
+ "SYS___MAC_SYSCALL",
+ "SYS___OLD_SEMWAIT_SIGNAL",
+ "SYS___OLD_SEMWAIT_SIGNAL_NOCANCEL",
+ "SYS___POSIX_CHOWN",
+ "SYS___POSIX_FCHOWN",
+ "SYS___POSIX_LCHOWN",
+ "SYS___POSIX_RENAME",
+ "SYS___PTHREAD_CANCELED",
+ "SYS___PTHREAD_CHDIR",
+ "SYS___PTHREAD_FCHDIR",
+ "SYS___PTHREAD_KILL",
+ "SYS___PTHREAD_MARKCANCEL",
+ "SYS___PTHREAD_SIGMASK",
+ "SYS___QUOTACTL",
+ "SYS___SEMCTL",
+ "SYS___SEMWAIT_SIGNAL",
+ "SYS___SEMWAIT_SIGNAL_NOCANCEL",
+ "SYS___SETLOGIN",
+ "SYS___SETUGID",
+ "SYS___SET_TCB",
+ "SYS___SIGACTION_SIGTRAMP",
+ "SYS___SIGTIMEDWAIT",
+ "SYS___SIGWAIT",
+ "SYS___SIGWAIT_NOCANCEL",
+ "SYS___SYSCTL",
+ "SYS___TFORK",
+ "SYS___THREXIT",
+ "SYS___THRSIGDIVERT",
+ "SYS___THRSLEEP",
+ "SYS___THRWAKEUP",
+ "S_ARCH1",
+ "S_ARCH2",
+ "S_BLKSIZE",
+ "S_IEXEC",
+ "S_IFBLK",
+ "S_IFCHR",
+ "S_IFDIR",
+ "S_IFIFO",
+ "S_IFLNK",
+ "S_IFMT",
+ "S_IFREG",
+ "S_IFSOCK",
+ "S_IFWHT",
+ "S_IREAD",
+ "S_IRGRP",
+ "S_IROTH",
+ "S_IRUSR",
+ "S_IRWXG",
+ "S_IRWXO",
+ "S_IRWXU",
+ "S_ISGID",
+ "S_ISTXT",
+ "S_ISUID",
+ "S_ISVTX",
+ "S_IWGRP",
+ "S_IWOTH",
+ "S_IWRITE",
+ "S_IWUSR",
+ "S_IXGRP",
+ "S_IXOTH",
+ "S_IXUSR",
+ "S_LOGIN_SET",
+ "SecurityAttributes",
+ "Seek",
+ "Select",
+ "Sendfile",
+ "Sendmsg",
+ "SendmsgN",
+ "Sendto",
+ "Servent",
+ "SetBpf",
+ "SetBpfBuflen",
+ "SetBpfDatalink",
+ "SetBpfHeadercmpl",
+ "SetBpfImmediate",
+ "SetBpfInterface",
+ "SetBpfPromisc",
+ "SetBpfTimeout",
+ "SetCurrentDirectory",
+ "SetEndOfFile",
+ "SetEnvironmentVariable",
+ "SetFileAttributes",
+ "SetFileCompletionNotificationModes",
+ "SetFilePointer",
+ "SetFileTime",
+ "SetHandleInformation",
+ "SetKevent",
+ "SetLsfPromisc",
+ "SetNonblock",
+ "Setdomainname",
+ "Setegid",
+ "Setenv",
+ "Seteuid",
+ "Setfsgid",
+ "Setfsuid",
+ "Setgid",
+ "Setgroups",
+ "Sethostname",
+ "Setlogin",
+ "Setpgid",
+ "Setpriority",
+ "Setprivexec",
+ "Setregid",
+ "Setresgid",
+ "Setresuid",
+ "Setreuid",
+ "Setrlimit",
+ "Setsid",
+ "Setsockopt",
+ "SetsockoptByte",
+ "SetsockoptICMPv6Filter",
+ "SetsockoptIPMreq",
+ "SetsockoptIPMreqn",
+ "SetsockoptIPv6Mreq",
+ "SetsockoptInet4Addr",
+ "SetsockoptInt",
+ "SetsockoptLinger",
+ "SetsockoptString",
+ "SetsockoptTimeval",
+ "Settimeofday",
+ "Setuid",
+ "Setxattr",
+ "Shutdown",
+ "SidTypeAlias",
+ "SidTypeComputer",
+ "SidTypeDeletedAccount",
+ "SidTypeDomain",
+ "SidTypeGroup",
+ "SidTypeInvalid",
+ "SidTypeLabel",
+ "SidTypeUnknown",
+ "SidTypeUser",
+ "SidTypeWellKnownGroup",
+ "Signal",
+ "SizeofBpfHdr",
+ "SizeofBpfInsn",
+ "SizeofBpfProgram",
+ "SizeofBpfStat",
+ "SizeofBpfVersion",
+ "SizeofBpfZbuf",
+ "SizeofBpfZbufHeader",
+ "SizeofCmsghdr",
+ "SizeofICMPv6Filter",
+ "SizeofIPMreq",
+ "SizeofIPMreqn",
+ "SizeofIPv6MTUInfo",
+ "SizeofIPv6Mreq",
+ "SizeofIfAddrmsg",
+ "SizeofIfAnnounceMsghdr",
+ "SizeofIfData",
+ "SizeofIfInfomsg",
+ "SizeofIfMsghdr",
+ "SizeofIfaMsghdr",
+ "SizeofIfmaMsghdr",
+ "SizeofIfmaMsghdr2",
+ "SizeofInet4Pktinfo",
+ "SizeofInet6Pktinfo",
+ "SizeofInotifyEvent",
+ "SizeofLinger",
+ "SizeofMsghdr",
+ "SizeofNlAttr",
+ "SizeofNlMsgerr",
+ "SizeofNlMsghdr",
+ "SizeofRtAttr",
+ "SizeofRtGenmsg",
+ "SizeofRtMetrics",
+ "SizeofRtMsg",
+ "SizeofRtMsghdr",
+ "SizeofRtNexthop",
+ "SizeofSockFilter",
+ "SizeofSockFprog",
+ "SizeofSockaddrAny",
+ "SizeofSockaddrDatalink",
+ "SizeofSockaddrInet4",
+ "SizeofSockaddrInet6",
+ "SizeofSockaddrLinklayer",
+ "SizeofSockaddrNetlink",
+ "SizeofSockaddrUnix",
+ "SizeofTCPInfo",
+ "SizeofUcred",
+ "SlicePtrFromStrings",
+ "SockFilter",
+ "SockFprog",
+ "Sockaddr",
+ "SockaddrDatalink",
+ "SockaddrGen",
+ "SockaddrInet4",
+ "SockaddrInet6",
+ "SockaddrLinklayer",
+ "SockaddrNetlink",
+ "SockaddrUnix",
+ "Socket",
+ "SocketControlMessage",
+ "SocketDisableIPv6",
+ "Socketpair",
+ "Splice",
+ "StartProcess",
+ "StartupInfo",
+ "Stat",
+ "Stat_t",
+ "Statfs",
+ "Statfs_t",
+ "Stderr",
+ "Stdin",
+ "Stdout",
+ "StringBytePtr",
+ "StringByteSlice",
+ "StringSlicePtr",
+ "StringToSid",
+ "StringToUTF16",
+ "StringToUTF16Ptr",
+ "Symlink",
+ "Sync",
+ "SyncFileRange",
+ "SysProcAttr",
+ "SysProcIDMap",
+ "Syscall",
+ "Syscall12",
+ "Syscall15",
+ "Syscall18",
+ "Syscall6",
+ "Syscall9",
+ "Sysctl",
+ "SysctlUint32",
+ "Sysctlnode",
+ "Sysinfo",
+ "Sysinfo_t",
+ "Systemtime",
+ "TCGETS",
+ "TCIFLUSH",
+ "TCIOFLUSH",
+ "TCOFLUSH",
+ "TCPInfo",
+ "TCPKeepalive",
+ "TCP_CA_NAME_MAX",
+ "TCP_CONGCTL",
+ "TCP_CONGESTION",
+ "TCP_CONNECTIONTIMEOUT",
+ "TCP_CORK",
+ "TCP_DEFER_ACCEPT",
+ "TCP_INFO",
+ "TCP_KEEPALIVE",
+ "TCP_KEEPCNT",
+ "TCP_KEEPIDLE",
+ "TCP_KEEPINIT",
+ "TCP_KEEPINTVL",
+ "TCP_LINGER2",
+ "TCP_MAXBURST",
+ "TCP_MAXHLEN",
+ "TCP_MAXOLEN",
+ "TCP_MAXSEG",
+ "TCP_MAXWIN",
+ "TCP_MAX_SACK",
+ "TCP_MAX_WINSHIFT",
+ "TCP_MD5SIG",
+ "TCP_MD5SIG_MAXKEYLEN",
+ "TCP_MINMSS",
+ "TCP_MINMSSOVERLOAD",
+ "TCP_MSS",
+ "TCP_NODELAY",
+ "TCP_NOOPT",
+ "TCP_NOPUSH",
+ "TCP_NSTATES",
+ "TCP_QUICKACK",
+ "TCP_RXT_CONNDROPTIME",
+ "TCP_RXT_FINDROP",
+ "TCP_SACK_ENABLE",
+ "TCP_SYNCNT",
+ "TCP_VENDOR",
+ "TCP_WINDOW_CLAMP",
+ "TCSAFLUSH",
+ "TCSETS",
+ "TF_DISCONNECT",
+ "TF_REUSE_SOCKET",
+ "TF_USE_DEFAULT_WORKER",
+ "TF_USE_KERNEL_APC",
+ "TF_USE_SYSTEM_THREAD",
+ "TF_WRITE_BEHIND",
+ "TH32CS_INHERIT",
+ "TH32CS_SNAPALL",
+ "TH32CS_SNAPHEAPLIST",
+ "TH32CS_SNAPMODULE",
+ "TH32CS_SNAPMODULE32",
+ "TH32CS_SNAPPROCESS",
+ "TH32CS_SNAPTHREAD",
+ "TIME_ZONE_ID_DAYLIGHT",
+ "TIME_ZONE_ID_STANDARD",
+ "TIME_ZONE_ID_UNKNOWN",
+ "TIOCCBRK",
+ "TIOCCDTR",
+ "TIOCCONS",
+ "TIOCDCDTIMESTAMP",
+ "TIOCDRAIN",
+ "TIOCDSIMICROCODE",
+ "TIOCEXCL",
+ "TIOCEXT",
+ "TIOCFLAG_CDTRCTS",
+ "TIOCFLAG_CLOCAL",
+ "TIOCFLAG_CRTSCTS",
+ "TIOCFLAG_MDMBUF",
+ "TIOCFLAG_PPS",
+ "TIOCFLAG_SOFTCAR",
+ "TIOCFLUSH",
+ "TIOCGDEV",
+ "TIOCGDRAINWAIT",
+ "TIOCGETA",
+ "TIOCGETD",
+ "TIOCGFLAGS",
+ "TIOCGICOUNT",
+ "TIOCGLCKTRMIOS",
+ "TIOCGLINED",
+ "TIOCGPGRP",
+ "TIOCGPTN",
+ "TIOCGQSIZE",
+ "TIOCGRANTPT",
+ "TIOCGRS485",
+ "TIOCGSERIAL",
+ "TIOCGSID",
+ "TIOCGSIZE",
+ "TIOCGSOFTCAR",
+ "TIOCGTSTAMP",
+ "TIOCGWINSZ",
+ "TIOCINQ",
+ "TIOCIXOFF",
+ "TIOCIXON",
+ "TIOCLINUX",
+ "TIOCMBIC",
+ "TIOCMBIS",
+ "TIOCMGDTRWAIT",
+ "TIOCMGET",
+ "TIOCMIWAIT",
+ "TIOCMODG",
+ "TIOCMODS",
+ "TIOCMSDTRWAIT",
+ "TIOCMSET",
+ "TIOCM_CAR",
+ "TIOCM_CD",
+ "TIOCM_CTS",
+ "TIOCM_DCD",
+ "TIOCM_DSR",
+ "TIOCM_DTR",
+ "TIOCM_LE",
+ "TIOCM_RI",
+ "TIOCM_RNG",
+ "TIOCM_RTS",
+ "TIOCM_SR",
+ "TIOCM_ST",
+ "TIOCNOTTY",
+ "TIOCNXCL",
+ "TIOCOUTQ",
+ "TIOCPKT",
+ "TIOCPKT_DATA",
+ "TIOCPKT_DOSTOP",
+ "TIOCPKT_FLUSHREAD",
+ "TIOCPKT_FLUSHWRITE",
+ "TIOCPKT_IOCTL",
+ "TIOCPKT_NOSTOP",
+ "TIOCPKT_START",
+ "TIOCPKT_STOP",
+ "TIOCPTMASTER",
+ "TIOCPTMGET",
+ "TIOCPTSNAME",
+ "TIOCPTYGNAME",
+ "TIOCPTYGRANT",
+ "TIOCPTYUNLK",
+ "TIOCRCVFRAME",
+ "TIOCREMOTE",
+ "TIOCSBRK",
+ "TIOCSCONS",
+ "TIOCSCTTY",
+ "TIOCSDRAINWAIT",
+ "TIOCSDTR",
+ "TIOCSERCONFIG",
+ "TIOCSERGETLSR",
+ "TIOCSERGETMULTI",
+ "TIOCSERGSTRUCT",
+ "TIOCSERGWILD",
+ "TIOCSERSETMULTI",
+ "TIOCSERSWILD",
+ "TIOCSER_TEMT",
+ "TIOCSETA",
+ "TIOCSETAF",
+ "TIOCSETAW",
+ "TIOCSETD",
+ "TIOCSFLAGS",
+ "TIOCSIG",
+ "TIOCSLCKTRMIOS",
+ "TIOCSLINED",
+ "TIOCSPGRP",
+ "TIOCSPTLCK",
+ "TIOCSQSIZE",
+ "TIOCSRS485",
+ "TIOCSSERIAL",
+ "TIOCSSIZE",
+ "TIOCSSOFTCAR",
+ "TIOCSTART",
+ "TIOCSTAT",
+ "TIOCSTI",
+ "TIOCSTOP",
+ "TIOCSTSTAMP",
+ "TIOCSWINSZ",
+ "TIOCTIMESTAMP",
+ "TIOCUCNTL",
+ "TIOCVHANGUP",
+ "TIOCXMTFRAME",
+ "TOKEN_ADJUST_DEFAULT",
+ "TOKEN_ADJUST_GROUPS",
+ "TOKEN_ADJUST_PRIVILEGES",
+ "TOKEN_ADJUST_SESSIONID",
+ "TOKEN_ALL_ACCESS",
+ "TOKEN_ASSIGN_PRIMARY",
+ "TOKEN_DUPLICATE",
+ "TOKEN_EXECUTE",
+ "TOKEN_IMPERSONATE",
+ "TOKEN_QUERY",
+ "TOKEN_QUERY_SOURCE",
+ "TOKEN_READ",
+ "TOKEN_WRITE",
+ "TOSTOP",
+ "TRUNCATE_EXISTING",
+ "TUNATTACHFILTER",
+ "TUNDETACHFILTER",
+ "TUNGETFEATURES",
+ "TUNGETIFF",
+ "TUNGETSNDBUF",
+ "TUNGETVNETHDRSZ",
+ "TUNSETDEBUG",
+ "TUNSETGROUP",
+ "TUNSETIFF",
+ "TUNSETLINK",
+ "TUNSETNOCSUM",
+ "TUNSETOFFLOAD",
+ "TUNSETOWNER",
+ "TUNSETPERSIST",
+ "TUNSETSNDBUF",
+ "TUNSETTXFILTER",
+ "TUNSETVNETHDRSZ",
+ "Tee",
+ "TerminateProcess",
+ "Termios",
+ "Tgkill",
+ "Time",
+ "Time_t",
+ "Times",
+ "Timespec",
+ "TimespecToNsec",
+ "Timeval",
+ "Timeval32",
+ "TimevalToNsec",
+ "Timex",
+ "Timezoneinformation",
+ "Tms",
+ "Token",
+ "TokenAccessInformation",
+ "TokenAuditPolicy",
+ "TokenDefaultDacl",
+ "TokenElevation",
+ "TokenElevationType",
+ "TokenGroups",
+ "TokenGroupsAndPrivileges",
+ "TokenHasRestrictions",
+ "TokenImpersonationLevel",
+ "TokenIntegrityLevel",
+ "TokenLinkedToken",
+ "TokenLogonSid",
+ "TokenMandatoryPolicy",
+ "TokenOrigin",
+ "TokenOwner",
+ "TokenPrimaryGroup",
+ "TokenPrivileges",
+ "TokenRestrictedSids",
+ "TokenSandBoxInert",
+ "TokenSessionId",
+ "TokenSessionReference",
+ "TokenSource",
+ "TokenStatistics",
+ "TokenType",
+ "TokenUIAccess",
+ "TokenUser",
+ "TokenVirtualizationAllowed",
+ "TokenVirtualizationEnabled",
+ "Tokenprimarygroup",
+ "Tokenuser",
+ "TranslateAccountName",
+ "TranslateName",
+ "TransmitFile",
+ "TransmitFileBuffers",
+ "Truncate",
+ "UNIX_PATH_MAX",
+ "USAGE_MATCH_TYPE_AND",
+ "USAGE_MATCH_TYPE_OR",
+ "UTF16FromString",
+ "UTF16PtrFromString",
+ "UTF16ToString",
+ "Ucred",
+ "Umask",
+ "Uname",
+ "Undelete",
+ "UnixCredentials",
+ "UnixRights",
+ "Unlink",
+ "Unlinkat",
+ "UnmapViewOfFile",
+ "Unmount",
+ "Unsetenv",
+ "Unshare",
+ "UserInfo10",
+ "Ustat",
+ "Ustat_t",
+ "Utimbuf",
+ "Utime",
+ "Utimes",
+ "UtimesNano",
+ "Utsname",
+ "VDISCARD",
+ "VDSUSP",
+ "VEOF",
+ "VEOL",
+ "VEOL2",
+ "VERASE",
+ "VERASE2",
+ "VINTR",
+ "VKILL",
+ "VLNEXT",
+ "VMIN",
+ "VQUIT",
+ "VREPRINT",
+ "VSTART",
+ "VSTATUS",
+ "VSTOP",
+ "VSUSP",
+ "VSWTC",
+ "VT0",
+ "VT1",
+ "VTDLY",
+ "VTIME",
+ "VWERASE",
+ "VirtualLock",
+ "VirtualUnlock",
+ "WAIT_ABANDONED",
+ "WAIT_FAILED",
+ "WAIT_OBJECT_0",
+ "WAIT_TIMEOUT",
+ "WALL",
+ "WALLSIG",
+ "WALTSIG",
+ "WCLONE",
+ "WCONTINUED",
+ "WCOREFLAG",
+ "WEXITED",
+ "WLINUXCLONE",
+ "WNOHANG",
+ "WNOTHREAD",
+ "WNOWAIT",
+ "WNOZOMBIE",
+ "WOPTSCHECKED",
+ "WORDSIZE",
+ "WSABuf",
+ "WSACleanup",
+ "WSADESCRIPTION_LEN",
+ "WSAData",
+ "WSAEACCES",
+ "WSAECONNABORTED",
+ "WSAECONNRESET",
+ "WSAEnumProtocols",
+ "WSAID_CONNECTEX",
+ "WSAIoctl",
+ "WSAPROTOCOL_LEN",
+ "WSAProtocolChain",
+ "WSAProtocolInfo",
+ "WSARecv",
+ "WSARecvFrom",
+ "WSASYS_STATUS_LEN",
+ "WSASend",
+ "WSASendTo",
+ "WSASendto",
+ "WSAStartup",
+ "WSTOPPED",
+ "WTRAPPED",
+ "WUNTRACED",
+ "Wait4",
+ "WaitForSingleObject",
+ "WaitStatus",
+ "Win32FileAttributeData",
+ "Win32finddata",
+ "Write",
+ "WriteConsole",
+ "WriteFile",
+ "X509_ASN_ENCODING",
+ "XCASE",
+ "XP1_CONNECTIONLESS",
+ "XP1_CONNECT_DATA",
+ "XP1_DISCONNECT_DATA",
+ "XP1_EXPEDITED_DATA",
+ "XP1_GRACEFUL_CLOSE",
+ "XP1_GUARANTEED_DELIVERY",
+ "XP1_GUARANTEED_ORDER",
+ "XP1_IFS_HANDLES",
+ "XP1_MESSAGE_ORIENTED",
+ "XP1_MULTIPOINT_CONTROL_PLANE",
+ "XP1_MULTIPOINT_DATA_PLANE",
+ "XP1_PARTIAL_MESSAGE",
+ "XP1_PSEUDO_STREAM",
+ "XP1_QOS_SUPPORTED",
+ "XP1_SAN_SUPPORT_SDP",
+ "XP1_SUPPORT_BROADCAST",
+ "XP1_SUPPORT_MULTIPOINT",
+ "XP1_UNI_RECV",
+ "XP1_UNI_SEND",
+ },
+ "syscall/js": []string{
+ "CopyBytesToGo",
+ "CopyBytesToJS",
+ "Error",
+ "Func",
+ "FuncOf",
+ "Global",
+ "Null",
+ "Type",
+ "TypeBoolean",
+ "TypeFunction",
+ "TypeNull",
+ "TypeNumber",
+ "TypeObject",
+ "TypeString",
+ "TypeSymbol",
+ "TypeUndefined",
+ "Undefined",
+ "Value",
+ "ValueError",
+ "ValueOf",
+ "Wrapper",
+ },
+ "testing": []string{
+ "AllocsPerRun",
+ "B",
+ "Benchmark",
+ "BenchmarkResult",
+ "Cover",
+ "CoverBlock",
+ "CoverMode",
+ "Coverage",
+ "Init",
+ "InternalBenchmark",
+ "InternalExample",
+ "InternalTest",
+ "M",
+ "Main",
+ "MainStart",
+ "PB",
+ "RegisterCover",
+ "RunBenchmarks",
+ "RunExamples",
+ "RunTests",
+ "Short",
+ "T",
+ "TB",
+ "Verbose",
+ },
+ "testing/fstest": []string{
+ "MapFS",
+ "MapFile",
+ "TestFS",
+ },
+ "testing/iotest": []string{
+ "DataErrReader",
+ "ErrReader",
+ "ErrTimeout",
+ "HalfReader",
+ "NewReadLogger",
+ "NewWriteLogger",
+ "OneByteReader",
+ "TestReader",
+ "TimeoutReader",
+ "TruncateWriter",
+ },
+ "testing/quick": []string{
+ "Check",
+ "CheckEqual",
+ "CheckEqualError",
+ "CheckError",
+ "Config",
+ "Generator",
+ "SetupError",
+ "Value",
+ },
+ "text/scanner": []string{
+ "Char",
+ "Comment",
+ "EOF",
+ "Float",
+ "GoTokens",
+ "GoWhitespace",
+ "Ident",
+ "Int",
+ "Position",
+ "RawString",
+ "ScanChars",
+ "ScanComments",
+ "ScanFloats",
+ "ScanIdents",
+ "ScanInts",
+ "ScanRawStrings",
+ "ScanStrings",
+ "Scanner",
+ "SkipComments",
+ "String",
+ "TokenString",
+ },
+ "text/tabwriter": []string{
+ "AlignRight",
+ "Debug",
+ "DiscardEmptyColumns",
+ "Escape",
+ "FilterHTML",
+ "NewWriter",
+ "StripEscape",
+ "TabIndent",
+ "Writer",
+ },
+ "text/template": []string{
+ "ExecError",
+ "FuncMap",
+ "HTMLEscape",
+ "HTMLEscapeString",
+ "HTMLEscaper",
+ "IsTrue",
+ "JSEscape",
+ "JSEscapeString",
+ "JSEscaper",
+ "Must",
+ "New",
+ "ParseFS",
+ "ParseFiles",
+ "ParseGlob",
+ "Template",
+ "URLQueryEscaper",
+ },
+ "text/template/parse": []string{
+ "ActionNode",
+ "BoolNode",
+ "BranchNode",
+ "ChainNode",
+ "CommandNode",
+ "CommentNode",
+ "DotNode",
+ "FieldNode",
+ "IdentifierNode",
+ "IfNode",
+ "IsEmptyTree",
+ "ListNode",
+ "Mode",
+ "New",
+ "NewIdentifier",
+ "NilNode",
+ "Node",
+ "NodeAction",
+ "NodeBool",
+ "NodeChain",
+ "NodeCommand",
+ "NodeComment",
+ "NodeDot",
+ "NodeField",
+ "NodeIdentifier",
+ "NodeIf",
+ "NodeList",
+ "NodeNil",
+ "NodeNumber",
+ "NodePipe",
+ "NodeRange",
+ "NodeString",
+ "NodeTemplate",
+ "NodeText",
+ "NodeType",
+ "NodeVariable",
+ "NodeWith",
+ "NumberNode",
+ "Parse",
+ "ParseComments",
+ "PipeNode",
+ "Pos",
+ "RangeNode",
+ "SkipFuncCheck",
+ "StringNode",
+ "TemplateNode",
+ "TextNode",
+ "Tree",
+ "VariableNode",
+ "WithNode",
+ },
+ "time": []string{
+ "ANSIC",
+ "After",
+ "AfterFunc",
+ "April",
+ "August",
+ "Date",
+ "December",
+ "Duration",
+ "February",
+ "FixedZone",
+ "Friday",
+ "Hour",
+ "January",
+ "July",
+ "June",
+ "Kitchen",
+ "Layout",
+ "LoadLocation",
+ "LoadLocationFromTZData",
+ "Local",
+ "Location",
+ "March",
+ "May",
+ "Microsecond",
+ "Millisecond",
+ "Minute",
+ "Monday",
+ "Month",
+ "Nanosecond",
+ "NewTicker",
+ "NewTimer",
+ "November",
+ "Now",
+ "October",
+ "Parse",
+ "ParseDuration",
+ "ParseError",
+ "ParseInLocation",
+ "RFC1123",
+ "RFC1123Z",
+ "RFC3339",
+ "RFC3339Nano",
+ "RFC822",
+ "RFC822Z",
+ "RFC850",
+ "RubyDate",
+ "Saturday",
+ "Second",
+ "September",
+ "Since",
+ "Sleep",
+ "Stamp",
+ "StampMicro",
+ "StampMilli",
+ "StampNano",
+ "Sunday",
+ "Thursday",
+ "Tick",
+ "Ticker",
+ "Time",
+ "Timer",
+ "Tuesday",
+ "UTC",
+ "Unix",
+ "UnixDate",
+ "UnixMicro",
+ "UnixMilli",
+ "Until",
+ "Wednesday",
+ "Weekday",
+ },
+ "unicode": []string{
+ "ASCII_Hex_Digit",
+ "Adlam",
+ "Ahom",
+ "Anatolian_Hieroglyphs",
+ "Arabic",
+ "Armenian",
+ "Avestan",
+ "AzeriCase",
+ "Balinese",
+ "Bamum",
+ "Bassa_Vah",
+ "Batak",
+ "Bengali",
+ "Bhaiksuki",
+ "Bidi_Control",
+ "Bopomofo",
+ "Brahmi",
+ "Braille",
+ "Buginese",
+ "Buhid",
+ "C",
+ "Canadian_Aboriginal",
+ "Carian",
+ "CaseRange",
+ "CaseRanges",
+ "Categories",
+ "Caucasian_Albanian",
+ "Cc",
+ "Cf",
+ "Chakma",
+ "Cham",
+ "Cherokee",
+ "Chorasmian",
+ "Co",
+ "Common",
+ "Coptic",
+ "Cs",
+ "Cuneiform",
+ "Cypriot",
+ "Cyrillic",
+ "Dash",
+ "Deprecated",
+ "Deseret",
+ "Devanagari",
+ "Diacritic",
+ "Digit",
+ "Dives_Akuru",
+ "Dogra",
+ "Duployan",
+ "Egyptian_Hieroglyphs",
+ "Elbasan",
+ "Elymaic",
+ "Ethiopic",
+ "Extender",
+ "FoldCategory",
+ "FoldScript",
+ "Georgian",
+ "Glagolitic",
+ "Gothic",
+ "Grantha",
+ "GraphicRanges",
+ "Greek",
+ "Gujarati",
+ "Gunjala_Gondi",
+ "Gurmukhi",
+ "Han",
+ "Hangul",
+ "Hanifi_Rohingya",
+ "Hanunoo",
+ "Hatran",
+ "Hebrew",
+ "Hex_Digit",
+ "Hiragana",
+ "Hyphen",
+ "IDS_Binary_Operator",
+ "IDS_Trinary_Operator",
+ "Ideographic",
+ "Imperial_Aramaic",
+ "In",
+ "Inherited",
+ "Inscriptional_Pahlavi",
+ "Inscriptional_Parthian",
+ "Is",
+ "IsControl",
+ "IsDigit",
+ "IsGraphic",
+ "IsLetter",
+ "IsLower",
+ "IsMark",
+ "IsNumber",
+ "IsOneOf",
+ "IsPrint",
+ "IsPunct",
+ "IsSpace",
+ "IsSymbol",
+ "IsTitle",
+ "IsUpper",
+ "Javanese",
+ "Join_Control",
+ "Kaithi",
+ "Kannada",
+ "Katakana",
+ "Kayah_Li",
+ "Kharoshthi",
+ "Khitan_Small_Script",
+ "Khmer",
+ "Khojki",
+ "Khudawadi",
+ "L",
+ "Lao",
+ "Latin",
+ "Lepcha",
+ "Letter",
+ "Limbu",
+ "Linear_A",
+ "Linear_B",
+ "Lisu",
+ "Ll",
+ "Lm",
+ "Lo",
+ "Logical_Order_Exception",
+ "Lower",
+ "LowerCase",
+ "Lt",
+ "Lu",
+ "Lycian",
+ "Lydian",
+ "M",
+ "Mahajani",
+ "Makasar",
+ "Malayalam",
+ "Mandaic",
+ "Manichaean",
+ "Marchen",
+ "Mark",
+ "Masaram_Gondi",
+ "MaxASCII",
+ "MaxCase",
+ "MaxLatin1",
+ "MaxRune",
+ "Mc",
+ "Me",
+ "Medefaidrin",
+ "Meetei_Mayek",
+ "Mende_Kikakui",
+ "Meroitic_Cursive",
+ "Meroitic_Hieroglyphs",
+ "Miao",
+ "Mn",
+ "Modi",
+ "Mongolian",
+ "Mro",
+ "Multani",
+ "Myanmar",
+ "N",
+ "Nabataean",
+ "Nandinagari",
+ "Nd",
+ "New_Tai_Lue",
+ "Newa",
+ "Nko",
+ "Nl",
+ "No",
+ "Noncharacter_Code_Point",
+ "Number",
+ "Nushu",
+ "Nyiakeng_Puachue_Hmong",
+ "Ogham",
+ "Ol_Chiki",
+ "Old_Hungarian",
+ "Old_Italic",
+ "Old_North_Arabian",
+ "Old_Permic",
+ "Old_Persian",
+ "Old_Sogdian",
+ "Old_South_Arabian",
+ "Old_Turkic",
+ "Oriya",
+ "Osage",
+ "Osmanya",
+ "Other",
+ "Other_Alphabetic",
+ "Other_Default_Ignorable_Code_Point",
+ "Other_Grapheme_Extend",
+ "Other_ID_Continue",
+ "Other_ID_Start",
+ "Other_Lowercase",
+ "Other_Math",
+ "Other_Uppercase",
+ "P",
+ "Pahawh_Hmong",
+ "Palmyrene",
+ "Pattern_Syntax",
+ "Pattern_White_Space",
+ "Pau_Cin_Hau",
+ "Pc",
+ "Pd",
+ "Pe",
+ "Pf",
+ "Phags_Pa",
+ "Phoenician",
+ "Pi",
+ "Po",
+ "Prepended_Concatenation_Mark",
+ "PrintRanges",
+ "Properties",
+ "Ps",
+ "Psalter_Pahlavi",
+ "Punct",
+ "Quotation_Mark",
+ "Radical",
+ "Range16",
+ "Range32",
+ "RangeTable",
+ "Regional_Indicator",
+ "Rejang",
+ "ReplacementChar",
+ "Runic",
+ "S",
+ "STerm",
+ "Samaritan",
+ "Saurashtra",
+ "Sc",
+ "Scripts",
+ "Sentence_Terminal",
+ "Sharada",
+ "Shavian",
+ "Siddham",
+ "SignWriting",
+ "SimpleFold",
+ "Sinhala",
+ "Sk",
+ "Sm",
+ "So",
+ "Soft_Dotted",
+ "Sogdian",
+ "Sora_Sompeng",
+ "Soyombo",
+ "Space",
+ "SpecialCase",
+ "Sundanese",
+ "Syloti_Nagri",
+ "Symbol",
+ "Syriac",
+ "Tagalog",
+ "Tagbanwa",
+ "Tai_Le",
+ "Tai_Tham",
+ "Tai_Viet",
+ "Takri",
+ "Tamil",
+ "Tangut",
+ "Telugu",
+ "Terminal_Punctuation",
+ "Thaana",
+ "Thai",
+ "Tibetan",
+ "Tifinagh",
+ "Tirhuta",
+ "Title",
+ "TitleCase",
+ "To",
+ "ToLower",
+ "ToTitle",
+ "ToUpper",
+ "TurkishCase",
+ "Ugaritic",
+ "Unified_Ideograph",
+ "Upper",
+ "UpperCase",
+ "UpperLower",
+ "Vai",
+ "Variation_Selector",
+ "Version",
+ "Wancho",
+ "Warang_Citi",
+ "White_Space",
+ "Yezidi",
+ "Yi",
+ "Z",
+ "Zanabazar_Square",
+ "Zl",
+ "Zp",
+ "Zs",
+ },
+ "unicode/utf16": []string{
+ "Decode",
+ "DecodeRune",
+ "Encode",
+ "EncodeRune",
+ "IsSurrogate",
+ },
+ "unicode/utf8": []string{
+ "DecodeLastRune",
+ "DecodeLastRuneInString",
+ "DecodeRune",
+ "DecodeRuneInString",
+ "EncodeRune",
+ "FullRune",
+ "FullRuneInString",
+ "MaxRune",
+ "RuneCount",
+ "RuneCountInString",
+ "RuneError",
+ "RuneLen",
+ "RuneSelf",
+ "RuneStart",
+ "UTFMax",
+ "Valid",
+ "ValidRune",
+ "ValidString",
+ },
+ "unsafe": []string{
+ "Alignof",
+ "ArbitraryType",
+ "Offsetof",
+ "Pointer",
+ "Sizeof",
+ },
+}
diff --git a/vendor/golang.org/x/xerrors/LICENSE b/vendor/golang.org/x/xerrors/LICENSE
new file mode 100644
index 000000000..e4a47e17f
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2019 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/golang.org/x/xerrors/PATENTS b/vendor/golang.org/x/xerrors/PATENTS
new file mode 100644
index 000000000..733099041
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/PATENTS
@@ -0,0 +1,22 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the Go project.
+
+Google hereby grants to You a perpetual, worldwide, non-exclusive,
+no-charge, royalty-free, irrevocable (except as stated in this section)
+patent license to make, have made, use, offer to sell, sell, import,
+transfer and otherwise run, modify and propagate the contents of this
+implementation of Go, where such license applies only to those patent
+claims, both currently owned or controlled by Google and acquired in
+the future, licensable by Google that are necessarily infringed by this
+implementation of Go. This grant does not include claims that would be
+infringed only as a consequence of further modification of this
+implementation. If you or your agent or exclusive licensee institute or
+order or agree to the institution of patent litigation against any
+entity (including a cross-claim or counterclaim in a lawsuit) alleging
+that this implementation of Go or any code incorporated within this
+implementation of Go constitutes direct or contributory patent
+infringement, or inducement of patent infringement, then any patent
+rights granted to you under this License for this implementation of Go
+shall terminate as of the date such litigation is filed.
diff --git a/vendor/golang.org/x/xerrors/README b/vendor/golang.org/x/xerrors/README
new file mode 100644
index 000000000..aac7867a5
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/README
@@ -0,0 +1,2 @@
+This repository holds the transition packages for the new Go 1.13 error values.
+See golang.org/design/29934-error-values.
diff --git a/vendor/golang.org/x/xerrors/adaptor.go b/vendor/golang.org/x/xerrors/adaptor.go
new file mode 100644
index 000000000..4317f2483
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/adaptor.go
@@ -0,0 +1,193 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xerrors
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "reflect"
+ "strconv"
+)
+
+// FormatError calls the FormatError method of f with an errors.Printer
+// configured according to s and verb, and writes the result to s.
+func FormatError(f Formatter, s fmt.State, verb rune) {
+ // Assuming this function is only called from the Format method, and given
+ // that FormatError takes precedence over Format, it cannot be called from
+ // any package that supports errors.Formatter. It is therefore safe to
+ // disregard that State may be a specific printer implementation and use one
+ // of our choice instead.
+
+ // limitations: does not support printing error as Go struct.
+
+ var (
+ sep = " " // separator before next error
+ p = &state{State: s}
+ direct = true
+ )
+
+ var err error = f
+
+ switch verb {
+ // Note that this switch must match the preference order
+ // for ordinary string printing (%#v before %+v, and so on).
+
+ case 'v':
+ if s.Flag('#') {
+ if stringer, ok := err.(fmt.GoStringer); ok {
+ io.WriteString(&p.buf, stringer.GoString())
+ goto exit
+ }
+ // proceed as if it were %v
+ } else if s.Flag('+') {
+ p.printDetail = true
+ sep = "\n - "
+ }
+ case 's':
+ case 'q', 'x', 'X':
+ // Use an intermediate buffer in the rare cases that precision,
+ // truncation, or one of the alternative verbs (q, x, and X) are
+ // specified.
+ direct = false
+
+ default:
+ p.buf.WriteString("%!")
+ p.buf.WriteRune(verb)
+ p.buf.WriteByte('(')
+ switch {
+ case err != nil:
+ p.buf.WriteString(reflect.TypeOf(f).String())
+ default:
+ p.buf.WriteString("<nil>")
+ }
+ p.buf.WriteByte(')')
+ io.Copy(s, &p.buf)
+ return
+ }
+
+loop:
+ for {
+ switch v := err.(type) {
+ case Formatter:
+ err = v.FormatError((*printer)(p))
+ case fmt.Formatter:
+ v.Format(p, 'v')
+ break loop
+ default:
+ io.WriteString(&p.buf, v.Error())
+ break loop
+ }
+ if err == nil {
+ break
+ }
+ if p.needColon || !p.printDetail {
+ p.buf.WriteByte(':')
+ p.needColon = false
+ }
+ p.buf.WriteString(sep)
+ p.inDetail = false
+ p.needNewline = false
+ }
+
+exit:
+ width, okW := s.Width()
+ prec, okP := s.Precision()
+
+ if !direct || (okW && width > 0) || okP {
+ // Construct format string from State s.
+ format := []byte{'%'}
+ if s.Flag('-') {
+ format = append(format, '-')
+ }
+ if s.Flag('+') {
+ format = append(format, '+')
+ }
+ if s.Flag(' ') {
+ format = append(format, ' ')
+ }
+ if okW {
+ format = strconv.AppendInt(format, int64(width), 10)
+ }
+ if okP {
+ format = append(format, '.')
+ format = strconv.AppendInt(format, int64(prec), 10)
+ }
+ format = append(format, string(verb)...)
+ fmt.Fprintf(s, string(format), p.buf.String())
+ } else {
+ io.Copy(s, &p.buf)
+ }
+}
+
+var detailSep = []byte("\n ")
+
+// state tracks error printing state. It implements fmt.State.
+type state struct {
+ fmt.State
+ buf bytes.Buffer
+
+ printDetail bool
+ inDetail bool
+ needColon bool
+ needNewline bool
+}
+
+func (s *state) Write(b []byte) (n int, err error) {
+ if s.printDetail {
+ if len(b) == 0 {
+ return 0, nil
+ }
+ if s.inDetail && s.needColon {
+ s.needNewline = true
+ if b[0] == '\n' {
+ b = b[1:]
+ }
+ }
+ k := 0
+ for i, c := range b {
+ if s.needNewline {
+ if s.inDetail && s.needColon {
+ s.buf.WriteByte(':')
+ s.needColon = false
+ }
+ s.buf.Write(detailSep)
+ s.needNewline = false
+ }
+ if c == '\n' {
+ s.buf.Write(b[k:i])
+ k = i + 1
+ s.needNewline = true
+ }
+ }
+ s.buf.Write(b[k:])
+ if !s.inDetail {
+ s.needColon = true
+ }
+ } else if !s.inDetail {
+ s.buf.Write(b)
+ }
+ return len(b), nil
+}
+
+// printer wraps a state to implement an xerrors.Printer.
+type printer state
+
+func (s *printer) Print(args ...interface{}) {
+ if !s.inDetail || s.printDetail {
+ fmt.Fprint((*state)(s), args...)
+ }
+}
+
+func (s *printer) Printf(format string, args ...interface{}) {
+ if !s.inDetail || s.printDetail {
+ fmt.Fprintf((*state)(s), format, args...)
+ }
+}
+
+func (s *printer) Detail() bool {
+ s.inDetail = true
+ return s.printDetail
+}
diff --git a/vendor/golang.org/x/xerrors/codereview.cfg b/vendor/golang.org/x/xerrors/codereview.cfg
new file mode 100644
index 000000000..3f8b14b64
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/codereview.cfg
@@ -0,0 +1 @@
+issuerepo: golang/go
diff --git a/vendor/golang.org/x/xerrors/doc.go b/vendor/golang.org/x/xerrors/doc.go
new file mode 100644
index 000000000..eef99d9d5
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/doc.go
@@ -0,0 +1,22 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package xerrors implements functions to manipulate errors.
+//
+// This package is based on the Go 2 proposal for error values:
+// https://golang.org/design/29934-error-values
+//
+// These functions were incorporated into the standard library's errors package
+// in Go 1.13:
+// - Is
+// - As
+// - Unwrap
+//
+// Also, Errorf's %w verb was incorporated into fmt.Errorf.
+//
+// Use this package to get equivalent behavior in all supported Go versions.
+//
+// No other features of this package were included in Go 1.13, and at present
+// there are no plans to include any of them.
+package xerrors // import "golang.org/x/xerrors"
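The package comment above notes that Is, As, Unwrap, and the %w verb were absorbed into the standard library in Go 1.13, and that xerrors gives equivalent behavior on older releases. A minimal consumer-side sketch of that equivalence (the file name and messages below are illustrative, not taken from this repository):

package main

import (
	"fmt"
	"os"

	"golang.org/x/xerrors"
)

func loadConfig(name string) error {
	if _, err := os.Stat(name); err != nil {
		// The ": %w" suffix wraps err so the cause stays reachable
		// through Is, As, and Unwrap.
		return xerrors.Errorf("load config %q: %w", name, err)
	}
	return nil
}

func main() {
	err := loadConfig("/no/such/file")

	// Equivalent to errors.Is / errors.As on Go 1.13 and later.
	fmt.Println(xerrors.Is(err, os.ErrNotExist))

	var pathErr *os.PathError
	if xerrors.As(err, &pathErr) {
		fmt.Println("failing path:", pathErr.Path)
	}
}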
diff --git a/vendor/golang.org/x/xerrors/errors.go b/vendor/golang.org/x/xerrors/errors.go
new file mode 100644
index 000000000..e88d3772d
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/errors.go
@@ -0,0 +1,33 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xerrors
+
+import "fmt"
+
+// errorString is a trivial implementation of error.
+type errorString struct {
+ s string
+ frame Frame
+}
+
+// New returns an error that formats as the given text.
+//
+// The returned error contains a Frame set to the caller's location and
+// implements Formatter to show this information when printed with details.
+func New(text string) error {
+ return &errorString{text, Caller(1)}
+}
+
+func (e *errorString) Error() string {
+ return e.s
+}
+
+func (e *errorString) Format(s fmt.State, v rune) { FormatError(e, s, v) }
+
+func (e *errorString) FormatError(p Printer) (next error) {
+ p.Print(e.s)
+ e.frame.Format(p)
+ return nil
+}
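As the comment on New above says, the returned error records a caller Frame that is only rendered when detail is requested. A tiny sketch (not part of the vendored file) of the difference between %v and %+v:

package main

import (
	"fmt"

	"golang.org/x/xerrors"
)

func main() {
	err := xerrors.New("configuration not found")

	fmt.Printf("%v\n", err)  // message only
	fmt.Printf("%+v\n", err) // message plus the recording function and file:line
}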
diff --git a/vendor/golang.org/x/xerrors/fmt.go b/vendor/golang.org/x/xerrors/fmt.go
new file mode 100644
index 000000000..829862ddf
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/fmt.go
@@ -0,0 +1,187 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xerrors
+
+import (
+ "fmt"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+
+ "golang.org/x/xerrors/internal"
+)
+
+const percentBangString = "%!"
+
+// Errorf formats according to a format specifier and returns the string as a
+// value that satisfies error.
+//
+// The returned error includes the file and line number of the caller when
+// formatted with additional detail enabled. If the last argument is an error
+// the returned error's Format method will return it if the format string ends
+// with ": %s", ": %v", or ": %w". If the last argument is an error and the
+// format string ends with ": %w", the returned error implements an Unwrap
+// method returning it.
+//
+// If the format specifier includes a %w verb with an error operand in a
+// position other than at the end, the returned error will still implement an
+// Unwrap method returning the operand, but the error's Format method will not
+// return the wrapped error.
+//
+// It is invalid to include more than one %w verb or to supply it with an
+// operand that does not implement the error interface. The %w verb is otherwise
+// a synonym for %v.
+func Errorf(format string, a ...interface{}) error {
+ format = formatPlusW(format)
+ // Support a ": %[wsv]" suffix, which works well with xerrors.Formatter.
+ wrap := strings.HasSuffix(format, ": %w")
+ idx, format2, ok := parsePercentW(format)
+ percentWElsewhere := !wrap && idx >= 0
+ if !percentWElsewhere && (wrap || strings.HasSuffix(format, ": %s") || strings.HasSuffix(format, ": %v")) {
+ err := errorAt(a, len(a)-1)
+ if err == nil {
+ return &noWrapError{fmt.Sprintf(format, a...), nil, Caller(1)}
+ }
+ // TODO: this is not entirely correct. The error value could be
+ // printed elsewhere in format if it mixes numbered with unnumbered
+ // substitutions. With relatively small changes to doPrintf we can
+ // have it optionally ignore extra arguments and pass the argument
+ // list in its entirety.
+ msg := fmt.Sprintf(format[:len(format)-len(": %s")], a[:len(a)-1]...)
+ frame := Frame{}
+ if internal.EnableTrace {
+ frame = Caller(1)
+ }
+ if wrap {
+ return &wrapError{msg, err, frame}
+ }
+ return &noWrapError{msg, err, frame}
+ }
+ // Support %w anywhere.
+ // TODO: don't repeat the wrapped error's message when %w occurs in the middle.
+ msg := fmt.Sprintf(format2, a...)
+ if idx < 0 {
+ return &noWrapError{msg, nil, Caller(1)}
+ }
+ err := errorAt(a, idx)
+ if !ok || err == nil {
+ // Too many %ws or argument of %w is not an error. Approximate the Go
+ // 1.13 fmt.Errorf message.
+ return &noWrapError{fmt.Sprintf("%sw(%s)", percentBangString, msg), nil, Caller(1)}
+ }
+ frame := Frame{}
+ if internal.EnableTrace {
+ frame = Caller(1)
+ }
+ return &wrapError{msg, err, frame}
+}
+
+func errorAt(args []interface{}, i int) error {
+ if i < 0 || i >= len(args) {
+ return nil
+ }
+ err, ok := args[i].(error)
+ if !ok {
+ return nil
+ }
+ return err
+}
+
+// formatPlusW is used to avoid the vet check that will barf at %w.
+func formatPlusW(s string) string {
+ return s
+}
+
+// Return the index of the only %w in format, or -1 if none.
+// Also return a rewritten format string with %w replaced by %v, and
+// false if there is more than one %w.
+// TODO: handle "%[N]w".
+func parsePercentW(format string) (idx int, newFormat string, ok bool) {
+ // Loosely copied from golang.org/x/tools/go/analysis/passes/printf/printf.go.
+ idx = -1
+ ok = true
+ n := 0
+ sz := 0
+ var isW bool
+ for i := 0; i < len(format); i += sz {
+ if format[i] != '%' {
+ sz = 1
+ continue
+ }
+ // "%%" is not a format directive.
+ if i+1 < len(format) && format[i+1] == '%' {
+ sz = 2
+ continue
+ }
+ sz, isW = parsePrintfVerb(format[i:])
+ if isW {
+ if idx >= 0 {
+ ok = false
+ } else {
+ idx = n
+ }
+ // "Replace" the last character, the 'w', with a 'v'.
+ p := i + sz - 1
+ format = format[:p] + "v" + format[p+1:]
+ }
+ n++
+ }
+ return idx, format, ok
+}
+
+// Parse the printf verb starting with a % at s[0].
+// Return how many bytes it occupies and whether the verb is 'w'.
+func parsePrintfVerb(s string) (int, bool) {
+ // Assume only that the directive is a sequence of non-letters followed by a single letter.
+ sz := 0
+ var r rune
+ for i := 1; i < len(s); i += sz {
+ r, sz = utf8.DecodeRuneInString(s[i:])
+ if unicode.IsLetter(r) {
+ return i + sz, r == 'w'
+ }
+ }
+ return len(s), false
+}
+
+type noWrapError struct {
+ msg string
+ err error
+ frame Frame
+}
+
+func (e *noWrapError) Error() string {
+ return fmt.Sprint(e)
+}
+
+func (e *noWrapError) Format(s fmt.State, v rune) { FormatError(e, s, v) }
+
+func (e *noWrapError) FormatError(p Printer) (next error) {
+ p.Print(e.msg)
+ e.frame.Format(p)
+ return e.err
+}
+
+type wrapError struct {
+ msg string
+ err error
+ frame Frame
+}
+
+func (e *wrapError) Error() string {
+ return fmt.Sprint(e)
+}
+
+func (e *wrapError) Format(s fmt.State, v rune) { FormatError(e, s, v) }
+
+func (e *wrapError) FormatError(p Printer) (next error) {
+ p.Print(e.msg)
+ e.frame.Format(p)
+ return e.err
+}
+
+func (e *wrapError) Unwrap() error {
+ return e.err
+}
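The Errorf documentation above distinguishes the wrapping ": %w" suffix from the non-wrapping forms. A short sketch (illustrative only, not part of the vendored code) contrasting the two, matching the wrapError/noWrapError split implemented in this file:

package main

import (
	"fmt"
	"io"

	"golang.org/x/xerrors"
)

func main() {
	wrapped := xerrors.Errorf("read header: %w", io.ErrUnexpectedEOF)
	opaque := xerrors.Errorf("read header: %v", io.ErrUnexpectedEOF)

	// Only the ": %w" form exposes the cause to Unwrap and Is.
	fmt.Println(xerrors.Unwrap(wrapped) == io.ErrUnexpectedEOF) // true
	fmt.Println(xerrors.Unwrap(opaque))                         // <nil>
	fmt.Println(xerrors.Is(wrapped, io.ErrUnexpectedEOF))       // true
	fmt.Println(xerrors.Is(opaque, io.ErrUnexpectedEOF))        // false
}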
diff --git a/vendor/golang.org/x/xerrors/format.go b/vendor/golang.org/x/xerrors/format.go
new file mode 100644
index 000000000..1bc9c26b9
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/format.go
@@ -0,0 +1,34 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xerrors
+
+// A Formatter formats error messages.
+type Formatter interface {
+ error
+
+ // FormatError prints the receiver's first error and returns the next error in
+ // the error chain, if any.
+ FormatError(p Printer) (next error)
+}
+
+// A Printer formats error messages.
+//
+// The most common implementation of Printer is the one provided by package fmt
+// during Printf (as of Go 1.13). Localization packages such as golang.org/x/text/message
+// typically provide their own implementations.
+type Printer interface {
+ // Print appends args to the message output.
+ Print(args ...interface{})
+
+ // Printf writes a formatted string.
+ Printf(format string, args ...interface{})
+
+ // Detail reports whether error detail is requested.
+ // After the first call to Detail, all text written to the Printer
+ // is formatted as additional detail, or ignored when
+ // detail has not been requested.
+ // If Detail returns false, the caller can avoid printing the detail at all.
+ Detail() bool
+}
diff --git a/vendor/golang.org/x/xerrors/frame.go b/vendor/golang.org/x/xerrors/frame.go
new file mode 100644
index 000000000..0de628ec5
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/frame.go
@@ -0,0 +1,56 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xerrors
+
+import (
+ "runtime"
+)
+
+// A Frame contains part of a call stack.
+type Frame struct {
+ // Make room for three PCs: the one we were asked for, what it called,
+ // and possibly a PC for skipPleaseUseCallersFrames. See:
+ // https://go.googlesource.com/go/+/032678e0fb/src/runtime/extern.go#169
+ frames [3]uintptr
+}
+
+// Caller returns a Frame that describes a frame on the caller's stack.
+// The argument skip is the number of frames to skip over.
+// Caller(0) returns the frame for the caller of Caller.
+func Caller(skip int) Frame {
+ var s Frame
+ runtime.Callers(skip+1, s.frames[:])
+ return s
+}
+
+// location reports the file, line, and function of a frame.
+//
+// The returned function may be "" even if file and line are not.
+func (f Frame) location() (function, file string, line int) {
+ frames := runtime.CallersFrames(f.frames[:])
+ if _, ok := frames.Next(); !ok {
+ return "", "", 0
+ }
+ fr, ok := frames.Next()
+ if !ok {
+ return "", "", 0
+ }
+ return fr.Function, fr.File, fr.Line
+}
+
+// Format prints the stack as error detail.
+// It should be called from an error's Format implementation
+// after printing any other error detail.
+func (f Frame) Format(p Printer) {
+ if p.Detail() {
+ function, file, line := f.location()
+ if function != "" {
+ p.Printf("%s\n ", function)
+ }
+ if file != "" {
+ p.Printf("%s:%d\n", file, line)
+ }
+ }
+}
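Frame.Format above is meant to be called from an error's own formatting code, together with the Formatter and Printer interfaces from format.go. A hypothetical custom error type following that contract (the queryError name and message are invented for illustration):

package main

import (
	"fmt"

	"golang.org/x/xerrors"
)

// queryError records where it was created so "%+v" can show the location.
type queryError struct {
	query string
	frame xerrors.Frame
}

func newQueryError(query string) error {
	return &queryError{query: query, frame: xerrors.Caller(1)}
}

func (e *queryError) Error() string { return "query failed: " + e.query }

// Format delegates to FormatError, as adaptor.go expects.
func (e *queryError) Format(s fmt.State, v rune) { xerrors.FormatError(e, s, v) }

// FormatError prints the message and, when detail is requested, the frame.
func (e *queryError) FormatError(p xerrors.Printer) (next error) {
	p.Print(e.Error())
	e.frame.Format(p)
	return nil
}

func main() {
	err := newQueryError("SELECT 1")
	fmt.Printf("%v\n", err)  // message only
	fmt.Printf("%+v\n", err) // message plus function and file:line detail
}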
diff --git a/vendor/golang.org/x/xerrors/go.mod b/vendor/golang.org/x/xerrors/go.mod
new file mode 100644
index 000000000..870d4f612
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/go.mod
@@ -0,0 +1,3 @@
+module golang.org/x/xerrors
+
+go 1.11
diff --git a/vendor/golang.org/x/xerrors/internal/internal.go b/vendor/golang.org/x/xerrors/internal/internal.go
new file mode 100644
index 000000000..89f4eca5d
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/internal/internal.go
@@ -0,0 +1,8 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package internal
+
+// EnableTrace indicates whether stack information should be recorded in errors.
+var EnableTrace = true
diff --git a/vendor/golang.org/x/xerrors/wrap.go b/vendor/golang.org/x/xerrors/wrap.go
new file mode 100644
index 000000000..9a3b51037
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/wrap.go
@@ -0,0 +1,106 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xerrors
+
+import (
+ "reflect"
+)
+
+// A Wrapper provides context around another error.
+type Wrapper interface {
+ // Unwrap returns the next error in the error chain.
+ // If there is no next error, Unwrap returns nil.
+ Unwrap() error
+}
+
+// Opaque returns an error with the same error formatting as err
+// but that does not match err and cannot be unwrapped.
+func Opaque(err error) error {
+ return noWrapper{err}
+}
+
+type noWrapper struct {
+ error
+}
+
+func (e noWrapper) FormatError(p Printer) (next error) {
+ if f, ok := e.error.(Formatter); ok {
+ return f.FormatError(p)
+ }
+ p.Print(e.error)
+ return nil
+}
+
+// Unwrap returns the result of calling the Unwrap method on err, if err implements
+// Unwrap. Otherwise, Unwrap returns nil.
+func Unwrap(err error) error {
+ u, ok := err.(Wrapper)
+ if !ok {
+ return nil
+ }
+ return u.Unwrap()
+}
+
+// Is reports whether any error in err's chain matches target.
+//
+// An error is considered to match a target if it is equal to that target or if
+// it implements a method Is(error) bool such that Is(target) returns true.
+func Is(err, target error) bool {
+ if target == nil {
+ return err == target
+ }
+
+ isComparable := reflect.TypeOf(target).Comparable()
+ for {
+ if isComparable && err == target {
+ return true
+ }
+ if x, ok := err.(interface{ Is(error) bool }); ok && x.Is(target) {
+ return true
+ }
+ // TODO: consider supporting target.Is(err). This would allow
+ // user-definable predicates, but also may allow for coping with sloppy
+ // APIs, thereby making it easier to get away with them.
+ if err = Unwrap(err); err == nil {
+ return false
+ }
+ }
+}
+
+// As finds the first error in err's chain that matches the type to which target
+// points, and if so, sets the target to its value and returns true. An error
+// matches a type if it is assignable to the target type, or if it has a method
+// As(interface{}) bool such that As(target) returns true. As will panic if target
+// is not a non-nil pointer to a type which implements error or is of interface type.
+//
+// The As method should set the target to its value and return true if err
+// matches the type to which target points.
+func As(err error, target interface{}) bool {
+ if target == nil {
+ panic("errors: target cannot be nil")
+ }
+ val := reflect.ValueOf(target)
+ typ := val.Type()
+ if typ.Kind() != reflect.Ptr || val.IsNil() {
+ panic("errors: target must be a non-nil pointer")
+ }
+ if e := typ.Elem(); e.Kind() != reflect.Interface && !e.Implements(errorType) {
+ panic("errors: *target must be interface or implement error")
+ }
+ targetType := typ.Elem()
+ for err != nil {
+ if reflect.TypeOf(err).AssignableTo(targetType) {
+ val.Elem().Set(reflect.ValueOf(err))
+ return true
+ }
+ if x, ok := err.(interface{ As(interface{}) bool }); ok && x.As(target) {
+ return true
+ }
+ err = Unwrap(err)
+ }
+ return false
+}
+
+var errorType = reflect.TypeOf((*error)(nil)).Elem()
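Opaque, defined above, keeps an error's formatting while removing it from the Is/As/Unwrap chain. A brief sketch of the effect (illustrative, not part of the vendored file):

package main

import (
	"fmt"
	"io"

	"golang.org/x/xerrors"
)

func main() {
	wrapped := xerrors.Errorf("copy body: %w", io.EOF)
	hidden := xerrors.Opaque(wrapped)

	fmt.Println(xerrors.Is(wrapped, io.EOF)) // true
	fmt.Println(xerrors.Is(hidden, io.EOF))  // false: the chain is cut
	fmt.Println(hidden)                      // still prints "copy body: EOF"
}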
diff --git a/vendor/modules.txt b/vendor/modules.txt
index c2c17a6b9..e4e67fceb 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -299,6 +299,10 @@ github.com/coreos/stream-metadata-go/release
github.com/coreos/stream-metadata-go/release/rhcos
github.com/coreos/stream-metadata-go/stream
github.com/coreos/stream-metadata-go/stream/rhcos
+# github.com/cpuguy83/go-md2man/v2 v2.0.2
+## explicit
+github.com/cpuguy83/go-md2man/v2
+github.com/cpuguy83/go-md2man/v2/md2man
# github.com/cyphar/filepath-securejoin v0.2.3
## explicit
github.com/cyphar/filepath-securejoin
@@ -439,6 +443,8 @@ github.com/hashicorp/errwrap
# github.com/hashicorp/go-multierror v1.1.1
## explicit
github.com/hashicorp/go-multierror
+# github.com/hashicorp/go-version v1.2.1
+github.com/hashicorp/go-version
# github.com/imdario/mergo v0.3.12
github.com/imdario/mergo
# github.com/inconshreveable/mousetrap v1.0.0
@@ -626,6 +632,8 @@ github.com/rootless-containers/rootlesskit/pkg/port/builtin/parent/tcp
github.com/rootless-containers/rootlesskit/pkg/port/builtin/parent/udp
github.com/rootless-containers/rootlesskit/pkg/port/builtin/parent/udp/udpproxy
github.com/rootless-containers/rootlesskit/pkg/port/portutil
+# github.com/russross/blackfriday/v2 v2.1.0
+github.com/russross/blackfriday/v2
# github.com/seccomp/libseccomp-golang v0.9.2-0.20210429002308-3879420cc921
github.com/seccomp/libseccomp-golang
# github.com/sirupsen/logrus v1.8.1
@@ -665,6 +673,15 @@ github.com/ulikunitz/xz
github.com/ulikunitz/xz/internal/hash
github.com/ulikunitz/xz/internal/xlog
github.com/ulikunitz/xz/lzma
+# github.com/vbatts/git-validation v1.1.0
+## explicit
+github.com/vbatts/git-validation
+github.com/vbatts/git-validation/git
+github.com/vbatts/git-validation/rules/danglingwhitespace
+github.com/vbatts/git-validation/rules/dco
+github.com/vbatts/git-validation/rules/messageregexp
+github.com/vbatts/git-validation/rules/shortsubject
+github.com/vbatts/git-validation/validate
# github.com/vbatts/tar-split v0.11.2
github.com/vbatts/tar-split/archive/tar
github.com/vbatts/tar-split/tar/asm
@@ -719,6 +736,10 @@ golang.org/x/crypto/ssh
golang.org/x/crypto/ssh/agent
golang.org/x/crypto/ssh/internal/bcrypt_pbkdf
golang.org/x/crypto/ssh/knownhosts
+# golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3
+golang.org/x/mod/internal/lazyregexp
+golang.org/x/mod/module
+golang.org/x/mod/semver
# golang.org/x/net v0.0.0-20220225172249-27dd8689420f
golang.org/x/net/context
golang.org/x/net/html
@@ -769,8 +790,22 @@ golang.org/x/text/transform
golang.org/x/text/unicode/bidi
golang.org/x/text/unicode/norm
# golang.org/x/tools v0.1.10
+## explicit
+golang.org/x/tools/cmd/goimports
+golang.org/x/tools/go/ast/astutil
golang.org/x/tools/go/ast/inspector
+golang.org/x/tools/internal/event
+golang.org/x/tools/internal/event/core
+golang.org/x/tools/internal/event/keys
+golang.org/x/tools/internal/event/label
+golang.org/x/tools/internal/fastwalk
+golang.org/x/tools/internal/gocommand
+golang.org/x/tools/internal/gopathwalk
+golang.org/x/tools/internal/imports
golang.org/x/tools/internal/typeparams
+# golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1
+golang.org/x/xerrors
+golang.org/x/xerrors/internal
# google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8
google.golang.org/genproto/googleapis/rpc/status
# google.golang.org/grpc v1.44.0