-rw-r--r--  .drone.yml | 3
-rw-r--r--  docs/api/swagger.yaml | 846
-rw-r--r--  go.mod | 33
-rw-r--r--  go.sum | 182
-rw-r--r--  internal/api/activitypub/users/common.go | 2
-rw-r--r--  internal/api/activitypub/users/featured.go | 8
-rw-r--r--  internal/api/client/accounts/accountupdate.go | 65
-rw-r--r--  internal/api/client/admin/domainkeysexpire.go | 5
-rw-r--r--  internal/api/client/admin/emojicategoriesget.go | 10
-rw-r--r--  internal/api/client/admin/emojicreate.go | 3
-rw-r--r--  internal/api/client/admin/emojiupdate.go | 7
-rw-r--r--  internal/api/client/admin/reportresolve.go | 3
-rw-r--r--  internal/api/client/admin/rulecreate.go | 9
-rw-r--r--  internal/api/client/admin/ruledelete.go | 6
-rw-r--r--  internal/api/client/admin/rulesget.go | 2
-rw-r--r--  internal/api/client/admin/ruleupdate.go | 18
-rw-r--r--  internal/api/client/filters/v1/filterpost.go | 39
-rw-r--r--  internal/api/client/filters/v1/filterput.go | 39
-rw-r--r--  internal/api/client/instance/instancepatch.go | 10
-rw-r--r--  internal/api/client/lists/listupdate.go | 11
-rw-r--r--  internal/api/client/notifications/notificationget.go | 8
-rw-r--r--  internal/api/client/statuses/statuscreate.go | 139
-rw-r--r--  internal/api/client/statuses/statuscreate_test.go | 70
-rw-r--r--  internal/api/client/timelines/tag.go | 8
-rw-r--r--  internal/api/model/account.go | 4
-rw-r--r--  internal/api/model/announcement.go | 2
-rw-r--r--  internal/api/model/announcementreaction.go | 2
-rw-r--r--  internal/api/model/domain.go | 6
-rw-r--r--  internal/api/model/emoji.go | 4
-rw-r--r--  internal/api/model/headerfilter.go | 8
-rw-r--r--  internal/api/model/instancev1.go | 2
-rw-r--r--  internal/api/model/list.go | 15
-rw-r--r--  internal/api/model/marker.go | 2
-rw-r--r--  internal/api/model/multistatus.go | 6
-rw-r--r--  internal/api/model/poll.go | 10
-rw-r--r--  internal/api/model/report.go | 12
-rw-r--r--  internal/api/model/rule.go | 19
-rw-r--r--  internal/api/model/status.go | 20
-rwxr-xr-x  test/swagger.sh | 30
-rw-r--r--  tools/tools.go | 28
-rw-r--r--  vendor/github.com/Masterminds/goutils/.travis.yml | 18
-rw-r--r--  vendor/github.com/Masterminds/goutils/CHANGELOG.md | 8
-rw-r--r--  vendor/github.com/Masterminds/goutils/LICENSE.txt | 202
-rw-r--r--  vendor/github.com/Masterminds/goutils/README.md | 70
-rw-r--r--  vendor/github.com/Masterminds/goutils/appveyor.yml | 21
-rw-r--r--  vendor/github.com/Masterminds/goutils/cryptorandomstringutils.go | 230
-rw-r--r--  vendor/github.com/Masterminds/goutils/randomstringutils.go | 248
-rw-r--r--  vendor/github.com/Masterminds/goutils/stringutils.go | 240
-rw-r--r--  vendor/github.com/Masterminds/goutils/wordutils.go | 357
-rw-r--r--  vendor/github.com/Masterminds/semver/v3/.gitignore | 1
-rw-r--r--  vendor/github.com/Masterminds/semver/v3/.golangci.yml | 30
-rw-r--r--  vendor/github.com/Masterminds/semver/v3/CHANGELOG.md | 214
-rw-r--r--  vendor/github.com/Masterminds/semver/v3/LICENSE.txt | 19
-rw-r--r--  vendor/github.com/Masterminds/semver/v3/Makefile | 37
-rw-r--r--  vendor/github.com/Masterminds/semver/v3/README.md | 244
-rw-r--r--  vendor/github.com/Masterminds/semver/v3/collection.go | 24
-rw-r--r--  vendor/github.com/Masterminds/semver/v3/constraints.go | 594
-rw-r--r--  vendor/github.com/Masterminds/semver/v3/doc.go | 184
-rw-r--r--  vendor/github.com/Masterminds/semver/v3/fuzz.go | 22
-rw-r--r--  vendor/github.com/Masterminds/semver/v3/version.go | 639
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/.gitignore | 2
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/CHANGELOG.md | 383
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/LICENSE.txt | 19
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/Makefile | 9
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/README.md | 100
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/crypto.go | 653
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/date.go | 152
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/defaults.go | 163
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/dict.go | 174
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/doc.go | 19
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/functions.go | 382
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/list.go | 464
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/network.go | 12
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/numeric.go | 186
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/reflect.go | 28
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/regex.go | 83
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/semver.go | 23
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/strings.go | 236
-rw-r--r--  vendor/github.com/Masterminds/sprig/v3/url.go | 66
-rw-r--r--  vendor/github.com/asaskevich/govalidator/.gitignore | 15
-rw-r--r--  vendor/github.com/asaskevich/govalidator/.travis.yml | 12
-rw-r--r--  vendor/github.com/asaskevich/govalidator/CODE_OF_CONDUCT.md | 43
-rw-r--r--  vendor/github.com/asaskevich/govalidator/CONTRIBUTING.md | 63
-rw-r--r--  vendor/github.com/asaskevich/govalidator/LICENSE | 21
-rw-r--r--  vendor/github.com/asaskevich/govalidator/README.md | 622
-rw-r--r--  vendor/github.com/asaskevich/govalidator/arrays.go | 87
-rw-r--r--  vendor/github.com/asaskevich/govalidator/converter.go | 81
-rw-r--r--  vendor/github.com/asaskevich/govalidator/doc.go | 3
-rw-r--r--  vendor/github.com/asaskevich/govalidator/error.go | 47
-rw-r--r--  vendor/github.com/asaskevich/govalidator/numerics.go | 100
-rw-r--r--  vendor/github.com/asaskevich/govalidator/patterns.go | 113
-rw-r--r--  vendor/github.com/asaskevich/govalidator/types.go | 656
-rw-r--r--  vendor/github.com/asaskevich/govalidator/utils.go | 270
-rw-r--r--  vendor/github.com/asaskevich/govalidator/validator.go | 1768
-rw-r--r--  vendor/github.com/asaskevich/govalidator/wercker.yml | 15
-rw-r--r--  vendor/github.com/docker/go-units/size.go | 70
-rw-r--r--  vendor/github.com/felixge/httpsnoop/.gitignore | 0
-rw-r--r--  vendor/github.com/felixge/httpsnoop/.travis.yml | 6
-rw-r--r--  vendor/github.com/felixge/httpsnoop/LICENSE.txt | 19
-rw-r--r--  vendor/github.com/felixge/httpsnoop/Makefile | 10
-rw-r--r--  vendor/github.com/felixge/httpsnoop/README.md | 95
-rw-r--r--  vendor/github.com/felixge/httpsnoop/capture_metrics.go | 86
-rw-r--r--  vendor/github.com/felixge/httpsnoop/docs.go | 10
-rw-r--r--  vendor/github.com/felixge/httpsnoop/wrap_generated_gteq_1.8.go | 436
-rw-r--r--  vendor/github.com/felixge/httpsnoop/wrap_generated_lt_1.8.go | 278
-rw-r--r--  vendor/github.com/go-openapi/analysis/.codecov.yml | 5
-rw-r--r--  vendor/github.com/go-openapi/analysis/.gitattributes | 2
-rw-r--r--  vendor/github.com/go-openapi/analysis/.gitignore | 5
-rw-r--r--  vendor/github.com/go-openapi/analysis/.golangci.yml | 56
-rw-r--r--  vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md | 74
-rw-r--r--  vendor/github.com/go-openapi/analysis/LICENSE | 202
-rw-r--r--  vendor/github.com/go-openapi/analysis/README.md | 31
-rw-r--r--  vendor/github.com/go-openapi/analysis/analyzer.go | 1064
-rw-r--r--  vendor/github.com/go-openapi/analysis/appveyor.yml | 32
-rw-r--r--  vendor/github.com/go-openapi/analysis/debug.go | 23
-rw-r--r--  vendor/github.com/go-openapi/analysis/doc.go | 43
-rw-r--r--  vendor/github.com/go-openapi/analysis/fixer.go | 79
-rw-r--r--  vendor/github.com/go-openapi/analysis/flatten.go | 802
-rw-r--r--  vendor/github.com/go-openapi/analysis/flatten_name.go | 293
-rw-r--r--  vendor/github.com/go-openapi/analysis/flatten_options.go | 78
-rw-r--r--  vendor/github.com/go-openapi/analysis/internal/debug/debug.go | 41
-rw-r--r--  vendor/github.com/go-openapi/analysis/internal/flatten/normalize/normalize.go | 87
-rw-r--r--  vendor/github.com/go-openapi/analysis/internal/flatten/operations/operations.go | 90
-rw-r--r--  vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go | 434
-rw-r--r--  vendor/github.com/go-openapi/analysis/internal/flatten/schutils/flatten_schema.go | 29
-rw-r--r--  vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go | 201
-rw-r--r--  vendor/github.com/go-openapi/analysis/internal/flatten/sortref/sort_ref.go | 141
-rw-r--r--  vendor/github.com/go-openapi/analysis/mixin.go | 515
-rw-r--r--  vendor/github.com/go-openapi/analysis/schema.go | 256
-rw-r--r--  vendor/github.com/go-openapi/errors/.gitattributes | 1
-rw-r--r--  vendor/github.com/go-openapi/errors/.gitignore | 2
-rw-r--r--  vendor/github.com/go-openapi/errors/.golangci.yml | 48
-rw-r--r--  vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md | 74
-rw-r--r--  vendor/github.com/go-openapi/errors/LICENSE | 202
-rw-r--r--  vendor/github.com/go-openapi/errors/README.md | 11
-rw-r--r--  vendor/github.com/go-openapi/errors/api.go | 182
-rw-r--r--  vendor/github.com/go-openapi/errors/auth.go | 22
-rw-r--r--  vendor/github.com/go-openapi/errors/doc.go | 26
-rw-r--r--  vendor/github.com/go-openapi/errors/headers.go | 103
-rw-r--r--  vendor/github.com/go-openapi/errors/middleware.go | 50
-rw-r--r--  vendor/github.com/go-openapi/errors/parsing.go | 78
-rw-r--r--  vendor/github.com/go-openapi/errors/schema.go | 611
-rw-r--r--  vendor/github.com/go-openapi/inflect/.hgignore | 1
-rw-r--r--  vendor/github.com/go-openapi/inflect/LICENCE | 7
-rw-r--r--  vendor/github.com/go-openapi/inflect/README | 168
-rw-r--r--  vendor/github.com/go-openapi/inflect/inflect.go | 713
-rw-r--r--  vendor/github.com/go-openapi/jsonpointer/.editorconfig | 26
-rw-r--r--  vendor/github.com/go-openapi/jsonpointer/.gitignore | 1
-rw-r--r--  vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md | 74
-rw-r--r--  vendor/github.com/go-openapi/jsonpointer/LICENSE | 202
-rw-r--r--  vendor/github.com/go-openapi/jsonpointer/README.md | 15
-rw-r--r--  vendor/github.com/go-openapi/jsonpointer/pointer.go | 390
-rw-r--r--  vendor/github.com/go-openapi/jsonreference/.gitignore | 1
-rw-r--r--  vendor/github.com/go-openapi/jsonreference/.golangci.yml | 50
-rw-r--r--  vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md | 74
-rw-r--r--  vendor/github.com/go-openapi/jsonreference/LICENSE | 202
-rw-r--r--  vendor/github.com/go-openapi/jsonreference/README.md | 15
-rw-r--r--  vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go | 69
-rw-r--r--  vendor/github.com/go-openapi/jsonreference/reference.go | 158
-rw-r--r--  vendor/github.com/go-openapi/loads/.editorconfig | 26
-rw-r--r--  vendor/github.com/go-openapi/loads/.gitignore | 4
-rw-r--r--  vendor/github.com/go-openapi/loads/.golangci.yml | 44
-rw-r--r--  vendor/github.com/go-openapi/loads/.travis.yml | 25
-rw-r--r--  vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md | 74
-rw-r--r--  vendor/github.com/go-openapi/loads/LICENSE | 202
-rw-r--r--  vendor/github.com/go-openapi/loads/README.md | 6
-rw-r--r--  vendor/github.com/go-openapi/loads/doc.go | 21
-rw-r--r--  vendor/github.com/go-openapi/loads/fmts/yaml.go | 30
-rw-r--r--  vendor/github.com/go-openapi/loads/loaders.go | 134
-rw-r--r--  vendor/github.com/go-openapi/loads/options.go | 61
-rw-r--r--  vendor/github.com/go-openapi/loads/spec.go | 266
-rw-r--r--  vendor/github.com/go-openapi/runtime/.editorconfig | 26
-rw-r--r--  vendor/github.com/go-openapi/runtime/.gitattributes | 1
-rw-r--r--  vendor/github.com/go-openapi/runtime/.gitignore | 5
-rw-r--r--  vendor/github.com/go-openapi/runtime/.golangci.yml | 44
-rw-r--r--  vendor/github.com/go-openapi/runtime/CODE_OF_CONDUCT.md | 74
-rw-r--r--  vendor/github.com/go-openapi/runtime/LICENSE | 202
-rw-r--r--  vendor/github.com/go-openapi/runtime/README.md | 7
-rw-r--r--  vendor/github.com/go-openapi/runtime/bytestream.go | 169
-rw-r--r--  vendor/github.com/go-openapi/runtime/client_auth_info.go | 30
-rw-r--r--  vendor/github.com/go-openapi/runtime/client_operation.go | 41
-rw-r--r--  vendor/github.com/go-openapi/runtime/client_request.go | 152
-rw-r--r--  vendor/github.com/go-openapi/runtime/client_response.go | 110
-rw-r--r--  vendor/github.com/go-openapi/runtime/constants.go | 49
-rw-r--r--  vendor/github.com/go-openapi/runtime/csv.go | 77
-rw-r--r--  vendor/github.com/go-openapi/runtime/discard.go | 9
-rw-r--r--  vendor/github.com/go-openapi/runtime/file.go | 19
-rw-r--r--  vendor/github.com/go-openapi/runtime/headers.go | 45
-rw-r--r--  vendor/github.com/go-openapi/runtime/interfaces.go | 112
-rw-r--r--  vendor/github.com/go-openapi/runtime/json.go | 38
-rw-r--r--  vendor/github.com/go-openapi/runtime/logger/logger.go | 20
-rw-r--r--  vendor/github.com/go-openapi/runtime/logger/standard.go | 22
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/context.go | 635
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/denco/LICENSE | 19
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/denco/README.md | 180
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/denco/router.go | 460
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/denco/server.go | 106
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/denco/util.go | 12
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/doc.go | 62
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/go18.go | 9
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/header/header.go | 329
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/negotiate.go | 98
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/not_implemented.go | 67
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/operation.go | 30
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/parameter.go | 485
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/pre_go18.go | 9
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/rapidoc.go | 90
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/redoc.go | 103
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/request.go | 104
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/router.go | 488
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/security.go | 39
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/spec.go | 48
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/swaggerui.go | 168
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/swaggerui_oauth2.go | 122
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/untyped/api.go | 286
-rw-r--r--  vendor/github.com/go-openapi/runtime/middleware/validation.go | 126
-rw-r--r--  vendor/github.com/go-openapi/runtime/request.go | 139
-rw-r--r--  vendor/github.com/go-openapi/runtime/security/authenticator.go | 276
-rw-r--r--  vendor/github.com/go-openapi/runtime/security/authorizer.go | 27
-rw-r--r--  vendor/github.com/go-openapi/runtime/statuses.go | 90
-rw-r--r--  vendor/github.com/go-openapi/runtime/text.go | 116
-rw-r--r--  vendor/github.com/go-openapi/runtime/values.go | 19
-rw-r--r--  vendor/github.com/go-openapi/runtime/xml.go | 36
-rw-r--r--  vendor/github.com/go-openapi/spec/.editorconfig | 26
-rw-r--r--  vendor/github.com/go-openapi/spec/.gitignore | 2
-rw-r--r--  vendor/github.com/go-openapi/spec/.golangci.yml | 42
-rw-r--r--  vendor/github.com/go-openapi/spec/CODE_OF_CONDUCT.md | 74
-rw-r--r--  vendor/github.com/go-openapi/spec/LICENSE | 202
-rw-r--r--  vendor/github.com/go-openapi/spec/README.md | 34
-rw-r--r--  vendor/github.com/go-openapi/spec/appveyor.yml | 32
-rw-r--r--  vendor/github.com/go-openapi/spec/bindata.go | 297
-rw-r--r--  vendor/github.com/go-openapi/spec/cache.go | 98
-rw-r--r--  vendor/github.com/go-openapi/spec/contact_info.go | 57
-rw-r--r--  vendor/github.com/go-openapi/spec/debug.go | 49
-rw-r--r--  vendor/github.com/go-openapi/spec/errors.go | 19
-rw-r--r--  vendor/github.com/go-openapi/spec/expander.go | 594
-rw-r--r--  vendor/github.com/go-openapi/spec/external_docs.go | 24
-rw-r--r--  vendor/github.com/go-openapi/spec/header.go | 203
-rw-r--r--  vendor/github.com/go-openapi/spec/info.go | 184
-rw-r--r--  vendor/github.com/go-openapi/spec/items.go | 234
-rw-r--r--  vendor/github.com/go-openapi/spec/license.go | 56
-rw-r--r--  vendor/github.com/go-openapi/spec/normalizer.go | 202
-rw-r--r--  vendor/github.com/go-openapi/spec/normalizer_nonwindows.go | 44
-rw-r--r--  vendor/github.com/go-openapi/spec/normalizer_windows.go | 154
-rw-r--r--  vendor/github.com/go-openapi/spec/operation.go | 397
-rw-r--r--  vendor/github.com/go-openapi/spec/parameter.go | 326
-rw-r--r--  vendor/github.com/go-openapi/spec/path_item.go | 87
-rw-r--r--  vendor/github.com/go-openapi/spec/paths.go | 97
-rw-r--r--  vendor/github.com/go-openapi/spec/properties.go | 91
-rw-r--r--  vendor/github.com/go-openapi/spec/ref.go | 193
-rw-r--r--  vendor/github.com/go-openapi/spec/resolver.go | 127
-rw-r--r--  vendor/github.com/go-openapi/spec/response.go | 152
-rw-r--r--  vendor/github.com/go-openapi/spec/responses.go | 140
-rw-r--r--  vendor/github.com/go-openapi/spec/schema.go | 645
-rw-r--r--  vendor/github.com/go-openapi/spec/schema_loader.go | 338
-rw-r--r--  vendor/github.com/go-openapi/spec/security_scheme.go | 170
-rw-r--r--  vendor/github.com/go-openapi/spec/spec.go | 78
-rw-r--r--  vendor/github.com/go-openapi/spec/swagger.go | 448
-rw-r--r--  vendor/github.com/go-openapi/spec/tag.go | 75
-rw-r--r--  vendor/github.com/go-openapi/spec/url_go18.go | 8
-rw-r--r--  vendor/github.com/go-openapi/spec/url_go19.go | 14
-rw-r--r--  vendor/github.com/go-openapi/spec/validations.go | 215
-rw-r--r--  vendor/github.com/go-openapi/spec/xml_object.go | 68
-rw-r--r--  vendor/github.com/go-openapi/strfmt/.editorconfig | 26
-rw-r--r--  vendor/github.com/go-openapi/strfmt/.gitattributes | 2
-rw-r--r--  vendor/github.com/go-openapi/strfmt/.gitignore | 2
-rw-r--r--  vendor/github.com/go-openapi/strfmt/.golangci.yml | 59
-rw-r--r--  vendor/github.com/go-openapi/strfmt/CODE_OF_CONDUCT.md | 74
-rw-r--r--  vendor/github.com/go-openapi/strfmt/LICENSE | 202
-rw-r--r--  vendor/github.com/go-openapi/strfmt/README.md | 88
-rw-r--r--  vendor/github.com/go-openapi/strfmt/bson.go | 165
-rw-r--r--  vendor/github.com/go-openapi/strfmt/date.go | 187
-rw-r--r--  vendor/github.com/go-openapi/strfmt/default.go | 2035
-rw-r--r--  vendor/github.com/go-openapi/strfmt/doc.go | 18
-rw-r--r--  vendor/github.com/go-openapi/strfmt/duration.go | 211
-rw-r--r--  vendor/github.com/go-openapi/strfmt/format.go | 326
-rw-r--r--  vendor/github.com/go-openapi/strfmt/time.go | 319
-rw-r--r--  vendor/github.com/go-openapi/strfmt/ulid.go | 230
-rw-r--r--  vendor/github.com/go-openapi/swag/.editorconfig | 26
-rw-r--r--  vendor/github.com/go-openapi/swag/.gitattributes | 2
-rw-r--r--  vendor/github.com/go-openapi/swag/.gitignore | 4
-rw-r--r--  vendor/github.com/go-openapi/swag/.golangci.yml | 54
-rw-r--r--  vendor/github.com/go-openapi/swag/CODE_OF_CONDUCT.md | 74
-rw-r--r--  vendor/github.com/go-openapi/swag/LICENSE | 202
-rw-r--r--  vendor/github.com/go-openapi/swag/README.md | 21
-rw-r--r--  vendor/github.com/go-openapi/swag/convert.go | 208
-rw-r--r--  vendor/github.com/go-openapi/swag/convert_types.go | 730
-rw-r--r--  vendor/github.com/go-openapi/swag/doc.go | 31
-rw-r--r--  vendor/github.com/go-openapi/swag/file.go | 33
-rw-r--r--  vendor/github.com/go-openapi/swag/json.go | 312
-rw-r--r--  vendor/github.com/go-openapi/swag/loading.go | 121
-rw-r--r--  vendor/github.com/go-openapi/swag/name_lexem.go | 87
-rw-r--r--  vendor/github.com/go-openapi/swag/net.go | 38
-rw-r--r--  vendor/github.com/go-openapi/swag/path.go | 59
-rw-r--r--  vendor/github.com/go-openapi/swag/post_go18.go | 24
-rw-r--r--  vendor/github.com/go-openapi/swag/post_go19.go | 68
-rw-r--r--  vendor/github.com/go-openapi/swag/pre_go18.go | 24
-rw-r--r--  vendor/github.com/go-openapi/swag/pre_go19.go | 70
-rw-r--r--  vendor/github.com/go-openapi/swag/split.go | 262
-rw-r--r--  vendor/github.com/go-openapi/swag/util.go | 394
-rw-r--r--  vendor/github.com/go-openapi/swag/yaml.go | 450
-rw-r--r--  vendor/github.com/go-openapi/validate/.editorconfig | 26
-rw-r--r--  vendor/github.com/go-openapi/validate/.gitattributes | 2
-rw-r--r--  vendor/github.com/go-openapi/validate/.gitignore | 5
-rw-r--r--  vendor/github.com/go-openapi/validate/.golangci.yml | 50
-rw-r--r--  vendor/github.com/go-openapi/validate/CODE_OF_CONDUCT.md | 74
-rw-r--r--  vendor/github.com/go-openapi/validate/LICENSE | 202
-rw-r--r--  vendor/github.com/go-openapi/validate/README.md | 38
-rw-r--r--  vendor/github.com/go-openapi/validate/appveyor.yml | 32
-rw-r--r--  vendor/github.com/go-openapi/validate/context.go | 56
-rw-r--r--  vendor/github.com/go-openapi/validate/debug.go | 47
-rw-r--r--  vendor/github.com/go-openapi/validate/default_validator.go | 281
-rw-r--r--  vendor/github.com/go-openapi/validate/doc.go | 85
-rw-r--r--  vendor/github.com/go-openapi/validate/example_validator.go | 270
-rw-r--r--  vendor/github.com/go-openapi/validate/formats.go | 69
-rw-r--r--  vendor/github.com/go-openapi/validate/helpers.go | 324
-rw-r--r--  vendor/github.com/go-openapi/validate/object_validator.go | 279
-rw-r--r--  vendor/github.com/go-openapi/validate/options.go | 43
-rw-r--r--  vendor/github.com/go-openapi/validate/result.go | 486
-rw-r--r--  vendor/github.com/go-openapi/validate/rexp.go | 71
-rw-r--r--  vendor/github.com/go-openapi/validate/schema.go | 260
-rw-r--r--  vendor/github.com/go-openapi/validate/schema_messages.go | 78
-rw-r--r--  vendor/github.com/go-openapi/validate/schema_option.go | 54
-rw-r--r--  vendor/github.com/go-openapi/validate/schema_props.go | 240
-rw-r--r--  vendor/github.com/go-openapi/validate/slice_validator.go | 105
-rw-r--r--  vendor/github.com/go-openapi/validate/spec.go | 804
-rw-r--r--  vendor/github.com/go-openapi/validate/spec_messages.go | 360
-rw-r--r--  vendor/github.com/go-openapi/validate/type.go | 177
-rw-r--r--  vendor/github.com/go-openapi/validate/update-fixtures.sh | 15
-rw-r--r--  vendor/github.com/go-openapi/validate/validator.go | 645
-rw-r--r--  vendor/github.com/go-openapi/validate/values.go | 450
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/LICENSE | 202
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/.gitignore | 5
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff.go | 145
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/array_diff.go | 106
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/checks.go | 266
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/compatibility.go | 111
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difference_location.go | 22
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difftypes.go | 337
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/node.go | 82
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/reporting.go | 118
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/schema.go | 126
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_analyser.go | 759
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_difference.go | 216
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/type_adapters.go | 163
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/expand.go | 81
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/flatten.go | 48
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate.go | 29
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/cli.go | 26
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/client.go | 86
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/contrib.go | 17
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/markdown.go | 33
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/model.go | 98
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/operation.go | 104
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/server.go | 119
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/shared.go | 240
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/sharedopts_nonwin.go | 19
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/sharedopts_win.go | 8
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec.go | 125
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec_go111.go | 119
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/support.go | 67
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd.go | 13
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd/spec.go | 111
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/mixin.go | 117
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/serve.go | 117
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/validate.go | 83
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/version.go | 37
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/cmd/swagger/swagger.go | 143
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/codescan/README.md | 3
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/codescan/application.go | 674
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/codescan/doc.go | 6
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/codescan/enum.go | 32
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/codescan/meta.go | 252
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/codescan/operations.go | 170
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/codescan/parameters.go | 518
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/codescan/parser.go | 1667
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers.go | 51
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers_go118.go | 42
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/codescan/regexprs.go | 96
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/codescan/responses.go | 454
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/codescan/route_params.go | 263
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/codescan/routes.go | 93
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/codescan/schema.go | 1155
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/codescan/spec.go | 258
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/.gitignore | 1
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/bindata.go | 40
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/client.go | 120
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/config.go | 61
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/debug.go | 64
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/discriminators.go | 75
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/doc.go | 78
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/formats.go | 226
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/genopts_nonwin.go | 50
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/genopts_win.go | 12
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/language.go | 440
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/media.go | 191
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/model.go | 2118
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/operation.go | 1303
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/shared.go | 1096
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/spec.go | 273
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/structs.go | 803
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/support.go | 546
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/template_repo.go | 855
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/cli/cli.gotmpl | 242
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/cli/completion.gotmpl | 77
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/cli/main.gotmpl | 28
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/cli/modelcli.gotmpl | 25
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/cli/operation.gotmpl | 230
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/cli/registerflag.gotmpl | 97
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/cli/retrieveflag.gotmpl | 59
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/cli/schema.gotmpl | 193
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/client/client.gotmpl | 127
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/client/facade.gotmpl | 129
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/client/parameter.gotmpl | 406
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/client/response.gotmpl | 346
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/README.md | 311
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/client/client.gotmpl | 111
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/client/facade.gotmpl | 83
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/configureapi.gotmpl | 222
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/server.gotmpl | 9
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/docstring.gotmpl | 25
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/header.gotmpl | 20
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/markdown/docs.gotmpl | 527
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/model.gotmpl | 27
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/schema.gotmpl | 131
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/schemabody.gotmpl | 330
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/schemaembedded.gotmpl | 21
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/schemapolymorphic.gotmpl | 53
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/schematype.gotmpl | 29
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/schemavalidator.gotmpl | 1194
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/additionalpropertiesserializer.gotmpl | 94
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/aliasedserializer.gotmpl | 11
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/allofserializer.gotmpl | 180
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/basetypeserializer.gotmpl | 69
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/marshalbinaryserializer.gotmpl | 19
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/schemaserializer.gotmpl | 15
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/subtypeserializer.gotmpl | 172
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/tupleserializer.gotmpl | 66
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/server/autoconfigureapi.gotmpl | 205
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/server/builder.gotmpl | 446
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/server/configureapi.gotmpl | 167
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/server/doc.gotmpl | 63
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/server/main.gotmpl | 186
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/server/operation.gotmpl | 92
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/server/parameter.gotmpl | 720
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/server/responses.gotmpl | 271
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/server/server.gotmpl | 660
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/server/urlbuilder.gotmpl | 213
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/simpleschema/defaultsinit.gotmpl | 23
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/simpleschema/defaultsvar.gotmpl | 29
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/structfield.gotmpl | 41
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/swagger_json_embed.gotmpl | 30
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/validation/customformat.gotmpl | 3
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/validation/maximum.gotmpl | 23
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/validation/minimum.gotmpl | 23
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/validation/multipleOf.gotmpl | 23
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/validation/primitive.gotmpl | 29
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/templates/validation/structfield.gotmpl | 62
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/generator/types.go | 1284
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/scan/README.md | 3
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/scan/classifier.go | 166
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/scan/doc.go | 89
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/scan/enum.go | 84
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/scan/meta.go | 246
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/scan/operations.go | 85
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/scan/parameters.go | 515
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/scan/path.go | 151
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/scan/responses.go | 453
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/scan/route_params.go | 253
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/scan/routes.go | 146
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/scan/scanner.go | 974
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/scan/schema.go | 1358
-rw-r--r--  vendor/github.com/go-swagger/go-swagger/scan/validators.go | 829
-rw-r--r--  vendor/github.com/gorilla/handlers/LICENSE | 22
-rw-r--r--  vendor/github.com/gorilla/handlers/README.md | 56
-rw-r--r--  vendor/github.com/gorilla/handlers/canonical.go | 74
-rw-r--r--  vendor/github.com/gorilla/handlers/compress.go | 143
-rw-r--r--  vendor/github.com/gorilla/handlers/cors.go | 355
-rw-r--r--  vendor/github.com/gorilla/handlers/doc.go | 9
-rw-r--r--  vendor/github.com/gorilla/handlers/handlers.go | 147
-rw-r--r--  vendor/github.com/gorilla/handlers/logging.go | 244
-rw-r--r--  vendor/github.com/gorilla/handlers/proxy_headers.go | 120
-rw-r--r--  vendor/github.com/gorilla/handlers/recovery.go | 96
-rw-r--r--  vendor/github.com/huandu/xstrings/.gitignore | 24
-rw-r--r--  vendor/github.com/huandu/xstrings/CONTRIBUTING.md | 23
-rw-r--r--  vendor/github.com/huandu/xstrings/LICENSE | 22
-rw-r--r--  vendor/github.com/huandu/xstrings/README.md | 117
-rw-r--r--  vendor/github.com/huandu/xstrings/common.go | 21
-rw-r--r--  vendor/github.com/huandu/xstrings/convert.go | 590
-rw-r--r--  vendor/github.com/huandu/xstrings/count.go | 120
-rw-r--r--  vendor/github.com/huandu/xstrings/doc.go | 8
-rw-r--r--  vendor/github.com/huandu/xstrings/format.go | 169
-rw-r--r--  vendor/github.com/huandu/xstrings/manipulate.go | 216
-rw-r--r--  vendor/github.com/huandu/xstrings/stringbuilder.go | 7
-rw-r--r--  vendor/github.com/huandu/xstrings/stringbuilder_go110.go | 9
-rw-r--r--  vendor/github.com/huandu/xstrings/translate.go | 546
-rw-r--r--  vendor/github.com/imdario/mergo/.deepsource.toml | 12
-rw-r--r--  vendor/github.com/imdario/mergo/.gitignore | 33
-rw-r--r--  vendor/github.com/imdario/mergo/.travis.yml | 12
-rw-r--r--  vendor/github.com/imdario/mergo/CODE_OF_CONDUCT.md | 46
-rw-r--r--  vendor/github.com/imdario/mergo/LICENSE | 28
-rw-r--r--  vendor/github.com/imdario/mergo/README.md | 247
-rw-r--r--  vendor/github.com/imdario/mergo/doc.go | 143
-rw-r--r--  vendor/github.com/imdario/mergo/map.go | 178
-rw-r--r--  vendor/github.com/imdario/mergo/merge.go | 380
-rw-r--r--  vendor/github.com/imdario/mergo/mergo.go | 78
-rw-r--r--  vendor/github.com/jessevdk/go-flags/.travis.yml | 39
-rw-r--r--  vendor/github.com/jessevdk/go-flags/LICENSE | 26
-rw-r--r--  vendor/github.com/jessevdk/go-flags/README.md | 139
-rw-r--r--  vendor/github.com/jessevdk/go-flags/arg.go | 27
-rw-r--r--  vendor/github.com/jessevdk/go-flags/check_crosscompile.sh | 20
-rw-r--r--  vendor/github.com/jessevdk/go-flags/closest.go | 59
-rw-r--r--  vendor/github.com/jessevdk/go-flags/command.go | 465
-rw-r--r--  vendor/github.com/jessevdk/go-flags/completion.go | 315
-rw-r--r--  vendor/github.com/jessevdk/go-flags/convert.go | 357
-rw-r--r--  vendor/github.com/jessevdk/go-flags/error.go | 138
-rw-r--r--  vendor/github.com/jessevdk/go-flags/flags.go | 263
-rw-r--r--  vendor/github.com/jessevdk/go-flags/group.go | 429
-rw-r--r--  vendor/github.com/jessevdk/go-flags/help.go | 514
-rw-r--r--  vendor/github.com/jessevdk/go-flags/ini.go | 615
-rw-r--r--  vendor/github.com/jessevdk/go-flags/man.go | 223
-rw-r--r--  vendor/github.com/jessevdk/go-flags/multitag.go | 140
-rw-r--r--  vendor/github.com/jessevdk/go-flags/option.go | 569
-rw-r--r--  vendor/github.com/jessevdk/go-flags/optstyle_other.go | 67
-rw-r--r--  vendor/github.com/jessevdk/go-flags/optstyle_windows.go | 108
-rw-r--r--  vendor/github.com/jessevdk/go-flags/parser.go | 714
-rw-r--r--  vendor/github.com/jessevdk/go-flags/termsize.go | 15
-rw-r--r--  vendor/github.com/jessevdk/go-flags/termsize_nosysioctl.go | 7
-rw-r--r--  vendor/github.com/jessevdk/go-flags/termsize_windows.go | 85
-rw-r--r--  vendor/github.com/josharian/intern/README.md | 5
-rw-r--r--  vendor/github.com/josharian/intern/intern.go | 44
-rw-r--r--  vendor/github.com/josharian/intern/license.md | 21
-rw-r--r--  vendor/github.com/kr/pretty/.gitignore | 5
-rw-r--r--  vendor/github.com/kr/pretty/License | 19
-rw-r--r--  vendor/github.com/kr/pretty/Readme | 9
-rw-r--r--  vendor/github.com/kr/pretty/diff.go | 295
-rw-r--r--  vendor/github.com/kr/pretty/formatter.go | 355
-rw-r--r--  vendor/github.com/kr/pretty/pretty.go | 108
-rw-r--r--  vendor/github.com/kr/pretty/zero.go | 41
-rw-r--r--  vendor/github.com/kr/text/License | 19
-rw-r--r--  vendor/github.com/kr/text/Readme | 3
-rw-r--r--  vendor/github.com/kr/text/doc.go | 3
-rw-r--r--  vendor/github.com/kr/text/indent.go | 74
-rw-r--r--  vendor/github.com/kr/text/wrap.go | 86
-rw-r--r--  vendor/github.com/mailru/easyjson/LICENSE | 7
-rw-r--r--  vendor/github.com/mailru/easyjson/buffer/pool.go | 278
-rw-r--r--  vendor/github.com/mailru/easyjson/jlexer/bytestostr.go | 24
-rw-r--r--  vendor/github.com/mailru/easyjson/jlexer/bytestostr_nounsafe.go | 13
-rw-r--r--  vendor/github.com/mailru/easyjson/jlexer/error.go | 15
-rw-r--r--  vendor/github.com/mailru/easyjson/jlexer/lexer.go | 1244
-rw-r--r--  vendor/github.com/mailru/easyjson/jwriter/writer.go | 405
-rw-r--r--  vendor/github.com/mitchellh/copystructure/LICENSE | 21
-rw-r--r--  vendor/github.com/mitchellh/copystructure/README.md | 21
-rw-r--r--  vendor/github.com/mitchellh/copystructure/copier_time.go | 15
-rw-r--r--  vendor/github.com/mitchellh/copystructure/copystructure.go | 631
-rw-r--r--  vendor/github.com/mitchellh/reflectwalk/.travis.yml | 1
-rw-r--r--  vendor/github.com/mitchellh/reflectwalk/LICENSE | 21
-rw-r--r--  vendor/github.com/mitchellh/reflectwalk/README.md | 6
-rw-r--r--  vendor/github.com/mitchellh/reflectwalk/location.go | 19
-rw-r--r--  vendor/github.com/mitchellh/reflectwalk/location_string.go | 16
-rw-r--r--  vendor/github.com/mitchellh/reflectwalk/reflectwalk.go | 420
-rw-r--r--  vendor/github.com/rogpeppe/go-internal/LICENSE | 27
-rw-r--r--  vendor/github.com/rogpeppe/go-internal/fmtsort/mapelem.go | 20
-rw-r--r--  vendor/github.com/rogpeppe/go-internal/fmtsort/sort.go | 209
-rw-r--r--  vendor/github.com/shopspring/decimal/.gitignore | 6
-rw-r--r--  vendor/github.com/shopspring/decimal/.travis.yml | 13
-rw-r--r--  vendor/github.com/shopspring/decimal/CHANGELOG.md | 19
-rw-r--r--  vendor/github.com/shopspring/decimal/LICENSE | 45
-rw-r--r--  vendor/github.com/shopspring/decimal/README.md | 130
-rw-r--r--  vendor/github.com/shopspring/decimal/decimal-go.go | 415
-rw-r--r--  vendor/github.com/shopspring/decimal/decimal.go | 1477
-rw-r--r--  vendor/github.com/shopspring/decimal/rounding.go | 119
-rw-r--r--  vendor/github.com/toqueteos/webbrowser/.travis.yml | 9
-rw-r--r--  vendor/github.com/toqueteos/webbrowser/CONTRIBUTING.md | 11
-rw-r--r--  vendor/github.com/toqueteos/webbrowser/LICENSE.md | 19
-rw-r--r--  vendor/github.com/toqueteos/webbrowser/README.md | 56
-rw-r--r--  vendor/github.com/toqueteos/webbrowser/webbrowser.go | 137
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/LICENSE | 201
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bson.go | 50
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/array_codec.go | 50
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/bsoncodec.go | 238
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/byte_slice_codec.go | 111
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/cond_addr_codec.go | 63
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_decoders.go | 1729
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_encoders.go | 766
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/doc.go | 90
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/empty_interface_codec.go | 147
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/map_codec.go | 309
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/mode.go | 65
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/pointer_codec.go | 109
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/proxy.go | 14
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/registry.go | 469
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/slice_codec.go | 199
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/string_codec.go | 119
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_codec.go | 664
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_tag_parser.go | 139
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/time_codec.go | 127
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/types.go | 57
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/uint_codec.go | 173
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/byte_slice_codec_options.go | 38
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/doc.go | 8
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/empty_interface_codec_options.go | 38
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/map_codec_options.go | 67
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/slice_codec_options.go | 38
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/string_codec_options.go | 41
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/struct_codec_options.go | 87
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/time_codec_options.go | 38
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/uint_codec_options.go | 38
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonrw/copier.go | 445
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonrw/doc.go | 9
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_parser.go | 806
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_reader.go | 644
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_tables.go | 223
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_wrappers.go | 492
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_writer.go | 732
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonrw/json_scanner.go | 528
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonrw/mode.go | 108
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonrw/reader.go | 63
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonrw/value_reader.go | 874
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonrw/value_writer.go | 606
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsonrw/writer.go | 78
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/bsontype/bsontype.go | 97
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/decoder.go | 141
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/doc.go | 141
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/encoder.go | 99
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/marshal.go | 248
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/primitive/decimal.go | 423
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/primitive/objectid.go | 206
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/primitive/primitive.go | 217
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/primitive_codecs.go | 92
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/raw.go | 85
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/raw_element.go | 51
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/raw_value.go | 309
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/registry.go | 24
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/types.go | 36
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/bson/unmarshal.go | 101
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/array.go | 164
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/bson_arraybuilder.go | 201
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/bson_documentbuilder.go | 189
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/bsoncore.go | 862
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/document.go | 386
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/document_sequence.go | 189
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/element.go | 152
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/tables.go | 223
-rw-r--r--  vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/value.go | 980
-rw-r--r--  vendor/golang.org/x/crypto/scrypt/scrypt.go | 212
-rw-r--r--  vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go | 78
-rw-r--r--  vendor/golang.org/x/mod/module/module.go | 841
-rw-r--r--  vendor/golang.org/x/mod/module/pseudo.go | 250
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/enclosing.go | 634
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/imports.go | 485
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/rewrite.go | 486
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/util.go | 18
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/allpackages.go | 195
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/fakecontext.go | 111
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/overlay.go | 101
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/tags.go | 80
-rw-r--r--  vendor/golang.org/x/tools/go/buildutil/util.go | 209
-rw-r--r--  vendor/golang.org/x/tools/go/internal/cgo/cgo.go | 219
-rw-r--r--  vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go | 39
-rw-r--r--  vendor/golang.org/x/tools/go/loader/doc.go | 202
-rw-r--r--  vendor/golang.org/x/tools/go/loader/loader.go | 1066
-rw-r--r--  vendor/golang.org/x/tools/go/loader/util.go | 123
-rw-r--r--  vendor/golang.org/x/tools/imports/forward.go | 77
-rw-r--r--  vendor/golang.org/x/tools/internal/gopathwalk/walk.go | 331
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/fix.go | 1769
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/imports.go | 356
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/mod.go | 723
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/mod_cache.go | 236
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/sortimports.go | 297
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/zstdlib.go | 11345
-rw-r--r--  vendor/modules.txt | 134
672 files changed, 135624 insertions, 713 deletions
diff --git a/.drone.yml b/.drone.yml
index 5145ff3c0..5d157aff3 100644
--- a/.drone.yml
+++ b/.drone.yml
@@ -38,6 +38,7 @@ steps:
- apk update --no-cache && apk add git
- CGO_ENABLED=0 GTS_DB_TYPE="sqlite" GTS_DB_ADDRESS=":memory:" go test ./...
- CGO_ENABLED=0 ./test/envparsing.sh
+ - CGO_ENABLED=0 ./test/swagger.sh
when:
event:
include:
@@ -192,6 +193,6 @@ steps:
---
kind: signature
-hmac: 00f69df57e8852d610f8d570c504aae22d315c2a0ff4808ef8f191554745c5ae
+hmac: 4789cebf9156b2c3cb0f097311ea7620e709e4332f130dcae51a938515dc952e
...
diff --git a/docs/api/swagger.yaml b/docs/api/swagger.yaml
index 2d5e9bed8..3cf9922a7 100644
--- a/docs/api/swagger.yaml
+++ b/docs/api/swagger.yaml
@@ -1,9 +1,5 @@
basePath: /
definitions:
- EmojiUpdateType:
- title: EmojiUpdateType models an admin update action to take on a custom emoji.
- type: string
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
InstanceConfigurationEmojis:
properties:
emoji_size_limit:
@@ -160,6 +156,24 @@ definitions:
title: Source represents display or publishing preferences of user's own account.
type: object
x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
+ TimelineMarker:
+ properties:
+ last_read_id:
+ description: The ID of the most recently viewed entity.
+ type: string
+ x-go-name: LastReadID
+ updated_at:
+ description: The timestamp of when the marker was set (ISO 8601 Datetime)
+ type: string
+ x-go-name: UpdatedAt
+ version:
+ description: Used for locking to prevent write conflicts.
+ format: int64
+ type: integer
+ x-go-name: Version
+ title: TimelineMarker contains information about a user's progress through a specific timeline.
+ type: object
+ x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
account:
description: The modelled account can be either a remote account, or one on this instance.
properties:
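
The TimelineMarker definition added in the hunk above maps to a Go model in internal/api/model. As a rough illustration only, a struct matching these swagger properties and their x-go-name annotations could look like the following sketch; it is reconstructed from the schema, not copied from the actual source, and the real model may differ in detail:

package model

// TimelineMarker contains information about a user's progress through
// a specific timeline. Field names and json tags follow the swagger
// properties above.
type TimelineMarker struct {
	// The ID of the most recently viewed entity.
	LastReadID string `json:"last_read_id"`
	// The timestamp of when the marker was set (ISO 8601 Datetime).
	UpdatedAt string `json:"updated_at"`
	// Used for locking to prevent write conflicts.
	Version int64 `json:"version"`
}
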
@@ -622,151 +636,6 @@ definitions:
type: object
x-go-name: AdminReport
x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- advancedVisibilityFlagsForm:
- description: |-
- AdvancedVisibilityFlagsForm allows a few more advanced flags to be set on new statuses, in addition
- to the standard mastodon-compatible ones.
- properties:
- boostable:
- description: This status can be boosted/reblogged.
- type: boolean
- x-go-name: Boostable
- federated:
- description: This status will be federated beyond the local timeline(s).
- type: boolean
- x-go-name: Federated
- likeable:
- description: This status can be liked/faved.
- type: boolean
- x-go-name: Likeable
- replyable:
- description: This status can be replied to.
- type: boolean
- x-go-name: Replyable
- type: object
- x-go-name: AdvancedVisibilityFlagsForm
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- announcement:
- properties:
- all_day:
- description: Announcement doesn't have begin time and end time, but begin day and end day.
- type: boolean
- x-go-name: AllDay
- content:
- description: |-
- The body of the announcement.
- Should be HTML formatted.
- example: <p>This is an announcement. No malarky.</p>
- type: string
- x-go-name: Content
- emoji:
- description: Emojis used in this announcement.
- items:
- $ref: '#/definitions/emoji'
- type: array
- x-go-name: Emojis
- ends_at:
- description: |-
- When the announcement should stop being displayed (ISO 8601 Datetime).
- If the announcement has no end time, this will be omitted or empty.
- example: "2021-07-30T09:20:25+00:00"
- type: string
- x-go-name: EndsAt
- id:
- description: The ID of the announcement.
- example: 01FC30T7X4TNCZK0TH90QYF3M4
- type: string
- x-go-name: ID
- mentions:
- description: Mentions this announcement contains.
- items:
- $ref: '#/definitions/Mention'
- type: array
- x-go-name: Mentions
- published:
- description: |-
- Announcement is 'published', ie., visible to users.
- Announcements that are not published should be shown only to admins.
- type: boolean
- x-go-name: Published
- published_at:
- description: When the announcement was first published (ISO 8601 Datetime).
- example: "2021-07-30T09:20:25+00:00"
- type: string
- x-go-name: PublishedAt
- reactions:
- description: Reactions to this announcement.
- items:
- $ref: '#/definitions/announcementReaction'
- type: array
- x-go-name: Reactions
- read:
- description: Requesting account has seen this announcement.
- type: boolean
- x-go-name: Read
- starts_at:
- description: |-
- When the announcement should begin to be displayed (ISO 8601 Datetime).
- If the announcement has no start time, this will be omitted or empty.
- example: "2021-07-30T09:20:25+00:00"
- type: string
- x-go-name: StartsAt
- statuses:
- description: Statuses contained in this announcement.
- items:
- $ref: '#/definitions/status'
- type: array
- x-go-name: Statuses
- tags:
- description: Tags used in this announcement.
- items:
- $ref: '#/definitions/tag'
- type: array
- x-go-name: Tags
- updated_at:
- description: When the announcement was last updated (ISO 8601 Datetime).
- example: "2021-07-30T09:20:25+00:00"
- type: string
- x-go-name: UpdatedAt
- title: Announcement models an admin announcement for the instance.
- type: object
- x-go-name: Announcement
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- announcementReaction:
- properties:
- count:
- description: The total number of users who have added this reaction.
- example: 5
- format: int64
- type: integer
- x-go-name: Count
- me:
- description: This reaction belongs to the account viewing it.
- type: boolean
- x-go-name: Me
- name:
- description: The emoji used for the reaction. Either a unicode emoji, or a custom emoji's shortcode.
- example: blobcat_uwu
- type: string
- x-go-name: Name
- static_url:
- description: |-
- Web link to a non-animated image of the custom emoji.
- Empty for unicode emojis.
- example: https://example.org/custom_emojis/statuc/blobcat_uwu.png
- type: string
- x-go-name: StaticURL
- url:
- description: |-
- Web link to the image of the custom emoji.
- Empty for unicode emojis.
- example: https://example.org/custom_emojis/original/blobcat_uwu.png
- type: string
- x-go-name: URL
- title: AnnouncementReaction models a user reaction to an announcement.
- type: object
- x-go-name: AnnouncementReaction
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
application:
properties:
client_id:
@@ -1003,16 +872,6 @@ definitions:
type: object
x-go-name: Domain
x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- domainKeysExpireRequest:
- properties:
- domain:
- description: hostname/domain to expire keys for.
- type: string
- x-go-name: Domain
- title: DomainBlockCreateRequest is the form submitted as a POST to /api/v1/admin/domain_keys_expire to expire a domain's public keys.
- type: object
- x-go-name: DomainKeysExpireRequest
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
domainPermission:
properties:
created_at:
@@ -1070,43 +929,6 @@ definitions:
type: object
x-go-name: DomainPermission
x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- domainPermissionCreateRequest:
- properties:
- domain:
- description: |-
- A single domain for which this permission request should apply.
- Only used if import=true is NOT specified or if import=false.
- example: example.org
- type: string
- x-go-name: Domain
- domains:
- description: |-
- A list of domains for which this permission request should apply.
- Only used if import=true is specified.
- x-go-name: Domains
- obfuscate:
- description: |-
- Obfuscate the domain name when displaying this permission entry publicly.
- Ie., instead of 'example.org' show something like 'e**mpl*.or*'.
- example: false
- type: boolean
- x-go-name: Obfuscate
- private_comment:
- description: Private comment for other admins on why this permission entry was created.
- example: don't like 'em!!!!
- type: string
- x-go-name: PrivateComment
- public_comment:
- description: |-
- Public comment on why this permission entry was created.
- Will be visible to requesters at /api/v1/instance/peers if this endpoint is exposed.
- example: "foss dorks \U0001F62B"
- type: string
- x-go-name: PublicComment
- title: DomainPermissionRequest is the form submitted as a POST to create a new domain permission entry (allow/block).
- type: object
- x-go-name: DomainPermissionRequest
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
emoji:
properties:
category:
@@ -1152,42 +974,6 @@ definitions:
type: object
x-go-name: EmojiCategory
x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- emojiCreateRequest:
- properties:
- CategoryName:
- description: |-
- Category in which to place the new emoji. Will be uncategorized by default.
- CategoryName length should not exceed 64 characters.
- type: string
- Image:
- description: Image file to use for the emoji. Must be png or gif and no larger than 50kb.
- Shortcode:
- description: Desired shortcode for the emoji, without surrounding colons. This must be unique for the domain.
- example: blobcat_uwu
- type: string
- title: EmojiCreateRequest represents a request to create a custom emoji made through the admin API.
- type: object
- x-go-name: EmojiCreateRequest
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- emojiUpdateRequest:
- properties:
- CategoryName:
- description: Category in which to place the emoji.
- type: string
- Image:
- description: |-
- Image file to use for the emoji.
- Must be png or gif and no larger than 50kb.
- Shortcode:
- description: Desired shortcode for the emoji, without surrounding colons. This must be unique for the domain.
- example: blobcat_uwu
- type: string
- type:
- $ref: '#/definitions/EmojiUpdateType'
- title: EmojiUpdateRequest represents a request to update a custom emoji, made through the admin API.
- type: object
- x-go-name: EmojiUpdateRequest
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
field:
properties:
name:
@@ -1264,19 +1050,39 @@ definitions:
type: object
x-go-name: FilterV1
x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- headerFilterCreateRequest:
+ headerFilter:
properties:
+ created_at:
+ description: Time at which the header filter was created (ISO 8601 Datetime).
+ example: "2021-07-30T09:20:25+00:00"
+ readOnly: true
+ type: string
+ x-go-name: CreatedAt
+ created_by:
+ description: The ID of the admin account that created this header filter.
+ example: 01FBW2758ZB6PBR200YPDDJK4C
+ readOnly: true
+ type: string
+ x-go-name: CreatedBy
header:
- description: The HTTP header to match against (e.g. User-Agent).
+ description: The HTTP header to match against.
+ example: User-Agent
type: string
x-go-name: Header
+ id:
+ description: The ID of the header filter.
+ example: 01FBW21XJA09XYX51KV5JVBW0F
+ readOnly: true
+ type: string
+ x-go-name: ID
regex:
description: The header value matching regular expression.
+ example: .*Firefox.*
type: string
x-go-name: Regex
- title: HeaderFilterRequest is the form submitted as a POST to create a new header filter entry (allow / block).
+ title: HeaderFilter represents a regex value filter applied to one particular HTTP header (allow / block).
type: object
- x-go-name: HeaderFilterRequest
+ x-go-name: HeaderFilter
x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
hostmeta:
description: 'See: https://www.rfc-editor.org/rfc/rfc6415.html#section-3'
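Swapping the write-only headerFilterCreateRequest definition for a full headerFilter means responses are now documented too, including the server-assigned read-only fields. A minimal Go sketch of the shape described above, using the x-go-name values as field names (the canonical type lives in internal/api/model):

    package model

    // HeaderFilter mirrors the headerFilter definition above. ID, CreatedAt,
    // and CreatedBy are read-only and filled in by the server.
    type HeaderFilter struct {
        ID        string `json:"id"`         // e.g. "01FBW21XJA09XYX51KV5JVBW0F"
        Header    string `json:"header"`     // HTTP header to match, e.g. "User-Agent"
        Regex     string `json:"regex"`      // value-matching regular expression, e.g. ".*Firefox.*"
        CreatedAt string `json:"created_at"` // ISO 8601 datetime
        CreatedBy string `json:"created_by"` // ID of the creating admin account
    }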
@@ -1445,24 +1251,6 @@ definitions:
type: object
x-go-name: InstanceRule
x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- instanceRuleCreateRequest:
- properties:
- Text:
- type: string
- title: InstanceRuleCreateRequest represents a request to create a new instance rule, made through the admin API.
- type: object
- x-go-name: InstanceRuleCreateRequest
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- instanceRuleUpdateRequest:
- properties:
- ID:
- type: string
- Text:
- type: string
- title: InstanceRuleUpdateRequest represents a request to update the text of an instance rule, made through the admin API.
- type: object
- x-go-name: InstanceRuleUpdateRequest
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
instanceV1:
properties:
account_domain:
@@ -1505,7 +1293,8 @@ definitions:
x-go-name: InvitesEnabled
languages:
description: Primary language of the instance.
- example: en
+ example:
+ - en
items:
type: string
type: array
@@ -1889,6 +1678,16 @@ definitions:
type: object
x-go-name: List
x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
+ markers:
+ properties:
+ home:
+ $ref: '#/definitions/TimelineMarker'
+ notifications:
+ $ref: '#/definitions/TimelineMarker'
+ title: Marker represents the last read position within a user's timelines.
+ type: object
+ x-go-name: Marker
+ x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
mediaDimensions:
properties:
aspect:
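The markers definition was previously missing from the spec entirely; it documents one optional TimelineMarker per timeline. A sketch of the implied JSON shape; the TimelineMarker fields are an assumption based on the Mastodon-compatible API, since that definition sits outside this hunk:

    package model

    // Marker mirrors the new markers definition above.
    type Marker struct {
        Home          *TimelineMarker `json:"home,omitempty"`
        Notifications *TimelineMarker `json:"notifications,omitempty"`
    }

    // TimelineMarker fields assumed from the Mastodon-compatible API.
    type TimelineMarker struct {
        LastReadID string `json:"last_read_id"` // ID of the last read item in the timeline
        UpdatedAt  string `json:"updated_at"`   // ISO 8601 datetime of the last update
        Version    int    `json:"version"`      // incremented with each update
    }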
@@ -1980,72 +1779,6 @@ definitions:
type: object
x-go-name: MediaMeta
x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- multiStatus:
- description: |-
- This model should be transmitted along with http code
- 207 MULTI-STATUS to indicate a mixture of responses.
- See https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/207
- properties:
- data:
- items:
- $ref: '#/definitions/multiStatusEntry'
- type: array
- x-go-name: Data
- metadata:
- $ref: '#/definitions/multiStatusMetadata'
- title: MultiStatus models a multistatus HTTP response body.
- type: object
- x-go-name: MultiStatus
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- multiStatusEntry:
- description: |-
- It can model either a success or a failure. The type
- and value of `Resource` is left to the discretion of
- the caller, but at minimum it should be expected to be
- JSON-serializable.
- properties:
- message:
- description: Message/error message for this entry.
- type: string
- x-go-name: Message
- resource:
- description: |-
- The resource/result for this entry.
- Value may be any type, check the docs
- per endpoint to see which to expect.
- x-go-name: Resource
- status:
- description: HTTP status code of this entry.
- format: int64
- type: integer
- x-go-name: Status
- title: MultiStatusEntry models one entry in multistatus data.
- type: object
- x-go-name: MultiStatusEntry
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- multiStatusMetadata:
- description: |-
- MultiStatusMetadata models an at-a-glance summary of
- the data contained in the MultiStatus.
- properties:
- failure:
- description: Count of unsuccessful results (!2xx).
- format: int64
- type: integer
- x-go-name: Failure
- success:
- description: Count of successful results (2xx).
- format: int64
- type: integer
- x-go-name: Success
- total:
- description: Success count + failure count.
- format: int64
- type: integer
- x-go-name: Total
- type: object
- x-go-name: MultiStatusMetadata
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
nodeinfo:
description: 'See: https://nodeinfo.diaspora.software/schema.html'
properties:
@@ -2214,40 +1947,6 @@ definitions:
type: object
x-go-name: PollOption
x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- pollRequest:
- properties:
- ExpiresIn:
- description: |-
- Duration the poll should be open, in seconds.
- If provided, media_ids cannot be used, and poll[options] must be provided.
- format: int64
- type: integer
- expires_in:
- description: |-
- Duration the poll should be open, in seconds.
- If provided, media_ids cannot be used, and poll[options] must be provided.
- x-go-name: ExpiresInI
- hide_totals:
- description: Hide vote counts until the poll ends.
- type: boolean
- x-go-name: HideTotals
- multiple:
- description: Allow multiple choices on this poll.
- type: boolean
- x-go-name: Multiple
- options:
- description: |-
- Array of possible answers.
- If provided, media_ids cannot be used, and poll[expires_in] must be provided.
- name: poll[options]
- items:
- type: string
- type: array
- x-go-name: Options
- title: PollRequest models a request to create a poll.
- type: object
- x-go-name: PollRequest
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
report:
properties:
action_taken:
@@ -2502,80 +2201,6 @@ definitions:
type: object
x-go-name: Context
x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- statusCreateRequest:
- properties:
- content_type:
- description: |-
- Content type to use when parsing this status.
- in: formData
- type: string
- x-go-name: ContentType
- in_reply_to_id:
- description: |-
- ID of the status being replied to, if status is a reply.
- in: formData
- type: string
- x-go-name: InReplyToID
- language:
- description: |-
- ISO 639 language code for this status.
- in: formData
- type: string
- x-go-name: Language
- media_ids:
- description: |-
- Array of Attachment ids to be attached as media.
- If provided, status becomes optional, and poll cannot be used.
-
- If the status is being submitted as a form, the key is 'media_ids[]',
- but if it's json or xml, the key is 'media_ids'.
-
- in: formData
- items:
- type: string
- type: array
- x-go-name: MediaIDs
- poll:
- $ref: '#/definitions/pollRequest'
- scheduled_at:
- description: |-
- ISO 8601 Datetime at which to schedule a status.
- Providing this parameter will cause ScheduledStatus to be returned instead of Status.
- Must be at least 5 minutes in the future.
- in: formData
- type: string
- x-go-name: ScheduledAt
- sensitive:
- description: |-
- Status and attached media should be marked as sensitive.
- in: formData
- type: boolean
- x-go-name: Sensitive
- spoiler_text:
- description: |-
- Text to be shown as a warning or subject before the actual content.
- Statuses are generally collapsed behind this field.
- in: formData
- type: string
- x-go-name: SpoilerText
- status:
- description: |-
- Text content of the status.
- If media_ids is provided, this becomes optional.
- Attaching a poll is optional while status is provided.
- in: formData
- type: string
- x-go-name: Status
- visibility:
- description: |-
- Visibility of the posted status.
- in: formData
- type: string
- x-go-name: Visibility
- title: StatusCreateRequest models status creation parameters.
- type: object
- x-go-name: StatusCreateRequest
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
statusReblogged:
properties:
account:
@@ -2798,7 +2423,9 @@ definitions:
x-go-name: ID
items:
description: List of status URIs.
- example: '[''https://example.org/users/some_user/statuses/01GSZ0F7Q8SJKNRF777GJD271R'', ''https://example.org/users/some_user/statuses/01GSZ0G012CBQ7TEKX689S3QRE'']'
+ example:
+ - https://example.org/users/some_user/statuses/01GSZ0F7Q8SJKNRF777GJD271R
+ - https://example.org/users/some_user/statuses/01GSZ0G012CBQ7TEKX689S3QRE
items:
type: string
type: array
@@ -2836,43 +2463,6 @@ definitions:
type: object
x-go-name: Tag
x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- updateField:
- description: By default, max 6 fields and 255 characters per property/value.
- properties:
- name:
- description: Name of the field
- type: string
- x-go-name: Name
- value:
- description: Value of the field
- type: string
- x-go-name: Value
- title: UpdateField is to be used specifically in an UpdateCredentialsRequest.
- type: object
- x-go-name: UpdateField
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
- updateSource:
- properties:
- language:
- description: Default language to use for authored statuses. (ISO 6391)
- type: string
- x-go-name: Language
- privacy:
- description: Default post privacy for authored statuses.
- type: string
- x-go-name: Privacy
- sensitive:
- description: Mark authored statuses as sensitive by default.
- type: boolean
- x-go-name: Sensitive
- status_content_type:
- description: Default format for authored statuses (text/plain or text/markdown).
- type: string
- x-go-name: StatusContentType
- title: UpdateSource is to be used specifically in an UpdateCredentialsRequest.
- type: object
- x-go-name: UpdateSource
- x-go-package: github.com/superseriousbusiness/gotosocial/internal/api/model
wellKnownResponse:
description: See https://webfinger.net/
properties:
@@ -3904,12 +3494,54 @@ paths:
in: formData
name: enable_rss
type: boolean
- - description: Profile fields to be added to this account's profile
+ - description: Name of 1st profile field to be added to this account's profile. (The index may be any string; add more indexes to send more fields.)
in: formData
- items:
- type: object
- name: fields_attributes
- type: array
+ name: fields_attributes[0][name]
+ type: string
+ - description: Value of 1st profile field to be added to this account's profile. (The index may be any string; add more indexes to send more fields.)
+ in: formData
+ name: fields_attributes[0][value]
+ type: string
+ - description: Name of 2nd profile field to be added to this account's profile.
+ in: formData
+ name: fields_attributes[1][name]
+ type: string
+ - description: Value of 2nd profile field to be added to this account's profile.
+ in: formData
+ name: fields_attributes[1][value]
+ type: string
+ - description: Name of 3rd profile field to be added to this account's profile.
+ in: formData
+ name: fields_attributes[2][name]
+ type: string
+ - description: Value of 3rd profile field to be added to this account's profile.
+ in: formData
+ name: fields_attributes[2][value]
+ type: string
+ - description: Name of 4th profile field to be added to this account's profile.
+ in: formData
+ name: fields_attributes[3][name]
+ type: string
+ - description: Value of 4th profile field to be added to this account's profile.
+ in: formData
+ name: fields_attributes[3][value]
+ type: string
+ - description: Name of 5th profile field to be added to this account's profile.
+ in: formData
+ name: fields_attributes[4][name]
+ type: string
+ - description: Value of 5th profile field to be added to this account's profile.
+ in: formData
+ name: fields_attributes[4][value]
+ type: string
+ - description: Name of 6th profile field to be added to this account's profile.
+ in: formData
+ name: fields_attributes[5][name]
+ type: string
+ - description: Value of 6th profile field to be added to this account's profile.
+ in: formData
+ name: fields_attributes[5][value]
+ type: string
produces:
- application/json
responses:
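Documenting fields_attributes as discrete indexed form keys (rather than an opaque array of objects, which Swagger 2.0 can't express in formData) shows clients exactly what to send: one name/value pair per index. A minimal sketch, assuming a placeholder host and token:

    package main

    import (
        "net/http"
        "net/url"
        "strings"
    )

    func main() {
        // One [name] and one [value] key per index; per the descriptions
        // above, the index may be any string.
        form := url.Values{}
        form.Set("fields_attributes[0][name]", "Pronouns")
        form.Set("fields_attributes[0][value]", "they/them")
        form.Set("fields_attributes[1][name]", "Website")
        form.Set("fields_attributes[1][value]", "https://example.org")

        req, err := http.NewRequest(http.MethodPatch,
            "https://instance.example/api/v1/accounts/update_credentials", // placeholder host
            strings.NewReader(form.Encode()))
        if err != nil {
            panic(err)
        }
        req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
        req.Header.Set("Authorization", "Bearer YOUR_TOKEN") // placeholder
        resp, err := http.DefaultClient.Do(req)
        if err != nil {
            panic(err)
        }
        resp.Body.Close()
    }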
@@ -4095,7 +3727,7 @@ paths:
name: image
required: true
type: file
- - description: Category in which to place the new emoji. 64 characters or less. If left blank, emoji will be uncategorized. If a category with the given name doesn't exist yet, it will be created.
+ - description: Category in which to place the new emoji. If left blank, emoji will be uncategorized. If a category with the given name doesn't exist yet, it will be created.
in: formData
name: category
type: string
@@ -4222,6 +3854,10 @@ paths:
Type of action to be taken. One of: (`disable`, `copy`, `modify`).
For REMOTE emojis, `copy` or `disable` are supported.
For LOCAL emojis, only `modify` is supported.
+ enum:
+ - copy
+ - disable
+ - modify
in: formData
name: type
required: true
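Constraining type to an enum lets generated clients and the validator reject unknown actions up front. Since this operation is multipart (it can also carry an image file), a rough sketch of a copy action follows; the PATCH method, endpoint path, and the shortcode field for the local copy are assumptions, as they sit outside this hunk:

    package main

    import (
        "bytes"
        "mime/multipart"
        "net/http"
    )

    func main() {
        var buf bytes.Buffer
        w := multipart.NewWriter(&buf)
        w.WriteField("type", "copy")             // enum: copy | disable | modify
        w.WriteField("shortcode", "blobcat_uwu") // assumed: local shortcode for the copy
        w.Close()

        // Method and path assumed (admin emoji update endpoint).
        req, _ := http.NewRequest(http.MethodPatch,
            "https://instance.example/api/v1/admin/custom_emojis/01GPE75FXSH2EGFBF85NXPH3KP",
            &buf)
        req.Header.Set("Content-Type", w.FormDataContentType())
        req.Header.Set("Authorization", "Bearer YOUR_TOKEN") // placeholder
        http.DefaultClient.Do(req)
    }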
@@ -4235,7 +3871,7 @@ paths:
in: formData
name: image
type: file
- - description: Category in which to place the emoji. 64 characters or less. If a category with the given name doesn't exist yet, it will be created.
+ - description: Category in which to place the emoji. If a category with the given name doesn't exist yet, it will be created.
in: formData
name: category
type: string
@@ -4267,12 +3903,6 @@ paths:
/api/v1/admin/custom_emojis/categories:
get:
operationId: emojiCategoriesGet
- parameters:
- - description: The id of the emoji.
- in: path
- name: id
- required: true
- type: string
produces:
- application/json
responses:
@@ -4280,7 +3910,7 @@ paths:
description: Array of existing emoji categories.
schema:
items:
- $ref: '#/definitions/adminEmojiCategory'
+ $ref: '#/definitions/emojiCategory'
type: array
"400":
description: bad request
@@ -4682,11 +4312,13 @@ paths:
be performed.
operationId: domainKeysExpire
parameters:
- - description: Domain to expire keys for.
- example: example.org
+ - description: |-
+ Domain to expire keys for.
+ Sample: example.org
in: formData
name: domain
type: string
+ x-go-name: Domain
produces:
- application/json
responses:
@@ -4789,6 +4421,19 @@ paths:
The parameters can also be given in the body of the request, as JSON, if the content-type is set to 'application/json'.
The parameters can also be given in the body of the request, as XML, if the content-type is set to 'application/xml'.
operationId: headerFilterAllowCreate
+ parameters:
+ - description: The HTTP header to match against (e.g. User-Agent).
+ in: formData
+ name: header
+ required: true
+ type: string
+ x-go-name: Header
+ - description: The header value matching regular expression.
+ in: formData
+ name: regex
+ required: true
+ type: string
+ x-go-name: Regex
produces:
- application/json
responses:
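headerFilterAllowCreate (and headerFilterBlockCreate below) previously documented no parameters at all; the new header/regex pair matches the headerFilter definition earlier in the spec. A form-encoded sketch; the endpoint path is assumed, since it isn't visible in this hunk, and JSON or XML bodies work too per the description:

    package main

    import (
        "net/http"
        "net/url"
        "strings"
    )

    func main() {
        form := url.Values{}
        form.Set("header", "User-Agent") // required
        form.Set("regex", ".*Firefox.*") // required

        // Path assumed; the block variant is analogous.
        req, err := http.NewRequest(http.MethodPost,
            "https://instance.example/api/v1/admin/header_allows",
            strings.NewReader(form.Encode()))
        if err != nil {
            panic(err)
        }
        req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
        req.Header.Set("Authorization", "Bearer YOUR_TOKEN") // placeholder
        resp, err := http.DefaultClient.Do(req)
        if err != nil {
            panic(err)
        }
        resp.Body.Close()
    }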
@@ -4902,6 +4547,19 @@ paths:
The parameters can also be given in the body of the request, as JSON, if the content-type is set to 'application/json'.
The parameters can also be given in the body of the request, as XML, if the content-type is set to 'application/xml'.
operationId: headerFilterBlockCreate
+ parameters:
+ - description: The HTTP header to match against (e.g. User-Agent).
+ in: formData
+ name: header
+ required: true
+ type: string
+ x-go-name: Header
+ - description: The header value matching regular expression.
+ in: formData
+ name: regex
+ required: true
+ type: string
+ x-go-name: Regex
produces:
- application/json
responses:
@@ -4991,6 +4649,7 @@ paths:
name: text
required: true
type: string
+ x-go-name: Text
produces:
- application/json
responses:
@@ -5016,17 +4675,17 @@ paths:
summary: Create a new instance rule.
tags:
- admin
- /api/v1/admin/instance/rules{id}:
+ /api/v1/admin/instance/rules/{id}:
delete:
consumes:
- multipart/form-data
operationId: ruleDelete
parameters:
- description: The id of the rule to delete.
- in: formData
+ in: path
name: id
required: true
- type: path
+ type: string
produces:
- application/json
responses:
@@ -5058,15 +4717,17 @@ paths:
operationId: ruleUpdate
parameters:
- description: The id of the rule to update.
- in: formData
+ in: path
name: id
required: true
- type: path
+ type: string
+ x-go-name: ID
- description: Text body for the updated instance rule, plaintext.
in: formData
name: text
required: true
type: string
+ x-go-name: Text
produces:
- application/json
responses:
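Besides restoring the missing slash in the path template, these hunks move id out of formData (where `type: path` was never valid) into a proper path parameter. A sketch of the corrected calling convention for the delete operation; the update takes the same path parameter plus a form-encoded text field:

    package main

    import (
        "fmt"
        "net/http"
    )

    func main() {
        const base = "https://instance.example" // placeholder host
        ruleID := "01GPBN5YDY6JKBWE44H7YQBDCQ"  // example rule ID

        // DELETE /api/v1/admin/instance/rules/{id}: id now rides in the path.
        req, err := http.NewRequest(http.MethodDelete,
            fmt.Sprintf("%s/api/v1/admin/instance/rules/%s", base, ruleID), nil)
        if err != nil {
            panic(err)
        }
        req.Header.Set("Authorization", "Bearer YOUR_TOKEN") // placeholder
        resp, err := http.DefaultClient.Do(req)
        if err != nil {
            panic(err)
        }
        resp.Body.Close()
    }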
@@ -5279,8 +4940,9 @@ paths:
name: id
required: true
type: string
- - description: Optional admin comment on the action taken in response to this report. Useful for providing an explanation about what action was taken (if any) before the report was marked as resolved. This will be visible to the user that created the report!
- example: The reported account was suspended.
+ - description: |-
+ Optional admin comment on the action taken in response to this report. Useful for providing an explanation about what action was taken (if any) before the report was marked as resolved. This will be visible to the user that created the report!
+ Sample: The reported account was suspended.
in: formData
name: action_taken_comment
type: string
@@ -5310,7 +4972,7 @@ paths:
/api/v1/admin/rules:
get:
description: The rules will be returned in order (sorted by Order ascending).
- operationId: rules
+ operationId: adminsRuleGet
produces:
- application/json
responses:
@@ -5658,45 +5320,53 @@ paths:
- application/x-www-form-urlencoded
operationId: filterV1Post
parameters:
- - description: The text to be filtered.
- example: fnord
+ - description: |-
+ The text to be filtered.
+
+ Sample: fnord
in: formData
maxLength: 40
name: phrase
required: true
type: string
- - description: The contexts in which the filter should be applied.
+ - description: |-
+ The contexts in which the filter should be applied.
+
+ Sample: home, public
enum:
- home
- notifications
- public
- thread
- account
- example:
- - home
- - public
in: formData
items:
- $ref: '#/definitions/filterContext'
- minLength: 1
+ type: string
+ minItems: 1
name: context
required: true
type: array
uniqueItems: true
- - description: Number of seconds from now that the filter should expire. If omitted, filter never expires.
- example: 86400
+ - description: |-
+ Number of seconds from now that the filter should expire. If omitted, filter never expires.
+
+ Sample: 86400
in: formData
name: expires_in
type: number
- default: false
- description: Should matching entities be removed from the user's timelines/views, instead of hidden? Not supported yet.
- example: false
+ description: |-
+ Should matching entities be removed from the user's timelines/views, instead of hidden? Not supported yet.
+
+ Sample: false
in: formData
name: irreversible
type: boolean
- default: false
- description: Should the filter consider word boundaries?
- example: true
+ description: |-
+ Should the filter consider word boundaries?
+
+ Sample: true
in: formData
name: whole_word
type: boolean
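The example keys move into "Sample:" description text here (and in the PUT hunk below), presumably because Swagger 2.0 doesn't permit example on non-body parameters, and the context items drop their invalid $ref in favour of plain strings with minItems. A form-encoded create sketch; whether the binder reads "context" or Mastodon-style "context[]" keys depends on the model's form tags, so the bracketed form is an assumption:

    package main

    import (
        "net/http"
        "net/url"
        "strings"
    )

    func main() {
        form := url.Values{}
        form.Set("phrase", "fnord")     // required, max 40 characters
        form.Add("context[]", "home")   // repeated keys for the array parameter
        form.Add("context[]", "public") // (key naming assumed, see note above)
        form.Set("expires_in", "86400") // optional; seconds from now
        form.Set("whole_word", "true")  // server default is false

        req, err := http.NewRequest(http.MethodPost,
            "https://instance.example/api/v1/filters", // v1 filters endpoint
            strings.NewReader(form.Encode()))
        if err != nil {
            panic(err)
        }
        req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
        req.Header.Set("Authorization", "Bearer YOUR_TOKEN") // placeholder
        resp, err := http.DefaultClient.Do(req)
        if err != nil {
            panic(err)
        }
        resp.Body.Close()
    }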
@@ -5798,45 +5468,53 @@ paths:
name: id
required: true
type: string
- - description: The text to be filtered.
- example: fnord
+ - description: |-
+ The text to be filtered.
+
+ Sample: fnord
in: formData
maxLength: 40
name: phrase
required: true
type: string
- - description: The contexts in which the filter should be applied.
+ - description: |-
+ The contexts in which the filter should be applied.
+
+ Sample: home, public
enum:
- home
- notifications
- public
- thread
- account
- example:
- - home
- - public
in: formData
items:
- $ref: '#/definitions/filterContext'
- minLength: 1
+ type: string
+ minItems: 1
name: context
required: true
type: array
uniqueItems: true
- - description: Number of seconds from now that the filter should expire. If omitted, filter never expires.
- example: 86400
+ - description: |-
+ Number of seconds from now that the filter should expire. If omitted, filter never expires.
+
+ Sample: 86400
in: formData
name: expires_in
type: number
- default: false
- description: Should matching entities be removed from the user's timelines/views, instead of hidden? Not supported yet.
- example: false
+ description: |-
+ Should matching entities be removed from the user's timelines/views, instead of hidden? Not supported yet.
+
+ Sample: false
in: formData
name: irreversible
type: boolean
- default: false
- description: Should the filter consider word boundaries?
- example: true
+ description: |-
+ Should the filter consider word boundaries?
+
+ Sample: true
in: formData
name: whole_word
type: boolean
@@ -6015,7 +5693,7 @@ paths:
- allowEmptyValue: true
description: Title to use for the instance.
in: formData
- maximum: 40
+ maxLength: 40
name: title
type: string
- allowEmptyValue: true
@@ -6031,19 +5709,19 @@ paths:
- allowEmptyValue: true
description: Short description of the instance.
in: formData
- maximum: 500
+ maxLength: 500
name: short_description
type: string
- allowEmptyValue: true
description: Longer description of the instance.
in: formData
- maximum: 5000
+ maxLength: 5000
name: description
type: string
- allowEmptyValue: true
description: Terms and conditions of the instance.
in: formData
- maximum: 5000
+ maxLength: 5000
name: terms
type: string
- description: Thumbnail image to use for the instance.
@@ -6064,7 +5742,7 @@ paths:
"200":
description: The newly updated instance.
schema:
- $ref: '#/definitions/instance'
+ $ref: '#/definitions/instanceV1'
"400":
description: bad request
"401":
@@ -6192,8 +5870,9 @@ paths:
- application/x-www-form-urlencoded
operationId: listCreate
parameters:
- - description: Title of this list.
- example: Cool People
+ - description: |-
+ Title of this list.
+ Sample: Cool People
in: formData
name: title
required: true
@@ -6205,7 +5884,7 @@ paths:
followed = Show replies to any followed user
list = Show replies to members of the list
none = Show replies to no one
- example: list
+ Sample: list
in: formData
name: replies_policy
type: string
@@ -6304,24 +5983,26 @@ paths:
operationId: listUpdate
parameters:
- description: ID of the list
- example: Cool People
in: path
name: id
required: true
type: string
- x-go-name: Title
- - description: Title of this list.
- example: Cool People
+ - description: |-
+ Title of this list.
+ Sample: Cool People
in: formData
name: title
type: string
- x-go-name: RepliesPolicy
- description: |-
RepliesPolicy for this list.
followed = Show replies to any followed user
list = Show replies to members of the list
none = Show replies to no one
- example: list
+ Sample: list
+ enum:
+ - followed
+ - list
+ - none
in: formData
name: replies_policy
type: string
@@ -6647,6 +6328,12 @@ paths:
/api/v1/notification/{id}:
get:
operationId: notification
+ parameters:
+ - description: The ID of the notification.
+ in: path
+ name: id
+ required: true
+ type: string
produces:
- application/json
responses:
@@ -6950,32 +6637,34 @@ paths:
- application/x-www-form-urlencoded
operationId: reportCreate
parameters:
- - description: ID of the account to report.
- example: 01GPE75FXSH2EGFBF85NXPH3KP
+ - description: |-
+ ID of the account to report.
+ Sample: 01GPE75FXSH2EGFBF85NXPH3KP
in: formData
name: account_id
required: true
type: string
x-go-name: AccountID
- - description: IDs of statuses to attach to the report to provide additional context.
- example:
- - 01GPE76N4SBVRZ8K24TW51ZZQ4
- - 01GPE76WN9JZE62EPT3Q9FRRD4
+ - description: |-
+ IDs of statuses to attach to the report to provide additional context.
+ Sample: ["01GPE76N4SBVRZ8K24TW51ZZQ4","01GPE76WN9JZE62EPT3Q9FRRD4"]
in: formData
items:
type: string
name: status_ids
type: array
x-go-name: StatusIDs
- - description: The reason for the report. Default maximum of 1000 characters.
- example: Anti-Blackness, transphobia.
+ - description: |-
+ The reason for the report. Default maximum of 1000 characters.
+ Sample: Anti-Blackness, transphobia.
in: formData
name: comment
type: string
x-go-name: Comment
- default: false
- description: If the account is remote, should the report be forwarded to the remote admin?
- example: true
+ description: |-
+ If the account is remote, should the report be forwarded to the remote admin?
+ Sample: true
in: formData
name: forward
type: boolean
@@ -6984,15 +6673,14 @@ paths:
description: |-
Specify if the report is due to spam, violation of enumerated instance rules, or some other reason.
Currently only 'other' is supported.
- example: other
+ Sample: other
in: formData
name: category
type: string
x-go-name: Category
- - description: IDs of rules on this instance which have been broken according to the reporter.
- example:
- - 01GPBN5YDY6JKBWE44H7YQBDCQ
- - 01GPBN65PDWSBPWVDD0SQCFFY3
+ - description: |-
+ IDs of rules on this instance which have been broken according to the reporter.
+ Sample: ["01GPBN5YDY6JKBWE44H7YQBDCQ","01GPBN65PDWSBPWVDD0SQCFFY3"]
in: formData
items:
type: string
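The report parameters keep their sample values but get the same Sample:-in-description treatment. A JSON-bodied sketch built from the samples above, on the assumption that reportCreate accepts application/json like other GoToSocial POST endpoints:

    package main

    import (
        "bytes"
        "encoding/json"
        "net/http"
    )

    func main() {
        body, err := json.Marshal(map[string]any{
            "account_id": "01GPE75FXSH2EGFBF85NXPH3KP",
            "status_ids": []string{"01GPE76N4SBVRZ8K24TW51ZZQ4", "01GPE76WN9JZE62EPT3Q9FRRD4"},
            "comment":    "Anti-Blackness, transphobia.",
            "forward":    true,
            "category":   "other", // currently the only supported category
        })
        if err != nil {
            panic(err)
        }

        req, err := http.NewRequest(http.MethodPost,
            "https://instance.example/api/v1/reports", bytes.NewReader(body))
        if err != nil {
            panic(err)
        }
        req.Header.Set("Content-Type", "application/json")
        req.Header.Set("Authorization", "Bearer YOUR_TOKEN") // placeholder
        resp, err := http.DefaultClient.Do(req)
        if err != nil {
            panic(err)
        }
        resp.Body.Close()
    }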
@@ -7085,11 +6773,35 @@ paths:
name: media_ids
type: array
x-go-name: MediaIDs
- - $ref: '#/definitions/pollRequest'
- description: Poll to include with this status.
+ - description: |-
+ Array of possible poll answers.
+ If provided, media_ids cannot be used, and poll[expires_in] must be provided.
+ in: formData
+ items:
+ type: string
+ name: poll[options][]
+ type: array
+ x-go-name: PollOptions
+ - description: |-
+ Duration the poll should be open, in seconds.
+ If provided, media_ids cannot be used, and poll[options] must be provided.
+ format: int64
+ in: formData
+ name: poll[expires_in]
+ type: integer
+ x-go-name: PollExpiresIn
+ - default: false
+ description: Allow multiple choices on this poll.
in: formData
- name: poll
- x-go-name: Poll
+ name: poll[multiple]
+ type: boolean
+ x-go-name: PollMultiple
+ - default: true
+ description: Hide vote counts until the poll ends.
+ in: formData
+ name: poll[hide_totals]
+ type: boolean
+ x-go-name: PollHideTotals
- description: ID of the status being replied to, if status is a reply.
in: formData
name: in_reply_to_id
@@ -7108,6 +6820,12 @@ paths:
type: string
x-go-name: SpoilerText
- description: Visibility of the posted status.
+ enum:
+ - public
+ - unlisted
+ - private
+ - mutuals_only
+ - direct
in: formData
name: visibility
type: string
@@ -7116,6 +6834,8 @@ paths:
ISO 8601 Datetime at which to schedule a status.
Providing this parameter will cause ScheduledStatus to be returned instead of Status.
Must be at least 5 minutes in the future.
+
+ This feature isn't implemented yet.
in: formData
name: scheduled_at
type: string
@@ -7126,27 +6846,30 @@ paths:
type: string
x-go-name: Language
- description: Content type to use when parsing this status.
+ enum:
+ - text/plain
+ - text/markdown
in: formData
name: content_type
type: string
x-go-name: ContentType
- description: This status will be federated beyond the local timeline(s).
- in: query
+ in: formData
name: federated
type: boolean
x-go-name: Federated
- description: This status can be boosted/reblogged.
- in: query
+ in: formData
name: boostable
type: boolean
x-go-name: Boostable
- description: This status can be replied to.
- in: query
+ in: formData
name: replyable
type: boolean
x-go-name: Replyable
- description: This status can be liked/faved.
- in: query
+ in: formData
name: likeable
type: boolean
x-go-name: Likeable
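Taken together, the statusCreate hunks replace the nested pollRequest $ref (not expressible as a formData parameter in Swagger 2.0) with explicit poll[...] form fields, move the federation/interaction flags from query to formData where the handler actually reads them, and pin visibility and content_type to enums. A form-encoded sketch of a status with a poll, using the parameter names exactly as documented above (host and token are placeholders):

    package main

    import (
        "net/http"
        "net/url"
        "strings"
    )

    func main() {
        form := url.Values{}
        form.Set("status", "Which editor?")
        form.Add("poll[options][]", "vim")     // repeated key per option
        form.Add("poll[options][]", "emacs")
        form.Set("poll[expires_in]", "86400")  // required together with poll[options]
        form.Set("poll[multiple]", "false")    // server default is false
        form.Set("poll[hide_totals]", "true")  // server default is true
        form.Set("visibility", "public")       // enum: public|unlisted|private|mutuals_only|direct
        form.Set("content_type", "text/plain") // enum: text/plain|text/markdown
        form.Set("federated", "true")          // now formData rather than query

        req, err := http.NewRequest(http.MethodPost,
            "https://instance.example/api/v1/statuses", // placeholder host
            strings.NewReader(form.Encode()))
        if err != nil {
            panic(err)
        }
        req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
        req.Header.Set("Authorization", "Bearer YOUR_TOKEN") // placeholder
        resp, err := http.DefaultClient.Do(req)
        if err != nil {
            panic(err)
        }
        resp.Body.Close()
    }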
@@ -8000,6 +7723,11 @@ paths:
````
operationId: tagTimeline
parameters:
+ - description: Name of the tag.
+ in: path
+ name: tag_name
+ required: true
+ type: string
- description: Return only statuses *OLDER* than the given max status ID. The status with the specified ID will not be included in the response.
in: query
name: max_id
@@ -8129,6 +7857,12 @@ paths:
HTTP signature is required on the request.
operationId: s2sFeaturedCollectionGet
+ parameters:
+ - description: Account name of the user.
+ in: path
+ name: username
+ required: true
+ type: string
produces:
- application/activity+json
responses:
diff --git a/go.mod b/go.mod
index 8c0b01716..fef81bc08 100644
--- a/go.mod
+++ b/go.mod
@@ -32,6 +32,7 @@ require (
github.com/gin-contrib/sessions v0.0.5
github.com/gin-gonic/gin v1.9.1
github.com/go-playground/form/v4 v4.2.1
+ github.com/go-swagger/go-swagger v0.30.5
github.com/google/uuid v1.6.0
github.com/gorilla/feeds v1.1.2
github.com/gorilla/websocket v1.5.1
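Promoting go-swagger to a direct requirement pins the spec generator's version through the module graph. The conventional way to keep a CLI-only dependency like this in go.mod is a build-tagged tools file with a blank import, roughly:

    //go:build tools

    package tools

    // Blank-importing the command keeps github.com/go-swagger/go-swagger in
    // go.mod/go.sum without linking it into any binary; it can then be run
    // reproducibly via:
    //
    //   go run github.com/go-swagger/go-swagger/cmd/swagger generate spec
    import (
        _ "github.com/go-swagger/go-swagger/cmd/swagger"
    )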
@@ -84,6 +85,10 @@ require (
codeberg.org/gruf/go-bitutil v1.1.0 // indirect
codeberg.org/gruf/go-fastpath/v2 v2.0.0 // indirect
codeberg.org/gruf/go-maps v1.0.3 // indirect
+ github.com/Masterminds/goutils v1.1.1 // indirect
+ github.com/Masterminds/semver/v3 v3.2.0 // indirect
+ github.com/Masterminds/sprig/v3 v3.2.3 // indirect
+ github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bytedance/sonic v1.10.1 // indirect
@@ -96,13 +101,14 @@ require (
github.com/coreos/go-systemd/v22 v22.3.2 // indirect
github.com/cornelk/hashmap v1.0.8 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
- github.com/docker/go-units v0.4.0 // indirect
+ github.com/docker/go-units v0.5.0 // indirect
github.com/dsoprea/go-exif/v3 v3.0.0-20210625224831-a6301f85c82b // indirect
github.com/dsoprea/go-iptc v0.0.0-20200610044640-bc9ca208b413 // indirect
github.com/dsoprea/go-logging v0.0.0-20200710184922-b02d349568dd // indirect
github.com/dsoprea/go-photoshop-info-format v0.0.0-20200610045659-121dd752914d // indirect
github.com/dsoprea/go-utility/v2 v2.0.0-20200717064901-2fccff4aa15e // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
+ github.com/felixge/httpsnoop v1.0.3 // indirect
github.com/fsnotify/fsnotify v1.7.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.2 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
@@ -111,6 +117,17 @@ require (
github.com/go-jose/go-jose/v3 v3.0.1 // indirect
github.com/go-logr/logr v1.4.1 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
+ github.com/go-openapi/analysis v0.21.4 // indirect
+ github.com/go-openapi/errors v0.20.4 // indirect
+ github.com/go-openapi/inflect v0.19.0 // indirect
+ github.com/go-openapi/jsonpointer v0.19.6 // indirect
+ github.com/go-openapi/jsonreference v0.20.2 // indirect
+ github.com/go-openapi/loads v0.21.2 // indirect
+ github.com/go-openapi/runtime v0.26.0 // indirect
+ github.com/go-openapi/spec v0.20.9 // indirect
+ github.com/go-openapi/strfmt v0.21.7 // indirect
+ github.com/go-openapi/swag v0.22.4 // indirect
+ github.com/go-openapi/validate v0.22.1 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.15.5 // indirect
@@ -122,25 +139,35 @@ require (
github.com/golang/protobuf v1.5.3 // indirect
github.com/gorilla/context v1.1.1 // indirect
github.com/gorilla/css v1.0.0 // indirect
+ github.com/gorilla/handlers v1.5.1 // indirect
github.com/gorilla/securecookie v1.1.1 // indirect
github.com/gorilla/sessions v1.2.1 // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
+ github.com/huandu/xstrings v1.3.3 // indirect
+ github.com/imdario/mergo v0.3.12 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
github.com/jackc/puddle/v2 v2.2.1 // indirect
+ github.com/jessevdk/go-flags v1.5.0 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
+ github.com/josharian/intern v1.0.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.17.4 // indirect
github.com/klauspost/cpuid/v2 v2.2.6 // indirect
+ github.com/kr/pretty v0.3.1 // indirect
+ github.com/kr/text v0.2.0 // indirect
github.com/leodido/go-urn v1.2.4 // indirect
github.com/magiconair/properties v1.8.7 // indirect
+ github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 // indirect
github.com/minio/md5-simd v1.1.2 // indirect
github.com/minio/sha256-simd v1.0.1 // indirect
+ github.com/mitchellh/copystructure v1.2.0 // indirect
+ github.com/mitchellh/reflectwalk v1.0.2 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
@@ -154,9 +181,11 @@ require (
github.com/prometheus/procfs v0.12.0 // indirect
github.com/quasoft/memstore v0.0.0-20191010062613-2bce066d2b0b // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
+ github.com/rogpeppe/go-internal v1.10.0 // indirect
github.com/rs/xid v1.5.0 // indirect
github.com/sagikazarmark/locafero v0.4.0 // indirect
github.com/sagikazarmark/slog-shim v0.1.0 // indirect
+ github.com/shopspring/decimal v1.2.0 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/sourcegraph/conc v0.3.0 // indirect
github.com/spf13/afero v1.11.0 // indirect
@@ -167,12 +196,14 @@ require (
github.com/superseriousbusiness/go-png-image-structure/v2 v2.0.1-SSB // indirect
github.com/tdewolff/parse/v2 v2.7.12 // indirect
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect
+ github.com/toqueteos/webbrowser v1.2.0 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.11 // indirect
github.com/uptrace/opentelemetry-go-extra/otelsql v0.2.3 // indirect
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
github.com/zeebo/xxh3 v1.0.2 // indirect
+ go.mongodb.org/mongo-driver v1.11.3 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.20.0 // indirect
go.opentelemetry.io/proto/otlp v1.0.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
diff --git a/go.sum b/go.sum
index 6a0860af2..d18fe92b9 100644
--- a/go.sum
+++ b/go.sum
@@ -79,6 +79,14 @@ github.com/DmitriyVTitov/size v1.5.0 h1:/PzqxYrOyOUX1BXj6J9OuVRVGe+66VL4D9FlUaW5
github.com/DmitriyVTitov/size v1.5.0/go.mod h1:le6rNI4CoLQV1b9gzp1+3d7hMAD/uu2QcJ+aYbNgiU0=
github.com/KimMachineGun/automemlimit v0.5.0 h1:BeOe+BbJc8L5chL3OwzVYjVzyvPALdd5wxVVOWuUZmQ=
github.com/KimMachineGun/automemlimit v0.5.0/go.mod h1:di3GCKiu9Y+1fs92erCbUvKzPkNyViN3mA0vti/ykEQ=
+github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
+github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
+github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g=
+github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
+github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA=
+github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM=
+github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
+github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/abema/go-mp4 v1.2.0 h1:gi4X8xg/m179N/J15Fn5ugywN9vtI6PLk6iLldHGLAk=
github.com/abema/go-mp4 v1.2.0/go.mod h1:vPl9t5ZK7K0x68jh12/+ECWBCXoWuIDtNgPtU2f04ws=
github.com/ajg/form v1.5.1 h1:t9c7v8JUKu/XxOGBU0yjNpaMloxGEJhUkqFRq0ibGeU=
@@ -86,6 +94,9 @@ github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY
github.com/andybalholm/brotli v1.0.0/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y=
github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/cCs=
github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
+github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
+github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so=
+github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
@@ -131,8 +142,8 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
-github.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw=
-github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
+github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
+github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/dsoprea/go-exif/v2 v2.0.0-20200321225314-640175a69fe4/go.mod h1:Lm2lMM2zx8p4a34ZemkaUV95AnMl4ZvLbCUbwOvLC2E=
github.com/dsoprea/go-exif/v3 v3.0.0-20200717053412-08f1b6708903/go.mod h1:0nsO1ce0mh5czxGeLo4+OCZ/C6Eo6ZlMWsz7rH/Gxv8=
github.com/dsoprea/go-exif/v3 v3.0.0-20210428042052-dca55bf8ca15/go.mod h1:cg5SNYKHMmzxsr9X6ZeLh/nfBRHHp5PngtEPcujONtk=
@@ -158,6 +169,9 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7
github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8=
github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo=
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
+github.com/felixge/httpsnoop v1.0.1/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
+github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk=
+github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
@@ -196,6 +210,46 @@ github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
+github.com/go-openapi/analysis v0.21.2/go.mod h1:HZwRk4RRisyG8vx2Oe6aqeSQcoxRp47Xkp3+K6q+LdY=
+github.com/go-openapi/analysis v0.21.4 h1:ZDFLvSNxpDaomuCueM0BlSXxpANBlFYiBvr+GXrvIHc=
+github.com/go-openapi/analysis v0.21.4/go.mod h1:4zQ35W4neeZTqh3ol0rv/O8JBbka9QyAgQRPp9y3pfo=
+github.com/go-openapi/errors v0.19.8/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M=
+github.com/go-openapi/errors v0.19.9/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M=
+github.com/go-openapi/errors v0.20.2/go.mod h1:cM//ZKUKyO06HSwqAelJ5NsEMMcpa6VpXe8DOa1Mi1M=
+github.com/go-openapi/errors v0.20.4 h1:unTcVm6PispJsMECE3zWgvG4xTiKda1LIR5rCRWLG6M=
+github.com/go-openapi/errors v0.20.4/go.mod h1:Z3FlZ4I8jEGxjUK+bugx3on2mIAk4txuAOhlsB1FSgk=
+github.com/go-openapi/inflect v0.19.0 h1:9jCH9scKIbHeV9m12SmPilScz6krDxKRasNNSNPXu/4=
+github.com/go-openapi/inflect v0.19.0/go.mod h1:lHpZVlpIQqLyKwJ4N+YSc9hchQy/i12fJykb83CRBH4=
+github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
+github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
+github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE=
+github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs=
+github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns=
+github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo=
+github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2KvnJRumpMGbE=
+github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k=
+github.com/go-openapi/loads v0.21.1/go.mod h1:/DtAMXXneXFjbQMGEtbamCZb+4x7eGwkvZCvBmwUG+g=
+github.com/go-openapi/loads v0.21.2 h1:r2a/xFIYeZ4Qd2TnGpWDIQNcP80dIaZgf704za8enro=
+github.com/go-openapi/loads v0.21.2/go.mod h1:Jq58Os6SSGz0rzh62ptiu8Z31I+OTHqmULx5e/gJbNw=
+github.com/go-openapi/runtime v0.26.0 h1:HYOFtG00FM1UvqrcxbEJg/SwvDRvYLQKGhw2zaQjTcc=
+github.com/go-openapi/runtime v0.26.0/go.mod h1:QgRGeZwrUcSHdeh4Ka9Glvo0ug1LC5WyE+EV88plZrQ=
+github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I=
+github.com/go-openapi/spec v0.20.6/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA=
+github.com/go-openapi/spec v0.20.9 h1:xnlYNQAwKd2VQRRfwTEI0DcK+2cbuvI/0c7jx3gA8/8=
+github.com/go-openapi/spec v0.20.9/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA=
+github.com/go-openapi/strfmt v0.21.0/go.mod h1:ZRQ409bWMj+SOgXofQAGTIo2Ebu72Gs+WaRADcS5iNg=
+github.com/go-openapi/strfmt v0.21.1/go.mod h1:I/XVKeLc5+MM5oPNN7P6urMOpuLXEcNrCX/rPGuWb0k=
+github.com/go-openapi/strfmt v0.21.3/go.mod h1:k+RzNO0Da+k3FrrynSNN8F7n/peCmQQqbbXjtDfvmGg=
+github.com/go-openapi/strfmt v0.21.7 h1:rspiXgNWgeUzhjo1YU01do6qsahtJNByjLVbPLNHb8k=
+github.com/go-openapi/strfmt v0.21.7/go.mod h1:adeGTkxE44sPyLk0JV235VQAO/ZXUr8KAzYjclFs3ew=
+github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
+github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
+github.com/go-openapi/swag v0.21.1/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
+github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
+github.com/go-openapi/swag v0.22.4 h1:QLMzNJnMGPRNDCbySlcj1x01tzU8/9LTTL9hZZZogBU=
+github.com/go-openapi/swag v0.22.4/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
+github.com/go-openapi/validate v0.22.1 h1:G+c2ub6q47kfX1sOBLwIQwzBVt8qmOAARyo/9Fqs9NU=
+github.com/go-openapi/validate v0.22.1/go.mod h1:rjnrwK57VJ7A8xqfpAOEKRH8yQSGUriMu5/zuPSQ1hg=
github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
@@ -211,10 +265,39 @@ github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXS
github.com/go-playground/validator/v10 v10.15.5 h1:LEBecTWb/1j5TNY1YYG2RcOUN3R7NLylN+x8TTueE24=
github.com/go-playground/validator/v10 v10.15.5/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-session/session v3.1.2+incompatible/go.mod h1:8B3iivBQjrz/JtC68Np2T1yBBLxTan3mn/3OM0CyRt0=
+github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
+github.com/go-swagger/go-swagger v0.30.5 h1:SQ2+xSonWjjoEMOV5tcOnZJVlfyUfCBhGQGArS1b9+U=
+github.com/go-swagger/go-swagger v0.30.5/go.mod h1:cWUhSyCNqV7J1wkkxfr5QmbcnCewetCdvEXqgPvbc/Q=
+github.com/go-swagger/scan-repo-boundary v0.0.0-20180623220736-973b3573c013 h1:l9rI6sNaZgNC0LnF3MiE+qTmyBA/tZAg1rtyrGbUMK0=
+github.com/go-swagger/scan-repo-boundary v0.0.0-20180623220736-973b3573c013/go.mod h1:b65mBPzqzZWxOZGxSWrqs4GInLIn+u99Q9q7p+GKni0=
github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM=
github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE=
github.com/go-xmlfmt/xmlfmt v0.0.0-20211206191508-7fd73a941850 h1:PSPmmucxGiFBtbQcttHTUc4LQ3P09AW+ldO2qspyKdY=
github.com/go-xmlfmt/xmlfmt v0.0.0-20211206191508-7fd73a941850/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM=
+github.com/gobuffalo/attrs v0.0.0-20190224210810-a9411de4debd/go.mod h1:4duuawTqi2wkkpB4ePgWMaai6/Kc6WEz83bhFwpHzj0=
+github.com/gobuffalo/depgen v0.0.0-20190329151759-d478694a28d3/go.mod h1:3STtPUQYuzV0gBVOY3vy6CfMm/ljR4pABfrTeHNLHUY=
+github.com/gobuffalo/depgen v0.1.0/go.mod h1:+ifsuy7fhi15RWncXQQKjWS9JPkdah5sZvtHc2RXGlg=
+github.com/gobuffalo/envy v1.6.15/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI=
+github.com/gobuffalo/envy v1.7.0/go.mod h1:n7DRkBerg/aorDM8kbduw5dN3oXGswK5liaSCx4T5NI=
+github.com/gobuffalo/flect v0.1.0/go.mod h1:d2ehjJqGOH/Kjqcoz+F7jHTBbmDb38yXA598Hb50EGs=
+github.com/gobuffalo/flect v0.1.1/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI=
+github.com/gobuffalo/flect v0.1.3/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI=
+github.com/gobuffalo/genny v0.0.0-20190329151137-27723ad26ef9/go.mod h1:rWs4Z12d1Zbf19rlsn0nurr75KqhYp52EAGGxTbBhNk=
+github.com/gobuffalo/genny v0.0.0-20190403191548-3ca520ef0d9e/go.mod h1:80lIj3kVJWwOrXWWMRzzdhW3DsrdjILVil/SFKBzF28=
+github.com/gobuffalo/genny v0.1.0/go.mod h1:XidbUqzak3lHdS//TPu2OgiFB+51Ur5f7CSnXZ/JDvo=
+github.com/gobuffalo/genny v0.1.1/go.mod h1:5TExbEyY48pfunL4QSXxlDOmdsD44RRq4mVZ0Ex28Xk=
+github.com/gobuffalo/gitgen v0.0.0-20190315122116-cc086187d211/go.mod h1:vEHJk/E9DmhejeLeNt7UVvlSGv3ziL+djtTr3yyzcOw=
+github.com/gobuffalo/gogen v0.0.0-20190315121717-8f38393713f5/go.mod h1:V9QVDIxsgKNZs6L2IYiGR8datgMhB577vzTDqypH360=
+github.com/gobuffalo/gogen v0.1.0/go.mod h1:8NTelM5qd8RZ15VjQTFkAW6qOMx5wBbW4dSCS3BY8gg=
+github.com/gobuffalo/gogen v0.1.1/go.mod h1:y8iBtmHmGc4qa3urIyo1shvOD8JftTtfcKi+71xfDNE=
+github.com/gobuffalo/logger v0.0.0-20190315122211-86e12af44bc2/go.mod h1:QdxcLw541hSGtBnhUc4gaNIXRjiDppFGaDqzbrBd3v8=
+github.com/gobuffalo/mapi v1.0.1/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc=
+github.com/gobuffalo/mapi v1.0.2/go.mod h1:4VAGh89y6rVOvm5A8fKFxYG+wIW6LO1FMTG9hnKStFc=
+github.com/gobuffalo/packd v0.0.0-20190315124812-a385830c7fc0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4=
+github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWeG2RIxq4=
+github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ=
+github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0=
+github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw=
github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
@@ -261,6 +344,7 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
+github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
@@ -270,6 +354,7 @@ github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
@@ -302,6 +387,8 @@ github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY=
github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c=
github.com/gorilla/feeds v1.1.2 h1:pxzZ5PD3RJdhFH2FsJJ4x6PqMqbgFk1+Vez4XWBW8Iw=
github.com/gorilla/feeds v1.1.2/go.mod h1:WMib8uJP3BbY+X8Szd1rA5Pzhdfh+HCCAYT2z7Fza6Y=
+github.com/gorilla/handlers v1.5.1 h1:9lRY6j8DEeeBT10CvO9hGW0gmky0BprnvDI5vfhUHH4=
+github.com/gorilla/handlers v1.5.1/go.mod h1:t8XrUpc4KVXb7HGyJ4/cEnwQiaxrX/hz1Zv/4g96P1Q=
github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ=
github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI=
@@ -320,9 +407,15 @@ github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyf
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
+github.com/huandu/xstrings v1.3.3 h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4=
+github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
+github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
+github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU=
+github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
github.com/imkira/go-interpol v1.1.0 h1:KIiKr0VSG2CUW1hl1jpiyuzuJeKUUpC8iM1AIE7N1Vk=
github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA=
+github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
@@ -334,8 +427,13 @@ github.com/jackc/pgx/v5 v5.5.3/go.mod h1:ez9gk+OAat140fv9ErkZDYFWmXLfV+++K0uAOiw
github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk=
github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
+github.com/jessevdk/go-flags v1.5.0 h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc=
+github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
+github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
+github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
+github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
@@ -343,9 +441,12 @@ github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/X
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k=
+github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4=
+github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.10.4/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=
github.com/klauspost/compress v1.10.10/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=
+github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4=
github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
@@ -353,6 +454,8 @@ github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa02
github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc=
github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
+github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
@@ -368,6 +471,13 @@ github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
+github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
+github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
+github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
+github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
+github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
+github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE=
+github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0=
github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
@@ -387,17 +497,27 @@ github.com/minio/minio-go/v7 v7.0.67 h1:BeBvZWAS+kRJm1vGTMJYVjKUNoo0FoEt/wUWdUtf
github.com/minio/minio-go/v7 v7.0.67/go.mod h1:+UXocnUeZ3wHvVh5s95gcrA4YjMIbccT6ubB+1m054A=
github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM=
github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8=
+github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw=
+github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
+github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
+github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
+github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
+github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
+github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
+github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
+github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
github.com/moul/http2curl v1.0.0 h1:dRMWoAtb+ePxMlLkrCbAqh4TlPHXvoGUSQ323/9Zahs=
github.com/moul/http2curl v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
+github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
@@ -412,10 +532,13 @@ github.com/orcaman/writerseeker v0.0.0-20200621085525-1d3f536ff85e h1:s2RNOM/IGd
github.com/orcaman/writerseeker v0.0.0-20200621085525-1d3f536ff85e/go.mod h1:nBdnFKj15wFbf94Rwfq4m30eAcyY9V/IyKAGQFtqkW0=
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0=
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y=
+github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE=
github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo=
github.com/pelletier/go-toml/v2 v2.1.1 h1:LWAJwfNvjQZCFIDKWYQaM62NcYeYViCmWIwmOStowAI=
github.com/pelletier/go-toml/v2 v2.1.1/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
+github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
@@ -436,9 +559,12 @@ github.com/quasoft/memstore v0.0.0-20191010062613-2bce066d2b0b h1:aUNXCGgukb4gtY
github.com/quasoft/memstore v0.0.0-20191010062613-2bce066d2b0b/go.mod h1:wTPjTepVu7uJBYgZ0SdWHQlIas582j6cn2jgk4DDdlg=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
+github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
+github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
+github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc=
@@ -451,6 +577,11 @@ github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWR
github.com/sclevine/agouti v3.0.0+incompatible/go.mod h1:b4WX9W9L1sfQKXeJf1mUTLZKJ48R1S7H23Ji7oFO5Bw=
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
+github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ=
+github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
+github.com/sirupsen/logrus v1.4.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
+github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
+github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
@@ -461,19 +592,25 @@ github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9yS
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8=
github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY=
+github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0=
github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
+github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0=
github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho=
+github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.18.2 h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ=
github.com/spf13/viper v1.18.2/go.mod h1:EKmWIqdnk5lOcmR72yw6hS+8OPYcwD0jteitLMVB+yk=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
@@ -527,6 +664,8 @@ github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc h1:9lRDQMhESg+zvGYm
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc/go.mod h1:bciPuU6GHm1iF1pBvUfxfsH0Wmnc2VbpgvbI9ZWuIRs=
github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80 h1:nrZ3ySNYwJbSpD6ce9duiP+QkD3JuLCcWkdaehUS/3Y=
github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80/go.mod h1:iFyPdL66DjUD96XmzVL3ZntbzcflLnznH0fr99w5VqE=
+github.com/toqueteos/webbrowser v1.2.0 h1:tVP/gpK69Fx+qMJKsLE7TD8LuGWPnEV71wBN9rrstGQ=
+github.com/toqueteos/webbrowser v1.2.0/go.mod h1:XWoZq4cyp9WeUeak7w7LXRUQf1F1ATJMir8RTqb4ayM=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M=
@@ -557,6 +696,11 @@ github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAh
github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds=
github.com/wagslane/go-password-validator v0.3.0 h1:vfxOPzGHkz5S146HDpavl0cw1DSVP061Ry2PX0/ON6I=
github.com/wagslane/go-password-validator v0.3.0/go.mod h1:TI1XJ6T5fRdRnHqHt14pvy1tNVnrwe7m3/f1f2fDphQ=
+github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
+github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs=
+github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g=
+github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM=
+github.com/xdg-go/stringprep v1.0.3/go.mod h1:W3f5j4i+9rC0kuIEJL0ky1VpHXQU3ocBgklLGvcBnW8=
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c=
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=
@@ -565,6 +709,7 @@ github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0 h1:6fRhSjgLCkTD3JnJxvaJ4Sj+TYblw757bqYgZaOq5ZY=
github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI=
+github.com/youmark/pkcs8 v0.0.0-20181117223130-1be2e3e5546d/go.mod h1:rHwXgn7JulP+udvsHwJoVG1YGAP6VLg4y9I5dyZdqmA=
github.com/yudai/gojsondiff v1.0.0 h1:27cbfqXLVEJ1o8I6v3y9lg8Ydm53EKqHXAOMxEGlCOA=
github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg=
github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82 h1:BHyfKlQyqbsFN5p3IfnEUduWvb9is428/nNb5L3U01M=
@@ -580,6 +725,11 @@ github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ=
github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0=
github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA=
+go.mongodb.org/mongo-driver v1.7.3/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg=
+go.mongodb.org/mongo-driver v1.7.5/go.mod h1:VXEWRZ6URJIkUq2SCAyapmhH0ZLRBP+FT4xhp5Zvxng=
+go.mongodb.org/mongo-driver v1.10.0/go.mod h1:wsihk0Kdgv8Kqu1Anit4sfK+22vSFbUrAVEYRhCXrA8=
+go.mongodb.org/mongo-driver v1.11.3 h1:Ql6K6qYHEzB6xvu4+AU0BoRoqf9vFPcc4o7MUIdPW8Y=
+go.mongodb.org/mongo-driver v1.11.3/go.mod h1:PTSz5yu21bkT/wXpkS7WR5f0ddqw5quethTUn9WM+2g=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
@@ -614,14 +764,19 @@ go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN8
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.5.0 h1:jpGode6huXQxcskEIpOCvrU+tzo81b6+oFLUYXWtH/Y=
golang.org/x/arch v0.5.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
+golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
+golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
golang.org/x/crypto v0.20.0 h1:jmAMJJZXr5KiCw05dfYK9QnqaqKLYXijU23lsEdcQqg=
golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
@@ -692,7 +847,10 @@ golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81R
golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM=
+golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@@ -707,20 +865,27 @@ golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190419153524-e8e3143a4f4a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -746,18 +911,23 @@ golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -765,9 +935,11 @@ golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
+golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -780,9 +952,13 @@ golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20190531172133-b3315ee88b7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
@@ -912,6 +1088,7 @@ google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqw
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
@@ -931,6 +1108,7 @@ gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
diff --git a/internal/api/activitypub/users/common.go b/internal/api/activitypub/users/common.go
index 2f16c7331..a10b99d95 100644
--- a/internal/api/activitypub/users/common.go
+++ b/internal/api/activitypub/users/common.go
@@ -72,7 +72,7 @@ type SwaggerFeaturedCollection struct {
// example: OrderedCollection
Type string `json:"type"`
// List of status URIs.
- // example: ['https://example.org/users/some_user/statuses/01GSZ0F7Q8SJKNRF777GJD271R', 'https://example.org/users/some_user/statuses/01GSZ0G012CBQ7TEKX689S3QRE']
+ // example: ["https://example.org/users/some_user/statuses/01GSZ0F7Q8SJKNRF777GJD271R", "https://example.org/users/some_user/statuses/01GSZ0G012CBQ7TEKX689S3QRE"]
Items []string `json:"items"`
// Number of items in this collection.
// example: 2
diff --git a/internal/api/activitypub/users/featured.go b/internal/api/activitypub/users/featured.go
index f256c1e75..b8b4fe681 100644
--- a/internal/api/activitypub/users/featured.go
+++ b/internal/api/activitypub/users/featured.go
@@ -44,6 +44,14 @@ import (
// produces:
// - application/activity+json
//
+// parameters:
+// -
+// name: username
+// type: string
+// description: Account name of the user
+// in: path
+// required: true
+//
// responses:
// '200':
// in: body
diff --git a/internal/api/client/accounts/accountupdate.go b/internal/api/client/accounts/accountupdate.go
index 41f76adf5..02c0fc5ac 100644
--- a/internal/api/client/accounts/accountupdate.go
+++ b/internal/api/client/accounts/accountupdate.go
@@ -120,12 +120,67 @@ import (
// description: Enable RSS feed for this account's Public posts at `/[username]/feed.rss`
// type: boolean
// -
-// name: fields_attributes
+// name: fields_attributes[0][name]
// in: formData
-// description: Profile fields to be added to this account's profile
-// type: array
-// items:
-// type: object
+// description: Name of 1st profile field to be added to this account's profile.
+// (The index may be any string; add more indexes to send more fields.)
+// type: string
+// -
+// name: fields_attributes[0][value]
+// in: formData
+// description: Value of 1st profile field to be added to this account's profile.
+// (The index may be any string; add more indexes to send more fields.)
+// type: string
+// -
+// name: fields_attributes[1][name]
+// in: formData
+// description: Name of 2nd profile field to be added to this account's profile.
+// type: string
+// -
+// name: fields_attributes[1][value]
+// in: formData
+// description: Value of 2nd profile field to be added to this account's profile.
+// type: string
+// -
+// name: fields_attributes[2][name]
+// in: formData
+// description: Name of 3rd profile field to be added to this account's profile.
+// type: string
+// -
+// name: fields_attributes[2][value]
+// in: formData
+// description: Value of 3rd profile field to be added to this account's profile.
+// type: string
+// -
+// name: fields_attributes[3][name]
+// in: formData
+// description: Name of 4th profile field to be added to this account's profile.
+// type: string
+// -
+// name: fields_attributes[3][value]
+// in: formData
+// description: Value of 4th profile field to be added to this account's profile.
+// type: string
+// -
+// name: fields_attributes[4][name]
+// in: formData
+// description: Name of 5th profile field to be added to this account's profile.
+// type: string
+// -
+// name: fields_attributes[4][value]
+// in: formData
+// description: Value of 5th profile field to be added to this account's profile.
+// type: string
+// -
+// name: fields_attributes[5][name]
+// in: formData
+// description: Name of 6th profile field to be added to this account's profile.
+// type: string
+// -
+// name: fields_attributes[5][value]
+// in: formData
+// description: Value of 6th profile field to be added to this account's profile.
+// type: string
//
// security:
// - OAuth2 Bearer:
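The indexed `fields_attributes[0][name]` / `fields_attributes[0][value]` pairs above replace the old single array-of-objects parameter. As a minimal client-side sketch of the resulting form encoding (the host and bearer token are placeholders, not part of this change):

package main

import (
	"fmt"
	"net/http"
	"net/url"
	"strings"
)

func main() {
	// Each profile field is an indexed pair of formData keys;
	// the index only serves to group a name with its value.
	form := url.Values{}
	form.Set("fields_attributes[0][name]", "Pronouns")
	form.Set("fields_attributes[0][value]", "they/them")
	form.Set("fields_attributes[1][name]", "Website")
	form.Set("fields_attributes[1][value]", "https://example.org")

	req, err := http.NewRequest(
		http.MethodPatch,
		"https://example.org/api/v1/accounts/update_credentials",
		strings.NewReader(form.Encode()),
	)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Set("Authorization", "Bearer YOUR_TOKEN") // placeholder

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}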
diff --git a/internal/api/client/admin/domainkeysexpire.go b/internal/api/client/admin/domainkeysexpire.go
index 10a7597a4..4990d879f 100644
--- a/internal/api/client/admin/domainkeysexpire.go
+++ b/internal/api/client/admin/domainkeysexpire.go
@@ -61,8 +61,9 @@ import (
// -
// name: domain
// in: formData
-// description: Domain to expire keys for.
-// example: example.org
+// description: |-
+// Domain to expire keys for.
+// Sample: example.org
// type: string
//
// security:
diff --git a/internal/api/client/admin/emojicategoriesget.go b/internal/api/client/admin/emojicategoriesget.go
index 2c097c6df..51eb8fee4 100644
--- a/internal/api/client/admin/emojicategoriesget.go
+++ b/internal/api/client/admin/emojicategoriesget.go
@@ -38,21 +38,13 @@ import (
// produces:
// - application/json
//
-// parameters:
-// -
-// name: id
-// type: string
-// description: The id of the emoji.
-// in: path
-// required: true
-//
// responses:
// '200':
// description: Array of existing emoji categories.
// schema:
// type: array
// items:
-// "$ref": "#/definitions/adminEmojiCategory"
+// "$ref": "#/definitions/emojiCategory"
// '400':
// description: bad request
// '401':
diff --git a/internal/api/client/admin/emojicreate.go b/internal/api/client/admin/emojicreate.go
index 5d024f039..9086b27e0 100644
--- a/internal/api/client/admin/emojicreate.go
+++ b/internal/api/client/admin/emojicreate.go
@@ -67,10 +67,11 @@ import (
// name: category
// in: formData
// description: >-
-// Category in which to place the new emoji. 64 characters or less.
+// Category in which to place the new emoji.
// If left blank, emoji will be uncategorized. If a category with the
// given name doesn't exist yet, it will be created.
// type: string
+// maxLength: 64
// required: false
//
// security:
diff --git a/internal/api/client/admin/emojiupdate.go b/internal/api/client/admin/emojiupdate.go
index f531f36f9..ffde2d597 100644
--- a/internal/api/client/admin/emojiupdate.go
+++ b/internal/api/client/admin/emojiupdate.go
@@ -73,6 +73,10 @@ import (
// For REMOTE emojis, `copy` or `disable` are supported.
// For LOCAL emojis, only `modify` is supported.
// type: string
+// enum:
+// - copy
+// - disable
+// - modify
// required: true
// -
// name: shortcode
@@ -94,9 +98,10 @@ import (
// name: category
// in: formData
// description: >-
-// Category in which to place the emoji. 64 characters or less.
+// Category in which to place the emoji.
// If a category with the given name doesn't exist yet, it will be created.
// type: string
+// maxLength: 64
//
// security:
// - OAuth2 Bearer:
diff --git a/internal/api/client/admin/reportresolve.go b/internal/api/client/admin/reportresolve.go
index 9c1c32afe..2ad979b0b 100644
--- a/internal/api/client/admin/reportresolve.go
+++ b/internal/api/client/admin/reportresolve.go
@@ -60,8 +60,9 @@ import (
// Useful for providing an explanation about what action was taken (if any)
// before the report was marked as resolved. This will be visible to the user
// that created the report!
+//
+// Sample: The reported account was suspended.
// type: string
-// example: The reported account was suspended.
//
// security:
// - OAuth2 Bearer:
diff --git a/internal/api/client/admin/rulecreate.go b/internal/api/client/admin/rulecreate.go
index e838bff1e..155c69db0 100644
--- a/internal/api/client/admin/rulecreate.go
+++ b/internal/api/client/admin/rulecreate.go
@@ -43,15 +43,6 @@ import (
// produces:
// - application/json
//
-// parameters:
-// -
-// name: text
-// in: formData
-// description: >-
-// Text body for the instance rule, plaintext.
-// type: string
-// required: true
-//
// security:
// - OAuth2 Bearer:
// - admin
diff --git a/internal/api/client/admin/ruledelete.go b/internal/api/client/admin/ruledelete.go
index dfa84615f..834149978 100644
--- a/internal/api/client/admin/ruledelete.go
+++ b/internal/api/client/admin/ruledelete.go
@@ -28,7 +28,7 @@ import (
"github.com/superseriousbusiness/gotosocial/internal/oauth"
)
-// RuleDELETEHandler swagger:operation DELETE /api/v1/admin/instance/rules{id} ruleDelete
+// RuleDELETEHandler swagger:operation DELETE /api/v1/admin/instance/rules/{id} ruleDelete
//
// Delete an existing instance rule.
//
@@ -45,10 +45,10 @@ import (
// parameters:
// -
// name: id
-// in: formData
+// in: path
// description: >-
// The id of the rule to delete.
-// type: path
+// type: string
// required: true
//
// security:
diff --git a/internal/api/client/admin/rulesget.go b/internal/api/client/admin/rulesget.go
index 2cc9e0158..e00ff1c07 100644
--- a/internal/api/client/admin/rulesget.go
+++ b/internal/api/client/admin/rulesget.go
@@ -27,7 +27,7 @@ import (
"github.com/superseriousbusiness/gotosocial/internal/oauth"
)
-// rulesGETHandler swagger:operation GET /api/v1/admin/rules rules
+// RulesGETHandler swagger:operation GET /api/v1/admin/instance/rules adminsRuleGet
//
// View instance rules, with IDs.
//
diff --git a/internal/api/client/admin/ruleupdate.go b/internal/api/client/admin/ruleupdate.go
index eafa3af34..2ba31485e 100644
--- a/internal/api/client/admin/ruleupdate.go
+++ b/internal/api/client/admin/ruleupdate.go
@@ -29,7 +29,7 @@ import (
"github.com/superseriousbusiness/gotosocial/internal/oauth"
)
-// RulePATCHHandler swagger:operation PATCH /api/v1/admin/instance/rules{id} ruleUpdate
+// RulePATCHHandler swagger:operation PATCH /api/v1/admin/instance/rules/{id} ruleUpdate
//
// Update an existing instance rule.
//
@@ -43,22 +43,6 @@ import (
// produces:
// - application/json
//
-// parameters:
-// -
-// name: id
-// in: formData
-// description: >-
-// The id of the rule to update.
-// type: path
-// required: true
-// -
-// name: text
-// in: formData
-// description: >-
-// Text body for the updated instance rule, plaintext.
-// type: string
-// required: true
-//
// security:
// - OAuth2 Bearer:
// - admin
diff --git a/internal/api/client/filters/v1/filterpost.go b/internal/api/client/filters/v1/filterpost.go
index b0a626199..e150e8609 100644
--- a/internal/api/client/filters/v1/filterpost.go
+++ b/internal/api/client/filters/v1/filterpost.go
@@ -48,49 +48,58 @@ import (
// name: phrase
// in: formData
// required: true
-// description: The text to be filtered.
+// description: |-
+// The text to be filtered.
+//
+// Sample: fnord
// maxLength: 40
// type: string
-// example: "fnord"
// -
// name: context
// in: formData
// required: true
-// description: The contexts in which the filter should be applied.
+// description: |-
+// The contexts in which the filter should be applied.
+//
+// Sample: home, public
// enum:
// - home
// - notifications
// - public
// - thread
// - account
-// example:
-// - home
-// - public
-// items:
-// $ref: '#/definitions/filterContext'
-// minLength: 1
// type: array
+// items:
+// type: string
+// minItems: 1
// uniqueItems: true
// -
// name: expires_in
// in: formData
-// description: Number of seconds from now that the filter should expire. If omitted, filter never expires.
+// description: |-
+// Number of seconds from now that the filter should expire. If omitted, filter never expires.
+//
+// Sample: 86400
// type: number
-// example: 86400
// -
// name: irreversible
// in: formData
-// description: Should matching entities be removed from the user's timelines/views, instead of hidden? Not supported yet.
+// description: |-
+// Should matching entities be removed from the user's timelines/views, instead of hidden? Not supported yet.
+//
+// Sample: false
// type: boolean
// default: false
-// example: false
// -
// name: whole_word
// in: formData
-// description: Should the filter consider word boundaries?
+// description: |-
+// Should the filter consider word boundaries?
+//
+// Sample: true
// type: boolean
// default: false
-// example: true
//
// security:
// - OAuth2 Bearer:
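The context parameter above is an array of strings submitted as repeated form keys. A rough sketch of creating a v1 filter, assuming the `context[]` key follows the same array-binding convention as the other bracketed form keys in this diff (host and token are placeholders):

package main

import (
	"fmt"
	"net/http"
	"net/url"
	"strings"
)

func main() {
	form := url.Values{
		"phrase":     {"fnord"},
		"context[]":  {"home", "public"}, // repeated once per context
		"expires_in": {"86400"},
		"whole_word": {"true"},
	}
	req, err := http.NewRequest(
		http.MethodPost,
		"https://example.org/api/v1/filters",
		strings.NewReader(form.Encode()),
	)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Set("Authorization", "Bearer YOUR_TOKEN") // placeholder

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}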
diff --git a/internal/api/client/filters/v1/filterput.go b/internal/api/client/filters/v1/filterput.go
index c686e4515..1822efb6d 100644
--- a/internal/api/client/filters/v1/filterput.go
+++ b/internal/api/client/filters/v1/filterput.go
@@ -54,49 +54,58 @@ import (
// name: phrase
// in: formData
// required: true
-// description: The text to be filtered.
+// description: |-
+// The text to be filtered.
+//
+// Sample: fnord
// maxLength: 40
// type: string
-// example: "fnord"
// -
// name: context
// in: formData
// required: true
-// description: The contexts in which the filter should be applied.
+// description: |-
+// The contexts in which the filter should be applied.
+//
+// Sample: home, public
// enum:
// - home
// - notifications
// - public
// - thread
// - account
-// example:
-// - home
-// - public
-// items:
-// $ref: '#/definitions/filterContext'
-// minLength: 1
// type: array
+// items:
+// type: string
+// minItems: 1
// uniqueItems: true
// -
// name: expires_in
// in: formData
-// description: Number of seconds from now that the filter should expire. If omitted, filter never expires.
+// description: |-
+// Number of seconds from now that the filter should expire. If omitted, filter never expires.
+//
+// Sample: 86400
// type: number
-// example: 86400
// -
// name: irreversible
// in: formData
-// description: Should matching entities be removed from the user's timelines/views, instead of hidden? Not supported yet.
+// description: |-
+// Should matching entities be removed from the user's timelines/views, instead of hidden? Not supported yet.
+//
+// Sample: false
// type: boolean
// default: false
-// example: false
// -
// name: whole_word
// in: formData
-// description: Should the filter consider word boundaries?
+// description: |-
+// Should the filter consider word boundaries?
+//
+// Sample: true
// type: boolean
// default: false
-// example: true
//
// security:
// - OAuth2 Bearer:
diff --git a/internal/api/client/instance/instancepatch.go b/internal/api/client/instance/instancepatch.go
index 484579cf2..58549a866 100644
--- a/internal/api/client/instance/instancepatch.go
+++ b/internal/api/client/instance/instancepatch.go
@@ -52,7 +52,7 @@ import (
// in: formData
// description: Title to use for the instance.
// type: string
-// maximum: 40
+// maxLength: 40
// allowEmptyValue: true
// -
// name: contact_username
@@ -73,21 +73,21 @@ import (
// in: formData
// description: Short description of the instance.
// type: string
-// maximum: 500
+// maxLength: 500
// allowEmptyValue: true
// -
// name: description
// in: formData
// description: Longer description of the instance.
// type: string
-// maximum: 5000
+// maxLength: 5000
// allowEmptyValue: true
// -
// name: terms
// in: formData
// description: Terms and conditions of the instance.
// type: string
-// maximum: 5000
+// maxLength: 5000
// allowEmptyValue: true
// -
// name: thumbnail
@@ -113,7 +113,7 @@ import (
// '200':
// description: "The newly updated instance."
// schema:
-// "$ref": "#/definitions/instance"
+// "$ref": "#/definitions/instanceV1"
// '400':
// description: bad request
// '401':
diff --git a/internal/api/client/lists/listupdate.go b/internal/api/client/lists/listupdate.go
index 58a4cf1c4..966de4098 100644
--- a/internal/api/client/lists/listupdate.go
+++ b/internal/api/client/lists/listupdate.go
@@ -57,9 +57,10 @@ import (
// -
// name: title
// type: string
-// description: Title of this list.
+// description: |-
+// Title of this list.
+// Sample: Cool People
// in: formData
-// example: Cool People
// -
// name: replies_policy
// type: string
@@ -68,8 +69,12 @@ import (
// followed = Show replies to any followed user
// list = Show replies to members of the list
// none = Show replies to no one
+// Sample: list
+// enum:
+// - followed
+// - list
+// - none
// in: formData
-// example: list
//
// security:
// - OAuth2 Bearer:
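With replies_policy now constrained to the followed/list/none enum, a JSON update request looks roughly like the sketch below (the list ID, host, and token are placeholders, not part of this change):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	body, err := json.Marshal(map[string]string{
		"title":          "Cool People",
		"replies_policy": "list", // one of: followed, list, none
	})
	if err != nil {
		panic(err)
	}
	req, err := http.NewRequest(
		http.MethodPut,
		"https://example.org/api/v1/lists/01H000000000000000000000PL", // placeholder list ID
		bytes.NewReader(body),
	)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer YOUR_TOKEN") // placeholder

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}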
diff --git a/internal/api/client/notifications/notificationget.go b/internal/api/client/notifications/notificationget.go
index 551eeca39..66bdefb28 100644
--- a/internal/api/client/notifications/notificationget.go
+++ b/internal/api/client/notifications/notificationget.go
@@ -38,6 +38,14 @@ import (
// produces:
// - application/json
//
+// parameters:
+// -
+// name: id
+// type: string
+// description: The ID of the notification.
+// in: path
+// required: true
+//
// security:
// - OAuth2 Bearer:
// - read:notifications
diff --git a/internal/api/client/statuses/statuscreate.go b/internal/api/client/statuses/statuscreate.go
index 929adaa6f..efbe79223 100644
--- a/internal/api/client/statuses/statuscreate.go
+++ b/internal/api/client/statuses/statuscreate.go
@@ -48,6 +48,145 @@ import (
// - application/xml
// - application/x-www-form-urlencoded
//
+// parameters:
+// -
+// name: status
+// x-go-name: Status
+// description: |-
+// Text content of the status.
+// If media_ids is provided, this becomes optional.
+// Attaching a poll is optional while status is provided.
+// type: string
+// in: formData
+// -
+// name: media_ids
+// x-go-name: MediaIDs
+// description: |-
+// Array of Attachment ids to be attached as media.
+// If provided, status becomes optional, and poll cannot be used.
+//
+// If the status is being submitted as a form, the key is 'media_ids[]',
+// but if it's json or xml, the key is 'media_ids'.
+// type: array
+// items:
+// type: string
+// in: formData
+// -
+// name: poll[options][]
+// x-go-name: PollOptions
+// description: |-
+// Array of possible poll answers.
+// If provided, media_ids cannot be used, and poll[expires_in] must be provided.
+// type: array
+// items:
+// type: string
+// in: formData
+// -
+// name: poll[expires_in]
+// x-go-name: PollExpiresIn
+// description: |-
+// Duration the poll should be open, in seconds.
+// If provided, media_ids cannot be used, and poll[options] must be provided.
+// type: integer
+// format: int64
+// in: formData
+// -
+// name: poll[multiple]
+// x-go-name: PollMultiple
+// description: Allow multiple choices on this poll.
+// type: boolean
+// default: false
+// in: formData
+// -
+// name: poll[hide_totals]
+// x-go-name: PollHideTotals
+// description: Hide vote counts until the poll ends.
+// type: boolean
+// default: true
+// in: formData
+// -
+// name: in_reply_to_id
+// x-go-name: InReplyToID
+// description: ID of the status being replied to, if status is a reply.
+// type: string
+// in: formData
+// -
+// name: sensitive
+// x-go-name: Sensitive
+// description: Status and attached media should be marked as sensitive.
+// type: boolean
+// in: formData
+// -
+// name: spoiler_text
+// x-go-name: SpoilerText
+// description: |-
+// Text to be shown as a warning or subject before the actual content.
+// Statuses are generally collapsed behind this field.
+// type: string
+// in: formData
+// -
+// name: visibility
+// x-go-name: Visibility
+// description: Visibility of the posted status.
+// type: string
+// enum:
+// - public
+// - unlisted
+// - private
+// - mutuals_only
+// - direct
+// in: formData
+// -
+// name: scheduled_at
+// x-go-name: ScheduledAt
+// description: |-
+// ISO 8601 Datetime at which to schedule a status.
+// Providing this parameter will cause ScheduledStatus to be returned instead of Status.
+// Must be at least 5 minutes in the future.
+//
+// This feature isn't implemented yet.
+// type: string
+// in: formData
+// -
+// name: language
+// x-go-name: Language
+// description: ISO 639 language code for this status.
+// type: string
+// in: formData
+// -
+// name: content_type
+// x-go-name: ContentType
+// description: Content type to use when parsing this status.
+// type: string
+// enum:
+// - text/plain
+// - text/markdown
+// in: formData
+// -
+// name: federated
+// x-go-name: Federated
+// description: This status will be federated beyond the local timeline(s).
+// in: formData
+// type: boolean
+// -
+// name: boostable
+// x-go-name: Boostable
+// description: This status can be boosted/reblogged.
+// in: formData
+// type: boolean
+// -
+// name: replyable
+// x-go-name: Replyable
+// description: This status can be replied to.
+// in: formData
+// type: boolean
+// -
+// name: likeable
+// x-go-name: Likeable
+// description: This status can be liked/faved.
+// in: formData
+// type: boolean
+//
// produces:
// - application/json
//
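The bracketed poll parameter names above are literal form keys; the new tests below exercise exactly this encoding. For reference, a small sketch of what the form body looks like:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Repeat poll[options][] once per answer; the other poll
	// fields each take a single value.
	form := url.Values{
		"status":           {"this is a status with a poll!"},
		"visibility":       {"public"},
		"poll[options][]":  {"first option", "second option"},
		"poll[expires_in]": {"3600"},
		"poll[multiple]":   {"true"},
	}
	fmt.Println(form.Encode())
}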
diff --git a/internal/api/client/statuses/statuscreate_test.go b/internal/api/client/statuses/statuscreate_test.go
index 881943450..ab7c67abf 100644
--- a/internal/api/client/statuses/statuscreate_test.go
+++ b/internal/api/client/statuses/statuscreate_test.go
@@ -21,10 +21,12 @@ import (
"context"
"encoding/json"
"fmt"
+ "io"
"io/ioutil"
"net/http"
"net/http/httptest"
"net/url"
+ "strings"
"testing"
"github.com/stretchr/testify/suite"
@@ -427,6 +429,74 @@ func (suite *StatusCreateTestSuite) TestPostNewStatusWithNoncanonicalLanguageTag
suite.Equal("en-US", *statusReply.Language)
}
+// Post a new status with an attached poll.
+func (suite *StatusCreateTestSuite) testPostNewStatusWithPoll(configure func(request *http.Request)) {
+ t := suite.testTokens["local_account_1"]
+ oauthToken := oauth.DBTokenToToken(t)
+
+ // setup
+ recorder := httptest.NewRecorder()
+ ctx, _ := testrig.CreateGinTestContext(recorder, nil)
+ ctx.Set(oauth.SessionAuthorizedApplication, suite.testApplications["application_1"])
+ ctx.Set(oauth.SessionAuthorizedToken, oauthToken)
+ ctx.Set(oauth.SessionAuthorizedUser, suite.testUsers["local_account_1"])
+ ctx.Set(oauth.SessionAuthorizedAccount, suite.testAccounts["local_account_1"])
+ ctx.Request = httptest.NewRequest(http.MethodPost, fmt.Sprintf("http://localhost:8080/%s", statuses.BasePath), nil) // the endpoint we're hitting
+ ctx.Request.Header.Set("accept", "application/json")
+ configure(ctx.Request)
+ suite.statusModule.StatusCreatePOSTHandler(ctx)
+
+ suite.EqualValues(http.StatusOK, recorder.Code)
+
+ result := recorder.Result()
+ defer result.Body.Close()
+ b, err := ioutil.ReadAll(result.Body)
+ suite.NoError(err)
+
+ statusReply := &apimodel.Status{}
+ err = json.Unmarshal(b, statusReply)
+ suite.NoError(err)
+
+ suite.Equal("<p>this is a status with a poll!</p>", statusReply.Content)
+ suite.Equal(apimodel.VisibilityPublic, statusReply.Visibility)
+ if suite.NotNil(statusReply.Poll) {
+ if suite.Len(statusReply.Poll.Options, 2) {
+ suite.Equal("first option", statusReply.Poll.Options[0].Title)
+ suite.Equal("second option", statusReply.Poll.Options[1].Title)
+ }
+ suite.NotZero(statusReply.Poll.ExpiresAt)
+ suite.False(statusReply.Poll.Expired)
+ suite.True(statusReply.Poll.Multiple)
+ }
+}
+
+func (suite *StatusCreateTestSuite) TestPostNewStatusWithPollForm() {
+ suite.testPostNewStatusWithPoll(func(request *http.Request) {
+ request.Form = url.Values{
+ "status": {"this is a status with a poll!"},
+ "visibility": {"public"},
+ "poll[options][]": {"first option", "second option"},
+ "poll[expires_in]": {"3600"},
+ "poll[multiple]": {"true"},
+ }
+ })
+}
+
+func (suite *StatusCreateTestSuite) TestPostNewStatusWithPollJSON() {
+ suite.testPostNewStatusWithPoll(func(request *http.Request) {
+ request.Header.Set("content-type", "application/json")
+ request.Body = io.NopCloser(strings.NewReader(`{
+ "status": "this is a status with a poll!",
+ "visibility": "public",
+ "poll": {
+ "options": ["first option", "second option"],
+ "expires_in": 3600,
+ "multiple": true
+ }
+ }`))
+ })
+}
+
func TestStatusCreateTestSuite(t *testing.T) {
suite.Run(t, new(StatusCreateTestSuite))
}
diff --git a/internal/api/client/timelines/tag.go b/internal/api/client/timelines/tag.go
index 0d95a6c58..e66955a73 100644
--- a/internal/api/client/timelines/tag.go
+++ b/internal/api/client/timelines/tag.go
@@ -26,7 +26,7 @@ import (
"github.com/superseriousbusiness/gotosocial/internal/oauth"
)
-// HomeTimelineGETHandler swagger:operation GET /api/v1/timelines/tag/{tag_name} tagTimeline
+// TagTimelineGETHandler swagger:operation GET /api/v1/timelines/tag/{tag_name} tagTimeline
//
// See public statuses that use the given hashtag (case insensitive).
//
@@ -49,6 +49,12 @@ import (
//
// parameters:
// -
+// name: tag_name
+// type: string
+// description: Name of the tag
+// in: path
+// required: true
+// -
// name: max_id
// type: string
// description: >-
diff --git a/internal/api/model/account.go b/internal/api/model/account.go
index 46b043f80..7d3fa4b62 100644
--- a/internal/api/model/account.go
+++ b/internal/api/model/account.go
@@ -170,7 +170,7 @@ type UpdateCredentialsRequest struct {
// UpdateSource is to be used specifically in an UpdateCredentialsRequest.
//
-// swagger:model updateSource
+// swagger:ignore
type UpdateSource struct {
// Default post privacy for authored statuses.
Privacy *string `form:"privacy" json:"privacy"`
@@ -185,7 +185,7 @@ type UpdateSource struct {
// UpdateField is to be used specifically in an UpdateCredentialsRequest.
// By default, max 6 fields and 255 characters per property/value.
//
-// swagger:model updateField
+// swagger:ignore
type UpdateField struct {
// Key this form field was submitted with;
// only set if it was submitted as JSON.
diff --git a/internal/api/model/announcement.go b/internal/api/model/announcement.go
index f2b3b102c..f776ecf92 100644
--- a/internal/api/model/announcement.go
+++ b/internal/api/model/announcement.go
@@ -19,7 +19,7 @@ package model
// Announcement models an admin announcement for the instance.
//
-// swagger:model announcement
+// TODO: swagger:model announcement once announcement API is supported
type Announcement struct {
// The ID of the announcement.
// example: 01FC30T7X4TNCZK0TH90QYF3M4
diff --git a/internal/api/model/announcementreaction.go b/internal/api/model/announcementreaction.go
index a77e13bf6..f0a7006e9 100644
--- a/internal/api/model/announcementreaction.go
+++ b/internal/api/model/announcementreaction.go
@@ -19,7 +19,7 @@ package model
// AnnouncementReaction models a user reaction to an announcement.
//
-// swagger:model announcementReaction
+// TODO: swagger:model announcementReaction once announcement API is supported
type AnnouncementReaction struct {
// The emoji used for the reaction. Either a unicode emoji, or a custom emoji's shortcode.
// example: blobcat_uwu
diff --git a/internal/api/model/domain.go b/internal/api/model/domain.go
index a5e1ddf10..ddc96ef05 100644
--- a/internal/api/model/domain.go
+++ b/internal/api/model/domain.go
@@ -65,7 +65,7 @@ type DomainPermission struct {
// DomainPermissionRequest is the form submitted as a POST to create a new domain permission entry (allow/block).
//
-// swagger:model domainPermissionCreateRequest
+// swagger:ignore
type DomainPermissionRequest struct {
// A list of domains for which this permission request should apply.
// Only used if import=true is specified.
@@ -87,9 +87,9 @@ type DomainPermissionRequest struct {
PublicComment string `form:"public_comment" json:"public_comment" xml:"public_comment"`
}
-// DomainBlockCreateRequest is the form submitted as a POST to /api/v1/admin/domain_keys_expire to expire a domain's public keys.
+// DomainKeysExpireRequest is the form submitted as a POST to /api/v1/admin/domain_keys_expire to expire a domain's public keys.
//
-// swagger:model domainKeysExpireRequest
+// swagger:parameters domainKeysExpire
type DomainKeysExpireRequest struct {
// hostname/domain to expire keys for.
Domain string `form:"domain" json:"domain" xml:"domain"`
diff --git a/internal/api/model/emoji.go b/internal/api/model/emoji.go
index acf1f1332..cebc38196 100644
--- a/internal/api/model/emoji.go
+++ b/internal/api/model/emoji.go
@@ -42,7 +42,7 @@ type Emoji struct {
// EmojiCreateRequest represents a request to create a custom emoji made through the admin API.
//
-// swagger:model emojiCreateRequest
+// swagger:ignore
type EmojiCreateRequest struct {
// Desired shortcode for the emoji, without surrounding colons. This must be unique for the domain.
// example: blobcat_uwu
@@ -56,7 +56,7 @@ type EmojiCreateRequest struct {
// EmojiUpdateRequest represents a request to update a custom emoji, made through the admin API.
//
-// swagger:model emojiUpdateRequest
+// swagger:ignore
type EmojiUpdateRequest struct {
// Type of action. One of disable, modify, copy.
Type EmojiUpdateType `form:"type" json:"type" xml:"type"`
diff --git a/internal/api/model/headerfilter.go b/internal/api/model/headerfilter.go
index 96ba819f5..0800adeb2 100644
--- a/internal/api/model/headerfilter.go
+++ b/internal/api/model/headerfilter.go
@@ -18,6 +18,8 @@
package model
// HeaderFilter represents a regex value filter applied to one particular HTTP header (allow / block).
+//
+// swagger:model headerFilter
type HeaderFilter struct {
// The ID of the header filter.
// example: 01FBW21XJA09XYX51KV5JVBW0F
@@ -45,11 +47,15 @@ type HeaderFilter struct {
// HeaderFilterRequest is the form submitted as a POST to create a new header filter entry (allow / block).
//
-// swagger:model headerFilterCreateRequest
+// swagger:parameters headerFilterAllowCreate headerFilterBlockCreate
type HeaderFilterRequest struct {
// The HTTP header to match against (e.g. User-Agent).
+ // required: true
+ // in: formData
Header string `form:"header" json:"header" xml:"header"`
// The header value matching regular expression.
+ // required: true
+ // in: formData
Regex string `form:"regex" json:"regex" xml:"regex"`
}
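Since HeaderFilterRequest now documents its formData parameters inline, a create request is just two form fields. A sketch, with the caveat that the /api/v1/admin/header_blocks path is inferred from the headerFilterBlockCreate operation ID and may not match the registered route (host and token are placeholders too):

package main

import (
	"fmt"
	"net/http"
	"net/url"
	"strings"
)

func main() {
	form := url.Values{
		"header": {"User-Agent"},    // HTTP header to match against
		"regex":  {"(?i)^curl/.*$"}, // value-matching regular expression
	}
	req, err := http.NewRequest(
		http.MethodPost,
		"https://example.org/api/v1/admin/header_blocks", // assumed path
		strings.NewReader(form.Encode()),
	)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Set("Authorization", "Bearer ADMIN_TOKEN") // placeholder

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}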
diff --git a/internal/api/model/instancev1.go b/internal/api/model/instancev1.go
index b402cdefe..03e27619c 100644
--- a/internal/api/model/instancev1.go
+++ b/internal/api/model/instancev1.go
@@ -60,7 +60,7 @@ type InstanceV1 struct {
// example: 0.1.1 cb85f65
Version string `json:"version"`
// Primary language of the instance.
- // example: en
+ // example: ["en"]
Languages []string `json:"languages"`
// New account registrations are enabled on this instance.
Registrations bool `json:"registrations"`
diff --git a/internal/api/model/list.go b/internal/api/model/list.go
index f897bcc88..03ea3420d 100644
--- a/internal/api/model/list.go
+++ b/internal/api/model/list.go
@@ -37,7 +37,7 @@ type List struct {
// swagger:parameters listCreate
type ListCreateRequest struct {
// Title of this list.
- // example: Cool People
+ // Sample: Cool People
// in: formData
// required: true
Title string `form:"title" json:"title" xml:"title"`
@@ -45,28 +45,35 @@ type ListCreateRequest struct {
// followed = Show replies to any followed user
// list = Show replies to members of the list
// none = Show replies to no one
- // example: list
+ // Sample: list
// default: list
// in: formData
+ // enum:
+ // - followed
+ // - list
+ // - none
RepliesPolicy string `form:"replies_policy" json:"replies_policy" xml:"replies_policy"`
}
// ListUpdateRequest models list update parameters.
//
-// swagger:parameters listUpdate
+// swagger:ignore
type ListUpdateRequest struct {
// Title of this list.
- // example: Cool People
+ // Sample: Cool People
// in: formData
Title *string `form:"title" json:"title" xml:"title"`
// RepliesPolicy for this list.
// followed = Show replies to any followed user
// list = Show replies to members of the list
// none = Show replies to no one
+ // Sample: list
// in: formData
RepliesPolicy *string `form:"replies_policy" json:"replies_policy" xml:"replies_policy"`
}
+// ListAccountsChangeRequest is a list of account IDs to add to or remove from a list.
+//
// swagger:ignore
type ListAccountsChangeRequest struct {
AccountIDs []string `form:"account_ids[]" json:"account_ids" xml:"account_ids"`
diff --git a/internal/api/model/marker.go b/internal/api/model/marker.go
index f2d5cb296..d0d5dc67e 100644
--- a/internal/api/model/marker.go
+++ b/internal/api/model/marker.go
@@ -18,6 +18,8 @@
package model
// Marker represents the last read position within a user's timelines.
+//
+// swagger:model markers
type Marker struct {
// Information about the user's position in the home timeline.
Home *TimelineMarker `json:"home,omitempty"`
diff --git a/internal/api/model/multistatus.go b/internal/api/model/multistatus.go
index cac8b4f9b..d58ea8eae 100644
--- a/internal/api/model/multistatus.go
+++ b/internal/api/model/multistatus.go
@@ -22,7 +22,7 @@ package model
// 207 MULTI-STATUS to indicate a mixture of responses.
// See https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/207
//
-// swagger:model multiStatus
+// TODO: swagger:model multiStatus once domain permissions API supports HTTP 207
type MultiStatus struct {
Data []MultiStatusEntry `json:"data"`
Metadata MultiStatusMetadata `json:"metadata"`
@@ -34,7 +34,7 @@ type MultiStatus struct {
// the caller, but at minimum it should be expected to be
// JSON-serializable.
//
-// swagger:model multiStatusEntry
+// TODO: swagger:model multiStatusEntry once domain permissions API supports HTTP 207
type MultiStatusEntry struct {
// The resource/result for this entry.
// Value may be any type, check the docs
@@ -49,7 +49,7 @@ type MultiStatusEntry struct {
// MultiStatusMetadata models an at-a-glance summary of
// the data contained in the MultiStatus.
//
-// swagger:model multiStatusMetadata
+// TODO: swagger:model multiStatusMetadata once domain permissions API supports HTTP 207
type MultiStatusMetadata struct {
// Success count + failure count.
Total int `json:"total"`
diff --git a/internal/api/model/poll.go b/internal/api/model/poll.go
index 5603ff222..089dfc0f0 100644
--- a/internal/api/model/poll.go
+++ b/internal/api/model/poll.go
@@ -73,26 +73,26 @@ type PollOption struct {
// PollRequest models a request to create a poll.
//
-// swagger:model pollRequest
+// swagger:ignore
type PollRequest struct {
// Array of possible answers.
// If provided, media_ids cannot be used, and poll[expires_in] must be provided.
// name: poll[options]
- Options []string `form:"options" json:"options" xml:"options"`
+ Options []string `form:"poll[options][]" json:"options" xml:"options"`
// Duration the poll should be open, in seconds.
// If provided, media_ids cannot be used, and poll[options] must be provided.
- ExpiresIn int `form:"expires_in" xml:"expires_in"`
+ ExpiresIn int `form:"poll[expires_in]" xml:"expires_in"`
// Duration the poll should be open, in seconds.
// If provided, media_ids cannot be used, and poll[options] must be provided.
ExpiresInI interface{} `json:"expires_in"`
// Allow multiple choices on this poll.
- Multiple bool `form:"multiple" json:"multiple" xml:"multiple"`
+ Multiple bool `form:"poll[multiple]" json:"multiple" xml:"multiple"`
// Hide vote counts until the poll ends.
- HideTotals bool `form:"hide_totals" json:"hide_totals" xml:"hide_totals"`
+ HideTotals bool `form:"poll[hide_totals]" json:"hide_totals" xml:"hide_totals"`
}
// PollVoteRequest models a request to vote in a poll.
diff --git a/internal/api/model/report.go b/internal/api/model/report.go
index b9b8c77d2..175875204 100644
--- a/internal/api/model/report.go
+++ b/internal/api/model/report.go
@@ -65,31 +65,31 @@ type Report struct {
// swagger:parameters reportCreate
type ReportCreateRequest struct {
// ID of the account to report.
- // example: 01GPE75FXSH2EGFBF85NXPH3KP
+ // Sample: 01GPE75FXSH2EGFBF85NXPH3KP
// in: formData
// required: true
AccountID string `form:"account_id" json:"account_id" xml:"account_id"`
// IDs of statuses to attach to the report to provide additional context.
- // example: ["01GPE76N4SBVRZ8K24TW51ZZQ4","01GPE76WN9JZE62EPT3Q9FRRD4"]
+ // Sample: ["01GPE76N4SBVRZ8K24TW51ZZQ4","01GPE76WN9JZE62EPT3Q9FRRD4"]
// in: formData
StatusIDs []string `form:"status_ids[]" json:"status_ids" xml:"status_ids"`
// The reason for the report. Default maximum of 1000 characters.
- // example: Anti-Blackness, transphobia.
+ // Sample: Anti-Blackness, transphobia.
// in: formData
Comment string `form:"comment" json:"comment" xml:"comment"`
// If the account is remote, should the report be forwarded to the remote admin?
- // example: true
+ // Sample: true
// default: false
// in: formData
Forward bool `form:"forward" json:"forward" xml:"forward"`
// Specify if the report is due to spam, violation of enumerated instance rules, or some other reason.
// Currently only 'other' is supported.
- // example: other
+ // Sample: other
// default: other
// in: formData
Category string `form:"category" json:"category" xml:"category"`
// IDs of rules on this instance which have been broken according to the reporter.
- // example: ["01GPBN5YDY6JKBWE44H7YQBDCQ","01GPBN65PDWSBPWVDD0SQCFFY3"]
+ // Sample: ["01GPBN5YDY6JKBWE44H7YQBDCQ","01GPBN65PDWSBPWVDD0SQCFFY3"]
// in: formData
RuleIDs []string `form:"rule_ids[]" json:"rule_ids" xml:"rule_ids"`
}
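Since ReportCreateRequest now documents Sample values rather than generated examples, here is a sketch of a JSON body built from those sample ULIDs (illustrative values, not real IDs):

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // Mirror of the fields documented above, trimmed to JSON tags only.
    type reportCreate struct {
        AccountID string   `json:"account_id"`
        StatusIDs []string `json:"status_ids,omitempty"`
        Comment   string   `json:"comment,omitempty"`
        Forward   bool     `json:"forward"`
        Category  string   `json:"category"`
        RuleIDs   []string `json:"rule_ids,omitempty"`
    }

    func main() {
        body, _ := json.Marshal(reportCreate{
            AccountID: "01GPE75FXSH2EGFBF85NXPH3KP",
            StatusIDs: []string{"01GPE76N4SBVRZ8K24TW51ZZQ4"},
            Comment:   "Anti-Blackness, transphobia.",
            Forward:   true,
            Category:  "other", // currently the only supported category
        })
        fmt.Println(string(body))
    }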
diff --git a/internal/api/model/rule.go b/internal/api/model/rule.go
index f4caf7dd0..b94c502c5 100644
--- a/internal/api/model/rule.go
+++ b/internal/api/model/rule.go
@@ -27,15 +27,24 @@ type InstanceRule struct {
// InstanceRuleCreateRequest represents a request to create a new instance rule, made through the admin API.
//
-// swagger:model instanceRuleCreateRequest
+// swagger:parameters ruleCreate
type InstanceRuleCreateRequest struct {
- Text string `form:"text" validation:"required"`
+ // Text body for the instance rule, plaintext.
+ // required: true
+ // in: formData
+ Text string `form:"text" json:"text" validation:"required"`
}
// InstanceRuleUpdateRequest represents a request to update the text of an instance rule, made through the admin API.
//
-// swagger:model instanceRuleUpdateRequest
+// swagger:parameters ruleUpdate
type InstanceRuleUpdateRequest struct {
- ID string `form:"id"`
- Text string `form:"text"`
+ // The id of the rule to update.
+ // required: true
+ // in: path
+ ID string `form:"id" json:"id"`
+ // Text body for the updated instance rule, plaintext.
+ // required: true
+ // in: formData
+ Text string `form:"text" json:"text"`
}
diff --git a/internal/api/model/status.go b/internal/api/model/status.go
index 8ca41c767..fed2cdf37 100644
--- a/internal/api/model/status.go
+++ b/internal/api/model/status.go
@@ -163,47 +163,33 @@ type StatusReblogged struct {
// StatusCreateRequest models status creation parameters.
//
-// swagger:model statusCreateRequest
+// swagger:ignore
type StatusCreateRequest struct {
// Text content of the status.
// If media_ids is provided, this becomes optional.
// Attaching a poll is optional while status is provided.
- // in: formData
Status string `form:"status" json:"status" xml:"status"`
// Array of Attachment ids to be attached as media.
// If provided, status becomes optional, and poll cannot be used.
- //
- // If the status is being submitted as a form, the key is 'media_ids[]',
- // but if it's json or xml, the key is 'media_ids'.
- //
- // in: formData
MediaIDs []string `form:"media_ids[]" json:"media_ids" xml:"media_ids"`
// Poll to include with this status.
- // in: formData
Poll *PollRequest `form:"poll" json:"poll" xml:"poll"`
// ID of the status being replied to, if status is a reply.
- // in: formData
InReplyToID string `form:"in_reply_to_id" json:"in_reply_to_id" xml:"in_reply_to_id"`
// Status and attached media should be marked as sensitive.
- // in: formData
Sensitive bool `form:"sensitive" json:"sensitive" xml:"sensitive"`
// Text to be shown as a warning or subject before the actual content.
// Statuses are generally collapsed behind this field.
- // in: formData
SpoilerText string `form:"spoiler_text" json:"spoiler_text" xml:"spoiler_text"`
// Visibility of the posted status.
- // in: formData
Visibility Visibility `form:"visibility" json:"visibility" xml:"visibility"`
// ISO 8601 Datetime at which to schedule a status.
// Providing this parameter will cause ScheduledStatus to be returned instead of Status.
// Must be at least 5 minutes in the future.
- // in: formData
ScheduledAt string `form:"scheduled_at" json:"scheduled_at" xml:"scheduled_at"`
// ISO 639 language code for this status.
- // in: formData
Language string `form:"language" json:"language" xml:"language"`
// Content type to use when parsing this status.
- // in: formData
ContentType StatusContentType `form:"content_type" json:"content_type" xml:"content_type"`
}
@@ -229,7 +215,7 @@ const (
// AdvancedStatusCreateForm wraps the mastodon-compatible status create form along with the GTS advanced
// visibility settings.
//
-// swagger:parameters statusCreate
+// swagger:ignore
type AdvancedStatusCreateForm struct {
StatusCreateRequest
AdvancedVisibilityFlagsForm
@@ -238,7 +224,7 @@ type AdvancedStatusCreateForm struct {
// AdvancedVisibilityFlagsForm allows a few more advanced flags to be set on new statuses, in addition
// to the standard mastodon-compatible ones.
//
-// swagger:model advancedVisibilityFlagsForm
+// swagger:ignore
type AdvancedVisibilityFlagsForm struct {
// This status will be federated beyond the local timeline(s).
Federated *bool `form:"federated" json:"federated" xml:"federated"`
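The media_ids comment removed above still describes the binding behavior: form submissions repeat the bracketed key media_ids[], while JSON and XML use the flat media_ids key; it simply no longer belongs in the generated spec. A sketch of both encodings, with hypothetical attachment IDs:

    package main

    import (
        "fmt"
        "net/url"
    )

    func main() {
        // Form encoding: one bracketed key per attachment.
        form := url.Values{}
        form.Set("status", "two attachments")
        form.Add("media_ids[]", "01H00000000000000000000001") // hypothetical IDs
        form.Add("media_ids[]", "01H00000000000000000000002")
        fmt.Println(form.Encode())

        // JSON encoding: a plain array under "media_ids".
        fmt.Println(`{"status":"two attachments","media_ids":["01H00000000000000000000001","01H00000000000000000000002"]}`)
    }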
diff --git a/test/swagger.sh b/test/swagger.sh
new file mode 100755
index 000000000..e8b4b5864
--- /dev/null
+++ b/test/swagger.sh
@@ -0,0 +1,30 @@
+#!/bin/sh
+
+# Test that the Swagger spec is up to date and valid.
+
+set -eu
+
+swagger_cmd() {
+ go run github.com/go-swagger/go-swagger/cmd/swagger "$@"
+}
+swagger_spec='docs/api/swagger.yaml'
+
+# Temporary directory for the regenerated Swagger spec.
+temp_dir=$(mktemp -d)
+# Can't use mktemp directly because we need to control the file extension.
+regenerated_swagger_spec="${temp_dir}/swagger.yaml"
+cleanup() {
+ rm -rf "${temp_dir}"
+}
+trap cleanup INT TERM EXIT
+
+# Regenerate the Swagger spec and compare it to the working copy.
+swagger_cmd generate spec --scan-models --exclude-deps --output "${regenerated_swagger_spec}"
+if ! diff -u "${swagger_spec}" "${regenerated_swagger_spec}" > /dev/null; then
+ echo "${swagger_spec} is out of date. Please run the following command to update it:" >&2
+ echo " go run github.com/go-swagger/go-swagger/cmd/swagger generate spec --scan-models --exclude-deps --output ${swagger_spec}" >&2
+ exit 1
+fi
+
+# Validate the Swagger spec.
+swagger_cmd validate "${swagger_spec}"
diff --git a/tools/tools.go b/tools/tools.go
new file mode 100644
index 000000000..e97320e70
--- /dev/null
+++ b/tools/tools.go
@@ -0,0 +1,28 @@
+//go:build tools
+
+// GoToSocial
+// Copyright (C) GoToSocial Authors admin@gotosocial.org
+// SPDX-License-Identifier: AGPL-3.0-or-later
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+// Package tools exists to pull in command-line tools that we need to go get,
+// and is behind a build tag that is otherwise unused and thus only visible
+// to dependency management commands. See https://stackoverflow.com/a/54028731.
+package tools
+
+import (
+ // Provides the swagger command used by test/swagger.sh
+ _ "github.com/go-swagger/go-swagger/cmd/swagger"
+)
diff --git a/vendor/github.com/Masterminds/goutils/.travis.yml b/vendor/github.com/Masterminds/goutils/.travis.yml
new file mode 100644
index 000000000..4025e01ec
--- /dev/null
+++ b/vendor/github.com/Masterminds/goutils/.travis.yml
@@ -0,0 +1,18 @@
+language: go
+
+go:
+ - 1.6
+ - 1.7
+ - 1.8
+ - tip
+
+script:
+ - go test -v
+
+notifications:
+ webhooks:
+ urls:
+ - https://webhooks.gitter.im/e/06e3328629952dabe3e0
+ on_success: change # options: [always|never|change] default: always
+ on_failure: always # options: [always|never|change] default: always
+ on_start: never # options: [always|never|change] default: always
diff --git a/vendor/github.com/Masterminds/goutils/CHANGELOG.md b/vendor/github.com/Masterminds/goutils/CHANGELOG.md
new file mode 100644
index 000000000..d700ec47f
--- /dev/null
+++ b/vendor/github.com/Masterminds/goutils/CHANGELOG.md
@@ -0,0 +1,8 @@
+# 1.0.1 (2017-05-31)
+
+## Fixed
+- #21: Fix generation of alphanumeric strings (thanks @dbarranco)
+
+# 1.0.0 (2014-04-30)
+
+- Initial release.
diff --git a/vendor/github.com/Masterminds/goutils/LICENSE.txt b/vendor/github.com/Masterminds/goutils/LICENSE.txt
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/vendor/github.com/Masterminds/goutils/LICENSE.txt
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/Masterminds/goutils/README.md b/vendor/github.com/Masterminds/goutils/README.md
new file mode 100644
index 000000000..163ffe72a
--- /dev/null
+++ b/vendor/github.com/Masterminds/goutils/README.md
@@ -0,0 +1,70 @@
+GoUtils
+===========
+[![Stability: Maintenance](https://masterminds.github.io/stability/maintenance.svg)](https://masterminds.github.io/stability/maintenance.html)
+[![GoDoc](https://godoc.org/github.com/Masterminds/goutils?status.png)](https://godoc.org/github.com/Masterminds/goutils) [![Build Status](https://travis-ci.org/Masterminds/goutils.svg?branch=master)](https://travis-ci.org/Masterminds/goutils) [![Build status](https://ci.appveyor.com/api/projects/status/sc2b1ew0m7f0aiju?svg=true)](https://ci.appveyor.com/project/mattfarina/goutils)
+
+
+GoUtils provides users with utility functions to manipulate strings in various ways. It is a Go implementation of several
+string manipulation libraries from Java Apache Commons. GoUtils includes the following Java Apache Commons classes:
+* WordUtils
+* RandomStringUtils
+* StringUtils (partial implementation)
+
+## Installation
+If you have Go set up on your system, run the following from the command line/terminal within your GOPATH directory:
+
+ go get github.com/Masterminds/goutils
+
+If you do not have Go set up on your system, please follow the [Go installation directions from the documentation](http://golang.org/doc/install), and then follow the instructions above to install GoUtils.
+
+
+## Documentation
+GoUtils doc is available here: [![GoDoc](https://godoc.org/github.com/Masterminds/goutils?status.png)](https://godoc.org/github.com/Masterminds/goutils)
+
+
+## Usage
+The code snippets below show examples of how to use GoUtils. Some functions return errors while others do not. The first instance below, which does not return an error, is the `Initials` function (located within the `wordutils.go` file).
+
+ package main
+
+ import (
+ "fmt"
+ "github.com/Masterminds/goutils"
+ )
+
+ func main() {
+
+ // EXAMPLE 1: A goutils function which returns no errors
+ fmt.Println (goutils.Initials("John Doe Foo")) // Prints out "JDF"
+
+ }
+Some functions return errors mainly due to illegal arguments used as parameters. The code example below illustrates how to deal with a function that returns an error. In this instance, the function is the `Random` function (located within the `randomstringutils.go` file).
+
+ package main
+
+ import (
+ "fmt"
+ "github.com/Masterminds/goutils"
+ )
+
+ func main() {
+
+ // EXAMPLE 2: A goutils function which returns an error
+ rand1, err1 := goutils.Random (-1, 0, 0, true, true)
+
+ if err1 != nil {
+ fmt.Println(err1) // Prints out error message because -1 was entered as the first parameter in goutils.Random(...)
+ } else {
+ fmt.Println(rand1)
+ }
+
+ }
+
+## License
+GoUtils is licensed under the Apache License, Version 2.0. Please check the LICENSE.txt file or visit http://www.apache.org/licenses/LICENSE-2.0 for a copy of the license.
+
+## Issue Reporting
+Make suggestions or report issues using the GitHub issue tracker: https://github.com/Masterminds/goutils/issues
+
+## Website
+* [GoUtils webpage](http://Masterminds.github.io/goutils/)
diff --git a/vendor/github.com/Masterminds/goutils/appveyor.yml b/vendor/github.com/Masterminds/goutils/appveyor.yml
new file mode 100644
index 000000000..657564a84
--- /dev/null
+++ b/vendor/github.com/Masterminds/goutils/appveyor.yml
@@ -0,0 +1,21 @@
+version: build-{build}.{branch}
+
+clone_folder: C:\gopath\src\github.com\Masterminds\goutils
+shallow_clone: true
+
+environment:
+ GOPATH: C:\gopath
+
+platform:
+ - x64
+
+build: off
+
+install:
+ - go version
+ - go env
+
+test_script:
+ - go test -v
+
+deploy: off
diff --git a/vendor/github.com/Masterminds/goutils/cryptorandomstringutils.go b/vendor/github.com/Masterminds/goutils/cryptorandomstringutils.go
new file mode 100644
index 000000000..8dbd92485
--- /dev/null
+++ b/vendor/github.com/Masterminds/goutils/cryptorandomstringutils.go
@@ -0,0 +1,230 @@
+/*
+Copyright 2014 Alexander Okoli
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package goutils
+
+import (
+ "crypto/rand"
+ "fmt"
+ "math"
+ "math/big"
+ "unicode"
+)
+
+/*
+CryptoRandomNonAlphaNumeric creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of all characters (ASCII/Unicode values between 0 and 2,147,483,647 (math.MaxInt32)).
+
+Parameter:
+ count - the length of random string to create
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, CryptoRandom(...)
+*/
+func CryptoRandomNonAlphaNumeric(count int) (string, error) {
+ return CryptoRandomAlphaNumericCustom(count, false, false)
+}
+
+/*
+CryptoRandomAscii creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of characters whose ASCII value is between 32 and 126 (inclusive).
+
+Parameter:
+ count - the length of random string to create
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, CryptoRandom(...)
+*/
+func CryptoRandomAscii(count int) (string, error) {
+ return CryptoRandom(count, 32, 127, false, false)
+}
+
+/*
+CryptoRandomNumeric creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of numeric characters.
+
+Parameter:
+ count - the length of random string to create
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, CryptoRandom(...)
+*/
+func CryptoRandomNumeric(count int) (string, error) {
+ return CryptoRandom(count, 0, 0, false, true)
+}
+
+/*
+CryptoRandomAlphabetic creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of alphabetic characters.
+
+Parameter:
+ count - the length of random string to create
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, CryptoRandom(...)
+*/
+func CryptoRandomAlphabetic(count int) (string, error) {
+ return CryptoRandom(count, 0, 0, true, false)
+}
+
+/*
+CryptoRandomAlphaNumeric creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of alpha-numeric characters.
+
+Parameter:
+ count - the length of random string to create
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, CryptoRandom(...)
+*/
+func CryptoRandomAlphaNumeric(count int) (string, error) {
+ return CryptoRandom(count, 0, 0, true, true)
+}
+
+/*
+CryptoRandomAlphaNumericCustom creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of alpha-numeric characters as indicated by the arguments.
+
+Parameters:
+ count - the length of random string to create
+ letters - if true, generated string may include alphabetic characters
+ numbers - if true, generated string may include numeric characters
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, CryptoRandom(...)
+*/
+func CryptoRandomAlphaNumericCustom(count int, letters bool, numbers bool) (string, error) {
+ return CryptoRandom(count, 0, 0, letters, numbers)
+}
+
+/*
+CryptoRandom creates a random string based on a variety of options, using Go's crypto/rand source of randomness.
+If the parameters start and end are both 0, they are set to ' ' and 'z' respectively, covering the ASCII printable characters,
+unless letters and numbers are both false, in which case start and end are set to 0 and math.MaxInt32, respectively.
+If chars is not nil, characters stored in chars that are between start and end are chosen.
+
+Parameters:
+ count - the length of random string to create
+ start - the position in set of chars (ASCII/Unicode int) to start at
+ end - the position in set of chars (ASCII/Unicode int) to end before
+ letters - if true, generated string may include alphabetic characters
+ numbers - if true, generated string may include numeric characters
+ chars - the set of chars to choose random characters from. If nil, then it will use the set of all chars.
+
+Returns:
+ string - the random string
+ error - an error stemming from invalid parameters: if count < 0; or the provided chars array is empty; or end <= start; or end > len(chars)
+*/
+func CryptoRandom(count int, start int, end int, letters bool, numbers bool, chars ...rune) (string, error) {
+ if count == 0 {
+ return "", nil
+ } else if count < 0 {
+ err := fmt.Errorf("randomstringutils illegal argument: Requested random string length %v is less than 0.", count) // equiv to err := errors.New("...")
+ return "", err
+ }
+ if chars != nil && len(chars) == 0 {
+ err := fmt.Errorf("randomstringutils illegal argument: The chars array must not be empty")
+ return "", err
+ }
+
+ if start == 0 && end == 0 {
+ if chars != nil {
+ end = len(chars)
+ } else {
+ if !letters && !numbers {
+ end = math.MaxInt32
+ } else {
+ end = 'z' + 1
+ start = ' '
+ }
+ }
+ } else {
+ if end <= start {
+ err := fmt.Errorf("randomstringutils illegal argument: Parameter end (%v) must be greater than start (%v)", end, start)
+ return "", err
+ }
+
+ if chars != nil && end > len(chars) {
+ err := fmt.Errorf("randomstringutils illegal argument: Parameter end (%v) cannot be greater than len(chars) (%v)", end, len(chars))
+ return "", err
+ }
+ }
+
+ buffer := make([]rune, count)
+ gap := end - start
+
+ // high-surrogates range, (\uD800-\uDBFF) = 55296 - 56319
+ // low-surrogates range, (\uDC00-\uDFFF) = 56320 - 57343
+
+ for count != 0 {
+ count--
+ var ch rune
+ if chars == nil {
+ ch = rune(getCryptoRandomInt(gap) + int64(start))
+ } else {
+ ch = chars[getCryptoRandomInt(gap)+int64(start)]
+ }
+
+ if letters && unicode.IsLetter(ch) || numbers && unicode.IsDigit(ch) || !letters && !numbers {
+ if ch >= 56320 && ch <= 57343 { // low surrogate range
+ if count == 0 {
+ count++
+ } else {
+ // Insert low surrogate
+ buffer[count] = ch
+ count--
+ // Insert high surrogate
+ buffer[count] = rune(55296 + getCryptoRandomInt(128))
+ }
+ } else if ch >= 55296 && ch <= 56191 { // High surrogates range (Partial)
+ if count == 0 {
+ count++
+ } else {
+ // Insert low surrogate
+ buffer[count] = rune(56320 + getCryptoRandomInt(128))
+ count--
+ // Insert high surrogate
+ buffer[count] = ch
+ }
+ } else if ch >= 56192 && ch <= 56319 {
+ // private high surrogate, skip it
+ count++
+ } else {
+ // not one of the surrogates*
+ buffer[count] = ch
+ }
+ } else {
+ count++
+ }
+ }
+ return string(buffer), nil
+}
+
+func getCryptoRandomInt(count int) int64 {
+ nBig, err := rand.Int(rand.Reader, big.NewInt(int64(count)))
+ if err != nil {
+ panic(err)
+ }
+ return nBig.Int64()
+}
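For callers, the crypto-backed helpers behave like their math/rand counterparts below but draw from crypto/rand, so they are the ones to reach for when generating secrets. A minimal sketch:

    package main

    import (
        "fmt"

        "github.com/Masterminds/goutils"
    )

    func main() {
        // 32 alphanumeric characters from crypto/rand; the error is only
        // non-nil for invalid arguments such as a negative count.
        token, err := goutils.CryptoRandomAlphaNumeric(32)
        if err != nil {
            panic(err)
        }
        fmt.Println(token)
    }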
diff --git a/vendor/github.com/Masterminds/goutils/randomstringutils.go b/vendor/github.com/Masterminds/goutils/randomstringutils.go
new file mode 100644
index 000000000..272670231
--- /dev/null
+++ b/vendor/github.com/Masterminds/goutils/randomstringutils.go
@@ -0,0 +1,248 @@
+/*
+Copyright 2014 Alexander Okoli
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package goutils
+
+import (
+ "fmt"
+ "math"
+ "math/rand"
+ "time"
+ "unicode"
+)
+
+// RANDOM provides the time-based seed used to generate random numbers
+var RANDOM = rand.New(rand.NewSource(time.Now().UnixNano()))
+
+/*
+RandomNonAlphaNumeric creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of all characters (ASCII/Unicode values between 0 and 2,147,483,647 (math.MaxInt32)).
+
+Parameter:
+ count - the length of random string to create
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, RandomSeed(...)
+*/
+func RandomNonAlphaNumeric(count int) (string, error) {
+ return RandomAlphaNumericCustom(count, false, false)
+}
+
+/*
+RandomAscii creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of characters whose ASCII value is between 32 and 126 (inclusive).
+
+Parameter:
+ count - the length of random string to create
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, RandomSeed(...)
+*/
+func RandomAscii(count int) (string, error) {
+ return Random(count, 32, 127, false, false)
+}
+
+/*
+RandomNumeric creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of numeric characters.
+
+Parameter:
+ count - the length of random string to create
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, RandomSeed(...)
+*/
+func RandomNumeric(count int) (string, error) {
+ return Random(count, 0, 0, false, true)
+}
+
+/*
+RandomAlphabetic creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of alphabetic characters.
+
+Parameter:
+ count - the length of random string to create
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, RandomSeed(...)
+*/
+func RandomAlphabetic(count int) (string, error) {
+ return Random(count, 0, 0, true, false)
+}
+
+/*
+RandomAlphaNumeric creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of alpha-numeric characters.
+
+Parameter:
+ count - the length of random string to create
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, RandomSeed(...)
+*/
+func RandomAlphaNumeric(count int) (string, error) {
+ return Random(count, 0, 0, true, true)
+}
+
+/*
+RandomAlphaNumericCustom creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of alpha-numeric characters as indicated by the arguments.
+
+Parameters:
+ count - the length of random string to create
+ letters - if true, generated string may include alphabetic characters
+ numbers - if true, generated string may include numeric characters
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, RandomSeed(...)
+*/
+func RandomAlphaNumericCustom(count int, letters bool, numbers bool) (string, error) {
+ return Random(count, 0, 0, letters, numbers)
+}
+
+/*
+Random creates a random string based on a variety of options, using the default source of randomness.
+This method has exactly the same semantics as RandomSeed(int, int, int, bool, bool, []rune, *rand.Rand), but
+instead of using an externally supplied source of randomness, it uses the internal *rand.Rand instance.
+
+Parameters:
+ count - the length of random string to create
+ start - the position in set of chars (ASCII/Unicode int) to start at
+ end - the position in set of chars (ASCII/Unicode int) to end before
+ letters - if true, generated string may include alphabetic characters
+ numbers - if true, generated string may include numeric characters
+ chars - the set of chars to choose random characters from. If nil, then it will use the set of all chars.
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, RandomSeed(...)
+*/
+func Random(count int, start int, end int, letters bool, numbers bool, chars ...rune) (string, error) {
+ return RandomSeed(count, start, end, letters, numbers, chars, RANDOM)
+}
+
+/*
+RandomSeed creates a random string based on a variety of options, using supplied source of randomness.
+If the parameters start and end are both 0, they are set to ' ' and 'z' respectively, covering the ASCII printable characters,
+unless letters and numbers are both false, in which case start and end are set to 0 and math.MaxInt32, respectively.
+If chars is not nil, characters stored in chars that are between start and end are chosen.
+This method accepts a user-supplied *rand.Rand instance to use as a source of randomness. By seeding a single *rand.Rand instance
+with a fixed seed and using it for each call, the same random sequence of strings can be generated repeatedly and predictably.
+
+Parameters:
+ count - the length of random string to create
+ start - the position in set of chars (ASCII/Unicode decimals) to start at
+ end - the position in set of chars (ASCII/Unicode decimals) to end before
+ letters - if true, generated string may include alphabetic characters
+ numbers - if true, generated string may include numeric characters
+ chars - the set of chars to choose random characters from. If nil, then it will use the set of all chars.
+ random - a source of randomness.
+
+Returns:
+ string - the random string
+ error - an error stemming from invalid parameters: if count < 0; or the provided chars array is empty; or end <= start; or end > len(chars)
+*/
+func RandomSeed(count int, start int, end int, letters bool, numbers bool, chars []rune, random *rand.Rand) (string, error) {
+
+ if count == 0 {
+ return "", nil
+ } else if count < 0 {
+ err := fmt.Errorf("randomstringutils illegal argument: Requested random string length %v is less than 0.", count) // equiv to err := errors.New("...")
+ return "", err
+ }
+ if chars != nil && len(chars) == 0 {
+ err := fmt.Errorf("randomstringutils illegal argument: The chars array must not be empty")
+ return "", err
+ }
+
+ if start == 0 && end == 0 {
+ if chars != nil {
+ end = len(chars)
+ } else {
+ if !letters && !numbers {
+ end = math.MaxInt32
+ } else {
+ end = 'z' + 1
+ start = ' '
+ }
+ }
+ } else {
+ if end <= start {
+ err := fmt.Errorf("randomstringutils illegal argument: Parameter end (%v) must be greater than start (%v)", end, start)
+ return "", err
+ }
+
+ if chars != nil && end > len(chars) {
+ err := fmt.Errorf("randomstringutils illegal argument: Parameter end (%v) cannot be greater than len(chars) (%v)", end, len(chars))
+ return "", err
+ }
+ }
+
+ buffer := make([]rune, count)
+ gap := end - start
+
+ // high-surrogates range, (\uD800-\uDBFF) = 55296 - 56319
+ // low-surrogates range, (\uDC00-\uDFFF) = 56320 - 57343
+
+ for count != 0 {
+ count--
+ var ch rune
+ if chars == nil {
+ ch = rune(random.Intn(gap) + start)
+ } else {
+ ch = chars[random.Intn(gap)+start]
+ }
+
+ if letters && unicode.IsLetter(ch) || numbers && unicode.IsDigit(ch) || !letters && !numbers {
+ if ch >= 56320 && ch <= 57343 { // low surrogate range
+ if count == 0 {
+ count++
+ } else {
+ // Insert low surrogate
+ buffer[count] = ch
+ count--
+ // Insert high surrogate
+ buffer[count] = rune(55296 + random.Intn(128))
+ }
+ } else if ch >= 55296 && ch <= 56191 { // High surrogates range (Partial)
+ if count == 0 {
+ count++
+ } else {
+ // Insert low surrogate
+ buffer[count] = rune(56320 + random.Intn(128))
+ count--
+ // Insert high surrogate
+ buffer[count] = ch
+ }
+ } else if ch >= 56192 && ch <= 56319 {
+ // private high surrogate, skip it
+ count++
+ } else {
+ // not one of the surrogates*
+ buffer[count] = ch
+ }
+ } else {
+ count++
+ }
+ }
+ return string(buffer), nil
+}
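As the RandomSeed doc comment notes, supplying a *rand.Rand with a fixed seed makes the output reproducible, which is useful in tests but never for secrets (use the CryptoRandom variants for those). A sketch:

    package main

    import (
        "fmt"
        "math/rand"

        "github.com/Masterminds/goutils"
    )

    func main() {
        s1, _ := goutils.RandomSeed(10, 0, 0, true, true, nil, rand.New(rand.NewSource(42)))
        s2, _ := goutils.RandomSeed(10, 0, 0, true, true, nil, rand.New(rand.NewSource(42)))
        fmt.Println(s1 == s2) // true: same seed, same sequence
    }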
diff --git a/vendor/github.com/Masterminds/goutils/stringutils.go b/vendor/github.com/Masterminds/goutils/stringutils.go
new file mode 100644
index 000000000..741bb530e
--- /dev/null
+++ b/vendor/github.com/Masterminds/goutils/stringutils.go
@@ -0,0 +1,240 @@
+/*
+Copyright 2014 Alexander Okoli
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package goutils
+
+import (
+ "bytes"
+ "fmt"
+ "strings"
+ "unicode"
+)
+
+// Typically returned by functions where a searched item cannot be found
+const INDEX_NOT_FOUND = -1
+
+/*
+Abbreviate abbreviates a string using ellipses. This will turn the string "Now is the time for all good men" into "Now is the time for..."
+
+Specifically, the algorithm is as follows:
+
+ - If str is less than maxWidth characters long, return it.
+ - Else abbreviate it to (str[0:maxWidth - 3] + "...").
+ - If maxWidth is less than 4, return an illegal argument error.
+ - In no case will it return a string of length greater than maxWidth.
+
+Parameters:
+ str - the string to check
+ maxWidth - maximum length of result string, must be at least 4
+
+Returns:
+ string - abbreviated string
+ error - if the width is too small
+*/
+func Abbreviate(str string, maxWidth int) (string, error) {
+ return AbbreviateFull(str, 0, maxWidth)
+}
+
+/*
+AbbreviateFull abbreviates a string using ellipses. This will turn the string "Now is the time for all good men" into "...is the time for..."
+This function works like Abbreviate(string, int), but allows you to specify a "left edge" offset. Note that this left edge is not
+necessarily going to be the leftmost character in the result, or the first character following the ellipses, but it will appear
+somewhere in the result.
+In no case will it return a string of length greater than maxWidth.
+
+Parameters:
+ str - the string to check
+ offset - left edge of source string
+ maxWidth - maximum length of result string, must be at least 4
+
+Returns:
+ string - abbreviated string
+ error - if the width is too small
+*/
+func AbbreviateFull(str string, offset int, maxWidth int) (string, error) {
+ if str == "" {
+ return "", nil
+ }
+ if maxWidth < 4 {
+ err := fmt.Errorf("stringutils illegal argument: Minimum abbreviation width is 4")
+ return "", err
+ }
+ if len(str) <= maxWidth {
+ return str, nil
+ }
+ if offset > len(str) {
+ offset = len(str)
+ }
+ if len(str)-offset < (maxWidth - 3) { // 15 - 5 < 10 - 3 = 10 < 7
+ offset = len(str) - (maxWidth - 3)
+ }
+ abrevMarker := "..."
+ if offset <= 4 {
+ return str[0:maxWidth-3] + abrevMarker, nil // str.substring(0, maxWidth - 3) + abrevMarker;
+ }
+ if maxWidth < 7 {
+ err := fmt.Errorf("stringutils illegal argument: Minimum abbreviation width with offset is 7")
+ return "", err
+ }
+ if (offset + maxWidth - 3) < len(str) { // 5 + (10-3) < 15 = 12 < 15
+ abrevStr, _ := Abbreviate(str[offset:len(str)], (maxWidth - 3))
+ return abrevMarker + abrevStr, nil // abrevMarker + abbreviate(str.substring(offset), maxWidth - 3);
+ }
+ return abrevMarker + str[(len(str)-(maxWidth-3)):len(str)], nil // abrevMarker + str.substring(str.length() - (maxWidth - 3));
+}
+
+/*
+DeleteWhiteSpace deletes all whitespaces from a string as defined by unicode.IsSpace(rune).
+It returns the string without whitespaces.
+
+Parameter:
+ str - the string to delete whitespace from, may be nil
+
+Returns:
+ the string without whitespaces
+*/
+func DeleteWhiteSpace(str string) string {
+ if str == "" {
+ return str
+ }
+ sz := len(str)
+ var chs bytes.Buffer
+ count := 0
+ for i := 0; i < sz; i++ {
+ ch := rune(str[i])
+ if !unicode.IsSpace(ch) {
+ chs.WriteRune(ch)
+ count++
+ }
+ }
+ if count == sz {
+ return str
+ }
+ return chs.String()
+}
+
+/*
+IndexOfDifference compares two strings, and returns the index at which the strings begin to differ.
+
+Parameters:
+ str1 - the first string
+ str2 - the second string
+
+Returns:
+ the index where str1 and str2 begin to differ; -1 if they are equal
+*/
+func IndexOfDifference(str1 string, str2 string) int {
+ if str1 == str2 {
+ return INDEX_NOT_FOUND
+ }
+ if IsEmpty(str1) || IsEmpty(str2) {
+ return 0
+ }
+ var i int
+ for i = 0; i < len(str1) && i < len(str2); i++ {
+ if rune(str1[i]) != rune(str2[i]) {
+ break
+ }
+ }
+ if i < len(str2) || i < len(str1) {
+ return i
+ }
+ return INDEX_NOT_FOUND
+}
+
+/*
+IsBlank checks if a string is whitespace or empty (""). Observe the following behavior:
+
+ goutils.IsBlank("") = true
+ goutils.IsBlank(" ") = true
+ goutils.IsBlank("bob") = false
+ goutils.IsBlank(" bob ") = false
+
+Parameter:
+ str - the string to check
+
+Returns:
+ true - if the string is whitespace or empty ("")
+*/
+func IsBlank(str string) bool {
+ strLen := len(str)
+ if str == "" || strLen == 0 {
+ return true
+ }
+ for i := 0; i < strLen; i++ {
+ if !unicode.IsSpace(rune(str[i])) {
+ return false
+ }
+ }
+ return true
+}
+
+/*
+IndexOf returns the index of the first instance of sub in str, with the search beginning from the
+index start point specified. -1 is returned if sub is not present in str.
+
+An empty string ("") will return -1 (INDEX_NOT_FOUND). A negative start position is treated as zero.
+A start position greater than the string length returns -1.
+
+Parameters:
+ str - the string to check
+ sub - the substring to find
+ start - the start position; negative treated as zero
+
+Returns:
+ the first index where the sub string was found (always >= start)
+*/
+func IndexOf(str string, sub string, start int) int {
+
+ if start < 0 {
+ start = 0
+ }
+
+ if len(str) < start {
+ return INDEX_NOT_FOUND
+ }
+
+ if IsEmpty(str) || IsEmpty(sub) {
+ return INDEX_NOT_FOUND
+ }
+
+ partialIndex := strings.Index(str[start:len(str)], sub)
+ if partialIndex == -1 {
+ return INDEX_NOT_FOUND
+ }
+ return partialIndex + start
+}
+
+// IsEmpty checks if a string is empty (""). Returns true if empty, and false otherwise.
+func IsEmpty(str string) bool {
+ return len(str) == 0
+}
+
+// DefaultString returns either the passed in string, or if the string is empty, the value of defaultStr.
+func DefaultString(str string, defaultStr string) string {
+ if IsEmpty(str) {
+ return defaultStr
+ }
+ return str
+}
+
+// DefaultIfBlank returns either the passed in string, or if the string is whitespace or empty (""), the value of defaultStr.
+func DefaultIfBlank(str string, defaultStr string) string {
+ if IsBlank(str) {
+ return defaultStr
+ }
+ return str
+}
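A few of the helpers above in action; the expected outputs follow directly from the documented algorithms:

    package main

    import (
        "fmt"

        "github.com/Masterminds/goutils"
    )

    func main() {
        abbr, _ := goutils.Abbreviate("Now is the time for all good men", 20)
        fmt.Println(abbr)                                       // "Now is the time f..."
        fmt.Println(goutils.IsBlank("   "))                     // true
        fmt.Println(goutils.IndexOf("gotosocial", "social", 0)) // 4
        fmt.Println(goutils.DefaultIfBlank("  ", "fallback"))   // "fallback"
    }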
diff --git a/vendor/github.com/Masterminds/goutils/wordutils.go b/vendor/github.com/Masterminds/goutils/wordutils.go
new file mode 100644
index 000000000..034cad8e2
--- /dev/null
+++ b/vendor/github.com/Masterminds/goutils/wordutils.go
@@ -0,0 +1,357 @@
+/*
+Copyright 2014 Alexander Okoli
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+/*
+Package goutils provides utility functions to manipulate strings in various ways.
+The code snippets below show examples of how to use goutils. Some functions return
+errors while others do not, so usage would vary as a result.
+
+Example:
+
+ package main
+
+ import (
+ "fmt"
+ "github.com/aokoli/goutils"
+ )
+
+ func main() {
+
+ // EXAMPLE 1: A goutils function which returns no errors
+ fmt.Println (goutils.Initials("John Doe Foo")) // Prints out "JDF"
+
+
+
+ // EXAMPLE 2: A goutils function which returns an error
+ rand1, err1 := goutils.Random (-1, 0, 0, true, true)
+
+ if err1 != nil {
+ fmt.Println(err1) // Prints out error message because -1 was entered as the first parameter in goutils.Random(...)
+ } else {
+ fmt.Println(rand1)
+ }
+ }
+*/
+package goutils
+
+import (
+ "bytes"
+ "strings"
+ "unicode"
+)
+
+// VERSION indicates the current version of goutils
+const VERSION = "1.0.0"
+
+/*
+Wrap wraps a single line of text, identifying words by ' '.
+New lines will be separated by '\n'. Very long words, such as URLs, will not be wrapped.
+Leading spaces on a new line are stripped. Trailing spaces are not stripped.
+
+Parameters:
+ str - the string to be word wrapped
+ wrapLength - the column (a column can fit only one character) to wrap the words at, less than 1 is treated as 1
+
+Returns:
+ a line with newlines inserted
+*/
+func Wrap(str string, wrapLength int) string {
+ return WrapCustom(str, wrapLength, "", false)
+}
+
+/*
+WrapCustom wraps a single line of text, identifying words by ' '.
+Leading spaces on a new line are stripped. Trailing spaces are not stripped.
+
+Parameters:
+ str - the string to be word wrapped
+ wrapLength - the column number (a column can fit only one character) to wrap the words at, less than 1 is treated as 1
+ newLineStr - the string to insert for a new line, "" uses '\n'
+ wrapLongWords - true if long words (such as URLs) should be wrapped
+
+Returns:
+ a line with newlines inserted
+*/
+func WrapCustom(str string, wrapLength int, newLineStr string, wrapLongWords bool) string {
+
+ if str == "" {
+ return ""
+ }
+ if newLineStr == "" {
+ newLineStr = "\n" // TODO Assumes "\n" is seperator. Explore SystemUtils.LINE_SEPARATOR from Apache Commons
+ }
+ if wrapLength < 1 {
+ wrapLength = 1
+ }
+
+ inputLineLength := len(str)
+ offset := 0
+
+ var wrappedLine bytes.Buffer
+
+ for inputLineLength-offset > wrapLength {
+
+ if rune(str[offset]) == ' ' {
+ offset++
+ continue
+ }
+
+ end := wrapLength + offset + 1
+ spaceToWrapAt := strings.LastIndex(str[offset:end], " ") + offset
+
+ if spaceToWrapAt >= offset {
+ // normal word (not longer than wrapLength)
+ wrappedLine.WriteString(str[offset:spaceToWrapAt])
+ wrappedLine.WriteString(newLineStr)
+ offset = spaceToWrapAt + 1
+
+ } else {
+ // long word or URL
+ if wrapLongWords {
+ end := wrapLength + offset
+ // long words are wrapped one line at a time
+ wrappedLine.WriteString(str[offset:end])
+ wrappedLine.WriteString(newLineStr)
+ offset += wrapLength
+ } else {
+ // long words aren't wrapped, just extended beyond limit
+ end := wrapLength + offset
+ index := strings.IndexRune(str[end:len(str)], ' ')
+ if index == -1 {
+ wrappedLine.WriteString(str[offset:len(str)])
+ offset = inputLineLength
+ } else {
+ spaceToWrapAt = index + end
+ wrappedLine.WriteString(str[offset:spaceToWrapAt])
+ wrappedLine.WriteString(newLineStr)
+ offset = spaceToWrapAt + 1
+ }
+ }
+ }
+ }
+
+ wrappedLine.WriteString(str[offset:len(str)])
+
+ return wrappedLine.String()
+
+}
+
+/*
+Capitalize capitalizes all the delimiter separated words in a string. Only the first letter of each word is changed.
+To convert the rest of each word to lowercase at the same time, use CapitalizeFully(str string, delimiters ...rune).
+The delimiters represent a set of characters understood to separate words. The first string character
+and the first non-delimiter character after a delimiter will be capitalized. A "" input string returns "".
+Capitalization uses the Unicode title case, normally equivalent to upper case.
+
+Parameters:
+ str - the string to capitalize
+ delimiters - set of characters to determine capitalization; if omitted, whitespace is used as the delimiter
+
+Returns:
+ capitalized string
+*/
+func Capitalize(str string, delimiters ...rune) string {
+
+ var delimLen int
+
+ if delimiters == nil {
+ delimLen = -1
+ } else {
+ delimLen = len(delimiters)
+ }
+
+ if str == "" || delimLen == 0 {
+ return str
+ }
+
+ buffer := []rune(str)
+ capitalizeNext := true
+ for i := 0; i < len(buffer); i++ {
+ ch := buffer[i]
+ if isDelimiter(ch, delimiters...) {
+ capitalizeNext = true
+ } else if capitalizeNext {
+ buffer[i] = unicode.ToTitle(ch)
+ capitalizeNext = false
+ }
+ }
+ return string(buffer)
+
+}
+
+/*
+CapitalizeFully converts all the delimiter separated words in a string into capitalized words, that is each word is made up of a
+titlecase character and then a series of lowercase characters. The delimiters represent a set of characters understood
+to separate words. The first string character and the first non-delimiter character after a delimiter will be capitalized.
+Capitalization uses the Unicode title case, normally equivalent to upper case.
+
+Parameters:
+ str - the string to capitalize fully
+ delimiters - set of characters to determine capitalization; if omitted, whitespace is used as the delimiter
+
+Returns:
+ capitalized string
+*/
+func CapitalizeFully(str string, delimiters ...rune) string {
+
+ var delimLen int
+
+ if delimiters == nil {
+ delimLen = -1
+ } else {
+ delimLen = len(delimiters)
+ }
+
+ if str == "" || delimLen == 0 {
+ return str
+ }
+ str = strings.ToLower(str)
+ return Capitalize(str, delimiters...)
+}
+
+/*
+Uncapitalize uncapitalizes all the whitespace separated words in a string. Only the first letter of each word is changed.
+The delimiters represent a set of characters understood to separate words. The first string character and the first non-delimiter
+character after a delimiter will be uncapitalized. Whitespace is defined by unicode.IsSpace(char).
+
+Parameters:
+ str - the string to uncapitalize fully
+ delimiters - set of characters to determine capitalization; if omitted, whitespace is used as the delimiter
+
+Returns:
+ uncapitalized string
+*/
+func Uncapitalize(str string, delimiters ...rune) string {
+
+ var delimLen int
+
+ if delimiters == nil {
+ delimLen = -1
+ } else {
+ delimLen = len(delimiters)
+ }
+
+ if str == "" || delimLen == 0 {
+ return str
+ }
+
+ buffer := []rune(str)
+ uncapitalizeNext := true // TODO Always makes capitalize/un apply to first char.
+ for i := 0; i < len(buffer); i++ {
+ ch := buffer[i]
+ if isDelimiter(ch, delimiters...) {
+ uncapitalizeNext = true
+ } else if uncapitalizeNext {
+ buffer[i] = unicode.ToLower(ch)
+ uncapitalizeNext = false
+ }
+ }
+ return string(buffer)
+}
+
+/*
+SwapCase swaps the case of a string using a word based algorithm.
+
+Conversion algorithm:
+
+ Upper case character converts to Lower case
+ Title case character converts to Lower case
+ Lower case character after Whitespace or at start converts to Title case
+ Other Lower case character converts to Upper case
+ Whitespace is defined by unicode.IsSpace(char).
+
+Parameters:
+ str - the string to swap case
+
+Returns:
+ the changed string
+*/
+func SwapCase(str string) string {
+ if str == "" {
+ return str
+ }
+ buffer := []rune(str)
+
+ whitespace := true
+
+ for i := 0; i < len(buffer); i++ {
+ ch := buffer[i]
+ if unicode.IsUpper(ch) {
+ buffer[i] = unicode.ToLower(ch)
+ whitespace = false
+ } else if unicode.IsTitle(ch) {
+ buffer[i] = unicode.ToLower(ch)
+ whitespace = false
+ } else if unicode.IsLower(ch) {
+ if whitespace {
+ buffer[i] = unicode.ToTitle(ch)
+ whitespace = false
+ } else {
+ buffer[i] = unicode.ToUpper(ch)
+ }
+ } else {
+ whitespace = unicode.IsSpace(ch)
+ }
+ }
+ return string(buffer)
+}
+
+/*
+Initials extracts the initial letters from each word in the string. The first letter of the string and all first
+letters after the defined delimiters are returned as a new string. Their case is not changed. If the delimiters
+parameter is excluded, then whitespace is used. Whitespace is defined by unicode.IsSpace(char). An empty delimiter array returns an empty string.
+
+Parameters:
+ str - the string to get initials from
+ delimiters - set of characters to determine words; if omitted, whitespace is used as the delimiter
+Returns:
+ string of initial letters
+*/
+func Initials(str string, delimiters ...rune) string {
+ if str == "" {
+ return str
+ }
+ if delimiters != nil && len(delimiters) == 0 {
+ return ""
+ }
+ strLen := len(str)
+ var buf bytes.Buffer
+ lastWasGap := true
+ for i := 0; i < strLen; i++ {
+ ch := rune(str[i])
+
+ if isDelimiter(ch, delimiters...) {
+ lastWasGap = true
+ } else if lastWasGap {
+ buf.WriteRune(ch)
+ lastWasGap = false
+ }
+ }
+ return buf.String()
+}
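+
+// Illustrative usage (a sketch; note the loop reads bytes, so this assumes
+// single-byte characters):
+//
+//    Initials("Ben John Lee")        // "BJL"
+//    Initials("Ben J.Lee", ' ', '.') // "BJL"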
+
+// private function (lower case func name)
+func isDelimiter(ch rune, delimiters ...rune) bool {
+ if delimiters == nil {
+ return unicode.IsSpace(ch)
+ }
+ for _, delimiter := range delimiters {
+ if ch == delimiter {
+ return true
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/Masterminds/semver/v3/.gitignore b/vendor/github.com/Masterminds/semver/v3/.gitignore
new file mode 100644
index 000000000..6b061e617
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/.gitignore
@@ -0,0 +1 @@
+_fuzz/ \ No newline at end of file
diff --git a/vendor/github.com/Masterminds/semver/v3/.golangci.yml b/vendor/github.com/Masterminds/semver/v3/.golangci.yml
new file mode 100644
index 000000000..c87d1c4b9
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/.golangci.yml
@@ -0,0 +1,30 @@
+run:
+ deadline: 2m
+
+linters:
+ disable-all: true
+ enable:
+ - misspell
+ - structcheck
+ - govet
+ - staticcheck
+ - deadcode
+ - errcheck
+ - varcheck
+ - unparam
+ - ineffassign
+ - nakedret
+ - gocyclo
+ - dupl
+ - goimports
+ - revive
+ - gosec
+ - gosimple
+ - typecheck
+ - unused
+
+linters-settings:
+ gofmt:
+ simplify: true
+ dupl:
+ threshold: 600
diff --git a/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md b/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md
new file mode 100644
index 000000000..f12626423
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md
@@ -0,0 +1,214 @@
+# Changelog
+
+## 3.2.0 (2022-11-28)
+
+### Added
+
+- #190: Added text marshaling and unmarshaling
+- #167: Added JSON marshalling for constraints (thanks @SimonTheLeg)
+- #173: Implement encoding.TextMarshaler and encoding.TextUnmarshaler on Version (thanks @MarkRosemaker)
+- #179: Added New() version constructor (thanks @kazhuravlev)
+
+### Changed
+
+- #182/#183: Updated CI testing setup
+
+### Fixed
+
+- #186: Fixing issue where validation of constraint section gave false positives
+- #176: Fix constraints check with *-0 (thanks @mtt0)
+- #181: Fixed Caret operator (^) gives unexpected results when the minor version in constraint is 0 (thanks @arshchimni)
+- #161: Fixed godoc (thanks @afirth)
+
+## 3.1.1 (2020-11-23)
+
+### Fixed
+
+- #158: Fixed issue with generated regex operation order that could cause problems
+
+## 3.1.0 (2020-04-15)
+
+### Added
+
+- #131: Add support for serializing/deserializing SQL (thanks @ryancurrah)
+
+### Changed
+
+- #148: More accurate validation messages on constraints
+
+## 3.0.3 (2019-12-13)
+
+### Fixed
+
+- #141: Fixed issue with <= comparison
+
+## 3.0.2 (2019-11-14)
+
+### Fixed
+
+- #134: Fixed broken constraint checking with ^0.0 (thanks @krmichelos)
+
+## 3.0.1 (2019-09-13)
+
+### Fixed
+
+- #125: Fixes issue with module path for v3
+
+## 3.0.0 (2019-09-12)
+
+This is a major release of the semver package which includes API changes. The Go
+API is compatible with ^1. The Go API was not changed because many people are using
+`go get` without Go modules for their applications, and API breaking changes
+would cause errors that we would then need to support.
+
+The changes in this release are the handling based on the data passed into the
+functions. These are described in the added and changed sections below.
+
+### Added
+
+- StrictNewVersion function. This is similar to NewVersion but will return an
+ error if the version passed in is not a strict semantic version. For example,
+ 1.2.3 would pass but v1.2.3 or 1.2 would fail because they are not strictly
+ speaking semantic versions. This function is faster, performs fewer operations,
+ and uses fewer allocations than NewVersion.
+- Fuzzing has been performed on NewVersion, StrictNewVersion, and NewConstraint.
+ The Makefile contains the operations used. For more information you can start
+ on Wikipedia at https://en.wikipedia.org/wiki/Fuzzing
+- Now using Go modules
+
+### Changed
+
+- NewVersion has proper prerelease and metadata validation with error messages
+ to signal an issue with either of them
+- ^ now operates using a similar set of rules to npm/js and Rust/Cargo. If the
+ version is >=1 the ^ ranges works the same as v1. For major versions of 0 the
+ rules have changed. The minor version is treated as the stable version unless
+ a patch is specified and then it is equivalent to =. One difference from npm/js
+ is that prereleases there are only to a specific version (e.g. 1.2.3).
+ Prereleases here look over multiple versions and follow semantic version
+ ordering rules. This pattern now follows along with the expected and requested
+ handling of this package by numerous users.
+
+## 1.5.0 (2019-09-11)
+
+### Added
+
+- #103: Add basic fuzzing for `NewVersion()` (thanks @jesse-c)
+
+### Changed
+
+- #82: Clarify wildcard meaning in range constraints and update tests for it (thanks @greysteil)
+- #83: Clarify caret operator range for pre-1.0.0 dependencies (thanks @greysteil)
+- #72: Adding docs comment pointing to vert for a cli
+- #71: Update the docs on pre-release comparator handling
+- #89: Test with new go versions (thanks @thedevsaddam)
+- #87: Added $ to ValidPrerelease for better validation (thanks @jeremycarroll)
+
+### Fixed
+
+- #78: Fix unchecked error in example code (thanks @ravron)
+- #70: Fix the handling of pre-releases and the 0.0.0 release edge case
+- #97: Fixed copyright file for proper display on GitHub
+- #107: Fix handling prerelease when sorting alphanum and num
+- #109: Fixed where Validate sometimes returns wrong message on error
+
+## 1.4.2 (2018-04-10)
+
+### Changed
+
+- #72: Updated the docs to point to vert for a console application
+- #71: Update the docs on pre-release comparator handling
+
+### Fixed
+
+- #70: Fix the handling of pre-releases and the 0.0.0 release edge case
+
+## 1.4.1 (2018-04-02)
+
+### Fixed
+
+- Fixed #64: Fix pre-release precedence issue (thanks @uudashr)
+
+## 1.4.0 (2017-10-04)
+
+### Changed
+
+- #61: Update NewVersion to parse ints with a 64bit int size (thanks @zknill)
+
+## 1.3.1 (2017-07-10)
+
+### Fixed
+
+- Fixed #57: number comparisons in prerelease sometimes inaccurate
+
+## 1.3.0 (2017-05-02)
+
+### Added
+
+- #45: Added json (un)marshaling support (thanks @mh-cbon)
+- Stability marker. See https://masterminds.github.io/stability/
+
+### Fixed
+
+- #51: Fix handling of single digit tilde constraint (thanks @dgodd)
+
+### Changed
+
+- #55: The godoc icon moved from png to svg
+
+## 1.2.3 (2017-04-03)
+
+### Fixed
+
+- #46: Fixed 0.x.x and 0.0.x in constraints being treated as *
+
+## Release 1.2.2 (2016-12-13)
+
+### Fixed
+
+- #34: Fixed issue where hyphen range was not working with pre-release parsing.
+
+## Release 1.2.1 (2016-11-28)
+
+### Fixed
+
+- #24: Fixed edge case issue where constraint "> 0" does not handle "0.0.1-alpha"
+ properly.
+
+## Release 1.2.0 (2016-11-04)
+
+### Added
+
+- #20: Added MustParse function for versions (thanks @adamreese)
+- #15: Added increment methods on versions (thanks @mh-cbon)
+
+### Fixed
+
+- Issue #21: Per the SemVer spec (section 9) a pre-release is unstable and
+ might not satisfy the intended compatibility. The change here ignores pre-releases
+ on constraint checks (e.g., ~ or ^) when a pre-release is not part of the
+ constraint. For example, `^1.2.3` will ignore pre-releases while
+ `^1.2.3-alpha` will include them.
+
+## Release 1.1.1 (2016-06-30)
+
+### Changed
+
+- Issue #9: Speed up version comparison performance (thanks @sdboyer)
+- Issue #8: Added benchmarks (thanks @sdboyer)
+- Updated Go Report Card URL to new location
+- Updated Readme to add code snippet formatting (thanks @mh-cbon)
+- Updating tagging to v[SemVer] structure for compatibility with other tools.
+
+## Release 1.1.0 (2016-03-11)
+
+- Issue #2: Implemented validation to provide reasons a version failed a
+ constraint.
+
+## Release 1.0.1 (2015-12-31)
+
+- Fixed #1: * constraint failing on valid versions.
+
+## Release 1.0.0 (2015-10-20)
+
+- Initial release
diff --git a/vendor/github.com/Masterminds/semver/v3/LICENSE.txt b/vendor/github.com/Masterminds/semver/v3/LICENSE.txt
new file mode 100644
index 000000000..9ff7da9c4
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/LICENSE.txt
@@ -0,0 +1,19 @@
+Copyright (C) 2014-2019, Matt Butcher and Matt Farina
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/Masterminds/semver/v3/Makefile b/vendor/github.com/Masterminds/semver/v3/Makefile
new file mode 100644
index 000000000..eac19178f
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/Makefile
@@ -0,0 +1,37 @@
+GOPATH=$(shell go env GOPATH)
+GOLANGCI_LINT=$(GOPATH)/bin/golangci-lint
+GOFUZZBUILD = $(GOPATH)/bin/go-fuzz-build
+GOFUZZ = $(GOPATH)/bin/go-fuzz
+
+.PHONY: lint
+lint: $(GOLANGCI_LINT)
+ @echo "==> Linting codebase"
+ @$(GOLANGCI_LINT) run
+
+.PHONY: test
+test:
+ @echo "==> Running tests"
+ GO111MODULE=on go test -v
+
+.PHONY: test-cover
+test-cover:
+ @echo "==> Running Tests with coverage"
+ GO111MODULE=on go test -cover .
+
+.PHONY: fuzz
+fuzz: $(GOFUZZBUILD) $(GOFUZZ)
+ @echo "==> Fuzz testing"
+ $(GOFUZZBUILD)
+ $(GOFUZZ) -workdir=_fuzz
+
+$(GOLANGCI_LINT):
+ # Install golangci-lint. The configuration for it is in the .golangci.yml
+ # file in the root of the repository
+ echo ${GOPATH}
+ curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s -- -b $(GOPATH)/bin v1.17.1
+
+$(GOFUZZBUILD):
+ cd / && go get -u github.com/dvyukov/go-fuzz/go-fuzz-build
+
+$(GOFUZZ):
+ cd / && go get -u github.com/dvyukov/go-fuzz/go-fuzz github.com/dvyukov/go-fuzz/go-fuzz-dep \ No newline at end of file
diff --git a/vendor/github.com/Masterminds/semver/v3/README.md b/vendor/github.com/Masterminds/semver/v3/README.md
new file mode 100644
index 000000000..d8f54dcbd
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/README.md
@@ -0,0 +1,244 @@
+# SemVer
+
+The `semver` package provides the ability to work with [Semantic Versions](http://semver.org) in Go. Specifically it provides the ability to:
+
+* Parse semantic versions
+* Sort semantic versions
+* Check if a semantic version fits within a set of constraints
+* Optionally work with a `v` prefix
+
+[![Stability:
+Active](https://masterminds.github.io/stability/active.svg)](https://masterminds.github.io/stability/active.html)
+[![](https://github.com/Masterminds/semver/workflows/Tests/badge.svg)](https://github.com/Masterminds/semver/actions)
+[![GoDoc](https://img.shields.io/static/v1?label=godoc&message=reference&color=blue)](https://pkg.go.dev/github.com/Masterminds/semver/v3)
+[![Go Report Card](https://goreportcard.com/badge/github.com/Masterminds/semver)](https://goreportcard.com/report/github.com/Masterminds/semver)
+
+If you are looking for a command line tool for version comparisons please see
+[vert](https://github.com/Masterminds/vert) which uses this library.
+
+## Package Versions
+
+There are three major versions of the `semver` package.
+
+* 3.x.x is the new stable and active version. This version is focused on constraint
+ compatibility for range handling in other tools from other languages. It has
+ a similar API to the v1 releases. The development of this version is on the master
+ branch. The documentation for this version is below.
+* 2.x was developed primarily for [dep](https://github.com/golang/dep). There are
+ no tagged releases and the development was performed by [@sdboyer](https://github.com/sdboyer).
+ There are API breaking changes from v1. This version lives on the [2.x branch](https://github.com/Masterminds/semver/tree/2.x).
+* 1.x.x is the most widely used version with numerous tagged releases. This is the
+ previous stable and is still maintained for bug fixes. The development, to fix
+ bugs, occurs on the release-1 branch. You can read the documentation [here](https://github.com/Masterminds/semver/blob/release-1/README.md).
+
+## Parsing Semantic Versions
+
+There are two functions that can parse semantic versions. The `StrictNewVersion`
+function only parses valid version 2 semantic versions as outlined in the
+specification. The `NewVersion` function attempts to coerce a version into a
+semantic version and parse it. For example, if there is a leading v or a version
+listed without all 3 parts (e.g. `v1.2`) it will attempt to coerce it into a valid
+semantic version (e.g., 1.2.0). In both cases a `Version` object is returned
+that can be sorted, compared, and used in constraints.
+
+When parsing a version an error is returned if there is an issue parsing the
+version. For example,
+
+ v, err := semver.NewVersion("1.2.3-beta.1+build345")
+
+The version object has methods to get the parts of the version, compare it to
+other versions, convert the version back into a string, and get the original
+string. Getting the original string is useful if the semantic version was coerced
+into a valid form.
+
+## Sorting Semantic Versions
+
+A set of versions can be sorted using the `sort` package from the standard library.
+For example,
+
+```go
+raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",}
+vs := make([]*semver.Version, len(raw))
+for i, r := range raw {
+ v, err := semver.NewVersion(r)
+ if err != nil {
+ t.Errorf("Error parsing version: %s", err)
+ }
+
+ vs[i] = v
+}
+
+sort.Sort(semver.Collection(vs))
+```
+
+## Checking Version Constraints
+
+There are two methods for comparing versions. One uses comparison methods on
+`Version` instances and the other uses `Constraints`. There are some important
+differences to note between these two methods of comparison.
+
+1. When two versions are compared using functions such as `Compare`, `LessThan`,
+ and others it will follow the specification and always include prereleases
+ within the comparison. It will provide an answer that is valid with the
+ comparison section of the spec at https://semver.org/#spec-item-11
+2. When constraint checking is used for checks or validation it will follow a
+ different set of rules that are common for ranges with tools like npm/js
+ and Rust/Cargo. This includes considering prereleases to be invalid if the
+ range does not include one. If you want to have it include pre-releases a
+ simple solution is to include `-0` in your range.
+3. Constraint ranges can have some complex rules including the shorthand use of
+ ~ and ^. For more details on those see the options below.
+
+There are differences between the two methods of checking versions because the
+comparison methods on `Version` follow the specification while comparison ranges
+are not part of the specification. Different packages and tools have taken it
+upon themselves to come up with range rules. This has resulted in differences.
+For example, npm/js and Cargo/Rust follow similar patterns while PHP has a
+different pattern for ^. The comparison features in this package follow the
+npm/js and Cargo/Rust lead because applications using it have followed similar
+patterns with their versions.
+
+Checking a version against version constraints is one of the most featureful
+parts of the package.
+
+```go
+c, err := semver.NewConstraint(">= 1.2.3")
+if err != nil {
+ // Handle constraint not being parsable.
+}
+
+v, err := semver.NewVersion("1.3")
+if err != nil {
+ // Handle version not being parsable.
+}
+// Check if the version meets the constraints. The a variable will be true.
+a := c.Check(v)
+```
+
+### Basic Comparisons
+
+There are two elements to the comparisons. First, a comparison string is a list
+of space or comma separated AND comparisons. These are then separated by || (OR)
+comparisons. For example, `">= 1.2 < 3.0.0 || >= 4.2.3"` is looking for a
+comparison that's greater than or equal to 1.2 and less than 3.0.0 or is
+greater than or equal to 4.2.3.
+
+The basic comparisons are:
+
+* `=`: equal (aliased to no operator)
+* `!=`: not equal
+* `>`: greater than
+* `<`: less than
+* `>=`: greater than or equal to
+* `<=`: less than or equal to
+
+### Working With Prerelease Versions
+
+Pre-releases, for those not familiar with them, are used for software releases
+prior to stable or generally available releases. Examples of prereleases include
+development, alpha, beta, and release candidate releases. A prerelease may be
+a version such as `1.2.3-beta.1` while the stable release would be `1.2.3`. In the
+order of precedence, prereleases come before their associated releases. In this
+example `1.2.3-beta.1 < 1.2.3`.
+
+According to the Semantic Version specification prereleases may not be
+API compliant with their release counterpart. It says,
+
+> A pre-release version indicates that the version is unstable and might not satisfy the intended compatibility requirements as denoted by its associated normal version.
+
+SemVer comparisons using constraints without a prerelease comparator will skip
+prerelease versions. For example, `>=1.2.3` will skip prereleases when looking
+at a list of releases while `>=1.2.3-0` will evaluate and find prereleases.
+
+The reason for the `0` as a pre-release version in the example comparison is
+that pre-releases can only contain ASCII alphanumerics and hyphens (along with
+`.` separators), per the spec. Sorting happens in ASCII sort order, again per the
+spec. The lowest character is a `0` in ASCII sort order
+(see an [ASCII Table](http://www.asciitable.com/)).
+
+Understanding ASCII sort ordering is important because A-Z comes before a-z. That
+means `>=1.2.3-BETA` will match `1.2.3-alpha`. What you might expect from case
+sensitivity doesn't apply here, because the spec specifies ASCII sort ordering.
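+
+A small illustrative snippet of the behaviour described above:
+
+```go
+c1, _ := semver.NewConstraint(">=1.2.3")
+c2, _ := semver.NewConstraint(">=1.2.3-0")
+v := semver.MustParse("1.2.4-beta.1")
+
+fmt.Println(c1.Check(v)) // false: prereleases are skipped by default
+fmt.Println(c2.Check(v)) // true: the -0 comparator opts in to prereleases
+```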
+
+### Hyphen Range Comparisons
+
+There are multiple methods to handle ranges and the first is hyphen ranges.
+These look like:
+
+* `1.2 - 1.4.5` which is equivalent to `>= 1.2 <= 1.4.5`
+* `2.3.4 - 4.5` which is equivalent to `>= 2.3.4 <= 4.5`
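+
+For instance (an illustrative snippet):
+
+```go
+c, _ := semver.NewConstraint("1.2 - 1.4.5")
+fmt.Println(c.Check(semver.MustParse("1.3.0"))) // true
+fmt.Println(c.Check(semver.MustParse("1.5.0"))) // false
+```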
+
+### Wildcards In Comparisons
+
+The `x`, `X`, and `*` characters can be used as a wildcard character. This works
+for all comparison operators. When used on the `=` operator it falls
+back to the patch level comparison (see tilde below). For example,
+
+* `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
+* `>= 1.2.x` is equivalent to `>= 1.2.0`
+* `<= 2.x` is equivalent to `< 3`
+* `*` is equivalent to `>= 0.0.0`
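+
+For example (an illustrative snippet):
+
+```go
+c, _ := semver.NewConstraint("1.2.x")
+fmt.Println(c.Check(semver.MustParse("1.2.9"))) // true
+fmt.Println(c.Check(semver.MustParse("1.3.0"))) // false
+```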
+
+### Tilde Range Comparisons (Patch)
+
+The tilde (`~`) comparison operator is for patch level ranges when a minor
+version is specified and major level changes when the minor number is missing.
+For example,
+
+* `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0`
+* `~1` is equivalent to `>= 1, < 2`
+* `~2.3` is equivalent to `>= 2.3, < 2.4`
+* `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
+* `~1.x` is equivalent to `>= 1, < 2`
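+
+A quick illustrative check:
+
+```go
+c, _ := semver.NewConstraint("~1.2.3")
+fmt.Println(c.Check(semver.MustParse("1.2.9"))) // true
+fmt.Println(c.Check(semver.MustParse("1.3.0"))) // false
+```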
+
+### Caret Range Comparisons (Major)
+
+The caret (`^`) comparison operator is for major level changes once a stable
+(1.0.0) release has occurred. Prior to a 1.0.0 release the minor version acts
+as the API stability level. This is useful when comparing API versions, as a
+major change is API breaking. For example,
+
+* `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0`
+* `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0`
+* `^2.3` is equivalent to `>= 2.3, < 3`
+* `^2.x` is equivalent to `>= 2.0.0, < 3`
+* `^0.2.3` is equivalent to `>=0.2.3 <0.3.0`
+* `^0.2` is equivalent to `>=0.2.0 <0.3.0`
+* `^0.0.3` is equivalent to `>=0.0.3 <0.0.4`
+* `^0.0` is equivalent to `>=0.0.0 <0.1.0`
+* `^0` is equivalent to `>=0.0.0 <1.0.0`
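+
+And likewise for caret ranges (illustrative):
+
+```go
+c, _ := semver.NewConstraint("^0.2.3")
+fmt.Println(c.Check(semver.MustParse("0.2.9"))) // true
+fmt.Println(c.Check(semver.MustParse("0.3.0"))) // false
+```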
+
+## Validation
+
+In addition to testing a version against a constraint, a version can be validated
+against a constraint. When validation fails a slice of errors containing why a
+version didn't meet the constraint is returned. For example,
+
+```go
+c, err := semver.NewConstraint("<= 1.2.3, >= 1.4")
+if err != nil {
+ // Handle constraint not being parseable.
+}
+
+v, err := semver.NewVersion("1.3")
+if err != nil {
+ // Handle version not being parseable.
+}
+
+// Validate a version against a constraint.
+a, msgs := c.Validate(v)
+// a is false
+for _, m := range msgs {
+ fmt.Println(m)
+
+ // Loops over the errors which would read
+ // "1.3 is greater than 1.2.3"
+ // "1.3 is less than 1.4"
+}
+```
+
+## Contribute
+
+If you find an issue or want to contribute please file an [issue](https://github.com/Masterminds/semver/issues)
+or [create a pull request](https://github.com/Masterminds/semver/pulls).
diff --git a/vendor/github.com/Masterminds/semver/v3/collection.go b/vendor/github.com/Masterminds/semver/v3/collection.go
new file mode 100644
index 000000000..a78235895
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/collection.go
@@ -0,0 +1,24 @@
+package semver
+
+// Collection is a collection of Version instances and implements the sort
+// interface. See the sort package for more details.
+// https://golang.org/pkg/sort/
+type Collection []*Version
+
+// Len returns the length of a collection, i.e. the number of Version instances
+// in the slice.
+func (c Collection) Len() int {
+ return len(c)
+}
+
+// Less is needed for the sort interface to compare two Version objects on the
+// slice. It checks if one is less than the other.
+func (c Collection) Less(i, j int) bool {
+ return c[i].LessThan(c[j])
+}
+
+// Swap is needed for the sort interface to replace the Version objects
+// at two different positions in the slice.
+func (c Collection) Swap(i, j int) {
+ c[i], c[j] = c[j], c[i]
+}
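+
+// Illustrative usage with the standard library sort package (a sketch, not
+// part of the original file):
+//
+//    vs := []*Version{MustParse("1.3.0"), MustParse("1.2.4")}
+//    sort.Sort(Collection(vs)) // vs is now ordered: 1.2.4, 1.3.0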
diff --git a/vendor/github.com/Masterminds/semver/v3/constraints.go b/vendor/github.com/Masterminds/semver/v3/constraints.go
new file mode 100644
index 000000000..203072e46
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/constraints.go
@@ -0,0 +1,594 @@
+package semver
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "regexp"
+ "strings"
+)
+
+// Constraints represents one or more constraints that a semantic version can
+// be checked against.
+type Constraints struct {
+ constraints [][]*constraint
+}
+
+// NewConstraint returns a Constraints instance that a Version instance can
+// be checked against. If there is a parse error it will be returned.
+func NewConstraint(c string) (*Constraints, error) {
+
+ // Rewrite - ranges into a comparison operation.
+ c = rewriteRange(c)
+
+ ors := strings.Split(c, "||")
+ or := make([][]*constraint, len(ors))
+ for k, v := range ors {
+
+ // TODO: Find a way to validate and fetch all the constraints in a simpler form
+
+ // Validate the segment
+ if !validConstraintRegex.MatchString(v) {
+ return nil, fmt.Errorf("improper constraint: %s", v)
+ }
+
+ cs := findConstraintRegex.FindAllString(v, -1)
+ if cs == nil {
+ cs = append(cs, v)
+ }
+ result := make([]*constraint, len(cs))
+ for i, s := range cs {
+ pc, err := parseConstraint(s)
+ if err != nil {
+ return nil, err
+ }
+
+ result[i] = pc
+ }
+ or[k] = result
+ }
+
+ o := &Constraints{constraints: or}
+ return o, nil
+}
+
+// Check tests if a version satisfies the constraints.
+func (cs Constraints) Check(v *Version) bool {
+ // TODO(mattfarina): For v4 of this library consolidate the Check and Validate
+ // functions as the underlying functions make that possible now.
+ // loop over the ORs and check the inner ANDs
+ for _, o := range cs.constraints {
+ joy := true
+ for _, c := range o {
+ if check, _ := c.check(v); !check {
+ joy = false
+ break
+ }
+ }
+
+ if joy {
+ return true
+ }
+ }
+
+ return false
+}
+
+// Validate checks if a version satisfies a constraint. If not, a slice of
+// reasons for the failure is returned in addition to a bool.
+func (cs Constraints) Validate(v *Version) (bool, []error) {
+ // loop over the ORs and check the inner ANDs
+ var e []error
+
+ // Capture the prerelease message only once. The first time it happens
+ // this var is marked true.
+ var prerelease bool
+ for _, o := range cs.constraints {
+ joy := true
+ for _, c := range o {
+ // Before running the check, handle the case where the version is
+ // a prerelease and the check is not searching for prereleases.
+ if c.con.pre == "" && v.pre != "" {
+ if !prerelease {
+ em := fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+ e = append(e, em)
+ prerelease = true
+ }
+ joy = false
+
+ } else {
+
+ if _, err := c.check(v); err != nil {
+ e = append(e, err)
+ joy = false
+ }
+ }
+ }
+
+ if joy {
+ return true, []error{}
+ }
+ }
+
+ return false, e
+}
+
+func (cs Constraints) String() string {
+ buf := make([]string, len(cs.constraints))
+ var tmp bytes.Buffer
+
+ for k, v := range cs.constraints {
+ tmp.Reset()
+ vlen := len(v)
+ for kk, c := range v {
+ tmp.WriteString(c.string())
+
+ // Space separate the AND conditions
+ if vlen > 1 && kk < vlen-1 {
+ tmp.WriteString(" ")
+ }
+ }
+ buf[k] = tmp.String()
+ }
+
+ return strings.Join(buf, " || ")
+}
+
+// UnmarshalText implements the encoding.TextUnmarshaler interface.
+func (cs *Constraints) UnmarshalText(text []byte) error {
+ temp, err := NewConstraint(string(text))
+ if err != nil {
+ return err
+ }
+
+ *cs = *temp
+
+ return nil
+}
+
+// MarshalText implements the encoding.TextMarshaler interface.
+func (cs Constraints) MarshalText() ([]byte, error) {
+ return []byte(cs.String()), nil
+}
+
+var constraintOps map[string]cfunc
+var constraintRegex *regexp.Regexp
+var constraintRangeRegex *regexp.Regexp
+
+// Used to find individual constraints within a multi-constraint string
+var findConstraintRegex *regexp.Regexp
+
+// Used to validate that a segment of ANDs is valid
+var validConstraintRegex *regexp.Regexp
+
+const cvRegex string = `v?([0-9|x|X|\*]+)(\.[0-9|x|X|\*]+)?(\.[0-9|x|X|\*]+)?` +
+ `(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` +
+ `(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?`
+
+func init() {
+ constraintOps = map[string]cfunc{
+ "": constraintTildeOrEqual,
+ "=": constraintTildeOrEqual,
+ "!=": constraintNotEqual,
+ ">": constraintGreaterThan,
+ "<": constraintLessThan,
+ ">=": constraintGreaterThanEqual,
+ "=>": constraintGreaterThanEqual,
+ "<=": constraintLessThanEqual,
+ "=<": constraintLessThanEqual,
+ "~": constraintTilde,
+ "~>": constraintTilde,
+ "^": constraintCaret,
+ }
+
+ ops := `=||!=|>|<|>=|=>|<=|=<|~|~>|\^`
+
+ constraintRegex = regexp.MustCompile(fmt.Sprintf(
+ `^\s*(%s)\s*(%s)\s*$`,
+ ops,
+ cvRegex))
+
+ constraintRangeRegex = regexp.MustCompile(fmt.Sprintf(
+ `\s*(%s)\s+-\s+(%s)\s*`,
+ cvRegex, cvRegex))
+
+ findConstraintRegex = regexp.MustCompile(fmt.Sprintf(
+ `(%s)\s*(%s)`,
+ ops,
+ cvRegex))
+
+ // The first constraint in a string will look slightly different from
+ // those that follow, because later constraints are preceded by a space
+ // or comma in the string.
+ validConstraintRegex = regexp.MustCompile(fmt.Sprintf(
+ `^(\s*(%s)\s*(%s)\s*)((?:\s+|,\s*)(%s)\s*(%s)\s*)*$`,
+ ops,
+ cvRegex,
+ ops,
+ cvRegex))
+}
+
+// An individual constraint
+type constraint struct {
+ // The version used in the constraint check. For example, if a constraint
+ // is '<= 2.0.0' then con is a Version instance representing 2.0.0.
+ con *Version
+
+ // The original parsed version (e.g., 4.x from != 4.x)
+ orig string
+
+ // The original operator for the constraint
+ origfunc string
+
+ // When an x is used as part of the version (e.g., 1.x)
+ minorDirty bool
+ dirty bool
+ patchDirty bool
+}
+
+// Check if a version meets the constraint
+func (c *constraint) check(v *Version) (bool, error) {
+ return constraintOps[c.origfunc](v, c)
+}
+
+// String prints an individual constraint into a string
+func (c *constraint) string() string {
+ return c.origfunc + c.orig
+}
+
+type cfunc func(v *Version, c *constraint) (bool, error)
+
+func parseConstraint(c string) (*constraint, error) {
+ if len(c) > 0 {
+ m := constraintRegex.FindStringSubmatch(c)
+ if m == nil {
+ return nil, fmt.Errorf("improper constraint: %s", c)
+ }
+
+ cs := &constraint{
+ orig: m[2],
+ origfunc: m[1],
+ }
+
+ ver := m[2]
+ minorDirty := false
+ patchDirty := false
+ dirty := false
+ if isX(m[3]) || m[3] == "" {
+ ver = fmt.Sprintf("0.0.0%s", m[6])
+ dirty = true
+ } else if isX(strings.TrimPrefix(m[4], ".")) || m[4] == "" {
+ minorDirty = true
+ dirty = true
+ ver = fmt.Sprintf("%s.0.0%s", m[3], m[6])
+ } else if isX(strings.TrimPrefix(m[5], ".")) || m[5] == "" {
+ dirty = true
+ patchDirty = true
+ ver = fmt.Sprintf("%s%s.0%s", m[3], m[4], m[6])
+ }
+
+ con, err := NewVersion(ver)
+ if err != nil {
+
+ // The constraintRegex should catch any regex parsing errors. So,
+ // we should never get here.
+ return nil, errors.New("constraint Parser Error")
+ }
+
+ cs.con = con
+ cs.minorDirty = minorDirty
+ cs.patchDirty = patchDirty
+ cs.dirty = dirty
+
+ return cs, nil
+ }
+
+ // The rest is the special case where an empty string was passed in which
+ // is equivalent to * or >=0.0.0
+ con, err := StrictNewVersion("0.0.0")
+ if err != nil {
+
+ // The constraintRegex should catch any regex parsing errors. So,
+ // we should never get here.
+ return nil, errors.New("constraint Parser Error")
+ }
+
+ cs := &constraint{
+ con: con,
+ orig: c,
+ origfunc: "",
+ minorDirty: false,
+ patchDirty: false,
+ dirty: true,
+ }
+ return cs, nil
+}
+
+// Constraint functions
+func constraintNotEqual(v *Version, c *constraint) (bool, error) {
+ if c.dirty {
+
+ // If there is a pre-release on the version but the constraint isn't looking
+ // for them assume that pre-releases are not compatible. See issue 21 for
+ // more details.
+ if v.Prerelease() != "" && c.con.Prerelease() == "" {
+ return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+ }
+
+ if c.con.Major() != v.Major() {
+ return true, nil
+ }
+ if c.con.Minor() != v.Minor() && !c.minorDirty {
+ return true, nil
+ } else if c.minorDirty {
+ return false, fmt.Errorf("%s is equal to %s", v, c.orig)
+ } else if c.con.Patch() != v.Patch() && !c.patchDirty {
+ return true, nil
+ } else if c.patchDirty {
+ // Need to handle prereleases if present
+ if v.Prerelease() != "" || c.con.Prerelease() != "" {
+ eq := comparePrerelease(v.Prerelease(), c.con.Prerelease()) != 0
+ if eq {
+ return true, nil
+ }
+ return false, fmt.Errorf("%s is equal to %s", v, c.orig)
+ }
+ return false, fmt.Errorf("%s is equal to %s", v, c.orig)
+ }
+ }
+
+ eq := v.Equal(c.con)
+ if eq {
+ return false, fmt.Errorf("%s is equal to %s", v, c.orig)
+ }
+
+ return true, nil
+}
+
+func constraintGreaterThan(v *Version, c *constraint) (bool, error) {
+
+ // If there is a pre-release on the version but the constraint isn't looking
+ // for them assume that pre-releases are not compatible. See issue 21 for
+ // more details.
+ if v.Prerelease() != "" && c.con.Prerelease() == "" {
+ return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+ }
+
+ var eq bool
+
+ if !c.dirty {
+ eq = v.Compare(c.con) == 1
+ if eq {
+ return true, nil
+ }
+ return false, fmt.Errorf("%s is less than or equal to %s", v, c.orig)
+ }
+
+ if v.Major() > c.con.Major() {
+ return true, nil
+ } else if v.Major() < c.con.Major() {
+ return false, fmt.Errorf("%s is less than or equal to %s", v, c.orig)
+ } else if c.minorDirty {
+ // This is a range case such as >11. When the version is something like
+ // 11.1.0 it is not > 11. For that we would need 12 or higher.
+ return false, fmt.Errorf("%s is less than or equal to %s", v, c.orig)
+ } else if c.patchDirty {
+ // This is for ranges such as >11.1. A version of 11.1.1 is not greater,
+ // while one of 11.2.1 is.
+ eq = v.Minor() > c.con.Minor()
+ if eq {
+ return true, nil
+ }
+ return false, fmt.Errorf("%s is less than or equal to %s", v, c.orig)
+ }
+
+ // If we have gotten here we are not comparing pre-releases and can use the
+ // Compare function to accomplish that.
+ eq = v.Compare(c.con) == 1
+ if eq {
+ return true, nil
+ }
+ return false, fmt.Errorf("%s is less than or equal to %s", v, c.orig)
+}
+
+func constraintLessThan(v *Version, c *constraint) (bool, error) {
+ // If there is a pre-release on the version but the constraint isn't looking
+ // for them assume that pre-releases are not compatible. See issue 21 for
+ // more details.
+ if v.Prerelease() != "" && c.con.Prerelease() == "" {
+ return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+ }
+
+ eq := v.Compare(c.con) < 0
+ if eq {
+ return true, nil
+ }
+ return false, fmt.Errorf("%s is greater than or equal to %s", v, c.orig)
+}
+
+func constraintGreaterThanEqual(v *Version, c *constraint) (bool, error) {
+
+ // If there is a pre-release on the version but the constraint isn't looking
+ // for them assume that pre-releases are not compatible. See issue 21 for
+ // more details.
+ if v.Prerelease() != "" && c.con.Prerelease() == "" {
+ return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+ }
+
+ eq := v.Compare(c.con) >= 0
+ if eq {
+ return true, nil
+ }
+ return false, fmt.Errorf("%s is less than %s", v, c.orig)
+}
+
+func constraintLessThanEqual(v *Version, c *constraint) (bool, error) {
+ // If there is a pre-release on the version but the constraint isn't looking
+ // for them assume that pre-releases are not compatible. See issue 21 for
+ // more details.
+ if v.Prerelease() != "" && c.con.Prerelease() == "" {
+ return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+ }
+
+ var eq bool
+
+ if !c.dirty {
+ eq = v.Compare(c.con) <= 0
+ if eq {
+ return true, nil
+ }
+ return false, fmt.Errorf("%s is greater than %s", v, c.orig)
+ }
+
+ if v.Major() > c.con.Major() {
+ return false, fmt.Errorf("%s is greater than %s", v, c.orig)
+ } else if v.Major() == c.con.Major() && v.Minor() > c.con.Minor() && !c.minorDirty {
+ return false, fmt.Errorf("%s is greater than %s", v, c.orig)
+ }
+
+ return true, nil
+}
+
+// ~*, ~>* --> >= 0.0.0 (any)
+// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0, <3.0.0
+// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0, <2.1.0
+// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0, <1.3.0
+// ~1.2.3, ~>1.2.3 --> >=1.2.3, <1.3.0
+// ~1.2.0, ~>1.2.0 --> >=1.2.0, <1.3.0
+func constraintTilde(v *Version, c *constraint) (bool, error) {
+ // If there is a pre-release on the version but the constraint isn't looking
+ // for them assume that pre-releases are not compatible. See issue 21 for
+ // more details.
+ if v.Prerelease() != "" && c.con.Prerelease() == "" {
+ return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+ }
+
+ if v.LessThan(c.con) {
+ return false, fmt.Errorf("%s is less than %s", v, c.orig)
+ }
+
+ // ~0.0.0 is a special case where all versions are accepted. It's
+ // equivalent to >= 0.0.0.
+ if c.con.Major() == 0 && c.con.Minor() == 0 && c.con.Patch() == 0 &&
+ !c.minorDirty && !c.patchDirty {
+ return true, nil
+ }
+
+ if v.Major() != c.con.Major() {
+ return false, fmt.Errorf("%s does not have same major version as %s", v, c.orig)
+ }
+
+ if v.Minor() != c.con.Minor() && !c.minorDirty {
+ return false, fmt.Errorf("%s does not have same major and minor version as %s", v, c.orig)
+ }
+
+ return true, nil
+}
+
+// When there is a .x (dirty) status it automatically opts in to ~. Otherwise
+// it's a straight =
+func constraintTildeOrEqual(v *Version, c *constraint) (bool, error) {
+ // If there is a pre-release on the version but the constraint isn't looking
+ // for them assume that pre-releases are not compatible. See issue 21 for
+ // more details.
+ if v.Prerelease() != "" && c.con.Prerelease() == "" {
+ return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+ }
+
+ if c.dirty {
+ return constraintTilde(v, c)
+ }
+
+ eq := v.Equal(c.con)
+ if eq {
+ return true, nil
+ }
+
+ return false, fmt.Errorf("%s is not equal to %s", v, c.orig)
+}
+
+// ^* --> (any)
+// ^1.2.3 --> >=1.2.3 <2.0.0
+// ^1.2 --> >=1.2.0 <2.0.0
+// ^1 --> >=1.0.0 <2.0.0
+// ^0.2.3 --> >=0.2.3 <0.3.0
+// ^0.2 --> >=0.2.0 <0.3.0
+// ^0.0.3 --> >=0.0.3 <0.0.4
+// ^0.0 --> >=0.0.0 <0.1.0
+// ^0 --> >=0.0.0 <1.0.0
+func constraintCaret(v *Version, c *constraint) (bool, error) {
+ // If there is a pre-release on the version but the constraint isn't looking
+ // for them assume that pre-releases are not compatible. See issue 21 for
+ // more details.
+ if v.Prerelease() != "" && c.con.Prerelease() == "" {
+ return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
+ }
+
+ // This less than handles prereleases
+ if v.LessThan(c.con) {
+ return false, fmt.Errorf("%s is less than %s", v, c.orig)
+ }
+
+ var eq bool
+
+ // ^ when the major > 0 is >=x.y.z < x+1
+ if c.con.Major() > 0 || c.minorDirty {
+
+ // ^ has to be within a major range for > 0. Everything less than was
+ // filtered out with the LessThan call above. This filters out those
+ // that greater but not within the same major range.
+ eq = v.Major() == c.con.Major()
+ if eq {
+ return true, nil
+ }
+ return false, fmt.Errorf("%s does not have same major version as %s", v, c.orig)
+ }
+
+ // ^ when the major is 0 and minor > 0 is >=0.y.z < 0.y+1
+ if c.con.Major() == 0 && v.Major() > 0 {
+ return false, fmt.Errorf("%s does not have same major version as %s", v, c.orig)
+ }
+ // If the con Minor is > 0 it is not dirty
+ if c.con.Minor() > 0 || c.patchDirty {
+ eq = v.Minor() == c.con.Minor()
+ if eq {
+ return true, nil
+ }
+ return false, fmt.Errorf("%s does not have same minor version as %s. Expected minor versions to match when constraint major version is 0", v, c.orig)
+ }
+ // ^ when the constraint minor is 0 and the version minor > 0 cannot match, since ^0.0.z is =0.0.z
+ if c.con.Minor() == 0 && v.Minor() > 0 {
+ return false, fmt.Errorf("%s does not have same minor version as %s", v, c.orig)
+ }
+
+ // At this point the major and minor are both 0 and not dirty. The patch
+ // is not dirty either, so check whether the patches are equal; if not,
+ // an error is returned.
+ eq = c.con.Patch() == v.Patch()
+ if eq {
+ return true, nil
+ }
+ return false, fmt.Errorf("%s does not equal %s. Expect version and constraint to equal when major and minor versions are 0", v, c.orig)
+}
+
+func isX(x string) bool {
+ switch x {
+ case "x", "*", "X":
+ return true
+ default:
+ return false
+ }
+}
+
+func rewriteRange(i string) string {
+ m := constraintRangeRegex.FindAllStringSubmatch(i, -1)
+ if m == nil {
+ return i
+ }
+ o := i
+ for _, v := range m {
+ t := fmt.Sprintf(">= %s, <= %s", v[1], v[11])
+ o = strings.Replace(o, v[0], t, 1)
+ }
+
+ return o
+}
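+
+// For example (illustrative): rewriteRange("1.2 - 1.4.5") returns
+// ">= 1.2, <= 1.4.5", which the constraint parser then handles as two
+// ANDed comparisons.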
diff --git a/vendor/github.com/Masterminds/semver/v3/doc.go b/vendor/github.com/Masterminds/semver/v3/doc.go
new file mode 100644
index 000000000..74f97caa5
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/doc.go
@@ -0,0 +1,184 @@
+/*
+Package semver provides the ability to work with Semantic Versions (http://semver.org) in Go.
+
+Specifically it provides the ability to:
+
+ - Parse semantic versions
+ - Sort semantic versions
+ - Check if a semantic version fits within a set of constraints
+ - Optionally work with a `v` prefix
+
+# Parsing Semantic Versions
+
+There are two functions that can parse semantic versions. The `StrictNewVersion`
+function only parses valid version 2 semantic versions as outlined in the
+specification. The `NewVersion` function attempts to coerce a version into a
+semantic version and parse it. For example, if there is a leading v or a version
+listed without all 3 parts (e.g. 1.2) it will attempt to coerce it into a valid
+semantic version (e.g., 1.2.0). In both cases a `Version` object is returned
+that can be sorted, compared, and used in constraints.
+
+When parsing a version an error is returned if there is an issue parsing the
+version. For example,
+
+ v, err := semver.NewVersion("1.2.3-beta.1+b345")
+
+The version object has methods to get the parts of the version, compare it to
+other versions, convert the version back into a string, and get the original
+string. For more details please see the documentation
+at https://godoc.org/github.com/Masterminds/semver.
+
+# Sorting Semantic Versions
+
+A set of versions can be sorted using the `sort` package from the standard library.
+For example,
+
+ raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",}
+ vs := make([]*semver.Version, len(raw))
+ for i, r := range raw {
+ v, err := semver.NewVersion(r)
+ if err != nil {
+ t.Errorf("Error parsing version: %s", err)
+ }
+
+ vs[i] = v
+ }
+
+ sort.Sort(semver.Collection(vs))
+
+# Checking Version Constraints and Comparing Versions
+
+There are two methods for comparing versions. One uses comparison methods on
+`Version` instances and the other uses Constraints. There are some important
+differences to note between these two methods of comparison.
+
+ 1. When two versions are compared using functions such as `Compare`, `LessThan`,
+ and others it will follow the specification and always include prereleases
+ within the comparison. It will provide an answer valid with the comparison
+ spec section at https://semver.org/#spec-item-11
+ 2. When constraint checking is used for checks or validation it will follow a
+ different set of rules that are common for ranges with tools like npm/js
+ and Rust/Cargo. This includes considering prereleases to be invalid if the
+ range does not include one. If you want to have it include pre-releases a
+ simple solution is to include `-0` in your range.
+ 3. Constraint ranges can have some complex rules including the shorthand use of
+ ~ and ^. For more details on those see the options below.
+
+There are differences between the two methods of checking versions because the
+comparison methods on `Version` follow the specification while comparison ranges
+are not part of the specification. Different packages and tools have taken it
+upon themselves to come up with range rules. This has resulted in differences.
+For example, npm/js and Cargo/Rust follow similar patterns while PHP has a
+different pattern for ^. The comparison features in this package follow the
+npm/js and Cargo/Rust lead because applications using it have followed similar
+patterns with their versions.
+
+Checking a version against version constraints is one of the most featureful
+parts of the package.
+
+ c, err := semver.NewConstraint(">= 1.2.3")
+ if err != nil {
+ // Handle constraint not being parsable.
+ }
+
+ v, err := semver.NewVersion("1.3")
+ if err != nil {
+ // Handle version not being parsable.
+ }
+ // Check if the version meets the constraints. The a variable will be true.
+ a := c.Check(v)
+
+# Basic Comparisons
+
+There are two elements to the comparisons. First, a comparison string is a list
+of comma or space separated AND comparisons. These are then separated by || (OR)
+comparisons. For example, `">= 1.2 < 3.0.0 || >= 4.2.3"` is looking for a
+comparison that's greater than or equal to 1.2 and less than 3.0.0 or is
+greater than or equal to 4.2.3. This can also be written as
+`">= 1.2, < 3.0.0 || >= 4.2.3"`
+
+The basic comparisons are:
+
+ - `=`: equal (aliased to no operator)
+ - `!=`: not equal
+ - `>`: greater than
+ - `<`: less than
+ - `>=`: greater than or equal to
+ - `<=`: less than or equal to
+
+# Hyphen Range Comparisons
+
+There are multiple methods to handle ranges and the first is hyphen ranges.
+These look like:
+
+ - `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5`
+ - `2.3.4 - 4.5` which is equivalent to `>= 2.3.4 <= 4.5`
+
+# Wildcards In Comparisons
+
+The `x`, `X`, and `*` characters can be used as a wildcard character. This works
+for all comparison operators. When used on the `=` operator it falls
+back to the tilde operation. For example,
+
+ - `1.2.x` is equivalent to `>= 1.2.0 < 1.3.0`
+ - `>= 1.2.x` is equivalent to `>= 1.2.0`
+ - `<= 2.x` is equivalent to `< 3`
+ - `*` is equivalent to `>= 0.0.0`
+
+# Tilde Range Comparisons (Patch)
+
+The tilde (`~`) comparison operator is for patch level ranges when a minor
+version is specified and major level changes when the minor number is missing.
+For example,
+
+ - `~1.2.3` is equivalent to `>= 1.2.3 < 1.3.0`
+ - `~1` is equivalent to `>= 1, < 2`
+ - `~2.3` is equivalent to `>= 2.3 < 2.4`
+ - `~1.2.x` is equivalent to `>= 1.2.0 < 1.3.0`
+ - `~1.x` is equivalent to `>= 1 < 2`
+
+# Caret Range Comparisons (Major)
+
+The caret (`^`) comparison operator is for major level changes once a stable
+(1.0.0) release has occurred. Prior to a 1.0.0 release the minor version acts
+as the API stability level. This is useful when comparing API versions, as a
+major change is API breaking. For example,
+
+ - `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0`
+ - `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0`
+ - `^2.3` is equivalent to `>= 2.3, < 3`
+ - `^2.x` is equivalent to `>= 2.0.0, < 3`
+ - `^0.2.3` is equivalent to `>=0.2.3 <0.3.0`
+ - `^0.2` is equivalent to `>=0.2.0 <0.3.0`
+ - `^0.0.3` is equivalent to `>=0.0.3 <0.0.4`
+ - `^0.0` is equivalent to `>=0.0.0 <0.1.0`
+ - `^0` is equivalent to `>=0.0.0 <1.0.0`
+
+# Validation
+
+In addition to testing a version against a constraint, a version can be validated
+against a constraint. When validation fails a slice of errors containing why a
+version didn't meet the constraint is returned. For example,
+
+ c, err := semver.NewConstraint("<= 1.2.3, >= 1.4")
+ if err != nil {
+ // Handle constraint not being parseable.
+ }
+
+ v, err := semver.NewVersion("1.3")
+ if err != nil {
+ // Handle version not being parseable.
+ }
+
+ // Validate a version against a constraint.
+ a, msgs := c.Validate(v)
+ // a is false
+ for _, m := range msgs {
+ fmt.Println(m)
+
+ // Loops over the errors which would read
+ // "1.3 is greater than 1.2.3"
+ // "1.3 is less than 1.4"
+ }
+*/
+package semver
diff --git a/vendor/github.com/Masterminds/semver/v3/fuzz.go b/vendor/github.com/Masterminds/semver/v3/fuzz.go
new file mode 100644
index 000000000..a242ad705
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/fuzz.go
@@ -0,0 +1,22 @@
+// +build gofuzz
+
+package semver
+
+func Fuzz(data []byte) int {
+ d := string(data)
+
+ // Test NewVersion
+ _, _ = NewVersion(d)
+
+ // Test StrictNewVersion
+ _, _ = StrictNewVersion(d)
+
+ // Test NewConstraint
+ _, _ = NewConstraint(d)
+
+ // The return value should be 0 normally, 1 if the priority in future tests
+ // should be increased, and -1 if future tests should skip passing in that
+ // data. We do not have a reason to change priority so 0 is always returned.
+ // There are example tests that do this.
+ return 0
+}
diff --git a/vendor/github.com/Masterminds/semver/v3/version.go b/vendor/github.com/Masterminds/semver/v3/version.go
new file mode 100644
index 000000000..7c4bed334
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/v3/version.go
@@ -0,0 +1,639 @@
+package semver
+
+import (
+ "bytes"
+ "database/sql/driver"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+// The compiled version of the regex created at init() is cached here so it
+// only needs to be created once.
+var versionRegex *regexp.Regexp
+
+var (
+ // ErrInvalidSemVer is returned when a version is found to be invalid
+ // during parsing.
+ ErrInvalidSemVer = errors.New("Invalid Semantic Version")
+
+ // ErrEmptyString is returned when an empty string is passed in for parsing.
+ ErrEmptyString = errors.New("Version string empty")
+
+ // ErrInvalidCharacters is returned when invalid characters are found as
+ // part of a version
+ ErrInvalidCharacters = errors.New("Invalid characters in version")
+
+ // ErrSegmentStartsZero is returned when a version segment starts with 0.
+ // This is invalid in SemVer.
+ ErrSegmentStartsZero = errors.New("Version segment starts with 0")
+
+ // ErrInvalidMetadata is returned when the metadata is an invalid format
+ ErrInvalidMetadata = errors.New("Invalid Metadata string")
+
+ // ErrInvalidPrerelease is returned when the pre-release is an invalid format
+ ErrInvalidPrerelease = errors.New("Invalid Prerelease string")
+)
+
+// semVerRegex is the regular expression used to parse a semantic version.
+const semVerRegex string = `v?([0-9]+)(\.[0-9]+)?(\.[0-9]+)?` +
+ `(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` +
+ `(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?`
+
+// Version represents a single semantic version.
+type Version struct {
+ major, minor, patch uint64
+ pre string
+ metadata string
+ original string
+}
+
+func init() {
+ versionRegex = regexp.MustCompile("^" + semVerRegex + "$")
+}
+
+const (
+ num string = "0123456789"
+ allowed string = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-" + num
+)
+
+// StrictNewVersion parses a given version and returns an instance of Version or
+// an error if unable to parse the version. Only parses valid semantic versions.
+// Performs checking that can find errors within the version.
+// If you want to coerce a version such as 1 or 1.2 and parse it as the 1.x
+// releases of semver did, use the NewVersion() function.
+func StrictNewVersion(v string) (*Version, error) {
+ // Parsing here does not use RegEx in order to increase performance and reduce
+ // allocations.
+
+ if len(v) == 0 {
+ return nil, ErrEmptyString
+ }
+
+ // Split the parts into [0]major, [1]minor, and [2]patch,prerelease,build
+ parts := strings.SplitN(v, ".", 3)
+ if len(parts) != 3 {
+ return nil, ErrInvalidSemVer
+ }
+
+ sv := &Version{
+ original: v,
+ }
+
+ // check for prerelease or build metadata
+ var extra []string
+ if strings.ContainsAny(parts[2], "-+") {
+ // Start with the build metadata first as it needs to be on the right
+ extra = strings.SplitN(parts[2], "+", 2)
+ if len(extra) > 1 {
+ // build metadata found
+ sv.metadata = extra[1]
+ parts[2] = extra[0]
+ }
+
+ extra = strings.SplitN(parts[2], "-", 2)
+ if len(extra) > 1 {
+ // prerelease found
+ sv.pre = extra[1]
+ parts[2] = extra[0]
+ }
+ }
+
+ // Validate the number segments are valid. This includes only having positive
+ // numbers and no leading 0's.
+ for _, p := range parts {
+ if !containsOnly(p, num) {
+ return nil, ErrInvalidCharacters
+ }
+
+ if len(p) > 1 && p[0] == '0' {
+ return nil, ErrSegmentStartsZero
+ }
+ }
+
+ // Extract the major, minor, and patch elements onto the returned Version
+ var err error
+ sv.major, err = strconv.ParseUint(parts[0], 10, 64)
+ if err != nil {
+ return nil, err
+ }
+
+ sv.minor, err = strconv.ParseUint(parts[1], 10, 64)
+ if err != nil {
+ return nil, err
+ }
+
+ sv.patch, err = strconv.ParseUint(parts[2], 10, 64)
+ if err != nil {
+ return nil, err
+ }
+
+ // No prerelease or build metadata found so returning now as a fastpath.
+ if sv.pre == "" && sv.metadata == "" {
+ return sv, nil
+ }
+
+ if sv.pre != "" {
+ if err = validatePrerelease(sv.pre); err != nil {
+ return nil, err
+ }
+ }
+
+ if sv.metadata != "" {
+ if err = validateMetadata(sv.metadata); err != nil {
+ return nil, err
+ }
+ }
+
+ return sv, nil
+}
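+
+// Illustrative behaviour of StrictNewVersion (a sketch derived from the
+// checks above):
+//
+//    StrictNewVersion("1.2.3")  // ok
+//    StrictNewVersion("v1.2.3") // error: a leading v is not strict SemVer
+//    StrictNewVersion("1.2")    // error: all three number parts are required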
+
+// NewVersion parses a given version and returns an instance of Version or
+// an error if unable to parse the version. If the version is SemVer-ish it
+// attempts to convert it to SemVer. If you want to validate it was a strict
+// semantic version at parse time see StrictNewVersion().
+func NewVersion(v string) (*Version, error) {
+ m := versionRegex.FindStringSubmatch(v)
+ if m == nil {
+ return nil, ErrInvalidSemVer
+ }
+
+ sv := &Version{
+ metadata: m[8],
+ pre: m[5],
+ original: v,
+ }
+
+ var err error
+ sv.major, err = strconv.ParseUint(m[1], 10, 64)
+ if err != nil {
+ return nil, fmt.Errorf("Error parsing version segment: %s", err)
+ }
+
+ if m[2] != "" {
+ sv.minor, err = strconv.ParseUint(strings.TrimPrefix(m[2], "."), 10, 64)
+ if err != nil {
+ return nil, fmt.Errorf("Error parsing version segment: %s", err)
+ }
+ } else {
+ sv.minor = 0
+ }
+
+ if m[3] != "" {
+ sv.patch, err = strconv.ParseUint(strings.TrimPrefix(m[3], "."), 10, 64)
+ if err != nil {
+ return nil, fmt.Errorf("Error parsing version segment: %s", err)
+ }
+ } else {
+ sv.patch = 0
+ }
+
+ // Perform some basic due diligence on the extra parts to ensure they are
+ // valid.
+
+ if sv.pre != "" {
+ if err = validatePrerelease(sv.pre); err != nil {
+ return nil, err
+ }
+ }
+
+ if sv.metadata != "" {
+ if err = validateMetadata(sv.metadata); err != nil {
+ return nil, err
+ }
+ }
+
+ return sv, nil
+}
+
+// New creates a new instance of Version with each of the parts passed in as
+// arguments instead of parsing a version string.
+func New(major, minor, patch uint64, pre, metadata string) *Version {
+ v := Version{
+ major: major,
+ minor: minor,
+ patch: patch,
+ pre: pre,
+ metadata: metadata,
+ original: "",
+ }
+
+ v.original = v.String()
+
+ return &v
+}
+
+// MustParse parses a given version and panics on error.
+func MustParse(v string) *Version {
+ sv, err := NewVersion(v)
+ if err != nil {
+ panic(err)
+ }
+ return sv
+}
+
+// String converts a Version object to a string.
+// Note, if the original version contained a leading v this version will not.
+// See the Original() method to retrieve the original value. Semantic Versions
+// don't contain a leading v per the spec; instead it's optional for
+// implementations.
+func (v Version) String() string {
+ var buf bytes.Buffer
+
+ fmt.Fprintf(&buf, "%d.%d.%d", v.major, v.minor, v.patch)
+ if v.pre != "" {
+ fmt.Fprintf(&buf, "-%s", v.pre)
+ }
+ if v.metadata != "" {
+ fmt.Fprintf(&buf, "+%s", v.metadata)
+ }
+
+ return buf.String()
+}
+
+// Original returns the original value passed in to be parsed.
+func (v *Version) Original() string {
+ return v.original
+}
+
+// Major returns the major version.
+func (v Version) Major() uint64 {
+ return v.major
+}
+
+// Minor returns the minor version.
+func (v Version) Minor() uint64 {
+ return v.minor
+}
+
+// Patch returns the patch version.
+func (v Version) Patch() uint64 {
+ return v.patch
+}
+
+// Prerelease returns the pre-release version.
+func (v Version) Prerelease() string {
+ return v.pre
+}
+
+// Metadata returns the metadata on the version.
+func (v Version) Metadata() string {
+ return v.metadata
+}
+
+// originalVPrefix returns the original 'v' prefix if any.
+func (v Version) originalVPrefix() string {
+ // Note, only lowercase v is supported as a prefix by the parser.
+ if v.original != "" && v.original[:1] == "v" {
+ return v.original[:1]
+ }
+ return ""
+}
+
+// IncPatch produces the next patch version.
+// If the current version does not have prerelease/metadata information,
+// it unsets metadata and prerelease values and increments the patch number.
+// If the current version has any prerelease or metadata information,
+// it unsets both values and keeps the current patch value.
+func (v Version) IncPatch() Version {
+ vNext := v
+ // according to http://semver.org/#spec-item-9
+ // Pre-release versions have a lower precedence than the associated normal version.
+ // according to http://semver.org/#spec-item-10
+ // Build metadata SHOULD be ignored when determining version precedence.
+ if v.pre != "" {
+ vNext.metadata = ""
+ vNext.pre = ""
+ } else {
+ vNext.metadata = ""
+ vNext.pre = ""
+ vNext.patch = v.patch + 1
+ }
+ vNext.original = v.originalVPrefix() + "" + vNext.String()
+ return vNext
+}
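+
+// Illustrative behaviour (derived from the rules above):
+//
+//    MustParse("1.2.3").IncPatch()        // 1.2.4
+//    MustParse("1.2.3-beta.1").IncPatch() // 1.2.3 (drops the prerelease)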
+
+// IncMinor produces the next minor version.
+// Sets patch to 0.
+// Increments minor number.
+// Unsets metadata.
+// Unsets prerelease status.
+func (v Version) IncMinor() Version {
+ vNext := v
+ vNext.metadata = ""
+ vNext.pre = ""
+ vNext.patch = 0
+ vNext.minor = v.minor + 1
+ vNext.original = v.originalVPrefix() + "" + vNext.String()
+ return vNext
+}
+
+// IncMajor produces the next major version.
+// Sets patch to 0.
+// Sets minor to 0.
+// Increments major number.
+// Unsets metadata.
+// Unsets prerelease status.
+func (v Version) IncMajor() Version {
+ vNext := v
+ vNext.metadata = ""
+ vNext.pre = ""
+ vNext.patch = 0
+ vNext.minor = 0
+ vNext.major = v.major + 1
+ vNext.original = v.originalVPrefix() + vNext.String()
+ return vNext
+}
+
+// SetPrerelease defines the prerelease value.
+// The value must not include the leading 'hyphen' used in the version string;
+// it is added automatically.
+func (v Version) SetPrerelease(prerelease string) (Version, error) {
+ vNext := v
+ if len(prerelease) > 0 {
+ if err := validatePrerelease(prerelease); err != nil {
+ return vNext, err
+ }
+ }
+ vNext.pre = prerelease
+ vNext.original = v.originalVPrefix() + vNext.String()
+ return vNext, nil
+}
+
+// SetMetadata defines metadata value.
+// The value must not include the leading 'plus' used in the version string;
+// it is added automatically.
+func (v Version) SetMetadata(metadata string) (Version, error) {
+ vNext := v
+ if len(metadata) > 0 {
+ if err := validateMetadata(metadata); err != nil {
+ return vNext, err
+ }
+ }
+ vNext.metadata = metadata
+ vNext.original = v.originalVPrefix() + vNext.String()
+ return vNext, nil
+}
+
+// LessThan tests if one version is less than another one.
+func (v *Version) LessThan(o *Version) bool {
+ return v.Compare(o) < 0
+}
+
+// GreaterThan tests if one version is greater than another one.
+func (v *Version) GreaterThan(o *Version) bool {
+ return v.Compare(o) > 0
+}
+
+// Equal tests if two versions are equal to each other.
+// Note, versions can be equal with different metadata since metadata
+// is not considered part of the comparable version.
+func (v *Version) Equal(o *Version) bool {
+ return v.Compare(o) == 0
+}
+
+// Compare compares this version to another one. It returns -1, 0, or 1 if
+// this version is smaller than, equal to, or larger than the other version,
+// respectively.
+//
+// Versions are compared by X.Y.Z. Build metadata is ignored. A prerelease
+// version is lower than the version without a prerelease. Compare always
+// takes prereleases into account. To work with ranges using typical range
+// syntaxes, which skip prereleases unless the range explicitly asks for them,
+// use Constraints instead.
+func (v *Version) Compare(o *Version) int {
+ // Compare the major, minor, and patch version for differences. If a
+ // difference is found return the comparison.
+ if d := compareSegment(v.Major(), o.Major()); d != 0 {
+ return d
+ }
+ if d := compareSegment(v.Minor(), o.Minor()); d != 0 {
+ return d
+ }
+ if d := compareSegment(v.Patch(), o.Patch()); d != 0 {
+ return d
+ }
+
+ // At this point the major, minor, and patch versions are the same.
+ ps := v.pre
+ po := o.Prerelease()
+
+ if ps == "" && po == "" {
+ return 0
+ }
+ if ps == "" {
+ return 1
+ }
+ if po == "" {
+ return -1
+ }
+
+ return comparePrerelease(ps, po)
+}
+
+// UnmarshalJSON implements the json.Unmarshaler interface.
+func (v *Version) UnmarshalJSON(b []byte) error {
+ var s string
+ if err := json.Unmarshal(b, &s); err != nil {
+ return err
+ }
+ temp, err := NewVersion(s)
+ if err != nil {
+ return err
+ }
+ v.major = temp.major
+ v.minor = temp.minor
+ v.patch = temp.patch
+ v.pre = temp.pre
+ v.metadata = temp.metadata
+ v.original = temp.original
+ return nil
+}
+
+// MarshalJSON implements the json.Marshaler interface.
+func (v Version) MarshalJSON() ([]byte, error) {
+ return json.Marshal(v.String())
+}
+
+// UnmarshalText implements the encoding.TextUnmarshaler interface.
+func (v *Version) UnmarshalText(text []byte) error {
+ temp, err := NewVersion(string(text))
+ if err != nil {
+ return err
+ }
+
+ *v = *temp
+
+ return nil
+}
+
+// MarshalText implements the encoding.TextMarshaler interface.
+func (v Version) MarshalText() ([]byte, error) {
+ return []byte(v.String()), nil
+}
+
+// Scan implements the sql.Scanner interface.
+func (v *Version) Scan(value interface{}) error {
+ var s string
+ s, _ = value.(string)
+ temp, err := NewVersion(s)
+ if err != nil {
+ return err
+ }
+ v.major = temp.major
+ v.minor = temp.minor
+ v.patch = temp.patch
+ v.pre = temp.pre
+ v.metadata = temp.metadata
+ v.original = temp.original
+ return nil
+}
+
+// Value implements the driver.Valuer interface.
+func (v Version) Value() (driver.Value, error) {
+ return v.String(), nil
+}
+
+func compareSegment(v, o uint64) int {
+ if v < o {
+ return -1
+ }
+ if v > o {
+ return 1
+ }
+
+ return 0
+}
+
+func comparePrerelease(v, o string) int {
+ // Split the prerelease versions into their parts. The separator, per the
+ // spec, is a period.
+ sparts := strings.Split(v, ".")
+ oparts := strings.Split(o, ".")
+
+ // Find the longer length of the parts to know how many loop iterations to
+ // go through.
+ slen := len(sparts)
+ olen := len(oparts)
+
+ l := slen
+ if olen > slen {
+ l = olen
+ }
+
+ // Iterate over each part of the prereleases to compare the differences.
+ for i := 0; i < l; i++ {
+ // Since the length of the parts can be different we need to create
+ // a placeholder. This is to avoid out of bounds issues.
+ stemp := ""
+ if i < slen {
+ stemp = sparts[i]
+ }
+
+ otemp := ""
+ if i < olen {
+ otemp = oparts[i]
+ }
+
+ d := comparePrePart(stemp, otemp)
+ if d != 0 {
+ return d
+ }
+ }
+
+ // Reaching here means two versions are of equal value but have different
+ // metadata (the part following a +). They are not identical in string form
+ // but the version comparison finds them to be equal.
+ return 0
+}
+
+func comparePrePart(s, o string) int {
+ // Fastpath if they are equal
+ if s == o {
+ return 0
+ }
+
+ // When s or o is empty we can use the other to determine
+ // the result.
+ if s == "" {
+ if o != "" {
+ return -1
+ }
+ return 1
+ }
+
+ if o == "" {
+ if s != "" {
+ return 1
+ }
+ return -1
+ }
+
+ // When comparing strings "99" is greater than "103". To handle
+ // cases like this we need to detect numbers and compare them. According
+ // to the semver spec, numbers are always positive. If there is a - at the
+ // start, like -99, the part is evaluated as an alphanum. Per the spec,
+ // numeric identifiers always have lower precedence than alphanumeric ones.
+ // Parsing as Uints because negative numbers are ignored.
+
+ oi, n1 := strconv.ParseUint(o, 10, 64)
+ si, n2 := strconv.ParseUint(s, 10, 64)
+
+ // The case where both are strings: compare the strings
+ if n1 != nil && n2 != nil {
+ if s > o {
+ return 1
+ }
+ return -1
+ } else if n1 != nil {
+ // o is a string and s is a number
+ return -1
+ } else if n2 != nil {
+ // s is a string and o is a number
+ return 1
+ }
+ // Both are numbers
+ if si > oi {
+ return 1
+ }
+ return -1
+}
+
+// Like strings.ContainsAny, but checks that s contains only runes from comp.
+func containsOnly(s string, comp string) bool {
+ return strings.IndexFunc(s, func(r rune) bool {
+ return !strings.ContainsRune(comp, r)
+ }) == -1
+}
+
+// From the spec, "Identifiers MUST comprise only
+// ASCII alphanumerics and hyphen [0-9A-Za-z-]. Identifiers MUST NOT be empty.
+// Numeric identifiers MUST NOT include leading zeroes.". These segments can
+// be dot separated.
+func validatePrerelease(p string) error {
+ eparts := strings.Split(p, ".")
+ for _, p := range eparts {
+ if containsOnly(p, num) {
+ if len(p) > 1 && p[0] == '0' {
+ return ErrSegmentStartsZero
+ }
+ } else if !containsOnly(p, allowed) {
+ return ErrInvalidPrerelease
+ }
+ }
+
+ return nil
+}
+
+// From the spec, "Build metadata MAY be denoted by
+// appending a plus sign and a series of dot separated identifiers immediately
+// following the patch or pre-release version. Identifiers MUST comprise only
+// ASCII alphanumerics and hyphen [0-9A-Za-z-]. Identifiers MUST NOT be empty."
+func validateMetadata(m string) error {
+ eparts := strings.Split(m, ".")
+ for _, p := range eparts {
+ if !containsOnly(p, allowed) {
+ return ErrInvalidMetadata
+ }
+ }
+ return nil
+}
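For reference, a minimal illustrative sketch of how the vendored semver API
above fits together (the version strings are arbitrary):

```go
package main

import (
	"fmt"

	"github.com/Masterminds/semver/v3"
)

func main() {
	pre := semver.MustParse("v1.2.3-beta.1+build.7")
	rel := semver.MustParse("1.2.3")

	// A prerelease sorts below the associated normal version, and build
	// metadata is ignored when determining precedence.
	fmt.Println(pre.LessThan(rel)) // true
	fmt.Println(pre.String())      // 1.2.3-beta.1+build.7 (leading v dropped)
	fmt.Println(pre.Original())    // v1.2.3-beta.1+build.7

	// The Inc* helpers clear prerelease/metadata and bump the requested part.
	fmt.Println(rel.IncMinor().String()) // 1.3.0
}
```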
diff --git a/vendor/github.com/Masterminds/sprig/v3/.gitignore b/vendor/github.com/Masterminds/sprig/v3/.gitignore
new file mode 100644
index 000000000..5e3002f88
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/.gitignore
@@ -0,0 +1,2 @@
+vendor/
+/.glide
diff --git a/vendor/github.com/Masterminds/sprig/v3/CHANGELOG.md b/vendor/github.com/Masterminds/sprig/v3/CHANGELOG.md
new file mode 100644
index 000000000..2ce45dd4e
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/CHANGELOG.md
@@ -0,0 +1,383 @@
+# Changelog
+
+## Release 3.2.3 (2022-11-29)
+
+### Changed
+
+- Updated docs (thanks @book987 @aJetHorn @neelayu @pellizzetti @apricote @SaigyoujiYuyuko233 @AlekSi)
+- #348: Updated huandu/xstrings which fixed a snake case bug (thanks @yxxhero)
+- #353: Updated masterminds/semver which included bug fixes
+- #354: Updated golang.org/x/crypto which included bug fixes
+
+## Release 3.2.2 (2021-02-04)
+
+This is a re-release of 3.2.1 to satisfy a requirement of the Go module system.
+
+## Release 3.2.1 (2021-02-04)
+
+### Changed
+
+- Upgraded `Masterminds/goutils` to `v1.1.1`. see the [Security Advisory](https://github.com/Masterminds/goutils/security/advisories/GHSA-xg2h-wx96-xgxr)
+
+## Release 3.2.0 (2020-12-14)
+
+### Added
+
+- #211: Added randInt function (thanks @kochurovro)
+- #223: Added fromJson and mustFromJson functions (thanks @mholt)
+- #242: Added a bcrypt function (thanks @robbiet480)
+- #253: Added randBytes function (thanks @MikaelSmith)
+- #254: Added dig function for dicts (thanks @nyarly)
+- #257: Added regexQuoteMeta for quoting regex metadata (thanks @rheaton)
+- #261: Added filepath functions osBase, osDir, osExt, osClean, osIsAbs (thanks @zugl)
+- #268: Added and and all functions for testing conditions (thanks @phuslu)
+- #181: Added float64 arithmetic addf, add1f, subf, divf, mulf, maxf, and minf
+ (thanks @andrewmostello)
+- #265: Added chunk function to split array into smaller arrays (thanks @karelbilek)
+- #270: Extend certificate functions to handle non-RSA keys + add support for
+ ed25519 keys (thanks @misberner)
+
+### Changed
+
+- Removed testing and support for Go 1.12. ed25519 support requires Go 1.13 or newer
+- Using semver 3.1.1 and mergo 0.3.11
+
+### Fixed
+
+- #249: Fix htmlDateInZone example (thanks @spawnia)
+
+NOTE: The dependency github.com/imdario/mergo reverted the breaking change in
+0.3.9 via 0.3.10 release.
+
+## Release 3.1.0 (2020-04-16)
+
+NOTE: The dependency github.com/imdario/mergo made a behavior change in 0.3.9
+that impacts sprig functionality. Do not use sprig with a version newer than 0.3.8.
+
+### Added
+
+- #225: Added support for generating htpasswd hash (thanks @rustycl0ck)
+- #224: Added duration filter (thanks @frebib)
+- #205: Added `seq` function (thanks @thadc23)
+
+### Changed
+
+- #203: Unlambda functions with correct signature (thanks @muesli)
+- #236: Updated the license formatting for GitHub display purposes
+- #238: Updated package dependency versions. Note, mergo not updated to 0.3.9
+ as it causes a breaking change for sprig. That issue is tracked at
+ https://github.com/imdario/mergo/issues/139
+
+### Fixed
+
+- #229: Fix `seq` example in docs (thanks @kalmant)
+
+## Release 3.0.2 (2019-12-13)
+
+### Fixed
+
+- #220: Updating to semver v3.0.3 to fix issue with <= ranges
+- #218: fix typo elyptical->elliptic in ecdsa key description (thanks @laverya)
+
+## Release 3.0.1 (2019-12-08)
+
+### Fixed
+
+- #212: Updated semver fixing broken constraint checking with ^0.0
+
+## Release 3.0.0 (2019-10-02)
+
+### Added
+
+- #187: Added durationRound function (thanks @yjp20)
+- #189: Added numerous template functions that return errors rather than panic (thanks @nrvnrvn)
+- #193: Added toRawJson support (thanks @Dean-Coakley)
+- #197: Added get support to dicts (thanks @Dean-Coakley)
+
+### Changed
+
+- #186: Moving dependency management to Go modules
+- #186: Updated semver to v3. This has changes in the way ^ is handled
+- #194: Updated documentation on merging and how it copies. Added example using deepCopy
+- #196: trunc now supports negative values (thanks @Dean-Coakley)
+
+## Release 2.22.0 (2019-10-02)
+
+### Added
+
+- #173: Added getHostByName function to resolve dns names to ips (thanks @fcgravalos)
+- #195: Added deepCopy function for use with dicts
+
+### Changed
+
+- Updated merge and mergeOverwrite documentation to explain copying and how to
+ use deepCopy with it
+
+## Release 2.21.0 (2019-09-18)
+
+### Added
+
+- #122: Added encryptAES/decryptAES functions (thanks @n0madic)
+- #128: Added toDecimal support (thanks @Dean-Coakley)
+- #169: Added list concat (thanks @astorath)
+- #174: Added deepEqual function (thanks @bonifaido)
+- #170: Added url parse and join functions (thanks @astorath)
+
+### Changed
+
+- #171: Updated glide config for Google UUID to v1 and to add ranges to semver and testify
+
+### Fixed
+
+- #172: Fix semver wildcard example (thanks @piepmatz)
+- #175: Fix dateInZone doc example (thanks @s3than)
+
+## Release 2.20.0 (2019-06-18)
+
+### Added
+
+- #164: Adding function to get unix epoch for a time (@mattfarina)
+- #166: Adding tests for date_in_zone (@mattfarina)
+
+### Changed
+
+- #144: Fix function comments based on best practices from Effective Go (@CodeLingoTeam)
+- #150: Handles pointer type for time.Time in "htmlDate" (@mapreal19)
+- #161, #157, #160, #153, #158, #156, #155, #159, #152 documentation updates (@badeadan)
+
+## Release 2.19.0 (2019-03-02)
+
+IMPORTANT: This release reverts a change from 2.18.0
+
+In the previous release (2.18), we prematurely merged a partial change to the crypto functions that led to creating two sets of crypto functions (I blame @technosophos -- since that's me). This release rolls back that change, and does what was originally intended: It alters the existing crypto functions to use secure random.
+
+We debated whether this classifies as a change worthy of major revision, but given the proximity to the last release, we have decided that treating 2.18 as a faulty release is the correct course of action. We apologize for any inconvenience.
+
+### Changed
+
+- Fix substr panic 35fb796 (Alexey igrychev)
+- Remove extra period 1eb7729 (Matthew Lorimor)
+- Make random string functions use crypto by default 6ceff26 (Matthew Lorimor)
+- README edits/fixes/suggestions 08fe136 (Lauri Apple)
+
+
+## Release 2.18.0 (2019-02-12)
+
+### Added
+
+- Added mergeOverwrite function
+- cryptographic functions that use secure random (see fe1de12)
+
+### Changed
+
+- Improve documentation of regexMatch function, resolves #139 90b89ce (Jan Tagscherer)
+- Handle has for nil list 9c10885 (Daniel Cohen)
+- Document behaviour of mergeOverwrite fe0dbe9 (Lukas Rieder)
+- doc: adds missing documentation. 4b871e6 (Fernandez Ludovic)
+- Replace outdated goutils imports 01893d2 (Matthew Lorimor)
+- Surface crypto secure random strings from goutils fe1de12 (Matthew Lorimor)
+- Handle untyped nil values as parameters to string functions 2b2ec8f (Morten Torkildsen)
+
+### Fixed
+
+- Fix dict merge issue and provide mergeOverwrite .dst .src1 to overwrite from src -> dst 4c59c12 (Lukas Rieder)
+- Fix substr var names and comments d581f80 (Dean Coakley)
+- Fix substr documentation 2737203 (Dean Coakley)
+
+## Release 2.17.1 (2019-01-03)
+
+### Fixed
+
+The 2.17.0 release did not have a version pinned for xstrings, which caused compilation failures when xstrings < 1.2 was used. This adds the correct version string to glide.yaml.
+
+## Release 2.17.0 (2019-01-03)
+
+### Added
+
+- adds adler32sum function and test 6908fc2 (marshallford)
+- Added kebabcase function ca331a1 (Ilyes512)
+
+### Changed
+
+- Update goutils to 1.1.0 4e1125d (Matt Butcher)
+
+### Fixed
+
+- Fix 'has' documentation e3f2a85 (dean-coakley)
+- docs(dict): fix typo in pick example dc424f9 (Dustin Specker)
+- fixes spelling errors... not sure how that happened 4cf188a (marshallford)
+
+## Release 2.16.0 (2018-08-13)
+
+### Added
+
+- add splitn function fccb0b0 (Helgi Þorbjörnsson)
+- Add slice func df28ca7 (gongdo)
+- Generate serial number a3bdffd (Cody Coons)
+- Extract values of dict with values function df39312 (Lawrence Jones)
+
+### Changed
+
+- Modify panic message for list.slice ae38335 (gongdo)
+- Minor improvement in code quality - Removed an unreachable piece of code at defaults.go#L26:6 - Resolve formatting issues. 5834241 (Abhishek Kashyap)
+- Remove duplicated documentation 1d97af1 (Matthew Fisher)
+- Test on go 1.11 49df809 (Helgi Þormar Þorbjörnsson)
+
+### Fixed
+
+- Fix file permissions c5f40b5 (gongdo)
+- Fix example for buildCustomCert 7779e0d (Tin Lam)
+
+## Release 2.15.0 (2018-04-02)
+
+### Added
+
+- #68 and #69: Add json helpers to docs (thanks @arunvelsriram)
+- #66: Add ternary function (thanks @binoculars)
+- #67: Allow keys function to take multiple dicts (thanks @binoculars)
+- #89: Added sha1sum to crypto function (thanks @benkeil)
+- #81: Allow customizing Root CA that used by genSignedCert (thanks @chenzhiwei)
+- #92: Add travis testing for go 1.10
+- #93: Adding appveyor config for windows testing
+
+### Changed
+
+- #90: Updating to more recent dependencies
+- #73: replace satori/go.uuid with google/uuid (thanks @petterw)
+
+### Fixed
+
+- #76: Fixed documentation typos (thanks @Thiht)
+- Fixed rounding issue on the `ago` function. Note, this removes support for Go 1.8 and older
+
+## Release 2.14.1 (2017-12-01)
+
+### Fixed
+
+- #60: Fix typo in function name documentation (thanks @neil-ca-moore)
+- #61: Removing line with {{ due to blocking github pages generation
+- #64: Update the list functions to handle int, string, and other slices for compatibility
+
+## Release 2.14.0 (2017-10-06)
+
+This new version of Sprig adds a set of functions for generating and working with SSL certificates.
+
+- `genCA` generates an SSL Certificate Authority
+- `genSelfSignedCert` generates an SSL self-signed certificate
+- `genSignedCert` generates an SSL certificate and key based on a given CA
+
+## Release 2.13.0 (2017-09-18)
+
+This release adds new functions, including:
+
+- `regexMatch`, `regexFindAll`, `regexFind`, `regexReplaceAll`, `regexReplaceAllLiteral`, and `regexSplit` to work with regular expressions
+- `floor`, `ceil`, and `round` math functions
+- `toDate` converts a string to a date
+- `nindent` is just like `indent` but also prepends a new line
+- `ago` returns the time from `time.Now`
+
+### Added
+
+- #40: Added basic regex functionality (thanks @alanquillin)
+- #41: Added ceil floor and round functions (thanks @alanquillin)
+- #48: Added toDate function (thanks @andreynering)
+- #50: Added nindent function (thanks @binoculars)
+- #46: Added ago function (thanks @slayer)
+
+### Changed
+
+- #51: Updated godocs to include new string functions (thanks @curtisallen)
+- #49: Added ability to merge multiple dicts (thanks @binoculars)
+
+## Release 2.12.0 (2017-05-17)
+
+- `snakecase`, `camelcase`, and `shuffle` are three new string functions
+- `fail` allows you to bail out of a template render when conditions are not met
+
+## Release 2.11.0 (2017-05-02)
+
+- Added `toJson` and `toPrettyJson`
+- Added `merge`
+- Refactored documentation
+
+## Release 2.10.0 (2017-03-15)
+
+- Added `semver` and `semverCompare` for Semantic Versions
+- `list` replaces `tuple`
+- Fixed issue with `join`
+- Added `first`, `last`, `initial`, `rest`, `prepend`, `append`, `toString`, `toStrings`, `sortAlpha`, `reverse`, `coalesce`, `pluck`, `pick`, `compact`, `keys`, `omit`, `uniq`, `has`, `without`
+
+## Release 2.9.0 (2017-02-23)
+
+- Added `splitList` to split a list
+- Added crypto functions of `genPrivateKey` and `derivePassword`
+
+## Release 2.8.0 (2016-12-21)
+
+- Added access to several path functions (`base`, `dir`, `clean`, `ext`, and `abs`)
+- Added functions for _mutating_ dictionaries (`set`, `unset`, `hasKey`)
+
+## Release 2.7.0 (2016-12-01)
+
+- Added `sha256sum` to generate a hash of an input
+- Added functions to convert a numeric or string to `int`, `int64`, `float64`
+
+## Release 2.6.0 (2016-10-03)
+
+- Added a `uuidv4` template function for generating UUIDs inside of a template.
+
+## Release 2.5.0 (2016-08-19)
+
+- New `trimSuffix`, `trimPrefix`, `hasSuffix`, and `hasPrefix` functions
+- New aliases have been added for a few functions that didn't follow the naming conventions (`trimAll` and `abbrevBoth`)
+- `trimall` and `abbrevboth` (notice the case) are deprecated and will be removed in 3.0.0
+
+## Release 2.4.0 (2016-08-16)
+
+- Adds two functions: `until` and `untilStep`
+
+## Release 2.3.0 (2016-06-21)
+
+- cat: Concatenate strings with whitespace separators.
+- replace: Replace parts of a string: `replace " " "-" "Me First"` renders "Me-First"
+- plural: Format plurals: `len "foo" | plural "one foo" "many foos"` renders "many foos"
+- indent: Indent blocks of text in a way that is sensitive to "\n" characters.
+
+## Release 2.2.0 (2016-04-21)
+
+- Added a `genPrivateKey` function (Thanks @bacongobbler)
+
+## Release 2.1.0 (2016-03-30)
+
+- `default` now prints the default value when it does not receive a value down the pipeline. It is much safer now to do `{{.Foo | default "bar"}}`.
+- Added accessors for "hermetic" functions. These return only functions that, when given the same input, produce the same output.
+
+## Release 2.0.0 (2016-03-29)
+
+Because we switched from `int` to `int64` as the return value for all integer math functions, the library's major version number has been incremented.
+
+- `min` complements `max` (formerly `biggest`)
+- `empty` indicates that a value is the empty value for its type
+- `tuple` creates a tuple inside of a template: `{{$t := tuple "a" "b" "c"}}`
+- `dict` creates a dictionary inside of a template `{{$d := dict "key1" "val1" "key2" "val2"}}`
+- Date formatters have been added for HTML dates (as used in `date` input fields)
+- Integer math functions can convert from a number of types, including `string` (via `strconv.ParseInt`).
+
+## Release 1.2.0 (2016-02-01)
+
+- Added quote and squote
+- Added b32enc and b32dec
+- add now takes varargs
+- biggest now takes varargs
+
+## Release 1.1.0 (2015-12-29)
+
+- #4: Added contains function: strings.Contains, but with the arguments
+ switched to simplify common pipelines. (thanks krancour)
+- Added Travis-CI testing support
+
+## Release 1.0.0 (2015-12-23)
+
+- Initial release
diff --git a/vendor/github.com/Masterminds/sprig/v3/LICENSE.txt b/vendor/github.com/Masterminds/sprig/v3/LICENSE.txt
new file mode 100644
index 000000000..f311b1eaa
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/LICENSE.txt
@@ -0,0 +1,19 @@
+Copyright (C) 2013-2020 Masterminds
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/Masterminds/sprig/v3/Makefile b/vendor/github.com/Masterminds/sprig/v3/Makefile
new file mode 100644
index 000000000..78d409cde
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/Makefile
@@ -0,0 +1,9 @@
+.PHONY: test
+test:
+ @echo "==> Running tests"
+ GO111MODULE=on go test -v
+
+.PHONY: test-cover
+test-cover:
+ @echo "==> Running Tests with coverage"
+ GO111MODULE=on go test -cover .
diff --git a/vendor/github.com/Masterminds/sprig/v3/README.md b/vendor/github.com/Masterminds/sprig/v3/README.md
new file mode 100644
index 000000000..3e22c60e1
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/README.md
@@ -0,0 +1,100 @@
+# Sprig: Template functions for Go templates
+
+[![GoDoc](https://img.shields.io/static/v1?label=godoc&message=reference&color=blue)](https://pkg.go.dev/github.com/Masterminds/sprig/v3)
+[![Go Report Card](https://goreportcard.com/badge/github.com/Masterminds/sprig)](https://goreportcard.com/report/github.com/Masterminds/sprig)
+[![Stability: Sustained](https://masterminds.github.io/stability/sustained.svg)](https://masterminds.github.io/stability/sustained.html)
+[![](https://github.com/Masterminds/sprig/workflows/Tests/badge.svg)](https://github.com/Masterminds/sprig/actions)
+
+The Go language comes with a [built-in template
+language](http://golang.org/pkg/text/template/), but not
+very many template functions. Sprig is a library that provides more than 100 commonly
+used template functions.
+
+It is inspired by the template functions found in
+[Twig](http://twig.sensiolabs.org/documentation) and in various
+JavaScript libraries, such as [underscore.js](http://underscorejs.org/).
+
+## IMPORTANT NOTES
+
+Sprig leverages [mergo](https://github.com/imdario/mergo) to handle merges. In
+its v0.3.9 release, there was a behavior change that impacts merging template
+functions in sprig. It is currently recommended to use v0.3.10 or later of that package.
+Using v0.3.9 will cause sprig tests to fail.
+
+## Package Versions
+
+There are two active major versions of the `sprig` package.
+
+* v3 is the currently stable release series, on the `master` branch. The Go API
+ should remain compatible with v2, the previous stable version. Behavior
+ changes behind some functions are the reason for the new major version.
+* v2 is the previous stable release series. It has been more than three years since
+ the initial release of v2. You can read the documentation and see the code
+ on the [release-2](https://github.com/Masterminds/sprig/tree/release-2) branch.
+ Bug fixes to this major version will continue for some time.
+
+## Usage
+
+**Template developers**: Please use Sprig's [function documentation](http://masterminds.github.io/sprig/) for
+detailed instructions and code snippets for the >100 template functions available.
+
+**Go developers**: If you'd like to include Sprig as a library in your program,
+our API documentation is available [at GoDoc.org](http://godoc.org/github.com/Masterminds/sprig).
+
+For standard usage, read on.
+
+### Load the Sprig library
+
+To load the Sprig `FuncMap`:
+
+```go
+import (
+ "html/template"
+
+ "github.com/Masterminds/sprig/v3"
+)
+
+// This example illustrates that the FuncMap *must* be set before the
+// templates themselves are loaded.
+tpl := template.Must(
+ template.New("base").Funcs(sprig.FuncMap()).ParseGlob("*.html"),
+)
+```
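+
+Once the `FuncMap` is registered, the parsed templates execute as usual; a
+minimal sketch (the template name `base` and the `data` value here are
+illustrative):
+
+```go
+if err := tpl.ExecuteTemplate(os.Stdout, "base", data); err != nil {
+ log.Fatal(err)
+}
+```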
+
+### Calling the functions inside of templates
+
+By convention, all functions are lowercase. This seems to follow the Go
+idiom for template functions (as opposed to template methods, which are
+TitleCase). For example, this:
+
+```
+{{ "hello!" | upper | repeat 5 }}
+```
+
+produces this:
+
+```
+HELLO!HELLO!HELLO!HELLO!HELLO!
+```
+
+## Principles Driving Our Function Selection
+
+We followed these principles to decide which functions to add and how to implement them:
+
+- Use template functions to build layout. The following
+ types of operations are within the domain of template functions:
+ - Formatting
+ - Layout
+ - Simple type conversions
+ - Utilities that assist in handling common formatting and layout needs (e.g. arithmetic)
+- Template functions should not return errors unless there is no way to print
+ a sensible value. For example, converting a string to an integer should not
+ produce an error if conversion fails. Instead, it should display a default
+ value.
+- Simple math is necessary for grid layouts, pagers, and so on. Complex math
+ (anything other than arithmetic) should be done outside of templates.
+- Template functions only deal with the data passed into them. They never retrieve
+ data from a source.
+- Finally, do not override core Go template functions.
diff --git a/vendor/github.com/Masterminds/sprig/v3/crypto.go b/vendor/github.com/Masterminds/sprig/v3/crypto.go
new file mode 100644
index 000000000..13a5cd559
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/crypto.go
@@ -0,0 +1,653 @@
+package sprig
+
+import (
+ "bytes"
+ "crypto"
+ "crypto/aes"
+ "crypto/cipher"
+ "crypto/dsa"
+ "crypto/ecdsa"
+ "crypto/ed25519"
+ "crypto/elliptic"
+ "crypto/hmac"
+ "crypto/rand"
+ "crypto/rsa"
+ "crypto/sha1"
+ "crypto/sha256"
+ "crypto/x509"
+ "crypto/x509/pkix"
+ "encoding/asn1"
+ "encoding/base64"
+ "encoding/binary"
+ "encoding/hex"
+ "encoding/pem"
+ "errors"
+ "fmt"
+ "hash/adler32"
+ "io"
+ "math/big"
+ "net"
+ "time"
+
+ "strings"
+
+ "github.com/google/uuid"
+ bcrypt_lib "golang.org/x/crypto/bcrypt"
+ "golang.org/x/crypto/scrypt"
+)
+
+func sha256sum(input string) string {
+ hash := sha256.Sum256([]byte(input))
+ return hex.EncodeToString(hash[:])
+}
+
+func sha1sum(input string) string {
+ hash := sha1.Sum([]byte(input))
+ return hex.EncodeToString(hash[:])
+}
+
+func adler32sum(input string) string {
+ hash := adler32.Checksum([]byte(input))
+ return fmt.Sprintf("%d", hash)
+}
+
+func bcrypt(input string) string {
+ hash, err := bcrypt_lib.GenerateFromPassword([]byte(input), bcrypt_lib.DefaultCost)
+ if err != nil {
+ return fmt.Sprintf("failed to encrypt string with bcrypt: %s", err)
+ }
+
+ return string(hash)
+}
+
+func htpasswd(username string, password string) string {
+ if strings.Contains(username, ":") {
+ return fmt.Sprintf("invalid username: %s", username)
+ }
+ return fmt.Sprintf("%s:%s", username, bcrypt(password))
+}
+
+func randBytes(count int) (string, error) {
+ buf := make([]byte, count)
+ if _, err := rand.Read(buf); err != nil {
+ return "", err
+ }
+ return base64.StdEncoding.EncodeToString(buf), nil
+}
+
+// uuidv4 provides a safe and secure UUID v4 implementation
+func uuidv4() string {
+ return uuid.New().String()
+}
+
+var masterPasswordSeed = "com.lyndir.masterpassword"
+
+var passwordTypeTemplates = map[string][][]byte{
+ "maximum": {[]byte("anoxxxxxxxxxxxxxxxxx"), []byte("axxxxxxxxxxxxxxxxxno")},
+ "long": {[]byte("CvcvnoCvcvCvcv"), []byte("CvcvCvcvnoCvcv"), []byte("CvcvCvcvCvcvno"), []byte("CvccnoCvcvCvcv"), []byte("CvccCvcvnoCvcv"),
+ []byte("CvccCvcvCvcvno"), []byte("CvcvnoCvccCvcv"), []byte("CvcvCvccnoCvcv"), []byte("CvcvCvccCvcvno"), []byte("CvcvnoCvcvCvcc"),
+ []byte("CvcvCvcvnoCvcc"), []byte("CvcvCvcvCvccno"), []byte("CvccnoCvccCvcv"), []byte("CvccCvccnoCvcv"), []byte("CvccCvccCvcvno"),
+ []byte("CvcvnoCvccCvcc"), []byte("CvcvCvccnoCvcc"), []byte("CvcvCvccCvccno"), []byte("CvccnoCvcvCvcc"), []byte("CvccCvcvnoCvcc"),
+ []byte("CvccCvcvCvccno")},
+ "medium": {[]byte("CvcnoCvc"), []byte("CvcCvcno")},
+ "short": {[]byte("Cvcn")},
+ "basic": {[]byte("aaanaaan"), []byte("aannaaan"), []byte("aaannaaa")},
+ "pin": {[]byte("nnnn")},
+}
+
+var templateCharacters = map[byte]string{
+ 'V': "AEIOU",
+ 'C': "BCDFGHJKLMNPQRSTVWXYZ",
+ 'v': "aeiou",
+ 'c': "bcdfghjklmnpqrstvwxyz",
+ 'A': "AEIOUBCDFGHJKLMNPQRSTVWXYZ",
+ 'a': "AEIOUaeiouBCDFGHJKLMNPQRSTVWXYZbcdfghjklmnpqrstvwxyz",
+ 'n': "0123456789",
+ 'o': "@&%?,=[]_:-+*$#!'^~;()/.",
+ 'x': "AEIOUaeiouBCDFGHJKLMNPQRSTVWXYZbcdfghjklmnpqrstvwxyz0123456789!@#$%^&*()",
+}
+
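+// derivePassword implements a Master Password-style derivation: it computes a
+// deterministic, site-specific password from a master password, user name,
+// site name, and counter, using scrypt for key stretching and HMAC-SHA256 to
+// select and fill one of the character templates above.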
+func derivePassword(counter uint32, passwordType, password, user, site string) string {
+ var templates = passwordTypeTemplates[passwordType]
+ if templates == nil {
+ return fmt.Sprintf("cannot find password template %s", passwordType)
+ }
+
+ var buffer bytes.Buffer
+ buffer.WriteString(masterPasswordSeed)
+ binary.Write(&buffer, binary.BigEndian, uint32(len(user)))
+ buffer.WriteString(user)
+
+ salt := buffer.Bytes()
+ key, err := scrypt.Key([]byte(password), salt, 32768, 8, 2, 64)
+ if err != nil {
+ return fmt.Sprintf("failed to derive password: %s", err)
+ }
+
+ buffer.Truncate(len(masterPasswordSeed))
+ binary.Write(&buffer, binary.BigEndian, uint32(len(site)))
+ buffer.WriteString(site)
+ binary.Write(&buffer, binary.BigEndian, counter)
+
+ var hmacv = hmac.New(sha256.New, key)
+ hmacv.Write(buffer.Bytes())
+ var seed = hmacv.Sum(nil)
+ var temp = templates[int(seed[0])%len(templates)]
+
+ buffer.Truncate(0)
+ for i, element := range temp {
+ passChars := templateCharacters[element]
+ passChar := passChars[int(seed[i+1])%len(passChars)]
+ buffer.WriteByte(passChar)
+ }
+
+ return buffer.String()
+}
+
+func generatePrivateKey(typ string) string {
+ var priv interface{}
+ var err error
+ switch typ {
+ case "", "rsa":
+ // good enough for government work
+ priv, err = rsa.GenerateKey(rand.Reader, 4096)
+ case "dsa":
+ key := new(dsa.PrivateKey)
+ // again, good enough for government work
+ if err = dsa.GenerateParameters(&key.Parameters, rand.Reader, dsa.L2048N256); err != nil {
+ return fmt.Sprintf("failed to generate dsa params: %s", err)
+ }
+ err = dsa.GenerateKey(key, rand.Reader)
+ priv = key
+ case "ecdsa":
+ // again, good enough for government work
+ priv, err = ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+ case "ed25519":
+ _, priv, err = ed25519.GenerateKey(rand.Reader)
+ default:
+ return "Unknown type " + typ
+ }
+ if err != nil {
+ return fmt.Sprintf("failed to generate private key: %s", err)
+ }
+
+ return string(pem.EncodeToMemory(pemBlockForKey(priv)))
+}
+
+// DSAKeyFormat stores the format for DSA keys.
+// Used by pemBlockForKey
+type DSAKeyFormat struct {
+ Version int
+ P, Q, G, Y, X *big.Int
+}
+
+func pemBlockForKey(priv interface{}) *pem.Block {
+ switch k := priv.(type) {
+ case *rsa.PrivateKey:
+ return &pem.Block{Type: "RSA PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(k)}
+ case *dsa.PrivateKey:
+ val := DSAKeyFormat{
+ P: k.P, Q: k.Q, G: k.G,
+ Y: k.Y, X: k.X,
+ }
+ bytes, _ := asn1.Marshal(val)
+ return &pem.Block{Type: "DSA PRIVATE KEY", Bytes: bytes}
+ case *ecdsa.PrivateKey:
+ b, _ := x509.MarshalECPrivateKey(k)
+ return &pem.Block{Type: "EC PRIVATE KEY", Bytes: b}
+ default:
+ // attempt PKCS#8 format for all other keys
+ b, err := x509.MarshalPKCS8PrivateKey(k)
+ if err != nil {
+ return nil
+ }
+ return &pem.Block{Type: "PRIVATE KEY", Bytes: b}
+ }
+}
+
+func parsePrivateKeyPEM(pemBlock string) (crypto.PrivateKey, error) {
+ block, _ := pem.Decode([]byte(pemBlock))
+ if block == nil {
+ return nil, errors.New("no PEM data in input")
+ }
+
+ if block.Type == "PRIVATE KEY" {
+ priv, err := x509.ParsePKCS8PrivateKey(block.Bytes)
+ if err != nil {
+ return nil, fmt.Errorf("decoding PEM as PKCS#8: %s", err)
+ }
+ return priv, nil
+ } else if !strings.HasSuffix(block.Type, " PRIVATE KEY") {
+ return nil, fmt.Errorf("no private key data in PEM block of type %s", block.Type)
+ }
+
+ switch block.Type[:len(block.Type)-12] { // strip " PRIVATE KEY"
+ case "RSA":
+ priv, err := x509.ParsePKCS1PrivateKey(block.Bytes)
+ if err != nil {
+ return nil, fmt.Errorf("parsing RSA private key from PEM: %s", err)
+ }
+ return priv, nil
+ case "EC":
+ priv, err := x509.ParseECPrivateKey(block.Bytes)
+ if err != nil {
+ return nil, fmt.Errorf("parsing EC private key from PEM: %s", err)
+ }
+ return priv, nil
+ case "DSA":
+ var k DSAKeyFormat
+ _, err := asn1.Unmarshal(block.Bytes, &k)
+ if err != nil {
+ return nil, fmt.Errorf("parsing DSA private key from PEM: %s", err)
+ }
+ priv := &dsa.PrivateKey{
+ PublicKey: dsa.PublicKey{
+ Parameters: dsa.Parameters{
+ P: k.P, Q: k.Q, G: k.G,
+ },
+ Y: k.Y,
+ },
+ X: k.X,
+ }
+ return priv, nil
+ default:
+ return nil, fmt.Errorf("invalid private key type %s", block.Type)
+ }
+}
+
+func getPublicKey(priv crypto.PrivateKey) (crypto.PublicKey, error) {
+ switch k := priv.(type) {
+ case interface{ Public() crypto.PublicKey }:
+ return k.Public(), nil
+ case *dsa.PrivateKey:
+ return &k.PublicKey, nil
+ default:
+ return nil, fmt.Errorf("unable to get public key for type %T", priv)
+ }
+}
+
+type certificate struct {
+ Cert string
+ Key string
+}
+
+func buildCustomCertificate(b64cert string, b64key string) (certificate, error) {
+ crt := certificate{}
+
+ cert, err := base64.StdEncoding.DecodeString(b64cert)
+ if err != nil {
+ return crt, errors.New("unable to decode base64 certificate")
+ }
+
+ key, err := base64.StdEncoding.DecodeString(b64key)
+ if err != nil {
+ return crt, errors.New("unable to decode base64 private key")
+ }
+
+ decodedCert, _ := pem.Decode(cert)
+ if decodedCert == nil {
+ return crt, errors.New("unable to decode certificate")
+ }
+ _, err = x509.ParseCertificate(decodedCert.Bytes)
+ if err != nil {
+ return crt, fmt.Errorf(
+ "error parsing certificate: decodedCert.Bytes: %s",
+ err,
+ )
+ }
+
+ _, err = parsePrivateKeyPEM(string(key))
+ if err != nil {
+ return crt, fmt.Errorf(
+ "error parsing private key: %s",
+ err,
+ )
+ }
+
+ crt.Cert = string(cert)
+ crt.Key = string(key)
+
+ return crt, nil
+}
+
+func generateCertificateAuthority(
+ cn string,
+ daysValid int,
+) (certificate, error) {
+ priv, err := rsa.GenerateKey(rand.Reader, 2048)
+ if err != nil {
+ return certificate{}, fmt.Errorf("error generating rsa key: %s", err)
+ }
+
+ return generateCertificateAuthorityWithKeyInternal(cn, daysValid, priv)
+}
+
+func generateCertificateAuthorityWithPEMKey(
+ cn string,
+ daysValid int,
+ privPEM string,
+) (certificate, error) {
+ priv, err := parsePrivateKeyPEM(privPEM)
+ if err != nil {
+ return certificate{}, fmt.Errorf("parsing private key: %s", err)
+ }
+ return generateCertificateAuthorityWithKeyInternal(cn, daysValid, priv)
+}
+
+func generateCertificateAuthorityWithKeyInternal(
+ cn string,
+ daysValid int,
+ priv crypto.PrivateKey,
+) (certificate, error) {
+ ca := certificate{}
+
+ template, err := getBaseCertTemplate(cn, nil, nil, daysValid)
+ if err != nil {
+ return ca, err
+ }
+ // Override KeyUsage and IsCA
+ template.KeyUsage = x509.KeyUsageKeyEncipherment |
+ x509.KeyUsageDigitalSignature |
+ x509.KeyUsageCertSign
+ template.IsCA = true
+
+ ca.Cert, ca.Key, err = getCertAndKey(template, priv, template, priv)
+
+ return ca, err
+}
+
+func generateSelfSignedCertificate(
+ cn string,
+ ips []interface{},
+ alternateDNS []interface{},
+ daysValid int,
+) (certificate, error) {
+ priv, err := rsa.GenerateKey(rand.Reader, 2048)
+ if err != nil {
+ return certificate{}, fmt.Errorf("error generating rsa key: %s", err)
+ }
+ return generateSelfSignedCertificateWithKeyInternal(cn, ips, alternateDNS, daysValid, priv)
+}
+
+func generateSelfSignedCertificateWithPEMKey(
+ cn string,
+ ips []interface{},
+ alternateDNS []interface{},
+ daysValid int,
+ privPEM string,
+) (certificate, error) {
+ priv, err := parsePrivateKeyPEM(privPEM)
+ if err != nil {
+ return certificate{}, fmt.Errorf("parsing private key: %s", err)
+ }
+ return generateSelfSignedCertificateWithKeyInternal(cn, ips, alternateDNS, daysValid, priv)
+}
+
+func generateSelfSignedCertificateWithKeyInternal(
+ cn string,
+ ips []interface{},
+ alternateDNS []interface{},
+ daysValid int,
+ priv crypto.PrivateKey,
+) (certificate, error) {
+ cert := certificate{}
+
+ template, err := getBaseCertTemplate(cn, ips, alternateDNS, daysValid)
+ if err != nil {
+ return cert, err
+ }
+
+ cert.Cert, cert.Key, err = getCertAndKey(template, priv, template, priv)
+
+ return cert, err
+}
+
+func generateSignedCertificate(
+ cn string,
+ ips []interface{},
+ alternateDNS []interface{},
+ daysValid int,
+ ca certificate,
+) (certificate, error) {
+ priv, err := rsa.GenerateKey(rand.Reader, 2048)
+ if err != nil {
+ return certificate{}, fmt.Errorf("error generating rsa key: %s", err)
+ }
+ return generateSignedCertificateWithKeyInternal(cn, ips, alternateDNS, daysValid, ca, priv)
+}
+
+func generateSignedCertificateWithPEMKey(
+ cn string,
+ ips []interface{},
+ alternateDNS []interface{},
+ daysValid int,
+ ca certificate,
+ privPEM string,
+) (certificate, error) {
+ priv, err := parsePrivateKeyPEM(privPEM)
+ if err != nil {
+ return certificate{}, fmt.Errorf("parsing private key: %s", err)
+ }
+ return generateSignedCertificateWithKeyInternal(cn, ips, alternateDNS, daysValid, ca, priv)
+}
+
+func generateSignedCertificateWithKeyInternal(
+ cn string,
+ ips []interface{},
+ alternateDNS []interface{},
+ daysValid int,
+ ca certificate,
+ priv crypto.PrivateKey,
+) (certificate, error) {
+ cert := certificate{}
+
+ decodedSignerCert, _ := pem.Decode([]byte(ca.Cert))
+ if decodedSignerCert == nil {
+ return cert, errors.New("unable to decode certificate")
+ }
+ signerCert, err := x509.ParseCertificate(decodedSignerCert.Bytes)
+ if err != nil {
+ return cert, fmt.Errorf(
+ "error parsing certificate: decodedSignerCert.Bytes: %s",
+ err,
+ )
+ }
+ signerKey, err := parsePrivateKeyPEM(ca.Key)
+ if err != nil {
+ return cert, fmt.Errorf(
+ "error parsing private key: %s",
+ err,
+ )
+ }
+
+ template, err := getBaseCertTemplate(cn, ips, alternateDNS, daysValid)
+ if err != nil {
+ return cert, err
+ }
+
+ cert.Cert, cert.Key, err = getCertAndKey(
+ template,
+ priv,
+ signerCert,
+ signerKey,
+ )
+
+ return cert, err
+}
+
+func getCertAndKey(
+ template *x509.Certificate,
+ signeeKey crypto.PrivateKey,
+ parent *x509.Certificate,
+ signingKey crypto.PrivateKey,
+) (string, string, error) {
+ signeePubKey, err := getPublicKey(signeeKey)
+ if err != nil {
+ return "", "", fmt.Errorf("error retrieving public key from signee key: %s", err)
+ }
+ derBytes, err := x509.CreateCertificate(
+ rand.Reader,
+ template,
+ parent,
+ signeePubKey,
+ signingKey,
+ )
+ if err != nil {
+ return "", "", fmt.Errorf("error creating certificate: %s", err)
+ }
+
+ certBuffer := bytes.Buffer{}
+ if err := pem.Encode(
+ &certBuffer,
+ &pem.Block{Type: "CERTIFICATE", Bytes: derBytes},
+ ); err != nil {
+ return "", "", fmt.Errorf("error pem-encoding certificate: %s", err)
+ }
+
+ keyBuffer := bytes.Buffer{}
+ if err := pem.Encode(
+ &keyBuffer,
+ pemBlockForKey(signeeKey),
+ ); err != nil {
+ return "", "", fmt.Errorf("error pem-encoding key: %s", err)
+ }
+
+ return certBuffer.String(), keyBuffer.String(), nil
+}
+
+func getBaseCertTemplate(
+ cn string,
+ ips []interface{},
+ alternateDNS []interface{},
+ daysValid int,
+) (*x509.Certificate, error) {
+ ipAddresses, err := getNetIPs(ips)
+ if err != nil {
+ return nil, err
+ }
+ dnsNames, err := getAlternateDNSStrs(alternateDNS)
+ if err != nil {
+ return nil, err
+ }
+ serialNumberUpperBound := new(big.Int).Lsh(big.NewInt(1), 128)
+ serialNumber, err := rand.Int(rand.Reader, serialNumberUpperBound)
+ if err != nil {
+ return nil, err
+ }
+ return &x509.Certificate{
+ SerialNumber: serialNumber,
+ Subject: pkix.Name{
+ CommonName: cn,
+ },
+ IPAddresses: ipAddresses,
+ DNSNames: dnsNames,
+ NotBefore: time.Now(),
+ NotAfter: time.Now().Add(time.Hour * 24 * time.Duration(daysValid)),
+ KeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature,
+ ExtKeyUsage: []x509.ExtKeyUsage{
+ x509.ExtKeyUsageServerAuth,
+ x509.ExtKeyUsageClientAuth,
+ },
+ BasicConstraintsValid: true,
+ }, nil
+}
+
+func getNetIPs(ips []interface{}) ([]net.IP, error) {
+ if ips == nil {
+ return []net.IP{}, nil
+ }
+ var ipStr string
+ var ok bool
+ var netIP net.IP
+ netIPs := make([]net.IP, len(ips))
+ for i, ip := range ips {
+ ipStr, ok = ip.(string)
+ if !ok {
+ return nil, fmt.Errorf("error parsing ip: %v is not a string", ip)
+ }
+ netIP = net.ParseIP(ipStr)
+ if netIP == nil {
+ return nil, fmt.Errorf("error parsing ip: %s", ipStr)
+ }
+ netIPs[i] = netIP
+ }
+ return netIPs, nil
+}
+
+func getAlternateDNSStrs(alternateDNS []interface{}) ([]string, error) {
+ if alternateDNS == nil {
+ return []string{}, nil
+ }
+ var dnsStr string
+ var ok bool
+ alternateDNSStrs := make([]string, len(alternateDNS))
+ for i, dns := range alternateDNS {
+ dnsStr, ok = dns.(string)
+ if !ok {
+ return nil, fmt.Errorf(
+ "error processing alternate dns name: %v is not a string",
+ dns,
+ )
+ }
+ alternateDNSStrs[i] = dnsStr
+ }
+ return alternateDNSStrs, nil
+}
+
+func encryptAES(password string, plaintext string) (string, error) {
+ if plaintext == "" {
+ return "", nil
+ }
+
+ key := make([]byte, 32)
+ copy(key, []byte(password))
+ block, err := aes.NewCipher(key)
+ if err != nil {
+ return "", err
+ }
+
+ content := []byte(plaintext)
+ blockSize := block.BlockSize()
+ padding := blockSize - len(content)%blockSize
+ padtext := bytes.Repeat([]byte{byte(padding)}, padding)
+ content = append(content, padtext...)
+
+ ciphertext := make([]byte, aes.BlockSize+len(content))
+
+ iv := ciphertext[:aes.BlockSize]
+ if _, err := io.ReadFull(rand.Reader, iv); err != nil {
+ return "", err
+ }
+
+ mode := cipher.NewCBCEncrypter(block, iv)
+ mode.CryptBlocks(ciphertext[aes.BlockSize:], content)
+
+ return base64.StdEncoding.EncodeToString(ciphertext), nil
+}
+
+func decryptAES(password string, crypt64 string) (string, error) {
+ if crypt64 == "" {
+ return "", nil
+ }
+
+ key := make([]byte, 32)
+ copy(key, []byte(password))
+
+ crypt, err := base64.StdEncoding.DecodeString(crypt64)
+ if err != nil {
+ return "", err
+ }
+
+ block, err := aes.NewCipher(key)
+ if err != nil {
+ return "", err
+ }
+
+ iv := crypt[:aes.BlockSize]
+ crypt = crypt[aes.BlockSize:]
+ decrypted := make([]byte, len(crypt))
+ mode := cipher.NewCBCDecrypter(block, iv)
+ mode.CryptBlocks(decrypted, crypt)
+
+ return string(decrypted[:len(decrypted)-int(decrypted[len(decrypted)-1])]), nil
+}
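The encrypt/decrypt pair above implements AES-256-CBC: the password is
zero-padded or truncated to a 32-byte key, the plaintext gets PKCS#7-style
padding, and a random IV is prepended to the ciphertext before base64
encoding. At the template level the two compose into a round trip; an
illustrative snippet:

```
{{ "attack at dawn" | encryptAES "secret key" | decryptAES "secret key" }}
```

which renders the original plaintext, `attack at dawn`.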
diff --git a/vendor/github.com/Masterminds/sprig/v3/date.go b/vendor/github.com/Masterminds/sprig/v3/date.go
new file mode 100644
index 000000000..ed022ddac
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/date.go
@@ -0,0 +1,152 @@
+package sprig
+
+import (
+ "strconv"
+ "time"
+)
+
+// Given a format and a date, format the date string.
+//
+// Date can be a `time.Time` or an `int, int32, int64`.
+// In the latter cases, it is treated as seconds since the UNIX
+// epoch.
+func date(fmt string, date interface{}) string {
+ return dateInZone(fmt, date, "Local")
+}
+
+func htmlDate(date interface{}) string {
+ return dateInZone("2006-01-02", date, "Local")
+}
+
+func htmlDateInZone(date interface{}, zone string) string {
+ return dateInZone("2006-01-02", date, zone)
+}
+
+func dateInZone(fmt string, date interface{}, zone string) string {
+ var t time.Time
+ switch date := date.(type) {
+ default:
+ t = time.Now()
+ case time.Time:
+ t = date
+ case *time.Time:
+ t = *date
+ case int64:
+ t = time.Unix(date, 0)
+ case int:
+ t = time.Unix(int64(date), 0)
+ case int32:
+ t = time.Unix(int64(date), 0)
+ }
+
+ loc, err := time.LoadLocation(zone)
+ if err != nil {
+ loc, _ = time.LoadLocation("UTC")
+ }
+
+ return t.In(loc).Format(fmt)
+}
+
+func dateModify(fmt string, date time.Time) time.Time {
+ d, err := time.ParseDuration(fmt)
+ if err != nil {
+ return date
+ }
+ return date.Add(d)
+}
+
+func mustDateModify(fmt string, date time.Time) (time.Time, error) {
+ d, err := time.ParseDuration(fmt)
+ if err != nil {
+ return time.Time{}, err
+ }
+ return date.Add(d), nil
+}
+
+func dateAgo(date interface{}) string {
+ var t time.Time
+
+ switch date := date.(type) {
+ default:
+ t = time.Now()
+ case time.Time:
+ t = date
+ case int64:
+ t = time.Unix(date, 0)
+ case int:
+ t = time.Unix(int64(date), 0)
+ }
+ // Drop resolution to seconds
+ duration := time.Since(t).Round(time.Second)
+ return duration.String()
+}
+
+func duration(sec interface{}) string {
+ var n int64
+ switch value := sec.(type) {
+ default:
+ n = 0
+ case string:
+ n, _ = strconv.ParseInt(value, 10, 64)
+ case int64:
+ n = value
+ }
+ return (time.Duration(n) * time.Second).String()
+}
+
+func durationRound(duration interface{}) string {
+ var d time.Duration
+ switch duration := duration.(type) {
+ default:
+ d = 0
+ case string:
+ d, _ = time.ParseDuration(duration)
+ case int64:
+ d = time.Duration(duration)
+ case time.Time:
+ d = time.Since(duration)
+ }
+
+ u := uint64(d)
+ neg := d < 0
+ if neg {
+ u = -u
+ }
+
+ var (
+ year = uint64(time.Hour) * 24 * 365
+ month = uint64(time.Hour) * 24 * 30
+ day = uint64(time.Hour) * 24
+ hour = uint64(time.Hour)
+ minute = uint64(time.Minute)
+ second = uint64(time.Second)
+ )
+ switch {
+ case u > year:
+ return strconv.FormatUint(u/year, 10) + "y"
+ case u > month:
+ return strconv.FormatUint(u/month, 10) + "mo"
+ case u > day:
+ return strconv.FormatUint(u/day, 10) + "d"
+ case u > hour:
+ return strconv.FormatUint(u/hour, 10) + "h"
+ case u > minute:
+ return strconv.FormatUint(u/minute, 10) + "m"
+ case u > second:
+ return strconv.FormatUint(u/second, 10) + "s"
+ }
+ return "0s"
+}
+
+func toDate(fmt, str string) time.Time {
+ t, _ := time.ParseInLocation(fmt, str, time.Local)
+ return t
+}
+
+func mustToDate(fmt, str string) (time.Time, error) {
+ return time.ParseInLocation(fmt, str, time.Local)
+}
+
+func unixEpoch(date time.Time) string {
+ return strconv.FormatInt(date.Unix(), 10)
+}
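These helpers back template functions such as `date`, `dateInZone`, `ago`,
`duration`, and `durationRound` (the registrations appear in functions.go
below). A few illustrative calls; note that, per the code above, `duration`
only handles string and int64 inputs:

```
{{ now | date "2006-01-02" }}          today's date in the local zone
{{ dateInZone "15:04" (now) "UTC" }}   the current time in UTC
{{ durationRound "2h10m5s" }}          2h
{{ duration "95" }}                    1m35s
```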
diff --git a/vendor/github.com/Masterminds/sprig/v3/defaults.go b/vendor/github.com/Masterminds/sprig/v3/defaults.go
new file mode 100644
index 000000000..b9f979666
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/defaults.go
@@ -0,0 +1,163 @@
+package sprig
+
+import (
+ "bytes"
+ "encoding/json"
+ "math/rand"
+ "reflect"
+ "strings"
+ "time"
+)
+
+func init() {
+ rand.Seed(time.Now().UnixNano())
+}
+
+// dfault checks whether `given` is set, and returns default if not set.
+//
+// This returns `d` if `given` appears not to be set, and `given` otherwise.
+//
+// For numeric types 0 is unset.
+// For strings, maps, arrays, and slices, len() = 0 is considered unset.
+// For bool, false is unset.
+// Structs are never considered unset.
+//
+// For everything else, including pointers, a nil value is unset.
+func dfault(d interface{}, given ...interface{}) interface{} {
+
+ if empty(given) || empty(given[0]) {
+ return d
+ }
+ return given[0]
+}
+
+// empty returns true if the given value has the zero value for its type.
+func empty(given interface{}) bool {
+ g := reflect.ValueOf(given)
+ if !g.IsValid() {
+ return true
+ }
+
+ // Basically adapted from text/template.isTrue
+ switch g.Kind() {
+ default:
+ return g.IsNil()
+ case reflect.Array, reflect.Slice, reflect.Map, reflect.String:
+ return g.Len() == 0
+ case reflect.Bool:
+ return !g.Bool()
+ case reflect.Complex64, reflect.Complex128:
+ return g.Complex() == 0
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return g.Int() == 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return g.Uint() == 0
+ case reflect.Float32, reflect.Float64:
+ return g.Float() == 0
+ case reflect.Struct:
+ return false
+ }
+}
+
+// coalesce returns the first non-empty value.
+func coalesce(v ...interface{}) interface{} {
+ for _, val := range v {
+ if !empty(val) {
+ return val
+ }
+ }
+ return nil
+}
+
+// all returns true if empty(x) is false for all values x in the list.
+// If the list is empty, return true.
+func all(v ...interface{}) bool {
+ for _, val := range v {
+ if empty(val) {
+ return false
+ }
+ }
+ return true
+}
+
+// any returns true if empty(x) is false for any x in the list.
+// If the list is empty, return false.
+func any(v ...interface{}) bool {
+ for _, val := range v {
+ if !empty(val) {
+ return true
+ }
+ }
+ return false
+}
+
+// fromJson decodes JSON into a structured value, ignoring errors.
+func fromJson(v string) interface{} {
+ output, _ := mustFromJson(v)
+ return output
+}
+
+// mustFromJson decodes JSON into a structured value, returning errors.
+func mustFromJson(v string) (interface{}, error) {
+ var output interface{}
+ err := json.Unmarshal([]byte(v), &output)
+ return output, err
+}
+
+// toJson encodes an item into a JSON string
+func toJson(v interface{}) string {
+ output, _ := json.Marshal(v)
+ return string(output)
+}
+
+func mustToJson(v interface{}) (string, error) {
+ output, err := json.Marshal(v)
+ if err != nil {
+ return "", err
+ }
+ return string(output), nil
+}
+
+// toPrettyJson encodes an item into a pretty (indented) JSON string
+func toPrettyJson(v interface{}) string {
+ output, _ := json.MarshalIndent(v, "", " ")
+ return string(output)
+}
+
+func mustToPrettyJson(v interface{}) (string, error) {
+ output, err := json.MarshalIndent(v, "", " ")
+ if err != nil {
+ return "", err
+ }
+ return string(output), nil
+}
+
+// toRawJson encodes an item into a JSON string with no escaping of HTML characters.
+func toRawJson(v interface{}) string {
+ output, err := mustToRawJson(v)
+ if err != nil {
+ panic(err)
+ }
+ return string(output)
+}
+
+// mustToRawJson encodes an item into a JSON string with no escaping of HTML characters.
+func mustToRawJson(v interface{}) (string, error) {
+ buf := new(bytes.Buffer)
+ enc := json.NewEncoder(buf)
+ enc.SetEscapeHTML(false)
+ err := enc.Encode(&v)
+ if err != nil {
+ return "", err
+ }
+ return strings.TrimSuffix(buf.String(), "\n"), nil
+}
+
+// ternary returns the first value if the last value is true, otherwise returns the second value.
+func ternary(vt interface{}, vf interface{}, v bool) interface{} {
+ if v {
+ return vt
+ }
+
+ return vf
+}
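In templates these surface as `default`, `empty`, `coalesce`, `all`, `any`,
`ternary`, and the JSON helpers. Given the zero-value rules above, a few
illustrative evaluations:

```
{{ "" | default "fallback" }}       fallback
{{ 0 | default 42 }}                42
{{ coalesce "" 0 "first" }}         first
{{ true | ternary "yes" "no" }}     yes
```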
diff --git a/vendor/github.com/Masterminds/sprig/v3/dict.go b/vendor/github.com/Masterminds/sprig/v3/dict.go
new file mode 100644
index 000000000..ade889698
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/dict.go
@@ -0,0 +1,174 @@
+package sprig
+
+import (
+ "github.com/imdario/mergo"
+ "github.com/mitchellh/copystructure"
+)
+
+func get(d map[string]interface{}, key string) interface{} {
+ if val, ok := d[key]; ok {
+ return val
+ }
+ return ""
+}
+
+func set(d map[string]interface{}, key string, value interface{}) map[string]interface{} {
+ d[key] = value
+ return d
+}
+
+func unset(d map[string]interface{}, key string) map[string]interface{} {
+ delete(d, key)
+ return d
+}
+
+func hasKey(d map[string]interface{}, key string) bool {
+ _, ok := d[key]
+ return ok
+}
+
+func pluck(key string, d ...map[string]interface{}) []interface{} {
+ res := []interface{}{}
+ for _, dict := range d {
+ if val, ok := dict[key]; ok {
+ res = append(res, val)
+ }
+ }
+ return res
+}
+
+func keys(dicts ...map[string]interface{}) []string {
+ k := []string{}
+ for _, dict := range dicts {
+ for key := range dict {
+ k = append(k, key)
+ }
+ }
+ return k
+}
+
+func pick(dict map[string]interface{}, keys ...string) map[string]interface{} {
+ res := map[string]interface{}{}
+ for _, k := range keys {
+ if v, ok := dict[k]; ok {
+ res[k] = v
+ }
+ }
+ return res
+}
+
+func omit(dict map[string]interface{}, keys ...string) map[string]interface{} {
+ res := map[string]interface{}{}
+
+ omit := make(map[string]bool, len(keys))
+ for _, k := range keys {
+ omit[k] = true
+ }
+
+ for k, v := range dict {
+ if _, ok := omit[k]; !ok {
+ res[k] = v
+ }
+ }
+ return res
+}
+
+func dict(v ...interface{}) map[string]interface{} {
+ dict := map[string]interface{}{}
+ lenv := len(v)
+ for i := 0; i < lenv; i += 2 {
+ key := strval(v[i])
+ if i+1 >= lenv {
+ dict[key] = ""
+ continue
+ }
+ dict[key] = v[i+1]
+ }
+ return dict
+}
+
+func merge(dst map[string]interface{}, srcs ...map[string]interface{}) interface{} {
+ for _, src := range srcs {
+ if err := mergo.Merge(&dst, src); err != nil {
+ // Swallow errors inside of a template.
+ return ""
+ }
+ }
+ return dst
+}
+
+func mustMerge(dst map[string]interface{}, srcs ...map[string]interface{}) (interface{}, error) {
+ for _, src := range srcs {
+ if err := mergo.Merge(&dst, src); err != nil {
+ return nil, err
+ }
+ }
+ return dst, nil
+}
+
+func mergeOverwrite(dst map[string]interface{}, srcs ...map[string]interface{}) interface{} {
+ for _, src := range srcs {
+ if err := mergo.MergeWithOverwrite(&dst, src); err != nil {
+ // Swallow errors inside of a template.
+ return ""
+ }
+ }
+ return dst
+}
+
+func mustMergeOverwrite(dst map[string]interface{}, srcs ...map[string]interface{}) (interface{}, error) {
+ for _, src := range srcs {
+ if err := mergo.MergeWithOverwrite(&dst, src); err != nil {
+ return nil, err
+ }
+ }
+ return dst, nil
+}
+
+func values(dict map[string]interface{}) []interface{} {
+ values := []interface{}{}
+ for _, value := range dict {
+ values = append(values, value)
+ }
+
+ return values
+}
+
+func deepCopy(i interface{}) interface{} {
+ c, err := mustDeepCopy(i)
+ if err != nil {
+ panic("deepCopy error: " + err.Error())
+ }
+
+ return c
+}
+
+func mustDeepCopy(i interface{}) (interface{}, error) {
+ return copystructure.Copy(i)
+}
+
+func dig(ps ...interface{}) (interface{}, error) {
+ if len(ps) < 3 {
+ panic("dig needs at least three arguments")
+ }
+ dict := ps[len(ps)-1].(map[string]interface{})
+ def := ps[len(ps)-2]
+ ks := make([]string, len(ps)-2)
+ for i := 0; i < len(ks); i++ {
+ ks[i] = ps[i].(string)
+ }
+
+ return digFromDict(dict, def, ks)
+}
+
+func digFromDict(dict map[string]interface{}, d interface{}, ks []string) (interface{}, error) {
+ k, ns := ks[0], ks[1:len(ks)]
+ step, has := dict[k]
+ if !has {
+ return d, nil
+ }
+ if len(ns) == 0 {
+ return step, nil
+ }
+ return digFromDict(step.(map[string]interface{}), d, ns)
+}
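+
+// Example (illustrative sketch, not part of the upstream file): in a template,
+// {{ dig "a" "b" "miss" .Values }} walks .Values["a"]["b"] and yields "miss"
+// when any key along the path is absent. Programmatically:
+//
+//   v, _ := digFromDict(map[string]interface{}{
+//       "a": map[string]interface{}{"b": 42},
+//   }, "miss", []string{"a", "b"}) // v == 42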
diff --git a/vendor/github.com/Masterminds/sprig/v3/doc.go b/vendor/github.com/Masterminds/sprig/v3/doc.go
new file mode 100644
index 000000000..aabb9d448
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/doc.go
@@ -0,0 +1,19 @@
+/*
+Package sprig provides template functions for Go.
+
+This package contains a number of utility functions for working with data
+inside of Go `html/template` and `text/template` files.
+
+To add these functions, use the `template.Funcs()` method:
+
+ t := template.New("foo").Funcs(sprig.FuncMap())
+
+Note that you should add the function map before you parse any template files.
+
+In several cases, Sprig reverses the order of arguments from the way they
+appear in the standard library. This is to make it easier to pipe
+arguments into functions.
+
+See http://masterminds.github.io/sprig/ for more detailed documentation on each of the available functions.
+*/
+package sprig
diff --git a/vendor/github.com/Masterminds/sprig/v3/functions.go b/vendor/github.com/Masterminds/sprig/v3/functions.go
new file mode 100644
index 000000000..57fcec1d9
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/functions.go
@@ -0,0 +1,382 @@
+package sprig
+
+import (
+ "errors"
+ "html/template"
+ "math/rand"
+ "os"
+ "path"
+ "path/filepath"
+ "reflect"
+ "strconv"
+ "strings"
+ ttemplate "text/template"
+ "time"
+
+ util "github.com/Masterminds/goutils"
+ "github.com/huandu/xstrings"
+ "github.com/shopspring/decimal"
+)
+
+// FuncMap produces the function map.
+//
+// Use this to pass the functions into the template engine:
+//
+// tpl := template.New("foo").Funcs(sprig.FuncMap())
+//
+func FuncMap() template.FuncMap {
+ return HtmlFuncMap()
+}
+
+// HermeticTxtFuncMap returns a 'text/template'.FuncMap with only repeatable functions.
+func HermeticTxtFuncMap() ttemplate.FuncMap {
+ r := TxtFuncMap()
+ for _, name := range nonhermeticFunctions {
+ delete(r, name)
+ }
+ return r
+}
+
+// HermeticHtmlFuncMap returns an 'html/template'.FuncMap with only repeatable functions.
+func HermeticHtmlFuncMap() template.FuncMap {
+ r := HtmlFuncMap()
+ for _, name := range nonhermeticFunctions {
+ delete(r, name)
+ }
+ return r
+}
+
+// TxtFuncMap returns a 'text/template'.FuncMap
+func TxtFuncMap() ttemplate.FuncMap {
+ return ttemplate.FuncMap(GenericFuncMap())
+}
+
+// HtmlFuncMap returns an 'html/template'.FuncMap
+func HtmlFuncMap() template.FuncMap {
+ return template.FuncMap(GenericFuncMap())
+}
+
+// GenericFuncMap returns a copy of the basic function map as a map[string]interface{}.
+func GenericFuncMap() map[string]interface{} {
+ gfm := make(map[string]interface{}, len(genericMap))
+ for k, v := range genericMap {
+ gfm[k] = v
+ }
+ return gfm
+}
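+
+// Example (illustrative sketch, not part of the upstream file): wiring the
+// text/template variant into an engine:
+//
+//   tpl := ttemplate.Must(ttemplate.New("t").Funcs(TxtFuncMap()).Parse(`{{ "hi" | upper }}`))
+//   _ = tpl.Execute(os.Stdout, nil) // prints "HI"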
+
+// These functions are not guaranteed to evaluate to the same result for a given input, because they
+// refer to the environment or global state.
+var nonhermeticFunctions = []string{
+ // Date functions
+ "date",
+ "date_in_zone",
+ "date_modify",
+ "now",
+ "htmlDate",
+ "htmlDateInZone",
+ "dateInZone",
+ "dateModify",
+
+ // Strings
+ "randAlphaNum",
+ "randAlpha",
+ "randAscii",
+ "randNumeric",
+ "randBytes",
+ "uuidv4",
+
+ // OS
+ "env",
+ "expandenv",
+
+ // Network
+ "getHostByName",
+}
+
+var genericMap = map[string]interface{}{
+ "hello": func() string { return "Hello!" },
+
+ // Date functions
+ "ago": dateAgo,
+ "date": date,
+ "date_in_zone": dateInZone,
+ "date_modify": dateModify,
+ "dateInZone": dateInZone,
+ "dateModify": dateModify,
+ "duration": duration,
+ "durationRound": durationRound,
+ "htmlDate": htmlDate,
+ "htmlDateInZone": htmlDateInZone,
+ "must_date_modify": mustDateModify,
+ "mustDateModify": mustDateModify,
+ "mustToDate": mustToDate,
+ "now": time.Now,
+ "toDate": toDate,
+ "unixEpoch": unixEpoch,
+
+ // Strings
+ "abbrev": abbrev,
+ "abbrevboth": abbrevboth,
+ "trunc": trunc,
+ "trim": strings.TrimSpace,
+ "upper": strings.ToUpper,
+ "lower": strings.ToLower,
+ "title": strings.Title,
+ "untitle": untitle,
+ "substr": substring,
+ // Switch order so that "foo" | repeat 5
+ "repeat": func(count int, str string) string { return strings.Repeat(str, count) },
+ // Deprecated: Use trimAll.
+ "trimall": func(a, b string) string { return strings.Trim(b, a) },
+ // Switch order so that "$foo" | trimall "$"
+ "trimAll": func(a, b string) string { return strings.Trim(b, a) },
+ "trimSuffix": func(a, b string) string { return strings.TrimSuffix(b, a) },
+ "trimPrefix": func(a, b string) string { return strings.TrimPrefix(b, a) },
+ "nospace": util.DeleteWhiteSpace,
+ "initials": initials,
+ "randAlphaNum": randAlphaNumeric,
+ "randAlpha": randAlpha,
+ "randAscii": randAscii,
+ "randNumeric": randNumeric,
+ "swapcase": util.SwapCase,
+ "shuffle": xstrings.Shuffle,
+ "snakecase": xstrings.ToSnakeCase,
+ "camelcase": xstrings.ToCamelCase,
+ "kebabcase": xstrings.ToKebabCase,
+ "wrap": func(l int, s string) string { return util.Wrap(s, l) },
+ "wrapWith": func(l int, sep, str string) string { return util.WrapCustom(str, l, sep, true) },
+ // Switch order so that "foobar" | contains "foo"
+ "contains": func(substr string, str string) bool { return strings.Contains(str, substr) },
+ "hasPrefix": func(substr string, str string) bool { return strings.HasPrefix(str, substr) },
+ "hasSuffix": func(substr string, str string) bool { return strings.HasSuffix(str, substr) },
+ "quote": quote,
+ "squote": squote,
+ "cat": cat,
+ "indent": indent,
+ "nindent": nindent,
+ "replace": replace,
+ "plural": plural,
+ "sha1sum": sha1sum,
+ "sha256sum": sha256sum,
+ "adler32sum": adler32sum,
+ "toString": strval,
+
+ // Wrap Atoi to stop errors.
+ "atoi": func(a string) int { i, _ := strconv.Atoi(a); return i },
+ "int64": toInt64,
+ "int": toInt,
+ "float64": toFloat64,
+ "seq": seq,
+ "toDecimal": toDecimal,
+
+ //"gt": func(a, b int) bool {return a > b},
+ //"gte": func(a, b int) bool {return a >= b},
+ //"lt": func(a, b int) bool {return a < b},
+ //"lte": func(a, b int) bool {return a <= b},
+
+ // split "/" "foo/bar" returns map[string]string{"_0": "foo", "_1": "bar"}
+ "split": split,
+ "splitList": func(sep, orig string) []string { return strings.Split(orig, sep) },
+ // splitn "/" 2 "foo/bar/fuu" returns map[string]string{"_0": "foo", "_1": "bar/fuu"}
+ "splitn": splitn,
+ "toStrings": strslice,
+
+ "until": until,
+ "untilStep": untilStep,
+
+ // VERY basic arithmetic.
+ "add1": func(i interface{}) int64 { return toInt64(i) + 1 },
+ "add": func(i ...interface{}) int64 {
+ var a int64 = 0
+ for _, b := range i {
+ a += toInt64(b)
+ }
+ return a
+ },
+ "sub": func(a, b interface{}) int64 { return toInt64(a) - toInt64(b) },
+ "div": func(a, b interface{}) int64 { return toInt64(a) / toInt64(b) },
+ "mod": func(a, b interface{}) int64 { return toInt64(a) % toInt64(b) },
+ "mul": func(a interface{}, v ...interface{}) int64 {
+ val := toInt64(a)
+ for _, b := range v {
+ val = val * toInt64(b)
+ }
+ return val
+ },
+ "randInt": func(min, max int) int { return rand.Intn(max-min) + min },
+ "add1f": func(i interface{}) float64 {
+ return execDecimalOp(i, []interface{}{1}, func(d1, d2 decimal.Decimal) decimal.Decimal { return d1.Add(d2) })
+ },
+ "addf": func(i ...interface{}) float64 {
+ a := interface{}(float64(0))
+ return execDecimalOp(a, i, func(d1, d2 decimal.Decimal) decimal.Decimal { return d1.Add(d2) })
+ },
+ "subf": func(a interface{}, v ...interface{}) float64 {
+ return execDecimalOp(a, v, func(d1, d2 decimal.Decimal) decimal.Decimal { return d1.Sub(d2) })
+ },
+ "divf": func(a interface{}, v ...interface{}) float64 {
+ return execDecimalOp(a, v, func(d1, d2 decimal.Decimal) decimal.Decimal { return d1.Div(d2) })
+ },
+ "mulf": func(a interface{}, v ...interface{}) float64 {
+ return execDecimalOp(a, v, func(d1, d2 decimal.Decimal) decimal.Decimal { return d1.Mul(d2) })
+ },
+ "biggest": max,
+ "max": max,
+ "min": min,
+ "maxf": maxf,
+ "minf": minf,
+ "ceil": ceil,
+ "floor": floor,
+ "round": round,
+
+ // String slices. Note that we reverse the order because that's better
+ // for template processing.
+ "join": join,
+ "sortAlpha": sortAlpha,
+
+ // Defaults
+ "default": dfault,
+ "empty": empty,
+ "coalesce": coalesce,
+ "all": all,
+ "any": any,
+ "compact": compact,
+ "mustCompact": mustCompact,
+ "fromJson": fromJson,
+ "toJson": toJson,
+ "toPrettyJson": toPrettyJson,
+ "toRawJson": toRawJson,
+ "mustFromJson": mustFromJson,
+ "mustToJson": mustToJson,
+ "mustToPrettyJson": mustToPrettyJson,
+ "mustToRawJson": mustToRawJson,
+ "ternary": ternary,
+ "deepCopy": deepCopy,
+ "mustDeepCopy": mustDeepCopy,
+
+ // Reflection
+ "typeOf": typeOf,
+ "typeIs": typeIs,
+ "typeIsLike": typeIsLike,
+ "kindOf": kindOf,
+ "kindIs": kindIs,
+ "deepEqual": reflect.DeepEqual,
+
+ // OS:
+ "env": os.Getenv,
+ "expandenv": os.ExpandEnv,
+
+ // Network:
+ "getHostByName": getHostByName,
+
+ // Paths:
+ "base": path.Base,
+ "dir": path.Dir,
+ "clean": path.Clean,
+ "ext": path.Ext,
+ "isAbs": path.IsAbs,
+
+ // Filepaths:
+ "osBase": filepath.Base,
+ "osClean": filepath.Clean,
+ "osDir": filepath.Dir,
+ "osExt": filepath.Ext,
+ "osIsAbs": filepath.IsAbs,
+
+ // Encoding:
+ "b64enc": base64encode,
+ "b64dec": base64decode,
+ "b32enc": base32encode,
+ "b32dec": base32decode,
+
+ // Data Structures:
+ "tuple": list, // FIXME: with the addition of append/prepend these are no longer immutable.
+ "list": list,
+ "dict": dict,
+ "get": get,
+ "set": set,
+ "unset": unset,
+ "hasKey": hasKey,
+ "pluck": pluck,
+ "keys": keys,
+ "pick": pick,
+ "omit": omit,
+ "merge": merge,
+ "mergeOverwrite": mergeOverwrite,
+ "mustMerge": mustMerge,
+ "mustMergeOverwrite": mustMergeOverwrite,
+ "values": values,
+
+ "append": push, "push": push,
+ "mustAppend": mustPush, "mustPush": mustPush,
+ "prepend": prepend,
+ "mustPrepend": mustPrepend,
+ "first": first,
+ "mustFirst": mustFirst,
+ "rest": rest,
+ "mustRest": mustRest,
+ "last": last,
+ "mustLast": mustLast,
+ "initial": initial,
+ "mustInitial": mustInitial,
+ "reverse": reverse,
+ "mustReverse": mustReverse,
+ "uniq": uniq,
+ "mustUniq": mustUniq,
+ "without": without,
+ "mustWithout": mustWithout,
+ "has": has,
+ "mustHas": mustHas,
+ "slice": slice,
+ "mustSlice": mustSlice,
+ "concat": concat,
+ "dig": dig,
+ "chunk": chunk,
+ "mustChunk": mustChunk,
+
+ // Crypto:
+ "bcrypt": bcrypt,
+ "htpasswd": htpasswd,
+ "genPrivateKey": generatePrivateKey,
+ "derivePassword": derivePassword,
+ "buildCustomCert": buildCustomCertificate,
+ "genCA": generateCertificateAuthority,
+ "genCAWithKey": generateCertificateAuthorityWithPEMKey,
+ "genSelfSignedCert": generateSelfSignedCertificate,
+ "genSelfSignedCertWithKey": generateSelfSignedCertificateWithPEMKey,
+ "genSignedCert": generateSignedCertificate,
+ "genSignedCertWithKey": generateSignedCertificateWithPEMKey,
+ "encryptAES": encryptAES,
+ "decryptAES": decryptAES,
+ "randBytes": randBytes,
+
+ // UUIDs:
+ "uuidv4": uuidv4,
+
+ // SemVer:
+ "semver": semver,
+ "semverCompare": semverCompare,
+
+ // Flow Control:
+ "fail": func(msg string) (string, error) { return "", errors.New(msg) },
+
+ // Regex
+ "regexMatch": regexMatch,
+ "mustRegexMatch": mustRegexMatch,
+ "regexFindAll": regexFindAll,
+ "mustRegexFindAll": mustRegexFindAll,
+ "regexFind": regexFind,
+ "mustRegexFind": mustRegexFind,
+ "regexReplaceAll": regexReplaceAll,
+ "mustRegexReplaceAll": mustRegexReplaceAll,
+ "regexReplaceAllLiteral": regexReplaceAllLiteral,
+ "mustRegexReplaceAllLiteral": mustRegexReplaceAllLiteral,
+ "regexSplit": regexSplit,
+ "mustRegexSplit": mustRegexSplit,
+ "regexQuoteMeta": regexQuoteMeta,
+
+ // URLs:
+ "urlParse": urlParse,
+ "urlJoin": urlJoin,
+}
diff --git a/vendor/github.com/Masterminds/sprig/v3/list.go b/vendor/github.com/Masterminds/sprig/v3/list.go
new file mode 100644
index 000000000..ca0fbb789
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/list.go
@@ -0,0 +1,464 @@
+package sprig
+
+import (
+ "fmt"
+ "math"
+ "reflect"
+ "sort"
+)
+
+// Reflection is used in these functions so that slices and arrays of strings,
+// ints, and other types not implementing []interface{} can be worked with.
+// For example, this is useful if you need to work on the output of regexes.
+
+func list(v ...interface{}) []interface{} {
+ return v
+}
+
+func push(list interface{}, v interface{}) []interface{} {
+ l, err := mustPush(list, v)
+ if err != nil {
+ panic(err)
+ }
+
+ return l
+}
+
+func mustPush(list interface{}, v interface{}) ([]interface{}, error) {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ nl := make([]interface{}, l)
+ for i := 0; i < l; i++ {
+ nl[i] = l2.Index(i).Interface()
+ }
+
+ return append(nl, v), nil
+
+ default:
+ return nil, fmt.Errorf("Cannot push on type %s", tp)
+ }
+}
+
+func prepend(list interface{}, v interface{}) []interface{} {
+ l, err := mustPrepend(list, v)
+ if err != nil {
+ panic(err)
+ }
+
+ return l
+}
+
+func mustPrepend(list interface{}, v interface{}) ([]interface{}, error) {
+ //return append([]interface{}{v}, list...)
+
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ nl := make([]interface{}, l)
+ for i := 0; i < l; i++ {
+ nl[i] = l2.Index(i).Interface()
+ }
+
+ return append([]interface{}{v}, nl...), nil
+
+ default:
+ return nil, fmt.Errorf("Cannot prepend on type %s", tp)
+ }
+}
+
+func chunk(size int, list interface{}) [][]interface{} {
+ l, err := mustChunk(size, list)
+ if err != nil {
+ panic(err)
+ }
+
+ return l
+}
+
+func mustChunk(size int, list interface{}) ([][]interface{}, error) {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+
+ cs := int(math.Floor(float64(l-1)/float64(size)) + 1)
+ nl := make([][]interface{}, cs)
+
+ for i := 0; i < cs; i++ {
+ clen := size
+ if i == cs-1 {
+ clen = int(math.Floor(math.Mod(float64(l), float64(size))))
+ if clen == 0 {
+ clen = size
+ }
+ }
+
+ nl[i] = make([]interface{}, clen)
+
+ for j := 0; j < clen; j++ {
+ ix := i*size + j
+ nl[i][j] = l2.Index(ix).Interface()
+ }
+ }
+
+ return nl, nil
+
+ default:
+ return nil, fmt.Errorf("Cannot chunk type %s", tp)
+ }
+}
+
+func last(list interface{}) interface{} {
+ l, err := mustLast(list)
+ if err != nil {
+ panic(err)
+ }
+
+ return l
+}
+
+func mustLast(list interface{}) (interface{}, error) {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ if l == 0 {
+ return nil, nil
+ }
+
+ return l2.Index(l - 1).Interface(), nil
+ default:
+ return nil, fmt.Errorf("Cannot find last on type %s", tp)
+ }
+}
+
+func first(list interface{}) interface{} {
+ l, err := mustFirst(list)
+ if err != nil {
+ panic(err)
+ }
+
+ return l
+}
+
+func mustFirst(list interface{}) (interface{}, error) {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ if l == 0 {
+ return nil, nil
+ }
+
+ return l2.Index(0).Interface(), nil
+ default:
+ return nil, fmt.Errorf("Cannot find first on type %s", tp)
+ }
+}
+
+func rest(list interface{}) []interface{} {
+ l, err := mustRest(list)
+ if err != nil {
+ panic(err)
+ }
+
+ return l
+}
+
+func mustRest(list interface{}) ([]interface{}, error) {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ if l == 0 {
+ return nil, nil
+ }
+
+ nl := make([]interface{}, l-1)
+ for i := 1; i < l; i++ {
+ nl[i-1] = l2.Index(i).Interface()
+ }
+
+ return nl, nil
+ default:
+ return nil, fmt.Errorf("Cannot find rest on type %s", tp)
+ }
+}
+
+func initial(list interface{}) []interface{} {
+ l, err := mustInitial(list)
+ if err != nil {
+ panic(err)
+ }
+
+ return l
+}
+
+func mustInitial(list interface{}) ([]interface{}, error) {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ if l == 0 {
+ return nil, nil
+ }
+
+ nl := make([]interface{}, l-1)
+ for i := 0; i < l-1; i++ {
+ nl[i] = l2.Index(i).Interface()
+ }
+
+ return nl, nil
+ default:
+ return nil, fmt.Errorf("Cannot find initial on type %s", tp)
+ }
+}
+
+func sortAlpha(list interface{}) []string {
+ k := reflect.Indirect(reflect.ValueOf(list)).Kind()
+ switch k {
+ case reflect.Slice, reflect.Array:
+ a := strslice(list)
+ s := sort.StringSlice(a)
+ s.Sort()
+ return s
+ }
+ return []string{strval(list)}
+}
+
+func reverse(v interface{}) []interface{} {
+ l, err := mustReverse(v)
+ if err != nil {
+ panic(err)
+ }
+
+ return l
+}
+
+func mustReverse(v interface{}) ([]interface{}, error) {
+ tp := reflect.TypeOf(v).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(v)
+
+ l := l2.Len()
+ // We do not sort in place because the incoming array should not be altered.
+ nl := make([]interface{}, l)
+ for i := 0; i < l; i++ {
+ nl[l-i-1] = l2.Index(i).Interface()
+ }
+
+ return nl, nil
+ default:
+ return nil, fmt.Errorf("Cannot find reverse on type %s", tp)
+ }
+}
+
+func compact(list interface{}) []interface{} {
+ l, err := mustCompact(list)
+ if err != nil {
+ panic(err)
+ }
+
+ return l
+}
+
+func mustCompact(list interface{}) ([]interface{}, error) {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ nl := []interface{}{}
+ var item interface{}
+ for i := 0; i < l; i++ {
+ item = l2.Index(i).Interface()
+ if !empty(item) {
+ nl = append(nl, item)
+ }
+ }
+
+ return nl, nil
+ default:
+ return nil, fmt.Errorf("Cannot compact on type %s", tp)
+ }
+}
+
+func uniq(list interface{}) []interface{} {
+ l, err := mustUniq(list)
+ if err != nil {
+ panic(err)
+ }
+
+ return l
+}
+
+func mustUniq(list interface{}) ([]interface{}, error) {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ dest := []interface{}{}
+ var item interface{}
+ for i := 0; i < l; i++ {
+ item = l2.Index(i).Interface()
+ if !inList(dest, item) {
+ dest = append(dest, item)
+ }
+ }
+
+ return dest, nil
+ default:
+ return nil, fmt.Errorf("Cannot find uniq on type %s", tp)
+ }
+}
+
+func inList(haystack []interface{}, needle interface{}) bool {
+ for _, h := range haystack {
+ if reflect.DeepEqual(needle, h) {
+ return true
+ }
+ }
+ return false
+}
+
+func without(list interface{}, omit ...interface{}) []interface{} {
+ l, err := mustWithout(list, omit...)
+ if err != nil {
+ panic(err)
+ }
+
+ return l
+}
+
+func mustWithout(list interface{}, omit ...interface{}) ([]interface{}, error) {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ res := []interface{}{}
+ var item interface{}
+ for i := 0; i < l; i++ {
+ item = l2.Index(i).Interface()
+ if !inList(omit, item) {
+ res = append(res, item)
+ }
+ }
+
+ return res, nil
+ default:
+ return nil, fmt.Errorf("Cannot find without on type %s", tp)
+ }
+}
+
+func has(needle interface{}, haystack interface{}) bool {
+ l, err := mustHas(needle, haystack)
+ if err != nil {
+ panic(err)
+ }
+
+ return l
+}
+
+func mustHas(needle interface{}, haystack interface{}) (bool, error) {
+ if haystack == nil {
+ return false, nil
+ }
+ tp := reflect.TypeOf(haystack).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(haystack)
+ var item interface{}
+ l := l2.Len()
+ for i := 0; i < l; i++ {
+ item = l2.Index(i).Interface()
+ if reflect.DeepEqual(needle, item) {
+ return true, nil
+ }
+ }
+
+ return false, nil
+ default:
+ return false, fmt.Errorf("Cannot find has on type %s", tp)
+ }
+}
+
+// $list := [1, 2, 3, 4, 5]
+// slice $list -> list[0:5] = list[:]
+// slice $list 0 3 -> list[0:3] = list[:3]
+// slice $list 3 5 -> list[3:5]
+// slice $list 3 -> list[3:5] = list[3:]
+func slice(list interface{}, indices ...interface{}) interface{} {
+ l, err := mustSlice(list, indices...)
+ if err != nil {
+ panic(err)
+ }
+
+ return l
+}
+
+func mustSlice(list interface{}, indices ...interface{}) (interface{}, error) {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ if l == 0 {
+ return nil, nil
+ }
+
+ var start, end int
+ if len(indices) > 0 {
+ start = toInt(indices[0])
+ }
+ if len(indices) < 2 {
+ end = l
+ } else {
+ end = toInt(indices[1])
+ }
+
+ return l2.Slice(start, end).Interface(), nil
+ default:
+ return nil, fmt.Errorf("list should be type of slice or array but %s", tp)
+ }
+}
+
+func concat(lists ...interface{}) interface{} {
+ var res []interface{}
+ for _, list := range lists {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+ for i := 0; i < l2.Len(); i++ {
+ res = append(res, l2.Index(i).Interface())
+ }
+ default:
+ panic(fmt.Sprintf("Cannot concat type %s as list", tp))
+ }
+ }
+ return res
+}
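+
+// Example (illustrative sketch, not part of the upstream file): these back
+// template pipelines such as
+//
+//   {{ list 1 2 3 | reverse | first }}   {{/* 3 */}}
+//   {{ without (list 1 2 3) 2 }}         {{/* [1 3] */}}
+//   {{ concat (list 1 2) (list 3) }}     {{/* [1 2 3] */}}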
diff --git a/vendor/github.com/Masterminds/sprig/v3/network.go b/vendor/github.com/Masterminds/sprig/v3/network.go
new file mode 100644
index 000000000..108d78a94
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/network.go
@@ -0,0 +1,12 @@
+package sprig
+
+import (
+ "math/rand"
+ "net"
+)
+
+func getHostByName(name string) string {
+ addrs, _ := net.LookupHost(name)
+ //TODO: add error handling when release v3 comes out
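+ // Note that when the lookup fails, addrs is empty and rand.Intn(0) panics.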
+ return addrs[rand.Intn(len(addrs))]
+}
diff --git a/vendor/github.com/Masterminds/sprig/v3/numeric.go b/vendor/github.com/Masterminds/sprig/v3/numeric.go
new file mode 100644
index 000000000..f68e4182e
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/numeric.go
@@ -0,0 +1,186 @@
+package sprig
+
+import (
+ "fmt"
+ "math"
+ "strconv"
+ "strings"
+
+ "github.com/spf13/cast"
+ "github.com/shopspring/decimal"
+)
+
+// toFloat64 converts the given value to a float64
+func toFloat64(v interface{}) float64 {
+ return cast.ToFloat64(v)
+}
+
+func toInt(v interface{}) int {
+ return cast.ToInt(v)
+}
+
+// toInt64 converts integer types to 64-bit integers
+func toInt64(v interface{}) int64 {
+ return cast.ToInt64(v)
+}
+
+func max(a interface{}, i ...interface{}) int64 {
+ aa := toInt64(a)
+ for _, b := range i {
+ bb := toInt64(b)
+ if bb > aa {
+ aa = bb
+ }
+ }
+ return aa
+}
+
+func maxf(a interface{}, i ...interface{}) float64 {
+ aa := toFloat64(a)
+ for _, b := range i {
+ bb := toFloat64(b)
+ aa = math.Max(aa, bb)
+ }
+ return aa
+}
+
+func min(a interface{}, i ...interface{}) int64 {
+ aa := toInt64(a)
+ for _, b := range i {
+ bb := toInt64(b)
+ if bb < aa {
+ aa = bb
+ }
+ }
+ return aa
+}
+
+func minf(a interface{}, i ...interface{}) float64 {
+ aa := toFloat64(a)
+ for _, b := range i {
+ bb := toFloat64(b)
+ aa = math.Min(aa, bb)
+ }
+ return aa
+}
+
+func until(count int) []int {
+ step := 1
+ if count < 0 {
+ step = -1
+ }
+ return untilStep(0, count, step)
+}
+
+func untilStep(start, stop, step int) []int {
+ v := []int{}
+
+ if stop < start {
+ if step >= 0 {
+ return v
+ }
+ for i := start; i > stop; i += step {
+ v = append(v, i)
+ }
+ return v
+ }
+
+ if step <= 0 {
+ return v
+ }
+ for i := start; i < stop; i += step {
+ v = append(v, i)
+ }
+ return v
+}
+
+func floor(a interface{}) float64 {
+ aa := toFloat64(a)
+ return math.Floor(aa)
+}
+
+func ceil(a interface{}) float64 {
+ aa := toFloat64(a)
+ return math.Ceil(aa)
+}
+
+func round(a interface{}, p int, rOpt ...float64) float64 {
+ roundOn := .5
+ if len(rOpt) > 0 {
+ roundOn = rOpt[0]
+ }
+ val := toFloat64(a)
+ places := toFloat64(p)
+
+ var round float64
+ pow := math.Pow(10, places)
+ digit := pow * val
+ _, div := math.Modf(digit)
+ if div >= roundOn {
+ round = math.Ceil(digit)
+ } else {
+ round = math.Floor(digit)
+ }
+ return round / pow
+}
+
+// toDecimal converts a Unix octal string to a decimal int64
+func toDecimal(v interface{}) int64 {
+ result, err := strconv.ParseInt(fmt.Sprint(v), 8, 64)
+ if err != nil {
+ return 0
+ }
+ return result
+}
+
+func seq(params ...int) string {
+ increment := 1
+ switch len(params) {
+ case 0:
+ return ""
+ case 1:
+ start := 1
+ end := params[0]
+ if end < start {
+ increment = -1
+ }
+ return intArrayToString(untilStep(start, end+increment, increment), " ")
+ case 3:
+ start := params[0]
+ end := params[2]
+ step := params[1]
+ if end < start {
+ increment = -1
+ if step > 0 {
+ return ""
+ }
+ }
+ return intArrayToString(untilStep(start, end+increment, step), " ")
+ case 2:
+ start := params[0]
+ end := params[1]
+ step := 1
+ if end < start {
+ step = -1
+ }
+ return intArrayToString(untilStep(start, end+step, step), " ")
+ default:
+ return ""
+ }
+}
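+
+// Example (illustrative sketch, not part of the upstream file): seq mirrors
+// the GNU seq utility, e.g. seq(5) == "1 2 3 4 5", seq(3, 1) == "3 2 1",
+// and seq(0, 2, 10) == "0 2 4 6 8 10".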
+
+func intArrayToString(slice []int, delimiter string) string {
+ return strings.Trim(strings.Join(strings.Fields(fmt.Sprint(slice)), delimiter), "[]")
+}
+
+// execDecimalOp performs a float and subsequent decimal.Decimal conversion on
+// the inputs, and iterates through a and b executing the mathematical operation f
+func execDecimalOp(a interface{}, b []interface{}, f func(d1, d2 decimal.Decimal) decimal.Decimal) float64 {
+ prt := decimal.NewFromFloat(toFloat64(a))
+ for _, x := range b {
+ dx := decimal.NewFromFloat(toFloat64(x))
+ prt = f(prt, dx)
+ }
+ rslt, _ := prt.Float64()
+ return rslt
+}
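+
+// Example (illustrative sketch, not part of the upstream file): addf in
+// functions.go routes through execDecimalOp, so {{ addf 1.5 2.25 }} computes
+// decimal-precise addition and yields 3.75.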
diff --git a/vendor/github.com/Masterminds/sprig/v3/reflect.go b/vendor/github.com/Masterminds/sprig/v3/reflect.go
new file mode 100644
index 000000000..8a65c132f
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/reflect.go
@@ -0,0 +1,28 @@
+package sprig
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// typeIs returns true if the src is the type named in target.
+func typeIs(target string, src interface{}) bool {
+ return target == typeOf(src)
+}
+
+func typeIsLike(target string, src interface{}) bool {
+ t := typeOf(src)
+ return target == t || "*"+target == t
+}
+
+func typeOf(src interface{}) string {
+ return fmt.Sprintf("%T", src)
+}
+
+func kindIs(target string, src interface{}) bool {
+ return target == kindOf(src)
+}
+
+func kindOf(src interface{}) string {
+ return reflect.ValueOf(src).Kind().String()
+}
diff --git a/vendor/github.com/Masterminds/sprig/v3/regex.go b/vendor/github.com/Masterminds/sprig/v3/regex.go
new file mode 100644
index 000000000..fab551018
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/regex.go
@@ -0,0 +1,83 @@
+package sprig
+
+import (
+ "regexp"
+)
+
+func regexMatch(regex string, s string) bool {
+ match, _ := regexp.MatchString(regex, s)
+ return match
+}
+
+func mustRegexMatch(regex string, s string) (bool, error) {
+ return regexp.MatchString(regex, s)
+}
+
+func regexFindAll(regex string, s string, n int) []string {
+ r := regexp.MustCompile(regex)
+ return r.FindAllString(s, n)
+}
+
+func mustRegexFindAll(regex string, s string, n int) ([]string, error) {
+ r, err := regexp.Compile(regex)
+ if err != nil {
+ return []string{}, err
+ }
+ return r.FindAllString(s, n), nil
+}
+
+func regexFind(regex string, s string) string {
+ r := regexp.MustCompile(regex)
+ return r.FindString(s)
+}
+
+func mustRegexFind(regex string, s string) (string, error) {
+ r, err := regexp.Compile(regex)
+ if err != nil {
+ return "", err
+ }
+ return r.FindString(s), nil
+}
+
+func regexReplaceAll(regex string, s string, repl string) string {
+ r := regexp.MustCompile(regex)
+ return r.ReplaceAllString(s, repl)
+}
+
+func mustRegexReplaceAll(regex string, s string, repl string) (string, error) {
+ r, err := regexp.Compile(regex)
+ if err != nil {
+ return "", err
+ }
+ return r.ReplaceAllString(s, repl), nil
+}
+
+func regexReplaceAllLiteral(regex string, s string, repl string) string {
+ r := regexp.MustCompile(regex)
+ return r.ReplaceAllLiteralString(s, repl)
+}
+
+func mustRegexReplaceAllLiteral(regex string, s string, repl string) (string, error) {
+ r, err := regexp.Compile(regex)
+ if err != nil {
+ return "", err
+ }
+ return r.ReplaceAllLiteralString(s, repl), nil
+}
+
+func regexSplit(regex string, s string, n int) []string {
+ r := regexp.MustCompile(regex)
+ return r.Split(s, n)
+}
+
+func mustRegexSplit(regex string, s string, n int) ([]string, error) {
+ r, err := regexp.Compile(regex)
+ if err != nil {
+ return []string{}, err
+ }
+ return r.Split(s, n), nil
+}
+
+func regexQuoteMeta(s string) string {
+ return regexp.QuoteMeta(s)
+}
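+
+// Example (illustrative sketch, not part of the upstream file): the non-must
+// variants swallow compile errors for template use, e.g.
+//
+//   regexMatch(`^a.+z$`, "abcz")               // true
+//   regexFindAll(`[2,4,6,8]`, "123456789", -1) // ["2", "4", "6", "8"]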
diff --git a/vendor/github.com/Masterminds/sprig/v3/semver.go b/vendor/github.com/Masterminds/sprig/v3/semver.go
new file mode 100644
index 000000000..3fbe08aa6
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/semver.go
@@ -0,0 +1,23 @@
+package sprig
+
+import (
+ sv2 "github.com/Masterminds/semver/v3"
+)
+
+func semverCompare(constraint, version string) (bool, error) {
+ c, err := sv2.NewConstraint(constraint)
+ if err != nil {
+ return false, err
+ }
+
+ v, err := sv2.NewVersion(version)
+ if err != nil {
+ return false, err
+ }
+
+ return c.Check(v), nil
+}
+
+func semver(version string) (*sv2.Version, error) {
+ return sv2.NewVersion(version)
+}
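+
+// Example (illustrative sketch, not part of the upstream file):
+//
+//   ok, _ := semverCompare("^1.2.0", "1.3.5") // ok == true: 1.3.5 satisfies ^1.2.0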
diff --git a/vendor/github.com/Masterminds/sprig/v3/strings.go b/vendor/github.com/Masterminds/sprig/v3/strings.go
new file mode 100644
index 000000000..e0ae628c8
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/strings.go
@@ -0,0 +1,236 @@
+package sprig
+
+import (
+ "encoding/base32"
+ "encoding/base64"
+ "fmt"
+ "reflect"
+ "strconv"
+ "strings"
+
+ util "github.com/Masterminds/goutils"
+)
+
+func base64encode(v string) string {
+ return base64.StdEncoding.EncodeToString([]byte(v))
+}
+
+func base64decode(v string) string {
+ data, err := base64.StdEncoding.DecodeString(v)
+ if err != nil {
+ return err.Error()
+ }
+ return string(data)
+}
+
+func base32encode(v string) string {
+ return base32.StdEncoding.EncodeToString([]byte(v))
+}
+
+func base32decode(v string) string {
+ data, err := base32.StdEncoding.DecodeString(v)
+ if err != nil {
+ return err.Error()
+ }
+ return string(data)
+}
+
+func abbrev(width int, s string) string {
+ if width < 4 {
+ return s
+ }
+ r, _ := util.Abbreviate(s, width)
+ return r
+}
+
+func abbrevboth(left, right int, s string) string {
+ if right < 4 || left > 0 && right < 7 {
+ return s
+ }
+ r, _ := util.AbbreviateFull(s, left, right)
+ return r
+}
+
+func initials(s string) string {
+ // Wrap this just to eliminate the var args, which templates don't do well.
+ return util.Initials(s)
+}
+
+func randAlphaNumeric(count int) string {
+ // It is not possible, it appears, to actually generate an error here.
+ r, _ := util.CryptoRandomAlphaNumeric(count)
+ return r
+}
+
+func randAlpha(count int) string {
+ r, _ := util.CryptoRandomAlphabetic(count)
+ return r
+}
+
+func randAscii(count int) string {
+ r, _ := util.CryptoRandomAscii(count)
+ return r
+}
+
+func randNumeric(count int) string {
+ r, _ := util.CryptoRandomNumeric(count)
+ return r
+}
+
+func untitle(str string) string {
+ return util.Uncapitalize(str)
+}
+
+func quote(str ...interface{}) string {
+ out := make([]string, 0, len(str))
+ for _, s := range str {
+ if s != nil {
+ out = append(out, fmt.Sprintf("%q", strval(s)))
+ }
+ }
+ return strings.Join(out, " ")
+}
+
+func squote(str ...interface{}) string {
+ out := make([]string, 0, len(str))
+ for _, s := range str {
+ if s != nil {
+ out = append(out, fmt.Sprintf("'%v'", s))
+ }
+ }
+ return strings.Join(out, " ")
+}
+
+func cat(v ...interface{}) string {
+ v = removeNilElements(v)
+ r := strings.TrimSpace(strings.Repeat("%v ", len(v)))
+ return fmt.Sprintf(r, v...)
+}
+
+func indent(spaces int, v string) string {
+ pad := strings.Repeat(" ", spaces)
+ return pad + strings.Replace(v, "\n", "\n"+pad, -1)
+}
+
+func nindent(spaces int, v string) string {
+ return "\n" + indent(spaces, v)
+}
+
+func replace(old, new, src string) string {
+ return strings.Replace(src, old, new, -1)
+}
+
+func plural(one, many string, count int) string {
+ if count == 1 {
+ return one
+ }
+ return many
+}
+
+func strslice(v interface{}) []string {
+ switch v := v.(type) {
+ case []string:
+ return v
+ case []interface{}:
+ b := make([]string, 0, len(v))
+ for _, s := range v {
+ if s != nil {
+ b = append(b, strval(s))
+ }
+ }
+ return b
+ default:
+ val := reflect.ValueOf(v)
+ switch val.Kind() {
+ case reflect.Array, reflect.Slice:
+ l := val.Len()
+ b := make([]string, 0, l)
+ for i := 0; i < l; i++ {
+ value := val.Index(i).Interface()
+ if value != nil {
+ b = append(b, strval(value))
+ }
+ }
+ return b
+ default:
+ if v == nil {
+ return []string{}
+ }
+
+ return []string{strval(v)}
+ }
+ }
+}
+
+func removeNilElements(v []interface{}) []interface{} {
+ newSlice := make([]interface{}, 0, len(v))
+ for _, i := range v {
+ if i != nil {
+ newSlice = append(newSlice, i)
+ }
+ }
+ return newSlice
+}
+
+func strval(v interface{}) string {
+ switch v := v.(type) {
+ case string:
+ return v
+ case []byte:
+ return string(v)
+ case error:
+ return v.Error()
+ case fmt.Stringer:
+ return v.String()
+ default:
+ return fmt.Sprintf("%v", v)
+ }
+}
+
+func trunc(c int, s string) string {
+ if c < 0 && len(s)+c > 0 {
+ return s[len(s)+c:]
+ }
+ if c >= 0 && len(s) > c {
+ return s[:c]
+ }
+ return s
+}
+
+func join(sep string, v interface{}) string {
+ return strings.Join(strslice(v), sep)
+}
+
+func split(sep, orig string) map[string]string {
+ parts := strings.Split(orig, sep)
+ res := make(map[string]string, len(parts))
+ for i, v := range parts {
+ res["_"+strconv.Itoa(i)] = v
+ }
+ return res
+}
+
+func splitn(sep string, n int, orig string) map[string]string {
+ parts := strings.SplitN(orig, sep, n)
+ res := make(map[string]string, len(parts))
+ for i, v := range parts {
+ res["_"+strconv.Itoa(i)] = v
+ }
+ return res
+}
+
+// substring creates a substring of the given string.
+//
+// If start is < 0, this calls string[:end].
+//
+// If start is >= 0 and end is < 0 or greater than s's length, this calls string[start:].
+//
+// Otherwise, this calls string[start:end].
+func substring(start, end int, s string) string {
+ if start < 0 {
+ return s[:end]
+ }
+ if end < 0 || end > len(s) {
+ return s[start:]
+ }
+ return s[start:end]
+}
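+
+// Example (illustrative sketch, not part of the upstream file):
+//
+//   substring(1, 3, "hello") // "el"
+//   trunc(3, "hello")        // "hel"
+//   trunc(-3, "hello")       // "llo"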
diff --git a/vendor/github.com/Masterminds/sprig/v3/url.go b/vendor/github.com/Masterminds/sprig/v3/url.go
new file mode 100644
index 000000000..b8e120e19
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/v3/url.go
@@ -0,0 +1,66 @@
+package sprig
+
+import (
+ "fmt"
+ "net/url"
+ "reflect"
+)
+
+func dictGetOrEmpty(dict map[string]interface{}, key string) string {
+ value, ok := dict[key]
+ if !ok {
+ return ""
+ }
+ tp := reflect.TypeOf(value).Kind()
+ if tp != reflect.String {
+ panic(fmt.Sprintf("unable to parse %s key, must be of type string, but %s found", key, tp.String()))
+ }
+ return reflect.ValueOf(value).String()
+}
+
+// urlParse parses the given URL and returns its parts as a dict
+func urlParse(v string) map[string]interface{} {
+ dict := map[string]interface{}{}
+ parsedURL, err := url.Parse(v)
+ if err != nil {
+ panic(fmt.Sprintf("unable to parse url: %s", err))
+ }
+ dict["scheme"] = parsedURL.Scheme
+ dict["host"] = parsedURL.Host
+ dict["hostname"] = parsedURL.Hostname()
+ dict["path"] = parsedURL.Path
+ dict["query"] = parsedURL.RawQuery
+ dict["opaque"] = parsedURL.Opaque
+ dict["fragment"] = parsedURL.Fragment
+ if parsedURL.User != nil {
+ dict["userinfo"] = parsedURL.User.String()
+ } else {
+ dict["userinfo"] = ""
+ }
+
+ return dict
+}
+
+// urlJoin joins the given dict back into a URL string
+func urlJoin(d map[string]interface{}) string {
+ resURL := url.URL{
+ Scheme: dictGetOrEmpty(d, "scheme"),
+ Host: dictGetOrEmpty(d, "host"),
+ Path: dictGetOrEmpty(d, "path"),
+ RawQuery: dictGetOrEmpty(d, "query"),
+ Opaque: dictGetOrEmpty(d, "opaque"),
+ Fragment: dictGetOrEmpty(d, "fragment"),
+ }
+ userinfo := dictGetOrEmpty(d, "userinfo")
+ var user *url.Userinfo
+ if userinfo != "" {
+ tempURL, err := url.Parse(fmt.Sprintf("proto://%s@host", userinfo))
+ if err != nil {
+ panic(fmt.Sprintf("unable to parse userinfo in dict: %s", err))
+ }
+ user = tempURL.User
+ }
+
+ resURL.User = user
+ return resURL.String()
+}
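+
+// Example (illustrative sketch, not part of the upstream file): urlParse and
+// urlJoin round-trip a URL through a dict:
+//
+//   d := urlParse("https://user:pass@example.com/path?q=1")
+//   d["path"] = "/other"
+//   _ = urlJoin(d) // "https://user:pass@example.com/other?q=1"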
diff --git a/vendor/github.com/asaskevich/govalidator/.gitignore b/vendor/github.com/asaskevich/govalidator/.gitignore
new file mode 100644
index 000000000..8d69a9418
--- /dev/null
+++ b/vendor/github.com/asaskevich/govalidator/.gitignore
@@ -0,0 +1,15 @@
+bin/
+.idea/
+# Binaries for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+
+# Test binary, built with `go test -c`
+*.test
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
diff --git a/vendor/github.com/asaskevich/govalidator/.travis.yml b/vendor/github.com/asaskevich/govalidator/.travis.yml
new file mode 100644
index 000000000..bb83c6670
--- /dev/null
+++ b/vendor/github.com/asaskevich/govalidator/.travis.yml
@@ -0,0 +1,12 @@
+language: go
+dist: xenial
+go:
+ - '1.10'
+ - '1.11'
+ - '1.12'
+ - '1.13'
+ - 'tip'
+
+script:
+ - go test -coverpkg=./... -coverprofile=coverage.info -timeout=5s
+ - bash <(curl -s https://codecov.io/bash)
diff --git a/vendor/github.com/asaskevich/govalidator/CODE_OF_CONDUCT.md b/vendor/github.com/asaskevich/govalidator/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..4b462b0d8
--- /dev/null
+++ b/vendor/github.com/asaskevich/govalidator/CODE_OF_CONDUCT.md
@@ -0,0 +1,43 @@
+# Contributor Code of Conduct
+
+This project adheres to [The Code Manifesto](http://codemanifesto.com)
+as its guidelines for contributor interactions.
+
+## The Code Manifesto
+
+We want to work in an ecosystem that empowers developers to reach their
+potential — one that encourages growth and effective collaboration. A space
+that is safe for all.
+
+A space such as this benefits everyone that participates in it. It encourages
+new developers to enter our field. It is through discussion and collaboration
+that we grow, and through growth that we improve.
+
+In the effort to create such a place, we hold to these values:
+
+1. **Discrimination limits us.** This includes discrimination on the basis of
+ race, gender, sexual orientation, gender identity, age, nationality,
+ technology and any other arbitrary exclusion of a group of people.
+2. **Boundaries honor us.** Your comfort levels are not everyone’s comfort
+ levels. Remember that, and if brought to your attention, heed it.
+3. **We are our biggest assets.** None of us were born masters of our trade.
+ Each of us has been helped along the way. Return that favor, when and where
+ you can.
+4. **We are resources for the future.** As an extension of #3, share what you
+ know. Make yourself a resource to help those that come after you.
+5. **Respect defines us.** Treat others as you wish to be treated. Make your
+ discussions, criticisms and debates from a position of respectfulness. Ask
+ yourself, is it true? Is it necessary? Is it constructive? Anything less is
+ unacceptable.
+6. **Reactions require grace.** Angry responses are valid, but abusive language
+ and vindictive actions are toxic. When something happens that offends you,
+ handle it assertively, but be respectful. Escalate reasonably, and try to
+ allow the offender an opportunity to explain themselves, and possibly
+ correct the issue.
+7. **Opinions are just that: opinions.** Each and every one of us, due to our
+ background and upbringing, have varying opinions. That is perfectly
+ acceptable. Remember this: if you respect your own opinions, you should
+ respect the opinions of others.
+8. **To err is human.** You might not intend it, but mistakes do happen and
+ contribute to build experience. Tolerate honest mistakes, and don't
+ hesitate to apologize if you make one yourself.
diff --git a/vendor/github.com/asaskevich/govalidator/CONTRIBUTING.md b/vendor/github.com/asaskevich/govalidator/CONTRIBUTING.md
new file mode 100644
index 000000000..7ed268a1e
--- /dev/null
+++ b/vendor/github.com/asaskevich/govalidator/CONTRIBUTING.md
@@ -0,0 +1,63 @@
+#### Support
+If you have a contribution to make to the package, feel free to create a Pull Request or an Issue.
+
+#### What to contribute
+If you don't know what to do, there are some features and functions that still need to be done:
+
+- [ ] Refactor code
+- [ ] Edit docs and [README](https://github.com/asaskevich/govalidator/blob/master/README.md): spellcheck, grammar and typo check
+- [ ] Create actual list of contributors and projects that currently using this package
+- [ ] Resolve [issues and bugs](https://github.com/asaskevich/govalidator/issues)
+- [ ] Update actual [list of functions](https://github.com/asaskevich/govalidator#list-of-functions)
+- [ ] Update [list of validators](https://github.com/asaskevich/govalidator#validatestruct-2) that available for `ValidateStruct` and add new
+- [ ] Implement new validators: `IsFQDN`, `IsIMEI`, `IsPostalCode`, `IsISIN`, `IsISRC` etc
+- [x] Implement [validation by maps](https://github.com/asaskevich/govalidator/issues/224)
+- [ ] Implement fuzzing testing
+- [ ] Implement some struct/map/array utilities
+- [ ] Implement map/array validation
+- [ ] Implement benchmarking
+- [ ] Implement batch of examples
+- [ ] Look at forks for new features and fixes
+
+#### Advice
+Feel free to create what you want, but keep in mind when you implement new features:
+- Code must be clear and readable, and the names of variables/constants must clearly describe what they do
+- Public functions must be documented and described in the source file, and added to the list of available functions in README.md
+- There must be unit tests for any new functions and improvements
+
+## Financial contributions
+
+We also welcome financial contributions in full transparency on our [open collective](https://opencollective.com/govalidator).
+Anyone can file an expense. If the expense makes sense for the development of the community, it will be "merged" in the ledger of our open collective by the core contributors and the person who filed the expense will be reimbursed.
+
+
+## Credits
+
+
+### Contributors
+
+Thank you to all the people who have already contributed to govalidator!
+<a href="https://github.com/asaskevich/govalidator/graphs/contributors"><img src="https://opencollective.com/govalidator/contributors.svg?width=890" /></a>
+
+
+### Backers
+
+Thank you to all our backers! [[Become a backer](https://opencollective.com/govalidator#backer)]
+
+<a href="https://opencollective.com/govalidator#backers" target="_blank"><img src="https://opencollective.com/govalidator/backers.svg?width=890"></a>
+
+
+### Sponsors
+
+Thank you to all our sponsors! (please ask your company to also support this open source project by [becoming a sponsor](https://opencollective.com/govalidator#sponsor))
+
+<a href="https://opencollective.com/govalidator/sponsor/0/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/0/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/1/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/1/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/2/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/2/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/3/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/3/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/4/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/4/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/5/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/5/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/6/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/6/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/7/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/7/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/8/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/8/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/9/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/9/avatar.svg"></a> \ No newline at end of file
diff --git a/vendor/github.com/asaskevich/govalidator/LICENSE b/vendor/github.com/asaskevich/govalidator/LICENSE
new file mode 100644
index 000000000..cacba9102
--- /dev/null
+++ b/vendor/github.com/asaskevich/govalidator/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014-2020 Alex Saskevich
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/vendor/github.com/asaskevich/govalidator/README.md b/vendor/github.com/asaskevich/govalidator/README.md
new file mode 100644
index 000000000..2c3fc35eb
--- /dev/null
+++ b/vendor/github.com/asaskevich/govalidator/README.md
@@ -0,0 +1,622 @@
+govalidator
+===========
+[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/asaskevich/govalidator?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [![GoDoc](https://godoc.org/github.com/asaskevich/govalidator?status.png)](https://godoc.org/github.com/asaskevich/govalidator)
+[![Build Status](https://travis-ci.org/asaskevich/govalidator.svg?branch=master)](https://travis-ci.org/asaskevich/govalidator)
+[![Coverage](https://codecov.io/gh/asaskevich/govalidator/branch/master/graph/badge.svg)](https://codecov.io/gh/asaskevich/govalidator) [![Go Report Card](https://goreportcard.com/badge/github.com/asaskevich/govalidator)](https://goreportcard.com/report/github.com/asaskevich/govalidator) [![GoSearch](http://go-search.org/badge?id=github.com%2Fasaskevich%2Fgovalidator)](http://go-search.org/view?id=github.com%2Fasaskevich%2Fgovalidator) [![Backers on Open Collective](https://opencollective.com/govalidator/backers/badge.svg)](#backers) [![Sponsors on Open Collective](https://opencollective.com/govalidator/sponsors/badge.svg)](#sponsors) [![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator?ref=badge_shield)
+
+A package of validators and sanitizers for strings, structs and collections. Based on [validator.js](https://github.com/chriso/validator.js).
+
+#### Installation
+Make sure that Go is installed on your computer.
+Type the following command in your terminal:
+
+ go get github.com/asaskevich/govalidator
+
+or you can get a specific release of the package with `gopkg.in`:
+
+ go get gopkg.in/asaskevich/govalidator.v10
+
+After that, the package is ready to use.
+
+
+#### Import package in your project
+Add the following line to your `*.go` file:
+```go
+import "github.com/asaskevich/govalidator"
+```
+If you don't want to type out the long `govalidator` name, you can alias the import like this:
+```go
+import (
+ valid "github.com/asaskevich/govalidator"
+)
+```
+
+#### Activate behavior to require all fields to have a validation tag by default
+`SetFieldsRequiredByDefault` causes validation to fail when struct fields do not include validations or are not explicitly marked as exempt (using `valid:"-"` or `valid:"email,optional"`). A good place to activate this is a package init function or the main() function.
+
+`SetNilPtrAllowedByRequired` causes validation to pass when struct fields marked by `required` are set to nil. This is disabled by default for consistency, but some packages that need to be able to distinguish between `nil` and `zero value` states can use this. If disabled, both `nil` and `zero` values cause validation errors.
+
+```go
+import "github.com/asaskevich/govalidator"
+
+func init() {
+ govalidator.SetFieldsRequiredByDefault(true)
+}
+```
+
+Here's some code to explain it:
+```go
+// this struct definition will fail govalidator.ValidateStruct() (and the field values do not matter):
+type exampleStruct struct {
+ Name string ``
+ Email string `valid:"email"`
+}
+
+// this, however, will only fail when Email is empty or an invalid email address:
+type exampleStruct2 struct {
+ Name string `valid:"-"`
+ Email string `valid:"email"`
+}
+
+// lastly, this will only fail when Email is an invalid email address but not when it's empty:
+type exampleStruct3 struct {
+ Name string `valid:"-"`
+ Email string `valid:"email,optional"`
+}
+```
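+
+As a quick illustrative sketch (reusing the structs above), running validation then looks like:
+```go
+ok, err := govalidator.ValidateStruct(exampleStruct3{Name: "X", Email: "not-an-email"})
+// ok == false; err reports that Email fails the "email" validator
+println(ok, err != nil)
+```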
+
+#### Recent breaking changes (see [#123](https://github.com/asaskevich/govalidator/pull/123))
+##### Custom validator function signature
+A context was added as the second parameter; for structs, this is the object being validated, which makes dependent validation possible.
+```go
+import "github.com/asaskevich/govalidator"
+
+// old signature
+func(i interface{}) bool
+
+// new signature
+func(i interface{}, o interface{}) bool
+```
+
+##### Adding a custom validator
+This was changed to prevent data races when accessing custom validators.
+```go
+import "github.com/asaskevich/govalidator"
+
+// before
+govalidator.CustomTypeTagMap["customByteArrayValidator"] = func(i interface{}, o interface{}) bool {
+ // ...
+}
+
+// after
+govalidator.CustomTypeTagMap.Set("customByteArrayValidator", func(i interface{}, o interface{}) bool {
+ // ...
+})
+```
+
+#### List of functions:
+```go
+func Abs(value float64) float64
+func BlackList(str, chars string) string
+func ByteLength(str string, params ...string) bool
+func CamelCaseToUnderscore(str string) string
+func Contains(str, substring string) bool
+func Count(array []interface{}, iterator ConditionIterator) int
+func Each(array []interface{}, iterator Iterator)
+func ErrorByField(e error, field string) string
+func ErrorsByField(e error) map[string]string
+func Filter(array []interface{}, iterator ConditionIterator) []interface{}
+func Find(array []interface{}, iterator ConditionIterator) interface{}
+func GetLine(s string, index int) (string, error)
+func GetLines(s string) []string
+func HasLowerCase(str string) bool
+func HasUpperCase(str string) bool
+func HasWhitespace(str string) bool
+func HasWhitespaceOnly(str string) bool
+func InRange(value interface{}, left interface{}, right interface{}) bool
+func InRangeFloat32(value, left, right float32) bool
+func InRangeFloat64(value, left, right float64) bool
+func InRangeInt(value, left, right interface{}) bool
+func IsASCII(str string) bool
+func IsAlpha(str string) bool
+func IsAlphanumeric(str string) bool
+func IsBase64(str string) bool
+func IsByteLength(str string, min, max int) bool
+func IsCIDR(str string) bool
+func IsCRC32(str string) bool
+func IsCRC32b(str string) bool
+func IsCreditCard(str string) bool
+func IsDNSName(str string) bool
+func IsDataURI(str string) bool
+func IsDialString(str string) bool
+func IsDivisibleBy(str, num string) bool
+func IsEmail(str string) bool
+func IsExistingEmail(email string) bool
+func IsFilePath(str string) (bool, int)
+func IsFloat(str string) bool
+func IsFullWidth(str string) bool
+func IsHalfWidth(str string) bool
+func IsHash(str string, algorithm string) bool
+func IsHexadecimal(str string) bool
+func IsHexcolor(str string) bool
+func IsHost(str string) bool
+func IsIP(str string) bool
+func IsIPv4(str string) bool
+func IsIPv6(str string) bool
+func IsISBN(str string, version int) bool
+func IsISBN10(str string) bool
+func IsISBN13(str string) bool
+func IsISO3166Alpha2(str string) bool
+func IsISO3166Alpha3(str string) bool
+func IsISO4217(str string) bool
+func IsISO693Alpha2(str string) bool
+func IsISO693Alpha3b(str string) bool
+func IsIn(str string, params ...string) bool
+func IsInRaw(str string, params ...string) bool
+func IsInt(str string) bool
+func IsJSON(str string) bool
+func IsLatitude(str string) bool
+func IsLongitude(str string) bool
+func IsLowerCase(str string) bool
+func IsMAC(str string) bool
+func IsMD4(str string) bool
+func IsMD5(str string) bool
+func IsMagnetURI(str string) bool
+func IsMongoID(str string) bool
+func IsMultibyte(str string) bool
+func IsNatural(value float64) bool
+func IsNegative(value float64) bool
+func IsNonNegative(value float64) bool
+func IsNonPositive(value float64) bool
+func IsNotNull(str string) bool
+func IsNull(str string) bool
+func IsNumeric(str string) bool
+func IsPort(str string) bool
+func IsPositive(value float64) bool
+func IsPrintableASCII(str string) bool
+func IsRFC3339(str string) bool
+func IsRFC3339WithoutZone(str string) bool
+func IsRGBcolor(str string) bool
+func IsRegex(str string) bool
+func IsRequestURI(rawurl string) bool
+func IsRequestURL(rawurl string) bool
+func IsRipeMD128(str string) bool
+func IsRipeMD160(str string) bool
+func IsRsaPub(str string, params ...string) bool
+func IsRsaPublicKey(str string, keylen int) bool
+func IsSHA1(str string) bool
+func IsSHA256(str string) bool
+func IsSHA384(str string) bool
+func IsSHA512(str string) bool
+func IsSSN(str string) bool
+func IsSemver(str string) bool
+func IsTiger128(str string) bool
+func IsTiger160(str string) bool
+func IsTiger192(str string) bool
+func IsTime(str string, format string) bool
+func IsType(v interface{}, params ...string) bool
+func IsURL(str string) bool
+func IsUTFDigit(str string) bool
+func IsUTFLetter(str string) bool
+func IsUTFLetterNumeric(str string) bool
+func IsUTFNumeric(str string) bool
+func IsUUID(str string) bool
+func IsUUIDv3(str string) bool
+func IsUUIDv4(str string) bool
+func IsUUIDv5(str string) bool
+func IsULID(str string) bool
+func IsUnixTime(str string) bool
+func IsUpperCase(str string) bool
+func IsVariableWidth(str string) bool
+func IsWhole(value float64) bool
+func LeftTrim(str, chars string) string
+func Map(array []interface{}, iterator ResultIterator) []interface{}
+func Matches(str, pattern string) bool
+func MaxStringLength(str string, params ...string) bool
+func MinStringLength(str string, params ...string) bool
+func NormalizeEmail(str string) (string, error)
+func PadBoth(str string, padStr string, padLen int) string
+func PadLeft(str string, padStr string, padLen int) string
+func PadRight(str string, padStr string, padLen int) string
+func PrependPathToErrors(err error, path string) error
+func Range(str string, params ...string) bool
+func RemoveTags(s string) string
+func ReplacePattern(str, pattern, replace string) string
+func Reverse(s string) string
+func RightTrim(str, chars string) string
+func RuneLength(str string, params ...string) bool
+func SafeFileName(str string) string
+func SetFieldsRequiredByDefault(value bool)
+func SetNilPtrAllowedByRequired(value bool)
+func Sign(value float64) float64
+func StringLength(str string, params ...string) bool
+func StringMatches(s string, params ...string) bool
+func StripLow(str string, keepNewLines bool) string
+func ToBoolean(str string) (bool, error)
+func ToFloat(str string) (float64, error)
+func ToInt(value interface{}) (res int64, err error)
+func ToJSON(obj interface{}) (string, error)
+func ToString(obj interface{}) string
+func Trim(str, chars string) string
+func Truncate(str string, length int, ending string) string
+func TruncatingErrorf(str string, args ...interface{}) error
+func UnderscoreToCamelCase(s string) string
+func ValidateMap(inputMap map[string]interface{}, validationMap map[string]interface{}) (bool, error)
+func ValidateStruct(s interface{}) (bool, error)
+func WhiteList(str, chars string) string
+type ConditionIterator
+type CustomTypeValidator
+type Error
+func (e Error) Error() string
+type Errors
+func (es Errors) Error() string
+func (es Errors) Errors() []error
+type ISO3166Entry
+type ISO693Entry
+type InterfaceParamValidator
+type Iterator
+type ParamValidator
+type ResultIterator
+type UnsupportedTypeError
+func (e *UnsupportedTypeError) Error() string
+type Validator
+```
+
+#### Examples
+###### IsURL
+```go
+println(govalidator.IsURL(`http://user@pass:domain.com/path/page`))
+```
+###### IsType
+```go
+println(govalidator.IsType("Bob", "string"))
+println(govalidator.IsType(1, "int"))
+i := 1
+println(govalidator.IsType(&i, "*int"))
+```
+
+IsType can be used through the tag `type`, which is essential for map validation:
+```go
+type User struct {
+ Name string `valid:"type(string)"`
+ Age int `valid:"type(int)"`
+ Meta interface{} `valid:"type(string)"`
+}
+result, err := govalidator.ValidateStruct(User{"Bob", 20, "meta"})
+if err != nil {
+ println("error: " + err.Error())
+}
+println(result)
+```
+###### ToString
+```go
+type User struct {
+ FirstName string
+ LastName string
+}
+
+str := govalidator.ToString(&User{"John", "Juan"})
+println(str)
+```
+###### Each, Map, Filter, Count for slices
+Each iterates over the slice/array and calls Iterator for every item:
+```go
+data := []interface{}{1, 2, 3, 4, 5}
+var fn govalidator.Iterator = func(value interface{}, index int) {
+ println(value.(int))
+}
+govalidator.Each(data, fn)
+```
+Map applies ResultIterator to every item and returns the new slice:
+```go
+data := []interface{}{1, 2, 3, 4, 5}
+var fn govalidator.ResultIterator = func(value interface{}, index int) interface{} {
+ return value.(int) * 3
+}
+_ = govalidator.Map(data, fn) // result = []interface{}{3, 6, 9, 12, 15}
+```
+Filter and Count apply a ConditionIterator to select or tally matching items:
+```go
+data := []interface{}{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
+var fn govalidator.ConditionIterator = func(value interface{}, index int) bool {
+ return value.(int)%2 == 0
+}
+_ = govalidator.Filter(data, fn) // result = []interface{}{2, 4, 6, 8, 10}
+_ = govalidator.Count(data, fn) // result = 5
+```
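+Related helpers `Find`, `Some`, and `Every` follow the same pattern with a ConditionIterator; a small sketch with illustrative data:
+```go
+data := []interface{}{1, 2, 3, 4, 5}
+var even govalidator.ConditionIterator = func(value interface{}, index int) bool {
+ return value.(int)%2 == 0
+}
+_ = govalidator.Find(data, even)  // result = 2 (first item satisfying the condition)
+_ = govalidator.Some(data, even)  // result = true (at least one item satisfies it)
+_ = govalidator.Every(data, even) // result = false (not every item satisfies it)
+```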
+###### ValidateStruct [#2](https://github.com/asaskevich/govalidator/pull/2)
+If you want to validate structs, you can use the `valid` tag on any field in your structure. Multiple validators for one field are separated by commas within the tag. If you want to skip validation, place `-` in the tag. If you need a validator that is not on the list below, you can add it like this:
+```go
+govalidator.TagMap["duck"] = govalidator.Validator(func(str string) bool {
+ return str == "duck"
+})
+```
+For completely custom validators (interface-based), see below.
+
+Here is a list of available validators for struct fields (validator - used function):
+```go
+"email": IsEmail,
+"url": IsURL,
+"dialstring": IsDialString,
+"requrl": IsRequestURL,
+"requri": IsRequestURI,
+"alpha": IsAlpha,
+"utfletter": IsUTFLetter,
+"alphanum": IsAlphanumeric,
+"utfletternum": IsUTFLetterNumeric,
+"numeric": IsNumeric,
+"utfnumeric": IsUTFNumeric,
+"utfdigit": IsUTFDigit,
+"hexadecimal": IsHexadecimal,
+"hexcolor": IsHexcolor,
+"rgbcolor": IsRGBcolor,
+"lowercase": IsLowerCase,
+"uppercase": IsUpperCase,
+"int": IsInt,
+"float": IsFloat,
+"null": IsNull,
+"uuid": IsUUID,
+"uuidv3": IsUUIDv3,
+"uuidv4": IsUUIDv4,
+"uuidv5": IsUUIDv5,
+"creditcard": IsCreditCard,
+"isbn10": IsISBN10,
+"isbn13": IsISBN13,
+"json": IsJSON,
+"multibyte": IsMultibyte,
+"ascii": IsASCII,
+"printableascii": IsPrintableASCII,
+"fullwidth": IsFullWidth,
+"halfwidth": IsHalfWidth,
+"variablewidth": IsVariableWidth,
+"base64": IsBase64,
+"datauri": IsDataURI,
+"ip": IsIP,
+"port": IsPort,
+"ipv4": IsIPv4,
+"ipv6": IsIPv6,
+"dns": IsDNSName,
+"host": IsHost,
+"mac": IsMAC,
+"latitude": IsLatitude,
+"longitude": IsLongitude,
+"ssn": IsSSN,
+"semver": IsSemver,
+"rfc3339": IsRFC3339,
+"rfc3339WithoutZone": IsRFC3339WithoutZone,
+"ISO3166Alpha2": IsISO3166Alpha2,
+"ISO3166Alpha3": IsISO3166Alpha3,
+"ulid": IsULID,
+```
+Validators with parameters
+
+```go
+"range(min|max)": Range,
+"length(min|max)": ByteLength,
+"runelength(min|max)": RuneLength,
+"stringlength(min|max)": StringLength,
+"matches(pattern)": StringMatches,
+"in(string1|string2|...|stringN)": IsIn,
+"rsapub(keylength)" : IsRsaPub,
+"minstringlength(int): MinStringLength,
+"maxstringlength(int): MaxStringLength,
+```
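+These parameterized tags are used inside struct tags just like the plain ones; a minimal sketch (the struct and its values are illustrative):
+```go
+type Profile struct {
+ Nickname string `valid:"stringlength(3|40)"`
+ Bio string `valid:"maxstringlength(500)"`
+ Role string `valid:"in(admin|moderator|user)"`
+}
+result, err := govalidator.ValidateStruct(Profile{"Bob", "Hello!", "user"})
+if err != nil {
+ println("error: " + err.Error())
+}
+println(result)
+```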
+Validators with parameters for any type
+
+```go
+"type(type)": IsType,
+```
+
+And here is a small example of usage:
+```go
+type Post struct {
+ Title string `valid:"alphanum,required"`
+ Message string `valid:"duck,ascii"`
+ Message2 string `valid:"animal(dog)"`
+ AuthorIP string `valid:"ipv4"`
+ Date string `valid:"-"`
+}
+post := &Post{
+ Title: "My Example Post",
+ Message: "duck",
+ Message2: "dog",
+ AuthorIP: "123.234.54.3",
+}
+
+// Add your own struct validation tags
+govalidator.TagMap["duck"] = govalidator.Validator(func(str string) bool {
+ return str == "duck"
+})
+
+// Add your own struct validation tags with parameter
+govalidator.ParamTagMap["animal"] = govalidator.ParamValidator(func(str string, params ...string) bool {
+ species := params[0]
+ return str == species
+})
+govalidator.ParamTagRegexMap["animal"] = regexp.MustCompile("^animal\\((\\w+)\\)$")
+
+result, err := govalidator.ValidateStruct(post)
+if err != nil {
+ println("error: " + err.Error())
+}
+println(result)
+```
+###### ValidateMap [#338](https://github.com/asaskevich/govalidator/pull/338)
+If you want to validate maps, pass the map to be validated together with a validation map that contains the same tags used in ValidateStruct; both maps must be of the form `map[string]interface{}`.
+
+So here is a small example of usage:
+```go
+var mapTemplate = map[string]interface{}{
+ "name":"required,alpha",
+ "family":"required,alpha",
+ "email":"required,email",
+ "cell-phone":"numeric",
+ "address":map[string]interface{}{
+ "line1":"required,alphanum",
+ "line2":"alphanum",
+ "postal-code":"numeric",
+ },
+}
+
+var inputMap = map[string]interface{}{
+ "name":"Bob",
+ "family":"Smith",
+ "email":"foo@bar.baz",
+ "address":map[string]interface{}{
+ "line1":"",
+ "line2":"",
+ "postal-code":"",
+ },
+}
+
+result, err := govalidator.ValidateMap(inputMap, mapTemplate)
+if err != nil {
+ println("error: " + err.Error())
+}
+println(result)
+```
+
+###### WhiteList
+```go
+// Remove all characters from the string except those in the range "a" to "z"
+println(govalidator.WhiteList("a3a43a5a4a3a2a23a4a5a4a3a4", "a-z") == "aaaaaaaaaaaa")
+```
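+Other sanitizers from the function list work the same way; for example `Trim` and `Reverse` (input values are illustrative):
+```go
+println(govalidator.Trim("  Hello, World!  ", "") == "Hello, World!") // an empty cut set trims whitespace
+println(govalidator.Reverse("golang") == "gnalog")
+```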
+
+###### Custom validation functions
+Custom validation using your own domain-specific validators is also available - here's an example of how to use it:
+```go
+import "github.com/asaskevich/govalidator"
+
+type CustomByteArray [6]byte // custom types are supported and can be validated
+
+type StructWithCustomByteArray struct {
+ ID CustomByteArray `valid:"customByteArrayValidator,customMinLengthValidator"` // multiple custom validators are possible as well and will be evaluated in sequence
+ Email string `valid:"email"`
+ CustomMinLength int `valid:"-"`
+}
+
+govalidator.CustomTypeTagMap.Set("customByteArrayValidator", func(i interface{}, context interface{}) bool {
+ switch v := context.(type) { // you can type switch on the context interface being validated
+ case StructWithCustomByteArray:
+ // you can check and validate against some other field in the context,
+ // return early or not validate against the context at all – your choice
+ case SomeOtherType:
+ // ...
+ default:
+ // expecting some other type? Throw/panic here or continue
+ }
+
+ switch v := i.(type) { // type switch on the struct field being validated
+ case CustomByteArray:
+ for _, e := range v { // this validator checks that the byte array is not empty, i.e. not all zeroes
+ if e != 0 {
+ return true
+ }
+ }
+ }
+ return false
+})
+govalidator.CustomTypeTagMap.Set("customMinLengthValidator", func(i interface{}, context interface{}) bool {
+ switch v := context.(type) { // this validates a field against the value in another field, i.e. dependent validation
+ case StructWithCustomByteArray:
+ return len(v.ID) >= v.CustomMinLength
+ }
+ return false
+})
+```
+
+###### Loop over Error()
+By default .Error() returns all errors as a single string. To access each error individually you can do this:
+```go
+ if err != nil {
+ errs := err.(govalidator.Errors).Errors()
+ for _, e := range errs {
+ fmt.Println(e.Error())
+ }
+ }
+```
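+Each element can also be type-asserted to `govalidator.Error` to see which field and validator failed; a sketch (the assertion guards against nested error groups):
+```go
+ if err != nil {
+ errs := err.(govalidator.Errors).Errors()
+ for _, e := range errs {
+ if ve, ok := e.(govalidator.Error); ok {
+ fmt.Printf("field %q failed validator %q: %v\n", ve.Name, ve.Validator, ve.Err)
+ }
+ }
+ }
+```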
+
+###### Custom error messages
+Custom error messages are supported via annotations by adding the `~` separator - here's an example of how to use it:
+```go
+type Ticket struct {
+ Id int64 `json:"id"`
+ FirstName string `json:"firstname" valid:"required~First name is blank"`
+}
+```
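+With the tag above, a failing `required` check surfaces the custom message instead of the generated one; a quick sketch:
+```go
+ticket := Ticket{Id: 1} // FirstName left blank
+if _, err := govalidator.ValidateStruct(ticket); err != nil {
+ println(err.Error()) // should print: First name is blank
+}
+```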
+
+#### Notes
+Documentation is available here: [godoc.org](https://godoc.org/github.com/asaskevich/govalidator).
+Full information about code coverage is also available here: [govalidator on gocover.io](http://gocover.io/github.com/asaskevich/govalidator).
+
+#### Support
+If you have a contribution to the package, feel free to create a Pull Request or an Issue.
+
+#### What to contribute
+If you don't know what to do, here are some features and functions that still need work:
+
+- [ ] Refactor code
+- [ ] Edit docs and [README](https://github.com/asaskevich/govalidator/README.md): spellcheck, grammar, and typo check
+- [ ] Create an up-to-date list of contributors and of projects that are currently using this package
+- [ ] Resolve [issues and bugs](https://github.com/asaskevich/govalidator/issues)
+- [ ] Keep the [list of functions](https://github.com/asaskevich/govalidator#list-of-functions) up to date
+- [ ] Update the [list of validators](https://github.com/asaskevich/govalidator#validatestruct-2) available for `ValidateStruct` and add new ones
+- [ ] Implement new validators: `IsFQDN`, `IsIMEI`, `IsPostalCode`, `IsISIN`, `IsISRC`, etc.
+- [x] Implement [validation by maps](https://github.com/asaskevich/govalidator/issues/224)
+- [ ] Implement fuzzing testing
+- [ ] Implement some struct/map/array utilities
+- [ ] Implement map/array validation
+- [ ] Implement benchmarking
+- [ ] Implement batch of examples
+- [ ] Look at forks for new features and fixes
+
+#### Advice
+Feel free to create what you want, but keep these points in mind when you implement new features:
+- Code must be clear and readable, and the names of variables/constants must clearly describe what they hold
+- Public functions must be documented in the source file and added to the list of available functions in README.md
+- There must be unit tests for any new functions and improvements
+
+## Credits
+### Contributors
+
+This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)].
+
+#### Special thanks to [contributors](https://github.com/asaskevich/govalidator/graphs/contributors)
+* [Daniel Lohse](https://github.com/annismckenzie)
+* [Attila Oláh](https://github.com/attilaolah)
+* [Daniel Korner](https://github.com/Dadie)
+* [Steven Wilkin](https://github.com/stevenwilkin)
+* [Deiwin Sarjas](https://github.com/deiwin)
+* [Noah Shibley](https://github.com/slugmobile)
+* [Nathan Davies](https://github.com/nathj07)
+* [Matt Sanford](https://github.com/mzsanford)
+* [Simon ccl1115](https://github.com/ccl1115)
+
+<a href="https://github.com/asaskevich/govalidator/graphs/contributors"><img src="https://opencollective.com/govalidator/contributors.svg?width=890" /></a>
+
+
+### Backers
+
+Thank you to all our backers! 🙏 [[Become a backer](https://opencollective.com/govalidator#backer)]
+
+<a href="https://opencollective.com/govalidator#backers" target="_blank"><img src="https://opencollective.com/govalidator/backers.svg?width=890"></a>
+
+
+### Sponsors
+
+Support this project by becoming a sponsor. Your logo will show up here with a link to your website. [[Become a sponsor](https://opencollective.com/govalidator#sponsor)]
+
+<a href="https://opencollective.com/govalidator/sponsor/0/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/0/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/1/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/1/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/2/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/2/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/3/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/3/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/4/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/4/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/5/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/5/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/6/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/6/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/7/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/7/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/8/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/8/avatar.svg"></a>
+<a href="https://opencollective.com/govalidator/sponsor/9/website" target="_blank"><img src="https://opencollective.com/govalidator/sponsor/9/avatar.svg"></a>
+
+## License
+[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator?ref=badge_large)
diff --git a/vendor/github.com/asaskevich/govalidator/arrays.go b/vendor/github.com/asaskevich/govalidator/arrays.go
new file mode 100644
index 000000000..3e1da7cb4
--- /dev/null
+++ b/vendor/github.com/asaskevich/govalidator/arrays.go
@@ -0,0 +1,87 @@
+package govalidator
+
+// Iterator is a function that accepts an element of a slice/array and its index.
+type Iterator func(interface{}, int)
+
+// ResultIterator is a function that accepts an element of a slice/array and its index, and returns a result.
+type ResultIterator func(interface{}, int) interface{}
+
+// ConditionIterator is a function that accepts an element of a slice/array and its index, and returns a boolean.
+type ConditionIterator func(interface{}, int) bool
+
+// ReduceIterator is a function that accepts an accumulator and an element of a slice/array, and returns the result of merging them.
+type ReduceIterator func(interface{}, interface{}) interface{}
+
+// Some returns true if at least one item of the array satisfies ConditionIterator.
+func Some(array []interface{}, iterator ConditionIterator) bool {
+ res := false
+ for index, data := range array {
+ res = res || iterator(data, index)
+ }
+ return res
+}
+
+// Every returns true if every item of the array satisfies ConditionIterator.
+func Every(array []interface{}, iterator ConditionIterator) bool {
+ res := true
+ for index, data := range array {
+ res = res && iterator(data, index)
+ }
+ return res
+}
+
+// Reduce boils down a list of values into a single value using ReduceIterator.
+func Reduce(array []interface{}, iterator ReduceIterator, initialValue interface{}) interface{} {
+ for _, data := range array {
+ initialValue = iterator(initialValue, data)
+ }
+ return initialValue
+}
+
+// Each iterates over the slice and applies Iterator to every item.
+func Each(array []interface{}, iterator Iterator) {
+ for index, data := range array {
+ iterator(data, index)
+ }
+}
+
+// Map iterates over the slice, applies ResultIterator to every item, and returns the resulting new slice.
+func Map(array []interface{}, iterator ResultIterator) []interface{} {
+ var result = make([]interface{}, len(array))
+ for index, data := range array {
+ result[index] = iterator(data, index)
+ }
+ return result
+}
+
+// Find iterates over the slice and returns the first item that satisfies ConditionIterator, or nil if none does.
+func Find(array []interface{}, iterator ConditionIterator) interface{} {
+ for index, data := range array {
+ if iterator(data, index) {
+ return data
+ }
+ }
+ return nil
+}
+
+// Filter iterates over the slice and returns a new slice containing the items that satisfy ConditionIterator.
+func Filter(array []interface{}, iterator ConditionIterator) []interface{} {
+ var result = make([]interface{}, 0)
+ for index, data := range array {
+ if iterator(data, index) {
+ result = append(result, data)
+ }
+ }
+ return result
+}
+
+// Count iterates over the slice and returns the number of items that satisfy ConditionIterator.
+func Count(array []interface{}, iterator ConditionIterator) int {
+ count := 0
+ for index, data := range array {
+ if iterator(data, index) {
+ count = count + 1
+ }
+ }
+ return count
+}
diff --git a/vendor/github.com/asaskevich/govalidator/converter.go b/vendor/github.com/asaskevich/govalidator/converter.go
new file mode 100644
index 000000000..d68e990fc
--- /dev/null
+++ b/vendor/github.com/asaskevich/govalidator/converter.go
@@ -0,0 +1,81 @@
+package govalidator
+
+import (
+ "encoding/json"
+ "fmt"
+ "reflect"
+ "strconv"
+)
+
+// ToString converts the input to a string.
+func ToString(obj interface{}) string {
+ res := fmt.Sprintf("%v", obj)
+ return res
+}
+
+// ToJSON converts the input to a valid JSON string.
+func ToJSON(obj interface{}) (string, error) {
+ res, err := json.Marshal(obj)
+ if err != nil {
+ res = []byte("")
+ }
+ return string(res), err
+}
+
+// ToFloat converts the input to a float64, or returns 0.0 and an error if the input cannot be converted.
+func ToFloat(value interface{}) (res float64, err error) {
+ val := reflect.ValueOf(value)
+
+ switch value.(type) {
+ case int, int8, int16, int32, int64:
+ res = float64(val.Int())
+ case uint, uint8, uint16, uint32, uint64:
+ res = float64(val.Uint())
+ case float32, float64:
+ res = val.Float()
+ case string:
+ res, err = strconv.ParseFloat(val.String(), 64)
+ if err != nil {
+ res = 0
+ }
+ default:
+ err = fmt.Errorf("ToInt: unknown interface type %T", value)
+ res = 0
+ }
+
+ return
+}
+
+// ToInt converts the input (string or any numeric type) to an int64, or returns 0 and an error if the input is not an integer.
+func ToInt(value interface{}) (res int64, err error) {
+ val := reflect.ValueOf(value)
+
+ switch value.(type) {
+ case int, int8, int16, int32, int64:
+ res = val.Int()
+ case uint, uint8, uint16, uint32, uint64:
+ res = int64(val.Uint())
+ case float32, float64:
+ res = int64(val.Float())
+ case string:
+ if IsInt(val.String()) {
+ res, err = strconv.ParseInt(val.String(), 0, 64)
+ if err != nil {
+ res = 0
+ }
+ } else {
+ err = fmt.Errorf("ToInt: invalid numeric format %g", value)
+ res = 0
+ }
+ default:
+ err = fmt.Errorf("ToInt: unknown interface type %T", value)
+ res = 0
+ }
+
+ return
+}
+
+// ToBoolean converts the input string to a boolean.
+func ToBoolean(str string) (bool, error) {
+ return strconv.ParseBool(str)
+}
diff --git a/vendor/github.com/asaskevich/govalidator/doc.go b/vendor/github.com/asaskevich/govalidator/doc.go
new file mode 100644
index 000000000..55dce62dc
--- /dev/null
+++ b/vendor/github.com/asaskevich/govalidator/doc.go
@@ -0,0 +1,3 @@
+package govalidator
+
+// A package of validators and sanitizers for strings, structures and collections.
diff --git a/vendor/github.com/asaskevich/govalidator/error.go b/vendor/github.com/asaskevich/govalidator/error.go
new file mode 100644
index 000000000..1da2336f4
--- /dev/null
+++ b/vendor/github.com/asaskevich/govalidator/error.go
@@ -0,0 +1,47 @@
+package govalidator
+
+import (
+ "sort"
+ "strings"
+)
+
+// Errors is an array of multiple errors and conforms to the error interface.
+type Errors []error
+
+// Errors returns itself.
+func (es Errors) Errors() []error {
+ return es
+}
+
+func (es Errors) Error() string {
+ var errs []string
+ for _, e := range es {
+ errs = append(errs, e.Error())
+ }
+ sort.Strings(errs)
+ return strings.Join(errs, ";")
+}
+
+// Error encapsulates a name, an error and whether there's a custom error message or not.
+type Error struct {
+ Name string
+ Err error
+ CustomErrorMessageExists bool
+
+ // Validator indicates the name of the validator that failed
+ Validator string
+ Path []string
+}
+
+func (e Error) Error() string {
+ if e.CustomErrorMessageExists {
+ return e.Err.Error()
+ }
+
+ errName := e.Name
+ if len(e.Path) > 0 {
+ errName = strings.Join(append(e.Path, e.Name), ".")
+ }
+
+ return errName + ": " + e.Err.Error()
+}
diff --git a/vendor/github.com/asaskevich/govalidator/numerics.go b/vendor/github.com/asaskevich/govalidator/numerics.go
new file mode 100644
index 000000000..5041d9e86
--- /dev/null
+++ b/vendor/github.com/asaskevich/govalidator/numerics.go
@@ -0,0 +1,100 @@
+package govalidator
+
+import (
+ "math"
+)
+
+// Abs returns the absolute value of the number.
+func Abs(value float64) float64 {
+ return math.Abs(value)
+}
+
+// Sign returns the signum of the number: 1 in case of value > 0, -1 in case of value < 0, 0 otherwise.
+func Sign(value float64) float64 {
+ if value > 0 {
+ return 1
+ } else if value < 0 {
+ return -1
+ } else {
+ return 0
+ }
+}
+
+// IsNegative returns true if value < 0
+func IsNegative(value float64) bool {
+ return value < 0
+}
+
+// IsPositive returns true if value > 0
+func IsPositive(value float64) bool {
+ return value > 0
+}
+
+// IsNonNegative returns true if value >= 0
+func IsNonNegative(value float64) bool {
+ return value >= 0
+}
+
+// IsNonPositive returns true if value <= 0
+func IsNonPositive(value float64) bool {
+ return value <= 0
+}
+
+// InRangeInt returns true if value lies between left and right border
+func InRangeInt(value, left, right interface{}) bool {
+ value64, _ := ToInt(value)
+ left64, _ := ToInt(left)
+ right64, _ := ToInt(right)
+ if left64 > right64 {
+ left64, right64 = right64, left64
+ }
+ return value64 >= left64 && value64 <= right64
+}
+
+// InRangeFloat32 returns true if value lies between left and right border
+func InRangeFloat32(value, left, right float32) bool {
+ if left > right {
+ left, right = right, left
+ }
+ return value >= left && value <= right
+}
+
+// InRangeFloat64 returns true if value lies between left and right border
+func InRangeFloat64(value, left, right float64) bool {
+ if left > right {
+ left, right = right, left
+ }
+ return value >= left && value <= right
+}
+
+// InRange returns true if value lies between left and right border. It is generic and handles int, float32, float64 and string.
+// All three arguments must be of the same type.
+// It returns false if value doesn't lie in the range, or if the values are incompatible or not comparable.
+func InRange(value interface{}, left interface{}, right interface{}) bool {
+ switch value.(type) {
+ case int:
+ intValue, _ := ToInt(value)
+ intLeft, _ := ToInt(left)
+ intRight, _ := ToInt(right)
+ return InRangeInt(intValue, intLeft, intRight)
+ case float32, float64:
+ floatValue, _ := ToFloat(value)
+ floatLeft, _ := ToFloat(left)
+ floatRight, _ := ToFloat(right)
+ return InRangeFloat64(floatValue, floatLeft, floatRight)
+ case string:
+ return value.(string) >= left.(string) && value.(string) <= right.(string)
+ default:
+ return false
+ }
+}
+
+// IsWhole returns true if value is a whole number.
+func IsWhole(value float64) bool {
+ return math.Remainder(value, 1) == 0
+}
+
+// IsNatural returns true if value is a natural number (positive and whole).
+func IsNatural(value float64) bool {
+ return IsWhole(value) && IsPositive(value)
+}
diff --git a/vendor/github.com/asaskevich/govalidator/patterns.go b/vendor/github.com/asaskevich/govalidator/patterns.go
new file mode 100644
index 000000000..bafc3765e
--- /dev/null
+++ b/vendor/github.com/asaskevich/govalidator/patterns.go
@@ -0,0 +1,113 @@
+package govalidator
+
+import "regexp"
+
+// Basic regular expressions for validating strings
+const (
+ Email string = "^(((([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|((\\x22)((((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(([\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(\\([\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(\\x22)))@((([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$"
+ CreditCard string = "^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|(222[1-9]|22[3-9][0-9]|2[3-6][0-9]{2}|27[01][0-9]|2720)[0-9]{12}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\\d{3})\\d{11}|6[27][0-9]{14})$"
+ ISBN10 string = "^(?:[0-9]{9}X|[0-9]{10})$"
+ ISBN13 string = "^(?:[0-9]{13})$"
+ UUID3 string = "^[0-9a-f]{8}-[0-9a-f]{4}-3[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}$"
+ UUID4 string = "^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$"
+ UUID5 string = "^[0-9a-f]{8}-[0-9a-f]{4}-5[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$"
+ UUID string = "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$"
+ Alpha string = "^[a-zA-Z]+$"
+ Alphanumeric string = "^[a-zA-Z0-9]+$"
+ Numeric string = "^[0-9]+$"
+ Int string = "^(?:[-+]?(?:0|[1-9][0-9]*))$"
+ Float string = "^(?:[-+]?(?:[0-9]+))?(?:\\.[0-9]*)?(?:[eE][\\+\\-]?(?:[0-9]+))?$"
+ Hexadecimal string = "^[0-9a-fA-F]+$"
+ Hexcolor string = "^#?([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$"
+ RGBcolor string = "^rgb\\(\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*\\)$"
+ ASCII string = "^[\x00-\x7F]+$"
+ Multibyte string = "[^\x00-\x7F]"
+ FullWidth string = "[^\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]"
+ HalfWidth string = "[\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]"
+ Base64 string = "^(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+\\/]{3}=|[A-Za-z0-9+\\/]{4})$"
+ PrintableASCII string = "^[\x20-\x7E]+$"
+ DataURI string = "^data:.+\\/(.+);base64$"
+ MagnetURI string = "^magnet:\\?xt=urn:[a-zA-Z0-9]+:[a-zA-Z0-9]{32,40}&dn=.+&tr=.+$"
+ Latitude string = "^[-+]?([1-8]?\\d(\\.\\d+)?|90(\\.0+)?)$"
+ Longitude string = "^[-+]?(180(\\.0+)?|((1[0-7]\\d)|([1-9]?\\d))(\\.\\d+)?)$"
+ DNSName string = `^([a-zA-Z0-9_]{1}[a-zA-Z0-9_-]{0,62}){1}(\.[a-zA-Z0-9_]{1}[a-zA-Z0-9_-]{0,62})*[\._]?$`
+ IP string = `(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))`
+ URLSchema string = `((ftp|tcp|udp|wss?|https?):\/\/)`
+ URLUsername string = `(\S+(:\S*)?@)`
+ URLPath string = `((\/|\?|#)[^\s]*)`
+ URLPort string = `(:(\d{1,5}))`
+ URLIP string = `([1-9]\d?|1\d\d|2[01]\d|22[0-3]|24\d|25[0-5])(\.(\d{1,2}|1\d\d|2[0-4]\d|25[0-5])){2}(?:\.([0-9]\d?|1\d\d|2[0-4]\d|25[0-5]))`
+ URLSubdomain string = `((www\.)|([a-zA-Z0-9]+([-_\.]?[a-zA-Z0-9])*[a-zA-Z0-9]\.[a-zA-Z0-9]+))`
+ URL = `^` + URLSchema + `?` + URLUsername + `?` + `((` + URLIP + `|(\[` + IP + `\])|(([a-zA-Z0-9]([a-zA-Z0-9-_]+)?[a-zA-Z0-9]([-\.][a-zA-Z0-9]+)*)|(` + URLSubdomain + `?))?(([a-zA-Z\x{00a1}-\x{ffff}0-9]+-?-?)*[a-zA-Z\x{00a1}-\x{ffff}0-9]+)(?:\.([a-zA-Z\x{00a1}-\x{ffff}]{1,}))?))\.?` + URLPort + `?` + URLPath + `?$`
+ SSN string = `^\d{3}[- ]?\d{2}[- ]?\d{4}$`
+ WinPath string = `^[a-zA-Z]:\\(?:[^\\/:*?"<>|\r\n]+\\)*[^\\/:*?"<>|\r\n]*$`
+ UnixPath string = `^(/[^/\x00]*)+/?$`
+ WinARPath string = `^(?:(?:[a-zA-Z]:|\\\\[a-z0-9_.$●-]+\\[a-z0-9_.$●-]+)\\|\\?[^\\/:*?"<>|\r\n]+\\?)(?:[^\\/:*?"<>|\r\n]+\\)*[^\\/:*?"<>|\r\n]*$`
+ UnixARPath string = `^((\.{0,2}/)?([^/\x00]*))+/?$`
+ Semver string = "^v?(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)(-(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(\\.(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\\+[0-9a-zA-Z-]+(\\.[0-9a-zA-Z-]+)*)?$"
+ tagName string = "valid"
+ hasLowerCase string = ".*[[:lower:]]"
+ hasUpperCase string = ".*[[:upper:]]"
+ hasWhitespace string = ".*[[:space:]]"
+ hasWhitespaceOnly string = "^[[:space:]]+$"
+ IMEI string = "^[0-9a-f]{14}$|^\\d{15}$|^\\d{18}$"
+ IMSI string = "^\\d{14,15}$"
+ E164 string = `^\+?[1-9]\d{1,14}$`
+)
+
+// Used by IsFilePath func
+const (
+ // Unknown is unresolved OS type
+ Unknown = iota
+ // Win is Windows type
+ Win
+ // Unix is *nix OS types
+ Unix
+)
+
+var (
+ userRegexp = regexp.MustCompile("^[a-zA-Z0-9!#$%&'*+/=?^_`{|}~.-]+$")
+ hostRegexp = regexp.MustCompile("^[^\\s]+\\.[^\\s]+$")
+ userDotRegexp = regexp.MustCompile("(^[.]{1})|([.]{1}$)|([.]{2,})")
+ rxEmail = regexp.MustCompile(Email)
+ rxCreditCard = regexp.MustCompile(CreditCard)
+ rxISBN10 = regexp.MustCompile(ISBN10)
+ rxISBN13 = regexp.MustCompile(ISBN13)
+ rxUUID3 = regexp.MustCompile(UUID3)
+ rxUUID4 = regexp.MustCompile(UUID4)
+ rxUUID5 = regexp.MustCompile(UUID5)
+ rxUUID = regexp.MustCompile(UUID)
+ rxAlpha = regexp.MustCompile(Alpha)
+ rxAlphanumeric = regexp.MustCompile(Alphanumeric)
+ rxNumeric = regexp.MustCompile(Numeric)
+ rxInt = regexp.MustCompile(Int)
+ rxFloat = regexp.MustCompile(Float)
+ rxHexadecimal = regexp.MustCompile(Hexadecimal)
+ rxHexcolor = regexp.MustCompile(Hexcolor)
+ rxRGBcolor = regexp.MustCompile(RGBcolor)
+ rxASCII = regexp.MustCompile(ASCII)
+ rxPrintableASCII = regexp.MustCompile(PrintableASCII)
+ rxMultibyte = regexp.MustCompile(Multibyte)
+ rxFullWidth = regexp.MustCompile(FullWidth)
+ rxHalfWidth = regexp.MustCompile(HalfWidth)
+ rxBase64 = regexp.MustCompile(Base64)
+ rxDataURI = regexp.MustCompile(DataURI)
+ rxMagnetURI = regexp.MustCompile(MagnetURI)
+ rxLatitude = regexp.MustCompile(Latitude)
+ rxLongitude = regexp.MustCompile(Longitude)
+ rxDNSName = regexp.MustCompile(DNSName)
+ rxURL = regexp.MustCompile(URL)
+ rxSSN = regexp.MustCompile(SSN)
+ rxWinPath = regexp.MustCompile(WinPath)
+ rxUnixPath = regexp.MustCompile(UnixPath)
+ rxARWinPath = regexp.MustCompile(WinARPath)
+ rxARUnixPath = regexp.MustCompile(UnixARPath)
+ rxSemver = regexp.MustCompile(Semver)
+ rxHasLowerCase = regexp.MustCompile(hasLowerCase)
+ rxHasUpperCase = regexp.MustCompile(hasUpperCase)
+ rxHasWhitespace = regexp.MustCompile(hasWhitespace)
+ rxHasWhitespaceOnly = regexp.MustCompile(hasWhitespaceOnly)
+ rxIMEI = regexp.MustCompile(IMEI)
+ rxIMSI = regexp.MustCompile(IMSI)
+ rxE164 = regexp.MustCompile(E164)
+)
diff --git a/vendor/github.com/asaskevich/govalidator/types.go b/vendor/github.com/asaskevich/govalidator/types.go
new file mode 100644
index 000000000..c573abb51
--- /dev/null
+++ b/vendor/github.com/asaskevich/govalidator/types.go
@@ -0,0 +1,656 @@
+package govalidator
+
+import (
+ "reflect"
+ "regexp"
+ "sort"
+ "sync"
+)
+
+// Validator is a wrapper for a validator function that returns bool and accepts string.
+type Validator func(str string) bool
+
+// CustomTypeValidator is a wrapper for validator functions that return bool and accept any type.
+// The second parameter should be the context (in the case of validating a struct: the whole object being validated).
+type CustomTypeValidator func(i interface{}, o interface{}) bool
+
+// ParamValidator is a wrapper for validator functions that accept additional parameters.
+type ParamValidator func(str string, params ...string) bool
+
+// InterfaceParamValidator is a wrapper for functions that accept variadic parameters for an interface value.
+type InterfaceParamValidator func(in interface{}, params ...string) bool
+type tagOptionsMap map[string]tagOption
+
+func (t tagOptionsMap) orderedKeys() []string {
+ var keys []string
+ for k := range t {
+ keys = append(keys, k)
+ }
+
+ sort.Slice(keys, func(a, b int) bool {
+ return t[keys[a]].order < t[keys[b]].order
+ })
+
+ return keys
+}
+
+type tagOption struct {
+ name string
+ customErrorMessage string
+ order int
+}
+
+// UnsupportedTypeError is a wrapper for reflect.Type
+type UnsupportedTypeError struct {
+ Type reflect.Type
+}
+
+// stringValues is a slice of reflect.Value holding *reflect.StringValue.
+// It implements the methods to sort by string.
+type stringValues []reflect.Value
+
+// InterfaceParamTagMap is a map of functions that accept variadic parameters for an interface value.
+var InterfaceParamTagMap = map[string]InterfaceParamValidator{
+ "type": IsType,
+}
+
+// InterfaceParamTagRegexMap maps interface param tags to their respective regexes.
+var InterfaceParamTagRegexMap = map[string]*regexp.Regexp{
+ "type": regexp.MustCompile(`^type\((.*)\)$`),
+}
+
+// ParamTagMap is a map of functions that accept variadic parameters.
+var ParamTagMap = map[string]ParamValidator{
+ "length": ByteLength,
+ "range": Range,
+ "runelength": RuneLength,
+ "stringlength": StringLength,
+ "matches": StringMatches,
+ "in": IsInRaw,
+ "rsapub": IsRsaPub,
+ "minstringlength": MinStringLength,
+ "maxstringlength": MaxStringLength,
+}
+
+// ParamTagRegexMap maps param tags to their respective regexes.
+var ParamTagRegexMap = map[string]*regexp.Regexp{
+ "range": regexp.MustCompile("^range\\((\\d+)\\|(\\d+)\\)$"),
+ "length": regexp.MustCompile("^length\\((\\d+)\\|(\\d+)\\)$"),
+ "runelength": regexp.MustCompile("^runelength\\((\\d+)\\|(\\d+)\\)$"),
+ "stringlength": regexp.MustCompile("^stringlength\\((\\d+)\\|(\\d+)\\)$"),
+ "in": regexp.MustCompile(`^in\((.*)\)`),
+ "matches": regexp.MustCompile(`^matches\((.+)\)$`),
+ "rsapub": regexp.MustCompile("^rsapub\\((\\d+)\\)$"),
+ "minstringlength": regexp.MustCompile("^minstringlength\\((\\d+)\\)$"),
+ "maxstringlength": regexp.MustCompile("^maxstringlength\\((\\d+)\\)$"),
+}
+
+type customTypeTagMap struct {
+ validators map[string]CustomTypeValidator
+
+ sync.RWMutex
+}
+
+func (tm *customTypeTagMap) Get(name string) (CustomTypeValidator, bool) {
+ tm.RLock()
+ defer tm.RUnlock()
+ v, ok := tm.validators[name]
+ return v, ok
+}
+
+func (tm *customTypeTagMap) Set(name string, ctv CustomTypeValidator) {
+ tm.Lock()
+ defer tm.Unlock()
+ tm.validators[name] = ctv
+}
+
+// CustomTypeTagMap is a map of functions that can be used as tags for ValidateStruct function.
+// Use this to validate compound or custom types that need to be handled as a whole, e.g.
+// `type UUID [16]byte` (this would be handled as an array of bytes).
+var CustomTypeTagMap = &customTypeTagMap{validators: make(map[string]CustomTypeValidator)}
+
+// TagMap is a map of functions that can be used as tags for the ValidateStruct function.
+var TagMap = map[string]Validator{
+ "email": IsEmail,
+ "url": IsURL,
+ "dialstring": IsDialString,
+ "requrl": IsRequestURL,
+ "requri": IsRequestURI,
+ "alpha": IsAlpha,
+ "utfletter": IsUTFLetter,
+ "alphanum": IsAlphanumeric,
+ "utfletternum": IsUTFLetterNumeric,
+ "numeric": IsNumeric,
+ "utfnumeric": IsUTFNumeric,
+ "utfdigit": IsUTFDigit,
+ "hexadecimal": IsHexadecimal,
+ "hexcolor": IsHexcolor,
+ "rgbcolor": IsRGBcolor,
+ "lowercase": IsLowerCase,
+ "uppercase": IsUpperCase,
+ "int": IsInt,
+ "float": IsFloat,
+ "null": IsNull,
+ "notnull": IsNotNull,
+ "uuid": IsUUID,
+ "uuidv3": IsUUIDv3,
+ "uuidv4": IsUUIDv4,
+ "uuidv5": IsUUIDv5,
+ "creditcard": IsCreditCard,
+ "isbn10": IsISBN10,
+ "isbn13": IsISBN13,
+ "json": IsJSON,
+ "multibyte": IsMultibyte,
+ "ascii": IsASCII,
+ "printableascii": IsPrintableASCII,
+ "fullwidth": IsFullWidth,
+ "halfwidth": IsHalfWidth,
+ "variablewidth": IsVariableWidth,
+ "base64": IsBase64,
+ "datauri": IsDataURI,
+ "ip": IsIP,
+ "port": IsPort,
+ "ipv4": IsIPv4,
+ "ipv6": IsIPv6,
+ "dns": IsDNSName,
+ "host": IsHost,
+ "mac": IsMAC,
+ "latitude": IsLatitude,
+ "longitude": IsLongitude,
+ "ssn": IsSSN,
+ "semver": IsSemver,
+ "rfc3339": IsRFC3339,
+ "rfc3339WithoutZone": IsRFC3339WithoutZone,
+ "ISO3166Alpha2": IsISO3166Alpha2,
+ "ISO3166Alpha3": IsISO3166Alpha3,
+ "ISO4217": IsISO4217,
+ "IMEI": IsIMEI,
+ "ulid": IsULID,
+}
+
+// ISO3166Entry stores country codes
+type ISO3166Entry struct {
+ EnglishShortName string
+ FrenchShortName string
+ Alpha2Code string
+ Alpha3Code string
+ Numeric string
+}
+
+// ISO3166List is based on https://www.iso.org/obp/ui/#search/code/ Code Type "Officially Assigned Codes"
+var ISO3166List = []ISO3166Entry{
+ {"Afghanistan", "Afghanistan (l')", "AF", "AFG", "004"},
+ {"Albania", "Albanie (l')", "AL", "ALB", "008"},
+ {"Antarctica", "Antarctique (l')", "AQ", "ATA", "010"},
+ {"Algeria", "Algérie (l')", "DZ", "DZA", "012"},
+ {"American Samoa", "Samoa américaines (les)", "AS", "ASM", "016"},
+ {"Andorra", "Andorre (l')", "AD", "AND", "020"},
+ {"Angola", "Angola (l')", "AO", "AGO", "024"},
+ {"Antigua and Barbuda", "Antigua-et-Barbuda", "AG", "ATG", "028"},
+ {"Azerbaijan", "Azerbaïdjan (l')", "AZ", "AZE", "031"},
+ {"Argentina", "Argentine (l')", "AR", "ARG", "032"},
+ {"Australia", "Australie (l')", "AU", "AUS", "036"},
+ {"Austria", "Autriche (l')", "AT", "AUT", "040"},
+ {"Bahamas (the)", "Bahamas (les)", "BS", "BHS", "044"},
+ {"Bahrain", "Bahreïn", "BH", "BHR", "048"},
+ {"Bangladesh", "Bangladesh (le)", "BD", "BGD", "050"},
+ {"Armenia", "Arménie (l')", "AM", "ARM", "051"},
+ {"Barbados", "Barbade (la)", "BB", "BRB", "052"},
+ {"Belgium", "Belgique (la)", "BE", "BEL", "056"},
+ {"Bermuda", "Bermudes (les)", "BM", "BMU", "060"},
+ {"Bhutan", "Bhoutan (le)", "BT", "BTN", "064"},
+ {"Bolivia (Plurinational State of)", "Bolivie (État plurinational de)", "BO", "BOL", "068"},
+ {"Bosnia and Herzegovina", "Bosnie-Herzégovine (la)", "BA", "BIH", "070"},
+ {"Botswana", "Botswana (le)", "BW", "BWA", "072"},
+ {"Bouvet Island", "Bouvet (l'Île)", "BV", "BVT", "074"},
+ {"Brazil", "Brésil (le)", "BR", "BRA", "076"},
+ {"Belize", "Belize (le)", "BZ", "BLZ", "084"},
+ {"British Indian Ocean Territory (the)", "Indien (le Territoire britannique de l'océan)", "IO", "IOT", "086"},
+ {"Solomon Islands", "Salomon (Îles)", "SB", "SLB", "090"},
+ {"Virgin Islands (British)", "Vierges britanniques (les Îles)", "VG", "VGB", "092"},
+ {"Brunei Darussalam", "Brunéi Darussalam (le)", "BN", "BRN", "096"},
+ {"Bulgaria", "Bulgarie (la)", "BG", "BGR", "100"},
+ {"Myanmar", "Myanmar (le)", "MM", "MMR", "104"},
+ {"Burundi", "Burundi (le)", "BI", "BDI", "108"},
+ {"Belarus", "Bélarus (le)", "BY", "BLR", "112"},
+ {"Cambodia", "Cambodge (le)", "KH", "KHM", "116"},
+ {"Cameroon", "Cameroun (le)", "CM", "CMR", "120"},
+ {"Canada", "Canada (le)", "CA", "CAN", "124"},
+ {"Cabo Verde", "Cabo Verde", "CV", "CPV", "132"},
+ {"Cayman Islands (the)", "Caïmans (les Îles)", "KY", "CYM", "136"},
+ {"Central African Republic (the)", "République centrafricaine (la)", "CF", "CAF", "140"},
+ {"Sri Lanka", "Sri Lanka", "LK", "LKA", "144"},
+ {"Chad", "Tchad (le)", "TD", "TCD", "148"},
+ {"Chile", "Chili (le)", "CL", "CHL", "152"},
+ {"China", "Chine (la)", "CN", "CHN", "156"},
+ {"Taiwan (Province of China)", "Taïwan (Province de Chine)", "TW", "TWN", "158"},
+ {"Christmas Island", "Christmas (l'Île)", "CX", "CXR", "162"},
+ {"Cocos (Keeling) Islands (the)", "Cocos (les Îles)/ Keeling (les Îles)", "CC", "CCK", "166"},
+ {"Colombia", "Colombie (la)", "CO", "COL", "170"},
+ {"Comoros (the)", "Comores (les)", "KM", "COM", "174"},
+ {"Mayotte", "Mayotte", "YT", "MYT", "175"},
+ {"Congo (the)", "Congo (le)", "CG", "COG", "178"},
+ {"Congo (the Democratic Republic of the)", "Congo (la République démocratique du)", "CD", "COD", "180"},
+ {"Cook Islands (the)", "Cook (les Îles)", "CK", "COK", "184"},
+ {"Costa Rica", "Costa Rica (le)", "CR", "CRI", "188"},
+ {"Croatia", "Croatie (la)", "HR", "HRV", "191"},
+ {"Cuba", "Cuba", "CU", "CUB", "192"},
+ {"Cyprus", "Chypre", "CY", "CYP", "196"},
+ {"Czech Republic (the)", "tchèque (la République)", "CZ", "CZE", "203"},
+ {"Benin", "Bénin (le)", "BJ", "BEN", "204"},
+ {"Denmark", "Danemark (le)", "DK", "DNK", "208"},
+ {"Dominica", "Dominique (la)", "DM", "DMA", "212"},
+ {"Dominican Republic (the)", "dominicaine (la République)", "DO", "DOM", "214"},
+ {"Ecuador", "Équateur (l')", "EC", "ECU", "218"},
+ {"El Salvador", "El Salvador", "SV", "SLV", "222"},
+ {"Equatorial Guinea", "Guinée équatoriale (la)", "GQ", "GNQ", "226"},
+ {"Ethiopia", "Éthiopie (l')", "ET", "ETH", "231"},
+ {"Eritrea", "Érythrée (l')", "ER", "ERI", "232"},
+ {"Estonia", "Estonie (l')", "EE", "EST", "233"},
+ {"Faroe Islands (the)", "Féroé (les Îles)", "FO", "FRO", "234"},
+ {"Falkland Islands (the) [Malvinas]", "Falkland (les Îles)/Malouines (les Îles)", "FK", "FLK", "238"},
+ {"South Georgia and the South Sandwich Islands", "Géorgie du Sud-et-les Îles Sandwich du Sud (la)", "GS", "SGS", "239"},
+ {"Fiji", "Fidji (les)", "FJ", "FJI", "242"},
+ {"Finland", "Finlande (la)", "FI", "FIN", "246"},
+ {"Åland Islands", "Åland(les Îles)", "AX", "ALA", "248"},
+ {"France", "France (la)", "FR", "FRA", "250"},
+ {"French Guiana", "Guyane française (la )", "GF", "GUF", "254"},
+ {"French Polynesia", "Polynésie française (la)", "PF", "PYF", "258"},
+ {"French Southern Territories (the)", "Terres australes françaises (les)", "TF", "ATF", "260"},
+ {"Djibouti", "Djibouti", "DJ", "DJI", "262"},
+ {"Gabon", "Gabon (le)", "GA", "GAB", "266"},
+ {"Georgia", "Géorgie (la)", "GE", "GEO", "268"},
+ {"Gambia (the)", "Gambie (la)", "GM", "GMB", "270"},
+ {"Palestine, State of", "Palestine, État de", "PS", "PSE", "275"},
+ {"Germany", "Allemagne (l')", "DE", "DEU", "276"},
+ {"Ghana", "Ghana (le)", "GH", "GHA", "288"},
+ {"Gibraltar", "Gibraltar", "GI", "GIB", "292"},
+ {"Kiribati", "Kiribati", "KI", "KIR", "296"},
+ {"Greece", "Grèce (la)", "GR", "GRC", "300"},
+ {"Greenland", "Groenland (le)", "GL", "GRL", "304"},
+ {"Grenada", "Grenade (la)", "GD", "GRD", "308"},
+ {"Guadeloupe", "Guadeloupe (la)", "GP", "GLP", "312"},
+ {"Guam", "Guam", "GU", "GUM", "316"},
+ {"Guatemala", "Guatemala (le)", "GT", "GTM", "320"},
+ {"Guinea", "Guinée (la)", "GN", "GIN", "324"},
+ {"Guyana", "Guyana (le)", "GY", "GUY", "328"},
+ {"Haiti", "Haïti", "HT", "HTI", "332"},
+ {"Heard Island and McDonald Islands", "Heard-et-Îles MacDonald (l'Île)", "HM", "HMD", "334"},
+ {"Holy See (the)", "Saint-Siège (le)", "VA", "VAT", "336"},
+ {"Honduras", "Honduras (le)", "HN", "HND", "340"},
+ {"Hong Kong", "Hong Kong", "HK", "HKG", "344"},
+ {"Hungary", "Hongrie (la)", "HU", "HUN", "348"},
+ {"Iceland", "Islande (l')", "IS", "ISL", "352"},
+ {"India", "Inde (l')", "IN", "IND", "356"},
+ {"Indonesia", "Indonésie (l')", "ID", "IDN", "360"},
+ {"Iran (Islamic Republic of)", "Iran (République Islamique d')", "IR", "IRN", "364"},
+ {"Iraq", "Iraq (l')", "IQ", "IRQ", "368"},
+ {"Ireland", "Irlande (l')", "IE", "IRL", "372"},
+ {"Israel", "Israël", "IL", "ISR", "376"},
+ {"Italy", "Italie (l')", "IT", "ITA", "380"},
+ {"Côte d'Ivoire", "Côte d'Ivoire (la)", "CI", "CIV", "384"},
+ {"Jamaica", "Jamaïque (la)", "JM", "JAM", "388"},
+ {"Japan", "Japon (le)", "JP", "JPN", "392"},
+ {"Kazakhstan", "Kazakhstan (le)", "KZ", "KAZ", "398"},
+ {"Jordan", "Jordanie (la)", "JO", "JOR", "400"},
+ {"Kenya", "Kenya (le)", "KE", "KEN", "404"},
+ {"Korea (the Democratic People's Republic of)", "Corée (la République populaire démocratique de)", "KP", "PRK", "408"},
+ {"Korea (the Republic of)", "Corée (la République de)", "KR", "KOR", "410"},
+ {"Kuwait", "Koweït (le)", "KW", "KWT", "414"},
+ {"Kyrgyzstan", "Kirghizistan (le)", "KG", "KGZ", "417"},
+ {"Lao People's Democratic Republic (the)", "Lao, République démocratique populaire", "LA", "LAO", "418"},
+ {"Lebanon", "Liban (le)", "LB", "LBN", "422"},
+ {"Lesotho", "Lesotho (le)", "LS", "LSO", "426"},
+ {"Latvia", "Lettonie (la)", "LV", "LVA", "428"},
+ {"Liberia", "Libéria (le)", "LR", "LBR", "430"},
+ {"Libya", "Libye (la)", "LY", "LBY", "434"},
+ {"Liechtenstein", "Liechtenstein (le)", "LI", "LIE", "438"},
+ {"Lithuania", "Lituanie (la)", "LT", "LTU", "440"},
+ {"Luxembourg", "Luxembourg (le)", "LU", "LUX", "442"},
+ {"Macao", "Macao", "MO", "MAC", "446"},
+ {"Madagascar", "Madagascar", "MG", "MDG", "450"},
+ {"Malawi", "Malawi (le)", "MW", "MWI", "454"},
+ {"Malaysia", "Malaisie (la)", "MY", "MYS", "458"},
+ {"Maldives", "Maldives (les)", "MV", "MDV", "462"},
+ {"Mali", "Mali (le)", "ML", "MLI", "466"},
+ {"Malta", "Malte", "MT", "MLT", "470"},
+ {"Martinique", "Martinique (la)", "MQ", "MTQ", "474"},
+ {"Mauritania", "Mauritanie (la)", "MR", "MRT", "478"},
+ {"Mauritius", "Maurice", "MU", "MUS", "480"},
+ {"Mexico", "Mexique (le)", "MX", "MEX", "484"},
+ {"Monaco", "Monaco", "MC", "MCO", "492"},
+ {"Mongolia", "Mongolie (la)", "MN", "MNG", "496"},
+ {"Moldova (the Republic of)", "Moldova , République de", "MD", "MDA", "498"},
+ {"Montenegro", "Monténégro (le)", "ME", "MNE", "499"},
+ {"Montserrat", "Montserrat", "MS", "MSR", "500"},
+ {"Morocco", "Maroc (le)", "MA", "MAR", "504"},
+ {"Mozambique", "Mozambique (le)", "MZ", "MOZ", "508"},
+ {"Oman", "Oman", "OM", "OMN", "512"},
+ {"Namibia", "Namibie (la)", "NA", "NAM", "516"},
+ {"Nauru", "Nauru", "NR", "NRU", "520"},
+ {"Nepal", "Népal (le)", "NP", "NPL", "524"},
+ {"Netherlands (the)", "Pays-Bas (les)", "NL", "NLD", "528"},
+ {"Curaçao", "Curaçao", "CW", "CUW", "531"},
+ {"Aruba", "Aruba", "AW", "ABW", "533"},
+ {"Sint Maarten (Dutch part)", "Saint-Martin (partie néerlandaise)", "SX", "SXM", "534"},
+ {"Bonaire, Sint Eustatius and Saba", "Bonaire, Saint-Eustache et Saba", "BQ", "BES", "535"},
+ {"New Caledonia", "Nouvelle-Calédonie (la)", "NC", "NCL", "540"},
+ {"Vanuatu", "Vanuatu (le)", "VU", "VUT", "548"},
+ {"New Zealand", "Nouvelle-Zélande (la)", "NZ", "NZL", "554"},
+ {"Nicaragua", "Nicaragua (le)", "NI", "NIC", "558"},
+ {"Niger (the)", "Niger (le)", "NE", "NER", "562"},
+ {"Nigeria", "Nigéria (le)", "NG", "NGA", "566"},
+ {"Niue", "Niue", "NU", "NIU", "570"},
+ {"Norfolk Island", "Norfolk (l'Île)", "NF", "NFK", "574"},
+ {"Norway", "Norvège (la)", "NO", "NOR", "578"},
+ {"Northern Mariana Islands (the)", "Mariannes du Nord (les Îles)", "MP", "MNP", "580"},
+ {"United States Minor Outlying Islands (the)", "Îles mineures éloignées des États-Unis (les)", "UM", "UMI", "581"},
+ {"Micronesia (Federated States of)", "Micronésie (États fédérés de)", "FM", "FSM", "583"},
+ {"Marshall Islands (the)", "Marshall (Îles)", "MH", "MHL", "584"},
+ {"Palau", "Palaos (les)", "PW", "PLW", "585"},
+ {"Pakistan", "Pakistan (le)", "PK", "PAK", "586"},
+ {"Panama", "Panama (le)", "PA", "PAN", "591"},
+ {"Papua New Guinea", "Papouasie-Nouvelle-Guinée (la)", "PG", "PNG", "598"},
+ {"Paraguay", "Paraguay (le)", "PY", "PRY", "600"},
+ {"Peru", "Pérou (le)", "PE", "PER", "604"},
+ {"Philippines (the)", "Philippines (les)", "PH", "PHL", "608"},
+ {"Pitcairn", "Pitcairn", "PN", "PCN", "612"},
+ {"Poland", "Pologne (la)", "PL", "POL", "616"},
+ {"Portugal", "Portugal (le)", "PT", "PRT", "620"},
+ {"Guinea-Bissau", "Guinée-Bissau (la)", "GW", "GNB", "624"},
+ {"Timor-Leste", "Timor-Leste (le)", "TL", "TLS", "626"},
+ {"Puerto Rico", "Porto Rico", "PR", "PRI", "630"},
+ {"Qatar", "Qatar (le)", "QA", "QAT", "634"},
+ {"Réunion", "Réunion (La)", "RE", "REU", "638"},
+ {"Romania", "Roumanie (la)", "RO", "ROU", "642"},
+ {"Russian Federation (the)", "Russie (la Fédération de)", "RU", "RUS", "643"},
+ {"Rwanda", "Rwanda (le)", "RW", "RWA", "646"},
+ {"Saint Barthélemy", "Saint-Barthélemy", "BL", "BLM", "652"},
+ {"Saint Helena, Ascension and Tristan da Cunha", "Sainte-Hélène, Ascension et Tristan da Cunha", "SH", "SHN", "654"},
+ {"Saint Kitts and Nevis", "Saint-Kitts-et-Nevis", "KN", "KNA", "659"},
+ {"Anguilla", "Anguilla", "AI", "AIA", "660"},
+ {"Saint Lucia", "Sainte-Lucie", "LC", "LCA", "662"},
+ {"Saint Martin (French part)", "Saint-Martin (partie française)", "MF", "MAF", "663"},
+ {"Saint Pierre and Miquelon", "Saint-Pierre-et-Miquelon", "PM", "SPM", "666"},
+ {"Saint Vincent and the Grenadines", "Saint-Vincent-et-les Grenadines", "VC", "VCT", "670"},
+ {"San Marino", "Saint-Marin", "SM", "SMR", "674"},
+ {"Sao Tome and Principe", "Sao Tomé-et-Principe", "ST", "STP", "678"},
+ {"Saudi Arabia", "Arabie saoudite (l')", "SA", "SAU", "682"},
+ {"Senegal", "Sénégal (le)", "SN", "SEN", "686"},
+ {"Serbia", "Serbie (la)", "RS", "SRB", "688"},
+ {"Seychelles", "Seychelles (les)", "SC", "SYC", "690"},
+ {"Sierra Leone", "Sierra Leone (la)", "SL", "SLE", "694"},
+ {"Singapore", "Singapour", "SG", "SGP", "702"},
+ {"Slovakia", "Slovaquie (la)", "SK", "SVK", "703"},
+ {"Viet Nam", "Viet Nam (le)", "VN", "VNM", "704"},
+ {"Slovenia", "Slovénie (la)", "SI", "SVN", "705"},
+ {"Somalia", "Somalie (la)", "SO", "SOM", "706"},
+ {"South Africa", "Afrique du Sud (l')", "ZA", "ZAF", "710"},
+ {"Zimbabwe", "Zimbabwe (le)", "ZW", "ZWE", "716"},
+ {"Spain", "Espagne (l')", "ES", "ESP", "724"},
+ {"South Sudan", "Soudan du Sud (le)", "SS", "SSD", "728"},
+ {"Sudan (the)", "Soudan (le)", "SD", "SDN", "729"},
+ {"Western Sahara*", "Sahara occidental (le)*", "EH", "ESH", "732"},
+ {"Suriname", "Suriname (le)", "SR", "SUR", "740"},
+ {"Svalbard and Jan Mayen", "Svalbard et l'Île Jan Mayen (le)", "SJ", "SJM", "744"},
+ {"Swaziland", "Swaziland (le)", "SZ", "SWZ", "748"},
+ {"Sweden", "Suède (la)", "SE", "SWE", "752"},
+ {"Switzerland", "Suisse (la)", "CH", "CHE", "756"},
+ {"Syrian Arab Republic", "République arabe syrienne (la)", "SY", "SYR", "760"},
+ {"Tajikistan", "Tadjikistan (le)", "TJ", "TJK", "762"},
+ {"Thailand", "Thaïlande (la)", "TH", "THA", "764"},
+ {"Togo", "Togo (le)", "TG", "TGO", "768"},
+ {"Tokelau", "Tokelau (les)", "TK", "TKL", "772"},
+ {"Tonga", "Tonga (les)", "TO", "TON", "776"},
+ {"Trinidad and Tobago", "Trinité-et-Tobago (la)", "TT", "TTO", "780"},
+ {"United Arab Emirates (the)", "Émirats arabes unis (les)", "AE", "ARE", "784"},
+ {"Tunisia", "Tunisie (la)", "TN", "TUN", "788"},
+ {"Turkey", "Turquie (la)", "TR", "TUR", "792"},
+ {"Turkmenistan", "Turkménistan (le)", "TM", "TKM", "795"},
+ {"Turks and Caicos Islands (the)", "Turks-et-Caïcos (les Îles)", "TC", "TCA", "796"},
+ {"Tuvalu", "Tuvalu (les)", "TV", "TUV", "798"},
+ {"Uganda", "Ouganda (l')", "UG", "UGA", "800"},
+ {"Ukraine", "Ukraine (l')", "UA", "UKR", "804"},
+ {"Macedonia (the former Yugoslav Republic of)", "Macédoine (l'ex‑République yougoslave de)", "MK", "MKD", "807"},
+ {"Egypt", "Égypte (l')", "EG", "EGY", "818"},
+ {"United Kingdom of Great Britain and Northern Ireland (the)", "Royaume-Uni de Grande-Bretagne et d'Irlande du Nord (le)", "GB", "GBR", "826"},
+ {"Guernsey", "Guernesey", "GG", "GGY", "831"},
+ {"Jersey", "Jersey", "JE", "JEY", "832"},
+ {"Isle of Man", "Île de Man", "IM", "IMN", "833"},
+ {"Tanzania, United Republic of", "Tanzanie, République-Unie de", "TZ", "TZA", "834"},
+ {"United States of America (the)", "États-Unis d'Amérique (les)", "US", "USA", "840"},
+ {"Virgin Islands (U.S.)", "Vierges des États-Unis (les Îles)", "VI", "VIR", "850"},
+ {"Burkina Faso", "Burkina Faso (le)", "BF", "BFA", "854"},
+ {"Uruguay", "Uruguay (l')", "UY", "URY", "858"},
+ {"Uzbekistan", "Ouzbékistan (l')", "UZ", "UZB", "860"},
+ {"Venezuela (Bolivarian Republic of)", "Venezuela (République bolivarienne du)", "VE", "VEN", "862"},
+ {"Wallis and Futuna", "Wallis-et-Futuna", "WF", "WLF", "876"},
+ {"Samoa", "Samoa (le)", "WS", "WSM", "882"},
+ {"Yemen", "Yémen (le)", "YE", "YEM", "887"},
+ {"Zambia", "Zambie (la)", "ZM", "ZMB", "894"},
+}
+
+// ISO4217List is the list of ISO currency codes
+var ISO4217List = []string{
+ "AED", "AFN", "ALL", "AMD", "ANG", "AOA", "ARS", "AUD", "AWG", "AZN",
+ "BAM", "BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB", "BOV", "BRL", "BSD", "BTN", "BWP", "BYN", "BZD",
+ "CAD", "CDF", "CHE", "CHF", "CHW", "CLF", "CLP", "CNY", "COP", "COU", "CRC", "CUC", "CUP", "CVE", "CZK",
+ "DJF", "DKK", "DOP", "DZD",
+ "EGP", "ERN", "ETB", "EUR",
+ "FJD", "FKP",
+ "GBP", "GEL", "GHS", "GIP", "GMD", "GNF", "GTQ", "GYD",
+ "HKD", "HNL", "HRK", "HTG", "HUF",
+ "IDR", "ILS", "INR", "IQD", "IRR", "ISK",
+ "JMD", "JOD", "JPY",
+ "KES", "KGS", "KHR", "KMF", "KPW", "KRW", "KWD", "KYD", "KZT",
+ "LAK", "LBP", "LKR", "LRD", "LSL", "LYD",
+ "MAD", "MDL", "MGA", "MKD", "MMK", "MNT", "MOP", "MRO", "MUR", "MVR", "MWK", "MXN", "MXV", "MYR", "MZN",
+ "NAD", "NGN", "NIO", "NOK", "NPR", "NZD",
+ "OMR",
+ "PAB", "PEN", "PGK", "PHP", "PKR", "PLN", "PYG",
+ "QAR",
+ "RON", "RSD", "RUB", "RWF",
+ "SAR", "SBD", "SCR", "SDG", "SEK", "SGD", "SHP", "SLL", "SOS", "SRD", "SSP", "STD", "STN", "SVC", "SYP", "SZL",
+ "THB", "TJS", "TMT", "TND", "TOP", "TRY", "TTD", "TWD", "TZS",
+ "UAH", "UGX", "USD", "USN", "UYI", "UYU", "UYW", "UZS",
+ "VEF", "VES", "VND", "VUV",
+ "WST",
+ "XAF", "XAG", "XAU", "XBA", "XBB", "XBC", "XBD", "XCD", "XDR", "XOF", "XPD", "XPF", "XPT", "XSU", "XTS", "XUA", "XXX",
+ "YER",
+ "ZAR", "ZMW", "ZWL",
+}
+
+// ISO693Entry stores ISO language codes
+type ISO693Entry struct {
+ Alpha3bCode string
+ Alpha2Code string
+ English string
+}
+
+// ISO693List is based on http://data.okfn.org/data/core/language-codes/r/language-codes-3b2.json
+var ISO693List = []ISO693Entry{
+ {Alpha3bCode: "aar", Alpha2Code: "aa", English: "Afar"},
+ {Alpha3bCode: "abk", Alpha2Code: "ab", English: "Abkhazian"},
+ {Alpha3bCode: "afr", Alpha2Code: "af", English: "Afrikaans"},
+ {Alpha3bCode: "aka", Alpha2Code: "ak", English: "Akan"},
+ {Alpha3bCode: "alb", Alpha2Code: "sq", English: "Albanian"},
+ {Alpha3bCode: "amh", Alpha2Code: "am", English: "Amharic"},
+ {Alpha3bCode: "ara", Alpha2Code: "ar", English: "Arabic"},
+ {Alpha3bCode: "arg", Alpha2Code: "an", English: "Aragonese"},
+ {Alpha3bCode: "arm", Alpha2Code: "hy", English: "Armenian"},
+ {Alpha3bCode: "asm", Alpha2Code: "as", English: "Assamese"},
+ {Alpha3bCode: "ava", Alpha2Code: "av", English: "Avaric"},
+ {Alpha3bCode: "ave", Alpha2Code: "ae", English: "Avestan"},
+ {Alpha3bCode: "aym", Alpha2Code: "ay", English: "Aymara"},
+ {Alpha3bCode: "aze", Alpha2Code: "az", English: "Azerbaijani"},
+ {Alpha3bCode: "bak", Alpha2Code: "ba", English: "Bashkir"},
+ {Alpha3bCode: "bam", Alpha2Code: "bm", English: "Bambara"},
+ {Alpha3bCode: "baq", Alpha2Code: "eu", English: "Basque"},
+ {Alpha3bCode: "bel", Alpha2Code: "be", English: "Belarusian"},
+ {Alpha3bCode: "ben", Alpha2Code: "bn", English: "Bengali"},
+ {Alpha3bCode: "bih", Alpha2Code: "bh", English: "Bihari languages"},
+ {Alpha3bCode: "bis", Alpha2Code: "bi", English: "Bislama"},
+ {Alpha3bCode: "bos", Alpha2Code: "bs", English: "Bosnian"},
+ {Alpha3bCode: "bre", Alpha2Code: "br", English: "Breton"},
+ {Alpha3bCode: "bul", Alpha2Code: "bg", English: "Bulgarian"},
+ {Alpha3bCode: "bur", Alpha2Code: "my", English: "Burmese"},
+ {Alpha3bCode: "cat", Alpha2Code: "ca", English: "Catalan; Valencian"},
+ {Alpha3bCode: "cha", Alpha2Code: "ch", English: "Chamorro"},
+ {Alpha3bCode: "che", Alpha2Code: "ce", English: "Chechen"},
+ {Alpha3bCode: "chi", Alpha2Code: "zh", English: "Chinese"},
+ {Alpha3bCode: "chu", Alpha2Code: "cu", English: "Church Slavic; Old Slavonic; Church Slavonic; Old Bulgarian; Old Church Slavonic"},
+ {Alpha3bCode: "chv", Alpha2Code: "cv", English: "Chuvash"},
+ {Alpha3bCode: "cor", Alpha2Code: "kw", English: "Cornish"},
+ {Alpha3bCode: "cos", Alpha2Code: "co", English: "Corsican"},
+ {Alpha3bCode: "cre", Alpha2Code: "cr", English: "Cree"},
+ {Alpha3bCode: "cze", Alpha2Code: "cs", English: "Czech"},
+ {Alpha3bCode: "dan", Alpha2Code: "da", English: "Danish"},
+ {Alpha3bCode: "div", Alpha2Code: "dv", English: "Divehi; Dhivehi; Maldivian"},
+ {Alpha3bCode: "dut", Alpha2Code: "nl", English: "Dutch; Flemish"},
+ {Alpha3bCode: "dzo", Alpha2Code: "dz", English: "Dzongkha"},
+ {Alpha3bCode: "eng", Alpha2Code: "en", English: "English"},
+ {Alpha3bCode: "epo", Alpha2Code: "eo", English: "Esperanto"},
+ {Alpha3bCode: "est", Alpha2Code: "et", English: "Estonian"},
+ {Alpha3bCode: "ewe", Alpha2Code: "ee", English: "Ewe"},
+ {Alpha3bCode: "fao", Alpha2Code: "fo", English: "Faroese"},
+ {Alpha3bCode: "fij", Alpha2Code: "fj", English: "Fijian"},
+ {Alpha3bCode: "fin", Alpha2Code: "fi", English: "Finnish"},
+ {Alpha3bCode: "fre", Alpha2Code: "fr", English: "French"},
+ {Alpha3bCode: "fry", Alpha2Code: "fy", English: "Western Frisian"},
+ {Alpha3bCode: "ful", Alpha2Code: "ff", English: "Fulah"},
+ {Alpha3bCode: "geo", Alpha2Code: "ka", English: "Georgian"},
+ {Alpha3bCode: "ger", Alpha2Code: "de", English: "German"},
+ {Alpha3bCode: "gla", Alpha2Code: "gd", English: "Gaelic; Scottish Gaelic"},
+ {Alpha3bCode: "gle", Alpha2Code: "ga", English: "Irish"},
+ {Alpha3bCode: "glg", Alpha2Code: "gl", English: "Galician"},
+ {Alpha3bCode: "glv", Alpha2Code: "gv", English: "Manx"},
+ {Alpha3bCode: "gre", Alpha2Code: "el", English: "Greek, Modern (1453-)"},
+ {Alpha3bCode: "grn", Alpha2Code: "gn", English: "Guarani"},
+ {Alpha3bCode: "guj", Alpha2Code: "gu", English: "Gujarati"},
+ {Alpha3bCode: "hat", Alpha2Code: "ht", English: "Haitian; Haitian Creole"},
+ {Alpha3bCode: "hau", Alpha2Code: "ha", English: "Hausa"},
+ {Alpha3bCode: "heb", Alpha2Code: "he", English: "Hebrew"},
+ {Alpha3bCode: "her", Alpha2Code: "hz", English: "Herero"},
+ {Alpha3bCode: "hin", Alpha2Code: "hi", English: "Hindi"},
+ {Alpha3bCode: "hmo", Alpha2Code: "ho", English: "Hiri Motu"},
+ {Alpha3bCode: "hrv", Alpha2Code: "hr", English: "Croatian"},
+ {Alpha3bCode: "hun", Alpha2Code: "hu", English: "Hungarian"},
+ {Alpha3bCode: "ibo", Alpha2Code: "ig", English: "Igbo"},
+ {Alpha3bCode: "ice", Alpha2Code: "is", English: "Icelandic"},
+ {Alpha3bCode: "ido", Alpha2Code: "io", English: "Ido"},
+ {Alpha3bCode: "iii", Alpha2Code: "ii", English: "Sichuan Yi; Nuosu"},
+ {Alpha3bCode: "iku", Alpha2Code: "iu", English: "Inuktitut"},
+ {Alpha3bCode: "ile", Alpha2Code: "ie", English: "Interlingue; Occidental"},
+ {Alpha3bCode: "ina", Alpha2Code: "ia", English: "Interlingua (International Auxiliary Language Association)"},
+ {Alpha3bCode: "ind", Alpha2Code: "id", English: "Indonesian"},
+ {Alpha3bCode: "ipk", Alpha2Code: "ik", English: "Inupiaq"},
+ {Alpha3bCode: "ita", Alpha2Code: "it", English: "Italian"},
+ {Alpha3bCode: "jav", Alpha2Code: "jv", English: "Javanese"},
+ {Alpha3bCode: "jpn", Alpha2Code: "ja", English: "Japanese"},
+ {Alpha3bCode: "kal", Alpha2Code: "kl", English: "Kalaallisut; Greenlandic"},
+ {Alpha3bCode: "kan", Alpha2Code: "kn", English: "Kannada"},
+ {Alpha3bCode: "kas", Alpha2Code: "ks", English: "Kashmiri"},
+ {Alpha3bCode: "kau", Alpha2Code: "kr", English: "Kanuri"},
+ {Alpha3bCode: "kaz", Alpha2Code: "kk", English: "Kazakh"},
+ {Alpha3bCode: "khm", Alpha2Code: "km", English: "Central Khmer"},
+ {Alpha3bCode: "kik", Alpha2Code: "ki", English: "Kikuyu; Gikuyu"},
+ {Alpha3bCode: "kin", Alpha2Code: "rw", English: "Kinyarwanda"},
+ {Alpha3bCode: "kir", Alpha2Code: "ky", English: "Kirghiz; Kyrgyz"},
+ {Alpha3bCode: "kom", Alpha2Code: "kv", English: "Komi"},
+ {Alpha3bCode: "kon", Alpha2Code: "kg", English: "Kongo"},
+ {Alpha3bCode: "kor", Alpha2Code: "ko", English: "Korean"},
+ {Alpha3bCode: "kua", Alpha2Code: "kj", English: "Kuanyama; Kwanyama"},
+ {Alpha3bCode: "kur", Alpha2Code: "ku", English: "Kurdish"},
+ {Alpha3bCode: "lao", Alpha2Code: "lo", English: "Lao"},
+ {Alpha3bCode: "lat", Alpha2Code: "la", English: "Latin"},
+ {Alpha3bCode: "lav", Alpha2Code: "lv", English: "Latvian"},
+ {Alpha3bCode: "lim", Alpha2Code: "li", English: "Limburgan; Limburger; Limburgish"},
+ {Alpha3bCode: "lin", Alpha2Code: "ln", English: "Lingala"},
+ {Alpha3bCode: "lit", Alpha2Code: "lt", English: "Lithuanian"},
+ {Alpha3bCode: "ltz", Alpha2Code: "lb", English: "Luxembourgish; Letzeburgesch"},
+ {Alpha3bCode: "lub", Alpha2Code: "lu", English: "Luba-Katanga"},
+ {Alpha3bCode: "lug", Alpha2Code: "lg", English: "Ganda"},
+ {Alpha3bCode: "mac", Alpha2Code: "mk", English: "Macedonian"},
+ {Alpha3bCode: "mah", Alpha2Code: "mh", English: "Marshallese"},
+ {Alpha3bCode: "mal", Alpha2Code: "ml", English: "Malayalam"},
+ {Alpha3bCode: "mao", Alpha2Code: "mi", English: "Maori"},
+ {Alpha3bCode: "mar", Alpha2Code: "mr", English: "Marathi"},
+ {Alpha3bCode: "may", Alpha2Code: "ms", English: "Malay"},
+ {Alpha3bCode: "mlg", Alpha2Code: "mg", English: "Malagasy"},
+ {Alpha3bCode: "mlt", Alpha2Code: "mt", English: "Maltese"},
+ {Alpha3bCode: "mon", Alpha2Code: "mn", English: "Mongolian"},
+ {Alpha3bCode: "nau", Alpha2Code: "na", English: "Nauru"},
+ {Alpha3bCode: "nav", Alpha2Code: "nv", English: "Navajo; Navaho"},
+ {Alpha3bCode: "nbl", Alpha2Code: "nr", English: "Ndebele, South; South Ndebele"},
+ {Alpha3bCode: "nde", Alpha2Code: "nd", English: "Ndebele, North; North Ndebele"},
+ {Alpha3bCode: "ndo", Alpha2Code: "ng", English: "Ndonga"},
+ {Alpha3bCode: "nep", Alpha2Code: "ne", English: "Nepali"},
+ {Alpha3bCode: "nno", Alpha2Code: "nn", English: "Norwegian Nynorsk; Nynorsk, Norwegian"},
+ {Alpha3bCode: "nob", Alpha2Code: "nb", English: "Bokmål, Norwegian; Norwegian Bokmål"},
+ {Alpha3bCode: "nor", Alpha2Code: "no", English: "Norwegian"},
+ {Alpha3bCode: "nya", Alpha2Code: "ny", English: "Chichewa; Chewa; Nyanja"},
+ {Alpha3bCode: "oci", Alpha2Code: "oc", English: "Occitan (post 1500); Provençal"},
+ {Alpha3bCode: "oji", Alpha2Code: "oj", English: "Ojibwa"},
+ {Alpha3bCode: "ori", Alpha2Code: "or", English: "Oriya"},
+ {Alpha3bCode: "orm", Alpha2Code: "om", English: "Oromo"},
+ {Alpha3bCode: "oss", Alpha2Code: "os", English: "Ossetian; Ossetic"},
+ {Alpha3bCode: "pan", Alpha2Code: "pa", English: "Panjabi; Punjabi"},
+ {Alpha3bCode: "per", Alpha2Code: "fa", English: "Persian"},
+ {Alpha3bCode: "pli", Alpha2Code: "pi", English: "Pali"},
+ {Alpha3bCode: "pol", Alpha2Code: "pl", English: "Polish"},
+ {Alpha3bCode: "por", Alpha2Code: "pt", English: "Portuguese"},
+ {Alpha3bCode: "pus", Alpha2Code: "ps", English: "Pushto; Pashto"},
+ {Alpha3bCode: "que", Alpha2Code: "qu", English: "Quechua"},
+ {Alpha3bCode: "roh", Alpha2Code: "rm", English: "Romansh"},
+ {Alpha3bCode: "rum", Alpha2Code: "ro", English: "Romanian; Moldavian; Moldovan"},
+ {Alpha3bCode: "run", Alpha2Code: "rn", English: "Rundi"},
+ {Alpha3bCode: "rus", Alpha2Code: "ru", English: "Russian"},
+ {Alpha3bCode: "sag", Alpha2Code: "sg", English: "Sango"},
+ {Alpha3bCode: "san", Alpha2Code: "sa", English: "Sanskrit"},
+ {Alpha3bCode: "sin", Alpha2Code: "si", English: "Sinhala; Sinhalese"},
+ {Alpha3bCode: "slo", Alpha2Code: "sk", English: "Slovak"},
+ {Alpha3bCode: "slv", Alpha2Code: "sl", English: "Slovenian"},
+ {Alpha3bCode: "sme", Alpha2Code: "se", English: "Northern Sami"},
+ {Alpha3bCode: "smo", Alpha2Code: "sm", English: "Samoan"},
+ {Alpha3bCode: "sna", Alpha2Code: "sn", English: "Shona"},
+ {Alpha3bCode: "snd", Alpha2Code: "sd", English: "Sindhi"},
+ {Alpha3bCode: "som", Alpha2Code: "so", English: "Somali"},
+ {Alpha3bCode: "sot", Alpha2Code: "st", English: "Sotho, Southern"},
+ {Alpha3bCode: "spa", Alpha2Code: "es", English: "Spanish; Castilian"},
+ {Alpha3bCode: "srd", Alpha2Code: "sc", English: "Sardinian"},
+ {Alpha3bCode: "srp", Alpha2Code: "sr", English: "Serbian"},
+ {Alpha3bCode: "ssw", Alpha2Code: "ss", English: "Swati"},
+ {Alpha3bCode: "sun", Alpha2Code: "su", English: "Sundanese"},
+ {Alpha3bCode: "swa", Alpha2Code: "sw", English: "Swahili"},
+ {Alpha3bCode: "swe", Alpha2Code: "sv", English: "Swedish"},
+ {Alpha3bCode: "tah", Alpha2Code: "ty", English: "Tahitian"},
+ {Alpha3bCode: "tam", Alpha2Code: "ta", English: "Tamil"},
+ {Alpha3bCode: "tat", Alpha2Code: "tt", English: "Tatar"},
+ {Alpha3bCode: "tel", Alpha2Code: "te", English: "Telugu"},
+ {Alpha3bCode: "tgk", Alpha2Code: "tg", English: "Tajik"},
+ {Alpha3bCode: "tgl", Alpha2Code: "tl", English: "Tagalog"},
+ {Alpha3bCode: "tha", Alpha2Code: "th", English: "Thai"},
+ {Alpha3bCode: "tib", Alpha2Code: "bo", English: "Tibetan"},
+ {Alpha3bCode: "tir", Alpha2Code: "ti", English: "Tigrinya"},
+ {Alpha3bCode: "ton", Alpha2Code: "to", English: "Tonga (Tonga Islands)"},
+ {Alpha3bCode: "tsn", Alpha2Code: "tn", English: "Tswana"},
+ {Alpha3bCode: "tso", Alpha2Code: "ts", English: "Tsonga"},
+ {Alpha3bCode: "tuk", Alpha2Code: "tk", English: "Turkmen"},
+ {Alpha3bCode: "tur", Alpha2Code: "tr", English: "Turkish"},
+ {Alpha3bCode: "twi", Alpha2Code: "tw", English: "Twi"},
+ {Alpha3bCode: "uig", Alpha2Code: "ug", English: "Uighur; Uyghur"},
+ {Alpha3bCode: "ukr", Alpha2Code: "uk", English: "Ukrainian"},
+ {Alpha3bCode: "urd", Alpha2Code: "ur", English: "Urdu"},
+ {Alpha3bCode: "uzb", Alpha2Code: "uz", English: "Uzbek"},
+ {Alpha3bCode: "ven", Alpha2Code: "ve", English: "Venda"},
+ {Alpha3bCode: "vie", Alpha2Code: "vi", English: "Vietnamese"},
+ {Alpha3bCode: "vol", Alpha2Code: "vo", English: "Volapük"},
+ {Alpha3bCode: "wel", Alpha2Code: "cy", English: "Welsh"},
+ {Alpha3bCode: "wln", Alpha2Code: "wa", English: "Walloon"},
+ {Alpha3bCode: "wol", Alpha2Code: "wo", English: "Wolof"},
+ {Alpha3bCode: "xho", Alpha2Code: "xh", English: "Xhosa"},
+ {Alpha3bCode: "yid", Alpha2Code: "yi", English: "Yiddish"},
+ {Alpha3bCode: "yor", Alpha2Code: "yo", English: "Yoruba"},
+ {Alpha3bCode: "zha", Alpha2Code: "za", English: "Zhuang; Chuang"},
+ {Alpha3bCode: "zul", Alpha2Code: "zu", English: "Zulu"},
+}
diff --git a/vendor/github.com/asaskevich/govalidator/utils.go b/vendor/github.com/asaskevich/govalidator/utils.go
new file mode 100644
index 000000000..f4c30f824
--- /dev/null
+++ b/vendor/github.com/asaskevich/govalidator/utils.go
@@ -0,0 +1,270 @@
+package govalidator
+
+import (
+ "errors"
+ "fmt"
+ "html"
+ "math"
+ "path"
+ "regexp"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// Contains checks if the string contains the substring.
+func Contains(str, substring string) bool {
+ return strings.Contains(str, substring)
+}
+
+// Matches checks if the string matches the pattern (the pattern is a regular expression).
+// In case of error it returns false.
+func Matches(str, pattern string) bool {
+ match, _ := regexp.MatchString(pattern, str)
+ return match
+}
+
+// LeftTrim trims characters from the left side of the input.
+// If the second argument is empty, it will remove leading spaces.
+func LeftTrim(str, chars string) string {
+ if chars == "" {
+ return strings.TrimLeftFunc(str, unicode.IsSpace)
+ }
+ r, _ := regexp.Compile("^[" + chars + "]+")
+ return r.ReplaceAllString(str, "")
+}
+
+// RightTrim trims characters from the right side of the input.
+// If the second argument is empty, it will remove trailing spaces.
+func RightTrim(str, chars string) string {
+ if chars == "" {
+ return strings.TrimRightFunc(str, unicode.IsSpace)
+ }
+ r, _ := regexp.Compile("[" + chars + "]+$")
+ return r.ReplaceAllString(str, "")
+}
+
+// Trim trims characters from both sides of the input.
+// If the second argument is empty, it will remove spaces.
+func Trim(str, chars string) string {
+ return LeftTrim(RightTrim(str, chars), chars)
+}
+
+// WhiteList removes characters that do not appear in the whitelist.
+func WhiteList(str, chars string) string {
+ pattern := "[^" + chars + "]+"
+ r, _ := regexp.Compile(pattern)
+ return r.ReplaceAllString(str, "")
+}
+
+// BlackList removes characters that appear in the blacklist.
+func BlackList(str, chars string) string {
+ pattern := "[" + chars + "]+"
+ r, _ := regexp.Compile(pattern)
+ return r.ReplaceAllString(str, "")
+}
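+
+// Illustrative calls (values are made up): chars is spliced into a regexp
+// character class, so ranges such as "0-9" behave as expected:
+//
+//     WhiteList("a1b2c3", "0-9") // "123" (keeps only digits)
+//     BlackList("a1b2c3", "0-9") // "abc" (removes digits)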
+
+// StripLow removes characters with a numerical value < 32, as well as 127; these are mostly control characters.
+// If keepNewLines is true, newline characters are preserved (\n and \r, hex 0xA and 0xD).
+func StripLow(str string, keepNewLines bool) string {
+ chars := ""
+ if keepNewLines {
+ chars = "\x00-\x09\x0B\x0C\x0E-\x1F\x7F"
+ } else {
+ chars = "\x00-\x1F\x7F"
+ }
+ return BlackList(str, chars)
+}
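+
+// Illustrative call: with keepNewLines set, CR and LF survive while other
+// control characters are stripped:
+//
+//     StripLow("one\r\ntwo\x00", true)  // "one\r\ntwo"
+//     StripLow("one\r\ntwo\x00", false) // "onetwo"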
+
+// ReplacePattern replaces all matches of the regular expression pattern in str with replace.
+func ReplacePattern(str, pattern, replace string) string {
+ r, _ := regexp.Compile(pattern)
+ return r.ReplaceAllString(str, replace)
+}
+
+// Escape replaces <, >, & and " with HTML entities.
+var Escape = html.EscapeString
+
+func addSegment(inrune, segment []rune) []rune {
+ if len(segment) == 0 {
+ return inrune
+ }
+ if len(inrune) != 0 {
+ inrune = append(inrune, '_')
+ }
+ inrune = append(inrune, segment...)
+ return inrune
+}
+
+// UnderscoreToCamelCase converts from underscore separated form to camel case form.
+// Ex.: my_func => MyFunc
+func UnderscoreToCamelCase(s string) string {
+ return strings.Replace(strings.Title(strings.Replace(strings.ToLower(s), "_", " ", -1)), " ", "", -1)
+}
+
+// CamelCaseToUnderscore converts from camel case form to underscore separated form.
+// Ex.: MyFunc => my_func
+func CamelCaseToUnderscore(str string) string {
+ var output []rune
+ var segment []rune
+ for _, r := range str {
+
+ // don't treat numbers as separate segments
+ if !unicode.IsLower(r) && string(r) != "_" && !unicode.IsNumber(r) {
+ output = addSegment(output, segment)
+ segment = nil
+ }
+ segment = append(segment, unicode.ToLower(r))
+ }
+ output = addSegment(output, segment)
+ return string(output)
+}
+
+// Reverse returns the reversed string
+func Reverse(s string) string {
+ r := []rune(s)
+ for i, j := 0, len(r)-1; i < j; i, j = i+1, j-1 {
+ r[i], r[j] = r[j], r[i]
+ }
+ return string(r)
+}
+
+// GetLines splits the string by "\n" and returns an array of lines
+func GetLines(s string) []string {
+ return strings.Split(s, "\n")
+}
+
+// GetLine returns the specified line of a multiline string
+func GetLine(s string, index int) (string, error) {
+ lines := GetLines(s)
+ if index < 0 || index >= len(lines) {
+ return "", errors.New("line index out of bounds")
+ }
+ return lines[index], nil
+}
+
+// RemoveTags removes all tags from an HTML string
+func RemoveTags(s string) string {
+ return ReplacePattern(s, "<[^>]*>", "")
+}
+
+// SafeFileName returns a safe string that can be used in file names
+func SafeFileName(str string) string {
+ name := strings.ToLower(str)
+ name = path.Clean(path.Base(name))
+ name = strings.Trim(name, " ")
+ separators, err := regexp.Compile(`[ &_=+:]`)
+ if err == nil {
+ name = separators.ReplaceAllString(name, "-")
+ }
+ legal, err := regexp.Compile(`[^[:alnum:]-.]`)
+ if err == nil {
+ name = legal.ReplaceAllString(name, "")
+ }
+ for strings.Contains(name, "--") {
+ name = strings.Replace(name, "--", "-", -1)
+ }
+ return name
+}
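+
+// A hypothetical input, to make the pipeline concrete: lowercase, replace
+// separators with "-", drop illegal characters, collapse "--":
+//
+//     SafeFileName("My Report (Final).PDF") // "my-report-final.pdf"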
+
+// NormalizeEmail canonicalizes an email address.
+// Both the local part and the hostname are lowercased.
+// Normalization follows special rules for known providers: currently, GMail addresses have dots removed in the local part,
+// are stripped of tags (e.g. some.one+tag@gmail.com becomes someone@gmail.com), and all @googlemail.com addresses are
+// normalized to @gmail.com.
+func NormalizeEmail(str string) (string, error) {
+ if !IsEmail(str) {
+ return "", fmt.Errorf("%s is not an email", str)
+ }
+ parts := strings.Split(str, "@")
+ parts[0] = strings.ToLower(parts[0])
+ parts[1] = strings.ToLower(parts[1])
+ if parts[1] == "gmail.com" || parts[1] == "googlemail.com" {
+ parts[1] = "gmail.com"
+ parts[0] = strings.Split(ReplacePattern(parts[0], `\.`, ""), "+")[0]
+ }
+ return strings.Join(parts, "@"), nil
+}
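+
+// A minimal sketch with a made-up address (assuming it passes IsEmail): dots
+// and "+tag" suffixes in the local part only matter for the GMail case:
+//
+//     normalized, _ := NormalizeEmail("some.one+news@googlemail.com")
+//     // normalized == "someone@gmail.com"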
+
+// Truncate truncates a string at the word boundary closest to the given length and appends ending.
+func Truncate(str string, length int, ending string) string {
+ var aftstr, befstr string
+ if len(str) > length {
+ words := strings.Fields(str)
+ before, present := 0, 0
+ for i := range words {
+ befstr = aftstr
+ before = present
+ aftstr = aftstr + words[i] + " "
+ present = len(aftstr)
+ if present > length && i != 0 {
+ if (length - before) < (present - length) {
+ return Trim(befstr, " /\\.,\"'#!?&@+-") + ending
+ }
+ return Trim(aftstr, " /\\.,\"'#!?&@+-") + ending
+ }
+ }
+ }
+
+ return str
+}
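+
+// A worked example (illustrative string): the cut lands on whichever word
+// boundary is closest to the requested length, then ending is appended:
+//
+//     Truncate("The quick brown fox jumps over the lazy dog", 25, "...")
+//     // "The quick brown fox jumps..."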
+
+// PadLeft pads the left side of a string if the size of the string is less than the indicated pad length
+func PadLeft(str string, padStr string, padLen int) string {
+ return buildPadStr(str, padStr, padLen, true, false)
+}
+
+// PadRight pads the right side of a string if the size of the string is less than the indicated pad length
+func PadRight(str string, padStr string, padLen int) string {
+ return buildPadStr(str, padStr, padLen, false, true)
+}
+
+// PadBoth pads both sides of a string if the size of the string is less than the indicated pad length
+func PadBoth(str string, padStr string, padLen int) string {
+ return buildPadStr(str, padStr, padLen, true, true)
+}
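+
+// Hypothetical inputs covering the three variants; for PadBoth any odd
+// remainder goes to the right side:
+//
+//     PadLeft("abc", "*", 6)  // "***abc"
+//     PadRight("abc", "*", 6) // "abc***"
+//     PadBoth("abc", "*", 8)  // "**abc***"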
+
+// buildPadStr pads a string on the left, right, or both sides.
+// Note that the padding string can be unicode and more than one character
+func buildPadStr(str string, padStr string, padLen int, padLeft bool, padRight bool) string {
+
+ // When the padded length is less than the current string size
+ if padLen < utf8.RuneCountInString(str) {
+ return str
+ }
+
+ padLen -= utf8.RuneCountInString(str)
+
+ targetLen := padLen
+
+ targetLenLeft := targetLen
+ targetLenRight := targetLen
+ if padLeft && padRight {
+ targetLenLeft = padLen / 2
+ targetLenRight = padLen - targetLenLeft
+ }
+
+ strToRepeatLen := utf8.RuneCountInString(padStr)
+
+ repeatTimes := int(math.Ceil(float64(targetLen) / float64(strToRepeatLen)))
+ repeatedString := strings.Repeat(padStr, repeatTimes)
+
+ leftSide := ""
+ if padLeft {
+ leftSide = repeatedString[0:targetLenLeft]
+ }
+
+ rightSide := ""
+ if padRight {
+ rightSide = repeatedString[0:targetLenRight]
+ }
+
+ return leftSide + str + rightSide
+}
+
+// TruncatingErrorf drops any extra args passed to fmt.Errorf that have no matching %s verb in str
+func TruncatingErrorf(str string, args ...interface{}) error {
+ n := strings.Count(str, "%s")
+ return fmt.Errorf(str, args[:n]...)
+}
diff --git a/vendor/github.com/asaskevich/govalidator/validator.go b/vendor/github.com/asaskevich/govalidator/validator.go
new file mode 100644
index 000000000..c9c4fac06
--- /dev/null
+++ b/vendor/github.com/asaskevich/govalidator/validator.go
@@ -0,0 +1,1768 @@
+// Package govalidator is a package of validators and sanitizers for strings, structs and collections.
+package govalidator
+
+import (
+ "bytes"
+ "crypto/rsa"
+ "crypto/x509"
+ "encoding/base64"
+ "encoding/json"
+ "encoding/pem"
+ "fmt"
+ "io/ioutil"
+ "net"
+ "net/url"
+ "reflect"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+ "unicode"
+ "unicode/utf8"
+)
+
+var (
+ fieldsRequiredByDefault bool
+ nilPtrAllowedByRequired = false
+ notNumberRegexp = regexp.MustCompile("[^0-9]+")
+ whiteSpacesAndMinus = regexp.MustCompile(`[\s-]+`)
+ paramsRegexp = regexp.MustCompile(`\(.*\)$`)
+)
+
+const maxURLRuneCount = 2083
+const minURLRuneCount = 3
+const rfc3339WithoutZone = "2006-01-02T15:04:05"
+
+// SetFieldsRequiredByDefault causes validation to fail when struct fields
+// do not include validations or are not explicitly marked as exempt (using `valid:"-"` or `valid:"email,optional"`).
+// This struct definition will fail govalidator.ValidateStruct() (and the field values do not matter):
+//
+//     type exampleStruct struct {
+//         Name  string ``
+//         Email string `valid:"email"`
+//     }
+//
+// This, however, will only fail when Email is empty or an invalid email address:
+//
+//     type exampleStruct2 struct {
+//         Name  string `valid:"-"`
+//         Email string `valid:"email"`
+//     }
+//
+// Lastly, this will only fail when Email is an invalid email address but not when it's empty:
+//
+//     type exampleStruct2 struct {
+//         Name  string `valid:"-"`
+//         Email string `valid:"email,optional"`
+//     }
+func SetFieldsRequiredByDefault(value bool) {
+ fieldsRequiredByDefault = value
+}
+
+// SetNilPtrAllowedByRequired causes validation to pass for nil ptrs when a field is set to required.
+// The validation will still reject ptr fields in their zero value state. Example with this enabled:
+//
+//     type exampleStruct struct {
+//         Name *string `valid:"required"`
+//     }
+//
+// With `Name` set to "", this will be considered invalid input and will cause a validation error.
+// With `Name` set to nil, this will be considered valid by validation.
+// By default this is disabled.
+func SetNilPtrAllowedByRequired(value bool) {
+ nilPtrAllowedByRequired = value
+}
+
+// IsEmail checks if the string is an email.
+func IsEmail(str string) bool {
+ // TODO uppercase letters are not supported
+ return rxEmail.MatchString(str)
+}
+
+// IsExistingEmail checks if the string is an email address with an existing (resolvable) domain
+func IsExistingEmail(email string) bool {
+
+ if len(email) < 6 || len(email) > 254 {
+ return false
+ }
+ at := strings.LastIndex(email, "@")
+ if at <= 0 || at > len(email)-3 {
+ return false
+ }
+ user := email[:at]
+ host := email[at+1:]
+ if len(user) > 64 {
+ return false
+ }
+ switch host {
+ case "localhost", "example.com":
+ return true
+ }
+ if userDotRegexp.MatchString(user) || !userRegexp.MatchString(user) || !hostRegexp.MatchString(host) {
+ return false
+ }
+ if _, err := net.LookupMX(host); err != nil {
+ if _, err := net.LookupIP(host); err != nil {
+ return false
+ }
+ }
+
+ return true
+}
+
+// IsURL checks if the string is a URL.
+func IsURL(str string) bool {
+ if str == "" || utf8.RuneCountInString(str) >= maxURLRuneCount || len(str) <= minURLRuneCount || strings.HasPrefix(str, ".") {
+ return false
+ }
+ strTemp := str
+ if strings.Contains(str, ":") && !strings.Contains(str, "://") {
+ // support URLs with no indicated scheme but with a colon for the port number;
+ // http:// is appended so url.Parse will succeed, and strTemp is used so it does not impact rxURL.MatchString
+ strTemp = "http://" + str
+ }
+ u, err := url.Parse(strTemp)
+ if err != nil {
+ return false
+ }
+ if strings.HasPrefix(u.Host, ".") {
+ return false
+ }
+ if u.Host == "" && (u.Path != "" && !strings.Contains(u.Path, ".")) {
+ return false
+ }
+ return rxURL.MatchString(str)
+}
+
+// IsRequestURL checks if the string rawurl, assuming
+// it was received in an HTTP request, is a valid
+// URL conforming to RFC 3986
+func IsRequestURL(rawurl string) bool {
+ url, err := url.ParseRequestURI(rawurl)
+ if err != nil {
+ return false //Couldn't even parse the rawurl
+ }
+ if len(url.Scheme) == 0 {
+ return false //No Scheme found
+ }
+ return true
+}
+
+// IsRequestURI checks if the string rawurl, assuming
+// it was received in an HTTP request, is an
+// absolute URI or an absolute path.
+func IsRequestURI(rawurl string) bool {
+ _, err := url.ParseRequestURI(rawurl)
+ return err == nil
+}
+
+// IsAlpha checks if the string contains only letters (a-zA-Z). Empty string is valid.
+func IsAlpha(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ return rxAlpha.MatchString(str)
+}
+
+// IsUTFLetter checks if the string contains only unicode letter characters.
+// Similar to IsAlpha but for all languages. Empty string is valid.
+func IsUTFLetter(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+
+ for _, c := range str {
+ if !unicode.IsLetter(c) {
+ return false
+ }
+ }
+ return true
+
+}
+
+// IsAlphanumeric checks if the string contains only letters and numbers. Empty string is valid.
+func IsAlphanumeric(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ return rxAlphanumeric.MatchString(str)
+}
+
+// IsUTFLetterNumeric checks if the string contains only unicode letters and numbers. Empty string is valid.
+func IsUTFLetterNumeric(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ for _, c := range str {
+ if !unicode.IsLetter(c) && !unicode.IsNumber(c) { //letters && numbers are ok
+ return false
+ }
+ }
+ return true
+
+}
+
+// IsNumeric checks if the string contains only numbers. Empty string is valid.
+func IsNumeric(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ return rxNumeric.MatchString(str)
+}
+
+// IsUTFNumeric checks if the string contains only unicode numbers of any kind.
+// Numbers can be 0-9 but also fractions ¾, Roman Ⅸ, and Hangzhou 〩. Empty string is valid.
+func IsUTFNumeric(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ if strings.IndexAny(str, "+-") > 0 {
+ return false
+ }
+ if len(str) > 1 {
+ str = strings.TrimPrefix(str, "-")
+ str = strings.TrimPrefix(str, "+")
+ }
+ for _, c := range str {
+ if !unicode.IsNumber(c) { //numbers && minus sign are ok
+ return false
+ }
+ }
+ return true
+
+}
+
+// IsUTFDigit checks if the string contains only unicode radix-10 decimal digits. Empty string is valid.
+func IsUTFDigit(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ if strings.IndexAny(str, "+-") > 0 {
+ return false
+ }
+ if len(str) > 1 {
+ str = strings.TrimPrefix(str, "-")
+ str = strings.TrimPrefix(str, "+")
+ }
+ for _, c := range str {
+ if !unicode.IsDigit(c) { //digits && minus sign are ok
+ return false
+ }
+ }
+ return true
+
+}
+
+// IsHexadecimal checks if the string is a hexadecimal number.
+func IsHexadecimal(str string) bool {
+ return rxHexadecimal.MatchString(str)
+}
+
+// IsHexcolor checks if the string is a hexadecimal color.
+func IsHexcolor(str string) bool {
+ return rxHexcolor.MatchString(str)
+}
+
+// IsRGBcolor checks if the string is a valid RGB color in form rgb(RRR, GGG, BBB).
+func IsRGBcolor(str string) bool {
+ return rxRGBcolor.MatchString(str)
+}
+
+// IsLowerCase checks if the string is lowercase. Empty string is valid.
+func IsLowerCase(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ return str == strings.ToLower(str)
+}
+
+// IsUpperCase checks if the string is uppercase. Empty string is valid.
+func IsUpperCase(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ return str == strings.ToUpper(str)
+}
+
+// HasLowerCase checks if the string contains at least 1 lowercase. Empty string is valid.
+func HasLowerCase(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ return rxHasLowerCase.MatchString(str)
+}
+
+// HasUpperCase checks if the string contains at least 1 uppercase. Empty string is valid.
+func HasUpperCase(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ return rxHasUpperCase.MatchString(str)
+}
+
+// IsInt checks if the string is an integer. Empty string is valid.
+func IsInt(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ return rxInt.MatchString(str)
+}
+
+// IsFloat checks if the string is a float.
+func IsFloat(str string) bool {
+ return str != "" && rxFloat.MatchString(str)
+}
+
+// IsDivisibleBy checks if the string is a number that's divisible by another.
+// If the second argument is not a valid integer, or is zero, it returns false.
+// Otherwise, if the first argument is not a valid integer, or is zero, it returns true (an invalid string converts to zero).
+func IsDivisibleBy(str, num string) bool {
+ f, _ := ToFloat(str)
+ p := int64(f)
+ q, _ := ToInt(num)
+ if q == 0 {
+ return false
+ }
+ return (p == 0) || (p%q == 0)
+}
+
+// IsNull checks if the string is null.
+func IsNull(str string) bool {
+ return len(str) == 0
+}
+
+// IsNotNull checks if the string is not null.
+func IsNotNull(str string) bool {
+ return !IsNull(str)
+}
+
+// HasWhitespaceOnly checks if the string contains only whitespace
+func HasWhitespaceOnly(str string) bool {
+ return len(str) > 0 && rxHasWhitespaceOnly.MatchString(str)
+}
+
+// HasWhitespace checks if the string contains any whitespace
+func HasWhitespace(str string) bool {
+ return len(str) > 0 && rxHasWhitespace.MatchString(str)
+}
+
+// IsByteLength checks if the string's length (in bytes) falls in a range.
+func IsByteLength(str string, min, max int) bool {
+ return len(str) >= min && len(str) <= max
+}
+
+// IsUUIDv3 checks if the string is a UUID version 3.
+func IsUUIDv3(str string) bool {
+ return rxUUID3.MatchString(str)
+}
+
+// IsUUIDv4 checks if the string is a UUID version 4.
+func IsUUIDv4(str string) bool {
+ return rxUUID4.MatchString(str)
+}
+
+// IsUUIDv5 checks if the string is a UUID version 5.
+func IsUUIDv5(str string) bool {
+ return rxUUID5.MatchString(str)
+}
+
+// IsUUID checks if the string is a UUID (version 3, 4 or 5).
+func IsUUID(str string) bool {
+ return rxUUID.MatchString(str)
+}
+
+// Byte to index table for O(1) lookups when unmarshaling.
+// We use 0xFF as sentinel value for invalid indexes.
+var ulidDec = [...]byte{
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x01,
+ 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E,
+ 0x0F, 0x10, 0x11, 0xFF, 0x12, 0x13, 0xFF, 0x14, 0x15, 0xFF,
+ 0x16, 0x17, 0x18, 0x19, 0x1A, 0xFF, 0x1B, 0x1C, 0x1D, 0x1E,
+ 0x1F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0A, 0x0B, 0x0C,
+ 0x0D, 0x0E, 0x0F, 0x10, 0x11, 0xFF, 0x12, 0x13, 0xFF, 0x14,
+ 0x15, 0xFF, 0x16, 0x17, 0x18, 0x19, 0x1A, 0xFF, 0x1B, 0x1C,
+ 0x1D, 0x1E, 0x1F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+}
+
+// ulidEncodedSize is the length of a text-encoded ULID.
+const ulidEncodedSize = 26
+
+// IsULID checks if the string is a ULID.
+//
+// Implementation adapted from:
+// https://github.com/oklog/ulid (Apache-2.0 License)
+//
+func IsULID(str string) bool {
+ // Check if a base32 encoded ULID is the right length.
+ if len(str) != ulidEncodedSize {
+ return false
+ }
+
+ // Check if all the characters in a base32 encoded ULID are part of the
+ // expected base32 character set.
+ if ulidDec[str[0]] == 0xFF ||
+ ulidDec[str[1]] == 0xFF ||
+ ulidDec[str[2]] == 0xFF ||
+ ulidDec[str[3]] == 0xFF ||
+ ulidDec[str[4]] == 0xFF ||
+ ulidDec[str[5]] == 0xFF ||
+ ulidDec[str[6]] == 0xFF ||
+ ulidDec[str[7]] == 0xFF ||
+ ulidDec[str[8]] == 0xFF ||
+ ulidDec[str[9]] == 0xFF ||
+ ulidDec[str[10]] == 0xFF ||
+ ulidDec[str[11]] == 0xFF ||
+ ulidDec[str[12]] == 0xFF ||
+ ulidDec[str[13]] == 0xFF ||
+ ulidDec[str[14]] == 0xFF ||
+ ulidDec[str[15]] == 0xFF ||
+ ulidDec[str[16]] == 0xFF ||
+ ulidDec[str[17]] == 0xFF ||
+ ulidDec[str[18]] == 0xFF ||
+ ulidDec[str[19]] == 0xFF ||
+ ulidDec[str[20]] == 0xFF ||
+ ulidDec[str[21]] == 0xFF ||
+ ulidDec[str[22]] == 0xFF ||
+ ulidDec[str[23]] == 0xFF ||
+ ulidDec[str[24]] == 0xFF ||
+ ulidDec[str[25]] == 0xFF {
+ return false
+ }
+
+ // Check if the first character in a base32 encoded ULID will overflow. This
+ // happens because the base32 representation encodes 130 bits, while the
+ // ULID is only 128 bits.
+ //
+ // See https://github.com/oklog/ulid/issues/9 for details.
+ if str[0] > '7' {
+ return false
+ }
+ return true
+}
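+
+// An illustrative pair: a ULID is 26 characters of Crockford base32 with a
+// first character no greater than '7':
+//
+//     IsULID("01G65Z755AFWAKHE12NY0CQ9FH") // true
+//     IsULID("01G65Z755AFWAKHE12NY0CQ9F")  // false (only 25 characters)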
+
+// IsCreditCard checks if the string is a credit card.
+func IsCreditCard(str string) bool {
+ sanitized := whiteSpacesAndMinus.ReplaceAllString(str, "")
+ if !rxCreditCard.MatchString(sanitized) {
+ return false
+ }
+
+ number, _ := ToInt(sanitized)
+ number, lastDigit := number / 10, number % 10
+
+ var sum int64
+ for i := 0; number > 0; i++ {
+ digit := number % 10
+
+ if i%2 == 0 {
+ digit *= 2
+ if digit > 9 {
+ digit -= 9
+ }
+ }
+
+ sum += digit
+ number = number / 10
+ }
+
+ return (sum+lastDigit)%10 == 0
+}
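+
+// A worked Luhn example using the classic Visa test number (illustrative):
+// the check digit is split off, every second digit from the right is doubled
+// (minus 9 when the double exceeds 9), and the running sum plus the check
+// digit must be divisible by 10:
+//
+//     IsCreditCard("4111 1111 1111 1111") // true
+//     IsCreditCard("4111 1111 1111 1112") // false (checksum off by one)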
+
+// IsISBN10 checks if the string is an ISBN version 10.
+func IsISBN10(str string) bool {
+ return IsISBN(str, 10)
+}
+
+// IsISBN13 checks if the string is an ISBN version 13.
+func IsISBN13(str string) bool {
+ return IsISBN(str, 13)
+}
+
+// IsISBN checks if the string is an ISBN (version 10 or 13).
+// If the version value is not equal to 10 or 13, it will check both variants.
+func IsISBN(str string, version int) bool {
+ sanitized := whiteSpacesAndMinus.ReplaceAllString(str, "")
+ var checksum int32
+ var i int32
+ if version == 10 {
+ if !rxISBN10.MatchString(sanitized) {
+ return false
+ }
+ for i = 0; i < 9; i++ {
+ checksum += (i + 1) * int32(sanitized[i]-'0')
+ }
+ if sanitized[9] == 'X' {
+ checksum += 10 * 10
+ } else {
+ checksum += 10 * int32(sanitized[9]-'0')
+ }
+ if checksum%11 == 0 {
+ return true
+ }
+ return false
+ } else if version == 13 {
+ if !rxISBN13.MatchString(sanitized) {
+ return false
+ }
+ factor := []int32{1, 3}
+ for i = 0; i < 12; i++ {
+ checksum += factor[i%2] * int32(sanitized[i]-'0')
+ }
+ return (int32(sanitized[12]-'0'))-((10-(checksum%10))%10) == 0
+ }
+ return IsISBN(str, 10) || IsISBN(str, 13)
+}
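+
+// A worked ISBN-10 example (a well-known valid ISBN): weights 1..9 apply to
+// the first nine digits, the check digit is weighted by 10, and the total
+// must be divisible by 11; hyphens and spaces are stripped first:
+//
+//     IsISBN10("0-306-40615-2") // true: 145 + 10*2 = 165 = 15*11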
+
+// IsJSON checks if the string is valid JSON (note: uses json.Unmarshal).
+func IsJSON(str string) bool {
+ var js json.RawMessage
+ return json.Unmarshal([]byte(str), &js) == nil
+}
+
+// IsMultibyte checks if the string contains one or more multibyte chars. Empty string is valid.
+func IsMultibyte(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ return rxMultibyte.MatchString(str)
+}
+
+// IsASCII checks if the string contains ASCII chars only. Empty string is valid.
+func IsASCII(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ return rxASCII.MatchString(str)
+}
+
+// IsPrintableASCII checks if the string contains printable ASCII chars only. Empty string is valid.
+func IsPrintableASCII(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ return rxPrintableASCII.MatchString(str)
+}
+
+// IsFullWidth checks if the string contains any full-width chars. Empty string is valid.
+func IsFullWidth(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ return rxFullWidth.MatchString(str)
+}
+
+// IsHalfWidth checks if the string contains any half-width chars. Empty string is valid.
+func IsHalfWidth(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ return rxHalfWidth.MatchString(str)
+}
+
+// IsVariableWidth checks if the string contains a mixture of full and half-width chars. Empty string is valid.
+func IsVariableWidth(str string) bool {
+ if IsNull(str) {
+ return true
+ }
+ return rxHalfWidth.MatchString(str) && rxFullWidth.MatchString(str)
+}
+
+// IsBase64 checks if a string is base64 encoded.
+func IsBase64(str string) bool {
+ return rxBase64.MatchString(str)
+}
+
+// IsFilePath checks if a string is a Win or Unix file path and returns its type.
+func IsFilePath(str string) (bool, int) {
+ if rxWinPath.MatchString(str) {
+ //check windows path limit see:
+ // http://msdn.microsoft.com/en-us/library/aa365247(VS.85).aspx#maxpath
+ if len(str[3:]) > 32767 {
+ return false, Win
+ }
+ return true, Win
+ } else if rxUnixPath.MatchString(str) {
+ return true, Unix
+ }
+ return false, Unknown
+}
+
+// IsWinFilePath checks both relative & absolute paths in Windows
+func IsWinFilePath(str string) bool {
+ if rxARWinPath.MatchString(str) {
+ //check windows path limit see:
+ // http://msdn.microsoft.com/en-us/library/aa365247(VS.85).aspx#maxpath
+ if len(str[3:]) > 32767 {
+ return false
+ }
+ return true
+ }
+ return false
+}
+
+// IsUnixFilePath checks both relative & absolute paths in Unix
+func IsUnixFilePath(str string) bool {
+ if rxARUnixPath.MatchString(str) {
+ return true
+ }
+ return false
+}
+
+// IsDataURI checks if a string is a base64-encoded data URI, such as an image
+func IsDataURI(str string) bool {
+ dataURI := strings.Split(str, ",")
+ if !rxDataURI.MatchString(dataURI[0]) {
+ return false
+ }
+ return IsBase64(dataURI[1])
+}
+
+// IsMagnetURI checks if a string is a valid magnet URI
+func IsMagnetURI(str string) bool {
+ return rxMagnetURI.MatchString(str)
+}
+
+// IsISO3166Alpha2 checks if a string is a valid two-letter country code
+func IsISO3166Alpha2(str string) bool {
+ for _, entry := range ISO3166List {
+ if str == entry.Alpha2Code {
+ return true
+ }
+ }
+ return false
+}
+
+// IsISO3166Alpha3 checks if a string is a valid three-letter country code
+func IsISO3166Alpha3(str string) bool {
+ for _, entry := range ISO3166List {
+ if str == entry.Alpha3Code {
+ return true
+ }
+ }
+ return false
+}
+
+// IsISO693Alpha2 checks if a string is a valid two-letter language code
+func IsISO693Alpha2(str string) bool {
+ for _, entry := range ISO693List {
+ if str == entry.Alpha2Code {
+ return true
+ }
+ }
+ return false
+}
+
+// IsISO693Alpha3b checks if a string is a valid three-letter language code
+func IsISO693Alpha3b(str string) bool {
+ for _, entry := range ISO693List {
+ if str == entry.Alpha3bCode {
+ return true
+ }
+ }
+ return false
+}
+
+// IsDNSName will validate the given string as a DNS name
+func IsDNSName(str string) bool {
+ if str == "" || len(strings.Replace(str, ".", "", -1)) > 255 {
+ // constraints already violated
+ return false
+ }
+ return !IsIP(str) && rxDNSName.MatchString(str)
+}
+
+// IsHash checks if a string is a hash of type algorithm.
+// Algorithm is one of ['md4', 'md5', 'sha1', 'sha256', 'sha384', 'sha512', 'ripemd128', 'ripemd160', 'tiger128', 'tiger160', 'tiger192', 'crc32', 'crc32b']
+func IsHash(str string, algorithm string) bool {
+ var len string
+ algo := strings.ToLower(algorithm)
+
+ if algo == "crc32" || algo == "crc32b" {
+ len = "8"
+ } else if algo == "md5" || algo == "md4" || algo == "ripemd128" || algo == "tiger128" {
+ len = "32"
+ } else if algo == "sha1" || algo == "ripemd160" || algo == "tiger160" {
+ len = "40"
+ } else if algo == "tiger192" {
+ len = "48"
+ } else if algo == "sha3-224" {
+ len = "56"
+ } else if algo == "sha256" || algo == "sha3-256" {
+ len = "64"
+ } else if algo == "sha384" || algo == "sha3-384" {
+ len = "96"
+ } else if algo == "sha512" || algo == "sha3-512" {
+ len = "128"
+ } else {
+ return false
+ }
+
+ return Matches(str, "^[a-f0-9]{"+len+"}$")
+}
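+
+// Usage sketch: the algorithm name only selects an expected hex-digest length,
+// so any lowercase hex string of that length passes. The value below happens
+// to be the MD5 of the empty string:
+//
+//     IsHash("d41d8cd98f00b204e9800998ecf8427e", "md5")  // true (32 hex chars)
+//     IsHash("d41d8cd98f00b204e9800998ecf8427e", "sha1") // false (40 expected)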
+
+// IsSHA3224 checks if a string is a SHA3-224 hash. Alias for `IsHash(str, "sha3-224")`
+func IsSHA3224(str string) bool {
+ return IsHash(str, "sha3-224")
+}
+
+// IsSHA3256 checks if a string is a SHA3-256 hash. Alias for `IsHash(str, "sha3-256")`
+func IsSHA3256(str string) bool {
+ return IsHash(str, "sha3-256")
+}
+
+// IsSHA3384 checks if a string is a SHA3-384 hash. Alias for `IsHash(str, "sha3-384")`
+func IsSHA3384(str string) bool {
+ return IsHash(str, "sha3-384")
+}
+
+// IsSHA3512 checks if a string is a SHA3-512 hash. Alias for `IsHash(str, "sha3-512")`
+func IsSHA3512(str string) bool {
+ return IsHash(str, "sha3-512")
+}
+
+// IsSHA512 checks if a string is a SHA512 hash. Alias for `IsHash(str, "sha512")`
+func IsSHA512(str string) bool {
+ return IsHash(str, "sha512")
+}
+
+// IsSHA384 checks if a string is a SHA384 hash. Alias for `IsHash(str, "sha384")`
+func IsSHA384(str string) bool {
+ return IsHash(str, "sha384")
+}
+
+// IsSHA256 checks if a string is a SHA256 hash. Alias for `IsHash(str, "sha256")`
+func IsSHA256(str string) bool {
+ return IsHash(str, "sha256")
+}
+
+// IsTiger192 checks if a string is a Tiger192 hash. Alias for `IsHash(str, "tiger192")`
+func IsTiger192(str string) bool {
+ return IsHash(str, "tiger192")
+}
+
+// IsTiger160 checks if a string is a Tiger160 hash. Alias for `IsHash(str, "tiger160")`
+func IsTiger160(str string) bool {
+ return IsHash(str, "tiger160")
+}
+
+// IsRipeMD160 checks if a string is a RipeMD160 hash. Alias for `IsHash(str, "ripemd160")`
+func IsRipeMD160(str string) bool {
+ return IsHash(str, "ripemd160")
+}
+
+// IsSHA1 checks if a string is a SHA-1 hash. Alias for `IsHash(str, "sha1")`
+func IsSHA1(str string) bool {
+ return IsHash(str, "sha1")
+}
+
+// IsTiger128 checks if a string is a Tiger128 hash. Alias for `IsHash(str, "tiger128")`
+func IsTiger128(str string) bool {
+ return IsHash(str, "tiger128")
+}
+
+// IsRipeMD128 checks if a string is a RipeMD128 hash. Alias for `IsHash(str, "ripemd128")`
+func IsRipeMD128(str string) bool {
+ return IsHash(str, "ripemd128")
+}
+
+// IsCRC32 checks if a string is a CRC32 hash. Alias for `IsHash(str, "crc32")`
+func IsCRC32(str string) bool {
+ return IsHash(str, "crc32")
+}
+
+// IsCRC32b checks if a string is a CRC32b hash. Alias for `IsHash(str, "crc32b")`
+func IsCRC32b(str string) bool {
+ return IsHash(str, "crc32b")
+}
+
+// IsMD5 checks if a string is an MD5 hash. Alias for `IsHash(str, "md5")`
+func IsMD5(str string) bool {
+ return IsHash(str, "md5")
+}
+
+// IsMD4 checks if a string is an MD4 hash. Alias for `IsHash(str, "md4")`
+func IsMD4(str string) bool {
+ return IsHash(str, "md4")
+}
+
+// IsDialString validates the given string for usage with the various Dial() functions
+func IsDialString(str string) bool {
+ if h, p, err := net.SplitHostPort(str); err == nil && h != "" && p != "" && (IsDNSName(h) || IsIP(h)) && IsPort(p) {
+ return true
+ }
+
+ return false
+}
+
+// IsIP checks if a string is either IP version 4 or 6. Alias for `net.ParseIP`
+func IsIP(str string) bool {
+ return net.ParseIP(str) != nil
+}
+
+// IsPort checks if a string represents a valid port
+func IsPort(str string) bool {
+ if i, err := strconv.Atoi(str); err == nil && i > 0 && i < 65536 {
+ return true
+ }
+ return false
+}
+
+// IsIPv4 checks if the string is an IP version 4.
+func IsIPv4(str string) bool {
+ ip := net.ParseIP(str)
+ return ip != nil && strings.Contains(str, ".")
+}
+
+// IsIPv6 checks if the string is an IP version 6.
+func IsIPv6(str string) bool {
+ ip := net.ParseIP(str)
+ return ip != nil && strings.Contains(str, ":")
+}
+
+// IsCIDR checks if the string is a valid CIDR notation (IPv4 & IPv6)
+func IsCIDR(str string) bool {
+ _, _, err := net.ParseCIDR(str)
+ return err == nil
+}
+
+// IsMAC checks if a string is a valid MAC address.
+// Possible MAC formats:
+// 01:23:45:67:89:ab
+// 01:23:45:67:89:ab:cd:ef
+// 01-23-45-67-89-ab
+// 01-23-45-67-89-ab-cd-ef
+// 0123.4567.89ab
+// 0123.4567.89ab.cdef
+func IsMAC(str string) bool {
+ _, err := net.ParseMAC(str)
+ return err == nil
+}
+
+// IsHost checks if the string is a valid IP (both v4 and v6) or a valid DNS name
+func IsHost(str string) bool {
+ return IsIP(str) || IsDNSName(str)
+}
+
+// IsMongoID checks if the string is a valid hex-encoded representation of a MongoDB ObjectId.
+func IsMongoID(str string) bool {
+ return rxHexadecimal.MatchString(str) && (len(str) == 24)
+}
+
+// IsLatitude checks if a string is a valid latitude.
+func IsLatitude(str string) bool {
+ return rxLatitude.MatchString(str)
+}
+
+// IsLongitude checks if a string is a valid longitude.
+func IsLongitude(str string) bool {
+ return rxLongitude.MatchString(str)
+}
+
+// IsIMEI checks if a string is a valid IMEI
+func IsIMEI(str string) bool {
+ return rxIMEI.MatchString(str)
+}
+
+// IsIMSI checks if a string is a valid IMSI
+func IsIMSI(str string) bool {
+ if !rxIMSI.MatchString(str) {
+ return false
+ }
+
+ mcc, err := strconv.ParseInt(str[0:3], 10, 32)
+ if err != nil {
+ return false
+ }
+
+ switch mcc {
+ case 202, 204, 206, 208, 212, 213, 214, 216, 218, 219:
+ case 220, 221, 222, 226, 228, 230, 231, 232, 234, 235:
+ case 238, 240, 242, 244, 246, 247, 248, 250, 255, 257:
+ case 259, 260, 262, 266, 268, 270, 272, 274, 276, 278:
+ case 280, 282, 283, 284, 286, 288, 289, 290, 292, 293:
+ case 294, 295, 297, 302, 308, 310, 311, 312, 313, 314:
+ case 315, 316, 330, 332, 334, 338, 340, 342, 344, 346:
+ case 348, 350, 352, 354, 356, 358, 360, 362, 363, 364:
+ case 365, 366, 368, 370, 372, 374, 376, 400, 401, 402:
+ case 404, 405, 406, 410, 412, 413, 414, 415, 416, 417:
+ case 418, 419, 420, 421, 422, 424, 425, 426, 427, 428:
+ case 429, 430, 431, 432, 434, 436, 437, 438, 440, 441:
+ case 450, 452, 454, 455, 456, 457, 460, 461, 466, 467:
+ case 470, 472, 502, 505, 510, 514, 515, 520, 525, 528:
+ case 530, 536, 537, 539, 540, 541, 542, 543, 544, 545:
+ case 546, 547, 548, 549, 550, 551, 552, 553, 554, 555:
+ case 602, 603, 604, 605, 606, 607, 608, 609, 610, 611:
+ case 612, 613, 614, 615, 616, 617, 618, 619, 620, 621:
+ case 622, 623, 624, 625, 626, 627, 628, 629, 630, 631:
+ case 632, 633, 634, 635, 636, 637, 638, 639, 640, 641:
+ case 642, 643, 645, 646, 647, 648, 649, 650, 651, 652:
+ case 653, 654, 655, 657, 658, 659, 702, 704, 706, 708:
+ case 710, 712, 714, 716, 722, 724, 730, 732, 734, 736:
+ case 738, 740, 742, 744, 746, 748, 750, 995:
+ return true
+ default:
+ return false
+ }
+ return true
+}
+
+// IsRsaPublicKey checks if a string is a valid RSA public key with the provided length in bits
+func IsRsaPublicKey(str string, keylen int) bool {
+ bb := bytes.NewBufferString(str)
+ pemBytes, err := ioutil.ReadAll(bb)
+ if err != nil {
+ return false
+ }
+ block, _ := pem.Decode(pemBytes)
+ if block != nil && block.Type != "PUBLIC KEY" {
+ return false
+ }
+ var der []byte
+
+ if block != nil {
+ der = block.Bytes
+ } else {
+ der, err = base64.StdEncoding.DecodeString(str)
+ if err != nil {
+ return false
+ }
+ }
+
+ key, err := x509.ParsePKIXPublicKey(der)
+ if err != nil {
+ return false
+ }
+ pubkey, ok := key.(*rsa.PublicKey)
+ if !ok {
+ return false
+ }
+ bitlen := len(pubkey.N.Bytes()) * 8
+ return bitlen == int(keylen)
+}
+
+// IsRegex checks if a given string is a valid regex with RE2 syntax
+func IsRegex(str string) bool {
+ if _, err := regexp.Compile(str); err == nil {
+ return true
+ }
+ return false
+}
+
+func toJSONName(tag string) string {
+ if tag == "" {
+ return ""
+ }
+
+ // JSON name always comes first. If there's no options then split[0] is
+ // JSON name, if JSON name is not set, then split[0] is an empty string.
+ split := strings.SplitN(tag, ",", 2)
+
+ name := split[0]
+
+ // However it is possible that the field is skipped when
+ // (de-)serializing from/to JSON, in which case assume that there is no
+ // tag name to use
+ if name == "-" {
+ return ""
+ }
+ return name
+}
+
+func prependPathToErrors(err error, path string) error {
+ switch err2 := err.(type) {
+ case Error:
+ err2.Path = append([]string{path}, err2.Path...)
+ return err2
+ case Errors:
+ errors := err2.Errors()
+ for i, err3 := range errors {
+ errors[i] = prependPathToErrors(err3, path)
+ }
+ return err2
+ }
+ return err
+}
+
+// ValidateArray performs validation according to a condition iterator that validates every element of the array
+func ValidateArray(array []interface{}, iterator ConditionIterator) bool {
+ return Every(array, iterator)
+}
+
+// ValidateMap uses a validation map for fields.
+// The result will be equal to `false` if there are any errors.
+// s is the map containing the data to be validated.
+// m is the validation map in the form:
+// map[string]interface{}{"name":"required,alpha","address":map[string]interface{}{"line1":"required,alphanum"}}
+func ValidateMap(s map[string]interface{}, m map[string]interface{}) (bool, error) {
+ if s == nil {
+ return true, nil
+ }
+ result := true
+ var err error
+ var errs Errors
+ var index int
+ val := reflect.ValueOf(s)
+ for key, value := range s {
+ presentResult := true
+ validator, ok := m[key]
+ if !ok {
+ presentResult = false
+ var err error
+ err = fmt.Errorf("all map keys has to be present in the validation map; got %s", key)
+ err = prependPathToErrors(err, key)
+ errs = append(errs, err)
+ }
+ valueField := reflect.ValueOf(value)
+ mapResult := true
+ typeResult := true
+ structResult := true
+ resultField := true
+ switch subValidator := validator.(type) {
+ case map[string]interface{}:
+ var err error
+ if v, ok := value.(map[string]interface{}); !ok {
+ mapResult = false
+ err = fmt.Errorf("map validator has to be for the map type only; got %s", valueField.Type().String())
+ err = prependPathToErrors(err, key)
+ errs = append(errs, err)
+ } else {
+ mapResult, err = ValidateMap(v, subValidator)
+ if err != nil {
+ mapResult = false
+ err = prependPathToErrors(err, key)
+ errs = append(errs, err)
+ }
+ }
+ case string:
+ if (valueField.Kind() == reflect.Struct ||
+ (valueField.Kind() == reflect.Ptr && valueField.Elem().Kind() == reflect.Struct)) &&
+ subValidator != "-" {
+ var err error
+ structResult, err = ValidateStruct(valueField.Interface())
+ if err != nil {
+ err = prependPathToErrors(err, key)
+ errs = append(errs, err)
+ }
+ }
+ resultField, err = typeCheck(valueField, reflect.StructField{
+ Name: key,
+ PkgPath: "",
+ Type: val.Type(),
+ Tag: reflect.StructTag(fmt.Sprintf("%s:%q", tagName, subValidator)),
+ Offset: 0,
+ Index: []int{index},
+ Anonymous: false,
+ }, val, nil)
+ if err != nil {
+ errs = append(errs, err)
+ }
+ case nil:
+ // already handled by the presence check above
+ default:
+ typeResult = false
+ err = fmt.Errorf("map validator has to be either map[string]interface{} or string; got %s", valueField.Type().String())
+ err = prependPathToErrors(err, key)
+ errs = append(errs, err)
+ }
+ result = result && presentResult && typeResult && resultField && structResult && mapResult
+ index++
+ }
+ // checks required keys
+ requiredResult := true
+ for key, value := range m {
+ if schema, ok := value.(string); ok {
+ tags := parseTagIntoMap(schema)
+ if required, ok := tags["required"]; ok {
+ if _, ok := s[key]; !ok {
+ requiredResult = false
+ if required.customErrorMessage != "" {
+ err = Error{key, fmt.Errorf(required.customErrorMessage), true, "required", []string{}}
+ } else {
+ err = Error{key, fmt.Errorf("required field missing"), false, "required", []string{}}
+ }
+ errs = append(errs, err)
+ }
+ }
+ }
+ }
+
+ if len(errs) > 0 {
+ err = errs
+ }
+ return result && requiredResult, err
+}
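+
+// A minimal usage sketch with hypothetical keys: every key in the input map
+// must have a matching validator in the template, string validators reuse the
+// struct tag syntax, and nested maps are validated recursively:
+//
+//     template := map[string]interface{}{
+//         "name":    "required,alpha",
+//         "address": map[string]interface{}{"line1": "required,alphanum"},
+//     }
+//     input := map[string]interface{}{
+//         "name":    "Ada",
+//         "address": map[string]interface{}{"line1": "10DowningSt"},
+//     }
+//     ok, err := ValidateMap(input, template)
+//     // ok == true, err == nil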
+
+// ValidateStruct uses tags for fields.
+// The result will be equal to `false` if there are any errors.
+// TODO: currently there is no guarantee that errors will be returned in a predictable order (tests may fail)
+func ValidateStruct(s interface{}) (bool, error) {
+ if s == nil {
+ return true, nil
+ }
+ result := true
+ var err error
+ val := reflect.ValueOf(s)
+ if val.Kind() == reflect.Interface || val.Kind() == reflect.Ptr {
+ val = val.Elem()
+ }
+ // we only accept structs
+ if val.Kind() != reflect.Struct {
+ return false, fmt.Errorf("function only accepts structs; got %s", val.Kind())
+ }
+ var errs Errors
+ for i := 0; i < val.NumField(); i++ {
+ valueField := val.Field(i)
+ typeField := val.Type().Field(i)
+ if typeField.PkgPath != "" {
+ continue // Private field
+ }
+ structResult := true
+ if valueField.Kind() == reflect.Interface {
+ valueField = valueField.Elem()
+ }
+ if (valueField.Kind() == reflect.Struct ||
+ (valueField.Kind() == reflect.Ptr && valueField.Elem().Kind() == reflect.Struct)) &&
+ typeField.Tag.Get(tagName) != "-" {
+ var err error
+ structResult, err = ValidateStruct(valueField.Interface())
+ if err != nil {
+ err = prependPathToErrors(err, typeField.Name)
+ errs = append(errs, err)
+ }
+ }
+ resultField, err2 := typeCheck(valueField, typeField, val, nil)
+ if err2 != nil {
+
+ // Replace structure name with JSON name if there is a tag on the variable
+ jsonTag := toJSONName(typeField.Tag.Get("json"))
+ if jsonTag != "" {
+ switch jsonError := err2.(type) {
+ case Error:
+ jsonError.Name = jsonTag
+ err2 = jsonError
+ case Errors:
+ for i2, err3 := range jsonError {
+ switch customErr := err3.(type) {
+ case Error:
+ customErr.Name = jsonTag
+ jsonError[i2] = customErr
+ }
+ }
+
+ err2 = jsonError
+ }
+ }
+
+ errs = append(errs, err2)
+ }
+ result = result && resultField && structResult
+ }
+ if len(errs) > 0 {
+ err = errs
+ }
+ return result, err
+}
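+
+// A minimal usage sketch (hypothetical struct, mirroring the tag syntax used
+// by this package); fields tagged `valid:"-"` are skipped, and untagged
+// fields only fail when SetFieldsRequiredByDefault(true) has been called:
+//
+//     type post struct {
+//         Title    string `valid:"alphanum,required"`
+//         AuthorIP string `valid:"ipv4"`
+//     }
+//     ok, err := ValidateStruct(post{Title: "Day1", AuthorIP: "203.0.113.7"})
+//     // ok == true, err == nil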
+
+// ValidateStructAsync performs async validation of the struct and returns results through the channels
+func ValidateStructAsync(s interface{}) (<-chan bool, <-chan error) {
+ res := make(chan bool)
+ errors := make(chan error)
+
+ go func() {
+ defer close(res)
+ defer close(errors)
+
+ isValid, isFailed := ValidateStruct(s)
+
+ res <- isValid
+ errors <- isFailed
+ }()
+
+ return res, errors
+}
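+
+// Usage note: both channels are unbuffered and the goroutine sends the bool
+// before the error, so receive them in that order to avoid blocking:
+//
+//     resCh, errCh := ValidateStructAsync(s) // for some struct value s
+//     ok := <-resCh
+//     err := <-errCh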
+
+// ValidateMapAsync performs async validation of the map and returns results through the channels
+func ValidateMapAsync(s map[string]interface{}, m map[string]interface{}) (<-chan bool, <-chan error) {
+ res := make(chan bool)
+ errors := make(chan error)
+
+ go func() {
+ defer close(res)
+ defer close(errors)
+
+ isValid, isFailed := ValidateMap(s, m)
+
+ res <- isValid
+ errors <- isFailed
+ }()
+
+ return res, errors
+}
+
+// parseTagIntoMap parses a struct tag `valid:required~Some error message,length(2|3)` into map[string]string{"required": "Some error message", "length(2|3)": ""}
+func parseTagIntoMap(tag string) tagOptionsMap {
+ optionsMap := make(tagOptionsMap)
+ options := strings.Split(tag, ",")
+
+ for i, option := range options {
+ option = strings.TrimSpace(option)
+
+ validationOptions := strings.Split(option, "~")
+ if !isValidTag(validationOptions[0]) {
+ continue
+ }
+ if len(validationOptions) == 2 {
+ optionsMap[validationOptions[0]] = tagOption{validationOptions[0], validationOptions[1], i}
+ } else {
+ optionsMap[validationOptions[0]] = tagOption{validationOptions[0], "", i}
+ }
+ }
+ return optionsMap
+}
+
+func isValidTag(s string) bool {
+ if s == "" {
+ return false
+ }
+ for _, c := range s {
+ switch {
+ case strings.ContainsRune("\\'\"!#$%&()*+-./:<=>?@[]^_{|}~ ", c):
+ // Backslash and quote chars are reserved, but
+ // otherwise any punctuation chars are allowed
+ // in a tag name.
+ default:
+ if !unicode.IsLetter(c) && !unicode.IsDigit(c) {
+ return false
+ }
+ }
+ }
+ return true
+}
+
+// IsSSN will validate the given string as a U.S. Social Security Number
+func IsSSN(str string) bool {
+ if str == "" || len(str) != 11 {
+ return false
+ }
+ return rxSSN.MatchString(str)
+}
+
+// IsSemver checks if the string is a valid semantic version
+func IsSemver(str string) bool {
+ return rxSemver.MatchString(str)
+}
+
+// IsType checks if the interface value is of the given type
+func IsType(v interface{}, params ...string) bool {
+ if len(params) == 1 {
+ typ := params[0]
+ return strings.Replace(reflect.TypeOf(v).String(), " ", "", -1) == strings.Replace(typ, " ", "", -1)
+ }
+ return false
+}
+
+// IsTime checks if the string is valid according to the given format
+func IsTime(str string, format string) bool {
+ _, err := time.Parse(format, str)
+ return err == nil
+}
+
+// IsUnixTime checks if the string is a valid Unix timestamp value
+func IsUnixTime(str string) bool {
+ if _, err := strconv.Atoi(str); err == nil {
+ return true
+ }
+ return false
+}
+
+// IsRFC3339 checks if the string is a valid timestamp value according to RFC 3339
+func IsRFC3339(str string) bool {
+ return IsTime(str, time.RFC3339)
+}
+
+// IsRFC3339WithoutZone checks if the string is a valid timestamp value according to RFC 3339, excluding the timezone.
+func IsRFC3339WithoutZone(str string) bool {
+ return IsTime(str, rfc3339WithoutZone)
+}
+
+// IsISO4217 checks if the string is a valid ISO 4217 currency code
+func IsISO4217(str string) bool {
+ for _, currency := range ISO4217List {
+ if str == currency {
+ return true
+ }
+ }
+
+ return false
+}
+
+// ByteLength checks the string's length in bytes
+func ByteLength(str string, params ...string) bool {
+ if len(params) == 2 {
+ min, _ := ToInt(params[0])
+ max, _ := ToInt(params[1])
+ return len(str) >= int(min) && len(str) <= int(max)
+ }
+
+ return false
+}
+
+// RuneLength checks the string's length in runes.
+// Alias for StringLength
+func RuneLength(str string, params ...string) bool {
+ return StringLength(str, params...)
+}
+
+// IsRsaPub checks whether the string is a valid RSA key.
+// Alias for IsRsaPublicKey
+func IsRsaPub(str string, params ...string) bool {
+ if len(params) == 1 {
+ len, _ := ToInt(params[0])
+ return IsRsaPublicKey(str, int(len))
+ }
+
+ return false
+}
+
+// StringMatches checks if a string matches a given pattern.
+func StringMatches(s string, params ...string) bool {
+ if len(params) == 1 {
+ pattern := params[0]
+ return Matches(s, pattern)
+ }
+ return false
+}
+
+// StringLength checks the string's length (including multi-byte strings)
+func StringLength(str string, params ...string) bool {
+
+ if len(params) == 2 {
+ strLength := utf8.RuneCountInString(str)
+ min, _ := ToInt(params[0])
+ max, _ := ToInt(params[1])
+ return strLength >= int(min) && strLength <= int(max)
+ }
+
+ return false
+}
+
+// MinStringLength checks the string's minimum length (including multi-byte strings)
+func MinStringLength(str string, params ...string) bool {
+
+ if len(params) == 1 {
+ strLength := utf8.RuneCountInString(str)
+ min, _ := ToInt(params[0])
+ return strLength >= int(min)
+ }
+
+ return false
+}
+
+// MaxStringLength checks the string's maximum length (including multi-byte strings)
+func MaxStringLength(str string, params ...string) bool {
+
+ if len(params) == 1 {
+ strLength := utf8.RuneCountInString(str)
+ max, _ := ToInt(params[0])
+ return strLength <= int(max)
+ }
+
+ return false
+}
+
+// Range checks whether the string, parsed as a float, falls within the given range
+func Range(str string, params ...string) bool {
+ if len(params) == 2 {
+ value, _ := ToFloat(str)
+ min, _ := ToFloat(params[0])
+ max, _ := ToFloat(params[1])
+ return InRange(value, min, max)
+ }
+
+ return false
+}
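+
+// These parameterized helpers back the tag syntax (e.g. `valid:"range(1|10)"`),
+// which is why the bounds arrive as strings. Called directly (illustrative):
+//
+//     Range("5", "1", "10")  // true: 1 <= 5 <= 10
+//     Range("50", "1", "10") // false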
+
+// IsInRaw checks if the string is in a list of allowed values
+func IsInRaw(str string, params ...string) bool {
+ if len(params) == 1 {
+ rawParams := params[0]
+
+ parsedParams := strings.Split(rawParams, "|")
+
+ return IsIn(str, parsedParams...)
+ }
+
+ return false
+}
+
+// IsIn checks if string str is a member of the set of strings params
+func IsIn(str string, params ...string) bool {
+ for _, param := range params {
+ if str == param {
+ return true
+ }
+ }
+
+ return false
+}
+
+func checkRequired(v reflect.Value, t reflect.StructField, options tagOptionsMap) (bool, error) {
+ if nilPtrAllowedByRequired {
+ k := v.Kind()
+ if (k == reflect.Ptr || k == reflect.Interface) && v.IsNil() {
+ return true, nil
+ }
+ }
+
+ if requiredOption, isRequired := options["required"]; isRequired {
+ if len(requiredOption.customErrorMessage) > 0 {
+ return false, Error{t.Name, fmt.Errorf(requiredOption.customErrorMessage), true, "required", []string{}}
+ }
+ return false, Error{t.Name, fmt.Errorf("non zero value required"), false, "required", []string{}}
+ } else if _, isOptional := options["optional"]; fieldsRequiredByDefault && !isOptional {
+ return false, Error{t.Name, fmt.Errorf("Missing required field"), false, "required", []string{}}
+ }
+ // not required and empty is valid
+ return true, nil
+}
+
+func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value, options tagOptionsMap) (isValid bool, resultErr error) {
+ if !v.IsValid() {
+ return false, nil
+ }
+
+ tag := t.Tag.Get(tagName)
+
+ // checks if the field should be ignored
+ switch tag {
+ case "":
+ if v.Kind() != reflect.Slice && v.Kind() != reflect.Map {
+ if !fieldsRequiredByDefault {
+ return true, nil
+ }
+ return false, Error{t.Name, fmt.Errorf("All fields are required to at least have one validation defined"), false, "required", []string{}}
+ }
+ case "-":
+ return true, nil
+ }
+
+ isRootType := false
+ if options == nil {
+ isRootType = true
+ options = parseTagIntoMap(tag)
+ }
+
+ if isEmptyValue(v) {
+ // an empty value is not validated; only the required option is checked
+ isValid, resultErr = checkRequired(v, t, options)
+ for key := range options {
+ delete(options, key)
+ }
+ return isValid, resultErr
+ }
+
+ var customTypeErrors Errors
+ optionsOrder := options.orderedKeys()
+ for _, validatorName := range optionsOrder {
+ validatorStruct := options[validatorName]
+ if validatefunc, ok := CustomTypeTagMap.Get(validatorName); ok {
+ delete(options, validatorName)
+
+ if result := validatefunc(v.Interface(), o.Interface()); !result {
+ if len(validatorStruct.customErrorMessage) > 0 {
+ customTypeErrors = append(customTypeErrors, Error{Name: t.Name, Err: TruncatingErrorf(validatorStruct.customErrorMessage, fmt.Sprint(v), validatorName), CustomErrorMessageExists: true, Validator: stripParams(validatorName)})
+ continue
+ }
+ customTypeErrors = append(customTypeErrors, Error{Name: t.Name, Err: fmt.Errorf("%s does not validate as %s", fmt.Sprint(v), validatorName), CustomErrorMessageExists: false, Validator: stripParams(validatorName)})
+ }
+ }
+ }
+
+ if len(customTypeErrors.Errors()) > 0 {
+ return false, customTypeErrors
+ }
+
+ if isRootType {
+ // Ensure that we've checked the value against all specified validators before reporting that the value is valid
+ defer func() {
+ delete(options, "optional")
+ delete(options, "required")
+
+ if isValid && resultErr == nil && len(options) != 0 {
+ optionsOrder := options.orderedKeys()
+ for _, validator := range optionsOrder {
+ isValid = false
+ resultErr = Error{t.Name, fmt.Errorf(
+ "The following validator is invalid or can't be applied to the field: %q", validator), false, stripParams(validator), []string{}}
+ return
+ }
+ }
+ }()
+ }
+
+ for _, validatorSpec := range optionsOrder {
+ validatorStruct := options[validatorSpec]
+ var negate bool
+ validator := validatorSpec
+ customMsgExists := len(validatorStruct.customErrorMessage) > 0
+
+ // checks whether the tag looks like '!something' or 'something'
+ if validator[0] == '!' {
+ validator = validator[1:]
+ negate = true
+ }
+
+ // checks for interface param validators
+ for key, value := range InterfaceParamTagRegexMap {
+ ps := value.FindStringSubmatch(validator)
+ if len(ps) == 0 {
+ continue
+ }
+
+ validatefunc, ok := InterfaceParamTagMap[key]
+ if !ok {
+ continue
+ }
+
+ delete(options, validatorSpec)
+
+ field := fmt.Sprint(v)
+ if result := validatefunc(v.Interface(), ps[1:]...); (!result && !negate) || (result && negate) {
+ if customMsgExists {
+ return false, Error{t.Name, TruncatingErrorf(validatorStruct.customErrorMessage, field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
+ }
+ if negate {
+ return false, Error{t.Name, fmt.Errorf("%s does validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
+ }
+ return false, Error{t.Name, fmt.Errorf("%s does not validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
+ }
+ }
+ }
+
+ switch v.Kind() {
+ case reflect.Bool,
+ reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
+ reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr,
+ reflect.Float32, reflect.Float64,
+ reflect.String:
+ // for each tag option, check the map of validator functions
+ for _, validatorSpec := range optionsOrder {
+ validatorStruct := options[validatorSpec]
+ var negate bool
+ validator := validatorSpec
+ customMsgExists := len(validatorStruct.customErrorMessage) > 0
+
+ // checks whether the tag looks like '!something' or 'something'
+ if validator[0] == '!' {
+ validator = validator[1:]
+ negate = true
+ }
+
+ // checks for param validators
+ for key, value := range ParamTagRegexMap {
+ ps := value.FindStringSubmatch(validator)
+ if len(ps) == 0 {
+ continue
+ }
+
+ validatefunc, ok := ParamTagMap[key]
+ if !ok {
+ continue
+ }
+
+ delete(options, validatorSpec)
+
+ switch v.Kind() {
+ case reflect.String,
+ reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
+ reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
+ reflect.Float32, reflect.Float64:
+
+ field := fmt.Sprint(v) // make value into string, then validate with regex
+ if result := validatefunc(field, ps[1:]...); (!result && !negate) || (result && negate) {
+ if customMsgExists {
+ return false, Error{t.Name, TruncatingErrorf(validatorStruct.customErrorMessage, field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
+ }
+ if negate {
+ return false, Error{t.Name, fmt.Errorf("%s does validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
+ }
+ return false, Error{t.Name, fmt.Errorf("%s does not validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
+ }
+ default:
+ // type not yet supported, fail
+ return false, Error{t.Name, fmt.Errorf("Validator %s doesn't support kind %s", validator, v.Kind()), false, stripParams(validatorSpec), []string{}}
+ }
+ }
+
+ if validatefunc, ok := TagMap[validator]; ok {
+ delete(options, validatorSpec)
+
+ switch v.Kind() {
+ case reflect.String,
+ reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
+ reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
+ reflect.Float32, reflect.Float64:
+ field := fmt.Sprint(v) // make value into string, then validate with regex
+ if result := validatefunc(field); !result && !negate || result && negate {
+ if customMsgExists {
+ return false, Error{t.Name, TruncatingErrorf(validatorStruct.customErrorMessage, field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
+ }
+ if negate {
+ return false, Error{t.Name, fmt.Errorf("%s does validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
+ }
+ return false, Error{t.Name, fmt.Errorf("%s does not validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}}
+ }
+ default:
+ // type not yet supported, fail
+ err := fmt.Errorf("Validator %s doesn't support kind %s for value %v", validator, v.Kind(), v)
+ return false, Error{t.Name, err, false, stripParams(validatorSpec), []string{}}
+ }
+ }
+ }
+ return true, nil
+ case reflect.Map:
+ if v.Type().Key().Kind() != reflect.String {
+ return false, &UnsupportedTypeError{v.Type()}
+ }
+ var sv stringValues
+ sv = v.MapKeys()
+ sort.Sort(sv)
+ result := true
+ for i, k := range sv {
+ var resultItem bool
+ var err error
+ if v.MapIndex(k).Kind() != reflect.Struct {
+ resultItem, err = typeCheck(v.MapIndex(k), t, o, options)
+ if err != nil {
+ return false, err
+ }
+ } else {
+ resultItem, err = ValidateStruct(v.MapIndex(k).Interface())
+ if err != nil {
+ err = prependPathToErrors(err, t.Name+"."+sv[i].Interface().(string))
+ return false, err
+ }
+ }
+ result = result && resultItem
+ }
+ return result, nil
+ case reflect.Slice, reflect.Array:
+ result := true
+ for i := 0; i < v.Len(); i++ {
+ var resultItem bool
+ var err error
+ if v.Index(i).Kind() != reflect.Struct {
+ resultItem, err = typeCheck(v.Index(i), t, o, options)
+ if err != nil {
+ return false, err
+ }
+ } else {
+ resultItem, err = ValidateStruct(v.Index(i).Interface())
+ if err != nil {
+ err = prependPathToErrors(err, t.Name+"."+strconv.Itoa(i))
+ return false, err
+ }
+ }
+ result = result && resultItem
+ }
+ return result, nil
+ case reflect.Interface:
+ // If the value is an interface then validate its element
+ if v.IsNil() {
+ return true, nil
+ }
+ return ValidateStruct(v.Interface())
+ case reflect.Ptr:
+ // If the value is a pointer then check its element
+ if v.IsNil() {
+ return true, nil
+ }
+ return typeCheck(v.Elem(), t, o, options)
+ case reflect.Struct:
+ return true, nil
+ default:
+ return false, &UnsupportedTypeError{v.Type()}
+ }
+}
+
+func stripParams(validatorString string) string {
+ return paramsRegexp.ReplaceAllString(validatorString, "")
+}
+
+// isEmptyValue checks whether the value is empty
+func isEmptyValue(v reflect.Value) bool {
+ switch v.Kind() {
+ case reflect.String, reflect.Array:
+ return v.Len() == 0
+ case reflect.Map, reflect.Slice:
+ return v.Len() == 0 || v.IsNil()
+ case reflect.Bool:
+ return !v.Bool()
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return v.Int() == 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return v.Uint() == 0
+ case reflect.Float32, reflect.Float64:
+ return v.Float() == 0
+ case reflect.Interface, reflect.Ptr:
+ return v.IsNil()
+ }
+
+ return reflect.DeepEqual(v.Interface(), reflect.Zero(v.Type()).Interface())
+}
+
+// ErrorByField returns the error for the specified field of the struct
+// validated by ValidateStruct, or an empty string if there are no errors
+// or this field doesn't exist or doesn't have any errors.
+func ErrorByField(e error, field string) string {
+ if e == nil {
+ return ""
+ }
+ return ErrorsByField(e)[field]
+}
+
+// ErrorsByField returns map of errors of the struct validated
+// by ValidateStruct or empty map if there are no errors.
+func ErrorsByField(e error) map[string]string {
+ m := make(map[string]string)
+ if e == nil {
+ return m
+ }
+ // prototype for ValidateStruct
+
+ switch e := e.(type) {
+ case Error:
+ m[e.Name] = e.Err.Error()
+ case Errors:
+ for _, item := range e.Errors() {
+ n := ErrorsByField(item)
+ for k, v := range n {
+ m[k] = v
+ }
+ }
+ }
+
+ return m
+}
+
+// Error returns string equivalent for reflect.Type
+func (e *UnsupportedTypeError) Error() string {
+ return "validator: unsupported type: " + e.Type.String()
+}
+
+func (sv stringValues) Len() int { return len(sv) }
+func (sv stringValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] }
+func (sv stringValues) Less(i, j int) bool { return sv.get(i) < sv.get(j) }
+func (sv stringValues) get(i int) string { return sv[i].String() }
+
+// IsE164 checks if string is a valid E.164 phone number representation
+func IsE164(str string) bool {
+ return rxE164.MatchString(str)
+}
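The exported helpers above are also wired into struct-tag validation via `ValidateStruct` (which walks fields through `typeCheck` and `checkRequired`). A minimal sketch of both call styles; the `User` struct and its tag values are illustrative assumptions, not part of this diff:

```go
package main

import (
	"fmt"

	"github.com/asaskevich/govalidator"
)

// User is a hypothetical struct showing tag-driven validation.
type User struct {
	Name  string `valid:"runelength(2|32)"`       // handled by RuneLength
	Email string `valid:"email,required"`         // "required" is enforced by checkRequired
	Role  string `valid:"in(admin|member|guest)"` // handled by IsInRaw/IsIn
}

func main() {
	// Direct calls to the helpers defined above.
	fmt.Println(govalidator.IsIn("member", "admin", "member", "guest")) // true
	fmt.Println(govalidator.StringLength("héllo", "2", "10"))           // true: 5 runes

	// Tag-driven validation; returns false plus per-field errors.
	ok, err := govalidator.ValidateStruct(User{Name: "a", Email: "not-an-email"})
	fmt.Println(ok, err)
}
```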
diff --git a/vendor/github.com/asaskevich/govalidator/wercker.yml b/vendor/github.com/asaskevich/govalidator/wercker.yml
new file mode 100644
index 000000000..bc5f7b086
--- /dev/null
+++ b/vendor/github.com/asaskevich/govalidator/wercker.yml
@@ -0,0 +1,15 @@
+box: golang
+build:
+ steps:
+ - setup-go-workspace
+
+ - script:
+ name: go get
+ code: |
+ go version
+ go get -t ./...
+
+ - script:
+ name: go test
+ code: |
+ go test -race -v ./...
diff --git a/vendor/github.com/docker/go-units/size.go b/vendor/github.com/docker/go-units/size.go
index 85f6ab071..c245a8951 100644
--- a/vendor/github.com/docker/go-units/size.go
+++ b/vendor/github.com/docker/go-units/size.go
@@ -2,7 +2,6 @@ package units
import (
"fmt"
- "regexp"
"strconv"
"strings"
)
@@ -26,16 +25,17 @@ const (
PiB = 1024 * TiB
)
-type unitMap map[string]int64
+type unitMap map[byte]int64
var (
- decimalMap = unitMap{"k": KB, "m": MB, "g": GB, "t": TB, "p": PB}
- binaryMap = unitMap{"k": KiB, "m": MiB, "g": GiB, "t": TiB, "p": PiB}
- sizeRegex = regexp.MustCompile(`^(\d+(\.\d+)*) ?([kKmMgGtTpP])?[iI]?[bB]?$`)
+ decimalMap = unitMap{'k': KB, 'm': MB, 'g': GB, 't': TB, 'p': PB}
+ binaryMap = unitMap{'k': KiB, 'm': MiB, 'g': GiB, 't': TiB, 'p': PiB}
)
-var decimapAbbrs = []string{"B", "kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"}
-var binaryAbbrs = []string{"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"}
+var (
+ decimapAbbrs = []string{"B", "kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"}
+ binaryAbbrs = []string{"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"}
+)
func getSizeAndUnit(size float64, base float64, _map []string) (float64, string) {
i := 0
@@ -89,20 +89,66 @@ func RAMInBytes(size string) (int64, error) {
// Parses the human-readable size string into the amount it represents.
func parseSize(sizeStr string, uMap unitMap) (int64, error) {
- matches := sizeRegex.FindStringSubmatch(sizeStr)
- if len(matches) != 4 {
+ // TODO: rewrite to use strings.Cut if there's a space
+ // once Go < 1.18 is deprecated.
+ sep := strings.LastIndexAny(sizeStr, "01234567890. ")
+ if sep == -1 {
+ // There should be at least one digit.
return -1, fmt.Errorf("invalid size: '%s'", sizeStr)
}
+ var num, sfx string
+ if sizeStr[sep] != ' ' {
+ num = sizeStr[:sep+1]
+ sfx = sizeStr[sep+1:]
+ } else {
+ // Omit the space separator.
+ num = sizeStr[:sep]
+ sfx = sizeStr[sep+1:]
+ }
- size, err := strconv.ParseFloat(matches[1], 64)
+ size, err := strconv.ParseFloat(num, 64)
if err != nil {
return -1, err
}
+ // Backward compatibility: reject negative sizes.
+ if size < 0 {
+ return -1, fmt.Errorf("invalid size: '%s'", sizeStr)
+ }
+
+ if len(sfx) == 0 {
+ return int64(size), nil
+ }
- unitPrefix := strings.ToLower(matches[3])
- if mul, ok := uMap[unitPrefix]; ok {
+ // Process the suffix.
+
+ if len(sfx) > 3 { // Too long.
+ goto badSuffix
+ }
+ sfx = strings.ToLower(sfx)
+ // Trivial case: b suffix.
+ if sfx[0] == 'b' {
+ if len(sfx) > 1 { // no extra characters allowed after b.
+ goto badSuffix
+ }
+ return int64(size), nil
+ }
+ // A suffix from the map.
+ if mul, ok := uMap[sfx[0]]; ok {
size *= float64(mul)
+ } else {
+ goto badSuffix
+ }
+
+ // The suffix may have extra "b" or "ib" (e.g. KiB or MB).
+ switch {
+ case len(sfx) == 2 && sfx[1] != 'b':
+ goto badSuffix
+ case len(sfx) == 3 && sfx[1:] != "ib":
+ goto badSuffix
}
return int64(size), nil
+
+badSuffix:
+ return -1, fmt.Errorf("invalid suffix: '%s'", sfx)
}
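The rewrite above replaces the regexp with manual suffix parsing while keeping the exported behaviour: `FromHumanSize` parses with decimal multiples and `RAMInBytes` with binary ones, both funneling into `parseSize`. A quick sketch of the expected results (the printed values are my reading of the code above, not verified output):

```go
package main

import (
	"fmt"

	"github.com/docker/go-units"
)

func main() {
	// Decimal multiples: kB = 1000 bytes.
	n, err := units.FromHumanSize("32kb")
	fmt.Println(n, err) // 32000 <nil>

	// Binary multiples: KiB = 1024 bytes; suffixes are case-insensitive
	// and may be "k", "kb", or "kib".
	m, err := units.RAMInBytes("32KiB")
	fmt.Println(m, err) // 32768 <nil>

	// Unknown suffixes now hit the badSuffix path.
	_, err = units.RAMInBytes("32abc")
	fmt.Println(err) // invalid suffix: 'abc'
}
```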
diff --git a/vendor/github.com/felixge/httpsnoop/.gitignore b/vendor/github.com/felixge/httpsnoop/.gitignore
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/vendor/github.com/felixge/httpsnoop/.gitignore
diff --git a/vendor/github.com/felixge/httpsnoop/.travis.yml b/vendor/github.com/felixge/httpsnoop/.travis.yml
new file mode 100644
index 000000000..bfc421200
--- /dev/null
+++ b/vendor/github.com/felixge/httpsnoop/.travis.yml
@@ -0,0 +1,6 @@
+language: go
+
+go:
+ - 1.6
+ - 1.7
+ - 1.8
diff --git a/vendor/github.com/felixge/httpsnoop/LICENSE.txt b/vendor/github.com/felixge/httpsnoop/LICENSE.txt
new file mode 100644
index 000000000..e028b46a9
--- /dev/null
+++ b/vendor/github.com/felixge/httpsnoop/LICENSE.txt
@@ -0,0 +1,19 @@
+Copyright (c) 2016 Felix Geisendörfer (felix@debuggable.com)
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
diff --git a/vendor/github.com/felixge/httpsnoop/Makefile b/vendor/github.com/felixge/httpsnoop/Makefile
new file mode 100644
index 000000000..2d84889ae
--- /dev/null
+++ b/vendor/github.com/felixge/httpsnoop/Makefile
@@ -0,0 +1,10 @@
+.PHONY: ci generate clean
+
+ci: clean generate
+ go test -v ./...
+
+generate:
+ go generate .
+
+clean:
+ rm -rf *_generated*.go
diff --git a/vendor/github.com/felixge/httpsnoop/README.md b/vendor/github.com/felixge/httpsnoop/README.md
new file mode 100644
index 000000000..ddcecd13e
--- /dev/null
+++ b/vendor/github.com/felixge/httpsnoop/README.md
@@ -0,0 +1,95 @@
+# httpsnoop
+
+Package httpsnoop provides an easy way to capture http related metrics (i.e.
+response time, bytes written, and http status code) from your application's
+http.Handlers.
+
+Doing this requires non-trivial wrapping of the http.ResponseWriter interface,
+which is also exposed for users interested in a more low-level API.
+
+[![GoDoc](https://godoc.org/github.com/felixge/httpsnoop?status.svg)](https://godoc.org/github.com/felixge/httpsnoop)
+[![Build Status](https://travis-ci.org/felixge/httpsnoop.svg?branch=master)](https://travis-ci.org/felixge/httpsnoop)
+
+## Usage Example
+
+```go
+// myH is your app's http handler, perhaps a http.ServeMux or similar.
+var myH http.Handler
+// wrappedH wraps myH in order to log every request.
+wrappedH := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ m := httpsnoop.CaptureMetrics(myH, w, r)
+ log.Printf(
+ "%s %s (code=%d dt=%s written=%d)",
+ r.Method,
+ r.URL,
+ m.Code,
+ m.Duration,
+ m.Written,
+ )
+})
+http.ListenAndServe(":8080", wrappedH)
+```
+
+## Why this package exists
+
+Instrumenting an application's http.Handler is surprisingly difficult.
+
+However, if you google for e.g. "capture ResponseWriter status code" you'll find
+lots of advice and code examples suggesting it's a fairly trivial
+undertaking. Unfortunately everything I've seen so far has a high chance of
+breaking your application.
+
+The main problem is that a `http.ResponseWriter` often implements additional
+interfaces such as `http.Flusher`, `http.CloseNotifier`, `http.Hijacker`, `http.Pusher`, and
+`io.ReaderFrom`. So the naive approach of just wrapping `http.ResponseWriter`
+in your own struct that also implements the `http.ResponseWriter` interface
+will hide the additional interfaces mentioned above. This has a high chance of
+introducing subtle bugs into any non-trivial application.
+
+Another approach I've seen people take is to return a struct that implements
+all of the interfaces above. However, that's also problematic, because it's
+difficult to fake some of these interfaces' behaviors when the underlying
+`http.ResponseWriter` doesn't have an implementation. It's also dangerous,
+because an application may choose to operate differently, merely because it
+detects the presence of these additional interfaces.
+
+This package solves this problem by checking which additional interfaces a
+`http.ResponseWriter` implements, returning a wrapped version implementing the
+exact same set of interfaces.
+
+Additionally this package properly handles edge cases such as `WriteHeader` not
+being called, or called more than once, as well as concurrent calls to
+`http.ResponseWriter` methods, and even calls happening after the wrapped
+`ServeHTTP` has already returned.
+
+Unfortunately this package is not perfect either. It's possible that it is
+still missing some interfaces provided by the go core (let me know if you find
+one), and it won't work for applications adding their own interfaces into the
+mix. You can however use `httpsnoop.Unwrap(w)` to access the underlying
+`http.ResponseWriter` and type-assert the result to its other interfaces.
+
+However, hopefully the explanation above has sufficiently scared you away from
+rolling your own solution to this problem. httpsnoop may still break your
+application, but at least it tries to avoid it as much as possible.
+
+Anyway, the real problem here is that smuggling additional interfaces inside
+`http.ResponseWriter` is a problematic design choice, but it probably goes as
+deep as the Go language specification itself. But that's okay, I still prefer
+Go over the alternatives ;).
+
+## Performance
+
+```
+BenchmarkBaseline-8 20000 94912 ns/op
+BenchmarkCaptureMetrics-8 20000 95461 ns/op
+```
+
+As you can see, using `CaptureMetrics` on a vanilla http.Handler introduces an
+overhead of ~500 ns per http request on my machine. However, the margin of
+error appears to be larger than that, therefore it is reasonable to assume
+that the overhead introduced by `CaptureMetrics` is negligible.
+
+## License
+
+MIT
diff --git a/vendor/github.com/felixge/httpsnoop/capture_metrics.go b/vendor/github.com/felixge/httpsnoop/capture_metrics.go
new file mode 100644
index 000000000..b77cc7c00
--- /dev/null
+++ b/vendor/github.com/felixge/httpsnoop/capture_metrics.go
@@ -0,0 +1,86 @@
+package httpsnoop
+
+import (
+ "io"
+ "net/http"
+ "time"
+)
+
+// Metrics holds metrics captured from CaptureMetrics.
+type Metrics struct {
+ // Code is the first http response code passed to the WriteHeader func of
+ // the ResponseWriter. If no such call is made, a default code of 200 is
+ // assumed instead.
+ Code int
+ // Duration is the time it took to execute the handler.
+ Duration time.Duration
+ // Written is the number of bytes successfully written by the Write or
+ // ReadFrom function of the ResponseWriter. ResponseWriters may also write
+ // data to their underlying connection directly (e.g. headers), but those
+ // are not tracked. Therefore the number of Written bytes will usually match
+ // the size of the response body.
+ Written int64
+}
+
+// CaptureMetrics wraps the given hnd, executes it with the given w and r, and
+// returns the metrics it captured from it.
+func CaptureMetrics(hnd http.Handler, w http.ResponseWriter, r *http.Request) Metrics {
+ return CaptureMetricsFn(w, func(ww http.ResponseWriter) {
+ hnd.ServeHTTP(ww, r)
+ })
+}
+
+// CaptureMetricsFn wraps w and calls fn with the wrapped w and returns the
+// resulting metrics. This is very similar to CaptureMetrics (which is just
+// sugar on top of this func), but is a more usable interface if your
+// application doesn't use the Go http.Handler interface.
+func CaptureMetricsFn(w http.ResponseWriter, fn func(http.ResponseWriter)) Metrics {
+ m := Metrics{Code: http.StatusOK}
+ m.CaptureMetrics(w, fn)
+ return m
+}
+
+// CaptureMetrics wraps w and calls fn with the wrapped w and updates
+// Metrics m with the resulting metrics. This is similar to CaptureMetricsFn,
+// but allows one to customize the starting Metrics object.
+func (m *Metrics) CaptureMetrics(w http.ResponseWriter, fn func(http.ResponseWriter)) {
+ var (
+ start = time.Now()
+ headerWritten bool
+ hooks = Hooks{
+ WriteHeader: func(next WriteHeaderFunc) WriteHeaderFunc {
+ return func(code int) {
+ next(code)
+
+ if !headerWritten {
+ m.Code = code
+ headerWritten = true
+ }
+ }
+ },
+
+ Write: func(next WriteFunc) WriteFunc {
+ return func(p []byte) (int, error) {
+ n, err := next(p)
+
+ m.Written += int64(n)
+ headerWritten = true
+ return n, err
+ }
+ },
+
+ ReadFrom: func(next ReadFromFunc) ReadFromFunc {
+ return func(src io.Reader) (int64, error) {
+ n, err := next(src)
+
+ headerWritten = true
+ m.Written += n
+ return n, err
+ }
+ },
+ }
+ )
+
+ fn(Wrap(w, hooks))
+ m.Duration += time.Since(start)
+}
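`CaptureMetrics` in the README wraps a full `http.Handler`; `CaptureMetricsFn` is the lower-level entry point when you only have a `ResponseWriter` in hand. A minimal sketch using only the API added above (the handler body is illustrative):

```go
package main

import (
	"log"
	"net/http"

	"github.com/felixge/httpsnoop"
)

func main() {
	h := func(w http.ResponseWriter, r *http.Request) {
		m := httpsnoop.CaptureMetricsFn(w, func(ww http.ResponseWriter) {
			ww.WriteHeader(http.StatusTeapot)          // first code wins: m.Code = 418
			_, _ = ww.Write([]byte("short and stout")) // counted in m.Written
		})
		log.Printf("code=%d dt=%s written=%d", m.Code, m.Duration, m.Written)
	}
	log.Fatal(http.ListenAndServe(":8080", http.HandlerFunc(h)))
}
```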
diff --git a/vendor/github.com/felixge/httpsnoop/docs.go b/vendor/github.com/felixge/httpsnoop/docs.go
new file mode 100644
index 000000000..203c35b3c
--- /dev/null
+++ b/vendor/github.com/felixge/httpsnoop/docs.go
@@ -0,0 +1,10 @@
+// Package httpsnoop provides an easy way to capture http related metrics (i.e.
+// response time, bytes written, and http status code) from your application's
+// http.Handlers.
+//
+// Doing this requires non-trivial wrapping of the http.ResponseWriter
+// interface, which is also exposed for users interested in a more low-level
+// API.
+package httpsnoop
+
+//go:generate go run codegen/main.go
diff --git a/vendor/github.com/felixge/httpsnoop/wrap_generated_gteq_1.8.go b/vendor/github.com/felixge/httpsnoop/wrap_generated_gteq_1.8.go
new file mode 100644
index 000000000..31cbdfb8e
--- /dev/null
+++ b/vendor/github.com/felixge/httpsnoop/wrap_generated_gteq_1.8.go
@@ -0,0 +1,436 @@
+// +build go1.8
+// Code generated by "httpsnoop/codegen"; DO NOT EDIT
+
+package httpsnoop
+
+import (
+ "bufio"
+ "io"
+ "net"
+ "net/http"
+)
+
+// HeaderFunc is part of the http.ResponseWriter interface.
+type HeaderFunc func() http.Header
+
+// WriteHeaderFunc is part of the http.ResponseWriter interface.
+type WriteHeaderFunc func(code int)
+
+// WriteFunc is part of the http.ResponseWriter interface.
+type WriteFunc func(b []byte) (int, error)
+
+// FlushFunc is part of the http.Flusher interface.
+type FlushFunc func()
+
+// CloseNotifyFunc is part of the http.CloseNotifier interface.
+type CloseNotifyFunc func() <-chan bool
+
+// HijackFunc is part of the http.Hijacker interface.
+type HijackFunc func() (net.Conn, *bufio.ReadWriter, error)
+
+// ReadFromFunc is part of the io.ReaderFrom interface.
+type ReadFromFunc func(src io.Reader) (int64, error)
+
+// PushFunc is part of the http.Pusher interface.
+type PushFunc func(target string, opts *http.PushOptions) error
+
+// Hooks defines a set of method interceptors for methods included in
+// http.ResponseWriter as well as some others. You can think of them as
+// middleware for the function calls they target. See Wrap for more details.
+type Hooks struct {
+ Header func(HeaderFunc) HeaderFunc
+ WriteHeader func(WriteHeaderFunc) WriteHeaderFunc
+ Write func(WriteFunc) WriteFunc
+ Flush func(FlushFunc) FlushFunc
+ CloseNotify func(CloseNotifyFunc) CloseNotifyFunc
+ Hijack func(HijackFunc) HijackFunc
+ ReadFrom func(ReadFromFunc) ReadFromFunc
+ Push func(PushFunc) PushFunc
+}
+
+// Wrap returns a wrapped version of w that provides the exact same interface
+// as w. Specifically if w implements any combination of:
+//
+// - http.Flusher
+// - http.CloseNotifier
+// - http.Hijacker
+// - io.ReaderFrom
+// - http.Pusher
+//
+// The wrapped version will implement the exact same combination. If no hooks
+// are set, the wrapped version also behaves exactly as w. Hooks targeting
+// methods not supported by w are ignored. Any other hooks will intercept the
+// method they target and may modify the call's arguments and/or return values.
+// The CaptureMetrics implementation serves as a working example for how the
+// hooks can be used.
+func Wrap(w http.ResponseWriter, hooks Hooks) http.ResponseWriter {
+ rw := &rw{w: w, h: hooks}
+ _, i0 := w.(http.Flusher)
+ _, i1 := w.(http.CloseNotifier)
+ _, i2 := w.(http.Hijacker)
+ _, i3 := w.(io.ReaderFrom)
+ _, i4 := w.(http.Pusher)
+ switch {
+ // combination 1/32
+ case !i0 && !i1 && !i2 && !i3 && !i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ }{rw, rw}
+ // combination 2/32
+ case !i0 && !i1 && !i2 && !i3 && i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Pusher
+ }{rw, rw, rw}
+ // combination 3/32
+ case !i0 && !i1 && !i2 && i3 && !i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ io.ReaderFrom
+ }{rw, rw, rw}
+ // combination 4/32
+ case !i0 && !i1 && !i2 && i3 && i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ io.ReaderFrom
+ http.Pusher
+ }{rw, rw, rw, rw}
+ // combination 5/32
+ case !i0 && !i1 && i2 && !i3 && !i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Hijacker
+ }{rw, rw, rw}
+ // combination 6/32
+ case !i0 && !i1 && i2 && !i3 && i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Hijacker
+ http.Pusher
+ }{rw, rw, rw, rw}
+ // combination 7/32
+ case !i0 && !i1 && i2 && i3 && !i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Hijacker
+ io.ReaderFrom
+ }{rw, rw, rw, rw}
+ // combination 8/32
+ case !i0 && !i1 && i2 && i3 && i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Hijacker
+ io.ReaderFrom
+ http.Pusher
+ }{rw, rw, rw, rw, rw}
+ // combination 9/32
+ case !i0 && i1 && !i2 && !i3 && !i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.CloseNotifier
+ }{rw, rw, rw}
+ // combination 10/32
+ case !i0 && i1 && !i2 && !i3 && i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.CloseNotifier
+ http.Pusher
+ }{rw, rw, rw, rw}
+ // combination 11/32
+ case !i0 && i1 && !i2 && i3 && !i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.CloseNotifier
+ io.ReaderFrom
+ }{rw, rw, rw, rw}
+ // combination 12/32
+ case !i0 && i1 && !i2 && i3 && i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.CloseNotifier
+ io.ReaderFrom
+ http.Pusher
+ }{rw, rw, rw, rw, rw}
+ // combination 13/32
+ case !i0 && i1 && i2 && !i3 && !i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.CloseNotifier
+ http.Hijacker
+ }{rw, rw, rw, rw}
+ // combination 14/32
+ case !i0 && i1 && i2 && !i3 && i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.CloseNotifier
+ http.Hijacker
+ http.Pusher
+ }{rw, rw, rw, rw, rw}
+ // combination 15/32
+ case !i0 && i1 && i2 && i3 && !i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.CloseNotifier
+ http.Hijacker
+ io.ReaderFrom
+ }{rw, rw, rw, rw, rw}
+ // combination 16/32
+ case !i0 && i1 && i2 && i3 && i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.CloseNotifier
+ http.Hijacker
+ io.ReaderFrom
+ http.Pusher
+ }{rw, rw, rw, rw, rw, rw}
+ // combination 17/32
+ case i0 && !i1 && !i2 && !i3 && !i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ }{rw, rw, rw}
+ // combination 18/32
+ case i0 && !i1 && !i2 && !i3 && i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.Pusher
+ }{rw, rw, rw, rw}
+ // combination 19/32
+ case i0 && !i1 && !i2 && i3 && !i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ io.ReaderFrom
+ }{rw, rw, rw, rw}
+ // combination 20/32
+ case i0 && !i1 && !i2 && i3 && i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ io.ReaderFrom
+ http.Pusher
+ }{rw, rw, rw, rw, rw}
+ // combination 21/32
+ case i0 && !i1 && i2 && !i3 && !i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.Hijacker
+ }{rw, rw, rw, rw}
+ // combination 22/32
+ case i0 && !i1 && i2 && !i3 && i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.Hijacker
+ http.Pusher
+ }{rw, rw, rw, rw, rw}
+ // combination 23/32
+ case i0 && !i1 && i2 && i3 && !i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.Hijacker
+ io.ReaderFrom
+ }{rw, rw, rw, rw, rw}
+ // combination 24/32
+ case i0 && !i1 && i2 && i3 && i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.Hijacker
+ io.ReaderFrom
+ http.Pusher
+ }{rw, rw, rw, rw, rw, rw}
+ // combination 25/32
+ case i0 && i1 && !i2 && !i3 && !i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.CloseNotifier
+ }{rw, rw, rw, rw}
+ // combination 26/32
+ case i0 && i1 && !i2 && !i3 && i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.CloseNotifier
+ http.Pusher
+ }{rw, rw, rw, rw, rw}
+ // combination 27/32
+ case i0 && i1 && !i2 && i3 && !i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.CloseNotifier
+ io.ReaderFrom
+ }{rw, rw, rw, rw, rw}
+ // combination 28/32
+ case i0 && i1 && !i2 && i3 && i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.CloseNotifier
+ io.ReaderFrom
+ http.Pusher
+ }{rw, rw, rw, rw, rw, rw}
+ // combination 29/32
+ case i0 && i1 && i2 && !i3 && !i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.CloseNotifier
+ http.Hijacker
+ }{rw, rw, rw, rw, rw}
+ // combination 30/32
+ case i0 && i1 && i2 && !i3 && i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.CloseNotifier
+ http.Hijacker
+ http.Pusher
+ }{rw, rw, rw, rw, rw, rw}
+ // combination 31/32
+ case i0 && i1 && i2 && i3 && !i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.CloseNotifier
+ http.Hijacker
+ io.ReaderFrom
+ }{rw, rw, rw, rw, rw, rw}
+ // combination 32/32
+ case i0 && i1 && i2 && i3 && i4:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.CloseNotifier
+ http.Hijacker
+ io.ReaderFrom
+ http.Pusher
+ }{rw, rw, rw, rw, rw, rw, rw}
+ }
+ panic("unreachable")
+}
+
+type rw struct {
+ w http.ResponseWriter
+ h Hooks
+}
+
+func (w *rw) Unwrap() http.ResponseWriter {
+ return w.w
+}
+
+func (w *rw) Header() http.Header {
+ f := w.w.(http.ResponseWriter).Header
+ if w.h.Header != nil {
+ f = w.h.Header(f)
+ }
+ return f()
+}
+
+func (w *rw) WriteHeader(code int) {
+ f := w.w.(http.ResponseWriter).WriteHeader
+ if w.h.WriteHeader != nil {
+ f = w.h.WriteHeader(f)
+ }
+ f(code)
+}
+
+func (w *rw) Write(b []byte) (int, error) {
+ f := w.w.(http.ResponseWriter).Write
+ if w.h.Write != nil {
+ f = w.h.Write(f)
+ }
+ return f(b)
+}
+
+func (w *rw) Flush() {
+ f := w.w.(http.Flusher).Flush
+ if w.h.Flush != nil {
+ f = w.h.Flush(f)
+ }
+ f()
+}
+
+func (w *rw) CloseNotify() <-chan bool {
+ f := w.w.(http.CloseNotifier).CloseNotify
+ if w.h.CloseNotify != nil {
+ f = w.h.CloseNotify(f)
+ }
+ return f()
+}
+
+func (w *rw) Hijack() (net.Conn, *bufio.ReadWriter, error) {
+ f := w.w.(http.Hijacker).Hijack
+ if w.h.Hijack != nil {
+ f = w.h.Hijack(f)
+ }
+ return f()
+}
+
+func (w *rw) ReadFrom(src io.Reader) (int64, error) {
+ f := w.w.(io.ReaderFrom).ReadFrom
+ if w.h.ReadFrom != nil {
+ f = w.h.ReadFrom(f)
+ }
+ return f(src)
+}
+
+func (w *rw) Push(target string, opts *http.PushOptions) error {
+ f := w.w.(http.Pusher).Push
+ if w.h.Push != nil {
+ f = w.h.Push(f)
+ }
+ return f(target, opts)
+}
+
+type Unwrapper interface {
+ Unwrap() http.ResponseWriter
+}
+
+// Unwrap returns the underlying http.ResponseWriter from within zero or more
+// layers of httpsnoop wrappers.
+func Unwrap(w http.ResponseWriter) http.ResponseWriter {
+ if rw, ok := w.(Unwrapper); ok {
+ // recurse until rw.Unwrap() returns a non-Unwrapper
+ return Unwrap(rw.Unwrap())
+ } else {
+ return w
+ }
+}
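`CaptureMetrics` is just one consumer of this generated `Wrap`; custom hooks follow the same pattern. A hedged sketch that counts response bytes with a `Write` hook, using only `Wrap`, `Hooks`, and `WriteFunc` from the file above (the handler itself is illustrative):

```go
package main

import (
	"io"
	"log"
	"net/http"

	"github.com/felixge/httpsnoop"
)

// countBytes wraps w so every Write is intercepted and the byte count
// accumulates in *n. Hook fields left nil are simply passed through.
func countBytes(w http.ResponseWriter, n *int64) http.ResponseWriter {
	return httpsnoop.Wrap(w, httpsnoop.Hooks{
		Write: func(next httpsnoop.WriteFunc) httpsnoop.WriteFunc {
			return func(p []byte) (int, error) {
				written, err := next(p)
				*n += int64(written)
				return written, err
			}
		},
	})
}

func main() {
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		var n int64
		ww := countBytes(w, &n)
		io.WriteString(ww, "hello")
		log.Printf("wrote %d bytes", n) // 5
	})
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```

Note that `ww` still implements `http.Flusher`, `io.ReaderFrom`, and friends whenever `w` does, thanks to the interface-combination switch.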
diff --git a/vendor/github.com/felixge/httpsnoop/wrap_generated_lt_1.8.go b/vendor/github.com/felixge/httpsnoop/wrap_generated_lt_1.8.go
new file mode 100644
index 000000000..ab99c07c7
--- /dev/null
+++ b/vendor/github.com/felixge/httpsnoop/wrap_generated_lt_1.8.go
@@ -0,0 +1,278 @@
+// +build !go1.8
+// Code generated by "httpsnoop/codegen"; DO NOT EDIT
+
+package httpsnoop
+
+import (
+ "bufio"
+ "io"
+ "net"
+ "net/http"
+)
+
+// HeaderFunc is part of the http.ResponseWriter interface.
+type HeaderFunc func() http.Header
+
+// WriteHeaderFunc is part of the http.ResponseWriter interface.
+type WriteHeaderFunc func(code int)
+
+// WriteFunc is part of the http.ResponseWriter interface.
+type WriteFunc func(b []byte) (int, error)
+
+// FlushFunc is part of the http.Flusher interface.
+type FlushFunc func()
+
+// CloseNotifyFunc is part of the http.CloseNotifier interface.
+type CloseNotifyFunc func() <-chan bool
+
+// HijackFunc is part of the http.Hijacker interface.
+type HijackFunc func() (net.Conn, *bufio.ReadWriter, error)
+
+// ReadFromFunc is part of the io.ReaderFrom interface.
+type ReadFromFunc func(src io.Reader) (int64, error)
+
+// Hooks defines a set of method interceptors for methods included in
+// http.ResponseWriter as well as some others. You can think of them as
+// middleware for the function calls they target. See Wrap for more details.
+type Hooks struct {
+ Header func(HeaderFunc) HeaderFunc
+ WriteHeader func(WriteHeaderFunc) WriteHeaderFunc
+ Write func(WriteFunc) WriteFunc
+ Flush func(FlushFunc) FlushFunc
+ CloseNotify func(CloseNotifyFunc) CloseNotifyFunc
+ Hijack func(HijackFunc) HijackFunc
+ ReadFrom func(ReadFromFunc) ReadFromFunc
+}
+
+// Wrap returns a wrapped version of w that provides the exact same interface
+// as w. Specifically if w implements any combination of:
+//
+// - http.Flusher
+// - http.CloseNotifier
+// - http.Hijacker
+// - io.ReaderFrom
+//
+// The wrapped version will implement the exact same combination. If no hooks
+// are set, the wrapped version also behaves exactly as w. Hooks targeting
+// methods not supported by w are ignored. Any other hooks will intercept the
+// method they target and may modify the call's arguments and/or return values.
+// The CaptureMetrics implementation serves as a working example for how the
+// hooks can be used.
+func Wrap(w http.ResponseWriter, hooks Hooks) http.ResponseWriter {
+ rw := &rw{w: w, h: hooks}
+ _, i0 := w.(http.Flusher)
+ _, i1 := w.(http.CloseNotifier)
+ _, i2 := w.(http.Hijacker)
+ _, i3 := w.(io.ReaderFrom)
+ switch {
+ // combination 1/16
+ case !i0 && !i1 && !i2 && !i3:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ }{rw, rw}
+ // combination 2/16
+ case !i0 && !i1 && !i2 && i3:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ io.ReaderFrom
+ }{rw, rw, rw}
+ // combination 3/16
+ case !i0 && !i1 && i2 && !i3:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Hijacker
+ }{rw, rw, rw}
+ // combination 4/16
+ case !i0 && !i1 && i2 && i3:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Hijacker
+ io.ReaderFrom
+ }{rw, rw, rw, rw}
+ // combination 5/16
+ case !i0 && i1 && !i2 && !i3:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.CloseNotifier
+ }{rw, rw, rw}
+ // combination 6/16
+ case !i0 && i1 && !i2 && i3:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.CloseNotifier
+ io.ReaderFrom
+ }{rw, rw, rw, rw}
+ // combination 7/16
+ case !i0 && i1 && i2 && !i3:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.CloseNotifier
+ http.Hijacker
+ }{rw, rw, rw, rw}
+ // combination 8/16
+ case !i0 && i1 && i2 && i3:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.CloseNotifier
+ http.Hijacker
+ io.ReaderFrom
+ }{rw, rw, rw, rw, rw}
+ // combination 9/16
+ case i0 && !i1 && !i2 && !i3:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ }{rw, rw, rw}
+ // combination 10/16
+ case i0 && !i1 && !i2 && i3:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ io.ReaderFrom
+ }{rw, rw, rw, rw}
+ // combination 11/16
+ case i0 && !i1 && i2 && !i3:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.Hijacker
+ }{rw, rw, rw, rw}
+ // combination 12/16
+ case i0 && !i1 && i2 && i3:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.Hijacker
+ io.ReaderFrom
+ }{rw, rw, rw, rw, rw}
+ // combination 13/16
+ case i0 && i1 && !i2 && !i3:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.CloseNotifier
+ }{rw, rw, rw, rw}
+ // combination 14/16
+ case i0 && i1 && !i2 && i3:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.CloseNotifier
+ io.ReaderFrom
+ }{rw, rw, rw, rw, rw}
+ // combination 15/16
+ case i0 && i1 && i2 && !i3:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.CloseNotifier
+ http.Hijacker
+ }{rw, rw, rw, rw, rw}
+ // combination 16/16
+ case i0 && i1 && i2 && i3:
+ return struct {
+ Unwrapper
+ http.ResponseWriter
+ http.Flusher
+ http.CloseNotifier
+ http.Hijacker
+ io.ReaderFrom
+ }{rw, rw, rw, rw, rw, rw}
+ }
+ panic("unreachable")
+}
+
+type rw struct {
+ w http.ResponseWriter
+ h Hooks
+}
+
+func (w *rw) Unwrap() http.ResponseWriter {
+ return w.w
+}
+
+func (w *rw) Header() http.Header {
+ f := w.w.(http.ResponseWriter).Header
+ if w.h.Header != nil {
+ f = w.h.Header(f)
+ }
+ return f()
+}
+
+func (w *rw) WriteHeader(code int) {
+ f := w.w.(http.ResponseWriter).WriteHeader
+ if w.h.WriteHeader != nil {
+ f = w.h.WriteHeader(f)
+ }
+ f(code)
+}
+
+func (w *rw) Write(b []byte) (int, error) {
+ f := w.w.(http.ResponseWriter).Write
+ if w.h.Write != nil {
+ f = w.h.Write(f)
+ }
+ return f(b)
+}
+
+func (w *rw) Flush() {
+ f := w.w.(http.Flusher).Flush
+ if w.h.Flush != nil {
+ f = w.h.Flush(f)
+ }
+ f()
+}
+
+func (w *rw) CloseNotify() <-chan bool {
+ f := w.w.(http.CloseNotifier).CloseNotify
+ if w.h.CloseNotify != nil {
+ f = w.h.CloseNotify(f)
+ }
+ return f()
+}
+
+func (w *rw) Hijack() (net.Conn, *bufio.ReadWriter, error) {
+ f := w.w.(http.Hijacker).Hijack
+ if w.h.Hijack != nil {
+ f = w.h.Hijack(f)
+ }
+ return f()
+}
+
+func (w *rw) ReadFrom(src io.Reader) (int64, error) {
+ f := w.w.(io.ReaderFrom).ReadFrom
+ if w.h.ReadFrom != nil {
+ f = w.h.ReadFrom(f)
+ }
+ return f(src)
+}
+
+type Unwrapper interface {
+ Unwrap() http.ResponseWriter
+}
+
+// Unwrap returns the underlying http.ResponseWriter from within zero or more
+// layers of httpsnoop wrappers.
+func Unwrap(w http.ResponseWriter) http.ResponseWriter {
+ if rw, ok := w.(Unwrapper); ok {
+ // recurse until rw.Unwrap() returns a non-Unwrapper
+ return Unwrap(rw.Unwrap())
+ } else {
+ return w
+ }
+}
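This file mirrors the one above minus `http.Pusher`, with the split selected by the `go1.8` build tag. For application-specific interfaces httpsnoop does not know about, `Wrap` cannot re-expose them; the README's suggestion is to reach through with `Unwrap`. A sketch, where `trailerSetter` is a hypothetical interface invented purely for illustration:

```go
package main

import (
	"log"
	"net/http"

	"github.com/felixge/httpsnoop"
)

// trailerSetter is a made-up, application-specific interface that a custom
// ResponseWriter might implement; Wrap has no case for it.
type trailerSetter interface {
	SetTrailer(key, value string)
}

func main() {
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		ww := httpsnoop.Wrap(w, httpsnoop.Hooks{})
		// Reach through zero or more wrapper layers to the original writer.
		if ts, ok := httpsnoop.Unwrap(ww).(trailerSetter); ok {
			ts.SetTrailer("X-Checksum", "abc123")
		}
		_, _ = ww.Write([]byte("ok"))
	})
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```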
diff --git a/vendor/github.com/go-openapi/analysis/.codecov.yml b/vendor/github.com/go-openapi/analysis/.codecov.yml
new file mode 100644
index 000000000..841c4281e
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/.codecov.yml
@@ -0,0 +1,5 @@
+coverage:
+ status:
+ patch:
+ default:
+ target: 80%
diff --git a/vendor/github.com/go-openapi/analysis/.gitattributes b/vendor/github.com/go-openapi/analysis/.gitattributes
new file mode 100644
index 000000000..d020be8ea
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/.gitattributes
@@ -0,0 +1,2 @@
+*.go text eol=lf
+
diff --git a/vendor/github.com/go-openapi/analysis/.gitignore b/vendor/github.com/go-openapi/analysis/.gitignore
new file mode 100644
index 000000000..87c3bd3e6
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/.gitignore
@@ -0,0 +1,5 @@
+secrets.yml
+coverage.out
+coverage.txt
+*.cov
+.idea
diff --git a/vendor/github.com/go-openapi/analysis/.golangci.yml b/vendor/github.com/go-openapi/analysis/.golangci.yml
new file mode 100644
index 000000000..e24a6c14e
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/.golangci.yml
@@ -0,0 +1,56 @@
+linters-settings:
+ govet:
+ check-shadowing: true
+ golint:
+ min-confidence: 0
+ gocyclo:
+ min-complexity: 40
+ gocognit:
+ min-complexity: 40
+ maligned:
+ suggest-new: true
+ dupl:
+ threshold: 150
+ goconst:
+ min-len: 2
+ min-occurrences: 4
+
+linters:
+ enable-all: true
+ disable:
+ - maligned
+ - lll
+ - gochecknoglobals
+ - gochecknoinits
+ # scopelint is useful, but also reports false positives
+ # that unfortunately can't be disabled. So we disable the
+ # linter rather than changing code that works.
+ # see: https://github.com/kyoh86/scopelint/issues/4
+ - scopelint
+ - godox
+ - gocognit
+ #- whitespace
+ - wsl
+ - funlen
+ - testpackage
+ - wrapcheck
+ #- nlreturn
+ - gomnd
+ - goerr113
+ - exhaustivestruct
+ #- errorlint
+ #- nestif
+ - gofumpt
+ - godot
+ - gci
+ - dogsled
+ - paralleltest
+ - tparallel
+ - thelper
+ - ifshort
+ - forbidigo
+ - cyclop
+ - varnamelen
+ - exhaustruct
+ - nonamedreturns
+ - nosnakecase
diff --git a/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..9322b065e
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md
@@ -0,0 +1,74 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of experience,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at ivan+abuse@flanders.co.nz. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/analysis/LICENSE b/vendor/github.com/go-openapi/analysis/LICENSE
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/go-openapi/analysis/README.md b/vendor/github.com/go-openapi/analysis/README.md
new file mode 100644
index 000000000..aad6da10f
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/README.md
@@ -0,0 +1,31 @@
+# OpenAPI initiative analysis
+
+[![Build Status](https://travis-ci.org/go-openapi/analysis.svg?branch=master)](https://travis-ci.org/go-openapi/analysis)
+[![Build status](https://ci.appveyor.com/api/projects/status/x377t5o9ennm847o/branch/master?svg=true)](https://ci.appveyor.com/project/casualjim/go-openapi/analysis/branch/master)
+[![codecov](https://codecov.io/gh/go-openapi/analysis/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/analysis)
+[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
+[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/analysis/master/LICENSE)
+[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/analysis.svg)](https://pkg.go.dev/github.com/go-openapi/analysis)
+[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/analysis)](https://goreportcard.com/report/github.com/go-openapi/analysis)
+
+
+A foundational library to analyze an OAI specification document for easier reasoning about the content.
+
+## What's inside?
+
+* An analyzer providing methods to walk the functional content of a specification
+* A spec flattener producing a self-contained document bundle, while preserving `$ref`s
+* A spec merger ("mixin") to merge several spec documents into a primary spec
+* A spec "fixer" ensuring that response descriptions are non-empty
+
+[Documentation](https://godoc.org/github.com/go-openapi/analysis)
+
+## FAQ
+
+* Does this library support OpenAPI 3?
+
+> No.
+> This package currently only supports OpenAPI 2.0 (aka Swagger 2.0).
+> There is no plan to make it evolve toward supporting OpenAPI 3.x.
+> This [discussion thread](https://github.com/go-openapi/spec/issues/21) tells the full story.
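
A minimal usage sketch of the analyzer added in this change (the JSON payload and printed output are illustrative):

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/spec"
)

func main() {
	raw := []byte(`{
	  "swagger": "2.0",
	  "paths": {
	    "/ping": {
	      "get": {
	        "operationId": "ping",
	        "produces": ["application/json"],
	        "responses": {"200": {"description": "ok"}}
	      }
	    }
	  }
	}`)

	var doc spec.Swagger
	if err := json.Unmarshal(raw, &doc); err != nil {
		panic(err)
	}

	an := analysis.New(&doc)           // build the analyzed spec and its indices
	fmt.Println(an.OperationIDs())     // [ping]
	fmt.Println(an.RequiredProduces()) // [application/json]
}
```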
diff --git a/vendor/github.com/go-openapi/analysis/analyzer.go b/vendor/github.com/go-openapi/analysis/analyzer.go
new file mode 100644
index 000000000..c17aee1b6
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/analyzer.go
@@ -0,0 +1,1064 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analysis
+
+import (
+ "fmt"
+ slashpath "path"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+type referenceAnalysis struct {
+ schemas map[string]spec.Ref
+ responses map[string]spec.Ref
+ parameters map[string]spec.Ref
+ items map[string]spec.Ref
+ headerItems map[string]spec.Ref
+ parameterItems map[string]spec.Ref
+ allRefs map[string]spec.Ref
+ pathItems map[string]spec.Ref
+}
+
+func (r *referenceAnalysis) addRef(key string, ref spec.Ref) {
+ r.allRefs["#"+key] = ref
+}
+
+func (r *referenceAnalysis) addItemsRef(key string, items *spec.Items, location string) {
+ r.items["#"+key] = items.Ref
+ r.addRef(key, items.Ref)
+ if location == "header" {
+ // NOTE: in swagger 2.0, headers and parameters (but not body parameter schemas) are simple schemas,
+ // and $ref is not supported there. It is nevertheless possible to analyze them.
+ r.headerItems["#"+key] = items.Ref
+ } else {
+ r.parameterItems["#"+key] = items.Ref
+ }
+}
+
+func (r *referenceAnalysis) addSchemaRef(key string, ref SchemaRef) {
+ r.schemas["#"+key] = ref.Schema.Ref
+ r.addRef(key, ref.Schema.Ref)
+}
+
+func (r *referenceAnalysis) addResponseRef(key string, resp *spec.Response) {
+ r.responses["#"+key] = resp.Ref
+ r.addRef(key, resp.Ref)
+}
+
+func (r *referenceAnalysis) addParamRef(key string, param *spec.Parameter) {
+ r.parameters["#"+key] = param.Ref
+ r.addRef(key, param.Ref)
+}
+
+func (r *referenceAnalysis) addPathItemRef(key string, pathItem *spec.PathItem) {
+ r.pathItems["#"+key] = pathItem.Ref
+ r.addRef(key, pathItem.Ref)
+}
+
+type patternAnalysis struct {
+ parameters map[string]string
+ headers map[string]string
+ items map[string]string
+ schemas map[string]string
+ allPatterns map[string]string
+}
+
+func (p *patternAnalysis) addPattern(key, pattern string) {
+ p.allPatterns["#"+key] = pattern
+}
+
+func (p *patternAnalysis) addParameterPattern(key, pattern string) {
+ p.parameters["#"+key] = pattern
+ p.addPattern(key, pattern)
+}
+
+func (p *patternAnalysis) addHeaderPattern(key, pattern string) {
+ p.headers["#"+key] = pattern
+ p.addPattern(key, pattern)
+}
+
+func (p *patternAnalysis) addItemsPattern(key, pattern string) {
+ p.items["#"+key] = pattern
+ p.addPattern(key, pattern)
+}
+
+func (p *patternAnalysis) addSchemaPattern(key, pattern string) {
+ p.schemas["#"+key] = pattern
+ p.addPattern(key, pattern)
+}
+
+type enumAnalysis struct {
+ parameters map[string][]interface{}
+ headers map[string][]interface{}
+ items map[string][]interface{}
+ schemas map[string][]interface{}
+ allEnums map[string][]interface{}
+}
+
+func (p *enumAnalysis) addEnum(key string, enum []interface{}) {
+ p.allEnums["#"+key] = enum
+}
+
+func (p *enumAnalysis) addParameterEnum(key string, enum []interface{}) {
+ p.parameters["#"+key] = enum
+ p.addEnum(key, enum)
+}
+
+func (p *enumAnalysis) addHeaderEnum(key string, enum []interface{}) {
+ p.headers["#"+key] = enum
+ p.addEnum(key, enum)
+}
+
+func (p *enumAnalysis) addItemsEnum(key string, enum []interface{}) {
+ p.items["#"+key] = enum
+ p.addEnum(key, enum)
+}
+
+func (p *enumAnalysis) addSchemaEnum(key string, enum []interface{}) {
+ p.schemas["#"+key] = enum
+ p.addEnum(key, enum)
+}
+
+// New takes a swagger spec object and returns an analyzed spec document.
+// The analyzed document contains a number of indices that make it easier to
+// reason about semantics of a swagger specification for use in code generation
+// or validation etc.
+func New(doc *spec.Swagger) *Spec {
+ a := &Spec{
+ spec: doc,
+ references: referenceAnalysis{},
+ patterns: patternAnalysis{},
+ enums: enumAnalysis{},
+ }
+ a.reset()
+ a.initialize()
+
+ return a
+}
+
+// Spec is an analyzed specification object. It takes a swagger spec object and turns it into a registry
+// with a set of utility methods to act on the information in the spec.
+type Spec struct {
+ spec *spec.Swagger
+ consumes map[string]struct{}
+ produces map[string]struct{}
+ authSchemes map[string]struct{}
+ operations map[string]map[string]*spec.Operation
+ references referenceAnalysis
+ patterns patternAnalysis
+ enums enumAnalysis
+ allSchemas map[string]SchemaRef
+ allOfs map[string]SchemaRef
+}
+
+func (s *Spec) reset() {
+ s.consumes = make(map[string]struct{}, 150)
+ s.produces = make(map[string]struct{}, 150)
+ s.authSchemes = make(map[string]struct{}, 150)
+ s.operations = make(map[string]map[string]*spec.Operation, 150)
+ s.allSchemas = make(map[string]SchemaRef, 150)
+ s.allOfs = make(map[string]SchemaRef, 150)
+ s.references.schemas = make(map[string]spec.Ref, 150)
+ s.references.pathItems = make(map[string]spec.Ref, 150)
+ s.references.responses = make(map[string]spec.Ref, 150)
+ s.references.parameters = make(map[string]spec.Ref, 150)
+ s.references.items = make(map[string]spec.Ref, 150)
+ s.references.headerItems = make(map[string]spec.Ref, 150)
+ s.references.parameterItems = make(map[string]spec.Ref, 150)
+ s.references.allRefs = make(map[string]spec.Ref, 150)
+ s.patterns.parameters = make(map[string]string, 150)
+ s.patterns.headers = make(map[string]string, 150)
+ s.patterns.items = make(map[string]string, 150)
+ s.patterns.schemas = make(map[string]string, 150)
+ s.patterns.allPatterns = make(map[string]string, 150)
+ s.enums.parameters = make(map[string][]interface{}, 150)
+ s.enums.headers = make(map[string][]interface{}, 150)
+ s.enums.items = make(map[string][]interface{}, 150)
+ s.enums.schemas = make(map[string][]interface{}, 150)
+ s.enums.allEnums = make(map[string][]interface{}, 150)
+}
+
+func (s *Spec) reload() {
+ s.reset()
+ s.initialize()
+}
+
+func (s *Spec) initialize() {
+ for _, c := range s.spec.Consumes {
+ s.consumes[c] = struct{}{}
+ }
+ for _, c := range s.spec.Produces {
+ s.produces[c] = struct{}{}
+ }
+ for _, ss := range s.spec.Security {
+ for k := range ss {
+ s.authSchemes[k] = struct{}{}
+ }
+ }
+ for path, pathItem := range s.AllPaths() {
+ s.analyzeOperations(path, &pathItem) //#nosec
+ }
+
+ for name, parameter := range s.spec.Parameters {
+ refPref := slashpath.Join("/parameters", jsonpointer.Escape(name))
+ if parameter.Items != nil {
+ s.analyzeItems("items", parameter.Items, refPref, "parameter")
+ }
+ if parameter.In == "body" && parameter.Schema != nil {
+ s.analyzeSchema("schema", parameter.Schema, refPref)
+ }
+ if parameter.Pattern != "" {
+ s.patterns.addParameterPattern(refPref, parameter.Pattern)
+ }
+ if len(parameter.Enum) > 0 {
+ s.enums.addParameterEnum(refPref, parameter.Enum)
+ }
+ }
+
+ for name, response := range s.spec.Responses {
+ refPref := slashpath.Join("/responses", jsonpointer.Escape(name))
+ for k, v := range response.Headers {
+ hRefPref := slashpath.Join(refPref, "headers", k)
+ if v.Items != nil {
+ s.analyzeItems("items", v.Items, hRefPref, "header")
+ }
+ if v.Pattern != "" {
+ s.patterns.addHeaderPattern(hRefPref, v.Pattern)
+ }
+ if len(v.Enum) > 0 {
+ s.enums.addHeaderEnum(hRefPref, v.Enum)
+ }
+ }
+ if response.Schema != nil {
+ s.analyzeSchema("schema", response.Schema, refPref)
+ }
+ }
+
+ for name := range s.spec.Definitions {
+ schema := s.spec.Definitions[name]
+ s.analyzeSchema(name, &schema, "/definitions")
+ }
+ // TODO: after analyzing all things and flattening schemas etc
+ // resolve all the collected references to their final representations
+ // best put in a separate method because this could get expensive
+}
+
+func (s *Spec) analyzeOperations(path string, pi *spec.PathItem) {
+ // TODO: resolve refs here?
+ // Currently, operations declared via pathItem $ref are known only after expansion
+ op := pi
+ if pi.Ref.String() != "" {
+ key := slashpath.Join("/paths", jsonpointer.Escape(path))
+ s.references.addPathItemRef(key, pi)
+ }
+ s.analyzeOperation("GET", path, op.Get)
+ s.analyzeOperation("PUT", path, op.Put)
+ s.analyzeOperation("POST", path, op.Post)
+ s.analyzeOperation("PATCH", path, op.Patch)
+ s.analyzeOperation("DELETE", path, op.Delete)
+ s.analyzeOperation("HEAD", path, op.Head)
+ s.analyzeOperation("OPTIONS", path, op.Options)
+ for i, param := range op.Parameters {
+ refPref := slashpath.Join("/paths", jsonpointer.Escape(path), "parameters", strconv.Itoa(i))
+ if param.Ref.String() != "" {
+ s.references.addParamRef(refPref, &param) //#nosec
+ }
+ if param.Pattern != "" {
+ s.patterns.addParameterPattern(refPref, param.Pattern)
+ }
+ if len(param.Enum) > 0 {
+ s.enums.addParameterEnum(refPref, param.Enum)
+ }
+ if param.Items != nil {
+ s.analyzeItems("items", param.Items, refPref, "parameter")
+ }
+ if param.Schema != nil {
+ s.analyzeSchema("schema", param.Schema, refPref)
+ }
+ }
+}
+
+func (s *Spec) analyzeItems(name string, items *spec.Items, prefix, location string) {
+ if items == nil {
+ return
+ }
+ refPref := slashpath.Join(prefix, name)
+ s.analyzeItems(name, items.Items, refPref, location)
+ if items.Ref.String() != "" {
+ s.references.addItemsRef(refPref, items, location)
+ }
+ if items.Pattern != "" {
+ s.patterns.addItemsPattern(refPref, items.Pattern)
+ }
+ if len(items.Enum) > 0 {
+ s.enums.addItemsEnum(refPref, items.Enum)
+ }
+}
+
+func (s *Spec) analyzeParameter(prefix string, i int, param spec.Parameter) {
+ refPref := slashpath.Join(prefix, "parameters", strconv.Itoa(i))
+ if param.Ref.String() != "" {
+ s.references.addParamRef(refPref, &param) //#nosec
+ }
+
+ if param.Pattern != "" {
+ s.patterns.addParameterPattern(refPref, param.Pattern)
+ }
+
+ if len(param.Enum) > 0 {
+ s.enums.addParameterEnum(refPref, param.Enum)
+ }
+
+ s.analyzeItems("items", param.Items, refPref, "parameter")
+ if param.In == "body" && param.Schema != nil {
+ s.analyzeSchema("schema", param.Schema, refPref)
+ }
+}
+
+func (s *Spec) analyzeOperation(method, path string, op *spec.Operation) {
+ if op == nil {
+ return
+ }
+
+ for _, c := range op.Consumes {
+ s.consumes[c] = struct{}{}
+ }
+
+ for _, c := range op.Produces {
+ s.produces[c] = struct{}{}
+ }
+
+ for _, ss := range op.Security {
+ for k := range ss {
+ s.authSchemes[k] = struct{}{}
+ }
+ }
+
+ if _, ok := s.operations[method]; !ok {
+ s.operations[method] = make(map[string]*spec.Operation)
+ }
+
+ s.operations[method][path] = op
+ prefix := slashpath.Join("/paths", jsonpointer.Escape(path), strings.ToLower(method))
+ for i, param := range op.Parameters {
+ s.analyzeParameter(prefix, i, param)
+ }
+
+ if op.Responses == nil {
+ return
+ }
+
+ if op.Responses.Default != nil {
+ s.analyzeDefaultResponse(prefix, op.Responses.Default)
+ }
+
+ for k, res := range op.Responses.StatusCodeResponses {
+ s.analyzeResponse(prefix, k, res)
+ }
+}
+
+func (s *Spec) analyzeDefaultResponse(prefix string, res *spec.Response) {
+ refPref := slashpath.Join(prefix, "responses", "default")
+ if res.Ref.String() != "" {
+ s.references.addResponseRef(refPref, res)
+ }
+
+ for k, v := range res.Headers {
+ hRefPref := slashpath.Join(refPref, "headers", k)
+ s.analyzeItems("items", v.Items, hRefPref, "header")
+ if v.Pattern != "" {
+ s.patterns.addHeaderPattern(hRefPref, v.Pattern)
+ }
+ }
+
+ if res.Schema != nil {
+ s.analyzeSchema("schema", res.Schema, refPref)
+ }
+}
+
+func (s *Spec) analyzeResponse(prefix string, k int, res spec.Response) {
+ refPref := slashpath.Join(prefix, "responses", strconv.Itoa(k))
+ if res.Ref.String() != "" {
+ s.references.addResponseRef(refPref, &res) //#nosec
+ }
+
+ for k, v := range res.Headers {
+ hRefPref := slashpath.Join(refPref, "headers", k)
+ s.analyzeItems("items", v.Items, hRefPref, "header")
+ if v.Pattern != "" {
+ s.patterns.addHeaderPattern(hRefPref, v.Pattern)
+ }
+
+ if len(v.Enum) > 0 {
+ s.enums.addHeaderEnum(hRefPref, v.Enum)
+ }
+ }
+
+ if res.Schema != nil {
+ s.analyzeSchema("schema", res.Schema, refPref)
+ }
+}
+
+func (s *Spec) analyzeSchema(name string, schema *spec.Schema, prefix string) {
+ refURI := slashpath.Join(prefix, jsonpointer.Escape(name))
+ schRef := SchemaRef{
+ Name: name,
+ Schema: schema,
+ Ref: spec.MustCreateRef("#" + refURI),
+ TopLevel: prefix == "/definitions",
+ }
+
+ s.allSchemas["#"+refURI] = schRef
+
+ if schema.Ref.String() != "" {
+ s.references.addSchemaRef(refURI, schRef)
+ }
+
+ if schema.Pattern != "" {
+ s.patterns.addSchemaPattern(refURI, schema.Pattern)
+ }
+
+ if len(schema.Enum) > 0 {
+ s.enums.addSchemaEnum(refURI, schema.Enum)
+ }
+
+ for k, v := range schema.Definitions {
+ v := v
+ s.analyzeSchema(k, &v, slashpath.Join(refURI, "definitions"))
+ }
+
+ for k, v := range schema.Properties {
+ v := v
+ s.analyzeSchema(k, &v, slashpath.Join(refURI, "properties"))
+ }
+
+ for k, v := range schema.PatternProperties {
+ v := v
+ // NOTE: swagger 2.0 does not support PatternProperties.
+ // However it is possible to analyze this in a schema
+ s.analyzeSchema(k, &v, slashpath.Join(refURI, "patternProperties"))
+ }
+
+ for i := range schema.AllOf {
+ v := &schema.AllOf[i]
+ s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "allOf"))
+ }
+
+ if len(schema.AllOf) > 0 {
+ s.allOfs["#"+refURI] = schRef
+ }
+
+ for i := range schema.AnyOf {
+ v := &schema.AnyOf[i]
+ // NOTE: swagger 2.0 does not support anyOf constructs.
+ // However it is possible to analyze this in a schema
+ s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "anyOf"))
+ }
+
+ for i := range schema.OneOf {
+ v := &schema.OneOf[i]
+ // NOTE: swagger 2.0 does not support oneOf constructs.
+ // However it is possible to analyze this in a schema
+ s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "oneOf"))
+ }
+
+ if schema.Not != nil {
+ // NOTE: swagger 2.0 does not support "not" constructs.
+ // However it is possible to analyze this in a schema
+ s.analyzeSchema("not", schema.Not, refURI)
+ }
+
+ if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
+ s.analyzeSchema("additionalProperties", schema.AdditionalProperties.Schema, refURI)
+ }
+
+ if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil {
+ // NOTE: swagger 2.0 does not support AdditionalItems.
+ // However it is possible to analyze this in a schema
+ s.analyzeSchema("additionalItems", schema.AdditionalItems.Schema, refURI)
+ }
+
+ if schema.Items != nil {
+ if schema.Items.Schema != nil {
+ s.analyzeSchema("items", schema.Items.Schema, refURI)
+ }
+
+ for i := range schema.Items.Schemas {
+ sch := &schema.Items.Schemas[i]
+ s.analyzeSchema(strconv.Itoa(i), sch, slashpath.Join(refURI, "items"))
+ }
+ }
+}
+
+// SecurityRequirement is a representation of a security requirement for an operation
+type SecurityRequirement struct {
+ Name string
+ Scopes []string
+}
+
+// SecurityRequirementsFor gets the security requirements for the operation
+func (s *Spec) SecurityRequirementsFor(operation *spec.Operation) [][]SecurityRequirement {
+ if s.spec.Security == nil && operation.Security == nil {
+ return nil
+ }
+
+ schemes := s.spec.Security
+ if operation.Security != nil {
+ schemes = operation.Security
+ }
+
+ result := [][]SecurityRequirement{}
+ for _, scheme := range schemes {
+ if len(scheme) == 0 {
+ // append a zero object for anonymous
+ result = append(result, []SecurityRequirement{{}})
+
+ continue
+ }
+
+ var reqs []SecurityRequirement
+ for k, v := range scheme {
+ if v == nil {
+ v = []string{}
+ }
+ reqs = append(reqs, SecurityRequirement{Name: k, Scopes: v})
+ }
+
+ result = append(result, reqs)
+ }
+
+ return result
+}
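+
+// A short sketch of how these requirements are typically consumed; the
+// variables are illustrative (`an` is an analyzed *Spec, `op` a *spec.Operation):
+//
+//	for _, alternative := range an.SecurityRequirementsFor(op) {
+//	    // requirements within one alternative must all be satisfied;
+//	    // satisfying any single alternative authorizes the operation
+//	    for _, req := range alternative {
+//	        fmt.Printf("scheme: %s, scopes: %v\n", req.Name, req.Scopes)
+//	    }
+//	}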
+
+// SecurityDefinitionsForRequirements gets the matching security definitions for a set of requirements
+func (s *Spec) SecurityDefinitionsForRequirements(requirements []SecurityRequirement) map[string]spec.SecurityScheme {
+ result := make(map[string]spec.SecurityScheme)
+
+ for _, v := range requirements {
+ if definition, ok := s.spec.SecurityDefinitions[v.Name]; ok {
+ if definition != nil {
+ result[v.Name] = *definition
+ }
+ }
+ }
+
+ return result
+}
+
+// SecurityDefinitionsFor gets the matching security definitions for a set of requirements
+func (s *Spec) SecurityDefinitionsFor(operation *spec.Operation) map[string]spec.SecurityScheme {
+ requirements := s.SecurityRequirementsFor(operation)
+ if len(requirements) == 0 {
+ return nil
+ }
+
+ result := make(map[string]spec.SecurityScheme)
+ for _, reqs := range requirements {
+ for _, v := range reqs {
+ if v.Name == "" {
+ // optional requirement
+ continue
+ }
+
+ if _, ok := result[v.Name]; ok {
+ // duplicate requirement
+ continue
+ }
+
+ if definition, ok := s.spec.SecurityDefinitions[v.Name]; ok {
+ if definition != nil {
+ result[v.Name] = *definition
+ }
+ }
+ }
+ }
+
+ return result
+}
+
+// ConsumesFor gets the mediatypes for the operation
+func (s *Spec) ConsumesFor(operation *spec.Operation) []string {
+ if len(operation.Consumes) == 0 {
+ cons := make(map[string]struct{}, len(s.spec.Consumes))
+ for _, k := range s.spec.Consumes {
+ cons[k] = struct{}{}
+ }
+
+ return s.structMapKeys(cons)
+ }
+
+ cons := make(map[string]struct{}, len(operation.Consumes))
+ for _, c := range operation.Consumes {
+ cons[c] = struct{}{}
+ }
+
+ return s.structMapKeys(cons)
+}
+
+// ProducesFor gets the mediatypes for the operation
+func (s *Spec) ProducesFor(operation *spec.Operation) []string {
+ if len(operation.Produces) == 0 {
+ prod := make(map[string]struct{}, len(s.spec.Produces))
+ for _, k := range s.spec.Produces {
+ prod[k] = struct{}{}
+ }
+
+ return s.structMapKeys(prod)
+ }
+
+ prod := make(map[string]struct{}, len(operation.Produces))
+ for _, c := range operation.Produces {
+ prod[c] = struct{}{}
+ }
+
+ return s.structMapKeys(prod)
+}
+
+func mapKeyFromParam(param *spec.Parameter) string {
+ return fmt.Sprintf("%s#%s", param.In, fieldNameFromParam(param))
+}
+
+func fieldNameFromParam(param *spec.Parameter) string {
+ // TODO: this should be x-go-name
+ if nm, ok := param.Extensions.GetString("go-name"); ok {
+ return nm
+ }
+
+ return swag.ToGoName(param.Name)
+}
+
+// ErrorOnParamFunc is a callback function to be invoked
+// whenever an error is encountered while resolving references
+// on parameters.
+//
+// This function takes as input the spec.Parameter which triggered the
+// error and the error itself.
+//
+// If the callback function returns false, the calling function should bail.
+//
+// If it returns true, the calling function should continue evaluating parameters.
+// A nil ErrorOnParamFunc must be evaluated as equivalent to panic().
+type ErrorOnParamFunc func(spec.Parameter, error) bool
+
+func (s *Spec) paramsAsMap(parameters []spec.Parameter, res map[string]spec.Parameter, callmeOnError ErrorOnParamFunc) {
+ for _, param := range parameters {
+ pr := param
+ if pr.Ref.String() == "" {
+ res[mapKeyFromParam(&pr)] = pr
+
+ continue
+ }
+
+ // resolve $ref
+ if callmeOnError == nil {
+ callmeOnError = func(_ spec.Parameter, err error) bool {
+ panic(err)
+ }
+ }
+
+ obj, _, err := pr.Ref.GetPointer().Get(s.spec)
+ if err != nil {
+ if callmeOnError(param, fmt.Errorf("invalid reference: %q", pr.Ref.String())) {
+ continue
+ }
+
+ break
+ }
+
+ objAsParam, ok := obj.(spec.Parameter)
+ if !ok {
+ if callmeOnError(param, fmt.Errorf("resolved reference is not a parameter: %q", pr.Ref.String())) {
+ continue
+ }
+
+ break
+ }
+
+ pr = objAsParam
+ res[mapKeyFromParam(&pr)] = pr
+ }
+}
+
+// ParametersFor the specified operation id.
+//
+// Assumes parameters properly resolve references if any and that
+// such references actually resolve to a parameter object.
+// Otherwise, panics.
+func (s *Spec) ParametersFor(operationID string) []spec.Parameter {
+ return s.SafeParametersFor(operationID, nil)
+}
+
+// SafeParametersFor the specified operation id.
+//
+// Does not assume parameters properly resolve references or that
+// such references actually resolve to a parameter object.
+//
+// Upon error, invoke an ErrorOnParamFunc callback with the erroneous
+// parameters. If the callback is set to nil, panics upon errors.
+func (s *Spec) SafeParametersFor(operationID string, callmeOnError ErrorOnParamFunc) []spec.Parameter {
+ gatherParams := func(pi *spec.PathItem, op *spec.Operation) []spec.Parameter {
+ bag := make(map[string]spec.Parameter)
+ s.paramsAsMap(pi.Parameters, bag, callmeOnError)
+ s.paramsAsMap(op.Parameters, bag, callmeOnError)
+
+ var res []spec.Parameter
+ for _, v := range bag {
+ res = append(res, v)
+ }
+
+ return res
+ }
+
+ for _, pi := range s.spec.Paths.Paths {
+ if pi.Get != nil && pi.Get.ID == operationID {
+ return gatherParams(&pi, pi.Get) //#nosec
+ }
+ if pi.Head != nil && pi.Head.ID == operationID {
+ return gatherParams(&pi, pi.Head) //#nosec
+ }
+ if pi.Options != nil && pi.Options.ID == operationID {
+ return gatherParams(&pi, pi.Options) //#nosec
+ }
+ if pi.Post != nil && pi.Post.ID == operationID {
+ return gatherParams(&pi, pi.Post) //#nosec
+ }
+ if pi.Patch != nil && pi.Patch.ID == operationID {
+ return gatherParams(&pi, pi.Patch) //#nosec
+ }
+ if pi.Put != nil && pi.Put.ID == operationID {
+ return gatherParams(&pi, pi.Put) //#nosec
+ }
+ if pi.Delete != nil && pi.Delete.ID == operationID {
+ return gatherParams(&pi, pi.Delete) //#nosec
+ }
+ }
+
+ return nil
+}
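+
+// Sketch: collecting parameters while tolerating broken $ref's; the callback
+// records errors instead of panicking (variable names are illustrative):
+//
+//	var errs []error
+//	params := an.SafeParametersFor("ping", func(p spec.Parameter, err error) bool {
+//	    errs = append(errs, err)
+//	    return true // true: skip this parameter and keep evaluating the rest
+//	})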
+
+// ParamsFor the specified method and path. Aggregates them with the defaults etc, so it's all the params that
+// apply for the method and path.
+//
+// Assumes parameters properly resolve references if any and that
+// such references actually resolve to a parameter object.
+// Otherwise, panics.
+func (s *Spec) ParamsFor(method, path string) map[string]spec.Parameter {
+ return s.SafeParamsFor(method, path, nil)
+}
+
+// SafeParamsFor the specified method and path. Aggregates them with the defaults etc, so it's all the params that
+// apply for the method and path.
+//
+// Does not assume parameters properly resolve references or that
+// such references actually resolve to a parameter object.
+//
+// Upon error, invoke an ErrorOnParamFunc callback with the erroneous
+// parameters. If the callback is set to nil, panics upon errors.
+func (s *Spec) SafeParamsFor(method, path string, callmeOnError ErrorOnParamFunc) map[string]spec.Parameter {
+ res := make(map[string]spec.Parameter)
+ if pi, ok := s.spec.Paths.Paths[path]; ok {
+ s.paramsAsMap(pi.Parameters, res, callmeOnError)
+ s.paramsAsMap(s.operations[strings.ToUpper(method)][path].Parameters, res, callmeOnError)
+ }
+
+ return res
+}
+
+// OperationForName gets the operation for the given id
+func (s *Spec) OperationForName(operationID string) (string, string, *spec.Operation, bool) {
+ for method, pathItem := range s.operations {
+ for path, op := range pathItem {
+ if operationID == op.ID {
+ return method, path, op, true
+ }
+ }
+ }
+
+ return "", "", nil, false
+}
+
+// OperationFor the given method and path
+func (s *Spec) OperationFor(method, path string) (*spec.Operation, bool) {
+ if mp, ok := s.operations[strings.ToUpper(method)]; ok {
+ op, fn := mp[path]
+
+ return op, fn
+ }
+
+ return nil, false
+}
+
+// Operations gathers all the operations specified in the spec document
+func (s *Spec) Operations() map[string]map[string]*spec.Operation {
+ return s.operations
+}
+
+func (s *Spec) structMapKeys(mp map[string]struct{}) []string {
+ if len(mp) == 0 {
+ return nil
+ }
+
+ result := make([]string, 0, len(mp))
+ for k := range mp {
+ result = append(result, k)
+ }
+
+ return result
+}
+
+// AllPaths returns all the paths in the swagger spec
+func (s *Spec) AllPaths() map[string]spec.PathItem {
+ if s.spec == nil || s.spec.Paths == nil {
+ return nil
+ }
+
+ return s.spec.Paths.Paths
+}
+
+// OperationIDs gets all the operation ids based on method and path
+func (s *Spec) OperationIDs() []string {
+ if len(s.operations) == 0 {
+ return nil
+ }
+
+ result := make([]string, 0, len(s.operations))
+ for method, v := range s.operations {
+ for p, o := range v {
+ if o.ID != "" {
+ result = append(result, o.ID)
+ } else {
+ result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
+ }
+ }
+ }
+
+ return result
+}
+
+// OperationMethodPaths gets all the method/path pairs for the operations in the spec
+func (s *Spec) OperationMethodPaths() []string {
+ if len(s.operations) == 0 {
+ return nil
+ }
+
+ result := make([]string, 0, len(s.operations))
+ for method, v := range s.operations {
+ for p := range v {
+ result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
+ }
+ }
+
+ return result
+}
+
+// RequiredConsumes gets all the distinct consumes that are specified in the specification document
+func (s *Spec) RequiredConsumes() []string {
+ return s.structMapKeys(s.consumes)
+}
+
+// RequiredProduces gets all the distinct produces that are specified in the specification document
+func (s *Spec) RequiredProduces() []string {
+ return s.structMapKeys(s.produces)
+}
+
+// RequiredSecuritySchemes gets all the distinct security schemes that are specified in the swagger spec
+func (s *Spec) RequiredSecuritySchemes() []string {
+ return s.structMapKeys(s.authSchemes)
+}
+
+// SchemaRef is a reference to a schema
+type SchemaRef struct {
+ Name string
+ Ref spec.Ref
+ Schema *spec.Schema
+ TopLevel bool
+}
+
+// SchemasWithAllOf returns schema references to all schemas that are defined
+// with an allOf key
+func (s *Spec) SchemasWithAllOf() (result []SchemaRef) {
+ for _, v := range s.allOfs {
+ result = append(result, v)
+ }
+
+ return
+}
+
+// AllDefinitions returns schema references for all the definitions that were discovered
+func (s *Spec) AllDefinitions() (result []SchemaRef) {
+ for _, v := range s.allSchemas {
+ result = append(result, v)
+ }
+
+ return
+}
+
+// AllDefinitionReferences returns json refs for all the discovered schemas
+func (s *Spec) AllDefinitionReferences() (result []string) {
+ for _, v := range s.references.schemas {
+ result = append(result, v.String())
+ }
+
+ return
+}
+
+// AllParameterReferences returns json refs for all the discovered parameters
+func (s *Spec) AllParameterReferences() (result []string) {
+ for _, v := range s.references.parameters {
+ result = append(result, v.String())
+ }
+
+ return
+}
+
+// AllResponseReferences returns json refs for all the discovered responses
+func (s *Spec) AllResponseReferences() (result []string) {
+ for _, v := range s.references.responses {
+ result = append(result, v.String())
+ }
+
+ return
+}
+
+// AllPathItemReferences returns the references for all the path items
+func (s *Spec) AllPathItemReferences() (result []string) {
+ for _, v := range s.references.pathItems {
+ result = append(result, v.String())
+ }
+
+ return
+}
+
+// AllItemsReferences returns the references for all the items in simple schemas (parameters or headers).
+//
+// NOTE: since Swagger 2.0 forbids $ref in simple params, this should always yield an empty slice for a valid
+// Swagger 2.0 spec.
+func (s *Spec) AllItemsReferences() (result []string) {
+ for _, v := range s.references.items {
+ result = append(result, v.String())
+ }
+
+ return
+}
+
+// AllReferences returns all the references found in the document, with possible duplicates
+func (s *Spec) AllReferences() (result []string) {
+ for _, v := range s.references.allRefs {
+ result = append(result, v.String())
+ }
+
+ return
+}
+
+// AllRefs returns all the unique references found in the document
+func (s *Spec) AllRefs() (result []spec.Ref) {
+ set := make(map[string]struct{})
+ for _, v := range s.references.allRefs {
+ a := v.String()
+ if a == "" {
+ continue
+ }
+
+ if _, ok := set[a]; !ok {
+ set[a] = struct{}{}
+ result = append(result, v)
+ }
+ }
+
+ return
+}
+
+func cloneStringMap(source map[string]string) map[string]string {
+ res := make(map[string]string, len(source))
+ for k, v := range source {
+ res[k] = v
+ }
+
+ return res
+}
+
+func cloneEnumMap(source map[string][]interface{}) map[string][]interface{} {
+ res := make(map[string][]interface{}, len(source))
+ for k, v := range source {
+ res[k] = v
+ }
+
+ return res
+}
+
+// ParameterPatterns returns all the patterns found in parameters
+// the map is cloned to avoid accidental changes
+func (s *Spec) ParameterPatterns() map[string]string {
+ return cloneStringMap(s.patterns.parameters)
+}
+
+// HeaderPatterns returns all the patterns found in response headers
+// the map is cloned to avoid accidental changes
+func (s *Spec) HeaderPatterns() map[string]string {
+ return cloneStringMap(s.patterns.headers)
+}
+
+// ItemsPatterns returns all the patterns found in simple array items
+// the map is cloned to avoid accidental changes
+func (s *Spec) ItemsPatterns() map[string]string {
+ return cloneStringMap(s.patterns.items)
+}
+
+// SchemaPatterns returns all the patterns found in schemas
+// the map is cloned to avoid accidental changes
+func (s *Spec) SchemaPatterns() map[string]string {
+ return cloneStringMap(s.patterns.schemas)
+}
+
+// AllPatterns returns all the patterns found in the spec
+// the map is cloned to avoid accidental changes
+func (s *Spec) AllPatterns() map[string]string {
+ return cloneStringMap(s.patterns.allPatterns)
+}
+
+// ParameterEnums returns all the enums found in parameters
+// the map is cloned to avoid accidental changes
+func (s *Spec) ParameterEnums() map[string][]interface{} {
+ return cloneEnumMap(s.enums.parameters)
+}
+
+// HeaderEnums returns all the enums found in response headers
+// the map is cloned to avoid accidental changes
+func (s *Spec) HeaderEnums() map[string][]interface{} {
+ return cloneEnumMap(s.enums.headers)
+}
+
+// ItemsEnums returns all the enums found in simple array items
+// the map is cloned to avoid accidental changes
+func (s *Spec) ItemsEnums() map[string][]interface{} {
+ return cloneEnumMap(s.enums.items)
+}
+
+// SchemaEnums returns all the enums found in schemas
+// the map is cloned to avoid accidental changes
+func (s *Spec) SchemaEnums() map[string][]interface{} {
+ return cloneEnumMap(s.enums.schemas)
+}
+
+// AllEnums returns all the enums found in the spec
+// the map is cloned to avoid accidental changes
+func (s *Spec) AllEnums() map[string][]interface{} {
+ return cloneEnumMap(s.enums.allEnums)
+}
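
A quick illustration of the accessors above, reusing the `an` analyzer built in the sketch following the README (the path and key shown are illustrative): `ParamsFor` aggregates path-level and operation-level parameters, while the pattern and enum accessors return cloned maps keyed by JSON pointer.

```go
// parameters that apply to GET /ping; keys look like "query#Limit"
// (parameter location + Go-ized name)
for key, param := range an.ParamsFor("GET", "/ping") {
	fmt.Printf("%s -> required=%t\n", key, param.Required)
}

// every pattern constraint found in the spec, keyed by JSON pointer
for ptr, pattern := range an.AllPatterns() {
	fmt.Printf("%s must match %q\n", ptr, pattern)
}
```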
diff --git a/vendor/github.com/go-openapi/analysis/appveyor.yml b/vendor/github.com/go-openapi/analysis/appveyor.yml
new file mode 100644
index 000000000..c2f6fd733
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/appveyor.yml
@@ -0,0 +1,32 @@
+version: "0.1.{build}"
+
+clone_folder: C:\go-openapi\analysis
+shallow_clone: true # for startup speed
+pull_requests:
+ do_not_increment_build_number: true
+
+#skip_tags: true
+#skip_branch_with_pr: true
+
+# appveyor.yml
+build: off
+
+environment:
+ GOPATH: c:\gopath
+
+stack: go 1.16
+
+test_script:
+ - go test -v -timeout 20m ./...
+
+deploy: off
+
+notifications:
+ - provider: Slack
+ incoming_webhook: https://hooks.slack.com/services/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
+ auth_token:
+ secure: Sf7kZf7ZGbnwWUMpffHwMu5A0cHkLK2MYY32LNTPj4+/3qC3Ghl7+9v4TSLOqOlCwdRNjOGblAq7s+GDJed6/xgRQl1JtCi1klzZNrYX4q01pgTPvvGcwbBkIYgeMaPeIRcK9OZnud7sRXdttozgTOpytps2U6Js32ip7uj5mHSg2ub0FwoSJwlS6dbezZ8+eDhoha0F/guY99BEwx8Bd+zROrT2TFGsSGOFGN6wFc7moCqTHO/YkWib13a2QNXqOxCCVBy/lt76Wp+JkeFppjHlzs/2lP3EAk13RIUAaesdEUHvIHrzCyNJEd3/+KO2DzsWOYfpktd+KBCvgaYOsoo7ubdT3IROeAegZdCgo/6xgCEsmFc9ZcqCfN5yNx2A+BZ2Vwmpws+bQ1E1+B5HDzzaiLcYfG4X2O210QVGVDLWsv1jqD+uPYeHY2WRfh5ZsIUFvaqgUEnwHwrK44/8REAhQavt1QAj5uJpsRd7CkRVPWRNK+yIky+wgbVUFEchRNmS55E7QWf+W4+4QZkQi7vUTMc9nbTUu2Es9NfvfudOpM2wZbn98fjpb/qq/nRv6Bk+ca+7XD5/IgNLMbWp2ouDdzbiHLCOfDUiHiDJhLfFZx9Bwo7ZwfzeOlbrQX66bx7xRKYmOe4DLrXhNcpbsMa8qbfxlZRCmYbubB/Y8h4=
+ channel: bots
+ on_build_success: false
+ on_build_failure: true
+ on_build_status_changed: true
diff --git a/vendor/github.com/go-openapi/analysis/debug.go b/vendor/github.com/go-openapi/analysis/debug.go
new file mode 100644
index 000000000..33c15704e
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/debug.go
@@ -0,0 +1,23 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analysis
+
+import (
+ "os"
+
+ "github.com/go-openapi/analysis/internal/debug"
+)
+
+var debugLog = debug.GetLogger("analysis", os.Getenv("SWAGGER_DEBUG") != "")
diff --git a/vendor/github.com/go-openapi/analysis/doc.go b/vendor/github.com/go-openapi/analysis/doc.go
new file mode 100644
index 000000000..d5294c095
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/doc.go
@@ -0,0 +1,43 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package analysis provides methods to work with a Swagger specification document from
+package go-openapi/spec.
+
+Analyzing a specification
+
+An analysed specification object (type Spec) provides methods to work with a swagger definition.
+
+Flattening or expanding a specification
+
+Flattening a specification bundles all remote $ref in the main spec document.
+Depending on flattening options, additional preprocessing may take place:
+ - full flattening: replacing all inline complex constructs by a named entry in #/definitions
+ - expand: replace all $ref's in the document by their expanded content
+
+Merging several specifications
+
+Mixing in several specifications merges all Swagger constructs and warns about conflicts found.
+
+Fixing a specification
+
+Unmarshalling a specification with golang's JSON unmarshalling may lead to
+unwanted results for fields that are present but empty.
+
+Analyzing a Swagger schema
+
+Swagger schemas are analyzed to determine their complexity and qualify their content.
+*/
+package analysis
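
doc.go above outlines the flattening and expansion modes implemented by flatten.go later in this change. A sketch of minimal flattening, assuming the sibling go-openapi/loads package to load the document (FlattenOpts is declared elsewhere in this package; only the fields referenced by flatten.go are set):

```go
package main

import (
	"log"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/loads"
)

func main() {
	doc, err := loads.Spec("swagger.yaml") // hypothetical input document
	if err != nil {
		log.Fatal(err)
	}

	opts := analysis.FlattenOpts{
		Spec:         analysis.New(doc.Spec()),
		BasePath:     doc.SpecFilePath(),
		Minimal:      true, // rework $ref's only; leave inline schemas untouched
		RemoveUnused: false,
	}
	if err := analysis.Flatten(opts); err != nil {
		log.Fatal(err)
	}
	// opts.Swagger() now points to the self-contained, flattened document
}
```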
diff --git a/vendor/github.com/go-openapi/analysis/fixer.go b/vendor/github.com/go-openapi/analysis/fixer.go
new file mode 100644
index 000000000..7c2ca0841
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/fixer.go
@@ -0,0 +1,79 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analysis
+
+import "github.com/go-openapi/spec"
+
+// FixEmptyResponseDescriptions replaces empty ("") response
+// descriptions in the input with "(empty)" to ensure that the
+// resulting Swagger stays valid. The problem arises when reading in
+// valid specs that have an explicit response description of ""
+// (valid, since response.description is required), because zero values
+// are omitted upon re-serializing (omitempty) and we would lose them
+// unless we put some characters in there.
+func FixEmptyResponseDescriptions(s *spec.Swagger) {
+ for k, v := range s.Responses {
+ FixEmptyDesc(&v) //#nosec
+ s.Responses[k] = v
+ }
+
+ if s.Paths == nil {
+ return
+ }
+
+ for _, v := range s.Paths.Paths {
+ if v.Get != nil {
+ FixEmptyDescs(v.Get.Responses)
+ }
+ if v.Put != nil {
+ FixEmptyDescs(v.Put.Responses)
+ }
+ if v.Post != nil {
+ FixEmptyDescs(v.Post.Responses)
+ }
+ if v.Delete != nil {
+ FixEmptyDescs(v.Delete.Responses)
+ }
+ if v.Options != nil {
+ FixEmptyDescs(v.Options.Responses)
+ }
+ if v.Head != nil {
+ FixEmptyDescs(v.Head.Responses)
+ }
+ if v.Patch != nil {
+ FixEmptyDescs(v.Patch.Responses)
+ }
+ }
+}
+
+// FixEmptyDescs adds "(empty)" as the description for any Response in
+// the given Responses object that doesn't already have one.
+func FixEmptyDescs(rs *spec.Responses) {
+ FixEmptyDesc(rs.Default)
+ for k, v := range rs.StatusCodeResponses {
+ FixEmptyDesc(&v) //#nosec
+ rs.StatusCodeResponses[k] = v
+ }
+}
+
+// FixEmptyDesc adds "(empty)" as the description to the given
+// Response object if it doesn't already have one and isn't a
+// ref. No-op on nil input.
+func FixEmptyDesc(rs *spec.Response) {
+ if rs == nil || rs.Description != "" || rs.Ref.Ref.GetURL() != nil {
+ return
+ }
+ rs.Description = "(empty)"
+}
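
Fixing a spec before re-serializing it, continuing with the `doc` from the flatten sketch earlier: after this call every response carries at least a "(empty)" description, so the marshalled output stays valid Swagger 2.0.

```go
analysis.FixEmptyResponseDescriptions(doc.Spec())

out, err := json.MarshalIndent(doc.Spec(), "", "  ")
if err != nil {
	log.Fatal(err)
}
fmt.Println(string(out))
```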
diff --git a/vendor/github.com/go-openapi/analysis/flatten.go b/vendor/github.com/go-openapi/analysis/flatten.go
new file mode 100644
index 000000000..0576220fb
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/flatten.go
@@ -0,0 +1,802 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analysis
+
+import (
+ "fmt"
+ "log"
+ "path"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/analysis/internal/flatten/normalize"
+ "github.com/go-openapi/analysis/internal/flatten/operations"
+ "github.com/go-openapi/analysis/internal/flatten/replace"
+ "github.com/go-openapi/analysis/internal/flatten/schutils"
+ "github.com/go-openapi/analysis/internal/flatten/sortref"
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/spec"
+)
+
+const definitionsPath = "#/definitions"
+
+// newRef stores information about refs created during the flattening process
+type newRef struct {
+ key string
+ newName string
+ path string
+ isOAIGen bool
+ resolved bool
+ schema *spec.Schema
+ parents []string
+}
+
+// context stores intermediary results from flatten
+type context struct {
+ newRefs map[string]*newRef
+ warnings []string
+ resolved map[string]string
+}
+
+func newContext() *context {
+ return &context{
+ newRefs: make(map[string]*newRef, 150),
+ warnings: make([]string, 0),
+ resolved: make(map[string]string, 50),
+ }
+}
+
+// Flatten an analyzed spec and produce a self-contained spec bundle.
+//
+// There is a minimal and a full flattening mode.
+//
+// Minimally flattening a spec means:
+// - Expanding parameters, responses, path items, parameter items and header items (references to schemas are left
+// unscathed)
+// - Importing external (http, file) references so they become internal to the document
+// - Moving every JSON pointer to a $ref to a named definition (i.e. the reworked spec does not contain pointers
+// like "$ref": "#/definitions/myObject/allOfs/1")
+//
+// A minimally flattened spec thus guarantees the following properties:
+// - all $refs point to a local definition (i.e. '#/definitions/...')
+// - definitions are unique
+//
+// NOTE: arbitrary JSON pointers (other than $refs to top level definitions) are rewritten as definitions if they
+// represent a complex schema or express commonality in the spec.
+// Otherwise, they are simply expanded.
+// Self-referencing JSON pointers cannot resolve to a type and trigger an error.
+//
+// Minimal flattening is necessary and sufficient for codegen rendering using go-swagger.
+//
+// Fully flattening a spec means:
+// - Moving every complex inline schema to be a definition with an auto-generated name in a depth-first fashion.
+//
+// By complex, we mean every JSON object with some properties.
+// Arrays, when they do not define a tuple,
+// or empty objects with or without additionalProperties, are not considered complex and remain inline.
+//
+// NOTE: rewritten schemas get a vendor extension x-go-gen-location so we know from which part of the spec definitions
+// have been created.
+//
+// Available flattening options:
+// - Minimal: stops flattening after minimal $ref processing, leaving schema constructs untouched
+// - Expand: expand all $ref's in the document (ineffective if Minimal is set to true)
+// - Verbose: croaks about name conflicts detected
+// - RemoveUnused: removes unused parameters, responses and definitions after expansion/flattening
+//
+// NOTE: expansion removes all $ref save circular $ref, which remain in place
+//
+// TODO: additional options
+// - PropagateNameExtensions: ensure that created entries properly follow naming rules when their parent has set an
+// x-go-name extension
+// - LiftAllOfs:
+// - limit the flattening of allOf members when simple objects
+// - merge allOf with validation only
+// - merge allOf with extensions only
+// - ...
+//
+func Flatten(opts FlattenOpts) error {
+ debugLog("FlattenOpts: %#v", opts)
+
+ opts.flattenContext = newContext()
+
+ // 1. Recursively expand responses, parameters, path items and items in simple schemas.
+ //
+ // This simplifies the spec and leaves only the $ref's in schema objects.
+ if err := expand(&opts); err != nil {
+ return err
+ }
+
+ // 2. Strip the current document from absolute $ref's that actually resolve within the root document,
+ // so we can recognize them as proper definitions
+ //
+ // In particular, this works around issue go-openapi/spec#76: leading absolute file in $ref is stripped
+ if err := normalizeRef(&opts); err != nil {
+ return err
+ }
+
+ // 3. Optionally remove shared parameters and responses already expanded (now unused).
+ //
+ // Operation parameters (i.e. under paths) remain.
+ if opts.RemoveUnused {
+ removeUnusedShared(&opts)
+ }
+
+ // 4. Import all remote references.
+ if err := importReferences(&opts); err != nil {
+ return err
+ }
+
+ // 5. full flattening: rewrite inline schemas (schemas that aren't simple types or arrays or maps)
+ if !opts.Minimal && !opts.Expand {
+ if err := nameInlinedSchemas(&opts); err != nil {
+ return err
+ }
+ }
+
+ // 6. Rewrite JSON pointers other than $ref to named definitions
+ // and attempt to resolve conflicting names whenever possible.
+ if err := stripPointersAndOAIGen(&opts); err != nil {
+ return err
+ }
+
+ // 7. Strip the spec from unused definitions
+ if opts.RemoveUnused {
+ removeUnused(&opts)
+ }
+
+ // 8. Issue warning notifications, if any
+ opts.croak()
+
+ // TODO: simplify known schema patterns to flat objects with properties
+ // examples:
+ // - lift simple allOf object,
+ // - empty allOf with validation only or extensions only
+ // - rework allOf arrays
+ // - rework allOf additionalProperties
+
+ return nil
+}
+
+func expand(opts *FlattenOpts) error {
+ if err := spec.ExpandSpec(opts.Swagger(), opts.ExpandOpts(!opts.Expand)); err != nil {
+ return err
+ }
+
+ opts.Spec.reload() // re-analyze
+
+ return nil
+}
+
+// normalizeRef strips the current file from any absolute file $ref. This works around issue go-openapi/spec#76:
+// leading absolute file in $ref is stripped
+func normalizeRef(opts *FlattenOpts) error {
+ debugLog("normalizeRef")
+
+ altered := false
+ for k, w := range opts.Spec.references.allRefs {
+ if !strings.HasPrefix(w.String(), opts.BasePath+definitionsPath) { // may be a mix of / and \, depending on OS
+ continue
+ }
+
+ altered = true
+ debugLog("stripping absolute path for: %s", w.String())
+
+ // strip the base path from definition
+ if err := replace.UpdateRef(opts.Swagger(), k,
+ spec.MustCreateRef(path.Join(definitionsPath, path.Base(w.String())))); err != nil {
+ return err
+ }
+ }
+
+ if altered {
+ opts.Spec.reload() // re-analyze
+ }
+
+ return nil
+}
+
+func removeUnusedShared(opts *FlattenOpts) {
+ opts.Swagger().Parameters = nil
+ opts.Swagger().Responses = nil
+
+ opts.Spec.reload() // re-analyze
+}
+
+func importReferences(opts *FlattenOpts) error {
+ var (
+ imported bool
+ err error
+ )
+
+ for !imported && err == nil {
+ // iteratively import remote references until none left.
+ // This inlining deals with name conflicts by introducing auto-generated names ("OAIGen")
+ imported, err = importExternalReferences(opts)
+
+ opts.Spec.reload() // re-analyze
+ }
+
+ return err
+}
+
+// nameInlinedSchemas replaces every complex inline construct by a named definition.
+func nameInlinedSchemas(opts *FlattenOpts) error {
+ debugLog("nameInlinedSchemas")
+
+ namer := &InlineSchemaNamer{
+ Spec: opts.Swagger(),
+ Operations: operations.AllOpRefsByRef(opts.Spec, nil),
+ flattenContext: opts.flattenContext,
+ opts: opts,
+ }
+
+ depthFirst := sortref.DepthFirst(opts.Spec.allSchemas)
+ for _, key := range depthFirst {
+ sch := opts.Spec.allSchemas[key]
+ if sch.Schema == nil || sch.Schema.Ref.String() != "" || sch.TopLevel {
+ continue
+ }
+
+ asch, err := Schema(SchemaOpts{Schema: sch.Schema, Root: opts.Swagger(), BasePath: opts.BasePath})
+ if err != nil {
+ return fmt.Errorf("schema analysis [%s]: %w", key, err)
+ }
+
+ if asch.isAnalyzedAsComplex() { // move complex schemas to definitions
+ if err := namer.Name(key, sch.Schema, asch); err != nil {
+ return err
+ }
+ }
+ }
+
+ opts.Spec.reload() // re-analyze
+
+ return nil
+}
+
+func removeUnused(opts *FlattenOpts) {
+ expected := make(map[string]struct{})
+ for k := range opts.Swagger().Definitions {
+ expected[path.Join(definitionsPath, jsonpointer.Escape(k))] = struct{}{}
+ }
+
+ for _, k := range opts.Spec.AllDefinitionReferences() {
+ delete(expected, k)
+ }
+
+ for k := range expected {
+ debugLog("removing unused definition %s", path.Base(k))
+ if opts.Verbose {
+ log.Printf("info: removing unused definition: %s", path.Base(k))
+ }
+ delete(opts.Swagger().Definitions, path.Base(k))
+ }
+
+ opts.Spec.reload() // re-analyze
+}
+
+func importKnownRef(entry sortref.RefRevIdx, refStr, newName string, opts *FlattenOpts) error {
+ // rewrite ref with already resolved external ref (useful for cyclical refs):
+ // rewrite external refs to local ones
+ debugLog("resolving known ref [%s] to %s", refStr, newName)
+
+ for _, key := range entry.Keys {
+ if err := replace.UpdateRef(opts.Swagger(), key, spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func importNewRef(entry sortref.RefRevIdx, refStr string, opts *FlattenOpts) error {
+ var (
+ isOAIGen bool
+ newName string
+ )
+
+ debugLog("resolving schema from remote $ref [%s]", refStr)
+
+ sch, err := spec.ResolveRefWithBase(opts.Swagger(), &entry.Ref, opts.ExpandOpts(false))
+ if err != nil {
+ return fmt.Errorf("could not resolve schema: %w", err)
+ }
+
+ // at this stage only $ref analysis matters
+ partialAnalyzer := &Spec{
+ references: referenceAnalysis{},
+ patterns: patternAnalysis{},
+ enums: enumAnalysis{},
+ }
+ partialAnalyzer.reset()
+ partialAnalyzer.analyzeSchema("", sch, "/")
+
+ // now rewrite those refs with rebase
+ for key, ref := range partialAnalyzer.references.allRefs {
+ if err := replace.UpdateRef(sch, key, spec.MustCreateRef(normalize.RebaseRef(entry.Ref.String(), ref.String()))); err != nil {
+ return fmt.Errorf("failed to rewrite ref for key %q at %s: %w", key, entry.Ref.String(), err)
+ }
+ }
+
+ // generate a unique name - isOAIGen means that a naming conflict was resolved by changing the name
+ newName, isOAIGen = uniqifyName(opts.Swagger().Definitions, nameFromRef(entry.Ref))
+ debugLog("new name for [%s]: %s - with name conflict:%t", strings.Join(entry.Keys, ", "), newName, isOAIGen)
+
+ opts.flattenContext.resolved[refStr] = newName
+
+ // rewrite the external refs to local ones
+ for _, key := range entry.Keys {
+ if err := replace.UpdateRef(opts.Swagger(), key,
+ spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
+ return err
+ }
+
+ // keep track of created refs
+ resolved := false
+ if _, ok := opts.flattenContext.newRefs[key]; ok {
+ resolved = opts.flattenContext.newRefs[key].resolved
+ }
+
+ debugLog("keeping track of ref: %s (%s), resolved: %t", key, newName, resolved)
+ opts.flattenContext.newRefs[key] = &newRef{
+ key: key,
+ newName: newName,
+ path: path.Join(definitionsPath, newName),
+ isOAIGen: isOAIGen,
+ resolved: resolved,
+ schema: sch,
+ }
+ }
+
+ // add the resolved schema to the definitions
+ schutils.Save(opts.Swagger(), newName, sch)
+
+ return nil
+}
+
+// importExternalReferences iteratively digs remote references and imports them into the main schema.
+//
+// At every iteration, new remotes may be found when digging deeper: they are rebased to the current schema before being imported.
+//
+// This returns true when no more remote references can be found.
+func importExternalReferences(opts *FlattenOpts) (bool, error) {
+ debugLog("importExternalReferences")
+
+ groupedRefs := sortref.ReverseIndex(opts.Spec.references.schemas, opts.BasePath)
+ sortedRefStr := make([]string, 0, len(groupedRefs))
+ if opts.flattenContext == nil {
+ opts.flattenContext = newContext()
+ }
+
+ // sort $ref resolution to ensure deterministic name conflict resolution
+ for refStr := range groupedRefs {
+ sortedRefStr = append(sortedRefStr, refStr)
+ }
+ sort.Strings(sortedRefStr)
+
+ complete := true
+
+ for _, refStr := range sortedRefStr {
+ entry := groupedRefs[refStr]
+ if entry.Ref.HasFragmentOnly {
+ continue
+ }
+
+ complete = false
+
+ newName := opts.flattenContext.resolved[refStr]
+ if newName != "" {
+ if err := importKnownRef(entry, refStr, newName, opts); err != nil {
+ return false, err
+ }
+
+ continue
+ }
+
+ // resolve schemas
+ if err := importNewRef(entry, refStr, opts); err != nil {
+ return false, err
+ }
+ }
+
+ // maintain ref index entries
+ for k := range opts.flattenContext.newRefs {
+ r := opts.flattenContext.newRefs[k]
+
+ // update tracking with resolved schemas
+ if r.schema.Ref.String() != "" {
+ ref := spec.MustCreateRef(r.path)
+ sch, err := spec.ResolveRefWithBase(opts.Swagger(), &ref, opts.ExpandOpts(false))
+ if err != nil {
+ return false, fmt.Errorf("could not resolve schema: %w", err)
+ }
+
+ r.schema = sch
+ }
+
+ if r.path == k {
+ continue
+ }
+
+ // update tracking with renamed keys: got a cascade of refs
+ renamed := *r
+ renamed.key = r.path
+ opts.flattenContext.newRefs[renamed.path] = &renamed
+
+ // indirect ref
+ r.newName = path.Base(k)
+ r.schema = spec.RefSchema(r.path)
+ r.path = k
+ r.isOAIGen = strings.Contains(k, "OAIGen")
+ }
+
+ return complete, nil
+}
+
+// stripPointersAndOAIGen removes anonymous JSON pointers from the spec and chains this with the name-conflict handler.
+// This loops until the spec has no such pointer and all name conflicts have been reduced as much as possible.
+func stripPointersAndOAIGen(opts *FlattenOpts) error {
+ // name all JSON pointers to anonymous documents
+ if err := namePointers(opts); err != nil {
+ return err
+ }
+
+ // remove unnecessary OAIGen ref (created when flattening external refs creates name conflicts)
+ hasIntroducedPointerOrInline, ers := stripOAIGen(opts)
+ if ers != nil {
+ return ers
+ }
+
+ // iterate as pointer or OAIGen resolution may introduce inline schemas or pointers
+ for hasIntroducedPointerOrInline {
+ if !opts.Minimal {
+ opts.Spec.reload() // re-analyze
+ if err := nameInlinedSchemas(opts); err != nil {
+ return err
+ }
+ }
+
+ if err := namePointers(opts); err != nil {
+ return err
+ }
+
+ // restrip and re-analyze
+ var err error
+ if hasIntroducedPointerOrInline, err = stripOAIGen(opts); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// stripOAIGen strips the spec from unnecessary OAIGen constructs, initially created to dedupe flattened definitions.
+//
+// A dedupe is deemed unnecessary whenever:
+// - the only conflict is with its (single) parent: OAIGen is merged into its parent (reinlining)
+// - there is a conflict with multiple parents: merge OAIGen into the first parent, then rewrite the other parents to
+// point to the first parent.
+//
+// This function returns true whenever it re-inlined a complex schema, so the caller may choose to iterate
+// pointer and name resolution again.
+func stripOAIGen(opts *FlattenOpts) (bool, error) {
+ debugLog("stripOAIGen")
+ replacedWithComplex := false
+
+ // figure out the referrers of OAIGen definitions (doing this before the refs start mutating)
+ for _, r := range opts.flattenContext.newRefs {
+ updateRefParents(opts.Spec.references.allRefs, r)
+ }
+
+ for k := range opts.flattenContext.newRefs {
+ r := opts.flattenContext.newRefs[k]
+ debugLog("newRefs[%s]: isOAIGen: %t, resolved: %t, name: %s, path:%s, #parents: %d, parents: %v, ref: %s",
+ k, r.isOAIGen, r.resolved, r.newName, r.path, len(r.parents), r.parents, r.schema.Ref.String())
+
+ if !r.isOAIGen || len(r.parents) == 0 {
+ continue
+ }
+
+ hasReplacedWithComplex, err := stripOAIGenForRef(opts, k, r)
+ if err != nil {
+ return replacedWithComplex, err
+ }
+
+ replacedWithComplex = replacedWithComplex || hasReplacedWithComplex
+ }
+
+ debugLog("replacedWithComplex: %t", replacedWithComplex)
+ opts.Spec.reload() // re-analyze
+
+ return replacedWithComplex, nil
+}
+
+// updateRefParents updates all parents of an updated $ref
+func updateRefParents(allRefs map[string]spec.Ref, r *newRef) {
+ if !r.isOAIGen || r.resolved { // bail on already resolved entries (avoid looping)
+ return
+ }
+ for k, v := range allRefs {
+ if r.path != v.String() {
+ continue
+ }
+
+ found := false
+ for _, p := range r.parents {
+ if p == k {
+ found = true
+
+ break
+ }
+ }
+ if !found {
+ r.parents = append(r.parents, k)
+ }
+ }
+}
+
+func stripOAIGenForRef(opts *FlattenOpts, k string, r *newRef) (bool, error) {
+ replacedWithComplex := false
+
+ pr := sortref.TopmostFirst(r.parents)
+
+ // rewrite first parent schema in hierarchical then lexicographical order
+ debugLog("rewrite first parent %s with schema", pr[0])
+ if err := replace.UpdateRefWithSchema(opts.Swagger(), pr[0], r.schema); err != nil {
+ return false, err
+ }
+
+ if pa, ok := opts.flattenContext.newRefs[pr[0]]; ok && pa.isOAIGen {
+ // update parent in ref index entry
+ debugLog("update parent entry: %s", pr[0])
+ pa.schema = r.schema
+ pa.resolved = false
+ replacedWithComplex = true
+ }
+
+ // rewrite other parents to point to first parent
+ if len(pr) > 1 {
+ for _, p := range pr[1:] {
+ replacingRef := spec.MustCreateRef(pr[0])
+
+ // set complex when the replacing ref is an anonymous JSON pointer: further processing may be required
+ replacedWithComplex = replacedWithComplex || path.Dir(replacingRef.String()) != definitionsPath
+ debugLog("rewrite parent with ref: %s", replacingRef.String())
+
+ // NOTE: it is possible at this stage to introduce json pointers (to non-definitions places).
+ // Those are stripped later on.
+ if err := replace.UpdateRef(opts.Swagger(), p, replacingRef); err != nil {
+ return false, err
+ }
+
+ if pa, ok := opts.flattenContext.newRefs[p]; ok && pa.isOAIGen {
+ // update parent in ref index
+ debugLog("update parent entry: %s", p)
+ pa.schema = r.schema
+ pa.resolved = false
+ replacedWithComplex = true
+ }
+ }
+ }
+
+ // remove OAIGen definition
+ debugLog("removing definition %s", path.Base(r.path))
+ delete(opts.Swagger().Definitions, path.Base(r.path))
+
+ // propagate changes in ref index for keys which have this one as a parent
+ for kk, value := range opts.flattenContext.newRefs {
+ if kk == k || !value.isOAIGen || value.resolved {
+ continue
+ }
+
+ found := false
+ newParents := make([]string, 0, len(value.parents))
+ for _, parent := range value.parents {
+ switch {
+ case parent == r.path:
+ found = true
+ parent = pr[0]
+ case strings.HasPrefix(parent, r.path+"/"):
+ found = true
+ parent = path.Join(pr[0], strings.TrimPrefix(parent, r.path))
+ }
+
+ newParents = append(newParents, parent)
+ }
+
+ if found {
+ value.parents = newParents
+ }
+ }
+
+ // mark naming conflict as resolved
+ debugLog("marking naming conflict resolved for key: %s", r.key)
+ opts.flattenContext.newRefs[r.key].isOAIGen = false
+ opts.flattenContext.newRefs[r.key].resolved = true
+
+ // determine if the previous substitution did inline a complex schema
+ if r.schema != nil && r.schema.Ref.String() == "" { // inline schema
+ asch, err := Schema(SchemaOpts{Schema: r.schema, Root: opts.Swagger(), BasePath: opts.BasePath})
+ if err != nil {
+ return false, err
+ }
+
+ debugLog("re-inlined schema: parent: %s, %t", pr[0], asch.isAnalyzedAsComplex())
+ replacedWithComplex = replacedWithComplex || (path.Dir(pr[0]) != definitionsPath && asch.isAnalyzedAsComplex())
+ }
+
+ return replacedWithComplex, nil
+}
+
+// namePointers replaces all JSON pointers to anonymous documents with a $ref to new named definitions.
+//
+// This is carried out depth-first. Pointers to $refs which are top-level definitions are replaced by the $ref itself.
+// Pointers to simple types are expanded, unless they express commonality (i.e. several such $ref are used).
+func namePointers(opts *FlattenOpts) error {
+ debugLog("name pointers")
+
+ refsToReplace := make(map[string]SchemaRef, len(opts.Spec.references.schemas))
+ for k, ref := range opts.Spec.references.allRefs {
+ if path.Dir(ref.String()) == definitionsPath {
+ // this is a ref to a top-level definition: ok
+ continue
+ }
+
+ result, err := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), ref)
+ if err != nil {
+ return fmt.Errorf("at %s, %w", k, err)
+ }
+
+ replacingRef := result.Ref
+ sch := result.Schema
+ if opts.flattenContext != nil {
+ opts.flattenContext.warnings = append(opts.flattenContext.warnings, result.Warnings...)
+ }
+
+ debugLog("planning pointer to replace at %s: %s, resolved to: %s", k, ref.String(), replacingRef.String())
+ refsToReplace[k] = SchemaRef{
+ Name: k, // caller
+ Ref: replacingRef, // called
+ Schema: sch,
+ TopLevel: path.Dir(replacingRef.String()) == definitionsPath,
+ }
+ }
+
+ depthFirst := sortref.DepthFirst(refsToReplace)
+ namer := &InlineSchemaNamer{
+ Spec: opts.Swagger(),
+ Operations: operations.AllOpRefsByRef(opts.Spec, nil),
+ flattenContext: opts.flattenContext,
+ opts: opts,
+ }
+
+ for _, key := range depthFirst {
+ v := refsToReplace[key]
+ // update current replacement, which may have been updated by previous changes of deeper elements
+ result, erd := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), v.Ref)
+ if erd != nil {
+ return fmt.Errorf("at %s, %w", key, erd)
+ }
+
+ if opts.flattenContext != nil {
+ opts.flattenContext.warnings = append(opts.flattenContext.warnings, result.Warnings...)
+ }
+
+ v.Ref = result.Ref
+ v.Schema = result.Schema
+ v.TopLevel = path.Dir(result.Ref.String()) == definitionsPath
+ debugLog("replacing pointer at %s: resolved to: %s", key, v.Ref.String())
+
+ if v.TopLevel {
+ debugLog("replace pointer %s by canonical definition: %s", key, v.Ref.String())
+
+ // if the schema is a $ref to a top level definition, just rewrite the pointer to this $ref
+ if err := replace.UpdateRef(opts.Swagger(), key, v.Ref); err != nil {
+ return err
+ }
+
+ continue
+ }
+
+ if err := flattenAnonPointer(key, v, refsToReplace, namer, opts); err != nil {
+ return err
+ }
+ }
+
+ opts.Spec.reload() // re-analyze
+
+ return nil
+}
+
+func flattenAnonPointer(key string, v SchemaRef, refsToReplace map[string]SchemaRef, namer *InlineSchemaNamer, opts *FlattenOpts) error {
+ // this is a JSON pointer to an anonymous document (internal or external):
+ // create a definition for this schema when:
+ // - it is a complex schema
+ // - or it is pointed to by more than one $ref (i.e. it expresses commonality)
+ // otherwise, expand the pointer (single reference to a simple type)
+ //
+ // The named definition for this follows the target's key, not the caller's
+ debugLog("namePointers at %s for %s", key, v.Ref.String())
+
+ // qualify the expanded schema
+ asch, ers := Schema(SchemaOpts{Schema: v.Schema, Root: opts.Swagger(), BasePath: opts.BasePath})
+ if ers != nil {
+ return fmt.Errorf("schema analysis [%s]: %w", key, ers)
+ }
+ callers := make([]string, 0, 64)
+
+ debugLog("looking for callers")
+
+ an := New(opts.Swagger())
+ for k, w := range an.references.allRefs {
+ r, err := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), w)
+ if err != nil {
+ return fmt.Errorf("at %s, %w", key, err)
+ }
+
+ if opts.flattenContext != nil {
+ opts.flattenContext.warnings = append(opts.flattenContext.warnings, r.Warnings...)
+ }
+
+ if r.Ref.String() == v.Ref.String() {
+ callers = append(callers, k)
+ }
+ }
+
+ debugLog("callers for %s: %d", v.Ref.String(), len(callers))
+ if len(callers) == 0 {
+ // has already been updated and resolved
+ return nil
+ }
+
+ parts := sortref.KeyParts(v.Ref.String())
+ debugLog("number of callers for %s: %d", v.Ref.String(), len(callers))
+
+ // identify the edge case when the namer does nothing because we point to a non-schema object:
+ // no definition is created and we expand the $ref for all callers
+ if (!asch.IsSimpleSchema || len(callers) > 1) && !parts.IsSharedParam() && !parts.IsSharedResponse() {
+ debugLog("replace JSON pointer at [%s] by definition: %s", key, v.Ref.String())
+ if err := namer.Name(v.Ref.String(), v.Schema, asch); err != nil {
+ return err
+ }
+
+ // regular case: we named the $ref as a definition, and we move all callers to this new $ref
+ for _, caller := range callers {
+ if caller == key {
+ continue
+ }
+
+ // move $ref for next to resolve
+ debugLog("identified caller of %s at [%s]", v.Ref.String(), caller)
+ c := refsToReplace[caller]
+ c.Ref = v.Ref
+ refsToReplace[caller] = c
+ }
+
+ return nil
+ }
+
+ debugLog("expand JSON pointer for key=%s", key)
+
+ if err := replace.UpdateRefWithSchema(opts.Swagger(), key, v.Schema); err != nil {
+ return err
+ }
+ // NOTE: there is no other caller to update
+
+ return nil
+}
diff --git a/vendor/github.com/go-openapi/analysis/flatten_name.go b/vendor/github.com/go-openapi/analysis/flatten_name.go
new file mode 100644
index 000000000..3ad2ccfbf
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/flatten_name.go
@@ -0,0 +1,293 @@
+package analysis
+
+import (
+ "fmt"
+ "path"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/analysis/internal/flatten/operations"
+ "github.com/go-openapi/analysis/internal/flatten/replace"
+ "github.com/go-openapi/analysis/internal/flatten/schutils"
+ "github.com/go-openapi/analysis/internal/flatten/sortref"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+// InlineSchemaNamer finds a new name for an inlined type
+type InlineSchemaNamer struct {
+ Spec *spec.Swagger
+ Operations map[string]operations.OpRef
+ flattenContext *context
+ opts *FlattenOpts
+}
+
+// Name yields a new name for the inline schema
+func (isn *InlineSchemaNamer) Name(key string, schema *spec.Schema, aschema *AnalyzedSchema) error {
+ debugLog("naming inlined schema at %s", key)
+
+ parts := sortref.KeyParts(key)
+ for _, name := range namesFromKey(parts, aschema, isn.Operations) {
+ if name == "" {
+ continue
+ }
+
+ // create unique name
+ newName, isOAIGen := uniqifyName(isn.Spec.Definitions, swag.ToJSONName(name))
+
+ // clone schema
+ sch := schutils.Clone(schema)
+
+ // replace values on schema
+ if err := replace.RewriteSchemaToRef(isn.Spec, key,
+ spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
+ return fmt.Errorf("error while creating definition %q from inline schema: %w", newName, err)
+ }
+
+ // rewrite any dependent $ref pointing to this place,
+ // when not already pointing to a top-level definition.
+ //
+ // NOTE: this is important if such referrers use arbitrary JSON pointers.
+ an := New(isn.Spec)
+ for k, v := range an.references.allRefs {
+ r, erd := replace.DeepestRef(isn.opts.Swagger(), isn.opts.ExpandOpts(false), v)
+ if erd != nil {
+ return fmt.Errorf("at %s, %w", k, erd)
+ }
+
+ if isn.opts.flattenContext != nil {
+ isn.opts.flattenContext.warnings = append(isn.opts.flattenContext.warnings, r.Warnings...)
+ }
+
+ if r.Ref.String() != key && (r.Ref.String() != path.Join(definitionsPath, newName) || path.Dir(v.String()) == definitionsPath) {
+ continue
+ }
+
+ debugLog("found a $ref to a rewritten schema: %s points to %s", k, v.String())
+
+ // rewrite $ref to the new target
+ if err := replace.UpdateRef(isn.Spec, k,
+ spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
+ return err
+ }
+ }
+
+ // NOTE: this extension is currently not used by go-swagger (provided for information only)
+ sch.AddExtension("x-go-gen-location", GenLocation(parts))
+
+ // save cloned schema to definitions
+ schutils.Save(isn.Spec, newName, sch)
+
+ // keep track of created refs
+ if isn.flattenContext == nil {
+ continue
+ }
+
+ debugLog("track created ref: key=%s, newName=%s, isOAIGen=%t", key, newName, isOAIGen)
+ resolved := false
+
+ if _, ok := isn.flattenContext.newRefs[key]; ok {
+ resolved = isn.flattenContext.newRefs[key].resolved
+ }
+
+ isn.flattenContext.newRefs[key] = &newRef{
+ key: key,
+ newName: newName,
+ path: path.Join(definitionsPath, newName),
+ isOAIGen: isOAIGen,
+ resolved: resolved,
+ schema: sch,
+ }
+ }
+
+ return nil
+}
+
+// uniqifyName yields a unique name for a definition
+func uniqifyName(definitions spec.Definitions, name string) (string, bool) {
+ isOAIGen := false
+ if name == "" {
+ name = "oaiGen"
+ isOAIGen = true
+ }
+
+ if len(definitions) == 0 {
+ return name, isOAIGen
+ }
+
+ unq := true
+ for k := range definitions {
+ if strings.EqualFold(k, name) {
+ unq = false
+
+ break
+ }
+ }
+
+ if unq {
+ return name, isOAIGen
+ }
+
+ name += "OAIGen"
+ isOAIGen = true
+ var idx int
+ unique := name
+ _, known := definitions[unique]
+
+ for known {
+ idx++
+ unique = fmt.Sprintf("%s%d", name, idx)
+ _, known = definitions[unique]
+ }
+
+ return unique, isOAIGen
+}
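+
+// Illustrative note (editorial sketch, not part of the upstream source): assuming
+// definitions that already contain a "user" entry, uniqifyName deduplicates by
+// appending the OAIGen marker and flags generated names:
+//
+//	defs := spec.Definitions{"user": spec.Schema{}}
+//	name, generated := uniqifyName(defs, "user")   // "userOAIGen", true
+//	name, generated = uniqifyName(defs, "profile") // "profile", false
+//	name, generated = uniqifyName(defs, "")        // "oaiGen", true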
+
+func namesFromKey(parts sortref.SplitKey, aschema *AnalyzedSchema, operations map[string]operations.OpRef) []string {
+ var (
+ baseNames [][]string
+ startIndex int
+ )
+
+ if parts.IsOperation() {
+ baseNames, startIndex = namesForOperation(parts, operations)
+ }
+
+ // definitions
+ if parts.IsDefinition() {
+ baseNames, startIndex = namesForDefinition(parts)
+ }
+
+ result := make([]string, 0, len(baseNames))
+ for _, segments := range baseNames {
+ nm := parts.BuildName(segments, startIndex, partAdder(aschema))
+ if nm == "" {
+ continue
+ }
+
+ result = append(result, nm)
+ }
+ sort.Strings(result)
+
+ return result
+}
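+
+// Illustrative note (editorial sketch, not part of the upstream source): for a key
+// such as "#/paths/~1pets/get/parameters/0/schema" owned by an operation with ID
+// "getPets", this would typically yield the base name "getPets params body", which
+// the namer then turns into the definition name "getPetsParamsBody".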
+
+func namesForParam(parts sortref.SplitKey, operations map[string]operations.OpRef) ([][]string, int) {
+ var (
+ baseNames [][]string
+ startIndex int
+ )
+
+ piref := parts.PathItemRef()
+ if piref.String() != "" && parts.IsOperationParam() {
+ if op, ok := operations[piref.String()]; ok {
+ startIndex = 5
+ baseNames = append(baseNames, []string{op.ID, "params", "body"})
+ }
+ } else if parts.IsSharedOperationParam() {
+ pref := parts.PathRef()
+ for k, v := range operations {
+ if strings.HasPrefix(k, pref.String()) {
+ startIndex = 4
+ baseNames = append(baseNames, []string{v.ID, "params", "body"})
+ }
+ }
+ }
+
+ return baseNames, startIndex
+}
+
+func namesForOperation(parts sortref.SplitKey, operations map[string]operations.OpRef) ([][]string, int) {
+ var (
+ baseNames [][]string
+ startIndex int
+ )
+
+ // params
+ if parts.IsOperationParam() || parts.IsSharedOperationParam() {
+ baseNames, startIndex = namesForParam(parts, operations)
+ }
+
+ // responses
+ if parts.IsOperationResponse() {
+ piref := parts.PathItemRef()
+ if piref.String() != "" {
+ if op, ok := operations[piref.String()]; ok {
+ startIndex = 6
+ baseNames = append(baseNames, []string{op.ID, parts.ResponseName(), "body"})
+ }
+ }
+ }
+
+ return baseNames, startIndex
+}
+
+func namesForDefinition(parts sortref.SplitKey) ([][]string, int) {
+ nm := parts.DefinitionName()
+ if nm != "" {
+ return [][]string{{parts.DefinitionName()}}, 2
+ }
+
+ return [][]string{}, 0
+}
+
+// partAdder knows how to interpret a schema when it comes to building a name from parts
+func partAdder(aschema *AnalyzedSchema) sortref.PartAdder {
+ return func(part string) []string {
+ segments := make([]string, 0, 2)
+
+ if part == "items" || part == "additionalItems" {
+ if aschema.IsTuple || aschema.IsTupleWithExtra {
+ segments = append(segments, "tuple")
+ } else {
+ segments = append(segments, "items")
+ }
+
+ if part == "additionalItems" {
+ segments = append(segments, part)
+ }
+
+ return segments
+ }
+
+ segments = append(segments, part)
+
+ return segments
+ }
+}
+
+func nameFromRef(ref spec.Ref) string {
+ u := ref.GetURL()
+ if u.Fragment != "" {
+ return swag.ToJSONName(path.Base(u.Fragment))
+ }
+
+ if u.Path != "" {
+ bn := path.Base(u.Path)
+ if bn != "" && bn != "/" {
+ ext := path.Ext(bn)
+ if ext != "" {
+ return swag.ToJSONName(bn[:len(bn)-len(ext)])
+ }
+
+ return swag.ToJSONName(bn)
+ }
+ }
+
+ return swag.ToJSONName(strings.ReplaceAll(u.Host, ".", " "))
+}
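+
+// Illustrative note (editorial sketch, not part of the upstream source):
+//
+//	nameFromRef(spec.MustCreateRef("#/definitions/UserProfile")) // "userProfile"
+//	nameFromRef(spec.MustCreateRef("models/pet.yaml"))           // "pet" (extension stripped)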
+
+// GenLocation indicates from which section of the specification (models or operations) a definition has been created.
+//
+// This is reflected in the output spec with an "x-go-gen-location" extension. At the moment, this is provided
+// for information only.
+func GenLocation(parts sortref.SplitKey) string {
+ switch {
+ case parts.IsOperation():
+ return "operations"
+ case parts.IsDefinition():
+ return "models"
+ default:
+ return ""
+ }
+}
diff --git a/vendor/github.com/go-openapi/analysis/flatten_options.go b/vendor/github.com/go-openapi/analysis/flatten_options.go
new file mode 100644
index 000000000..c5bb97b0a
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/flatten_options.go
@@ -0,0 +1,78 @@
+package analysis
+
+import (
+ "log"
+
+ "github.com/go-openapi/spec"
+)
+
+// FlattenOpts configuration for flattening a swagger specification.
+//
+// The BasePath parameter is used to locate remote relative $ref found in the specification.
+// This path is a file: it points to the location of the root document and may be either a local
+// file path or a URL.
+//
+// If none is specified, relative references (e.g. "$ref": "folder/schema.yaml#/definitions/...")
+// found in the spec are resolved against the current working directory.
+type FlattenOpts struct {
+ Spec *Spec // The analyzed spec to work with
+ flattenContext *context // Internal context to track flattening activity
+
+ BasePath string // The location of the root document for this spec to resolve relative $ref
+
+ // Flattening options
+ Expand bool // When true, skip flattening the spec and expand it instead (if Minimal is false)
+ Minimal bool // When true, do not decompose complex structures such as allOf
+ Verbose bool // enable some reporting on possible name conflicts detected
+ RemoveUnused bool // When true, remove unused parameters, responses and definitions after expansion/flattening
+ ContinueOnError bool // Continue when spec expansion issues are found
+
+ /* Extra keys */
+ _ struct{} // require keys
+}
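+
+// An illustrative configuration (editorial sketch, not part of the upstream source),
+// assuming "doc" is an analyzed *Spec produced by this package:
+//
+//	opts := &FlattenOpts{
+//		Spec:     doc,
+//		BasePath: "spec.yaml",
+//		Minimal:  true,
+//	}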
+
+// ExpandOpts creates a spec.ExpandOptions to configure expanding a specification document.
+func (f *FlattenOpts) ExpandOpts(skipSchemas bool) *spec.ExpandOptions {
+ return &spec.ExpandOptions{
+ RelativeBase: f.BasePath,
+ SkipSchemas: skipSchemas,
+ ContinueOnError: f.ContinueOnError,
+ }
+}
+
+// Swagger gets the swagger specification for this flatten operation
+func (f *FlattenOpts) Swagger() *spec.Swagger {
+ return f.Spec.spec
+}
+
+// croak logs notifications and warnings about valid, but possibly unwanted constructs resulting
+// from flattening a spec
+func (f *FlattenOpts) croak() {
+ if !f.Verbose {
+ return
+ }
+
+ reported := make(map[string]bool, len(f.flattenContext.newRefs))
+ for _, v := range f.Spec.references.allRefs {
+ // warns about duplicate handling
+ for _, r := range f.flattenContext.newRefs {
+ if r.isOAIGen && r.path == v.String() {
+ reported[r.newName] = true
+ }
+ }
+ }
+
+ for k := range reported {
+ log.Printf("warning: duplicate flattened definition name resolved as %s", k)
+ }
+
+ // warns about possible type mismatches
+ uniqueMsg := make(map[string]bool)
+ for _, msg := range f.flattenContext.warnings {
+ if _, ok := uniqueMsg[msg]; ok {
+ continue
+ }
+ log.Printf("warning: %s", msg)
+ uniqueMsg[msg] = true
+ }
+}
diff --git a/vendor/github.com/go-openapi/analysis/internal/debug/debug.go b/vendor/github.com/go-openapi/analysis/internal/debug/debug.go
new file mode 100644
index 000000000..ec0fec022
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/internal/debug/debug.go
@@ -0,0 +1,41 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package debug
+
+import (
+ "fmt"
+ "log"
+ "os"
+ "path/filepath"
+ "runtime"
+)
+
+var (
+ output = os.Stdout
+)
+
+// GetLogger provides a prefix debug logger
+func GetLogger(prefix string, debug bool) func(string, ...interface{}) {
+ if debug {
+ logger := log.New(output, fmt.Sprintf("%s:", prefix), log.LstdFlags)
+
+ return func(msg string, args ...interface{}) {
+ _, file1, pos1, _ := runtime.Caller(1)
+ logger.Printf("%s:%d: %s", filepath.Base(file1), pos1, fmt.Sprintf(msg, args...))
+ }
+ }
+
+ return func(msg string, args ...interface{}) {}
+}
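+
+// For example, packages in this module instantiate their logger as follows
+// (see internal/flatten/replace below for an actual occurrence):
+//
+//	var debugLog = debug.GetLogger("analysis/flatten/replace", os.Getenv("SWAGGER_DEBUG") != "")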
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/normalize/normalize.go b/vendor/github.com/go-openapi/analysis/internal/flatten/normalize/normalize.go
new file mode 100644
index 000000000..8c9df0580
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/internal/flatten/normalize/normalize.go
@@ -0,0 +1,87 @@
+package normalize
+
+import (
+ "net/url"
+ "path"
+ "path/filepath"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+// RebaseRef rebases a remote ref relative to a base ref.
+//
+// NOTE: does not support JSON Schema ID for $ref (we assume we are working with swagger specs here).
+//
+// NOTE(windows):
+// * refs are assumed to have been normalized with drive letter lower cased (from go-openapi/spec)
+// * "/ in paths may appear as escape sequences
+func RebaseRef(baseRef string, ref string) string {
+ baseRef, _ = url.PathUnescape(baseRef)
+ ref, _ = url.PathUnescape(ref)
+
+ if baseRef == "" || baseRef == "." || strings.HasPrefix(baseRef, "#") {
+ return ref
+ }
+
+ parts := strings.Split(ref, "#")
+
+ baseParts := strings.Split(baseRef, "#")
+ baseURL, _ := url.Parse(baseParts[0])
+ if strings.HasPrefix(ref, "#") {
+ if baseURL.Host == "" {
+ return strings.Join([]string{baseParts[0], parts[1]}, "#")
+ }
+
+ return strings.Join([]string{baseParts[0], parts[1]}, "#")
+ }
+
+ refURL, _ := url.Parse(parts[0])
+ if refURL.Host != "" || filepath.IsAbs(parts[0]) {
+ // not rebasing an absolute path
+ return ref
+ }
+
+ // there is a relative path
+ var basePath string
+ if baseURL.Host != "" {
+ // when there is a host, standard URI rules apply (with "/")
+ baseURL.Path = path.Dir(baseURL.Path)
+ baseURL.Path = path.Join(baseURL.Path, "/"+parts[0])
+
+ return baseURL.String()
+ }
+
+ // this is a local relative path
+ // basePart[0] and parts[0] are local filesystem directories/files
+ basePath = filepath.Dir(baseParts[0])
+ relPath := filepath.Join(basePath, string(filepath.Separator)+parts[0])
+ if len(parts) > 1 {
+ return strings.Join([]string{relPath, parts[1]}, "#")
+ }
+
+ return relPath
+}
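+
+// Illustrative note (editorial sketch, not part of the upstream source; Unix-style
+// separators assumed):
+//
+//	RebaseRef("folder/spec.yaml", "other.yaml#/definitions/X") // "folder/other.yaml#/definitions/X"
+//	RebaseRef("folder/spec.yaml", "#/definitions/X")           // "folder/spec.yaml#/definitions/X"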
+
+// Path renders an absolute path for remote file refs
+//
+// NOTE(windows):
+// * refs are assumed to have been normalized with drive letter lower cased (from go-openapi/spec)
+// * "/ in paths may appear as escape sequences
+func Path(ref spec.Ref, basePath string) string {
+ uri, _ := url.PathUnescape(ref.String())
+ if ref.HasFragmentOnly || filepath.IsAbs(uri) {
+ return uri
+ }
+
+ refURL, _ := url.Parse(uri)
+ if refURL.Host != "" {
+ return uri
+ }
+
+ parts := strings.Split(uri, "#")
+ // BasePath, parts[0] are local filesystem directories, guaranteed to be absolute at this stage
+ parts[0] = filepath.Join(filepath.Dir(basePath), parts[0])
+
+ return strings.Join(parts, "#")
+}
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/operations/operations.go b/vendor/github.com/go-openapi/analysis/internal/flatten/operations/operations.go
new file mode 100644
index 000000000..7f3a2b871
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/internal/flatten/operations/operations.go
@@ -0,0 +1,90 @@
+package operations
+
+import (
+ "path"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+// AllOpRefsByRef returns an index of sortable operations
+func AllOpRefsByRef(specDoc Provider, operationIDs []string) map[string]OpRef {
+ return OpRefsByRef(GatherOperations(specDoc, operationIDs))
+}
+
+// OpRefsByRef indexes a map of sortable operations
+func OpRefsByRef(oprefs map[string]OpRef) map[string]OpRef {
+ result := make(map[string]OpRef, len(oprefs))
+ for _, v := range oprefs {
+ result[v.Ref.String()] = v
+ }
+
+ return result
+}
+
+// OpRef is an indexable, sortable operation
+type OpRef struct {
+ Method string
+ Path string
+ Key string
+ ID string
+ Op *spec.Operation
+ Ref spec.Ref
+}
+
+// OpRefs is a sortable collection of operations
+type OpRefs []OpRef
+
+func (o OpRefs) Len() int { return len(o) }
+func (o OpRefs) Swap(i, j int) { o[i], o[j] = o[j], o[i] }
+func (o OpRefs) Less(i, j int) bool { return o[i].Key < o[j].Key }
+
+// Provider knows how to collect operations from a spec
+type Provider interface {
+ Operations() map[string]map[string]*spec.Operation
+}
+
+// GatherOperations builds a map of sorted operations from a spec
+func GatherOperations(specDoc Provider, operationIDs []string) map[string]OpRef {
+ var oprefs OpRefs
+
+ for method, pathItem := range specDoc.Operations() {
+ for pth, operation := range pathItem {
+ vv := *operation
+ oprefs = append(oprefs, OpRef{
+ Key: swag.ToGoName(strings.ToLower(method) + " " + pth),
+ Method: method,
+ Path: pth,
+ ID: vv.ID,
+ Op: &vv,
+ Ref: spec.MustCreateRef("#" + path.Join("/paths", jsonpointer.Escape(pth), method)),
+ })
+ }
+ }
+
+ sort.Sort(oprefs)
+
+ operations := make(map[string]OpRef)
+ for _, opr := range oprefs {
+ nm := opr.ID
+ if nm == "" {
+ nm = opr.Key
+ }
+
+ oo, found := operations[nm]
+ if found && oo.Method != opr.Method && oo.Path != opr.Path {
+ nm = opr.Key
+ }
+
+ if len(operationIDs) == 0 || swag.ContainsStrings(operationIDs, opr.ID) || swag.ContainsStrings(operationIDs, nm) {
+ opr.ID = nm
+ opr.Op.ID = nm
+ operations[nm] = opr
+ }
+ }
+
+ return operations
+}
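+
+// Illustrative note (editorial sketch, not part of the upstream source): an operation
+// with an explicit operationId, e.g. "listPets", is indexed under that ID; one without
+// an ID falls back to its sortable key, e.g. swag.ToGoName("get /pets") == "GetPets",
+// and its $ref takes a form like "#/paths/~1pets/GET" (path segment JSON-pointer escaped).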
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go b/vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go
new file mode 100644
index 000000000..26c2a05a3
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go
@@ -0,0 +1,434 @@
+package replace
+
+import (
+ "fmt"
+ "net/url"
+ "os"
+ "path"
+ "strconv"
+
+ "github.com/go-openapi/analysis/internal/debug"
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/spec"
+)
+
+const definitionsPath = "#/definitions"
+
+var debugLog = debug.GetLogger("analysis/flatten/replace", os.Getenv("SWAGGER_DEBUG") != "")
+
+// RewriteSchemaToRef replaces a schema with a Ref
+func RewriteSchemaToRef(sp *spec.Swagger, key string, ref spec.Ref) error {
+ debugLog("rewriting schema to ref for %s with %s", key, ref.String())
+ _, value, err := getPointerFromKey(sp, key)
+ if err != nil {
+ return err
+ }
+
+ switch refable := value.(type) {
+ case *spec.Schema:
+ return rewriteParentRef(sp, key, ref)
+
+ case spec.Schema:
+ return rewriteParentRef(sp, key, ref)
+
+ case *spec.SchemaOrArray:
+ if refable.Schema != nil {
+ refable.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+ }
+
+ case *spec.SchemaOrBool:
+ if refable.Schema != nil {
+ refable.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+ }
+ default:
+ return fmt.Errorf("no schema with ref found at %s for %T", key, value)
+ }
+
+ return nil
+}
+
+func rewriteParentRef(sp *spec.Swagger, key string, ref spec.Ref) error {
+ parent, entry, pvalue, err := getParentFromKey(sp, key)
+ if err != nil {
+ return err
+ }
+
+ debugLog("rewriting holder for %T", pvalue)
+ switch container := pvalue.(type) {
+ case spec.Response:
+ if err := rewriteParentRef(sp, "#"+parent, ref); err != nil {
+ return err
+ }
+
+ case *spec.Response:
+ container.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case *spec.Responses:
+ statusCode, err := strconv.Atoi(entry)
+ if err != nil {
+ return fmt.Errorf("%s not a number: %w", key[1:], err)
+ }
+ resp := container.StatusCodeResponses[statusCode]
+ resp.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+ container.StatusCodeResponses[statusCode] = resp
+
+ case map[string]spec.Response:
+ resp := container[entry]
+ resp.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+ container[entry] = resp
+
+ case spec.Parameter:
+ if err := rewriteParentRef(sp, "#"+parent, ref); err != nil {
+ return err
+ }
+
+ case map[string]spec.Parameter:
+ param := container[entry]
+ param.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+ container[entry] = param
+
+ case []spec.Parameter:
+ idx, err := strconv.Atoi(entry)
+ if err != nil {
+ return fmt.Errorf("%s not a number: %w", key[1:], err)
+ }
+ param := container[idx]
+ param.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+ container[idx] = param
+
+ case spec.Definitions:
+ container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case map[string]spec.Schema:
+ container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case []spec.Schema:
+ idx, err := strconv.Atoi(entry)
+ if err != nil {
+ return fmt.Errorf("%s not a number: %w", key[1:], err)
+ }
+ container[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case *spec.SchemaOrArray:
+ // NOTE: this is necessarily an array - otherwise, the parent would be *Schema
+ idx, err := strconv.Atoi(entry)
+ if err != nil {
+ return fmt.Errorf("%s not a number: %w", key[1:], err)
+ }
+ container.Schemas[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case spec.SchemaProperties:
+ container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ // NOTE: no case for *spec.SchemaOrBool: the parent in that case is a *Schema
+
+ default:
+ return fmt.Errorf("unhandled parent schema rewrite %s (%T)", key, pvalue)
+ }
+
+ return nil
+}
+
+// getPointerFromKey retrieves the content of the JSON pointer "key"
+func getPointerFromKey(sp interface{}, key string) (string, interface{}, error) {
+ switch sp.(type) {
+ case *spec.Schema:
+ case *spec.Swagger:
+ default:
+ panic("unexpected type used in getPointerFromKey")
+ }
+ if key == "#/" {
+ return "", sp, nil
+ }
+ // unescape chars in key, e.g. "{}" from path params
+ pth, _ := url.PathUnescape(key[1:])
+ ptr, err := jsonpointer.New(pth)
+ if err != nil {
+ return "", nil, err
+ }
+
+ value, _, err := ptr.Get(sp)
+ if err != nil {
+ debugLog("error when getting key: %s with path: %s", key, pth)
+
+ return "", nil, err
+ }
+
+ return pth, value, nil
+}
+
+// getParentFromKey retrieves the container of the JSON pointer "key"
+func getParentFromKey(sp interface{}, key string) (string, string, interface{}, error) {
+ switch sp.(type) {
+ case *spec.Schema:
+ case *spec.Swagger:
+ default:
+ panic("unexpected type used in getPointerFromKey")
+ }
+ // unescape chars in key, e.g. "{}" from path params
+ pth, _ := url.PathUnescape(key[1:])
+
+ parent, entry := path.Dir(pth), path.Base(pth)
+ debugLog("getting schema holder at: %s, with entry: %s", parent, entry)
+
+ pptr, err := jsonpointer.New(parent)
+ if err != nil {
+ return "", "", nil, err
+ }
+ pvalue, _, err := pptr.Get(sp)
+ if err != nil {
+ return "", "", nil, fmt.Errorf("can't get parent for %s: %w", parent, err)
+ }
+
+ return parent, entry, pvalue, nil
+}
+
+// UpdateRef replaces a ref by another one
+func UpdateRef(sp interface{}, key string, ref spec.Ref) error {
+ switch sp.(type) {
+ case *spec.Schema:
+ case *spec.Swagger:
+ default:
+ panic("unexpected type used in getPointerFromKey")
+ }
+ debugLog("updating ref for %s with %s", key, ref.String())
+ pth, value, err := getPointerFromKey(sp, key)
+ if err != nil {
+ return err
+ }
+
+ switch refable := value.(type) {
+ case *spec.Schema:
+ refable.Ref = ref
+ case *spec.SchemaOrArray:
+ if refable.Schema != nil {
+ refable.Schema.Ref = ref
+ }
+ case *spec.SchemaOrBool:
+ if refable.Schema != nil {
+ refable.Schema.Ref = ref
+ }
+ case spec.Schema:
+ debugLog("rewriting holder for %T", refable)
+ _, entry, pvalue, erp := getParentFromKey(sp, key)
+ if erp != nil {
+ return erp
+ }
+ switch container := pvalue.(type) {
+ case spec.Definitions:
+ container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case map[string]spec.Schema:
+ container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case []spec.Schema:
+ idx, err := strconv.Atoi(entry)
+ if err != nil {
+ return fmt.Errorf("%s not a number: %w", pth, err)
+ }
+ container[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case *spec.SchemaOrArray:
+ // NOTE: this is necessarily an array - otherwise, the parent would be *Schema
+ idx, err := strconv.Atoi(entry)
+ if err != nil {
+ return fmt.Errorf("%s not a number: %w", pth, err)
+ }
+ container.Schemas[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case spec.SchemaProperties:
+ container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ // NOTE: no case for *spec.SchemaOrBool: the parent in that case is a *Schema
+
+ default:
+ return fmt.Errorf("unhandled container type at %s: %T", key, value)
+ }
+
+ default:
+ return fmt.Errorf("no schema with ref found at %s for %T", key, value)
+ }
+
+ return nil
+}
+
+// UpdateRefWithSchema replaces a ref with a schema (i.e. re-inline schema)
+func UpdateRefWithSchema(sp *spec.Swagger, key string, sch *spec.Schema) error {
+ debugLog("updating ref for %s with schema", key)
+ pth, value, err := getPointerFromKey(sp, key)
+ if err != nil {
+ return err
+ }
+
+ switch refable := value.(type) {
+ case *spec.Schema:
+ *refable = *sch
+ case spec.Schema:
+ _, entry, pvalue, erp := getParentFromKey(sp, key)
+ if erp != nil {
+ return erp
+ }
+ switch container := pvalue.(type) {
+ case spec.Definitions:
+ container[entry] = *sch
+
+ case map[string]spec.Schema:
+ container[entry] = *sch
+
+ case []spec.Schema:
+ idx, err := strconv.Atoi(entry)
+ if err != nil {
+ return fmt.Errorf("%s not a number: %w", pth, err)
+ }
+ container[idx] = *sch
+
+ case *spec.SchemaOrArray:
+ // NOTE: this is necessarily an array - otherwise, the parent would be *Schema
+ idx, err := strconv.Atoi(entry)
+ if err != nil {
+ return fmt.Errorf("%s not a number: %w", pth, err)
+ }
+ container.Schemas[idx] = *sch
+
+ case spec.SchemaProperties:
+ container[entry] = *sch
+
+ // NOTE: no case for *spec.SchemaOrBool: the parent in that case is a *Schema
+
+ default:
+ return fmt.Errorf("unhandled type for parent of [%s]: %T", key, value)
+ }
+ case *spec.SchemaOrArray:
+ *refable.Schema = *sch
+ case *spec.SchemaOrBool:
+ *refable.Schema = *sch
+ default:
+ return fmt.Errorf("no schema with ref found at %s for %T", key, value)
+ }
+
+ return nil
+}
+
+// DeepestRefResult holds the results from DeepestRef analysis
+type DeepestRefResult struct {
+ Ref spec.Ref
+ Schema *spec.Schema
+ Warnings []string
+}
+
+// DeepestRef finds the first definition ref, from a cascade of nested refs which are not definitions.
+// - if no definition is found, returns the deepest ref.
+// - pointers to external files are expanded
+//
+// NOTE: all external $ref's are assumed to be already expanded at this stage.
+func DeepestRef(sp *spec.Swagger, opts *spec.ExpandOptions, ref spec.Ref) (*DeepestRefResult, error) {
+ if !ref.HasFragmentOnly {
+ // we found an external $ref, which is odd at this stage:
+ // do nothing on external $refs
+ return &DeepestRefResult{Ref: ref}, nil
+ }
+
+ currentRef := ref
+ visited := make(map[string]bool, 64)
+ warnings := make([]string, 0, 2)
+
+DOWNREF:
+ for currentRef.String() != "" {
+ if path.Dir(currentRef.String()) == definitionsPath {
+ // this is a top-level definition: stop here and return this ref
+ return &DeepestRefResult{Ref: currentRef}, nil
+ }
+
+ if _, beenThere := visited[currentRef.String()]; beenThere {
+ return nil,
+ fmt.Errorf("cannot resolve cyclic chain of pointers under %s", currentRef.String())
+ }
+
+ visited[currentRef.String()] = true
+ value, _, err := currentRef.GetPointer().Get(sp)
+ if err != nil {
+ return nil, err
+ }
+
+ switch refable := value.(type) {
+ case *spec.Schema:
+ if refable.Ref.String() == "" {
+ break DOWNREF
+ }
+ currentRef = refable.Ref
+
+ case spec.Schema:
+ if refable.Ref.String() == "" {
+ break DOWNREF
+ }
+ currentRef = refable.Ref
+
+ case *spec.SchemaOrArray:
+ if refable.Schema == nil || refable.Schema.Ref.String() == "" {
+ break DOWNREF
+ }
+ currentRef = refable.Schema.Ref
+
+ case *spec.SchemaOrBool:
+ if refable.Schema == nil || refable.Schema.Ref.String() == "" {
+ break DOWNREF
+ }
+ currentRef = refable.Schema.Ref
+
+ case spec.Response:
+ // a pointer points to a schema initially marshalled in responses section...
+ // Attempt to convert this to a schema. If this fails, the spec is invalid
+ asJSON, _ := refable.MarshalJSON()
+ var asSchema spec.Schema
+
+ err := asSchema.UnmarshalJSON(asJSON)
+ if err != nil {
+ return nil,
+ fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema a, got: %T",
+ currentRef.String(), value)
+ }
+ warnings = append(warnings, fmt.Sprintf("found $ref %q (response) interpreted as schema", currentRef.String()))
+
+ if asSchema.Ref.String() == "" {
+ break DOWNREF
+ }
+ currentRef = asSchema.Ref
+
+ case spec.Parameter:
+ // a pointer points to a schema initially marshalled in parameters section...
+ // Attempt to convert this to a schema. If this fails, the spec is invalid
+ asJSON, _ := refable.MarshalJSON()
+ var asSchema spec.Schema
+ if err := asSchema.UnmarshalJSON(asJSON); err != nil {
+ return nil,
+ fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema a, got: %T",
+ currentRef.String(), value)
+ }
+
+ warnings = append(warnings, fmt.Sprintf("found $ref %q (parameter) interpreted as schema", currentRef.String()))
+
+ if asSchema.Ref.String() == "" {
+ break DOWNREF
+ }
+ currentRef = asSchema.Ref
+
+ default:
+ return nil,
+ fmt.Errorf("unhandled type to resolve JSON pointer %s. Expected a Schema, got: %T",
+ currentRef.String(), value)
+ }
+ }
+
+ // assess what schema we're ending with
+ sch, erv := spec.ResolveRefWithBase(sp, &currentRef, opts)
+ if erv != nil {
+ return nil, erv
+ }
+
+ if sch == nil {
+ return nil, fmt.Errorf("no schema found at %s", currentRef.String())
+ }
+
+ return &DeepestRefResult{Ref: currentRef, Schema: sch, Warnings: warnings}, nil
+}
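+
+// Illustrative note (editorial sketch, not part of the upstream source): starting from
+// a pointer such as "#/paths/~1pets/get/parameters/0/schema" whose schema holds
+// "$ref": "#/definitions/pet", DeepestRef stops at the first ref below #/definitions
+// and returns "#/definitions/pet" together with the resolved schema.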
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/schutils/flatten_schema.go b/vendor/github.com/go-openapi/analysis/internal/flatten/schutils/flatten_schema.go
new file mode 100644
index 000000000..4590236e6
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/internal/flatten/schutils/flatten_schema.go
@@ -0,0 +1,29 @@
+// Package schutils provides tools to save or clone a schema
+// when flattening a spec.
+package schutils
+
+import (
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+// Save registers a schema as an entry in spec #/definitions
+func Save(sp *spec.Swagger, name string, schema *spec.Schema) {
+ if schema == nil {
+ return
+ }
+
+ if sp.Definitions == nil {
+ sp.Definitions = make(map[string]spec.Schema, 150)
+ }
+
+ sp.Definitions[name] = *schema
+}
+
+// Clone deep-clones a schema
+func Clone(schema *spec.Schema) *spec.Schema {
+ var sch spec.Schema
+ _ = swag.FromDynamicJSON(schema, &sch)
+
+ return &sch
+}
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go b/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go
new file mode 100644
index 000000000..18e552ead
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go
@@ -0,0 +1,201 @@
+package sortref
+
+import (
+ "net/http"
+ "path"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/spec"
+)
+
+const (
+ paths = "paths"
+ responses = "responses"
+ parameters = "parameters"
+ definitions = "definitions"
+)
+
+var (
+ ignoredKeys map[string]struct{}
+ validMethods map[string]struct{}
+)
+
+func init() {
+ ignoredKeys = map[string]struct{}{
+ "schema": {},
+ "properties": {},
+ "not": {},
+ "anyOf": {},
+ "oneOf": {},
+ }
+
+ validMethods = map[string]struct{}{
+ "GET": {},
+ "HEAD": {},
+ "OPTIONS": {},
+ "PATCH": {},
+ "POST": {},
+ "PUT": {},
+ "DELETE": {},
+ }
+}
+
+// Key represents a key item constructed from /-separated segments
+type Key struct {
+ Segments int
+ Key string
+}
+
+// Keys is a sortable collection of Key items
+type Keys []Key
+
+func (k Keys) Len() int { return len(k) }
+func (k Keys) Swap(i, j int) { k[i], k[j] = k[j], k[i] }
+func (k Keys) Less(i, j int) bool {
+ return k[i].Segments > k[j].Segments || (k[i].Segments == k[j].Segments && k[i].Key < k[j].Key)
+}
+
+// KeyParts constructs a SplitKey with all its /-separated segments decomposed. It is sortable.
+func KeyParts(key string) SplitKey {
+ var res []string
+ for _, part := range strings.Split(key[1:], "/") {
+ if part != "" {
+ res = append(res, jsonpointer.Unescape(part))
+ }
+ }
+
+ return res
+}
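+
+// Illustrative note (editorial sketch, not part of the upstream source):
+//
+//	parts := KeyParts("#/paths/~1pets/get/responses/200/schema")
+//	// parts: ["paths", "/pets", "get", "responses", "200", "schema"]
+//	parts.IsOperationResponse()  // true
+//	parts.IsStatusCodeResponse() // true
+//	parts.ResponseName()         // "OK"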
+
+// SplitKey holds the parts of a /-separated key, so that their location may be determined.
+type SplitKey []string
+
+// IsDefinition is true when the split key is in the #/definitions section of a spec
+func (s SplitKey) IsDefinition() bool {
+ return len(s) > 1 && s[0] == definitions
+}
+
+// DefinitionName yields the name of the definition
+func (s SplitKey) DefinitionName() string {
+ if !s.IsDefinition() {
+ return ""
+ }
+
+ return s[1]
+}
+
+func (s SplitKey) isKeyName(i int) bool {
+ if i <= 0 {
+ return false
+ }
+
+ count := 0
+ for idx := i - 1; idx > 0; idx-- {
+ if s[idx] != "properties" {
+ break
+ }
+ count++
+ }
+
+ return count%2 != 0
+}
+
+// PartAdder knows how to construct the components of a new name
+type PartAdder func(string) []string
+
+// BuildName builds a name from segments
+func (s SplitKey) BuildName(segments []string, startIndex int, adder PartAdder) string {
+ for i, part := range s[startIndex:] {
+ if _, ignored := ignoredKeys[part]; !ignored || s.isKeyName(startIndex+i) {
+ segments = append(segments, adder(part)...)
+ }
+ }
+
+ return strings.Join(segments, " ")
+}
+
+// IsOperation is true when the split key is in the operations section
+func (s SplitKey) IsOperation() bool {
+ return len(s) > 1 && s[0] == paths
+}
+
+// IsSharedOperationParam is true when the split key is in the parameters section of a path
+func (s SplitKey) IsSharedOperationParam() bool {
+ return len(s) > 2 && s[0] == paths && s[2] == parameters
+}
+
+// IsSharedParam is true when the split key is in the #/parameters section of a spec
+func (s SplitKey) IsSharedParam() bool {
+ return len(s) > 1 && s[0] == parameters
+}
+
+// IsOperationParam is true when the split key is in the parameters section of an operation
+func (s SplitKey) IsOperationParam() bool {
+ return len(s) > 3 && s[0] == paths && s[3] == parameters
+}
+
+// IsOperationResponse is true when the split key is in the responses section of an operation
+func (s SplitKey) IsOperationResponse() bool {
+ return len(s) > 3 && s[0] == paths && s[3] == responses
+}
+
+// IsSharedResponse is true when the split key is in the #/responses section of a spec
+func (s SplitKey) IsSharedResponse() bool {
+ return len(s) > 1 && s[0] == responses
+}
+
+// IsDefaultResponse is true when the split key is the default response for an operation
+func (s SplitKey) IsDefaultResponse() bool {
+ return len(s) > 4 && s[0] == paths && s[3] == responses && s[4] == "default"
+}
+
+// IsStatusCodeResponse is true when the split key is an operation response with a status code
+func (s SplitKey) IsStatusCodeResponse() bool {
+ isInt := func() bool {
+ _, err := strconv.Atoi(s[4])
+
+ return err == nil
+ }
+
+ return len(s) > 4 && s[0] == paths && s[3] == responses && isInt()
+}
+
+// ResponseName yields either the status code or "Default" for a response
+func (s SplitKey) ResponseName() string {
+ if s.IsStatusCodeResponse() {
+ code, _ := strconv.Atoi(s[4])
+
+ return http.StatusText(code)
+ }
+
+ if s.IsDefaultResponse() {
+ return "Default"
+ }
+
+ return ""
+}
+
+// PathItemRef constructs a $ref object from a split key of the form /{path}/{method}
+func (s SplitKey) PathItemRef() spec.Ref {
+ if len(s) < 3 {
+ return spec.Ref{}
+ }
+
+ pth, method := s[1], s[2]
+ if _, isValidMethod := validMethods[strings.ToUpper(method)]; !isValidMethod && !strings.HasPrefix(method, "x-") {
+ return spec.Ref{}
+ }
+
+ return spec.MustCreateRef("#" + path.Join("/", paths, jsonpointer.Escape(pth), strings.ToUpper(method)))
+}
+
+// PathRef constructs a $ref object from a split key of the form /paths/{reference}
+func (s SplitKey) PathRef() spec.Ref {
+ if !s.IsOperation() {
+ return spec.Ref{}
+ }
+
+ return spec.MustCreateRef("#" + path.Join("/", paths, jsonpointer.Escape(s[1])))
+}
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/sort_ref.go b/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/sort_ref.go
new file mode 100644
index 000000000..73243df87
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/sort_ref.go
@@ -0,0 +1,141 @@
+package sortref
+
+import (
+ "reflect"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/analysis/internal/flatten/normalize"
+ "github.com/go-openapi/spec"
+)
+
+var depthGroupOrder = []string{
+ "sharedParam", "sharedResponse", "sharedOpParam", "opParam", "codeResponse", "defaultResponse", "definition",
+}
+
+type mapIterator struct {
+ len int
+ mapIter *reflect.MapIter
+}
+
+func (i *mapIterator) Next() bool {
+ return i.mapIter.Next()
+}
+
+func (i *mapIterator) Len() int {
+ return i.len
+}
+
+func (i *mapIterator) Key() string {
+ return i.mapIter.Key().String()
+}
+
+func mustMapIterator(anyMap interface{}) *mapIterator {
+ val := reflect.ValueOf(anyMap)
+
+ return &mapIterator{mapIter: val.MapRange(), len: val.Len()}
+}
+
+// DepthFirst sorts a map of anything. It groups keys by category
+// (shared params, op param, status code response, default response, definitions),
+// sorts each group internally by number of parts in the key and by lexical name,
+// then flattens the groups into a single list of keys.
+func DepthFirst(in interface{}) []string {
+ iterator := mustMapIterator(in)
+ sorted := make([]string, 0, iterator.Len())
+ grouped := make(map[string]Keys, iterator.Len())
+
+ for iterator.Next() {
+ k := iterator.Key()
+ split := KeyParts(k)
+ var pk string
+
+ if split.IsSharedOperationParam() {
+ pk = "sharedOpParam"
+ }
+ if split.IsOperationParam() {
+ pk = "opParam"
+ }
+ if split.IsStatusCodeResponse() {
+ pk = "codeResponse"
+ }
+ if split.IsDefaultResponse() {
+ pk = "defaultResponse"
+ }
+ if split.IsDefinition() {
+ pk = "definition"
+ }
+ if split.IsSharedParam() {
+ pk = "sharedParam"
+ }
+ if split.IsSharedResponse() {
+ pk = "sharedResponse"
+ }
+ grouped[pk] = append(grouped[pk], Key{Segments: len(split), Key: k})
+ }
+
+ for _, pk := range depthGroupOrder {
+ res := grouped[pk]
+ sort.Sort(res)
+
+ for _, v := range res {
+ sorted = append(sorted, v.Key)
+ }
+ }
+
+ return sorted
+}
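+
+// Illustrative note (editorial sketch, not part of the upstream source): given the keys
+// "#/parameters/limit", "#/definitions/a" and "#/definitions/a/properties/b", DepthFirst
+// returns the shared parameter first (per depthGroupOrder), then the definitions with
+// the deeper key before the shallower one:
+//
+//	["#/parameters/limit", "#/definitions/a/properties/b", "#/definitions/a"]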
+
+// topmostRefs sorts refs by hierarchical then lexicographic order,
+// yielding refs ordered breadth-first.
+type topmostRefs []string
+
+func (k topmostRefs) Len() int { return len(k) }
+func (k topmostRefs) Swap(i, j int) { k[i], k[j] = k[j], k[i] }
+func (k topmostRefs) Less(i, j int) bool {
+ li, lj := len(strings.Split(k[i], "/")), len(strings.Split(k[j], "/"))
+ if li == lj {
+ return k[i] < k[j]
+ }
+
+ return li < lj
+}
+
+// TopmostFirst sorts references by depth
+func TopmostFirst(refs []string) []string {
+ res := topmostRefs(refs)
+ sort.Sort(res)
+
+ return res
+}
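+
+// Illustrative note (editorial sketch, not part of the upstream source):
+//
+//	TopmostFirst([]string{"#/definitions/a/properties/b", "#/definitions/a"})
+//	// ["#/definitions/a", "#/definitions/a/properties/b"]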
+
+// RefRevIdx is a reverse index for references
+type RefRevIdx struct {
+ Ref spec.Ref
+ Keys []string
+}
+
+// ReverseIndex builds a reverse index for references in schemas
+func ReverseIndex(schemas map[string]spec.Ref, basePath string) map[string]RefRevIdx {
+ collected := make(map[string]RefRevIdx)
+ for key, schRef := range schemas {
+ // normalize paths before sorting,
+ // so that keys from the same external file are grouped together
+ normalizedPath := normalize.Path(schRef, basePath)
+
+ entry, ok := collected[normalizedPath]
+ if ok {
+ entry.Keys = append(entry.Keys, key)
+ collected[normalizedPath] = entry
+
+ continue
+ }
+
+ collected[normalizedPath] = RefRevIdx{
+ Ref: schRef,
+ Keys: []string{key},
+ }
+ }
+
+ return collected
+}
diff --git a/vendor/github.com/go-openapi/analysis/mixin.go b/vendor/github.com/go-openapi/analysis/mixin.go
new file mode 100644
index 000000000..b25305264
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/mixin.go
@@ -0,0 +1,515 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analysis
+
+import (
+ "fmt"
+ "reflect"
+
+ "github.com/go-openapi/spec"
+)
+
+// Mixin modifies the primary swagger spec by adding the paths and
+// definitions from the mixin specs. Top level parameters and
+// responses from the mixins are also carried over. Operation id
+// collisions are avoided by appending "Mixin<N>" but only if
+// needed.
+//
+// The following parts of primary are subject to merge, filling empty details
+// - Info
+// - BasePath
+// - Host
+// - ExternalDocs
+//
+// Consider calling FixEmptyResponseDescriptions() on the modified primary
+// if you read the specs from storage and they were valid to start with.
+//
+// Entries in "paths", "definitions", "parameters" and "responses" are
+// added to the primary in the order of the given mixins. If the entry
+// already exists in primary it is skipped with a warning message.
+//
+// The count of skipped entries (from collisions) is returned so any
+// deviation from the number expected can flag a warning in your build
+// scripts. Carefully review the collisions before accepting them;
+// consider renaming things if possible.
+//
+// No key normalization takes place (paths, type defs,
+// etc). Ensure they are canonical if your downstream tools do
+// key normalization of any form.
+//
+// Merging schemes (http, https) and consumers/producers does not account for
+// collisions.
+func Mixin(primary *spec.Swagger, mixins ...*spec.Swagger) []string {
+ skipped := make([]string, 0, len(mixins))
+ opIds := getOpIds(primary)
+ initPrimary(primary)
+
+ for i, m := range mixins {
+ skipped = append(skipped, mergeSwaggerProps(primary, m)...)
+
+ skipped = append(skipped, mergeConsumes(primary, m)...)
+
+ skipped = append(skipped, mergeProduces(primary, m)...)
+
+ skipped = append(skipped, mergeTags(primary, m)...)
+
+ skipped = append(skipped, mergeSchemes(primary, m)...)
+
+ skipped = append(skipped, mergeSecurityDefinitions(primary, m)...)
+
+ skipped = append(skipped, mergeSecurityRequirements(primary, m)...)
+
+ skipped = append(skipped, mergeDefinitions(primary, m)...)
+
+ // merging paths requires a map of operationIDs to work with
+ skipped = append(skipped, mergePaths(primary, m, opIds, i)...)
+
+ skipped = append(skipped, mergeParameters(primary, m)...)
+
+ skipped = append(skipped, mergeResponses(primary, m)...)
+ }
+
+ return skipped
+}
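+
+// Illustrative usage (editorial sketch, not part of the upstream source), assuming
+// primary and mixin are loaded *spec.Swagger documents:
+//
+//	collisions := Mixin(primary, mixin)
+//	for _, msg := range collisions {
+//		log.Println(msg)
+//	}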
+
+// getOpIds extracts all the paths.<path>.operationIds from the given
+// spec and returns them as the keys in a map with 'true' values.
+func getOpIds(s *spec.Swagger) map[string]bool {
+ rv := make(map[string]bool)
+ if s.Paths == nil {
+ return rv
+ }
+
+ for _, v := range s.Paths.Paths {
+ piops := pathItemOps(v)
+
+ for _, op := range piops {
+ rv[op.ID] = true
+ }
+ }
+
+ return rv
+}
+
+func pathItemOps(p spec.PathItem) []*spec.Operation {
+ var rv []*spec.Operation
+ rv = appendOp(rv, p.Get)
+ rv = appendOp(rv, p.Put)
+ rv = appendOp(rv, p.Post)
+ rv = appendOp(rv, p.Delete)
+ rv = appendOp(rv, p.Head)
+ rv = appendOp(rv, p.Patch)
+
+ return rv
+}
+
+func appendOp(ops []*spec.Operation, op *spec.Operation) []*spec.Operation {
+ if op == nil {
+ return ops
+ }
+
+ return append(ops, op)
+}
+
+func mergeSecurityDefinitions(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+ for k, v := range m.SecurityDefinitions {
+ if _, exists := primary.SecurityDefinitions[k]; exists {
+ warn := fmt.Sprintf(
+ "SecurityDefinitions entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
+ skipped = append(skipped, warn)
+
+ continue
+ }
+
+ primary.SecurityDefinitions[k] = v
+ }
+
+ return
+}
+
+func mergeSecurityRequirements(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+ for _, v := range m.Security {
+ found := false
+ for _, vv := range primary.Security {
+ if reflect.DeepEqual(v, vv) {
+ found = true
+
+ break
+ }
+ }
+
+ if found {
+ warn := fmt.Sprintf(
+ "Security requirement: '%v' already exists in primary or higher priority mixin, skipping\n", v)
+ skipped = append(skipped, warn)
+
+ continue
+ }
+ primary.Security = append(primary.Security, v)
+ }
+
+ return
+}
+
+func mergeDefinitions(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+ for k, v := range m.Definitions {
+ // assume name collisions represent an IDENTICAL type. Careful.
+ if _, exists := primary.Definitions[k]; exists {
+ warn := fmt.Sprintf(
+ "definitions entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
+ skipped = append(skipped, warn)
+
+ continue
+ }
+ primary.Definitions[k] = v
+ }
+
+ return
+}
+
+func mergePaths(primary *spec.Swagger, m *spec.Swagger, opIds map[string]bool, mixIndex int) (skipped []string) {
+ if m.Paths != nil {
+ for k, v := range m.Paths.Paths {
+ if _, exists := primary.Paths.Paths[k]; exists {
+ warn := fmt.Sprintf(
+ "paths entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
+ skipped = append(skipped, warn)
+
+ continue
+ }
+
+ // Swagger requires that operationIds be
+ // unique within a spec. If we find a
+ // collision we append "Mixin<N>" to the
+ // operationId we are adding, where <N> is the
+ // mixin index. We assume that operationIds
+ // within each of the provided specs are already unique.
+ piops := pathItemOps(v)
+ for _, piop := range piops {
+ if opIds[piop.ID] {
+ piop.ID = fmt.Sprintf("%v%v%v", piop.ID, "Mixin", mixIndex)
+ }
+ opIds[piop.ID] = true
+ }
+ primary.Paths.Paths[k] = v
+ }
+ }
+
+ return
+}
+
+func mergeParameters(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+ for k, v := range m.Parameters {
+ // could try to rename on conflict but would
+ // have to fix $refs in the mixin. Complain
+ // for now
+ if _, exists := primary.Parameters[k]; exists {
+ warn := fmt.Sprintf(
+ "top level parameters entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
+ skipped = append(skipped, warn)
+
+ continue
+ }
+ primary.Parameters[k] = v
+ }
+
+ return
+}
+
+func mergeResponses(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+ for k, v := range m.Responses {
+ // could try to rename on conflict but would
+ // have to fix $refs in the mixin. Complain
+ // for now
+ if _, exists := primary.Responses[k]; exists {
+ warn := fmt.Sprintf(
+ "top level responses entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
+ skipped = append(skipped, warn)
+
+ continue
+ }
+ primary.Responses[k] = v
+ }
+
+ return skipped
+}
+
+func mergeConsumes(primary *spec.Swagger, m *spec.Swagger) []string {
+ for _, v := range m.Consumes {
+ found := false
+ for _, vv := range primary.Consumes {
+ if v == vv {
+ found = true
+
+ break
+ }
+ }
+
+ if found {
+ // no warning here: we just skip it
+ continue
+ }
+ primary.Consumes = append(primary.Consumes, v)
+ }
+
+ return []string{}
+}
+
+func mergeProduces(primary *spec.Swagger, m *spec.Swagger) []string {
+ for _, v := range m.Produces {
+ found := false
+ for _, vv := range primary.Produces {
+ if v == vv {
+ found = true
+
+ break
+ }
+ }
+
+ if found {
+ // no warning here: we just skip it
+ continue
+ }
+ primary.Produces = append(primary.Produces, v)
+ }
+
+ return []string{}
+}
+
+func mergeTags(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+ for _, v := range m.Tags {
+ found := false
+ for _, vv := range primary.Tags {
+ if v.Name == vv.Name {
+ found = true
+
+ break
+ }
+ }
+
+ if found {
+ warn := fmt.Sprintf(
+ "top level tags entry with name '%v' already exists in primary or higher priority mixin, skipping\n",
+ v.Name,
+ )
+ skipped = append(skipped, warn)
+
+ continue
+ }
+
+ primary.Tags = append(primary.Tags, v)
+ }
+
+ return
+}
+
+func mergeSchemes(primary *spec.Swagger, m *spec.Swagger) []string {
+ for _, v := range m.Schemes {
+ found := false
+ for _, vv := range primary.Schemes {
+ if v == vv {
+ found = true
+
+ break
+ }
+ }
+
+ if found {
+ // no warning here: we just skip it
+ continue
+ }
+ primary.Schemes = append(primary.Schemes, v)
+ }
+
+ return []string{}
+}
+
+func mergeSwaggerProps(primary *spec.Swagger, m *spec.Swagger) []string {
+ var skipped, skippedInfo, skippedDocs []string
+
+ primary.Extensions, skipped = mergeExtensions(primary.Extensions, m.Extensions)
+
+ // merging details in swagger top properties
+ if primary.Host == "" {
+ primary.Host = m.Host
+ }
+
+ if primary.BasePath == "" {
+ primary.BasePath = m.BasePath
+ }
+
+ if primary.Info == nil {
+ primary.Info = m.Info
+ } else if m.Info != nil {
+ skippedInfo = mergeInfo(primary.Info, m.Info)
+ skipped = append(skipped, skippedInfo...)
+ }
+
+ if primary.ExternalDocs == nil {
+ primary.ExternalDocs = m.ExternalDocs
+ } else if m.ExternalDocs != nil {
+ skippedDocs = mergeExternalDocs(primary.ExternalDocs, m.ExternalDocs)
+ skipped = append(skipped, skippedDocs...)
+ }
+
+ return skipped
+}
+
+// nolint: unparam
+func mergeExternalDocs(primary *spec.ExternalDocumentation, m *spec.ExternalDocumentation) []string {
+ if primary.Description == "" {
+ primary.Description = m.Description
+ }
+
+ if primary.URL == "" {
+ primary.URL = m.URL
+ }
+
+ return nil
+}
+
+func mergeInfo(primary *spec.Info, m *spec.Info) []string {
+ var sk, skipped []string
+
+ primary.Extensions, sk = mergeExtensions(primary.Extensions, m.Extensions)
+ skipped = append(skipped, sk...)
+
+ if primary.Description == "" {
+ primary.Description = m.Description
+ }
+
+ if primary.Title == "" {
+ primary.Title = m.Title
+ }
+
+ if primary.TermsOfService == "" {
+ primary.TermsOfService = m.TermsOfService
+ }
+
+ if primary.Version == "" {
+ primary.Version = m.Version
+ }
+
+ if primary.Contact == nil {
+ primary.Contact = m.Contact
+ } else if m.Contact != nil {
+ var csk []string
+ primary.Contact.Extensions, csk = mergeExtensions(primary.Contact.Extensions, m.Contact.Extensions)
+ skipped = append(skipped, csk...)
+
+ if primary.Contact.Name == "" {
+ primary.Contact.Name = m.Contact.Name
+ }
+
+ if primary.Contact.URL == "" {
+ primary.Contact.URL = m.Contact.URL
+ }
+
+ if primary.Contact.Email == "" {
+ primary.Contact.Email = m.Contact.Email
+ }
+ }
+
+ if primary.License == nil {
+ primary.License = m.License
+ } else if m.License != nil {
+ var lsk []string
+ primary.License.Extensions, lsk = mergeExtensions(primary.License.Extensions, m.License.Extensions)
+ skipped = append(skipped, lsk...)
+
+ if primary.License.Name == "" {
+ primary.License.Name = m.License.Name
+ }
+
+ if primary.License.URL == "" {
+ primary.License.URL = m.License.URL
+ }
+ }
+
+ return skipped
+}
+
+func mergeExtensions(primary spec.Extensions, m spec.Extensions) (result spec.Extensions, skipped []string) {
+ if primary == nil {
+ result = m
+
+ return
+ }
+
+ if m == nil {
+ result = primary
+
+ return
+ }
+
+ result = primary
+ for k, v := range m {
+ if _, found := primary[k]; found {
+ skipped = append(skipped, k)
+
+ continue
+ }
+
+ primary[k] = v
+ }
+
+ return
+}
+
+func initPrimary(primary *spec.Swagger) {
+ if primary.SecurityDefinitions == nil {
+ primary.SecurityDefinitions = make(map[string]*spec.SecurityScheme)
+ }
+
+ if primary.Security == nil {
+ primary.Security = make([]map[string][]string, 0, 10)
+ }
+
+ if primary.Produces == nil {
+ primary.Produces = make([]string, 0, 10)
+ }
+
+ if primary.Consumes == nil {
+ primary.Consumes = make([]string, 0, 10)
+ }
+
+ if primary.Tags == nil {
+ primary.Tags = make([]spec.Tag, 0, 10)
+ }
+
+ if primary.Schemes == nil {
+ primary.Schemes = make([]string, 0, 10)
+ }
+
+ if primary.Paths == nil {
+ primary.Paths = &spec.Paths{Paths: make(map[string]spec.PathItem)}
+ }
+
+ if primary.Paths.Paths == nil {
+ primary.Paths.Paths = make(map[string]spec.PathItem)
+ }
+
+ if primary.Definitions == nil {
+ primary.Definitions = make(spec.Definitions)
+ }
+
+ if primary.Parameters == nil {
+ primary.Parameters = make(map[string]spec.Parameter)
+ }
+
+ if primary.Responses == nil {
+ primary.Responses = make(map[string]spec.Response)
+ }
+}
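+
+// A minimal sketch of driving the helpers above through the exported Mixin
+// entry point (primarySpec, mixinA and mixinB are assumed *spec.Swagger
+// values):
+//
+//	skipped := Mixin(primarySpec, mixinA, mixinB)
+//	for _, warning := range skipped {
+//		log.Println(warning)
+//	}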
diff --git a/vendor/github.com/go-openapi/analysis/schema.go b/vendor/github.com/go-openapi/analysis/schema.go
new file mode 100644
index 000000000..fc055095c
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/schema.go
@@ -0,0 +1,256 @@
+package analysis
+
+import (
+ "fmt"
+
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+)
+
+// SchemaOpts configures the schema analyzer
+type SchemaOpts struct {
+ Schema *spec.Schema
+ Root interface{}
+ BasePath string
+ _ struct{} // forces keyed struct literals
+}
+
+// Schema runs a schema analysis and classifies the schema according to known
+// patterns.
+func Schema(opts SchemaOpts) (*AnalyzedSchema, error) {
+ if opts.Schema == nil {
+ return nil, fmt.Errorf("no schema to analyze")
+ }
+
+ a := &AnalyzedSchema{
+ schema: opts.Schema,
+ root: opts.Root,
+ basePath: opts.BasePath,
+ }
+
+ a.initializeFlags()
+ a.inferKnownType()
+ a.inferEnum()
+ a.inferBaseType()
+
+ if err := a.inferMap(); err != nil {
+ return nil, err
+ }
+ if err := a.inferArray(); err != nil {
+ return nil, err
+ }
+
+ a.inferTuple()
+
+ if err := a.inferFromRef(); err != nil {
+ return nil, err
+ }
+
+ a.inferSimpleSchema()
+
+ return a, nil
+}
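+
+// A minimal sketch of invoking the analyzer (spec.StringProperty is just a
+// convenient way to build a *spec.Schema; any schema works):
+//
+//	an, err := Schema(SchemaOpts{Schema: spec.StringProperty()})
+//	if err != nil {
+//		return err
+//	}
+//	_ = an.IsKnownType // true for a plain string schema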
+
+// AnalyzedSchema indicates what the schema represents
+type AnalyzedSchema struct {
+ schema *spec.Schema
+ root interface{}
+ basePath string
+
+ hasProps bool
+ hasAllOf bool
+ hasItems bool
+ hasAdditionalProps bool
+ hasAdditionalItems bool
+ hasRef bool
+
+ IsKnownType bool
+ IsSimpleSchema bool
+ IsArray bool
+ IsSimpleArray bool
+ IsMap bool
+ IsSimpleMap bool
+ IsExtendedObject bool
+ IsTuple bool
+ IsTupleWithExtra bool
+ IsBaseType bool
+ IsEnum bool
+}
+
+// inherits copies value fields from other onto this schema.
+func (a *AnalyzedSchema) inherits(other *AnalyzedSchema) {
+ if other == nil {
+ return
+ }
+ a.hasProps = other.hasProps
+ a.hasAllOf = other.hasAllOf
+ a.hasItems = other.hasItems
+ a.hasAdditionalItems = other.hasAdditionalItems
+ a.hasAdditionalProps = other.hasAdditionalProps
+ a.hasRef = other.hasRef
+
+ a.IsKnownType = other.IsKnownType
+ a.IsSimpleSchema = other.IsSimpleSchema
+ a.IsArray = other.IsArray
+ a.IsSimpleArray = other.IsSimpleArray
+ a.IsMap = other.IsMap
+ a.IsSimpleMap = other.IsSimpleMap
+ a.IsExtendedObject = other.IsExtendedObject
+ a.IsTuple = other.IsTuple
+ a.IsTupleWithExtra = other.IsTupleWithExtra
+ a.IsBaseType = other.IsBaseType
+ a.IsEnum = other.IsEnum
+}
+
+func (a *AnalyzedSchema) inferFromRef() error {
+ if a.hasRef {
+ sch := new(spec.Schema)
+ sch.Ref = a.schema.Ref
+ err := spec.ExpandSchema(sch, a.root, nil)
+ if err != nil {
+ return err
+ }
+ rsch, err := Schema(SchemaOpts{
+ Schema: sch,
+ Root: a.root,
+ BasePath: a.basePath,
+ })
+ if err != nil {
+ // NOTE(fredbi): currently the only cause for errors is
+ // unresolved ref. Since spec.ExpandSchema() expands the
+ // schema recursively, there is no chance to get there,
+ // until we add more causes for error in this schema analysis.
+ return err
+ }
+ a.inherits(rsch)
+ }
+
+ return nil
+}
+
+func (a *AnalyzedSchema) inferSimpleSchema() {
+ a.IsSimpleSchema = a.IsKnownType || a.IsSimpleArray || a.IsSimpleMap
+}
+
+func (a *AnalyzedSchema) inferKnownType() {
+ tpe := a.schema.Type
+ format := a.schema.Format
+ a.IsKnownType = tpe.Contains("boolean") ||
+ tpe.Contains("integer") ||
+ tpe.Contains("number") ||
+ tpe.Contains("string") ||
+ (format != "" && strfmt.Default.ContainsName(format)) ||
+ (a.isObjectType() && !a.hasProps && !a.hasAllOf && !a.hasAdditionalProps && !a.hasAdditionalItems)
+}
+
+func (a *AnalyzedSchema) inferMap() error {
+ if !a.isObjectType() {
+ return nil
+ }
+
+ hasExtra := a.hasProps || a.hasAllOf
+ a.IsMap = a.hasAdditionalProps && !hasExtra
+ a.IsExtendedObject = a.hasAdditionalProps && hasExtra
+
+ if !a.IsMap {
+ return nil
+ }
+
+ // maps
+ if a.schema.AdditionalProperties.Schema != nil {
+ msch, err := Schema(SchemaOpts{
+ Schema: a.schema.AdditionalProperties.Schema,
+ Root: a.root,
+ BasePath: a.basePath,
+ })
+ if err != nil {
+ return err
+ }
+ a.IsSimpleMap = msch.IsSimpleSchema
+ } else if a.schema.AdditionalProperties.Allows {
+ a.IsSimpleMap = true
+ }
+
+ return nil
+}
+
+func (a *AnalyzedSchema) inferArray() error {
+ // An array has Items defined as a single object schema; otherwise we qualify this JSON array as a tuple
+ // (yes, even if the Items array contains only one element).
+ // Arrays in JSON schema may be unrestricted (i.e. no Items specified).
+ // Note that arrays in Swagger MUST have Items. Nonetheless, we analyze unrestricted arrays.
+ //
+ // NOTE: the spec package misses the distinction between
+ // items: [] and items: {}, so we consider both to be arrays here.
+ a.IsArray = a.isArrayType() && (a.schema.Items == nil || a.schema.Items.Schemas == nil)
+ if a.IsArray && a.hasItems {
+ if a.schema.Items.Schema != nil {
+ itsch, err := Schema(SchemaOpts{
+ Schema: a.schema.Items.Schema,
+ Root: a.root,
+ BasePath: a.basePath,
+ })
+ if err != nil {
+ return err
+ }
+
+ a.IsSimpleArray = itsch.IsSimpleSchema
+ }
+ }
+
+ if a.IsArray && !a.hasItems {
+ a.IsSimpleArray = true
+ }
+
+ return nil
+}
+
+func (a *AnalyzedSchema) inferTuple() {
+ tuple := a.hasItems && a.schema.Items.Schemas != nil
+ a.IsTuple = tuple && !a.hasAdditionalItems
+ a.IsTupleWithExtra = tuple && a.hasAdditionalItems
+}
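+
+// To illustrate the array/tuple distinction above with hedged JSON examples:
+//
+//	{"type": "array", "items": {"type": "string"}}   // single schema: array
+//	{"type": "array", "items": [{"type": "string"}]} // list of schemas: tuple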
+
+func (a *AnalyzedSchema) inferBaseType() {
+ if a.isObjectType() {
+ a.IsBaseType = a.schema.Discriminator != ""
+ }
+}
+
+func (a *AnalyzedSchema) inferEnum() {
+ a.IsEnum = len(a.schema.Enum) > 0
+}
+
+func (a *AnalyzedSchema) initializeFlags() {
+ a.hasProps = len(a.schema.Properties) > 0
+ a.hasAllOf = len(a.schema.AllOf) > 0
+ a.hasRef = a.schema.Ref.String() != ""
+
+ a.hasItems = a.schema.Items != nil &&
+ (a.schema.Items.Schema != nil || len(a.schema.Items.Schemas) > 0)
+
+ a.hasAdditionalProps = a.schema.AdditionalProperties != nil &&
+ (a.schema.AdditionalProperties.Schema != nil || a.schema.AdditionalProperties.Allows)
+
+ a.hasAdditionalItems = a.schema.AdditionalItems != nil &&
+ (a.schema.AdditionalItems.Schema != nil || a.schema.AdditionalItems.Allows)
+}
+
+func (a *AnalyzedSchema) isObjectType() bool {
+ return !a.hasRef && (a.schema.Type == nil || a.schema.Type.Contains("") || a.schema.Type.Contains("object"))
+}
+
+func (a *AnalyzedSchema) isArrayType() bool {
+ return !a.hasRef && (a.schema.Type != nil && a.schema.Type.Contains("array"))
+}
+
+// isAnalyzedAsComplex determines if an analyzed schema is eligible to flattening (i.e. it is "complex").
+//
+// Complex means the schema is NOT any of:
+// - a simple type (primitive)
+// - an array of something (items are possibly complex; if this is the case, items will generate a definition)
+// - a map of something (additionalProperties are possibly complex; if this is the case, additionalProperties will
+// generate a definition)
+func (a *AnalyzedSchema) isAnalyzedAsComplex() bool {
+ return !a.IsSimpleSchema && !a.IsArray && !a.IsMap
+}
diff --git a/vendor/github.com/go-openapi/errors/.gitattributes b/vendor/github.com/go-openapi/errors/.gitattributes
new file mode 100644
index 000000000..a0717e4b3
--- /dev/null
+++ b/vendor/github.com/go-openapi/errors/.gitattributes
@@ -0,0 +1 @@
+*.go text eol=lf \ No newline at end of file
diff --git a/vendor/github.com/go-openapi/errors/.gitignore b/vendor/github.com/go-openapi/errors/.gitignore
new file mode 100644
index 000000000..dd91ed6a0
--- /dev/null
+++ b/vendor/github.com/go-openapi/errors/.gitignore
@@ -0,0 +1,2 @@
+secrets.yml
+coverage.out
diff --git a/vendor/github.com/go-openapi/errors/.golangci.yml b/vendor/github.com/go-openapi/errors/.golangci.yml
new file mode 100644
index 000000000..4e1fc0c7d
--- /dev/null
+++ b/vendor/github.com/go-openapi/errors/.golangci.yml
@@ -0,0 +1,48 @@
+linters-settings:
+ govet:
+ check-shadowing: true
+ golint:
+ min-confidence: 0
+ gocyclo:
+ min-complexity: 30
+ maligned:
+ suggest-new: true
+ dupl:
+ threshold: 100
+ goconst:
+ min-len: 2
+ min-occurrences: 4
+linters:
+ enable-all: true
+ disable:
+ - maligned
+ - lll
+ - gochecknoglobals
+ - godox
+ - gocognit
+ - whitespace
+ - wsl
+ - funlen
+ - gochecknoinits
+ - scopelint
+ - wrapcheck
+ - exhaustivestruct
+ - exhaustive
+ - nlreturn
+ - testpackage
+ - gci
+ - gofumpt
+ - goerr113
+ - gomnd
+ - tparallel
+ - nestif
+ - godot
+ - errorlint
+ - paralleltest
+ - cyclop
+ - errname
+ - varnamelen
+ - exhaustruct
+ - maintidx
diff --git a/vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..9322b065e
--- /dev/null
+++ b/vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md
@@ -0,0 +1,74 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of experience,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at ivan+abuse@flanders.co.nz. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/errors/LICENSE b/vendor/github.com/go-openapi/errors/LICENSE
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/vendor/github.com/go-openapi/errors/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/go-openapi/errors/README.md b/vendor/github.com/go-openapi/errors/README.md
new file mode 100644
index 000000000..4aac049e6
--- /dev/null
+++ b/vendor/github.com/go-openapi/errors/README.md
@@ -0,0 +1,11 @@
+# OpenAPI errors
+
+[![Build Status](https://travis-ci.org/go-openapi/errors.svg?branch=master)](https://travis-ci.org/go-openapi/errors)
+[![codecov](https://codecov.io/gh/go-openapi/errors/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/errors)
+[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
+[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/errors/master/LICENSE)
+[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/errors.svg)](https://pkg.go.dev/github.com/go-openapi/errors)
+[![GolangCI](https://golangci.com/badges/github.com/go-openapi/errors.svg)](https://golangci.com)
+[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/errors)](https://goreportcard.com/report/github.com/go-openapi/errors)
+
+Shared errors and error interface used throughout the various libraries found in the go-openapi toolkit.
diff --git a/vendor/github.com/go-openapi/errors/api.go b/vendor/github.com/go-openapi/errors/api.go
new file mode 100644
index 000000000..c13f3435f
--- /dev/null
+++ b/vendor/github.com/go-openapi/errors/api.go
@@ -0,0 +1,182 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package errors
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "reflect"
+ "strings"
+)
+
+// DefaultHTTPCode is used when the error Code cannot be used as an HTTP code.
+var DefaultHTTPCode = http.StatusUnprocessableEntity
+
+// Error represents an error interface that all swagger framework errors implement.
+type Error interface {
+ error
+ Code() int32
+}
+
+type apiError struct {
+ code int32
+ message string
+}
+
+func (a *apiError) Error() string {
+ return a.message
+}
+
+func (a *apiError) Code() int32 {
+ return a.code
+}
+
+// MarshalJSON implements the JSON encoding interface
+func (a apiError) MarshalJSON() ([]byte, error) {
+ return json.Marshal(map[string]interface{}{
+ "code": a.code,
+ "message": a.message,
+ })
+}
+
+// New creates a new API error with a code and a message
+func New(code int32, message string, args ...interface{}) Error {
+ if len(args) > 0 {
+ return &apiError{code, fmt.Sprintf(message, args...)}
+ }
+ return &apiError{code, message}
+}
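+
+// A hedged sketch of constructing an error (the code and message are
+// illustrative):
+//
+//	err := New(http.StatusConflict, "user %q already exists", "bob")
+//	// err.Code() == 409, err.Error() == `user "bob" already exists`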
+
+// NotFound creates a new not found error
+func NotFound(message string, args ...interface{}) Error {
+ if message == "" {
+ message = "Not found"
+ }
+ return New(http.StatusNotFound, fmt.Sprintf(message, args...))
+}
+
+// NotImplemented creates a new not implemented error
+func NotImplemented(message string) Error {
+ return New(http.StatusNotImplemented, message)
+}
+
+// MethodNotAllowedError represents an error for when the path matches but the method doesn't
+type MethodNotAllowedError struct {
+ code int32
+ Allowed []string
+ message string
+}
+
+func (m *MethodNotAllowedError) Error() string {
+ return m.message
+}
+
+// Code returns the error code.
+func (m *MethodNotAllowedError) Code() int32 {
+ return m.code
+}
+
+// MarshalJSON implements the JSON encoding interface
+func (m MethodNotAllowedError) MarshalJSON() ([]byte, error) {
+ return json.Marshal(map[string]interface{}{
+ "code": m.code,
+ "message": m.message,
+ "allowed": m.Allowed,
+ })
+}
+
+func errorAsJSON(err Error) []byte {
+ //nolint:errchkjson
+ b, _ := json.Marshal(struct {
+ Code int32 `json:"code"`
+ Message string `json:"message"`
+ }{err.Code(), err.Error()})
+ return b
+}
+
+func flattenComposite(errs *CompositeError) *CompositeError {
+ var res []error
+ for _, er := range errs.Errors {
+ switch e := er.(type) {
+ case *CompositeError:
+ if e != nil && len(e.Errors) > 0 {
+ flat := flattenComposite(e)
+ if len(flat.Errors) > 0 {
+ res = append(res, flat.Errors...)
+ }
+ }
+ default:
+ if e != nil {
+ res = append(res, e)
+ }
+ }
+ }
+ return CompositeValidationError(res...)
+}
+
+// MethodNotAllowed creates a new method not allowed error
+func MethodNotAllowed(requested string, allow []string) Error {
+ msg := fmt.Sprintf("method %s is not allowed, but [%s] are", requested, strings.Join(allow, ","))
+ return &MethodNotAllowedError{code: http.StatusMethodNotAllowed, Allowed: allow, message: msg}
+}
+
+// ServeError implements the error handler interface: it writes the given error out as JSON, with a matching HTTP status code.
+func ServeError(rw http.ResponseWriter, r *http.Request, err error) {
+ rw.Header().Set("Content-Type", "application/json")
+ switch e := err.(type) {
+ case *CompositeError:
+ er := flattenComposite(e)
+ // strips composite errors to first element only
+ if len(er.Errors) > 0 {
+ ServeError(rw, r, er.Errors[0])
+ } else {
+ // guard against empty CompositeError (invalid construct)
+ ServeError(rw, r, nil)
+ }
+ case *MethodNotAllowedError:
+ rw.Header().Add("Allow", strings.Join(e.Allowed, ","))
+ rw.WriteHeader(asHTTPCode(int(e.Code())))
+ if r == nil || r.Method != http.MethodHead {
+ _, _ = rw.Write(errorAsJSON(e))
+ }
+ case Error:
+ value := reflect.ValueOf(e)
+ if value.Kind() == reflect.Ptr && value.IsNil() {
+ rw.WriteHeader(http.StatusInternalServerError)
+ _, _ = rw.Write(errorAsJSON(New(http.StatusInternalServerError, "Unknown error")))
+ return
+ }
+ rw.WriteHeader(asHTTPCode(int(e.Code())))
+ if r == nil || r.Method != http.MethodHead {
+ _, _ = rw.Write(errorAsJSON(e))
+ }
+ case nil:
+ rw.WriteHeader(http.StatusInternalServerError)
+ _, _ = rw.Write(errorAsJSON(New(http.StatusInternalServerError, "Unknown error")))
+ default:
+ rw.WriteHeader(http.StatusInternalServerError)
+ if r == nil || r.Method != http.MethodHead {
+ _, _ = rw.Write(errorAsJSON(New(http.StatusInternalServerError, err.Error())))
+ }
+ }
+}
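+
+// A minimal sketch of using ServeError as a catch-all error writer in a
+// handler (doWork is an assumed application function returning an error):
+//
+//	http.HandleFunc("/thing", func(rw http.ResponseWriter, r *http.Request) {
+//		if err := doWork(); err != nil {
+//			ServeError(rw, r, err)
+//			return
+//		}
+//		rw.WriteHeader(http.StatusNoContent)
+//	})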
+
+func asHTTPCode(input int) int {
+ if input >= 600 {
+ return DefaultHTTPCode
+ }
+ return input
+}
diff --git a/vendor/github.com/go-openapi/errors/auth.go b/vendor/github.com/go-openapi/errors/auth.go
new file mode 100644
index 000000000..0545b501b
--- /dev/null
+++ b/vendor/github.com/go-openapi/errors/auth.go
@@ -0,0 +1,22 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package errors
+
+import "net/http"
+
+// Unauthenticated returns an unauthenticated error
+func Unauthenticated(scheme string) Error {
+ return New(http.StatusUnauthorized, "unauthenticated for %s", scheme)
+}
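+
+// Sketch:
+//
+//	err := Unauthenticated("bearer")
+//	// err.Code() == 401, err.Error() == "unauthenticated for bearer"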
diff --git a/vendor/github.com/go-openapi/errors/doc.go b/vendor/github.com/go-openapi/errors/doc.go
new file mode 100644
index 000000000..af01190ce
--- /dev/null
+++ b/vendor/github.com/go-openapi/errors/doc.go
@@ -0,0 +1,26 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package errors provides an Error interface and several concrete types
+implementing this interface to manage API errors and JSON-schema validation
+errors.
+
+A middleware handler ServeError() is provided to serve the error types
+it defines.
+
+It is used throughout the various go-openapi toolkit libraries
+(https://github.com/go-openapi).
+*/
+package errors
diff --git a/vendor/github.com/go-openapi/errors/headers.go b/vendor/github.com/go-openapi/errors/headers.go
new file mode 100644
index 000000000..dfebe8f95
--- /dev/null
+++ b/vendor/github.com/go-openapi/errors/headers.go
@@ -0,0 +1,103 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package errors
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/http"
+)
+
+// Validation represents a failure of a precondition
+type Validation struct {
+ code int32
+ Name string
+ In string
+ Value interface{}
+ message string
+ Values []interface{}
+}
+
+func (e *Validation) Error() string {
+ return e.message
+}
+
+// Code returns the error code.
+func (e *Validation) Code() int32 {
+ return e.code
+}
+
+// MarshalJSON implements the JSON encoding interface
+func (e Validation) MarshalJSON() ([]byte, error) {
+ return json.Marshal(map[string]interface{}{
+ "code": e.code,
+ "message": e.message,
+ "in": e.In,
+ "name": e.Name,
+ "value": e.Value,
+ "values": e.Values,
+ })
+}
+
+// ValidateName sets the name for a validation or updates it for a nested property
+func (e *Validation) ValidateName(name string) *Validation {
+ if name != "" {
+ if e.Name == "" {
+ e.Name = name
+ e.message = name + e.message
+ } else {
+ e.Name = name + "." + e.Name
+ e.message = name + "." + e.message
+ }
+ }
+ return e
+}
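+
+// A sketch of nesting a field error under its parent property (Required is
+// defined in this package's schema.go):
+//
+//	e := Required("street", "body", nil) // "street in body is required"
+//	e = e.ValidateName("address")        // "address.street in body is required"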
+
+const (
+ contentTypeFail = `unsupported media type %q, only %v are allowed`
+ responseFormatFail = `unsupported media type requested, only %v are available`
+)
+
+// InvalidContentType error for an invalid content type
+func InvalidContentType(value string, allowed []string) *Validation {
+ values := make([]interface{}, 0, len(allowed))
+ for _, v := range allowed {
+ values = append(values, v)
+ }
+ return &Validation{
+ code: http.StatusUnsupportedMediaType,
+ Name: "Content-Type",
+ In: "header",
+ Value: value,
+ Values: values,
+ message: fmt.Sprintf(contentTypeFail, value, allowed),
+ }
+}
+
+// InvalidResponseFormat error for an unacceptable response format request
+func InvalidResponseFormat(value string, allowed []string) *Validation {
+ values := make([]interface{}, 0, len(allowed))
+ for _, v := range allowed {
+ values = append(values, v)
+ }
+ return &Validation{
+ code: http.StatusNotAcceptable,
+ Name: "Accept",
+ In: "header",
+ Value: value,
+ Values: values,
+ message: fmt.Sprintf(responseFormatFail, allowed),
+ }
+}
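+
+// Sketch:
+//
+//	err := InvalidContentType("text/xml", []string{"application/json"})
+//	// err.Code() == 415 (http.StatusUnsupportedMediaType)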
diff --git a/vendor/github.com/go-openapi/errors/middleware.go b/vendor/github.com/go-openapi/errors/middleware.go
new file mode 100644
index 000000000..963472d1f
--- /dev/null
+++ b/vendor/github.com/go-openapi/errors/middleware.go
@@ -0,0 +1,50 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package errors
+
+import (
+ "bytes"
+ "fmt"
+ "strings"
+)
+
+// APIVerificationFailed is an error that contains all the missing info for a mismatched section
+// between the API registrations and the API spec.
+type APIVerificationFailed struct {
+ Section string `json:"section,omitempty"`
+ MissingSpecification []string `json:"missingSpecification,omitempty"`
+ MissingRegistration []string `json:"missingRegistration,omitempty"`
+}
+
+func (v *APIVerificationFailed) Error() string {
+ buf := bytes.NewBuffer(nil)
+
+ hasRegMissing := len(v.MissingRegistration) > 0
+ hasSpecMissing := len(v.MissingSpecification) > 0
+
+ if hasRegMissing {
+ buf.WriteString(fmt.Sprintf("missing [%s] %s registrations", strings.Join(v.MissingRegistration, ", "), v.Section))
+ }
+
+ if hasRegMissing && hasSpecMissing {
+ buf.WriteString("\n")
+ }
+
+ if hasSpecMissing {
+ buf.WriteString(fmt.Sprintf("missing from spec file [%s] %s", strings.Join(v.MissingSpecification, ", "), v.Section))
+ }
+
+ return buf.String()
+}
diff --git a/vendor/github.com/go-openapi/errors/parsing.go b/vendor/github.com/go-openapi/errors/parsing.go
new file mode 100644
index 000000000..5096e1ea7
--- /dev/null
+++ b/vendor/github.com/go-openapi/errors/parsing.go
@@ -0,0 +1,78 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package errors
+
+import (
+ "encoding/json"
+ "fmt"
+)
+
+// ParseError represents a parsing error
+type ParseError struct {
+ code int32
+ Name string
+ In string
+ Value string
+ Reason error
+ message string
+}
+
+func (e *ParseError) Error() string {
+ return e.message
+}
+
+// Code returns the http status code for this error
+func (e *ParseError) Code() int32 {
+ return e.code
+}
+
+// MarshalJSON implements the JSON encoding interface
+func (e ParseError) MarshalJSON() ([]byte, error) {
+ var reason string
+ if e.Reason != nil {
+ reason = e.Reason.Error()
+ }
+ return json.Marshal(map[string]interface{}{
+ "code": e.code,
+ "message": e.message,
+ "in": e.In,
+ "name": e.Name,
+ "value": e.Value,
+ "reason": reason,
+ })
+}
+
+const (
+ parseErrorTemplContent = `parsing %s %s from %q failed, because %s`
+ parseErrorTemplContentNoIn = `parsing %s from %q failed, because %s`
+)
+
+// NewParseError creates a new parse error
+func NewParseError(name, in, value string, reason error) *ParseError {
+ var msg string
+ if in == "" {
+ msg = fmt.Sprintf(parseErrorTemplContentNoIn, name, value, reason)
+ } else {
+ msg = fmt.Sprintf(parseErrorTemplContent, name, in, value, reason)
+ }
+ return &ParseError{
+ code: 400,
+ Name: name,
+ In: in,
+ Value: value,
+ Reason: reason,
+ message: msg,
+ }
+}
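+
+// A hedged sketch (strconv.ErrSyntax stands in for any underlying parse
+// failure):
+//
+//	err := NewParseError("limit", "query", "abc", strconv.ErrSyntax)
+//	// `parsing limit query from "abc" failed, because invalid syntax`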
diff --git a/vendor/github.com/go-openapi/errors/schema.go b/vendor/github.com/go-openapi/errors/schema.go
new file mode 100644
index 000000000..da5f6c78c
--- /dev/null
+++ b/vendor/github.com/go-openapi/errors/schema.go
@@ -0,0 +1,611 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package errors
+
+import (
+ "encoding/json"
+ "fmt"
+ "strings"
+)
+
+const (
+ invalidType = "%s is an invalid type name"
+ typeFail = "%s in %s must be of type %s"
+ typeFailWithData = "%s in %s must be of type %s: %q"
+ typeFailWithError = "%s in %s must be of type %s, because: %s"
+ requiredFail = "%s in %s is required"
+ readOnlyFail = "%s in %s is readOnly"
+ tooLongMessage = "%s in %s should be at most %d chars long"
+ tooShortMessage = "%s in %s should be at least %d chars long"
+ patternFail = "%s in %s should match '%s'"
+ enumFail = "%s in %s should be one of %v"
+ multipleOfFail = "%s in %s should be a multiple of %v"
+ maxIncFail = "%s in %s should be less than or equal to %v"
+ maxExcFail = "%s in %s should be less than %v"
+ minIncFail = "%s in %s should be greater than or equal to %v"
+ minExcFail = "%s in %s should be greater than %v"
+ uniqueFail = "%s in %s shouldn't contain duplicates"
+ maxItemsFail = "%s in %s should have at most %d items"
+ minItemsFail = "%s in %s should have at least %d items"
+ typeFailNoIn = "%s must be of type %s"
+ typeFailWithDataNoIn = "%s must be of type %s: %q"
+ typeFailWithErrorNoIn = "%s must be of type %s, because: %s"
+ requiredFailNoIn = "%s is required"
+ readOnlyFailNoIn = "%s is readOnly"
+ tooLongMessageNoIn = "%s should be at most %d chars long"
+ tooShortMessageNoIn = "%s should be at least %d chars long"
+ patternFailNoIn = "%s should match '%s'"
+ enumFailNoIn = "%s should be one of %v"
+ multipleOfFailNoIn = "%s should be a multiple of %v"
+ maxIncFailNoIn = "%s should be less than or equal to %v"
+ maxExcFailNoIn = "%s should be less than %v"
+ minIncFailNoIn = "%s should be greater than or equal to %v"
+ minExcFailNoIn = "%s should be greater than %v"
+ uniqueFailNoIn = "%s shouldn't contain duplicates"
+ maxItemsFailNoIn = "%s should have at most %d items"
+ minItemsFailNoIn = "%s should have at least %d items"
+ noAdditionalItems = "%s in %s can't have additional items"
+ noAdditionalItemsNoIn = "%s can't have additional items"
+ tooFewProperties = "%s in %s should have at least %d properties"
+ tooFewPropertiesNoIn = "%s should have at least %d properties"
+ tooManyProperties = "%s in %s should have at most %d properties"
+ tooManyPropertiesNoIn = "%s should have at most %d properties"
+ unallowedProperty = "%s.%s in %s is a forbidden property"
+ unallowedPropertyNoIn = "%s.%s is a forbidden property"
+ failedAllPatternProps = "%s.%s in %s failed all pattern properties"
+ failedAllPatternPropsNoIn = "%s.%s failed all pattern properties"
+ multipleOfMustBePositive = "factor MultipleOf declared for %s must be positive: %v"
+)
+
+// All code responses can be used to differentiate errors for different handling
+// by the consuming program
+const (
+ // CompositeErrorCode remains 422 for backwards-compatibility
+ // and to separate it from validation errors with cause
+ CompositeErrorCode = 422
+ // InvalidTypeCode is used for any subclass of invalid types
+ InvalidTypeCode = 600 + iota
+ RequiredFailCode
+ TooLongFailCode
+ TooShortFailCode
+ PatternFailCode
+ EnumFailCode
+ MultipleOfFailCode
+ MaxFailCode
+ MinFailCode
+ UniqueFailCode
+ MaxItemsFailCode
+ MinItemsFailCode
+ NoAdditionalItemsCode
+ TooFewPropertiesCode
+ TooManyPropertiesCode
+ UnallowedPropertyCode
+ FailedAllPatternPropsCode
+ MultipleOfMustBePositiveCode
+ ReadOnlyFailCode
+)
+
+// CompositeError is an error that groups several errors together
+type CompositeError struct {
+ Errors []error
+ code int32
+ message string
+}
+
+// Code for this error
+func (c *CompositeError) Code() int32 {
+ return c.code
+}
+
+func (c *CompositeError) Error() string {
+ if len(c.Errors) > 0 {
+ msgs := []string{c.message + ":"}
+ for _, e := range c.Errors {
+ msgs = append(msgs, e.Error())
+ }
+ return strings.Join(msgs, "\n")
+ }
+ return c.message
+}
+
+// MarshalJSON implements the JSON encoding interface
+func (c CompositeError) MarshalJSON() ([]byte, error) {
+ return json.Marshal(map[string]interface{}{
+ "code": c.code,
+ "message": c.message,
+ "errors": c.Errors,
+ })
+}
+
+// CompositeValidationError an error to wrap a bunch of other errors
+func CompositeValidationError(errors ...error) *CompositeError {
+ return &CompositeError{
+ code: CompositeErrorCode,
+ Errors: append([]error{}, errors...),
+ message: "validation failure list",
+ }
+}
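+
+// A sketch of grouping several field failures into one composite (Required
+// and TooShort are defined later in this file):
+//
+//	err := CompositeValidationError(
+//		Required("name", "body", nil),
+//		TooShort("bio", "body", 10, "hi"),
+//	)
+//	// err.Code() == 422; err.Error() lists each failure on its own line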
+
+// ValidateName recursively sets the name for all validations or updates them for nested properties
+func (c *CompositeError) ValidateName(name string) *CompositeError {
+ for i, e := range c.Errors {
+ if ve, ok := e.(*Validation); ok {
+ c.Errors[i] = ve.ValidateName(name)
+ } else if ce, ok := e.(*CompositeError); ok {
+ c.Errors[i] = ce.ValidateName(name)
+ }
+ }
+
+ return c
+}
+
+// FailedAllPatternProperties an error for when a property fails to match any of the pattern properties
+func FailedAllPatternProperties(name, in, key string) *Validation {
+ msg := fmt.Sprintf(failedAllPatternProps, name, key, in)
+ if in == "" {
+ msg = fmt.Sprintf(failedAllPatternPropsNoIn, name, key)
+ }
+ return &Validation{
+ code: FailedAllPatternPropsCode,
+ Name: name,
+ In: in,
+ Value: key,
+ message: msg,
+ }
+}
+
+// PropertyNotAllowed an error for when a property is not allowed
+func PropertyNotAllowed(name, in, key string) *Validation {
+ msg := fmt.Sprintf(unallowedProperty, name, key, in)
+ if in == "" {
+ msg = fmt.Sprintf(unallowedPropertyNoIn, name, key)
+ }
+ return &Validation{
+ code: UnallowedPropertyCode,
+ Name: name,
+ In: in,
+ Value: key,
+ message: msg,
+ }
+}
+
+// TooFewProperties an error for an object with too few properties
+func TooFewProperties(name, in string, n int64) *Validation {
+ msg := fmt.Sprintf(tooFewProperties, name, in, n)
+ if in == "" {
+ msg = fmt.Sprintf(tooFewPropertiesNoIn, name, n)
+ }
+ return &Validation{
+ code: TooFewPropertiesCode,
+ Name: name,
+ In: in,
+ Value: n,
+ message: msg,
+ }
+}
+
+// TooManyProperties an error for an object with too many properties
+func TooManyProperties(name, in string, n int64) *Validation {
+ msg := fmt.Sprintf(tooManyProperties, name, in, n)
+ if in == "" {
+ msg = fmt.Sprintf(tooManyPropertiesNoIn, name, n)
+ }
+ return &Validation{
+ code: TooManyPropertiesCode,
+ Name: name,
+ In: in,
+ Value: n,
+ message: msg,
+ }
+}
+
+// AdditionalItemsNotAllowed an error for invalid additional items
+func AdditionalItemsNotAllowed(name, in string) *Validation {
+ msg := fmt.Sprintf(noAdditionalItems, name, in)
+ if in == "" {
+ msg = fmt.Sprintf(noAdditionalItemsNoIn, name)
+ }
+ return &Validation{
+ code: NoAdditionalItemsCode,
+ Name: name,
+ In: in,
+ message: msg,
+ }
+}
+
+// InvalidCollectionFormat another flavor of invalid type error
+func InvalidCollectionFormat(name, in, format string) *Validation {
+ return &Validation{
+ code: InvalidTypeCode,
+ Name: name,
+ In: in,
+ Value: format,
+ message: fmt.Sprintf("the collection format %q is not supported for the %s param %q", format, in, name),
+ }
+}
+
+// InvalidTypeName an error for when the type is invalid
+func InvalidTypeName(typeName string) *Validation {
+ return &Validation{
+ code: InvalidTypeCode,
+ Value: typeName,
+ message: fmt.Sprintf(invalidType, typeName),
+ }
+}
+
+// InvalidType creates an error for when the type is invalid
+func InvalidType(name, in, typeName string, value interface{}) *Validation {
+ var message string
+
+ if in != "" {
+ switch value.(type) {
+ case string:
+ message = fmt.Sprintf(typeFailWithData, name, in, typeName, value)
+ case error:
+ message = fmt.Sprintf(typeFailWithError, name, in, typeName, value)
+ default:
+ message = fmt.Sprintf(typeFail, name, in, typeName)
+ }
+ } else {
+ switch value.(type) {
+ case string:
+ message = fmt.Sprintf(typeFailWithDataNoIn, name, typeName, value)
+ case error:
+ message = fmt.Sprintf(typeFailWithErrorNoIn, name, typeName, value)
+ default:
+ message = fmt.Sprintf(typeFailNoIn, name, typeName)
+ }
+ }
+
+ return &Validation{
+ code: InvalidTypeCode,
+ Name: name,
+ In: in,
+ Value: value,
+ message: message,
+ }
+}
+
+// DuplicateItems error for when an array contains duplicates
+func DuplicateItems(name, in string) *Validation {
+ msg := fmt.Sprintf(uniqueFail, name, in)
+ if in == "" {
+ msg = fmt.Sprintf(uniqueFailNoIn, name)
+ }
+ return &Validation{
+ code: UniqueFailCode,
+ Name: name,
+ In: in,
+ message: msg,
+ }
+}
+
+// TooManyItems error for when an array contains too many items
+func TooManyItems(name, in string, max int64, value interface{}) *Validation {
+ msg := fmt.Sprintf(maxItemsFail, name, in, max)
+ if in == "" {
+ msg = fmt.Sprintf(maxItemsFailNoIn, name, max)
+ }
+
+ return &Validation{
+ code: MaxItemsFailCode,
+ Name: name,
+ In: in,
+ Value: value,
+ message: msg,
+ }
+}
+
+// TooFewItems error for when an array contains too few items
+func TooFewItems(name, in string, min int64, value interface{}) *Validation {
+ msg := fmt.Sprintf(minItemsFail, name, in, min)
+ if in == "" {
+ msg = fmt.Sprintf(minItemsFailNoIn, name, min)
+ }
+ return &Validation{
+ code: MinItemsFailCode,
+ Name: name,
+ In: in,
+ Value: value,
+ message: msg,
+ }
+}
+
+// ExceedsMaximumInt error for when maximum validation fails
+func ExceedsMaximumInt(name, in string, max int64, exclusive bool, value interface{}) *Validation {
+ var message string
+ if in == "" {
+ m := maxIncFailNoIn
+ if exclusive {
+ m = maxExcFailNoIn
+ }
+ message = fmt.Sprintf(m, name, max)
+ } else {
+ m := maxIncFail
+ if exclusive {
+ m = maxExcFail
+ }
+ message = fmt.Sprintf(m, name, in, max)
+ }
+ return &Validation{
+ code: MaxFailCode,
+ Name: name,
+ In: in,
+ Value: value,
+ message: message,
+ }
+}
+
+// ExceedsMaximumUint error for when maximum validation fails
+func ExceedsMaximumUint(name, in string, max uint64, exclusive bool, value interface{}) *Validation {
+ var message string
+ if in == "" {
+ m := maxIncFailNoIn
+ if exclusive {
+ m = maxExcFailNoIn
+ }
+ message = fmt.Sprintf(m, name, max)
+ } else {
+ m := maxIncFail
+ if exclusive {
+ m = maxExcFail
+ }
+ message = fmt.Sprintf(m, name, in, max)
+ }
+ return &Validation{
+ code: MaxFailCode,
+ Name: name,
+ In: in,
+ Value: value,
+ message: message,
+ }
+}
+
+// ExceedsMaximum error for when maximum validation fails
+func ExceedsMaximum(name, in string, max float64, exclusive bool, value interface{}) *Validation {
+ var message string
+ if in == "" {
+ m := maxIncFailNoIn
+ if exclusive {
+ m = maxExcFailNoIn
+ }
+ message = fmt.Sprintf(m, name, max)
+ } else {
+ m := maxIncFail
+ if exclusive {
+ m = maxExcFail
+ }
+ message = fmt.Sprintf(m, name, in, max)
+ }
+ return &Validation{
+ code: MaxFailCode,
+ Name: name,
+ In: in,
+ Value: value,
+ message: message,
+ }
+}
+
+// ExceedsMinimumInt error for when minimum validation fails
+func ExceedsMinimumInt(name, in string, min int64, exclusive bool, value interface{}) *Validation {
+ var message string
+ if in == "" {
+ m := minIncFailNoIn
+ if exclusive {
+ m = minExcFailNoIn
+ }
+ message = fmt.Sprintf(m, name, min)
+ } else {
+ m := minIncFail
+ if exclusive {
+ m = minExcFail
+ }
+ message = fmt.Sprintf(m, name, in, min)
+ }
+ return &Validation{
+ code: MinFailCode,
+ Name: name,
+ In: in,
+ Value: value,
+ message: message,
+ }
+}
+
+// ExceedsMinimumUint error for when minimum validation fails
+func ExceedsMinimumUint(name, in string, min uint64, exclusive bool, value interface{}) *Validation {
+ var message string
+ if in == "" {
+ m := minIncFailNoIn
+ if exclusive {
+ m = minExcFailNoIn
+ }
+ message = fmt.Sprintf(m, name, min)
+ } else {
+ m := minIncFail
+ if exclusive {
+ m = minExcFail
+ }
+ message = fmt.Sprintf(m, name, in, min)
+ }
+ return &Validation{
+ code: MinFailCode,
+ Name: name,
+ In: in,
+ Value: value,
+ message: message,
+ }
+}
+
+// ExceedsMinimum error for when minimum validation fails
+func ExceedsMinimum(name, in string, min float64, exclusive bool, value interface{}) *Validation {
+ var message string
+ if in == "" {
+ m := minIncFailNoIn
+ if exclusive {
+ m = minExcFailNoIn
+ }
+ message = fmt.Sprintf(m, name, min)
+ } else {
+ m := minIncFail
+ if exclusive {
+ m = minExcFail
+ }
+ message = fmt.Sprintf(m, name, in, min)
+ }
+ return &Validation{
+ code: MinFailCode,
+ Name: name,
+ In: in,
+ Value: value,
+ message: message,
+ }
+}
+
+// NotMultipleOf error for when multiple of validation fails
+func NotMultipleOf(name, in string, multiple, value interface{}) *Validation {
+ var msg string
+ if in == "" {
+ msg = fmt.Sprintf(multipleOfFailNoIn, name, multiple)
+ } else {
+ msg = fmt.Sprintf(multipleOfFail, name, in, multiple)
+ }
+ return &Validation{
+ code: MultipleOfFailCode,
+ Name: name,
+ In: in,
+ Value: value,
+ message: msg,
+ }
+}
+
+// EnumFail error for when an enum validation fails
+func EnumFail(name, in string, value interface{}, values []interface{}) *Validation {
+ var msg string
+ if in == "" {
+ msg = fmt.Sprintf(enumFailNoIn, name, values)
+ } else {
+ msg = fmt.Sprintf(enumFail, name, in, values)
+ }
+
+ return &Validation{
+ code: EnumFailCode,
+ Name: name,
+ In: in,
+ Value: value,
+ Values: values,
+ message: msg,
+ }
+}
+
+// Required error for when a value is missing
+func Required(name, in string, value interface{}) *Validation {
+ var msg string
+ if in == "" {
+ msg = fmt.Sprintf(requiredFailNoIn, name)
+ } else {
+ msg = fmt.Sprintf(requiredFail, name, in)
+ }
+ return &Validation{
+ code: RequiredFailCode,
+ Name: name,
+ In: in,
+ Value: value,
+ message: msg,
+ }
+}
+
+// ReadOnly error for when a value is present in request
+func ReadOnly(name, in string, value interface{}) *Validation {
+ var msg string
+ if in == "" {
+ msg = fmt.Sprintf(readOnlyFailNoIn, name)
+ } else {
+ msg = fmt.Sprintf(readOnlyFail, name, in)
+ }
+ return &Validation{
+ code: ReadOnlyFailCode,
+ Name: name,
+ In: in,
+ Value: value,
+ message: msg,
+ }
+}
+
+// TooLong error for when a string is too long
+func TooLong(name, in string, max int64, value interface{}) *Validation {
+ var msg string
+ if in == "" {
+ msg = fmt.Sprintf(tooLongMessageNoIn, name, max)
+ } else {
+ msg = fmt.Sprintf(tooLongMessage, name, in, max)
+ }
+ return &Validation{
+ code: TooLongFailCode,
+ Name: name,
+ In: in,
+ Value: value,
+ message: msg,
+ }
+}
+
+// TooShort error for when a string is too short
+func TooShort(name, in string, min int64, value interface{}) *Validation {
+ var msg string
+ if in == "" {
+ msg = fmt.Sprintf(tooShortMessageNoIn, name, min)
+ } else {
+ msg = fmt.Sprintf(tooShortMessage, name, in, min)
+ }
+
+ return &Validation{
+ code: TooShortFailCode,
+ Name: name,
+ In: in,
+ Value: value,
+ message: msg,
+ }
+}
+
+// FailedPattern error for when a string fails a regex pattern match.
+// The pattern that is returned is the ECMA syntax version of the pattern, not the golang version.
+func FailedPattern(name, in, pattern string, value interface{}) *Validation {
+ var msg string
+ if in == "" {
+ msg = fmt.Sprintf(patternFailNoIn, name, pattern)
+ } else {
+ msg = fmt.Sprintf(patternFail, name, in, pattern)
+ }
+
+ return &Validation{
+ code: PatternFailCode,
+ Name: name,
+ In: in,
+ Value: value,
+ message: msg,
+ }
+}
+
+// MultipleOfMustBePositive error for when a
+// multipleOf factor is negative
+func MultipleOfMustBePositive(name, in string, factor interface{}) *Validation {
+ return &Validation{
+ code: MultipleOfMustBePositiveCode,
+ Name: name,
+ In: in,
+ Value: factor,
+ message: fmt.Sprintf(multipleOfMustBePositive, name, factor),
+ }
+}
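
Taken together, these constructors give the validation layer one uniform error shape (*Validation) for range, enum, required, read-only, length, and pattern failures. A minimal usage sketch, assuming only the constructor signatures above and that *Validation satisfies the error interface elsewhere in the package (the parameter values and rendered message are illustrative):

	package main

	import (
		"fmt"

		"github.com/go-openapi/errors"
	)

	func main() {
		// Reject a "limit" query parameter above an inclusive maximum of 100.
		limit := int64(250)
		if limit > 100 {
			// Arguments: name, in, max, exclusive, value.
			v := errors.ExceedsMaximumInt("limit", "query", 100, false, limit)
			fmt.Println(v.Error()) // message rendered from the maxIncFail template
		}
	}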
diff --git a/vendor/github.com/go-openapi/inflect/.hgignore b/vendor/github.com/go-openapi/inflect/.hgignore
new file mode 100644
index 000000000..6cc3d7ce1
--- /dev/null
+++ b/vendor/github.com/go-openapi/inflect/.hgignore
@@ -0,0 +1 @@
+swp$
diff --git a/vendor/github.com/go-openapi/inflect/LICENCE b/vendor/github.com/go-openapi/inflect/LICENCE
new file mode 100644
index 000000000..8a36b944a
--- /dev/null
+++ b/vendor/github.com/go-openapi/inflect/LICENCE
@@ -0,0 +1,7 @@
+Copyright (c) 2011 Chris Farmiloe
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/go-openapi/inflect/README b/vendor/github.com/go-openapi/inflect/README
new file mode 100644
index 000000000..014699a22
--- /dev/null
+++ b/vendor/github.com/go-openapi/inflect/README
@@ -0,0 +1,168 @@
+INSTALLATION
+
+go get bitbucket.org/pkg/inflect
+
+PACKAGE
+
+package inflect
+
+
+FUNCTIONS
+
+func AddAcronym(word string)
+
+func AddHuman(suffix, replacement string)
+
+func AddIrregular(singular, plural string)
+
+func AddPlural(suffix, replacement string)
+
+func AddSingular(suffix, replacement string)
+
+func AddUncountable(word string)
+
+func Asciify(word string) string
+
+func Camelize(word string) string
+
+func CamelizeDownFirst(word string) string
+
+func Capitalize(word string) string
+
+func Dasherize(word string) string
+
+func ForeignKey(word string) string
+
+func ForeignKeyCondensed(word string) string
+
+func Humanize(word string) string
+
+func Ordinalize(word string) string
+
+func Parameterize(word string) string
+
+func ParameterizeJoin(word, sep string) string
+
+func Pluralize(word string) string
+
+func Singularize(word string) string
+
+func Tableize(word string) string
+
+func Titleize(word string) string
+
+func Typeify(word string) string
+
+func Uncountables() map[string]bool
+
+func Underscore(word string) string
+
+
+TYPES
+
+type Rule struct {
+ // contains filtered or unexported fields
+}
+used by rulesets
+
+type Ruleset struct {
+ // contains filtered or unexported fields
+}
+a Ruleset is the config of pluralization rules;
+you can extend the rules with the Add* methods
+
+func NewDefaultRuleset() *Ruleset
+create a new ruleset and load it with the default
+set of common English pluralization rules
+
+func NewRuleset() *Ruleset
+create a blank ruleset. Unless you are going to
+build your own rules from scratch you probably
+won't need this and can just use the defaultRuleset
+via the global inflect.* methods
+
+func (rs *Ruleset) AddAcronym(word string)
+if you use acronyms you may need to add them to the ruleset
+to prevent underscored versions of words like "HTML" coming out
+as "h_t_m_l"
+
+func (rs *Ruleset) AddHuman(suffix, replacement string)
+Human rules are applied by humanize to show more friendly
+versions of words
+
+func (rs *Ruleset) AddIrregular(singular, plural string)
+Add any inconsistent pluralizing/singularizing rules
+to the set here.
+
+func (rs *Ruleset) AddPlural(suffix, replacement string)
+add a pluralization rule
+
+func (rs *Ruleset) AddPluralExact(suffix, replacement string, exact bool)
+add a pluralization rule with full string match
+
+func (rs *Ruleset) AddSingular(suffix, replacement string)
+add a singular rule
+
+func (rs *Ruleset) AddSingularExact(suffix, replacement string, exact bool)
+same as AddSingular but you can set `exact` to force
+a full string match
+
+func (rs *Ruleset) AddUncountable(word string)
+add a word to this ruleset that has the same singular and plural form
+for example: "rice"
+
+func (rs *Ruleset) Asciify(word string) string
+transforms Latin characters like é -> e
+
+func (rs *Ruleset) Camelize(word string) string
+"dino_party" -> "DinoParty"
+
+func (rs *Ruleset) CamelizeDownFirst(word string) string
+same as Camelize but with the first letter downcased
+
+func (rs *Ruleset) Capitalize(word string) string
+uppercase first character
+
+func (rs *Ruleset) Dasherize(word string) string
+"SomeText" -> "some-text"
+
+func (rs *Ruleset) ForeignKey(word string) string
+an underscored foreign key name "Person" -> "person_id"
+
+func (rs *Ruleset) ForeignKeyCondensed(word string) string
+a foreign key (with an underscore) "Person" -> "personid"
+
+func (rs *Ruleset) Humanize(word string) string
+First letter of the sentence capitalized
+Uses custom friendly replacements via AddHuman()
+
+func (rs *Ruleset) Ordinalize(str string) string
+"1031" -> "1031st"
+
+func (rs *Ruleset) Parameterize(word string) string
+param safe dasherized names like "my-param"
+
+func (rs *Ruleset) ParameterizeJoin(word, sep string) string
+param safe dasherized names with a custom separator
+
+func (rs *Ruleset) Pluralize(word string) string
+returns the plural form of a singular word
+
+func (rs *Ruleset) Singularize(word string) string
+returns the singular form of a plural word
+
+func (rs *Ruleset) Tableize(word string) string
+Rails style pluralized table names: "SuperPerson" -> "super_people"
+
+func (rs *Ruleset) Titleize(word string) string
+Capitalize every word in the sentence: "hello there" -> "Hello There"
+
+func (rs *Ruleset) Typeify(word string) string
+"something_like_this" -> "SomethingLikeThis"
+
+func (rs *Ruleset) Uncountables() map[string]bool
+
+func (rs *Ruleset) Underscore(word string) string
+lowercase underscore version "BigBen" -> "big_ben"
+
+
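
The functions above all delegate to the default ruleset defined in inflect.go below. A small sketch exercising the documented defaults through the package-level wrappers:

	package main

	import (
		"fmt"

		"github.com/go-openapi/inflect"
	)

	func main() {
		fmt.Println(inflect.Pluralize("person"))     // "people" (irregular rule)
		fmt.Println(inflect.Singularize("matrices")) // "matrix"
		fmt.Println(inflect.Camelize("dino_party"))  // "DinoParty"
		fmt.Println(inflect.Underscore("BigBen"))    // "big_ben"
		fmt.Println(inflect.Ordinalize("1031"))      // "1031st"
	}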
diff --git a/vendor/github.com/go-openapi/inflect/inflect.go b/vendor/github.com/go-openapi/inflect/inflect.go
new file mode 100644
index 000000000..3008844ca
--- /dev/null
+++ b/vendor/github.com/go-openapi/inflect/inflect.go
@@ -0,0 +1,713 @@
+package inflect
+
+import (
+ "fmt"
+ "regexp"
+ "strconv"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+)
+
+// used by rulesets
+type Rule struct {
+ suffix string
+ replacement string
+ exact bool
+}
+
+// a Ruleset is the config of pluralization rules;
+// you can extend the rules with the Add* methods
+type Ruleset struct {
+ uncountables map[string]bool
+ plurals []*Rule
+ singulars []*Rule
+ humans []*Rule
+ acronyms []*Rule
+ acronymMatcher *regexp.Regexp
+}
+
+// create a blank ruleset. Unless you are going to
+// build your own rules from scratch you probably
+// won't need this and can just use the defaultRuleset
+// via the global inflect.* methods
+func NewRuleset() *Ruleset {
+ rs := new(Ruleset)
+ rs.uncountables = make(map[string]bool)
+ rs.plurals = make([]*Rule, 0)
+ rs.singulars = make([]*Rule, 0)
+ rs.humans = make([]*Rule, 0)
+ rs.acronyms = make([]*Rule, 0)
+ return rs
+}
+
+// create a new ruleset and load it with the default
+// set of common English pluralization rules
+func NewDefaultRuleset() *Ruleset {
+ rs := NewRuleset()
+ rs.AddPlural("s", "s")
+ rs.AddPlural("testis", "testes")
+ rs.AddPlural("axis", "axes")
+ rs.AddPlural("octopus", "octopi")
+ rs.AddPlural("virus", "viri")
+ rs.AddPlural("octopi", "octopi")
+ rs.AddPlural("viri", "viri")
+ rs.AddPlural("alias", "aliases")
+ rs.AddPlural("status", "statuses")
+ rs.AddPlural("bus", "buses")
+ rs.AddPlural("buffalo", "buffaloes")
+ rs.AddPlural("tomato", "tomatoes")
+ rs.AddPlural("tum", "ta")
+ rs.AddPlural("ium", "ia")
+ rs.AddPlural("ta", "ta")
+ rs.AddPlural("ia", "ia")
+ rs.AddPlural("sis", "ses")
+ rs.AddPlural("lf", "lves")
+ rs.AddPlural("rf", "rves")
+ rs.AddPlural("afe", "aves")
+ rs.AddPlural("bfe", "bves")
+ rs.AddPlural("cfe", "cves")
+ rs.AddPlural("dfe", "dves")
+ rs.AddPlural("efe", "eves")
+ rs.AddPlural("gfe", "gves")
+ rs.AddPlural("hfe", "hves")
+ rs.AddPlural("ife", "ives")
+ rs.AddPlural("jfe", "jves")
+ rs.AddPlural("kfe", "kves")
+ rs.AddPlural("lfe", "lves")
+ rs.AddPlural("mfe", "mves")
+ rs.AddPlural("nfe", "nves")
+ rs.AddPlural("ofe", "oves")
+ rs.AddPlural("pfe", "pves")
+ rs.AddPlural("qfe", "qves")
+ rs.AddPlural("rfe", "rves")
+ rs.AddPlural("sfe", "sves")
+ rs.AddPlural("tfe", "tves")
+ rs.AddPlural("ufe", "uves")
+ rs.AddPlural("vfe", "vves")
+ rs.AddPlural("wfe", "wves")
+ rs.AddPlural("xfe", "xves")
+ rs.AddPlural("yfe", "yves")
+ rs.AddPlural("zfe", "zves")
+ rs.AddPlural("hive", "hives")
+ rs.AddPlural("quy", "quies")
+ rs.AddPlural("by", "bies")
+ rs.AddPlural("cy", "cies")
+ rs.AddPlural("dy", "dies")
+ rs.AddPlural("fy", "fies")
+ rs.AddPlural("gy", "gies")
+ rs.AddPlural("hy", "hies")
+ rs.AddPlural("jy", "jies")
+ rs.AddPlural("ky", "kies")
+ rs.AddPlural("ly", "lies")
+ rs.AddPlural("my", "mies")
+ rs.AddPlural("ny", "nies")
+ rs.AddPlural("py", "pies")
+ rs.AddPlural("qy", "qies")
+ rs.AddPlural("ry", "ries")
+ rs.AddPlural("sy", "sies")
+ rs.AddPlural("ty", "ties")
+ rs.AddPlural("vy", "vies")
+ rs.AddPlural("wy", "wies")
+ rs.AddPlural("xy", "xies")
+ rs.AddPlural("zy", "zies")
+ rs.AddPlural("x", "xes")
+ rs.AddPlural("ch", "ches")
+ rs.AddPlural("ss", "sses")
+ rs.AddPlural("sh", "shes")
+ rs.AddPlural("matrix", "matrices")
+ rs.AddPlural("vertix", "vertices")
+ rs.AddPlural("indix", "indices")
+ rs.AddPlural("matrex", "matrices")
+ rs.AddPlural("vertex", "vertices")
+ rs.AddPlural("index", "indices")
+ rs.AddPlural("mouse", "mice")
+ rs.AddPlural("louse", "lice")
+ rs.AddPlural("mice", "mice")
+ rs.AddPlural("lice", "lice")
+ rs.AddPluralExact("ox", "oxen", true)
+ rs.AddPluralExact("oxen", "oxen", true)
+ rs.AddPluralExact("quiz", "quizzes", true)
+ rs.AddSingular("s", "")
+ rs.AddSingular("news", "news")
+ rs.AddSingular("ta", "tum")
+ rs.AddSingular("ia", "ium")
+ rs.AddSingular("analyses", "analysis")
+ rs.AddSingular("bases", "basis")
+ rs.AddSingular("diagnoses", "diagnosis")
+ rs.AddSingular("parentheses", "parenthesis")
+ rs.AddSingular("prognoses", "prognosis")
+ rs.AddSingular("synopses", "synopsis")
+ rs.AddSingular("theses", "thesis")
+ rs.AddSingular("analyses", "analysis")
+ rs.AddSingular("aves", "afe")
+ rs.AddSingular("bves", "bfe")
+ rs.AddSingular("cves", "cfe")
+ rs.AddSingular("dves", "dfe")
+ rs.AddSingular("eves", "efe")
+ rs.AddSingular("gves", "gfe")
+ rs.AddSingular("hves", "hfe")
+ rs.AddSingular("ives", "ife")
+ rs.AddSingular("jves", "jfe")
+ rs.AddSingular("kves", "kfe")
+ rs.AddSingular("lves", "lfe")
+ rs.AddSingular("mves", "mfe")
+ rs.AddSingular("nves", "nfe")
+ rs.AddSingular("oves", "ofe")
+ rs.AddSingular("pves", "pfe")
+ rs.AddSingular("qves", "qfe")
+ rs.AddSingular("rves", "rfe")
+ rs.AddSingular("sves", "sfe")
+ rs.AddSingular("tves", "tfe")
+ rs.AddSingular("uves", "ufe")
+ rs.AddSingular("vves", "vfe")
+ rs.AddSingular("wves", "wfe")
+ rs.AddSingular("xves", "xfe")
+ rs.AddSingular("yves", "yfe")
+ rs.AddSingular("zves", "zfe")
+ rs.AddSingular("hives", "hive")
+ rs.AddSingular("tives", "tive")
+ rs.AddSingular("lves", "lf")
+ rs.AddSingular("rves", "rf")
+ rs.AddSingular("quies", "quy")
+ rs.AddSingular("bies", "by")
+ rs.AddSingular("cies", "cy")
+ rs.AddSingular("dies", "dy")
+ rs.AddSingular("fies", "fy")
+ rs.AddSingular("gies", "gy")
+ rs.AddSingular("hies", "hy")
+ rs.AddSingular("jies", "jy")
+ rs.AddSingular("kies", "ky")
+ rs.AddSingular("lies", "ly")
+ rs.AddSingular("mies", "my")
+ rs.AddSingular("nies", "ny")
+ rs.AddSingular("pies", "py")
+ rs.AddSingular("qies", "qy")
+ rs.AddSingular("ries", "ry")
+ rs.AddSingular("sies", "sy")
+ rs.AddSingular("ties", "ty")
+ rs.AddSingular("vies", "vy")
+ rs.AddSingular("wies", "wy")
+ rs.AddSingular("xies", "xy")
+ rs.AddSingular("zies", "zy")
+ rs.AddSingular("series", "series")
+ rs.AddSingular("movies", "movie")
+ rs.AddSingular("xes", "x")
+ rs.AddSingular("ches", "ch")
+ rs.AddSingular("sses", "ss")
+ rs.AddSingular("shes", "sh")
+ rs.AddSingular("mice", "mouse")
+ rs.AddSingular("lice", "louse")
+ rs.AddSingular("buses", "bus")
+ rs.AddSingular("oes", "o")
+ rs.AddSingular("shoes", "shoe")
+ rs.AddSingular("crises", "crisis")
+ rs.AddSingular("axes", "axis")
+ rs.AddSingular("testes", "testis")
+ rs.AddSingular("octopi", "octopus")
+ rs.AddSingular("viri", "virus")
+ rs.AddSingular("statuses", "status")
+ rs.AddSingular("aliases", "alias")
+ rs.AddSingularExact("oxen", "ox", true)
+ rs.AddSingular("vertices", "vertex")
+ rs.AddSingular("indices", "index")
+ rs.AddSingular("matrices", "matrix")
+ rs.AddSingularExact("quizzes", "quiz", true)
+ rs.AddSingular("databases", "database")
+ rs.AddIrregular("person", "people")
+ rs.AddIrregular("man", "men")
+ rs.AddIrregular("child", "children")
+ rs.AddIrregular("sex", "sexes")
+ rs.AddIrregular("move", "moves")
+ rs.AddIrregular("zombie", "zombies")
+ rs.AddUncountable("equipment")
+ rs.AddUncountable("information")
+ rs.AddUncountable("rice")
+ rs.AddUncountable("money")
+ rs.AddUncountable("species")
+ rs.AddUncountable("series")
+ rs.AddUncountable("fish")
+ rs.AddUncountable("sheep")
+ rs.AddUncountable("jeans")
+ rs.AddUncountable("police")
+ return rs
+}
+
+func (rs *Ruleset) Uncountables() map[string]bool {
+ return rs.uncountables
+}
+
+// add a pluralization rule
+func (rs *Ruleset) AddPlural(suffix, replacement string) {
+ rs.AddPluralExact(suffix, replacement, false)
+}
+
+// add a pluralization rule with full string match
+func (rs *Ruleset) AddPluralExact(suffix, replacement string, exact bool) {
+ // remove uncountable
+ delete(rs.uncountables, suffix)
+ // create rule
+ r := new(Rule)
+ r.suffix = suffix
+ r.replacement = replacement
+ r.exact = exact
+ // prepend
+ rs.plurals = append([]*Rule{r}, rs.plurals...)
+}
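+
+// Note that because rules are prepended, the most recently added rule
+// is matched first: NewDefaultRuleset registers the generic "s" rule
+// before everything else, so more specific rules added later (and any
+// rules you add yourself) take precedence over it.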
+
+// add a singular rule
+func (rs *Ruleset) AddSingular(suffix, replacement string) {
+ rs.AddSingularExact(suffix, replacement, false)
+}
+
+// same as AddSingular but you can set `exact` to force
+// a full string match
+func (rs *Ruleset) AddSingularExact(suffix, replacement string, exact bool) {
+ // remove from uncountable
+ delete(rs.uncountables, suffix)
+ // create rule
+ r := new(Rule)
+ r.suffix = suffix
+ r.replacement = replacement
+ r.exact = exact
+ rs.singulars = append([]*Rule{r}, rs.singulars...)
+}
+
+// Human rules are applied by humanize to show more friendly
+// versions of words
+func (rs *Ruleset) AddHuman(suffix, replacement string) {
+ r := new(Rule)
+ r.suffix = suffix
+ r.replacement = replacement
+ rs.humans = append([]*Rule{r}, rs.humans...)
+}
+
+// Add any inconsistent pluralizing/singularizing rules
+// to the set here.
+func (rs *Ruleset) AddIrregular(singular, plural string) {
+ delete(rs.uncountables, singular)
+ delete(rs.uncountables, plural)
+ rs.AddPlural(singular, plural)
+ rs.AddPlural(plural, plural)
+ rs.AddSingular(plural, singular)
+}
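+
+// Registering plural -> plural as well makes Pluralize idempotent for
+// irregular words: Pluralize("people") stays "people" rather than
+// falling through to the generic suffix rules.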
+
+// if you use acronyms you may need to add them to the ruleset
+// to prevent underscored versions of words like "HTML" coming out
+// as "h_t_m_l"
+func (rs *Ruleset) AddAcronym(word string) {
+ r := new(Rule)
+ r.suffix = word
+ r.replacement = rs.Titleize(strings.ToLower(word))
+ rs.acronyms = append(rs.acronyms, r)
+}
+
+// add a word to this ruleset that has the same singular and plural form
+// for example: "rice"
+func (rs *Ruleset) AddUncountable(word string) {
+ rs.uncountables[strings.ToLower(word)] = true
+}
+
+func (rs *Ruleset) isUncountable(word string) bool {
+ // handle multiple words by using the last one
+ words := strings.Split(word, " ")
+ if _, exists := rs.uncountables[strings.ToLower(words[len(words)-1])]; exists {
+ return true
+ }
+ return false
+}
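+
+// For example, "fried rice" counts as uncountable because its last
+// word, "rice", is in the uncountables set.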
+
+// returns the plural form of a singular word
+func (rs *Ruleset) Pluralize(word string) string {
+ if len(word) == 0 {
+ return word
+ }
+ if rs.isUncountable(word) {
+ return word
+ }
+ for _, rule := range rs.plurals {
+ if rule.exact {
+ if word == rule.suffix {
+ return rule.replacement
+ }
+ } else {
+ if strings.HasSuffix(word, rule.suffix) {
+ return replaceLast(word, rule.suffix, rule.replacement)
+ }
+ }
+ }
+ return word + "s"
+}
+
+// returns the singular form of a plural word
+func (rs *Ruleset) Singularize(word string) string {
+ if len(word) == 0 {
+ return word
+ }
+ if rs.isUncountable(word) {
+ return word
+ }
+ for _, rule := range rs.singulars {
+ if rule.exact {
+ if word == rule.suffix {
+ return rule.replacement
+ }
+ } else {
+ if strings.HasSuffix(word, rule.suffix) {
+ return replaceLast(word, rule.suffix, rule.replacement)
+ }
+ }
+ }
+ return word
+}
+
+// uppercase first character
+func (rs *Ruleset) Capitalize(word string) string {
+ return strings.ToUpper(word[:1]) + word[1:]
+}
+
+// "dino_party" -> "DinoParty"
+func (rs *Ruleset) Camelize(word string) string {
+ words := splitAtCaseChangeWithTitlecase(word)
+ return strings.Join(words, "")
+}
+
+// same as Camelize but with the first letter downcased
+func (rs *Ruleset) CamelizeDownFirst(word string) string {
+ word = Camelize(word)
+ return strings.ToLower(word[:1]) + word[1:]
+}
+
+// Capitalize every word in the sentence: "hello there" -> "Hello There"
+func (rs *Ruleset) Titleize(word string) string {
+ words := splitAtCaseChangeWithTitlecase(word)
+ return strings.Join(words, " ")
+}
+
+func (rs *Ruleset) safeCaseAcronyms(word string) string {
+	// convert an acronym like HTML into Html
+ for _, rule := range rs.acronyms {
+ word = strings.Replace(word, rule.suffix, rule.replacement, -1)
+ }
+ return word
+}
+
+func (rs *Ruleset) seperatedWords(word, sep string) string {
+ word = rs.safeCaseAcronyms(word)
+ words := splitAtCaseChange(word)
+ return strings.Join(words, sep)
+}
+
+// lowercase underscore version "BigBen" -> "big_ben"
+func (rs *Ruleset) Underscore(word string) string {
+ return rs.seperatedWords(word, "_")
+}
+
+// First letter of the sentence capitalized
+// Uses custom friendly replacements via AddHuman()
+func (rs *Ruleset) Humanize(word string) string {
+	word = replaceLast(word, "_id", "") // strip the foreign key suffix "_id"
+ // replace and strings in humans list
+ for _, rule := range rs.humans {
+ word = strings.Replace(word, rule.suffix, rule.replacement, -1)
+ }
+ sentance := rs.seperatedWords(word, " ")
+ return strings.ToUpper(sentance[:1]) + sentance[1:]
+}
+
+// an underscored foreign key name "Person" -> "person_id"
+func (rs *Ruleset) ForeignKey(word string) string {
+ return rs.Underscore(rs.Singularize(word)) + "_id"
+}
+
+// a foreign key (with an underscore) "Person" -> "personid"
+func (rs *Ruleset) ForeignKeyCondensed(word string) string {
+ return rs.Underscore(word) + "id"
+}
+
+// Rails style pluralized table names: "SuperPerson" -> "super_people"
+func (rs *Ruleset) Tableize(word string) string {
+ return rs.Pluralize(rs.Underscore(rs.Typeify(word)))
+}
+
+var notUrlSafe *regexp.Regexp = regexp.MustCompile(`[^\w\d\-_ ]`)
+
+// param safe dasherized names like "my-param"
+func (rs *Ruleset) Parameterize(word string) string {
+ return ParameterizeJoin(word, "-")
+}
+
+// param safe dasherized names with a custom separator
+func (rs *Ruleset) ParameterizeJoin(word, sep string) string {
+ word = strings.ToLower(word)
+ word = rs.Asciify(word)
+ word = notUrlSafe.ReplaceAllString(word, "")
+ word = strings.Replace(word, " ", sep, -1)
+ if len(sep) > 0 {
+ squash, err := regexp.Compile(sep + "+")
+ if err == nil {
+ word = squash.ReplaceAllString(word, sep)
+ }
+ }
+ word = strings.Trim(word, sep+" ")
+ return word
+}
+
+var lookalikes map[string]*regexp.Regexp = map[string]*regexp.Regexp{
+ "A": regexp.MustCompile(`À|Á|Â|Ã|Ä|Å`),
+ "AE": regexp.MustCompile(`Æ`),
+ "C": regexp.MustCompile(`Ç`),
+ "E": regexp.MustCompile(`È|É|Ê|Ë`),
+ "G": regexp.MustCompile(`Ğ`),
+ "I": regexp.MustCompile(`Ì|Í|Î|Ï|İ`),
+ "N": regexp.MustCompile(`Ñ`),
+ "O": regexp.MustCompile(`Ò|Ó|Ô|Õ|Ö|Ø`),
+ "S": regexp.MustCompile(`Ş`),
+ "U": regexp.MustCompile(`Ù|Ú|Û|Ü`),
+ "Y": regexp.MustCompile(`Ý`),
+ "ss": regexp.MustCompile(`ß`),
+ "a": regexp.MustCompile(`à|á|â|ã|ä|å`),
+ "ae": regexp.MustCompile(`æ`),
+ "c": regexp.MustCompile(`ç`),
+ "e": regexp.MustCompile(`è|é|ê|ë`),
+ "g": regexp.MustCompile(`ğ`),
+ "i": regexp.MustCompile(`ì|í|î|ï|ı`),
+ "n": regexp.MustCompile(`ñ`),
+ "o": regexp.MustCompile(`ò|ó|ô|õ|ö|ø`),
+ "s": regexp.MustCompile(`ş`),
+ "u": regexp.MustCompile(`ù|ú|û|ü|ũ|ū|ŭ|ů|ű|ų`),
+ "y": regexp.MustCompile(`ý|ÿ`),
+}
+
+// transforms Latin characters like é -> e
+func (rs *Ruleset) Asciify(word string) string {
+ for repl, regex := range lookalikes {
+ word = regex.ReplaceAllString(word, repl)
+ }
+ return word
+}
+
+var tablePrefix *regexp.Regexp = regexp.MustCompile(`^[^.]*\.`)
+
+// "something_like_this" -> "SomethingLikeThis"
+func (rs *Ruleset) Typeify(word string) string {
+ word = tablePrefix.ReplaceAllString(word, "")
+ return rs.Camelize(rs.Singularize(word))
+}
+
+// "SomeText" -> "some-text"
+func (rs *Ruleset) Dasherize(word string) string {
+ return rs.seperatedWords(word, "-")
+}
+
+// "1031" -> "1031st"
+func (rs *Ruleset) Ordinalize(str string) string {
+ number, err := strconv.Atoi(str)
+ if err != nil {
+ return str
+ }
+ switch abs(number) % 100 {
+ case 11, 12, 13:
+ return fmt.Sprintf("%dth", number)
+ default:
+ switch abs(number) % 10 {
+ case 1:
+ return fmt.Sprintf("%dst", number)
+ case 2:
+ return fmt.Sprintf("%dnd", number)
+ case 3:
+ return fmt.Sprintf("%drd", number)
+ }
+ }
+ return fmt.Sprintf("%dth", number)
+}
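+
+// For example: Ordinalize("11") -> "11th" and Ordinalize("112") ->
+// "112th" (the teens are special-cased), while Ordinalize("21") ->
+// "21st" and Ordinalize("-3") -> "-3rd" (abs is used only to choose
+// the suffix, so the sign is preserved). Non-numeric input is
+// returned unchanged.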
+
+/////////////////////////////////////////
+// the default global ruleset
+/////////////////////////////////////////
+
+var defaultRuleset *Ruleset
+
+func init() {
+ defaultRuleset = NewDefaultRuleset()
+}
+
+func Uncountables() map[string]bool {
+ return defaultRuleset.Uncountables()
+}
+
+func AddPlural(suffix, replacement string) {
+ defaultRuleset.AddPlural(suffix, replacement)
+}
+
+func AddSingular(suffix, replacement string) {
+ defaultRuleset.AddSingular(suffix, replacement)
+}
+
+func AddHuman(suffix, replacement string) {
+ defaultRuleset.AddHuman(suffix, replacement)
+}
+
+func AddIrregular(singular, plural string) {
+ defaultRuleset.AddIrregular(singular, plural)
+}
+
+func AddAcronym(word string) {
+ defaultRuleset.AddAcronym(word)
+}
+
+func AddUncountable(word string) {
+ defaultRuleset.AddUncountable(word)
+}
+
+func Pluralize(word string) string {
+ return defaultRuleset.Pluralize(word)
+}
+
+func Singularize(word string) string {
+ return defaultRuleset.Singularize(word)
+}
+
+func Capitalize(word string) string {
+ return defaultRuleset.Capitalize(word)
+}
+
+func Camelize(word string) string {
+ return defaultRuleset.Camelize(word)
+}
+
+func CamelizeDownFirst(word string) string {
+ return defaultRuleset.CamelizeDownFirst(word)
+}
+
+func Titleize(word string) string {
+ return defaultRuleset.Titleize(word)
+}
+
+func Underscore(word string) string {
+ return defaultRuleset.Underscore(word)
+}
+
+func Humanize(word string) string {
+ return defaultRuleset.Humanize(word)
+}
+
+func ForeignKey(word string) string {
+ return defaultRuleset.ForeignKey(word)
+}
+
+func ForeignKeyCondensed(word string) string {
+ return defaultRuleset.ForeignKeyCondensed(word)
+}
+
+func Tableize(word string) string {
+ return defaultRuleset.Tableize(word)
+}
+
+func Parameterize(word string) string {
+ return defaultRuleset.Parameterize(word)
+}
+
+func ParameterizeJoin(word, sep string) string {
+ return defaultRuleset.ParameterizeJoin(word, sep)
+}
+
+func Typeify(word string) string {
+ return defaultRuleset.Typeify(word)
+}
+
+func Dasherize(word string) string {
+ return defaultRuleset.Dasherize(word)
+}
+
+func Ordinalize(word string) string {
+ return defaultRuleset.Ordinalize(word)
+}
+
+func Asciify(word string) string {
+ return defaultRuleset.Asciify(word)
+}
+
+// helper funcs
+
+func reverse(s string) string {
+ o := make([]rune, utf8.RuneCountInString(s))
+ i := len(o)
+ for _, c := range s {
+ i--
+ o[i] = c
+ }
+ return string(o)
+}
+
+func isSpacerChar(c rune) bool {
+ switch {
+ case c == rune("_"[0]):
+ return true
+ case c == rune(" "[0]):
+ return true
+ case c == rune(":"[0]):
+ return true
+ case c == rune("-"[0]):
+ return true
+ }
+ return false
+}
+
+func splitAtCaseChange(s string) []string {
+ words := make([]string, 0)
+ word := make([]rune, 0)
+ for _, c := range s {
+ spacer := isSpacerChar(c)
+ if len(word) > 0 {
+ if unicode.IsUpper(c) || spacer {
+ words = append(words, string(word))
+ word = make([]rune, 0)
+ }
+ }
+ if !spacer {
+ word = append(word, unicode.ToLower(c))
+ }
+ }
+ words = append(words, string(word))
+ return words
+}
+
+func splitAtCaseChangeWithTitlecase(s string) []string {
+ words := make([]string, 0)
+ word := make([]rune, 0)
+ for _, c := range s {
+ spacer := isSpacerChar(c)
+ if len(word) > 0 {
+ if unicode.IsUpper(c) || spacer {
+ words = append(words, string(word))
+ word = make([]rune, 0)
+ }
+ }
+ if !spacer {
+ if len(word) > 0 {
+ word = append(word, unicode.ToLower(c))
+ } else {
+ word = append(word, unicode.ToUpper(c))
+ }
+ }
+ }
+ words = append(words, string(word))
+ return words
+}
+
+func replaceLast(s, match, repl string) string {
+ // reverse strings
+ srev := reverse(s)
+ mrev := reverse(match)
+ rrev := reverse(repl)
+ // match first and reverse back
+ return reverse(strings.Replace(srev, mrev, rrev, 1))
+}
+
+func abs(x int) int {
+ if x < 0 {
+ return -x
+ }
+ return x
+}
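
The replaceLast helper is worth a note: it finds the last occurrence by reversing all three strings, performing a first-occurrence replace, and reversing the result back. For the ASCII suffixes used by the rulesets this is equivalent to the index-based version sketched below (replaceLastDirect is not part of the package; it is shown only for comparison):

	package main

	import (
		"fmt"
		"strings"
	)

	// replaceLastDirect is a reference implementation of the same
	// behaviour using strings.LastIndex instead of the reverse trick.
	func replaceLastDirect(s, match, repl string) string {
		i := strings.LastIndex(s, match)
		if i < 0 {
			return s
		}
		return s[:i] + repl + s[i+len(match):]
	}

	func main() {
		// Only the final occurrence is replaced:
		fmt.Println(replaceLastDirect("statuses", "es", ""))      // "status"
		fmt.Println(replaceLastDirect("sses_sses", "sses", "ss")) // "sses_ss"
	}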
diff --git a/vendor/github.com/go-openapi/jsonpointer/.editorconfig b/vendor/github.com/go-openapi/jsonpointer/.editorconfig
new file mode 100644
index 000000000..3152da69a
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonpointer/.editorconfig
@@ -0,0 +1,26 @@
+# top-most EditorConfig file
+root = true
+
+# Unix-style newlines with a newline ending every file
+[*]
+end_of_line = lf
+insert_final_newline = true
+indent_style = space
+indent_size = 2
+trim_trailing_whitespace = true
+
+# Set default charset
+[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
+charset = utf-8
+
+# Tab indentation (no size specified)
+[*.go]
+indent_style = tab
+
+[*.md]
+trim_trailing_whitespace = false
+
+# Matches the exact files either package.json or .travis.yml
+[{package.json,.travis.yml}]
+indent_style = space
+indent_size = 2
diff --git a/vendor/github.com/go-openapi/jsonpointer/.gitignore b/vendor/github.com/go-openapi/jsonpointer/.gitignore
new file mode 100644
index 000000000..769c24400
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonpointer/.gitignore
@@ -0,0 +1 @@
+secrets.yml
diff --git a/vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..9322b065e
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md
@@ -0,0 +1,74 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of experience,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at ivan+abuse@flanders.co.nz. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/jsonpointer/LICENSE b/vendor/github.com/go-openapi/jsonpointer/LICENSE
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonpointer/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/go-openapi/jsonpointer/README.md b/vendor/github.com/go-openapi/jsonpointer/README.md
new file mode 100644
index 000000000..813788aff
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonpointer/README.md
@@ -0,0 +1,15 @@
+# gojsonpointer [![Build Status](https://travis-ci.org/go-openapi/jsonpointer.svg?branch=master)](https://travis-ci.org/go-openapi/jsonpointer) [![codecov](https://codecov.io/gh/go-openapi/jsonpointer/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/jsonpointer) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
+
+[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/jsonpointer/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/jsonpointer?status.svg)](http://godoc.org/github.com/go-openapi/jsonpointer)
+An implementation of JSON Pointer - Go language
+
+## Status
+Completed YES
+
+Tested YES
+
+## References
+http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07
+
+### Note
+The 4.Evaluation part of the previous reference, starting with 'If the currently referenced value is a JSON array, the reference token MUST contain either...' is not implemented.
diff --git a/vendor/github.com/go-openapi/jsonpointer/pointer.go b/vendor/github.com/go-openapi/jsonpointer/pointer.go
new file mode 100644
index 000000000..7df9853de
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonpointer/pointer.go
@@ -0,0 +1,390 @@
+// Copyright 2013 sigu-399 ( https://github.com/sigu-399 )
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// author sigu-399
+// author-github https://github.com/sigu-399
+// author-mail sigu.399@gmail.com
+//
+// repository-name jsonpointer
+// repository-desc An implementation of JSON Pointer - Go language
+//
+// description Main and unique file.
+//
+// created 25-02-2013
+
+package jsonpointer
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/swag"
+)
+
+const (
+ emptyPointer = ``
+ pointerSeparator = `/`
+
+	invalidStart = `JSON pointer must be empty or start with a "` + pointerSeparator + `"`
+)
+
+var jsonPointableType = reflect.TypeOf(new(JSONPointable)).Elem()
+var jsonSetableType = reflect.TypeOf(new(JSONSetable)).Elem()
+
+// JSONPointable is an interface for structs to implement when they need to customize the
+// json pointer process
+type JSONPointable interface {
+ JSONLookup(string) (interface{}, error)
+}
+
+// JSONSetable is an interface for structs to implement when they need to customize the
+// json pointer process
+type JSONSetable interface {
+ JSONSet(string, interface{}) error
+}
+
+// New creates a new json pointer for the given string
+func New(jsonPointerString string) (Pointer, error) {
+
+ var p Pointer
+ err := p.parse(jsonPointerString)
+ return p, err
+
+}
+
+// Pointer is the json pointer representation
+type Pointer struct {
+ referenceTokens []string
+}
+
+// "Constructor", parses the given string JSON pointer
+func (p *Pointer) parse(jsonPointerString string) error {
+
+ var err error
+
+ if jsonPointerString != emptyPointer {
+ if !strings.HasPrefix(jsonPointerString, pointerSeparator) {
+ err = errors.New(invalidStart)
+ } else {
+ referenceTokens := strings.Split(jsonPointerString, pointerSeparator)
+ for _, referenceToken := range referenceTokens[1:] {
+ p.referenceTokens = append(p.referenceTokens, referenceToken)
+ }
+ }
+ }
+
+ return err
+}
+
+// Get uses the pointer to retrieve a value from a JSON document
+func (p *Pointer) Get(document interface{}) (interface{}, reflect.Kind, error) {
+ return p.get(document, swag.DefaultJSONNameProvider)
+}
+
+// Set uses the pointer to set a value in a JSON document
+func (p *Pointer) Set(document interface{}, value interface{}) (interface{}, error) {
+ return document, p.set(document, value, swag.DefaultJSONNameProvider)
+}
+
+// GetForToken gets a value for a json pointer token 1 level deep
+func GetForToken(document interface{}, decodedToken string) (interface{}, reflect.Kind, error) {
+ return getSingleImpl(document, decodedToken, swag.DefaultJSONNameProvider)
+}
+
+// SetForToken sets a value for a json pointer token 1 level deep
+func SetForToken(document interface{}, decodedToken string, value interface{}) (interface{}, error) {
+ return document, setSingleImpl(document, value, decodedToken, swag.DefaultJSONNameProvider)
+}
+
+func getSingleImpl(node interface{}, decodedToken string, nameProvider *swag.NameProvider) (interface{}, reflect.Kind, error) {
+ rValue := reflect.Indirect(reflect.ValueOf(node))
+ kind := rValue.Kind()
+
+ if rValue.Type().Implements(jsonPointableType) {
+ r, err := node.(JSONPointable).JSONLookup(decodedToken)
+ if err != nil {
+ return nil, kind, err
+ }
+ return r, kind, nil
+ }
+
+ switch kind {
+ case reflect.Struct:
+ nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
+ if !ok {
+ return nil, kind, fmt.Errorf("object has no field %q", decodedToken)
+ }
+ fld := rValue.FieldByName(nm)
+ return fld.Interface(), kind, nil
+
+ case reflect.Map:
+ kv := reflect.ValueOf(decodedToken)
+ mv := rValue.MapIndex(kv)
+
+ if mv.IsValid() {
+ return mv.Interface(), kind, nil
+ }
+ return nil, kind, fmt.Errorf("object has no key %q", decodedToken)
+
+ case reflect.Slice:
+ tokenIndex, err := strconv.Atoi(decodedToken)
+ if err != nil {
+ return nil, kind, err
+ }
+ sLength := rValue.Len()
+ if tokenIndex < 0 || tokenIndex >= sLength {
+ return nil, kind, fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength-1, tokenIndex)
+ }
+
+ elem := rValue.Index(tokenIndex)
+ return elem.Interface(), kind, nil
+
+ default:
+ return nil, kind, fmt.Errorf("invalid token reference %q", decodedToken)
+ }
+
+}
+
+func setSingleImpl(node, data interface{}, decodedToken string, nameProvider *swag.NameProvider) error {
+ rValue := reflect.Indirect(reflect.ValueOf(node))
+
+ if ns, ok := node.(JSONSetable); ok { // pointer impl
+ return ns.JSONSet(decodedToken, data)
+ }
+
+ if rValue.Type().Implements(jsonSetableType) {
+ return node.(JSONSetable).JSONSet(decodedToken, data)
+ }
+
+ switch rValue.Kind() {
+ case reflect.Struct:
+ nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
+ if !ok {
+ return fmt.Errorf("object has no field %q", decodedToken)
+ }
+ fld := rValue.FieldByName(nm)
+ if fld.IsValid() {
+ fld.Set(reflect.ValueOf(data))
+ }
+ return nil
+
+ case reflect.Map:
+ kv := reflect.ValueOf(decodedToken)
+ rValue.SetMapIndex(kv, reflect.ValueOf(data))
+ return nil
+
+ case reflect.Slice:
+ tokenIndex, err := strconv.Atoi(decodedToken)
+ if err != nil {
+ return err
+ }
+ sLength := rValue.Len()
+ if tokenIndex < 0 || tokenIndex >= sLength {
+ return fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength, tokenIndex)
+ }
+
+ elem := rValue.Index(tokenIndex)
+ if !elem.CanSet() {
+ return fmt.Errorf("can't set slice index %s to %v", decodedToken, data)
+ }
+ elem.Set(reflect.ValueOf(data))
+ return nil
+
+ default:
+ return fmt.Errorf("invalid token reference %q", decodedToken)
+ }
+
+}
+
+func (p *Pointer) get(node interface{}, nameProvider *swag.NameProvider) (interface{}, reflect.Kind, error) {
+
+ if nameProvider == nil {
+ nameProvider = swag.DefaultJSONNameProvider
+ }
+
+ kind := reflect.Invalid
+
+ // Full document when empty
+ if len(p.referenceTokens) == 0 {
+ return node, kind, nil
+ }
+
+ for _, token := range p.referenceTokens {
+
+ decodedToken := Unescape(token)
+
+ r, knd, err := getSingleImpl(node, decodedToken, nameProvider)
+ if err != nil {
+ return nil, knd, err
+ }
+ node, kind = r, knd
+
+ }
+
+ rValue := reflect.ValueOf(node)
+ kind = rValue.Kind()
+
+ return node, kind, nil
+}
+
+func (p *Pointer) set(node, data interface{}, nameProvider *swag.NameProvider) error {
+ knd := reflect.ValueOf(node).Kind()
+
+ if knd != reflect.Ptr && knd != reflect.Struct && knd != reflect.Map && knd != reflect.Slice && knd != reflect.Array {
+ return fmt.Errorf("only structs, pointers, maps and slices are supported for setting values")
+ }
+
+ if nameProvider == nil {
+ nameProvider = swag.DefaultJSONNameProvider
+ }
+
+ // Full document when empty
+ if len(p.referenceTokens) == 0 {
+ return nil
+ }
+
+ lastI := len(p.referenceTokens) - 1
+ for i, token := range p.referenceTokens {
+ isLastToken := i == lastI
+ decodedToken := Unescape(token)
+
+ if isLastToken {
+
+ return setSingleImpl(node, data, decodedToken, nameProvider)
+ }
+
+ rValue := reflect.Indirect(reflect.ValueOf(node))
+ kind := rValue.Kind()
+
+ if rValue.Type().Implements(jsonPointableType) {
+ r, err := node.(JSONPointable).JSONLookup(decodedToken)
+ if err != nil {
+ return err
+ }
+ fld := reflect.ValueOf(r)
+ if fld.CanAddr() && fld.Kind() != reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Ptr {
+ node = fld.Addr().Interface()
+ continue
+ }
+ node = r
+ continue
+ }
+
+ switch kind {
+ case reflect.Struct:
+ nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
+ if !ok {
+ return fmt.Errorf("object has no field %q", decodedToken)
+ }
+ fld := rValue.FieldByName(nm)
+ if fld.CanAddr() && fld.Kind() != reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Ptr {
+ node = fld.Addr().Interface()
+ continue
+ }
+ node = fld.Interface()
+
+ case reflect.Map:
+ kv := reflect.ValueOf(decodedToken)
+ mv := rValue.MapIndex(kv)
+
+ if !mv.IsValid() {
+ return fmt.Errorf("object has no key %q", decodedToken)
+ }
+ if mv.CanAddr() && mv.Kind() != reflect.Interface && mv.Kind() != reflect.Map && mv.Kind() != reflect.Slice && mv.Kind() != reflect.Ptr {
+ node = mv.Addr().Interface()
+ continue
+ }
+ node = mv.Interface()
+
+ case reflect.Slice:
+ tokenIndex, err := strconv.Atoi(decodedToken)
+ if err != nil {
+ return err
+ }
+ sLength := rValue.Len()
+ if tokenIndex < 0 || tokenIndex >= sLength {
+ return fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength, tokenIndex)
+ }
+
+ elem := rValue.Index(tokenIndex)
+ if elem.CanAddr() && elem.Kind() != reflect.Interface && elem.Kind() != reflect.Map && elem.Kind() != reflect.Slice && elem.Kind() != reflect.Ptr {
+ node = elem.Addr().Interface()
+ continue
+ }
+ node = elem.Interface()
+
+ default:
+ return fmt.Errorf("invalid token reference %q", decodedToken)
+ }
+
+ }
+
+ return nil
+}
+
+// DecodedTokens returns the decoded tokens
+func (p *Pointer) DecodedTokens() []string {
+ result := make([]string, 0, len(p.referenceTokens))
+ for _, t := range p.referenceTokens {
+ result = append(result, Unescape(t))
+ }
+ return result
+}
+
+// IsEmpty returns true if this is an empty json pointer
+// this indicates that it points to the root document
+func (p *Pointer) IsEmpty() bool {
+ return len(p.referenceTokens) == 0
+}
+
+// String returns the string representation of the pointer
+func (p *Pointer) String() string {
+
+ if len(p.referenceTokens) == 0 {
+ return emptyPointer
+ }
+
+ pointerString := pointerSeparator + strings.Join(p.referenceTokens, pointerSeparator)
+
+ return pointerString
+}
+
+// Specific JSON pointer encoding here
+// ~0 => ~
+// ~1 => /
+// ... and vice versa
+
+const (
+ encRefTok0 = `~0`
+ encRefTok1 = `~1`
+ decRefTok0 = `~`
+ decRefTok1 = `/`
+)
+
+// Unescape unescapes a json pointer reference token string to the original representation
+func Unescape(token string) string {
+ step1 := strings.Replace(token, encRefTok1, decRefTok1, -1)
+ step2 := strings.Replace(step1, encRefTok0, decRefTok0, -1)
+ return step2
+}
+
+// Escape escapes a pointer reference token string
+func Escape(token string) string {
+ step1 := strings.Replace(token, decRefTok0, encRefTok0, -1)
+ step2 := strings.Replace(step1, decRefTok1, encRefTok1, -1)
+ return step2
+}
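
Putting the pieces together, a minimal sketch of resolving a pointer against a plain Go document, including a key that contains an escaped "/" (runnable against the vendored package above):

	package main

	import (
		"fmt"

		"github.com/go-openapi/jsonpointer"
	)

	func main() {
		doc := map[string]interface{}{
			"definitions": map[string]interface{}{
				"a/b": map[string]interface{}{"type": "string"},
			},
		}

		// "~1" decodes to "/" per the escaping rules above.
		p, err := jsonpointer.New("/definitions/a~1b/type")
		if err != nil {
			panic(err)
		}
		v, kind, err := p.Get(doc)
		fmt.Println(v, kind, err) // v == "string", kind == reflect.String
	}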
diff --git a/vendor/github.com/go-openapi/jsonreference/.gitignore b/vendor/github.com/go-openapi/jsonreference/.gitignore
new file mode 100644
index 000000000..769c24400
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonreference/.gitignore
@@ -0,0 +1 @@
+secrets.yml
diff --git a/vendor/github.com/go-openapi/jsonreference/.golangci.yml b/vendor/github.com/go-openapi/jsonreference/.golangci.yml
new file mode 100644
index 000000000..013fc1943
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonreference/.golangci.yml
@@ -0,0 +1,50 @@
+linters-settings:
+ govet:
+ check-shadowing: true
+ gocyclo:
+ min-complexity: 30
+ maligned:
+ suggest-new: true
+ dupl:
+ threshold: 100
+ goconst:
+ min-len: 2
+ min-occurrences: 4
+ paralleltest:
+ ignore-missing: true
+linters:
+ enable-all: true
+ disable:
+ - maligned
+ - lll
+ - gochecknoglobals
+ - godox
+ - gocognit
+ - whitespace
+ - wsl
+ - funlen
+ - gochecknoglobals
+ - gochecknoinits
+ - scopelint
+ - wrapcheck
+ - exhaustivestruct
+ - exhaustive
+ - nlreturn
+ - testpackage
+ - gci
+ - gofumpt
+ - goerr113
+ - gomnd
+ - tparallel
+ - nestif
+ - godot
+ - errorlint
+ - varcheck
+ - interfacer
+ - deadcode
+ - golint
+ - ifshort
+ - structcheck
+ - nosnakecase
+ - varnamelen
+ - exhaustruct
diff --git a/vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..9322b065e
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md
@@ -0,0 +1,74 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of experience,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at ivan+abuse@flanders.co.nz. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/jsonreference/LICENSE b/vendor/github.com/go-openapi/jsonreference/LICENSE
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonreference/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/go-openapi/jsonreference/README.md b/vendor/github.com/go-openapi/jsonreference/README.md
new file mode 100644
index 000000000..b94753aa5
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonreference/README.md
@@ -0,0 +1,15 @@
+# gojsonreference [![Build Status](https://travis-ci.org/go-openapi/jsonreference.svg?branch=master)](https://travis-ci.org/go-openapi/jsonreference) [![codecov](https://codecov.io/gh/go-openapi/jsonreference/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/jsonreference) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
+
+[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/jsonreference/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/jsonreference?status.svg)](http://godoc.org/github.com/go-openapi/jsonreference)
+An implementation of JSON Reference - Go language
+
+## Status
+Feature complete. Stable API.
+
+## Dependencies
+https://github.com/go-openapi/jsonpointer
+
+## References
+http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07
+
+http://tools.ietf.org/html/draft-pbryan-zyp-json-ref-03
diff --git a/vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go b/vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go
new file mode 100644
index 000000000..f0610cf1e
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go
@@ -0,0 +1,69 @@
+package internal
+
+import (
+ "net/url"
+ "regexp"
+ "strings"
+)
+
+const (
+ defaultHTTPPort = ":80"
+ defaultHTTPSPort = ":443"
+)
+
+// Regular expressions used by the normalizations
+var rxPort = regexp.MustCompile(`(:\d+)/?$`)
+var rxDupSlashes = regexp.MustCompile(`/{2,}`)
+
+// NormalizeURL will normalize the specified URL
+// This was added to replace a previous call to the no longer maintained purell library:
+// The call that was used looked like the following:
+//
+// url.Parse(purell.NormalizeURL(parsed, purell.FlagsSafe|purell.FlagRemoveDuplicateSlashes))
+//
+// To explain all that was included in the call above, purell.FlagsSafe was really just the following:
+// - FlagLowercaseScheme
+// - FlagLowercaseHost
+// - FlagRemoveDefaultPort
+// - FlagRemoveDuplicateSlashes (and this was mixed in with the |)
+//
+// This also normalizes the URL into its urlencoded form by removing RawPath and RawFragment.
+func NormalizeURL(u *url.URL) {
+ lowercaseScheme(u)
+ lowercaseHost(u)
+ removeDefaultPort(u)
+ removeDuplicateSlashes(u)
+
+ u.RawPath = ""
+ u.RawFragment = ""
+}
+
+func lowercaseScheme(u *url.URL) {
+ if len(u.Scheme) > 0 {
+ u.Scheme = strings.ToLower(u.Scheme)
+ }
+}
+
+func lowercaseHost(u *url.URL) {
+ if len(u.Host) > 0 {
+ u.Host = strings.ToLower(u.Host)
+ }
+}
+
+func removeDefaultPort(u *url.URL) {
+ if len(u.Host) > 0 {
+ scheme := strings.ToLower(u.Scheme)
+ u.Host = rxPort.ReplaceAllStringFunc(u.Host, func(val string) string {
+ if (scheme == "http" && val == defaultHTTPPort) || (scheme == "https" && val == defaultHTTPSPort) {
+ return ""
+ }
+ return val
+ })
+ }
+}
+
+func removeDuplicateSlashes(u *url.URL) {
+ if len(u.Path) > 0 {
+ u.Path = rxDupSlashes.ReplaceAllString(u.Path, "/")
+ }
+}
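+
+// Illustrative example, not part of the upstream file. The combined
+// normalizations lowercase the host, drop the default port for the scheme,
+// and collapse duplicate slashes (url.Parse already lowercases the scheme):
+//
+//	u, _ := url.Parse("HTTPS://Example.COM:443//a//b")
+//	NormalizeURL(u)
+//	// u.String() == "https://example.com/a/b"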
diff --git a/vendor/github.com/go-openapi/jsonreference/reference.go b/vendor/github.com/go-openapi/jsonreference/reference.go
new file mode 100644
index 000000000..cfdef03e5
--- /dev/null
+++ b/vendor/github.com/go-openapi/jsonreference/reference.go
@@ -0,0 +1,158 @@
+// Copyright 2013 sigu-399 ( https://github.com/sigu-399 )
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// author sigu-399
+// author-github https://github.com/sigu-399
+// author-mail sigu.399@gmail.com
+//
+// repository-name jsonreference
+// repository-desc An implementation of JSON Reference - Go language
+//
+// description Main and unique file.
+//
+// created 26-02-2013
+
+package jsonreference
+
+import (
+ "errors"
+ "net/url"
+ "strings"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/jsonreference/internal"
+)
+
+const (
+ fragmentRune = `#`
+)
+
+// New creates a new reference for the given string
+func New(jsonReferenceString string) (Ref, error) {
+
+ var r Ref
+ err := r.parse(jsonReferenceString)
+ return r, err
+
+}
+
+// MustCreateRef parses the ref string and panics when it's invalid.
+// Use the New method for a version that returns an error
+func MustCreateRef(ref string) Ref {
+ r, err := New(ref)
+ if err != nil {
+ panic(err)
+ }
+ return r
+}
+
+// Ref represents a json reference object
+type Ref struct {
+ referenceURL *url.URL
+ referencePointer jsonpointer.Pointer
+
+ HasFullURL bool
+ HasURLPathOnly bool
+ HasFragmentOnly bool
+ HasFileScheme bool
+ HasFullFilePath bool
+}
+
+// GetURL gets the URL for this reference
+func (r *Ref) GetURL() *url.URL {
+ return r.referenceURL
+}
+
+// GetPointer gets the json pointer for this reference
+func (r *Ref) GetPointer() *jsonpointer.Pointer {
+ return &r.referencePointer
+}
+
+// String returns the best version of the url for this reference
+func (r *Ref) String() string {
+
+ if r.referenceURL != nil {
+ return r.referenceURL.String()
+ }
+
+ if r.HasFragmentOnly {
+ return fragmentRune + r.referencePointer.String()
+ }
+
+ return r.referencePointer.String()
+}
+
+// IsRoot returns true if this reference is a root document
+func (r *Ref) IsRoot() bool {
+ return r.referenceURL != nil &&
+ !r.IsCanonical() &&
+ !r.HasURLPathOnly &&
+ r.referenceURL.Fragment == ""
+}
+
+// IsCanonical returns true when this pointer starts with http(s):// or file://
+func (r *Ref) IsCanonical() bool {
+ return (r.HasFileScheme && r.HasFullFilePath) || (!r.HasFileScheme && r.HasFullURL)
+}
+
+// "Constructor", parses the given string JSON reference
+func (r *Ref) parse(jsonReferenceString string) error {
+
+ parsed, err := url.Parse(jsonReferenceString)
+ if err != nil {
+ return err
+ }
+
+ internal.NormalizeURL(parsed)
+
+ r.referenceURL = parsed
+ refURL := r.referenceURL
+
+ if refURL.Scheme != "" && refURL.Host != "" {
+ r.HasFullURL = true
+ } else {
+ if refURL.Path != "" {
+ r.HasURLPathOnly = true
+ } else if refURL.RawQuery == "" && refURL.Fragment != "" {
+ r.HasFragmentOnly = true
+ }
+ }
+
+ r.HasFileScheme = refURL.Scheme == "file"
+ r.HasFullFilePath = strings.HasPrefix(refURL.Path, "/")
+
+ // invalid json-pointer error means url has no json-pointer fragment. simply ignore error
+ r.referencePointer, _ = jsonpointer.New(refURL.Fragment)
+
+ return nil
+}
+
+// Inherits creates a new reference from a parent and a child
+// If the child cannot inherit from the parent, an error is returned
+func (r *Ref) Inherits(child Ref) (*Ref, error) {
+ childURL := child.GetURL()
+ parentURL := r.GetURL()
+ if childURL == nil {
+ return nil, errors.New("child url is nil")
+ }
+ if parentURL == nil {
+ return &child, nil
+ }
+
+ ref, err := New(parentURL.ResolveReference(childURL).String())
+ if err != nil {
+ return nil, err
+ }
+ return &ref, nil
+}
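+
+// Illustrative example, not part of the upstream file: resolving a
+// fragment-only child reference against an absolute parent via Inherits,
+// which delegates to url.URL.ResolveReference:
+//
+//	parent, _ := New("http://example.com/schemas/base.json")
+//	child, _ := New("#/definitions/Pet")
+//	resolved, _ := parent.Inherits(child)
+//	// resolved.String() == "http://example.com/schemas/base.json#/definitions/Pet"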
diff --git a/vendor/github.com/go-openapi/loads/.editorconfig b/vendor/github.com/go-openapi/loads/.editorconfig
new file mode 100644
index 000000000..3152da69a
--- /dev/null
+++ b/vendor/github.com/go-openapi/loads/.editorconfig
@@ -0,0 +1,26 @@
+# top-most EditorConfig file
+root = true
+
+# Unix-style newlines with a newline ending every file
+[*]
+end_of_line = lf
+insert_final_newline = true
+indent_style = space
+indent_size = 2
+trim_trailing_whitespace = true
+
+# Set default charset
+[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
+charset = utf-8
+
+# Tab indentation (no size specified)
+[*.go]
+indent_style = tab
+
+[*.md]
+trim_trailing_whitespace = false
+
+# Matches the exact files either package.json or .travis.yml
+[{package.json,.travis.yml}]
+indent_style = space
+indent_size = 2
diff --git a/vendor/github.com/go-openapi/loads/.gitignore b/vendor/github.com/go-openapi/loads/.gitignore
new file mode 100644
index 000000000..e4f15f17b
--- /dev/null
+++ b/vendor/github.com/go-openapi/loads/.gitignore
@@ -0,0 +1,4 @@
+secrets.yml
+coverage.out
+profile.cov
+profile.out
diff --git a/vendor/github.com/go-openapi/loads/.golangci.yml b/vendor/github.com/go-openapi/loads/.golangci.yml
new file mode 100644
index 000000000..d48b4a515
--- /dev/null
+++ b/vendor/github.com/go-openapi/loads/.golangci.yml
@@ -0,0 +1,44 @@
+linters-settings:
+ govet:
+ check-shadowing: true
+ golint:
+ min-confidence: 0
+ gocyclo:
+ min-complexity: 30
+ maligned:
+ suggest-new: true
+ dupl:
+ threshold: 100
+ goconst:
+ min-len: 2
+ min-occurrences: 4
+
+linters:
+ enable-all: true
+ disable:
+ - maligned
+ - lll
+ - gochecknoglobals
+ - gochecknoinits
+ - godox
+ - gocognit
+ - whitespace
+ - wsl
+ - funlen
+ - gochecknoglobals
+ - gochecknoinits
+ - scopelint
+ - wrapcheck
+ - exhaustivestruct
+ - exhaustive
+ - nlreturn
+ - testpackage
+ - gci
+ - gofumpt
+ - goerr113
+ - gomnd
+ - tparallel
+ - nestif
+ - godot
+ - errorlint
+ - paralleltest
diff --git a/vendor/github.com/go-openapi/loads/.travis.yml b/vendor/github.com/go-openapi/loads/.travis.yml
new file mode 100644
index 000000000..cd4a7c331
--- /dev/null
+++ b/vendor/github.com/go-openapi/loads/.travis.yml
@@ -0,0 +1,25 @@
+after_success:
+- bash <(curl -s https://codecov.io/bash)
+go:
+- 1.16.x
+- 1.x
+install:
+- go get gotest.tools/gotestsum
+language: go
+arch:
+- amd64
+- ppc64le
+jobs:
+ include:
+ # include linting job, but only for latest go version and amd64 arch
+ - go: 1.x
+ arch: amd64
+ install:
+ go get github.com/golangci/golangci-lint/cmd/golangci-lint
+ script:
+ - golangci-lint run --new-from-rev master
+notifications:
+ slack:
+ secure: OxkPwVp35qBTUilgWC8xykSj+sGMcj0h8IIOKD+Rflx2schZVlFfdYdyVBM+s9OqeOfvtuvnR9v1Ye2rPKAvcjWdC4LpRGUsgmItZaI6Um8Aj6+K9udCw5qrtZVfOVmRu8LieH//XznWWKdOultUuniW0MLqw5+II87Gd00RWbCGi0hk0PykHe7uK+PDA2BEbqyZ2WKKYCvfB3j+0nrFOHScXqnh0V05l2E83J4+Sgy1fsPy+1WdX58ZlNBG333ibaC1FS79XvKSmTgKRkx3+YBo97u6ZtUmJa5WZjf2OdLG3KIckGWAv6R5xgxeU31N0Ng8L332w/Edpp2O/M2bZwdnKJ8hJQikXIAQbICbr+lTDzsoNzMdEIYcHpJ5hjPbiUl3Bmd+Jnsjf5McgAZDiWIfpCKZ29tPCEkVwRsOCqkyPRMNMzHHmoja495P5jR+ODS7+J8RFg5xgcnOgpP9D4Wlhztlf5WyZMpkLxTUD+bZq2SRf50HfHFXTkfq22zPl3d1eq0yrLwh/Z/fWKkfb6SyysROL8y6s8u3dpFX1YHSg0BR6i913h4aoZw9B2BG27cafLLTwKYsp2dFo1PWl4O6u9giFJIeqwloZHLKKrwh0cBFhB7RH0I58asxkZpCH6uWjJierahmHe7iS+E6i+9oCHkOZ59hmCYNimIs3hM=
+script:
+- gotestsum -f short-verbose -- -race -timeout=20m -coverprofile=coverage.txt -covermode=atomic ./...
diff --git a/vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..9322b065e
--- /dev/null
+++ b/vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md
@@ -0,0 +1,74 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of experience,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at ivan+abuse@flanders.co.nz. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/loads/LICENSE b/vendor/github.com/go-openapi/loads/LICENSE
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/vendor/github.com/go-openapi/loads/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/go-openapi/loads/README.md b/vendor/github.com/go-openapi/loads/README.md
new file mode 100644
index 000000000..df1f62646
--- /dev/null
+++ b/vendor/github.com/go-openapi/loads/README.md
@@ -0,0 +1,6 @@
+# Loads OAI specs [![Build Status](https://travis-ci.org/go-openapi/loads.svg?branch=master)](https://travis-ci.org/go-openapi/loads) [![codecov](https://codecov.io/gh/go-openapi/loads/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/loads) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) [![Actions/Go Test Status](https://github.com/go-openapi/loads/workflows/Go%20Test/badge.svg)](https://github.com/go-openapi/loads/actions?query=workflow%3A"Go+Test")
+
+[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/loads/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/loads?status.svg)](http://godoc.org/github.com/go-openapi/loads)
+[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/loads)](https://goreportcard.com/report/github.com/go-openapi/loads)
+
+Loading of OAI specification documents from local or remote locations. Supports JSON and YAML documents.
diff --git a/vendor/github.com/go-openapi/loads/doc.go b/vendor/github.com/go-openapi/loads/doc.go
new file mode 100644
index 000000000..3046da4ce
--- /dev/null
+++ b/vendor/github.com/go-openapi/loads/doc.go
@@ -0,0 +1,21 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package loads provides document loading methods for swagger (OAI) specifications.
+
+It is used by other go-openapi packages to load and run analysis on local or remote spec documents.
+*/
+package loads
diff --git a/vendor/github.com/go-openapi/loads/fmts/yaml.go b/vendor/github.com/go-openapi/loads/fmts/yaml.go
new file mode 100644
index 000000000..1cef2ac22
--- /dev/null
+++ b/vendor/github.com/go-openapi/loads/fmts/yaml.go
@@ -0,0 +1,30 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package fmts
+
+import "github.com/go-openapi/swag"
+
+var (
+ // YAMLMatcher matches yaml
+ YAMLMatcher = swag.YAMLMatcher
+ // YAMLToJSON converts YAML unmarshaled data into json compatible data
+ YAMLToJSON = swag.YAMLToJSON
+ // BytesToYAMLDoc converts raw bytes to a map[string]interface{}
+ BytesToYAMLDoc = swag.BytesToYAMLDoc
+ // YAMLDoc loads a yaml document from either http or a file and converts it to json
+ YAMLDoc = swag.YAMLDoc
+ // YAMLData loads a yaml document from either http or a file
+ YAMLData = swag.YAMLData
+)
diff --git a/vendor/github.com/go-openapi/loads/loaders.go b/vendor/github.com/go-openapi/loads/loaders.go
new file mode 100644
index 000000000..44bd32b5b
--- /dev/null
+++ b/vendor/github.com/go-openapi/loads/loaders.go
@@ -0,0 +1,134 @@
+package loads
+
+import (
+ "encoding/json"
+ "errors"
+ "net/url"
+
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+var (
+ // Default chain of loaders, defined at the package level.
+ //
+ // By default this matches json and yaml documents.
+ //
+ // May be altered with AddLoader().
+ loaders *loader
+)
+
+func init() {
+ jsonLoader := &loader{
+ DocLoaderWithMatch: DocLoaderWithMatch{
+ Match: func(pth string) bool {
+ return true
+ },
+ Fn: JSONDoc,
+ },
+ }
+
+ loaders = jsonLoader.WithHead(&loader{
+ DocLoaderWithMatch: DocLoaderWithMatch{
+ Match: swag.YAMLMatcher,
+ Fn: swag.YAMLDoc,
+ },
+ })
+
+ // sets the global default loader for go-openapi/spec
+ spec.PathLoader = loaders.Load
+}
+
+// DocLoader represents a doc loader type
+type DocLoader func(string) (json.RawMessage, error)
+
+// DocMatcher represents a predicate to check if a loader matches
+type DocMatcher func(string) bool
+
+// DocLoaderWithMatch describes a loading function for a given extension match.
+type DocLoaderWithMatch struct {
+ Fn DocLoader
+ Match DocMatcher
+}
+
+// NewDocLoaderWithMatch builds a DocLoaderWithMatch to be used in load options
+func NewDocLoaderWithMatch(fn DocLoader, matcher DocMatcher) DocLoaderWithMatch {
+ return DocLoaderWithMatch{
+ Fn: fn,
+ Match: matcher,
+ }
+}
+
+type loader struct {
+ DocLoaderWithMatch
+ Next *loader
+}
+
+// WithHead adds a loader at the head of the current stack
+func (l *loader) WithHead(head *loader) *loader {
+ if head == nil {
+ return l
+ }
+ head.Next = l
+ return head
+}
+
+// WithNext adds a loader at the trail of the current stack
+func (l *loader) WithNext(next *loader) *loader {
+ l.Next = next
+ return next
+}
+
+// Load the raw document from path
+func (l *loader) Load(path string) (json.RawMessage, error) {
+ _, erp := url.Parse(path)
+ if erp != nil {
+ return nil, erp
+ }
+
+ var lastErr error = errors.New("no loader matched") // default error if no match was found
+ for ldr := l; ldr != nil; ldr = ldr.Next {
+ if ldr.Match != nil && !ldr.Match(path) {
+ continue
+ }
+
+ // try then move to next one if there is an error
+ b, err := ldr.Fn(path)
+ if err == nil {
+ return b, nil
+ }
+
+ lastErr = err
+ }
+
+ return nil, lastErr
+}
+
+// JSONDoc loads a json document from either a file or a remote url
+func JSONDoc(path string) (json.RawMessage, error) {
+ data, err := swag.LoadFromFileOrHTTP(path)
+ if err != nil {
+ return nil, err
+ }
+ return json.RawMessage(data), nil
+}
+
+// AddLoader for a document, executed before other previously set loaders.
+//
+// This sets the configuration at the package level.
+//
+// NOTE:
+// * this updates the default loader used by github.com/go-openapi/spec
+// * since this sets package level globals, you shouldn't call this concurrently
+//
+func AddLoader(predicate DocMatcher, load DocLoader) {
+ loaders = loaders.WithHead(&loader{
+ DocLoaderWithMatch: DocLoaderWithMatch{
+ Match: predicate,
+ Fn: load,
+ },
+ })
+
+ // sets the global default loader for go-openapi/spec
+ spec.PathLoader = loaders.Load
+}
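+
+// Illustrative example, not part of the upstream file: registering a custom
+// loader ahead of the default JSON/YAML chain. txtDoc is a hypothetical
+// DocLoader supplied by the caller:
+//
+//	AddLoader(
+//		func(pth string) bool { return strings.HasSuffix(pth, ".txt") },
+//		txtDoc,
+//	)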
diff --git a/vendor/github.com/go-openapi/loads/options.go b/vendor/github.com/go-openapi/loads/options.go
new file mode 100644
index 000000000..f8305d560
--- /dev/null
+++ b/vendor/github.com/go-openapi/loads/options.go
@@ -0,0 +1,61 @@
+package loads
+
+type options struct {
+ loader *loader
+}
+
+func defaultOptions() *options {
+ return &options{
+ loader: loaders,
+ }
+}
+
+func loaderFromOptions(options []LoaderOption) *loader {
+ opts := defaultOptions()
+ for _, apply := range options {
+ apply(opts)
+ }
+
+ return opts.loader
+}
+
+// LoaderOption allows to fine-tune the spec loader behavior
+type LoaderOption func(*options)
+
+// WithDocLoader sets a custom loader for loading specs
+func WithDocLoader(l DocLoader) LoaderOption {
+ return func(opt *options) {
+ if l == nil {
+ return
+ }
+ opt.loader = &loader{
+ DocLoaderWithMatch: DocLoaderWithMatch{
+ Fn: l,
+ },
+ }
+ }
+}
+
+// WithDocLoaderMatches sets a chain of custom loaders for loading specs
+// for different extension matches.
+//
+// Loaders are executed in the order of provided DocLoaderWithMatch'es.
+func WithDocLoaderMatches(l ...DocLoaderWithMatch) LoaderOption {
+ return func(opt *options) {
+ var final, prev *loader
+ for _, ldr := range l {
+ if ldr.Fn == nil {
+ continue
+ }
+
+ if prev == nil {
+ final = &loader{DocLoaderWithMatch: ldr}
+ prev = final
+ continue
+ }
+
+ prev = prev.WithNext(&loader{DocLoaderWithMatch: ldr})
+ }
+ opt.loader = final
+ }
+}
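+
+// Illustrative example, not part of the upstream file: a two-stage chain
+// that tries a YAML loader first and falls back to JSON for everything else
+// (YAMLDoc and YAMLMatcher come from github.com/go-openapi/swag):
+//
+//	Spec("./swagger.yml", WithDocLoaderMatches(
+//		NewDocLoaderWithMatch(swag.YAMLDoc, swag.YAMLMatcher),
+//		NewDocLoaderWithMatch(JSONDoc, func(string) bool { return true }),
+//	))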
diff --git a/vendor/github.com/go-openapi/loads/spec.go b/vendor/github.com/go-openapi/loads/spec.go
new file mode 100644
index 000000000..93c8d4b89
--- /dev/null
+++ b/vendor/github.com/go-openapi/loads/spec.go
@@ -0,0 +1,266 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package loads
+
+import (
+ "bytes"
+ "encoding/gob"
+ "encoding/json"
+ "fmt"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+func init() {
+ gob.Register(map[string]interface{}{})
+ gob.Register([]interface{}{})
+}
+
+// Document represents a swagger spec document
+type Document struct {
+ // specAnalyzer
+ Analyzer *analysis.Spec
+ spec *spec.Swagger
+ specFilePath string
+ origSpec *spec.Swagger
+ schema *spec.Schema
+ raw json.RawMessage
+ pathLoader *loader
+}
+
+// JSONSpec loads a spec from a json document
+func JSONSpec(path string, options ...LoaderOption) (*Document, error) {
+ data, err := JSONDoc(path)
+ if err != nil {
+ return nil, err
+ }
+ // run the analysis on the loaded document
+ return Analyzed(data, "", options...)
+}
+
+// Embedded returns a Document based on embedded specs. No analysis is required
+func Embedded(orig, flat json.RawMessage, options ...LoaderOption) (*Document, error) {
+ var origSpec, flatSpec spec.Swagger
+ if err := json.Unmarshal(orig, &origSpec); err != nil {
+ return nil, err
+ }
+ if err := json.Unmarshal(flat, &flatSpec); err != nil {
+ return nil, err
+ }
+ return &Document{
+ raw: orig,
+ origSpec: &origSpec,
+ spec: &flatSpec,
+ pathLoader: loaderFromOptions(options),
+ }, nil
+}
+
+// Spec loads a new spec document from a local or remote path
+func Spec(path string, options ...LoaderOption) (*Document, error) {
+
+ ldr := loaderFromOptions(options)
+
+ b, err := ldr.Load(path)
+ if err != nil {
+ return nil, err
+ }
+
+ document, err := Analyzed(b, "", options...)
+ if err != nil {
+ return nil, err
+ }
+
+ if document != nil {
+ document.specFilePath = path
+ document.pathLoader = ldr
+ }
+
+ return document, err
+}
+
+// Analyzed creates a new analyzed spec document for a root json.RawMessage.
+func Analyzed(data json.RawMessage, version string, options ...LoaderOption) (*Document, error) {
+ if version == "" {
+ version = "2.0"
+ }
+ if version != "2.0" {
+ return nil, fmt.Errorf("spec version %q is not supported", version)
+ }
+
+ raw, err := trimData(data) // trim blanks, then convert yaml docs into json
+ if err != nil {
+ return nil, err
+ }
+
+ swspec := new(spec.Swagger)
+ if err = json.Unmarshal(raw, swspec); err != nil {
+ return nil, err
+ }
+
+ origsqspec, err := cloneSpec(swspec)
+ if err != nil {
+ return nil, err
+ }
+
+ d := &Document{
+ Analyzer: analysis.New(swspec),
+ schema: spec.MustLoadSwagger20Schema(),
+ spec: swspec,
+ raw: raw,
+ origSpec: origsqspec,
+ pathLoader: loaderFromOptions(options),
+ }
+
+ return d, nil
+}
+
+func trimData(in json.RawMessage) (json.RawMessage, error) {
+ trimmed := bytes.TrimSpace(in)
+ if len(trimmed) == 0 {
+ return in, nil
+ }
+
+ if trimmed[0] == '{' || trimmed[0] == '[' {
+ return trimmed, nil
+ }
+
+ // assume yaml doc: convert it to json
+ yml, err := swag.BytesToYAMLDoc(trimmed)
+ if err != nil {
+ return nil, fmt.Errorf("analyzed: %v", err)
+ }
+
+ d, err := swag.YAMLToJSON(yml)
+ if err != nil {
+ return nil, fmt.Errorf("analyzed: %v", err)
+ }
+
+ return d, nil
+}
+
+// Expanded expands the ref fields in the spec document and returns a new spec document
+func (d *Document) Expanded(options ...*spec.ExpandOptions) (*Document, error) {
+
+ swspec := new(spec.Swagger)
+ if err := json.Unmarshal(d.raw, swspec); err != nil {
+ return nil, err
+ }
+
+ var expandOptions *spec.ExpandOptions
+ if len(options) > 0 {
+ expandOptions = options[0]
+ } else {
+ expandOptions = &spec.ExpandOptions{
+ RelativeBase: d.specFilePath,
+ }
+ }
+
+ if expandOptions.PathLoader == nil {
+ if d.pathLoader != nil {
+ // use loader from Document options
+ expandOptions.PathLoader = d.pathLoader.Load
+ } else {
+ // use package level loader
+ expandOptions.PathLoader = loaders.Load
+ }
+ }
+
+ if err := spec.ExpandSpec(swspec, expandOptions); err != nil {
+ return nil, err
+ }
+
+ dd := &Document{
+ Analyzer: analysis.New(swspec),
+ spec: swspec,
+ specFilePath: d.specFilePath,
+ schema: spec.MustLoadSwagger20Schema(),
+ raw: d.raw,
+ origSpec: d.origSpec,
+ }
+ return dd, nil
+}
+
+// BasePath returns the base path for this spec
+func (d *Document) BasePath() string {
+ return d.spec.BasePath
+}
+
+// Version returns the version of this spec
+func (d *Document) Version() string {
+ return d.spec.Swagger
+}
+
+// Schema returns the swagger 2.0 schema
+func (d *Document) Schema() *spec.Schema {
+ return d.schema
+}
+
+// Spec returns the swagger spec object model
+func (d *Document) Spec() *spec.Swagger {
+ return d.spec
+}
+
+// Host returns the host for the API
+func (d *Document) Host() string {
+ return d.spec.Host
+}
+
+// Raw returns the raw swagger spec as json bytes
+func (d *Document) Raw() json.RawMessage {
+ return d.raw
+}
+
+// OrigSpec yields the original spec
+func (d *Document) OrigSpec() *spec.Swagger {
+ return d.origSpec
+}
+
+// ResetDefinitions gives a shallow copy with the models reset to the original spec
+func (d *Document) ResetDefinitions() *Document {
+ defs := make(map[string]spec.Schema, len(d.origSpec.Definitions))
+ for k, v := range d.origSpec.Definitions {
+ defs[k] = v
+ }
+
+ d.spec.Definitions = defs
+ return d
+}
+
+// Pristine creates a new pristine document instance based on the input data
+func (d *Document) Pristine() *Document {
+ dd, _ := Analyzed(d.Raw(), d.Version())
+ dd.pathLoader = d.pathLoader
+ return dd
+}
+
+// SpecFilePath returns the file path of the spec if one is defined
+func (d *Document) SpecFilePath() string {
+ return d.specFilePath
+}
+
+func cloneSpec(src *spec.Swagger) (*spec.Swagger, error) {
+ var b bytes.Buffer
+ if err := gob.NewEncoder(&b).Encode(src); err != nil {
+ return nil, err
+ }
+
+ var dst spec.Swagger
+ if err := gob.NewDecoder(&b).Decode(&dst); err != nil {
+ return nil, err
+ }
+ return &dst, nil
+}
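+
+// Illustrative example, not part of the upstream file: loading a spec
+// document and expanding its $ref fields relative to the path it was
+// loaded from:
+//
+//	doc, err := Spec("./swagger.yml")
+//	if err != nil {
+//		// handle error
+//	}
+//	expanded, err := doc.Expanded()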
diff --git a/vendor/github.com/go-openapi/runtime/.editorconfig b/vendor/github.com/go-openapi/runtime/.editorconfig
new file mode 100644
index 000000000..3152da69a
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/.editorconfig
@@ -0,0 +1,26 @@
+# top-most EditorConfig file
+root = true
+
+# Unix-style newlines with a newline ending every file
+[*]
+end_of_line = lf
+insert_final_newline = true
+indent_style = space
+indent_size = 2
+trim_trailing_whitespace = true
+
+# Set default charset
+[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
+charset = utf-8
+
+# Tab indentation (no size specified)
+[*.go]
+indent_style = tab
+
+[*.md]
+trim_trailing_whitespace = false
+
+# Matches the exact files either package.json or .travis.yml
+[{package.json,.travis.yml}]
+indent_style = space
+indent_size = 2
diff --git a/vendor/github.com/go-openapi/runtime/.gitattributes b/vendor/github.com/go-openapi/runtime/.gitattributes
new file mode 100644
index 000000000..d207b1802
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/.gitattributes
@@ -0,0 +1 @@
+*.go text eol=lf
diff --git a/vendor/github.com/go-openapi/runtime/.gitignore b/vendor/github.com/go-openapi/runtime/.gitignore
new file mode 100644
index 000000000..fea8b84ec
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/.gitignore
@@ -0,0 +1,5 @@
+secrets.yml
+coverage.out
+*.cov
+*.out
+playground
diff --git a/vendor/github.com/go-openapi/runtime/.golangci.yml b/vendor/github.com/go-openapi/runtime/.golangci.yml
new file mode 100644
index 000000000..b1aa7928a
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/.golangci.yml
@@ -0,0 +1,44 @@
+linters-settings:
+ govet:
+ # Using err repeatedly considered as shadowing.
+ check-shadowing: false
+ golint:
+ min-confidence: 0
+ gocyclo:
+ min-complexity: 30
+ maligned:
+ suggest-new: true
+ dupl:
+ threshold: 100
+ goconst:
+ min-len: 2
+ min-occurrences: 4
+linters:
+ disable:
+ - maligned
+ - lll
+ - gochecknoglobals
+ - godox
+ - gocognit
+ - whitespace
+ - wsl
+ - funlen
+ - gochecknoglobals
+ - gochecknoinits
+ - scopelint
+ - wrapcheck
+ - exhaustivestruct
+ - exhaustive
+ - nlreturn
+ - testpackage
+ - gci
+ - gofumpt
+ - goerr113
+ - gomnd
+ - tparallel
+ - nestif
+ - godot
+ - errorlint
+ - noctx
+ - interfacer
+ - nilerr
diff --git a/vendor/github.com/go-openapi/runtime/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/runtime/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..9322b065e
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/CODE_OF_CONDUCT.md
@@ -0,0 +1,74 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of experience,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at ivan+abuse@flanders.co.nz. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/runtime/LICENSE b/vendor/github.com/go-openapi/runtime/LICENSE
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/go-openapi/runtime/README.md b/vendor/github.com/go-openapi/runtime/README.md
new file mode 100644
index 000000000..5b1ec6494
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/README.md
@@ -0,0 +1,7 @@
+# runtime [![Build Status](https://travis-ci.org/go-openapi/runtime.svg?branch=client-context)](https://travis-ci.org/go-openapi/runtime) [![codecov](https://codecov.io/gh/go-openapi/runtime/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/runtime) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
+
+[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/runtime/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/runtime?status.svg)](http://godoc.org/github.com/go-openapi/runtime)
+
+# golang Open-API toolkit - runtime
+
+The runtime component for use in code generation or for untyped usage.
diff --git a/vendor/github.com/go-openapi/runtime/bytestream.go b/vendor/github.com/go-openapi/runtime/bytestream.go
new file mode 100644
index 000000000..6eb6ceb5c
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/bytestream.go
@@ -0,0 +1,169 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package runtime
+
+import (
+ "bytes"
+ "encoding"
+ "errors"
+ "fmt"
+ "io"
+ "reflect"
+
+ "github.com/go-openapi/swag"
+)
+
+func defaultCloser() error { return nil }
+
+type byteStreamOpt func(opts *byteStreamOpts)
+
+// ClosesStream when the bytestream consumer or producer is finished
+func ClosesStream(opts *byteStreamOpts) {
+ opts.Close = true
+}
+
+type byteStreamOpts struct {
+ Close bool
+}
+
+// ByteStreamConsumer creates a consumer for byte streams,
+// takes a Writer/BinaryUnmarshaler interface or binary slice by reference,
+// and reads from the provided reader
+func ByteStreamConsumer(opts ...byteStreamOpt) Consumer {
+ var vals byteStreamOpts
+ for _, opt := range opts {
+ opt(&vals)
+ }
+
+ return ConsumerFunc(func(reader io.Reader, data interface{}) error {
+ if reader == nil {
+ return errors.New("ByteStreamConsumer requires a reader") // early exit
+ }
+
+ close := defaultCloser
+ if vals.Close {
+ if cl, ok := reader.(io.Closer); ok {
+ close = cl.Close
+ }
+ }
+ //nolint:errcheck // closing a reader wouldn't fail.
+ defer close()
+
+ if wrtr, ok := data.(io.Writer); ok {
+ _, err := io.Copy(wrtr, reader)
+ return err
+ }
+
+ buf := new(bytes.Buffer)
+ _, err := buf.ReadFrom(reader)
+ if err != nil {
+ return err
+ }
+ b := buf.Bytes()
+
+ if bu, ok := data.(encoding.BinaryUnmarshaler); ok {
+ return bu.UnmarshalBinary(b)
+ }
+
+ if data != nil {
+ if str, ok := data.(*string); ok {
+ *str = string(b)
+ return nil
+ }
+ }
+
+ if t := reflect.TypeOf(data); data != nil && t.Kind() == reflect.Ptr {
+ v := reflect.Indirect(reflect.ValueOf(data))
+ if t = v.Type(); t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8 {
+ v.SetBytes(b)
+ return nil
+ }
+ }
+
+ return fmt.Errorf("%v (%T) is not supported by the ByteStreamConsumer, %s",
+ data, data, "can be resolved by supporting Writer/BinaryUnmarshaler interface")
+ })
+}
+
+// ByteStreamProducer creates a producer for byte streams,
+// takes a Reader/BinaryMarshaler interface or binary slice,
+// and writes to a writer (essentially a pipe)
+func ByteStreamProducer(opts ...byteStreamOpt) Producer {
+ var vals byteStreamOpts
+ for _, opt := range opts {
+ opt(&vals)
+ }
+ return ProducerFunc(func(writer io.Writer, data interface{}) error {
+ if writer == nil {
+ return errors.New("ByteStreamProducer requires a writer") // early exit
+ }
+ close := defaultCloser
+ if vals.Close {
+ if cl, ok := writer.(io.Closer); ok {
+ close = cl.Close
+ }
+ }
+		//nolint:errcheck // TODO: closing a writer could fail; the error is currently ignored.
+ defer close()
+
+ if rc, ok := data.(io.ReadCloser); ok {
+ defer rc.Close()
+ }
+
+ if rdr, ok := data.(io.Reader); ok {
+ _, err := io.Copy(writer, rdr)
+ return err
+ }
+
+ if bm, ok := data.(encoding.BinaryMarshaler); ok {
+ bytes, err := bm.MarshalBinary()
+ if err != nil {
+ return err
+ }
+
+ _, err = writer.Write(bytes)
+ return err
+ }
+
+ if data != nil {
+ if str, ok := data.(string); ok {
+ _, err := writer.Write([]byte(str))
+ return err
+ }
+
+ if e, ok := data.(error); ok {
+ _, err := writer.Write([]byte(e.Error()))
+ return err
+ }
+
+ v := reflect.Indirect(reflect.ValueOf(data))
+ if t := v.Type(); t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8 {
+ _, err := writer.Write(v.Bytes())
+ return err
+ }
+ if t := v.Type(); t.Kind() == reflect.Struct || t.Kind() == reflect.Slice {
+ b, err := swag.WriteJSON(data)
+ if err != nil {
+ return err
+ }
+ _, err = writer.Write(b)
+ return err
+ }
+ }
+
+ return fmt.Errorf("%v (%T) is not supported by the ByteStreamProducer, %s",
+ data, data, "can be resolved by supporting Reader/BinaryMarshaler interface")
+ })
+}
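
To show how these stream codecs behave, here is a small sketch using only the shapes accepted above: the consumer filling a `*string` target, and the producer writing out a `[]byte` source.

```go
package main

import (
	"bytes"
	"fmt"
	"log"
	"strings"

	"github.com/go-openapi/runtime"
)

func main() {
	// Consume a stream into a *string target.
	var s string
	consumer := runtime.ByteStreamConsumer()
	if err := consumer.Consume(strings.NewReader("hello"), &s); err != nil {
		log.Fatal(err)
	}
	fmt.Println(s) // hello

	// Produce from a []byte source into any io.Writer.
	var buf bytes.Buffer
	producer := runtime.ByteStreamProducer()
	if err := producer.Produce(&buf, []byte("world")); err != nil {
		log.Fatal(err)
	}
	fmt.Println(buf.String()) // world
}
```
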
diff --git a/vendor/github.com/go-openapi/runtime/client_auth_info.go b/vendor/github.com/go-openapi/runtime/client_auth_info.go
new file mode 100644
index 000000000..c6c97d9a7
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/client_auth_info.go
@@ -0,0 +1,30 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package runtime
+
+import "github.com/go-openapi/strfmt"
+
+// A ClientAuthInfoWriterFunc converts a function to a request writer interface
+type ClientAuthInfoWriterFunc func(ClientRequest, strfmt.Registry) error
+
+// AuthenticateRequest adds authentication data to the request
+func (fn ClientAuthInfoWriterFunc) AuthenticateRequest(req ClientRequest, reg strfmt.Registry) error {
+ return fn(req, reg)
+}
+
+// A ClientAuthInfoWriter implementor knows how to write authentication info to a request
+type ClientAuthInfoWriter interface {
+ AuthenticateRequest(ClientRequest, strfmt.Registry) error
+}
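
As an illustration of the adapter above, here is a hypothetical bearer-token writer built from `ClientAuthInfoWriterFunc`; it uses the `TestClientRequest` helper from `client_request.go` (further down in this diff) to observe the header it sets.

```go
package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/runtime"
	"github.com/go-openapi/strfmt"
)

// bearerToken returns a ClientAuthInfoWriter that sets an Authorization
// header; the token value is a placeholder for the example.
func bearerToken(token string) runtime.ClientAuthInfoWriter {
	return runtime.ClientAuthInfoWriterFunc(func(req runtime.ClientRequest, _ strfmt.Registry) error {
		return req.SetHeaderParam(runtime.HeaderAuthorization, "Bearer "+token)
	})
}

func main() {
	// TestClientRequest records headers, which makes it handy for
	// exercising auth writers without a real transport.
	req := &runtime.TestClientRequest{}
	if err := bearerToken("s3cr3t").AuthenticateRequest(req, nil); err != nil {
		log.Fatal(err)
	}
	fmt.Println(req.Headers.Get("Authorization")) // Bearer s3cr3t
}
```
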
diff --git a/vendor/github.com/go-openapi/runtime/client_operation.go b/vendor/github.com/go-openapi/runtime/client_operation.go
new file mode 100644
index 000000000..fa21eacf3
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/client_operation.go
@@ -0,0 +1,41 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package runtime
+
+import (
+ "context"
+ "net/http"
+)
+
+// ClientOperation represents the context for a swagger operation to be submitted to the transport
+type ClientOperation struct {
+ ID string
+ Method string
+ PathPattern string
+ ProducesMediaTypes []string
+ ConsumesMediaTypes []string
+ Schemes []string
+ AuthInfo ClientAuthInfoWriter
+ Params ClientRequestWriter
+ Reader ClientResponseReader
+ Context context.Context
+ Client *http.Client
+}
+
+// A ClientTransport implementor knows how to submit Request objects to some destination
+type ClientTransport interface {
+ //Submit(string, RequestWriter, ResponseReader, AuthInfoWriter) (interface{}, error)
+ Submit(*ClientOperation) (interface{}, error)
+}
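
A hedged sketch of submitting a `ClientOperation` through the companion `github.com/go-openapi/runtime/client` transport (vendored separately in this change); the operation ID, host, and path are placeholders, so the call only succeeds if something is actually listening there.

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/go-openapi/runtime"
	httptransport "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/strfmt"
)

func main() {
	// Hypothetical endpoint; swap in a real host and base path.
	transport := httptransport.New("localhost:8080", "/v1", []string{"http"})

	op := &runtime.ClientOperation{
		ID:                 "getStatus", // hypothetical operation
		Method:             "GET",
		PathPattern:        "/status",
		ProducesMediaTypes: []string{runtime.JSONMime},
		ConsumesMediaTypes: []string{runtime.JSONMime},
		Schemes:            []string{"http"},
		Context:            context.Background(),
		// No parameters to write for this GET.
		Params: runtime.ClientRequestWriterFunc(func(runtime.ClientRequest, strfmt.Registry) error {
			return nil
		}),
		// Decode whatever JSON comes back into a generic map.
		Reader: runtime.ClientResponseReaderFunc(func(resp runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
			var out map[string]interface{}
			if err := consumer.Consume(resp.Body(), &out); err != nil {
				return nil, err
			}
			return out, nil
		}),
	}

	result, err := transport.Submit(op)
	if err != nil {
		log.Fatal(err) // expected unless a server is listening
	}
	fmt.Println(result)
}
```
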
diff --git a/vendor/github.com/go-openapi/runtime/client_request.go b/vendor/github.com/go-openapi/runtime/client_request.go
new file mode 100644
index 000000000..d4d2b58f2
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/client_request.go
@@ -0,0 +1,152 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package runtime
+
+import (
+ "io"
+ "net/http"
+ "net/url"
+ "time"
+
+ "github.com/go-openapi/strfmt"
+)
+
+// ClientRequestWriterFunc converts a function to a request writer interface
+type ClientRequestWriterFunc func(ClientRequest, strfmt.Registry) error
+
+// WriteToRequest adds data to the request
+func (fn ClientRequestWriterFunc) WriteToRequest(req ClientRequest, reg strfmt.Registry) error {
+ return fn(req, reg)
+}
+
+// ClientRequestWriter is an interface for things that know how to write to a request
+type ClientRequestWriter interface {
+ WriteToRequest(ClientRequest, strfmt.Registry) error
+}
+
+// ClientRequest is an interface for things that know how to
+// add information to a swagger client request
+type ClientRequest interface {
+ SetHeaderParam(string, ...string) error
+
+ GetHeaderParams() http.Header
+
+ SetQueryParam(string, ...string) error
+
+ SetFormParam(string, ...string) error
+
+ SetPathParam(string, string) error
+
+ GetQueryParams() url.Values
+
+ SetFileParam(string, ...NamedReadCloser) error
+
+ SetBodyParam(interface{}) error
+
+ SetTimeout(time.Duration) error
+
+ GetMethod() string
+
+ GetPath() string
+
+ GetBody() []byte
+
+ GetBodyParam() interface{}
+
+ GetFileParam() map[string][]NamedReadCloser
+}
+
+// NamedReadCloser represents a named ReadCloser interface
+type NamedReadCloser interface {
+ io.ReadCloser
+ Name() string
+}
+
+// NamedReader creates a NamedReadCloser for use as file upload
+func NamedReader(name string, rdr io.Reader) NamedReadCloser {
+ rc, ok := rdr.(io.ReadCloser)
+ if !ok {
+ rc = io.NopCloser(rdr)
+ }
+ return &namedReadCloser{
+ name: name,
+ cr: rc,
+ }
+}
+
+type namedReadCloser struct {
+ name string
+ cr io.ReadCloser
+}
+
+func (n *namedReadCloser) Close() error {
+ return n.cr.Close()
+}
+func (n *namedReadCloser) Read(p []byte) (int, error) {
+ return n.cr.Read(p)
+}
+func (n *namedReadCloser) Name() string {
+ return n.name
+}
+
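+// TestClientRequest is a minimal in-memory ClientRequest implementation,
+// useful for asserting what request writers do in tests.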
+type TestClientRequest struct {
+ Headers http.Header
+ Body interface{}
+}
+
+func (t *TestClientRequest) SetHeaderParam(name string, values ...string) error {
+ if t.Headers == nil {
+ t.Headers = make(http.Header)
+ }
+ t.Headers.Set(name, values[0])
+ return nil
+}
+
+func (t *TestClientRequest) SetQueryParam(_ string, _ ...string) error { return nil }
+
+func (t *TestClientRequest) SetFormParam(_ string, _ ...string) error { return nil }
+
+func (t *TestClientRequest) SetPathParam(_ string, _ string) error { return nil }
+
+func (t *TestClientRequest) SetFileParam(_ string, _ ...NamedReadCloser) error { return nil }
+
+func (t *TestClientRequest) SetBodyParam(body interface{}) error {
+ t.Body = body
+ return nil
+}
+
+func (t *TestClientRequest) SetTimeout(time.Duration) error {
+ return nil
+}
+
+func (t *TestClientRequest) GetQueryParams() url.Values { return nil }
+
+func (t *TestClientRequest) GetMethod() string { return "" }
+
+func (t *TestClientRequest) GetPath() string { return "" }
+
+func (t *TestClientRequest) GetBody() []byte { return nil }
+
+func (t *TestClientRequest) GetBodyParam() interface{} {
+ return t.Body
+}
+
+func (t *TestClientRequest) GetFileParam() map[string][]NamedReadCloser {
+ return nil
+}
+
+func (t *TestClientRequest) GetHeaderParams() http.Header {
+ return t.Headers
+}
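
Two small usage notes on the API above, as a sketch: `NamedReader` adapts any `io.Reader` into a file-upload parameter, and `TestClientRequest` records what a request writer did.

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/go-openapi/runtime"
)

func main() {
	// Wrap any io.Reader so it can be used as a file upload parameter.
	upload := runtime.NamedReader("report.csv", strings.NewReader("a,b\n1,2\n"))
	fmt.Println(upload.Name()) // report.csv

	// TestClientRequest stores headers in memory, so a test can assert
	// exactly what a request writer would have set.
	req := &runtime.TestClientRequest{}
	if err := req.SetHeaderParam("X-Trace", "abc123"); err != nil {
		log.Fatal(err)
	}
	fmt.Println(req.GetHeaderParams().Get("X-Trace")) // abc123
}
```
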
diff --git a/vendor/github.com/go-openapi/runtime/client_response.go b/vendor/github.com/go-openapi/runtime/client_response.go
new file mode 100644
index 000000000..0d1691149
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/client_response.go
@@ -0,0 +1,110 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package runtime
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+)
+
+// A ClientResponse represents a client response
+// This bridges between responses obtained from different transports
+type ClientResponse interface {
+ Code() int
+ Message() string
+ GetHeader(string) string
+ GetHeaders(string) []string
+ Body() io.ReadCloser
+}
+
+// A ClientResponseReaderFunc turns a function into a ClientResponseReader interface implementation
+type ClientResponseReaderFunc func(ClientResponse, Consumer) (interface{}, error)
+
+// ReadResponse reads the response
+func (read ClientResponseReaderFunc) ReadResponse(resp ClientResponse, consumer Consumer) (interface{}, error) {
+ return read(resp, consumer)
+}
+
+// A ClientResponseReader is an interface for things that want to read a response.
+// An application of this is to create structs from response values
+type ClientResponseReader interface {
+ ReadResponse(ClientResponse, Consumer) (interface{}, error)
+}
+
+// NewAPIError creates a new API error
+func NewAPIError(opName string, payload interface{}, code int) *APIError {
+ return &APIError{
+ OperationName: opName,
+ Response: payload,
+ Code: code,
+ }
+}
+
+// APIError wraps an error model and captures the status code
+type APIError struct {
+ OperationName string
+ Response interface{}
+ Code int
+}
+
+func (o *APIError) Error() string {
+ var resp []byte
+ if err, ok := o.Response.(error); ok {
+ resp = []byte("'" + err.Error() + "'")
+ } else {
+ resp, _ = json.Marshal(o.Response)
+ }
+ return fmt.Sprintf("%s (status %d): %s", o.OperationName, o.Code, resp)
+}
+
+func (o *APIError) String() string {
+ return o.Error()
+}
+
+// IsSuccess returns true when this response has a 2xx status code
+func (o *APIError) IsSuccess() bool {
+ return o.Code/100 == 2
+}
+
+// IsRedirect returns true when this response has a 3xx status code
+func (o *APIError) IsRedirect() bool {
+ return o.Code/100 == 3
+}
+
+// IsClientError returns true when this response has a 4xx status code
+func (o *APIError) IsClientError() bool {
+ return o.Code/100 == 4
+}
+
+// IsServerError returns true when this response has a 5xx status code
+func (o *APIError) IsServerError() bool {
+ return o.Code/100 == 5
+}
+
+// IsCode returns true when this response has the provided status code
+func (o *APIError) IsCode(code int) bool {
+ return o.Code == code
+}
+
+// A ClientResponseStatus is a common interface implemented by all responses on the generated code
+// You can use this to treat any client response based on status code
+type ClientResponseStatus interface {
+ IsSuccess() bool
+ IsRedirect() bool
+ IsClientError() bool
+ IsServerError() bool
+ IsCode(int) bool
+}
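
For example, `APIError` and its status helpers can be exercised directly; the operation name and payload below are placeholders.

```go
package main

import (
	"fmt"

	"github.com/go-openapi/runtime"
)

func main() {
	apiErr := runtime.NewAPIError("getUser", map[string]string{"message": "not found"}, 404)

	fmt.Println(apiErr.Error())         // getUser (status 404): {"message":"not found"}
	fmt.Println(apiErr.IsClientError()) // true
	fmt.Println(apiErr.IsSuccess())     // false
	fmt.Println(apiErr.IsCode(404))     // true
}
```
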
diff --git a/vendor/github.com/go-openapi/runtime/constants.go b/vendor/github.com/go-openapi/runtime/constants.go
new file mode 100644
index 000000000..515969242
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/constants.go
@@ -0,0 +1,49 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package runtime
+
+const (
+	// HeaderContentType represents an HTTP content-type header; its value is expected to be a MIME type
+ HeaderContentType = "Content-Type"
+
+	// HeaderTransferEncoding represents an HTTP transfer-encoding header.
+ HeaderTransferEncoding = "Transfer-Encoding"
+
+ // HeaderAccept the Accept header
+ HeaderAccept = "Accept"
+ // HeaderAuthorization the Authorization header
+ HeaderAuthorization = "Authorization"
+
+ charsetKey = "charset"
+
+ // DefaultMime the default fallback mime type
+ DefaultMime = "application/octet-stream"
+ // JSONMime the json mime type
+ JSONMime = "application/json"
+ // YAMLMime the yaml mime type
+ YAMLMime = "application/x-yaml"
+ // XMLMime the xml mime type
+ XMLMime = "application/xml"
+ // TextMime the text mime type
+ TextMime = "text/plain"
+ // HTMLMime the html mime type
+ HTMLMime = "text/html"
+ // CSVMime the csv mime type
+ CSVMime = "text/csv"
+ // MultipartFormMime the multipart form mime type
+ MultipartFormMime = "multipart/form-data"
+ // URLencodedFormMime the url encoded form mime type
+ URLencodedFormMime = "application/x-www-form-urlencoded"
+)
diff --git a/vendor/github.com/go-openapi/runtime/csv.go b/vendor/github.com/go-openapi/runtime/csv.go
new file mode 100644
index 000000000..d807bd915
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/csv.go
@@ -0,0 +1,77 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package runtime
+
+import (
+ "bytes"
+ "encoding/csv"
+ "errors"
+ "io"
+)
+
+// CSVConsumer creates a new CSV consumer
+func CSVConsumer() Consumer {
+ return ConsumerFunc(func(reader io.Reader, data interface{}) error {
+ if reader == nil {
+ return errors.New("CSVConsumer requires a reader")
+ }
+
+ csvReader := csv.NewReader(reader)
+ writer, ok := data.(io.Writer)
+ if !ok {
+ return errors.New("data type must be io.Writer")
+ }
+ csvWriter := csv.NewWriter(writer)
+ records, err := csvReader.ReadAll()
+ if err != nil {
+ return err
+ }
+ for _, r := range records {
+ if err := csvWriter.Write(r); err != nil {
+ return err
+ }
+ }
+ csvWriter.Flush()
+ return nil
+ })
+}
+
+// CSVProducer creates a new CSV producer
+func CSVProducer() Producer {
+ return ProducerFunc(func(writer io.Writer, data interface{}) error {
+ if writer == nil {
+ return errors.New("CSVProducer requires a writer")
+ }
+
+ dataBytes, ok := data.([]byte)
+ if !ok {
+ return errors.New("data type must be byte array")
+ }
+
+ csvReader := csv.NewReader(bytes.NewBuffer(dataBytes))
+ records, err := csvReader.ReadAll()
+ if err != nil {
+ return err
+ }
+ csvWriter := csv.NewWriter(writer)
+ for _, r := range records {
+ if err := csvWriter.Write(r); err != nil {
+ return err
+ }
+ }
+ csvWriter.Flush()
+ return nil
+ })
+}
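
A minimal sketch of both CSV codecs: the consumer requires an `io.Writer` target and the producer a `[]byte` source, re-encoding the parsed records (and normalizing quoting and line endings) in both directions.

```go
package main

import (
	"bytes"
	"fmt"
	"log"
	"strings"

	"github.com/go-openapi/runtime"
)

func main() {
	// Consume: parse CSV from a reader and re-encode it into the writer target.
	var buf bytes.Buffer
	if err := runtime.CSVConsumer().Consume(strings.NewReader("a,b\r\n1,2\r\n"), &buf); err != nil {
		log.Fatal(err)
	}
	fmt.Print(buf.String()) // a,b\n1,2\n

	// Produce: validate raw CSV bytes and write the records out.
	var out bytes.Buffer
	if err := runtime.CSVProducer().Produce(&out, []byte("x,y\n3,4\n")); err != nil {
		log.Fatal(err)
	}
	fmt.Print(out.String())
}
```
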
diff --git a/vendor/github.com/go-openapi/runtime/discard.go b/vendor/github.com/go-openapi/runtime/discard.go
new file mode 100644
index 000000000..0d390cfd6
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/discard.go
@@ -0,0 +1,9 @@
+package runtime
+
+import "io"
+
+// DiscardConsumer does absolutely nothing, it's a black hole.
+var DiscardConsumer = ConsumerFunc(func(_ io.Reader, _ interface{}) error { return nil })
+
+// DiscardProducer does absolutely nothing, it's a black hole.
+var DiscardProducer = ProducerFunc(func(_ io.Writer, _ interface{}) error { return nil })
diff --git a/vendor/github.com/go-openapi/runtime/file.go b/vendor/github.com/go-openapi/runtime/file.go
new file mode 100644
index 000000000..397d8a459
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/file.go
@@ -0,0 +1,19 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package runtime
+
+import "github.com/go-openapi/swag"
+
+type File = swag.File
diff --git a/vendor/github.com/go-openapi/runtime/headers.go b/vendor/github.com/go-openapi/runtime/headers.go
new file mode 100644
index 000000000..4d111db4f
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/headers.go
@@ -0,0 +1,45 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package runtime
+
+import (
+ "mime"
+ "net/http"
+
+ "github.com/go-openapi/errors"
+)
+
+// ContentType parses a content type header
+func ContentType(headers http.Header) (string, string, error) {
+ ct := headers.Get(HeaderContentType)
+ orig := ct
+ if ct == "" {
+ ct = DefaultMime
+ }
+ if ct == "" {
+ return "", "", nil
+ }
+
+ mt, opts, err := mime.ParseMediaType(ct)
+ if err != nil {
+ return "", "", errors.NewParseError(HeaderContentType, "header", orig, err)
+ }
+
+ if cs, ok := opts[charsetKey]; ok {
+ return mt, cs, nil
+ }
+
+ return mt, "", nil
+}
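
A short sketch of `ContentType` in action, including the fallback to `application/octet-stream` when the header is absent.

```go
package main

import (
	"fmt"
	"log"
	"net/http"

	"github.com/go-openapi/runtime"
)

func main() {
	h := make(http.Header)
	h.Set("Content-Type", "application/json; charset=utf-8")

	mt, charset, err := runtime.ContentType(h)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(mt)      // application/json
	fmt.Println(charset) // utf-8

	// A missing header falls back to the default MIME type.
	mt, _, _ = runtime.ContentType(make(http.Header))
	fmt.Println(mt) // application/octet-stream
}
```
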
diff --git a/vendor/github.com/go-openapi/runtime/interfaces.go b/vendor/github.com/go-openapi/runtime/interfaces.go
new file mode 100644
index 000000000..e33412868
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/interfaces.go
@@ -0,0 +1,112 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package runtime
+
+import (
+ "context"
+ "io"
+ "net/http"
+
+ "github.com/go-openapi/strfmt"
+)
+
+// OperationHandlerFunc an adapter for a function to the OperationHandler interface
+type OperationHandlerFunc func(interface{}) (interface{}, error)
+
+// Handle implements the operation handler interface
+func (s OperationHandlerFunc) Handle(data interface{}) (interface{}, error) {
+ return s(data)
+}
+
+// OperationHandler a handler for a swagger operation
+type OperationHandler interface {
+ Handle(interface{}) (interface{}, error)
+}
+
+// ConsumerFunc represents a function that can be used as a consumer
+type ConsumerFunc func(io.Reader, interface{}) error
+
+// Consume consumes the reader into the data parameter
+func (fn ConsumerFunc) Consume(reader io.Reader, data interface{}) error {
+ return fn(reader, data)
+}
+
+// Consumer implementations know how to bind the values on the provided interface to
+// data provided by the request body
+type Consumer interface {
+ // Consume performs the binding of request values
+ Consume(io.Reader, interface{}) error
+}
+
+// ProducerFunc represents a function that can be used as a producer
+type ProducerFunc func(io.Writer, interface{}) error
+
+// Produce produces the response for the provided data
+func (f ProducerFunc) Produce(writer io.Writer, data interface{}) error {
+ return f(writer, data)
+}
+
+// Producer implementations know how to turn the provided interface into a valid
+// HTTP response
+type Producer interface {
+ // Produce writes to the http response
+ Produce(io.Writer, interface{}) error
+}
+
+// AuthenticatorFunc turns a function into an authenticator
+type AuthenticatorFunc func(interface{}) (bool, interface{}, error)
+
+// Authenticate authenticates the request with the provided data
+func (f AuthenticatorFunc) Authenticate(params interface{}) (bool, interface{}, error) {
+ return f(params)
+}
+
+// Authenticator represents an authentication strategy
+// implementations of Authenticator know how to authenticate the
+// request data and translate that into a valid principal object or an error
+type Authenticator interface {
+ Authenticate(interface{}) (bool, interface{}, error)
+}
+
+// AuthorizerFunc turns a function into an authorizer
+type AuthorizerFunc func(*http.Request, interface{}) error
+
+// Authorize authorizes the processing of the request for the principal
+func (f AuthorizerFunc) Authorize(r *http.Request, principal interface{}) error {
+ return f(r, principal)
+}
+
+// Authorizer represents an authorization strategy
+// implementations of Authorizer know how to authorize the principal object
+// using the request data and returns error if unauthorized
+type Authorizer interface {
+ Authorize(*http.Request, interface{}) error
+}
+
+// Validatable types implementing this interface allow customizing their validation
+// this will be used instead of the reflective validation based on the spec document.
+// the implementations are assumed to have been generated by the swagger tool so they should
+// contain all the validations obtained from the spec
+type Validatable interface {
+ Validate(strfmt.Registry) error
+}
+
+// ContextValidatable types implementing this interface allow customizing their validation
+// this will be used instead of the reflective validation based on the spec document.
+// the implementations are assumed to have been generated by the swagger tool so they should
+// contain all the context validations obtained from the spec
+type ContextValidatable interface {
+ ContextValidate(context.Context, strfmt.Registry) error
+}
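
The `...Func` adapters above let plain closures satisfy these interfaces. A hedged sketch with an invented token check and principal:

```go
package main

import (
	"errors"
	"fmt"
	"net/http"

	"github.com/go-openapi/runtime"
)

func main() {
	// Turn a plain function into an Authenticator. The token check is a
	// stand-in for real credential validation.
	auth := runtime.AuthenticatorFunc(func(params interface{}) (bool, interface{}, error) {
		token, ok := params.(string)
		if !ok {
			return false, nil, nil // this authenticator doesn't apply
		}
		if token != "s3cr3t" {
			return true, nil, errors.New("invalid token")
		}
		return true, "user-123", nil
	})

	applies, principal, err := auth.Authenticate("s3cr3t")
	fmt.Println(applies, principal, err) // true user-123 <nil>

	// Likewise for Authorizer: deny anyone who isn't our known principal.
	authz := runtime.AuthorizerFunc(func(_ *http.Request, p interface{}) error {
		if p != "user-123" {
			return errors.New("forbidden")
		}
		return nil
	})
	fmt.Println(authz.Authorize(nil, principal)) // <nil>
}
```
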
diff --git a/vendor/github.com/go-openapi/runtime/json.go b/vendor/github.com/go-openapi/runtime/json.go
new file mode 100644
index 000000000..5a690559c
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/json.go
@@ -0,0 +1,38 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package runtime
+
+import (
+ "encoding/json"
+ "io"
+)
+
+// JSONConsumer creates a new JSON consumer
+func JSONConsumer() Consumer {
+ return ConsumerFunc(func(reader io.Reader, data interface{}) error {
+ dec := json.NewDecoder(reader)
+ dec.UseNumber() // preserve number formats
+ return dec.Decode(data)
+ })
+}
+
+// JSONProducer creates a new JSON producer
+func JSONProducer() Producer {
+ return ProducerFunc(func(writer io.Writer, data interface{}) error {
+ enc := json.NewEncoder(writer)
+ enc.SetEscapeHTML(false)
+ return enc.Encode(data)
+ })
+}
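
A small round-trip sketch: the producer leaves `<` unescaped thanks to `SetEscapeHTML(false)`, and the consumer's `UseNumber` keeps numbers as `json.Number` instead of coercing them to float64.

```go
package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/go-openapi/runtime"
)

func main() {
	var buf bytes.Buffer

	// Produce: encode a value as JSON without escaping HTML characters.
	if err := runtime.JSONProducer().Produce(&buf, map[string]string{"q": "a<b"}); err != nil {
		log.Fatal(err)
	}
	fmt.Print(buf.String()) // {"q":"a<b"}

	// Consume: decode it back into a generic map.
	var out map[string]interface{}
	if err := runtime.JSONConsumer().Consume(&buf, &out); err != nil {
		log.Fatal(err)
	}
	fmt.Println(out["q"]) // a<b
}
```
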
diff --git a/vendor/github.com/go-openapi/runtime/logger/logger.go b/vendor/github.com/go-openapi/runtime/logger/logger.go
new file mode 100644
index 000000000..6f4debcc1
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/logger/logger.go
@@ -0,0 +1,20 @@
+package logger
+
+import "os"
+
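+// Logger is the logging interface used by the runtime package and its middleware.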
+type Logger interface {
+ Printf(format string, args ...interface{})
+ Debugf(format string, args ...interface{})
+}
+
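+// DebugEnabled reports whether debug logging was requested through the
+// SWAGGER_DEBUG or DEBUG environment variables.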
+func DebugEnabled() bool {
+ d := os.Getenv("SWAGGER_DEBUG")
+ if d != "" && d != "false" && d != "0" {
+ return true
+ }
+ d = os.Getenv("DEBUG")
+ if d != "" && d != "false" && d != "0" {
+ return true
+ }
+ return false
+}
diff --git a/vendor/github.com/go-openapi/runtime/logger/standard.go b/vendor/github.com/go-openapi/runtime/logger/standard.go
new file mode 100644
index 000000000..f7e67ebb9
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/logger/standard.go
@@ -0,0 +1,22 @@
+package logger
+
+import (
+ "fmt"
+ "os"
+)
+
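+// StandardLogger writes both regular and debug messages to os.Stderr.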
+type StandardLogger struct{}
+
+func (StandardLogger) Printf(format string, args ...interface{}) {
+ if len(format) == 0 || format[len(format)-1] != '\n' {
+ format += "\n"
+ }
+ fmt.Fprintf(os.Stderr, format, args...)
+}
+
+func (StandardLogger) Debugf(format string, args ...interface{}) {
+ if len(format) == 0 || format[len(format)-1] != '\n' {
+ format += "\n"
+ }
+ fmt.Fprintf(os.Stderr, format, args...)
+}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/context.go b/vendor/github.com/go-openapi/runtime/middleware/context.go
new file mode 100644
index 000000000..d21ae4e87
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/context.go
@@ -0,0 +1,635 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package middleware
+
+import (
+ stdContext "context"
+ "fmt"
+ "net/http"
+ "strings"
+ "sync"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/runtime/logger"
+ "github.com/go-openapi/runtime/middleware/untyped"
+ "github.com/go-openapi/runtime/security"
+)
+
+// Debug when true turns on verbose logging
+var Debug = logger.DebugEnabled()
+var Logger logger.Logger = logger.StandardLogger{}
+
+func debugLog(format string, args ...interface{}) {
+ if Debug {
+ Logger.Printf(format, args...)
+ }
+}
+
+// A Builder can create middlewares
+type Builder func(http.Handler) http.Handler
+
+// PassthroughBuilder returns the handler, aka the builder identity function
+func PassthroughBuilder(handler http.Handler) http.Handler { return handler }
+
+// RequestBinder is an interface for types to implement
+// when they want to be able to bind from a request
+type RequestBinder interface {
+ BindRequest(*http.Request, *MatchedRoute) error
+}
+
+// Responder is an interface for types to implement
+// when they want to be considered for writing HTTP responses
+type Responder interface {
+ WriteResponse(http.ResponseWriter, runtime.Producer)
+}
+
+// ResponderFunc wraps a func as a Responder interface
+type ResponderFunc func(http.ResponseWriter, runtime.Producer)
+
+// WriteResponse writes to the response
+func (fn ResponderFunc) WriteResponse(rw http.ResponseWriter, pr runtime.Producer) {
+ fn(rw, pr)
+}
+
+// Context is a type safe wrapper around an untyped request context
+// used throughout to store request context with the standard context attached
+// to the http.Request
+type Context struct {
+ spec *loads.Document
+ analyzer *analysis.Spec
+ api RoutableAPI
+ router Router
+}
+
+type routableUntypedAPI struct {
+ api *untyped.API
+ hlock *sync.Mutex
+ handlers map[string]map[string]http.Handler
+ defaultConsumes string
+ defaultProduces string
+}
+
+func newRoutableUntypedAPI(spec *loads.Document, api *untyped.API, context *Context) *routableUntypedAPI {
+ var handlers map[string]map[string]http.Handler
+ if spec == nil || api == nil {
+ return nil
+ }
+ analyzer := analysis.New(spec.Spec())
+ for method, hls := range analyzer.Operations() {
+ um := strings.ToUpper(method)
+ for path, op := range hls {
+ schemes := analyzer.SecurityRequirementsFor(op)
+
+ if oh, ok := api.OperationHandlerFor(method, path); ok {
+ if handlers == nil {
+ handlers = make(map[string]map[string]http.Handler)
+ }
+ if b, ok := handlers[um]; !ok || b == nil {
+ handlers[um] = make(map[string]http.Handler)
+ }
+
+ var handler http.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ // lookup route info in the context
+ route, rCtx, _ := context.RouteInfo(r)
+ if rCtx != nil {
+ r = rCtx
+ }
+
+ // bind and validate the request using reflection
+ var bound interface{}
+ var validation error
+ bound, r, validation = context.BindAndValidate(r, route)
+ if validation != nil {
+ context.Respond(w, r, route.Produces, route, validation)
+ return
+ }
+
+ // actually handle the request
+ result, err := oh.Handle(bound)
+ if err != nil {
+ // respond with failure
+ context.Respond(w, r, route.Produces, route, err)
+ return
+ }
+
+ // respond with success
+ context.Respond(w, r, route.Produces, route, result)
+ })
+
+ if len(schemes) > 0 {
+ handler = newSecureAPI(context, handler)
+ }
+ handlers[um][path] = handler
+ }
+ }
+ }
+
+ return &routableUntypedAPI{
+ api: api,
+ hlock: new(sync.Mutex),
+ handlers: handlers,
+ defaultProduces: api.DefaultProduces,
+ defaultConsumes: api.DefaultConsumes,
+ }
+}
+
+func (r *routableUntypedAPI) HandlerFor(method, path string) (http.Handler, bool) {
+ r.hlock.Lock()
+ paths, ok := r.handlers[strings.ToUpper(method)]
+ if !ok {
+ r.hlock.Unlock()
+ return nil, false
+ }
+ handler, ok := paths[path]
+ r.hlock.Unlock()
+ return handler, ok
+}
+func (r *routableUntypedAPI) ServeErrorFor(operationID string) func(http.ResponseWriter, *http.Request, error) {
+ return r.api.ServeError
+}
+func (r *routableUntypedAPI) ConsumersFor(mediaTypes []string) map[string]runtime.Consumer {
+ return r.api.ConsumersFor(mediaTypes)
+}
+func (r *routableUntypedAPI) ProducersFor(mediaTypes []string) map[string]runtime.Producer {
+ return r.api.ProducersFor(mediaTypes)
+}
+func (r *routableUntypedAPI) AuthenticatorsFor(schemes map[string]spec.SecurityScheme) map[string]runtime.Authenticator {
+ return r.api.AuthenticatorsFor(schemes)
+}
+func (r *routableUntypedAPI) Authorizer() runtime.Authorizer {
+ return r.api.Authorizer()
+}
+func (r *routableUntypedAPI) Formats() strfmt.Registry {
+ return r.api.Formats()
+}
+
+func (r *routableUntypedAPI) DefaultProduces() string {
+ return r.defaultProduces
+}
+
+func (r *routableUntypedAPI) DefaultConsumes() string {
+ return r.defaultConsumes
+}
+
+// NewRoutableContext creates a new context for a routable API
+func NewRoutableContext(spec *loads.Document, routableAPI RoutableAPI, routes Router) *Context {
+ var an *analysis.Spec
+ if spec != nil {
+ an = analysis.New(spec.Spec())
+ }
+
+ return NewRoutableContextWithAnalyzedSpec(spec, an, routableAPI, routes)
+}
+
+// NewRoutableContextWithAnalyzedSpec is like NewRoutableContext but also takes the analyzed spec as input
+func NewRoutableContextWithAnalyzedSpec(spec *loads.Document, an *analysis.Spec, routableAPI RoutableAPI, routes Router) *Context {
+	// Either both the spec document and the analysis are provided, or neither of them.
+ if !((spec == nil && an == nil) || (spec != nil && an != nil)) {
+ panic(errors.New(http.StatusInternalServerError, "routable context requires either both spec doc and analysis, or none of them"))
+ }
+
+ ctx := &Context{spec: spec, api: routableAPI, analyzer: an, router: routes}
+ return ctx
+}
+
+// NewContext creates a new context wrapper
+func NewContext(spec *loads.Document, api *untyped.API, routes Router) *Context {
+ var an *analysis.Spec
+ if spec != nil {
+ an = analysis.New(spec.Spec())
+ }
+ ctx := &Context{spec: spec, analyzer: an}
+ ctx.api = newRoutableUntypedAPI(spec, api, ctx)
+ ctx.router = routes
+ return ctx
+}
+
+// Serve serves the specified spec with the specified api registrations as a http.Handler
+func Serve(spec *loads.Document, api *untyped.API) http.Handler {
+ return ServeWithBuilder(spec, api, PassthroughBuilder)
+}
+
+// ServeWithBuilder serves the specified spec with the specified api registrations as a http.Handler that is decorated
+// by the Builder
+func ServeWithBuilder(spec *loads.Document, api *untyped.API, builder Builder) http.Handler {
+ context := NewContext(spec, api, nil)
+ return context.APIHandler(builder)
+}
+
+type contextKey int8
+
+const (
+ _ contextKey = iota
+ ctxContentType
+ ctxResponseFormat
+ ctxMatchedRoute
+ ctxBoundParams
+ ctxSecurityPrincipal
+ ctxSecurityScopes
+)
+
+// MatchedRouteFrom request context value.
+func MatchedRouteFrom(req *http.Request) *MatchedRoute {
+ mr := req.Context().Value(ctxMatchedRoute)
+ if mr == nil {
+ return nil
+ }
+ if res, ok := mr.(*MatchedRoute); ok {
+ return res
+ }
+ return nil
+}
+
+// SecurityPrincipalFrom request context value.
+func SecurityPrincipalFrom(req *http.Request) interface{} {
+ return req.Context().Value(ctxSecurityPrincipal)
+}
+
+// SecurityScopesFrom request context value.
+func SecurityScopesFrom(req *http.Request) []string {
+ rs := req.Context().Value(ctxSecurityScopes)
+ if res, ok := rs.([]string); ok {
+ return res
+ }
+ return nil
+}
+
+type contentTypeValue struct {
+ MediaType string
+ Charset string
+}
+
+// BasePath returns the base path for this API
+func (c *Context) BasePath() string {
+ return c.spec.BasePath()
+}
+
+// RequiredProduces returns the accepted content types for responses
+func (c *Context) RequiredProduces() []string {
+ return c.analyzer.RequiredProduces()
+}
+
+// BindValidRequest binds a params object to a request but only when the request is valid
+// if the request is not valid an error will be returned
+func (c *Context) BindValidRequest(request *http.Request, route *MatchedRoute, binder RequestBinder) error {
+ var res []error
+ var requestContentType string
+
+ // check and validate content type, select consumer
+ if runtime.HasBody(request) {
+ ct, _, err := runtime.ContentType(request.Header)
+ if err != nil {
+ res = append(res, err)
+ } else {
+ if err := validateContentType(route.Consumes, ct); err != nil {
+ res = append(res, err)
+ }
+ if len(res) == 0 {
+ cons, ok := route.Consumers[ct]
+ if !ok {
+ res = append(res, errors.New(500, "no consumer registered for %s", ct))
+ } else {
+ route.Consumer = cons
+ requestContentType = ct
+ }
+ }
+ }
+ }
+
+ // check and validate the response format
+ if len(res) == 0 {
+		// if the route does not provide Produces and a default contentType could not be
+		// identified based on a body (typical for GET and DELETE requests), then the
+		// contentType defaults to "*/*".
+ if len(route.Produces) == 0 && requestContentType == "" {
+ requestContentType = "*/*"
+ }
+
+ if str := NegotiateContentType(request, route.Produces, requestContentType); str == "" {
+ res = append(res, errors.InvalidResponseFormat(request.Header.Get(runtime.HeaderAccept), route.Produces))
+ }
+ }
+
+ // now bind the request with the provided binder
+ // it's assumed the binder will also validate the request and return an error if the
+ // request is invalid
+ if binder != nil && len(res) == 0 {
+ if err := binder.BindRequest(request, route); err != nil {
+ return err
+ }
+ }
+
+ if len(res) > 0 {
+ return errors.CompositeValidationError(res...)
+ }
+ return nil
+}
+
+// ContentType gets the parsed value of a content type
+// Returns the media type, its charset and a shallow copy of the request
+// when its context doesn't contain the content type value, otherwise it returns
+// the same request
+// Returns any error that runtime.ContentType may return.
+func (c *Context) ContentType(request *http.Request) (string, string, *http.Request, error) {
+ var rCtx = request.Context()
+
+ if v, ok := rCtx.Value(ctxContentType).(*contentTypeValue); ok {
+ return v.MediaType, v.Charset, request, nil
+ }
+
+ mt, cs, err := runtime.ContentType(request.Header)
+ if err != nil {
+ return "", "", nil, err
+ }
+ rCtx = stdContext.WithValue(rCtx, ctxContentType, &contentTypeValue{mt, cs})
+ return mt, cs, request.WithContext(rCtx), nil
+}
+
+// LookupRoute looks a route up and returns true when it is found
+func (c *Context) LookupRoute(request *http.Request) (*MatchedRoute, bool) {
+ if route, ok := c.router.Lookup(request.Method, request.URL.EscapedPath()); ok {
+ return route, ok
+ }
+ return nil, false
+}
+
+// RouteInfo tries to match a route for this request.
+// Returns the matched route, a shallow copy of the request if its context
+// didn't already contain the matched route (otherwise the same request), and
+// a bool indicating whether the request matches one of the routes; if it
+// doesn't, false and nil are returned for the other two return values.
+func (c *Context) RouteInfo(request *http.Request) (*MatchedRoute, *http.Request, bool) {
+ var rCtx = request.Context()
+
+ if v, ok := rCtx.Value(ctxMatchedRoute).(*MatchedRoute); ok {
+ return v, request, ok
+ }
+
+ if route, ok := c.LookupRoute(request); ok {
+ rCtx = stdContext.WithValue(rCtx, ctxMatchedRoute, route)
+ return route, request.WithContext(rCtx), ok
+ }
+
+ return nil, nil, false
+}
+
+// ResponseFormat negotiates the response content type.
+// Returns the response format and a shallow copy of the request if its context
+// doesn't contain the response format; otherwise the same request.
+func (c *Context) ResponseFormat(r *http.Request, offers []string) (string, *http.Request) {
+ var rCtx = r.Context()
+
+ if v, ok := rCtx.Value(ctxResponseFormat).(string); ok {
+ debugLog("[%s %s] found response format %q in context", r.Method, r.URL.Path, v)
+ return v, r
+ }
+
+ format := NegotiateContentType(r, offers, "")
+ if format != "" {
+ debugLog("[%s %s] set response format %q in context", r.Method, r.URL.Path, format)
+ r = r.WithContext(stdContext.WithValue(rCtx, ctxResponseFormat, format))
+ }
+ debugLog("[%s %s] negotiated response format %q", r.Method, r.URL.Path, format)
+ return format, r
+}
+
+// AllowedMethods gets the allowed methods for the path of this request
+func (c *Context) AllowedMethods(request *http.Request) []string {
+ return c.router.OtherMethods(request.Method, request.URL.EscapedPath())
+}
+
+// ResetAuth removes the current principal from the request context
+func (c *Context) ResetAuth(request *http.Request) *http.Request {
+ rctx := request.Context()
+ rctx = stdContext.WithValue(rctx, ctxSecurityPrincipal, nil)
+ rctx = stdContext.WithValue(rctx, ctxSecurityScopes, nil)
+ return request.WithContext(rctx)
+}
+
+// Authorize authorizes the request.
+// Returns the principal object and a shallow copy of the request when its
+// context doesn't contain the principal; otherwise the same request. The last
+// return value is an error if one of the authenticators returns one, or an
+// Unauthenticated error when the credentials are invalid.
+func (c *Context) Authorize(request *http.Request, route *MatchedRoute) (interface{}, *http.Request, error) {
+ if route == nil || !route.HasAuth() {
+ return nil, nil, nil
+ }
+
+ var rCtx = request.Context()
+ if v := rCtx.Value(ctxSecurityPrincipal); v != nil {
+ return v, request, nil
+ }
+
+ applies, usr, err := route.Authenticators.Authenticate(request, route)
+ if !applies || err != nil || !route.Authenticators.AllowsAnonymous() && usr == nil {
+ if err != nil {
+ return nil, nil, err
+ }
+ return nil, nil, errors.Unauthenticated("invalid credentials")
+ }
+ if route.Authorizer != nil {
+ if err := route.Authorizer.Authorize(request, usr); err != nil {
+ if _, ok := err.(errors.Error); ok {
+ return nil, nil, err
+ }
+
+ return nil, nil, errors.New(http.StatusForbidden, err.Error())
+ }
+ }
+
+ rCtx = request.Context()
+
+ rCtx = stdContext.WithValue(rCtx, ctxSecurityPrincipal, usr)
+ rCtx = stdContext.WithValue(rCtx, ctxSecurityScopes, route.Authenticator.AllScopes())
+ return usr, request.WithContext(rCtx), nil
+}
+
+// BindAndValidate binds and validates the request.
+// Returns the validation map and a shallow copy of the request when its context
+// doesn't contain the validation; otherwise it returns the same request, or a
+// CompositeValidationError error.
+func (c *Context) BindAndValidate(request *http.Request, matched *MatchedRoute) (interface{}, *http.Request, error) {
+ var rCtx = request.Context()
+
+ if v, ok := rCtx.Value(ctxBoundParams).(*validation); ok {
+ debugLog("got cached validation (valid: %t)", len(v.result) == 0)
+ if len(v.result) > 0 {
+ return v.bound, request, errors.CompositeValidationError(v.result...)
+ }
+ return v.bound, request, nil
+ }
+ result := validateRequest(c, request, matched)
+ rCtx = stdContext.WithValue(rCtx, ctxBoundParams, result)
+ request = request.WithContext(rCtx)
+ if len(result.result) > 0 {
+ return result.bound, request, errors.CompositeValidationError(result.result...)
+ }
+ debugLog("no validation errors found")
+ return result.bound, request, nil
+}
+
+// NotFound is the default not found responder, used when no route has been matched yet
+func (c *Context) NotFound(rw http.ResponseWriter, r *http.Request) {
+ c.Respond(rw, r, []string{c.api.DefaultProduces()}, nil, errors.NotFound("not found"))
+}
+
+// Respond renders the response after doing some content negotiation
+func (c *Context) Respond(rw http.ResponseWriter, r *http.Request, produces []string, route *MatchedRoute, data interface{}) {
+ debugLog("responding to %s %s with produces: %v", r.Method, r.URL.Path, produces)
+ offers := []string{}
+ for _, mt := range produces {
+ if mt != c.api.DefaultProduces() {
+ offers = append(offers, mt)
+ }
+ }
+ // the default producer is last so more specific producers take precedence
+ offers = append(offers, c.api.DefaultProduces())
+ debugLog("offers: %v", offers)
+
+ var format string
+ format, r = c.ResponseFormat(r, offers)
+ rw.Header().Set(runtime.HeaderContentType, format)
+
+ if resp, ok := data.(Responder); ok {
+ producers := route.Producers
+ // producers contains keys in normalized format: if a format has a MIME type parameter, such as `text/plain; charset=utf-8`,
+ // then you must provide `text/plain` to get the correct producer. HOWEVER, format here is not normalized.
+ prod, ok := producers[normalizeOffer(format)]
+ if !ok {
+ prods := c.api.ProducersFor(normalizeOffers([]string{c.api.DefaultProduces()}))
+ pr, ok := prods[c.api.DefaultProduces()]
+ if !ok {
+ panic(errors.New(http.StatusInternalServerError, "can't find a producer for "+format))
+ }
+ prod = pr
+ }
+ resp.WriteResponse(rw, prod)
+ return
+ }
+
+ if err, ok := data.(error); ok {
+ if format == "" {
+ rw.Header().Set(runtime.HeaderContentType, runtime.JSONMime)
+ }
+
+ if realm := security.FailedBasicAuth(r); realm != "" {
+ rw.Header().Set("WWW-Authenticate", fmt.Sprintf("Basic realm=%q", realm))
+ }
+
+ if route == nil || route.Operation == nil {
+ c.api.ServeErrorFor("")(rw, r, err)
+ return
+ }
+ c.api.ServeErrorFor(route.Operation.ID)(rw, r, err)
+ return
+ }
+
+ if route == nil || route.Operation == nil {
+ rw.WriteHeader(200)
+ if r.Method == "HEAD" {
+ return
+ }
+ producers := c.api.ProducersFor(normalizeOffers(offers))
+ prod, ok := producers[format]
+ if !ok {
+ panic(errors.New(http.StatusInternalServerError, "can't find a producer for "+format))
+ }
+ if err := prod.Produce(rw, data); err != nil {
+ panic(err) // let the recovery middleware deal with this
+ }
+ return
+ }
+
+ if _, code, ok := route.Operation.SuccessResponse(); ok {
+ rw.WriteHeader(code)
+ if code == 204 || r.Method == "HEAD" {
+ return
+ }
+
+ producers := route.Producers
+ prod, ok := producers[format]
+ if !ok {
+ prods := c.api.ProducersFor(normalizeOffers([]string{c.api.DefaultProduces()}))
+ pr, ok := prods[c.api.DefaultProduces()]
+ if !ok {
+ panic(errors.New(http.StatusInternalServerError, "can't find a producer for "+format))
+ }
+ prod = pr
+ }
+ if err := prod.Produce(rw, data); err != nil {
+ panic(err) // let the recovery middleware deal with this
+ }
+ return
+ }
+
+ c.api.ServeErrorFor(route.Operation.ID)(rw, r, errors.New(http.StatusInternalServerError, "can't produce response"))
+}
+
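+// APIHandlerSwaggerUI returns a handler to serve the API, like APIHandler, but with a SwaggerUI page instead of Redoc for the documentation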
+func (c *Context) APIHandlerSwaggerUI(builder Builder) http.Handler {
+ b := builder
+ if b == nil {
+ b = PassthroughBuilder
+ }
+
+ var title string
+ sp := c.spec.Spec()
+ if sp != nil && sp.Info != nil && sp.Info.Title != "" {
+ title = sp.Info.Title
+ }
+
+ swaggerUIOpts := SwaggerUIOpts{
+ BasePath: c.BasePath(),
+ Title: title,
+ }
+
+ return Spec("", c.spec.Raw(), SwaggerUI(swaggerUIOpts, c.RoutesHandler(b)))
+}
+
+// APIHandler returns a handler to serve the API; this includes the swagger spec, the router and the contract defined in the swagger spec
+func (c *Context) APIHandler(builder Builder) http.Handler {
+ b := builder
+ if b == nil {
+ b = PassthroughBuilder
+ }
+
+ var title string
+ sp := c.spec.Spec()
+ if sp != nil && sp.Info != nil && sp.Info.Title != "" {
+ title = sp.Info.Title
+ }
+
+ redocOpts := RedocOpts{
+ BasePath: c.BasePath(),
+ Title: title,
+ }
+
+ return Spec("", c.spec.Raw(), Redoc(redocOpts, c.RoutesHandler(b)))
+}
+
+// RoutesHandler returns a handler to serve the API: just the routes and the contract defined in the swagger spec
+func (c *Context) RoutesHandler(builder Builder) http.Handler {
+ b := builder
+ if b == nil {
+ b = PassthroughBuilder
+ }
+ return NewRouter(c, b(NewOperationExecutor(c)))
+}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/denco/LICENSE b/vendor/github.com/go-openapi/runtime/middleware/denco/LICENSE
new file mode 100644
index 000000000..e65039ad8
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/denco/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2014 Naoya Inada <naoina@kuune.org>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/go-openapi/runtime/middleware/denco/README.md b/vendor/github.com/go-openapi/runtime/middleware/denco/README.md
new file mode 100644
index 000000000..30109e17d
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/denco/README.md
@@ -0,0 +1,180 @@
+# Denco [![Build Status](https://travis-ci.org/naoina/denco.png?branch=master)](https://travis-ci.org/naoina/denco)
+
+The fast and flexible HTTP request router for [Go](http://golang.org).
+
+Denco is based on the Double-Array implementation of [Kocha-urlrouter](https://github.com/naoina/kocha-urlrouter).
+However, Denco is optimized and has some added features.
+
+## Features
+
+* Fast (See [go-http-routing-benchmark](https://github.com/naoina/go-http-routing-benchmark))
+* [URL patterns](#url-patterns) (`/foo/:bar` and `/foo/*wildcard`)
+* Small (but enough) URL router API
+* HTTP request multiplexer like `http.ServeMux`
+
+## Installation
+
+ go get -u github.com/go-openapi/runtime/middleware/denco
+
+## Using as HTTP request multiplexer
+
+```go
+package main
+
+import (
+ "fmt"
+ "log"
+ "net/http"
+
+ "github.com/go-openapi/runtime/middleware/denco"
+)
+
+func Index(w http.ResponseWriter, r *http.Request, params denco.Params) {
+ fmt.Fprintf(w, "Welcome to Denco!\n")
+}
+
+func User(w http.ResponseWriter, r *http.Request, params denco.Params) {
+ fmt.Fprintf(w, "Hello %s!\n", params.Get("name"))
+}
+
+func main() {
+ mux := denco.NewMux()
+ handler, err := mux.Build([]denco.Handler{
+ mux.GET("/", Index),
+ mux.GET("/user/:name", User),
+ mux.POST("/user/:name", User),
+ })
+ if err != nil {
+ panic(err)
+ }
+ log.Fatal(http.ListenAndServe(":8080", handler))
+}
+```
+
+## Using as URL router
+
+```go
+package main
+
+import (
+ "fmt"
+
+ "github.com/go-openapi/runtime/middleware/denco"
+)
+
+type route struct {
+ name string
+}
+
+func main() {
+ router := denco.New()
+ router.Build([]denco.Record{
+ {"/", &route{"root"}},
+ {"/user/:id", &route{"user"}},
+ {"/user/:name/:id", &route{"username"}},
+ {"/static/*filepath", &route{"static"}},
+ })
+
+ data, params, found := router.Lookup("/")
+ // print `&main.route{name:"root"}, denco.Params(nil), true`.
+ fmt.Printf("%#v, %#v, %#v\n", data, params, found)
+
+ data, params, found = router.Lookup("/user/hoge")
+ // print `&main.route{name:"user"}, denco.Params{denco.Param{Name:"id", Value:"hoge"}}, true`.
+ fmt.Printf("%#v, %#v, %#v\n", data, params, found)
+
+ data, params, found = router.Lookup("/user/hoge/7")
+ // print `&main.route{name:"username"}, denco.Params{denco.Param{Name:"name", Value:"hoge"}, denco.Param{Name:"id", Value:"7"}}, true`.
+ fmt.Printf("%#v, %#v, %#v\n", data, params, found)
+
+ data, params, found = router.Lookup("/static/path/to/file")
+ // print `&main.route{name:"static"}, denco.Params{denco.Param{Name:"filepath", Value:"path/to/file"}}, true`.
+ fmt.Printf("%#v, %#v, %#v\n", data, params, found)
+}
+```
+
+See [Godoc](http://godoc.org/github.com/go-openapi/runtime/middleware/denco) for more details.
+
+## Getting the value of a path parameter
+
+You can get the value of a path parameter in two ways:
+
+1. Using the [`denco.Params.Get`](http://godoc.org/github.com/go-openapi/runtime/middleware/denco#Params.Get) method
+2. Iterating over the params slice
+
+```go
+package main
+
+import (
+ "fmt"
+
+ "github.com/go-openapi/runtime/middleware/denco"
+)
+
+func main() {
+ router := denco.New()
+ if err := router.Build([]denco.Record{
+ {"/user/:name/:id", "route1"},
+ }); err != nil {
+ panic(err)
+ }
+
+ // 1. Using the denco.Params.Get method.
+ _, params, _ := router.Lookup("/user/alice/1")
+ name := params.Get("name")
+ if name != "" {
+ fmt.Printf("Hello %s.\n", name) // prints "Hello alice.".
+ }
+
+ // 2. Iterating over the params slice.
+ for _, param := range params {
+ if param.Name == "name" {
+ fmt.Printf("Hello %s.\n", param.Value) // prints "Hello alice.".
+ }
+ }
+}
+```
+
+## URL patterns
+
+Denco's route matching strategy is "most nearly matching".
+
+When the routes `/:name` and `/alice` have been built, the URI `/alice` matches the route `/alice`, not `/:name`,
+because `/alice` is a more exact match for the route `/alice` than for `/:name`.
+
+As a longer example, when the routes below have been built:
+
+```
+/user/alice
+/user/:name
+/user/:name/:id
+/user/alice/:id
+/user/:id/bob
+```
+
+The routes match as follows:
+
+```
+/user/alice => "/user/alice" (no match with "/user/:name")
+/user/bob => "/user/:name"
+/user/naoina/1 => "/user/:name/1"
+/user/alice/1 => "/user/alice/:id" (no match with "/user/:name/:id")
+/user/1/bob => "/user/:id/bob" (no match with "/user/:name/:id")
+/user/alice/bob => "/user/alice/:id" (no match with "/user/:name/:id" and "/user/:id/bob")
+```
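+
+These rules can be checked directly with `Lookup`. A minimal runnable sketch
+(the payload strings `"static"` and `"param"` are arbitrary values chosen for
+this example, not part of denco's API):
+
+```go
+package main
+
+import (
+ "fmt"
+
+ "github.com/go-openapi/runtime/middleware/denco"
+)
+
+func main() {
+ router := denco.New()
+ if err := router.Build([]denco.Record{
+ {"/user/alice", "static"},
+ {"/user/:name", "param"},
+ }); err != nil {
+ panic(err)
+ }
+
+ data, _, _ := router.Lookup("/user/alice")
+ fmt.Println(data) // prints "static": the exact route wins.
+
+ data, _, _ = router.Lookup("/user/bob")
+ fmt.Println(data) // prints "param": only "/user/:name" matches.
+}
+```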
+
+## Limitations
+
+Denco has the following limitations:
+
+* Number of param records (such as `/:name`) must be less than 2^22
+* Number of elements of internal slice must be less than 2^22
+
+## Benchmarks
+
+ cd $GOPATH/src/github.com/go-openapi/runtime/middleware/denco
+ go test -bench . -benchmem
+
+## License
+
+Denco is licensed under the MIT License.
diff --git a/vendor/github.com/go-openapi/runtime/middleware/denco/router.go b/vendor/github.com/go-openapi/runtime/middleware/denco/router.go
new file mode 100644
index 000000000..5d2691ec3
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/denco/router.go
@@ -0,0 +1,460 @@
+// Package denco provides a fast URL router.
+package denco
+
+import (
+ "fmt"
+ "sort"
+ "strings"
+)
+
+const (
+ // ParamCharacter is a special character for path parameter.
+ ParamCharacter = ':'
+
+ // WildcardCharacter is a special character for wildcard path parameter.
+ WildcardCharacter = '*'
+
+ // TerminationCharacter is a special character for end of path.
+ TerminationCharacter = '#'
+
+ // SeparatorCharacter separates path segments.
+ SeparatorCharacter = '/'
+
+ // PathParamCharacter indicates a RESTCONF path param
+ PathParamCharacter = '='
+
+ // MaxSize is max size of records and internal slice.
+ MaxSize = (1 << 22) - 1
+)
+
+// Router represents a URL router.
+type Router struct {
+ // SizeHint is the expected maximum number of path parameters in the records passed to Build.
+ // SizeHint is used to determine the capacity of the memory to allocate.
+ // By default, SizeHint is determined from the records given to Build.
+ SizeHint int
+
+ static map[string]interface{}
+ param *doubleArray
+}
+
+// New returns a new Router.
+func New() *Router {
+ return &Router{
+ SizeHint: -1,
+ static: make(map[string]interface{}),
+ param: newDoubleArray(),
+ }
+}
+
+// Lookup returns the data and path parameters associated with path.
+// params is a slice of Param arranged in the order in which the parameters appeared.
+// e.g. when the built routing path is "/path/to/:id/:name" and the given path is "/path/to/1/alice", the params order is [{"id": "1"}, {"name": "alice"}], not [{"name": "alice"}, {"id": "1"}].
+func (rt *Router) Lookup(path string) (data interface{}, params Params, found bool) {
+ if data, found := rt.static[path]; found {
+ return data, nil, true
+ }
+ if len(rt.param.node) == 1 {
+ return nil, nil, false
+ }
+ nd, params, found := rt.param.lookup(path, make([]Param, 0, rt.SizeHint), 1)
+ if !found {
+ return nil, nil, false
+ }
+ for i := 0; i < len(params); i++ {
+ params[i].Name = nd.paramNames[i]
+ }
+ return nd.data, params, true
+}
+
+// Build builds the URL router from records.
+func (rt *Router) Build(records []Record) error {
+ statics, params := makeRecords(records)
+ if len(params) > MaxSize {
+ return fmt.Errorf("denco: too many records")
+ }
+ if rt.SizeHint < 0 {
+ rt.SizeHint = 0
+ for _, p := range params {
+ size := 0
+ for _, k := range p.Key {
+ if k == ParamCharacter || k == WildcardCharacter {
+ size++
+ }
+ }
+ if size > rt.SizeHint {
+ rt.SizeHint = size
+ }
+ }
+ }
+ for _, r := range statics {
+ rt.static[r.Key] = r.Value
+ }
+ if err := rt.param.build(params, 1, 0, make(map[int]struct{})); err != nil {
+ return err
+ }
+ return nil
+}
+
+// Param represents the name and value of a path parameter.
+type Param struct {
+ Name string
+ Value string
+}
+
+// Params represents the names and values of path parameters.
+type Params []Param
+
+// Get gets the first value associated with the given name.
+// If there are no values associated with the key, Get returns "".
+func (ps Params) Get(name string) string {
+ for _, p := range ps {
+ if p.Name == name {
+ return p.Value
+ }
+ }
+ return ""
+}
+
+type doubleArray struct {
+ bc []baseCheck
+ node []*node
+}
+
+func newDoubleArray() *doubleArray {
+ return &doubleArray{
+ bc: []baseCheck{0},
+ node: []*node{nil}, // The start index is adjusted to 1 because 0 is used as a mark for a non-existent node.
+ }
+}
+
+// baseCheck contains BASE, CHECK and Extra flags.
+// From the top: 22 bits of BASE, 2 bits of Extra flags and 8 bits of CHECK.
+//
+// BASE (22bit) | Extra flags (2bit) | CHECK (8bit)
+// |----------------------|--|--------|
+// 32 10 8 0
+type baseCheck uint32
+
+func (bc baseCheck) Base() int {
+ return int(bc >> 10)
+}
+
+func (bc *baseCheck) SetBase(base int) {
+ *bc |= baseCheck(base) << 10
+}
+
+func (bc baseCheck) Check() byte {
+ return byte(bc)
+}
+
+func (bc *baseCheck) SetCheck(check byte) {
+ *bc |= baseCheck(check)
+}
+
+func (bc baseCheck) IsEmpty() bool {
+ return bc&0xfffffcff == 0
+}
+
+func (bc baseCheck) IsSingleParam() bool {
+ return bc&paramTypeSingle == paramTypeSingle
+}
+
+func (bc baseCheck) IsWildcardParam() bool {
+ return bc&paramTypeWildcard == paramTypeWildcard
+}
+
+func (bc baseCheck) IsAnyParam() bool {
+ return bc&paramTypeAny != 0
+}
+
+func (bc *baseCheck) SetSingleParam() {
+ *bc |= (1 << 8)
+}
+
+func (bc *baseCheck) SetWildcardParam() {
+ *bc |= (1 << 9)
+}
+
+const (
+ paramTypeSingle = 0x0100
+ paramTypeWildcard = 0x0200
+ paramTypeAny = 0x0300
+)
+
+func (da *doubleArray) lookup(path string, params []Param, idx int) (*node, []Param, bool) {
+ indices := make([]uint64, 0, 1)
+ for i := 0; i < len(path); i++ {
+ if da.bc[idx].IsAnyParam() {
+ indices = append(indices, (uint64(i)<<32)|(uint64(idx)&0xffffffff))
+ }
+ c := path[i]
+ if idx = nextIndex(da.bc[idx].Base(), c); idx >= len(da.bc) || da.bc[idx].Check() != c {
+ goto BACKTRACKING
+ }
+ }
+ if next := nextIndex(da.bc[idx].Base(), TerminationCharacter); next < len(da.bc) && da.bc[next].Check() == TerminationCharacter {
+ return da.node[da.bc[next].Base()], params, true
+ }
+BACKTRACKING:
+ for j := len(indices) - 1; j >= 0; j-- {
+ i, idx := int(indices[j]>>32), int(indices[j]&0xffffffff)
+ if da.bc[idx].IsSingleParam() {
+ idx := nextIndex(da.bc[idx].Base(), ParamCharacter)
+ if idx >= len(da.bc) {
+ break
+ }
+ next := NextSeparator(path, i)
+ params := append(params, Param{Value: path[i:next]})
+ if nd, params, found := da.lookup(path[next:], params, idx); found {
+ return nd, params, true
+ }
+ }
+ if da.bc[idx].IsWildcardParam() {
+ idx := nextIndex(da.bc[idx].Base(), WildcardCharacter)
+ params := append(params, Param{Value: path[i:]})
+ return da.node[da.bc[idx].Base()], params, true
+ }
+ }
+ return nil, nil, false
+}
+
+// build builds double-array from records.
+func (da *doubleArray) build(srcs []*record, idx, depth int, usedBase map[int]struct{}) error {
+ sort.Stable(recordSlice(srcs))
+ base, siblings, leaf, err := da.arrange(srcs, idx, depth, usedBase)
+ if err != nil {
+ return err
+ }
+ if leaf != nil {
+ nd, err := makeNode(leaf)
+ if err != nil {
+ return err
+ }
+ da.bc[idx].SetBase(len(da.node))
+ da.node = append(da.node, nd)
+ }
+ for _, sib := range siblings {
+ da.setCheck(nextIndex(base, sib.c), sib.c)
+ }
+ for _, sib := range siblings {
+ records := srcs[sib.start:sib.end]
+ switch sib.c {
+ case ParamCharacter:
+ for _, r := range records {
+ next := NextSeparator(r.Key, depth+1)
+ name := r.Key[depth+1 : next]
+ r.paramNames = append(r.paramNames, name)
+ r.Key = r.Key[next:]
+ }
+ da.bc[idx].SetSingleParam()
+ if err := da.build(records, nextIndex(base, sib.c), 0, usedBase); err != nil {
+ return err
+ }
+ case WildcardCharacter:
+ r := records[0]
+ name := r.Key[depth+1 : len(r.Key)-1]
+ r.paramNames = append(r.paramNames, name)
+ r.Key = ""
+ da.bc[idx].SetWildcardParam()
+ if err := da.build(records, nextIndex(base, sib.c), 0, usedBase); err != nil {
+ return err
+ }
+ default:
+ if err := da.build(records, nextIndex(base, sib.c), depth+1, usedBase); err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+// setBase sets BASE.
+func (da *doubleArray) setBase(i, base int) {
+ da.bc[i].SetBase(base)
+}
+
+// setCheck sets CHECK.
+func (da *doubleArray) setCheck(i int, check byte) {
+ da.bc[i].SetCheck(check)
+}
+
+// findEmptyIndex returns the index of an unused BASE/CHECK node.
+func (da *doubleArray) findEmptyIndex(start int) int {
+ i := start
+ for ; i < len(da.bc); i++ {
+ if da.bc[i].IsEmpty() {
+ break
+ }
+ }
+ return i
+}
+
+// findBase returns a good BASE.
+func (da *doubleArray) findBase(siblings []sibling, start int, usedBase map[int]struct{}) (base int) {
+ for idx, firstChar := start+1, siblings[0].c; ; idx = da.findEmptyIndex(idx + 1) {
+ base = nextIndex(idx, firstChar)
+ if _, used := usedBase[base]; used {
+ continue
+ }
+ i := 0
+ for ; i < len(siblings); i++ {
+ next := nextIndex(base, siblings[i].c)
+ if len(da.bc) <= next {
+ da.bc = append(da.bc, make([]baseCheck, next-len(da.bc)+1)...)
+ }
+ if !da.bc[next].IsEmpty() {
+ break
+ }
+ }
+ if i == len(siblings) {
+ break
+ }
+ }
+ usedBase[base] = struct{}{}
+ return base
+}
+
+func (da *doubleArray) arrange(records []*record, idx, depth int, usedBase map[int]struct{}) (base int, siblings []sibling, leaf *record, err error) {
+ siblings, leaf, err = makeSiblings(records, depth)
+ if err != nil {
+ return -1, nil, nil, err
+ }
+ if len(siblings) < 1 {
+ return -1, nil, leaf, nil
+ }
+ base = da.findBase(siblings, idx, usedBase)
+ if base > MaxSize {
+ return -1, nil, nil, fmt.Errorf("denco: too many elements of internal slice")
+ }
+ da.setBase(idx, base)
+ return base, siblings, leaf, err
+}
+
+// node represents a node of Double-Array.
+type node struct {
+ data interface{}
+
+ // Names of path parameters.
+ paramNames []string
+}
+
+// makeNode returns a new node built from a record.
+func makeNode(r *record) (*node, error) {
+ dups := make(map[string]bool)
+ for _, name := range r.paramNames {
+ if dups[name] {
+ return nil, fmt.Errorf("denco: path parameter `%v' is duplicated in the key `%v'", name, r.Key)
+ }
+ dups[name] = true
+ }
+ return &node{data: r.Value, paramNames: r.paramNames}, nil
+}
+
+// sibling represents intermediate data used while building the Double-Array.
+type sibling struct {
+ // The index of the start of the duplicated characters.
+ start int
+
+ // The index of the end of the duplicated characters.
+ end int
+
+ // The character of this sibling.
+ c byte
+}
+
+// nextIndex returns the next index in the BASE/CHECK array.
+func nextIndex(base int, c byte) int {
+ return base ^ int(c)
+}
+
+// makeSiblings returns a slice of siblings.
+func makeSiblings(records []*record, depth int) (sib []sibling, leaf *record, err error) {
+ var (
+ pc byte
+ n int
+ )
+ for i, r := range records {
+ if len(r.Key) <= depth {
+ leaf = r
+ continue
+ }
+ c := r.Key[depth]
+ switch {
+ case pc < c:
+ sib = append(sib, sibling{start: i, c: c})
+ case pc == c:
+ continue
+ default:
+ return nil, nil, fmt.Errorf("denco: BUG: routing table hasn't been sorted")
+ }
+ if n > 0 {
+ sib[n-1].end = i
+ }
+ pc = c
+ n++
+ }
+ if n == 0 {
+ return nil, leaf, nil
+ }
+ sib[n-1].end = len(records)
+ return sib, leaf, nil
+}
+
+// Record represents a record data for router construction.
+type Record struct {
+ // Key for router construction.
+ Key string
+
+ // Result value for Key.
+ Value interface{}
+}
+
+// NewRecord returns a new Record.
+func NewRecord(key string, value interface{}) Record {
+ return Record{
+ Key: key,
+ Value: value,
+ }
+}
+
+// record represents a record that is used to build the Double-Array.
+type record struct {
+ Record
+ paramNames []string
+}
+
+// makeRecords returns the records that are used to build the Double-Arrays.
+func makeRecords(srcs []Record) (statics, params []*record) {
+ termChar := string(TerminationCharacter)
+ paramPrefix := string(SeparatorCharacter) + string(ParamCharacter)
+ wildcardPrefix := string(SeparatorCharacter) + string(WildcardCharacter)
+ restconfPrefix := string(PathParamCharacter) + string(ParamCharacter)
+ for _, r := range srcs {
+ if strings.Contains(r.Key, paramPrefix) || strings.Contains(r.Key, wildcardPrefix) || strings.Contains(r.Key, restconfPrefix) {
+ r.Key += termChar
+ params = append(params, &record{Record: r})
+ } else {
+ statics = append(statics, &record{Record: r})
+ }
+ }
+ return statics, params
+}
+
+// recordSlice represents a slice of records for sorting; it implements sort.Interface.
+type recordSlice []*record
+
+// Len implements the sort.Interface.Len.
+func (rs recordSlice) Len() int {
+ return len(rs)
+}
+
+// Less implements the sort.Interface.Less.
+func (rs recordSlice) Less(i, j int) bool {
+ return rs[i].Key < rs[j].Key
+}
+
+// Swap implements the sort.Interface.Swap.
+func (rs recordSlice) Swap(i, j int) {
+ rs[i], rs[j] = rs[j], rs[i]
+}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/denco/server.go b/vendor/github.com/go-openapi/runtime/middleware/denco/server.go
new file mode 100644
index 000000000..0886713c1
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/denco/server.go
@@ -0,0 +1,106 @@
+package denco
+
+import (
+ "net/http"
+)
+
+// Mux represents a multiplexer for HTTP requests.
+type Mux struct{}
+
+// NewMux returns a new Mux.
+func NewMux() *Mux {
+ return &Mux{}
+}
+
+// GET is shorthand for Mux.Handler("GET", path, handler).
+func (m *Mux) GET(path string, handler HandlerFunc) Handler {
+ return m.Handler("GET", path, handler)
+}
+
+// POST is shorthand for Mux.Handler("POST", path, handler).
+func (m *Mux) POST(path string, handler HandlerFunc) Handler {
+ return m.Handler("POST", path, handler)
+}
+
+// PUT is shorthand for Mux.Handler("PUT", path, handler).
+func (m *Mux) PUT(path string, handler HandlerFunc) Handler {
+ return m.Handler("PUT", path, handler)
+}
+
+// HEAD is shorthand for Mux.Handler("HEAD", path, handler).
+func (m *Mux) HEAD(path string, handler HandlerFunc) Handler {
+ return m.Handler("HEAD", path, handler)
+}
+
+// Handler returns a handler for the given HTTP method.
+func (m *Mux) Handler(method, path string, handler HandlerFunc) Handler {
+ return Handler{
+ Method: method,
+ Path: path,
+ Func: handler,
+ }
+}
+
+// Build builds an http.Handler.
+func (m *Mux) Build(handlers []Handler) (http.Handler, error) {
+ recordMap := make(map[string][]Record)
+ for _, h := range handlers {
+ recordMap[h.Method] = append(recordMap[h.Method], NewRecord(h.Path, h.Func))
+ }
+ mux := newServeMux()
+ for m, records := range recordMap {
+ router := New()
+ if err := router.Build(records); err != nil {
+ return nil, err
+ }
+ mux.routers[m] = router
+ }
+ return mux, nil
+}
+
+// Handler represents a handler of HTTP request.
+type Handler struct {
+ // Method is an HTTP method.
+ Method string
+
+ // Path is a routing path for handler.
+ Path string
+
+ // Func is a function of handler of HTTP request.
+ Func HandlerFunc
+}
+
+// HandlerFunc is the type of a function that handles an HTTP request.
+type HandlerFunc func(w http.ResponseWriter, r *http.Request, params Params)
+
+type serveMux struct {
+ routers map[string]*Router
+}
+
+func newServeMux() *serveMux {
+ return &serveMux{
+ routers: make(map[string]*Router),
+ }
+}
+
+// ServeHTTP implements the http.Handler interface.
+func (mux *serveMux) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+ handler, params := mux.handler(r.Method, r.URL.Path)
+ handler(w, r, params)
+}
+
+func (mux *serveMux) handler(method, path string) (HandlerFunc, []Param) {
+ if router, found := mux.routers[method]; found {
+ if handler, params, found := router.Lookup(path); found {
+ return handler.(HandlerFunc), params
+ }
+ }
+ return NotFound, nil
+}
+
+// NotFound replies to the request with an HTTP 404 not found error.
+// NotFound is called when the HTTP method is unknown or no handler is found.
+// If you want to use your own NotFound handler, overwrite this variable.
+var NotFound = func(w http.ResponseWriter, r *http.Request, _ Params) {
+ http.NotFound(w, r)
+}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/denco/util.go b/vendor/github.com/go-openapi/runtime/middleware/denco/util.go
new file mode 100644
index 000000000..edc1f6ab8
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/denco/util.go
@@ -0,0 +1,12 @@
+package denco
+
+// NextSeparator returns the index of the next separator in path.
+func NextSeparator(path string, start int) int {
+ for start < len(path) {
+ if c := path[start]; c == '/' || c == TerminationCharacter {
+ break
+ }
+ start++
+ }
+ return start
+}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/doc.go b/vendor/github.com/go-openapi/runtime/middleware/doc.go
new file mode 100644
index 000000000..eaf90606a
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/doc.go
@@ -0,0 +1,62 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*Package middleware provides the library with helper functions for serving swagger APIs.
+
+Pseudo middleware handler
+
+ import (
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ )
+
+ func newCompleteMiddleware(ctx *Context) http.Handler {
+ return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
+ // use context to lookup routes
+ if matched, ok := ctx.RouteInfo(r); ok {
+
+ if matched.NeedsAuth() {
+ if _, err := ctx.Authorize(r, matched); err != nil {
+ ctx.Respond(rw, r, matched.Produces, matched, err)
+ return
+ }
+ }
+
+ bound, validation := ctx.BindAndValidate(r, matched)
+ if validation != nil {
+ ctx.Respond(rw, r, matched.Produces, matched, validation)
+ return
+ }
+
+ result, err := matched.Handler.Handle(bound)
+ if err != nil {
+ ctx.Respond(rw, r, matched.Produces, matched, err)
+ return
+ }
+
+ ctx.Respond(rw, r, matched.Produces, matched, result)
+ return
+ }
+
+ // Not found, check if it exists in the other methods first
+ if others := ctx.AllowedMethods(r); len(others) > 0 {
+ ctx.Respond(rw, r, ctx.spec.RequiredProduces(), nil, errors.MethodNotAllowed(r.Method, others))
+ return
+ }
+ ctx.Respond(rw, r, ctx.spec.RequiredProduces(), nil, errors.NotFound("path %s was not found", r.URL.Path))
+ })
+ }
+*/
+package middleware
diff --git a/vendor/github.com/go-openapi/runtime/middleware/go18.go b/vendor/github.com/go-openapi/runtime/middleware/go18.go
new file mode 100644
index 000000000..75c762c09
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/go18.go
@@ -0,0 +1,9 @@
+// +build go1.8
+
+package middleware
+
+import "net/url"
+
+func pathUnescape(path string) (string, error) {
+ return url.PathUnescape(path)
+}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/header/header.go b/vendor/github.com/go-openapi/runtime/middleware/header/header.go
new file mode 100644
index 000000000..e069743e3
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/header/header.go
@@ -0,0 +1,329 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// this file was taken from the github.com/golang/gddo repository
+
+// Package header provides functions for parsing HTTP headers.
+package header
+
+import (
+ "net/http"
+ "strings"
+ "time"
+)
+
+// Octet types from RFC 2616.
+var octetTypes [256]octetType
+
+type octetType byte
+
+const (
+ isToken octetType = 1 << iota
+ isSpace
+)
+
+func init() {
+ // OCTET = <any 8-bit sequence of data>
+ // CHAR = <any US-ASCII character (octets 0 - 127)>
+ // CTL = <any US-ASCII control character (octets 0 - 31) and DEL (127)>
+ // CR = <US-ASCII CR, carriage return (13)>
+ // LF = <US-ASCII LF, linefeed (10)>
+ // SP = <US-ASCII SP, space (32)>
+ // HT = <US-ASCII HT, horizontal-tab (9)>
+ // <"> = <US-ASCII double-quote mark (34)>
+ // CRLF = CR LF
+ // LWS = [CRLF] 1*( SP | HT )
+ // TEXT = <any OCTET except CTLs, but including LWS>
+ // separators = "(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\" | <">
+ // | "/" | "[" | "]" | "?" | "=" | "{" | "}" | SP | HT
+ // token = 1*<any CHAR except CTLs or separators>
+ // qdtext = <any TEXT except <">>
+
+ for c := 0; c < 256; c++ {
+ var t octetType
+ isCtl := c <= 31 || c == 127
+ isChar := 0 <= c && c <= 127
+ isSeparator := strings.ContainsRune(" \t\"(),/:;<=>?@[]\\{}", rune(c))
+ if strings.ContainsRune(" \t\r\n", rune(c)) {
+ t |= isSpace
+ }
+ if isChar && !isCtl && !isSeparator {
+ t |= isToken
+ }
+ octetTypes[c] = t
+ }
+}
+
+// Copy returns a shallow copy of the header.
+func Copy(header http.Header) http.Header {
+ h := make(http.Header)
+ for k, vs := range header {
+ h[k] = vs
+ }
+ return h
+}
+
+var timeLayouts = []string{"Mon, 02 Jan 2006 15:04:05 GMT", time.RFC850, time.ANSIC}
+
+// ParseTime parses the header as time. The zero value is returned if the
+// header is not present or there is an error parsing the
+// header.
+func ParseTime(header http.Header, key string) time.Time {
+ if s := header.Get(key); s != "" {
+ for _, layout := range timeLayouts {
+ if t, err := time.Parse(layout, s); err == nil {
+ return t.UTC()
+ }
+ }
+ }
+ return time.Time{}
+}
+
+// ParseList parses a comma separated list of values. Commas are ignored in
+// quoted strings. Quoted values are not unescaped or unquoted. Whitespace is
+// trimmed.
+func ParseList(header http.Header, key string) []string {
+ var result []string
+ for _, s := range header[http.CanonicalHeaderKey(key)] {
+ begin := 0
+ end := 0
+ escape := false
+ quote := false
+ for i := 0; i < len(s); i++ {
+ b := s[i]
+ switch {
+ case escape:
+ escape = false
+ end = i + 1
+ case quote:
+ switch b {
+ case '\\':
+ escape = true
+ case '"':
+ quote = false
+ }
+ end = i + 1
+ case b == '"':
+ quote = true
+ end = i + 1
+ case octetTypes[b]&isSpace != 0:
+ if begin == end {
+ begin = i + 1
+ end = begin
+ }
+ case b == ',':
+ if begin < end {
+ result = append(result, s[begin:end])
+ }
+ begin = i + 1
+ end = begin
+ default:
+ end = i + 1
+ }
+ }
+ if begin < end {
+ result = append(result, s[begin:end])
+ }
+ }
+ return result
+}
+
+// ParseValueAndParams parses a comma separated list of values with optional
+// semicolon separated name-value pairs. Content-Type and Content-Disposition
+// headers are in this format.
+func ParseValueAndParams(header http.Header, key string) (string, map[string]string) {
+ return parseValueAndParams(header.Get(key))
+}
+
+func parseValueAndParams(s string) (value string, params map[string]string) {
+ params = make(map[string]string)
+ value, s = expectTokenSlash(s)
+ if value == "" {
+ return
+ }
+ value = strings.ToLower(value)
+ s = skipSpace(s)
+ for strings.HasPrefix(s, ";") {
+ var pkey string
+ pkey, s = expectToken(skipSpace(s[1:]))
+ if pkey == "" {
+ return
+ }
+ if !strings.HasPrefix(s, "=") {
+ return
+ }
+ var pvalue string
+ pvalue, s = expectTokenOrQuoted(s[1:])
+ if pvalue == "" {
+ return
+ }
+ pkey = strings.ToLower(pkey)
+ params[pkey] = pvalue
+ s = skipSpace(s)
+ }
+ return
+}
+
+// AcceptSpec describes an Accept* header value and its quality.
+type AcceptSpec struct {
+ Value string
+ Q float64
+}
+
+// ParseAccept2 parses Accept* headers, using ParseList to split the header values.
+func ParseAccept2(header http.Header, key string) (specs []AcceptSpec) {
+ for _, en := range ParseList(header, key) {
+ v, p := parseValueAndParams(en)
+ var spec AcceptSpec
+ spec.Value = v
+ spec.Q = 1.0
+ if p != nil {
+ if q, ok := p["q"]; ok {
+ spec.Q, _ = expectQuality(q)
+ }
+ }
+ if spec.Q < 0.0 {
+ continue
+ }
+ specs = append(specs, spec)
+ }
+
+ return
+}
+
+// ParseAccept parses Accept* headers.
+func ParseAccept(header http.Header, key string) (specs []AcceptSpec) {
+loop:
+ for _, s := range header[key] {
+ for {
+ var spec AcceptSpec
+ spec.Value, s = expectTokenSlash(s)
+ if spec.Value == "" {
+ continue loop
+ }
+ spec.Q = 1.0
+ s = skipSpace(s)
+ if strings.HasPrefix(s, ";") {
+ s = skipSpace(s[1:])
+ for !strings.HasPrefix(s, "q=") && s != "" && !strings.HasPrefix(s, ",") {
+ s = skipSpace(s[1:])
+ }
+ if strings.HasPrefix(s, "q=") {
+ spec.Q, s = expectQuality(s[2:])
+ if spec.Q < 0.0 {
+ continue loop
+ }
+ }
+ }
+ specs = append(specs, spec)
+ s = skipSpace(s)
+ if !strings.HasPrefix(s, ",") {
+ continue loop
+ }
+ s = skipSpace(s[1:])
+ }
+ }
+ return
+}
+
+func skipSpace(s string) (rest string) {
+ i := 0
+ for ; i < len(s); i++ {
+ if octetTypes[s[i]]&isSpace == 0 {
+ break
+ }
+ }
+ return s[i:]
+}
+
+func expectToken(s string) (token, rest string) {
+ i := 0
+ for ; i < len(s); i++ {
+ if octetTypes[s[i]]&isToken == 0 {
+ break
+ }
+ }
+ return s[:i], s[i:]
+}
+
+func expectTokenSlash(s string) (token, rest string) {
+ i := 0
+ for ; i < len(s); i++ {
+ b := s[i]
+ if (octetTypes[b]&isToken == 0) && b != '/' {
+ break
+ }
+ }
+ return s[:i], s[i:]
+}
+
+func expectQuality(s string) (q float64, rest string) {
+ switch {
+ case len(s) == 0:
+ return -1, ""
+ case s[0] == '0':
+ // q is already 0
+ s = s[1:]
+ case s[0] == '1':
+ s = s[1:]
+ q = 1
+ case s[0] == '.':
+ // q is already 0
+ default:
+ return -1, ""
+ }
+ if !strings.HasPrefix(s, ".") {
+ return q, s
+ }
+ s = s[1:]
+ i := 0
+ n := 0
+ d := 1
+ for ; i < len(s); i++ {
+ b := s[i]
+ if b < '0' || b > '9' {
+ break
+ }
+ n = n*10 + int(b) - '0'
+ d *= 10
+ }
+ return q + float64(n)/float64(d), s[i:]
+}
+
+func expectTokenOrQuoted(s string) (value string, rest string) {
+ if !strings.HasPrefix(s, "\"") {
+ return expectToken(s)
+ }
+ s = s[1:]
+ for i := 0; i < len(s); i++ {
+ switch s[i] {
+ case '"':
+ return s[:i], s[i+1:]
+ case '\\':
+ p := make([]byte, len(s)-1)
+ j := copy(p, s[:i])
+ escape := true
+ for i = i + 1; i < len(s); i++ {
+ b := s[i]
+ switch {
+ case escape:
+ escape = false
+ p[j] = b
+ j++
+ case b == '\\':
+ escape = true
+ case b == '"':
+ return string(p[:j]), s[i+1:]
+ default:
+ p[j] = b
+ j++
+ }
+ }
+ return "", ""
+ }
+ }
+ return "", ""
+}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/negotiate.go b/vendor/github.com/go-openapi/runtime/middleware/negotiate.go
new file mode 100644
index 000000000..a9b6f27d3
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/negotiate.go
@@ -0,0 +1,98 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+//
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file or at
+// https://developers.google.com/open-source/licenses/bsd.
+
+// this file was taken from the github.com/golang/gddo repository
+
+package middleware
+
+import (
+ "net/http"
+ "strings"
+
+ "github.com/go-openapi/runtime/middleware/header"
+)
+
+// NegotiateContentEncoding returns the best offered content encoding for the
+// request's Accept-Encoding header. If two offers match with equal weight,
+// then the offer earlier in the list is preferred. If no offers are
+// acceptable, then "" is returned.
+func NegotiateContentEncoding(r *http.Request, offers []string) string {
+ bestOffer := "identity"
+ bestQ := -1.0
+ specs := header.ParseAccept(r.Header, "Accept-Encoding")
+ for _, offer := range offers {
+ for _, spec := range specs {
+ if spec.Q > bestQ &&
+ (spec.Value == "*" || spec.Value == offer) {
+ bestQ = spec.Q
+ bestOffer = offer
+ }
+ }
+ }
+ if bestQ == 0 {
+ bestOffer = ""
+ }
+ return bestOffer
+}
+
+// NegotiateContentType returns the best offered content type for the request's
+// Accept header. If two offers match with equal weight, then the more specific
+// offer is preferred. For example, text/* trumps */*. If two offers match
+// with equal weight and specificity, then the offer earlier in the list is
+// preferred. If no offers match, then defaultOffer is returned.
+func NegotiateContentType(r *http.Request, offers []string, defaultOffer string) string {
+ bestOffer := defaultOffer
+ bestQ := -1.0
+ bestWild := 3
+ specs := header.ParseAccept(r.Header, "Accept")
+ for _, rawOffer := range offers {
+ offer := normalizeOffer(rawOffer)
+ // No Accept header: just return the first offer.
+ if len(specs) == 0 {
+ return rawOffer
+ }
+ for _, spec := range specs {
+ switch {
+ case spec.Q == 0.0:
+ // ignore
+ case spec.Q < bestQ:
+ // a better match was already found
+ case spec.Value == "*/*":
+ if spec.Q > bestQ || bestWild > 2 {
+ bestQ = spec.Q
+ bestWild = 2
+ bestOffer = rawOffer
+ }
+ case strings.HasSuffix(spec.Value, "/*"):
+ if strings.HasPrefix(offer, spec.Value[:len(spec.Value)-1]) &&
+ (spec.Q > bestQ || bestWild > 1) {
+ bestQ = spec.Q
+ bestWild = 1
+ bestOffer = rawOffer
+ }
+ default:
+ if spec.Value == offer &&
+ (spec.Q > bestQ || bestWild > 0) {
+ bestQ = spec.Q
+ bestWild = 0
+ bestOffer = rawOffer
+ }
+ }
+ }
+ }
+ return bestOffer
+}
+
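+// For example, given a hypothetical request r carrying the header
+// "Accept: text/*;q=0.5, application/json":
+//
+//	offers := []string{"application/json", "text/plain"}
+//	format := NegotiateContentType(r, offers, "")
+//	// format == "application/json": the exact match at q=1.0 beats
+//	// "text/plain", which only matches the "text/*" wildcard at q=0.5.
+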
+func normalizeOffers(orig []string) (norm []string) {
+ for _, o := range orig {
+ norm = append(norm, normalizeOffer(o))
+ }
+ return
+}
+
+func normalizeOffer(orig string) string {
+ return strings.SplitN(orig, ";", 2)[0]
+}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/not_implemented.go b/vendor/github.com/go-openapi/runtime/middleware/not_implemented.go
new file mode 100644
index 000000000..bc6942a0f
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/not_implemented.go
@@ -0,0 +1,67 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package middleware
+
+import (
+ "net/http"
+
+ "github.com/go-openapi/runtime"
+)
+
+type errorResp struct {
+ code int
+ response interface{}
+ headers http.Header
+}
+
+func (e *errorResp) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {
+ for k, v := range e.headers {
+ for _, val := range v {
+ rw.Header().Add(k, val)
+ }
+ }
+ if e.code > 0 {
+ rw.WriteHeader(e.code)
+ } else {
+ rw.WriteHeader(http.StatusInternalServerError)
+ }
+ if err := producer.Produce(rw, e.response); err != nil {
+ Logger.Printf("failed to write error response: %v", err)
+ }
+}
+
+// NotImplemented returns the error responder for operations that are not yet implemented
+func NotImplemented(message string) Responder {
+ return Error(http.StatusNotImplemented, message)
+}
+
+// Error creates a generic responder for returning errors; the data is serialized
+// with the producer matched for the request
+func Error(code int, data interface{}, headers ...http.Header) Responder {
+ var hdr http.Header
+ for _, h := range headers {
+ for k, v := range h {
+ if hdr == nil {
+ hdr = make(http.Header)
+ }
+ hdr[k] = v
+ }
+ }
+ return &errorResp{
+ code: code,
+ response: data,
+ headers: hdr,
+ }
+}
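+
+// A hypothetical usage sketch from inside an operation handler:
+//
+//	return middleware.Error(http.StatusConflict, map[string]string{"message": "already exists"})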
diff --git a/vendor/github.com/go-openapi/runtime/middleware/operation.go b/vendor/github.com/go-openapi/runtime/middleware/operation.go
new file mode 100644
index 000000000..1175a63cf
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/operation.go
@@ -0,0 +1,30 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package middleware
+
+import "net/http"
+
+// NewOperationExecutor creates a context-aware middleware that handles the operations after routing
+func NewOperationExecutor(ctx *Context) http.Handler {
+ return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
+ // use context to lookup routes
+ route, rCtx, _ := ctx.RouteInfo(r)
+ if rCtx != nil {
+ r = rCtx
+ }
+
+ route.Handler.ServeHTTP(rw, r)
+ })
+}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/parameter.go b/vendor/github.com/go-openapi/runtime/middleware/parameter.go
new file mode 100644
index 000000000..9aaf65958
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/parameter.go
@@ -0,0 +1,485 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package middleware
+
+import (
+ "encoding"
+ "encoding/base64"
+ "fmt"
+ "io"
+ "net/http"
+ "reflect"
+ "strconv"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/validate"
+
+ "github.com/go-openapi/runtime"
+)
+
+const defaultMaxMemory = 32 << 20
+
+var textUnmarshalType = reflect.TypeOf(new(encoding.TextUnmarshaler)).Elem()
+
+func newUntypedParamBinder(param spec.Parameter, spec *spec.Swagger, formats strfmt.Registry) *untypedParamBinder {
+ binder := new(untypedParamBinder)
+ binder.Name = param.Name
+ binder.parameter = &param
+ binder.formats = formats
+ if param.In != "body" {
+ binder.validator = validate.NewParamValidator(&param, formats)
+ } else {
+ binder.validator = validate.NewSchemaValidator(param.Schema, spec, param.Name, formats)
+ }
+
+ return binder
+}
+
+type untypedParamBinder struct {
+ parameter *spec.Parameter
+ formats strfmt.Registry
+ Name string
+ validator validate.EntityValidator
+}
+
+func (p *untypedParamBinder) Type() reflect.Type {
+ return p.typeForSchema(p.parameter.Type, p.parameter.Format, p.parameter.Items)
+}
+
+func (p *untypedParamBinder) typeForSchema(tpe, format string, items *spec.Items) reflect.Type {
+ switch tpe {
+ case "boolean":
+ return reflect.TypeOf(true)
+
+ case "string":
+ if tt, ok := p.formats.GetType(format); ok {
+ return tt
+ }
+ return reflect.TypeOf("")
+
+ case "integer":
+ switch format {
+ case "int8":
+ return reflect.TypeOf(int8(0))
+ case "int16":
+ return reflect.TypeOf(int16(0))
+ case "int32":
+ return reflect.TypeOf(int32(0))
+ case "int64":
+ return reflect.TypeOf(int64(0))
+ default:
+ return reflect.TypeOf(int64(0))
+ }
+
+ case "number":
+ switch format {
+ case "float":
+ return reflect.TypeOf(float32(0))
+ case "double":
+ return reflect.TypeOf(float64(0))
+ }
+
+ case "array":
+ if items == nil {
+ return nil
+ }
+ itemsType := p.typeForSchema(items.Type, items.Format, items.Items)
+ if itemsType == nil {
+ return nil
+ }
+ return reflect.MakeSlice(reflect.SliceOf(itemsType), 0, 0).Type()
+
+ case "file":
+ return reflect.TypeOf(&runtime.File{}).Elem()
+
+ case "object":
+ return reflect.TypeOf(map[string]interface{}{})
+ }
+ return nil
+}
+
+func (p *untypedParamBinder) allowsMulti() bool {
+ return p.parameter.In == "query" || p.parameter.In == "formData"
+}
+
+func (p *untypedParamBinder) readValue(values runtime.Gettable, target reflect.Value) ([]string, bool, bool, error) {
+ name, in, cf, tpe := p.parameter.Name, p.parameter.In, p.parameter.CollectionFormat, p.parameter.Type
+ if tpe == "array" {
+ if cf == "multi" {
+ if !p.allowsMulti() {
+ return nil, false, false, errors.InvalidCollectionFormat(name, in, cf)
+ }
+ vv, hasKey, _ := values.GetOK(name)
+ return vv, false, hasKey, nil
+ }
+
+ v, hk, hv := values.GetOK(name)
+ if !hv {
+ return nil, false, hk, nil
+ }
+ d, c, e := p.readFormattedSliceFieldValue(v[len(v)-1], target)
+ return d, c, hk, e
+ }
+
+ vv, hk, _ := values.GetOK(name)
+ return vv, false, hk, nil
+}
+
+func (p *untypedParamBinder) Bind(request *http.Request, routeParams RouteParams, consumer runtime.Consumer, target reflect.Value) error {
+ // fmt.Println("binding", p.name, "as", p.Type())
+ switch p.parameter.In {
+ case "query":
+ data, custom, hasKey, err := p.readValue(runtime.Values(request.URL.Query()), target)
+ if err != nil {
+ return err
+ }
+ if custom {
+ return nil
+ }
+
+ return p.bindValue(data, hasKey, target)
+
+ case "header":
+ data, custom, hasKey, err := p.readValue(runtime.Values(request.Header), target)
+ if err != nil {
+ return err
+ }
+ if custom {
+ return nil
+ }
+ return p.bindValue(data, hasKey, target)
+
+ case "path":
+ data, custom, hasKey, err := p.readValue(routeParams, target)
+ if err != nil {
+ return err
+ }
+ if custom {
+ return nil
+ }
+ return p.bindValue(data, hasKey, target)
+
+ case "formData":
+ var err error
+ var mt string
+
+ mt, _, e := runtime.ContentType(request.Header)
+ if e != nil {
+ // because of the interface conversion go thinks the error is not nil
+ // so we first check for nil and then set the err var if it's not nil
+ err = e
+ }
+
+ if err != nil {
+ return errors.InvalidContentType("", []string{"multipart/form-data", "application/x-www-form-urlencoded"})
+ }
+
+ if mt != "multipart/form-data" && mt != "application/x-www-form-urlencoded" {
+ return errors.InvalidContentType(mt, []string{"multipart/form-data", "application/x-www-form-urlencoded"})
+ }
+
+ if mt == "multipart/form-data" {
+ if err = request.ParseMultipartForm(defaultMaxMemory); err != nil {
+ return errors.NewParseError(p.Name, p.parameter.In, "", err)
+ }
+ }
+
+ if err = request.ParseForm(); err != nil {
+ return errors.NewParseError(p.Name, p.parameter.In, "", err)
+ }
+
+ if p.parameter.Type == "file" {
+ file, header, ffErr := request.FormFile(p.parameter.Name)
+ if ffErr != nil {
+ if p.parameter.Required {
+ return errors.NewParseError(p.Name, p.parameter.In, "", ffErr)
+ }
+ return nil
+ }
+ target.Set(reflect.ValueOf(runtime.File{Data: file, Header: header}))
+ return nil
+ }
+
+ if request.MultipartForm != nil {
+ data, custom, hasKey, rvErr := p.readValue(runtime.Values(request.MultipartForm.Value), target)
+ if rvErr != nil {
+ return rvErr
+ }
+ if custom {
+ return nil
+ }
+ return p.bindValue(data, hasKey, target)
+ }
+ data, custom, hasKey, err := p.readValue(runtime.Values(request.PostForm), target)
+ if err != nil {
+ return err
+ }
+ if custom {
+ return nil
+ }
+ return p.bindValue(data, hasKey, target)
+
+ case "body":
+ newValue := reflect.New(target.Type())
+ if !runtime.HasBody(request) {
+ if p.parameter.Default != nil {
+ target.Set(reflect.ValueOf(p.parameter.Default))
+ }
+
+ return nil
+ }
+ if err := consumer.Consume(request.Body, newValue.Interface()); err != nil {
+ if err == io.EOF && p.parameter.Default != nil {
+ target.Set(reflect.ValueOf(p.parameter.Default))
+ return nil
+ }
+ tpe := p.parameter.Type
+ if p.parameter.Format != "" {
+ tpe = p.parameter.Format
+ }
+ return errors.InvalidType(p.Name, p.parameter.In, tpe, nil)
+ }
+ target.Set(reflect.Indirect(newValue))
+ return nil
+ default:
+ return errors.New(500, fmt.Sprintf("invalid parameter location %q", p.parameter.In))
+ }
+}
+
+func (p *untypedParamBinder) bindValue(data []string, hasKey bool, target reflect.Value) error {
+ if p.parameter.Type == "array" {
+ return p.setSliceFieldValue(target, p.parameter.Default, data, hasKey)
+ }
+ var d string
+ if len(data) > 0 {
+ d = data[len(data)-1]
+ }
+ return p.setFieldValue(target, p.parameter.Default, d, hasKey)
+}
+
+func (p *untypedParamBinder) setFieldValue(target reflect.Value, defaultValue interface{}, data string, hasKey bool) error {
+ tpe := p.parameter.Type
+ if p.parameter.Format != "" {
+ tpe = p.parameter.Format
+ }
+
+ if (!hasKey || (!p.parameter.AllowEmptyValue && data == "")) && p.parameter.Required && p.parameter.Default == nil {
+ return errors.Required(p.Name, p.parameter.In, data)
+ }
+
+ ok, err := p.tryUnmarshaler(target, defaultValue, data)
+ if err != nil {
+ return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
+ }
+ if ok {
+ return nil
+ }
+
+ defVal := reflect.Zero(target.Type())
+ if defaultValue != nil {
+ defVal = reflect.ValueOf(defaultValue)
+ }
+
+ if tpe == "byte" {
+ if data == "" {
+ if target.CanSet() {
+ target.SetBytes(defVal.Bytes())
+ }
+ return nil
+ }
+
+ b, err := base64.StdEncoding.DecodeString(data)
+ if err != nil {
+ b, err = base64.URLEncoding.DecodeString(data)
+ if err != nil {
+ return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
+ }
+ }
+ if target.CanSet() {
+ target.SetBytes(b)
+ }
+ return nil
+ }
+
+ switch target.Kind() {
+ case reflect.Bool:
+ if data == "" {
+ if target.CanSet() {
+ target.SetBool(defVal.Bool())
+ }
+ return nil
+ }
+ b, err := swag.ConvertBool(data)
+ if err != nil {
+ return err
+ }
+ if target.CanSet() {
+ target.SetBool(b)
+ }
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ if data == "" {
+ if target.CanSet() {
+ rd := defVal.Convert(reflect.TypeOf(int64(0)))
+ target.SetInt(rd.Int())
+ }
+ return nil
+ }
+ i, err := strconv.ParseInt(data, 10, 64)
+ if err != nil {
+ return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
+ }
+ if target.OverflowInt(i) {
+ return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
+ }
+ if target.CanSet() {
+ target.SetInt(i)
+ }
+
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ if data == "" {
+ if target.CanSet() {
+ rd := defVal.Convert(reflect.TypeOf(uint64(0)))
+ target.SetUint(rd.Uint())
+ }
+ return nil
+ }
+ u, err := strconv.ParseUint(data, 10, 64)
+ if err != nil {
+ return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
+ }
+ if target.OverflowUint(u) {
+ return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
+ }
+ if target.CanSet() {
+ target.SetUint(u)
+ }
+
+ case reflect.Float32, reflect.Float64:
+ if data == "" {
+ if target.CanSet() {
+ rd := defVal.Convert(reflect.TypeOf(float64(0)))
+ target.SetFloat(rd.Float())
+ }
+ return nil
+ }
+ f, err := strconv.ParseFloat(data, 64)
+ if err != nil {
+ return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
+ }
+ if target.OverflowFloat(f) {
+ return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
+ }
+ if target.CanSet() {
+ target.SetFloat(f)
+ }
+
+ case reflect.String:
+ value := data
+ if value == "" {
+ value = defVal.String()
+ }
+ // validate string
+ if target.CanSet() {
+ target.SetString(value)
+ }
+
+ case reflect.Ptr:
+ if data == "" && defVal.Kind() == reflect.Ptr {
+ if target.CanSet() {
+ target.Set(defVal)
+ }
+ return nil
+ }
+ newVal := reflect.New(target.Type().Elem())
+ if err := p.setFieldValue(reflect.Indirect(newVal), defVal, data, hasKey); err != nil {
+ return err
+ }
+ if target.CanSet() {
+ target.Set(newVal)
+ }
+
+ default:
+ return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
+ }
+ return nil
+}
+
+func (p *untypedParamBinder) tryUnmarshaler(target reflect.Value, defaultValue interface{}, data string) (bool, error) {
+ if !target.CanSet() {
+ return false, nil
+ }
+ // When a type implements encoding.TextUnmarshaler we'll use that instead of reflecting some more
+ if reflect.PtrTo(target.Type()).Implements(textUnmarshalType) {
+ if defaultValue != nil && len(data) == 0 {
+ target.Set(reflect.ValueOf(defaultValue))
+ return true, nil
+ }
+ value := reflect.New(target.Type())
+ if err := value.Interface().(encoding.TextUnmarshaler).UnmarshalText([]byte(data)); err != nil {
+ return true, err
+ }
+ target.Set(reflect.Indirect(value))
+ return true, nil
+ }
+ return false, nil
+}
+
+func (p *untypedParamBinder) readFormattedSliceFieldValue(data string, target reflect.Value) ([]string, bool, error) {
+ ok, err := p.tryUnmarshaler(target, p.parameter.Default, data)
+ if err != nil {
+ return nil, true, err
+ }
+ if ok {
+ return nil, true, nil
+ }
+
+ return swag.SplitByFormat(data, p.parameter.CollectionFormat), false, nil
+}
+
+func (p *untypedParamBinder) setSliceFieldValue(target reflect.Value, defaultValue interface{}, data []string, hasKey bool) error {
+ sz := len(data)
+ if (!hasKey || (!p.parameter.AllowEmptyValue && (sz == 0 || (sz == 1 && data[0] == "")))) && p.parameter.Required && defaultValue == nil {
+ return errors.Required(p.Name, p.parameter.In, data)
+ }
+
+ defVal := reflect.Zero(target.Type())
+ if defaultValue != nil {
+ defVal = reflect.ValueOf(defaultValue)
+ }
+
+ if !target.CanSet() {
+ return nil
+ }
+ if sz == 0 {
+ target.Set(defVal)
+ return nil
+ }
+
+ value := reflect.MakeSlice(reflect.SliceOf(target.Type().Elem()), sz, sz)
+
+ for i := 0; i < sz; i++ {
+ if err := p.setFieldValue(value.Index(i), nil, data[i], hasKey); err != nil {
+ return err
+ }
+ }
+
+ target.Set(value)
+
+ return nil
+}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/pre_go18.go b/vendor/github.com/go-openapi/runtime/middleware/pre_go18.go
new file mode 100644
index 000000000..03385251e
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/pre_go18.go
@@ -0,0 +1,9 @@
+// +build !go1.8
+
+package middleware
+
+import "net/url"
+
+func pathUnescape(path string) (string, error) {
+ return url.QueryUnescape(path)
+}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/rapidoc.go b/vendor/github.com/go-openapi/runtime/middleware/rapidoc.go
new file mode 100644
index 000000000..4be330d6d
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/rapidoc.go
@@ -0,0 +1,90 @@
+package middleware
+
+import (
+ "bytes"
+ "fmt"
+ "html/template"
+ "net/http"
+ "path"
+)
+
+// RapiDocOpts configures the RapiDoc middlewares
+type RapiDocOpts struct {
+ // BasePath for the UI path, defaults to: /
+ BasePath string
+ // Path combines with BasePath for the full UI path, defaults to: docs
+ Path string
+ // SpecURL is the URL to find the spec for, defaults to: /swagger.json
+ SpecURL string
+ // RapiDocURL for the js that generates the rapidoc site, defaults to: https://unpkg.com/rapidoc/dist/rapidoc-min.js
+ RapiDocURL string
+ // Title for the documentation site, defaults to: API documentation
+ Title string
+}
+
+// EnsureDefaults fills in defaults in case some options are missing
+func (r *RapiDocOpts) EnsureDefaults() {
+ if r.BasePath == "" {
+ r.BasePath = "/"
+ }
+ if r.Path == "" {
+ r.Path = "docs"
+ }
+ if r.SpecURL == "" {
+ r.SpecURL = "/swagger.json"
+ }
+ if r.RapiDocURL == "" {
+ r.RapiDocURL = rapidocLatest
+ }
+ if r.Title == "" {
+ r.Title = "API documentation"
+ }
+}
+
+// RapiDoc creates a middleware to serve a documentation site for a swagger spec.
+// This allows for altering the spec before starting the http listener.
+//
+func RapiDoc(opts RapiDocOpts, next http.Handler) http.Handler {
+ opts.EnsureDefaults()
+
+ pth := path.Join(opts.BasePath, opts.Path)
+ tmpl := template.Must(template.New("rapidoc").Parse(rapidocTemplate))
+
+ buf := bytes.NewBuffer(nil)
+ _ = tmpl.Execute(buf, opts)
+ b := buf.Bytes()
+
+ return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
+ if r.URL.Path == pth {
+ rw.Header().Set("Content-Type", "text/html; charset=utf-8")
+ rw.WriteHeader(http.StatusOK)
+
+ _, _ = rw.Write(b)
+ return
+ }
+
+ if next == nil {
+ rw.Header().Set("Content-Type", "text/plain")
+ rw.WriteHeader(http.StatusNotFound)
+ _, _ = rw.Write([]byte(fmt.Sprintf("%q not found", pth)))
+ return
+ }
+ next.ServeHTTP(rw, r)
+ })
+}
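+
+// A minimal usage sketch (illustrative only, not upstream documentation):
+// serve the RapiDoc page at /docs on top of `api`, a hypothetical
+// http.Handler for the rest of the application:
+//
+//    handler := RapiDoc(RapiDocOpts{SpecURL: "/swagger.json"}, api)
+//    // http.ListenAndServe(":8080", handler)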
+
+const (
+ rapidocLatest = "https://unpkg.com/rapidoc/dist/rapidoc-min.js"
+ rapidocTemplate = `<!doctype html>
+<html>
+<head>
+ <title>{{ .Title }}</title>
+ <meta charset="utf-8"> <!-- Important: rapi-doc uses utf8 characters -->
+ <script type="module" src="{{ .RapiDocURL }}"></script>
+</head>
+<body>
+ <rapi-doc spec-url="{{ .SpecURL }}"></rapi-doc>
+</body>
+</html>
+`
+)
diff --git a/vendor/github.com/go-openapi/runtime/middleware/redoc.go b/vendor/github.com/go-openapi/runtime/middleware/redoc.go
new file mode 100644
index 000000000..019c85429
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/redoc.go
@@ -0,0 +1,103 @@
+package middleware
+
+import (
+ "bytes"
+ "fmt"
+ "html/template"
+ "net/http"
+ "path"
+)
+
+// RedocOpts configures the Redoc middlewares
+type RedocOpts struct {
+ // BasePath for the UI path, defaults to: /
+ BasePath string
+ // Path combines with BasePath for the full UI path, defaults to: docs
+ Path string
+ // SpecURL is the URL to find the spec for, defaults to: /swagger.json
+ SpecURL string
+ // RedocURL for the js that generates the redoc site, defaults to: https://cdn.jsdelivr.net/npm/redoc/bundles/redoc.standalone.js
+ RedocURL string
+ // Title for the documentation site, defaults to: API documentation
+ Title string
+}
+
+// EnsureDefaults fills in defaults in case some options are missing
+func (r *RedocOpts) EnsureDefaults() {
+ if r.BasePath == "" {
+ r.BasePath = "/"
+ }
+ if r.Path == "" {
+ r.Path = "docs"
+ }
+ if r.SpecURL == "" {
+ r.SpecURL = "/swagger.json"
+ }
+ if r.RedocURL == "" {
+ r.RedocURL = redocLatest
+ }
+ if r.Title == "" {
+ r.Title = "API documentation"
+ }
+}
+
+// Redoc creates a middleware to serve a documentation site for a swagger spec.
+// This allows for altering the spec before starting the http listener.
+//
+func Redoc(opts RedocOpts, next http.Handler) http.Handler {
+ opts.EnsureDefaults()
+
+ pth := path.Join(opts.BasePath, opts.Path)
+ tmpl := template.Must(template.New("redoc").Parse(redocTemplate))
+
+ buf := bytes.NewBuffer(nil)
+ _ = tmpl.Execute(buf, opts)
+ b := buf.Bytes()
+
+ return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
+ if r.URL.Path == pth {
+ rw.Header().Set("Content-Type", "text/html; charset=utf-8")
+ rw.WriteHeader(http.StatusOK)
+
+ _, _ = rw.Write(b)
+ return
+ }
+
+ if next == nil {
+ rw.Header().Set("Content-Type", "text/plain")
+ rw.WriteHeader(http.StatusNotFound)
+ _, _ = rw.Write([]byte(fmt.Sprintf("%q not found", pth)))
+ return
+ }
+ next.ServeHTTP(rw, r)
+ })
+}
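+
+// A minimal usage sketch (illustrative only), assuming `api` is an existing
+// http.Handler; EnsureDefaults supplies the Redoc bundle URL and /swagger.json:
+//
+//    handler := Redoc(RedocOpts{Title: "My API"}, api)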
+
+const (
+ redocLatest = "https://cdn.jsdelivr.net/npm/redoc/bundles/redoc.standalone.js"
+ redocTemplate = `<!DOCTYPE html>
+<html>
+ <head>
+ <title>{{ .Title }}</title>
+ <!-- needed for adaptive design -->
+ <meta charset="utf-8"/>
+ <meta name="viewport" content="width=device-width, initial-scale=1">
+ <link href="https://fonts.googleapis.com/css?family=Montserrat:300,400,700|Roboto:300,400,700" rel="stylesheet">
+
+ <!--
+ ReDoc doesn't change outer page styles
+ -->
+ <style>
+ body {
+ margin: 0;
+ padding: 0;
+ }
+ </style>
+ </head>
+ <body>
+ <redoc spec-url='{{ .SpecURL }}'></redoc>
+ <script src="{{ .RedocURL }}"> </script>
+ </body>
+</html>
+`
+)
diff --git a/vendor/github.com/go-openapi/runtime/middleware/request.go b/vendor/github.com/go-openapi/runtime/middleware/request.go
new file mode 100644
index 000000000..760c37861
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/request.go
@@ -0,0 +1,104 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package middleware
+
+import (
+ "net/http"
+ "reflect"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+
+ "github.com/go-openapi/runtime"
+)
+
+// UntypedRequestBinder binds and validates the data from a http request
+type UntypedRequestBinder struct {
+ Spec *spec.Swagger
+ Parameters map[string]spec.Parameter
+ Formats strfmt.Registry
+ paramBinders map[string]*untypedParamBinder
+}
+
+// NewUntypedRequestBinder creates a new binder for reading a request.
+func NewUntypedRequestBinder(parameters map[string]spec.Parameter, spec *spec.Swagger, formats strfmt.Registry) *UntypedRequestBinder {
+ binders := make(map[string]*untypedParamBinder)
+ for fieldName, param := range parameters {
+ binders[fieldName] = newUntypedParamBinder(param, spec, formats)
+ }
+ return &UntypedRequestBinder{
+ Parameters: parameters,
+ paramBinders: binders,
+ Spec: spec,
+ Formats: formats,
+ }
+}
+
+// Bind performs the data binding and validation
+func (o *UntypedRequestBinder) Bind(request *http.Request, routeParams RouteParams, consumer runtime.Consumer, data interface{}) error {
+ val := reflect.Indirect(reflect.ValueOf(data))
+ isMap := val.Kind() == reflect.Map
+ var result []error
+ debugLog("binding %d parameters for %s %s", len(o.Parameters), request.Method, request.URL.EscapedPath())
+ for fieldName, param := range o.Parameters {
+ binder := o.paramBinders[fieldName]
+ debugLog("binding parameter %s for %s %s", fieldName, request.Method, request.URL.EscapedPath())
+ var target reflect.Value
+ if !isMap {
+ binder.Name = fieldName
+ target = val.FieldByName(fieldName)
+ }
+
+ if isMap {
+ tpe := binder.Type()
+ if tpe == nil {
+ if param.Schema.Type.Contains("array") {
+ tpe = reflect.TypeOf([]interface{}{})
+ } else {
+ tpe = reflect.TypeOf(map[string]interface{}{})
+ }
+ }
+ target = reflect.Indirect(reflect.New(tpe))
+ }
+
+ if !target.IsValid() {
+ result = append(result, errors.New(500, "parameter name %q is an unknown field", binder.Name))
+ continue
+ }
+
+ if err := binder.Bind(request, routeParams, consumer, target); err != nil {
+ result = append(result, err)
+ continue
+ }
+
+ if binder.validator != nil {
+ rr := binder.validator.Validate(target.Interface())
+ if rr != nil && rr.HasErrors() {
+ result = append(result, rr.AsError())
+ }
+ }
+
+ if isMap {
+ val.SetMapIndex(reflect.ValueOf(param.Name), target)
+ }
+ }
+
+ if len(result) > 0 {
+ return errors.CompositeValidationError(result...)
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/router.go b/vendor/github.com/go-openapi/runtime/middleware/router.go
new file mode 100644
index 000000000..5052031c8
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/router.go
@@ -0,0 +1,488 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package middleware
+
+import (
+ "fmt"
+ "net/http"
+ fpath "path"
+ "regexp"
+ "strings"
+
+ "github.com/go-openapi/runtime/security"
+ "github.com/go-openapi/swag"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/runtime/middleware/denco"
+)
+
+// RouteParam is an object to capture route params in a framework agnostic way.
+// Implementations of the muxer should use these route params to communicate with the
+// swagger framework
+type RouteParam struct {
+ Name string
+ Value string
+}
+
+// RouteParams the collection of route params
+type RouteParams []RouteParam
+
+// Get gets the value for the route param for the specified key
+func (r RouteParams) Get(name string) string {
+ vv, _, _ := r.GetOK(name)
+ if len(vv) > 0 {
+ return vv[len(vv)-1]
+ }
+ return ""
+}
+
+// GetOK gets the value but also returns booleans to indicate if a key or value
+// is present. This aids in validation and satisfies an interface in use there
+//
+// The returned values are: data, has key, has value
+func (r RouteParams) GetOK(name string) ([]string, bool, bool) {
+ for _, p := range r {
+ if p.Name == name {
+ return []string{p.Value}, true, p.Value != ""
+ }
+ }
+ return nil, false, false
+}
+
+// NewRouter creates a new context aware router middleware
+func NewRouter(ctx *Context, next http.Handler) http.Handler {
+ if ctx.router == nil {
+ ctx.router = DefaultRouter(ctx.spec, ctx.api)
+ }
+
+ return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
+ if _, rCtx, ok := ctx.RouteInfo(r); ok {
+ next.ServeHTTP(rw, rCtx)
+ return
+ }
+
+ // Not found, check if it exists in the other methods first
+ if others := ctx.AllowedMethods(r); len(others) > 0 {
+ ctx.Respond(rw, r, ctx.analyzer.RequiredProduces(), nil, errors.MethodNotAllowed(r.Method, others))
+ return
+ }
+
+ ctx.Respond(rw, r, ctx.analyzer.RequiredProduces(), nil, errors.NotFound("path %s was not found", r.URL.EscapedPath()))
+ })
+}
+
+// RoutableAPI represents an interface for things that can serve
+// as a provider of implementations for the swagger router
+type RoutableAPI interface {
+ HandlerFor(string, string) (http.Handler, bool)
+ ServeErrorFor(string) func(http.ResponseWriter, *http.Request, error)
+ ConsumersFor([]string) map[string]runtime.Consumer
+ ProducersFor([]string) map[string]runtime.Producer
+ AuthenticatorsFor(map[string]spec.SecurityScheme) map[string]runtime.Authenticator
+ Authorizer() runtime.Authorizer
+ Formats() strfmt.Registry
+ DefaultProduces() string
+ DefaultConsumes() string
+}
+
+// Router represents a swagger aware router
+type Router interface {
+ Lookup(method, path string) (*MatchedRoute, bool)
+ OtherMethods(method, path string) []string
+}
+
+type defaultRouteBuilder struct {
+ spec *loads.Document
+ analyzer *analysis.Spec
+ api RoutableAPI
+ records map[string][]denco.Record
+}
+
+type defaultRouter struct {
+ spec *loads.Document
+ routers map[string]*denco.Router
+}
+
+func newDefaultRouteBuilder(spec *loads.Document, api RoutableAPI) *defaultRouteBuilder {
+ return &defaultRouteBuilder{
+ spec: spec,
+ analyzer: analysis.New(spec.Spec()),
+ api: api,
+ records: make(map[string][]denco.Record),
+ }
+}
+
+// DefaultRouter creates a default implementation of the router
+func DefaultRouter(spec *loads.Document, api RoutableAPI) Router {
+ builder := newDefaultRouteBuilder(spec, api)
+ if spec != nil {
+ for method, paths := range builder.analyzer.Operations() {
+ for path, operation := range paths {
+ fp := fpath.Join(spec.BasePath(), path)
+ debugLog("adding route %s %s %q", method, fp, operation.ID)
+ builder.AddRoute(method, fp, operation)
+ }
+ }
+ }
+ return builder.Build()
+}
+
+// RouteAuthenticator is an authenticator that can compose several authenticators together.
+// It also knows when it contains an authenticator that allows for anonymous pass through.
+// Contains a group of 1 or more authenticators that have a logical AND relationship
+type RouteAuthenticator struct {
+ Authenticator map[string]runtime.Authenticator
+ Schemes []string
+ Scopes map[string][]string
+ allScopes []string
+ commonScopes []string
+ allowAnonymous bool
+}
+
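+// AllowsAnonymous returns true when this authenticator allows anonymous pass through.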
+func (ra *RouteAuthenticator) AllowsAnonymous() bool {
+ return ra.allowAnonymous
+}
+
+// AllScopes returns a list of unique scopes that is the combination
+// of all the scopes in the requirements
+func (ra *RouteAuthenticator) AllScopes() []string {
+ return ra.allScopes
+}
+
+// CommonScopes returns a list of unique scopes that are common in all the
+// scopes in the requirements
+func (ra *RouteAuthenticator) CommonScopes() []string {
+ return ra.commonScopes
+}
+
+// Authenticate Authenticator interface implementation
+func (ra *RouteAuthenticator) Authenticate(req *http.Request, route *MatchedRoute) (bool, interface{}, error) {
+ if ra.allowAnonymous {
+ route.Authenticator = ra
+ return true, nil, nil
+ }
+ // iterate in proper order
+ var lastResult interface{}
+ for _, scheme := range ra.Schemes {
+ if authenticator, ok := ra.Authenticator[scheme]; ok {
+ applies, princ, err := authenticator.Authenticate(&security.ScopedAuthRequest{
+ Request: req,
+ RequiredScopes: ra.Scopes[scheme],
+ })
+ if !applies {
+ return false, nil, nil
+ }
+ if err != nil {
+ route.Authenticator = ra
+ return true, nil, err
+ }
+ lastResult = princ
+ }
+ }
+ route.Authenticator = ra
+ return true, lastResult, nil
+}
+
+func stringSliceUnion(slices ...[]string) []string {
+ unique := make(map[string]struct{})
+ var result []string
+ for _, slice := range slices {
+ for _, entry := range slice {
+ if _, ok := unique[entry]; ok {
+ continue
+ }
+ unique[entry] = struct{}{}
+ result = append(result, entry)
+ }
+ }
+ return result
+}
+
+func stringSliceIntersection(slices ...[]string) []string {
+ unique := make(map[string]int)
+ var intersection []string
+
+ total := len(slices)
+ var emptyCnt int
+ for _, slice := range slices {
+ if len(slice) == 0 {
+ emptyCnt++
+ continue
+ }
+
+ for _, entry := range slice {
+ unique[entry]++
+ if unique[entry] == total-emptyCnt { // this entry appeared in all the non-empty slices
+ intersection = append(intersection, entry)
+ }
+ }
+ }
+
+ return intersection
+}
+
+// RouteAuthenticators represents a group of authenticators that represent a logical OR
+type RouteAuthenticators []RouteAuthenticator
+
+// AllowsAnonymous returns true when there is an authenticator that means optional auth
+func (ras RouteAuthenticators) AllowsAnonymous() bool {
+ for _, ra := range ras {
+ if ra.AllowsAnonymous() {
+ return true
+ }
+ }
+ return false
+}
+
+// Authenticate method implementation so this collection can be used as an authenticator
+func (ras RouteAuthenticators) Authenticate(req *http.Request, route *MatchedRoute) (bool, interface{}, error) {
+ var lastError error
+ var allowsAnon bool
+ var anonAuth RouteAuthenticator
+
+ for _, ra := range ras {
+ if ra.AllowsAnonymous() {
+ anonAuth = ra
+ allowsAnon = true
+ continue
+ }
+ applies, usr, err := ra.Authenticate(req, route)
+ if !applies || err != nil || usr == nil {
+ if err != nil {
+ lastError = err
+ }
+ continue
+ }
+ return applies, usr, nil
+ }
+
+ if allowsAnon && lastError == nil {
+ route.Authenticator = &anonAuth
+ return true, nil, lastError
+ }
+ return lastError != nil, nil, lastError
+}
+
+type routeEntry struct {
+ PathPattern string
+ BasePath string
+ Operation *spec.Operation
+ Consumes []string
+ Consumers map[string]runtime.Consumer
+ Produces []string
+ Producers map[string]runtime.Producer
+ Parameters map[string]spec.Parameter
+ Handler http.Handler
+ Formats strfmt.Registry
+ Binder *UntypedRequestBinder
+ Authenticators RouteAuthenticators
+ Authorizer runtime.Authorizer
+}
+
+// MatchedRoute represents the route that was matched in this request
+type MatchedRoute struct {
+ routeEntry
+ Params RouteParams
+ Consumer runtime.Consumer
+ Producer runtime.Producer
+ Authenticator *RouteAuthenticator
+}
+
+// HasAuth returns true when the route has a security requirement defined
+func (m *MatchedRoute) HasAuth() bool {
+ return len(m.Authenticators) > 0
+}
+
+// NeedsAuth returns true when the request still
+// needs to perform authentication
+func (m *MatchedRoute) NeedsAuth() bool {
+ return m.HasAuth() && m.Authenticator == nil
+}
+
+func (d *defaultRouter) Lookup(method, path string) (*MatchedRoute, bool) {
+ mth := strings.ToUpper(method)
+ debugLog("looking up route for %s %s", method, path)
+ if Debug {
+ if len(d.routers) == 0 {
+ debugLog("there are no known routers")
+ }
+ for meth := range d.routers {
+ debugLog("got a router for %s", meth)
+ }
+ }
+ if router, ok := d.routers[mth]; ok {
+ if m, rp, ok := router.Lookup(fpath.Clean(path)); ok && m != nil {
+ if entry, ok := m.(*routeEntry); ok {
+ debugLog("found a route for %s %s with %d parameters", method, path, len(entry.Parameters))
+ var params RouteParams
+ for _, p := range rp {
+ v, err := pathUnescape(p.Value)
+ if err != nil {
+ debugLog("failed to escape %q: %v", p.Value, err)
+ v = p.Value
+ }
+ // a workaround to handle fragment/composing parameters until they are supported in denco router
+ // check if this parameter is a fragment within a path segment
+ if xpos := strings.Index(entry.PathPattern, fmt.Sprintf("{%s}", p.Name)) + len(p.Name) + 2; xpos < len(entry.PathPattern) && entry.PathPattern[xpos] != '/' {
+ // extract fragment parameters
+ ep := strings.Split(entry.PathPattern[xpos:], "/")[0]
+ pnames, pvalues := decodeCompositParams(p.Name, v, ep, nil, nil)
+ for i, pname := range pnames {
+ params = append(params, RouteParam{Name: pname, Value: pvalues[i]})
+ }
+ } else {
+ // use the parameter directly
+ params = append(params, RouteParam{Name: p.Name, Value: v})
+ }
+ }
+ return &MatchedRoute{routeEntry: *entry, Params: params}, true
+ }
+ } else {
+ debugLog("couldn't find a route by path for %s %s", method, path)
+ }
+ } else {
+ debugLog("couldn't find a route by method for %s %s", method, path)
+ }
+ return nil, false
+}
+
+func (d *defaultRouter) OtherMethods(method, path string) []string {
+ mn := strings.ToUpper(method)
+ var methods []string
+ for k, v := range d.routers {
+ if k != mn {
+ if _, _, ok := v.Lookup(fpath.Clean(path)); ok {
+ methods = append(methods, k)
+ continue
+ }
+ }
+ }
+ return methods
+}
+
+// convert swagger parameters per path segment into a denco parameter as multiple parameters per segment are not supported in denco
+var pathConverter = regexp.MustCompile(`{(.+?)}([^/]*)`)
+
+func decodeCompositParams(name string, value string, pattern string, names []string, values []string) ([]string, []string) {
+ pleft := strings.Index(pattern, "{")
+ names = append(names, name)
+ if pleft < 0 {
+ if strings.HasSuffix(value, pattern) {
+ values = append(values, value[:len(value)-len(pattern)])
+ } else {
+ values = append(values, "")
+ }
+ } else {
+ toskip := pattern[:pleft]
+ pright := strings.Index(pattern, "}")
+ vright := strings.Index(value, toskip)
+ if vright >= 0 {
+ values = append(values, value[:vright])
+ } else {
+ values = append(values, "")
+ value = ""
+ }
+ return decodeCompositParams(pattern[pleft+1:pright], value[vright+len(toskip):], pattern[pright+1:], names, values)
+ }
+ return names, values
+}
+
+func (d *defaultRouteBuilder) AddRoute(method, path string, operation *spec.Operation) {
+ mn := strings.ToUpper(method)
+
+ bp := fpath.Clean(d.spec.BasePath())
+ if len(bp) > 0 && bp[len(bp)-1] == '/' {
+ bp = bp[:len(bp)-1]
+ }
+
+ debugLog("operation: %#v", *operation)
+ if handler, ok := d.api.HandlerFor(method, strings.TrimPrefix(path, bp)); ok {
+ consumes := d.analyzer.ConsumesFor(operation)
+ produces := d.analyzer.ProducesFor(operation)
+ parameters := d.analyzer.ParamsFor(method, strings.TrimPrefix(path, bp))
+
+ // add API defaults if not part of the spec
+ if defConsumes := d.api.DefaultConsumes(); defConsumes != "" && !swag.ContainsStringsCI(consumes, defConsumes) {
+ consumes = append(consumes, defConsumes)
+ }
+
+ if defProduces := d.api.DefaultProduces(); defProduces != "" && !swag.ContainsStringsCI(produces, defProduces) {
+ produces = append(produces, defProduces)
+ }
+
+ record := denco.NewRecord(pathConverter.ReplaceAllString(path, ":$1"), &routeEntry{
+ BasePath: bp,
+ PathPattern: path,
+ Operation: operation,
+ Handler: handler,
+ Consumes: consumes,
+ Produces: produces,
+ Consumers: d.api.ConsumersFor(normalizeOffers(consumes)),
+ Producers: d.api.ProducersFor(normalizeOffers(produces)),
+ Parameters: parameters,
+ Formats: d.api.Formats(),
+ Binder: NewUntypedRequestBinder(parameters, d.spec.Spec(), d.api.Formats()),
+ Authenticators: d.buildAuthenticators(operation),
+ Authorizer: d.api.Authorizer(),
+ })
+ d.records[mn] = append(d.records[mn], record)
+ }
+}
+
+func (d *defaultRouteBuilder) buildAuthenticators(operation *spec.Operation) RouteAuthenticators {
+ requirements := d.analyzer.SecurityRequirementsFor(operation)
+ var auths []RouteAuthenticator
+ for _, reqs := range requirements {
+ var schemes []string
+ scopes := make(map[string][]string, len(reqs))
+ var scopeSlices [][]string
+ for _, req := range reqs {
+ schemes = append(schemes, req.Name)
+ scopes[req.Name] = req.Scopes
+ scopeSlices = append(scopeSlices, req.Scopes)
+ }
+
+ definitions := d.analyzer.SecurityDefinitionsForRequirements(reqs)
+ authenticators := d.api.AuthenticatorsFor(definitions)
+ auths = append(auths, RouteAuthenticator{
+ Authenticator: authenticators,
+ Schemes: schemes,
+ Scopes: scopes,
+ allScopes: stringSliceUnion(scopeSlices...),
+ commonScopes: stringSliceIntersection(scopeSlices...),
+ allowAnonymous: len(reqs) == 1 && reqs[0].Name == "",
+ })
+ }
+ return auths
+}
+
+func (d *defaultRouteBuilder) Build() *defaultRouter {
+ routers := make(map[string]*denco.Router)
+ for method, records := range d.records {
+ router := denco.New()
+ _ = router.Build(records)
+ routers[method] = router
+ }
+ return &defaultRouter{
+ spec: d.spec,
+ routers: routers,
+ }
+}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/security.go b/vendor/github.com/go-openapi/runtime/middleware/security.go
new file mode 100644
index 000000000..2b061caef
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/security.go
@@ -0,0 +1,39 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package middleware
+
+import "net/http"
+
+func newSecureAPI(ctx *Context, next http.Handler) http.Handler {
+ return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
+ route, rCtx, _ := ctx.RouteInfo(r)
+ if rCtx != nil {
+ r = rCtx
+ }
+ if route != nil && !route.NeedsAuth() {
+ next.ServeHTTP(rw, r)
+ return
+ }
+
+ _, rCtx, err := ctx.Authorize(r, route)
+ if err != nil {
+ ctx.Respond(rw, r, route.Produces, route, err)
+ return
+ }
+ r = rCtx
+
+ next.ServeHTTP(rw, r)
+ })
+}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/spec.go b/vendor/github.com/go-openapi/runtime/middleware/spec.go
new file mode 100644
index 000000000..f02914298
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/spec.go
@@ -0,0 +1,48 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package middleware
+
+import (
+ "net/http"
+ "path"
+)
+
+// Spec creates a middleware to serve a swagger spec.
+// This allows for altering the spec before starting the http listener.
+// This can be useful if you want to serve the swagger spec from a path other than /swagger.json
+//
+func Spec(basePath string, b []byte, next http.Handler) http.Handler {
+ if basePath == "" {
+ basePath = "/"
+ }
+ pth := path.Join(basePath, "swagger.json")
+
+ return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
+ if r.URL.Path == pth {
+ rw.Header().Set("Content-Type", "application/json")
+ rw.WriteHeader(http.StatusOK)
+ //#nosec
+ _, _ = rw.Write(b)
+ return
+ }
+
+ if next == nil {
+ rw.Header().Set("Content-Type", "application/json")
+ rw.WriteHeader(http.StatusNotFound)
+ return
+ }
+ next.ServeHTTP(rw, r)
+ })
+}
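+
+// A minimal usage sketch (illustrative only), assuming `specDoc` holds the
+// raw swagger.json bytes and `api` is an existing http.Handler:
+//
+//    handler := Spec("/api", specDoc, api) // serves /api/swagger.json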
diff --git a/vendor/github.com/go-openapi/runtime/middleware/swaggerui.go b/vendor/github.com/go-openapi/runtime/middleware/swaggerui.go
new file mode 100644
index 000000000..b4dea29e4
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/swaggerui.go
@@ -0,0 +1,168 @@
+package middleware
+
+import (
+ "bytes"
+ "fmt"
+ "html/template"
+ "net/http"
+ "path"
+)
+
+// SwaggerUIOpts configures the SwaggerUI middlewares
+type SwaggerUIOpts struct {
+ // BasePath for the UI path, defaults to: /
+ BasePath string
+ // Path combines with BasePath for the full UI path, defaults to: docs
+ Path string
+ // SpecURL is the URL to find the spec for, defaults to: /swagger.json
+ SpecURL string
+ // OAuthCallbackURL the url called after OAuth2 login
+ OAuthCallbackURL string
+
+ // The three components needed to embed swagger-ui
+ SwaggerURL string
+ SwaggerPresetURL string
+ SwaggerStylesURL string
+
+ Favicon32 string
+ Favicon16 string
+
+ // Title for the documentation site, defaults to: API documentation
+ Title string
+}
+
+// EnsureDefaults fills in defaults in case some options are missing
+func (r *SwaggerUIOpts) EnsureDefaults() {
+ if r.BasePath == "" {
+ r.BasePath = "/"
+ }
+ if r.Path == "" {
+ r.Path = "docs"
+ }
+ if r.SpecURL == "" {
+ r.SpecURL = "/swagger.json"
+ }
+ if r.OAuthCallbackURL == "" {
+ r.OAuthCallbackURL = path.Join(r.BasePath, r.Path, "oauth2-callback")
+ }
+ if r.SwaggerURL == "" {
+ r.SwaggerURL = swaggerLatest
+ }
+ if r.SwaggerPresetURL == "" {
+ r.SwaggerPresetURL = swaggerPresetLatest
+ }
+ if r.SwaggerStylesURL == "" {
+ r.SwaggerStylesURL = swaggerStylesLatest
+ }
+ if r.Favicon16 == "" {
+ r.Favicon16 = swaggerFavicon16Latest
+ }
+ if r.Favicon32 == "" {
+ r.Favicon32 = swaggerFavicon32Latest
+ }
+ if r.Title == "" {
+ r.Title = "API documentation"
+ }
+}
+
+// SwaggerUI creates a middleware to serve a documentation site for a swagger spec.
+// This allows for altering the spec before starting the http listener.
+func SwaggerUI(opts SwaggerUIOpts, next http.Handler) http.Handler {
+ opts.EnsureDefaults()
+
+ pth := path.Join(opts.BasePath, opts.Path)
+ tmpl := template.Must(template.New("swaggerui").Parse(swaggeruiTemplate))
+
+ buf := bytes.NewBuffer(nil)
+ _ = tmpl.Execute(buf, &opts)
+ b := buf.Bytes()
+
+ return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
+ if path.Join(r.URL.Path) == pth {
+ rw.Header().Set("Content-Type", "text/html; charset=utf-8")
+ rw.WriteHeader(http.StatusOK)
+
+ _, _ = rw.Write(b)
+ return
+ }
+
+ if next == nil {
+ rw.Header().Set("Content-Type", "text/plain")
+ rw.WriteHeader(http.StatusNotFound)
+ _, _ = rw.Write([]byte(fmt.Sprintf("%q not found", pth)))
+ return
+ }
+ next.ServeHTTP(rw, r)
+ })
+}
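+
+// A minimal usage sketch (illustrative only), assuming `api` is an existing
+// http.Handler; EnsureDefaults fills in the unpkg asset URLs declared below:
+//
+//    handler := SwaggerUI(SwaggerUIOpts{SpecURL: "/swagger.json"}, api)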
+
+const (
+ swaggerLatest = "https://unpkg.com/swagger-ui-dist/swagger-ui-bundle.js"
+ swaggerPresetLatest = "https://unpkg.com/swagger-ui-dist/swagger-ui-standalone-preset.js"
+ swaggerStylesLatest = "https://unpkg.com/swagger-ui-dist/swagger-ui.css"
+ swaggerFavicon32Latest = "https://unpkg.com/swagger-ui-dist/favicon-32x32.png"
+ swaggerFavicon16Latest = "https://unpkg.com/swagger-ui-dist/favicon-16x16.png"
+ swaggeruiTemplate = `
+<!DOCTYPE html>
+<html lang="en">
+ <head>
+ <meta charset="UTF-8">
+ <title>{{ .Title }}</title>
+
+ <link rel="stylesheet" type="text/css" href="{{ .SwaggerStylesURL }}" >
+ <link rel="icon" type="image/png" href="{{ .Favicon32 }}" sizes="32x32" />
+ <link rel="icon" type="image/png" href="{{ .Favicon16 }}" sizes="16x16" />
+ <style>
+ html
+ {
+ box-sizing: border-box;
+ overflow: -moz-scrollbars-vertical;
+ overflow-y: scroll;
+ }
+
+ *,
+ *:before,
+ *:after
+ {
+ box-sizing: inherit;
+ }
+
+ body
+ {
+ margin:0;
+ background: #fafafa;
+ }
+ </style>
+ </head>
+
+ <body>
+ <div id="swagger-ui"></div>
+
+ <script src="{{ .SwaggerURL }}"> </script>
+ <script src="{{ .SwaggerPresetURL }}"> </script>
+ <script>
+ window.onload = function() {
+ // Begin Swagger UI call region
+ const ui = SwaggerUIBundle({
+ url: '{{ .SpecURL }}',
+ dom_id: '#swagger-ui',
+ deepLinking: true,
+ presets: [
+ SwaggerUIBundle.presets.apis,
+ SwaggerUIStandalonePreset
+ ],
+ plugins: [
+ SwaggerUIBundle.plugins.DownloadUrl
+ ],
+ layout: "StandaloneLayout",
+ oauth2RedirectUrl: '{{ .OAuthCallbackURL }}'
+ })
+ // End Swagger UI call region
+
+ window.ui = ui
+ }
+ </script>
+ </body>
+</html>
+`
+)
diff --git a/vendor/github.com/go-openapi/runtime/middleware/swaggerui_oauth2.go b/vendor/github.com/go-openapi/runtime/middleware/swaggerui_oauth2.go
new file mode 100644
index 000000000..576f6003f
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/swaggerui_oauth2.go
@@ -0,0 +1,122 @@
+package middleware
+
+import (
+ "bytes"
+ "fmt"
+ "net/http"
+ "path"
+ "text/template"
+)
+
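+// SwaggerUIOAuth2Callback creates a middleware to serve the OAuth2 callback
+// page used by the Swagger UI documentation site.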
+func SwaggerUIOAuth2Callback(opts SwaggerUIOpts, next http.Handler) http.Handler {
+ opts.EnsureDefaults()
+
+ pth := opts.OAuthCallbackURL
+ tmpl := template.Must(template.New("swaggeroauth").Parse(swaggerOAuthTemplate))
+
+ buf := bytes.NewBuffer(nil)
+ _ = tmpl.Execute(buf, &opts)
+ b := buf.Bytes()
+
+ return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
+ if path.Join(r.URL.Path) == pth {
+ rw.Header().Set("Content-Type", "text/html; charset=utf-8")
+ rw.WriteHeader(http.StatusOK)
+
+ _, _ = rw.Write(b)
+ return
+ }
+
+ if next == nil {
+ rw.Header().Set("Content-Type", "text/plain")
+ rw.WriteHeader(http.StatusNotFound)
+ _, _ = rw.Write([]byte(fmt.Sprintf("%q not found", pth)))
+ return
+ }
+ next.ServeHTTP(rw, r)
+ })
+}
+
+const (
+ swaggerOAuthTemplate = `
+<!DOCTYPE html>
+<html lang="en">
+<head>
+ <title>{{ .Title }}</title>
+</head>
+<body>
+<script>
+ 'use strict';
+ function run () {
+ var oauth2 = window.opener.swaggerUIRedirectOauth2;
+ var sentState = oauth2.state;
+ var redirectUrl = oauth2.redirectUrl;
+ var isValid, qp, arr;
+
+ if (/code|token|error/.test(window.location.hash)) {
+ qp = window.location.hash.substring(1).replace('?', '&');
+ } else {
+ qp = location.search.substring(1);
+ }
+
+ arr = qp.split("&");
+ arr.forEach(function (v,i,_arr) { _arr[i] = '"' + v.replace('=', '":"') + '"';});
+ qp = qp ? JSON.parse('{' + arr.join() + '}',
+ function (key, value) {
+ return key === "" ? value : decodeURIComponent(value);
+ }
+ ) : {};
+
+ isValid = qp.state === sentState;
+
+ if ((
+ oauth2.auth.schema.get("flow") === "accessCode" ||
+ oauth2.auth.schema.get("flow") === "authorizationCode" ||
+ oauth2.auth.schema.get("flow") === "authorization_code"
+ ) && !oauth2.auth.code) {
+ if (!isValid) {
+ oauth2.errCb({
+ authId: oauth2.auth.name,
+ source: "auth",
+ level: "warning",
+ message: "Authorization may be unsafe, passed state was changed in server. The passed state wasn't returned from auth server."
+ });
+ }
+
+ if (qp.code) {
+ delete oauth2.state;
+ oauth2.auth.code = qp.code;
+ oauth2.callback({auth: oauth2.auth, redirectUrl: redirectUrl});
+ } else {
+ let oauthErrorMsg;
+ if (qp.error) {
+ oauthErrorMsg = "["+qp.error+"]: " +
+ (qp.error_description ? qp.error_description+ ". " : "no accessCode received from the server. ") +
+ (qp.error_uri ? "More info: "+qp.error_uri : "");
+ }
+
+ oauth2.errCb({
+ authId: oauth2.auth.name,
+ source: "auth",
+ level: "error",
+ message: oauthErrorMsg || "[Authorization failed]: no accessCode received from the server."
+ });
+ }
+ } else {
+ oauth2.callback({auth: oauth2.auth, token: qp, isValid: isValid, redirectUrl: redirectUrl});
+ }
+ window.close();
+ }
+
+ if (document.readyState !== 'loading') {
+ run();
+ } else {
+ document.addEventListener('DOMContentLoaded', function () {
+ run();
+ });
+ }
+</script>
+</body>
+</html>
+`
+)
diff --git a/vendor/github.com/go-openapi/runtime/middleware/untyped/api.go b/vendor/github.com/go-openapi/runtime/middleware/untyped/api.go
new file mode 100644
index 000000000..39a85f7d9
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/untyped/api.go
@@ -0,0 +1,286 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package untyped
+
+import (
+ "fmt"
+ "net/http"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+
+ "github.com/go-openapi/runtime"
+)
+
+// NewAPI creates the default untyped API
+func NewAPI(spec *loads.Document) *API {
+ var an *analysis.Spec
+ if spec != nil && spec.Spec() != nil {
+ an = analysis.New(spec.Spec())
+ }
+ api := &API{
+ spec: spec,
+ analyzer: an,
+ consumers: make(map[string]runtime.Consumer, 10),
+ producers: make(map[string]runtime.Producer, 10),
+ authenticators: make(map[string]runtime.Authenticator),
+ operations: make(map[string]map[string]runtime.OperationHandler),
+ ServeError: errors.ServeError,
+ Models: make(map[string]func() interface{}),
+ formats: strfmt.NewFormats(),
+ }
+ return api.WithJSONDefaults()
+}
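+
+// A minimal usage sketch (illustrative only), assuming `doc` is a
+// *loads.Document and `getPets` is a hypothetical runtime.OperationHandler:
+//
+//    api := NewAPI(doc)
+//    api.RegisterOperation("get", "/pets", getPets)
+//    if err := api.Validate(); err != nil {
+//        // some registration is missing from the spec, or vice versa
+//    }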
+
+// API represents an untyped mux for a swagger spec
+type API struct {
+ spec *loads.Document
+ analyzer *analysis.Spec
+ DefaultProduces string
+ DefaultConsumes string
+ consumers map[string]runtime.Consumer
+ producers map[string]runtime.Producer
+ authenticators map[string]runtime.Authenticator
+ authorizer runtime.Authorizer
+ operations map[string]map[string]runtime.OperationHandler
+ ServeError func(http.ResponseWriter, *http.Request, error)
+ Models map[string]func() interface{}
+ formats strfmt.Registry
+}
+
+// WithJSONDefaults loads the json defaults for this api
+func (d *API) WithJSONDefaults() *API {
+ d.DefaultConsumes = runtime.JSONMime
+ d.DefaultProduces = runtime.JSONMime
+ d.consumers[runtime.JSONMime] = runtime.JSONConsumer()
+ d.producers[runtime.JSONMime] = runtime.JSONProducer()
+ return d
+}
+
+// WithoutJSONDefaults clears the json defaults for this api
+func (d *API) WithoutJSONDefaults() *API {
+ d.DefaultConsumes = ""
+ d.DefaultProduces = ""
+ delete(d.consumers, runtime.JSONMime)
+ delete(d.producers, runtime.JSONMime)
+ return d
+}
+
+// Formats returns the registered string formats
+func (d *API) Formats() strfmt.Registry {
+ if d.formats == nil {
+ d.formats = strfmt.NewFormats()
+ }
+ return d.formats
+}
+
+// RegisterFormat registers a custom format validator
+func (d *API) RegisterFormat(name string, format strfmt.Format, validator strfmt.Validator) {
+ if d.formats == nil {
+ d.formats = strfmt.NewFormats()
+ }
+ d.formats.Add(name, format, validator)
+}
+
+// RegisterAuth registers an auth handler in this api
+func (d *API) RegisterAuth(scheme string, handler runtime.Authenticator) {
+ if d.authenticators == nil {
+ d.authenticators = make(map[string]runtime.Authenticator)
+ }
+ d.authenticators[scheme] = handler
+}
+
+// RegisterAuthorizer registers an authorizer handler in this api
+func (d *API) RegisterAuthorizer(handler runtime.Authorizer) {
+ d.authorizer = handler
+}
+
+// RegisterConsumer registers a consumer for a media type.
+func (d *API) RegisterConsumer(mediaType string, handler runtime.Consumer) {
+ if d.consumers == nil {
+ d.consumers = make(map[string]runtime.Consumer, 10)
+ }
+ d.consumers[strings.ToLower(mediaType)] = handler
+}
+
+// RegisterProducer registers a producer for a media type
+func (d *API) RegisterProducer(mediaType string, handler runtime.Producer) {
+ if d.producers == nil {
+ d.producers = make(map[string]runtime.Producer, 10)
+ }
+ d.producers[strings.ToLower(mediaType)] = handler
+}
+
+// RegisterOperation registers an operation handler for an operation name
+func (d *API) RegisterOperation(method, path string, handler runtime.OperationHandler) {
+ if d.operations == nil {
+ d.operations = make(map[string]map[string]runtime.OperationHandler, 30)
+ }
+ um := strings.ToUpper(method)
+ if b, ok := d.operations[um]; !ok || b == nil {
+ d.operations[um] = make(map[string]runtime.OperationHandler)
+ }
+ d.operations[um][path] = handler
+}
+
+// OperationHandlerFor returns the operation handler for the specified method and path if it can be found
+func (d *API) OperationHandlerFor(method, path string) (runtime.OperationHandler, bool) {
+ if d.operations == nil {
+ return nil, false
+ }
+ if pi, ok := d.operations[strings.ToUpper(method)]; ok {
+ h, ok := pi[path]
+ return h, ok
+ }
+ return nil, false
+}
+
+// ConsumersFor gets the consumers for the specified media types
+func (d *API) ConsumersFor(mediaTypes []string) map[string]runtime.Consumer {
+ result := make(map[string]runtime.Consumer)
+ for _, mt := range mediaTypes {
+ if consumer, ok := d.consumers[mt]; ok {
+ result[mt] = consumer
+ }
+ }
+ return result
+}
+
+// ProducersFor gets the producers for the specified media types
+func (d *API) ProducersFor(mediaTypes []string) map[string]runtime.Producer {
+ result := make(map[string]runtime.Producer)
+ for _, mt := range mediaTypes {
+ if producer, ok := d.producers[mt]; ok {
+ result[mt] = producer
+ }
+ }
+ return result
+}
+
+// AuthenticatorsFor gets the authenticators for the specified security schemes
+func (d *API) AuthenticatorsFor(schemes map[string]spec.SecurityScheme) map[string]runtime.Authenticator {
+ result := make(map[string]runtime.Authenticator)
+ for k := range schemes {
+ if a, ok := d.authenticators[k]; ok {
+ result[k] = a
+ }
+ }
+ return result
+}
+
+// Authorizer returns the registered authorizer
+func (d *API) Authorizer() runtime.Authorizer {
+ return d.authorizer
+}
+
+// Validate validates this API for any missing items
+func (d *API) Validate() error {
+ return d.validate()
+}
+
+// validate validates the registrations in this API against the spec analyzer
+func (d *API) validate() error {
+ var consumes []string
+ for k := range d.consumers {
+ consumes = append(consumes, k)
+ }
+
+ var produces []string
+ for k := range d.producers {
+ produces = append(produces, k)
+ }
+
+ var authenticators []string
+ for k := range d.authenticators {
+ authenticators = append(authenticators, k)
+ }
+
+ var operations []string
+ for m, v := range d.operations {
+ for p := range v {
+ operations = append(operations, fmt.Sprintf("%s %s", strings.ToUpper(m), p))
+ }
+ }
+
+ var definedAuths []string
+ for k := range d.spec.Spec().SecurityDefinitions {
+ definedAuths = append(definedAuths, k)
+ }
+
+ if err := d.verify("consumes", consumes, d.analyzer.RequiredConsumes()); err != nil {
+ return err
+ }
+ if err := d.verify("produces", produces, d.analyzer.RequiredProduces()); err != nil {
+ return err
+ }
+ if err := d.verify("operation", operations, d.analyzer.OperationMethodPaths()); err != nil {
+ return err
+ }
+
+ requiredAuths := d.analyzer.RequiredSecuritySchemes()
+ if err := d.verify("auth scheme", authenticators, requiredAuths); err != nil {
+ return err
+ }
+ if err := d.verify("security definitions", definedAuths, requiredAuths); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (d *API) verify(name string, registrations []string, expectations []string) error {
+ sort.Strings(registrations)
+ sort.Strings(expectations)
+
+ expected := map[string]struct{}{}
+ seen := map[string]struct{}{}
+
+ for _, v := range expectations {
+ expected[v] = struct{}{}
+ }
+
+ var unspecified []string
+ for _, v := range registrations {
+ seen[v] = struct{}{}
+ if _, ok := expected[v]; !ok {
+ unspecified = append(unspecified, v)
+ }
+ }
+
+ for k := range seen {
+ delete(expected, k)
+ }
+
+ var unregistered []string
+ for k := range expected {
+ unregistered = append(unregistered, k)
+ }
+ sort.Strings(unspecified)
+ sort.Strings(unregistered)
+
+ if len(unregistered) > 0 || len(unspecified) > 0 {
+ return &errors.APIVerificationFailed{
+ Section: name,
+ MissingSpecification: unspecified,
+ MissingRegistration: unregistered,
+ }
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/go-openapi/runtime/middleware/validation.go b/vendor/github.com/go-openapi/runtime/middleware/validation.go
new file mode 100644
index 000000000..1f0135b57
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/middleware/validation.go
@@ -0,0 +1,126 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package middleware
+
+import (
+ "mime"
+ "net/http"
+ "strings"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/swag"
+
+ "github.com/go-openapi/runtime"
+)
+
+type validation struct {
+ context *Context
+ result []error
+ request *http.Request
+ route *MatchedRoute
+ bound map[string]interface{}
+}
+
+// ContentType validates the content type of a request
+func validateContentType(allowed []string, actual string) error {
+ debugLog("validating content type for %q against [%s]", actual, strings.Join(allowed, ", "))
+ if len(allowed) == 0 {
+ return nil
+ }
+ mt, _, err := mime.ParseMediaType(actual)
+ if err != nil {
+ return errors.InvalidContentType(actual, allowed)
+ }
+ if swag.ContainsStringsCI(allowed, mt) {
+ return nil
+ }
+ if swag.ContainsStringsCI(allowed, "*/*") {
+ return nil
+ }
+ parts := strings.Split(actual, "/")
+ if len(parts) == 2 && swag.ContainsStringsCI(allowed, parts[0]+"/*") {
+ return nil
+ }
+ return errors.InvalidContentType(actual, allowed)
+}
+
+func validateRequest(ctx *Context, request *http.Request, route *MatchedRoute) *validation {
+ debugLog("validating request %s %s", request.Method, request.URL.EscapedPath())
+ validate := &validation{
+ context: ctx,
+ request: request,
+ route: route,
+ bound: make(map[string]interface{}),
+ }
+
+ validate.contentType()
+ if len(validate.result) == 0 {
+ validate.responseFormat()
+ }
+ if len(validate.result) == 0 {
+ validate.parameters()
+ }
+
+ return validate
+}
+
+func (v *validation) parameters() {
+ debugLog("validating request parameters for %s %s", v.request.Method, v.request.URL.EscapedPath())
+ if result := v.route.Binder.Bind(v.request, v.route.Params, v.route.Consumer, v.bound); result != nil {
+ if result.Error() == "validation failure list" {
+ for _, e := range result.(*errors.Validation).Value.([]interface{}) {
+ v.result = append(v.result, e.(error))
+ }
+ return
+ }
+ v.result = append(v.result, result)
+ }
+}
+
+func (v *validation) contentType() {
+ if len(v.result) == 0 && runtime.HasBody(v.request) {
+ debugLog("validating body content type for %s %s", v.request.Method, v.request.URL.EscapedPath())
+ ct, _, req, err := v.context.ContentType(v.request)
+ if err != nil {
+ v.result = append(v.result, err)
+ } else {
+ v.request = req
+ }
+
+ if len(v.result) == 0 {
+ if err := validateContentType(v.route.Consumes, ct); err != nil {
+ v.result = append(v.result, err)
+ }
+ }
+ if ct != "" && v.route.Consumer == nil {
+ cons, ok := v.route.Consumers[ct]
+ if !ok {
+ v.result = append(v.result, errors.New(500, "no consumer registered for %s", ct))
+ } else {
+ v.route.Consumer = cons
+ }
+ }
+ }
+}
+
+func (v *validation) responseFormat() {
+ // if the route provides values for Produces and no format could be identified then return an error.
+ // if the route does not specify values for Produces then treat the request as valid since the API designer
+ // chose not to specify the format for responses.
+ if str, rCtx := v.context.ResponseFormat(v.request, v.route.Produces); str == "" && len(v.route.Produces) > 0 {
+ v.request = rCtx
+ v.result = append(v.result, errors.InvalidResponseFormat(v.request.Header.Get(runtime.HeaderAccept), v.route.Produces))
+ }
+}
diff --git a/vendor/github.com/go-openapi/runtime/request.go b/vendor/github.com/go-openapi/runtime/request.go
new file mode 100644
index 000000000..078fda173
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/request.go
@@ -0,0 +1,139 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package runtime
+
+import (
+ "bufio"
+ "io"
+ "net/http"
+ "strings"
+
+ "github.com/go-openapi/swag"
+)
+
+// CanHaveBody returns true if this method can have a body
+func CanHaveBody(method string) bool {
+ mn := strings.ToUpper(method)
+ return mn == "POST" || mn == "PUT" || mn == "PATCH" || mn == "DELETE"
+}
+
+// IsSafe returns true if this is a request with a safe method
+func IsSafe(r *http.Request) bool {
+ mn := strings.ToUpper(r.Method)
+ return mn == "GET" || mn == "HEAD"
+}
+
+// AllowsBody returns true if the request allows for a body
+func AllowsBody(r *http.Request) bool {
+ mn := strings.ToUpper(r.Method)
+ return mn != "HEAD"
+}
+
+// HasBody returns true if this request has a body
+func HasBody(r *http.Request) bool {
+ // happy case: we have a content length set
+ if r.ContentLength > 0 {
+ return true
+ }
+
+ if r.Header.Get("content-length") != "" {
+ // the header is set but ContentLength was not positive, so it was
+ // explicitly set to 0: assume there is no body (and no Transfer-Encoding)
+ return false
+ }
+
+ rdr := newPeekingReader(r.Body)
+ r.Body = rdr
+ return rdr.HasContent()
+}
+
+func newPeekingReader(r io.ReadCloser) *peekingReader {
+ if r == nil {
+ return nil
+ }
+ return &peekingReader{
+ underlying: bufio.NewReader(r),
+ orig: r,
+ }
+}
+
+type peekingReader struct {
+ underlying interface {
+ Buffered() int
+ Peek(int) ([]byte, error)
+ Read([]byte) (int, error)
+ }
+ orig io.ReadCloser
+}
+
+func (p *peekingReader) HasContent() bool {
+ if p == nil {
+ return false
+ }
+ if p.underlying.Buffered() > 0 {
+ return true
+ }
+ b, err := p.underlying.Peek(1)
+ if err != nil {
+ return false
+ }
+ return len(b) > 0
+}
+
+func (p *peekingReader) Read(d []byte) (int, error) {
+ if p == nil {
+ return 0, io.EOF
+ }
+ return p.underlying.Read(d)
+}
+
+func (p *peekingReader) Close() error {
+ p.underlying = nil
+ if p.orig != nil {
+ return p.orig.Close()
+ }
+ return nil
+}
+
+// JSONRequest creates a new http request with json headers set
+func JSONRequest(method, urlStr string, body io.Reader) (*http.Request, error) {
+ req, err := http.NewRequest(method, urlStr, body)
+ if err != nil {
+ return nil, err
+ }
+ req.Header.Add(HeaderContentType, JSONMime)
+ req.Header.Add(HeaderAccept, JSONMime)
+ return req, nil
+}
+
+// Gettable for things with a method GetOK(string) (data []string, hasKey bool, hasValue bool)
+type Gettable interface {
+ GetOK(string) ([]string, bool, bool)
+}
+
+// ReadSingleValue reads a single value from the source
+func ReadSingleValue(values Gettable, name string) string {
+ vv, _, hv := values.GetOK(name)
+ if hv {
+ return vv[len(vv)-1]
+ }
+ return ""
+}
+
+// ReadCollectionValue reads a collection value from a string data source
+func ReadCollectionValue(values Gettable, name, collectionFormat string) []string {
+ v := ReadSingleValue(values, name)
+ return swag.SplitByFormat(v, collectionFormat)
+}
diff --git a/vendor/github.com/go-openapi/runtime/security/authenticator.go b/vendor/github.com/go-openapi/runtime/security/authenticator.go
new file mode 100644
index 000000000..c3ffdac7e
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/security/authenticator.go
@@ -0,0 +1,276 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package security
+
+import (
+ "context"
+ "net/http"
+ "strings"
+
+ "github.com/go-openapi/errors"
+
+ "github.com/go-openapi/runtime"
+)
+
+const (
+ query = "query"
+ header = "header"
+)
+
+// HttpAuthenticator is a function that authenticates an HTTP request
+func HttpAuthenticator(handler func(*http.Request) (bool, interface{}, error)) runtime.Authenticator {
+ return runtime.AuthenticatorFunc(func(params interface{}) (bool, interface{}, error) {
+ if request, ok := params.(*http.Request); ok {
+ return handler(request)
+ }
+ if scoped, ok := params.(*ScopedAuthRequest); ok {
+ return handler(scoped.Request)
+ }
+ return false, nil, nil
+ })
+}
+
+// ScopedAuthenticator is a function that authenticates an HTTP request against a list of valid scopes
+func ScopedAuthenticator(handler func(*ScopedAuthRequest) (bool, interface{}, error)) runtime.Authenticator {
+ return runtime.AuthenticatorFunc(func(params interface{}) (bool, interface{}, error) {
+ if request, ok := params.(*ScopedAuthRequest); ok {
+ return handler(request)
+ }
+ return false, nil, nil
+ })
+}
+
+// UserPassAuthentication authentication function
+type UserPassAuthentication func(string, string) (interface{}, error)
+
+// UserPassAuthenticationCtx authentication function with context.Context
+type UserPassAuthenticationCtx func(context.Context, string, string) (context.Context, interface{}, error)
+
+// TokenAuthentication authentication function
+type TokenAuthentication func(string) (interface{}, error)
+
+// TokenAuthenticationCtx authentication function with context.Context
+type TokenAuthenticationCtx func(context.Context, string) (context.Context, interface{}, error)
+
+// ScopedTokenAuthentication authentication function
+type ScopedTokenAuthentication func(string, []string) (interface{}, error)
+
+// ScopedTokenAuthenticationCtx authentication function with context.Context
+type ScopedTokenAuthenticationCtx func(context.Context, string, []string) (context.Context, interface{}, error)
+
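+// DefaultRealmName is the default realm used by the basic auth authenticators when no realm name is provided.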
+var DefaultRealmName = "API"
+
+type secCtxKey uint8
+
+const (
+ failedBasicAuth secCtxKey = iota
+ oauth2SchemeName
+)
+
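+// FailedBasicAuth returns the realm of a failed basic auth attempt recorded on the request, or the empty string if there was none.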
+func FailedBasicAuth(r *http.Request) string {
+ return FailedBasicAuthCtx(r.Context())
+}
+
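+// FailedBasicAuthCtx is the context.Context-based variant of FailedBasicAuth.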
+func FailedBasicAuthCtx(ctx context.Context) string {
+ v, ok := ctx.Value(failedBasicAuth).(string)
+ if !ok {
+ return ""
+ }
+ return v
+}
+
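+// OAuth2SchemeName returns the name of the OAuth2 scheme recorded on the request by BearerAuth, or the empty string if there was none.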
+func OAuth2SchemeName(r *http.Request) string {
+ return OAuth2SchemeNameCtx(r.Context())
+}
+
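+// OAuth2SchemeNameCtx is the context.Context-based variant of OAuth2SchemeName.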
+func OAuth2SchemeNameCtx(ctx context.Context) string {
+ v, ok := ctx.Value(oauth2SchemeName).(string)
+ if !ok {
+ return ""
+ }
+ return v
+}
+
+// BasicAuth creates a basic auth authenticator with the provided authentication function
+func BasicAuth(authenticate UserPassAuthentication) runtime.Authenticator {
+ return BasicAuthRealm(DefaultRealmName, authenticate)
+}
+
+// BasicAuthRealm creates a basic auth authenticator with the provided authentication function and realm name
+func BasicAuthRealm(realm string, authenticate UserPassAuthentication) runtime.Authenticator {
+ if realm == "" {
+ realm = DefaultRealmName
+ }
+
+ return HttpAuthenticator(func(r *http.Request) (bool, interface{}, error) {
+ if usr, pass, ok := r.BasicAuth(); ok {
+ p, err := authenticate(usr, pass)
+ if err != nil {
+ *r = *r.WithContext(context.WithValue(r.Context(), failedBasicAuth, realm))
+ }
+ return true, p, err
+ }
+ *r = *r.WithContext(context.WithValue(r.Context(), failedBasicAuth, realm))
+ return false, nil, nil
+ })
+}
+
+// BasicAuthCtx creates a basic auth authenticator with the provided authentication function with support for context.Context
+func BasicAuthCtx(authenticate UserPassAuthenticationCtx) runtime.Authenticator {
+ return BasicAuthRealmCtx(DefaultRealmName, authenticate)
+}
+
+// BasicAuthRealmCtx creates a basic auth authenticator with the provided authentication function and realm name with support for context.Context
+func BasicAuthRealmCtx(realm string, authenticate UserPassAuthenticationCtx) runtime.Authenticator {
+ if realm == "" {
+ realm = DefaultRealmName
+ }
+
+ return HttpAuthenticator(func(r *http.Request) (bool, interface{}, error) {
+ if usr, pass, ok := r.BasicAuth(); ok {
+ ctx, p, err := authenticate(r.Context(), usr, pass)
+ if err != nil {
+ ctx = context.WithValue(ctx, failedBasicAuth, realm)
+ }
+ *r = *r.WithContext(ctx)
+ return true, p, err
+ }
+ *r = *r.WithContext(context.WithValue(r.Context(), failedBasicAuth, realm))
+ return false, nil, nil
+ })
+}
+
+// APIKeyAuth creates an authenticator that uses a token for authorization.
+// This token can be obtained from either a header or a query string
+func APIKeyAuth(name, in string, authenticate TokenAuthentication) runtime.Authenticator {
+ inl := strings.ToLower(in)
+ if inl != query && inl != header {
+ // panic because this is most likely a typo
+ panic(errors.New(500, "api key auth: in value needs to be either \"query\" or \"header\"."))
+ }
+
+ var getToken func(*http.Request) string
+ switch inl {
+ case header:
+ getToken = func(r *http.Request) string { return r.Header.Get(name) }
+ case query:
+ getToken = func(r *http.Request) string { return r.URL.Query().Get(name) }
+ }
+
+ return HttpAuthenticator(func(r *http.Request) (bool, interface{}, error) {
+ token := getToken(r)
+ if token == "" {
+ return false, nil, nil
+ }
+
+ p, err := authenticate(token)
+ return true, p, err
+ })
+}
+
+// APIKeyAuthCtx creates an authenticator that uses a token for authorization with support for context.Context.
+// This token can be obtained from either a header or a query string
+func APIKeyAuthCtx(name, in string, authenticate TokenAuthenticationCtx) runtime.Authenticator {
+ inl := strings.ToLower(in)
+ if inl != query && inl != header {
+ // panic because this is most likely a typo
+ panic(errors.New(500, "api key auth: in value needs to be either \"query\" or \"header\"."))
+ }
+
+ var getToken func(*http.Request) string
+ switch inl {
+ case header:
+ getToken = func(r *http.Request) string { return r.Header.Get(name) }
+ case query:
+ getToken = func(r *http.Request) string { return r.URL.Query().Get(name) }
+ }
+
+ return HttpAuthenticator(func(r *http.Request) (bool, interface{}, error) {
+ token := getToken(r)
+ if token == "" {
+ return false, nil, nil
+ }
+
+ ctx, p, err := authenticate(r.Context(), token)
+ *r = *r.WithContext(ctx)
+ return true, p, err
+ })
+}
+
+// ScopedAuthRequest contains both an HTTP request and the required scopes for a particular operation
+type ScopedAuthRequest struct {
+ Request *http.Request
+ RequiredScopes []string
+}
+
+// BearerAuth for use with oauth2 flows
+func BearerAuth(name string, authenticate ScopedTokenAuthentication) runtime.Authenticator {
+ const prefix = "Bearer "
+ return ScopedAuthenticator(func(r *ScopedAuthRequest) (bool, interface{}, error) {
+ var token string
+ hdr := r.Request.Header.Get(runtime.HeaderAuthorization)
+ if strings.HasPrefix(hdr, prefix) {
+ token = strings.TrimPrefix(hdr, prefix)
+ }
+ if token == "" {
+ qs := r.Request.URL.Query()
+ token = qs.Get("access_token")
+ }
+ //#nosec
+ ct, _, _ := runtime.ContentType(r.Request.Header)
+ if token == "" && (ct == "application/x-www-form-urlencoded" || ct == "multipart/form-data") {
+ token = r.Request.FormValue("access_token")
+ }
+
+ if token == "" {
+ return false, nil, nil
+ }
+
+ rctx := context.WithValue(r.Request.Context(), oauth2SchemeName, name)
+ *r.Request = *r.Request.WithContext(rctx)
+ p, err := authenticate(token, r.RequiredScopes)
+ return true, p, err
+ })
+}
+
+// BearerAuthCtx for use with oauth2 flows with support for context.Context.
+func BearerAuthCtx(name string, authenticate ScopedTokenAuthenticationCtx) runtime.Authenticator {
+ const prefix = "Bearer "
+ return ScopedAuthenticator(func(r *ScopedAuthRequest) (bool, interface{}, error) {
+ var token string
+ hdr := r.Request.Header.Get(runtime.HeaderAuthorization)
+ if strings.HasPrefix(hdr, prefix) {
+ token = strings.TrimPrefix(hdr, prefix)
+ }
+ if token == "" {
+ qs := r.Request.URL.Query()
+ token = qs.Get("access_token")
+ }
+ //#nosec
+ ct, _, _ := runtime.ContentType(r.Request.Header)
+ if token == "" && (ct == "application/x-www-form-urlencoded" || ct == "multipart/form-data") {
+ token = r.Request.FormValue("access_token")
+ }
+
+ if token == "" {
+ return false, nil, nil
+ }
+
+ rctx := context.WithValue(r.Request.Context(), oauth2SchemeName, name)
+ ctx, p, err := authenticate(rctx, token, r.RequiredScopes)
+ *r.Request = *r.Request.WithContext(ctx)
+ return true, p, err
+ })
+}
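
A brief, editorial usage sketch for the authenticators above; the credentials and the errors.Unauthenticated call are illustrative and should be adapted to the real principal type:

package main

import (
    "fmt"
    "net/http"

    "github.com/go-openapi/errors"
    "github.com/go-openapi/runtime/security"
)

func main() {
    // Build a basic auth authenticator around a toy credential check.
    auth := security.BasicAuth(func(user, pass string) (interface{}, error) {
        if user == "admin" && pass == "secret" { // illustrative credentials
            return user, nil // the principal handed on to the operation
        }
        return nil, errors.Unauthenticated("basic")
    })

    req, _ := http.NewRequest(http.MethodGet, "http://example.invalid/", nil)
    req.SetBasicAuth("admin", "secret")

    // Authenticate reports whether the scheme applied, the principal, and any error.
    applies, principal, err := auth.Authenticate(req)
    fmt.Println(applies, principal, err) // true admin <nil>
}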
diff --git a/vendor/github.com/go-openapi/runtime/security/authorizer.go b/vendor/github.com/go-openapi/runtime/security/authorizer.go
new file mode 100644
index 000000000..00c1a4d6a
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/security/authorizer.go
@@ -0,0 +1,27 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package security
+
+import (
+ "net/http"
+
+ "github.com/go-openapi/runtime"
+)
+
+// Authorized provides a default implementation of the Authorizer interface where all
+// requests are authorized (authorization always succeeds)
+func Authorized() runtime.Authorizer {
+ return runtime.AuthorizerFunc(func(_ *http.Request, _ interface{}) error { return nil })
+}
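
Authorized is deliberately permissive; a stricter policy plugs into the same runtime.AuthorizerFunc adapter. A hedged sketch, with an illustrative principal check:

package authz

import (
    "net/http"

    "github.com/go-openapi/errors"
    "github.com/go-openapi/runtime"
)

// AdminOnly authorizes only the (illustrative) "admin" principal.
func AdminOnly() runtime.Authorizer {
    return runtime.AuthorizerFunc(func(_ *http.Request, principal interface{}) error {
        if p, ok := principal.(string); ok && p == "admin" {
            return nil
        }
        return errors.New(http.StatusForbidden, "forbidden")
    })
}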
diff --git a/vendor/github.com/go-openapi/runtime/statuses.go b/vendor/github.com/go-openapi/runtime/statuses.go
new file mode 100644
index 000000000..3b011a0bf
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/statuses.go
@@ -0,0 +1,90 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package runtime
+
+// Statuses maps the most common HTTP status codes to a default message,
+// taken from https://httpstatuses.com/
+var Statuses = map[int]string{
+ 100: "Continue",
+ 101: "Switching Protocols",
+ 102: "Processing",
+ 103: "Checkpoint",
+ 122: "URI too long",
+ 200: "OK",
+ 201: "Created",
+ 202: "Accepted",
+ 203: "Request Processed",
+ 204: "No Content",
+ 205: "Reset Content",
+ 206: "Partial Content",
+ 207: "Multi-Status",
+ 208: "Already Reported",
+ 226: "IM Used",
+ 300: "Multiple Choices",
+ 301: "Moved Permanently",
+ 302: "Found",
+ 303: "See Other",
+ 304: "Not Modified",
+ 305: "Use Proxy",
+ 306: "Switch Proxy",
+ 307: "Temporary Redirect",
+ 308: "Permanent Redirect",
+ 400: "Bad Request",
+ 401: "Unauthorized",
+ 402: "Payment Required",
+ 403: "Forbidden",
+ 404: "Not Found",
+ 405: "Method Not Allowed",
+ 406: "Not Acceptable",
+ 407: "Proxy Authentication Required",
+ 408: "Request Timeout",
+ 409: "Conflict",
+ 410: "Gone",
+ 411: "Length Required",
+ 412: "Precondition Failed",
+ 413: "Request Entity Too Large",
+ 414: "Request-URI Too Long",
+ 415: "Unsupported Media Type",
+ 416: "Request Range Not Satisfiable",
+ 417: "Expectation Failed",
+ 418: "I'm a teapot",
+ 420: "Enhance Your Calm",
+ 422: "Unprocessable Entity",
+ 423: "Locked",
+ 424: "Failed Dependency",
+ 426: "Upgrade Required",
+ 428: "Precondition Required",
+ 429: "Too Many Requests",
+ 431: "Request Header Fields Too Large",
+ 444: "No Response",
+ 449: "Retry With",
+ 450: "Blocked by Windows Parental Controls",
+ 451: "Wrong Exchange Server",
+ 499: "Client Closed Request",
+ 500: "Internal Server Error",
+ 501: "Not Implemented",
+ 502: "Bad Gateway",
+ 503: "Service Unavailable",
+ 504: "Gateway Timeout",
+ 505: "HTTP Version Not Supported",
+ 506: "Variant Also Negotiates",
+ 507: "Insufficient Storage",
+ 508: "Loop Detected",
+ 509: "Bandwidth Limit Exceeded",
+ 510: "Not Extended",
+ 511: "Network Authentication Required",
+ 598: "Network read timeout error",
+ 599: "Network connect timeout error",
+}
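
A small editorial sketch of how this table is typically consulted, falling back to the standard library for codes it does not cover:

package main

import (
    "fmt"
    "net/http"

    "github.com/go-openapi/runtime"
)

// statusText prefers the Statuses table and falls back to net/http.
func statusText(code int) string {
    if msg, ok := runtime.Statuses[code]; ok {
        return msg
    }
    return http.StatusText(code)
}

func main() {
    fmt.Println(statusText(429)) // Too Many Requests
    fmt.Println(statusText(226)) // IM Used
}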
diff --git a/vendor/github.com/go-openapi/runtime/text.go b/vendor/github.com/go-openapi/runtime/text.go
new file mode 100644
index 000000000..f33320b7d
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/text.go
@@ -0,0 +1,116 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package runtime
+
+import (
+ "bytes"
+ "encoding"
+ "errors"
+ "fmt"
+ "io"
+ "reflect"
+
+ "github.com/go-openapi/swag"
+)
+
+// TextConsumer creates a new text consumer
+func TextConsumer() Consumer {
+ return ConsumerFunc(func(reader io.Reader, data interface{}) error {
+ if reader == nil {
+ return errors.New("TextConsumer requires a reader") // early exit
+ }
+
+ buf := new(bytes.Buffer)
+ _, err := buf.ReadFrom(reader)
+ if err != nil {
+ return err
+ }
+ b := buf.Bytes()
+
+ // If the buffer is empty, no need to unmarshal it, which causes a panic.
+ if len(b) == 0 {
+ return nil
+ }
+
+ if tu, ok := data.(encoding.TextUnmarshaler); ok {
+ err := tu.UnmarshalText(b)
+ if err != nil {
+ return fmt.Errorf("text consumer: %v", err)
+ }
+
+ return nil
+ }
+
+ t := reflect.TypeOf(data)
+ if data != nil && t.Kind() == reflect.Ptr {
+ v := reflect.Indirect(reflect.ValueOf(data))
+ if t.Elem().Kind() == reflect.String {
+ v.SetString(string(b))
+ return nil
+ }
+ }
+
+ return fmt.Errorf("%v (%T) is not supported by the TextConsumer; %s",
+ data, data, "this can be resolved by implementing the encoding.TextUnmarshaler interface")
+ })
+}
+
+// TextProducer creates a new text producer
+func TextProducer() Producer {
+ return ProducerFunc(func(writer io.Writer, data interface{}) error {
+ if writer == nil {
+ return errors.New("TextProducer requires a writer") // early exit
+ }
+
+ if data == nil {
+ return errors.New("no data given to produce text from")
+ }
+
+ if tm, ok := data.(encoding.TextMarshaler); ok {
+ txt, err := tm.MarshalText()
+ if err != nil {
+ return fmt.Errorf("text producer: %v", err)
+ }
+ _, err = writer.Write(txt)
+ return err
+ }
+
+ if str, ok := data.(error); ok {
+ _, err := writer.Write([]byte(str.Error()))
+ return err
+ }
+
+ if str, ok := data.(fmt.Stringer); ok {
+ _, err := writer.Write([]byte(str.String()))
+ return err
+ }
+
+ v := reflect.Indirect(reflect.ValueOf(data))
+ if t := v.Type(); t.Kind() == reflect.Struct || t.Kind() == reflect.Slice {
+ b, err := swag.WriteJSON(data)
+ if err != nil {
+ return err
+ }
+ _, err = writer.Write(b)
+ return err
+ }
+ if v.Kind() != reflect.String {
+ return fmt.Errorf("%T is not a supported type by the TextProducer", data)
+ }
+
+ _, err := writer.Write([]byte(v.String()))
+ return err
+ })
+}
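
An editorial round-trip sketch for the two functions above, using a plain string target:

package main

import (
    "bytes"
    "fmt"

    "github.com/go-openapi/runtime"
)

func main() {
    // Produce: a plain string is written as-is.
    var buf bytes.Buffer
    if err := runtime.TextProducer().Produce(&buf, "hello, text"); err != nil {
        panic(err)
    }

    // Consume: a *string target (or any encoding.TextUnmarshaler) receives the body.
    var out string
    if err := runtime.TextConsumer().Consume(&buf, &out); err != nil {
        panic(err)
    }
    fmt.Println(out) // hello, text
}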
diff --git a/vendor/github.com/go-openapi/runtime/values.go b/vendor/github.com/go-openapi/runtime/values.go
new file mode 100644
index 000000000..11f5732af
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/values.go
@@ -0,0 +1,19 @@
+package runtime
+
+// Values typically represent parameters on a http request.
+type Values map[string][]string
+
+// GetOK returns the values collection for the given key.
+// When the key is present in the map it will return true for hasKey.
+// When the value is not empty it will return true for hasValue.
+func (v Values) GetOK(key string) (value []string, hasKey bool, hasValue bool) {
+ value, hasKey = v[key]
+ if !hasKey {
+ return
+ }
+ if len(value) == 0 {
+ return
+ }
+ hasValue = true
+ return
+}
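
An editorial sketch of the three-way distinction GetOK draws between a missing key, a present-but-empty key, and a present value:

package main

import (
    "fmt"

    "github.com/go-openapi/runtime"
)

func main() {
    v := runtime.Values{"a": {"1"}, "b": {}}

    val, hasKey, hasValue := v.GetOK("a")
    fmt.Println(val, hasKey, hasValue) // [1] true true

    _, hasKey, hasValue = v.GetOK("b") // present but empty
    fmt.Println(hasKey, hasValue)      // true false

    _, hasKey, hasValue = v.GetOK("c") // absent entirely
    fmt.Println(hasKey, hasValue)      // false false
}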
diff --git a/vendor/github.com/go-openapi/runtime/xml.go b/vendor/github.com/go-openapi/runtime/xml.go
new file mode 100644
index 000000000..821c7393d
--- /dev/null
+++ b/vendor/github.com/go-openapi/runtime/xml.go
@@ -0,0 +1,36 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package runtime
+
+import (
+ "encoding/xml"
+ "io"
+)
+
+// XMLConsumer creates a new XML consumer
+func XMLConsumer() Consumer {
+ return ConsumerFunc(func(reader io.Reader, data interface{}) error {
+ dec := xml.NewDecoder(reader)
+ return dec.Decode(data)
+ })
+}
+
+// XMLProducer creates a new XML producer
+func XMLProducer() Producer {
+ return ProducerFunc(func(writer io.Writer, data interface{}) error {
+ enc := xml.NewEncoder(writer)
+ return enc.Encode(data)
+ })
+}
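
An editorial round-trip sketch for the XML pair above; the note type is illustrative:

package main

import (
    "bytes"
    "encoding/xml"
    "fmt"

    "github.com/go-openapi/runtime"
)

type note struct {
    XMLName xml.Name `xml:"note"`
    Body    string   `xml:"body"`
}

func main() {
    var buf bytes.Buffer
    if err := runtime.XMLProducer().Produce(&buf, note{Body: "hi"}); err != nil {
        panic(err)
    }
    fmt.Println(buf.String()) // <note><body>hi</body></note>

    var n note
    if err := runtime.XMLConsumer().Consume(&buf, &n); err != nil {
        panic(err)
    }
    fmt.Println(n.Body) // hi
}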
diff --git a/vendor/github.com/go-openapi/spec/.editorconfig b/vendor/github.com/go-openapi/spec/.editorconfig
new file mode 100644
index 000000000..3152da69a
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/.editorconfig
@@ -0,0 +1,26 @@
+# top-most EditorConfig file
+root = true
+
+# Unix-style newlines with a newline ending every file
+[*]
+end_of_line = lf
+insert_final_newline = true
+indent_style = space
+indent_size = 2
+trim_trailing_whitespace = true
+
+# Set default charset
+[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
+charset = utf-8
+
+# Tab indentation (no size specified)
+[*.go]
+indent_style = tab
+
+[*.md]
+trim_trailing_whitespace = false
+
+# Matches the exact files either package.json or .travis.yml
+[{package.json,.travis.yml}]
+indent_style = space
+indent_size = 2
diff --git a/vendor/github.com/go-openapi/spec/.gitignore b/vendor/github.com/go-openapi/spec/.gitignore
new file mode 100644
index 000000000..dd91ed6a0
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/.gitignore
@@ -0,0 +1,2 @@
+secrets.yml
+coverage.out
diff --git a/vendor/github.com/go-openapi/spec/.golangci.yml b/vendor/github.com/go-openapi/spec/.golangci.yml
new file mode 100644
index 000000000..835d55e74
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/.golangci.yml
@@ -0,0 +1,42 @@
+linters-settings:
+ govet:
+ check-shadowing: true
+ golint:
+ min-confidence: 0
+ gocyclo:
+ min-complexity: 45
+ maligned:
+ suggest-new: true
+ dupl:
+ threshold: 200
+ goconst:
+ min-len: 2
+ min-occurrences: 2
+
+linters:
+ enable-all: true
+ disable:
+ - maligned
+ - unparam
+ - lll
+ - gochecknoinits
+ - gochecknoglobals
+ - funlen
+ - godox
+ - gocognit
+ - whitespace
+ - wsl
+ - wrapcheck
+ - testpackage
+ - nlreturn
+ - gomnd
+ - exhaustivestruct
+ - goerr113
+ - errorlint
+ - nestif
+ - godot
+ - gofumpt
+ - paralleltest
+ - tparallel
+ - thelper
+ - ifshort
diff --git a/vendor/github.com/go-openapi/spec/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/spec/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..9322b065e
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/CODE_OF_CONDUCT.md
@@ -0,0 +1,74 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of experience,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at ivan+abuse@flanders.co.nz. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/spec/LICENSE b/vendor/github.com/go-openapi/spec/LICENSE
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/go-openapi/spec/README.md b/vendor/github.com/go-openapi/spec/README.md
new file mode 100644
index 000000000..18782c6da
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/README.md
@@ -0,0 +1,34 @@
+# OAI object model
+
+[![Build Status](https://travis-ci.org/go-openapi/spec.svg?branch=master)](https://travis-ci.org/go-openapi/spec)
+<!-- [![Build status](https://ci.appveyor.com/api/projects/status/x377t5o9ennm847o/branch/master?svg=true)](https://ci.appveyor.com/project/casualjim/go-openapi/spec/branch/master) -->
+[![codecov](https://codecov.io/gh/go-openapi/spec/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/spec)
+[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
+[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/spec/master/LICENSE)
+[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/spec.svg)](https://pkg.go.dev/github.com/go-openapi/spec)
+[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/spec)](https://goreportcard.com/report/github.com/go-openapi/spec)
+
+The object model for OpenAPI specification documents.
+
+### FAQ
+
+* What does this do?
+
+> 1. This package knows how to marshal and unmarshal Swagger API specifications into a Go object model
+> 2. It knows how to resolve $ref pointers and expand them into a single root document
+
+* How does it play with the rest of the go-openapi packages?
+
+> 1. This package is at the core of the go-openapi suite of packages and [code generator](https://github.com/go-swagger/go-swagger)
+> 2. There is a [spec loading package](https://github.com/go-openapi/loads) to fetch specs as JSON or YAML from local or remote locations
+> 3. There is a [spec validation package](https://github.com/go-openapi/validate) built on top of it
+> 4. There is a [spec analysis package](https://github.com/go-openapi/analysis) built on top of it, to analyze, flatten, fix and merge spec documents
+
+* Does this library support OpenAPI 3?
+
+> No.
+> This package currently only supports OpenAPI 2.0 (aka Swagger 2.0).
+> There is no plan to make it evolve toward supporting OpenAPI 3.x.
+> This [discussion thread](https://github.com/go-openapi/spec/issues/21) relates the full story.
+>
+> An early attempt to support Swagger 3 may be found at: https://github.com/go-openapi/spec3
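
An editorial sketch of the typical pairing with the loads package mentioned above; the path is illustrative, and the exact loads API should be checked against that package's documentation:

package main

import (
    "fmt"
    "log"

    "github.com/go-openapi/loads"
)

func main() {
    // Load a Swagger 2.0 document from disk (illustrative path).
    doc, err := loads.Spec("./swagger.yaml")
    if err != nil {
        log.Fatal(err)
    }

    // Expanded resolves $ref pointers into a single root document.
    expanded, err := doc.Expanded()
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(expanded.Spec().Info.Title)
}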
diff --git a/vendor/github.com/go-openapi/spec/appveyor.yml b/vendor/github.com/go-openapi/spec/appveyor.yml
new file mode 100644
index 000000000..090359391
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/appveyor.yml
@@ -0,0 +1,32 @@
+version: "0.1.{build}"
+
+clone_folder: C:\go-openapi\spec
+shallow_clone: true # for startup speed
+pull_requests:
+ do_not_increment_build_number: true
+
+#skip_tags: true
+#skip_branch_with_pr: true
+
+# appveyor.yml
+build: off
+
+environment:
+ GOPATH: c:\gopath
+
+stack: go 1.15
+
+test_script:
+ - go test -v -timeout 20m ./...
+
+deploy: off
+
+notifications:
+ - provider: Slack
+ incoming_webhook: https://hooks.slack.com/services/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
+ auth_token:
+ secure: Sf7kZf7ZGbnwWUMpffHwMu5A0cHkLK2MYY32LNTPj4+/3qC3Ghl7+9v4TSLOqOlCwdRNjOGblAq7s+GDJed6/xgRQl1JtCi1klzZNrYX4q01pgTPvvGcwbBkIYgeMaPeIRcK9OZnud7sRXdttozgTOpytps2U6Js32ip7uj5mHSg2ub0FwoSJwlS6dbezZ8+eDhoha0F/guY99BEwx8Bd+zROrT2TFGsSGOFGN6wFc7moCqTHO/YkWib13a2QNXqOxCCVBy/lt76Wp+JkeFppjHlzs/2lP3EAk13RIUAaesdEUHvIHrzCyNJEd3/+KO2DzsWOYfpktd+KBCvgaYOsoo7ubdT3IROeAegZdCgo/6xgCEsmFc9ZcqCfN5yNx2A+BZ2Vwmpws+bQ1E1+B5HDzzaiLcYfG4X2O210QVGVDLWsv1jqD+uPYeHY2WRfh5ZsIUFvaqgUEnwHwrK44/8REAhQavt1QAj5uJpsRd7CkRVPWRNK+yIky+wgbVUFEchRNmS55E7QWf+W4+4QZkQi7vUTMc9nbTUu2Es9NfvfudOpM2wZbn98fjpb/qq/nRv6Bk+ca+7XD5/IgNLMbWp2ouDdzbiHLCOfDUiHiDJhLfFZx9Bwo7ZwfzeOlbrQX66bx7xRKYmOe4DLrXhNcpbsMa8qbfxlZRCmYbubB/Y8h4=
+ channel: bots
+ on_build_success: false
+ on_build_failure: true
+ on_build_status_changed: true
diff --git a/vendor/github.com/go-openapi/spec/bindata.go b/vendor/github.com/go-openapi/spec/bindata.go
new file mode 100644
index 000000000..afc83850c
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/bindata.go
@@ -0,0 +1,297 @@
+// Code generated by go-bindata. DO NOT EDIT.
+// sources:
+// schemas/jsonschema-draft-04.json (4.357kB)
+// schemas/v2/schema.json (40.248kB)
+
+package spec
+
+import (
+ "bytes"
+ "compress/gzip"
+ "crypto/sha256"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+)
+
+func bindataRead(data []byte, name string) ([]byte, error) {
+ gz, err := gzip.NewReader(bytes.NewBuffer(data))
+ if err != nil {
+ return nil, fmt.Errorf("read %q: %v", name, err)
+ }
+
+ var buf bytes.Buffer
+ _, err = io.Copy(&buf, gz)
+ clErr := gz.Close()
+
+ if err != nil {
+ return nil, fmt.Errorf("read %q: %v", name, err)
+ }
+ if clErr != nil {
+ return nil, clErr
+ }
+
+ return buf.Bytes(), nil
+}
+
+type asset struct {
+ bytes []byte
+ info os.FileInfo
+ digest [sha256.Size]byte
+}
+
+type bindataFileInfo struct {
+ name string
+ size int64
+ mode os.FileMode
+ modTime time.Time
+}
+
+func (fi bindataFileInfo) Name() string {
+ return fi.name
+}
+func (fi bindataFileInfo) Size() int64 {
+ return fi.size
+}
+func (fi bindataFileInfo) Mode() os.FileMode {
+ return fi.mode
+}
+func (fi bindataFileInfo) ModTime() time.Time {
+ return fi.modTime
+}
+func (fi bindataFileInfo) IsDir() bool {
+ return false
+}
+func (fi bindataFileInfo) Sys() interface{} {
+ return nil
+}
+
+var _jsonschemaDraft04Json = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xc4\x57\x3d\x6f\xdb\x3c\x10\xde\xf3\x2b\x08\x26\x63\xf2\x2a\x2f\xd0\xc9\x5b\xd1\x2e\x01\x5a\x34\x43\x37\x23\x03\x6d\x9d\x6c\x06\x14\xa9\x50\x54\x60\xc3\xd0\x7f\x2f\x28\x4a\x14\x29\x91\x92\x2d\xa7\x8d\x97\x28\xbc\xaf\xe7\x8e\xf7\xc5\xd3\x0d\x42\x08\x61\x9a\xe2\x15\xc2\x7b\xa5\x8a\x55\x92\xbc\x96\x82\x3f\x94\xdb\x3d\xe4\xe4\x3f\x21\x77\x49\x2a\x49\xa6\x1e\x1e\xbf\x24\xe6\xec\x16\xdf\x1b\xa1\x3b\xf3\xff\x02\xc9\x14\xca\xad\xa4\x85\xa2\x82\x6b\xe9\x6f\x42\x02\x32\x2c\x28\x07\x45\x5a\x15\x3d\x77\x46\x39\xd5\xcc\x25\x5e\x21\x83\xb8\x21\x18\xb6\xaf\x52\x92\xa3\x47\x68\x88\xea\x58\x80\x56\x4e\x1a\xf2\xbd\x4f\xcc\x29\x7f\x52\x90\x6b\x7d\xff\x0f\x48\xb4\x3d\x3f\x21\x7c\x27\x21\xd3\x2a\x6e\x31\xaa\x2d\x53\xdd\xf3\xe3\x42\x94\x54\xd1\x77\x78\xe2\x0a\x76\x20\xe3\x20\x68\xcb\x30\x86\x41\xf3\x2a\xc7\x2b\xf4\x78\x8e\xfe\xef\x90\x91\x8a\xa9\xc7\xb1\x1d\xc2\xd8\x2f\x0d\x75\xed\xc1\x4e\x9c\xc8\x25\x43\xac\xa8\xbe\xd7\xcc\xa9\xd1\xa9\x21\xa0\x1a\xbd\x04\x61\x94\x34\x2f\x18\xfc\x3e\x16\x50\x8e\x4d\x03\x6f\x1c\x58\xdb\x48\x23\xbc\x11\x82\x01\xe1\xfa\xd3\x3a\x8e\x30\xaf\x18\x33\x7f\xf3\x8d\x39\x11\x9b\x57\xd8\x2a\xfd\x55\x2a\x49\xf9\x0e\xc7\xec\x37\xd4\x25\xf7\xec\x5c\x66\xc7\xd7\x99\xaa\xcf\x4f\x89\x8a\xd3\xb7\x0a\x3a\xaa\x92\x15\xf4\x30\x6f\x1c\xb0\xd6\x46\xe7\x98\x39\x2d\xa4\x28\x40\x2a\x3a\x88\x9e\x29\xba\x88\x37\x2d\xca\x60\x38\xfa\xba\x5b\x20\xac\xa8\x62\xb0\x4c\xd4\xaf\xda\x45\x0a\xba\x5c\x3b\xb9\xc7\x79\xc5\x14\x2d\x18\x34\x19\x1c\x51\xdb\x25\x4d\xb4\x7e\x06\x14\x38\x6c\x59\x55\xd2\x77\xf8\x69\x59\xfc\x7b\x73\xed\x93\x43\xcb\x32\x6d\x3c\x28\xdc\x1b\x9a\xd3\x62\xab\xc2\x27\xf7\x41\xc9\x08\x2b\x23\x08\xad\x13\x57\x21\x9c\xd3\x72\x0d\x42\x72\xf8\x01\x7c\xa7\xf6\x83\xce\x39\xd7\x82\x3c\x1f\x2f\xd6\x60\x1b\xa2\xdf\x35\x89\x52\x20\xe7\x73\x74\xe0\x66\x26\x64\x4e\xb4\x97\x58\xc2\x0e\x0e\xe1\x60\x92\x34\x6d\xa0\x10\xd6\xb5\x83\x61\x27\xe6\x47\xd3\x89\xbd\x63\xfd\x3b\x8d\x03\x3d\x6c\x42\x2d\x5b\x70\xee\xe8\xdf\x4b\xf4\x66\x4e\xe1\x01\x45\x17\x80\x74\xad\x4f\xc3\xf3\xae\xc6\x1d\xc6\xd7\xc2\xce\xc9\xe1\x29\x30\x86\x2f\x4a\xa6\x4b\x15\x84\x73\xc9\x6f\xfd\x7f\xa5\x6e\x9e\xbd\xf1\xb0\xd4\xdd\x45\x5a\xc2\x3e\x4b\x78\xab\xa8\x84\x74\x4a\x91\x3b\x92\x23\x05\xf2\x1c\x1e\x7b\xf3\x09\xf8\xcf\xab\x24\xb6\x60\xa2\xe8\x4c\x9f\x75\x77\xaa\x8c\xe6\x01\x45\x36\x86\xcf\xc3\x63\x3a\xea\xd4\x8d\x7e\x06\xac\x14\x0a\xe0\x29\xf0\xed\x07\x22\x1a\x65\xda\x44\xae\xa2\x73\x1a\xe6\x90\x69\xa2\x8c\x46\xb2\x2f\xde\x49\x38\x08\xed\xfe\xfd\x41\xaf\x9f\xa9\x55\xd7\xdd\x22\x8d\xfa\x45\x63\xc5\x0f\x80\xf3\xb4\x08\xd6\x79\x30\x9e\x93\xee\x59\xa6\xd0\x4b\xee\x22\xe3\x33\xc1\x3a\x27\x68\x36\x78\x7e\x87\x0a\x06\xd5\x2e\x20\xd3\xaf\x15\xfb\xd8\x3b\x73\x14\xbb\x92\xed\x05\x5d\x2e\x29\x38\x2c\x94\xe4\x42\x45\x5e\xd3\xb5\x7d\xdf\x47\xca\x38\xb4\x5c\xaf\xfb\x7d\xdd\x6d\xf4\xa1\x2d\x77\xdd\x2f\xce\x6d\xc4\x7b\x8b\x4e\x67\xa9\x6f\xfe\x04\x00\x00\xff\xff\xb1\xd1\x27\x78\x05\x11\x00\x00")
+
+func jsonschemaDraft04JsonBytes() ([]byte, error) {
+ return bindataRead(
+ _jsonschemaDraft04Json,
+ "jsonschema-draft-04.json",
+ )
+}
+
+func jsonschemaDraft04Json() (*asset, error) {
+ bytes, err := jsonschemaDraft04JsonBytes()
+ if err != nil {
+ return nil, err
+ }
+
+ info := bindataFileInfo{name: "jsonschema-draft-04.json", size: 4357, mode: os.FileMode(0640), modTime: time.Unix(1568963823, 0)}
+ a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xe1, 0x48, 0x9d, 0xb, 0x47, 0x55, 0xf0, 0x27, 0x93, 0x30, 0x25, 0x91, 0xd3, 0xfc, 0xb8, 0xf0, 0x7b, 0x68, 0x93, 0xa8, 0x2a, 0x94, 0xf2, 0x48, 0x95, 0xf8, 0xe4, 0xed, 0xf1, 0x1b, 0x82, 0xe2}}
+ return a, nil
+}
+
+var _v2SchemaJson = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xec\x5d\x4f\x93\xdb\x36\xb2\xbf\xfb\x53\xa0\x14\x57\xd9\xae\xd8\x92\xe3\xf7\x2e\xcf\x97\xd4\xbc\xd8\x49\x66\x37\x5e\x4f\x79\x26\xbb\x87\x78\x5c\x05\x91\x2d\x09\x09\x09\x30\x00\x38\x33\x5a\xef\x7c\xf7\x2d\xf0\x9f\x08\x02\x20\x41\x8a\xd2\xc8\x0e\x0f\xa9\x78\x28\xa0\xd1\xdd\x68\x34\x7e\xdd\xf8\xf7\xf9\x11\x42\x33\x49\x64\x04\xb3\xd7\x68\x76\x86\xfe\x76\xf9\xfe\x1f\xe8\x32\xd8\x40\x8c\xd1\x8a\x71\x74\x79\x8b\xd7\x6b\xe0\xe8\xd5\xfc\x25\x3a\xbb\x38\x9f\xcf\x9e\xab\x0a\x24\x54\xa5\x37\x52\x26\xaf\x17\x0b\x91\x17\x99\x13\xb6\xb8\x79\xb5\x10\x59\xdd\xf9\xef\x82\xd1\x6f\xf2\xc2\x8f\xf3\x4f\xb5\x1a\xea\xc7\x17\x45\x41\xc6\xd7\x8b\x90\xe3\x95\x7c\xf1\xf2\x7f\x8b\xca\x45\x3d\xb9\x4d\x32\xa6\xd8\xf2\x77\x08\x64\xfe\x8d\xc3\x9f\x29\xe1\xa0\x9a\xff\xed\x11\x42\x08\xcd\x8a\xd6\xb3\x9f\x15\x67\x74\xc5\xca\x7f\x27\x58\x6e\xc4\xec\x11\x42\xd7\x59\x5d\x1c\x86\x44\x12\x46\x71\x74\xc1\x59\x02\x5c\x12\x10\xb3\xd7\x68\x85\x23\x01\x59\x81\x04\x4b\x09\x9c\x6a\xbf\x7e\xce\x49\x7d\xba\x7b\x51\xfd\xa1\x44\xe2\xb0\x52\xac\x7d\xb3\x08\x61\x45\x68\x46\x56\x2c\x6e\x80\x86\x8c\xbf\xbd\x93\x40\x05\x61\x74\x96\x95\xbe\x7f\x84\xd0\x7d\x4e\xde\x42\xb7\xe4\xbe\x46\xbb\x14\x5b\x48\x4e\xe8\xba\x90\x05\xa1\x19\xd0\x34\xae\xc4\xce\xbe\xbc\x9a\xbf\x9c\x15\x7f\x5d\x57\xc5\x42\x10\x01\x27\x89\xe2\x48\x51\xb9\xda\x40\xd5\x87\x37\xc0\x15\x5f\x88\xad\x90\xdc\x10\x81\x42\x16\xa4\x31\x50\x39\x2f\x38\xad\xab\xb0\x53\xd8\xac\x94\x56\x6f\xc3\x84\xf4\x11\xa4\x50\xb3\xfa\xe9\xd3\x6f\x9f\x3e\xdf\x2f\xd0\xeb\x8f\x1f\x3f\x7e\xbc\xfe\xf6\xe9\xf7\xaf\x5f\x7f\xfc\x18\x7e\xfb\xec\xfb\xc7\xb3\x36\x79\x54\x43\xe8\x29\xc5\x31\x20\xc6\x11\x49\x9e\xe5\x12\x41\x66\xa0\xe8\xed\x1d\x8e\x93\x08\x5e\xa3\x27\x3b\xc3\x7c\xa2\x73\xba\xc4\x02\x2e\xb0\xdc\xf4\xe5\x76\xd1\xca\x96\xa2\x8a\x94\xcd\x21\xc9\x6c\xec\x2c\x70\x42\x9e\x34\x74\x9d\x19\x7c\xcd\x20\x9c\xea\x2e\x0a\xfe\x42\x84\xd4\x29\x04\x8c\x8a\xb4\x41\xa2\xc1\xdc\x19\x8a\x88\x90\x4a\x49\xef\xce\xdf\xbd\x45\x4a\x52\x81\x70\x10\x40\x22\x21\x44\xcb\x6d\xc5\xec\x4e\x3c\x1c\x45\xef\x57\x9a\xb5\x7d\xae\xfe\xe5\xe4\x31\x86\x90\xe0\xab\x6d\x02\x3b\x2e\xcb\x11\x90\xd9\xa8\xc6\x77\xc2\x59\x98\x06\xfd\xf9\x2e\x78\x45\x01\xa6\xa8\xa0\x71\x5c\xbe\x33\xa7\xd2\xd9\x5f\x95\xef\xd9\xd5\xac\xfd\xdc\x5d\xbf\x5e\xb8\xd1\x3e\xc7\x31\x48\xe0\x5e\x4c\x14\x65\xdf\xb8\xa8\x71\x10\x09\xa3\xc2\xc7\x02\xcb\xa2\x4e\x5a\x02\x82\x94\x13\xb9\xf5\x30\xe6\xb2\xa4\xb5\xfe\x9b\x3e\x7a\xb2\x55\xd2\xa8\x4a\xbc\x16\xb6\x71\x8e\x39\xc7\xdb\x9d\xe1\x10\x09\x71\xbd\x9c\xb3\x41\x89\xd7\xa5\x89\xdc\x57\xb5\x53\x4a\xfe\x4c\xe1\xbc\xa0\x21\x79\x0a\x1a\x0f\x70\xa7\x5c\x08\x8e\xde\xb0\xc0\x43\x24\xad\x74\x63\x0e\xb1\xd9\x90\xe1\xb0\x2d\x13\xa7\x6d\x78\xfd\x04\x14\x38\x8e\x90\xaa\xce\x63\xac\x3e\x23\xbc\x64\xa9\xb4\xf8\x03\x63\xde\xcd\xbe\x16\x13\x4a\x55\xac\x82\x12\xc6\xac\xd4\x35\xf7\x22\xd4\x3a\xff\x22\x73\x0e\x6e\x51\xa0\x75\x1e\xae\x8f\xe8\x5d\xc7\x59\xe6\xe4\x9a\x18\x8d\xd6\x1c\x53\x84\x4d\xb7\x67\x28\x37\x09\x84\x69\x88\x12\x0e\x01\x11\x80\x32\xa2\xf5\xb9\xaa\xc6\xd9\x73\x53\xab\xfb\xb4\x2e\x20\xc6\x54\x92\xa0\x9a\xf3\x69\x1a\x2f\x81\x77\x37\xae\x53\x1a\xce\x40\xc4\xa8\x82\x1c\xb5\xef\xda\x24\x7d\xb9\x61\x69\x14\xa2\x25\xa0\x90\xac\x56\xc0\x81\x4a\xb4\xe2\x2c\xce\x4a\x64\x7a\x9a\x23\xf4\x13\x91\x3f\xa7\x4b\xf4\x63\x84\x6f\x18\x87\x10\xbd\xc3\xfc\x8f\x90\xdd\x52\x44\x04\xc2\x51\xc4\x6e\x21\x74\x48\x21\x81\xc7\xe2\xfd\xea\x12\xf8\x0d\x09\xf6\xe9\x47\x35\xaf\x67\xc4\x14\xf7\x22\x27\x97\xe1\xe2\x76\x2d\x06\x8c\x4a\x1c\x48\x3f\x73\x2d\x0b\x5b\x29\x45\x24\x00\x2a\x0c\x11\xec\x94\xca\xc2\xa6\xc1\x37\x21\x43\x83\x3b\x5f\x97\xf1\x43\x5e\x53\x73\x19\xa5\x36\xd8\x2d\x05\x2e\x34\x0b\xeb\x39\xfc\x1d\x63\x51\x01\xbd\x3d\xbb\x90\x84\x40\x25\x59\x6d\x09\x5d\xa3\x1c\x37\xe6\x5c\x16\x9a\x40\x09\x70\xc1\xe8\x82\xf1\x35\xa6\xe4\xdf\x99\x5c\x8e\x9e\x4d\x79\xb4\x27\x2f\xbf\x7e\xf8\x05\x25\x8c\x50\xa9\x98\x29\x90\x62\x60\xea\x75\xae\x13\xca\xbf\x2b\x1a\x29\x27\x76\xd6\x20\xc6\x64\x5f\xe6\x32\x1a\x08\x87\x21\x07\x21\xbc\xb4\xe4\xe0\x32\x67\xa6\xcd\xf3\x1e\xcd\xd9\x6b\xb6\x6f\x8e\x27\xa7\xed\xdb\xe7\xbc\xcc\x1a\x07\xce\x6f\x87\x33\xf0\xba\x51\x17\x22\x66\x78\x79\x8e\xce\xe5\x13\x81\x80\x06\x2c\xe5\x78\x0d\xa1\xb2\xb8\x54\xa8\x79\x09\xbd\xbf\x3c\x47\x01\x8b\x13\x2c\xc9\x32\xaa\xaa\x1d\xd5\xee\xab\x36\xbd\x6c\xfd\x54\x6c\xc8\x08\x01\x3c\xbd\xe7\x07\x88\xb0\x24\x37\x79\x90\x28\x4a\x1d\x10\x1a\x92\x1b\x12\xa6\x38\x42\x40\xc3\x4c\x43\x62\x8e\xae\x36\xb0\x45\x71\x2a\xa4\x9a\x23\x79\x59\xb1\xa8\xf2\xa4\x0c\x60\x9f\xcc\x8d\x40\xf5\x80\xca\xa8\x99\xc3\xa7\x85\x1f\x31\x25\xa9\x82\xc5\x6d\xbd\xd8\x36\x76\x7c\x02\x28\x97\xf6\x1d\x74\x3b\x11\x7e\x91\xae\x32\xf8\x6c\xf4\xe6\x7b\x9a\xa5\x1f\x62\xc6\x21\xcf\x9a\xe5\xed\x8b\x02\xf3\x2c\x33\x33\xdf\x00\xca\xc9\x09\xb4\x04\xf5\xa5\x08\xd7\xc3\x02\x18\x66\xf1\xab\x1e\x83\x37\x4c\xcd\x12\xc1\x1d\x50\xf6\xaa\xbd\xfe\xe2\x73\x48\x38\x08\xa0\x32\x9b\x18\x44\x86\x0b\x6a\xc1\xaa\x26\x96\x2d\x96\x3c\xa0\x54\x65\x73\xe3\x08\xb5\x8b\x99\xbd\x82\xbc\x9e\xc2\xe8\x53\x46\x83\x3f\x33\x54\x2b\x5b\xad\x92\x79\xd9\x8f\x5d\x93\x98\xf2\xe6\xc6\x1c\xe6\x9a\x9e\xfc\x43\x82\x31\x66\x8e\x53\x77\xfe\x90\xe7\xf3\xf6\xe9\x62\x23\x3f\x10\x93\x18\xae\x72\x1a\x9d\xf9\x48\xcb\xcc\x5a\x65\xc7\x4a\x04\xf0\xf3\xd5\xd5\x05\x8a\x41\x08\xbc\x86\x86\x43\x51\x6c\xe0\x46\x57\xf6\x44\x40\x0d\xfb\xff\xa2\xc3\x7c\x3d\x39\x84\xdc\x09\x22\x64\x4f\x12\xd9\xba\xaa\xf6\xe3\xbd\x56\xdd\x91\x25\x6a\x14\x9c\x89\x34\x8e\x31\xdf\xee\x15\x7e\x2f\x39\x81\x15\x2a\x28\x95\x66\x51\xf5\xfd\x83\xc5\xfe\x15\x07\xcf\xf7\x08\xee\x1d\x8e\xb6\xc5\x52\xcc\x8c\x5a\x93\x66\xc5\xd8\x79\x38\x46\xd6\xa7\x88\x37\xc9\x2e\xe3\xd2\xa5\x7b\x4b\x3a\xdc\xa1\xdc\x9e\x29\xf1\x8c\x8a\x99\x16\x47\x8d\xd4\x78\x8b\xf6\x1c\xe9\x71\x54\x1b\x69\xa8\x4a\x93\x37\xe5\xb2\x2c\x4f\x0c\x92\xab\xa0\x73\x32\x72\x59\xd3\xf0\x2d\x8d\xed\xca\x37\x16\x19\x9e\xdb\x1c\xab\x17\x49\xc3\x0f\x37\xdc\x88\xb1\xb4\xd4\x42\xcb\x58\x5e\x6a\x52\x0b\x15\x10\x0a\xb0\x04\xe7\xf8\x58\x32\x16\x01\xa6\xcd\x01\xb2\xc2\x69\x24\x35\x38\x6f\x30\x6a\xae\x1b\xb4\x71\xaa\xad\x1d\xa0\xd6\x20\x2d\x8b\x3c\xc6\x82\x62\x27\x34\x6d\x15\x84\x7b\x43\xb1\x35\x78\xa6\x24\x77\x28\xc1\x6e\xfc\xe9\x48\x74\xf4\x15\xe3\xe1\x84\x42\x88\x40\x7a\x26\x49\x3b\x48\xb1\xa4\x19\x8e\x0c\xa7\xb5\x01\x6c\x0c\x97\x61\x8a\xc2\x32\xd8\x8c\x44\x69\x24\xbf\x65\x1d\x74\xd6\xe5\x44\xef\xec\x48\x5e\xb7\x8a\xa3\x29\x8e\x41\x64\xce\x1f\x88\xdc\x00\x47\x4b\x40\x98\x6e\xd1\x0d\x8e\x48\x98\x63\x5c\x21\xb1\x4c\x05\x0a\x58\x98\xc5\x6d\x4f\x0a\x77\x53\x4f\x8b\xc4\x44\x1f\xb2\xdf\x8d\x3b\xea\x9f\xfe\xf6\xf2\xc5\xff\x5d\x7f\xfe\x9f\xfb\x67\x8f\xff\xf3\xe9\x69\xd1\xfe\xb3\xc7\xfd\x3c\xf8\x3f\x71\x94\x82\x23\xd1\x72\x00\xb7\x42\x99\x6c\xc0\x60\x7b\x0f\x79\xea\xa8\x53\x4b\x56\x31\xfa\x0b\x52\x9f\x96\xdb\xcd\x2f\xd7\x67\xcd\x04\x19\x85\xfe\xdb\x02\x9a\x59\x03\xad\x63\x3c\xea\xff\x2e\x18\xfd\x00\xd9\xe2\x56\x60\x59\x93\xb9\xb6\xb2\x3e\x3c\x2c\xab\x0f\xa7\xb2\x89\x43\xc7\xf6\xd5\xce\x2e\xad\xa6\xa9\xed\xa6\xc6\x5a\xb4\xa6\x67\xdf\x8c\x26\x7b\x50\x5a\x91\x08\x2e\x6d\xd4\x3a\xc1\x9d\xf2\xdb\xde\x1e\xb2\x2c\x6c\xa5\x64\xc9\x16\xb4\x90\xaa\x4a\xb7\x0c\xde\x13\xc3\x2a\x9a\x11\x9b\x7a\x1b\x3d\x95\x97\x37\x31\x6b\x69\x7e\x34\xc0\x67\x1f\x66\x19\x49\xef\xf1\x25\xf5\xac\x0e\xea\x0a\x28\x8d\x4d\x7e\xd9\x57\x4b\x49\xe5\xc6\xb3\x25\xfd\xe6\x57\x42\x25\xac\xcd\xcf\x36\x74\x8e\xca\x24\x47\xe7\x80\xa8\x92\x72\xbd\x3d\x84\x2d\x65\xe2\x82\x1a\x9c\xc4\x44\x92\x1b\x10\x79\x8a\xc4\x4a\x2f\x60\x51\x04\x81\xaa\xf0\xa3\x95\x27\xd7\x12\x7b\xa3\x96\x03\x45\x96\xc1\x8a\x07\xc9\xb2\xb0\x95\x52\x8c\xef\x48\x9c\xc6\x7e\x94\xca\xc2\x0e\x07\x12\x44\xa9\x20\x37\xf0\xae\x0f\x49\xa3\x96\x9d\x4b\x42\x7b\x70\x59\x14\xee\xe0\xb2\x0f\x49\xa3\x96\x4b\x97\xbf\x00\x5d\x4b\x4f\xfc\xbb\x2b\xee\x92\xb9\x17\xb5\xaa\xb8\x0b\x97\x17\x9b\x43\xfd\xd6\xc2\xb2\xc2\x2e\x29\xcf\xfd\x87\x4a\x55\xda\x25\x63\x1f\x5a\x65\x69\x2b\x2d\x3d\x67\xe9\x41\xae\x5e\xc1\x6e\x2b\xd4\xdb\x3e\xa8\xd3\x26\xd2\x48\x92\x24\xca\x61\x86\x8f\x8c\xbb\xf2\x8e\x91\xdf\x1f\x06\x19\x33\xf3\x03\x4d\xba\xcd\xe2\x2d\xfb\x69\xe9\x16\x15\x13\xd5\x56\x85\x4e\x3c\x5b\x8a\xbf\x25\x72\x83\xee\x5e\x20\x22\xf2\xc8\xaa\x7b\xdb\x8e\xe4\x29\x58\xca\x38\xb7\x3f\x2e\x59\xb8\xbd\xa8\x16\x16\xf7\xdb\x79\x51\x9f\x5a\xb4\x8d\x87\x3a\x6e\xbc\x3e\xc5\xb4\xcd\x58\xf9\xf5\x3c\xb9\x6f\x49\xaf\x57\xc1\xfa\x1c\x5d\x6d\x88\x8a\x8b\xd3\x28\xcc\xb7\xef\x10\x8a\x4a\x74\xa9\x4a\xa7\x62\xbf\x0d\x76\x23\x6f\x59\xd9\x31\xee\x40\x11\xfb\x28\xec\x8d\x22\x1c\x13\x5a\x64\x94\x23\x16\x60\xbb\xd2\x7c\xa0\x98\xb2\xe5\x6e\xbc\x54\x33\xe0\x3e\xb9\x52\x17\xdb\xb7\x1b\xc8\x12\x20\x8c\x23\xca\x64\x7e\x78\xa3\x62\x5b\x75\x56\xd9\x9e\x2a\x91\x27\xb0\x70\x34\x1f\x90\x89\xb5\x86\x73\x7e\x71\xda\x1e\xfb\x3a\x72\xdc\x5e\x79\x88\xcb\x74\x79\xd9\x64\xe4\xd4\xc2\x9e\xce\xb1\xfe\x85\x5a\xc0\xe9\x0c\x34\x3d\xd0\x43\xce\xa1\x36\x39\xd5\xa1\x4e\xf5\xf8\xb1\xa9\x23\x08\x75\x84\xac\x53\x6c\x3a\xc5\xa6\x53\x6c\x3a\xc5\xa6\x7f\xc5\xd8\xf4\x51\xfd\xff\x25\x4e\xfa\x33\x05\xbe\x9d\x60\xd2\x04\x93\x6a\x5f\x33\x9b\x98\x50\xd2\xe1\x50\x52\xc6\xcc\xdb\x38\x91\xdb\xe6\xaa\xa2\x8f\xa1\x6a\xa6\xd4\xc6\x56\xd6\x8c\x40\x02\x68\x48\xe8\x1a\xe1\x9a\xd9\x2e\xb7\x05\xc3\x34\xda\x2a\xbb\xcd\x12\x36\x98\x22\x50\x4c\xa1\x1b\xc5\xd5\x84\xf0\xbe\x24\x84\xf7\x2f\x22\x37\xef\x94\xd7\x9f\xa0\xde\x04\xf5\x26\xa8\x37\x41\x3d\x64\x40\x3d\xe5\xf2\xde\x60\x89\x27\xb4\x37\xa1\xbd\xda\xd7\xd2\x2c\x26\xc0\x37\x01\x3e\x1b\xef\x5f\x06\xe0\x6b\x7c\x5c\x91\x08\x26\x10\x38\x81\xc0\x09\x04\x76\x4a\x3d\x81\xc0\xbf\x12\x08\x4c\xb0\xdc\x7c\x99\x00\xd0\x75\x70\xb4\xf8\x5a\x7c\xea\xde\x3e\x39\x08\x30\x5a\x27\x35\xed\xb4\x65\xad\x69\x74\x10\x88\x79\xe2\x30\x52\x19\xd6\x04\x21\xa7\x95\xd5\x0e\x03\xf8\xda\x20\xd7\x84\xb4\x26\xa4\x35\x21\xad\x09\x69\x21\x03\x69\x51\x46\xff\xff\x18\x9b\x54\xed\x87\x47\x06\x9d\x4e\x73\x6e\x9a\xb3\xa9\xce\x83\x5e\x4b\xc6\x71\x20\x45\xd7\x72\xf5\x40\x72\x0e\x34\x6c\xf4\x6c\xf3\xba\x5e\x4b\x97\x0e\x52\xb8\xbe\x8b\x79\xa0\x10\x86\xa1\x75\xb0\x6f\xec\xc8\xf4\x3d\x4d\x7b\x86\xc2\x02\x31\x12\x51\xbf\x07\x94\xad\x10\xd6\x2e\x79\xcf\xe9\x1c\xf5\x1e\x31\x23\x5c\x18\xfb\x9c\xfb\x70\xe0\x62\xbd\xf7\xb5\x94\xcf\xf3\xf6\xfa\xc5\x4e\x9c\x85\x76\x1d\xae\x37\xbc\xde\xa3\x41\xcb\x29\xd0\x5e\x70\x67\x50\x93\x6d\x98\xa8\xd3\x67\x0f\x68\xb1\xeb\x38\x47\x07\x10\x1b\xd2\xe2\x18\x68\x6d\x40\xbb\xa3\x40\xba\x21\xf2\x8e\x81\xfb\xf6\x92\x77\x2f\x70\xe8\xdb\xb2\x36\xbf\x30\x91\xc5\x21\xe7\x45\xcc\x34\x0c\x48\x8e\xd0\xf2\x9b\x7c\x3c\xbd\x1c\x04\x3e\x07\xe8\x7c\x2f\x84\x7a\x48\x4d\x1f\xba\xe1\x76\x45\x7b\x60\xe0\x01\xca\xee\x04\xca\x31\xbe\x73\x5f\xa3\x70\x0c\xad\x1f\xa5\xf5\x76\xd5\xbb\xd2\x7e\xfb\x30\x90\xcf\xfa\x67\x7a\xe6\xc3\x37\x42\x19\xe2\xc9\x9c\x61\x4c\xe7\xd1\x77\x55\x86\x6e\x8f\x7b\x85\x42\x33\xa3\xaa\x57\xae\xfd\xd5\xcc\x9c\x56\x68\xe2\xde\x0e\xa8\x2c\xa9\xb0\x7d\xf0\x54\x2d\x80\xf2\x48\x39\x3d\x98\x1a\x6d\x0b\x9d\xba\x53\xfb\xce\xf8\xd1\x7e\xbb\x60\x4f\x06\xf5\xce\xda\xab\xeb\xca\xcb\xd5\xac\x20\xda\x72\x3b\xa2\x4b\x38\xd7\xb5\x89\xbe\x42\xd9\xb9\x73\xc4\x0c\x6d\xb7\xd9\xf8\x8d\xbd\x3e\x9c\xf5\x53\x68\x48\x14\x36\x8f\x09\xc5\x92\xf1\x21\xd1\x09\x07\x1c\xbe\xa7\x91\xf3\x6a\xc8\xc1\x57\xb0\xdd\xc5\xc6\x1d\xad\x76\x1d\xa8\x82\x0e\x4c\x38\xfe\xa5\x8c\xc5\x0a\x40\x5d\xa1\xbb\x98\xd1\xfb\x74\x61\xed\x1a\x98\xaf\x3c\x8c\x1e\xe3\xc2\x92\x29\x74\x3e\x99\xd0\xf9\x41\x50\xd0\x38\x4b\x57\x7e\x5b\x7a\x0e\xe6\xce\x4e\xd7\x19\x35\x57\xbb\x3c\x3c\xd2\x5e\x4f\x4b\x4c\xf7\x0f\x4d\x2b\x91\x5d\x94\xa6\x95\xc8\x69\x25\x72\x5a\x89\x7c\xb8\x95\xc8\x07\x80\x8c\xda\x9c\x64\x7b\xb7\x71\xdf\x57\x12\x4b\x9a\x1f\x72\x0c\x13\x03\xad\x3c\xd5\x4e\xde\x8e\x57\x13\x6d\x34\x86\xcf\x97\xe6\xa4\x68\xc4\xb0\xf6\xc9\xc2\xeb\x8d\x0b\xd7\xcd\xfe\xba\xa6\xf5\x30\xeb\x30\x33\xbe\xc7\x56\x27\xab\x08\xd9\x6d\xbb\x09\xee\x7c\x2d\xcf\xee\x87\x38\xac\xc8\xdd\x90\x9a\x58\x4a\x4e\x96\xa9\x79\x79\xf3\xde\x20\xf0\x96\xe3\x24\x19\xeb\xba\xf2\x53\x19\xab\x12\xaf\x47\xb3\xa0\x3e\xef\x9b\x8d\x6d\x6d\x7b\xde\x3b\x3b\x1a\xc0\x3f\x95\x7e\xed\x78\xfb\x76\xb8\xaf\xb3\xdd\xc5\xeb\x95\xed\x5a\x62\x41\x82\xb3\x54\x6e\x80\x4a\x92\x6f\x36\xbd\x34\xae\xde\x6f\xa4\xc0\xbc\x08\xe3\x84\xfc\x1d\xb6\xe3\xd0\x62\x38\x95\x9b\x57\xe7\x71\x12\x91\x80\xc8\x31\x69\x5e\x60\x21\x6e\x19\x0f\xc7\xa4\x79\x96\x28\x3e\x47\x54\x65\x41\x36\x08\x40\x88\x1f\x58\x08\x56\xaa\xd5\xbf\xaf\xad\x96\xd7\xd6\xcf\x87\xf5\x34\x0f\x71\x93\x6e\x26\xed\x98\x5b\x9f\x4f\xcf\x95\x34\xc6\xd7\x11\xfa\xb0\x81\x22\x1a\xdb\xdf\x8e\xdc\xc3\xb9\xf8\xdd\x5d\x3c\x74\xe6\xea\xb7\x8b\xbf\xf5\x6e\xb3\x46\x2e\x64\xf4\xab\x3c\x4e\xcf\x36\x1d\xfe\xfa\xb8\x36\xba\x8a\xd8\xad\xf6\xc6\x41\x2a\x37\x8c\x17\x0f\xda\xfe\xda\xe7\x65\xbc\x71\x2c\x36\x57\x8a\x47\x12\x4c\xf1\xbd\x77\x6b\xa4\x50\x7e\x77\x7b\x22\x60\x89\xef\xcd\xf5\xb9\x0c\x97\x79\x0d\x2b\x35\x43\xcb\x3d\x24\xf1\x78\xfc\xf8\xcb\x1f\x15\x06\xe2\x78\xd8\x51\x21\xd9\x1f\xf0\xf5\x8f\x86\xa4\x50\xfa\xb1\x47\x43\xa5\xdd\x69\x14\xe8\xa3\xc0\x86\x91\xa7\x81\x50\xb4\x7c\xc0\x81\x80\x77\x7a\x9f\xc6\xc2\xa9\x8c\x05\x33\xb0\x3b\x31\xa4\xf4\xd7\x1b\x26\x55\x97\x7c\x65\xf8\x69\x1a\x84\x8e\x41\x78\xd9\xec\xc5\x11\x16\x1e\x74\x91\xf5\x56\xf5\x57\x49\x47\x5c\x92\xa9\x1e\x99\x36\xf4\xdb\xb1\x0e\xd3\x78\x02\xb0\x9b\x25\xcb\xe9\xe9\x1d\x0d\x44\x01\x42\x08\x91\x64\xd9\xdd\x37\x08\x17\xef\xf9\xe5\x0f\xbd\x46\x91\xf5\xf9\x89\x92\x37\xdd\x89\x59\x44\x1f\x9c\xee\x34\x1e\xbe\x47\x83\x32\x72\x8e\x37\xdf\xac\x69\x38\xef\x75\xb0\xda\xdb\xac\x83\x94\x2f\x39\xa6\x62\x05\x1c\x25\x9c\x49\x16\xb0\xa8\x3c\xc7\x7e\x76\x71\x3e\x6f\xb5\x24\xe7\xe8\xb7\xb9\xc7\x6c\x43\x92\xee\x21\xd4\x17\xa1\x7f\xba\x35\xfe\xae\x39\xbc\xde\xba\x69\xd9\x8e\xe1\x62\xde\x64\x7d\x16\x88\x1b\xed\x29\x11\xfd\x4f\xa9\xff\x99\x90\xc4\xf6\xf4\xf9\x6e\xe9\x28\x23\xd7\xca\xe5\xee\xee\x9f\x63\xb1\x5b\xfb\x10\xd7\x2f\x1d\xf2\xe3\xbf\xb9\xb5\x6f\xa4\x6d\x7d\x25\x79\xfb\x24\x31\xea\x56\xbe\x5d\x53\xcd\x2d\x36\xa3\x6d\xdf\xab\x1c\xb8\x6d\x6f\xc0\x98\xa7\xdd\xaa\x86\x8c\x1d\x39\xa3\x9d\x70\x2b\x9b\x68\xd9\xfd\x33\xfe\xa9\xb6\x4a\x2e\x63\x0f\xcf\x68\x27\xd9\x4c\xb9\x46\x6d\xcb\xbe\xa1\xa8\xd6\x5f\xc6\xd6\x9f\xf1\x4f\xf4\xd4\xb4\x78\xd0\xd6\xf4\x13\x3c\x3b\xac\xd0\xdc\x90\x34\xda\xc9\xb4\x9a\x1a\x8d\xbd\x93\x87\xd4\xe2\x21\x1b\xb3\x2b\xd1\xbe\xe7\x69\xd4\x53\x67\xd5\x40\xa0\xe3\x19\x3f\x6d\x1a\xbc\x0e\x86\x3c\x10\xb4\x3d\x2a\xcd\x78\x32\xe6\xab\xbd\x36\xc9\xf4\x3a\x58\xae\xc3\xf4\x47\xea\xbf\xfb\x47\xff\x0d\x00\x00\xff\xff\xd2\x32\x5a\x28\x38\x9d\x00\x00")
+
+func v2SchemaJsonBytes() ([]byte, error) {
+ return bindataRead(
+ _v2SchemaJson,
+ "v2/schema.json",
+ )
+}
+
+func v2SchemaJson() (*asset, error) {
+ bytes, err := v2SchemaJsonBytes()
+ if err != nil {
+ return nil, err
+ }
+
+ info := bindataFileInfo{name: "v2/schema.json", size: 40248, mode: os.FileMode(0640), modTime: time.Unix(1568964748, 0)}
+ a := &asset{bytes: bytes, info: info, digest: [32]uint8{0xab, 0x88, 0x5e, 0xf, 0xbf, 0x17, 0x74, 0x0, 0xb2, 0x5a, 0x7f, 0xbc, 0x58, 0xcd, 0xc, 0x25, 0x73, 0xd5, 0x29, 0x1c, 0x7a, 0xd0, 0xce, 0x79, 0xd4, 0x89, 0x31, 0x27, 0x90, 0xf2, 0xff, 0xe6}}
+ return a, nil
+}
+
+// Asset loads and returns the asset for the given name.
+// It returns an error if the asset could not be found or
+// could not be loaded.
+func Asset(name string) ([]byte, error) {
+ canonicalName := strings.Replace(name, "\\", "/", -1)
+ if f, ok := _bindata[canonicalName]; ok {
+ a, err := f()
+ if err != nil {
+ return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err)
+ }
+ return a.bytes, nil
+ }
+ return nil, fmt.Errorf("Asset %s not found", name)
+}
+
+// AssetString returns the asset contents as a string (instead of a []byte).
+func AssetString(name string) (string, error) {
+ data, err := Asset(name)
+ return string(data), err
+}
+
+// MustAsset is like Asset but panics when Asset would return an error.
+// It simplifies safe initialization of global variables.
+func MustAsset(name string) []byte {
+ a, err := Asset(name)
+ if err != nil {
+ panic("asset: Asset(" + name + "): " + err.Error())
+ }
+
+ return a
+}
+
+// MustAssetString is like AssetString but panics when Asset would return an
+// error. It simplifies safe initialization of global variables.
+func MustAssetString(name string) string {
+ return string(MustAsset(name))
+}
+
+// AssetInfo loads and returns the asset info for the given name.
+// It returns an error if the asset could not be found or
+// could not be loaded.
+func AssetInfo(name string) (os.FileInfo, error) {
+ canonicalName := strings.Replace(name, "\\", "/", -1)
+ if f, ok := _bindata[canonicalName]; ok {
+ a, err := f()
+ if err != nil {
+ return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err)
+ }
+ return a.info, nil
+ }
+ return nil, fmt.Errorf("AssetInfo %s not found", name)
+}
+
+// AssetDigest returns the digest of the file with the given name. It returns an
+// error if the asset could not be found or the digest could not be loaded.
+func AssetDigest(name string) ([sha256.Size]byte, error) {
+ canonicalName := strings.Replace(name, "\\", "/", -1)
+ if f, ok := _bindata[canonicalName]; ok {
+ a, err := f()
+ if err != nil {
+ return [sha256.Size]byte{}, fmt.Errorf("AssetDigest %s can't read by error: %v", name, err)
+ }
+ return a.digest, nil
+ }
+ return [sha256.Size]byte{}, fmt.Errorf("AssetDigest %s not found", name)
+}
+
+// Digests returns a map of all known files and their checksums.
+func Digests() (map[string][sha256.Size]byte, error) {
+ mp := make(map[string][sha256.Size]byte, len(_bindata))
+ for name := range _bindata {
+ a, err := _bindata[name]()
+ if err != nil {
+ return nil, err
+ }
+ mp[name] = a.digest
+ }
+ return mp, nil
+}
+
+// AssetNames returns the names of the assets.
+func AssetNames() []string {
+ names := make([]string, 0, len(_bindata))
+ for name := range _bindata {
+ names = append(names, name)
+ }
+ return names
+}
+
+// _bindata is a table holding each asset generator, mapped to its name.
+var _bindata = map[string]func() (*asset, error){
+ "jsonschema-draft-04.json": jsonschemaDraft04Json,
+
+ "v2/schema.json": v2SchemaJson,
+}
+
+// AssetDir returns the file names below a certain
+// directory embedded in the file by go-bindata.
+// For example if you run go-bindata on data/... and data contains the
+// following hierarchy:
+// data/
+// foo.txt
+// img/
+// a.png
+// b.png
+// then AssetDir("data") would return []string{"foo.txt", "img"},
+// AssetDir("data/img") would return []string{"a.png", "b.png"},
+// AssetDir("foo.txt") and AssetDir("notexist") would return an error, and
+// AssetDir("") will return []string{"data"}.
+func AssetDir(name string) ([]string, error) {
+ node := _bintree
+ if len(name) != 0 {
+ canonicalName := strings.Replace(name, "\\", "/", -1)
+ pathList := strings.Split(canonicalName, "/")
+ for _, p := range pathList {
+ node = node.Children[p]
+ if node == nil {
+ return nil, fmt.Errorf("Asset %s not found", name)
+ }
+ }
+ }
+ if node.Func != nil {
+ return nil, fmt.Errorf("Asset %s not found", name)
+ }
+ rv := make([]string, 0, len(node.Children))
+ for childName := range node.Children {
+ rv = append(rv, childName)
+ }
+ return rv, nil
+}
+
+type bintree struct {
+ Func func() (*asset, error)
+ Children map[string]*bintree
+}
+
+var _bintree = &bintree{nil, map[string]*bintree{
+ "jsonschema-draft-04.json": {jsonschemaDraft04Json, map[string]*bintree{}},
+ "v2": {nil, map[string]*bintree{
+ "schema.json": {v2SchemaJson, map[string]*bintree{}},
+ }},
+}}
+
+// RestoreAsset restores an asset under the given directory.
+func RestoreAsset(dir, name string) error {
+ data, err := Asset(name)
+ if err != nil {
+ return err
+ }
+ info, err := AssetInfo(name)
+ if err != nil {
+ return err
+ }
+ err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755))
+ if err != nil {
+ return err
+ }
+ err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode())
+ if err != nil {
+ return err
+ }
+ return os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime())
+}
+
+// RestoreAssets restores an asset under the given directory recursively.
+func RestoreAssets(dir, name string) error {
+ children, err := AssetDir(name)
+ // File
+ if err != nil {
+ return RestoreAsset(dir, name)
+ }
+ // Dir
+ for _, child := range children {
+ err = RestoreAssets(dir, filepath.Join(name, child))
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func _filePath(dir, name string) string {
+ canonicalName := strings.Replace(name, "\\", "/", -1)
+ return filepath.Join(append([]string{dir}, strings.Split(canonicalName, "/")...)...)
+}
diff --git a/vendor/github.com/go-openapi/spec/cache.go b/vendor/github.com/go-openapi/spec/cache.go
new file mode 100644
index 000000000..122993b44
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/cache.go
@@ -0,0 +1,98 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "sync"
+)
+
+// ResolutionCache is a cache for resolving URLs
+type ResolutionCache interface {
+ Get(string) (interface{}, bool)
+ Set(string, interface{})
+}
+
+type simpleCache struct {
+ lock sync.RWMutex
+ store map[string]interface{}
+}
+
+func (s *simpleCache) ShallowClone() ResolutionCache {
+ store := make(map[string]interface{}, len(s.store))
+ s.lock.RLock()
+ for k, v := range s.store {
+ store[k] = v
+ }
+ s.lock.RUnlock()
+
+ return &simpleCache{
+ store: store,
+ }
+}
+
+// Get retrieves a cached URI
+func (s *simpleCache) Get(uri string) (interface{}, bool) {
+ s.lock.RLock()
+ v, ok := s.store[uri]
+
+ s.lock.RUnlock()
+ return v, ok
+}
+
+// Set caches a URI
+func (s *simpleCache) Set(uri string, data interface{}) {
+ s.lock.Lock()
+ s.store[uri] = data
+ s.lock.Unlock()
+}
+
+var (
+ // resCache is a package level cache for $ref resolution and expansion.
+ // It is initialized lazily by the methods that need it: no
+ // memory is allocated unless some expander methods are called.
+ //
+ // It is initialized with the JSON schema and the swagger schema,
+ // which do not mutate during normal operations.
+ //
+ // All subsequent uses of this cache work on a shallow
+ // clone of this initial version.
+ resCache *simpleCache
+ onceCache sync.Once
+
+ _ ResolutionCache = &simpleCache{}
+)
+
+// initResolutionCache initializes the URI resolution cache. To be wrapped in a sync.Once.Do call.
+func initResolutionCache() {
+ resCache = defaultResolutionCache()
+}
+
+func defaultResolutionCache() *simpleCache {
+ return &simpleCache{store: map[string]interface{}{
+ "http://swagger.io/v2/schema.json": MustLoadSwagger20Schema(),
+ "http://json-schema.org/draft-04/schema": MustLoadJSONSchemaDraft04(),
+ }}
+}
+
+func cacheOrDefault(cache ResolutionCache) ResolutionCache {
+ onceCache.Do(initResolutionCache)
+
+ if cache != nil {
+ return cache
+ }
+
+ // get a shallow clone of the base cache with swagger and json schema
+ return resCache.ShallowClone()
+}
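ResolutionCache is deliberately a two-method interface, so callers can supply their own implementation in place of the package default. A sketch backed by sync.Map; the type name is hypothetical and the import path is assumed to be the usual go-openapi one:

package main

import (
	"fmt"
	"sync"

	"github.com/go-openapi/spec"
)

// syncMapCache adapts sync.Map to the ResolutionCache interface.
type syncMapCache struct{ m sync.Map }

func (c *syncMapCache) Get(uri string) (interface{}, bool) { return c.m.Load(uri) }
func (c *syncMapCache) Set(uri string, data interface{})   { c.m.Store(uri, data) }

// Compile-time check that the adapter satisfies the interface.
var _ spec.ResolutionCache = (*syncMapCache)(nil)

func main() {
	c := &syncMapCache{}
	c.Set("https://example.com/schema.json", map[string]interface{}{"type": "object"})
	if v, ok := c.Get("https://example.com/schema.json"); ok {
		fmt.Printf("cached: %T\n", v)
	}
	// A value like c can then be handed to ExpandSchema and the other
	// entry points that accept a ResolutionCache.
}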
diff --git a/vendor/github.com/go-openapi/spec/contact_info.go b/vendor/github.com/go-openapi/spec/contact_info.go
new file mode 100644
index 000000000..2f7bb219b
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/contact_info.go
@@ -0,0 +1,57 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "encoding/json"
+
+ "github.com/go-openapi/swag"
+)
+
+// ContactInfo contact information for the exposed API.
+//
+// For more information: http://goo.gl/8us55a#contactObject
+type ContactInfo struct {
+ ContactInfoProps
+ VendorExtensible
+}
+
+// ContactInfoProps hold the properties of a ContactInfo object
+type ContactInfoProps struct {
+ Name string `json:"name,omitempty"`
+ URL string `json:"url,omitempty"`
+ Email string `json:"email,omitempty"`
+}
+
+// UnmarshalJSON hydrates ContactInfo from json
+func (c *ContactInfo) UnmarshalJSON(data []byte) error {
+ if err := json.Unmarshal(data, &c.ContactInfoProps); err != nil {
+ return err
+ }
+ return json.Unmarshal(data, &c.VendorExtensible)
+}
+
+// MarshalJSON produces ContactInfo as json
+func (c ContactInfo) MarshalJSON() ([]byte, error) {
+ b1, err := json.Marshal(c.ContactInfoProps)
+ if err != nil {
+ return nil, err
+ }
+ b2, err := json.Marshal(c.VendorExtensible)
+ if err != nil {
+ return nil, err
+ }
+ return swag.ConcatJSON(b1, b2), nil
+}
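Because ContactInfo composes ContactInfoProps with VendorExtensible, the custom marshalers above merge both parts into a single JSON object. A small round-trip sketch; the field values are illustrative:

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/go-openapi/spec"
)

func main() {
	var c spec.ContactInfo
	c.Name = "API Support"               // promoted from ContactInfoProps
	c.Email = "support@example.com"
	c.AddExtension("x-team", "platform") // promoted from VendorExtensible

	b, err := json.Marshal(c)
	if err != nil {
		log.Fatal(err)
	}
	// Properties and x- extensions appear side by side in the output.
	fmt.Println(string(b))
}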
diff --git a/vendor/github.com/go-openapi/spec/debug.go b/vendor/github.com/go-openapi/spec/debug.go
new file mode 100644
index 000000000..fc889f6d0
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/debug.go
@@ -0,0 +1,49 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "fmt"
+ "log"
+ "os"
+ "path"
+ "runtime"
+)
+
+// Debug is true when the SWAGGER_DEBUG env var is not empty.
+//
+// It enables a more verbose logging of this package.
+var Debug = os.Getenv("SWAGGER_DEBUG") != ""
+
+var (
+ // specLogger is a debug logger for this package
+ specLogger *log.Logger
+)
+
+func init() {
+ debugOptions()
+}
+
+func debugOptions() {
+ specLogger = log.New(os.Stdout, "spec:", log.LstdFlags)
+}
+
+func debugLog(msg string, args ...interface{}) {
+ // A private, trivial trace logger, based on go-openapi/spec/expander.go:debugLog()
+ if Debug {
+ _, file1, pos1, _ := runtime.Caller(1)
+ specLogger.Printf("%s:%d: %s", path.Base(file1), pos1, fmt.Sprintf(msg, args...))
+ }
+}
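Since Debug is an exported variable consulted on every debugLog call, tracing can be toggled through the environment or at runtime. A sketch, assuming the usual go-openapi import path:

package main

import (
	"github.com/go-openapi/spec"
)

func main() {
	// Equivalent to starting the process with SWAGGER_DEBUG=1 set:
	// debugLog checks the flag on each call, not only at init time.
	spec.Debug = true

	// Any expansion from here on traces to stdout with file:line prefixes.
	_ = spec.ExpandSpec(&spec.Swagger{}, nil)
}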
diff --git a/vendor/github.com/go-openapi/spec/errors.go b/vendor/github.com/go-openapi/spec/errors.go
new file mode 100644
index 000000000..6992c7ba7
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/errors.go
@@ -0,0 +1,19 @@
+package spec
+
+import "errors"
+
+// Error codes
+var (
+ // ErrUnknownTypeForReference indicates that a resolved reference was found in an unsupported container type
+ ErrUnknownTypeForReference = errors.New("unknown type for the resolved reference")
+
+ // ErrResolveRefNeedsAPointer indicates that a $ref target must be a valid JSON pointer
+ ErrResolveRefNeedsAPointer = errors.New("resolve ref: target needs to be a pointer")
+
+ // ErrDerefUnsupportedType indicates that a resolved reference was found in an unsupported container type.
+ // At the moment, $ref are supported only inside: schemas, parameters, responses, path items
+ ErrDerefUnsupportedType = errors.New("deref: unsupported type")
+
+ // ErrExpandUnsupportedType indicates that $ref expansion is attempted on some invalid type
+ ErrExpandUnsupportedType = errors.New("expand: unsupported type. Input should be of type *Parameter or *Response")
+)
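These sentinels are wrapped with %w further down (see getRefAndSchema in expander.go), so callers can classify failures with errors.Is. A sketch, with the document construction elided:

package main

import (
	"errors"
	"log"

	"github.com/go-openapi/spec"
)

func main() {
	doc := &spec.Swagger{} // placeholder; normally unmarshaled from a file

	if err := spec.ExpandSpec(doc, nil); err != nil {
		if errors.Is(err, spec.ErrExpandUnsupportedType) {
			log.Fatal("expansion attempted on an unsupported type")
		}
		log.Fatal(err)
	}
}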
diff --git a/vendor/github.com/go-openapi/spec/expander.go b/vendor/github.com/go-openapi/spec/expander.go
new file mode 100644
index 000000000..d4ea889d4
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/expander.go
@@ -0,0 +1,594 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "encoding/json"
+ "fmt"
+)
+
+// ExpandOptions provides options for the spec expander.
+//
+// RelativeBase is the path to the root document. This can be a remote URL or a path to a local file.
+//
+// If left empty, the root document is assumed to be located in the current working directory:
+// all relative $ref's will be resolved from there.
+//
+// PathLoader injects a document loading method. By default, this resolves to the function provided by the SpecLoader package variable.
+//
+type ExpandOptions struct {
+ RelativeBase string // the path to the root document to expand. This is a file, not a directory
+ SkipSchemas bool // do not expand schemas, just paths, parameters and responses
+ ContinueOnError bool // continue expanding even after an error is found
+ PathLoader func(string) (json.RawMessage, error) `json:"-"` // the document loading method that takes a path as input and yields a json document
+ AbsoluteCircularRef bool // circular $refs remaining after expansion remain absolute URLs
+}
+
+func optionsOrDefault(opts *ExpandOptions) *ExpandOptions {
+ if opts != nil {
+ clone := *opts // shallow clone to avoid internal changes to be propagated to the caller
+ if clone.RelativeBase != "" {
+ clone.RelativeBase = normalizeBase(clone.RelativeBase)
+ }
+ // if the relative base is empty, let the schema loader choose a pseudo root document
+ return &clone
+ }
+ return &ExpandOptions{}
+}
+
+// ExpandSpec expands the references in a swagger spec
+func ExpandSpec(spec *Swagger, options *ExpandOptions) error {
+ options = optionsOrDefault(options)
+ resolver := defaultSchemaLoader(spec, options, nil, nil)
+
+ specBasePath := options.RelativeBase
+
+ if !options.SkipSchemas {
+ for key, definition := range spec.Definitions {
+ parentRefs := make([]string, 0, 10)
+ parentRefs = append(parentRefs, fmt.Sprintf("#/definitions/%s", key))
+
+ def, err := expandSchema(definition, parentRefs, resolver, specBasePath)
+ if resolver.shouldStopOnError(err) {
+ return err
+ }
+ if def != nil {
+ spec.Definitions[key] = *def
+ }
+ }
+ }
+
+ for key := range spec.Parameters {
+ parameter := spec.Parameters[key]
+ if err := expandParameterOrResponse(&parameter, resolver, specBasePath); resolver.shouldStopOnError(err) {
+ return err
+ }
+ spec.Parameters[key] = parameter
+ }
+
+ for key := range spec.Responses {
+ response := spec.Responses[key]
+ if err := expandParameterOrResponse(&response, resolver, specBasePath); resolver.shouldStopOnError(err) {
+ return err
+ }
+ spec.Responses[key] = response
+ }
+
+ if spec.Paths != nil {
+ for key := range spec.Paths.Paths {
+ pth := spec.Paths.Paths[key]
+ if err := expandPathItem(&pth, resolver, specBasePath); resolver.shouldStopOnError(err) {
+ return err
+ }
+ spec.Paths.Paths[key] = pth
+ }
+ }
+
+ return nil
+}
+
+const rootBase = ".root"
+
+// baseForRoot loads in the cache the root document and produces a fake ".root" base path entry
+// for further $ref resolution
+//
+// Setting the cache is optional and this parameter may safely be left nil.
+func baseForRoot(root interface{}, cache ResolutionCache) string {
+ if root == nil {
+ return ""
+ }
+
+ // cache the root document to resolve $ref's
+ normalizedBase := normalizeBase(rootBase)
+ cache.Set(normalizedBase, root)
+
+ return normalizedBase
+}
+
+// ExpandSchema expands the refs in the schema object with reference to the root object.
+//
+// go-openapi/validate uses this function.
+//
+// Notice that it is impossible to reference a JSON schema in a document other than the root
+// (use ExpandSchemaWithBasePath to resolve external references).
+//
+// Setting the cache is optional and this parameter may safely be left nil.
+func ExpandSchema(schema *Schema, root interface{}, cache ResolutionCache) error {
+ cache = cacheOrDefault(cache)
+ if root == nil {
+ root = schema
+ }
+
+ opts := &ExpandOptions{
+ // when a root is specified, cache the root as an in-memory document for $ref retrieval
+ RelativeBase: baseForRoot(root, cache),
+ SkipSchemas: false,
+ ContinueOnError: false,
+ }
+
+ return ExpandSchemaWithBasePath(schema, cache, opts)
+}
+
+// ExpandSchemaWithBasePath expands the refs in the schema object, base path configured through expand options.
+//
+// Setting the cache is optional and this parameter may safely be left nil.
+func ExpandSchemaWithBasePath(schema *Schema, cache ResolutionCache, opts *ExpandOptions) error {
+ if schema == nil {
+ return nil
+ }
+
+ cache = cacheOrDefault(cache)
+
+ opts = optionsOrDefault(opts)
+
+ resolver := defaultSchemaLoader(nil, opts, cache, nil)
+
+ parentRefs := make([]string, 0, 10)
+ s, err := expandSchema(*schema, parentRefs, resolver, opts.RelativeBase)
+ if err != nil {
+ return err
+ }
+ if s != nil {
+ // guard for when continuing on error
+ *schema = *s
+ }
+
+ return nil
+}
+
+func expandItems(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
+ if target.Items == nil {
+ return &target, nil
+ }
+
+ // array
+ if target.Items.Schema != nil {
+ t, err := expandSchema(*target.Items.Schema, parentRefs, resolver, basePath)
+ if err != nil {
+ return nil, err
+ }
+ *target.Items.Schema = *t
+ }
+
+ // tuple
+ for i := range target.Items.Schemas {
+ t, err := expandSchema(target.Items.Schemas[i], parentRefs, resolver, basePath)
+ if err != nil {
+ return nil, err
+ }
+ target.Items.Schemas[i] = *t
+ }
+
+ return &target, nil
+}
+
+func expandSchema(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
+ if target.Ref.String() == "" && target.Ref.IsRoot() {
+ newRef := normalizeRef(&target.Ref, basePath)
+ target.Ref = *newRef
+ return &target, nil
+ }
+
+ // change the base path of resolution when an ID is encountered
+ // otherwise the basePath should inherit the parent's
+ if target.ID != "" {
+ basePath, _ = resolver.setSchemaID(target, target.ID, basePath)
+ }
+
+ if target.Ref.String() != "" {
+ return expandSchemaRef(target, parentRefs, resolver, basePath)
+ }
+
+ for k := range target.Definitions {
+ tt, err := expandSchema(target.Definitions[k], parentRefs, resolver, basePath)
+ if resolver.shouldStopOnError(err) {
+ return &target, err
+ }
+ if tt != nil {
+ target.Definitions[k] = *tt
+ }
+ }
+
+ t, err := expandItems(target, parentRefs, resolver, basePath)
+ if resolver.shouldStopOnError(err) {
+ return &target, err
+ }
+ if t != nil {
+ target = *t
+ }
+
+ for i := range target.AllOf {
+ t, err := expandSchema(target.AllOf[i], parentRefs, resolver, basePath)
+ if resolver.shouldStopOnError(err) {
+ return &target, err
+ }
+ if t != nil {
+ target.AllOf[i] = *t
+ }
+ }
+
+ for i := range target.AnyOf {
+ t, err := expandSchema(target.AnyOf[i], parentRefs, resolver, basePath)
+ if resolver.shouldStopOnError(err) {
+ return &target, err
+ }
+ if t != nil {
+ target.AnyOf[i] = *t
+ }
+ }
+
+ for i := range target.OneOf {
+ t, err := expandSchema(target.OneOf[i], parentRefs, resolver, basePath)
+ if resolver.shouldStopOnError(err) {
+ return &target, err
+ }
+ if t != nil {
+ target.OneOf[i] = *t
+ }
+ }
+
+ if target.Not != nil {
+ t, err := expandSchema(*target.Not, parentRefs, resolver, basePath)
+ if resolver.shouldStopOnError(err) {
+ return &target, err
+ }
+ if t != nil {
+ *target.Not = *t
+ }
+ }
+
+ for k := range target.Properties {
+ t, err := expandSchema(target.Properties[k], parentRefs, resolver, basePath)
+ if resolver.shouldStopOnError(err) {
+ return &target, err
+ }
+ if t != nil {
+ target.Properties[k] = *t
+ }
+ }
+
+ if target.AdditionalProperties != nil && target.AdditionalProperties.Schema != nil {
+ t, err := expandSchema(*target.AdditionalProperties.Schema, parentRefs, resolver, basePath)
+ if resolver.shouldStopOnError(err) {
+ return &target, err
+ }
+ if t != nil {
+ *target.AdditionalProperties.Schema = *t
+ }
+ }
+
+ for k := range target.PatternProperties {
+ t, err := expandSchema(target.PatternProperties[k], parentRefs, resolver, basePath)
+ if resolver.shouldStopOnError(err) {
+ return &target, err
+ }
+ if t != nil {
+ target.PatternProperties[k] = *t
+ }
+ }
+
+ for k := range target.Dependencies {
+ if target.Dependencies[k].Schema != nil {
+ t, err := expandSchema(*target.Dependencies[k].Schema, parentRefs, resolver, basePath)
+ if resolver.shouldStopOnError(err) {
+ return &target, err
+ }
+ if t != nil {
+ *target.Dependencies[k].Schema = *t
+ }
+ }
+ }
+
+ if target.AdditionalItems != nil && target.AdditionalItems.Schema != nil {
+ t, err := expandSchema(*target.AdditionalItems.Schema, parentRefs, resolver, basePath)
+ if resolver.shouldStopOnError(err) {
+ return &target, err
+ }
+ if t != nil {
+ *target.AdditionalItems.Schema = *t
+ }
+ }
+ return &target, nil
+}
+
+func expandSchemaRef(target Schema, parentRefs []string, resolver *schemaLoader, basePath string) (*Schema, error) {
+ // if a Ref is found, all sibling fields are skipped
+ // Ref also changes the resolution scope of child schemas during expansion
+
+ // here the resolution scope is changed because a $ref was encountered
+ normalizedRef := normalizeRef(&target.Ref, basePath)
+ normalizedBasePath := normalizedRef.RemoteURI()
+
+ if resolver.isCircular(normalizedRef, basePath, parentRefs...) {
+ // this means there is a cycle in the recursion tree: return the Ref
+ // - circular refs cannot be expanded. We leave them as ref.
+ // - denormalization means that a new local file ref is set relative to the original basePath
+ debugLog("short circuit circular ref: basePath: %s, normalizedPath: %s, normalized ref: %s",
+ basePath, normalizedBasePath, normalizedRef.String())
+ if !resolver.options.AbsoluteCircularRef {
+ target.Ref = denormalizeRef(normalizedRef, resolver.context.basePath, resolver.context.rootID)
+ } else {
+ target.Ref = *normalizedRef
+ }
+ return &target, nil
+ }
+
+ var t *Schema
+ err := resolver.Resolve(&target.Ref, &t, basePath)
+ if resolver.shouldStopOnError(err) {
+ return nil, err
+ }
+
+ if t == nil {
+ // guard for when continuing on error
+ return &target, nil
+ }
+
+ parentRefs = append(parentRefs, normalizedRef.String())
+ transitiveResolver := resolver.transitiveResolver(basePath, target.Ref)
+
+ basePath = resolver.updateBasePath(transitiveResolver, normalizedBasePath)
+
+ return expandSchema(*t, parentRefs, transitiveResolver, basePath)
+}
+
+func expandPathItem(pathItem *PathItem, resolver *schemaLoader, basePath string) error {
+ if pathItem == nil {
+ return nil
+ }
+
+ parentRefs := make([]string, 0, 10)
+ if err := resolver.deref(pathItem, parentRefs, basePath); resolver.shouldStopOnError(err) {
+ return err
+ }
+
+ if pathItem.Ref.String() != "" {
+ transitiveResolver := resolver.transitiveResolver(basePath, pathItem.Ref)
+ basePath = transitiveResolver.updateBasePath(resolver, basePath)
+ resolver = transitiveResolver
+ }
+
+ pathItem.Ref = Ref{}
+ for i := range pathItem.Parameters {
+ if err := expandParameterOrResponse(&(pathItem.Parameters[i]), resolver, basePath); resolver.shouldStopOnError(err) {
+ return err
+ }
+ }
+
+ ops := []*Operation{
+ pathItem.Get,
+ pathItem.Head,
+ pathItem.Options,
+ pathItem.Put,
+ pathItem.Post,
+ pathItem.Patch,
+ pathItem.Delete,
+ }
+ for _, op := range ops {
+ if err := expandOperation(op, resolver, basePath); resolver.shouldStopOnError(err) {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func expandOperation(op *Operation, resolver *schemaLoader, basePath string) error {
+ if op == nil {
+ return nil
+ }
+
+ for i := range op.Parameters {
+ param := op.Parameters[i]
+ if err := expandParameterOrResponse(&param, resolver, basePath); resolver.shouldStopOnError(err) {
+ return err
+ }
+ op.Parameters[i] = param
+ }
+
+ if op.Responses == nil {
+ return nil
+ }
+
+ responses := op.Responses
+ if err := expandParameterOrResponse(responses.Default, resolver, basePath); resolver.shouldStopOnError(err) {
+ return err
+ }
+
+ for code := range responses.StatusCodeResponses {
+ response := responses.StatusCodeResponses[code]
+ if err := expandParameterOrResponse(&response, resolver, basePath); resolver.shouldStopOnError(err) {
+ return err
+ }
+ responses.StatusCodeResponses[code] = response
+ }
+
+ return nil
+}
+
+// ExpandResponseWithRoot expands a response based on a root document, not a fetchable document
+//
+// Notice that it is impossible to reference a JSON schema in a document other than the root
+// (use ExpandResponse to resolve external references).
+//
+// Setting the cache is optional and this parameter may safely be left nil.
+func ExpandResponseWithRoot(response *Response, root interface{}, cache ResolutionCache) error {
+ cache = cacheOrDefault(cache)
+ opts := &ExpandOptions{
+ RelativeBase: baseForRoot(root, cache),
+ }
+ resolver := defaultSchemaLoader(root, opts, cache, nil)
+
+ return expandParameterOrResponse(response, resolver, opts.RelativeBase)
+}
+
+// ExpandResponse expands a response based on a basepath
+//
+// All refs inside response will be resolved relative to basePath
+func ExpandResponse(response *Response, basePath string) error {
+ opts := optionsOrDefault(&ExpandOptions{
+ RelativeBase: basePath,
+ })
+ resolver := defaultSchemaLoader(nil, opts, nil, nil)
+
+ return expandParameterOrResponse(response, resolver, opts.RelativeBase)
+}
+
+// ExpandParameterWithRoot expands a parameter based on a root document, not a fetchable document.
+//
+// Notice that it is impossible to reference a JSON schema in a document other than the root
+// (use ExpandParameter to resolve external references).
+func ExpandParameterWithRoot(parameter *Parameter, root interface{}, cache ResolutionCache) error {
+ cache = cacheOrDefault(cache)
+
+ opts := &ExpandOptions{
+ RelativeBase: baseForRoot(root, cache),
+ }
+ resolver := defaultSchemaLoader(root, opts, cache, nil)
+
+ return expandParameterOrResponse(parameter, resolver, opts.RelativeBase)
+}
+
+// ExpandParameter expands a parameter based on a basepath.
+// This is the exported entry point to expandParameterOrResponse:
+// all refs inside the parameter will be resolved relative to basePath.
+func ExpandParameter(parameter *Parameter, basePath string) error {
+ opts := optionsOrDefault(&ExpandOptions{
+ RelativeBase: basePath,
+ })
+ resolver := defaultSchemaLoader(nil, opts, nil, nil)
+
+ return expandParameterOrResponse(parameter, resolver, opts.RelativeBase)
+}
+
+func getRefAndSchema(input interface{}) (*Ref, *Schema, error) {
+ var (
+ ref *Ref
+ sch *Schema
+ )
+
+ switch refable := input.(type) {
+ case *Parameter:
+ if refable == nil {
+ return nil, nil, nil
+ }
+ ref = &refable.Ref
+ sch = refable.Schema
+ case *Response:
+ if refable == nil {
+ return nil, nil, nil
+ }
+ ref = &refable.Ref
+ sch = refable.Schema
+ default:
+ return nil, nil, fmt.Errorf("unsupported type: %T: %w", input, ErrExpandUnsupportedType)
+ }
+
+ return ref, sch, nil
+}
+
+func expandParameterOrResponse(input interface{}, resolver *schemaLoader, basePath string) error {
+ ref, _, err := getRefAndSchema(input)
+ if err != nil {
+ return err
+ }
+
+ if ref == nil {
+ return nil
+ }
+
+ parentRefs := make([]string, 0, 10)
+ if err = resolver.deref(input, parentRefs, basePath); resolver.shouldStopOnError(err) {
+ return err
+ }
+
+ ref, sch, _ := getRefAndSchema(input)
+ if ref.String() != "" {
+ transitiveResolver := resolver.transitiveResolver(basePath, *ref)
+ basePath = resolver.updateBasePath(transitiveResolver, basePath)
+ resolver = transitiveResolver
+ }
+
+ if sch == nil {
+ // nothing to be expanded
+ if ref != nil {
+ *ref = Ref{}
+ }
+ return nil
+ }
+
+ if sch.Ref.String() != "" {
+ rebasedRef, ern := NewRef(normalizeURI(sch.Ref.String(), basePath))
+ if ern != nil {
+ return ern
+ }
+
+ switch {
+ case resolver.isCircular(&rebasedRef, basePath, parentRefs...):
+ // this is a circular $ref: stop expansion
+ if !resolver.options.AbsoluteCircularRef {
+ sch.Ref = denormalizeRef(&rebasedRef, resolver.context.basePath, resolver.context.rootID)
+ } else {
+ sch.Ref = rebasedRef
+ }
+ case !resolver.options.SkipSchemas:
+ // schema expanded to a $ref in another root
+ sch.Ref = rebasedRef
+ debugLog("rebased to: %s", sch.Ref.String())
+ default:
+ // skip schema expansion but rebase $ref to schema
+ sch.Ref = denormalizeRef(&rebasedRef, resolver.context.basePath, resolver.context.rootID)
+ }
+ }
+
+ if ref != nil {
+ *ref = Ref{}
+ }
+
+ // expand schema
+ if !resolver.options.SkipSchemas {
+ s, err := expandSchema(*sch, parentRefs, resolver, basePath)
+ if resolver.shouldStopOnError(err) {
+ return err
+ }
+ if s == nil {
+ // guard for when continuing on error
+ return nil
+ }
+ *sch = *s
+ }
+
+ return nil
+}
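The exported entry points above compose as follows for the common case of expanding a spec loaded from disk; the file name is illustrative:

package main

import (
	"encoding/json"
	"log"
	"os"

	"github.com/go-openapi/spec"
)

func main() {
	raw, err := os.ReadFile("swagger.json") // illustrative path
	if err != nil {
		log.Fatal(err)
	}

	var doc spec.Swagger
	if err := json.Unmarshal(raw, &doc); err != nil {
		log.Fatal(err)
	}

	// RelativeBase anchors relative $refs to the document itself; leaving
	// it empty resolves them against the current working directory.
	opts := &spec.ExpandOptions{RelativeBase: "swagger.json"}
	if err := spec.ExpandSpec(&doc, opts); err != nil {
		log.Fatal(err)
	}
}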
diff --git a/vendor/github.com/go-openapi/spec/external_docs.go b/vendor/github.com/go-openapi/spec/external_docs.go
new file mode 100644
index 000000000..88add91b2
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/external_docs.go
@@ -0,0 +1,24 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+// ExternalDocumentation allows referencing an external resource for
+// extended documentation.
+//
+// For more information: http://goo.gl/8us55a#externalDocumentationObject
+type ExternalDocumentation struct {
+ Description string `json:"description,omitempty"`
+ URL string `json:"url,omitempty"`
+}
diff --git a/vendor/github.com/go-openapi/spec/header.go b/vendor/github.com/go-openapi/spec/header.go
new file mode 100644
index 000000000..9dfd17b18
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/header.go
@@ -0,0 +1,203 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "encoding/json"
+ "strings"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/swag"
+)
+
+const (
+ jsonArray = "array"
+)
+
+// HeaderProps describes a response header
+type HeaderProps struct {
+ Description string `json:"description,omitempty"`
+}
+
+// Header describes a header for a response of the API
+//
+// For more information: http://goo.gl/8us55a#headerObject
+type Header struct {
+ CommonValidations
+ SimpleSchema
+ VendorExtensible
+ HeaderProps
+}
+
+// ResponseHeader creates a new header instance for use in a response
+func ResponseHeader() *Header {
+ return new(Header)
+}
+
+// WithDescription sets the description on this header, allowing for chaining
+func (h *Header) WithDescription(description string) *Header {
+ h.Description = description
+ return h
+}
+
+// Typed a fluent builder method for the type of parameter
+func (h *Header) Typed(tpe, format string) *Header {
+ h.Type = tpe
+ h.Format = format
+ return h
+}
+
+// CollectionOf a fluent builder method for an array item
+func (h *Header) CollectionOf(items *Items, format string) *Header {
+ h.Type = jsonArray
+ h.Items = items
+ h.CollectionFormat = format
+ return h
+}
+
+// WithDefault sets the default value on this item
+func (h *Header) WithDefault(defaultValue interface{}) *Header {
+ h.Default = defaultValue
+ return h
+}
+
+// WithMaxLength sets a max length value
+func (h *Header) WithMaxLength(max int64) *Header {
+ h.MaxLength = &max
+ return h
+}
+
+// WithMinLength sets a min length value
+func (h *Header) WithMinLength(min int64) *Header {
+ h.MinLength = &min
+ return h
+}
+
+// WithPattern sets a pattern value
+func (h *Header) WithPattern(pattern string) *Header {
+ h.Pattern = pattern
+ return h
+}
+
+// WithMultipleOf sets a multiple of value
+func (h *Header) WithMultipleOf(number float64) *Header {
+ h.MultipleOf = &number
+ return h
+}
+
+// WithMaximum sets a maximum number value
+func (h *Header) WithMaximum(max float64, exclusive bool) *Header {
+ h.Maximum = &max
+ h.ExclusiveMaximum = exclusive
+ return h
+}
+
+// WithMinimum sets a minimum number value
+func (h *Header) WithMinimum(min float64, exclusive bool) *Header {
+ h.Minimum = &min
+ h.ExclusiveMinimum = exclusive
+ return h
+}
+
+// WithEnum sets the enum values (replacing any previous values)
+func (h *Header) WithEnum(values ...interface{}) *Header {
+ h.Enum = append([]interface{}{}, values...)
+ return h
+}
+
+// WithMaxItems sets the max items
+func (h *Header) WithMaxItems(size int64) *Header {
+ h.MaxItems = &size
+ return h
+}
+
+// WithMinItems sets the min items
+func (h *Header) WithMinItems(size int64) *Header {
+ h.MinItems = &size
+ return h
+}
+
+// UniqueValues dictates that this array can only have unique items
+func (h *Header) UniqueValues() *Header {
+ h.UniqueItems = true
+ return h
+}
+
+// AllowDuplicates dictates that this array can have duplicate items
+func (h *Header) AllowDuplicates() *Header {
+ h.UniqueItems = false
+ return h
+}
+
+// WithValidations is a fluent method to set header validations
+func (h *Header) WithValidations(val CommonValidations) *Header {
+ h.SetValidations(SchemaValidations{CommonValidations: val})
+ return h
+}
+
+// MarshalJSON marshals this header to JSON
+func (h Header) MarshalJSON() ([]byte, error) {
+ b1, err := json.Marshal(h.CommonValidations)
+ if err != nil {
+ return nil, err
+ }
+ b2, err := json.Marshal(h.SimpleSchema)
+ if err != nil {
+ return nil, err
+ }
+ b3, err := json.Marshal(h.HeaderProps)
+ if err != nil {
+ return nil, err
+ }
+ return swag.ConcatJSON(b1, b2, b3), nil
+}
+
+// UnmarshalJSON unmarshals this header from JSON
+func (h *Header) UnmarshalJSON(data []byte) error {
+ if err := json.Unmarshal(data, &h.CommonValidations); err != nil {
+ return err
+ }
+ if err := json.Unmarshal(data, &h.SimpleSchema); err != nil {
+ return err
+ }
+ if err := json.Unmarshal(data, &h.VendorExtensible); err != nil {
+ return err
+ }
+ return json.Unmarshal(data, &h.HeaderProps)
+}
+
+// JSONLookup looks up a value by the json property name
+func (h Header) JSONLookup(token string) (interface{}, error) {
+ if ex, ok := h.Extensions[token]; ok {
+ return &ex, nil
+ }
+
+ r, _, err := jsonpointer.GetForToken(h.CommonValidations, token)
+ if err != nil && !strings.HasPrefix(err.Error(), "object has no field") {
+ return nil, err
+ }
+ if r != nil {
+ return r, nil
+ }
+ r, _, err = jsonpointer.GetForToken(h.SimpleSchema, token)
+ if err != nil && !strings.HasPrefix(err.Error(), "object has no field") {
+ return nil, err
+ }
+ if r != nil {
+ return r, nil
+ }
+ r, _, err = jsonpointer.GetForToken(h.HeaderProps, token)
+ return r, err
+}
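The fluent builders above chain naturally. A sketch producing a typed, bounded response header; the description and bounds are illustrative:

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/go-openapi/spec"
)

func main() {
	h := spec.ResponseHeader().
		WithDescription("requests remaining in the current window").
		Typed("integer", "int32").
		WithMinimum(0, false)

	b, err := json.Marshal(h)
	if err != nil {
		log.Fatal(err)
	}
	// e.g. {"minimum":0,"type":"integer","format":"int32","description":"..."}
	fmt.Println(string(b))
}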
diff --git a/vendor/github.com/go-openapi/spec/info.go b/vendor/github.com/go-openapi/spec/info.go
new file mode 100644
index 000000000..582f0fd4c
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/info.go
@@ -0,0 +1,184 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "encoding/json"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/swag"
+)
+
+// Extensions holds vendor-specific extensions
+type Extensions map[string]interface{}
+
+// Add adds a value to these extensions
+func (e Extensions) Add(key string, value interface{}) {
+ realKey := strings.ToLower(key)
+ e[realKey] = value
+}
+
+// GetString gets a string value from the extensions
+func (e Extensions) GetString(key string) (string, bool) {
+ if v, ok := e[strings.ToLower(key)]; ok {
+ str, ok := v.(string)
+ return str, ok
+ }
+ return "", false
+}
+
+// GetInt gets an int value from the extensions
+func (e Extensions) GetInt(key string) (int, bool) {
+ realKey := strings.ToLower(key)
+
+ if v, ok := e.GetString(realKey); ok {
+ if r, err := strconv.Atoi(v); err == nil {
+ return r, true
+ }
+ }
+
+ if v, ok := e[realKey]; ok {
+ if r, rOk := v.(float64); rOk {
+ return int(r), true
+ }
+ }
+ return -1, false
+}
+
+// GetBool gets a bool value from the extensions
+func (e Extensions) GetBool(key string) (bool, bool) {
+ if v, ok := e[strings.ToLower(key)]; ok {
+ str, ok := v.(bool)
+ return str, ok
+ }
+ return false, false
+}
+
+// GetStringSlice gets a slice of strings from the extensions
+func (e Extensions) GetStringSlice(key string) ([]string, bool) {
+ if v, ok := e[strings.ToLower(key)]; ok {
+ arr, isSlice := v.([]interface{})
+ if !isSlice {
+ return nil, false
+ }
+ var strs []string
+ for _, iface := range arr {
+ str, isString := iface.(string)
+ if !isString {
+ return nil, false
+ }
+ strs = append(strs, str)
+ }
+ return strs, ok
+ }
+ return nil, false
+}
+
+// VendorExtensible composition block.
+type VendorExtensible struct {
+ Extensions Extensions
+}
+
+// AddExtension adds an extension to this extensible object
+func (v *VendorExtensible) AddExtension(key string, value interface{}) {
+ if value == nil {
+ return
+ }
+ if v.Extensions == nil {
+ v.Extensions = make(map[string]interface{})
+ }
+ v.Extensions.Add(key, value)
+}
+
+// MarshalJSON marshals the extensions to json
+func (v VendorExtensible) MarshalJSON() ([]byte, error) {
+ toser := make(map[string]interface{})
+ for k, v := range v.Extensions {
+ lk := strings.ToLower(k)
+ if strings.HasPrefix(lk, "x-") {
+ toser[k] = v
+ }
+ }
+ return json.Marshal(toser)
+}
+
+// UnmarshalJSON for this extensible object
+func (v *VendorExtensible) UnmarshalJSON(data []byte) error {
+ var d map[string]interface{}
+ if err := json.Unmarshal(data, &d); err != nil {
+ return err
+ }
+ for k, vv := range d {
+ lk := strings.ToLower(k)
+ if strings.HasPrefix(lk, "x-") {
+ if v.Extensions == nil {
+ v.Extensions = map[string]interface{}{}
+ }
+ v.Extensions[k] = vv
+ }
+ }
+ return nil
+}
+
+// InfoProps the properties for an info definition
+type InfoProps struct {
+ Description string `json:"description,omitempty"`
+ Title string `json:"title,omitempty"`
+ TermsOfService string `json:"termsOfService,omitempty"`
+ Contact *ContactInfo `json:"contact,omitempty"`
+ License *License `json:"license,omitempty"`
+ Version string `json:"version,omitempty"`
+}
+
+// Info object provides metadata about the API.
+// The metadata can be used by the clients if needed, and can be presented in the Swagger-UI for convenience.
+//
+// For more information: http://goo.gl/8us55a#infoObject
+type Info struct {
+ VendorExtensible
+ InfoProps
+}
+
+// JSONLookup looks up a value by the json property name
+func (i Info) JSONLookup(token string) (interface{}, error) {
+ if ex, ok := i.Extensions[token]; ok {
+ return &ex, nil
+ }
+ r, _, err := jsonpointer.GetForToken(i.InfoProps, token)
+ return r, err
+}
+
+// MarshalJSON marshals this info object to JSON
+func (i Info) MarshalJSON() ([]byte, error) {
+ b1, err := json.Marshal(i.InfoProps)
+ if err != nil {
+ return nil, err
+ }
+ b2, err := json.Marshal(i.VendorExtensible)
+ if err != nil {
+ return nil, err
+ }
+ return swag.ConcatJSON(b1, b2), nil
+}
+
+// UnmarshalJSON unmarshals this info object from JSON
+func (i *Info) UnmarshalJSON(data []byte) error {
+ if err := json.Unmarshal(data, &i.InfoProps); err != nil {
+ return err
+ }
+ return json.Unmarshal(data, &i.VendorExtensible)
+}
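Note that Extensions.Add lower-cases keys on write and the getters lower-case them again on read, so extension lookups are effectively case-insensitive. A sketch:

package main

import (
	"fmt"

	"github.com/go-openapi/spec"
)

func main() {
	info := &spec.Info{}
	info.Title = "Example API" // promoted from InfoProps
	info.AddExtension("X-Audience", "internal")

	// Stored as "x-audience"; the getter lower-cases the lookup key too.
	if v, ok := info.Extensions.GetString("X-AUDIENCE"); ok {
		fmt.Println("audience:", v)
	}
}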
diff --git a/vendor/github.com/go-openapi/spec/items.go b/vendor/github.com/go-openapi/spec/items.go
new file mode 100644
index 000000000..e2afb2133
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/items.go
@@ -0,0 +1,234 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "encoding/json"
+ "strings"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/swag"
+)
+
+const (
+ jsonRef = "$ref"
+)
+
+// SimpleSchema describes swagger simple schemas for parameters and headers
+type SimpleSchema struct {
+ Type string `json:"type,omitempty"`
+ Nullable bool `json:"nullable,omitempty"`
+ Format string `json:"format,omitempty"`
+ Items *Items `json:"items,omitempty"`
+ CollectionFormat string `json:"collectionFormat,omitempty"`
+ Default interface{} `json:"default,omitempty"`
+ Example interface{} `json:"example,omitempty"`
+}
+
+// TypeName returns the type (or format) of a simple schema
+func (s *SimpleSchema) TypeName() string {
+ if s.Format != "" {
+ return s.Format
+ }
+ return s.Type
+}
+
+// ItemsTypeName yields the type of items in a simple schema array
+func (s *SimpleSchema) ItemsTypeName() string {
+ if s.Items == nil {
+ return ""
+ }
+ return s.Items.TypeName()
+}
+
+// Items a limited subset of JSON-Schema's items object.
+// It is used by parameter definitions that are not located in "body".
+//
+// For more information: http://goo.gl/8us55a#items-object
+type Items struct {
+ Refable
+ CommonValidations
+ SimpleSchema
+ VendorExtensible
+}
+
+// NewItems creates a new instance of items
+func NewItems() *Items {
+ return &Items{}
+}
+
+// Typed a fluent builder method for the type of item
+func (i *Items) Typed(tpe, format string) *Items {
+ i.Type = tpe
+ i.Format = format
+ return i
+}
+
+// AsNullable flags this schema as nullable.
+func (i *Items) AsNullable() *Items {
+ i.Nullable = true
+ return i
+}
+
+// CollectionOf a fluent builder method for an array item
+func (i *Items) CollectionOf(items *Items, format string) *Items {
+ i.Type = jsonArray
+ i.Items = items
+ i.CollectionFormat = format
+ return i
+}
+
+// WithDefault sets the default value on this item
+func (i *Items) WithDefault(defaultValue interface{}) *Items {
+ i.Default = defaultValue
+ return i
+}
+
+// WithMaxLength sets a max length value
+func (i *Items) WithMaxLength(max int64) *Items {
+ i.MaxLength = &max
+ return i
+}
+
+// WithMinLength sets a min length value
+func (i *Items) WithMinLength(min int64) *Items {
+ i.MinLength = &min
+ return i
+}
+
+// WithPattern sets a pattern value
+func (i *Items) WithPattern(pattern string) *Items {
+ i.Pattern = pattern
+ return i
+}
+
+// WithMultipleOf sets a multiple of value
+func (i *Items) WithMultipleOf(number float64) *Items {
+ i.MultipleOf = &number
+ return i
+}
+
+// WithMaximum sets a maximum number value
+func (i *Items) WithMaximum(max float64, exclusive bool) *Items {
+ i.Maximum = &max
+ i.ExclusiveMaximum = exclusive
+ return i
+}
+
+// WithMinimum sets a minimum number value
+func (i *Items) WithMinimum(min float64, exclusive bool) *Items {
+ i.Minimum = &min
+ i.ExclusiveMinimum = exclusive
+ return i
+}
+
+// WithEnum sets the enum values (replacing any previous values)
+func (i *Items) WithEnum(values ...interface{}) *Items {
+ i.Enum = append([]interface{}{}, values...)
+ return i
+}
+
+// WithMaxItems sets the max items
+func (i *Items) WithMaxItems(size int64) *Items {
+ i.MaxItems = &size
+ return i
+}
+
+// WithMinItems sets the min items
+func (i *Items) WithMinItems(size int64) *Items {
+ i.MinItems = &size
+ return i
+}
+
+// UniqueValues dictates that this array can only have unique items
+func (i *Items) UniqueValues() *Items {
+ i.UniqueItems = true
+ return i
+}
+
+// AllowDuplicates dictates that this array can have duplicate items
+func (i *Items) AllowDuplicates() *Items {
+ i.UniqueItems = false
+ return i
+}
+
+// WithValidations is a fluent method to set Items validations
+func (i *Items) WithValidations(val CommonValidations) *Items {
+ i.SetValidations(SchemaValidations{CommonValidations: val})
+ return i
+}
+
+// UnmarshalJSON hydrates this items instance with the data from JSON
+func (i *Items) UnmarshalJSON(data []byte) error {
+ var validations CommonValidations
+ if err := json.Unmarshal(data, &validations); err != nil {
+ return err
+ }
+ var ref Refable
+ if err := json.Unmarshal(data, &ref); err != nil {
+ return err
+ }
+ var simpleSchema SimpleSchema
+ if err := json.Unmarshal(data, &simpleSchema); err != nil {
+ return err
+ }
+ var vendorExtensible VendorExtensible
+ if err := json.Unmarshal(data, &vendorExtensible); err != nil {
+ return err
+ }
+ i.Refable = ref
+ i.CommonValidations = validations
+ i.SimpleSchema = simpleSchema
+ i.VendorExtensible = vendorExtensible
+ return nil
+}
+
+// MarshalJSON converts this items object to JSON
+func (i Items) MarshalJSON() ([]byte, error) {
+ b1, err := json.Marshal(i.CommonValidations)
+ if err != nil {
+ return nil, err
+ }
+ b2, err := json.Marshal(i.SimpleSchema)
+ if err != nil {
+ return nil, err
+ }
+ b3, err := json.Marshal(i.Refable)
+ if err != nil {
+ return nil, err
+ }
+ b4, err := json.Marshal(i.VendorExtensible)
+ if err != nil {
+ return nil, err
+ }
+ return swag.ConcatJSON(b4, b3, b1, b2), nil
+}
+
+// JSONLookup looks up a value by the json property name
+func (i Items) JSONLookup(token string) (interface{}, error) {
+ if token == jsonRef {
+ return &i.Ref, nil
+ }
+
+ r, _, err := jsonpointer.GetForToken(i.CommonValidations, token)
+ if err != nil && !strings.HasPrefix(err.Error(), "object has no field") {
+ return nil, err
+ }
+ if r != nil {
+ return r, nil
+ }
+ r, _, err = jsonpointer.GetForToken(i.SimpleSchema, token)
+ return r, err
+}
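Items mirrors the Header builders for array parameters outside the body. A sketch of a CSV collection of unique, bounded strings; the bounds are illustrative:

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/go-openapi/spec"
)

func main() {
	inner := spec.NewItems().Typed("string", "").WithMinLength(1).WithMaxLength(64)
	outer := spec.NewItems().CollectionOf(inner, "csv").UniqueValues()

	b, err := json.Marshal(outer)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(b))
}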
diff --git a/vendor/github.com/go-openapi/spec/license.go b/vendor/github.com/go-openapi/spec/license.go
new file mode 100644
index 000000000..b42f80368
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/license.go
@@ -0,0 +1,56 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "encoding/json"
+
+ "github.com/go-openapi/swag"
+)
+
+// License information for the exposed API.
+//
+// For more information: http://goo.gl/8us55a#licenseObject
+type License struct {
+ LicenseProps
+ VendorExtensible
+}
+
+// LicenseProps holds the properties of a License object
+type LicenseProps struct {
+ Name string `json:"name,omitempty"`
+ URL string `json:"url,omitempty"`
+}
+
+// UnmarshalJSON hydrates License from json
+func (l *License) UnmarshalJSON(data []byte) error {
+ if err := json.Unmarshal(data, &l.LicenseProps); err != nil {
+ return err
+ }
+ return json.Unmarshal(data, &l.VendorExtensible)
+}
+
+// MarshalJSON produces License as json
+func (l License) MarshalJSON() ([]byte, error) {
+ b1, err := json.Marshal(l.LicenseProps)
+ if err != nil {
+ return nil, err
+ }
+ b2, err := json.Marshal(l.VendorExtensible)
+ if err != nil {
+ return nil, err
+ }
+ return swag.ConcatJSON(b1, b2), nil
+}
diff --git a/vendor/github.com/go-openapi/spec/normalizer.go b/vendor/github.com/go-openapi/spec/normalizer.go
new file mode 100644
index 000000000..e8b600994
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/normalizer.go
@@ -0,0 +1,202 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "net/url"
+ "path"
+ "strings"
+)
+
+const fileScheme = "file"
+
+// normalizeURI ensures that all $ref paths used internally by the expander are canonicalized.
+//
+// NOTE(windows): there is a tolerance over the strict URI format on windows.
+//
+// The normalizer accepts relative file URLs like 'Path\File.JSON' as well as absolute file URLs like
+// 'C:\Path\file.Yaml'.
+//
+// Both are canonicalized with a "file://" scheme, slashes and a lower-cased path:
+// 'file:///c:/path/file.yaml'
+//
+// URLs can be specified with a file scheme, like in 'file:///folder/file.json' or
+// 'file:///c:\folder\File.json'.
+//
+// URLs like file://C:\folder are considered invalid (i.e. there is no host 'c:\folder') and a "repair"
+// is attempted.
+//
+// The base path argument is assumed to be canonicalized (e.g. using normalizeBase()).
+func normalizeURI(refPath, base string) string {
+ refURL, err := parseURL(refPath)
+ if err != nil {
+ specLogger.Printf("warning: invalid URI in $ref %q: %v", refPath, err)
+ refURL, refPath = repairURI(refPath)
+ }
+
+ fixWindowsURI(refURL, refPath) // noop on non-windows OS
+
+ refURL.Path = path.Clean(refURL.Path)
+ if refURL.Path == "." {
+ refURL.Path = ""
+ }
+
+ r := MustCreateRef(refURL.String())
+ if r.IsCanonical() {
+ return refURL.String()
+ }
+
+ baseURL, _ := parseURL(base)
+ if path.IsAbs(refURL.Path) {
+ baseURL.Path = refURL.Path
+ } else if refURL.Path != "" {
+ baseURL.Path = path.Join(path.Dir(baseURL.Path), refURL.Path)
+ }
+ // copying fragment from ref to base
+ baseURL.Fragment = refURL.Fragment
+
+ return baseURL.String()
+}
+
+// denormalizeRef returns the simplest notation for a normalized $ref, given the path of the original root document.
+//
+// When calling this, we assume that:
+// * $ref is a canonical URI
+// * originalRelativeBase is a canonical URI
+//
+// denormalizeRef is currently used when we rewrite a $ref after a circular $ref has been detected.
+// In this case, expansion stops and normally renders the internal canonical $ref.
+//
+// This internal $ref is eventually rebased to the original RelativeBase used for the expansion.
+//
+// There is a special case for schemas that are anchored with an "id":
+// in that case, the rebasing is performed against the id only if this is an anchor for the initial root document.
+// All other intermediate "id"'s found along the way are ignored for the purpose of rebasing.
+func denormalizeRef(ref *Ref, originalRelativeBase, id string) Ref {
+ debugLog("denormalizeRef called:\n$ref: %q\noriginal: %s\nroot ID:%s", ref.String(), originalRelativeBase, id)
+
+ if ref.String() == "" || ref.IsRoot() || ref.HasFragmentOnly {
+ // short circuit: $ref to current doc
+ return *ref
+ }
+
+ if id != "" {
+ idBaseURL, err := parseURL(id)
+ if err == nil { // if the schema id is not usable as a URI, ignore it
+ if ref, ok := rebase(ref, idBaseURL, true); ok { // rebase, but keep references to root unchanged (do not want $ref: "")
+ // $ref relative to the ID of the schema in the root document
+ return ref
+ }
+ }
+ }
+
+ originalRelativeBaseURL, _ := parseURL(originalRelativeBase)
+
+ r, _ := rebase(ref, originalRelativeBaseURL, false)
+
+ return r
+}
+
+func rebase(ref *Ref, v *url.URL, notEqual bool) (Ref, bool) {
+ var newBase url.URL
+
+ u := ref.GetURL()
+
+ if u.Scheme != v.Scheme || u.Host != v.Host {
+ return *ref, false
+ }
+
+ docPath := v.Path
+ v.Path = path.Dir(v.Path)
+
+ if v.Path == "." {
+ v.Path = ""
+ } else if !strings.HasSuffix(v.Path, "/") {
+ v.Path += "/"
+ }
+
+ newBase.Fragment = u.Fragment
+
+ if strings.HasPrefix(u.Path, docPath) {
+ newBase.Path = strings.TrimPrefix(u.Path, docPath)
+ } else {
+ newBase.Path = strings.TrimPrefix(u.Path, v.Path)
+ }
+
+ if notEqual && newBase.Path == "" && newBase.Fragment == "" {
+ // do not want rebasing to end up in an empty $ref
+ return *ref, false
+ }
+
+ if path.IsAbs(newBase.Path) {
+ // whenever we end up with an absolute path, specify the scheme and host
+ newBase.Scheme = v.Scheme
+ newBase.Host = v.Host
+ }
+
+ return MustCreateRef(newBase.String()), true
+}
+
+// normalizeRef canonicalizes a Ref, using a canonical relativeBase as its absolute anchor
+func normalizeRef(ref *Ref, relativeBase string) *Ref {
+ r := MustCreateRef(normalizeURI(ref.String(), relativeBase))
+ return &r
+}
+
+// normalizeBase performs a normalization of the input base path.
+//
+// This always yields a canonical URI (absolute), usable for the document cache.
+//
+// It ensures that all further internal work on basePath may safely assume
+// a non-empty, cross-platform, canonical URI (i.e. absolute).
+//
+// This normalization tolerates windows paths (e.g. C:\x\y\File.dat) and transforms
+// them into a file:// URL with a lower-cased drive letter and path.
+//
+// See also: https://en.wikipedia.org/wiki/File_URI_scheme
+func normalizeBase(in string) string {
+ u, err := parseURL(in)
+ if err != nil {
+ specLogger.Printf("warning: invalid URI in RelativeBase %q: %v", in, err)
+ u, in = repairURI(in)
+ }
+
+ u.Fragment = "" // any fragment in the base is irrelevant
+
+ fixWindowsURI(u, in) // noop on non-windows OS
+
+ u.Path = path.Clean(u.Path)
+ if u.Path == "." { // empty after Clean()
+ u.Path = ""
+ }
+
+ if u.Scheme != "" {
+ if path.IsAbs(u.Path) || u.Scheme != fileScheme {
+ // this is absolute or explicitly not a local file: we're good
+ return u.String()
+ }
+ }
+
+ // no scheme or file scheme with relative path: assume file and make it absolute
+ // enforce scheme file://... with absolute path.
+ //
+ // If the input path is relative, we anchor the path to the current working directory.
+ // NOTE: we may end up with a host component. Leave it unchanged: e.g. file://host/folder/file.json
+
+ u.Scheme = fileScheme
+ u.Path = absPath(u.Path) // platform-dependent
+ u.RawQuery = "" // any query component is irrelevant for a base
+ return u.String()
+}
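+
+// As a sketch of the expected behavior (assuming a current working directory
+// of /home/user, a hypothetical path):
+//
+//  normalizeBase("spec/api.yaml")    // => "file:///home/user/spec/api.yaml"
+//  normalizeBase("https://host/doc") // => "https://host/doc" (already canonical)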
diff --git a/vendor/github.com/go-openapi/spec/normalizer_nonwindows.go b/vendor/github.com/go-openapi/spec/normalizer_nonwindows.go
new file mode 100644
index 000000000..2df072315
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/normalizer_nonwindows.go
@@ -0,0 +1,44 @@
+//go:build !windows
+// +build !windows
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "net/url"
+ "path/filepath"
+)
+
+// absPath makes a file path absolute and compatible with a URI path component.
+//
+// The parameter must be a path, not a URI.
+func absPath(in string) string {
+ anchored, err := filepath.Abs(in)
+ if err != nil {
+ specLogger.Printf("warning: could not resolve current working directory: %v", err)
+ return in
+ }
+ return anchored
+}
+
+func repairURI(in string) (*url.URL, string) {
+ u, _ := parseURL("")
+ debugLog("repaired URI: original: %q, repaired: %q", in, "")
+ return u, ""
+}
+
+func fixWindowsURI(u *url.URL, in string) {
+}
diff --git a/vendor/github.com/go-openapi/spec/normalizer_windows.go b/vendor/github.com/go-openapi/spec/normalizer_windows.go
new file mode 100644
index 000000000..a66c532db
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/normalizer_windows.go
@@ -0,0 +1,154 @@
+//go:build windows
+// +build windows
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "net/url"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+)
+
+// absPath makes a file path absolute and compatible with a URI path component.
+//
+// The parameter must be a path, not a URI.
+func absPath(in string) string {
+ // NOTE(windows): filepath.Abs exhibits a special behavior on windows for empty paths.
+ // See https://github.com/golang/go/issues/24441
+ if in == "" {
+ in = "."
+ }
+
+ anchored, err := filepath.Abs(in)
+ if err != nil {
+ specLogger.Printf("warning: could not resolve current working directory: %v", err)
+ return in
+ }
+
+ pth := strings.ReplaceAll(strings.ToLower(anchored), `\`, `/`)
+ if !strings.HasPrefix(pth, "/") {
+ pth = "/" + pth
+ }
+
+ return path.Clean(pth)
+}
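+
+// For example (illustrative only; filepath.Abs is platform- and cwd-dependent,
+// so an absolute input is shown here):
+//
+//  absPath(`C:\Work\File.yaml`) // => "/c:/work/file.yaml"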
+
+// repairURI tolerates invalid file URIs with common typos
+// such as 'file://E:\folder\file', that break the regular URL parser.
+//
+// Adopting the same defaults as on unix (e.g. returning an empty path) would
+// produce a counter-intuitive result for that case (e.g. E:\folder\file would
+// eventually resolve to the current directory). The repair detects the missing "/".
+//
+// Note that this only works for the file scheme.
+func repairURI(in string) (*url.URL, string) {
+ const prefix = fileScheme + "://"
+ if !strings.HasPrefix(in, prefix) {
+ // giving up: resolve to empty path
+ u, _ := parseURL("")
+
+ return u, ""
+ }
+
+ // attempt the repair, stripping the scheme should be sufficient
+ u, _ := parseURL(strings.TrimPrefix(in, prefix))
+ debugLog("repaired URI: original: %q, repaired: %q", in, u.String())
+
+ return u, u.String()
+}
+
+// fixWindowsURI tolerates an absolute file path on windows such as C:\Base\File.yaml or \\host\share\Base\File.yaml
+// and makes it a canonical URI: file:///c:/base/file.yaml
+//
+// Catch 22 notes for Windows:
+//
+// * There may be a drive letter on windows (it is lower-cased)
+// * There may be a share UNC, e.g. \\server\folder\data.xml
+// * Paths are case insensitive
+// * Paths may already contain slashes
+// * Paths must be slashed
+//
+// NOTE: there is no escaping. "/" is a valid separator, just like "\".
+// We don't use ToSlash() (which escapes everything) because windows now also
+// tolerates the use of "/". Hence, both C:\File.yaml and C:/File.yaml will work.
+func fixWindowsURI(u *url.URL, in string) {
+ drive := filepath.VolumeName(in)
+
+ if len(drive) > 0 {
+ if len(u.Scheme) == 1 && strings.EqualFold(u.Scheme, drive[:1]) { // a path with a drive letter
+ u.Scheme = fileScheme
+ u.Host = ""
+ u.Path = strings.Join([]string{drive, u.Opaque, u.Path}, `/`) // reconstruct the full path component (no fragment, no query)
+ } else if u.Host == "" && strings.HasPrefix(u.Path, drive) { // a path with a \\host volume
+ // NOTE: the special host@port syntax for UNC is not supported (yet)
+ u.Scheme = fileScheme
+
+ // this is a modified version of filepath.Dir() to apply on the VolumeName itself
+ i := len(drive) - 1
+ for i >= 0 && !os.IsPathSeparator(drive[i]) {
+ i--
+ }
+ host := drive[:i] // \\host\share => host
+
+ u.Path = strings.TrimPrefix(u.Path, host)
+ u.Host = strings.TrimPrefix(host, `\\`)
+ }
+
+ u.Opaque = ""
+ u.Path = strings.ReplaceAll(strings.ToLower(u.Path), `\`, `/`)
+
+ // ensure we form an absolute path
+ if !strings.HasPrefix(u.Path, "/") {
+ u.Path = "/" + u.Path
+ }
+
+ u.Path = path.Clean(u.Path)
+
+ return
+ }
+
+ if u.Scheme == fileScheme {
+ // Handle dodgy cases for file://{...} URIs on windows.
+ // A canonical URI should always be followed by an absolute path.
+ //
+ // Examples:
+ // * file:///folder/file => valid, unchanged
+ // * file:///c:\folder\file => slashed
+ // * file:///./folder/file => valid, cleaned to remove the dot
+ // * file:///.\folder\file => remapped to cwd
+ // * file:///. => dodgy, remapped to / (consistent with the behavior on unix)
+ // * file:///.. => dodgy, remapped to / (consistent with the behavior on unix)
+ if (!path.IsAbs(u.Path) && !filepath.IsAbs(u.Path)) || (strings.HasPrefix(u.Path, `/.`) && strings.Contains(u.Path, `\`)) {
+ // ensure we form an absolute path
+ u.Path, _ = filepath.Abs(strings.TrimLeft(u.Path, `/`))
+ if !strings.HasPrefix(u.Path, "/") {
+ u.Path = "/" + u.Path
+ }
+ }
+ u.Path = strings.ToLower(u.Path)
+ }
+
+ // NOTE: lower case normalization does not propagate to inner resources,
+ // generated when rebasing: when joining a relative URI with a file to an absolute base,
+ // only the base is currently lower-cased.
+ //
+ // For now, we assume this is good enough for most use cases
+ // and try not to generate too many differences
+ // between the output produced on different platforms.
+ u.Path = path.Clean(strings.ReplaceAll(u.Path, `\`, `/`))
+}
diff --git a/vendor/github.com/go-openapi/spec/operation.go b/vendor/github.com/go-openapi/spec/operation.go
new file mode 100644
index 000000000..995ce6acb
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/operation.go
@@ -0,0 +1,397 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "bytes"
+ "encoding/gob"
+ "encoding/json"
+ "sort"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/swag"
+)
+
+func init() {
+ gob.Register(map[string]interface{}{})
+ gob.Register([]interface{}{})
+}
+
+// OperationProps describes an operation
+//
+// NOTES:
+// - schemes, when present must be from [http, https, ws, wss]: see validate
+// - Security is handled as a special case: see MarshalJSON function
+type OperationProps struct {
+ Description string `json:"description,omitempty"`
+ Consumes []string `json:"consumes,omitempty"`
+ Produces []string `json:"produces,omitempty"`
+ Schemes []string `json:"schemes,omitempty"`
+ Tags []string `json:"tags,omitempty"`
+ Summary string `json:"summary,omitempty"`
+ ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"`
+ ID string `json:"operationId,omitempty"`
+ Deprecated bool `json:"deprecated,omitempty"`
+ Security []map[string][]string `json:"security,omitempty"`
+ Parameters []Parameter `json:"parameters,omitempty"`
+ Responses *Responses `json:"responses,omitempty"`
+}
+
+// MarshalJSON takes care of serializing operation properties to JSON
+//
+// We use a custom marshaller here to handle a special case related to
+// the Security field: we need to preserve a zero-length slice
+// while omitting the field when the value is nil/unset.
+func (op OperationProps) MarshalJSON() ([]byte, error) {
+ type Alias OperationProps
+ if op.Security == nil {
+ return json.Marshal(&struct {
+ Security []map[string][]string `json:"security,omitempty"`
+ *Alias
+ }{
+ Security: op.Security,
+ Alias: (*Alias)(&op),
+ })
+ }
+ return json.Marshal(&struct {
+ Security []map[string][]string `json:"security"`
+ *Alias
+ }{
+ Security: op.Security,
+ Alias: (*Alias)(&op),
+ })
+}
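+
+// To illustrate the nil vs. empty distinction (a sketch):
+//
+//  b, _ := json.Marshal(OperationProps{ID: "op"})
+//  // => {"operationId":"op"} -- nil Security is omitted
+//  b, _ = json.Marshal(OperationProps{ID: "op", Security: []map[string][]string{}})
+//  // => {"security":[],"operationId":"op"} -- empty Security is preserved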
+
+// Operation describes a single API operation on a path.
+//
+// For more information: http://goo.gl/8us55a#operationObject
+type Operation struct {
+ VendorExtensible
+ OperationProps
+}
+
+// SuccessResponse gets a success response model
+func (o *Operation) SuccessResponse() (*Response, int, bool) {
+ if o.Responses == nil {
+ return nil, 0, false
+ }
+
+ responseCodes := make([]int, 0, len(o.Responses.StatusCodeResponses))
+ for k := range o.Responses.StatusCodeResponses {
+ if k >= 200 && k < 300 {
+ responseCodes = append(responseCodes, k)
+ }
+ }
+ if len(responseCodes) > 0 {
+ sort.Ints(responseCodes)
+ v := o.Responses.StatusCodeResponses[responseCodes[0]]
+ return &v, responseCodes[0], true
+ }
+
+ return o.Responses.Default, 0, false
+}
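+
+// For example (a sketch): if responses are declared for codes 201 and 200,
+// the 200 response is returned, since the lowest 2xx code wins:
+//
+//  resp, code, ok := op.SuccessResponse() // code == 200, ok == true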
+
+// JSONLookup look up a value by the json property name
+func (o Operation) JSONLookup(token string) (interface{}, error) {
+ if ex, ok := o.Extensions[token]; ok {
+ return &ex, nil
+ }
+ r, _, err := jsonpointer.GetForToken(o.OperationProps, token)
+ return r, err
+}
+
+// UnmarshalJSON hydrates this items instance with the data from JSON
+func (o *Operation) UnmarshalJSON(data []byte) error {
+ if err := json.Unmarshal(data, &o.OperationProps); err != nil {
+ return err
+ }
+ return json.Unmarshal(data, &o.VendorExtensible)
+}
+
+// MarshalJSON converts this items object to JSON
+func (o Operation) MarshalJSON() ([]byte, error) {
+ b1, err := json.Marshal(o.OperationProps)
+ if err != nil {
+ return nil, err
+ }
+ b2, err := json.Marshal(o.VendorExtensible)
+ if err != nil {
+ return nil, err
+ }
+ concated := swag.ConcatJSON(b1, b2)
+ return concated, nil
+}
+
+// NewOperation creates a new operation instance.
+// It expects an ID as a parameter, but passing an empty ID is also valid.
+func NewOperation(id string) *Operation {
+ op := new(Operation)
+ op.ID = id
+ return op
+}
+
+// WithID sets the ID property on this operation, allows for chaining.
+func (o *Operation) WithID(id string) *Operation {
+ o.ID = id
+ return o
+}
+
+// WithDescription sets the description on this operation, allows for chaining
+func (o *Operation) WithDescription(description string) *Operation {
+ o.Description = description
+ return o
+}
+
+// WithSummary sets the summary on this operation, allows for chaining
+func (o *Operation) WithSummary(summary string) *Operation {
+ o.Summary = summary
+ return o
+}
+
+// WithExternalDocs sets/removes the external docs for/from this operation.
+// When you pass empty strings for both params, the external docs are removed.
+// When you pass a non-empty value for either one, both values are set on the external docs object.
+// So when you pass a non-empty description, you should also pass the url, and vice versa.
+func (o *Operation) WithExternalDocs(description, url string) *Operation {
+ if description == "" && url == "" {
+ o.ExternalDocs = nil
+ return o
+ }
+
+ if o.ExternalDocs == nil {
+ o.ExternalDocs = &ExternalDocumentation{}
+ }
+ o.ExternalDocs.Description = description
+ o.ExternalDocs.URL = url
+ return o
+}
+
+// Deprecate marks the operation as deprecated
+func (o *Operation) Deprecate() *Operation {
+ o.Deprecated = true
+ return o
+}
+
+// Undeprecate marks the operation as not deprecated
+func (o *Operation) Undeprecate() *Operation {
+ o.Deprecated = false
+ return o
+}
+
+// WithConsumes adds media types for incoming body values
+func (o *Operation) WithConsumes(mediaTypes ...string) *Operation {
+ o.Consumes = append(o.Consumes, mediaTypes...)
+ return o
+}
+
+// WithProduces adds media types for outgoing body values
+func (o *Operation) WithProduces(mediaTypes ...string) *Operation {
+ o.Produces = append(o.Produces, mediaTypes...)
+ return o
+}
+
+// WithTags adds tags for this operation
+func (o *Operation) WithTags(tags ...string) *Operation {
+ o.Tags = append(o.Tags, tags...)
+ return o
+}
+
+// AddParam adds a parameter to this operation; when a parameter for that location
+// and with that name already exists, it is replaced
+func (o *Operation) AddParam(param *Parameter) *Operation {
+ if param == nil {
+ return o
+ }
+
+ for i, p := range o.Parameters {
+ if p.Name == param.Name && p.In == param.In {
+ params := append(o.Parameters[:i], *param)
+ params = append(params, o.Parameters[i+1:]...)
+ o.Parameters = params
+ return o
+ }
+ }
+
+ o.Parameters = append(o.Parameters, *param)
+ return o
+}
+
+// RemoveParam removes a parameter from the operation
+func (o *Operation) RemoveParam(name, in string) *Operation {
+ for i, p := range o.Parameters {
+ if p.Name == name && p.In == in {
+ o.Parameters = append(o.Parameters[:i], o.Parameters[i+1:]...)
+ return o
+ }
+ }
+ return o
+}
+
+// SecuredWith adds a security scope to this operation.
+func (o *Operation) SecuredWith(name string, scopes ...string) *Operation {
+ o.Security = append(o.Security, map[string][]string{name: scopes})
+ return o
+}
+
+// WithDefaultResponse adds a default response to the operation.
+// Passing a nil value will remove the response
+func (o *Operation) WithDefaultResponse(response *Response) *Operation {
+ return o.RespondsWith(0, response)
+}
+
+// RespondsWith adds a status code response to the operation.
+// When the code is 0 the value of the response will be used as default response value.
+// When the value of the response is nil it will be removed from the operation
+func (o *Operation) RespondsWith(code int, response *Response) *Operation {
+ if o.Responses == nil {
+ o.Responses = new(Responses)
+ }
+ if code == 0 {
+ o.Responses.Default = response
+ return o
+ }
+ if response == nil {
+ delete(o.Responses.StatusCodeResponses, code)
+ return o
+ }
+ if o.Responses.StatusCodeResponses == nil {
+ o.Responses.StatusCodeResponses = make(map[int]Response)
+ }
+ o.Responses.StatusCodeResponses[code] = *response
+ return o
+}
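+
+// A typical fluent construction might look like this (an illustrative sketch;
+// "listPets" and the descriptions are hypothetical, NewResponse lives in response.go):
+//
+//  op := NewOperation("listPets").
+//      WithTags("pets").
+//      WithProduces("application/json").
+//      RespondsWith(200, NewResponse().WithDescription("OK")).
+//      WithDefaultResponse(NewResponse().WithDescription("unexpected error"))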
+
+type opsAlias OperationProps
+
+type gobAlias struct {
+ Security []map[string]struct {
+ List []string
+ Pad bool
+ }
+ Alias *opsAlias
+ SecurityIsEmpty bool
+}
+
+// GobEncode provides a safe gob encoder for Operation, including empty security requirements
+func (o Operation) GobEncode() ([]byte, error) {
+ raw := struct {
+ Ext VendorExtensible
+ Props OperationProps
+ }{
+ Ext: o.VendorExtensible,
+ Props: o.OperationProps,
+ }
+ var b bytes.Buffer
+ err := gob.NewEncoder(&b).Encode(raw)
+ return b.Bytes(), err
+}
+
+// GobDecode provides a safe gob decoder for Operation, including empty security requirements
+func (o *Operation) GobDecode(b []byte) error {
+ var raw struct {
+ Ext VendorExtensible
+ Props OperationProps
+ }
+
+ buf := bytes.NewBuffer(b)
+ err := gob.NewDecoder(buf).Decode(&raw)
+ if err != nil {
+ return err
+ }
+ o.VendorExtensible = raw.Ext
+ o.OperationProps = raw.Props
+ return nil
+}
+
+// GobEncode provides a safe gob encoder for Operation, including empty security requirements
+func (op OperationProps) GobEncode() ([]byte, error) {
+ raw := gobAlias{
+ Alias: (*opsAlias)(&op),
+ }
+
+ var b bytes.Buffer
+ if op.Security == nil {
+ // nil security requirement
+ err := gob.NewEncoder(&b).Encode(raw)
+ return b.Bytes(), err
+ }
+
+ if len(op.Security) == 0 {
+ // empty, but non-nil security requirement
+ raw.SecurityIsEmpty = true
+ raw.Alias.Security = nil
+ err := gob.NewEncoder(&b).Encode(raw)
+ return b.Bytes(), err
+ }
+
+ raw.Security = make([]map[string]struct {
+ List []string
+ Pad bool
+ }, 0, len(op.Security))
+ for _, req := range op.Security {
+ v := make(map[string]struct {
+ List []string
+ Pad bool
+ }, len(req))
+ for k, val := range req {
+ v[k] = struct {
+ List []string
+ Pad bool
+ }{
+ List: val,
+ }
+ }
+ raw.Security = append(raw.Security, v)
+ }
+
+ err := gob.NewEncoder(&b).Encode(raw)
+ return b.Bytes(), err
+}
+
+// GobDecode provides a safe gob decoder for Operation, including empty security requirements
+func (op *OperationProps) GobDecode(b []byte) error {
+ var raw gobAlias
+
+ buf := bytes.NewBuffer(b)
+ err := gob.NewDecoder(buf).Decode(&raw)
+ if err != nil {
+ return err
+ }
+ if raw.Alias == nil {
+ return nil
+ }
+
+ switch {
+ case raw.SecurityIsEmpty:
+ // empty, but non-nil security requirement
+ raw.Alias.Security = []map[string][]string{}
+ case len(raw.Alias.Security) == 0:
+ // nil security requirement
+ raw.Alias.Security = nil
+ default:
+ raw.Alias.Security = make([]map[string][]string, 0, len(raw.Security))
+ for _, req := range raw.Security {
+ v := make(map[string][]string, len(req))
+ for k, val := range req {
+ v[k] = make([]string, 0, len(val.List))
+ v[k] = append(v[k], val.List...)
+ }
+ raw.Alias.Security = append(raw.Alias.Security, v)
+ }
+ }
+
+ *op = *(*OperationProps)(raw.Alias)
+ return nil
+}
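+
+// The point of the alias types above is that a gob round-trip preserves the
+// nil vs. empty distinction for Security. As a sketch:
+//
+//  var buf bytes.Buffer
+//  src := OperationProps{Security: []map[string][]string{}}
+//  _ = gob.NewEncoder(&buf).Encode(src)
+//  var dst OperationProps
+//  _ = gob.NewDecoder(&buf).Decode(&dst)
+//  // dst.Security is non-nil and empty, just like src.Security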
diff --git a/vendor/github.com/go-openapi/spec/parameter.go b/vendor/github.com/go-openapi/spec/parameter.go
new file mode 100644
index 000000000..2b2b89b67
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/parameter.go
@@ -0,0 +1,326 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "encoding/json"
+ "strings"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/swag"
+)
+
+// QueryParam creates a query parameter
+func QueryParam(name string) *Parameter {
+ return &Parameter{ParamProps: ParamProps{Name: name, In: "query"}}
+}
+
+// HeaderParam creates a header parameter, this is always required by default
+func HeaderParam(name string) *Parameter {
+ return &Parameter{ParamProps: ParamProps{Name: name, In: "header", Required: true}}
+}
+
+// PathParam creates a path parameter, this is always required
+func PathParam(name string) *Parameter {
+ return &Parameter{ParamProps: ParamProps{Name: name, In: "path", Required: true}}
+}
+
+// BodyParam creates a body parameter
+func BodyParam(name string, schema *Schema) *Parameter {
+ return &Parameter{ParamProps: ParamProps{Name: name, In: "body", Schema: schema}}
+}
+
+// FormDataParam creates a formData parameter
+func FormDataParam(name string) *Parameter {
+ return &Parameter{ParamProps: ParamProps{Name: name, In: "formData"}}
+}
+
+// FileParam creates a formData parameter with the file type
+func FileParam(name string) *Parameter {
+ return &Parameter{ParamProps: ParamProps{Name: name, In: "formData"},
+ SimpleSchema: SimpleSchema{Type: "file"}}
+}
+
+// SimpleArrayParam creates a param for a simple array (string, int, date etc)
+func SimpleArrayParam(name, tpe, fmt string) *Parameter {
+ return &Parameter{ParamProps: ParamProps{Name: name},
+ SimpleSchema: SimpleSchema{Type: jsonArray, CollectionFormat: "csv",
+ Items: &Items{SimpleSchema: SimpleSchema{Type: tpe, Format: fmt}}}}
+}
+
+// ParamRef creates a parameter that's a json reference
+func ParamRef(uri string) *Parameter {
+ p := new(Parameter)
+ p.Ref = MustCreateRef(uri)
+ return p
+}
+
+// ParamProps describes the specific attributes of an operation parameter
+//
+// NOTE:
+// - Schema is defined when "in" == "body": see validate
+// - AllowEmptyValue is allowed where "in" == "query" || "formData"
+type ParamProps struct {
+ Description string `json:"description,omitempty"`
+ Name string `json:"name,omitempty"`
+ In string `json:"in,omitempty"`
+ Required bool `json:"required,omitempty"`
+ Schema *Schema `json:"schema,omitempty"`
+ AllowEmptyValue bool `json:"allowEmptyValue,omitempty"`
+}
+
+// Parameter a unique parameter is defined by a combination of a [name](#parameterName) and [location](#parameterIn).
+//
+// There are five possible parameter types.
+// * Path - Used together with [Path Templating](#pathTemplating), where the parameter value is actually part
+// of the operation's URL. This does not include the host or base path of the API. For example, in `/items/{itemId}`,
+// the path parameter is `itemId`.
+// * Query - Parameters that are appended to the URL. For example, in `/items?id=###`, the query parameter is `id`.
+// * Header - Custom headers that are expected as part of the request.
+// * Body - The payload that's appended to the HTTP request. Since there can only be one payload, there can only be
+// _one_ body parameter. The name of the body parameter has no effect on the parameter itself and is used for
+// documentation purposes only. Since Form parameters are also in the payload, body and form parameters cannot exist
+// together for the same operation.
+// * Form - Used to describe the payload of an HTTP request when either `application/x-www-form-urlencoded` or
+// `multipart/form-data` are used as the content type of the request (in Swagger's definition,
+// the [`consumes`](#operationConsumes) property of an operation). This is the only parameter type that can be used
+// to send files, thus supporting the `file` type. Since form parameters are sent in the payload, they cannot be
+// declared together with a body parameter for the same operation. Form parameters have a different format based on
+// the content-type used (for further details, consult http://www.w3.org/TR/html401/interact/forms.html#h-17.13.4).
+// * `application/x-www-form-urlencoded` - Similar to the format of Query parameters but as a payload.
+// For example, `foo=1&bar=swagger` - both `foo` and `bar` are form parameters. This is normally used for simple
+// parameters that are being transferred.
+// * `multipart/form-data` - each parameter takes a section in the payload with an internal header.
+// For example, for the header `Content-Disposition: form-data; name="submit-name"` the name of the parameter is
+// `submit-name`. This type of form parameters is more commonly used for file transfers.
+//
+// For more information: http://goo.gl/8us55a#parameterObject
+type Parameter struct {
+ Refable
+ CommonValidations
+ SimpleSchema
+ VendorExtensible
+ ParamProps
+}
+
+// JSONLookup look up a value by the json property name
+func (p Parameter) JSONLookup(token string) (interface{}, error) {
+ if ex, ok := p.Extensions[token]; ok {
+ return &ex, nil
+ }
+ if token == jsonRef {
+ return &p.Ref, nil
+ }
+
+ r, _, err := jsonpointer.GetForToken(p.CommonValidations, token)
+ if err != nil && !strings.HasPrefix(err.Error(), "object has no field") {
+ return nil, err
+ }
+ if r != nil {
+ return r, nil
+ }
+ r, _, err = jsonpointer.GetForToken(p.SimpleSchema, token)
+ if err != nil && !strings.HasPrefix(err.Error(), "object has no field") {
+ return nil, err
+ }
+ if r != nil {
+ return r, nil
+ }
+ r, _, err = jsonpointer.GetForToken(p.ParamProps, token)
+ return r, err
+}
+
+// WithDescription a fluent builder method for the description of the parameter
+func (p *Parameter) WithDescription(description string) *Parameter {
+ p.Description = description
+ return p
+}
+
+// Named a fluent builder method to override the name of the parameter
+func (p *Parameter) Named(name string) *Parameter {
+ p.Name = name
+ return p
+}
+
+// WithLocation a fluent builder method to override the location of the parameter
+func (p *Parameter) WithLocation(in string) *Parameter {
+ p.In = in
+ return p
+}
+
+// Typed a fluent builder method for the type of the parameter value
+func (p *Parameter) Typed(tpe, format string) *Parameter {
+ p.Type = tpe
+ p.Format = format
+ return p
+}
+
+// CollectionOf a fluent builder method for an array parameter
+func (p *Parameter) CollectionOf(items *Items, format string) *Parameter {
+ p.Type = jsonArray
+ p.Items = items
+ p.CollectionFormat = format
+ return p
+}
+
+// WithDefault sets the default value on this parameter
+func (p *Parameter) WithDefault(defaultValue interface{}) *Parameter {
+ p.AsOptional() // with default implies optional
+ p.Default = defaultValue
+ return p
+}
+
+// AllowsEmptyValues flags this parameter as being ok with empty values
+func (p *Parameter) AllowsEmptyValues() *Parameter {
+ p.AllowEmptyValue = true
+ return p
+}
+
+// NoEmptyValues flags this parameter as not liking empty values
+func (p *Parameter) NoEmptyValues() *Parameter {
+ p.AllowEmptyValue = false
+ return p
+}
+
+// AsOptional flags this parameter as optional
+func (p *Parameter) AsOptional() *Parameter {
+ p.Required = false
+ return p
+}
+
+// AsRequired flags this parameter as required
+func (p *Parameter) AsRequired() *Parameter {
+ if p.Default != nil { // with a default required makes no sense
+ return p
+ }
+ p.Required = true
+ return p
+}
+
+// WithMaxLength sets a max length value
+func (p *Parameter) WithMaxLength(max int64) *Parameter {
+ p.MaxLength = &max
+ return p
+}
+
+// WithMinLength sets a min length value
+func (p *Parameter) WithMinLength(min int64) *Parameter {
+ p.MinLength = &min
+ return p
+}
+
+// WithPattern sets a pattern value
+func (p *Parameter) WithPattern(pattern string) *Parameter {
+ p.Pattern = pattern
+ return p
+}
+
+// WithMultipleOf sets a multiple of value
+func (p *Parameter) WithMultipleOf(number float64) *Parameter {
+ p.MultipleOf = &number
+ return p
+}
+
+// WithMaximum sets a maximum number value
+func (p *Parameter) WithMaximum(max float64, exclusive bool) *Parameter {
+ p.Maximum = &max
+ p.ExclusiveMaximum = exclusive
+ return p
+}
+
+// WithMinimum sets a minimum number value
+func (p *Parameter) WithMinimum(min float64, exclusive bool) *Parameter {
+ p.Minimum = &min
+ p.ExclusiveMinimum = exclusive
+ return p
+}
+
+// WithEnum sets the enum values (replacing any existing values)
+func (p *Parameter) WithEnum(values ...interface{}) *Parameter {
+ p.Enum = append([]interface{}{}, values...)
+ return p
+}
+
+// WithMaxItems sets the max items
+func (p *Parameter) WithMaxItems(size int64) *Parameter {
+ p.MaxItems = &size
+ return p
+}
+
+// WithMinItems sets the min items
+func (p *Parameter) WithMinItems(size int64) *Parameter {
+ p.MinItems = &size
+ return p
+}
+
+// UniqueValues dictates that this array can only have unique items
+func (p *Parameter) UniqueValues() *Parameter {
+ p.UniqueItems = true
+ return p
+}
+
+// AllowDuplicates flags this array as allowing duplicate items
+func (p *Parameter) AllowDuplicates() *Parameter {
+ p.UniqueItems = false
+ return p
+}
+
+// WithValidations is a fluent method to set parameter validations
+func (p *Parameter) WithValidations(val CommonValidations) *Parameter {
+ p.SetValidations(SchemaValidations{CommonValidations: val})
+ return p
+}
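+
+// The builders above chain naturally. For example (an illustrative sketch,
+// with hypothetical names and bounds):
+//
+//  limit := QueryParam("limit").
+//      Typed("integer", "int32").
+//      WithDefault(20).        // implies optional
+//      WithMinimum(1, false).  // false: the bound is inclusive
+//      WithMaximum(100, false)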
+
+// UnmarshalJSON hydrates this items instance with the data from JSON
+func (p *Parameter) UnmarshalJSON(data []byte) error {
+ if err := json.Unmarshal(data, &p.CommonValidations); err != nil {
+ return err
+ }
+ if err := json.Unmarshal(data, &p.Refable); err != nil {
+ return err
+ }
+ if err := json.Unmarshal(data, &p.SimpleSchema); err != nil {
+ return err
+ }
+ if err := json.Unmarshal(data, &p.VendorExtensible); err != nil {
+ return err
+ }
+ return json.Unmarshal(data, &p.ParamProps)
+}
+
+// MarshalJSON converts this items object to JSON
+func (p Parameter) MarshalJSON() ([]byte, error) {
+ b1, err := json.Marshal(p.CommonValidations)
+ if err != nil {
+ return nil, err
+ }
+ b2, err := json.Marshal(p.SimpleSchema)
+ if err != nil {
+ return nil, err
+ }
+ b3, err := json.Marshal(p.Refable)
+ if err != nil {
+ return nil, err
+ }
+ b4, err := json.Marshal(p.VendorExtensible)
+ if err != nil {
+ return nil, err
+ }
+ b5, err := json.Marshal(p.ParamProps)
+ if err != nil {
+ return nil, err
+ }
+ return swag.ConcatJSON(b3, b1, b2, b4, b5), nil
+}
diff --git a/vendor/github.com/go-openapi/spec/path_item.go b/vendor/github.com/go-openapi/spec/path_item.go
new file mode 100644
index 000000000..68fc8e901
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/path_item.go
@@ -0,0 +1,87 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "encoding/json"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/swag"
+)
+
+// PathItemProps the path item specific properties
+type PathItemProps struct {
+ Get *Operation `json:"get,omitempty"`
+ Put *Operation `json:"put,omitempty"`
+ Post *Operation `json:"post,omitempty"`
+ Delete *Operation `json:"delete,omitempty"`
+ Options *Operation `json:"options,omitempty"`
+ Head *Operation `json:"head,omitempty"`
+ Patch *Operation `json:"patch,omitempty"`
+ Parameters []Parameter `json:"parameters,omitempty"`
+}
+
+// PathItem describes the operations available on a single path.
+// A Path Item may be empty, due to [ACL constraints](http://goo.gl/8us55a#securityFiltering).
+// The path itself is still exposed to the documentation viewer, but viewers will
+// not know which operations and parameters are available.
+//
+// For more information: http://goo.gl/8us55a#pathItemObject
+type PathItem struct {
+ Refable
+ VendorExtensible
+ PathItemProps
+}
+
+// JSONLookup look up a value by the json property name
+func (p PathItem) JSONLookup(token string) (interface{}, error) {
+ if ex, ok := p.Extensions[token]; ok {
+ return &ex, nil
+ }
+ if token == jsonRef {
+ return &p.Ref, nil
+ }
+ r, _, err := jsonpointer.GetForToken(p.PathItemProps, token)
+ return r, err
+}
+
+// UnmarshalJSON hydrates this items instance with the data from JSON
+func (p *PathItem) UnmarshalJSON(data []byte) error {
+ if err := json.Unmarshal(data, &p.Refable); err != nil {
+ return err
+ }
+ if err := json.Unmarshal(data, &p.VendorExtensible); err != nil {
+ return err
+ }
+ return json.Unmarshal(data, &p.PathItemProps)
+}
+
+// MarshalJSON converts this items object to JSON
+func (p PathItem) MarshalJSON() ([]byte, error) {
+ b3, err := json.Marshal(p.Refable)
+ if err != nil {
+ return nil, err
+ }
+ b4, err := json.Marshal(p.VendorExtensible)
+ if err != nil {
+ return nil, err
+ }
+ b5, err := json.Marshal(p.PathItemProps)
+ if err != nil {
+ return nil, err
+ }
+ concated := swag.ConcatJSON(b3, b4, b5)
+ return concated, nil
+}
diff --git a/vendor/github.com/go-openapi/spec/paths.go b/vendor/github.com/go-openapi/spec/paths.go
new file mode 100644
index 000000000..9dc82a290
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/paths.go
@@ -0,0 +1,97 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "encoding/json"
+ "fmt"
+ "strings"
+
+ "github.com/go-openapi/swag"
+)
+
+// Paths holds the relative paths to the individual endpoints.
+// The path is appended to the [`basePath`](http://goo.gl/8us55a#swaggerBasePath) in order
+// to construct the full URL.
+// The Paths may be empty, due to [ACL constraints](http://goo.gl/8us55a#securityFiltering).
+//
+// For more information: http://goo.gl/8us55a#pathsObject
+type Paths struct {
+ VendorExtensible
+ Paths map[string]PathItem `json:"-"` // custom serializer to flatten this, each entry must start with "/"
+}
+
+// JSONLookup look up a value by the json property name
+func (p Paths) JSONLookup(token string) (interface{}, error) {
+ if pi, ok := p.Paths[token]; ok {
+ return &pi, nil
+ }
+ if ex, ok := p.Extensions[token]; ok {
+ return &ex, nil
+ }
+ return nil, fmt.Errorf("object has no field %q", token)
+}
+
+// UnmarshalJSON hydrates this items instance with the data from JSON
+func (p *Paths) UnmarshalJSON(data []byte) error {
+ var res map[string]json.RawMessage
+ if err := json.Unmarshal(data, &res); err != nil {
+ return err
+ }
+ for k, v := range res {
+ if strings.HasPrefix(strings.ToLower(k), "x-") {
+ if p.Extensions == nil {
+ p.Extensions = make(map[string]interface{})
+ }
+ var d interface{}
+ if err := json.Unmarshal(v, &d); err != nil {
+ return err
+ }
+ p.Extensions[k] = d
+ }
+ if strings.HasPrefix(k, "/") {
+ if p.Paths == nil {
+ p.Paths = make(map[string]PathItem)
+ }
+ var pi PathItem
+ if err := json.Unmarshal(v, &pi); err != nil {
+ return err
+ }
+ p.Paths[k] = pi
+ }
+ }
+ return nil
+}
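+
+// For illustration (a sketch): only keys starting with "/" become path items
+// and only "x-" keys become extensions; any other key is silently dropped.
+//
+//  {"/pets": {"get": {}}, "x-internal": true, "other": 1}
+//  // => Paths["/pets"] and Extensions["x-internal"] are set; "other" is discarded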
+
+// MarshalJSON converts this items object to JSON
+func (p Paths) MarshalJSON() ([]byte, error) {
+ b1, err := json.Marshal(p.VendorExtensible)
+ if err != nil {
+ return nil, err
+ }
+
+ pths := make(map[string]PathItem)
+ for k, v := range p.Paths {
+ if strings.HasPrefix(k, "/") {
+ pths[k] = v
+ }
+ }
+ b2, err := json.Marshal(pths)
+ if err != nil {
+ return nil, err
+ }
+ concated := swag.ConcatJSON(b1, b2)
+ return concated, nil
+}
diff --git a/vendor/github.com/go-openapi/spec/properties.go b/vendor/github.com/go-openapi/spec/properties.go
new file mode 100644
index 000000000..91d2435f0
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/properties.go
@@ -0,0 +1,91 @@
+package spec
+
+import (
+ "bytes"
+ "encoding/json"
+ "reflect"
+ "sort"
+)
+
+// OrderSchemaItem holds a named schema (e.g. from a property of an object)
+type OrderSchemaItem struct {
+ Name string
+ Schema
+}
+
+// OrderSchemaItems is a sortable slice of named schemas.
+// The ordering is defined by the x-order schema extension.
+type OrderSchemaItems []OrderSchemaItem
+
+// MarshalJSON produces a json object with keys defined by the name schemas
+// of the OrderSchemaItems slice, keeping the original order of the slice.
+func (items OrderSchemaItems) MarshalJSON() ([]byte, error) {
+ buf := bytes.NewBuffer(nil)
+ buf.WriteString("{")
+ for i := range items {
+ if i > 0 {
+ buf.WriteString(",")
+ }
+ buf.WriteString("\"")
+ buf.WriteString(items[i].Name)
+ buf.WriteString("\":")
+ bs, err := json.Marshal(&items[i].Schema)
+ if err != nil {
+ return nil, err
+ }
+ buf.Write(bs)
+ }
+ buf.WriteString("}")
+ return buf.Bytes(), nil
+}
+
+func (items OrderSchemaItems) Len() int { return len(items) }
+func (items OrderSchemaItems) Swap(i, j int) { items[i], items[j] = items[j], items[i] }
+func (items OrderSchemaItems) Less(i, j int) (ret bool) {
+ ii, oki := items[i].Extensions.GetInt("x-order")
+ ij, okj := items[j].Extensions.GetInt("x-order")
+ if oki {
+ if okj {
+ // both items carry an x-order extension: compare those values.
+ // The nested recovers are defensive: if the comparison panics,
+ // fall back to a string comparison, and ultimately to the names.
+ defer func() {
+ if err := recover(); err != nil {
+ defer func() {
+ if err = recover(); err != nil {
+ ret = items[i].Name < items[j].Name
+ }
+ }()
+ ret = reflect.ValueOf(ii).String() < reflect.ValueOf(ij).String()
+ }
+ }()
+ return ii < ij
+ }
+ return true
+ } else if okj {
+ return false
+ }
+ return items[i].Name < items[j].Name
+}
+
+// SchemaProperties is a map representing the properties of a Schema object.
+// It knows how to transform its keys into an ordered slice.
+type SchemaProperties map[string]Schema
+
+// ToOrderedSchemaItems transforms the map of properties into a sortable slice
+func (properties SchemaProperties) ToOrderedSchemaItems() OrderSchemaItems {
+ items := make(OrderSchemaItems, 0, len(properties))
+ for k, v := range properties {
+ items = append(items, OrderSchemaItem{
+ Name: k,
+ Schema: v,
+ })
+ }
+ sort.Sort(items)
+ return items
+}
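+
+// For example (a sketch, assuming int-valued x-order extensions are picked up
+// by Extensions.GetInt): given properties "a" and "b" carrying x-order 2 and 1
+// respectively, plus "c" without any, explicitly ordered items come first and
+// the rest are sorted by name:
+//
+//  items := props.ToOrderedSchemaItems() // names come out as "b", "a", "c"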
+
+// MarshalJSON produces properties as json, keeping their order.
+func (properties SchemaProperties) MarshalJSON() ([]byte, error) {
+ if properties == nil {
+ return []byte("null"), nil
+ }
+ return json.Marshal(properties.ToOrderedSchemaItems())
+}
diff --git a/vendor/github.com/go-openapi/spec/ref.go b/vendor/github.com/go-openapi/spec/ref.go
new file mode 100644
index 000000000..b0ef9bd9c
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/ref.go
@@ -0,0 +1,193 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "bytes"
+ "encoding/gob"
+ "encoding/json"
+ "net/http"
+ "os"
+ "path/filepath"
+
+ "github.com/go-openapi/jsonreference"
+)
+
+// Refable is a struct for things that accept a $ref property
+type Refable struct {
+ Ref Ref
+}
+
+// MarshalJSON marshals the ref to json
+func (r Refable) MarshalJSON() ([]byte, error) {
+ return r.Ref.MarshalJSON()
+}
+
+// UnmarshalJSON unmarshals the ref from json
+func (r *Refable) UnmarshalJSON(d []byte) error {
+ return json.Unmarshal(d, &r.Ref)
+}
+
+// Ref represents a json reference that is potentially resolved
+type Ref struct {
+ jsonreference.Ref
+}
+
+// RemoteURI gets the remote uri part of the ref
+func (r *Ref) RemoteURI() string {
+ if r.String() == "" {
+ return ""
+ }
+
+ u := *r.GetURL()
+ u.Fragment = ""
+ return u.String()
+}
+
+// IsValidURI returns true when the url the ref points to can be found
+func (r *Ref) IsValidURI(basepaths ...string) bool {
+ if r.String() == "" {
+ return true
+ }
+
+ v := r.RemoteURI()
+ if v == "" {
+ return true
+ }
+
+ if r.HasFullURL {
+ //nolint:noctx,gosec
+ rr, err := http.Get(v)
+ if err != nil {
+ return false
+ }
+ defer rr.Body.Close()
+
+ return rr.StatusCode/100 == 2
+ }
+
+ if !(r.HasFileScheme || r.HasFullFilePath || r.HasURLPathOnly) {
+ return false
+ }
+
+ // check for local file
+ pth := v
+ if r.HasURLPathOnly {
+ base := "."
+ if len(basepaths) > 0 {
+ base = filepath.Dir(filepath.Join(basepaths...))
+ }
+ p, e := filepath.Abs(filepath.ToSlash(filepath.Join(base, pth)))
+ if e != nil {
+ return false
+ }
+ pth = p
+ }
+
+ fi, err := os.Stat(filepath.ToSlash(pth))
+ if err != nil {
+ return false
+ }
+
+ return !fi.IsDir()
+}
+
+// Inherits creates a new reference from a parent and a child
+// If the child cannot inherit from the parent, an error is returned
+func (r *Ref) Inherits(child Ref) (*Ref, error) {
+ ref, err := r.Ref.Inherits(child.Ref)
+ if err != nil {
+ return nil, err
+ }
+ return &Ref{Ref: *ref}, nil
+}
+
+// NewRef creates a new instance of a ref object
+// returns an error when the reference uri is an invalid uri
+func NewRef(refURI string) (Ref, error) {
+ ref, err := jsonreference.New(refURI)
+ if err != nil {
+ return Ref{}, err
+ }
+ return Ref{Ref: ref}, nil
+}
+
+// MustCreateRef creates a ref object but panics when refURI is invalid.
+// Use the NewRef method for a version that returns an error.
+func MustCreateRef(refURI string) Ref {
+ return Ref{Ref: jsonreference.MustCreateRef(refURI)}
+}
+
+// MarshalJSON marshals this ref into a JSON object
+func (r Ref) MarshalJSON() ([]byte, error) {
+ str := r.String()
+ if str == "" {
+ if r.IsRoot() {
+ return []byte(`{"$ref":""}`), nil
+ }
+ return []byte("{}"), nil
+ }
+ v := map[string]interface{}{"$ref": str}
+ return json.Marshal(v)
+}
+
+// UnmarshalJSON unmarshals this ref from a JSON object
+func (r *Ref) UnmarshalJSON(d []byte) error {
+ var v map[string]interface{}
+ if err := json.Unmarshal(d, &v); err != nil {
+ return err
+ }
+ return r.fromMap(v)
+}
+
+// GobEncode provides a safe gob encoder for Ref
+func (r Ref) GobEncode() ([]byte, error) {
+ var b bytes.Buffer
+ raw, err := r.MarshalJSON()
+ if err != nil {
+ return nil, err
+ }
+ err = gob.NewEncoder(&b).Encode(raw)
+ return b.Bytes(), err
+}
+
+// GobDecode provides a safe gob decoder for Ref
+func (r *Ref) GobDecode(b []byte) error {
+ var raw []byte
+ buf := bytes.NewBuffer(b)
+ err := gob.NewDecoder(buf).Decode(&raw)
+ if err != nil {
+ return err
+ }
+ return json.Unmarshal(raw, r)
+}
+
+func (r *Ref) fromMap(v map[string]interface{}) error {
+ if v == nil {
+ return nil
+ }
+
+ if vv, ok := v["$ref"]; ok {
+ if str, ok := vv.(string); ok {
+ ref, err := jsonreference.New(str)
+ if err != nil {
+ return err
+ }
+ *r = Ref{Ref: ref}
+ }
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/go-openapi/spec/resolver.go b/vendor/github.com/go-openapi/spec/resolver.go
new file mode 100644
index 000000000..47d1ee13f
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/resolver.go
@@ -0,0 +1,127 @@
+package spec
+
+import (
+ "fmt"
+
+ "github.com/go-openapi/swag"
+)
+
+func resolveAnyWithBase(root interface{}, ref *Ref, result interface{}, options *ExpandOptions) error {
+ options = optionsOrDefault(options)
+ resolver := defaultSchemaLoader(root, options, nil, nil)
+
+ if err := resolver.Resolve(ref, result, options.RelativeBase); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// ResolveRefWithBase resolves a reference against a context root with preservation of base path
+func ResolveRefWithBase(root interface{}, ref *Ref, options *ExpandOptions) (*Schema, error) {
+ result := new(Schema)
+
+ if err := resolveAnyWithBase(root, ref, result, options); err != nil {
+ return nil, err
+ }
+
+ return result, nil
+}
+
+// ResolveRef resolves a reference for a schema against a context root
+// ref is guaranteed to be in root (no need to go to external files)
+//
+// ResolveRef is ONLY called from the code generation module
+func ResolveRef(root interface{}, ref *Ref) (*Schema, error) {
+ res, _, err := ref.GetPointer().Get(root)
+ if err != nil {
+ return nil, err
+ }
+
+ switch sch := res.(type) {
+ case Schema:
+ return &sch, nil
+ case *Schema:
+ return sch, nil
+ case map[string]interface{}:
+ newSch := new(Schema)
+ if err = swag.DynamicJSONToStruct(sch, newSch); err != nil {
+ return nil, err
+ }
+ return newSch, nil
+ default:
+ return nil, fmt.Errorf("type: %T: %w", sch, ErrUnknownTypeForReference)
+ }
+}
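+
+// A typical call (an illustrative sketch; "#/definitions/Pet" is a hypothetical
+// pointer) resolves an in-document reference against an unmarshalled root:
+//
+//  ref := MustCreateRef("#/definitions/Pet")
+//  sch, err := ResolveRef(root, &ref) // root may be a *Swagger or a map[string]interface{}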
+
+// ResolveParameterWithBase resolves a parameter reference against a context root and base path
+func ResolveParameterWithBase(root interface{}, ref Ref, options *ExpandOptions) (*Parameter, error) {
+ result := new(Parameter)
+
+ if err := resolveAnyWithBase(root, &ref, result, options); err != nil {
+ return nil, err
+ }
+
+ return result, nil
+}
+
+// ResolveParameter resolves a parameter reference against a context root
+func ResolveParameter(root interface{}, ref Ref) (*Parameter, error) {
+ return ResolveParameterWithBase(root, ref, nil)
+}
+
+// ResolveResponseWithBase resolves a response reference against a context root and base path
+func ResolveResponseWithBase(root interface{}, ref Ref, options *ExpandOptions) (*Response, error) {
+ result := new(Response)
+
+ err := resolveAnyWithBase(root, &ref, result, options)
+ if err != nil {
+ return nil, err
+ }
+
+ return result, nil
+}
+
+// ResolveResponse resolves a response reference against a context root
+func ResolveResponse(root interface{}, ref Ref) (*Response, error) {
+ return ResolveResponseWithBase(root, ref, nil)
+}
+
+// ResolvePathItemWithBase resolves a path item against a context root and base path
+func ResolvePathItemWithBase(root interface{}, ref Ref, options *ExpandOptions) (*PathItem, error) {
+ result := new(PathItem)
+
+ if err := resolveAnyWithBase(root, &ref, result, options); err != nil {
+ return nil, err
+ }
+
+ return result, nil
+}
+
+// ResolvePathItem resolves a path item against a context root and base path
+//
+// Deprecated: use ResolvePathItemWithBase instead
+func ResolvePathItem(root interface{}, ref Ref, options *ExpandOptions) (*PathItem, error) {
+ return ResolvePathItemWithBase(root, ref, options)
+}
+
+// ResolveItemsWithBase resolves a parameter items reference against a context root and base path.
+//
+// NOTE: strictly speaking, this construct is not supported by Swagger 2.0.
+// Similarly, $ref is forbidden in response headers.
+func ResolveItemsWithBase(root interface{}, ref Ref, options *ExpandOptions) (*Items, error) {
+ result := new(Items)
+
+ if err := resolveAnyWithBase(root, &ref, result, options); err != nil {
+ return nil, err
+ }
+
+ return result, nil
+}
+
+// ResolveItems resolves a parameter items reference against a context root and base path.
+//
+// Deprecated: use ResolveItemsWithBase instead
+func ResolveItems(root interface{}, ref Ref, options *ExpandOptions) (*Items, error) {
+ return ResolveItemsWithBase(root, ref, options)
+}
diff --git a/vendor/github.com/go-openapi/spec/response.go b/vendor/github.com/go-openapi/spec/response.go
new file mode 100644
index 000000000..0340b60d8
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/response.go
@@ -0,0 +1,152 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "encoding/json"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/swag"
+)
+
+// ResponseProps properties specific to a response
+type ResponseProps struct {
+ Description string `json:"description"`
+ Schema *Schema `json:"schema,omitempty"`
+ Headers map[string]Header `json:"headers,omitempty"`
+ Examples map[string]interface{} `json:"examples,omitempty"`
+}
+
+// Response describes a single response from an API Operation.
+//
+// For more information: http://goo.gl/8us55a#responseObject
+type Response struct {
+ Refable
+ ResponseProps
+ VendorExtensible
+}
+
+// JSONLookup look up a value by the json property name
+func (r Response) JSONLookup(token string) (interface{}, error) {
+ if ex, ok := r.Extensions[token]; ok {
+ return &ex, nil
+ }
+ if token == "$ref" {
+ return &r.Ref, nil
+ }
+ ptr, _, err := jsonpointer.GetForToken(r.ResponseProps, token)
+ return ptr, err
+}
+
+// UnmarshalJSON hydrates this items instance with the data from JSON
+func (r *Response) UnmarshalJSON(data []byte) error {
+ if err := json.Unmarshal(data, &r.ResponseProps); err != nil {
+ return err
+ }
+ if err := json.Unmarshal(data, &r.Refable); err != nil {
+ return err
+ }
+ return json.Unmarshal(data, &r.VendorExtensible)
+}
+
+// MarshalJSON converts this items object to JSON
+func (r Response) MarshalJSON() ([]byte, error) {
+ var (
+ b1 []byte
+ err error
+ )
+
+ if r.Ref.String() == "" {
+ // when there is no $ref, empty description is rendered as an empty string
+ b1, err = json.Marshal(r.ResponseProps)
+ } else {
+ // when there is $ref inside the schema, description should be omitempty-ied
+ b1, err = json.Marshal(struct {
+ Description string `json:"description,omitempty"`
+ Schema *Schema `json:"schema,omitempty"`
+ Headers map[string]Header `json:"headers,omitempty"`
+ Examples map[string]interface{} `json:"examples,omitempty"`
+ }{
+ Description: r.ResponseProps.Description,
+ Schema: r.ResponseProps.Schema,
+ Headers: r.ResponseProps.Headers, // the anonymous struct declares headers, so copy them too
+ Examples: r.ResponseProps.Examples,
+ })
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ b2, err := json.Marshal(r.Refable)
+ if err != nil {
+ return nil, err
+ }
+ b3, err := json.Marshal(r.VendorExtensible)
+ if err != nil {
+ return nil, err
+ }
+ return swag.ConcatJSON(b1, b2, b3), nil
+}
+
+// NewResponse creates a new response instance
+func NewResponse() *Response {
+ return new(Response)
+}
+
+// ResponseRef creates a response as a json reference
+func ResponseRef(url string) *Response {
+ resp := NewResponse()
+ resp.Ref = MustCreateRef(url)
+ return resp
+}
+
+// WithDescription sets the description on this response, allows for chaining
+func (r *Response) WithDescription(description string) *Response {
+ r.Description = description
+ return r
+}
+
+// WithSchema sets the schema on this response, allows for chaining.
+// Passing a nil argument removes the schema from this response
+func (r *Response) WithSchema(schema *Schema) *Response {
+ r.Schema = schema
+ return r
+}
+
+// AddHeader adds a header to this response
+func (r *Response) AddHeader(name string, header *Header) *Response {
+ if header == nil {
+ return r.RemoveHeader(name)
+ }
+ if r.Headers == nil {
+ r.Headers = make(map[string]Header)
+ }
+ r.Headers[name] = *header
+ return r
+}
+
+// RemoveHeader removes a header from this response
+func (r *Response) RemoveHeader(name string) *Response {
+ delete(r.Headers, name)
+ return r
+}
+
+// AddExample adds an example to this response
+func (r *Response) AddExample(mediaType string, example interface{}) *Response {
+ if r.Examples == nil {
+ r.Examples = make(map[string]interface{})
+ }
+ r.Examples[mediaType] = example
+ return r
+}
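+
+// A typical fluent construction (an illustrative sketch; the ref and header
+// name are hypothetical):
+//
+//  ok := NewResponse().
+//      WithDescription("OK").
+//      WithSchema(RefSchema("#/definitions/Pet")).
+//      AddHeader("X-Request-Id", &Header{})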
diff --git a/vendor/github.com/go-openapi/spec/responses.go b/vendor/github.com/go-openapi/spec/responses.go
new file mode 100644
index 000000000..16c3076fe
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/responses.go
@@ -0,0 +1,140 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "encoding/json"
+ "fmt"
+ "reflect"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/swag"
+)
+
+// Responses is a container for the expected responses of an operation.
+// The container maps an HTTP response code to the expected response.
+// The documentation is not necessarily expected to cover all possible HTTP response codes,
+// since they may not be known in advance. However, it is expected to cover
+// a successful operation response and any known errors.
+//
+// The `default` entry can be used as a default response object for all HTTP codes that are not covered
+// individually by the specification.
+//
+// The `Responses Object` MUST contain at least one response code, and it SHOULD be the response
+// for a successful operation call.
+//
+// For more information: http://goo.gl/8us55a#responsesObject
+type Responses struct {
+ VendorExtensible
+ ResponsesProps
+}
+
+// JSONLookup implements an interface to customize json pointer lookup
+func (r Responses) JSONLookup(token string) (interface{}, error) {
+ if token == "default" {
+ return r.Default, nil
+ }
+ if ex, ok := r.Extensions[token]; ok {
+ return &ex, nil
+ }
+ if i, err := strconv.Atoi(token); err == nil {
+ if scr, ok := r.StatusCodeResponses[i]; ok {
+ return scr, nil
+ }
+ }
+ return nil, fmt.Errorf("object has no field %q", token)
+}
+
+// UnmarshalJSON hydrates this items instance with the data from JSON
+func (r *Responses) UnmarshalJSON(data []byte) error {
+ if err := json.Unmarshal(data, &r.ResponsesProps); err != nil {
+ return err
+ }
+
+ if err := json.Unmarshal(data, &r.VendorExtensible); err != nil {
+ return err
+ }
+ if reflect.DeepEqual(ResponsesProps{}, r.ResponsesProps) {
+ r.ResponsesProps = ResponsesProps{}
+ }
+ return nil
+}
+
+// MarshalJSON converts this items object to JSON
+func (r Responses) MarshalJSON() ([]byte, error) {
+ b1, err := json.Marshal(r.ResponsesProps)
+ if err != nil {
+ return nil, err
+ }
+ b2, err := json.Marshal(r.VendorExtensible)
+ if err != nil {
+ return nil, err
+ }
+ concated := swag.ConcatJSON(b1, b2)
+ return concated, nil
+}
+
+// ResponsesProps describes all responses for an operation.
+// It holds the default response and maps all other responses by
+// HTTP status code.
+type ResponsesProps struct {
+ Default *Response
+ StatusCodeResponses map[int]Response
+}
+
+// MarshalJSON marshals responses as JSON
+func (r ResponsesProps) MarshalJSON() ([]byte, error) {
+ toser := map[string]Response{}
+ if r.Default != nil {
+ toser["default"] = *r.Default
+ }
+ for k, v := range r.StatusCodeResponses {
+ toser[strconv.Itoa(k)] = v
+ }
+ return json.Marshal(toser)
+}
+
+// UnmarshalJSON unmarshals responses from JSON
+func (r *ResponsesProps) UnmarshalJSON(data []byte) error {
+ var res map[string]json.RawMessage
+ if err := json.Unmarshal(data, &res); err != nil {
+ return err
+ }
+
+ if v, ok := res["default"]; ok {
+ var defaultRes Response
+ if err := json.Unmarshal(v, &defaultRes); err != nil {
+ return err
+ }
+ r.Default = &defaultRes
+ delete(res, "default")
+ }
+ for k, v := range res {
+ if !strings.HasPrefix(k, "x-") {
+ var statusCodeResp Response
+ if err := json.Unmarshal(v, &statusCodeResp); err != nil {
+ return err
+ }
+ if nk, err := strconv.Atoi(k); err == nil {
+ if r.StatusCodeResponses == nil {
+ r.StatusCodeResponses = map[int]Response{}
+ }
+ r.StatusCodeResponses[nk] = statusCodeResp
+ }
+ }
+ }
+ return nil
+}
diff --git a/vendor/github.com/go-openapi/spec/schema.go b/vendor/github.com/go-openapi/spec/schema.go
new file mode 100644
index 000000000..4e9be8576
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/schema.go
@@ -0,0 +1,645 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "encoding/json"
+ "fmt"
+ "strings"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/swag"
+)
+
+// BooleanProperty creates a boolean property
+func BooleanProperty() *Schema {
+ return &Schema{SchemaProps: SchemaProps{Type: []string{"boolean"}}}
+}
+
+// BoolProperty creates a boolean property
+func BoolProperty() *Schema { return BooleanProperty() }
+
+// StringProperty creates a string property
+func StringProperty() *Schema {
+ return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}}}
+}
+
+// CharProperty creates a string property
+func CharProperty() *Schema {
+ return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}}}
+}
+
+// Float64Property creates a float64/double property
+func Float64Property() *Schema {
+ return &Schema{SchemaProps: SchemaProps{Type: []string{"number"}, Format: "double"}}
+}
+
+// Float32Property creates a float32/float property
+func Float32Property() *Schema {
+ return &Schema{SchemaProps: SchemaProps{Type: []string{"number"}, Format: "float"}}
+}
+
+// Int8Property creates an int8 property
+func Int8Property() *Schema {
+ return &Schema{SchemaProps: SchemaProps{Type: []string{"integer"}, Format: "int8"}}
+}
+
+// Int16Property creates an int16 property
+func Int16Property() *Schema {
+ return &Schema{SchemaProps: SchemaProps{Type: []string{"integer"}, Format: "int16"}}
+}
+
+// Int32Property creates an int32 property
+func Int32Property() *Schema {
+ return &Schema{SchemaProps: SchemaProps{Type: []string{"integer"}, Format: "int32"}}
+}
+
+// Int64Property creates an int64 property
+func Int64Property() *Schema {
+ return &Schema{SchemaProps: SchemaProps{Type: []string{"integer"}, Format: "int64"}}
+}
+
+// StrFmtProperty creates a property for the named string format
+func StrFmtProperty(format string) *Schema {
+ return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}, Format: format}}
+}
+
+// DateProperty creates a date property
+func DateProperty() *Schema {
+ return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}, Format: "date"}}
+}
+
+// DateTimeProperty creates a date time property
+func DateTimeProperty() *Schema {
+ return &Schema{SchemaProps: SchemaProps{Type: []string{"string"}, Format: "date-time"}}
+}
+
+// MapProperty creates a map property
+func MapProperty(property *Schema) *Schema {
+ return &Schema{SchemaProps: SchemaProps{Type: []string{"object"},
+ AdditionalProperties: &SchemaOrBool{Allows: true, Schema: property}}}
+}
+
+// RefProperty creates a ref property
+func RefProperty(name string) *Schema {
+ return &Schema{SchemaProps: SchemaProps{Ref: MustCreateRef(name)}}
+}
+
+// RefSchema creates a ref property
+func RefSchema(name string) *Schema {
+ return &Schema{SchemaProps: SchemaProps{Ref: MustCreateRef(name)}}
+}
+
+// ArrayProperty creates an array property
+func ArrayProperty(items *Schema) *Schema {
+ if items == nil {
+ return &Schema{SchemaProps: SchemaProps{Type: []string{"array"}}}
+ }
+ return &Schema{SchemaProps: SchemaProps{Items: &SchemaOrArray{Schema: items}, Type: []string{"array"}}}
+}
+
+// ComposedSchema creates a schema with allOf
+func ComposedSchema(schemas ...Schema) *Schema {
+ s := new(Schema)
+ s.AllOf = schemas
+ return s
+}
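+
+// Builder sketch (illustrative only): the property constructors above combine
+// with the fluent With*/Set* methods defined below to assemble schemas inline.
+//
+//	user := new(Schema).Typed("object", "").
+//		WithTitle("User").
+//		SetProperty("name", *StringProperty().WithMinLength(1)).
+//		SetProperty("age", *Int64Property().WithMinimum(0, false)).
+//		WithRequired("name")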
+
+// SchemaURL represents a schema URL
+type SchemaURL string
+
+// MarshalJSON marshals this schema URL to JSON
+func (r SchemaURL) MarshalJSON() ([]byte, error) {
+ if r == "" {
+ return []byte("{}"), nil
+ }
+ v := map[string]interface{}{"$schema": string(r)}
+ return json.Marshal(v)
+}
+
+// UnmarshalJSON unmarshals this schema URL from JSON
+func (r *SchemaURL) UnmarshalJSON(data []byte) error {
+ var v map[string]interface{}
+ if err := json.Unmarshal(data, &v); err != nil {
+ return err
+ }
+ return r.fromMap(v)
+}
+
+func (r *SchemaURL) fromMap(v map[string]interface{}) error {
+ if v == nil {
+ return nil
+ }
+ if vv, ok := v["$schema"]; ok {
+ if str, ok := vv.(string); ok {
+ u, err := parseURL(str)
+ if err != nil {
+ return err
+ }
+
+ *r = SchemaURL(u.String())
+ }
+ }
+ return nil
+}
+
+// SchemaProps describes a JSON schema (draft 4)
+type SchemaProps struct {
+ ID string `json:"id,omitempty"`
+ Ref Ref `json:"-"`
+ Schema SchemaURL `json:"-"`
+ Description string `json:"description,omitempty"`
+ Type StringOrArray `json:"type,omitempty"`
+ Nullable bool `json:"nullable,omitempty"`
+ Format string `json:"format,omitempty"`
+ Title string `json:"title,omitempty"`
+ Default interface{} `json:"default,omitempty"`
+ Maximum *float64 `json:"maximum,omitempty"`
+ ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty"`
+ Minimum *float64 `json:"minimum,omitempty"`
+ ExclusiveMinimum bool `json:"exclusiveMinimum,omitempty"`
+ MaxLength *int64 `json:"maxLength,omitempty"`
+ MinLength *int64 `json:"minLength,omitempty"`
+ Pattern string `json:"pattern,omitempty"`
+ MaxItems *int64 `json:"maxItems,omitempty"`
+ MinItems *int64 `json:"minItems,omitempty"`
+ UniqueItems bool `json:"uniqueItems,omitempty"`
+ MultipleOf *float64 `json:"multipleOf,omitempty"`
+ Enum []interface{} `json:"enum,omitempty"`
+ MaxProperties *int64 `json:"maxProperties,omitempty"`
+ MinProperties *int64 `json:"minProperties,omitempty"`
+ Required []string `json:"required,omitempty"`
+ Items *SchemaOrArray `json:"items,omitempty"`
+ AllOf []Schema `json:"allOf,omitempty"`
+ OneOf []Schema `json:"oneOf,omitempty"`
+ AnyOf []Schema `json:"anyOf,omitempty"`
+ Not *Schema `json:"not,omitempty"`
+ Properties SchemaProperties `json:"properties,omitempty"`
+ AdditionalProperties *SchemaOrBool `json:"additionalProperties,omitempty"`
+ PatternProperties SchemaProperties `json:"patternProperties,omitempty"`
+ Dependencies Dependencies `json:"dependencies,omitempty"`
+ AdditionalItems *SchemaOrBool `json:"additionalItems,omitempty"`
+ Definitions Definitions `json:"definitions,omitempty"`
+}
+
+// SwaggerSchemaProps are additional properties supported by swagger schemas, but not JSON-schema (draft 4)
+type SwaggerSchemaProps struct {
+ Discriminator string `json:"discriminator,omitempty"`
+ ReadOnly bool `json:"readOnly,omitempty"`
+ XML *XMLObject `json:"xml,omitempty"`
+ ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"`
+ Example interface{} `json:"example,omitempty"`
+}
+
+// Schema the schema object allows the definition of input and output data types.
+// These types can be objects, but also primitives and arrays.
+// This object is based on the [JSON Schema Specification Draft 4](http://json-schema.org/)
+// and uses a predefined subset of it.
+// On top of this subset, there are extensions provided by this specification to allow for more complete documentation.
+//
+// For more information: http://goo.gl/8us55a#schemaObject
+type Schema struct {
+ VendorExtensible
+ SchemaProps
+ SwaggerSchemaProps
+ ExtraProps map[string]interface{} `json:"-"`
+}
+
+// JSONLookup implements an interface to customize json pointer lookup
+func (s Schema) JSONLookup(token string) (interface{}, error) {
+ if ex, ok := s.Extensions[token]; ok {
+ return &ex, nil
+ }
+
+ if ex, ok := s.ExtraProps[token]; ok {
+ return &ex, nil
+ }
+
+ r, _, err := jsonpointer.GetForToken(s.SchemaProps, token)
+ if r != nil || (err != nil && !strings.HasPrefix(err.Error(), "object has no field")) {
+ return r, err
+ }
+ r, _, err = jsonpointer.GetForToken(s.SwaggerSchemaProps, token)
+ return r, err
+}
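+
+// Lookup sketch (illustrative only): JSONLookup resolves a json pointer token
+// against extensions and extra properties before the typed schema fields.
+//
+//	s := *StringProperty().WithTitle("name")
+//	v, _ := s.JSONLookup("title") // v == "name"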
+
+// WithID sets the id for this schema, allows for chaining
+func (s *Schema) WithID(id string) *Schema {
+ s.ID = id
+ return s
+}
+
+// WithTitle sets the title for this schema, allows for chaining
+func (s *Schema) WithTitle(title string) *Schema {
+ s.Title = title
+ return s
+}
+
+// WithDescription sets the description for this schema, allows for chaining
+func (s *Schema) WithDescription(description string) *Schema {
+ s.Description = description
+ return s
+}
+
+// WithProperties sets the properties for this schema
+func (s *Schema) WithProperties(schemas map[string]Schema) *Schema {
+ s.Properties = schemas
+ return s
+}
+
+// SetProperty sets a property on this schema
+func (s *Schema) SetProperty(name string, schema Schema) *Schema {
+ if s.Properties == nil {
+ s.Properties = make(map[string]Schema)
+ }
+ s.Properties[name] = schema
+ return s
+}
+
+// WithAllOf sets the all of property
+func (s *Schema) WithAllOf(schemas ...Schema) *Schema {
+ s.AllOf = schemas
+ return s
+}
+
+// WithMaxProperties sets the max number of properties an object can have
+func (s *Schema) WithMaxProperties(max int64) *Schema {
+ s.MaxProperties = &max
+ return s
+}
+
+// WithMinProperties sets the min number of properties an object must have
+func (s *Schema) WithMinProperties(min int64) *Schema {
+ s.MinProperties = &min
+ return s
+}
+
+// Typed sets the type of this schema for a single value item
+func (s *Schema) Typed(tpe, format string) *Schema {
+ s.Type = []string{tpe}
+ s.Format = format
+ return s
+}
+
+// AddType adds a type with potential format to the types for this schema
+func (s *Schema) AddType(tpe, format string) *Schema {
+ s.Type = append(s.Type, tpe)
+ if format != "" {
+ s.Format = format
+ }
+ return s
+}
+
+// AsNullable flags this schema as nullable.
+func (s *Schema) AsNullable() *Schema {
+ s.Nullable = true
+ return s
+}
+
+// CollectionOf a fluent builder method for an array parameter
+func (s *Schema) CollectionOf(items Schema) *Schema {
+ s.Type = []string{jsonArray}
+ s.Items = &SchemaOrArray{Schema: &items}
+ return s
+}
+
+// WithDefault sets the default value on this parameter
+func (s *Schema) WithDefault(defaultValue interface{}) *Schema {
+ s.Default = defaultValue
+ return s
+}
+
+// WithRequired flags this parameter as required
+func (s *Schema) WithRequired(items ...string) *Schema {
+ s.Required = items
+ return s
+}
+
+// AddRequired adds field names to the required properties array
+func (s *Schema) AddRequired(items ...string) *Schema {
+ s.Required = append(s.Required, items...)
+ return s
+}
+
+// WithMaxLength sets a max length value
+func (s *Schema) WithMaxLength(max int64) *Schema {
+ s.MaxLength = &max
+ return s
+}
+
+// WithMinLength sets a min length value
+func (s *Schema) WithMinLength(min int64) *Schema {
+ s.MinLength = &min
+ return s
+}
+
+// WithPattern sets a pattern value
+func (s *Schema) WithPattern(pattern string) *Schema {
+ s.Pattern = pattern
+ return s
+}
+
+// WithMultipleOf sets a multiple of value
+func (s *Schema) WithMultipleOf(number float64) *Schema {
+ s.MultipleOf = &number
+ return s
+}
+
+// WithMaximum sets a maximum number value
+func (s *Schema) WithMaximum(max float64, exclusive bool) *Schema {
+ s.Maximum = &max
+ s.ExclusiveMaximum = exclusive
+ return s
+}
+
+// WithMinimum sets a minimum number value
+func (s *Schema) WithMinimum(min float64, exclusive bool) *Schema {
+ s.Minimum = &min
+ s.ExclusiveMinimum = exclusive
+ return s
+}
+
+// WithEnum sets the enum values, replacing any existing ones
+func (s *Schema) WithEnum(values ...interface{}) *Schema {
+ s.Enum = append([]interface{}{}, values...)
+ return s
+}
+
+// WithMaxItems sets the max items
+func (s *Schema) WithMaxItems(size int64) *Schema {
+ s.MaxItems = &size
+ return s
+}
+
+// WithMinItems sets the min items
+func (s *Schema) WithMinItems(size int64) *Schema {
+ s.MinItems = &size
+ return s
+}
+
+// UniqueValues dictates that this array can only have unique items
+func (s *Schema) UniqueValues() *Schema {
+ s.UniqueItems = true
+ return s
+}
+
+// AllowDuplicates this array can have duplicates
+func (s *Schema) AllowDuplicates() *Schema {
+ s.UniqueItems = false
+ return s
+}
+
+// AddToAllOf adds a schema to the allOf property
+func (s *Schema) AddToAllOf(schemas ...Schema) *Schema {
+ s.AllOf = append(s.AllOf, schemas...)
+ return s
+}
+
+// WithDiscriminator sets the name of the discriminator field
+func (s *Schema) WithDiscriminator(discriminator string) *Schema {
+ s.Discriminator = discriminator
+ return s
+}
+
+// AsReadOnly flags this schema as readonly
+func (s *Schema) AsReadOnly() *Schema {
+ s.ReadOnly = true
+ return s
+}
+
+// AsWritable flags this schema as writeable (not read-only)
+func (s *Schema) AsWritable() *Schema {
+ s.ReadOnly = false
+ return s
+}
+
+// WithExample sets the example for this schema
+func (s *Schema) WithExample(example interface{}) *Schema {
+ s.Example = example
+ return s
+}
+
+// WithExternalDocs sets or removes the external docs for this schema.
+// When both params are empty strings, the external docs are removed.
+// Otherwise both values are set on the external docs object, so a
+// non-empty description should be accompanied by a url, and vice versa.
+func (s *Schema) WithExternalDocs(description, url string) *Schema {
+ if description == "" && url == "" {
+ s.ExternalDocs = nil
+ return s
+ }
+
+ if s.ExternalDocs == nil {
+ s.ExternalDocs = &ExternalDocumentation{}
+ }
+ s.ExternalDocs.Description = description
+ s.ExternalDocs.URL = url
+ return s
+}
+
+// WithXMLName sets the xml name for the object
+func (s *Schema) WithXMLName(name string) *Schema {
+ if s.XML == nil {
+ s.XML = new(XMLObject)
+ }
+ s.XML.Name = name
+ return s
+}
+
+// WithXMLNamespace sets the xml namespace for the object
+func (s *Schema) WithXMLNamespace(namespace string) *Schema {
+ if s.XML == nil {
+ s.XML = new(XMLObject)
+ }
+ s.XML.Namespace = namespace
+ return s
+}
+
+// WithXMLPrefix sets the xml prefix for the object
+func (s *Schema) WithXMLPrefix(prefix string) *Schema {
+ if s.XML == nil {
+ s.XML = new(XMLObject)
+ }
+ s.XML.Prefix = prefix
+ return s
+}
+
+// AsXMLAttribute flags this object as xml attribute
+func (s *Schema) AsXMLAttribute() *Schema {
+ if s.XML == nil {
+ s.XML = new(XMLObject)
+ }
+ s.XML.Attribute = true
+ return s
+}
+
+// AsXMLElement flags this object as an xml node
+func (s *Schema) AsXMLElement() *Schema {
+ if s.XML == nil {
+ s.XML = new(XMLObject)
+ }
+ s.XML.Attribute = false
+ return s
+}
+
+// AsWrappedXML flags this object as wrapped; this is mostly useful for array types
+func (s *Schema) AsWrappedXML() *Schema {
+ if s.XML == nil {
+ s.XML = new(XMLObject)
+ }
+ s.XML.Wrapped = true
+ return s
+}
+
+// AsUnwrappedXML flags this object as unwrapped
+func (s *Schema) AsUnwrappedXML() *Schema {
+ if s.XML == nil {
+ s.XML = new(XMLObject)
+ }
+ s.XML.Wrapped = false
+ return s
+}
+
+// SetValidations defines all schema validations.
+//
+// NOTE: Required, ReadOnly, AllOf, AnyOf, OneOf and Not are not considered.
+func (s *Schema) SetValidations(val SchemaValidations) {
+ s.Maximum = val.Maximum
+ s.ExclusiveMaximum = val.ExclusiveMaximum
+ s.Minimum = val.Minimum
+ s.ExclusiveMinimum = val.ExclusiveMinimum
+ s.MaxLength = val.MaxLength
+ s.MinLength = val.MinLength
+ s.Pattern = val.Pattern
+ s.MaxItems = val.MaxItems
+ s.MinItems = val.MinItems
+ s.UniqueItems = val.UniqueItems
+ s.MultipleOf = val.MultipleOf
+ s.Enum = val.Enum
+ s.MinProperties = val.MinProperties
+ s.MaxProperties = val.MaxProperties
+ s.PatternProperties = val.PatternProperties
+}
+
+// WithValidations is a fluent method to set schema validations
+func (s *Schema) WithValidations(val SchemaValidations) *Schema {
+ s.SetValidations(val)
+ return s
+}
+
+// Validations returns a clone of the validations for this schema
+func (s Schema) Validations() SchemaValidations {
+ return SchemaValidations{
+ CommonValidations: CommonValidations{
+ Maximum: s.Maximum,
+ ExclusiveMaximum: s.ExclusiveMaximum,
+ Minimum: s.Minimum,
+ ExclusiveMinimum: s.ExclusiveMinimum,
+ MaxLength: s.MaxLength,
+ MinLength: s.MinLength,
+ Pattern: s.Pattern,
+ MaxItems: s.MaxItems,
+ MinItems: s.MinItems,
+ UniqueItems: s.UniqueItems,
+ MultipleOf: s.MultipleOf,
+ Enum: s.Enum,
+ },
+ MinProperties: s.MinProperties,
+ MaxProperties: s.MaxProperties,
+ PatternProperties: s.PatternProperties,
+ }
+}
+
+// MarshalJSON marshals this schema to JSON
+func (s Schema) MarshalJSON() ([]byte, error) {
+ b1, err := json.Marshal(s.SchemaProps)
+ if err != nil {
+ return nil, fmt.Errorf("schema props %v", err)
+ }
+ b2, err := json.Marshal(s.VendorExtensible)
+ if err != nil {
+ return nil, fmt.Errorf("vendor props %v", err)
+ }
+ b3, err := s.Ref.MarshalJSON()
+ if err != nil {
+ return nil, fmt.Errorf("ref prop %v", err)
+ }
+ b4, err := s.Schema.MarshalJSON()
+ if err != nil {
+ return nil, fmt.Errorf("schema prop %v", err)
+ }
+ b5, err := json.Marshal(s.SwaggerSchemaProps)
+ if err != nil {
+ return nil, fmt.Errorf("common validations %v", err)
+ }
+ var b6 []byte
+ if s.ExtraProps != nil {
+ jj, err := json.Marshal(s.ExtraProps)
+ if err != nil {
+ return nil, fmt.Errorf("extra props %v", err)
+ }
+ b6 = jj
+ }
+ return swag.ConcatJSON(b1, b2, b3, b4, b5, b6), nil
+}
+
+// UnmarshalJSON unmarshals this schema from JSON
+func (s *Schema) UnmarshalJSON(data []byte) error {
+ props := struct {
+ SchemaProps
+ SwaggerSchemaProps
+ }{}
+ if err := json.Unmarshal(data, &props); err != nil {
+ return err
+ }
+
+ sch := Schema{
+ SchemaProps: props.SchemaProps,
+ SwaggerSchemaProps: props.SwaggerSchemaProps,
+ }
+
+ var d map[string]interface{}
+ if err := json.Unmarshal(data, &d); err != nil {
+ return err
+ }
+
+ _ = sch.Ref.fromMap(d)
+ _ = sch.Schema.fromMap(d)
+
+ delete(d, "$ref")
+ delete(d, "$schema")
+ for _, pn := range swag.DefaultJSONNameProvider.GetJSONNames(s) {
+ delete(d, pn)
+ }
+
+ for k, vv := range d {
+ lk := strings.ToLower(k)
+ if strings.HasPrefix(lk, "x-") {
+ if sch.Extensions == nil {
+ sch.Extensions = map[string]interface{}{}
+ }
+ sch.Extensions[k] = vv
+ continue
+ }
+ if sch.ExtraProps == nil {
+ sch.ExtraProps = map[string]interface{}{}
+ }
+ sch.ExtraProps[k] = vv
+ }
+
+ *s = sch
+
+ return nil
+}
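+
+// Round-trip sketch (illustrative only): on unmarshalling, "x-" keys land in
+// Extensions and any other unknown keys land in ExtraProps, so both survive a
+// subsequent MarshalJSON unchanged.
+//
+//	var s Schema
+//	_ = json.Unmarshal([]byte(`{"type":"string","x-order":1,"vendor":true}`), &s)
+//	// s.Extensions["x-order"] == float64(1)
+//	// s.ExtraProps["vendor"] == true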
diff --git a/vendor/github.com/go-openapi/spec/schema_loader.go b/vendor/github.com/go-openapi/spec/schema_loader.go
new file mode 100644
index 000000000..b81175afd
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/schema_loader.go
@@ -0,0 +1,338 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "encoding/json"
+ "fmt"
+ "log"
+ "net/url"
+ "reflect"
+ "strings"
+
+ "github.com/go-openapi/swag"
+)
+
+// PathLoader is a function to use when loading remote refs.
+//
+// This is a package level default. It may be overridden or bypassed by
+// specifying the loader in ExpandOptions.
+//
+// NOTE: if you are using the go-openapi/loads package, it will override
+// this value with its own default (a loader to retrieve YAML documents as
+// well as JSON ones).
+var PathLoader = func(pth string) (json.RawMessage, error) {
+ data, err := swag.LoadFromFileOrHTTP(pth)
+ if err != nil {
+ return nil, err
+ }
+ return json.RawMessage(data), nil
+}
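+
+// Override sketch (illustrative only; assumes os.ReadFile from the standard
+// library): a caller can swap in a loader that refuses remote fetches, or set
+// ExpandOptions.PathLoader instead to scope the override to a single expansion.
+//
+//	PathLoader = func(pth string) (json.RawMessage, error) {
+//		b, err := os.ReadFile(pth) // local files only, no HTTP
+//		if err != nil {
+//			return nil, err
+//		}
+//		return json.RawMessage(b), nil
+//	}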
+
+// resolverContext allows to share a context during spec processing.
+// At the moment, it just holds the index of circular references found.
+type resolverContext struct {
+ // circulars holds all visited circular references, to short-circuit $ref resolution.
+ //
+ // This structure is privately instantiated and need not be locked against
+ // concurrent access, unless we choose to implement parallel spec walking.
+ circulars map[string]bool
+ basePath string
+ loadDoc func(string) (json.RawMessage, error)
+ rootID string
+}
+
+func newResolverContext(options *ExpandOptions) *resolverContext {
+ expandOptions := optionsOrDefault(options)
+
+ // path loader may be overridden by options
+ var loader func(string) (json.RawMessage, error)
+ if expandOptions.PathLoader == nil {
+ loader = PathLoader
+ } else {
+ loader = expandOptions.PathLoader
+ }
+
+ return &resolverContext{
+ circulars: make(map[string]bool),
+ basePath: expandOptions.RelativeBase, // keep the root base path in context
+ loadDoc: loader,
+ }
+}
+
+type schemaLoader struct {
+ root interface{}
+ options *ExpandOptions
+ cache ResolutionCache
+ context *resolverContext
+}
+
+func (r *schemaLoader) transitiveResolver(basePath string, ref Ref) *schemaLoader {
+ if ref.IsRoot() || ref.HasFragmentOnly {
+ return r
+ }
+
+ baseRef := MustCreateRef(basePath)
+ currentRef := normalizeRef(&ref, basePath)
+ if strings.HasPrefix(currentRef.String(), baseRef.String()) {
+ return r
+ }
+
+ // set a new root against which to resolve
+ rootURL := currentRef.GetURL()
+ rootURL.Fragment = ""
+ root, _ := r.cache.Get(rootURL.String())
+
+ // shallow copy of resolver options to set a new RelativeBase when
+ // traversing multiple documents
+ newOptions := r.options
+ newOptions.RelativeBase = rootURL.String()
+
+ return defaultSchemaLoader(root, newOptions, r.cache, r.context)
+}
+
+func (r *schemaLoader) updateBasePath(transitive *schemaLoader, basePath string) string {
+ if transitive != r {
+ if transitive.options != nil && transitive.options.RelativeBase != "" {
+ return normalizeBase(transitive.options.RelativeBase)
+ }
+ }
+
+ return basePath
+}
+
+func (r *schemaLoader) resolveRef(ref *Ref, target interface{}, basePath string) error {
+ tgt := reflect.ValueOf(target)
+ if tgt.Kind() != reflect.Ptr {
+ return ErrResolveRefNeedsAPointer
+ }
+
+ if ref.GetURL() == nil {
+ return nil
+ }
+
+ var (
+ res interface{}
+ data interface{}
+ err error
+ )
+
+ // Resolve against the root if it isn't nil, and if ref points at the root or has only a fragment,
+ // which means it points somewhere inside the root.
+ root := r.root
+ if (ref.IsRoot() || ref.HasFragmentOnly) && root == nil && basePath != "" {
+ if baseRef, erb := NewRef(basePath); erb == nil {
+ root, _, _, _ = r.load(baseRef.GetURL())
+ }
+ }
+
+ if (ref.IsRoot() || ref.HasFragmentOnly) && root != nil {
+ data = root
+ } else {
+ baseRef := normalizeRef(ref, basePath)
+ data, _, _, err = r.load(baseRef.GetURL())
+ if err != nil {
+ return err
+ }
+ }
+
+ res = data
+ if ref.String() != "" {
+ res, _, err = ref.GetPointer().Get(data)
+ if err != nil {
+ return err
+ }
+ }
+ return swag.DynamicJSONToStruct(res, target)
+}
+
+func (r *schemaLoader) load(refURL *url.URL) (interface{}, url.URL, bool, error) {
+ debugLog("loading schema from url: %s", refURL)
+ toFetch := *refURL
+ toFetch.Fragment = ""
+
+ var err error
+ pth := toFetch.String()
+ normalized := normalizeBase(pth)
+ debugLog("loading doc from: %s", normalized)
+
+ unescaped, err := url.PathUnescape(normalized)
+ if err != nil {
+ return nil, url.URL{}, false, err
+ }
+
+ u := url.URL{Path: unescaped}
+
+ data, fromCache := r.cache.Get(u.RequestURI())
+ if fromCache {
+ return data, toFetch, fromCache, nil
+ }
+
+ b, err := r.context.loadDoc(normalized)
+ if err != nil {
+ return nil, url.URL{}, false, err
+ }
+
+ var doc interface{}
+ if err := json.Unmarshal(b, &doc); err != nil {
+ return nil, url.URL{}, false, err
+ }
+ r.cache.Set(normalized, doc)
+
+ return doc, toFetch, fromCache, nil
+}
+
+// isCircular detects cycles in sequences of $ref.
+//
+// It relies on a private context (which need not be locked).
+func (r *schemaLoader) isCircular(ref *Ref, basePath string, parentRefs ...string) (foundCycle bool) {
+ normalizedRef := normalizeURI(ref.String(), basePath)
+ if _, ok := r.context.circulars[normalizedRef]; ok {
+ // circular $ref has been already detected in another explored cycle
+ foundCycle = true
+ return
+ }
+ foundCycle = swag.ContainsStrings(parentRefs, normalizedRef) // normalized windows url's are lower cased
+ if foundCycle {
+ r.context.circulars[normalizedRef] = true
+ }
+ return
+}
+
+// Resolve resolves a reference against basePath and stores the result in target.
+//
+// Resolve is not in charge of following references: it only resolves ref by following its URL.
+//
+// If the schema the ref is referring to holds nested refs, Resolve doesn't resolve them.
+//
+// If basePath is an empty string, ref is resolved against the root schema stored in the schemaLoader struct
+func (r *schemaLoader) Resolve(ref *Ref, target interface{}, basePath string) error {
+ return r.resolveRef(ref, target, basePath)
+}
+
+func (r *schemaLoader) deref(input interface{}, parentRefs []string, basePath string) error {
+ var ref *Ref
+ switch refable := input.(type) {
+ case *Schema:
+ ref = &refable.Ref
+ case *Parameter:
+ ref = &refable.Ref
+ case *Response:
+ ref = &refable.Ref
+ case *PathItem:
+ ref = &refable.Ref
+ default:
+ return fmt.Errorf("unsupported type: %T: %w", input, ErrDerefUnsupportedType)
+ }
+
+ curRef := ref.String()
+ if curRef == "" {
+ return nil
+ }
+
+ normalizedRef := normalizeRef(ref, basePath)
+ normalizedBasePath := normalizedRef.RemoteURI()
+
+ if r.isCircular(normalizedRef, basePath, parentRefs...) {
+ return nil
+ }
+
+ if err := r.resolveRef(ref, input, basePath); r.shouldStopOnError(err) {
+ return err
+ }
+
+ if ref.String() == "" || ref.String() == curRef {
+ // done with dereferencing
+ return nil
+ }
+
+ parentRefs = append(parentRefs, normalizedRef.String())
+ return r.deref(input, parentRefs, normalizedBasePath)
+}
+
+func (r *schemaLoader) shouldStopOnError(err error) bool {
+ if err != nil && !r.options.ContinueOnError {
+ return true
+ }
+
+ if err != nil {
+ log.Println(err)
+ }
+
+ return false
+}
+
+func (r *schemaLoader) setSchemaID(target interface{}, id, basePath string) (string, string) {
+ debugLog("schema has ID: %s", id)
+
+ // handling the case when id is a folder
+ // remember that basePath has to point to a file
+ var refPath string
+ if strings.HasSuffix(id, "/") {
+ // ensure this is detected as a file, not a folder
+ refPath = fmt.Sprintf("%s%s", id, "placeholder.json")
+ } else {
+ refPath = id
+ }
+
+ // updates the current base path
+ // * important: ID can be a relative path
+ // * registers target to be fetchable from the new base proposed by this id
+ newBasePath := normalizeURI(refPath, basePath)
+
+ // store found IDs for possible future reuse in $ref
+ r.cache.Set(newBasePath, target)
+
+ // the root document has an ID: all $ref relative to that ID may
+ // be rebased relative to the root document
+ if basePath == r.context.basePath {
+ debugLog("root document is a schema with ID: %s (normalized as:%s)", id, newBasePath)
+ r.context.rootID = newBasePath
+ }
+
+ return newBasePath, refPath
+}
+
+func defaultSchemaLoader(
+ root interface{},
+ expandOptions *ExpandOptions,
+ cache ResolutionCache,
+ context *resolverContext) *schemaLoader {
+
+ if expandOptions == nil {
+ expandOptions = &ExpandOptions{}
+ }
+
+ cache = cacheOrDefault(cache)
+
+ if expandOptions.RelativeBase == "" {
+ // if no relative base is provided, assume the root document
+ // contains all $ref, or at least, that the relative documents
+ // may be resolved from the current working directory.
+ expandOptions.RelativeBase = baseForRoot(root, cache)
+ }
+ debugLog("effective expander options: %#v", expandOptions)
+
+ if context == nil {
+ context = newResolverContext(expandOptions)
+ }
+
+ return &schemaLoader{
+ root: root,
+ options: expandOptions,
+ cache: cache,
+ context: context,
+ }
+}
diff --git a/vendor/github.com/go-openapi/spec/security_scheme.go b/vendor/github.com/go-openapi/spec/security_scheme.go
new file mode 100644
index 000000000..9d0bdae90
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/security_scheme.go
@@ -0,0 +1,170 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "encoding/json"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/swag"
+)
+
+const (
+ basic = "basic"
+ apiKey = "apiKey"
+ oauth2 = "oauth2"
+ implicit = "implicit"
+ password = "password"
+ application = "application"
+ accessCode = "accessCode"
+)
+
+// BasicAuth creates a basic auth security scheme
+func BasicAuth() *SecurityScheme {
+ return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{Type: basic}}
+}
+
+// APIKeyAuth creates an api key auth security scheme
+func APIKeyAuth(fieldName, valueSource string) *SecurityScheme {
+ return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{Type: apiKey, Name: fieldName, In: valueSource}}
+}
+
+// OAuth2Implicit creates an implicit flow oauth2 security scheme
+func OAuth2Implicit(authorizationURL string) *SecurityScheme {
+ return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{
+ Type: oauth2,
+ Flow: implicit,
+ AuthorizationURL: authorizationURL,
+ }}
+}
+
+// OAuth2Password creates a password flow oauth2 security scheme
+func OAuth2Password(tokenURL string) *SecurityScheme {
+ return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{
+ Type: oauth2,
+ Flow: password,
+ TokenURL: tokenURL,
+ }}
+}
+
+// OAuth2Application creates an application flow oauth2 security scheme
+func OAuth2Application(tokenURL string) *SecurityScheme {
+ return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{
+ Type: oauth2,
+ Flow: application,
+ TokenURL: tokenURL,
+ }}
+}
+
+// OAuth2AccessToken creates an access token flow oauth2 security scheme
+func OAuth2AccessToken(authorizationURL, tokenURL string) *SecurityScheme {
+ return &SecurityScheme{SecuritySchemeProps: SecuritySchemeProps{
+ Type: oauth2,
+ Flow: accessCode,
+ AuthorizationURL: authorizationURL,
+ TokenURL: tokenURL,
+ }}
+}
+
+// SecuritySchemeProps describes a swagger security scheme in the securityDefinitions section
+type SecuritySchemeProps struct {
+ Description string `json:"description,omitempty"`
+ Type string `json:"type"`
+ Name string `json:"name,omitempty"` // api key
+ In string `json:"in,omitempty"` // api key
+ Flow string `json:"flow,omitempty"` // oauth2
+ AuthorizationURL string `json:"authorizationUrl"` // oauth2
+ TokenURL string `json:"tokenUrl,omitempty"` // oauth2
+ Scopes map[string]string `json:"scopes,omitempty"` // oauth2
+}
+
+// AddScope adds a scope to this security scheme
+func (s *SecuritySchemeProps) AddScope(scope, description string) {
+ if s.Scopes == nil {
+ s.Scopes = make(map[string]string)
+ }
+ s.Scopes[scope] = description
+}
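+
+// Construction sketch (illustrative only): the scheme constructors above chain
+// with AddScope to declare an oauth2 scheme and its scopes.
+//
+//	scheme := OAuth2AccessToken("https://example.org/authorize", "https://example.org/token")
+//	scheme.AddScope("read", "grants read access")
+//	scheme.AddScope("write", "grants write access")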
+
+// SecurityScheme allows the definition of a security scheme that can be used by the operations.
+// Supported schemes are basic authentication, an API key (either as a header or as a query parameter)
+// and OAuth2's common flows (implicit, password, application and access code).
+//
+// For more information: http://goo.gl/8us55a#securitySchemeObject
+type SecurityScheme struct {
+ VendorExtensible
+ SecuritySchemeProps
+}
+
+// JSONLookup implements an interface to customize json pointer lookup
+func (s SecurityScheme) JSONLookup(token string) (interface{}, error) {
+ if ex, ok := s.Extensions[token]; ok {
+ return &ex, nil
+ }
+
+ r, _, err := jsonpointer.GetForToken(s.SecuritySchemeProps, token)
+ return r, err
+}
+
+// MarshalJSON marshals this security scheme to JSON
+func (s SecurityScheme) MarshalJSON() ([]byte, error) {
+ var (
+ b1 []byte
+ err error
+ )
+
+ if s.Type == oauth2 && (s.Flow == implicit || s.Flow == accessCode) {
+ // for oauth2 implicit or accessCode flows, an empty AuthorizationURL is marshaled as an empty string
+ b1, err = json.Marshal(s.SecuritySchemeProps)
+ } else {
+ // for any other scheme, an empty AuthorizationURL is omitted
+ b1, err = json.Marshal(struct {
+ Description string `json:"description,omitempty"`
+ Type string `json:"type"`
+ Name string `json:"name,omitempty"` // api key
+ In string `json:"in,omitempty"` // api key
+ Flow string `json:"flow,omitempty"` // oauth2
+ AuthorizationURL string `json:"authorizationUrl,omitempty"` // oauth2
+ TokenURL string `json:"tokenUrl,omitempty"` // oauth2
+ Scopes map[string]string `json:"scopes,omitempty"` // oauth2
+ }{
+ Description: s.Description,
+ Type: s.Type,
+ Name: s.Name,
+ In: s.In,
+ Flow: s.Flow,
+ AuthorizationURL: s.AuthorizationURL,
+ TokenURL: s.TokenURL,
+ Scopes: s.Scopes,
+ })
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ b2, err := json.Marshal(s.VendorExtensible)
+ if err != nil {
+ return nil, err
+ }
+ return swag.ConcatJSON(b1, b2), nil
+}
+
+// UnmarshalJSON unmarshals this security scheme from JSON
+func (s *SecurityScheme) UnmarshalJSON(data []byte) error {
+ if err := json.Unmarshal(data, &s.SecuritySchemeProps); err != nil {
+ return err
+ }
+ return json.Unmarshal(data, &s.VendorExtensible)
+}
diff --git a/vendor/github.com/go-openapi/spec/spec.go b/vendor/github.com/go-openapi/spec/spec.go
new file mode 100644
index 000000000..7d38b6e62
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/spec.go
@@ -0,0 +1,78 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "encoding/json"
+)
+
+//go:generate curl -L --progress -o ./schemas/v2/schema.json http://swagger.io/v2/schema.json
+//go:generate curl -L --progress -o ./schemas/jsonschema-draft-04.json http://json-schema.org/draft-04/schema
+//go:generate go-bindata -pkg=spec -prefix=./schemas -ignore=.*\.md ./schemas/...
+//go:generate perl -pi -e s,Json,JSON,g bindata.go
+
+const (
+ // SwaggerSchemaURL the url for the swagger 2.0 schema to validate specs
+ SwaggerSchemaURL = "http://swagger.io/v2/schema.json#"
+ // JSONSchemaURL the url for the json schema meta-schema
+ JSONSchemaURL = "http://json-schema.org/draft-04/schema#"
+)
+
+// MustLoadJSONSchemaDraft04 panics when JSONSchemaDraft04 returns an error
+func MustLoadJSONSchemaDraft04() *Schema {
+ d, e := JSONSchemaDraft04()
+ if e != nil {
+ panic(e)
+ }
+ return d
+}
+
+// JSONSchemaDraft04 loads the json schema document for json schema draft 4
+func JSONSchemaDraft04() (*Schema, error) {
+ b, err := Asset("jsonschema-draft-04.json")
+ if err != nil {
+ return nil, err
+ }
+
+ schema := new(Schema)
+ if err := json.Unmarshal(b, schema); err != nil {
+ return nil, err
+ }
+ return schema, nil
+}
+
+// MustLoadSwagger20Schema panics when Swagger20Schema returns an error
+func MustLoadSwagger20Schema() *Schema {
+ d, e := Swagger20Schema()
+ if e != nil {
+ panic(e)
+ }
+ return d
+}
+
+// Swagger20Schema loads the swagger 2.0 schema from the embedded assets
+func Swagger20Schema() (*Schema, error) {
+ b, err := Asset("v2/schema.json")
+ if err != nil {
+ return nil, err
+ }
+
+ schema := new(Schema)
+ if err := json.Unmarshal(b, schema); err != nil {
+ return nil, err
+ }
+ return schema, nil
+}
diff --git a/vendor/github.com/go-openapi/spec/swagger.go b/vendor/github.com/go-openapi/spec/swagger.go
new file mode 100644
index 000000000..44722ffd5
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/swagger.go
@@ -0,0 +1,448 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "bytes"
+ "encoding/gob"
+ "encoding/json"
+ "fmt"
+ "strconv"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/swag"
+)
+
+// Swagger this is the root document object for the API specification.
+// It combines what previously was the Resource Listing and API Declaration (version 1.2 and earlier)
+// together into one document.
+//
+// For more information: http://goo.gl/8us55a#swagger-object-
+type Swagger struct {
+ VendorExtensible
+ SwaggerProps
+}
+
+// JSONLookup look up a value by the json property name
+func (s Swagger) JSONLookup(token string) (interface{}, error) {
+ if ex, ok := s.Extensions[token]; ok {
+ return &ex, nil
+ }
+ r, _, err := jsonpointer.GetForToken(s.SwaggerProps, token)
+ return r, err
+}
+
+// MarshalJSON marshals this swagger structure to json
+func (s Swagger) MarshalJSON() ([]byte, error) {
+ b1, err := json.Marshal(s.SwaggerProps)
+ if err != nil {
+ return nil, err
+ }
+ b2, err := json.Marshal(s.VendorExtensible)
+ if err != nil {
+ return nil, err
+ }
+ return swag.ConcatJSON(b1, b2), nil
+}
+
+// UnmarshalJSON unmarshals a swagger spec from json
+func (s *Swagger) UnmarshalJSON(data []byte) error {
+ var sw Swagger
+ if err := json.Unmarshal(data, &sw.SwaggerProps); err != nil {
+ return err
+ }
+ if err := json.Unmarshal(data, &sw.VendorExtensible); err != nil {
+ return err
+ }
+ *s = sw
+ return nil
+}
+
+// GobEncode provides a safe gob encoder for Swagger, including extensions
+func (s Swagger) GobEncode() ([]byte, error) {
+ var b bytes.Buffer
+ raw := struct {
+ Props SwaggerProps
+ Ext VendorExtensible
+ }{
+ Props: s.SwaggerProps,
+ Ext: s.VendorExtensible,
+ }
+ err := gob.NewEncoder(&b).Encode(raw)
+ return b.Bytes(), err
+}
+
+// GobDecode provides a safe gob decoder for Swagger, including extensions
+func (s *Swagger) GobDecode(b []byte) error {
+ var raw struct {
+ Props SwaggerProps
+ Ext VendorExtensible
+ }
+ buf := bytes.NewBuffer(b)
+ err := gob.NewDecoder(buf).Decode(&raw)
+ if err != nil {
+ return err
+ }
+ s.SwaggerProps = raw.Props
+ s.VendorExtensible = raw.Ext
+ return nil
+}
+
+// SwaggerProps captures the top-level properties of an API specification
+//
+// NOTE: validation rules
+// - the scheme, when present must be from [http, https, ws, wss]
+// - BasePath must start with a leading "/"
+// - Paths is required
+type SwaggerProps struct {
+ ID string `json:"id,omitempty"`
+ Consumes []string `json:"consumes,omitempty"`
+ Produces []string `json:"produces,omitempty"`
+ Schemes []string `json:"schemes,omitempty"`
+ Swagger string `json:"swagger,omitempty"`
+ Info *Info `json:"info,omitempty"`
+ Host string `json:"host,omitempty"`
+ BasePath string `json:"basePath,omitempty"`
+ Paths *Paths `json:"paths"`
+ Definitions Definitions `json:"definitions,omitempty"`
+ Parameters map[string]Parameter `json:"parameters,omitempty"`
+ Responses map[string]Response `json:"responses,omitempty"`
+ SecurityDefinitions SecurityDefinitions `json:"securityDefinitions,omitempty"`
+ Security []map[string][]string `json:"security,omitempty"`
+ Tags []Tag `json:"tags,omitempty"`
+ ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"`
+}
+
+type swaggerPropsAlias SwaggerProps
+
+type gobSwaggerPropsAlias struct {
+ Security []map[string]struct {
+ List []string
+ Pad bool
+ }
+ Alias *swaggerPropsAlias
+ SecurityIsEmpty bool
+}
+
+// GobEncode provides a safe gob encoder for SwaggerProps, including empty security requirements
+func (o SwaggerProps) GobEncode() ([]byte, error) {
+ raw := gobSwaggerPropsAlias{
+ Alias: (*swaggerPropsAlias)(&o),
+ }
+
+ var b bytes.Buffer
+ if o.Security == nil {
+ // nil security requirement
+ err := gob.NewEncoder(&b).Encode(raw)
+ return b.Bytes(), err
+ }
+
+ if len(o.Security) == 0 {
+ // empty, but non-nil security requirement
+ raw.SecurityIsEmpty = true
+ raw.Alias.Security = nil
+ err := gob.NewEncoder(&b).Encode(raw)
+ return b.Bytes(), err
+ }
+
+ raw.Security = make([]map[string]struct {
+ List []string
+ Pad bool
+ }, 0, len(o.Security))
+ for _, req := range o.Security {
+ v := make(map[string]struct {
+ List []string
+ Pad bool
+ }, len(req))
+ for k, val := range req {
+ v[k] = struct {
+ List []string
+ Pad bool
+ }{
+ List: val,
+ }
+ }
+ raw.Security = append(raw.Security, v)
+ }
+
+ err := gob.NewEncoder(&b).Encode(raw)
+ return b.Bytes(), err
+}
+
+// GobDecode provides a safe gob decoder for SwaggerProps, including empty security requirements
+func (o *SwaggerProps) GobDecode(b []byte) error {
+ var raw gobSwaggerPropsAlias
+
+ buf := bytes.NewBuffer(b)
+ err := gob.NewDecoder(buf).Decode(&raw)
+ if err != nil {
+ return err
+ }
+ if raw.Alias == nil {
+ return nil
+ }
+
+ switch {
+ case raw.SecurityIsEmpty:
+ // empty, but non-nil security requirement
+ raw.Alias.Security = []map[string][]string{}
+ case len(raw.Alias.Security) == 0:
+ // nil security requirement
+ raw.Alias.Security = nil
+ default:
+ raw.Alias.Security = make([]map[string][]string, 0, len(raw.Security))
+ for _, req := range raw.Security {
+ v := make(map[string][]string, len(req))
+ for k, val := range req {
+ v[k] = make([]string, 0, len(val.List))
+ v[k] = append(v[k], val.List...)
+ }
+ raw.Alias.Security = append(raw.Alias.Security, v)
+ }
+ }
+
+ *o = *(*SwaggerProps)(raw.Alias)
+ return nil
+}
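+
+// Round-trip sketch (illustrative only): the gob helpers preserve the
+// distinction between a nil security requirement and an empty, non-nil one.
+//
+//	in := SwaggerProps{Security: []map[string][]string{}} // empty, but non-nil
+//	var buf bytes.Buffer
+//	_ = gob.NewEncoder(&buf).Encode(in)
+//	var out SwaggerProps
+//	_ = gob.NewDecoder(&buf).Decode(&out)
+//	// out.Security != nil && len(out.Security) == 0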
+
+// Dependencies represent a dependencies property
+type Dependencies map[string]SchemaOrStringArray
+
+// SchemaOrBool represents a schema or boolean value; it is biased towards true for the boolean property
+type SchemaOrBool struct {
+ Allows bool
+ Schema *Schema
+}
+
+// JSONLookup implements an interface to customize json pointer lookup
+func (s SchemaOrBool) JSONLookup(token string) (interface{}, error) {
+ if token == "allows" {
+ return s.Allows, nil
+ }
+ r, _, err := jsonpointer.GetForToken(s.Schema, token)
+ return r, err
+}
+
+var jsTrue = []byte("true")
+var jsFalse = []byte("false")
+
+// MarshalJSON convert this object to JSON
+func (s SchemaOrBool) MarshalJSON() ([]byte, error) {
+ if s.Schema != nil {
+ return json.Marshal(s.Schema)
+ }
+
+ if s.Schema == nil && !s.Allows {
+ return jsFalse, nil
+ }
+ return jsTrue, nil
+}
+
+// UnmarshalJSON converts this bool or schema object from a JSON structure
+func (s *SchemaOrBool) UnmarshalJSON(data []byte) error {
+ var nw SchemaOrBool
+ if len(data) >= 4 {
+ if data[0] == '{' {
+ var sch Schema
+ if err := json.Unmarshal(data, &sch); err != nil {
+ return err
+ }
+ nw.Schema = &sch
+ }
+ nw.Allows = !bytes.Equal(data, jsFalse) // avoids indexing past the end of short inputs
+ }
+ *s = nw
+ return nil
+}
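+
+// Decoding sketch (illustrative only): the three JSON shapes accepted for
+// additionalProperties map onto Allows and Schema as follows.
+//
+//	var ap SchemaOrBool
+//	_ = json.Unmarshal([]byte(`false`), &ap) // ap.Allows == false
+//	_ = json.Unmarshal([]byte(`true`), &ap)  // ap.Allows == true, ap.Schema == nil
+//	_ = json.Unmarshal([]byte(`{"type":"string"}`), &ap) // ap.Allows == true, ap.Schema != nil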
+
+// SchemaOrStringArray represents a schema or a string array
+type SchemaOrStringArray struct {
+ Schema *Schema
+ Property []string
+}
+
+// JSONLookup implements an interface to customize json pointer lookup
+func (s SchemaOrStringArray) JSONLookup(token string) (interface{}, error) {
+ r, _, err := jsonpointer.GetForToken(s.Schema, token)
+ return r, err
+}
+
+// MarshalJSON converts this schema object or array into JSON structure
+func (s SchemaOrStringArray) MarshalJSON() ([]byte, error) {
+ if len(s.Property) > 0 {
+ return json.Marshal(s.Property)
+ }
+ if s.Schema != nil {
+ return json.Marshal(s.Schema)
+ }
+ return []byte("null"), nil
+}
+
+// UnmarshalJSON converts this schema object or array from a JSON structure
+func (s *SchemaOrStringArray) UnmarshalJSON(data []byte) error {
+ var first byte
+ if len(data) > 1 {
+ first = data[0]
+ }
+ var nw SchemaOrStringArray
+ if first == '{' {
+ var sch Schema
+ if err := json.Unmarshal(data, &sch); err != nil {
+ return err
+ }
+ nw.Schema = &sch
+ }
+ if first == '[' {
+ if err := json.Unmarshal(data, &nw.Property); err != nil {
+ return err
+ }
+ }
+ *s = nw
+ return nil
+}
+
+// Definitions contains the models explicitly defined in this spec.
+// An object to hold data types that can be consumed and produced by operations.
+// These data types can be primitives, arrays or models.
+//
+// For more information: http://goo.gl/8us55a#definitionsObject
+type Definitions map[string]Schema
+
+// SecurityDefinitions a declaration of the security schemes available to be used in the specification.
+// This does not enforce the security schemes on the operations and only serves to provide
+// the relevant details for each scheme.
+//
+// For more information: http://goo.gl/8us55a#securityDefinitionsObject
+type SecurityDefinitions map[string]*SecurityScheme
+
+// StringOrArray represents a value that can either be a string
+// or an array of strings. Mainly here for serialization purposes
+type StringOrArray []string
+
+// Contains returns true when the value is contained in the slice
+func (s StringOrArray) Contains(value string) bool {
+ for _, str := range s {
+ if str == value {
+ return true
+ }
+ }
+ return false
+}
+
+// JSONLookup implements an interface to customize json pointer lookup
+func (s SchemaOrArray) JSONLookup(token string) (interface{}, error) {
+ if _, err := strconv.Atoi(token); err == nil {
+ r, _, err := jsonpointer.GetForToken(s.Schemas, token)
+ return r, err
+ }
+ r, _, err := jsonpointer.GetForToken(s.Schema, token)
+ return r, err
+}
+
+// UnmarshalJSON unmarshals this string or array object from a JSON array or JSON string
+func (s *StringOrArray) UnmarshalJSON(data []byte) error {
+ var first byte
+ if len(data) > 1 {
+ first = data[0]
+ }
+
+ if first == '[' {
+ var parsed []string
+ if err := json.Unmarshal(data, &parsed); err != nil {
+ return err
+ }
+ *s = StringOrArray(parsed)
+ return nil
+ }
+
+ var single interface{}
+ if err := json.Unmarshal(data, &single); err != nil {
+ return err
+ }
+ if single == nil {
+ return nil
+ }
+ switch v := single.(type) {
+ case string:
+ *s = StringOrArray([]string{v})
+ return nil
+ default:
+ return fmt.Errorf("only string or array is allowed, not %T", single)
+ }
+}
+
+// MarshalJSON converts this string or array to a JSON array or JSON string
+func (s StringOrArray) MarshalJSON() ([]byte, error) {
+ if len(s) == 1 {
+ return json.Marshal([]string(s)[0])
+ }
+ return json.Marshal([]string(s))
+}
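+
+// Serialization sketch (illustrative only): a single-element StringOrArray
+// collapses to a bare JSON string, while multiple elements stay an array.
+//
+//	var t StringOrArray
+//	_ = json.Unmarshal([]byte(`"string"`), &t)          // t == StringOrArray{"string"}
+//	_ = json.Unmarshal([]byte(`["string","null"]`), &t) // two elements
+//	b, _ := json.Marshal(StringOrArray{"string"})       // b == `"string"`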
+
+// SchemaOrArray represents a value that can either be a Schema
+// or an array of Schema. Mainly here for serialization purposes
+type SchemaOrArray struct {
+ Schema *Schema
+ Schemas []Schema
+}
+
+// Len returns the number of schemas in this property
+func (s SchemaOrArray) Len() int {
+ if s.Schema != nil {
+ return 1
+ }
+ return len(s.Schemas)
+}
+
+// ContainsType returns true when one of the schemas is of the specified type
+func (s *SchemaOrArray) ContainsType(name string) bool {
+ if s.Schema != nil {
+ return s.Schema.Type != nil && s.Schema.Type.Contains(name)
+ }
+ return false
+}
+
+// MarshalJSON converts this schema object or array into JSON structure
+func (s SchemaOrArray) MarshalJSON() ([]byte, error) {
+ if len(s.Schemas) > 0 {
+ return json.Marshal(s.Schemas)
+ }
+ return json.Marshal(s.Schema)
+}
+
+// UnmarshalJSON converts this schema object or array from a JSON structure
+func (s *SchemaOrArray) UnmarshalJSON(data []byte) error {
+ var nw SchemaOrArray
+ var first byte
+ if len(data) > 1 {
+ first = data[0]
+ }
+ if first == '{' {
+ var sch Schema
+ if err := json.Unmarshal(data, &sch); err != nil {
+ return err
+ }
+ nw.Schema = &sch
+ }
+ if first == '[' {
+ if err := json.Unmarshal(data, &nw.Schemas); err != nil {
+ return err
+ }
+ }
+ *s = nw
+ return nil
+}
+
+// vim:set ft=go noet sts=2 sw=2 ts=2:
diff --git a/vendor/github.com/go-openapi/spec/tag.go b/vendor/github.com/go-openapi/spec/tag.go
new file mode 100644
index 000000000..faa3d3de1
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/tag.go
@@ -0,0 +1,75 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+import (
+ "encoding/json"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/swag"
+)
+
+// TagProps describes a tag entry in the top level tags section of a swagger spec
+type TagProps struct {
+ Description string `json:"description,omitempty"`
+ Name string `json:"name,omitempty"`
+ ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"`
+}
+
+// NewTag creates a new tag
+func NewTag(name, description string, externalDocs *ExternalDocumentation) Tag {
+ return Tag{TagProps: TagProps{Description: description, Name: name, ExternalDocs: externalDocs}}
+}
+
+// Tag allows adding metadata to a single tag that is used by the
+// [Operation Object](http://goo.gl/8us55a#operationObject).
+// It is not mandatory to have a Tag Object per tag used there.
+//
+// For more information: http://goo.gl/8us55a#tagObject
+type Tag struct {
+ VendorExtensible
+ TagProps
+}
+
+// JSONLookup implements an interface to customize json pointer lookup
+func (t Tag) JSONLookup(token string) (interface{}, error) {
+ if ex, ok := t.Extensions[token]; ok {
+ return &ex, nil
+ }
+
+ r, _, err := jsonpointer.GetForToken(t.TagProps, token)
+ return r, err
+}
+
+// MarshalJSON marshals this tag to JSON
+func (t Tag) MarshalJSON() ([]byte, error) {
+ b1, err := json.Marshal(t.TagProps)
+ if err != nil {
+ return nil, err
+ }
+ b2, err := json.Marshal(t.VendorExtensible)
+ if err != nil {
+ return nil, err
+ }
+ return swag.ConcatJSON(b1, b2), nil
+}
+
+// UnmarshalJSON unmarshals this tag from JSON
+func (t *Tag) UnmarshalJSON(data []byte) error {
+ if err := json.Unmarshal(data, &t.TagProps); err != nil {
+ return err
+ }
+ return json.Unmarshal(data, &t.VendorExtensible)
+}
diff --git a/vendor/github.com/go-openapi/spec/url_go18.go b/vendor/github.com/go-openapi/spec/url_go18.go
new file mode 100644
index 000000000..60b785153
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/url_go18.go
@@ -0,0 +1,8 @@
+//go:build !go1.19
+// +build !go1.19
+
+package spec
+
+import "net/url"
+
+var parseURL = url.Parse
diff --git a/vendor/github.com/go-openapi/spec/url_go19.go b/vendor/github.com/go-openapi/spec/url_go19.go
new file mode 100644
index 000000000..392e3e639
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/url_go19.go
@@ -0,0 +1,14 @@
+//go:build go1.19
+// +build go1.19
+
+package spec
+
+import "net/url"
+
+func parseURL(s string) (*url.URL, error) {
+ u, err := url.Parse(s)
+ if err == nil {
+ u.OmitHost = false
+ }
+ return u, err
+}
diff --git a/vendor/github.com/go-openapi/spec/validations.go b/vendor/github.com/go-openapi/spec/validations.go
new file mode 100644
index 000000000..6360a8ea7
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/validations.go
@@ -0,0 +1,215 @@
+package spec
+
+// CommonValidations describe common JSON-schema validations
+type CommonValidations struct {
+ Maximum *float64 `json:"maximum,omitempty"`
+ ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty"`
+ Minimum *float64 `json:"minimum,omitempty"`
+ ExclusiveMinimum bool `json:"exclusiveMinimum,omitempty"`
+ MaxLength *int64 `json:"maxLength,omitempty"`
+ MinLength *int64 `json:"minLength,omitempty"`
+ Pattern string `json:"pattern,omitempty"`
+ MaxItems *int64 `json:"maxItems,omitempty"`
+ MinItems *int64 `json:"minItems,omitempty"`
+ UniqueItems bool `json:"uniqueItems,omitempty"`
+ MultipleOf *float64 `json:"multipleOf,omitempty"`
+ Enum []interface{} `json:"enum,omitempty"`
+}
+
+// SetValidations defines all validations for a simple schema.
+//
+// NOTE: the input is the larger set of validations available for schemas.
+// For simple schemas, MinProperties and MaxProperties are ignored.
+func (v *CommonValidations) SetValidations(val SchemaValidations) {
+ v.Maximum = val.Maximum
+ v.ExclusiveMaximum = val.ExclusiveMaximum
+ v.Minimum = val.Minimum
+ v.ExclusiveMinimum = val.ExclusiveMinimum
+ v.MaxLength = val.MaxLength
+ v.MinLength = val.MinLength
+ v.Pattern = val.Pattern
+ v.MaxItems = val.MaxItems
+ v.MinItems = val.MinItems
+ v.UniqueItems = val.UniqueItems
+ v.MultipleOf = val.MultipleOf
+ v.Enum = val.Enum
+}
+
+type clearedValidation struct {
+ Validation string
+ Value interface{}
+}
+
+type clearedValidations []clearedValidation
+
+func (c clearedValidations) apply(cbs []func(string, interface{})) {
+ for _, cb := range cbs {
+ for _, cleared := range c {
+ cb(cleared.Validation, cleared.Value)
+ }
+ }
+}
+
+// ClearNumberValidations clears all number validations.
+//
+// Optional callbacks may be supplied by the caller to capture the cleared values.
+func (v *CommonValidations) ClearNumberValidations(cbs ...func(string, interface{})) {
+ done := make(clearedValidations, 0, 5)
+ defer func() {
+ done.apply(cbs)
+ }()
+
+ if v.Minimum != nil {
+ done = append(done, clearedValidation{Validation: "minimum", Value: v.Minimum})
+ v.Minimum = nil
+ }
+ if v.Maximum != nil {
+ done = append(done, clearedValidation{Validation: "maximum", Value: v.Maximum})
+ v.Maximum = nil
+ }
+ if v.ExclusiveMaximum {
+ done = append(done, clearedValidation{Validation: "exclusiveMaximum", Value: v.ExclusiveMaximum})
+ v.ExclusiveMaximum = false
+ }
+ if v.ExclusiveMinimum {
+ done = append(done, clearedValidation{Validation: "exclusiveMinimum", Value: v.ExclusiveMinimum})
+ v.ExclusiveMinimum = false
+ }
+ if v.MultipleOf != nil {
+ done = append(done, clearedValidation{Validation: "multipleOf", Value: v.MultipleOf})
+ v.MultipleOf = nil
+ }
+}
+
+// ClearStringValidations clears all string validations.
+//
+// Optional callbacks may be supplied by the caller to capture the cleared values.
+func (v *CommonValidations) ClearStringValidations(cbs ...func(string, interface{})) {
+ done := make(clearedValidations, 0, 3)
+ defer func() {
+ done.apply(cbs)
+ }()
+
+ if v.Pattern != "" {
+ done = append(done, clearedValidation{Validation: "pattern", Value: v.Pattern})
+ v.Pattern = ""
+ }
+ if v.MinLength != nil {
+ done = append(done, clearedValidation{Validation: "minLength", Value: v.MinLength})
+ v.MinLength = nil
+ }
+ if v.MaxLength != nil {
+ done = append(done, clearedValidation{Validation: "maxLength", Value: v.MaxLength})
+ v.MaxLength = nil
+ }
+}
+
+// ClearArrayValidations clears all array validations.
+//
+// Optional callbacks may be supplied by the caller to capture the cleared values.
+func (v *CommonValidations) ClearArrayValidations(cbs ...func(string, interface{})) {
+ done := make(clearedValidations, 0, 3)
+ defer func() {
+ done.apply(cbs)
+ }()
+
+ if v.MaxItems != nil {
+ done = append(done, clearedValidation{Validation: "maxItems", Value: v.MaxItems})
+ v.MaxItems = nil
+ }
+ if v.MinItems != nil {
+ done = append(done, clearedValidation{Validation: "minItems", Value: v.MinItems})
+ v.MinItems = nil
+ }
+ if v.UniqueItems {
+ done = append(done, clearedValidation{Validation: "uniqueItems", Value: v.UniqueItems})
+ v.UniqueItems = false
+ }
+}
+
+// Validations returns a clone of the validations for a simple schema.
+//
+// NOTE: in the context of simple schema objects, MinProperties, MaxProperties
+// and PatternProperties remain unset.
+func (v CommonValidations) Validations() SchemaValidations {
+ return SchemaValidations{
+ CommonValidations: v,
+ }
+}
+
+// HasNumberValidations indicates if the validations are for numbers or integers
+func (v CommonValidations) HasNumberValidations() bool {
+ return v.Maximum != nil || v.Minimum != nil || v.MultipleOf != nil
+}
+
+// HasStringValidations indicates if the validations are for strings
+func (v CommonValidations) HasStringValidations() bool {
+ return v.MaxLength != nil || v.MinLength != nil || v.Pattern != ""
+}
+
+// HasArrayValidations indicates if the validations are for arrays
+func (v CommonValidations) HasArrayValidations() bool {
+ return v.MaxItems != nil || v.MinItems != nil || v.UniqueItems
+}
+
+// HasEnum indicates if the validation includes some enum constraint
+func (v CommonValidations) HasEnum() bool {
+ return len(v.Enum) > 0
+}
+
+// SchemaValidations describes the validation properties of a schema
+//
+// NOTE: at this moment, this is not embedded in SchemaProps because this would induce a breaking change
+// in the exported members: all initializers using literals would fail.
+type SchemaValidations struct {
+ CommonValidations
+
+ PatternProperties SchemaProperties `json:"patternProperties,omitempty"`
+ MaxProperties *int64 `json:"maxProperties,omitempty"`
+ MinProperties *int64 `json:"minProperties,omitempty"`
+}
+
+// HasObjectValidations indicates if the validations are for objects
+func (v SchemaValidations) HasObjectValidations() bool {
+ return v.MaxProperties != nil || v.MinProperties != nil || v.PatternProperties != nil
+}
+
+// SetValidations for schema validations
+func (v *SchemaValidations) SetValidations(val SchemaValidations) {
+ v.CommonValidations.SetValidations(val)
+ v.PatternProperties = val.PatternProperties
+ v.MaxProperties = val.MaxProperties
+ v.MinProperties = val.MinProperties
+}
+
+// Validations for a schema
+func (v SchemaValidations) Validations() SchemaValidations {
+ val := v.CommonValidations.Validations()
+ val.PatternProperties = v.PatternProperties
+ val.MinProperties = v.MinProperties
+ val.MaxProperties = v.MaxProperties
+ return val
+}
+
+// ClearObjectValidations clears all object validations.
+//
+// Some callbacks may be set by the caller to capture changed values.
+func (v *SchemaValidations) ClearObjectValidations(cbs ...func(string, interface{})) {
+ done := make(clearedValidations, 0, 3)
+ defer func() {
+ done.apply(cbs)
+ }()
+
+ if v.MaxProperties != nil {
+ done = append(done, clearedValidation{Validation: "maxProperties", Value: v.MaxProperties})
+ v.MaxProperties = nil
+ }
+ if v.MinProperties != nil {
+ done = append(done, clearedValidation{Validation: "minProperties", Value: v.MinProperties})
+ v.MinProperties = nil
+ }
+ if v.PatternProperties != nil {
+ done = append(done, clearedValidation{Validation: "patternProperties", Value: v.PatternProperties})
+ v.PatternProperties = nil
+ }
+}
diff --git a/vendor/github.com/go-openapi/spec/xml_object.go b/vendor/github.com/go-openapi/spec/xml_object.go
new file mode 100644
index 000000000..945a46703
--- /dev/null
+++ b/vendor/github.com/go-openapi/spec/xml_object.go
@@ -0,0 +1,68 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spec
+
+// XMLObject a metadata object that allows for more fine-tuned XML model definitions.
+//
+// For more information: http://goo.gl/8us55a#xmlObject
+type XMLObject struct {
+ Name string `json:"name,omitempty"`
+ Namespace string `json:"namespace,omitempty"`
+ Prefix string `json:"prefix,omitempty"`
+ Attribute bool `json:"attribute,omitempty"`
+ Wrapped bool `json:"wrapped,omitempty"`
+}
+
+// WithName sets the xml name for the object
+func (x *XMLObject) WithName(name string) *XMLObject {
+ x.Name = name
+ return x
+}
+
+// WithNamespace sets the xml namespace for the object
+func (x *XMLObject) WithNamespace(namespace string) *XMLObject {
+ x.Namespace = namespace
+ return x
+}
+
+// WithPrefix sets the xml prefix for the object
+func (x *XMLObject) WithPrefix(prefix string) *XMLObject {
+ x.Prefix = prefix
+ return x
+}
+
+// AsAttribute flags this object as an xml attribute
+func (x *XMLObject) AsAttribute() *XMLObject {
+ x.Attribute = true
+ return x
+}
+
+// AsElement flags this object as an xml node
+func (x *XMLObject) AsElement() *XMLObject {
+ x.Attribute = false
+ return x
+}
+
+// AsWrapped flags this object as wrapped, this is mostly useful for array types
+func (x *XMLObject) AsWrapped() *XMLObject {
+ x.Wrapped = true
+ return x
+}
+
+// AsUnwrapped flags this object as not wrapped, undoing AsWrapped
+func (x *XMLObject) AsUnwrapped() *XMLObject {
+ x.Wrapped = false
+ return x
+}
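+
+// A small illustrative sketch (not part of the upstream docs): the With*/As*
+// methods all return the receiver, so declarations chain fluently, e.g. for
+// a namespaced, wrapped XML array element:
+//
+//	x := new(XMLObject).WithName("item").WithNamespace("http://example.com/ns").AsWrapped()
+//	_ = x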
diff --git a/vendor/github.com/go-openapi/strfmt/.editorconfig b/vendor/github.com/go-openapi/strfmt/.editorconfig
new file mode 100644
index 000000000..3152da69a
--- /dev/null
+++ b/vendor/github.com/go-openapi/strfmt/.editorconfig
@@ -0,0 +1,26 @@
+# top-most EditorConfig file
+root = true
+
+# Unix-style newlines with a newline ending every file
+[*]
+end_of_line = lf
+insert_final_newline = true
+indent_style = space
+indent_size = 2
+trim_trailing_whitespace = true
+
+# Set default charset
+[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
+charset = utf-8
+
+# Tab indentation (no size specified)
+[*.go]
+indent_style = tab
+
+[*.md]
+trim_trailing_whitespace = false
+
+# Matches the exact files either package.json or .travis.yml
+[{package.json,.travis.yml}]
+indent_style = space
+indent_size = 2
diff --git a/vendor/github.com/go-openapi/strfmt/.gitattributes b/vendor/github.com/go-openapi/strfmt/.gitattributes
new file mode 100644
index 000000000..d020be8ea
--- /dev/null
+++ b/vendor/github.com/go-openapi/strfmt/.gitattributes
@@ -0,0 +1,2 @@
+*.go text eol=lf
+
diff --git a/vendor/github.com/go-openapi/strfmt/.gitignore b/vendor/github.com/go-openapi/strfmt/.gitignore
new file mode 100644
index 000000000..dd91ed6a0
--- /dev/null
+++ b/vendor/github.com/go-openapi/strfmt/.gitignore
@@ -0,0 +1,2 @@
+secrets.yml
+coverage.out
diff --git a/vendor/github.com/go-openapi/strfmt/.golangci.yml b/vendor/github.com/go-openapi/strfmt/.golangci.yml
new file mode 100644
index 000000000..be4899cb1
--- /dev/null
+++ b/vendor/github.com/go-openapi/strfmt/.golangci.yml
@@ -0,0 +1,59 @@
+linters-settings:
+ govet:
+ check-shadowing: true
+ golint:
+ min-confidence: 0
+ gocyclo:
+ min-complexity: 31
+ maligned:
+ suggest-new: true
+ dupl:
+ threshold: 100
+ goconst:
+ min-len: 2
+ min-occurrences: 4
+
+linters:
+ enable:
+ - revive
+ - goimports
+ - gosec
+ - unparam
+ - unconvert
+ - predeclared
+ - prealloc
+ - misspell
+
+ # disable:
+ # - maligned
+ # - lll
+ # - gochecknoinits
+ # - gochecknoglobals
+ # - godox
+ # - gocognit
+ # - whitespace
+ # - wsl
+ # - funlen
+ # - wrapcheck
+ # - testpackage
+ # - nlreturn
+ # - gofumpt
+ # - goerr113
+ # - gci
+ # - gomnd
+ # - godot
+ # - exhaustivestruct
+ # - paralleltest
+ # - varnamelen
+ # - ireturn
+ # - exhaustruct
+ # #- thelper
+
+issues:
+ exclude-rules:
+ - path: bson.go
+ text: "should be .*ObjectID"
+ linters:
+ - golint
+ - stylecheck
+
diff --git a/vendor/github.com/go-openapi/strfmt/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/strfmt/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..9322b065e
--- /dev/null
+++ b/vendor/github.com/go-openapi/strfmt/CODE_OF_CONDUCT.md
@@ -0,0 +1,74 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of experience,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at ivan+abuse@flanders.co.nz. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/strfmt/LICENSE b/vendor/github.com/go-openapi/strfmt/LICENSE
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/vendor/github.com/go-openapi/strfmt/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/go-openapi/strfmt/README.md b/vendor/github.com/go-openapi/strfmt/README.md
new file mode 100644
index 000000000..0cf89d776
--- /dev/null
+++ b/vendor/github.com/go-openapi/strfmt/README.md
@@ -0,0 +1,88 @@
+# Strfmt [![Build Status](https://travis-ci.org/go-openapi/strfmt.svg?branch=master)](https://travis-ci.org/go-openapi/strfmt) [![codecov](https://codecov.io/gh/go-openapi/strfmt/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/strfmt) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
+
+[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/strfmt/master/LICENSE)
+[![GoDoc](https://godoc.org/github.com/go-openapi/strfmt?status.svg)](http://godoc.org/github.com/go-openapi/strfmt)
+[![GolangCI](https://golangci.com/badges/github.com/go-openapi/strfmt.svg)](https://golangci.com)
+[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/strfmt)](https://goreportcard.com/report/github.com/go-openapi/strfmt)
+
+This package exposes a registry of data types to support string formats in the go-openapi toolkit.
+
+strfmt represents a well-known string format such as a credit card number or an email address. The go toolkit for OpenAPI specifications knows how to deal with those.
+
+## Supported data formats
+go-openapi/strfmt follows the swagger 2.0 specification with the following formats
+defined [here](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types).
+
+It also provides convenient extensions to go-openapi users.
+
+- [x] JSON-schema draft 4 formats
+ - date-time
+ - email
+ - hostname
+ - ipv4
+ - ipv6
+ - uri
+- [x] swagger 2.0 format extensions
+ - binary
+ - byte (e.g. base64 encoded string)
+ - date (e.g. "1970-01-01")
+ - password
+- [x] go-openapi custom format extensions
+ - bsonobjectid (BSON objectID)
+ - creditcard
+ - duration (e.g. "3 weeks", "1ms")
+ - hexcolor (e.g. "#FFFFFF")
+ - isbn, isbn10, isbn13
+ - mac (e.g "01:02:03:04:05:06")
+ - rgbcolor (e.g. "rgb(100,100,100)")
+ - ssn
+ - uuid, uuid3, uuid4, uuid5
+ - cidr (e.g. "192.0.2.1/24", "2001:db8:a0b:12f0::1/32")
+ - ulid (e.g. "00000PP9HGSBSSDZ1JTEXBJ0PW", [spec](https://github.com/ulid/spec))
+
+> NOTE: as the name suggests, this package is intended to support string formatting only.
+> It does not provide validation for numerical values with the swagger format extensions for the JSON types "number" or
+> "integer" (e.g. float, double, int32...).
+
+## Type conversion
+
+All types defined here are stringers and may be converted to strings with `.String()`.
+Note that most types defined by this package may be converted directly to string like `string(Email{})`.
+
+`Date` and `DateTime` may be converted directly to `time.Time` like `time.Time(DateTime{})`.
+Similarly, you can convert `Duration` to `time.Duration` as in `time.Duration(Duration{})`.
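+
+A minimal sketch of these conversions (nothing here beyond the types this README lists):
+
+```go
+package main
+
+import (
+	"fmt"
+	"time"
+
+	"github.com/go-openapi/strfmt"
+)
+
+func main() {
+	e := strfmt.Email("dev@example.com")
+	fmt.Println(e.String()) // via the stringer
+	fmt.Println(string(e))  // via direct conversion
+
+	d := strfmt.Date(time.Now())
+	fmt.Println(time.Time(d).Year()) // Date converts straight back to time.Time
+}
+```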
+
+## Using pointers
+
+The `conv` subpackage provides helpers to convert the types to and from pointers, just like `go-openapi/swag` does
+with primitive types.
+
+## Format types
+Types defined in strfmt expose marshaling and validation capabilities.
+
+List of defined types:
+- Base64
+- CreditCard
+- Date
+- DateTime
+- Duration
+- Email
+- HexColor
+- Hostname
+- IPv4
+- IPv6
+- CIDR
+- ISBN
+- ISBN10
+- ISBN13
+- MAC
+- ObjectId
+- Password
+- RGBColor
+- SSN
+- URI
+- UUID
+- UUID3
+- UUID4
+- UUID5
+- [ULID](https://github.com/ulid/spec)
diff --git a/vendor/github.com/go-openapi/strfmt/bson.go b/vendor/github.com/go-openapi/strfmt/bson.go
new file mode 100644
index 000000000..a8a3604a2
--- /dev/null
+++ b/vendor/github.com/go-openapi/strfmt/bson.go
@@ -0,0 +1,165 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package strfmt
+
+import (
+ "database/sql/driver"
+ "fmt"
+
+ "go.mongodb.org/mongo-driver/bson"
+
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ bsonprim "go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+func init() {
+ var id ObjectId
+ // register this format in the default registry
+ Default.Add("bsonobjectid", &id, IsBSONObjectID)
+}
+
+// IsBSONObjectID returns true when the string is a valid BSON.ObjectId
+func IsBSONObjectID(str string) bool {
+ _, err := bsonprim.ObjectIDFromHex(str)
+ return err == nil
+}
+
+// ObjectId represents a BSON object ID (alias to go.mongodb.org/mongo-driver/bson/primitive.ObjectID)
+//
+// swagger:strfmt bsonobjectid
+type ObjectId bsonprim.ObjectID //nolint:revive
+
+// NewObjectId creates an ObjectId from a hex string. It panics when the hex string is invalid.
+func NewObjectId(hex string) ObjectId { //nolint:revive
+ oid, err := bsonprim.ObjectIDFromHex(hex)
+ if err != nil {
+ panic(err)
+ }
+ return ObjectId(oid)
+}
+
+// MarshalText turns this instance into text
+func (id ObjectId) MarshalText() ([]byte, error) {
+ oid := bsonprim.ObjectID(id)
+ if oid == bsonprim.NilObjectID {
+ return nil, nil
+ }
+ return []byte(oid.Hex()), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (id *ObjectId) UnmarshalText(data []byte) error { // validation is performed later on
+ if len(data) == 0 {
+ *id = ObjectId(bsonprim.NilObjectID)
+ return nil
+ }
+ oidstr := string(data)
+ oid, err := bsonprim.ObjectIDFromHex(oidstr)
+ if err != nil {
+ return err
+ }
+ *id = ObjectId(oid)
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (id *ObjectId) Scan(raw interface{}) error {
+ var data []byte
+ switch v := raw.(type) {
+ case []byte:
+ data = v
+ case string:
+ data = []byte(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.URI from: %#v", v)
+ }
+
+ return id.UnmarshalText(data)
+}
+
+// Value converts a value to a database driver value
+func (id ObjectId) Value() (driver.Value, error) {
+ return driver.Value(bsonprim.ObjectID(id).Hex()), nil
+}
+
+func (id ObjectId) String() string {
+ return bsonprim.ObjectID(id).Hex()
+}
+
+// MarshalJSON returns the ObjectId as JSON
+func (id ObjectId) MarshalJSON() ([]byte, error) {
+ return bsonprim.ObjectID(id).MarshalJSON()
+}
+
+// UnmarshalJSON sets the ObjectId from JSON
+func (id *ObjectId) UnmarshalJSON(data []byte) error {
+ var obj bsonprim.ObjectID
+ if err := obj.UnmarshalJSON(data); err != nil {
+ return err
+ }
+ *id = ObjectId(obj)
+ return nil
+}
+
+// MarshalBSON renders the object id as a BSON document
+func (id ObjectId) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": bsonprim.ObjectID(id)})
+}
+
+// UnmarshalBSON reads the objectId from a BSON document
+func (id *ObjectId) UnmarshalBSON(data []byte) error {
+ var obj struct {
+ Data bsonprim.ObjectID
+ }
+ if err := bson.Unmarshal(data, &obj); err != nil {
+ return err
+ }
+ *id = ObjectId(obj.Data)
+ return nil
+}
+
+// MarshalBSONValue marshals this ObjectId into a raw BSON value, returning
+// its BSON type together with its bytes.
+func (id ObjectId) MarshalBSONValue() (bsontype.Type, []byte, error) {
+ oid := bsonprim.ObjectID(id)
+ return bsontype.ObjectID, oid[:], nil
+}
+
+// UnmarshalBSONValue hydrates this ObjectId from a raw BSON value. The bytes
+// are copied into the receiver, so the caller may reuse the input slice.
+func (id *ObjectId) UnmarshalBSONValue(_ bsontype.Type, data []byte) error {
+ var oid bsonprim.ObjectID
+ copy(oid[:], data)
+ *id = ObjectId(oid)
+ return nil
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (id *ObjectId) DeepCopyInto(out *ObjectId) {
+ *out = *id
+}
+
+// DeepCopy copies the receiver into a new ObjectId.
+func (id *ObjectId) DeepCopy() *ObjectId {
+ if id == nil {
+ return nil
+ }
+ out := new(ObjectId)
+ id.DeepCopyInto(out)
+ return out
+}
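+
+// A short sketch (illustrative only): building an ObjectId from a hex string
+// and rendering it back. NewObjectId panics on malformed input, so validate
+// with IsBSONObjectID first when the input is untrusted:
+//
+//	if IsBSONObjectID("507f1f77bcf86cd799439011") {
+//		id := NewObjectId("507f1f77bcf86cd799439011")
+//		fmt.Println(id.String()) // 507f1f77bcf86cd799439011
+//	}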
diff --git a/vendor/github.com/go-openapi/strfmt/date.go b/vendor/github.com/go-openapi/strfmt/date.go
new file mode 100644
index 000000000..3c93381c7
--- /dev/null
+++ b/vendor/github.com/go-openapi/strfmt/date.go
@@ -0,0 +1,187 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package strfmt
+
+import (
+ "database/sql/driver"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "time"
+
+ "go.mongodb.org/mongo-driver/bson"
+)
+
+func init() {
+ d := Date{}
+ // register this format in the default registry
+ Default.Add("date", &d, IsDate)
+}
+
+// IsDate returns true when the string is a valid date
+func IsDate(str string) bool {
+ _, err := time.Parse(RFC3339FullDate, str)
+ return err == nil
+}
+
+const (
+ // RFC3339FullDate represents a full-date as specified by RFC3339
+ // See: http://goo.gl/xXOvVd
+ RFC3339FullDate = "2006-01-02"
+)
+
+// Date represents a date from the API
+//
+// swagger:strfmt date
+type Date time.Time
+
+// String converts this date into a string
+func (d Date) String() string {
+ return time.Time(d).Format(RFC3339FullDate)
+}
+
+// UnmarshalText parses a text representation into a date type
+func (d *Date) UnmarshalText(text []byte) error {
+ if len(text) == 0 {
+ return nil
+ }
+ dd, err := time.ParseInLocation(RFC3339FullDate, string(text), DefaultTimeLocation)
+ if err != nil {
+ return err
+ }
+ *d = Date(dd)
+ return nil
+}
+
+// MarshalText serializes this date type to string
+func (d Date) MarshalText() ([]byte, error) {
+ return []byte(d.String()), nil
+}
+
+// Scan scans a Date value from a database driver type.
+func (d *Date) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ return d.UnmarshalText(v)
+ case string:
+ return d.UnmarshalText([]byte(v))
+ case time.Time:
+ *d = Date(v)
+ return nil
+ case nil:
+ *d = Date{}
+ return nil
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.Date from: %#v", v)
+ }
+}
+
+// Value converts Date to a primitive value ready to be written to a database.
+func (d Date) Value() (driver.Value, error) {
+ return driver.Value(d.String()), nil
+}
+
+// MarshalJSON returns the Date as JSON
+func (d Date) MarshalJSON() ([]byte, error) {
+ return json.Marshal(time.Time(d).Format(RFC3339FullDate))
+}
+
+// UnmarshalJSON sets the Date from JSON
+func (d *Date) UnmarshalJSON(data []byte) error {
+ if string(data) == jsonNull {
+ return nil
+ }
+ var strdate string
+ if err := json.Unmarshal(data, &strdate); err != nil {
+ return err
+ }
+ tt, err := time.ParseInLocation(RFC3339FullDate, strdate, DefaultTimeLocation)
+ if err != nil {
+ return err
+ }
+ *d = Date(tt)
+ return nil
+}
+
+// MarshalBSON renders this date as a BSON document
+func (d Date) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": d.String()})
+}
+
+// UnmarshalBSON reads this date from a BSON document
+func (d *Date) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if data, ok := m["data"].(string); ok {
+ rd, err := time.ParseInLocation(RFC3339FullDate, data, DefaultTimeLocation)
+ if err != nil {
+ return err
+ }
+ *d = Date(rd)
+ return nil
+ }
+
+ return errors.New("couldn't unmarshal bson bytes value as Date")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (d *Date) DeepCopyInto(out *Date) {
+ *out = *d
+}
+
+// DeepCopy copies the receiver into a new Date.
+func (d *Date) DeepCopy() *Date {
+ if d == nil {
+ return nil
+ }
+ out := new(Date)
+ d.DeepCopyInto(out)
+ return out
+}
+
+// GobEncode implements the gob.GobEncoder interface.
+func (d Date) GobEncode() ([]byte, error) {
+ return d.MarshalBinary()
+}
+
+// GobDecode implements the gob.GobDecoder interface.
+func (d *Date) GobDecode(data []byte) error {
+ return d.UnmarshalBinary(data)
+}
+
+// MarshalBinary implements the encoding.BinaryMarshaler interface.
+func (d Date) MarshalBinary() ([]byte, error) {
+ return time.Time(d).MarshalBinary()
+}
+
+// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface.
+func (d *Date) UnmarshalBinary(data []byte) error {
+ var original time.Time
+
+ err := original.UnmarshalBinary(data)
+ if err != nil {
+ return err
+ }
+
+ *d = Date(original)
+
+ return nil
+}
+
+// Equal checks if two Date instances are equal
+func (d Date) Equal(d2 Date) bool {
+ return time.Time(d).Equal(time.Time(d2))
+}
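+
+// A brief sketch (illustrative only): dates round-trip through the
+// RFC3339FullDate layout used throughout this file:
+//
+//	var d Date
+//	_ = d.UnmarshalText([]byte("2019-12-31"))
+//	fmt.Println(d.String()) // 2019-12-31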
diff --git a/vendor/github.com/go-openapi/strfmt/default.go b/vendor/github.com/go-openapi/strfmt/default.go
new file mode 100644
index 000000000..a89a4de3f
--- /dev/null
+++ b/vendor/github.com/go-openapi/strfmt/default.go
@@ -0,0 +1,2035 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package strfmt
+
+import (
+ "database/sql/driver"
+ "encoding/base64"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/mail"
+ "regexp"
+ "strings"
+
+ "github.com/asaskevich/govalidator"
+ "go.mongodb.org/mongo-driver/bson"
+)
+
+const (
+ // HostnamePattern http://json-schema.org/latest/json-schema-validation.html#anchor114
+ // A string instance is valid against this attribute if it is a valid
+ // representation for an Internet host name, as defined by RFC 1034, section 3.1 [RFC1034].
+ // http://tools.ietf.org/html/rfc1034#section-3.5
+ // <digit> ::= any one of the ten digits 0 through 9
+ // var digit = /[0-9]/;
+ // <letter> ::= any one of the 52 alphabetic characters A through Z in upper case and a through z in lower case
+ // var letter = /[a-zA-Z]/;
+ // <let-dig> ::= <letter> | <digit>
+ // var letDig = /[0-9a-zA-Z]/;
+ // <let-dig-hyp> ::= <let-dig> | "-"
+ // var letDigHyp = /[-0-9a-zA-Z]/;
+ // <ldh-str> ::= <let-dig-hyp> | <let-dig-hyp> <ldh-str>
+ // var ldhStr = /[-0-9a-zA-Z]+/;
+ // <label> ::= <letter> [ [ <ldh-str> ] <let-dig> ]
+ // var label = /[a-zA-Z](([-0-9a-zA-Z]+)?[0-9a-zA-Z])?/;
+ // <subdomain> ::= <label> | <subdomain> "." <label>
+ // var subdomain = /^[a-zA-Z](([-0-9a-zA-Z]+)?[0-9a-zA-Z])?(\.[a-zA-Z](([-0-9a-zA-Z]+)?[0-9a-zA-Z])?)*$/;
+ // <domain> ::= <subdomain> | " "
+ //
+ // Additional validations:
+ // - for FQDNs, the top-level domain (e.g. ".com") is at least two letters long (no special characters allowed here)
+ // - hostnames may start with a digit [RFC1123]
+ // - special registered names with an underscore ('_') are not allowed in this context
+ // - dashes are permitted, but not at the start or the end of a segment
+ // - long top-level domain names (e.g. example.london) are permitted
+ // - symbol unicode points are permitted (e.g. emoji) (not for top-level domain)
+ HostnamePattern = `^([a-zA-Z0-9\p{S}\p{L}]((-?[a-zA-Z0-9\p{S}\p{L}]{0,62})?)|([a-zA-Z0-9\p{S}\p{L}](([a-zA-Z0-9-\p{S}\p{L}]{0,61}[a-zA-Z0-9\p{S}\p{L}])?)(\.)){1,}([a-zA-Z\p{L}]){2,63})$`
+ // UUIDPattern Regex for UUID that allows uppercase
+ UUIDPattern = `(?i)^[0-9a-f]{8}-?[0-9a-f]{4}-?[0-9a-f]{4}-?[0-9a-f]{4}-?[0-9a-f]{12}$`
+ // UUID3Pattern Regex for UUID3 that allows uppercase
+ UUID3Pattern = `(?i)^[0-9a-f]{8}-?[0-9a-f]{4}-?3[0-9a-f]{3}-?[0-9a-f]{4}-?[0-9a-f]{12}$`
+ // UUID4Pattern Regex for UUID4 that allows uppercase
+ UUID4Pattern = `(?i)^[0-9a-f]{8}-?[0-9a-f]{4}-?4[0-9a-f]{3}-?[89ab][0-9a-f]{3}-?[0-9a-f]{12}$`
+ // UUID5Pattern Regex for UUID5 that allows uppercase
+ UUID5Pattern = `(?i)^[0-9a-f]{8}-?[0-9a-f]{4}-?5[0-9a-f]{3}-?[89ab][0-9a-f]{3}-?[0-9a-f]{12}$`
+ // json null type
+ jsonNull = "null"
+)
+
+var (
+ rxHostname = regexp.MustCompile(HostnamePattern)
+ rxUUID = regexp.MustCompile(UUIDPattern)
+ rxUUID3 = regexp.MustCompile(UUID3Pattern)
+ rxUUID4 = regexp.MustCompile(UUID4Pattern)
+ rxUUID5 = regexp.MustCompile(UUID5Pattern)
+)
+
+// IsHostname returns true when the string is a valid hostname
+func IsHostname(str string) bool {
+ if !rxHostname.MatchString(str) {
+ return false
+ }
+
+ // the sum of all label octets and label lengths is limited to 255.
+ if len(str) > 255 {
+ return false
+ }
+
+ // Each node has a label, which is zero to 63 octets in length
+ parts := strings.Split(str, ".")
+ valid := true
+ for _, p := range parts {
+ if len(p) > 63 {
+ valid = false
+ }
+ }
+ return valid
+}
+
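+// For instance (per the pattern and length rules above):
+// IsHostname("example.com") is true, while IsHostname("-bad.example") is
+// false, since a label may not start with a dash.
+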
+// IsUUID returns true if the string matches a UUID, upper case is allowed
+func IsUUID(str string) bool {
+ return rxUUID.MatchString(str)
+}
+
+// IsUUID3 returns true if the string matches a v3 UUID, upper case is allowed
+func IsUUID3(str string) bool {
+ return rxUUID3.MatchString(str)
+}
+
+// IsUUID4 returns true if the string matches a v4 UUID, upper case is allowed
+func IsUUID4(str string) bool {
+ return rxUUID4.MatchString(str)
+}
+
+// IsUUID5 returns true if the string matches a v5 UUID, upper case is allowed
+func IsUUID5(str string) bool {
+ return rxUUID5.MatchString(str)
+}
+
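+// For example (per the patterns above), a v4 identifier passes the generic
+// and v4 checks but fails the v3 one:
+//
+//	IsUUID("25e3e4b8-3f24-4678-8f18-1b3bbd5dd38f")  // true
+//	IsUUID4("25e3e4b8-3f24-4678-8f18-1b3bbd5dd38f") // true
+//	IsUUID3("25e3e4b8-3f24-4678-8f18-1b3bbd5dd38f") // false
+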
+// IsEmail validates an email address.
+func IsEmail(str string) bool {
+ addr, e := mail.ParseAddress(str)
+ return e == nil && addr.Address != ""
+}
+
+func init() {
+ // register formats in the default registry:
+ // - byte
+ // - creditcard
+ // - email
+ // - hexcolor
+ // - hostname
+ // - ipv4
+ // - ipv6
+ // - cidr
+ // - isbn
+ // - isbn10
+ // - isbn13
+ // - mac
+ // - password
+ // - rgbcolor
+ // - ssn
+ // - uri
+ // - uuid
+ // - uuid3
+ // - uuid4
+ // - uuid5
+ u := URI("")
+ Default.Add("uri", &u, govalidator.IsRequestURI)
+
+ eml := Email("")
+ Default.Add("email", &eml, IsEmail)
+
+ hn := Hostname("")
+ Default.Add("hostname", &hn, IsHostname)
+
+ ip4 := IPv4("")
+ Default.Add("ipv4", &ip4, govalidator.IsIPv4)
+
+ ip6 := IPv6("")
+ Default.Add("ipv6", &ip6, govalidator.IsIPv6)
+
+ cidr := CIDR("")
+ Default.Add("cidr", &cidr, govalidator.IsCIDR)
+
+ mac := MAC("")
+ Default.Add("mac", &mac, govalidator.IsMAC)
+
+ uid := UUID("")
+ Default.Add("uuid", &uid, IsUUID)
+
+ uid3 := UUID3("")
+ Default.Add("uuid3", &uid3, IsUUID3)
+
+ uid4 := UUID4("")
+ Default.Add("uuid4", &uid4, IsUUID4)
+
+ uid5 := UUID5("")
+ Default.Add("uuid5", &uid5, IsUUID5)
+
+ isbn := ISBN("")
+ Default.Add("isbn", &isbn, func(str string) bool { return govalidator.IsISBN10(str) || govalidator.IsISBN13(str) })
+
+ isbn10 := ISBN10("")
+ Default.Add("isbn10", &isbn10, govalidator.IsISBN10)
+
+ isbn13 := ISBN13("")
+ Default.Add("isbn13", &isbn13, govalidator.IsISBN13)
+
+ cc := CreditCard("")
+ Default.Add("creditcard", &cc, govalidator.IsCreditCard)
+
+ ssn := SSN("")
+ Default.Add("ssn", &ssn, govalidator.IsSSN)
+
+ hc := HexColor("")
+ Default.Add("hexcolor", &hc, govalidator.IsHexcolor)
+
+ rc := RGBColor("")
+ Default.Add("rgbcolor", &rc, govalidator.IsRGBcolor)
+
+ b64 := Base64([]byte(nil))
+ Default.Add("byte", &b64, govalidator.IsBase64)
+
+ pw := Password("")
+ Default.Add("password", &pw, func(_ string) bool { return true })
+}
+
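+// A sketch of registering a caller-defined format the same way (the
+// "lowercase" name and Lowercase type are hypothetical; any string alias
+// implementing the same marshaling methods as Email above would do):
+//
+//	var lc Lowercase
+//	Default.Add("lowercase", &lc, func(s string) bool { return s == strings.ToLower(s) })
+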
+// Base64 represents a base64 encoded string, using URLEncoding alphabet
+//
+// swagger:strfmt byte
+type Base64 []byte
+
+// MarshalText turns this instance into text
+func (b Base64) MarshalText() ([]byte, error) {
+ enc := base64.URLEncoding
+ src := []byte(b)
+ buf := make([]byte, enc.EncodedLen(len(src)))
+ enc.Encode(buf, src)
+ return buf, nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (b *Base64) UnmarshalText(data []byte) error { // validation is performed later on
+ enc := base64.URLEncoding
+ dbuf := make([]byte, enc.DecodedLen(len(data)))
+
+ n, err := enc.Decode(dbuf, data)
+ if err != nil {
+ return err
+ }
+
+ *b = dbuf[:n]
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (b *Base64) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ dbuf := make([]byte, base64.StdEncoding.DecodedLen(len(v)))
+ n, err := base64.StdEncoding.Decode(dbuf, v)
+ if err != nil {
+ return err
+ }
+ *b = dbuf[:n]
+ case string:
+ vv, err := base64.StdEncoding.DecodeString(v)
+ if err != nil {
+ return err
+ }
+ *b = Base64(vv)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.Base64 from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (b Base64) Value() (driver.Value, error) {
+ return driver.Value(b.String()), nil
+}
+
+func (b Base64) String() string {
+ return base64.StdEncoding.EncodeToString([]byte(b))
+}
+
+// MarshalJSON returns the Base64 as JSON
+func (b Base64) MarshalJSON() ([]byte, error) {
+ return json.Marshal(b.String())
+}
+
+// UnmarshalJSON sets the Base64 from JSON
+func (b *Base64) UnmarshalJSON(data []byte) error {
+ var b64str string
+ if err := json.Unmarshal(data, &b64str); err != nil {
+ return err
+ }
+ vb, err := base64.StdEncoding.DecodeString(b64str)
+ if err != nil {
+ return err
+ }
+ *b = Base64(vb)
+ return nil
+}
+
+// MarshalBSON document from this value
+func (b Base64) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": b.String()})
+}
+
+// UnmarshalBSON document into this value
+func (b *Base64) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if bd, ok := m["data"].(string); ok {
+ vb, err := base64.StdEncoding.DecodeString(bd)
+ if err != nil {
+ return err
+ }
+ *b = Base64(vb)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as base64")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (b *Base64) DeepCopyInto(out *Base64) {
+ *out = *b
+}
+
+// DeepCopy copies the receiver into a new Base64.
+func (b *Base64) DeepCopy() *Base64 {
+ if b == nil {
+ return nil
+ }
+ out := new(Base64)
+ b.DeepCopyInto(out)
+ return out
+}
+
+// URI represents the uri string format as specified by the json schema spec
+//
+// swagger:strfmt uri
+type URI string
+
+// MarshalText turns this instance into text
+func (u URI) MarshalText() ([]byte, error) {
+ return []byte(string(u)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (u *URI) UnmarshalText(data []byte) error { // validation is performed later on
+ *u = URI(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (u *URI) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *u = URI(string(v))
+ case string:
+ *u = URI(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.URI from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (u URI) Value() (driver.Value, error) {
+ return driver.Value(string(u)), nil
+}
+
+func (u URI) String() string {
+ return string(u)
+}
+
+// MarshalJSON returns the URI as JSON
+func (u URI) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(u))
+}
+
+// UnmarshalJSON sets the URI from JSON
+func (u *URI) UnmarshalJSON(data []byte) error {
+ var uristr string
+ if err := json.Unmarshal(data, &uristr); err != nil {
+ return err
+ }
+ *u = URI(uristr)
+ return nil
+}
+
+// MarshalBSON document from this value
+func (u URI) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": u.String()})
+}
+
+// UnmarshalBSON document into this value
+func (u *URI) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *u = URI(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as uri")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (u *URI) DeepCopyInto(out *URI) {
+ *out = *u
+}
+
+// DeepCopy copies the receiver into a new URI.
+func (u *URI) DeepCopy() *URI {
+ if u == nil {
+ return nil
+ }
+ out := new(URI)
+ u.DeepCopyInto(out)
+ return out
+}
+
+// Email represents the email string format as specified by the json schema spec
+//
+// swagger:strfmt email
+type Email string
+
+// MarshalText turns this instance into text
+func (e Email) MarshalText() ([]byte, error) {
+ return []byte(string(e)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (e *Email) UnmarshalText(data []byte) error { // validation is performed later on
+ *e = Email(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (e *Email) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *e = Email(string(v))
+ case string:
+ *e = Email(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.Email from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (e Email) Value() (driver.Value, error) {
+ return driver.Value(string(e)), nil
+}
+
+func (e Email) String() string {
+ return string(e)
+}
+
+// MarshalJSON returns the Email as JSON
+func (e Email) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(e))
+}
+
+// UnmarshalJSON sets the Email from JSON
+func (e *Email) UnmarshalJSON(data []byte) error {
+ var estr string
+ if err := json.Unmarshal(data, &estr); err != nil {
+ return err
+ }
+ *e = Email(estr)
+ return nil
+}
+
+// MarshalBSON document from this value
+func (e Email) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": e.String()})
+}
+
+// UnmarshalBSON document into this value
+func (e *Email) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *e = Email(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as email")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (e *Email) DeepCopyInto(out *Email) {
+ *out = *e
+}
+
+// DeepCopy copies the receiver into a new Email.
+func (e *Email) DeepCopy() *Email {
+ if e == nil {
+ return nil
+ }
+ out := new(Email)
+ e.DeepCopyInto(out)
+ return out
+}
+
+// Hostname represents the hostname string format as specified by the json schema spec
+//
+// swagger:strfmt hostname
+type Hostname string
+
+// MarshalText turns this instance into text
+func (h Hostname) MarshalText() ([]byte, error) {
+ return []byte(string(h)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (h *Hostname) UnmarshalText(data []byte) error { // validation is performed later on
+ *h = Hostname(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (h *Hostname) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *h = Hostname(string(v))
+ case string:
+ *h = Hostname(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.Hostname from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (h Hostname) Value() (driver.Value, error) {
+ return driver.Value(string(h)), nil
+}
+
+func (h Hostname) String() string {
+ return string(h)
+}
+
+// MarshalJSON returns the Hostname as JSON
+func (h Hostname) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(h))
+}
+
+// UnmarshalJSON sets the Hostname from JSON
+func (h *Hostname) UnmarshalJSON(data []byte) error {
+ var hstr string
+ if err := json.Unmarshal(data, &hstr); err != nil {
+ return err
+ }
+ *h = Hostname(hstr)
+ return nil
+}
+
+// MarshalBSON document from this value
+func (h Hostname) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": h.String()})
+}
+
+// UnmarshalBSON document into this value
+func (h *Hostname) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *h = Hostname(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as hostname")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (h *Hostname) DeepCopyInto(out *Hostname) {
+ *out = *h
+}
+
+// DeepCopy copies the receiver into a new Hostname.
+func (h *Hostname) DeepCopy() *Hostname {
+ if h == nil {
+ return nil
+ }
+ out := new(Hostname)
+ h.DeepCopyInto(out)
+ return out
+}
+
+// IPv4 represents an IP v4 address
+//
+// swagger:strfmt ipv4
+type IPv4 string
+
+// MarshalText turns this instance into text
+func (u IPv4) MarshalText() ([]byte, error) {
+ return []byte(string(u)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (u *IPv4) UnmarshalText(data []byte) error { // validation is performed later on
+ *u = IPv4(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (u *IPv4) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *u = IPv4(string(v))
+ case string:
+ *u = IPv4(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.IPv4 from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (u IPv4) Value() (driver.Value, error) {
+ return driver.Value(string(u)), nil
+}
+
+func (u IPv4) String() string {
+ return string(u)
+}
+
+// MarshalJSON returns the IPv4 as JSON
+func (u IPv4) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(u))
+}
+
+// UnmarshalJSON sets the IPv4 from JSON
+func (u *IPv4) UnmarshalJSON(data []byte) error {
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ *u = IPv4(ustr)
+ return nil
+}
+
+// MarshalBSON document from this value
+func (u IPv4) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": u.String()})
+}
+
+// UnmarshalBSON document into this value
+func (u *IPv4) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *u = IPv4(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as ipv4")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (u *IPv4) DeepCopyInto(out *IPv4) {
+ *out = *u
+}
+
+// DeepCopy copies the receiver into a new IPv4.
+func (u *IPv4) DeepCopy() *IPv4 {
+ if u == nil {
+ return nil
+ }
+ out := new(IPv4)
+ u.DeepCopyInto(out)
+ return out
+}
+
+// IPv6 represents an IP v6 address
+//
+// swagger:strfmt ipv6
+type IPv6 string
+
+// MarshalText turns this instance into text
+func (u IPv6) MarshalText() ([]byte, error) {
+ return []byte(string(u)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (u *IPv6) UnmarshalText(data []byte) error { // validation is performed later on
+ *u = IPv6(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (u *IPv6) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *u = IPv6(string(v))
+ case string:
+ *u = IPv6(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.IPv6 from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (u IPv6) Value() (driver.Value, error) {
+ return driver.Value(string(u)), nil
+}
+
+func (u IPv6) String() string {
+ return string(u)
+}
+
+// MarshalJSON returns the IPv6 as JSON
+func (u IPv6) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(u))
+}
+
+// UnmarshalJSON sets the IPv6 from JSON
+func (u *IPv6) UnmarshalJSON(data []byte) error {
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ *u = IPv6(ustr)
+ return nil
+}
+
+// MarshalBSON document from this value
+func (u IPv6) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": u.String()})
+}
+
+// UnmarshalBSON document into this value
+func (u *IPv6) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *u = IPv6(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as ipv6")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (u *IPv6) DeepCopyInto(out *IPv6) {
+ *out = *u
+}
+
+// DeepCopy copies the receiver into a new IPv6.
+func (u *IPv6) DeepCopy() *IPv6 {
+ if u == nil {
+ return nil
+ }
+ out := new(IPv6)
+ u.DeepCopyInto(out)
+ return out
+}
+
+// CIDR represents a Classless Inter-Domain Routing notation
+//
+// swagger:strfmt cidr
+type CIDR string
+
+// MarshalText turns this instance into text
+func (u CIDR) MarshalText() ([]byte, error) {
+ return []byte(string(u)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (u *CIDR) UnmarshalText(data []byte) error { // validation is performed later on
+ *u = CIDR(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (u *CIDR) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *u = CIDR(string(v))
+ case string:
+ *u = CIDR(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.CIDR from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (u CIDR) Value() (driver.Value, error) {
+ return driver.Value(string(u)), nil
+}
+
+func (u CIDR) String() string {
+ return string(u)
+}
+
+// MarshalJSON returns the CIDR as JSON
+func (u CIDR) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(u))
+}
+
+// UnmarshalJSON sets the CIDR from JSON
+func (u *CIDR) UnmarshalJSON(data []byte) error {
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ *u = CIDR(ustr)
+ return nil
+}
+
+// MarshalBSON document from this value
+func (u CIDR) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": u.String()})
+}
+
+// UnmarshalBSON document into this value
+func (u *CIDR) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *u = CIDR(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as CIDR")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (u *CIDR) DeepCopyInto(out *CIDR) {
+ *out = *u
+}
+
+// DeepCopy copies the receiver into a new CIDR.
+func (u *CIDR) DeepCopy() *CIDR {
+ if u == nil {
+ return nil
+ }
+ out := new(CIDR)
+ u.DeepCopyInto(out)
+ return out
+}
+
+// MAC represents a 48 bit MAC address
+//
+// swagger:strfmt mac
+type MAC string
+
+// MarshalText turns this instance into text
+func (u MAC) MarshalText() ([]byte, error) {
+ return []byte(string(u)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (u *MAC) UnmarshalText(data []byte) error { // validation is performed later on
+ *u = MAC(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (u *MAC) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *u = MAC(string(v))
+ case string:
+ *u = MAC(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.IPv4 from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (u MAC) Value() (driver.Value, error) {
+ return driver.Value(string(u)), nil
+}
+
+func (u MAC) String() string {
+ return string(u)
+}
+
+// MarshalJSON returns the MAC as JSON
+func (u MAC) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(u))
+}
+
+// UnmarshalJSON sets the MAC from JSON
+func (u *MAC) UnmarshalJSON(data []byte) error {
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ *u = MAC(ustr)
+ return nil
+}
+
+// MarshalBSON document from this value
+func (u MAC) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": u.String()})
+}
+
+// UnmarshalBSON reads a BSON document into this value
+func (u *MAC) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *u = MAC(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as MAC")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (u *MAC) DeepCopyInto(out *MAC) {
+ *out = *u
+}
+
+// DeepCopy copies the receiver into a new MAC.
+func (u *MAC) DeepCopy() *MAC {
+ if u == nil {
+ return nil
+ }
+ out := new(MAC)
+ u.DeepCopyInto(out)
+ return out
+}
+
+// UUID represents a uuid string format
+//
+// swagger:strfmt uuid
+type UUID string
+
+// MarshalText turns this instance into text
+func (u UUID) MarshalText() ([]byte, error) {
+ return []byte(string(u)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (u *UUID) UnmarshalText(data []byte) error { // validation is performed later on
+ *u = UUID(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (u *UUID) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *u = UUID(string(v))
+ case string:
+ *u = UUID(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.UUID from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (u UUID) Value() (driver.Value, error) {
+ return driver.Value(string(u)), nil
+}
+
+func (u UUID) String() string {
+ return string(u)
+}
+
+// MarshalJSON returns the UUID as JSON
+func (u UUID) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(u))
+}
+
+// UnmarshalJSON sets the UUID from JSON
+func (u *UUID) UnmarshalJSON(data []byte) error {
+ if string(data) == jsonNull {
+ return nil
+ }
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ *u = UUID(ustr)
+ return nil
+}
+
+// MarshalBSON renders a BSON document from this value
+func (u UUID) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": u.String()})
+}
+
+// UnmarshalBSON reads a BSON document into this value
+func (u *UUID) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *u = UUID(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as UUID")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (u *UUID) DeepCopyInto(out *UUID) {
+ *out = *u
+}
+
+// DeepCopy copies the receiver into a new UUID.
+func (u *UUID) DeepCopy() *UUID {
+ if u == nil {
+ return nil
+ }
+ out := new(UUID)
+ u.DeepCopyInto(out)
+ return out
+}
+
+// UUID3 represents a uuid3 string format
+//
+// swagger:strfmt uuid3
+type UUID3 string
+
+// MarshalText turns this instance into text
+func (u UUID3) MarshalText() ([]byte, error) {
+ return []byte(string(u)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (u *UUID3) UnmarshalText(data []byte) error { // validation is performed later on
+ *u = UUID3(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (u *UUID3) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *u = UUID3(string(v))
+ case string:
+ *u = UUID3(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.UUID3 from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (u UUID3) Value() (driver.Value, error) {
+ return driver.Value(string(u)), nil
+}
+
+func (u UUID3) String() string {
+ return string(u)
+}
+
+// MarshalJSON returns the UUID3 as JSON
+func (u UUID3) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(u))
+}
+
+// UnmarshalJSON sets the UUID3 from JSON
+func (u *UUID3) UnmarshalJSON(data []byte) error {
+ if string(data) == jsonNull {
+ return nil
+ }
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ *u = UUID3(ustr)
+ return nil
+}
+
+// MarshalBSON renders a BSON document from this value
+func (u UUID3) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": u.String()})
+}
+
+// UnmarshalBSON reads a BSON document into this value
+func (u *UUID3) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *u = UUID3(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as UUID3")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (u *UUID3) DeepCopyInto(out *UUID3) {
+ *out = *u
+}
+
+// DeepCopy copies the receiver into a new UUID3.
+func (u *UUID3) DeepCopy() *UUID3 {
+ if u == nil {
+ return nil
+ }
+ out := new(UUID3)
+ u.DeepCopyInto(out)
+ return out
+}
+
+// UUID4 represents a uuid4 string format
+//
+// swagger:strfmt uuid4
+type UUID4 string
+
+// MarshalText turns this instance into text
+func (u UUID4) MarshalText() ([]byte, error) {
+ return []byte(string(u)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (u *UUID4) UnmarshalText(data []byte) error { // validation is performed later on
+ *u = UUID4(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (u *UUID4) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *u = UUID4(string(v))
+ case string:
+ *u = UUID4(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.UUID4 from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (u UUID4) Value() (driver.Value, error) {
+ return driver.Value(string(u)), nil
+}
+
+func (u UUID4) String() string {
+ return string(u)
+}
+
+// MarshalJSON returns the UUID4 as JSON
+func (u UUID4) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(u))
+}
+
+// UnmarshalJSON sets the UUID4 from JSON
+func (u *UUID4) UnmarshalJSON(data []byte) error {
+ if string(data) == jsonNull {
+ return nil
+ }
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ *u = UUID4(ustr)
+ return nil
+}
+
+// MarshalBSON renders a BSON document from this value
+func (u UUID4) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": u.String()})
+}
+
+// UnmarshalBSON reads a BSON document into this value
+func (u *UUID4) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *u = UUID4(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as UUID4")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (u *UUID4) DeepCopyInto(out *UUID4) {
+ *out = *u
+}
+
+// DeepCopy copies the receiver into a new UUID4.
+func (u *UUID4) DeepCopy() *UUID4 {
+ if u == nil {
+ return nil
+ }
+ out := new(UUID4)
+ u.DeepCopyInto(out)
+ return out
+}
+
+// UUID5 represents a uuid5 string format
+//
+// swagger:strfmt uuid5
+type UUID5 string
+
+// MarshalText turns this instance into text
+func (u UUID5) MarshalText() ([]byte, error) {
+ return []byte(string(u)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (u *UUID5) UnmarshalText(data []byte) error { // validation is performed later on
+ *u = UUID5(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (u *UUID5) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *u = UUID5(string(v))
+ case string:
+ *u = UUID5(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.UUID5 from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (u UUID5) Value() (driver.Value, error) {
+ return driver.Value(string(u)), nil
+}
+
+func (u UUID5) String() string {
+ return string(u)
+}
+
+// MarshalJSON returns the UUID5 as JSON
+func (u UUID5) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(u))
+}
+
+// UnmarshalJSON sets the UUID5 from JSON
+func (u *UUID5) UnmarshalJSON(data []byte) error {
+ if string(data) == jsonNull {
+ return nil
+ }
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ *u = UUID5(ustr)
+ return nil
+}
+
+// MarshalBSON renders a BSON document from this value
+func (u UUID5) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": u.String()})
+}
+
+// UnmarshalBSON reads a BSON document into this value
+func (u *UUID5) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *u = UUID5(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as UUID5")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (u *UUID5) DeepCopyInto(out *UUID5) {
+ *out = *u
+}
+
+// DeepCopy copies the receiver into a new UUID5.
+func (u *UUID5) DeepCopy() *UUID5 {
+ if u == nil {
+ return nil
+ }
+ out := new(UUID5)
+ u.DeepCopyInto(out)
+ return out
+}
+
+// ISBN represents an isbn string format
+//
+// swagger:strfmt isbn
+type ISBN string
+
+// MarshalText turns this instance into text
+func (u ISBN) MarshalText() ([]byte, error) {
+ return []byte(string(u)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (u *ISBN) UnmarshalText(data []byte) error { // validation is performed later on
+ *u = ISBN(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (u *ISBN) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *u = ISBN(string(v))
+ case string:
+ *u = ISBN(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.ISBN from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (u ISBN) Value() (driver.Value, error) {
+ return driver.Value(string(u)), nil
+}
+
+func (u ISBN) String() string {
+ return string(u)
+}
+
+// MarshalJSON returns the ISBN as JSON
+func (u ISBN) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(u))
+}
+
+// UnmarshalJSON sets the ISBN from JSON
+func (u *ISBN) UnmarshalJSON(data []byte) error {
+ if string(data) == jsonNull {
+ return nil
+ }
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ *u = ISBN(ustr)
+ return nil
+}
+
+// MarshalBSON renders a BSON document from this value
+func (u ISBN) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": u.String()})
+}
+
+// UnmarshalBSON reads a BSON document into this value
+func (u *ISBN) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *u = ISBN(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as ISBN")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (u *ISBN) DeepCopyInto(out *ISBN) {
+ *out = *u
+}
+
+// DeepCopy copies the receiver into a new ISBN.
+func (u *ISBN) DeepCopy() *ISBN {
+ if u == nil {
+ return nil
+ }
+ out := new(ISBN)
+ u.DeepCopyInto(out)
+ return out
+}
+
+// ISBN10 represents an isbn 10 string format
+//
+// swagger:strfmt isbn10
+type ISBN10 string
+
+// MarshalText turns this instance into text
+func (u ISBN10) MarshalText() ([]byte, error) {
+ return []byte(string(u)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (u *ISBN10) UnmarshalText(data []byte) error { // validation is performed later on
+ *u = ISBN10(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (u *ISBN10) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *u = ISBN10(string(v))
+ case string:
+ *u = ISBN10(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.ISBN10 from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (u ISBN10) Value() (driver.Value, error) {
+ return driver.Value(string(u)), nil
+}
+
+func (u ISBN10) String() string {
+ return string(u)
+}
+
+// MarshalJSON returns the ISBN10 as JSON
+func (u ISBN10) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(u))
+}
+
+// UnmarshalJSON sets the ISBN10 from JSON
+func (u *ISBN10) UnmarshalJSON(data []byte) error {
+ if string(data) == jsonNull {
+ return nil
+ }
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ *u = ISBN10(ustr)
+ return nil
+}
+
+// MarshalBSON renders a BSON document from this value
+func (u ISBN10) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": u.String()})
+}
+
+// UnmarshalBSON reads a BSON document into this value
+func (u *ISBN10) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *u = ISBN10(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as ISBN10")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (u *ISBN10) DeepCopyInto(out *ISBN10) {
+ *out = *u
+}
+
+// DeepCopy copies the receiver into a new ISBN10.
+func (u *ISBN10) DeepCopy() *ISBN10 {
+ if u == nil {
+ return nil
+ }
+ out := new(ISBN10)
+ u.DeepCopyInto(out)
+ return out
+}
+
+// ISBN13 represents an isbn 13 string format
+//
+// swagger:strfmt isbn13
+type ISBN13 string
+
+// MarshalText turns this instance into text
+func (u ISBN13) MarshalText() ([]byte, error) {
+ return []byte(string(u)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (u *ISBN13) UnmarshalText(data []byte) error { // validation is performed later on
+ *u = ISBN13(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (u *ISBN13) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *u = ISBN13(string(v))
+ case string:
+ *u = ISBN13(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.ISBN13 from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (u ISBN13) Value() (driver.Value, error) {
+ return driver.Value(string(u)), nil
+}
+
+func (u ISBN13) String() string {
+ return string(u)
+}
+
+// MarshalJSON returns the ISBN13 as JSON
+func (u ISBN13) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(u))
+}
+
+// UnmarshalJSON sets the ISBN13 from JSON
+func (u *ISBN13) UnmarshalJSON(data []byte) error {
+ if string(data) == jsonNull {
+ return nil
+ }
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ *u = ISBN13(ustr)
+ return nil
+}
+
+// MarshalBSON renders a BSON document from this value
+func (u ISBN13) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": u.String()})
+}
+
+// UnmarshalBSON reads a BSON document into this value
+func (u *ISBN13) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *u = ISBN13(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as ISBN13")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (u *ISBN13) DeepCopyInto(out *ISBN13) {
+ *out = *u
+}
+
+// DeepCopy copies the receiver into a new ISBN13.
+func (u *ISBN13) DeepCopy() *ISBN13 {
+ if u == nil {
+ return nil
+ }
+ out := new(ISBN13)
+ u.DeepCopyInto(out)
+ return out
+}
+
+// CreditCard represents a credit card string format
+//
+// swagger:strfmt creditcard
+type CreditCard string
+
+// MarshalText turns this instance into text
+func (u CreditCard) MarshalText() ([]byte, error) {
+ return []byte(string(u)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (u *CreditCard) UnmarshalText(data []byte) error { // validation is performed later on
+ *u = CreditCard(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (u *CreditCard) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *u = CreditCard(string(v))
+ case string:
+ *u = CreditCard(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.CreditCard from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (u CreditCard) Value() (driver.Value, error) {
+ return driver.Value(string(u)), nil
+}
+
+func (u CreditCard) String() string {
+ return string(u)
+}
+
+// MarshalJSON returns the CreditCard as JSON
+func (u CreditCard) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(u))
+}
+
+// UnmarshalJSON sets the CreditCard from JSON
+func (u *CreditCard) UnmarshalJSON(data []byte) error {
+ if string(data) == jsonNull {
+ return nil
+ }
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ *u = CreditCard(ustr)
+ return nil
+}
+
+// MarshalBSON renders a BSON document from this value
+func (u CreditCard) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": u.String()})
+}
+
+// UnmarshalBSON reads a BSON document into this value
+func (u *CreditCard) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *u = CreditCard(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as CreditCard")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (u *CreditCard) DeepCopyInto(out *CreditCard) {
+ *out = *u
+}
+
+// DeepCopy copies the receiver into a new CreditCard.
+func (u *CreditCard) DeepCopy() *CreditCard {
+ if u == nil {
+ return nil
+ }
+ out := new(CreditCard)
+ u.DeepCopyInto(out)
+ return out
+}
+
+// SSN represents a social security string format
+//
+// swagger:strfmt ssn
+type SSN string
+
+// MarshalText turns this instance into text
+func (u SSN) MarshalText() ([]byte, error) {
+ return []byte(string(u)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (u *SSN) UnmarshalText(data []byte) error { // validation is performed later on
+ *u = SSN(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (u *SSN) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *u = SSN(string(v))
+ case string:
+ *u = SSN(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.SSN from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (u SSN) Value() (driver.Value, error) {
+ return driver.Value(string(u)), nil
+}
+
+func (u SSN) String() string {
+ return string(u)
+}
+
+// MarshalJSON returns the SSN as JSON
+func (u SSN) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(u))
+}
+
+// UnmarshalJSON sets the SSN from JSON
+func (u *SSN) UnmarshalJSON(data []byte) error {
+ if string(data) == jsonNull {
+ return nil
+ }
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ *u = SSN(ustr)
+ return nil
+}
+
+// MarshalBSON renders a BSON document from this value
+func (u SSN) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": u.String()})
+}
+
+// UnmarshalBSON reads a BSON document into this value
+func (u *SSN) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *u = SSN(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as SSN")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (u *SSN) DeepCopyInto(out *SSN) {
+ *out = *u
+}
+
+// DeepCopy copies the receiver into a new SSN.
+func (u *SSN) DeepCopy() *SSN {
+ if u == nil {
+ return nil
+ }
+ out := new(SSN)
+ u.DeepCopyInto(out)
+ return out
+}
+
+// HexColor represents a hex color string format
+//
+// swagger:strfmt hexcolor
+type HexColor string
+
+// MarshalText turns this instance into text
+func (h HexColor) MarshalText() ([]byte, error) {
+ return []byte(string(h)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (h *HexColor) UnmarshalText(data []byte) error { // validation is performed later on
+ *h = HexColor(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (h *HexColor) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *h = HexColor(string(v))
+ case string:
+ *h = HexColor(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.HexColor from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (h HexColor) Value() (driver.Value, error) {
+ return driver.Value(string(h)), nil
+}
+
+func (h HexColor) String() string {
+ return string(h)
+}
+
+// MarshalJSON returns the HexColor as JSON
+func (h HexColor) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(h))
+}
+
+// UnmarshalJSON sets the HexColor from JSON
+func (h *HexColor) UnmarshalJSON(data []byte) error {
+ if string(data) == jsonNull {
+ return nil
+ }
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ *h = HexColor(ustr)
+ return nil
+}
+
+// MarshalBSON renders a BSON document from this value
+func (h HexColor) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": h.String()})
+}
+
+// UnmarshalBSON reads a BSON document into this value
+func (h *HexColor) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *h = HexColor(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as HexColor")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (h *HexColor) DeepCopyInto(out *HexColor) {
+ *out = *h
+}
+
+// DeepCopy copies the receiver into a new HexColor.
+func (h *HexColor) DeepCopy() *HexColor {
+ if h == nil {
+ return nil
+ }
+ out := new(HexColor)
+ h.DeepCopyInto(out)
+ return out
+}
+
+// RGBColor represents an RGB color string format
+//
+// swagger:strfmt rgbcolor
+type RGBColor string
+
+// MarshalText turns this instance into text
+func (r RGBColor) MarshalText() ([]byte, error) {
+ return []byte(string(r)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (r *RGBColor) UnmarshalText(data []byte) error { // validation is performed later on
+ *r = RGBColor(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (r *RGBColor) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *r = RGBColor(string(v))
+ case string:
+ *r = RGBColor(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.RGBColor from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (r RGBColor) Value() (driver.Value, error) {
+ return driver.Value(string(r)), nil
+}
+
+func (r RGBColor) String() string {
+ return string(r)
+}
+
+// MarshalJSON returns the RGBColor as JSON
+func (r RGBColor) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(r))
+}
+
+// UnmarshalJSON sets the RGBColor from JSON
+func (r *RGBColor) UnmarshalJSON(data []byte) error {
+ if string(data) == jsonNull {
+ return nil
+ }
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ *r = RGBColor(ustr)
+ return nil
+}
+
+// MarshalBSON renders a BSON document from this value
+func (r RGBColor) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": r.String()})
+}
+
+// UnmarshalBSON reads a BSON document into this value
+func (r *RGBColor) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *r = RGBColor(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as RGBColor")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (r *RGBColor) DeepCopyInto(out *RGBColor) {
+ *out = *r
+}
+
+// DeepCopy copies the receiver into a new RGBColor.
+func (r *RGBColor) DeepCopy() *RGBColor {
+ if r == nil {
+ return nil
+ }
+ out := new(RGBColor)
+ r.DeepCopyInto(out)
+ return out
+}
+
+// Password represents a password.
+// This has no validations and is mainly used as a marker for UI components.
+//
+// swagger:strfmt password
+type Password string
+
+// MarshalText turns this instance into text
+func (r Password) MarshalText() ([]byte, error) {
+ return []byte(string(r)), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (r *Password) UnmarshalText(data []byte) error { // validation is performed later on
+ *r = Password(string(data))
+ return nil
+}
+
+// Scan reads a value from a database driver
+func (r *Password) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ case []byte:
+ *r = Password(string(v))
+ case string:
+ *r = Password(v)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.Password from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts a value to a database driver value
+func (r Password) Value() (driver.Value, error) {
+ return driver.Value(string(r)), nil
+}
+
+func (r Password) String() string {
+ return string(r)
+}
+
+// MarshalJSON returns the Password as JSON
+func (r Password) MarshalJSON() ([]byte, error) {
+ return json.Marshal(string(r))
+}
+
+// UnmarshalJSON sets the Password from JSON
+func (r *Password) UnmarshalJSON(data []byte) error {
+ if string(data) == jsonNull {
+ return nil
+ }
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ *r = Password(ustr)
+ return nil
+}
+
+// MarshalBSON renders a BSON document from this value
+func (r Password) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": r.String()})
+}
+
+// UnmarshalBSON reads a BSON document into this value
+func (r *Password) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ *r = Password(ud)
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as Password")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (r *Password) DeepCopyInto(out *Password) {
+ *out = *r
+}
+
+// DeepCopy copies the receiver into a new Password.
+func (r *Password) DeepCopy() *Password {
+ if r == nil {
+ return nil
+ }
+ out := new(Password)
+ r.DeepCopyInto(out)
+ return out
+}
diff --git a/vendor/github.com/go-openapi/strfmt/doc.go b/vendor/github.com/go-openapi/strfmt/doc.go
new file mode 100644
index 000000000..41aebe6d5
--- /dev/null
+++ b/vendor/github.com/go-openapi/strfmt/doc.go
@@ -0,0 +1,18 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package strfmt contains custom string formats
+//
+// TODO: add info on how to define and register a custom format
+package strfmt
diff --git a/vendor/github.com/go-openapi/strfmt/duration.go b/vendor/github.com/go-openapi/strfmt/duration.go
new file mode 100644
index 000000000..6284b821f
--- /dev/null
+++ b/vendor/github.com/go-openapi/strfmt/duration.go
@@ -0,0 +1,211 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package strfmt
+
+import (
+ "database/sql/driver"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+
+ "go.mongodb.org/mongo-driver/bson"
+)
+
+func init() {
+ d := Duration(0)
+ // register this format in the default registry
+ Default.Add("duration", &d, IsDuration)
+}
+
+var (
+ timeUnits = [][]string{
+ {"ns", "nano"},
+ {"us", "µs", "micro"},
+ {"ms", "milli"},
+ {"s", "sec"},
+ {"m", "min"},
+ {"h", "hr", "hour"},
+ {"d", "day"},
+ {"w", "wk", "week"},
+ }
+
+ timeMultiplier = map[string]time.Duration{
+ "ns": time.Nanosecond,
+ "us": time.Microsecond,
+ "ms": time.Millisecond,
+ "s": time.Second,
+ "m": time.Minute,
+ "h": time.Hour,
+ "d": 24 * time.Hour,
+ "w": 7 * 24 * time.Hour,
+ }
+
+ durationMatcher = regexp.MustCompile(`((\d+)\s*([A-Za-zµ]+))`)
+)
+
+// IsDuration returns true if the provided string is a valid duration
+func IsDuration(str string) bool {
+ _, err := ParseDuration(str)
+ return err == nil
+}
+
+// Duration represents a duration
+//
+// Duration stores a period of time as a nanosecond count, with the largest
+// representable duration being approximately 290 years.
+//
+// swagger:strfmt duration
+type Duration time.Duration
+
+// MarshalText turns this instance into text
+func (d Duration) MarshalText() ([]byte, error) {
+ return []byte(time.Duration(d).String()), nil
+}
+
+// UnmarshalText hydrates this instance from text
+func (d *Duration) UnmarshalText(data []byte) error { // validation is performed later on
+ dd, err := ParseDuration(string(data))
+ if err != nil {
+ return err
+ }
+ *d = Duration(dd)
+ return nil
+}
+
+// ParseDuration parses a duration from a string, compatible with the Scala duration syntax
+func ParseDuration(cand string) (time.Duration, error) {
+ if dur, err := time.ParseDuration(cand); err == nil {
+ return dur, nil
+ }
+
+ var dur time.Duration
+ ok := false
+ for _, match := range durationMatcher.FindAllStringSubmatch(cand, -1) {
+
+ factor, err := strconv.Atoi(match[2]) // converts string to int
+ if err != nil {
+ return 0, err
+ }
+ unit := strings.ToLower(strings.TrimSpace(match[3]))
+
+ for _, variants := range timeUnits {
+ last := len(variants) - 1
+ multiplier := timeMultiplier[variants[0]]
+
+ for i, variant := range variants {
+ if (last == i && strings.HasPrefix(unit, variant)) || strings.EqualFold(variant, unit) {
+ ok = true
+ dur += (time.Duration(factor) * multiplier)
+ }
+ }
+ }
+ }
+
+ if ok {
+ return dur, nil
+ }
+ return 0, fmt.Errorf("unable to parse %s as duration", cand)
+}
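+
+// A minimal sketch of the accepted inputs (illustrative; values assumed):
+//
+//    d, _ := ParseDuration("1h30m")  // standard Go syntax: 1h30m0s
+//    d, _ = ParseDuration("2 days")  // extended units: 48h0m0s
+//    d, _ = ParseDuration("3 wk")    // 504h0m0s
+//    _ = d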
+
+// Scan reads a Duration value from database driver type.
+func (d *Duration) Scan(raw interface{}) error {
+ switch v := raw.(type) {
+ // TODO: case []byte: // ?
+ case int64:
+ *d = Duration(v)
+ case float64:
+ *d = Duration(int64(v))
+ case nil:
+ *d = Duration(0)
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.Duration from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts Duration to a primitive value ready to be written to a database.
+func (d Duration) Value() (driver.Value, error) {
+ return driver.Value(int64(d)), nil
+}
+
+// String converts this duration to a string
+func (d Duration) String() string {
+ return time.Duration(d).String()
+}
+
+// MarshalJSON returns the Duration as JSON
+func (d Duration) MarshalJSON() ([]byte, error) {
+ return json.Marshal(time.Duration(d).String())
+}
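+
+// A short round-trip sketch (illustrative):
+//
+//    d := Duration(90 * time.Second)
+//    b, _ := json.Marshal(d) // `"1m30s"`
+//    var out Duration
+//    _ = json.Unmarshal(b, &out) // out == d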
+
+// UnmarshalJSON sets the Duration from JSON
+func (d *Duration) UnmarshalJSON(data []byte) error {
+ if string(data) == jsonNull {
+ return nil
+ }
+
+ var dstr string
+ if err := json.Unmarshal(data, &dstr); err != nil {
+ return err
+ }
+ tt, err := ParseDuration(dstr)
+ if err != nil {
+ return err
+ }
+ *d = Duration(tt)
+ return nil
+}
+
+// MarshalBSON renders a BSON document from this value
+func (d Duration) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": d.String()})
+}
+
+// UnmarshalBSON reads a BSON document into this value
+func (d *Duration) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if data, ok := m["data"].(string); ok {
+ rd, err := ParseDuration(data)
+ if err != nil {
+ return err
+ }
+ *d = Duration(rd)
+ return nil
+ }
+
+ return errors.New("couldn't unmarshal bson bytes value as Date")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (d *Duration) DeepCopyInto(out *Duration) {
+ *out = *d
+}
+
+// DeepCopy copies the receiver into a new Duration.
+func (d *Duration) DeepCopy() *Duration {
+ if d == nil {
+ return nil
+ }
+ out := new(Duration)
+ d.DeepCopyInto(out)
+ return out
+}
diff --git a/vendor/github.com/go-openapi/strfmt/format.go b/vendor/github.com/go-openapi/strfmt/format.go
new file mode 100644
index 000000000..ad3b3c355
--- /dev/null
+++ b/vendor/github.com/go-openapi/strfmt/format.go
@@ -0,0 +1,326 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package strfmt
+
+import (
+ "encoding"
+ "fmt"
+ "reflect"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/go-openapi/errors"
+ "github.com/mitchellh/mapstructure"
+)
+
+// Default is the default formats registry
+var Default = NewSeededFormats(nil, nil)
+
+// Validator represents a validator for a string format.
+type Validator func(string) bool
+
+// Format represents a string format.
+//
+// All implementations of Format provide a string representation and text
+// marshaling/unmarshaling interface to be used by encoders (e.g. encoding/json).
+type Format interface {
+ String() string
+ encoding.TextMarshaler
+ encoding.TextUnmarshaler
+}
+
+// Registry is a registry of string formats, with a validation method.
+type Registry interface {
+ Add(string, Format, Validator) bool
+ DelByName(string) bool
+ GetType(string) (reflect.Type, bool)
+ ContainsName(string) bool
+ Validates(string, string) bool
+ Parse(string, string) (interface{}, error)
+ MapStructureHookFunc() mapstructure.DecodeHookFunc
+}
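+
+// A small usage sketch against the package-level Default registry
+// (illustrative; assumes the built-in formats registered via this
+// package's init functions):
+//
+//    ok := Default.ContainsName("uuid")                // true
+//    valid := Default.Validates("uuid4", "not-a-uuid") // false
+//    _, _ = ok, valid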
+
+type knownFormat struct {
+ Name string
+ OrigName string
+ Type reflect.Type
+ Validator Validator
+}
+
+// NameNormalizer is a function that normalizes a format name.
+type NameNormalizer func(string) string
+
+// DefaultNameNormalizer removes all dashes
+func DefaultNameNormalizer(name string) string {
+ return strings.ReplaceAll(name, "-", "")
+}
+
+type defaultFormats struct {
+ sync.Mutex
+ data []knownFormat
+ normalizeName NameNormalizer
+}
+
+// NewFormats creates a new formats registry seeded with the values from the default registry
+func NewFormats() Registry {
+ //nolint:forcetypeassert
+ return NewSeededFormats(Default.(*defaultFormats).data, nil)
+}
+
+// NewSeededFormats creates a new formats registry
+func NewSeededFormats(seeds []knownFormat, normalizer NameNormalizer) Registry {
+ if normalizer == nil {
+ normalizer = DefaultNameNormalizer
+ }
+ // copy here, don't modify original
+ d := append([]knownFormat(nil), seeds...)
+ return &defaultFormats{
+ data: d,
+ normalizeName: normalizer,
+ }
+}
+
+// MapStructureHookFunc is a decode hook function for mapstructure
+func (f *defaultFormats) MapStructureHookFunc() mapstructure.DecodeHookFunc { //nolint:gocyclo,cyclop
+ return func(from reflect.Type, to reflect.Type, obj interface{}) (interface{}, error) {
+ if from.Kind() != reflect.String {
+ return obj, nil
+ }
+ data, ok := obj.(string)
+ if !ok {
+ return nil, fmt.Errorf("failed to cast %+v to string", obj)
+ }
+
+ for _, v := range f.data {
+ tpe, _ := f.GetType(v.Name)
+ if to == tpe {
+ switch v.Name {
+ case "date":
+ d, err := time.ParseInLocation(RFC3339FullDate, data, DefaultTimeLocation)
+ if err != nil {
+ return nil, err
+ }
+ return Date(d), nil
+ case "datetime":
+ input := data
+ if len(input) == 0 {
+ return nil, fmt.Errorf("empty string is an invalid datetime format")
+ }
+ return ParseDateTime(input)
+ case "duration":
+ dur, err := ParseDuration(data)
+ if err != nil {
+ return nil, err
+ }
+ return Duration(dur), nil
+ case "uri":
+ return URI(data), nil
+ case "email":
+ return Email(data), nil
+ case "uuid":
+ return UUID(data), nil
+ case "uuid3":
+ return UUID3(data), nil
+ case "uuid4":
+ return UUID4(data), nil
+ case "uuid5":
+ return UUID5(data), nil
+ case "hostname":
+ return Hostname(data), nil
+ case "ipv4":
+ return IPv4(data), nil
+ case "ipv6":
+ return IPv6(data), nil
+ case "cidr":
+ return CIDR(data), nil
+ case "mac":
+ return MAC(data), nil
+ case "isbn":
+ return ISBN(data), nil
+ case "isbn10":
+ return ISBN10(data), nil
+ case "isbn13":
+ return ISBN13(data), nil
+ case "creditcard":
+ return CreditCard(data), nil
+ case "ssn":
+ return SSN(data), nil
+ case "hexcolor":
+ return HexColor(data), nil
+ case "rgbcolor":
+ return RGBColor(data), nil
+ case "byte":
+ return Base64(data), nil
+ case "password":
+ return Password(data), nil
+ case "ulid":
+ ulid, err := ParseULID(data)
+ if err != nil {
+ return nil, err
+ }
+ return ulid, nil
+ default:
+ return nil, errors.InvalidTypeName(v.Name)
+ }
+ }
+ }
+ return data, nil
+ }
+}
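+
+// Wiring the hook into a mapstructure decoder might look like this
+// (illustrative sketch; the target struct and input map are assumptions):
+//
+//    var target struct{ Created DateTime }
+//    dec, _ := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
+//        DecodeHook: Default.MapStructureHookFunc(),
+//        Result:     &target,
+//    })
+//    _ = dec.Decode(map[string]interface{}{"created": "2015-02-24T12:00:00.000Z"})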
+
+// Add adds a new format, returning true if this was a new item rather than a replacement
+func (f *defaultFormats) Add(name string, strfmt Format, validator Validator) bool {
+ f.Lock()
+ defer f.Unlock()
+
+ nme := f.normalizeName(name)
+
+ tpe := reflect.TypeOf(strfmt)
+ if tpe.Kind() == reflect.Ptr {
+ tpe = tpe.Elem()
+ }
+
+ for i := range f.data {
+ v := &f.data[i]
+ if v.Name == nme {
+ v.Type = tpe
+ v.Validator = validator
+ return false
+ }
+ }
+
+ // turns out it's new after all
+ f.data = append(f.data, knownFormat{Name: nme, OrigName: name, Type: tpe, Validator: validator})
+ return true
+}
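+
+// Defining and registering a custom format might look like this (illustrative
+// sketch; the Ticket type and its validator are assumptions, not part of this
+// package):
+//
+//    type Ticket string
+//
+//    func (t Ticket) String() string                { return string(t) }
+//    func (t Ticket) MarshalText() ([]byte, error)  { return []byte(t), nil }
+//    func (t *Ticket) UnmarshalText(b []byte) error { *t = Ticket(b); return nil }
+//
+//    func isTicket(s string) bool { return strings.HasPrefix(s, "TKT-") }
+//
+//    var t Ticket
+//    _ = Default.Add("ticket", &t, isTicket) // true on first registration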
+
+// GetType gets the type for the specified name
+func (f *defaultFormats) GetType(name string) (reflect.Type, bool) {
+ f.Lock()
+ defer f.Unlock()
+ nme := f.normalizeName(name)
+ for _, v := range f.data {
+ if v.Name == nme {
+ return v.Type, true
+ }
+ }
+ return nil, false
+}
+
+// DelByName removes the format by the specified name, returns true when an item was actually removed
+func (f *defaultFormats) DelByName(name string) bool {
+ f.Lock()
+ defer f.Unlock()
+
+ nme := f.normalizeName(name)
+
+ for i, v := range f.data {
+ if v.Name == nme {
+ f.data[i] = knownFormat{} // release
+ f.data = append(f.data[:i], f.data[i+1:]...)
+ return true
+ }
+ }
+ return false
+}
+
+// DelByFormat removes the specified format, returns true when an item was actually removed
+func (f *defaultFormats) DelByFormat(strfmt Format) bool {
+ f.Lock()
+ defer f.Unlock()
+
+ tpe := reflect.TypeOf(strfmt)
+ if tpe.Kind() == reflect.Ptr {
+ tpe = tpe.Elem()
+ }
+
+ for i, v := range f.data {
+ if v.Type == tpe {
+ f.data[i] = knownFormat{} // release
+ f.data = append(f.data[:i], f.data[i+1:]...)
+ return true
+ }
+ }
+ return false
+}
+
+// ContainsName returns true if this registry contains the specified name
+func (f *defaultFormats) ContainsName(name string) bool {
+ f.Lock()
+ defer f.Unlock()
+ nme := f.normalizeName(name)
+ for _, v := range f.data {
+ if v.Name == nme {
+ return true
+ }
+ }
+ return false
+}
+
+// ContainsFormat returns true if this registry contains the specified format
+func (f *defaultFormats) ContainsFormat(strfmt Format) bool {
+ f.Lock()
+ defer f.Unlock()
+ tpe := reflect.TypeOf(strfmt)
+ if tpe.Kind() == reflect.Ptr {
+ tpe = tpe.Elem()
+ }
+
+ for _, v := range f.data {
+ if v.Type == tpe {
+ return true
+ }
+ }
+ return false
+}
+
+// Validates checks the passed data against the named format.
+//
+// Note that the format name is automatically normalized, e.g. one may
+// use "date-time" to use the "datetime" format validator.
+func (f *defaultFormats) Validates(name, data string) bool {
+ f.Lock()
+ defer f.Unlock()
+ nme := f.normalizeName(name)
+ for _, v := range f.data {
+ if v.Name == nme {
+ return v.Validator(data)
+ }
+ }
+ return false
+}
+
+// Parse parses a string into the appropriate format representation type.
+//
+// E.g. parsing a string as a "date" will return a Date type.
+func (f *defaultFormats) Parse(name, data string) (interface{}, error) {
+ f.Lock()
+ defer f.Unlock()
+ nme := f.normalizeName(name)
+ for _, v := range f.data {
+ if v.Name == nme {
+ nw := reflect.New(v.Type).Interface()
+ if dec, ok := nw.(encoding.TextUnmarshaler); ok {
+ if err := dec.UnmarshalText([]byte(data)); err != nil {
+ return nil, err
+ }
+ return nw, nil
+ }
+ return nil, errors.InvalidTypeName(name)
+ }
+ }
+ return nil, errors.InvalidTypeName(name)
+}
diff --git a/vendor/github.com/go-openapi/strfmt/time.go b/vendor/github.com/go-openapi/strfmt/time.go
new file mode 100644
index 000000000..9bef4c3b3
--- /dev/null
+++ b/vendor/github.com/go-openapi/strfmt/time.go
@@ -0,0 +1,319 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package strfmt
+
+import (
+ "database/sql/driver"
+ "encoding/binary"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "regexp"
+ "strings"
+ "time"
+
+ "go.mongodb.org/mongo-driver/bson"
+
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+)
+
+var (
+ // UnixZero is the zero Unix timestamp we compare against, normalized to UTC:
+ // Unix 0 rendered in a zone such as EST is not the same representation as UTC.
+ UnixZero = time.Unix(0, 0).UTC()
+)
+
+func init() {
+ dt := DateTime{}
+ Default.Add("datetime", &dt, IsDateTime)
+}
+
+// IsDateTime returns true when the string is a valid date-time
+func IsDateTime(str string) bool {
+ if len(str) < 4 {
+ return false
+ }
+ s := strings.Split(strings.ToLower(str), "t")
+ if len(s) < 2 || !IsDate(s[0]) {
+ return false
+ }
+
+ matches := rxDateTime.FindAllStringSubmatch(s[1], -1)
+ if len(matches) == 0 || len(matches[0]) == 0 {
+ return false
+ }
+ m := matches[0]
+ res := m[1] <= "23" && m[2] <= "59" && m[3] <= "59"
+ return res
+}
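+
+// For example (illustrative):
+//
+//    IsDateTime("2015-02-24T12:00:00Z") // true
+//    IsDateTime("2015-02-24")           // false: date only, no time component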
+
+const (
+ // RFC3339Millis represents an ISO8601 format to millis instead of to nanos
+ RFC3339Millis = "2006-01-02T15:04:05.000Z07:00"
+ // RFC3339MillisNoColon represents an ISO8601 format to millis instead of to nanos
+ RFC3339MillisNoColon = "2006-01-02T15:04:05.000Z0700"
+ // RFC3339Micro represents an ISO8601 format to micros instead of to nanos
+ RFC3339Micro = "2006-01-02T15:04:05.000000Z07:00"
+ // RFC3339MicroNoColon represents an ISO8601 format to micros instead of to nanos
+ RFC3339MicroNoColon = "2006-01-02T15:04:05.000000Z0700"
+ // ISO8601LocalTime represents an ISO8601 format in local time (no timezone)
+ ISO8601LocalTime = "2006-01-02T15:04:05"
+ // ISO8601TimeWithReducedPrecision represents an ISO8601 format with reduced precision (dropped seconds)
+ ISO8601TimeWithReducedPrecision = "2006-01-02T15:04Z"
+ // ISO8601TimeWithReducedPrecisionLocaltime represents an ISO8601 format with reduced precision and no timezone (dropped seconds)
+ ISO8601TimeWithReducedPrecisionLocaltime = "2006-01-02T15:04"
+ // ISO8601TimeUniversalSortableDateTimePattern represents an ISO8601 universal sortable date time pattern.
+ ISO8601TimeUniversalSortableDateTimePattern = "2006-01-02 15:04:05"
+ // DateTimePattern pattern to match for the date-time format from http://tools.ietf.org/html/rfc3339#section-5.6
+ DateTimePattern = `^([0-9]{2}):([0-9]{2}):([0-9]{2})(\.[0-9]+)?(z|([+-][0-9]{2}:[0-9]{2}))$`
+)
+
+var (
+ rxDateTime = regexp.MustCompile(DateTimePattern)
+
+ // DateTimeFormats is the collection of formats used by ParseDateTime()
+ DateTimeFormats = []string{RFC3339Micro, RFC3339MicroNoColon, RFC3339Millis, RFC3339MillisNoColon, time.RFC3339, time.RFC3339Nano, ISO8601LocalTime, ISO8601TimeWithReducedPrecision, ISO8601TimeWithReducedPrecisionLocaltime, ISO8601TimeUniversalSortableDateTimePattern}
+
+ // MarshalFormat sets the time resolution format used for marshaling time (set to milliseconds)
+ MarshalFormat = RFC3339Millis
+
+ // NormalizeTimeForMarshal provides a normalization function applied to a time before marshaling (e.g. time.UTC).
+ // By default, the time value is not changed.
+ NormalizeTimeForMarshal = func(t time.Time) time.Time { return t }
+
+ // DefaultTimeLocation provides a location for a time when the time zone is not encoded in the string (e.g. the ISO8601 local variants).
+ DefaultTimeLocation = time.UTC
+)
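+
+// Both knobs can be adjusted by the importing program, e.g. (illustrative):
+//
+//    MarshalFormat = RFC3339Micro     // emit microsecond precision
+//    DefaultTimeLocation = time.Local // resolve local-time layouts in local time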
+
+// ParseDateTime parses a string that represents an ISO8601 time or a unix epoch
+func ParseDateTime(data string) (DateTime, error) {
+ if data == "" {
+ return NewDateTime(), nil
+ }
+ var lastError error
+ for _, layout := range DateTimeFormats {
+ dd, err := time.ParseInLocation(layout, data, DefaultTimeLocation)
+ if err != nil {
+ lastError = err
+ continue
+ }
+ return DateTime(dd), nil
+ }
+ return DateTime{}, lastError
+}
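+
+// A minimal parsing sketch (illustrative; input values assumed):
+//
+//    dt, _ := ParseDateTime("2015-02-24T12:00:00.000Z") // RFC3339 with millis
+//    dt, _ = ParseDateTime("2015-02-24T12:00:00")       // interpreted in DefaultTimeLocation
+//    _ = dt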
+
+// DateTime is a time, but it serializes to an ISO8601 format with millis.
+// It knows how to read the RFC3339 and ISO8601 variations listed in
+// DateTimeFormats. Most APIs we encounter want either millisecond or second
+// precision times. This just tries to make it worry-free.
+//
+// swagger:strfmt date-time
+type DateTime time.Time
+
+// NewDateTime returns the zero value for the DateTime type (the Unix epoch in UTC)
+func NewDateTime() DateTime {
+ return DateTime(time.Unix(0, 0).UTC())
+}
+
+// String converts this time to a string
+func (t DateTime) String() string {
+ return NormalizeTimeForMarshal(time.Time(t)).Format(MarshalFormat)
+}
+
+// IsZero returns whether the date time is a zero value
+func (t *DateTime) IsZero() bool {
+ if t == nil {
+ return true
+ }
+ return time.Time(*t).IsZero()
+}
+
+// IsUnixZero returns whether the date time is equivalent to time.Unix(0, 0).UTC().
+func (t *DateTime) IsUnixZero() bool {
+ if t == nil {
+ return true
+ }
+ return time.Time(*t).Equal(UnixZero)
+}
+
+// MarshalText implements the text marshaller interface
+func (t DateTime) MarshalText() ([]byte, error) {
+ return []byte(t.String()), nil
+}
+
+// UnmarshalText implements the text unmarshaller interface
+func (t *DateTime) UnmarshalText(text []byte) error {
+ tt, err := ParseDateTime(string(text))
+ if err != nil {
+ return err
+ }
+ *t = tt
+ return nil
+}
+
+// Scan scans a DateTime value from database driver type.
+func (t *DateTime) Scan(raw interface{}) error {
+ // TODO: case int64: and case float64: ?
+ switch v := raw.(type) {
+ case []byte:
+ return t.UnmarshalText(v)
+ case string:
+ return t.UnmarshalText([]byte(v))
+ case time.Time:
+ *t = DateTime(v)
+ case nil:
+ *t = DateTime{}
+ default:
+ return fmt.Errorf("cannot sql.Scan() strfmt.DateTime from: %#v", v)
+ }
+
+ return nil
+}
+
+// Value converts DateTime to a primitive value ready to be written to a database.
+func (t DateTime) Value() (driver.Value, error) {
+ return driver.Value(t.String()), nil
+}
+
+// MarshalJSON returns the DateTime as JSON
+func (t DateTime) MarshalJSON() ([]byte, error) {
+ return json.Marshal(NormalizeTimeForMarshal(time.Time(t)).Format(MarshalFormat))
+}
+
+// UnmarshalJSON sets the DateTime from JSON
+func (t *DateTime) UnmarshalJSON(data []byte) error {
+ if string(data) == jsonNull {
+ return nil
+ }
+
+ var tstr string
+ if err := json.Unmarshal(data, &tstr); err != nil {
+ return err
+ }
+ tt, err := ParseDateTime(tstr)
+ if err != nil {
+ return err
+ }
+ *t = tt
+ return nil
+}
+
+// MarshalBSON renders the DateTime as a BSON document
+func (t DateTime) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": t})
+}
+
+// UnmarshalBSON reads the DateTime from a BSON document
+func (t *DateTime) UnmarshalBSON(data []byte) error {
+ var obj struct {
+ Data DateTime
+ }
+
+ if err := bson.Unmarshal(data, &obj); err != nil {
+ return err
+ }
+
+ *t = obj.Data
+
+ return nil
+}
+
+// MarshalBSONValue is an interface implemented by types that can marshal themselves
+// into a BSON document represented as bytes. The bytes returned must be a valid
+// BSON document if the error is nil.
+// Marshals a DateTime as a bsontype.DateTime, an int64 representing
+// milliseconds since epoch.
+func (t DateTime) MarshalBSONValue() (bsontype.Type, []byte, error) {
+ // UnixNano cannot be used directly: the result of calling UnixNano on the zero
+ // Time is undefined. That's why we combine Unix() and Nanosecond() instead.
+
+ tNorm := NormalizeTimeForMarshal(time.Time(t))
+ i64 := tNorm.Unix()*1000 + int64(tNorm.Nanosecond())/1e6
+
+ buf := make([]byte, 8)
+ binary.LittleEndian.PutUint64(buf, uint64(i64))
+
+ return bsontype.DateTime, buf, nil
+}
+
+// UnmarshalBSONValue is an interface implemented by types that can unmarshal a
+// BSON value representation of themselves. The BSON bytes and type can be
+// assumed to be valid. UnmarshalBSONValue must copy the BSON value bytes if it
+// wishes to retain the data after returning.
+func (t *DateTime) UnmarshalBSONValue(tpe bsontype.Type, data []byte) error {
+ if tpe == bsontype.Null {
+ *t = DateTime{}
+ return nil
+ }
+
+ if len(data) != 8 {
+ return errors.New("bson date field length not exactly 8 bytes")
+ }
+
+ i64 := int64(binary.LittleEndian.Uint64(data))
+ // TODO: Use bsonprim.DateTime.Time() method
+ *t = DateTime(time.Unix(i64/1000, i64%1000*1000000))
+
+ return nil
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (t *DateTime) DeepCopyInto(out *DateTime) {
+ *out = *t
+}
+
+// DeepCopy copies the receiver into a new DateTime.
+func (t *DateTime) DeepCopy() *DateTime {
+ if t == nil {
+ return nil
+ }
+ out := new(DateTime)
+ t.DeepCopyInto(out)
+ return out
+}
+
+// GobEncode implements the gob.GobEncoder interface.
+func (t DateTime) GobEncode() ([]byte, error) {
+ return t.MarshalBinary()
+}
+
+// GobDecode implements the gob.GobDecoder interface.
+func (t *DateTime) GobDecode(data []byte) error {
+ return t.UnmarshalBinary(data)
+}
+
+// MarshalBinary implements the encoding.BinaryMarshaler interface.
+func (t DateTime) MarshalBinary() ([]byte, error) {
+ return NormalizeTimeForMarshal(time.Time(t)).MarshalBinary()
+}
+
+// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface.
+func (t *DateTime) UnmarshalBinary(data []byte) error {
+ var original time.Time
+
+ err := original.UnmarshalBinary(data)
+ if err != nil {
+ return err
+ }
+
+ *t = DateTime(original)
+
+ return nil
+}
+
+// Equal checks if two DateTime instances are equal using time.Time's Equal method
+func (t DateTime) Equal(t2 DateTime) bool {
+ return time.Time(t).Equal(time.Time(t2))
+}
diff --git a/vendor/github.com/go-openapi/strfmt/ulid.go b/vendor/github.com/go-openapi/strfmt/ulid.go
new file mode 100644
index 000000000..e71aff7c3
--- /dev/null
+++ b/vendor/github.com/go-openapi/strfmt/ulid.go
@@ -0,0 +1,230 @@
+package strfmt
+
+import (
+ cryptorand "crypto/rand"
+ "database/sql/driver"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "sync"
+
+ "github.com/oklog/ulid"
+ "go.mongodb.org/mongo-driver/bson"
+)
+
+// ULID represents a ULID string format
+//
+// ref:  https://github.com/ulid/spec
+//
+// impl: https://github.com/oklog/ulid
+//
+// swagger:strfmt ulid
+type ULID struct {
+ ulid.ULID
+}
+
+var (
+ ulidEntropyPool = sync.Pool{
+ New: func() interface{} {
+ return cryptorand.Reader
+ },
+ }
+
+ ULIDScanDefaultFunc = func(raw interface{}) (ULID, error) {
+ u := NewULIDZero()
+ switch x := raw.(type) {
+ case nil:
+ // zero ULID
+ return u, nil
+ case string:
+ if x == "" {
+ // zero ulid
+ return u, nil
+ }
+ return u, u.UnmarshalText([]byte(x))
+ case []byte:
+ return u, u.UnmarshalText(x)
+ }
+
+ return u, fmt.Errorf("cannot sql.Scan() strfmt.ULID from: %#v: %w", raw, ulid.ErrScanValue)
+ }
+
+ // ULIDScanOverrideFunc allows you to override the Scan method of the ULID type
+ ULIDScanOverrideFunc = ULIDScanDefaultFunc
+
+ ULIDValueDefaultFunc = func(u ULID) (driver.Value, error) {
+ return driver.Value(u.String()), nil
+ }
+
+ // ULIDValueOverrideFunc allows you to override the Value method of the ULID type
+ ULIDValueOverrideFunc = ULIDValueDefaultFunc
+)
+
+func init() {
+ // register formats in the default registry:
+ // - ulid
+ ulid := ULID{}
+ Default.Add("ulid", &ulid, IsULID)
+}
+
+// IsULID checks whether the provided string is in ULID format.
+// Note that this function considers an overflowed ULID to be invalid.
+// For more details see https://github.com/ulid/spec
+func IsULID(str string) bool {
+ _, err := ulid.ParseStrict(str)
+ return err == nil
+}
+
+// ParseULID parses a string that represents a valid ULID
+func ParseULID(str string) (ULID, error) {
+ var u ULID
+
+ return u, u.UnmarshalText([]byte(str))
+}
+
+// NewULIDZero returns a zero-valued ULID
+func NewULIDZero() ULID {
+ return ULID{}
+}
+
+// NewULID generates a new unique ULID value, returning an error if generation fails
+func NewULID() (ULID, error) {
+ var u ULID
+
+ obj := ulidEntropyPool.Get()
+ entropy, ok := obj.(io.Reader)
+ if !ok {
+ return u, fmt.Errorf("failed to cast %+v to io.Reader", obj)
+ }
+
+ id, err := ulid.New(ulid.Now(), entropy)
+ if err != nil {
+ return u, err
+ }
+ ulidEntropyPool.Put(entropy)
+
+ u.ULID = id
+ return u, nil
+}
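+
+// Generating and printing a fresh ULID (illustrative; the printed value is
+// only an example):
+//
+//    u, err := NewULID()
+//    if err == nil {
+//        fmt.Println(u.String()) // e.g. 01HZX3K9GJ5Q8R7T2M4N6P8S0V
+//    }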
+
+// GetULID returns the underlying ulid.ULID instance
+func (u *ULID) GetULID() interface{} {
+ return u.ULID
+}
+
+// MarshalText turns this instance into text
+func (u ULID) MarshalText() ([]byte, error) {
+ return u.ULID.MarshalText()
+}
+
+// UnmarshalText hydrates this instance from text
+func (u *ULID) UnmarshalText(data []byte) error { // validation is performed later on
+ return u.ULID.UnmarshalText(data)
+}
+
+// Scan reads a value from a database driver
+func (u *ULID) Scan(raw interface{}) error {
+ ul, err := ULIDScanOverrideFunc(raw)
+ if err == nil {
+ *u = ul
+ }
+ return err
+}
+
+// Value converts a value to a database driver value
+func (u ULID) Value() (driver.Value, error) {
+ return ULIDValueOverrideFunc(u)
+}
+
+func (u ULID) String() string {
+ return u.ULID.String()
+}
+
+// MarshalJSON returns the ULID as JSON
+func (u ULID) MarshalJSON() ([]byte, error) {
+ return json.Marshal(u.String())
+}
+
+// UnmarshalJSON sets the ULID from JSON
+func (u *ULID) UnmarshalJSON(data []byte) error {
+ if string(data) == jsonNull {
+ return nil
+ }
+ var ustr string
+ if err := json.Unmarshal(data, &ustr); err != nil {
+ return err
+ }
+ id, err := ulid.ParseStrict(ustr)
+ if err != nil {
+ return fmt.Errorf("couldn't parse JSON value as ULID: %w", err)
+ }
+ u.ULID = id
+ return nil
+}
+
+// MarshalBSON renders a BSON document from this value
+func (u ULID) MarshalBSON() ([]byte, error) {
+ return bson.Marshal(bson.M{"data": u.String()})
+}
+
+// UnmarshalBSON reads a BSON document into this value
+func (u *ULID) UnmarshalBSON(data []byte) error {
+ var m bson.M
+ if err := bson.Unmarshal(data, &m); err != nil {
+ return err
+ }
+
+ if ud, ok := m["data"].(string); ok {
+ id, err := ulid.ParseStrict(ud)
+ if err != nil {
+ return fmt.Errorf("couldn't parse bson bytes as ULID: %w", err)
+ }
+ u.ULID = id
+ return nil
+ }
+ return errors.New("couldn't unmarshal bson bytes as ULID")
+}
+
+// DeepCopyInto copies the receiver and writes its value into out.
+func (u *ULID) DeepCopyInto(out *ULID) {
+ *out = *u
+}
+
+// DeepCopy copies the receiver into a new ULID.
+func (u *ULID) DeepCopy() *ULID {
+ if u == nil {
+ return nil
+ }
+ out := new(ULID)
+ u.DeepCopyInto(out)
+ return out
+}
+
+// GobEncode implements the gob.GobEncoder interface.
+func (u ULID) GobEncode() ([]byte, error) {
+ return u.ULID.MarshalBinary()
+}
+
+// GobDecode implements the gob.GobDecoder interface.
+func (u *ULID) GobDecode(data []byte) error {
+ return u.ULID.UnmarshalBinary(data)
+}
+
+// MarshalBinary implements the encoding.BinaryMarshaler interface.
+func (u ULID) MarshalBinary() ([]byte, error) {
+ return u.ULID.MarshalBinary()
+}
+
+// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface.
+func (u *ULID) UnmarshalBinary(data []byte) error {
+ return u.ULID.UnmarshalBinary(data)
+}
+
+// Equal checks if two ULID instances are equal by their underlying type
+func (u ULID) Equal(other ULID) bool {
+ return u.ULID == other.ULID
+}
diff --git a/vendor/github.com/go-openapi/swag/.editorconfig b/vendor/github.com/go-openapi/swag/.editorconfig
new file mode 100644
index 000000000..3152da69a
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/.editorconfig
@@ -0,0 +1,26 @@
+# top-most EditorConfig file
+root = true
+
+# Unix-style newlines with a newline ending every file
+[*]
+end_of_line = lf
+insert_final_newline = true
+indent_style = space
+indent_size = 2
+trim_trailing_whitespace = true
+
+# Set default charset
+[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
+charset = utf-8
+
+# Tab indentation (no size specified)
+[*.go]
+indent_style = tab
+
+[*.md]
+trim_trailing_whitespace = false
+
+# Matches the exact files either package.json or .travis.yml
+[{package.json,.travis.yml}]
+indent_style = space
+indent_size = 2
diff --git a/vendor/github.com/go-openapi/swag/.gitattributes b/vendor/github.com/go-openapi/swag/.gitattributes
new file mode 100644
index 000000000..49ad52766
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/.gitattributes
@@ -0,0 +1,2 @@
+# gofmt always uses LF, whereas Git uses CRLF on Windows.
+*.go text eol=lf
diff --git a/vendor/github.com/go-openapi/swag/.gitignore b/vendor/github.com/go-openapi/swag/.gitignore
new file mode 100644
index 000000000..d69b53acc
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/.gitignore
@@ -0,0 +1,4 @@
+secrets.yml
+vendor
+Godeps
+.idea
diff --git a/vendor/github.com/go-openapi/swag/.golangci.yml b/vendor/github.com/go-openapi/swag/.golangci.yml
new file mode 100644
index 000000000..bf503e400
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/.golangci.yml
@@ -0,0 +1,54 @@
+linters-settings:
+ govet:
+ check-shadowing: true
+ golint:
+ min-confidence: 0
+ gocyclo:
+ min-complexity: 25
+ maligned:
+ suggest-new: true
+ dupl:
+ threshold: 100
+ goconst:
+ min-len: 3
+ min-occurrences: 2
+
+linters:
+ enable-all: true
+ disable:
+ - maligned
+ - lll
+ - gochecknoinits
+ - gochecknoglobals
+ - nlreturn
+ - testpackage
+ - wrapcheck
+ - gomnd
+ - exhaustive
+ - exhaustivestruct
+ - goerr113
+ - wsl
+ - whitespace
+ - gofumpt
+ - godot
+ - nestif
+ - godox
+ - funlen
+ - gci
+ - gocognit
+ - paralleltest
+ - thelper
+ - ifshort
+ - gomoddirectives
+ - cyclop
+ - forcetypeassert
+ - ireturn
+ - tagliatelle
+ - varnamelen
+ - goimports
+ - tenv
+ - golint
+ - exhaustruct
+ - nilnil
+ - nonamedreturns
+ - nosnakecase
diff --git a/vendor/github.com/go-openapi/swag/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/swag/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..9322b065e
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/CODE_OF_CONDUCT.md
@@ -0,0 +1,74 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of experience,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at ivan+abuse@flanders.co.nz. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/swag/LICENSE b/vendor/github.com/go-openapi/swag/LICENSE
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/go-openapi/swag/README.md b/vendor/github.com/go-openapi/swag/README.md
new file mode 100644
index 000000000..217f6fa50
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/README.md
@@ -0,0 +1,21 @@
+# Swag [![Build Status](https://travis-ci.org/go-openapi/swag.svg?branch=master)](https://travis-ci.org/go-openapi/swag) [![codecov](https://codecov.io/gh/go-openapi/swag/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/swag) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
+
+[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/swag/master/LICENSE)
+[![GoDoc](https://godoc.org/github.com/go-openapi/swag?status.svg)](http://godoc.org/github.com/go-openapi/swag)
+[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/swag)](https://goreportcard.com/report/github.com/go-openapi/swag)
+
+Contains a bunch of helper functions for go-openapi and go-swagger projects.
+
+You may also use it standalone for your projects.
+
+* convert between value and pointers for builtin types
+* convert from string to builtin types (wraps strconv)
+* fast json concatenation
+* search in path
+* load from file or http
+* name mangling
+
+
+This repo has only a few dependencies outside of the standard library:
+
+* YAML utilities depend on gopkg.in/yaml.v2
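A quick sketch of the first two bullet points (pointer helpers and strconv wrappers) as they tend to be used:

package main

import (
	"fmt"

	"github.com/go-openapi/swag"
)

func main() {
	// convert between value and pointers
	name := swag.String("alice")        // *string
	fmt.Println(swag.StringValue(name)) // "alice"
	fmt.Println(swag.StringValue(nil))  // "" (nil-safe)

	// convert from string to builtin types
	ok, _ := swag.ConvertBool("Yes") // matches a case-insensitive truthy set
	fmt.Println(ok)                  // true
}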
diff --git a/vendor/github.com/go-openapi/swag/convert.go b/vendor/github.com/go-openapi/swag/convert.go
new file mode 100644
index 000000000..fc085aeb8
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/convert.go
@@ -0,0 +1,208 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "math"
+ "strconv"
+ "strings"
+)
+
+// same as ECMA Number.MAX_SAFE_INTEGER and Number.MIN_SAFE_INTEGER
+const (
+ maxJSONFloat = float64(1<<53 - 1) // 9007199254740991.0 2^53 - 1
+ minJSONFloat = -float64(1<<53 - 1) //-9007199254740991.0 -(2^53 - 1)
+ epsilon float64 = 1e-9
+)
+
+// IsFloat64AJSONInteger reports whether f is an integer within [-(2^53 - 1), 2^53 - 1], the JSON-safe range
+func IsFloat64AJSONInteger(f float64) bool {
+ if math.IsNaN(f) || math.IsInf(f, 0) || f < minJSONFloat || f > maxJSONFloat {
+ return false
+ }
+ fa := math.Abs(f)
+ g := float64(uint64(f))
+ ga := math.Abs(g)
+
+ diff := math.Abs(f - g)
+
+ // more info: https://floating-point-gui.de/errors/comparison/#look-out-for-edge-cases
+ switch {
+ case f == g: // best case
+ return true
+ case f == float64(int64(f)) || f == float64(uint64(f)): // optimistic case
+ return true
+ case f == 0 || g == 0 || diff < math.SmallestNonzeroFloat64: // very close to 0 values
+ return diff < (epsilon * math.SmallestNonzeroFloat64)
+ }
+ // check the relative error
+ return diff/math.Min(fa+ga, math.MaxFloat64) < epsilon
+}
+
+var evaluatesAsTrue map[string]struct{}
+
+func init() {
+ evaluatesAsTrue = map[string]struct{}{
+ "true": {},
+ "1": {},
+ "yes": {},
+ "ok": {},
+ "y": {},
+ "on": {},
+ "selected": {},
+ "checked": {},
+ "t": {},
+ "enabled": {},
+ }
+}
+
+// ConvertBool turns a string into a boolean
+func ConvertBool(str string) (bool, error) {
+ _, ok := evaluatesAsTrue[strings.ToLower(str)]
+ return ok, nil
+}
+
+// ConvertFloat32 turns a string into a float32
+func ConvertFloat32(str string) (float32, error) {
+ f, err := strconv.ParseFloat(str, 32)
+ if err != nil {
+ return 0, err
+ }
+ return float32(f), nil
+}
+
+// ConvertFloat64 turns a string into a float64
+func ConvertFloat64(str string) (float64, error) {
+ return strconv.ParseFloat(str, 64)
+}
+
+// ConvertInt8 turns a string into an int8
+func ConvertInt8(str string) (int8, error) {
+ i, err := strconv.ParseInt(str, 10, 8)
+ if err != nil {
+ return 0, err
+ }
+ return int8(i), nil
+}
+
+// ConvertInt16 turns a string into an int16
+func ConvertInt16(str string) (int16, error) {
+ i, err := strconv.ParseInt(str, 10, 16)
+ if err != nil {
+ return 0, err
+ }
+ return int16(i), nil
+}
+
+// ConvertInt32 turns a string into an int32
+func ConvertInt32(str string) (int32, error) {
+ i, err := strconv.ParseInt(str, 10, 32)
+ if err != nil {
+ return 0, err
+ }
+ return int32(i), nil
+}
+
+// ConvertInt64 turns a string into an int64
+func ConvertInt64(str string) (int64, error) {
+ return strconv.ParseInt(str, 10, 64)
+}
+
+// ConvertUint8 turns a string into a uint8
+func ConvertUint8(str string) (uint8, error) {
+ i, err := strconv.ParseUint(str, 10, 8)
+ if err != nil {
+ return 0, err
+ }
+ return uint8(i), nil
+}
+
+// ConvertUint16 turns a string into a uint16
+func ConvertUint16(str string) (uint16, error) {
+ i, err := strconv.ParseUint(str, 10, 16)
+ if err != nil {
+ return 0, err
+ }
+ return uint16(i), nil
+}
+
+// ConvertUint32 turns a string into a uint32
+func ConvertUint32(str string) (uint32, error) {
+ i, err := strconv.ParseUint(str, 10, 32)
+ if err != nil {
+ return 0, err
+ }
+ return uint32(i), nil
+}
+
+// ConvertUint64 turns a string into a uint64
+func ConvertUint64(str string) (uint64, error) {
+ return strconv.ParseUint(str, 10, 64)
+}
+
+// FormatBool turns a boolean into a string
+func FormatBool(value bool) string {
+ return strconv.FormatBool(value)
+}
+
+// FormatFloat32 turns a float32 into a string
+func FormatFloat32(value float32) string {
+ return strconv.FormatFloat(float64(value), 'f', -1, 32)
+}
+
+// FormatFloat64 turns a float64 into a string
+func FormatFloat64(value float64) string {
+ return strconv.FormatFloat(value, 'f', -1, 64)
+}
+
+// FormatInt8 turns an int8 into a string
+func FormatInt8(value int8) string {
+ return strconv.FormatInt(int64(value), 10)
+}
+
+// FormatInt16 turns an int16 into a string
+func FormatInt16(value int16) string {
+ return strconv.FormatInt(int64(value), 10)
+}
+
+// FormatInt32 turns an int32 into a string
+func FormatInt32(value int32) string {
+ return strconv.Itoa(int(value))
+}
+
+// FormatInt64 turns an int64 into a string
+func FormatInt64(value int64) string {
+ return strconv.FormatInt(value, 10)
+}
+
+// FormatUint8 turns a uint8 into a string
+func FormatUint8(value uint8) string {
+ return strconv.FormatUint(uint64(value), 10)
+}
+
+// FormatUint16 turns a uint16 into a string
+func FormatUint16(value uint16) string {
+ return strconv.FormatUint(uint64(value), 10)
+}
+
+// FormatUint32 turns a uint32 into a string
+func FormatUint32(value uint32) string {
+ return strconv.FormatUint(uint64(value), 10)
+}
+
+// FormatUint64 turns a uint64 into a string
+func FormatUint64(value uint64) string {
+ return strconv.FormatUint(value, 10)
+}
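The 2^53 bound above matters because float64 can represent every integer exactly only up to that point; a short sketch of the boundary behavior of IsFloat64AJSONInteger:

package main

import (
	"fmt"

	"github.com/go-openapi/swag"
)

func main() {
	fmt.Println(swag.IsFloat64AJSONInteger(9007199254740991)) // true: 2^53 - 1 is still exact
	fmt.Println(swag.IsFloat64AJSONInteger(9007199254740992)) // false: 2^53 falls outside the safe range
	fmt.Println(swag.IsFloat64AJSONInteger(3.5))              // false: not an integer at all
}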
diff --git a/vendor/github.com/go-openapi/swag/convert_types.go b/vendor/github.com/go-openapi/swag/convert_types.go
new file mode 100644
index 000000000..c49cc473a
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/convert_types.go
@@ -0,0 +1,730 @@
+package swag
+
+import "time"
+
+// This file was taken from the AWS Go SDK.
+
+// String returns a pointer to the string value passed in.
+func String(v string) *string {
+ return &v
+}
+
+// StringValue returns the value of the string pointer passed in or
+// "" if the pointer is nil.
+func StringValue(v *string) string {
+ if v != nil {
+ return *v
+ }
+ return ""
+}
+
+// StringSlice converts a slice of string values into a slice of
+// string pointers
+func StringSlice(src []string) []*string {
+ dst := make([]*string, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// StringValueSlice converts a slice of string pointers into a slice of
+// string values
+func StringValueSlice(src []*string) []string {
+ dst := make([]string, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// StringMap converts a string map of string values into a string
+// map of string pointers
+func StringMap(src map[string]string) map[string]*string {
+ dst := make(map[string]*string)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// StringValueMap converts a string map of string pointers into a string
+// map of string values
+func StringValueMap(src map[string]*string) map[string]string {
+ dst := make(map[string]string)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Bool returns a pointer to the bool value passed in.
+func Bool(v bool) *bool {
+ return &v
+}
+
+// BoolValue returns the value of the bool pointer passed in or
+// false if the pointer is nil.
+func BoolValue(v *bool) bool {
+ if v != nil {
+ return *v
+ }
+ return false
+}
+
+// BoolSlice converts a slice of bool values into a slice of
+// bool pointers
+func BoolSlice(src []bool) []*bool {
+ dst := make([]*bool, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// BoolValueSlice converts a slice of bool pointers into a slice of
+// bool values
+func BoolValueSlice(src []*bool) []bool {
+ dst := make([]bool, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// BoolMap converts a string map of bool values into a string
+// map of bool pointers
+func BoolMap(src map[string]bool) map[string]*bool {
+ dst := make(map[string]*bool)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// BoolValueMap converts a string map of bool pointers into a string
+// map of bool values
+func BoolValueMap(src map[string]*bool) map[string]bool {
+ dst := make(map[string]bool)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Int returns a pointer to the int value passed in.
+func Int(v int) *int {
+ return &v
+}
+
+// IntValue returns the value of the int pointer passed in or
+// 0 if the pointer is nil.
+func IntValue(v *int) int {
+ if v != nil {
+ return *v
+ }
+ return 0
+}
+
+// IntSlice converts a slice of int values into a slice of
+// int pointers
+func IntSlice(src []int) []*int {
+ dst := make([]*int, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// IntValueSlice converts a slice of int pointers into a slice of
+// int values
+func IntValueSlice(src []*int) []int {
+ dst := make([]int, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// IntMap converts a string map of int values into a string
+// map of int pointers
+func IntMap(src map[string]int) map[string]*int {
+ dst := make(map[string]*int)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// IntValueMap converts a string map of int pointers into a string
+// map of int values
+func IntValueMap(src map[string]*int) map[string]int {
+ dst := make(map[string]int)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Int32 returns a pointer to the int32 value passed in.
+func Int32(v int32) *int32 {
+ return &v
+}
+
+// Int32Value returns the value of the int32 pointer passed in or
+// 0 if the pointer is nil.
+func Int32Value(v *int32) int32 {
+ if v != nil {
+ return *v
+ }
+ return 0
+}
+
+// Int32Slice converts a slice of int32 values into a slice of
+// int32 pointers
+func Int32Slice(src []int32) []*int32 {
+ dst := make([]*int32, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// Int32ValueSlice converts a slice of int32 pointers into a slice of
+// int32 values
+func Int32ValueSlice(src []*int32) []int32 {
+ dst := make([]int32, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// Int32Map converts a string map of int32 values into a string
+// map of int32 pointers
+func Int32Map(src map[string]int32) map[string]*int32 {
+ dst := make(map[string]*int32)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// Int32ValueMap converts a string map of int32 pointers into a string
+// map of int32 values
+func Int32ValueMap(src map[string]*int32) map[string]int32 {
+ dst := make(map[string]int32)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Int64 returns a pointer to the int64 value passed in.
+func Int64(v int64) *int64 {
+ return &v
+}
+
+// Int64Value returns the value of the int64 pointer passed in or
+// 0 if the pointer is nil.
+func Int64Value(v *int64) int64 {
+ if v != nil {
+ return *v
+ }
+ return 0
+}
+
+// Int64Slice converts a slice of int64 values into a slice of
+// int64 pointers
+func Int64Slice(src []int64) []*int64 {
+ dst := make([]*int64, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// Int64ValueSlice converts a slice of int64 pointers into a slice of
+// int64 values
+func Int64ValueSlice(src []*int64) []int64 {
+ dst := make([]int64, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// Int64Map converts a string map of int64 values into a string
+// map of int64 pointers
+func Int64Map(src map[string]int64) map[string]*int64 {
+ dst := make(map[string]*int64)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// Int64ValueMap converts a string map of int64 pointers into a string
+// map of int64 values
+func Int64ValueMap(src map[string]*int64) map[string]int64 {
+ dst := make(map[string]int64)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Uint16 returns a pointer to the uint16 value passed in.
+func Uint16(v uint16) *uint16 {
+ return &v
+}
+
+// Uint16Value returns the value of the uint16 pointer passed in or
+// 0 if the pointer is nil.
+func Uint16Value(v *uint16) uint16 {
+ if v != nil {
+ return *v
+ }
+
+ return 0
+}
+
+// Uint16Slice converts a slice of uint16 values into a slice of
+// uint16 pointers
+func Uint16Slice(src []uint16) []*uint16 {
+ dst := make([]*uint16, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+
+ return dst
+}
+
+// Uint16ValueSlice converts a slice of uint16 pointers into a slice of
+// uint16 values
+func Uint16ValueSlice(src []*uint16) []uint16 {
+ dst := make([]uint16, len(src))
+
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+
+ return dst
+}
+
+// Uint16Map converts a string map of uint16 values into a string
+// map of uint16 pointers
+func Uint16Map(src map[string]uint16) map[string]*uint16 {
+ dst := make(map[string]*uint16)
+
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+
+ return dst
+}
+
+// Uint16ValueMap converts a string map of uint16 pointers into a string
+// map of uint16 values
+func Uint16ValueMap(src map[string]*uint16) map[string]uint16 {
+ dst := make(map[string]uint16)
+
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+
+ return dst
+}
+
+// Uint returns a pointer to the uint value passed in.
+func Uint(v uint) *uint {
+ return &v
+}
+
+// UintValue returns the value of the uint pointer passed in or
+// 0 if the pointer is nil.
+func UintValue(v *uint) uint {
+ if v != nil {
+ return *v
+ }
+ return 0
+}
+
+// UintSlice converts a slice of uint values into a slice of
+// uint pointers
+func UintSlice(src []uint) []*uint {
+ dst := make([]*uint, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// UintValueSlice converts a slice of uint pointers into a slice of
+// uint values
+func UintValueSlice(src []*uint) []uint {
+ dst := make([]uint, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// UintMap converts a string map of uint values into a string
+// map of uint pointers
+func UintMap(src map[string]uint) map[string]*uint {
+ dst := make(map[string]*uint)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// UintValueMap converts a string map of uint pointers into a string
+// map of uint values
+func UintValueMap(src map[string]*uint) map[string]uint {
+ dst := make(map[string]uint)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Uint32 returns a pointer to the uint32 value passed in.
+func Uint32(v uint32) *uint32 {
+ return &v
+}
+
+// Uint32Value returns the value of the uint32 pointer passed in or
+// 0 if the pointer is nil.
+func Uint32Value(v *uint32) uint32 {
+ if v != nil {
+ return *v
+ }
+ return 0
+}
+
+// Uint32Slice converts a slice of uint32 values into a slice of
+// uint32 pointers
+func Uint32Slice(src []uint32) []*uint32 {
+ dst := make([]*uint32, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// Uint32ValueSlice converts a slice of uint32 pointers into a slice of
+// uint32 values
+func Uint32ValueSlice(src []*uint32) []uint32 {
+ dst := make([]uint32, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// Uint32Map converts a string map of uint32 values into a string
+// map of uint32 pointers
+func Uint32Map(src map[string]uint32) map[string]*uint32 {
+ dst := make(map[string]*uint32)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// Uint32ValueMap converts a string map of uint32 pointers into a string
+// map of uint32 values
+func Uint32ValueMap(src map[string]*uint32) map[string]uint32 {
+ dst := make(map[string]uint32)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Uint64 returns a pointer to the uint64 value passed in.
+func Uint64(v uint64) *uint64 {
+ return &v
+}
+
+// Uint64Value returns the value of the uint64 pointer passed in or
+// 0 if the pointer is nil.
+func Uint64Value(v *uint64) uint64 {
+ if v != nil {
+ return *v
+ }
+ return 0
+}
+
+// Uint64Slice converts a slice of uint64 values into a slice of
+// uint64 pointers
+func Uint64Slice(src []uint64) []*uint64 {
+ dst := make([]*uint64, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// Uint64ValueSlice converts a slice of uint64 pointers into a slice of
+// uint64 values
+func Uint64ValueSlice(src []*uint64) []uint64 {
+ dst := make([]uint64, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// Uint64Map converts a string map of uint64 values into a string
+// map of uint64 pointers
+func Uint64Map(src map[string]uint64) map[string]*uint64 {
+ dst := make(map[string]*uint64)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// Uint64ValueMap converts a string map of uint64 pointers into a string
+// map of uint64 values
+func Uint64ValueMap(src map[string]*uint64) map[string]uint64 {
+ dst := make(map[string]uint64)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Float32 returns a pointer to the float32 value passed in.
+func Float32(v float32) *float32 {
+ return &v
+}
+
+// Float32Value returns the value of the float32 pointer passed in or
+// 0 if the pointer is nil.
+func Float32Value(v *float32) float32 {
+ if v != nil {
+ return *v
+ }
+
+ return 0
+}
+
+// Float32Slice converts a slice of float32 values into a slice of
+// float32 pointers
+func Float32Slice(src []float32) []*float32 {
+ dst := make([]*float32, len(src))
+
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+
+ return dst
+}
+
+// Float32ValueSlice converts a slice of float32 pointers into a slice of
+// float32 values
+func Float32ValueSlice(src []*float32) []float32 {
+ dst := make([]float32, len(src))
+
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+
+ return dst
+}
+
+// Float32Map converts a string map of float32 values into a string
+// map of float32 pointers
+func Float32Map(src map[string]float32) map[string]*float32 {
+ dst := make(map[string]*float32)
+
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+
+ return dst
+}
+
+// Float32ValueMap converts a string map of float32 pointers into a string
+// map of float32 values
+func Float32ValueMap(src map[string]*float32) map[string]float32 {
+ dst := make(map[string]float32)
+
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+
+ return dst
+}
+
+// Float64 returns a pointer to the float64 value passed in.
+func Float64(v float64) *float64 {
+ return &v
+}
+
+// Float64Value returns the value of the float64 pointer passed in or
+// 0 if the pointer is nil.
+func Float64Value(v *float64) float64 {
+ if v != nil {
+ return *v
+ }
+ return 0
+}
+
+// Float64Slice converts a slice of float64 values into a slice of
+// float64 pointers
+func Float64Slice(src []float64) []*float64 {
+ dst := make([]*float64, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// Float64ValueSlice converts a slice of float64 pointers into a slice of
+// float64 values
+func Float64ValueSlice(src []*float64) []float64 {
+ dst := make([]float64, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// Float64Map converts a string map of float64 values into a string
+// map of float64 pointers
+func Float64Map(src map[string]float64) map[string]*float64 {
+ dst := make(map[string]*float64)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// Float64ValueMap converts a string map of float64 pointers into a string
+// map of float64 values
+func Float64ValueMap(src map[string]*float64) map[string]float64 {
+ dst := make(map[string]float64)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
+
+// Time returns a pointer to the time.Time value passed in.
+func Time(v time.Time) *time.Time {
+ return &v
+}
+
+// TimeValue returns the value of the time.Time pointer passed in or
+// time.Time{} if the pointer is nil.
+func TimeValue(v *time.Time) time.Time {
+ if v != nil {
+ return *v
+ }
+ return time.Time{}
+}
+
+// TimeSlice converts a slice of time.Time values into a slice of
+// time.Time pointers
+func TimeSlice(src []time.Time) []*time.Time {
+ dst := make([]*time.Time, len(src))
+ for i := 0; i < len(src); i++ {
+ dst[i] = &(src[i])
+ }
+ return dst
+}
+
+// TimeValueSlice converts a slice of time.Time pointers into a slice of
+// time.Time values
+func TimeValueSlice(src []*time.Time) []time.Time {
+ dst := make([]time.Time, len(src))
+ for i := 0; i < len(src); i++ {
+ if src[i] != nil {
+ dst[i] = *(src[i])
+ }
+ }
+ return dst
+}
+
+// TimeMap converts a string map of time.Time values into a string
+// map of time.Time pointers
+func TimeMap(src map[string]time.Time) map[string]*time.Time {
+ dst := make(map[string]*time.Time)
+ for k, val := range src {
+ v := val
+ dst[k] = &v
+ }
+ return dst
+}
+
+// TimeValueMap converts a string map of time.Time pointers into a string
+// map of time.Time values
+func TimeValueMap(src map[string]*time.Time) map[string]time.Time {
+ dst := make(map[string]time.Time)
+ for k, val := range src {
+ if val != nil {
+ dst[k] = *val
+ }
+ }
+ return dst
+}
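The same value/pointer pattern repeats for slices and maps; nil pointers come back as zero values in slices and are dropped from maps. A short sketch:

package main

import (
	"fmt"

	"github.com/go-openapi/swag"
)

func main() {
	ptrs := swag.Int64Slice([]int64{1, 2, 3}) // []*int64, one pointer per element
	fmt.Println(swag.Int64ValueSlice(ptrs))   // [1 2 3]

	m := swag.StringMap(map[string]string{"a": "x"})
	fmt.Println(swag.StringValueMap(m)) // map[a:x]
}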
diff --git a/vendor/github.com/go-openapi/swag/doc.go b/vendor/github.com/go-openapi/swag/doc.go
new file mode 100644
index 000000000..55094cb74
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/doc.go
@@ -0,0 +1,31 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package swag contains a bunch of helper functions for go-openapi and go-swagger projects.
+
+You may also use it standalone for your projects.
+
+ - convert between value and pointers for builtin types
+ - convert from string to builtin types (wraps strconv)
+ - fast json concatenation
+ - search in path
+ - load from file or http
+ - name mangling
+
+This repo has only a few dependencies outside of the standard library:
+
+ - YAML utilities depend on gopkg.in/yaml.v2
+*/
+package swag
diff --git a/vendor/github.com/go-openapi/swag/file.go b/vendor/github.com/go-openapi/swag/file.go
new file mode 100644
index 000000000..16accc55f
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/file.go
@@ -0,0 +1,33 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import "mime/multipart"
+
+// File represents an uploaded file.
+type File struct {
+ Data multipart.File
+ Header *multipart.FileHeader
+}
+
+// Read bytes from the file
+func (f *File) Read(p []byte) (n int, err error) {
+ return f.Data.Read(p)
+}
+
+// Close the file
+func (f *File) Close() error {
+ return f.Data.Close()
+}
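Because File wraps multipart.File with Read and Close, it satisfies io.ReadCloser and can be handed to any generic consumer. A hedged sketch (firstUpload is a hypothetical helper, not part of the package):

package upload

import (
	"fmt"
	"io"
	"mime/multipart"

	"github.com/go-openapi/swag"
)

// firstUpload wraps the first uploaded file for a form field in swag.File.
func firstUpload(form *multipart.Form, field string) (io.ReadCloser, error) {
	hdrs := form.File[field]
	if len(hdrs) == 0 {
		return nil, fmt.Errorf("no file uploaded for field %q", field)
	}
	f, err := hdrs[0].Open()
	if err != nil {
		return nil, err
	}
	return &swag.File{Data: f, Header: hdrs[0]}, nil
}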
diff --git a/vendor/github.com/go-openapi/swag/json.go b/vendor/github.com/go-openapi/swag/json.go
new file mode 100644
index 000000000..7e9902ca3
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/json.go
@@ -0,0 +1,312 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "bytes"
+ "encoding/json"
+ "log"
+ "reflect"
+ "strings"
+ "sync"
+
+ "github.com/mailru/easyjson/jlexer"
+ "github.com/mailru/easyjson/jwriter"
+)
+
+// nullJSON represents a JSON object with null type
+var nullJSON = []byte("null")
+
+// DefaultJSONNameProvider the default cache for types
+var DefaultJSONNameProvider = NewNameProvider()
+
+const comma = byte(',')
+
+var closers map[byte]byte
+
+func init() {
+ closers = map[byte]byte{
+ '{': '}',
+ '[': ']',
+ }
+}
+
+type ejMarshaler interface {
+ MarshalEasyJSON(w *jwriter.Writer)
+}
+
+type ejUnmarshaler interface {
+ UnmarshalEasyJSON(w *jlexer.Lexer)
+}
+
+// WriteJSON writes json data, prefers finding an appropriate interface to short-circuit the marshaler
+// so it takes the fastest option available.
+func WriteJSON(data interface{}) ([]byte, error) {
+ if d, ok := data.(ejMarshaler); ok {
+ jw := new(jwriter.Writer)
+ d.MarshalEasyJSON(jw)
+ return jw.BuildBytes()
+ }
+ if d, ok := data.(json.Marshaler); ok {
+ return d.MarshalJSON()
+ }
+ return json.Marshal(data)
+}
+
+// ReadJSON reads json data, prefers finding an appropriate interface to short-circuit the unmarshaler
+// so it takes the fastest option available
+func ReadJSON(data []byte, value interface{}) error {
+ trimmedData := bytes.Trim(data, "\x00")
+ if d, ok := value.(ejUnmarshaler); ok {
+ jl := &jlexer.Lexer{Data: trimmedData}
+ d.UnmarshalEasyJSON(jl)
+ return jl.Error()
+ }
+ if d, ok := value.(json.Unmarshaler); ok {
+ return d.UnmarshalJSON(trimmedData)
+ }
+ return json.Unmarshal(trimmedData, value)
+}
+
+// DynamicJSONToStruct converts an untyped json structure into a struct
+func DynamicJSONToStruct(data interface{}, target interface{}) error {
+ // TODO: convert straight to a json typed map (mergo + iterate?)
+ b, err := WriteJSON(data)
+ if err != nil {
+ return err
+ }
+ return ReadJSON(b, target)
+}
+
+// ConcatJSON concatenates multiple json objects efficiently
+func ConcatJSON(blobs ...[]byte) []byte {
+ if len(blobs) == 0 {
+ return nil
+ }
+
+ last := len(blobs) - 1
+ for blobs[last] == nil || bytes.Equal(blobs[last], nullJSON) {
+ // strips trailing null objects
+ last--
+ if last < 0 {
+ // there was nothing but "null"s or nil...
+ return nil
+ }
+ }
+ if last == 0 {
+ return blobs[0]
+ }
+
+ var opening, closing byte
+ var idx, a int
+ buf := bytes.NewBuffer(nil)
+
+ for i, b := range blobs[:last+1] {
+ if b == nil || bytes.Equal(b, nullJSON) {
+ // a null object is in the list: skip it
+ continue
+ }
+ if len(b) > 0 && opening == 0 { // is this an array or an object?
+ opening, closing = b[0], closers[b[0]]
+ }
+
+ if opening != '{' && opening != '[' {
+ continue // don't know how to concatenate non container objects
+ }
+
+ if len(b) < 3 { // yep empty but also the last one, so closing this thing
+ if i == last && a > 0 {
+ if err := buf.WriteByte(closing); err != nil {
+ log.Println(err)
+ }
+ }
+ continue
+ }
+
+ idx = 0
+ if a > 0 { // we need to join with a comma for everything beyond the first non-empty item
+ if err := buf.WriteByte(comma); err != nil {
+ log.Println(err)
+ }
+ idx = 1 // this is not the first or the last so we want to drop the leading bracket
+ }
+
+ if i != last { // not the last one, strip brackets
+ if _, err := buf.Write(b[idx : len(b)-1]); err != nil {
+ log.Println(err)
+ }
+ } else { // last one, strip only the leading bracket
+ if _, err := buf.Write(b[idx:]); err != nil {
+ log.Println(err)
+ }
+ }
+ a++
+ }
+ // somehow it ended up being empty, so provide a default value
+ if buf.Len() == 0 {
+ if err := buf.WriteByte(opening); err != nil {
+ log.Println(err)
+ }
+ if err := buf.WriteByte(closing); err != nil {
+ log.Println(err)
+ }
+ }
+ return buf.Bytes()
+}
+
+// ToDynamicJSON turns an object into a properly JSON typed structure
+func ToDynamicJSON(data interface{}) interface{} {
+ // TODO: convert straight to a json typed map (mergo + iterate?)
+ b, err := json.Marshal(data)
+ if err != nil {
+ log.Println(err)
+ }
+ var res interface{}
+ if err := json.Unmarshal(b, &res); err != nil {
+ log.Println(err)
+ }
+ return res
+}
+
+// FromDynamicJSON marshals an object to JSON and unmarshals the result into target
+func FromDynamicJSON(data, target interface{}) error {
+ b, err := json.Marshal(data)
+ if err != nil {
+ log.Println(err)
+ }
+ return json.Unmarshal(b, target)
+}
+
+// NameProvider represents an object capable of translating from go property names
+// to json property names.
+// This type is thread-safe.
+type NameProvider struct {
+ lock *sync.Mutex
+ index map[reflect.Type]nameIndex
+}
+
+type nameIndex struct {
+ jsonNames map[string]string
+ goNames map[string]string
+}
+
+// NewNameProvider creates a new name provider
+func NewNameProvider() *NameProvider {
+ return &NameProvider{
+ lock: &sync.Mutex{},
+ index: make(map[reflect.Type]nameIndex),
+ }
+}
+
+func buildnameIndex(tpe reflect.Type, idx, reverseIdx map[string]string) {
+ for i := 0; i < tpe.NumField(); i++ {
+ targetDes := tpe.Field(i)
+
+ if targetDes.PkgPath != "" { // unexported
+ continue
+ }
+
+ if targetDes.Anonymous { // walk embedded structures tree down first
+ buildnameIndex(targetDes.Type, idx, reverseIdx)
+ continue
+ }
+
+ if tag := targetDes.Tag.Get("json"); tag != "" {
+
+ parts := strings.Split(tag, ",")
+ if len(parts) == 0 {
+ continue
+ }
+
+ nm := parts[0]
+ if nm == "-" {
+ continue
+ }
+ if nm == "" { // empty string means we want to use the Go name
+ nm = targetDes.Name
+ }
+
+ idx[nm] = targetDes.Name
+ reverseIdx[targetDes.Name] = nm
+ }
+ }
+}
+
+func newNameIndex(tpe reflect.Type) nameIndex {
+ var idx = make(map[string]string, tpe.NumField())
+ var reverseIdx = make(map[string]string, tpe.NumField())
+
+ buildnameIndex(tpe, idx, reverseIdx)
+ return nameIndex{jsonNames: idx, goNames: reverseIdx}
+}
+
+// GetJSONNames gets all the json property names for a type
+func (n *NameProvider) GetJSONNames(subject interface{}) []string {
+ n.lock.Lock()
+ defer n.lock.Unlock()
+ tpe := reflect.Indirect(reflect.ValueOf(subject)).Type()
+ names, ok := n.index[tpe]
+ if !ok {
+ names = n.makeNameIndex(tpe)
+ }
+
+ res := make([]string, 0, len(names.jsonNames))
+ for k := range names.jsonNames {
+ res = append(res, k)
+ }
+ return res
+}
+
+// GetJSONName gets the json name for a go property name
+func (n *NameProvider) GetJSONName(subject interface{}, name string) (string, bool) {
+ tpe := reflect.Indirect(reflect.ValueOf(subject)).Type()
+ return n.GetJSONNameForType(tpe, name)
+}
+
+// GetJSONNameForType gets the json name for a go property name on a given type
+func (n *NameProvider) GetJSONNameForType(tpe reflect.Type, name string) (string, bool) {
+ n.lock.Lock()
+ defer n.lock.Unlock()
+ names, ok := n.index[tpe]
+ if !ok {
+ names = n.makeNameIndex(tpe)
+ }
+ nme, ok := names.goNames[name]
+ return nme, ok
+}
+
+func (n *NameProvider) makeNameIndex(tpe reflect.Type) nameIndex {
+ names := newNameIndex(tpe)
+ n.index[tpe] = names
+ return names
+}
+
+// GetGoName gets the go name for a json property name
+func (n *NameProvider) GetGoName(subject interface{}, name string) (string, bool) {
+ tpe := reflect.Indirect(reflect.ValueOf(subject)).Type()
+ return n.GetGoNameForType(tpe, name)
+}
+
+// GetGoNameForType gets the go name for a given type for a json property name
+func (n *NameProvider) GetGoNameForType(tpe reflect.Type, name string) (string, bool) {
+ n.lock.Lock()
+ defer n.lock.Unlock()
+ names, ok := n.index[tpe]
+ if !ok {
+ names = n.makeNameIndex(tpe)
+ }
+ nme, ok := names.jsonNames[name]
+ return nme, ok
+}
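ConcatJSON is the piece that is easy to get wrong by hand: it splices the bodies of several objects (or arrays) into one container, dropping nil blobs and JSON nulls along the way. A small sketch of the observable behavior:

package main

import (
	"fmt"

	"github.com/go-openapi/swag"
)

func main() {
	merged := swag.ConcatJSON(
		[]byte(`{"a":1}`),
		nil,            // nil blobs are skipped
		[]byte(`null`), // so are JSON nulls
		[]byte(`{"b":2}`),
	)
	fmt.Println(string(merged)) // {"a":1,"b":2}
}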
diff --git a/vendor/github.com/go-openapi/swag/loading.go b/vendor/github.com/go-openapi/swag/loading.go
new file mode 100644
index 000000000..00038c377
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/loading.go
@@ -0,0 +1,121 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "fmt"
+ "io"
+ "log"
+ "net/http"
+ "net/url"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "time"
+)
+
+// LoadHTTPTimeout is the default timeout for load requests
+var LoadHTTPTimeout = 30 * time.Second
+
+// LoadHTTPBasicAuthUsername is the username to use when load requests require basic auth
+var LoadHTTPBasicAuthUsername = ""
+
+// LoadHTTPBasicAuthPassword is the password to use when load requests require basic auth
+var LoadHTTPBasicAuthPassword = ""
+
+// LoadHTTPCustomHeaders is an optional collection of custom HTTP headers for load requests
+var LoadHTTPCustomHeaders = map[string]string{}
+
+// LoadFromFileOrHTTP loads the bytes from a file or a remote http server based on the path passed in
+func LoadFromFileOrHTTP(path string) ([]byte, error) {
+ return LoadStrategy(path, os.ReadFile, loadHTTPBytes(LoadHTTPTimeout))(path)
+}
+
+// LoadFromFileOrHTTPWithTimeout loads the bytes from a file or a remote http server based on the path passed in
+// timeout arg allows for per request overriding of the request timeout
+func LoadFromFileOrHTTPWithTimeout(path string, timeout time.Duration) ([]byte, error) {
+ return LoadStrategy(path, os.ReadFile, loadHTTPBytes(timeout))(path)
+}
+
+// LoadStrategy returns a loader function for a given path or uri
+func LoadStrategy(path string, local, remote func(string) ([]byte, error)) func(string) ([]byte, error) {
+ if strings.HasPrefix(path, "http") {
+ return remote
+ }
+ return func(pth string) ([]byte, error) {
+ upth, err := pathUnescape(pth)
+ if err != nil {
+ return nil, err
+ }
+
+ if strings.HasPrefix(pth, `file://`) {
+ if runtime.GOOS == "windows" {
+ // support for canonical file URIs on windows.
+ // Zero tolerance here for dodgy URIs.
+ u, _ := url.Parse(upth)
+ if u.Host != "" {
+ // assume UNC name (volume share)
+ // file://host/share/folder\... ==> \\host\share\path\folder
+ // NOTE: UNC port not yet supported
+ upth = strings.Join([]string{`\`, u.Host, u.Path}, `\`)
+ } else {
+ // file:///c:/folder/... ==> just remove the leading slash
+ upth = strings.TrimPrefix(upth, `file:///`)
+ }
+ } else {
+ upth = strings.TrimPrefix(upth, `file://`)
+ }
+ }
+
+ return local(filepath.FromSlash(upth))
+ }
+}
+
+func loadHTTPBytes(timeout time.Duration) func(path string) ([]byte, error) {
+ return func(path string) ([]byte, error) {
+ client := &http.Client{Timeout: timeout}
+ req, err := http.NewRequest(http.MethodGet, path, nil) //nolint:noctx
+ if err != nil {
+ return nil, err
+ }
+
+ if LoadHTTPBasicAuthUsername != "" && LoadHTTPBasicAuthPassword != "" {
+ req.SetBasicAuth(LoadHTTPBasicAuthUsername, LoadHTTPBasicAuthPassword)
+ }
+
+ for key, val := range LoadHTTPCustomHeaders {
+ req.Header.Set(key, val)
+ }
+
+ resp, err := client.Do(req)
+ defer func() {
+ if resp != nil {
+ if e := resp.Body.Close(); e != nil {
+ log.Println(e)
+ }
+ }
+ }()
+ if err != nil {
+ return nil, err
+ }
+
+ if resp.StatusCode != http.StatusOK {
+ return nil, fmt.Errorf("could not access document at %q [%s] ", path, resp.Status)
+ }
+
+ return io.ReadAll(resp.Body)
+ }
+}
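LoadStrategy routes anything whose path starts with "http" to the remote loader and otherwise normalizes file:// URIs before reading from disk. Typical use through the convenience wrappers (the URL below is illustrative):

package main

import (
	"fmt"
	"time"

	"github.com/go-openapi/swag"
)

func main() {
	// remote: fetched over HTTP with a per-call timeout override
	spec, err := swag.LoadFromFileOrHTTPWithTimeout("https://example.com/swagger.json", 5*time.Second)
	if err != nil {
		fmt.Println("load failed:", err)
		return
	}
	fmt.Println(len(spec), "bytes loaded")

	// local: same entry point, served by os.ReadFile under the hood
	if local, err := swag.LoadFromFileOrHTTP("./swagger.json"); err == nil {
		fmt.Println(len(local), "bytes loaded from disk")
	}
}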
diff --git a/vendor/github.com/go-openapi/swag/name_lexem.go b/vendor/github.com/go-openapi/swag/name_lexem.go
new file mode 100644
index 000000000..aa7f6a9bb
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/name_lexem.go
@@ -0,0 +1,87 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import "unicode"
+
+type (
+ nameLexem interface {
+ GetUnsafeGoName() string
+ GetOriginal() string
+ IsInitialism() bool
+ }
+
+ initialismNameLexem struct {
+ original string
+ matchedInitialism string
+ }
+
+ casualNameLexem struct {
+ original string
+ }
+)
+
+func newInitialismNameLexem(original, matchedInitialism string) *initialismNameLexem {
+ return &initialismNameLexem{
+ original: original,
+ matchedInitialism: matchedInitialism,
+ }
+}
+
+func newCasualNameLexem(original string) *casualNameLexem {
+ return &casualNameLexem{
+ original: original,
+ }
+}
+
+func (l *initialismNameLexem) GetUnsafeGoName() string {
+ return l.matchedInitialism
+}
+
+func (l *casualNameLexem) GetUnsafeGoName() string {
+ var first rune
+ var rest string
+ for i, orig := range l.original {
+ if i == 0 {
+ first = orig
+ continue
+ }
+ if i > 0 {
+ rest = l.original[i:]
+ break
+ }
+ }
+ if len(l.original) > 1 {
+ return string(unicode.ToUpper(first)) + lower(rest)
+ }
+
+ return l.original
+}
+
+func (l *initialismNameLexem) GetOriginal() string {
+ return l.original
+}
+
+func (l *casualNameLexem) GetOriginal() string {
+ return l.original
+}
+
+func (l *initialismNameLexem) IsInitialism() bool {
+ return true
+}
+
+func (l *casualNameLexem) IsInitialism() bool {
+ return false
+}
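The two lexem kinds above are the units the splitter (split.go, below) emits. A hypothetical in-package test, illustrative only:

```go
package swag

import "testing"

// illustrative sketch: exercises the two lexem kinds defined above
func TestNameLexemSketch(t *testing.T) {
	ini := newInitialismNameLexem("http", "HTTP")
	if ini.GetUnsafeGoName() != "HTTP" || !ini.IsInitialism() {
		t.Fatal("initialism lexem should expose its matched initialism")
	}

	cas := newCasualNameLexem("server")
	if cas.GetUnsafeGoName() != "Server" || cas.IsInitialism() {
		t.Fatal("casual lexem should upper-case only its first rune")
	}
}
```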
diff --git a/vendor/github.com/go-openapi/swag/net.go b/vendor/github.com/go-openapi/swag/net.go
new file mode 100644
index 000000000..821235f84
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/net.go
@@ -0,0 +1,38 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "net"
+ "strconv"
+)
+
+// SplitHostPort splits a network address into a host and a port.
+// The returned port is -1 when no port can be found.
+func SplitHostPort(addr string) (host string, port int, err error) {
+ h, p, err := net.SplitHostPort(addr)
+ if err != nil {
+ return "", -1, err
+ }
+ if p == "" {
+ return "", -1, &net.AddrError{Err: "missing port in address", Addr: addr}
+ }
+
+ pi, err := strconv.Atoi(p)
+ if err != nil {
+ return "", -1, err
+ }
+ return h, pi, nil
+}
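A short usage sketch: unlike `net.SplitHostPort`, this wrapper also converts the port to an int and maps failures to a port of -1:

```go
package main

import (
	"fmt"

	"github.com/go-openapi/swag"
)

func main() {
	host, port, err := swag.SplitHostPort("localhost:8080")
	fmt.Println(host, port, err) // localhost 8080 <nil>

	// a missing port surfaces the net.SplitHostPort error and port -1
	_, port, err = swag.SplitHostPort("localhost")
	fmt.Println(port, err != nil) // -1 true
}
```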
diff --git a/vendor/github.com/go-openapi/swag/path.go b/vendor/github.com/go-openapi/swag/path.go
new file mode 100644
index 000000000..941bd0176
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/path.go
@@ -0,0 +1,59 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+)
+
+const (
+ // GOPATHKey represents the env key for gopath
+ GOPATHKey = "GOPATH"
+)
+
+// FindInSearchPath finds a package in a provided list of paths
+func FindInSearchPath(searchPath, pkg string) string {
+ pathsList := filepath.SplitList(searchPath)
+ for _, path := range pathsList {
+ if evaluatedPath, err := filepath.EvalSymlinks(filepath.Join(path, "src", pkg)); err == nil {
+ if _, err := os.Stat(evaluatedPath); err == nil {
+ return evaluatedPath
+ }
+ }
+ }
+ return ""
+}
+
+// FindInGoSearchPath finds a package in $GOPATH and $GOROOT
+func FindInGoSearchPath(pkg string) string {
+ return FindInSearchPath(FullGoSearchPath(), pkg)
+}
+
+// FullGoSearchPath gets the search paths for finding packages
+func FullGoSearchPath() string {
+ allPaths := os.Getenv(GOPATHKey)
+ if allPaths == "" {
+ allPaths = filepath.Join(os.Getenv("HOME"), "go")
+ }
+ if allPaths != "" {
+ allPaths = strings.Join([]string{allPaths, runtime.GOROOT()}, ":")
+ } else {
+ allPaths = runtime.GOROOT()
+ }
+ return allPaths
+}
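These helpers predate Go modules and are only meaningful in GOPATH-style layouts; a sketch of how they might be called:

```go
package main

import (
	"fmt"

	"github.com/go-openapi/swag"
)

func main() {
	// $GOPATH (or $HOME/go as a fallback) joined with GOROOT
	fmt.Println(swag.FullGoSearchPath())

	// probes each search-path entry's src/ directory, resolving symlinks
	if p := swag.FindInGoSearchPath("github.com/go-openapi/swag"); p != "" {
		fmt.Println("found sources at:", p)
	}
}
```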
diff --git a/vendor/github.com/go-openapi/swag/post_go18.go b/vendor/github.com/go-openapi/swag/post_go18.go
new file mode 100644
index 000000000..f5228b82c
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/post_go18.go
@@ -0,0 +1,24 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build go1.8
+// +build go1.8
+
+package swag
+
+import "net/url"
+
+func pathUnescape(path string) (string, error) {
+ return url.PathUnescape(path)
+}
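This go1.8 file and its pre-go1.8 twin below exist because `url.PathUnescape` only landed in Go 1.8; the fallback uses `url.QueryUnescape`, which treats `+` differently. A minimal sketch of that difference:

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	p, _ := url.PathUnescape("a+b%20c")  // "a+b c": '+' stays literal in paths
	q, _ := url.QueryUnescape("a+b%20c") // "a b c": '+' decodes to a space in queries
	fmt.Println(p, "|", q)
}
```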
diff --git a/vendor/github.com/go-openapi/swag/post_go19.go b/vendor/github.com/go-openapi/swag/post_go19.go
new file mode 100644
index 000000000..7c7da9c08
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/post_go19.go
@@ -0,0 +1,68 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build go1.9
+// +build go1.9
+
+package swag
+
+import (
+ "sort"
+ "sync"
+)
+
+// indexOfInitialisms is a thread-safe implementation of the sorted index of initialisms.
+// Since go1.9, this may be implemented with sync.Map.
+type indexOfInitialisms struct {
+ sortMutex *sync.Mutex
+ index *sync.Map
+}
+
+func newIndexOfInitialisms() *indexOfInitialisms {
+ return &indexOfInitialisms{
+ sortMutex: new(sync.Mutex),
+ index: new(sync.Map),
+ }
+}
+
+func (m *indexOfInitialisms) load(initial map[string]bool) *indexOfInitialisms {
+ m.sortMutex.Lock()
+ defer m.sortMutex.Unlock()
+ for k, v := range initial {
+ m.index.Store(k, v)
+ }
+ return m
+}
+
+func (m *indexOfInitialisms) isInitialism(key string) bool {
+ _, ok := m.index.Load(key)
+ return ok
+}
+
+func (m *indexOfInitialisms) add(key string) *indexOfInitialisms {
+ m.index.Store(key, true)
+ return m
+}
+
+func (m *indexOfInitialisms) sorted() (result []string) {
+ m.sortMutex.Lock()
+ defer m.sortMutex.Unlock()
+ m.index.Range(func(key, value interface{}) bool {
+ k := key.(string)
+ result = append(result, k)
+ return true
+ })
+ sort.Sort(sort.Reverse(byInitialism(result)))
+ return
+}
diff --git a/vendor/github.com/go-openapi/swag/pre_go18.go b/vendor/github.com/go-openapi/swag/pre_go18.go
new file mode 100644
index 000000000..2757d9b95
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/pre_go18.go
@@ -0,0 +1,24 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !go1.8
+// +build !go1.8
+
+package swag
+
+import "net/url"
+
+func pathUnescape(path string) (string, error) {
+ return url.QueryUnescape(path)
+}
diff --git a/vendor/github.com/go-openapi/swag/pre_go19.go b/vendor/github.com/go-openapi/swag/pre_go19.go
new file mode 100644
index 000000000..0565db377
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/pre_go19.go
@@ -0,0 +1,70 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !go1.9
+// +build !go1.9
+
+package swag
+
+import (
+ "sort"
+ "sync"
+)
+
+// indexOfInitialisms is a thread-safe implementation of the sorted index of initialisms.
+// Before go1.9, this may be implemented with a mutex on the map.
+type indexOfInitialisms struct {
+ getMutex *sync.Mutex
+ index map[string]bool
+}
+
+func newIndexOfInitialisms() *indexOfInitialisms {
+ return &indexOfInitialisms{
+ getMutex: new(sync.Mutex),
+ index: make(map[string]bool, 50),
+ }
+}
+
+func (m *indexOfInitialisms) load(initial map[string]bool) *indexOfInitialisms {
+ m.getMutex.Lock()
+ defer m.getMutex.Unlock()
+ for k, v := range initial {
+ m.index[k] = v
+ }
+ return m
+}
+
+func (m *indexOfInitialisms) isInitialism(key string) bool {
+ m.getMutex.Lock()
+ defer m.getMutex.Unlock()
+ _, ok := m.index[key]
+ return ok
+}
+
+func (m *indexOfInitialisms) add(key string) *indexOfInitialisms {
+ m.getMutex.Lock()
+ defer m.getMutex.Unlock()
+ m.index[key] = true
+ return m
+}
+
+func (m *indexOfInitialisms) sorted() (result []string) {
+ m.getMutex.Lock()
+ defer m.getMutex.Unlock()
+ for k := range m.index {
+ result = append(result, k)
+ }
+ sort.Sort(sort.Reverse(byInitialism(result)))
+ return
+}
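Both `indexOfInitialisms` variants (the `sync.Map` one for go1.9+ above, and this mutex-guarded map) expose the same chainable method set, so the rest of the package never cares which was compiled in. A hypothetical in-package snippet:

```go
package swag

import "fmt"

// illustrative sketch of the index API shared by both build variants
func exampleIndex() {
	idx := newIndexOfInitialisms().
		load(map[string]bool{"HTTP": true, "ID": true}).
		add("GTS") // "GTS" is just an example word

	fmt.Println(idx.isInitialism("ID")) // true
	fmt.Println(idx.sorted())           // longest first: [HTTP GTS ID]
}
```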
diff --git a/vendor/github.com/go-openapi/swag/split.go b/vendor/github.com/go-openapi/swag/split.go
new file mode 100644
index 000000000..a1825fb7d
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/split.go
@@ -0,0 +1,262 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "unicode"
+)
+
+var nameReplaceTable = map[rune]string{
+ '@': "At ",
+ '&': "And ",
+ '|': "Pipe ",
+ '$': "Dollar ",
+ '!': "Bang ",
+ '-': "",
+ '_': "",
+}
+
+type (
+ splitter struct {
+ postSplitInitialismCheck bool
+ initialisms []string
+ }
+
+ splitterOption func(*splitter) *splitter
+)
+
+// split calls the splitter with default options; the splitter itself offers more control and post-split options
+func split(str string) []string {
+ lexems := newSplitter().split(str)
+ result := make([]string, 0, len(lexems))
+
+ for _, lexem := range lexems {
+ result = append(result, lexem.GetOriginal())
+ }
+
+ return result
+}
+
+func (s *splitter) split(str string) []nameLexem {
+ return s.toNameLexems(str)
+}
+
+func newSplitter(options ...splitterOption) *splitter {
+ splitter := &splitter{
+ postSplitInitialismCheck: false,
+ initialisms: initialisms,
+ }
+
+ for _, option := range options {
+ splitter = option(splitter)
+ }
+
+ return splitter
+}
+
+// withPostSplitInitialismCheck allows catching initialisms after the main split process
+func withPostSplitInitialismCheck(s *splitter) *splitter {
+ s.postSplitInitialismCheck = true
+ return s
+}
+
+type (
+ initialismMatch struct {
+ start, end int
+ body []rune
+ complete bool
+ }
+ initialismMatches []*initialismMatch
+)
+
+func (s *splitter) toNameLexems(name string) []nameLexem {
+ nameRunes := []rune(name)
+ matches := s.gatherInitialismMatches(nameRunes)
+ return s.mapMatchesToNameLexems(nameRunes, matches)
+}
+
+func (s *splitter) gatherInitialismMatches(nameRunes []rune) initialismMatches {
+ matches := make(initialismMatches, 0)
+
+ for currentRunePosition, currentRune := range nameRunes {
+ newMatches := make(initialismMatches, 0, len(matches))
+
+ // check current initialism matches
+ for _, match := range matches {
+ if keepCompleteMatch := match.complete; keepCompleteMatch {
+ newMatches = append(newMatches, match)
+ continue
+ }
+
+ // drop failed match
+ currentMatchRune := match.body[currentRunePosition-match.start]
+ if !s.initialismRuneEqual(currentMatchRune, currentRune) {
+ continue
+ }
+
+ // try to complete ongoing match
+ if currentRunePosition-match.start == len(match.body)-1 {
+ // we are close; the next step is to check the rune ahead:
+ // if it is a lowercase letter, then it is not the end of the match
+ // but the beginning of the next word
+
+ if currentRunePosition < len(nameRunes)-1 {
+ nextRune := nameRunes[currentRunePosition+1]
+ if newWord := unicode.IsLower(nextRune); newWord {
+ // oh ok, it was the start of a new word
+ continue
+ }
+ }
+
+ match.complete = true
+ match.end = currentRunePosition
+ }
+
+ newMatches = append(newMatches, match)
+ }
+
+ // check for new initialism matches
+ for _, initialism := range s.initialisms {
+ initialismRunes := []rune(initialism)
+ if s.initialismRuneEqual(initialismRunes[0], currentRune) {
+ newMatches = append(newMatches, &initialismMatch{
+ start: currentRunePosition,
+ body: initialismRunes,
+ complete: false,
+ })
+ }
+ }
+
+ matches = newMatches
+ }
+
+ return matches
+}
+
+func (s *splitter) mapMatchesToNameLexems(nameRunes []rune, matches initialismMatches) []nameLexem {
+ nameLexems := make([]nameLexem, 0)
+
+ var lastAcceptedMatch *initialismMatch
+ for _, match := range matches {
+ if !match.complete {
+ continue
+ }
+
+ if firstMatch := lastAcceptedMatch == nil; firstMatch {
+ nameLexems = append(nameLexems, s.breakCasualString(nameRunes[:match.start])...)
+ nameLexems = append(nameLexems, s.breakInitialism(string(match.body)))
+
+ lastAcceptedMatch = match
+
+ continue
+ }
+
+ if overlappedMatch := match.start <= lastAcceptedMatch.end; overlappedMatch {
+ continue
+ }
+
+ middle := nameRunes[lastAcceptedMatch.end+1 : match.start]
+ nameLexems = append(nameLexems, s.breakCasualString(middle)...)
+ nameLexems = append(nameLexems, s.breakInitialism(string(match.body)))
+
+ lastAcceptedMatch = match
+ }
+
+ // we have not found any accepted matches
+ if lastAcceptedMatch == nil {
+ return s.breakCasualString(nameRunes)
+ }
+
+ if lastAcceptedMatch.end+1 != len(nameRunes) {
+ rest := nameRunes[lastAcceptedMatch.end+1:]
+ nameLexems = append(nameLexems, s.breakCasualString(rest)...)
+ }
+
+ return nameLexems
+}
+
+func (s *splitter) initialismRuneEqual(a, b rune) bool {
+ return a == b
+}
+
+func (s *splitter) breakInitialism(original string) nameLexem {
+ return newInitialismNameLexem(original, original)
+}
+
+func (s *splitter) breakCasualString(str []rune) []nameLexem {
+ segments := make([]nameLexem, 0)
+ currentSegment := ""
+
+ addCasualNameLexem := func(original string) {
+ segments = append(segments, newCasualNameLexem(original))
+ }
+
+ addInitialismNameLexem := func(original, match string) {
+ segments = append(segments, newInitialismNameLexem(original, match))
+ }
+
+ addNameLexem := func(original string) {
+ if s.postSplitInitialismCheck {
+ for _, initialism := range s.initialisms {
+ if upper(initialism) == upper(original) {
+ addInitialismNameLexem(original, initialism)
+ return
+ }
+ }
+ }
+
+ addCasualNameLexem(original)
+ }
+
+ for _, rn := range string(str) {
+ if replace, found := nameReplaceTable[rn]; found {
+ if currentSegment != "" {
+ addNameLexem(currentSegment)
+ currentSegment = ""
+ }
+
+ if replace != "" {
+ addNameLexem(replace)
+ }
+
+ continue
+ }
+
+ if !unicode.In(rn, unicode.L, unicode.M, unicode.N, unicode.Pc) {
+ if currentSegment != "" {
+ addNameLexem(currentSegment)
+ currentSegment = ""
+ }
+
+ continue
+ }
+
+ if unicode.IsUpper(rn) {
+ if currentSegment != "" {
+ addNameLexem(currentSegment)
+ }
+ currentSegment = ""
+ }
+
+ currentSegment += string(rn)
+ }
+
+ if currentSegment != "" {
+ addNameLexem(currentSegment)
+ }
+
+ return segments
+}
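End-to-end behavior of the splitter, assuming the initialism table initialized in util.go further down in this diff; a hypothetical in-package snippet:

```go
package swag

import "fmt"

// illustrative sketch: default split vs. post-split initialism check
func exampleSplit() {
	// exact-case initialisms are caught during the scan itself
	fmt.Println(split("findHTTPThing")) // [find HTTP Thing]

	// lower-cased initialisms are only caught by the post-split check
	for _, l := range newSplitter(withPostSplitInitialismCheck).split("json_id") {
		fmt.Println(l.GetOriginal(), l.IsInitialism()) // json true, then id true
	}
}
```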
diff --git a/vendor/github.com/go-openapi/swag/util.go b/vendor/github.com/go-openapi/swag/util.go
new file mode 100644
index 000000000..d971fbe34
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/util.go
@@ -0,0 +1,394 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "reflect"
+ "strings"
+ "unicode"
+)
+
+// commonInitialisms are common acronyms that are kept as whole uppercased words.
+var commonInitialisms *indexOfInitialisms
+
+// initialisms is a slice of sorted initialisms
+var initialisms []string
+
+var isInitialism func(string) bool
+
+// GoNamePrefixFunc sets an optional rule to prefix go names
+// which do not start with a letter.
+//
+// e.g. to help convert "123" into "{prefix}123"
+//
+// The default is to prefix with "X"
+var GoNamePrefixFunc func(string) string
+
+func init() {
+ // Taken from https://github.com/golang/lint/blob/3390df4df2787994aea98de825b964ac7944b817/lint.go#L732-L769
+ var configuredInitialisms = map[string]bool{
+ "ACL": true,
+ "API": true,
+ "ASCII": true,
+ "CPU": true,
+ "CSS": true,
+ "DNS": true,
+ "EOF": true,
+ "GUID": true,
+ "HTML": true,
+ "HTTPS": true,
+ "HTTP": true,
+ "ID": true,
+ "IP": true,
+ "IPv4": true,
+ "IPv6": true,
+ "JSON": true,
+ "LHS": true,
+ "OAI": true,
+ "QPS": true,
+ "RAM": true,
+ "RHS": true,
+ "RPC": true,
+ "SLA": true,
+ "SMTP": true,
+ "SQL": true,
+ "SSH": true,
+ "TCP": true,
+ "TLS": true,
+ "TTL": true,
+ "UDP": true,
+ "UI": true,
+ "UID": true,
+ "UUID": true,
+ "URI": true,
+ "URL": true,
+ "UTF8": true,
+ "VM": true,
+ "XML": true,
+ "XMPP": true,
+ "XSRF": true,
+ "XSS": true,
+ }
+
+ // a thread-safe index of initialisms
+ commonInitialisms = newIndexOfInitialisms().load(configuredInitialisms)
+ initialisms = commonInitialisms.sorted()
+
+ // a function reporting whether a given word is a known initialism
+ isInitialism = commonInitialisms.isInitialism
+}
+
+const (
+ // collectionFormatComma = "csv"
+ collectionFormatSpace = "ssv"
+ collectionFormatTab = "tsv"
+ collectionFormatPipe = "pipes"
+ collectionFormatMulti = "multi"
+)
+
+// JoinByFormat joins a string array by a known format (e.g. swagger's collectionFormat attribute):
+//
+// ssv: space separated value
+// tsv: tab separated value
+// pipes: pipe (|) separated value
+// csv: comma separated value (default)
+func JoinByFormat(data []string, format string) []string {
+ if len(data) == 0 {
+ return data
+ }
+ var sep string
+ switch format {
+ case collectionFormatSpace:
+ sep = " "
+ case collectionFormatTab:
+ sep = "\t"
+ case collectionFormatPipe:
+ sep = "|"
+ case collectionFormatMulti:
+ return data
+ default:
+ sep = ","
+ }
+ return []string{strings.Join(data, sep)}
+}
+
+// SplitByFormat splits a string by a known format:
+//
+// ssv: space separated value
+// tsv: tab separated value
+// pipes: pipe (|) separated value
+// csv: comma separated value (default)
+func SplitByFormat(data, format string) []string {
+ if data == "" {
+ return nil
+ }
+ var sep string
+ switch format {
+ case collectionFormatSpace:
+ sep = " "
+ case collectionFormatTab:
+ sep = "\t"
+ case collectionFormatPipe:
+ sep = "|"
+ case collectionFormatMulti:
+ return nil
+ default:
+ sep = ","
+ }
+ var result []string
+ for _, s := range strings.Split(data, sep) {
+ if ts := strings.TrimSpace(s); ts != "" {
+ result = append(result, ts)
+ }
+ }
+ return result
+}
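A short round trip through the two collection-format helpers:

```go
package main

import (
	"fmt"

	"github.com/go-openapi/swag"
)

func main() {
	joined := swag.JoinByFormat([]string{"a", "b", "c"}, "pipes")
	fmt.Println(joined) // [a|b|c]

	parts := swag.SplitByFormat("a| b |c", "pipes")
	fmt.Println(parts) // [a b c] -- items are trimmed, empties dropped
}
```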
+
+type byInitialism []string
+
+func (s byInitialism) Len() int {
+ return len(s)
+}
+func (s byInitialism) Swap(i, j int) {
+ s[i], s[j] = s[j], s[i]
+}
+func (s byInitialism) Less(i, j int) bool {
+ if len(s[i]) != len(s[j]) {
+ return len(s[i]) < len(s[j])
+ }
+
+ return strings.Compare(s[i], s[j]) > 0
+}
+
+// trim removes leading and trailing spaces
+func trim(str string) string {
+ return strings.Trim(str, " ")
+}
+
+// upper trims the string, then applies strings.ToUpper()
+func upper(str string) string {
+ return strings.ToUpper(trim(str))
+}
+
+// lower trims the string, then applies strings.ToLower()
+func lower(str string) string {
+ return strings.ToLower(trim(str))
+}
+
+// Camelize an uppercased word
+func Camelize(word string) (camelized string) {
+ for pos, ru := range []rune(word) {
+ if pos > 0 {
+ camelized += string(unicode.ToLower(ru))
+ } else {
+ camelized += string(unicode.ToUpper(ru))
+ }
+ }
+ return
+}
+
+// ToFileName lowercases and underscores a go type name
+func ToFileName(name string) string {
+ in := split(name)
+ out := make([]string, 0, len(in))
+
+ for _, w := range in {
+ out = append(out, lower(w))
+ }
+
+ return strings.Join(out, "_")
+}
+
+// ToCommandName lowercases and underscores a go type name
+func ToCommandName(name string) string {
+ in := split(name)
+ out := make([]string, 0, len(in))
+
+ for _, w := range in {
+ out = append(out, lower(w))
+ }
+ return strings.Join(out, "-")
+}
+
+// ToHumanNameLower represents a code name as a human-readable series of words
+func ToHumanNameLower(name string) string {
+ in := newSplitter(withPostSplitInitialismCheck).split(name)
+ out := make([]string, 0, len(in))
+
+ for _, w := range in {
+ if !w.IsInitialism() {
+ out = append(out, lower(w.GetOriginal()))
+ } else {
+ out = append(out, w.GetOriginal())
+ }
+ }
+
+ return strings.Join(out, " ")
+}
+
+// ToHumanNameTitle represents a code name as a human-readable series of words with the first letter of each word capitalized
+func ToHumanNameTitle(name string) string {
+ in := newSplitter(withPostSplitInitialismCheck).split(name)
+
+ out := make([]string, 0, len(in))
+ for _, w := range in {
+ original := w.GetOriginal()
+ if !w.IsInitialism() {
+ out = append(out, Camelize(original))
+ } else {
+ out = append(out, original)
+ }
+ }
+ return strings.Join(out, " ")
+}
+
+// ToJSONName camelcases a name which can be underscored or pascal cased
+func ToJSONName(name string) string {
+ in := split(name)
+ out := make([]string, 0, len(in))
+
+ for i, w := range in {
+ if i == 0 {
+ out = append(out, lower(w))
+ continue
+ }
+ out = append(out, Camelize(w))
+ }
+ return strings.Join(out, "")
+}
+
+// ToVarName camelcases a name which can be underscored or pascal cased
+func ToVarName(name string) string {
+ res := ToGoName(name)
+ if isInitialism(res) {
+ return lower(res)
+ }
+ if len(res) <= 1 {
+ return lower(res)
+ }
+ return lower(res[:1]) + res[1:]
+}
+
+// ToGoName translates a swagger name which can be underscored or camel cased to a name that golint likes
+func ToGoName(name string) string {
+ lexems := newSplitter(withPostSplitInitialismCheck).split(name)
+
+ result := ""
+ for _, lexem := range lexems {
+ goName := lexem.GetUnsafeGoName()
+
+ // to support old behavior
+ if lexem.IsInitialism() {
+ goName = upper(goName)
+ }
+ result += goName
+ }
+
+ if len(result) > 0 {
+ // Only prefix with X when the first character isn't an ASCII letter
+ first := []rune(result)[0]
+ if !unicode.IsLetter(first) || (first > unicode.MaxASCII && !unicode.IsUpper(first)) {
+ if GoNamePrefixFunc == nil {
+ return "X" + result
+ }
+ result = GoNamePrefixFunc(name) + result
+ }
+ first = []rune(result)[0]
+ if unicode.IsLetter(first) && !unicode.IsUpper(first) {
+ result = string(append([]rune{unicode.ToUpper(first)}, []rune(result)[1:]...))
+ }
+ }
+
+ return result
+}
+
+// ContainsStrings searches a slice of strings for a case-sensitive match
+func ContainsStrings(coll []string, item string) bool {
+ for _, a := range coll {
+ if a == item {
+ return true
+ }
+ }
+ return false
+}
+
+// ContainsStringsCI searches a slice of strings for a case-insensitive match
+func ContainsStringsCI(coll []string, item string) bool {
+ for _, a := range coll {
+ if strings.EqualFold(a, item) {
+ return true
+ }
+ }
+ return false
+}
+
+type zeroable interface {
+ IsZero() bool
+}
+
+// IsZero returns true when the value passed into the function is a zero value.
+// This allows for safer checking of interface values.
+func IsZero(data interface{}) bool {
+ v := reflect.ValueOf(data)
+ // check for nil data
+ switch v.Kind() {
+ case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
+ if v.IsNil() {
+ return true
+ }
+ }
+
+ // check for things that have an IsZero method instead
+ if vv, ok := data.(zeroable); ok {
+ return vv.IsZero()
+ }
+
+ // continue with slightly more complex reflection
+ switch v.Kind() {
+ case reflect.String:
+ return v.Len() == 0
+ case reflect.Bool:
+ return !v.Bool()
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return v.Int() == 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return v.Uint() == 0
+ case reflect.Float32, reflect.Float64:
+ return v.Float() == 0
+ case reflect.Struct, reflect.Array:
+ return reflect.DeepEqual(data, reflect.Zero(v.Type()).Interface())
+ case reflect.Invalid:
+ return true
+ default:
+ return false
+ }
+}
+
+// AddInitialisms adds additional initialisms
+func AddInitialisms(words ...string) {
+ for _, word := range words {
+ // commonInitialisms[upper(word)] = true
+ commonInitialisms.add(upper(word))
+ }
+ // sort again
+ initialisms = commonInitialisms.sorted()
+}
+
+// CommandLineOptionsGroup represents a group of user-defined command line options
+type CommandLineOptionsGroup struct {
+ ShortDescription string
+ LongDescription string
+ Options interface{}
+}
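Putting the exported name manglers together; expected outputs as read from the code above (a sketch worth checking against the package's own tests):

```go
package main

import (
	"fmt"

	"github.com/go-openapi/swag"
)

func main() {
	fmt.Println(swag.ToGoName("sample_text"))   // SampleText
	fmt.Println(swag.ToGoName("json_data"))     // JSONData (JSON is a known initialism)
	fmt.Println(swag.ToJSONName("sample_text")) // sampleText
	fmt.Println(swag.ToFileName("SampleText"))  // sample_text
	fmt.Println(swag.ToVarName("Id"))           // id (whole-name initialisms are lowered)

	// extend the initialism table at runtime; later calls pick it up
	swag.AddInitialisms("GTS")
	fmt.Println(swag.ToGoName("gts_client")) // GTSClient
}
```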
diff --git a/vendor/github.com/go-openapi/swag/yaml.go b/vendor/github.com/go-openapi/swag/yaml.go
new file mode 100644
index 000000000..f09ee609f
--- /dev/null
+++ b/vendor/github.com/go-openapi/swag/yaml.go
@@ -0,0 +1,450 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package swag
+
+import (
+ "encoding/json"
+ "fmt"
+ "path/filepath"
+ "strconv"
+
+ "github.com/mailru/easyjson/jlexer"
+ "github.com/mailru/easyjson/jwriter"
+ yaml "gopkg.in/yaml.v3"
+)
+
+// YAMLMatcher matches yaml
+func YAMLMatcher(path string) bool {
+ ext := filepath.Ext(path)
+ return ext == ".yaml" || ext == ".yml"
+}
+
+// YAMLToJSON converts YAML unmarshaled data into JSON-compatible data
+func YAMLToJSON(data interface{}) (json.RawMessage, error) {
+ jm, err := transformData(data)
+ if err != nil {
+ return nil, err
+ }
+ b, err := WriteJSON(jm)
+ return json.RawMessage(b), err
+}
+
+// BytesToYAMLDoc converts a byte slice into a YAML document
+func BytesToYAMLDoc(data []byte) (interface{}, error) {
+ var document yaml.Node // preserve order that is present in the document
+ if err := yaml.Unmarshal(data, &document); err != nil {
+ return nil, err
+ }
+ if document.Kind != yaml.DocumentNode || len(document.Content) != 1 || document.Content[0].Kind != yaml.MappingNode {
+ return nil, fmt.Errorf("only YAML documents that are objects are supported")
+ }
+ return &document, nil
+}
+
+func yamlNode(root *yaml.Node) (interface{}, error) {
+ switch root.Kind {
+ case yaml.DocumentNode:
+ return yamlDocument(root)
+ case yaml.SequenceNode:
+ return yamlSequence(root)
+ case yaml.MappingNode:
+ return yamlMapping(root)
+ case yaml.ScalarNode:
+ return yamlScalar(root)
+ case yaml.AliasNode:
+ return yamlNode(root.Alias)
+ default:
+ return nil, fmt.Errorf("unsupported YAML node type: %v", root.Kind)
+ }
+}
+
+func yamlDocument(node *yaml.Node) (interface{}, error) {
+ if len(node.Content) != 1 {
+ return nil, fmt.Errorf("unexpected YAML Document node content length: %d", len(node.Content))
+ }
+ return yamlNode(node.Content[0])
+}
+
+func yamlMapping(node *yaml.Node) (interface{}, error) {
+ m := make(JSONMapSlice, len(node.Content)/2)
+
+ var j int
+ for i := 0; i < len(node.Content); i += 2 {
+ var nmi JSONMapItem
+ k, err := yamlStringScalarC(node.Content[i])
+ if err != nil {
+ return nil, fmt.Errorf("unable to decode YAML map key: %w", err)
+ }
+ nmi.Key = k
+ v, err := yamlNode(node.Content[i+1])
+ if err != nil {
+ return nil, fmt.Errorf("unable to process YAML map value for key %q: %w", k, err)
+ }
+ nmi.Value = v
+ m[j] = nmi
+ j++
+ }
+ return m, nil
+}
+
+func yamlSequence(node *yaml.Node) (interface{}, error) {
+ s := make([]interface{}, 0)
+
+ for i := 0; i < len(node.Content); i++ {
+
+ v, err := yamlNode(node.Content[i])
+ if err != nil {
+ return nil, fmt.Errorf("unable to decode YAML sequence value: %w", err)
+ }
+ s = append(s, v)
+ }
+ return s, nil
+}
+
+const ( // See https://yaml.org/type/
+ yamlStringScalar = "tag:yaml.org,2002:str"
+ yamlIntScalar = "tag:yaml.org,2002:int"
+ yamlBoolScalar = "tag:yaml.org,2002:bool"
+ yamlFloatScalar = "tag:yaml.org,2002:float"
+ yamlTimestamp = "tag:yaml.org,2002:timestamp"
+ yamlNull = "tag:yaml.org,2002:null"
+)
+
+func yamlScalar(node *yaml.Node) (interface{}, error) {
+ switch node.LongTag() {
+ case yamlStringScalar:
+ return node.Value, nil
+ case yamlBoolScalar:
+ b, err := strconv.ParseBool(node.Value)
+ if err != nil {
+ return nil, fmt.Errorf("unable to process scalar node. Got %q. Expecting bool content: %w", node.Value, err)
+ }
+ return b, nil
+ case yamlIntScalar:
+ i, err := strconv.ParseInt(node.Value, 10, 64)
+ if err != nil {
+ return nil, fmt.Errorf("unable to process scalar node. Got %q. Expecting integer content: %w", node.Value, err)
+ }
+ return i, nil
+ case yamlFloatScalar:
+ f, err := strconv.ParseFloat(node.Value, 64)
+ if err != nil {
+ return nil, fmt.Errorf("unable to process scalar node. Got %q. Expecting float content: %w", node.Value, err)
+ }
+ return f, nil
+ case yamlTimestamp:
+ return node.Value, nil
+ case yamlNull:
+ return nil, nil
+ default:
+ return nil, fmt.Errorf("YAML tag %q is not supported", node.LongTag())
+ }
+}
+
+func yamlStringScalarC(node *yaml.Node) (string, error) {
+ if node.Kind != yaml.ScalarNode {
+ return "", fmt.Errorf("expecting a string scalar but got %q", node.Kind)
+ }
+ switch node.LongTag() {
+ case yamlStringScalar, yamlIntScalar, yamlFloatScalar:
+ return node.Value, nil
+ default:
+ return "", fmt.Errorf("YAML tag %q is not supported as map key", node.LongTag())
+ }
+}
+
+// JSONMapSlice represents a JSON object, with the order of keys maintained
+type JSONMapSlice []JSONMapItem
+
+// MarshalJSON renders a JSONMapSlice as JSON
+func (s JSONMapSlice) MarshalJSON() ([]byte, error) {
+ w := &jwriter.Writer{Flags: jwriter.NilMapAsEmpty | jwriter.NilSliceAsEmpty}
+ s.MarshalEasyJSON(w)
+ return w.BuildBytes()
+}
+
+// MarshalEasyJSON renders a JSONMapSlice as JSON, using easyJSON
+func (s JSONMapSlice) MarshalEasyJSON(w *jwriter.Writer) {
+ w.RawByte('{')
+
+ ln := len(s)
+ last := ln - 1
+ for i := 0; i < ln; i++ {
+ s[i].MarshalEasyJSON(w)
+ if i != last { // not the last item: emit a separator
+ w.RawByte(',')
+ }
+ }
+
+ w.RawByte('}')
+}
+
+// UnmarshalJSON makes a JSONMapSlice from JSON
+func (s *JSONMapSlice) UnmarshalJSON(data []byte) error {
+ l := jlexer.Lexer{Data: data}
+ s.UnmarshalEasyJSON(&l)
+ return l.Error()
+}
+
+// UnmarshalEasyJSON makes a JSONMapSlice from JSON, using easyJSON
+func (s *JSONMapSlice) UnmarshalEasyJSON(in *jlexer.Lexer) {
+ if in.IsNull() {
+ in.Skip()
+ return
+ }
+
+ var result JSONMapSlice
+ in.Delim('{')
+ for !in.IsDelim('}') {
+ var mi JSONMapItem
+ mi.UnmarshalEasyJSON(in)
+ result = append(result, mi)
+ }
+ *s = result
+}
+
+func (s JSONMapSlice) MarshalYAML() (interface{}, error) {
+ var n yaml.Node
+ n.Kind = yaml.DocumentNode
+ var nodes []*yaml.Node
+ for _, item := range s {
+ nn, err := json2yaml(item.Value)
+ if err != nil {
+ return nil, err
+ }
+ ns := []*yaml.Node{
+ {
+ Kind: yaml.ScalarNode,
+ Tag: yamlStringScalar,
+ Value: item.Key,
+ },
+ nn,
+ }
+ nodes = append(nodes, ns...)
+ }
+
+ n.Content = []*yaml.Node{
+ {
+ Kind: yaml.MappingNode,
+ Content: nodes,
+ },
+ }
+
+ return yaml.Marshal(&n)
+}
+
+func json2yaml(item interface{}) (*yaml.Node, error) {
+ switch val := item.(type) {
+ case JSONMapSlice:
+ var n yaml.Node
+ n.Kind = yaml.MappingNode
+ for i := range val {
+ childNode, err := json2yaml(&val[i].Value)
+ if err != nil {
+ return nil, err
+ }
+ n.Content = append(n.Content, &yaml.Node{
+ Kind: yaml.ScalarNode,
+ Tag: yamlStringScalar,
+ Value: val[i].Key,
+ }, childNode)
+ }
+ return &n, nil
+ case map[string]interface{}:
+ var n yaml.Node
+ n.Kind = yaml.MappingNode
+ for k, v := range val {
+ childNode, err := json2yaml(v)
+ if err != nil {
+ return nil, err
+ }
+ n.Content = append(n.Content, &yaml.Node{
+ Kind: yaml.ScalarNode,
+ Tag: yamlStringScalar,
+ Value: k,
+ }, childNode)
+ }
+ return &n, nil
+ case []interface{}:
+ var n yaml.Node
+ n.Kind = yaml.SequenceNode
+ for i := range val {
+ childNode, err := json2yaml(val[i])
+ if err != nil {
+ return nil, err
+ }
+ n.Content = append(n.Content, childNode)
+ }
+ return &n, nil
+ case string:
+ return &yaml.Node{
+ Kind: yaml.ScalarNode,
+ Tag: yamlStringScalar,
+ Value: val,
+ }, nil
+ case float64:
+ return &yaml.Node{
+ Kind: yaml.ScalarNode,
+ Tag: yamlFloatScalar,
+ Value: strconv.FormatFloat(val, 'f', -1, 64),
+ }, nil
+ case int64:
+ return &yaml.Node{
+ Kind: yaml.ScalarNode,
+ Tag: yamlIntScalar,
+ Value: strconv.FormatInt(val, 10),
+ }, nil
+ case uint64:
+ return &yaml.Node{
+ Kind: yaml.ScalarNode,
+ Tag: yamlIntScalar,
+ Value: strconv.FormatUint(val, 10),
+ }, nil
+ case bool:
+ return &yaml.Node{
+ Kind: yaml.ScalarNode,
+ Tag: yamlBoolScalar,
+ Value: strconv.FormatBool(val),
+ }, nil
+ }
+ return nil, nil
+}
+
+// JSONMapItem represents a key/value pair in a JSON object held by JSONMapSlice
+type JSONMapItem struct {
+ Key string
+ Value interface{}
+}
+
+// MarshalJSON renders a JSONMapItem as JSON
+func (s JSONMapItem) MarshalJSON() ([]byte, error) {
+ w := &jwriter.Writer{Flags: jwriter.NilMapAsEmpty | jwriter.NilSliceAsEmpty}
+ s.MarshalEasyJSON(w)
+ return w.BuildBytes()
+}
+
+// MarshalEasyJSON renders a JSONMapItem as JSON, using easyJSON
+func (s JSONMapItem) MarshalEasyJSON(w *jwriter.Writer) {
+ w.String(s.Key)
+ w.RawByte(':')
+ w.Raw(WriteJSON(s.Value))
+}
+
+// UnmarshalJSON makes a JSONMapItem from JSON
+func (s *JSONMapItem) UnmarshalJSON(data []byte) error {
+ l := jlexer.Lexer{Data: data}
+ s.UnmarshalEasyJSON(&l)
+ return l.Error()
+}
+
+// UnmarshalEasyJSON makes a JSONMapItem from JSON, using easyJSON
+func (s *JSONMapItem) UnmarshalEasyJSON(in *jlexer.Lexer) {
+ key := in.UnsafeString()
+ in.WantColon()
+ value := in.Interface()
+ in.WantComma()
+ s.Key = key
+ s.Value = value
+}
+
+func transformData(input interface{}) (out interface{}, err error) {
+ format := func(t interface{}) (string, error) {
+ switch k := t.(type) {
+ case string:
+ return k, nil
+ case uint:
+ return strconv.FormatUint(uint64(k), 10), nil
+ case uint8:
+ return strconv.FormatUint(uint64(k), 10), nil
+ case uint16:
+ return strconv.FormatUint(uint64(k), 10), nil
+ case uint32:
+ return strconv.FormatUint(uint64(k), 10), nil
+ case uint64:
+ return strconv.FormatUint(k, 10), nil
+ case int:
+ return strconv.Itoa(k), nil
+ case int8:
+ return strconv.FormatInt(int64(k), 10), nil
+ case int16:
+ return strconv.FormatInt(int64(k), 10), nil
+ case int32:
+ return strconv.FormatInt(int64(k), 10), nil
+ case int64:
+ return strconv.FormatInt(k, 10), nil
+ default:
+ return "", fmt.Errorf("unexpected map key type, got: %T", k)
+ }
+ }
+
+ switch in := input.(type) {
+ case yaml.Node:
+ return yamlNode(&in)
+ case *yaml.Node:
+ return yamlNode(in)
+ case map[interface{}]interface{}:
+ o := make(JSONMapSlice, 0, len(in))
+ for ke, va := range in {
+ var nmi JSONMapItem
+ if nmi.Key, err = format(ke); err != nil {
+ return nil, err
+ }
+
+ v, ert := transformData(va)
+ if ert != nil {
+ return nil, ert
+ }
+ nmi.Value = v
+ o = append(o, nmi)
+ }
+ return o, nil
+ case []interface{}:
+ len1 := len(in)
+ o := make([]interface{}, len1)
+ for i := 0; i < len1; i++ {
+ o[i], err = transformData(in[i])
+ if err != nil {
+ return nil, err
+ }
+ }
+ return o, nil
+ }
+ return input, nil
+}
+
+// YAMLDoc loads a YAML document from either HTTP or a file and converts it to JSON
+func YAMLDoc(path string) (json.RawMessage, error) {
+ yamlDoc, err := YAMLData(path)
+ if err != nil {
+ return nil, err
+ }
+
+ data, err := YAMLToJSON(yamlDoc)
+ if err != nil {
+ return nil, err
+ }
+
+ return data, nil
+}
+
+// YAMLData loads a YAML document from either HTTP or a file
+func YAMLData(path string) (interface{}, error) {
+ data, err := LoadFromFileOrHTTP(path)
+ if err != nil {
+ return nil, err
+ }
+
+ return BytesToYAMLDoc(data)
+}
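A minimal sketch tying the YAML pieces together: `BytesToYAMLDoc` keeps document order via `yaml.Node`, and `YAMLToJSON` emits an order-preserving `JSONMapSlice`:

```go
package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/swag"
)

func main() {
	doc, err := swag.BytesToYAMLDoc([]byte("name: gotosocial\nport: 8080\n"))
	if err != nil {
		log.Fatal(err)
	}

	raw, err := swag.YAMLToJSON(doc)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(raw)) // {"name":"gotosocial","port":8080} -- key order preserved
}
```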
diff --git a/vendor/github.com/go-openapi/validate/.editorconfig b/vendor/github.com/go-openapi/validate/.editorconfig
new file mode 100644
index 000000000..3152da69a
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/.editorconfig
@@ -0,0 +1,26 @@
+# top-most EditorConfig file
+root = true
+
+# Unix-style newlines with a newline ending every file
+[*]
+end_of_line = lf
+insert_final_newline = true
+indent_style = space
+indent_size = 2
+trim_trailing_whitespace = true
+
+# Set default charset
+[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
+charset = utf-8
+
+# Tab indentation (no size specified)
+[*.go]
+indent_style = tab
+
+[*.md]
+trim_trailing_whitespace = false
+
+# Matches exactly the files package.json and .travis.yml
+[{package.json,.travis.yml}]
+indent_style = space
+indent_size = 2
diff --git a/vendor/github.com/go-openapi/validate/.gitattributes b/vendor/github.com/go-openapi/validate/.gitattributes
new file mode 100644
index 000000000..49ad52766
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/.gitattributes
@@ -0,0 +1,2 @@
+# gofmt always uses LF, whereas Git uses CRLF on Windows.
+*.go text eol=lf
diff --git a/vendor/github.com/go-openapi/validate/.gitignore b/vendor/github.com/go-openapi/validate/.gitignore
new file mode 100644
index 000000000..fea8b84ec
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/.gitignore
@@ -0,0 +1,5 @@
+secrets.yml
+coverage.out
+*.cov
+*.out
+playground
diff --git a/vendor/github.com/go-openapi/validate/.golangci.yml b/vendor/github.com/go-openapi/validate/.golangci.yml
new file mode 100644
index 000000000..81818ca67
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/.golangci.yml
@@ -0,0 +1,50 @@
+linters-settings:
+ govet:
+ check-shadowing: true
+ gocyclo:
+ min-complexity: 50
+ maligned:
+ suggest-new: true
+ dupl:
+ threshold: 100
+ goconst:
+ min-len: 2
+ min-occurrences: 3
+
+linters:
+ enable-all: true
+ disable:
+ - maligned
+ - lll
+ - godox
+ - gocognit
+ - whitespace
+ - wsl
+ - funlen
+ - gochecknoglobals
+ - gochecknoinits
+ - scopelint
+ - wrapcheck
+ - exhaustivestruct
+ - exhaustive
+ - nlreturn
+ - testpackage
+ - gci
+ - gofumpt
+ - goerr113
+ - gomnd
+ - tparallel
+ - nestif
+ - godot
+ - tparallel
+ - paralleltest
+ - cyclop # because we have gocyclo already
+ # TODO: review the linters below. We disabled them to make the CI pass first.
+ - ireturn
+ - varnamelen
+ - forcetypeassert
+ - thelper
+ # Disable deprecated linters.
+ # They will be removed from golangci-lint in future.
+ - interfacer
+ - golint
\ No newline at end of file
diff --git a/vendor/github.com/go-openapi/validate/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/validate/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..9322b065e
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/CODE_OF_CONDUCT.md
@@ -0,0 +1,74 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of experience,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at ivan+abuse@flanders.co.nz. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/validate/LICENSE b/vendor/github.com/go-openapi/validate/LICENSE
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/go-openapi/validate/README.md b/vendor/github.com/go-openapi/validate/README.md
new file mode 100644
index 000000000..ea2d68cb6
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/README.md
@@ -0,0 +1,38 @@
+# Validation helpers
+[![Build Status](https://travis-ci.org/go-openapi/validate.svg?branch=master)](https://travis-ci.org/go-openapi/validate)
+[![Build status](https://ci.appveyor.com/api/projects/status/d6epy6vipueyh5fs/branch/master?svg=true)](https://ci.appveyor.com/project/fredbi/validate/branch/master)
+[![codecov](https://codecov.io/gh/go-openapi/validate/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/validate)
+[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
+[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/validate/master/LICENSE)
+[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/validate.svg)](https://pkg.go.dev/github.com/go-openapi/validate)
+[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/validate)](https://goreportcard.com/report/github.com/go-openapi/validate)
+
+This package provides helpers to validate the Swagger 2.0 specification (aka OpenAPI 2.0).
+
+Reference can be found here: https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md.
+
+## What's inside?
+
+* A validator for Swagger specifications
+* A validator for JSON schemas draft4
+* Helper functions to validate individual values (used by code generated by [go-swagger](https://github.com/go-swagger/go-swagger)); a usage sketch follows this README excerpt.
+ * Required, RequiredNumber, RequiredString
+ * ReadOnly
+ * UniqueItems, MaxItems, MinItems
+ * Enum, EnumCase
+ * Pattern, MinLength, MaxLength
+ * Minimum, Maximum, MultipleOf
+ * FormatOf
+
+[Documentation](https://godoc.org/github.com/go-openapi/validate)
+
+## FAQ
+
+* Does this library support OpenAPI 3?
+
+> No.
+> This package currently only supports OpenAPI 2.0 (aka Swagger 2.0).
+> There is no plan to make it evolve toward supporting OpenAPI 3.x.
+> This [discussion thread](https://github.com/go-openapi/spec/issues/21) tells the full story.
+>
+> An early attempt to support Swagger 3 may be found at: https://github.com/go-openapi/spec3
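For the helper list above, a hedged sketch validating a value against a draft-4 schema with `AgainstSchema`, which this package exports (the function itself is outside this excerpt):

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/spec"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/validate"
)

func main() {
	// a minimal draft-4 style schema
	schema := new(spec.Schema)
	_ = json.Unmarshal([]byte(`{
		"type": "object",
		"required": ["name"],
		"properties": {"name": {"type": "string", "minLength": 1}}
	}`), schema)

	var data interface{}
	_ = json.Unmarshal([]byte(`{"name": ""}`), &data)

	if err := validate.AgainstSchema(schema, data, strfmt.Default); err != nil {
		fmt.Println("validation failed:", err) // minLength violation
	}
}
```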
diff --git a/vendor/github.com/go-openapi/validate/appveyor.yml b/vendor/github.com/go-openapi/validate/appveyor.yml
new file mode 100644
index 000000000..89e5bccb3
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/appveyor.yml
@@ -0,0 +1,32 @@
+version: "0.1.{build}"
+
+clone_folder: C:\go-openapi\validate
+shallow_clone: true # for startup speed
+pull_requests:
+ do_not_increment_build_number: true
+
+#skip_tags: true
+#skip_branch_with_pr: true
+
+# appveyor.yml
+build: off
+
+environment:
+ GOPATH: c:\gopath
+
+stack: go 1.15
+
+test_script:
+ - go test -v -timeout 20m -args -enable-long ./...
+
+deploy: off
+
+notifications:
+ - provider: Slack
+ incoming_webhook: https://hooks.slack.com/services/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
+ auth_token:
+ secure: Sf7kZf7ZGbnwWUMpffHwMu5A0cHkLK2MYY32LNTPj4+/3qC3Ghl7+9v4TSLOqOlCwdRNjOGblAq7s+GDJed6/xgRQl1JtCi1klzZNrYX4q01pgTPvvGcwbBkIYgeMaPeIRcK9OZnud7sRXdttozgTOpytps2U6Js32ip7uj5mHSg2ub0FwoSJwlS6dbezZ8+eDhoha0F/guY99BEwx8Bd+zROrT2TFGsSGOFGN6wFc7moCqTHO/YkWib13a2QNXqOxCCVBy/lt76Wp+JkeFppjHlzs/2lP3EAk13RIUAaesdEUHvIHrzCyNJEd3/+KO2DzsWOYfpktd+KBCvgaYOsoo7ubdT3IROeAegZdCgo/6xgCEsmFc9ZcqCfN5yNx2A+BZ2Vwmpws+bQ1E1+B5HDzzaiLcYfG4X2O210QVGVDLWsv1jqD+uPYeHY2WRfh5ZsIUFvaqgUEnwHwrK44/8REAhQavt1QAj5uJpsRd7CkRVPWRNK+yIky+wgbVUFEchRNmS55E7QWf+W4+4QZkQi7vUTMc9nbTUu2Es9NfvfudOpM2wZbn98fjpb/qq/nRv6Bk+ca+7XD5/IgNLMbWp2ouDdzbiHLCOfDUiHiDJhLfFZx9Bwo7ZwfzeOlbrQX66bx7xRKYmOe4DLrXhNcpbsMa8qbfxlZRCmYbubB/Y8h4=
+ channel: bots
+ on_build_success: false
+ on_build_failure: true
+ on_build_status_changed: true
diff --git a/vendor/github.com/go-openapi/validate/context.go b/vendor/github.com/go-openapi/validate/context.go
new file mode 100644
index 000000000..89977173b
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/context.go
@@ -0,0 +1,56 @@
+package validate
+
+import (
+ "context"
+)
+
+// validateCtxKey is the type of context keys used in this pkg
+type validateCtxKey string
+
+const (
+ operationTypeKey validateCtxKey = "operationTypeKey"
+)
+
+type operationType string
+
+const (
+ request operationType = "request"
+ response operationType = "response"
+ none operationType = "none" // not specified in ctx
+)
+
+var operationTypeEnum = []operationType{request, response, none}
+
+// WithOperationRequest returns a new context with operationType request
+// in context value
+func WithOperationRequest(ctx context.Context) context.Context {
+ return withOperation(ctx, request)
+}
+
+// WithOperationResponse returns a new context with operationType response
+// in context value
+func WithOperationResponse(ctx context.Context) context.Context {
+ return withOperation(ctx, response)
+}
+
+func withOperation(ctx context.Context, operation operationType) context.Context {
+ return context.WithValue(ctx, operationTypeKey, operation)
+}
+
+// extractOperationType extracts the operation type from ctx.
+// If the value is unspecified or unknown, it returns the none operation type.
+func extractOperationType(ctx context.Context) operationType {
+ v := ctx.Value(operationTypeKey)
+ if v == nil {
+ return none
+ }
+ res, ok := v.(operationType)
+ if !ok {
+ return none
+ }
+ // validate the value is in operation enum
+ if err := Enum("", "", res, operationTypeEnum); err != nil {
+ return none
+ }
+ return res
+}
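+
+// Example (illustrative): a caller tags a context before validation and a
+// validator later recovers the operation type:
+//
+//	ctx := WithOperationRequest(context.Background())
+//	op := extractOperationType(ctx) // op == request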
diff --git a/vendor/github.com/go-openapi/validate/debug.go b/vendor/github.com/go-openapi/validate/debug.go
new file mode 100644
index 000000000..8815fd935
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/debug.go
@@ -0,0 +1,47 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import (
+ "fmt"
+ "log"
+ "os"
+ "path/filepath"
+ "runtime"
+)
+
+var (
+ // Debug is true when the SWAGGER_DEBUG env var is not empty.
+ // It enables a more verbose logging of validators.
+ Debug = os.Getenv("SWAGGER_DEBUG") != ""
+ // validateLogger is a debug logger for this package
+ validateLogger *log.Logger
+)
+
+func init() {
+ debugOptions()
+}
+
+func debugOptions() {
+ validateLogger = log.New(os.Stdout, "validate:", log.LstdFlags)
+}
+
+func debugLog(msg string, args ...interface{}) {
+ // A private, trivial trace logger, based on go-openapi/spec/expander.go:debugLog()
+ if Debug {
+ _, file1, pos1, _ := runtime.Caller(1)
+ validateLogger.Printf("%s:%d: %s", filepath.Base(file1), pos1, fmt.Sprintf(msg, args...))
+ }
+}
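+
+// Example (illustrative): set the SWAGGER_DEBUG environment variable to any
+// non-empty value to enable this logger, e.g.:
+//
+//	SWAGGER_DEBUG=1 go test ./...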
diff --git a/vendor/github.com/go-openapi/validate/default_validator.go b/vendor/github.com/go-openapi/validate/default_validator.go
new file mode 100644
index 000000000..bd14c2a26
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/default_validator.go
@@ -0,0 +1,281 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+// defaultValidator validates default values in a spec.
+// According to the Swagger spec, default values MUST validate against their schema.
+type defaultValidator struct {
+ SpecValidator *SpecValidator
+ visitedSchemas map[string]bool
+}
+
+// resetVisited resets the internal state of visited schemas
+func (d *defaultValidator) resetVisited() {
+ d.visitedSchemas = map[string]bool{}
+}
+
+func isVisited(path string, visitedSchemas map[string]bool) bool {
+ found := visitedSchemas[path]
+ if !found {
+ // search for overlapping paths
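+ // e.g. "definitions.a.b.a.b" counts as visited: its parent path
+ // "definitions.a.b" ends with the re-visited fragment "a.b",
+ // which flags a recursive exploration.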
+ frags := strings.Split(path, ".")
+ if len(frags) < 2 {
+ // shortcut exit on smaller paths
+ return found
+ }
+ last := len(frags) - 1
+ var currentFragStr, parent string
+ for i := range frags {
+ if i == 0 {
+ currentFragStr = frags[last]
+ } else {
+ currentFragStr = strings.Join([]string{frags[last-i], currentFragStr}, ".")
+ }
+ if i < last {
+ parent = strings.Join(frags[0:last-i], ".")
+ } else {
+ parent = ""
+ }
+ if strings.HasSuffix(parent, currentFragStr) {
+ found = true
+ break
+ }
+ }
+ }
+ return found
+}
+
+// beingVisited asserts a schema is being visited
+func (d *defaultValidator) beingVisited(path string) {
+ d.visitedSchemas[path] = true
+}
+
+// isVisited tells if a path has already been visited
+func (d *defaultValidator) isVisited(path string) bool {
+ return isVisited(path, d.visitedSchemas)
+}
+
+// Validate validates the default values declared in the swagger spec
+func (d *defaultValidator) Validate() (errs *Result) {
+ errs = new(Result)
+ if d == nil || d.SpecValidator == nil {
+ return errs
+ }
+ d.resetVisited()
+ errs.Merge(d.validateDefaultValueValidAgainstSchema()) // error -
+ return errs
+}
+
+func (d *defaultValidator) validateDefaultValueValidAgainstSchema() *Result {
+ // every default value that is specified must validate against the schema for that property
+ // headers, items, parameters, schema
+
+ res := new(Result)
+ s := d.SpecValidator
+
+ for method, pathItem := range s.expandedAnalyzer().Operations() {
+ for path, op := range pathItem {
+ // parameters
+ for _, param := range paramHelp.safeExpandedParamsFor(path, method, op.ID, res, s) {
+ if param.Default != nil && param.Required {
+ res.AddWarnings(requiredHasDefaultMsg(param.Name, param.In))
+ }
+
+ // reset explored schemas to get depth-first recursive-proof exploration
+ d.resetVisited()
+
+ // Check simple parameters first
+ // default values provided must validate against their inline definition (no explicit schema)
+ if param.Default != nil && param.Schema == nil {
+ // check param default value is valid
+ red := NewParamValidator(&param, s.KnownFormats).Validate(param.Default) //#nosec
+ if red.HasErrorsOrWarnings() {
+ res.AddErrors(defaultValueDoesNotValidateMsg(param.Name, param.In))
+ res.Merge(red)
+ }
+ }
+
+ // Recursively follows Items and Schemas
+ if param.Items != nil {
+ red := d.validateDefaultValueItemsAgainstSchema(param.Name, param.In, &param, param.Items) //#nosec
+ if red.HasErrorsOrWarnings() {
+ res.AddErrors(defaultValueItemsDoesNotValidateMsg(param.Name, param.In))
+ res.Merge(red)
+ }
+ }
+
+ if param.Schema != nil {
+ // Validate default value against schema
+ red := d.validateDefaultValueSchemaAgainstSchema(param.Name, param.In, param.Schema)
+ if red.HasErrorsOrWarnings() {
+ res.AddErrors(defaultValueDoesNotValidateMsg(param.Name, param.In))
+ res.Merge(red)
+ }
+ }
+ }
+
+ if op.Responses != nil {
+ if op.Responses.Default != nil {
+ // Same constraint on default Response
+ res.Merge(d.validateDefaultInResponse(op.Responses.Default, jsonDefault, path, 0, op.ID))
+ }
+ // Same constraint on regular Responses
+ if op.Responses.StatusCodeResponses != nil { // Safeguard
+ for code, r := range op.Responses.StatusCodeResponses {
+ res.Merge(d.validateDefaultInResponse(&r, "response", path, code, op.ID)) //#nosec
+ }
+ }
+ } else if op.ID != "" {
+ // Empty op.ID means there is no meaningful operation: no need to report a specific message
+ res.AddErrors(noValidResponseMsg(op.ID))
+ }
+ }
+ }
+ if s.spec.Spec().Definitions != nil { // Safeguard
+ // reset explored schemas to get depth-first recursive-proof exploration
+ d.resetVisited()
+ for nm, sch := range s.spec.Spec().Definitions {
+ res.Merge(d.validateDefaultValueSchemaAgainstSchema(fmt.Sprintf("definitions.%s", nm), "body", &sch)) //#nosec
+ }
+ }
+ return res
+}
+
+func (d *defaultValidator) validateDefaultInResponse(resp *spec.Response, responseType, path string, responseCode int, operationID string) *Result {
+ s := d.SpecValidator
+
+ response, res := responseHelp.expandResponseRef(resp, path, s)
+ if !res.IsValid() {
+ return res
+ }
+
+ responseName, responseCodeAsStr := responseHelp.responseMsgVariants(responseType, responseCode)
+
+ // nolint: dupl
+ if response.Headers != nil { // Safeguard
+ for nm, h := range response.Headers {
+ // reset explored schemas to get depth-first recursive-proof exploration
+ d.resetVisited()
+
+ if h.Default != nil {
+ red := NewHeaderValidator(nm, &h, s.KnownFormats).Validate(h.Default) //#nosec
+ if red.HasErrorsOrWarnings() {
+ res.AddErrors(defaultValueHeaderDoesNotValidateMsg(operationID, nm, responseName))
+ res.Merge(red)
+ }
+ }
+
+ // Headers have inline definition, like params
+ if h.Items != nil {
+ red := d.validateDefaultValueItemsAgainstSchema(nm, "header", &h, h.Items) //#nosec
+ if red.HasErrorsOrWarnings() {
+ res.AddErrors(defaultValueHeaderItemsDoesNotValidateMsg(operationID, nm, responseName))
+ res.Merge(red)
+ }
+ }
+
+ if _, err := compileRegexp(h.Pattern); err != nil {
+ res.AddErrors(invalidPatternInHeaderMsg(operationID, nm, responseName, h.Pattern, err))
+ }
+
+ // Headers don't have schema
+ }
+ }
+ if response.Schema != nil {
+ // reset explored schemas to get depth-first recursive-proof exploration
+ d.resetVisited()
+
+ red := d.validateDefaultValueSchemaAgainstSchema(responseCodeAsStr, "response", response.Schema)
+ if red.HasErrorsOrWarnings() {
+ // Additional message to make sure the context of the error is not lost
+ res.AddErrors(defaultValueInDoesNotValidateMsg(operationID, responseName))
+ res.Merge(red)
+ }
+ }
+ return res
+}
+
+func (d *defaultValidator) validateDefaultValueSchemaAgainstSchema(path, in string, schema *spec.Schema) *Result {
+ if schema == nil || d.isVisited(path) {
+ // Avoids recursing if we are already done with that check
+ return nil
+ }
+ d.beingVisited(path)
+ res := new(Result)
+ s := d.SpecValidator
+
+ if schema.Default != nil {
+ res.Merge(NewSchemaValidator(schema, s.spec.Spec(), path+".default", s.KnownFormats, SwaggerSchema(true)).Validate(schema.Default))
+ }
+ if schema.Items != nil {
+ if schema.Items.Schema != nil {
+ res.Merge(d.validateDefaultValueSchemaAgainstSchema(path+".items.default", in, schema.Items.Schema))
+ }
+ // Multiple schemas in items
+ if schema.Items.Schemas != nil { // Safeguard
+ for i, sch := range schema.Items.Schemas {
+ res.Merge(d.validateDefaultValueSchemaAgainstSchema(fmt.Sprintf("%s.items[%d].default", path, i), in, &sch)) //#nosec
+ }
+ }
+ }
+ if _, err := compileRegexp(schema.Pattern); err != nil {
+ res.AddErrors(invalidPatternInMsg(path, in, schema.Pattern))
+ }
+ if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil {
+ // NOTE: we keep validating values, even though additionalItems is not supported by Swagger 2.0 (and 3.0 as well)
+ res.Merge(d.validateDefaultValueSchemaAgainstSchema(fmt.Sprintf("%s.additionalItems", path), in, schema.AdditionalItems.Schema))
+ }
+ for propName, prop := range schema.Properties {
+ res.Merge(d.validateDefaultValueSchemaAgainstSchema(path+"."+propName, in, &prop)) //#nosec
+ }
+ for propName, prop := range schema.PatternProperties {
+ res.Merge(d.validateDefaultValueSchemaAgainstSchema(path+"."+propName, in, &prop)) //#nosec
+ }
+ if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
+ res.Merge(d.validateDefaultValueSchemaAgainstSchema(fmt.Sprintf("%s.additionalProperties", path), in, schema.AdditionalProperties.Schema))
+ }
+ if schema.AllOf != nil {
+ for i, aoSch := range schema.AllOf {
+ res.Merge(d.validateDefaultValueSchemaAgainstSchema(fmt.Sprintf("%s.allOf[%d]", path, i), in, &aoSch)) //#nosec
+ }
+ }
+ return res
+}
+
+// TODO: Temporary duplicated code. Need to refactor with examples
+// nolint: dupl
+func (d *defaultValidator) validateDefaultValueItemsAgainstSchema(path, in string, root interface{}, items *spec.Items) *Result {
+ res := new(Result)
+ s := d.SpecValidator
+ if items != nil {
+ if items.Default != nil {
+ res.Merge(newItemsValidator(path, in, items, root, s.KnownFormats).Validate(0, items.Default))
+ }
+ if items.Items != nil {
+ res.Merge(d.validateDefaultValueItemsAgainstSchema(path+"[0].default", in, root, items.Items))
+ }
+ if _, err := compileRegexp(items.Pattern); err != nil {
+ res.AddErrors(invalidPatternInMsg(path, in, items.Pattern))
+ }
+ }
+ return res
+}
diff --git a/vendor/github.com/go-openapi/validate/doc.go b/vendor/github.com/go-openapi/validate/doc.go
new file mode 100644
index 000000000..f5ca9a5d5
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/doc.go
@@ -0,0 +1,85 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package validate provides methods to validate a swagger specification,
+as well as tools to validate data against their schema.
+
+This package follows the Swagger 2.0 specification (aka OpenAPI 2.0). Reference
+can be found here: https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md.
+
+Validating a specification
+
+Validates a spec document (from JSON or YAML) against the JSON schema for swagger,
+then checks a number of extra rules that can't be expressed in JSON schema.
+
+Entry points:
+ - Spec()
+ - NewSpecValidator()
+ - SpecValidator.Validate()
+
+Reported as errors:
+ [x] definition can't declare a property that's already defined by one of its ancestors
+ [x] definition's ancestor can't be a descendant of the same model
+ [x] path uniqueness: each api path should be unique per method, compared non-verbatim (i.e. accounting for path param names)
+ [x] each security reference should contain only unique scopes
+ [x] each security scope in a security definition should be unique
+ [x] parameters in path must be unique
+ [x] each path parameter must correspond to a parameter placeholder and vice versa
+ [x] each referenceable definition must have references
+ [x] each definition property listed in the required array must be defined in the properties of the model
+ [x] each parameter should have a unique `name` and `type` combination
+ [x] each operation should have only 1 parameter of type body
+ [x] each reference must point to a valid object
+ [x] every default value that is specified must validate against the schema for that property
+ [x] items property is required for all schemas/definitions of type `array`
+ [x] path parameters must be declared as required
+ [x] headers must not contain $ref
+ [x] schema and property examples provided must validate against their respective object's schema
+ [x] examples provided must validate against their schema
+
+Reported as warnings:
+ [x] path parameters should not contain any of [{,},\w]
+ [x] empty path
+ [x] unused definitions
+ [x] unsupported validation of examples on non-JSON media types
+ [x] examples in response without schema
+ [x] readOnly properties should not be required
+
+Validating a schema
+
+The schema validation toolkit validates data against JSON-schema-draft 04 schema.
+
+It is tested against the full json-schema-testing-suite (https://github.com/json-schema-org/JSON-Schema-Test-Suite),
+except for the optional part (bignum, ECMA regexp, ...).
+
+It supports the complete JSON-schema vocabulary, including keywords not supported by Swagger (e.g. additionalItems, ...)
+
+Entry points:
+ - AgainstSchema()
+ - ...
+
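+A minimal sketch of data validation (assuming schema is a *spec.Schema and
+data holds an unmarshaled JSON value):
+
+  err := AgainstSchema(schema, data, strfmt.Default)
+  // a nil error means data validates against schema
+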
+Known limitations
+
+With the current version of this package, the following aspects of swagger are not yet supported:
+ [ ] errors and warnings are not reported with key/line number in spec
+ [ ] default values and examples on responses only support application/json producer type
+ [ ] invalid numeric constraints (such as Minimum, etc.) are not checked, except for default and example values
+ [ ] rules for collectionFormat are not implemented
+ [ ] no validation rule for polymorphism support (discriminator) [not done here]
+ [ ] valid JS ECMA regexps not supported by the Go regexp engine are considered invalid
+ [ ] arbitrarily large numbers are not supported: max is math.MaxFloat64
+
+*/
+package validate
diff --git a/vendor/github.com/go-openapi/validate/example_validator.go b/vendor/github.com/go-openapi/validate/example_validator.go
new file mode 100644
index 000000000..c8bffd78e
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/example_validator.go
@@ -0,0 +1,270 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import (
+ "fmt"
+
+ "github.com/go-openapi/spec"
+)
+
+// exampleValidator validates example values defined in a spec
+type exampleValidator struct {
+ SpecValidator *SpecValidator
+ visitedSchemas map[string]bool
+}
+
+// resetVisited resets the internal state of visited schemas
+func (ex *exampleValidator) resetVisited() {
+ ex.visitedSchemas = map[string]bool{}
+}
+
+// beingVisited asserts a schema is being visited
+func (ex *exampleValidator) beingVisited(path string) {
+ ex.visitedSchemas[path] = true
+}
+
+// isVisited tells if a path has already been visited
+func (ex *exampleValidator) isVisited(path string) bool {
+ return isVisited(path, ex.visitedSchemas)
+}
+
+// Validate validates the example values declared in the swagger spec
+// Example values MUST conform to their schema.
+//
+// With Swagger 2.0, examples are supported in:
+// - schemas
+// - individual properties
+// - responses
+//
+func (ex *exampleValidator) Validate() (errs *Result) {
+ errs = new(Result)
+ if ex == nil || ex.SpecValidator == nil {
+ return errs
+ }
+ ex.resetVisited()
+ errs.Merge(ex.validateExampleValueValidAgainstSchema()) // error -
+
+ return errs
+}
+
+func (ex *exampleValidator) validateExampleValueValidAgainstSchema() *Result {
+ // every example value that is specified must validate against the schema for that property
+ // in: schemas, properties, object, items
+ // not in: headers, parameters without schema
+
+ res := new(Result)
+ s := ex.SpecValidator
+
+ for method, pathItem := range s.expandedAnalyzer().Operations() {
+ for path, op := range pathItem {
+ // parameters
+ for _, param := range paramHelp.safeExpandedParamsFor(path, method, op.ID, res, s) {
+
+ // As of swagger 2.0, Examples are not supported in simple parameters
+ // However, it looks like they are supported by go-openapi
+
+ // reset explored schemas to get depth-first recursive-proof exploration
+ ex.resetVisited()
+
+ // Check simple parameters first
+ // example values provided must validate against their inline definition (no explicit schema)
+ if param.Example != nil && param.Schema == nil {
+ // check param example value is valid
+ red := NewParamValidator(&param, s.KnownFormats).Validate(param.Example) //#nosec
+ if red.HasErrorsOrWarnings() {
+ res.AddWarnings(exampleValueDoesNotValidateMsg(param.Name, param.In))
+ res.MergeAsWarnings(red)
+ }
+ }
+
+ // Recursively follows Items and Schemas
+ if param.Items != nil {
+ red := ex.validateExampleValueItemsAgainstSchema(param.Name, param.In, &param, param.Items) //#nosec
+ if red.HasErrorsOrWarnings() {
+ res.AddWarnings(exampleValueItemsDoesNotValidateMsg(param.Name, param.In))
+ res.Merge(red)
+ }
+ }
+
+ if param.Schema != nil {
+ // Validate example value against schema
+ red := ex.validateExampleValueSchemaAgainstSchema(param.Name, param.In, param.Schema)
+ if red.HasErrorsOrWarnings() {
+ res.AddWarnings(exampleValueDoesNotValidateMsg(param.Name, param.In))
+ res.Merge(red)
+ }
+ }
+ }
+
+ if op.Responses != nil {
+ if op.Responses.Default != nil {
+ // Same constraint on default Response
+ res.Merge(ex.validateExampleInResponse(op.Responses.Default, jsonDefault, path, 0, op.ID))
+ }
+ // Same constraint on regular Responses
+ if op.Responses.StatusCodeResponses != nil { // Safeguard
+ for code, r := range op.Responses.StatusCodeResponses {
+ res.Merge(ex.validateExampleInResponse(&r, "response", path, code, op.ID)) //#nosec
+ }
+ }
+ } else if op.ID != "" {
+ // Empty op.ID means there is no meaningful operation: no need to report a specific message
+ res.AddErrors(noValidResponseMsg(op.ID))
+ }
+ }
+ }
+ if s.spec.Spec().Definitions != nil { // Safeguard
+ // reset explored schemas to get depth-first recursive-proof exploration
+ ex.resetVisited()
+ for nm, sch := range s.spec.Spec().Definitions {
+ res.Merge(ex.validateExampleValueSchemaAgainstSchema(fmt.Sprintf("definitions.%s", nm), "body", &sch)) //#nosec
+ }
+ }
+ return res
+}
+
+func (ex *exampleValidator) validateExampleInResponse(resp *spec.Response, responseType, path string, responseCode int, operationID string) *Result {
+ s := ex.SpecValidator
+
+ response, res := responseHelp.expandResponseRef(resp, path, s)
+ if !res.IsValid() { // Safeguard
+ return res
+ }
+
+ responseName, responseCodeAsStr := responseHelp.responseMsgVariants(responseType, responseCode)
+
+ // nolint: dupl
+ if response.Headers != nil { // Safeguard
+ for nm, h := range response.Headers {
+ // reset explored schemas to get depth-first recursive-proof exploration
+ ex.resetVisited()
+
+ if h.Example != nil {
+ red := NewHeaderValidator(nm, &h, s.KnownFormats).Validate(h.Example) //#nosec
+ if red.HasErrorsOrWarnings() {
+ res.AddWarnings(exampleValueHeaderDoesNotValidateMsg(operationID, nm, responseName))
+ res.MergeAsWarnings(red)
+ }
+ }
+
+ // Headers have inline definition, like params
+ if h.Items != nil {
+ red := ex.validateExampleValueItemsAgainstSchema(nm, "header", &h, h.Items) //#nosec
+ if red.HasErrorsOrWarnings() {
+ res.AddWarnings(exampleValueHeaderItemsDoesNotValidateMsg(operationID, nm, responseName))
+ res.MergeAsWarnings(red)
+ }
+ }
+
+ if _, err := compileRegexp(h.Pattern); err != nil {
+ res.AddErrors(invalidPatternInHeaderMsg(operationID, nm, responseName, h.Pattern, err))
+ }
+
+ // Headers don't have schema
+ }
+ }
+ if response.Schema != nil {
+ // reset explored schemas to get depth-first recursive-proof exploration
+ ex.resetVisited()
+
+ red := ex.validateExampleValueSchemaAgainstSchema(responseCodeAsStr, "response", response.Schema)
+ if red.HasErrorsOrWarnings() {
+ // Additional message to make sure the context of the error is not lost
+ res.AddWarnings(exampleValueInDoesNotValidateMsg(operationID, responseName))
+ res.Merge(red)
+ }
+ }
+
+ if response.Examples != nil {
+ if response.Schema != nil {
+ if example, ok := response.Examples["application/json"]; ok {
+ res.MergeAsWarnings(NewSchemaValidator(response.Schema, s.spec.Spec(), path+".examples", s.KnownFormats, SwaggerSchema(true)).Validate(example))
+ } else {
+ // TODO: validate other media types too
+ res.AddWarnings(examplesMimeNotSupportedMsg(operationID, responseName))
+ }
+ } else {
+ res.AddWarnings(examplesWithoutSchemaMsg(operationID, responseName))
+ }
+ }
+ return res
+}
+
+func (ex *exampleValidator) validateExampleValueSchemaAgainstSchema(path, in string, schema *spec.Schema) *Result {
+ if schema == nil || ex.isVisited(path) {
+ // Avoids recursing if we are already done with that check
+ return nil
+ }
+ ex.beingVisited(path)
+ s := ex.SpecValidator
+ res := new(Result)
+
+ if schema.Example != nil {
+ res.MergeAsWarnings(NewSchemaValidator(schema, s.spec.Spec(), path+".example", s.KnownFormats, SwaggerSchema(true)).Validate(schema.Example))
+ }
+ if schema.Items != nil {
+ if schema.Items.Schema != nil {
+ res.Merge(ex.validateExampleValueSchemaAgainstSchema(path+".items.example", in, schema.Items.Schema))
+ }
+ // Multiple schemas in items
+ if schema.Items.Schemas != nil { // Safeguard
+ for i, sch := range schema.Items.Schemas {
+ res.Merge(ex.validateExampleValueSchemaAgainstSchema(fmt.Sprintf("%s.items[%d].example", path, i), in, &sch)) //#nosec
+ }
+ }
+ }
+ if _, err := compileRegexp(schema.Pattern); err != nil {
+ res.AddErrors(invalidPatternInMsg(path, in, schema.Pattern))
+ }
+ if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil {
+ // NOTE: we keep validating values, even though additionalItems is unsupported in Swagger 2.0 (and 3.0 as well)
+ res.Merge(ex.validateExampleValueSchemaAgainstSchema(fmt.Sprintf("%s.additionalItems", path), in, schema.AdditionalItems.Schema))
+ }
+ for propName, prop := range schema.Properties {
+ res.Merge(ex.validateExampleValueSchemaAgainstSchema(path+"."+propName, in, &prop)) //#nosec
+ }
+ for propName, prop := range schema.PatternProperties {
+ res.Merge(ex.validateExampleValueSchemaAgainstSchema(path+"."+propName, in, &prop)) //#nosec
+ }
+ if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
+ res.Merge(ex.validateExampleValueSchemaAgainstSchema(fmt.Sprintf("%s.additionalProperties", path), in, schema.AdditionalProperties.Schema))
+ }
+ if schema.AllOf != nil {
+ for i, aoSch := range schema.AllOf {
+ res.Merge(ex.validateExampleValueSchemaAgainstSchema(fmt.Sprintf("%s.allOf[%d]", path, i), in, &aoSch)) //#nosec
+ }
+ }
+ return res
+}
+
+// TODO: Temporary duplicated code. Need to refactor with examples
+// nolint: dupl
+func (ex *exampleValidator) validateExampleValueItemsAgainstSchema(path, in string, root interface{}, items *spec.Items) *Result {
+ res := new(Result)
+ s := ex.SpecValidator
+ if items != nil {
+ if items.Example != nil {
+ res.MergeAsWarnings(newItemsValidator(path, in, items, root, s.KnownFormats).Validate(0, items.Example))
+ }
+ if items.Items != nil {
+ res.Merge(ex.validateExampleValueItemsAgainstSchema(path+"[0].example", in, root, items.Items))
+ }
+ if _, err := compileRegexp(items.Pattern); err != nil {
+ res.AddErrors(invalidPatternInMsg(path, in, items.Pattern))
+ }
+ }
+ return res
+}
diff --git a/vendor/github.com/go-openapi/validate/formats.go b/vendor/github.com/go-openapi/validate/formats.go
new file mode 100644
index 000000000..0ad996cbb
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/formats.go
@@ -0,0 +1,69 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import (
+ "reflect"
+
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+)
+
+type formatValidator struct {
+ Format string
+ Path string
+ In string
+ KnownFormats strfmt.Registry
+}
+
+func (f *formatValidator) SetPath(path string) {
+ f.Path = path
+}
+
+func (f *formatValidator) Applies(source interface{}, kind reflect.Kind) bool {
+ doit := func() bool {
+ if source == nil {
+ return false
+ }
+ switch source := source.(type) {
+ case *spec.Items:
+ return kind == reflect.String && f.KnownFormats.ContainsName(source.Format)
+ case *spec.Parameter:
+ return kind == reflect.String && f.KnownFormats.ContainsName(source.Format)
+ case *spec.Schema:
+ return kind == reflect.String && f.KnownFormats.ContainsName(source.Format)
+ case *spec.Header:
+ return kind == reflect.String && f.KnownFormats.ContainsName(source.Format)
+ }
+ return false
+ }
+ r := doit()
+ debugLog("format validator for %q applies %t for %T (kind: %v)\n", f.Path, r, source, kind)
+ return r
+}
+
+func (f *formatValidator) Validate(val interface{}) *Result {
+ result := new(Result)
+ debugLog("validating \"%v\" against format: %s", val, f.Format)
+
+ if err := FormatOf(f.Path, f.In, f.Format, val.(string), f.KnownFormats); err != nil {
+ result.AddErrors(err)
+ }
+
+ if result.HasErrors() {
+ return result
+ }
+ return nil
+}
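+
+// Example (illustrative): Validate above delegates to FormatOf, which may
+// also be called directly:
+//
+//	err := FormatOf("birthday", "body", "date", "1985-04-12", strfmt.Default)
+//	// err is nil for a well-formed date value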
diff --git a/vendor/github.com/go-openapi/validate/helpers.go b/vendor/github.com/go-openapi/validate/helpers.go
new file mode 100644
index 000000000..48ebfab58
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/helpers.go
@@ -0,0 +1,324 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+// TODO: define this as package validate/internal
+// This must be done while keeping CI intact with all tests and test coverage
+
+import (
+ "reflect"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/spec"
+)
+
+const (
+ swaggerBody = "body"
+ swaggerExample = "example"
+ swaggerExamples = "examples"
+)
+
+const (
+ objectType = "object"
+ arrayType = "array"
+ stringType = "string"
+ integerType = "integer"
+ numberType = "number"
+ booleanType = "boolean"
+ fileType = "file"
+ nullType = "null"
+)
+
+const (
+ jsonProperties = "properties"
+ jsonItems = "items"
+ jsonType = "type"
+ // jsonSchema = "schema"
+ jsonDefault = "default"
+)
+
+const (
+ stringFormatDate = "date"
+ stringFormatDateTime = "date-time"
+ stringFormatPassword = "password"
+ stringFormatByte = "byte"
+ // stringFormatBinary = "binary"
+ stringFormatCreditCard = "creditcard"
+ stringFormatDuration = "duration"
+ stringFormatEmail = "email"
+ stringFormatHexColor = "hexcolor"
+ stringFormatHostname = "hostname"
+ stringFormatIPv4 = "ipv4"
+ stringFormatIPv6 = "ipv6"
+ stringFormatISBN = "isbn"
+ stringFormatISBN10 = "isbn10"
+ stringFormatISBN13 = "isbn13"
+ stringFormatMAC = "mac"
+ stringFormatBSONObjectID = "bsonobjectid"
+ stringFormatRGBColor = "rgbcolor"
+ stringFormatSSN = "ssn"
+ stringFormatURI = "uri"
+ stringFormatUUID = "uuid"
+ stringFormatUUID3 = "uuid3"
+ stringFormatUUID4 = "uuid4"
+ stringFormatUUID5 = "uuid5"
+
+ integerFormatInt32 = "int32"
+ integerFormatInt64 = "int64"
+ integerFormatUInt32 = "uint32"
+ integerFormatUInt64 = "uint64"
+
+ numberFormatFloat32 = "float32"
+ numberFormatFloat64 = "float64"
+ numberFormatFloat = "float"
+ numberFormatDouble = "double"
+)
+
+// Helpers available at the package level
+var (
+ pathHelp *pathHelper
+ valueHelp *valueHelper
+ errorHelp *errorHelper
+ paramHelp *paramHelper
+ responseHelp *responseHelper
+)
+
+type errorHelper struct {
+ // A collection of unexported helpers for error construction
+}
+
+func (h *errorHelper) sErr(err errors.Error) *Result {
+ // Builds a Result from standard errors.Error
+ return &Result{Errors: []error{err}}
+}
+
+func (h *errorHelper) addPointerError(res *Result, err error, ref string, fromPath string) *Result {
+ // Provides more context on error messages
+ // reported by the jsonpointer package by altering the passed Result
+ if err != nil {
+ res.AddErrors(cannotResolveRefMsg(fromPath, ref, err))
+ }
+ return res
+}
+
+type pathHelper struct {
+ // A collection of unexported helpers for path validation
+}
+
+func (h *pathHelper) stripParametersInPath(path string) string {
+ // Returns a path stripped from all path parameters, with multiple or trailing slashes removed.
+ //
+ // Stripping is performed on a slash-separated basis, e.g. '/a{/b}' remains '/a{/b}' and not '/a'.
+ // - A trailing "/" makes a difference, e.g. /a/ !~ /a (ex: canary/bitbucket.org/swagger.json)
+ // - presence or absence of a parameter makes a difference, e.g. /a/{log} !~ /a/ (ex: canary/kubernetes/swagger.json)
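+ // e.g. "/pets/{id}/toys" is stripped to "/pets/X/toys", so two paths that
+ // differ only by path parameter names share the same stripped form.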
+
+ // Regexp to extract parameters from path, with surrounding {}.
+ // NOTE: important non-greedy modifier
+ rexParsePathParam := mustCompileRegexp(`{[^{}]+?}`)
+ strippedSegments := []string{}
+
+ for _, segment := range strings.Split(path, "/") {
+ strippedSegments = append(strippedSegments, rexParsePathParam.ReplaceAllString(segment, "X"))
+ }
+ return strings.Join(strippedSegments, "/")
+}
+
+func (h *pathHelper) extractPathParams(path string) (params []string) {
+ // Extracts all params from a path, with surrounding "{}"
+ rexParsePathParam := mustCompileRegexp(`{[^{}]+?}`)
+
+ for _, segment := range strings.Split(path, "/") {
+ for _, v := range rexParsePathParam.FindAllStringSubmatch(segment, -1) {
+ params = append(params, v...)
+ }
+ }
+ return
+}
+
+type valueHelper struct {
+ // A collection of unexported helpers for value validation
+}
+
+func (h *valueHelper) asInt64(val interface{}) int64 {
+ // Number conversion function for int64, without error checking
+ // (implements an implicit type upgrade).
+ v := reflect.ValueOf(val)
+ switch v.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return v.Int()
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ return int64(v.Uint())
+ case reflect.Float32, reflect.Float64:
+ return int64(v.Float())
+ default:
+ // panic("Non numeric value in asInt64()")
+ return 0
+ }
+}
+
+func (h *valueHelper) asUint64(val interface{}) uint64 {
+ // Number conversion function for uint64, without error checking
+ // (implements an implicit type upgrade).
+ v := reflect.ValueOf(val)
+ switch v.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return uint64(v.Int())
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ return v.Uint()
+ case reflect.Float32, reflect.Float64:
+ return uint64(v.Float())
+ default:
+ // panic("Non numeric value in asUint64()")
+ return 0
+ }
+}
+
+// Same conversion, for float64 values
+func (h *valueHelper) asFloat64(val interface{}) float64 {
+ // Number conversion function for float64, without error checking
+ // (implements an implicit type upgrade).
+ v := reflect.ValueOf(val)
+ switch v.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return float64(v.Int())
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ return float64(v.Uint())
+ case reflect.Float32, reflect.Float64:
+ return v.Float()
+ default:
+ // panic("Non numeric value in asFloat64()")
+ return 0
+ }
+}
+
+type paramHelper struct {
+ // A collection of unexported helpers for parameters resolution
+}
+
+func (h *paramHelper) safeExpandedParamsFor(path, method, operationID string, res *Result, s *SpecValidator) (params []spec.Parameter) {
+ operation, ok := s.expandedAnalyzer().OperationFor(method, path)
+ if ok {
+ // expand parameters first if necessary
+ resolvedParams := []spec.Parameter{}
+ for _, ppr := range operation.Parameters {
+ resolvedParam, red := h.resolveParam(path, method, operationID, &ppr, s) //#nosec
+ res.Merge(red)
+ if resolvedParam != nil {
+ resolvedParams = append(resolvedParams, *resolvedParam)
+ }
+ }
+ // remove params with invalid expansion from Slice
+ operation.Parameters = resolvedParams
+
+ for _, ppr := range s.expandedAnalyzer().SafeParamsFor(method, path,
+ func(p spec.Parameter, err error) bool {
+ // since params have already been expanded, there are few causes for error
+ res.AddErrors(someParametersBrokenMsg(path, method, operationID))
+ // original error from analyzer
+ res.AddErrors(err)
+ return true
+ }) {
+ params = append(params, ppr)
+ }
+ }
+ return
+}
+
+func (h *paramHelper) resolveParam(path, method, operationID string, param *spec.Parameter, s *SpecValidator) (*spec.Parameter, *Result) {
+ // Ensure parameter is expanded
+ var err error
+ res := new(Result)
+ isRef := param.Ref.String() != ""
+ if s.spec.SpecFilePath() == "" {
+ err = spec.ExpandParameterWithRoot(param, s.spec.Spec(), nil)
+ } else {
+ err = spec.ExpandParameter(param, s.spec.SpecFilePath())
+
+ }
+ if err != nil { // Safeguard
+ // NOTE: we may enter here when the whole parameter is an unresolved $ref
+ refPath := strings.Join([]string{"\"" + path + "\"", method}, ".")
+ errorHelp.addPointerError(res, err, param.Ref.String(), refPath)
+ return nil, res
+ }
+ res.Merge(h.checkExpandedParam(param, param.Name, param.In, operationID, isRef))
+ return param, res
+}
+
+func (h *paramHelper) checkExpandedParam(pr *spec.Parameter, path, in, operation string, isRef bool) *Result {
+ // Secure parameter structure after $ref resolution
+ res := new(Result)
+ simpleZero := spec.SimpleSchema{}
+ // Try to explain why... best guess
+ switch {
+ case pr.In == swaggerBody && (pr.SimpleSchema != simpleZero && pr.SimpleSchema.Type != objectType):
+ if isRef {
+ // Most likely, a $ref with a sibling is an unwanted situation: in itself this is a warning...
+ // but we detect it because of the following error:
+ // schema took over Parameter for an unexplained reason
+ res.AddWarnings(refShouldNotHaveSiblingsMsg(path, operation))
+ }
+ res.AddErrors(invalidParameterDefinitionMsg(path, in, operation))
+ case pr.In != swaggerBody && pr.Schema != nil:
+ if isRef {
+ res.AddWarnings(refShouldNotHaveSiblingsMsg(path, operation))
+ }
+ res.AddErrors(invalidParameterDefinitionAsSchemaMsg(path, in, operation))
+ case (pr.In == swaggerBody && pr.Schema == nil) || (pr.In != swaggerBody && pr.SimpleSchema == simpleZero):
+ // Other unexpected mishaps
+ res.AddErrors(invalidParameterDefinitionMsg(path, in, operation))
+ }
+ return res
+}
+
+type responseHelper struct {
+ // A collection of unexported helpers for response resolution
+}
+
+func (r *responseHelper) expandResponseRef(
+ response *spec.Response,
+ path string, s *SpecValidator) (*spec.Response, *Result) {
+ // Ensure response is expanded
+ var err error
+ res := new(Result)
+ if s.spec.SpecFilePath() == "" {
+ // there is no physical document to resolve $ref in response
+ err = spec.ExpandResponseWithRoot(response, s.spec.Spec(), nil)
+ } else {
+ err = spec.ExpandResponse(response, s.spec.SpecFilePath())
+ }
+ if err != nil { // Safeguard
+ // NOTE: we may enter here when the whole response is an unresolved $ref.
+ errorHelp.addPointerError(res, err, response.Ref.String(), path)
+ return nil, res
+ }
+ return response, res
+}
+
+func (r *responseHelper) responseMsgVariants(
+ responseType string,
+ responseCode int) (responseName, responseCodeAsStr string) {
+ // Path variants for messages
+ if responseType == jsonDefault {
+ responseCodeAsStr = jsonDefault
+ responseName = "default response"
+ } else {
+ responseCodeAsStr = strconv.Itoa(responseCode)
+ responseName = "response " + responseCodeAsStr
+ }
+ return
+}
diff --git a/vendor/github.com/go-openapi/validate/object_validator.go b/vendor/github.com/go-openapi/validate/object_validator.go
new file mode 100644
index 000000000..7bb12615d
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/object_validator.go
@@ -0,0 +1,279 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import (
+ "reflect"
+ "regexp"
+ "strings"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+)
+
+type objectValidator struct {
+ Path string
+ In string
+ MaxProperties *int64
+ MinProperties *int64
+ Required []string
+ Properties map[string]spec.Schema
+ AdditionalProperties *spec.SchemaOrBool
+ PatternProperties map[string]spec.Schema
+ Root interface{}
+ KnownFormats strfmt.Registry
+ Options SchemaValidatorOptions
+}
+
+func (o *objectValidator) SetPath(path string) {
+ o.Path = path
+}
+
+func (o *objectValidator) Applies(source interface{}, kind reflect.Kind) bool {
+ // TODO: this should also work for structs
+ // there is a problem in the type validator where it will be unhappy about null values
+ // so that requires more testing
+ r := reflect.TypeOf(source) == specSchemaType && (kind == reflect.Map || kind == reflect.Struct)
+ debugLog("object validator for %q applies %t for %T (kind: %v)\n", o.Path, r, source, kind)
+ return r
+}
+
+func (o *objectValidator) isProperties() bool {
+ p := strings.Split(o.Path, ".")
+ return len(p) > 1 && p[len(p)-1] == jsonProperties && p[len(p)-2] != jsonProperties
+}
+
+func (o *objectValidator) isDefault() bool {
+ p := strings.Split(o.Path, ".")
+ return len(p) > 1 && p[len(p)-1] == jsonDefault && p[len(p)-2] != jsonDefault
+}
+
+func (o *objectValidator) isExample() bool {
+ p := strings.Split(o.Path, ".")
+ return len(p) > 1 && (p[len(p)-1] == swaggerExample || p[len(p)-1] == swaggerExamples) && p[len(p)-2] != swaggerExample
+}
+
+func (o *objectValidator) checkArrayMustHaveItems(res *Result, val map[string]interface{}) {
+ // for swagger 2.0 schemas, there is an additional constraint to have array items defined explicitly.
+ // with pure jsonschema draft 4, one may have arrays with undefined items (i.e. any type).
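+ // e.g. a schema {"type": "array"} without an "items" key is reported
+ // as a Required error on "items".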
+ if t, typeFound := val[jsonType]; typeFound {
+ if tpe, ok := t.(string); ok && tpe == arrayType {
+ if item, itemsKeyFound := val[jsonItems]; !itemsKeyFound {
+ res.AddErrors(errors.Required(jsonItems, o.Path, item))
+ }
+ }
+ }
+}
+
+func (o *objectValidator) checkItemsMustBeTypeArray(res *Result, val map[string]interface{}) {
+ if !o.isProperties() && !o.isDefault() && !o.isExample() {
+ if _, itemsKeyFound := val[jsonItems]; itemsKeyFound {
+ t, typeFound := val[jsonType]
+ if typeFound {
+ if tpe, ok := t.(string); !ok || tpe != arrayType {
+ res.AddErrors(errors.InvalidType(o.Path, o.In, arrayType, nil))
+ }
+ } else {
+ // there is no type
+ res.AddErrors(errors.Required(jsonType, o.Path, t))
+ }
+ }
+ }
+}
+
+func (o *objectValidator) precheck(res *Result, val map[string]interface{}) {
+ if o.Options.EnableArrayMustHaveItemsCheck {
+ o.checkArrayMustHaveItems(res, val)
+ }
+ if o.Options.EnableObjectArrayTypeCheck {
+ o.checkItemsMustBeTypeArray(res, val)
+ }
+}
+
+func (o *objectValidator) Validate(data interface{}) *Result {
+ val := data.(map[string]interface{})
+ // TODO: guard against nil data
+ numKeys := int64(len(val))
+
+ if o.MinProperties != nil && numKeys < *o.MinProperties {
+ return errorHelp.sErr(errors.TooFewProperties(o.Path, o.In, *o.MinProperties))
+ }
+ if o.MaxProperties != nil && numKeys > *o.MaxProperties {
+ return errorHelp.sErr(errors.TooManyProperties(o.Path, o.In, *o.MaxProperties))
+ }
+
+ res := new(Result)
+
+ o.precheck(res, val)
+
+ // check validity of field names
+ if o.AdditionalProperties != nil && !o.AdditionalProperties.Allows {
+ // Case: additionalProperties: false
+ for k := range val {
+ _, regularProperty := o.Properties[k]
+ matched := false
+
+ for pk := range o.PatternProperties {
+ if matches, _ := regexp.MatchString(pk, k); matches {
+ matched = true
+ break
+ }
+ }
+
+ if !regularProperty && k != "$schema" && k != "id" && !matched {
+ // Special properties "$schema" and "id" are ignored
+ res.AddErrors(errors.PropertyNotAllowed(o.Path, o.In, k))
+
+ // BUG(fredbi): This section should move to a part dedicated to spec validation as
+ // it will conflict with regular schemas where a property "headers" is defined.
+
+ //
+ // Croaks a more explicit message on top of the standard one
+ // in some recognized cases.
+ //
+ // NOTE: edge cases with invalid type assertion are simply ignored here.
+ // NOTE: prefix your messages here with "IMPORTANT!" so they are not filtered
+ // by higher level callers (the IMPORTANT! tag will be eventually
+ // removed).
+ if k == "headers" && val[k] != nil {
+ // $ref is forbidden in header
+ if headers, mapOk := val[k].(map[string]interface{}); mapOk {
+ for headerKey, headerBody := range headers {
+ if headerBody != nil {
+ if headerSchema, mapOfMapOk := headerBody.(map[string]interface{}); mapOfMapOk {
+ if _, found := headerSchema["$ref"]; found {
+ var msg string
+ if refString, stringOk := headerSchema["$ref"].(string); stringOk {
+ msg = strings.Join([]string{", one may not use $ref=\"", refString, "\""}, "")
+ }
+ res.AddErrors(refNotAllowedInHeaderMsg(o.Path, headerKey, msg))
+ }
+ }
+ }
+ }
+ }
+ /*
+ case "$ref":
+ if val[k] != nil {
+ // TODO: check context of that ref: warn about siblings, check against invalid context
+ }
+ */
+ }
+ }
+ }
+ } else {
+ // Cases: no additionalProperties (implying: true), or additionalProperties: true, or additionalProperties: { <<schema>> }
+ for key, value := range val {
+ _, regularProperty := o.Properties[key]
+
+ // Validates property against "patternProperties" if applicable
+ // BUG(fredbi): succeededOnce is always false
+
+ // NOTE: how about regular properties which do not match patternProperties?
+ matched, succeededOnce, _ := o.validatePatternProperty(key, value, res)
+
+ if !(regularProperty || matched || succeededOnce) {
+
+ // Cases: properties which are not regular properties and have not been matched by the PatternProperties validator
+ if o.AdditionalProperties != nil && o.AdditionalProperties.Schema != nil {
+ // AdditionalProperties as Schema
+ r := NewSchemaValidator(o.AdditionalProperties.Schema, o.Root, o.Path+"."+key, o.KnownFormats, o.Options.Options()...).Validate(value)
+ res.mergeForField(data.(map[string]interface{}), key, r)
+ } else if regularProperty && !(matched || succeededOnce) {
+ // TODO: this is dead code since regularProperty=false here
+ res.AddErrors(errors.FailedAllPatternProperties(o.Path, o.In, key))
+ }
+ }
+ }
+ // Valid cases: additionalProperties: true or undefined
+ }
+
+ createdFromDefaults := map[string]bool{}
+
+ // Property types:
+ // - regular Property
+ for pName := range o.Properties {
+ pSchema := o.Properties[pName] // one instance per iteration
+ rName := pName
+ if o.Path != "" {
+ rName = o.Path + "." + pName
+ }
+
+ // Recursively validates each property against its schema
+ if v, ok := val[pName]; ok {
+ r := NewSchemaValidator(&pSchema, o.Root, rName, o.KnownFormats, o.Options.Options()...).Validate(v)
+ res.mergeForField(data.(map[string]interface{}), pName, r)
+ } else if pSchema.Default != nil {
+ // If a default value is defined, creates the property from defaults
+ // NOTE: JSON schema does not enforce default values to be valid against schema. Swagger does.
+ createdFromDefaults[pName] = true
+ res.addPropertySchemata(data.(map[string]interface{}), pName, &pSchema)
+ }
+ }
+
+ // Check required properties
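+ // (a property created from its schema default counts as present and
+ // therefore does not trigger a Required error)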
+ if len(o.Required) > 0 {
+ for _, k := range o.Required {
+ if v, ok := val[k]; !ok && !createdFromDefaults[k] {
+ res.AddErrors(errors.Required(o.Path+"."+k, o.In, v))
+ continue
+ }
+ }
+ }
+
+ // Check patternProperties
+ // TODO: it looks like we have done that twice in many cases
+ for key, value := range val {
+ _, regularProperty := o.Properties[key]
+ matched, _ /*succeededOnce*/, patterns := o.validatePatternProperty(key, value, res)
+ if !regularProperty && (matched /*|| succeededOnce*/) {
+ for _, pName := range patterns {
+ if v, ok := o.PatternProperties[pName]; ok {
+ r := NewSchemaValidator(&v, o.Root, o.Path+"."+key, o.KnownFormats, o.Options.Options()...).Validate(value)
+ res.mergeForField(data.(map[string]interface{}), key, r)
+ }
+ }
+ }
+ }
+ return res
+}
+
+// TODO: succeededOnce is not used anywhere
+func (o *objectValidator) validatePatternProperty(key string, value interface{}, result *Result) (bool, bool, []string) {
+ matched := false
+ succeededOnce := false
+ var patterns []string
+
+ for k, schema := range o.PatternProperties {
+ sch := schema
+ if match, _ := regexp.MatchString(k, key); match {
+ patterns = append(patterns, k)
+ matched = true
+ validator := NewSchemaValidator(&sch, o.Root, o.Path+"."+key, o.KnownFormats, o.Options.Options()...)
+
+ res := validator.Validate(value)
+ result.Merge(res)
+ }
+ }
+
+ // BUG(fredbi): can't get to here. Should remove dead code (commented out).
+
+ // if succeededOnce {
+ // result.Inc()
+ // }
+
+ return matched, succeededOnce, patterns
+}
diff --git a/vendor/github.com/go-openapi/validate/options.go b/vendor/github.com/go-openapi/validate/options.go
new file mode 100644
index 000000000..deeec2f2e
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/options.go
@@ -0,0 +1,43 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import "sync"
+
+// Opts specifies validation options for a SpecValidator.
+//
+// NOTE: other options might be needed, for example a go-swagger specific mode.
+type Opts struct {
+ ContinueOnErrors bool // true: continue reporting errors, even if spec is invalid
+}
+
+var (
+ defaultOpts = Opts{ContinueOnErrors: false} // default is to stop validation on errors
+ defaultOptsMutex = &sync.Mutex{}
+)
+
+// SetContinueOnErrors sets global default behavior regarding spec validation errors reporting.
+//
+// For extended error reporting, you most likely want to set it to true.
+// For faster validation, it's better to give up early when a spec is detected as invalid: set it to false (this is the default).
+//
+// Setting this mode does NOT affect the validation status.
+//
+// NOTE: this method affects global defaults. It is not suitable for a concurrent usage.
+func SetContinueOnErrors(c bool) {
+ defer defaultOptsMutex.Unlock()
+ defaultOptsMutex.Lock()
+ defaultOpts.ContinueOnErrors = c
+}
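+
+// Example (illustrative, doc being a previously loaded loads.Document):
+//
+//	SetContinueOnErrors(true)
+//	errs, _ := NewSpecValidator(doc.Schema(), strfmt.Default).Validate(doc)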
diff --git a/vendor/github.com/go-openapi/validate/result.go b/vendor/github.com/go-openapi/validate/result.go
new file mode 100644
index 000000000..8f5f935e5
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/result.go
@@ -0,0 +1,486 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/spec"
+)
+
+// Result represents a validation result set, composed of
+// errors and warnings.
+//
+// It is used to keep track of all detected errors and warnings during
+// the validation of a specification.
+//
+// Matchcount is used to determine
+// which errors are relevant in the case of AnyOf, OneOf
+// schema validation. Results from the validation branch
+// with most matches get eventually selected.
+//
+// TODO: keep path of key originating the error
+type Result struct {
+ Errors []error
+ Warnings []error
+ MatchCount int
+
+ // the object data
+ data interface{}
+
+ // Schemata for the root object
+ rootObjectSchemata schemata
+ // Schemata for object fields
+ fieldSchemata []fieldSchemata
+ // Schemata for slice items
+ itemSchemata []itemSchemata
+
+ cachedFieldSchemta map[FieldKey][]*spec.Schema
+ cachedItemSchemata map[ItemKey][]*spec.Schema
+}
+
+// FieldKey is a pair of an object and a field, usable as a key for a map.
+type FieldKey struct {
+ object reflect.Value // actually a map[string]interface{}, but the latter cannot be a key
+ field string
+}
+
+// ItemKey is a pair of a slice and an index, usable as a key for a map.
+type ItemKey struct {
+ slice reflect.Value // actually a []interface{}, but the latter cannot be a key
+ index int
+}
+
+// NewFieldKey returns a pair of an object and field usable as a key of a map.
+func NewFieldKey(obj map[string]interface{}, field string) FieldKey {
+ return FieldKey{object: reflect.ValueOf(obj), field: field}
+}
+
+// Object returns the underlying object of this key.
+func (fk *FieldKey) Object() map[string]interface{} {
+ return fk.object.Interface().(map[string]interface{})
+}
+
+// Field returns the underlying field of this key.
+func (fk *FieldKey) Field() string {
+ return fk.field
+}
+
+// NewItemKey returns a pair of a slice and index usable as a key of a map.
+func NewItemKey(slice interface{}, i int) ItemKey {
+ return ItemKey{slice: reflect.ValueOf(slice), index: i}
+}
+
+// Slice returns the underlying slice of this key.
+func (ik *ItemKey) Slice() []interface{} {
+ return ik.slice.Interface().([]interface{})
+}
+
+// Index returns the underlying index of this key.
+func (ik *ItemKey) Index() int {
+ return ik.index
+}
+
+type fieldSchemata struct {
+ obj map[string]interface{}
+ field string
+ schemata schemata
+}
+
+type itemSchemata struct {
+ slice reflect.Value
+ index int
+ schemata schemata
+}
+
+// Merge merges this result with the other one(s), preserving match counts etc.
+func (r *Result) Merge(others ...*Result) *Result {
+ for _, other := range others {
+ if other == nil {
+ continue
+ }
+ r.mergeWithoutRootSchemata(other)
+ r.rootObjectSchemata.Append(other.rootObjectSchemata)
+ }
+ return r
+}
+
+// Data returns the original data object used for validation. Mutating this renders
+// the result invalid.
+func (r *Result) Data() interface{} {
+ return r.data
+}
+
+// RootObjectSchemata returns the schemata which apply to the root object.
+func (r *Result) RootObjectSchemata() []*spec.Schema {
+ return r.rootObjectSchemata.Slice()
+}
+
+// FieldSchemata returns the schemata which apply to fields in objects.
+// nolint: dupl
+func (r *Result) FieldSchemata() map[FieldKey][]*spec.Schema {
+ if r.cachedFieldSchemata != nil {
+ return r.cachedFieldSchemata
+ }
+
+ ret := make(map[FieldKey][]*spec.Schema, len(r.fieldSchemata))
+ for _, fs := range r.fieldSchemata {
+ key := NewFieldKey(fs.obj, fs.field)
+ if fs.schemata.one != nil {
+ ret[key] = append(ret[key], fs.schemata.one)
+ } else if len(fs.schemata.multiple) > 0 {
+ ret[key] = append(ret[key], fs.schemata.multiple...)
+ }
+ }
+ r.cachedFieldSchemata = ret
+ return ret
+}
+
+// ItemSchemata returns the schemata which apply to items in slices.
+// nolint: dupl
+func (r *Result) ItemSchemata() map[ItemKey][]*spec.Schema {
+ if r.cachedItemSchemata != nil {
+ return r.cachedItemSchemata
+ }
+
+ ret := make(map[ItemKey][]*spec.Schema, len(r.itemSchemata))
+ for _, ss := range r.itemSchemata {
+ key := NewItemKey(ss.slice, ss.index)
+ if ss.schemata.one != nil {
+ ret[key] = append(ret[key], ss.schemata.one)
+ } else if len(ss.schemata.multiple) > 0 {
+ ret[key] = append(ret[key], ss.schemata.multiple...)
+ }
+ }
+ r.cachedItemSchemata = ret
+ return ret
+}
+
+func (r *Result) resetCaches() {
+ r.cachedFieldSchemata = nil
+ r.cachedItemSchemata = nil
+}
+
+// mergeForField merges other into r, assigning other's root schemata to the given Object and field name.
+// nolint: unparam
+func (r *Result) mergeForField(obj map[string]interface{}, field string, other *Result) *Result {
+ if other == nil {
+ return r
+ }
+ r.mergeWithoutRootSchemata(other)
+
+ if other.rootObjectSchemata.Len() > 0 {
+ if r.fieldSchemata == nil {
+ r.fieldSchemata = make([]fieldSchemata, 0, len(obj))
+ }
+ r.fieldSchemata = append(r.fieldSchemata, fieldSchemata{
+ obj: obj,
+ field: field,
+ schemata: other.rootObjectSchemata,
+ })
+ }
+
+ return r
+}
+
+// mergeForSlice merges other into r, assigning other's root schemata to the given slice and index.
+// nolint: unparam
+func (r *Result) mergeForSlice(slice reflect.Value, i int, other *Result) *Result {
+ if other == nil {
+ return r
+ }
+ r.mergeWithoutRootSchemata(other)
+
+ if other.rootObjectSchemata.Len() > 0 {
+ if r.itemSchemata == nil {
+ r.itemSchemata = make([]itemSchemata, 0, slice.Len())
+ }
+ r.itemSchemata = append(r.itemSchemata, itemSchemata{
+ slice: slice,
+ index: i,
+ schemata: other.rootObjectSchemata,
+ })
+ }
+
+ return r
+}
+
+// addRootObjectSchemata adds the given schemata for the root object of the result.
+// The schemata slice may be reused, i.e. do not modify it after it has been added to a result.
+func (r *Result) addRootObjectSchemata(s *spec.Schema) {
+ r.rootObjectSchemata.Append(schemata{one: s})
+}
+
+// addPropertySchemata adds the given schemata for the object and field.
+// The schemata slice may be reused, i.e. do not modify it after it has been added to a result.
+func (r *Result) addPropertySchemata(obj map[string]interface{}, fld string, schema *spec.Schema) {
+ if r.fieldSchemata == nil {
+ r.fieldSchemata = make([]fieldSchemata, 0, len(obj))
+ }
+ r.fieldSchemata = append(r.fieldSchemata, fieldSchemata{obj: obj, field: fld, schemata: schemata{one: schema}})
+}
+
+/*
+// addSliceSchemata adds the given schemata for the slice and index.
+// The slice schemata might be reused. I.e. do not modify it after being added to a result.
+func (r *Result) addSliceSchemata(slice reflect.Value, i int, schema *spec.Schema) {
+ if r.itemSchemata == nil {
+ r.itemSchemata = make([]itemSchemata, 0, slice.Len())
+ }
+ r.itemSchemata = append(r.itemSchemata, itemSchemata{slice: slice, index: i, schemata: schemata{one: schema}})
+}
+*/
+
+// mergeWithoutRootSchemata merges other into r, ignoring the rootObject schemata.
+func (r *Result) mergeWithoutRootSchemata(other *Result) {
+ r.resetCaches()
+ r.AddErrors(other.Errors...)
+ r.AddWarnings(other.Warnings...)
+ r.MatchCount += other.MatchCount
+
+ if other.fieldSchemata != nil {
+ if r.fieldSchemata == nil {
+ r.fieldSchemata = other.fieldSchemata
+ } else {
+ r.fieldSchemata = append(r.fieldSchemata, other.fieldSchemata...)
+ }
+ }
+
+ if other.itemSchemata != nil {
+ if r.itemSchemata == nil {
+ r.itemSchemata = other.itemSchemata
+ } else {
+ r.itemSchemata = append(r.itemSchemata, other.itemSchemata...)
+ }
+ }
+}
+
+// MergeAsErrors merges this result with the other one(s), preserving match counts etc.
+//
+// Warnings from input are merged as Errors in the returned merged Result.
+func (r *Result) MergeAsErrors(others ...*Result) *Result {
+ for _, other := range others {
+ if other != nil {
+ r.resetCaches()
+ r.AddErrors(other.Errors...)
+ r.AddErrors(other.Warnings...)
+ r.MatchCount += other.MatchCount
+ }
+ }
+ return r
+}
+
+// MergeAsWarnings merges this result with the other one(s), preserving match counts etc.
+//
+// Errors from input are merged as Warnings in the returned merged Result.
+func (r *Result) MergeAsWarnings(others ...*Result) *Result {
+ for _, other := range others {
+ if other != nil {
+ r.resetCaches()
+ r.AddWarnings(other.Errors...)
+ r.AddWarnings(other.Warnings...)
+ r.MatchCount += other.MatchCount
+ }
+ }
+ return r
+}
+
+// AddErrors adds errors to this validation result (if not already reported).
+//
+// Since the same check may be passed several times while exploring the
+// spec structure (via $ref, ...) reported messages are kept
+// unique.
+func (r *Result) AddErrors(errors ...error) {
+ for _, e := range errors {
+ found := false
+ if e != nil {
+ for _, isReported := range r.Errors {
+ if e.Error() == isReported.Error() {
+ found = true
+ break
+ }
+ }
+ if !found {
+ r.Errors = append(r.Errors, e)
+ }
+ }
+ }
+}
+
+// AddWarnings adds warnings to this validation result (if not already reported).
+func (r *Result) AddWarnings(warnings ...error) {
+ for _, e := range warnings {
+ found := false
+ if e != nil {
+ for _, isReported := range r.Warnings {
+ if e.Error() == isReported.Error() {
+ found = true
+ break
+ }
+ }
+ if !found {
+ r.Warnings = append(r.Warnings, e)
+ }
+ }
+ }
+}
+
+func (r *Result) keepRelevantErrors() *Result {
+ // TODO: this one is going to disappear...
+ // keepRelevantErrors strips a result from standard errors and keeps
+ // the ones which are supposedly more accurate.
+ //
+ // The original result remains unaffected (creates a new instance of Result).
+ // This method is used to work around the "matchCount" filter which would otherwise
+ // strip our result from some accurate error reporting from lower level validators.
+ //
+ // NOTE: this implementation with a placeholder (IMPORTANT!) is neither clean nor
+ // very efficient. On the other hand, relying on go-openapi/errors to manipulate
+ // codes would require to change a lot here. So, for the moment, let's go with
+ // placeholders.
+ strippedErrors := []error{}
+ for _, e := range r.Errors {
+ if strings.HasPrefix(e.Error(), "IMPORTANT!") {
+ strippedErrors = append(strippedErrors, fmt.Errorf("%s", strings.TrimPrefix(e.Error(), "IMPORTANT!")))
+ }
+ }
+ strippedWarnings := []error{}
+ for _, e := range r.Warnings {
+ if strings.HasPrefix(e.Error(), "IMPORTANT!") {
+ strippedWarnings = append(strippedWarnings, fmt.Errorf("%s", strings.TrimPrefix(e.Error(), "IMPORTANT!")))
+ }
+ }
+ strippedResult := new(Result)
+ strippedResult.Errors = strippedErrors
+ strippedResult.Warnings = strippedWarnings
+ return strippedResult
+}
+
+// IsValid returns true when this result is valid.
+//
+// Returns true on a nil *Result.
+func (r *Result) IsValid() bool {
+ if r == nil {
+ return true
+ }
+ return len(r.Errors) == 0
+}
+
+// HasErrors returns true when this result is invalid.
+//
+// Returns false on a nil *Result.
+func (r *Result) HasErrors() bool {
+ if r == nil {
+ return false
+ }
+ return !r.IsValid()
+}
+
+// HasWarnings returns true when this result contains warnings.
+//
+// Returns false on a nil *Result.
+func (r *Result) HasWarnings() bool {
+ if r == nil {
+ return false
+ }
+ return len(r.Warnings) > 0
+}
+
+// HasErrorsOrWarnings returns true when this result contains
+// either errors or warnings.
+//
+// Returns false on a nil *Result.
+func (r *Result) HasErrorsOrWarnings() bool {
+ if r == nil {
+ return false
+ }
+ return len(r.Errors) > 0 || len(r.Warnings) > 0
+}
+
+// Inc increments the match count
+func (r *Result) Inc() {
+ r.MatchCount++
+}
+
+// AsError renders this result as an error interface
+//
+// TODO: reporting / pretty print with path ordered and indented
+func (r *Result) AsError() error {
+ if r.IsValid() {
+ return nil
+ }
+ return errors.CompositeValidationError(r.Errors...)
+}
+
+// schemata is an arbitrary number of schemata. It distinguishes between zero,
+// one and many schemata to avoid slice allocations.
+type schemata struct {
+ // one is set if there is exactly one schema. In that case multiple must be nil.
+ one *spec.Schema
+ // multiple is an arbitrary number of schemas. If it is set, one must be nil.
+ multiple []*spec.Schema
+}
+
+func (s *schemata) Len() int {
+ if s.one != nil {
+ return 1
+ }
+ return len(s.multiple)
+}
+
+func (s *schemata) Slice() []*spec.Schema {
+ if s == nil {
+ return nil
+ }
+ if s.one != nil {
+ return []*spec.Schema{s.one}
+ }
+ return s.multiple
+}
+
+// Append appends the schemata in other to s. It mutates s in-place.
+func (s *schemata) Append(other schemata) {
+ if other.one == nil && len(other.multiple) == 0 {
+ return
+ }
+ if s.one == nil && len(s.multiple) == 0 {
+ *s = other
+ return
+ }
+
+ if s.one != nil {
+ if other.one != nil {
+ s.multiple = []*spec.Schema{s.one, other.one}
+ } else {
+ t := make([]*spec.Schema, 0, 1+len(other.multiple))
+ s.multiple = append(append(t, s.one), other.multiple...)
+ }
+ s.one = nil
+ } else {
+ if other.one != nil {
+ s.multiple = append(s.multiple, other.one)
+ } else {
+ if cap(s.multiple) >= len(s.multiple)+len(other.multiple) {
+ s.multiple = append(s.multiple, other.multiple...)
+ } else {
+ t := make([]*spec.Schema, 0, len(s.multiple)+len(other.multiple))
+ s.multiple = append(append(t, s.multiple...), other.multiple...)
+ }
+ }
+ }
+}
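+
+// For illustration of the zero/one/many optimization: appending
+// schemata{one: b} to schemata{one: a} yields schemata{multiple: [a, b]},
+// while the common single-schema case never allocates a slice at all.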
diff --git a/vendor/github.com/go-openapi/validate/rexp.go b/vendor/github.com/go-openapi/validate/rexp.go
new file mode 100644
index 000000000..76de03e1f
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/rexp.go
@@ -0,0 +1,71 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import (
+ re "regexp"
+ "sync"
+ "sync/atomic"
+)
+
+// Cache for compiled regular expressions
+var (
+ cacheMutex = &sync.Mutex{}
+ reDict = atomic.Value{} // map[string]*re.Regexp
+)
+
+func compileRegexp(pattern string) (*re.Regexp, error) {
+ if cache, ok := reDict.Load().(map[string]*re.Regexp); ok {
+ if r := cache[pattern]; r != nil {
+ return r, nil
+ }
+ }
+
+ r, err := re.Compile(pattern)
+ if err != nil {
+ return nil, err
+ }
+ cacheRegexp(r)
+ return r, nil
+}
+
+func mustCompileRegexp(pattern string) *re.Regexp {
+ if cache, ok := reDict.Load().(map[string]*re.Regexp); ok {
+ if r := cache[pattern]; r != nil {
+ return r
+ }
+ }
+
+ r := re.MustCompile(pattern)
+ cacheRegexp(r)
+ return r
+}
+
+func cacheRegexp(r *re.Regexp) {
+ cacheMutex.Lock()
+ defer cacheMutex.Unlock()
+
+ if cache, ok := reDict.Load().(map[string]*re.Regexp); !ok || cache[r.String()] == nil {
+ newCache := map[string]*re.Regexp{
+ r.String(): r,
+ }
+
+ for k, v := range cache {
+ newCache[k] = v
+ }
+
+ reDict.Store(newCache)
+ }
+}
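+
+// The cache above is copy-on-write: readers load the map lock-free from the
+// atomic.Value, while writers take cacheMutex, rebuild the map with the new
+// entry, then atomically store it. A minimal sketch of the read path
+// (hypothetical caller code):
+//
+//	if cache, ok := reDict.Load().(map[string]*re.Regexp); ok {
+//		r := cache["^[a-z]+$"] // nil if this pattern was never compiled
+//		_ = r
+//	}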
diff --git a/vendor/github.com/go-openapi/validate/schema.go b/vendor/github.com/go-openapi/validate/schema.go
new file mode 100644
index 000000000..b817eb0ef
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/schema.go
@@ -0,0 +1,260 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import (
+ "encoding/json"
+ "reflect"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+)
+
+var (
+ specSchemaType = reflect.TypeOf(&spec.Schema{})
+ specParameterType = reflect.TypeOf(&spec.Parameter{})
+ specHeaderType = reflect.TypeOf(&spec.Header{})
+ // specItemsType = reflect.TypeOf(&spec.Items{})
+)
+
+// SchemaValidator validates data against a JSON schema
+type SchemaValidator struct {
+ Path string
+ in string
+ Schema *spec.Schema
+ validators []valueValidator
+ Root interface{}
+ KnownFormats strfmt.Registry
+ Options SchemaValidatorOptions
+}
+
+// AgainstSchema validates the specified data against the provided schema, using a registry of supported formats.
+//
+// When no pre-parsed *spec.Schema structure is provided, it uses a JSON schema as default. See example.
+func AgainstSchema(schema *spec.Schema, data interface{}, formats strfmt.Registry, options ...Option) error {
+ res := NewSchemaValidator(schema, nil, "", formats, options...).Validate(data)
+ if res.HasErrors() {
+ return errors.CompositeValidationError(res.Errors...)
+ }
+ return nil
+}
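+
+// A minimal usage sketch (assuming a schema unmarshalled from raw JSON;
+// hypothetical caller code):
+//
+//	var sch spec.Schema
+//	_ = json.Unmarshal([]byte(`{"type": "object", "required": ["name"]}`), &sch)
+//	err := AgainstSchema(&sch, map[string]interface{}{"name": "x"}, strfmt.Default)
+//	// err is nil when the data conforms to the schema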
+
+// NewSchemaValidator creates a new schema validator.
+//
+// Panics if the provided schema is invalid.
+func NewSchemaValidator(schema *spec.Schema, rootSchema interface{}, root string, formats strfmt.Registry, options ...Option) *SchemaValidator {
+ if schema == nil {
+ return nil
+ }
+
+ if rootSchema == nil {
+ rootSchema = schema
+ }
+
+ if schema.ID != "" || schema.Ref.String() != "" || schema.Ref.IsRoot() {
+ err := spec.ExpandSchema(schema, rootSchema, nil)
+ if err != nil {
+ msg := invalidSchemaProvidedMsg(err).Error()
+ panic(msg)
+ }
+ }
+ s := SchemaValidator{
+ Path: root,
+ in: "body",
+ Schema: schema,
+ Root: rootSchema,
+ KnownFormats: formats,
+ Options: SchemaValidatorOptions{}}
+ for _, o := range options {
+ o(&s.Options)
+ }
+ s.validators = []valueValidator{
+ s.typeValidator(),
+ s.schemaPropsValidator(),
+ s.stringValidator(),
+ s.formatValidator(),
+ s.numberValidator(),
+ s.sliceValidator(),
+ s.commonValidator(),
+ s.objectValidator(),
+ }
+ return &s
+}
+
+// SetPath sets the path for this schema validator
+func (s *SchemaValidator) SetPath(path string) {
+ s.Path = path
+}
+
+// Applies returns true when this schema validator applies
+func (s *SchemaValidator) Applies(source interface{}, kind reflect.Kind) bool {
+ _, ok := source.(*spec.Schema)
+ return ok
+}
+
+// Validate validates the data against the schema
+func (s *SchemaValidator) Validate(data interface{}) *Result {
+ result := &Result{data: data}
+ if s == nil {
+ return result
+ }
+ if s.Schema != nil {
+ result.addRootObjectSchemata(s.Schema)
+ }
+
+ if data == nil {
+ result.Merge(s.validators[0].Validate(data)) // type validator
+ result.Merge(s.validators[6].Validate(data)) // common validator
+ return result
+ }
+
+ tpe := reflect.TypeOf(data)
+ kind := tpe.Kind()
+ for kind == reflect.Ptr {
+ tpe = tpe.Elem()
+ kind = tpe.Kind()
+ }
+ d := data
+
+ if kind == reflect.Struct {
+ // NOTE: since reflect retrieves the true nature of types,
+ // all strfmt types passed here (e.g. strfmt.Datetime, etc.)
+ // are converted to strings, and structs are systematically
+ // converted to map[string]interface{}.
+ d = swag.ToDynamicJSON(data)
+ }
+
+ // TODO: this part should be handed over to type validator
+ // Handle special case of json.Number data (number marshalled as string)
+ isnumber := s.Schema.Type.Contains(numberType) || s.Schema.Type.Contains(integerType)
+ if num, ok := data.(json.Number); ok && isnumber {
+ if s.Schema.Type.Contains(integerType) { // avoid lossy conversion
+ in, erri := num.Int64()
+ if erri != nil {
+ result.AddErrors(invalidTypeConversionMsg(s.Path, erri))
+ result.Inc()
+ return result
+ }
+ d = in
+ } else {
+ nf, errf := num.Float64()
+ if errf != nil {
+ result.AddErrors(invalidTypeConversionMsg(s.Path, errf))
+ result.Inc()
+ return result
+ }
+ d = nf
+ }
+
+ tpe = reflect.TypeOf(d)
+ kind = tpe.Kind()
+ }
+
+ for _, v := range s.validators {
+ if !v.Applies(s.Schema, kind) {
+ debugLog("%T does not apply for %v", v, kind)
+ continue
+ }
+
+ err := v.Validate(d)
+ result.Merge(err)
+ result.Inc()
+ }
+ result.Inc()
+
+ return result
+}
+
+func (s *SchemaValidator) typeValidator() valueValidator {
+ return &typeValidator{Type: s.Schema.Type, Nullable: s.Schema.Nullable, Format: s.Schema.Format, In: s.in, Path: s.Path}
+}
+
+func (s *SchemaValidator) commonValidator() valueValidator {
+ return &basicCommonValidator{
+ Path: s.Path,
+ In: s.in,
+ Enum: s.Schema.Enum,
+ }
+}
+
+func (s *SchemaValidator) sliceValidator() valueValidator {
+ return &schemaSliceValidator{
+ Path: s.Path,
+ In: s.in,
+ MaxItems: s.Schema.MaxItems,
+ MinItems: s.Schema.MinItems,
+ UniqueItems: s.Schema.UniqueItems,
+ AdditionalItems: s.Schema.AdditionalItems,
+ Items: s.Schema.Items,
+ Root: s.Root,
+ KnownFormats: s.KnownFormats,
+ Options: s.Options,
+ }
+}
+
+func (s *SchemaValidator) numberValidator() valueValidator {
+ return &numberValidator{
+ Path: s.Path,
+ In: s.in,
+ Default: s.Schema.Default,
+ MultipleOf: s.Schema.MultipleOf,
+ Maximum: s.Schema.Maximum,
+ ExclusiveMaximum: s.Schema.ExclusiveMaximum,
+ Minimum: s.Schema.Minimum,
+ ExclusiveMinimum: s.Schema.ExclusiveMinimum,
+ }
+}
+
+func (s *SchemaValidator) stringValidator() valueValidator {
+ return &stringValidator{
+ Path: s.Path,
+ In: s.in,
+ MaxLength: s.Schema.MaxLength,
+ MinLength: s.Schema.MinLength,
+ Pattern: s.Schema.Pattern,
+ }
+}
+
+func (s *SchemaValidator) formatValidator() valueValidator {
+ return &formatValidator{
+ Path: s.Path,
+ In: s.in,
+ Format: s.Schema.Format,
+ KnownFormats: s.KnownFormats,
+ }
+}
+
+func (s *SchemaValidator) schemaPropsValidator() valueValidator {
+ sch := s.Schema
+ return newSchemaPropsValidator(s.Path, s.in, sch.AllOf, sch.OneOf, sch.AnyOf, sch.Not, sch.Dependencies, s.Root, s.KnownFormats, s.Options.Options()...)
+}
+
+func (s *SchemaValidator) objectValidator() valueValidator {
+ return &objectValidator{
+ Path: s.Path,
+ In: s.in,
+ MaxProperties: s.Schema.MaxProperties,
+ MinProperties: s.Schema.MinProperties,
+ Required: s.Schema.Required,
+ Properties: s.Schema.Properties,
+ AdditionalProperties: s.Schema.AdditionalProperties,
+ PatternProperties: s.Schema.PatternProperties,
+ Root: s.Root,
+ KnownFormats: s.KnownFormats,
+ Options: s.Options,
+ }
+}
diff --git a/vendor/github.com/go-openapi/validate/schema_messages.go b/vendor/github.com/go-openapi/validate/schema_messages.go
new file mode 100644
index 000000000..786e2e355
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/schema_messages.go
@@ -0,0 +1,78 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import (
+ "github.com/go-openapi/errors"
+)
+
+// Error messages related to schema validation and returned as results.
+const (
+ // ArrayDoesNotAllowAdditionalItemsError when an additionalItems construct is not verified by the array values provided.
+ //
+ // TODO: should move to package go-openapi/errors
+ ArrayDoesNotAllowAdditionalItemsError = "array doesn't allow for additional items"
+
+ // HasDependencyError indicates that a dependencies construct was not verified
+ HasDependencyError = "%q has a dependency on %s"
+
+ // InvalidSchemaProvidedError indicates that the schema provided to validate a value cannot be properly compiled
+ InvalidSchemaProvidedError = "Invalid schema provided to SchemaValidator: %v"
+
+ // InvalidTypeConversionError indicates that a numerical conversion for the given type could not be carried on
+ InvalidTypeConversionError = "invalid type conversion in %s: %v "
+
+ // MustValidateAtLeastOneSchemaError indicates that in an AnyOf construct, none of the specified schema constraints were verified
+ MustValidateAtLeastOneSchemaError = "%q must validate at least one schema (anyOf)"
+
+ // MustValidateOnlyOneSchemaError indicates that in a OneOf construct, either none of the schema constraints specified were verified, or several were
+ MustValidateOnlyOneSchemaError = "%q must validate one and only one schema (oneOf). %s"
+
+ // MustValidateAllSchemasError indicates that in an AllOf construct, at least one of the specified schema constraints was not verified
+ //
+ // TODO: punctuation in message
+ MustValidateAllSchemasError = "%q must validate all the schemas (allOf)%s"
+
+ // MustNotValidateSchemaError indicates that in a Not construct, the schema constraint specified was verified
+ MustNotValidateSchemaError = "%q must not validate the schema (not)"
+)
+
+// Warning messages related to schema validation and returned as results
+const ()
+
+func invalidSchemaProvidedMsg(err error) errors.Error {
+ return errors.New(InternalErrorCode, InvalidSchemaProvidedError, err)
+}
+func invalidTypeConversionMsg(path string, err error) errors.Error {
+ return errors.New(errors.CompositeErrorCode, InvalidTypeConversionError, path, err)
+}
+func mustValidateOnlyOneSchemaMsg(path, additionalMsg string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, MustValidateOnlyOneSchemaError, path, additionalMsg)
+}
+func mustValidateAtLeastOneSchemaMsg(path string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, MustValidateAtLeastOneSchemaError, path)
+}
+func mustValidateAllSchemasMsg(path, additionalMsg string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, MustValidateAllSchemasError, path, additionalMsg)
+}
+func mustNotValidateSchemaMsg(path string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, MustNotValidateSchemaError, path)
+}
+func hasADependencyMsg(path, depkey string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, HasDependencyError, path, depkey)
+}
+func arrayDoesNotAllowAdditionalItemsMsg() errors.Error {
+ return errors.New(errors.CompositeErrorCode, ArrayDoesNotAllowAdditionalItemsError)
+}
diff --git a/vendor/github.com/go-openapi/validate/schema_option.go b/vendor/github.com/go-openapi/validate/schema_option.go
new file mode 100644
index 000000000..4b4879de8
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/schema_option.go
@@ -0,0 +1,54 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+// SchemaValidatorOptions defines optional rules for schema validation
+type SchemaValidatorOptions struct {
+ EnableObjectArrayTypeCheck bool
+ EnableArrayMustHaveItemsCheck bool
+}
+
+// Option sets optional rules for schema validation
+type Option func(*SchemaValidatorOptions)
+
+// EnableObjectArrayTypeCheck activates the swagger rule: a schema that defines items must be of type: array
+func EnableObjectArrayTypeCheck(enable bool) Option {
+ return func(svo *SchemaValidatorOptions) {
+ svo.EnableObjectArrayTypeCheck = enable
+ }
+}
+
+// EnableArrayMustHaveItemsCheck activates the swagger rule: an array must have items defined
+func EnableArrayMustHaveItemsCheck(enable bool) Option {
+ return func(svo *SchemaValidatorOptions) {
+ svo.EnableArrayMustHaveItemsCheck = enable
+ }
+}
+
+// SwaggerSchema activates swagger schema validation rules
+func SwaggerSchema(enable bool) Option {
+ return func(svo *SchemaValidatorOptions) {
+ svo.EnableObjectArrayTypeCheck = enable
+ svo.EnableArrayMustHaveItemsCheck = enable
+ }
+}
+
+// Options returns current options
+func (svo SchemaValidatorOptions) Options() []Option {
+ return []Option{
+ EnableObjectArrayTypeCheck(svo.EnableObjectArrayTypeCheck),
+ EnableArrayMustHaveItemsCheck(svo.EnableArrayMustHaveItemsCheck),
+ }
+}
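+
+// For example, options are threaded through the validator entry points
+// (minimal sketch; schema and data are assumed to exist in caller code):
+//
+//	err := AgainstSchema(schema, data, strfmt.Default, SwaggerSchema(true))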
diff --git a/vendor/github.com/go-openapi/validate/schema_props.go b/vendor/github.com/go-openapi/validate/schema_props.go
new file mode 100644
index 000000000..9bac3d29f
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/schema_props.go
@@ -0,0 +1,240 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import (
+ "fmt"
+ "reflect"
+
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+)
+
+type schemaPropsValidator struct {
+ Path string
+ In string
+ AllOf []spec.Schema
+ OneOf []spec.Schema
+ AnyOf []spec.Schema
+ Not *spec.Schema
+ Dependencies spec.Dependencies
+ anyOfValidators []SchemaValidator
+ allOfValidators []SchemaValidator
+ oneOfValidators []SchemaValidator
+ notValidator *SchemaValidator
+ Root interface{}
+ KnownFormats strfmt.Registry
+ Options SchemaValidatorOptions
+}
+
+func (s *schemaPropsValidator) SetPath(path string) {
+ s.Path = path
+}
+
+func newSchemaPropsValidator(path string, in string, allOf, oneOf, anyOf []spec.Schema, not *spec.Schema, deps spec.Dependencies, root interface{}, formats strfmt.Registry, options ...Option) *schemaPropsValidator {
+ anyValidators := make([]SchemaValidator, 0, len(anyOf))
+ for _, v := range anyOf {
+ v := v
+ anyValidators = append(anyValidators, *NewSchemaValidator(&v, root, path, formats, options...))
+ }
+ allValidators := make([]SchemaValidator, 0, len(allOf))
+ for _, v := range allOf {
+ v := v
+ allValidators = append(allValidators, *NewSchemaValidator(&v, root, path, formats, options...))
+ }
+ oneValidators := make([]SchemaValidator, 0, len(oneOf))
+ for _, v := range oneOf {
+ v := v
+ oneValidators = append(oneValidators, *NewSchemaValidator(&v, root, path, formats, options...))
+ }
+
+ var notValidator *SchemaValidator
+ if not != nil {
+ notValidator = NewSchemaValidator(not, root, path, formats, options...)
+ }
+
+ schOptions := &SchemaValidatorOptions{}
+ for _, o := range options {
+ o(schOptions)
+ }
+ return &schemaPropsValidator{
+ Path: path,
+ In: in,
+ AllOf: allOf,
+ OneOf: oneOf,
+ AnyOf: anyOf,
+ Not: not,
+ Dependencies: deps,
+ anyOfValidators: anyValidators,
+ allOfValidators: allValidators,
+ oneOfValidators: oneValidators,
+ notValidator: notValidator,
+ Root: root,
+ KnownFormats: formats,
+ Options: *schOptions,
+ }
+}
+
+func (s *schemaPropsValidator) Applies(source interface{}, kind reflect.Kind) bool {
+ r := reflect.TypeOf(source) == specSchemaType
+ debugLog("schema props validator for %q applies %t for %T (kind: %v)\n", s.Path, r, source, kind)
+ return r
+}
+
+func (s *schemaPropsValidator) Validate(data interface{}) *Result {
+ mainResult := new(Result)
+
+ // Intermediary error results
+
+ // IMPORTANT! messages from underlying validators
+ keepResultAnyOf := new(Result)
+ keepResultOneOf := new(Result)
+ keepResultAllOf := new(Result)
+
+ // Validates at least one in anyOf schemas
+ var firstSuccess *Result
+ if len(s.anyOfValidators) > 0 {
+ var bestFailures *Result
+ succeededOnce := false
+ for _, anyOfSchema := range s.anyOfValidators {
+ result := anyOfSchema.Validate(data)
+ // We keep inner IMPORTANT! errors no matter what MatchCount tells us
+ keepResultAnyOf.Merge(result.keepRelevantErrors())
+ if result.IsValid() {
+ bestFailures = nil
+ succeededOnce = true
+ if firstSuccess == nil {
+ firstSuccess = result
+ }
+ keepResultAnyOf = new(Result)
+ break
+ }
+ // MatchCount is used to select errors from the schema with most positive checks
+ if bestFailures == nil || result.MatchCount > bestFailures.MatchCount {
+ bestFailures = result
+ }
+ }
+
+ if !succeededOnce {
+ mainResult.AddErrors(mustValidateAtLeastOneSchemaMsg(s.Path))
+ }
+ if bestFailures != nil {
+ mainResult.Merge(bestFailures)
+ } else if firstSuccess != nil {
+ mainResult.Merge(firstSuccess)
+ }
+ }
+
+ // Validates exactly one in oneOf schemas
+ if len(s.oneOfValidators) > 0 {
+ var bestFailures *Result
+ var firstSuccess *Result
+ validated := 0
+
+ for _, oneOfSchema := range s.oneOfValidators {
+ result := oneOfSchema.Validate(data)
+ // We keep inner IMPORTANT! errors no matter what MatchCount tells us
+ keepResultOneOf.Merge(result.keepRelevantErrors())
+ if result.IsValid() {
+ validated++
+ bestFailures = nil
+ if firstSuccess == nil {
+ firstSuccess = result
+ }
+ keepResultOneOf = new(Result)
+ continue
+ }
+ // MatchCount is used to select errors from the schema with most positive checks
+ if validated == 0 && (bestFailures == nil || result.MatchCount > bestFailures.MatchCount) {
+ bestFailures = result
+ }
+ }
+
+ if validated != 1 {
+ var additionalMsg string
+ if validated == 0 {
+ additionalMsg = "Found none valid"
+ } else {
+ additionalMsg = fmt.Sprintf("Found %d valid alternatives", validated)
+ }
+
+ mainResult.AddErrors(mustValidateOnlyOneSchemaMsg(s.Path, additionalMsg))
+ if bestFailures != nil {
+ mainResult.Merge(bestFailures)
+ }
+ } else if firstSuccess != nil {
+ mainResult.Merge(firstSuccess)
+ }
+ }
+
+ // Validates all of allOf schemas
+ if len(s.allOfValidators) > 0 {
+ validated := 0
+
+ for _, allOfSchema := range s.allOfValidators {
+ result := allOfSchema.Validate(data)
+ // We keep inner IMPORTANT! errors no matter what MatchCount tells us
+ keepResultAllOf.Merge(result.keepRelevantErrors())
+ // keepResultAllOf.Merge(result)
+ if result.IsValid() {
+ validated++
+ }
+ mainResult.Merge(result)
+ }
+
+ if validated != len(s.allOfValidators) {
+ additionalMsg := ""
+ if validated == 0 {
+ additionalMsg = ". None validated"
+ }
+
+ mainResult.AddErrors(mustValidateAllSchemasMsg(s.Path, additionalMsg))
+ }
+ }
+
+ if s.notValidator != nil {
+ result := s.notValidator.Validate(data)
+ // We keep inner IMPORTANT! errors no matter what MatchCount tells us
+ if result.IsValid() {
+ mainResult.AddErrors(mustNotValidateSchemaMsg(s.Path))
+ }
+ }
+
+ if len(s.Dependencies) > 0 && reflect.TypeOf(data).Kind() == reflect.Map {
+ val := data.(map[string]interface{})
+ for key := range val {
+ if dep, ok := s.Dependencies[key]; ok {
+
+ if dep.Schema != nil {
+ mainResult.Merge(NewSchemaValidator(dep.Schema, s.Root, s.Path+"."+key, s.KnownFormats, s.Options.Options()...).Validate(data))
+ continue
+ }
+
+ if len(dep.Property) > 0 {
+ for _, depKey := range dep.Property {
+ if _, ok := val[depKey]; !ok {
+ mainResult.AddErrors(hasADependencyMsg(s.Path, depKey))
+ }
+ }
+ }
+ }
+ }
+ }
+
+ mainResult.Inc()
+ // In the end we retain best failures for schema validation
+ // plus, if any, composite errors which may explain special cases (tagged as IMPORTANT!).
+ return mainResult.Merge(keepResultAllOf, keepResultOneOf, keepResultAnyOf)
+}
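+
+// For illustration: with oneOf: [A, B] and data valid against both A and B,
+// validated ends up at 2, so the "must validate one and only one schema"
+// error is reported with the additional message "Found 2 valid alternatives".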
diff --git a/vendor/github.com/go-openapi/validate/slice_validator.go b/vendor/github.com/go-openapi/validate/slice_validator.go
new file mode 100644
index 000000000..aa429f518
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/slice_validator.go
@@ -0,0 +1,105 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import (
+ "fmt"
+ "reflect"
+
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+)
+
+type schemaSliceValidator struct {
+ Path string
+ In string
+ MaxItems *int64
+ MinItems *int64
+ UniqueItems bool
+ AdditionalItems *spec.SchemaOrBool
+ Items *spec.SchemaOrArray
+ Root interface{}
+ KnownFormats strfmt.Registry
+ Options SchemaValidatorOptions
+}
+
+func (s *schemaSliceValidator) SetPath(path string) {
+ s.Path = path
+}
+
+func (s *schemaSliceValidator) Applies(source interface{}, kind reflect.Kind) bool {
+ _, ok := source.(*spec.Schema)
+ r := ok && kind == reflect.Slice
+ return r
+}
+
+func (s *schemaSliceValidator) Validate(data interface{}) *Result {
+ result := new(Result)
+ if data == nil {
+ return result
+ }
+ val := reflect.ValueOf(data)
+ size := val.Len()
+
+ if s.Items != nil && s.Items.Schema != nil {
+ validator := NewSchemaValidator(s.Items.Schema, s.Root, s.Path, s.KnownFormats, s.Options.Options()...)
+ for i := 0; i < size; i++ {
+ validator.SetPath(fmt.Sprintf("%s.%d", s.Path, i))
+ value := val.Index(i)
+ result.mergeForSlice(val, i, validator.Validate(value.Interface()))
+ }
+ }
+
+ itemsSize := 0
+ if s.Items != nil && len(s.Items.Schemas) > 0 {
+ itemsSize = len(s.Items.Schemas)
+ for i := 0; i < itemsSize; i++ {
+ validator := NewSchemaValidator(&s.Items.Schemas[i], s.Root, fmt.Sprintf("%s.%d", s.Path, i), s.KnownFormats, s.Options.Options()...)
+ if val.Len() <= i {
+ break
+ }
+ result.mergeForSlice(val, i, validator.Validate(val.Index(i).Interface()))
+ }
+ }
+ if s.AdditionalItems != nil && itemsSize < size {
+ if s.Items != nil && len(s.Items.Schemas) > 0 && !s.AdditionalItems.Allows {
+ result.AddErrors(arrayDoesNotAllowAdditionalItemsMsg())
+ }
+ if s.AdditionalItems.Schema != nil {
+ for i := itemsSize; i < size; i++ { // validate each remaining (additional) item against the additionalItems schema
+ validator := NewSchemaValidator(s.AdditionalItems.Schema, s.Root, fmt.Sprintf("%s.%d", s.Path, i), s.KnownFormats, s.Options.Options()...)
+ result.mergeForSlice(val, i, validator.Validate(val.Index(i).Interface()))
+ }
+ }
+ }
+
+ if s.MinItems != nil {
+ if err := MinItems(s.Path, s.In, int64(size), *s.MinItems); err != nil {
+ result.AddErrors(err)
+ }
+ }
+ if s.MaxItems != nil {
+ if err := MaxItems(s.Path, s.In, int64(size), *s.MaxItems); err != nil {
+ result.AddErrors(err)
+ }
+ }
+ if s.UniqueItems {
+ if err := UniqueItems(s.Path, s.In, val.Interface()); err != nil {
+ result.AddErrors(err)
+ }
+ }
+ result.Inc()
+ return result
+}
diff --git a/vendor/github.com/go-openapi/validate/spec.go b/vendor/github.com/go-openapi/validate/spec.go
new file mode 100644
index 000000000..dff01f00b
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/spec.go
@@ -0,0 +1,804 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import (
+ "encoding/json"
+ "fmt"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+)
+
+// Spec validates an OpenAPI 2.0 specification document.
+//
+// Returns an error flattening in a single standard error, all validation messages.
+//
+// - TODO: $ref should not have siblings
+// - TODO: make sure documentation reflects all checks and warnings
+// - TODO: check on discriminators
+// - TODO: explicit message on unsupported keywords (better than "forbidden property"...)
+// - TODO: full list of unresolved refs
+// - TODO: validate numeric constraints (issue#581): this should be handled like defaults and examples
+// - TODO: option to determine if we validate for go-swagger or in a more general context
+// - TODO: check on required properties to support anyOf, allOf, oneOf
+//
+// NOTE: SecurityScopes are maps: no need to check uniqueness
+//
+func Spec(doc *loads.Document, formats strfmt.Registry) error {
+ errs, _ /*warns*/ := NewSpecValidator(doc.Schema(), formats).Validate(doc)
+ if errs.HasErrors() {
+ return errors.CompositeValidationError(errs.Errors...)
+ }
+ return nil
+}
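+
+// A minimal usage sketch (assuming a swagger document on disk; hypothetical
+// caller code):
+//
+//	doc, err := loads.Spec("./swagger.json")
+//	if err == nil {
+//		err = Spec(doc, strfmt.Default)
+//	}
+//	// err is nil for a valid spec, else a flattened composite error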
+
+// SpecValidator validates a swagger 2.0 spec
+type SpecValidator struct {
+ schema *spec.Schema // swagger 2.0 schema
+ spec *loads.Document
+ analyzer *analysis.Spec
+ expanded *loads.Document
+ KnownFormats strfmt.Registry
+ Options Opts // validation options
+}
+
+// NewSpecValidator creates a new swagger spec validator instance
+func NewSpecValidator(schema *spec.Schema, formats strfmt.Registry) *SpecValidator {
+ return &SpecValidator{
+ schema: schema,
+ KnownFormats: formats,
+ Options: defaultOpts,
+ }
+}
+
+// Validate validates the swagger spec
+func (s *SpecValidator) Validate(data interface{}) (*Result, *Result) {
+ var sd *loads.Document
+ errs, warnings := new(Result), new(Result)
+
+ if v, ok := data.(*loads.Document); ok {
+ sd = v
+ }
+ if sd == nil {
+ errs.AddErrors(invalidDocumentMsg())
+ return errs, warnings // no point in continuing
+ }
+ s.spec = sd
+ s.analyzer = analysis.New(sd.Spec())
+
+ // Swagger schema validator
+ schv := NewSchemaValidator(s.schema, nil, "", s.KnownFormats, SwaggerSchema(true))
+ var obj interface{}
+
+ // Raw spec unmarshalling errors
+ if err := json.Unmarshal(sd.Raw(), &obj); err != nil {
+ // NOTE: under normal conditions, the *loads.Document has already been unmarshalled
+ // So this one is just a paranoid check on the behavior of the spec package
+ panic(InvalidDocumentError)
+ }
+
+ defer func() {
+ // errs holds all errors and warnings,
+ // warnings only warnings
+ errs.MergeAsWarnings(warnings)
+ warnings.AddErrors(errs.Warnings...)
+ }()
+
+ errs.Merge(schv.Validate(obj)) // error -
+ // There may be a point in continuing to try and determine more accurate errors
+ if !s.Options.ContinueOnErrors && errs.HasErrors() {
+ return errs, warnings // no point in continuing
+ }
+
+ errs.Merge(s.validateReferencesValid()) // error -
+ // There may be a point in continuing to try and determine more accurate errors
+ if !s.Options.ContinueOnErrors && errs.HasErrors() {
+ return errs, warnings // no point in continuing
+ }
+
+ errs.Merge(s.validateDuplicateOperationIDs())
+ errs.Merge(s.validateDuplicatePropertyNames()) // error -
+ errs.Merge(s.validateParameters()) // error -
+ errs.Merge(s.validateItems()) // error -
+
+ // Properties in required definition MUST validate their schema
+ // Properties SHOULD NOT be declared as both required and readOnly (warning)
+ errs.Merge(s.validateRequiredDefinitions()) // error and warning
+
+ // There may be a point in continuing to try and determine more accurate errors
+ if !s.Options.ContinueOnErrors && errs.HasErrors() {
+ return errs, warnings // no point in continuing
+ }
+
+ // Values provided as default MUST validate their schema
+ df := &defaultValidator{SpecValidator: s}
+ errs.Merge(df.Validate())
+
+ // Values provided as examples MUST validate their schema
+ // Value provided as examples in a response without schema generate a warning
+ // Known limitations: examples in responses for mime type not application/json are ignored (warning)
+ ex := &exampleValidator{SpecValidator: s}
+ errs.Merge(ex.Validate())
+
+ errs.Merge(s.validateNonEmptyPathParamNames())
+
+ // errs.Merge(s.validateRefNoSibling()) // warning only
+ errs.Merge(s.validateReferenced()) // warning only
+
+ return errs, warnings
+}
+
+func (s *SpecValidator) validateNonEmptyPathParamNames() *Result {
+ res := new(Result)
+ if s.spec.Spec().Paths == nil {
+ // There is no Paths object: error
+ res.AddErrors(noValidPathMsg())
+ } else {
+ if s.spec.Spec().Paths.Paths == nil {
+ // Paths may be empty: warning
+ res.AddWarnings(noValidPathMsg())
+ } else {
+ for k := range s.spec.Spec().Paths.Paths {
+ if strings.Contains(k, "{}") {
+ res.AddErrors(emptyPathParameterMsg(k))
+ }
+ }
+ }
+ }
+ return res
+}
+
+func (s *SpecValidator) validateDuplicateOperationIDs() *Result {
+ // OperationID, if specified, must be unique across the board
+ var analyzer *analysis.Spec
+ if s.expanded != nil {
+ // $ref are valid: we can analyze operations on an expanded spec
+ analyzer = analysis.New(s.expanded.Spec())
+ } else {
+ // fallback on possible incomplete picture because of previous errors
+ analyzer = s.analyzer
+ }
+ res := new(Result)
+ known := make(map[string]int)
+ for _, v := range analyzer.OperationIDs() {
+ if v != "" {
+ known[v]++
+ }
+ }
+ for k, v := range known {
+ if v > 1 {
+ res.AddErrors(nonUniqueOperationIDMsg(k, v))
+ }
+ }
+ return res
+}
+
+type dupProp struct {
+ Name string
+ Definition string
+}
+
+func (s *SpecValidator) validateDuplicatePropertyNames() *Result {
+ // definition can't declare a property that's already defined by one of its ancestors
+ res := new(Result)
+ for k, sch := range s.spec.Spec().Definitions {
+ if len(sch.AllOf) == 0 {
+ continue
+ }
+
+ knownanc := map[string]struct{}{
+ "#/definitions/" + k: {},
+ }
+
+ ancs, rec := s.validateCircularAncestry(k, sch, knownanc)
+ if rec != nil && (rec.HasErrors() || !rec.HasWarnings()) {
+ res.Merge(rec)
+ }
+ if len(ancs) > 0 {
+ res.AddErrors(circularAncestryDefinitionMsg(k, ancs))
+ return res
+ }
+
+ knowns := make(map[string]struct{})
+ dups, rep := s.validateSchemaPropertyNames(k, sch, knowns)
+ if rep != nil && (rep.HasErrors() || rep.HasWarnings()) {
+ res.Merge(rep)
+ }
+ if len(dups) > 0 {
+ var pns []string
+ for _, v := range dups {
+ pns = append(pns, v.Definition+"."+v.Name)
+ }
+ res.AddErrors(duplicatePropertiesMsg(k, pns))
+ }
+
+ }
+ return res
+}
+
+func (s *SpecValidator) resolveRef(ref *spec.Ref) (*spec.Schema, error) {
+ if s.spec.SpecFilePath() != "" {
+ return spec.ResolveRefWithBase(s.spec.Spec(), ref, &spec.ExpandOptions{RelativeBase: s.spec.SpecFilePath()})
+ }
+ // NOTE: it looks like with the new spec resolver, this code is now unreachable
+ return spec.ResolveRef(s.spec.Spec(), ref)
+}
+
+func (s *SpecValidator) validateSchemaPropertyNames(nm string, sch spec.Schema, knowns map[string]struct{}) ([]dupProp, *Result) {
+ var dups []dupProp
+
+ schn := nm
+ schc := &sch
+ res := new(Result)
+
+ for schc.Ref.String() != "" {
+ // gather property names
+ reso, err := s.resolveRef(&schc.Ref)
+ if err != nil {
+ errorHelp.addPointerError(res, err, schc.Ref.String(), nm)
+ return dups, res
+ }
+ schc = reso
+ schn = sch.Ref.String()
+ }
+
+ if len(schc.AllOf) > 0 {
+ for _, chld := range schc.AllOf {
+ dup, rep := s.validateSchemaPropertyNames(schn, chld, knowns)
+ if rep != nil && (rep.HasErrors() || rep.HasWarnings()) {
+ res.Merge(rep)
+ }
+ dups = append(dups, dup...)
+ }
+ return dups, res
+ }
+
+ for k := range schc.Properties {
+ _, ok := knowns[k]
+ if ok {
+ dups = append(dups, dupProp{Name: k, Definition: schn})
+ } else {
+ knowns[k] = struct{}{}
+ }
+ }
+
+ return dups, res
+}
+
+func (s *SpecValidator) validateCircularAncestry(nm string, sch spec.Schema, knowns map[string]struct{}) ([]string, *Result) {
+ res := new(Result)
+
+ if sch.Ref.String() == "" && len(sch.AllOf) == 0 { // Safeguard. We should not be able to actually get there
+ return nil, res
+ }
+ var ancs []string
+
+ schn := nm
+ schc := &sch
+
+ for schc.Ref.String() != "" {
+ reso, err := s.resolveRef(&schc.Ref)
+ if err != nil {
+ errorHelp.addPointerError(res, err, schc.Ref.String(), nm)
+ return ancs, res
+ }
+ schc = reso
+ schn = sch.Ref.String()
+ }
+
+ if schn != nm && schn != "" {
+ if _, ok := knowns[schn]; ok {
+ ancs = append(ancs, schn)
+ }
+ knowns[schn] = struct{}{}
+
+ if len(ancs) > 0 {
+ return ancs, res
+ }
+ }
+
+ if len(schc.AllOf) > 0 {
+ for _, chld := range schc.AllOf {
+ if chld.Ref.String() != "" || len(chld.AllOf) > 0 {
+ anc, rec := s.validateCircularAncestry(schn, chld, knowns)
+ if rec != nil && (rec.HasErrors() || !rec.HasWarnings()) {
+ res.Merge(rec)
+ }
+ ancs = append(ancs, anc...)
+ if len(ancs) > 0 {
+ return ancs, res
+ }
+ }
+ }
+ }
+ return ancs, res
+}
+
+func (s *SpecValidator) validateItems() *Result {
+ // validate parameter, items, schema and response objects for presence of item if type is array
+ res := new(Result)
+
+ for method, pi := range s.analyzer.Operations() {
+ for path, op := range pi {
+ for _, param := range paramHelp.safeExpandedParamsFor(path, method, op.ID, res, s) {
+
+ if param.TypeName() == arrayType && param.ItemsTypeName() == "" {
+ res.AddErrors(arrayInParamRequiresItemsMsg(param.Name, op.ID))
+ continue
+ }
+ if param.In != swaggerBody {
+ if param.Items != nil {
+ items := param.Items
+ for items.TypeName() == arrayType {
+ if items.ItemsTypeName() == "" {
+ res.AddErrors(arrayInParamRequiresItemsMsg(param.Name, op.ID))
+ break
+ }
+ items = items.Items
+ }
+ }
+ } else {
+ // In: body
+ if param.Schema != nil {
+ res.Merge(s.validateSchemaItems(*param.Schema, fmt.Sprintf("body param %q", param.Name), op.ID))
+ }
+ }
+ }
+
+ var responses []spec.Response
+ if op.Responses != nil {
+ if op.Responses.Default != nil {
+ responses = append(responses, *op.Responses.Default)
+ }
+ if op.Responses.StatusCodeResponses != nil {
+ for _, v := range op.Responses.StatusCodeResponses {
+ responses = append(responses, v)
+ }
+ }
+ }
+
+ for _, resp := range responses {
+ // Response headers with array
+ for hn, hv := range resp.Headers {
+ if hv.TypeName() == arrayType && hv.ItemsTypeName() == "" {
+ res.AddErrors(arrayInHeaderRequiresItemsMsg(hn, op.ID))
+ }
+ }
+ if resp.Schema != nil {
+ res.Merge(s.validateSchemaItems(*resp.Schema, "response body", op.ID))
+ }
+ }
+ }
+ }
+ return res
+}
+
+// Verifies constraints on array type
+func (s *SpecValidator) validateSchemaItems(schema spec.Schema, prefix, opID string) *Result {
+ res := new(Result)
+ if !schema.Type.Contains(arrayType) {
+ return res
+ }
+
+ if schema.Items == nil || schema.Items.Len() == 0 {
+ res.AddErrors(arrayRequiresItemsMsg(prefix, opID))
+ return res
+ }
+
+ if schema.Items.Schema != nil {
+ schema = *schema.Items.Schema
+ if _, err := compileRegexp(schema.Pattern); err != nil {
+ res.AddErrors(invalidItemsPatternMsg(prefix, opID, schema.Pattern))
+ }
+
+ res.Merge(s.validateSchemaItems(schema, prefix, opID))
+ }
+ return res
+}
+
+func (s *SpecValidator) validatePathParamPresence(path string, fromPath, fromOperation []string) *Result {
+ // Each defined operation path parameters must correspond to a named element in the API's path pattern.
+ // (For example, you cannot have a path parameter named id for the following path /pets/{petId} but you must have a path parameter named petId.)
+ res := new(Result)
+ for _, l := range fromPath {
+ var matched bool
+ for _, r := range fromOperation {
+ if l == "{"+r+"}" {
+ matched = true
+ break
+ }
+ }
+ if !matched {
+ res.AddErrors(noParameterInPathMsg(l))
+ }
+ }
+
+ for _, p := range fromOperation {
+ var matched bool
+ for _, r := range fromPath {
+ if "{"+p+"}" == r {
+ matched = true
+ break
+ }
+ }
+ if !matched {
+ res.AddErrors(pathParamNotInPathMsg(path, p))
+ }
+ }
+
+ return res
+}
+
+func (s *SpecValidator) validateReferenced() *Result {
+ var res Result
+ res.MergeAsWarnings(s.validateReferencedParameters())
+ res.MergeAsWarnings(s.validateReferencedResponses())
+ res.MergeAsWarnings(s.validateReferencedDefinitions())
+ return &res
+}
+
+// nolint: dupl
+func (s *SpecValidator) validateReferencedParameters() *Result {
+ // Each referenceable definition should have references.
+ params := s.spec.Spec().Parameters
+ if len(params) == 0 {
+ return nil
+ }
+
+ expected := make(map[string]struct{})
+ for k := range params {
+ expected["#/parameters/"+jsonpointer.Escape(k)] = struct{}{}
+ }
+ for _, k := range s.analyzer.AllParameterReferences() {
+ delete(expected, k)
+ }
+
+ if len(expected) == 0 {
+ return nil
+ }
+ result := new(Result)
+ for k := range expected {
+ result.AddWarnings(unusedParamMsg(k))
+ }
+ return result
+}
+
+// nolint: dupl
+func (s *SpecValidator) validateReferencedResponses() *Result {
+ // Each referenceable definition should have references.
+ responses := s.spec.Spec().Responses
+ if len(responses) == 0 {
+ return nil
+ }
+
+ expected := make(map[string]struct{})
+ for k := range responses {
+ expected["#/responses/"+jsonpointer.Escape(k)] = struct{}{}
+ }
+ for _, k := range s.analyzer.AllResponseReferences() {
+ delete(expected, k)
+ }
+
+ if len(expected) == 0 {
+ return nil
+ }
+ result := new(Result)
+ for k := range expected {
+ result.AddWarnings(unusedResponseMsg(k))
+ }
+ return result
+}
+
+// nolint: dupl
+func (s *SpecValidator) validateReferencedDefinitions() *Result {
+ // Each referenceable definition must have references.
+ defs := s.spec.Spec().Definitions
+ if len(defs) == 0 {
+ return nil
+ }
+
+ expected := make(map[string]struct{})
+ for k := range defs {
+ expected["#/definitions/"+jsonpointer.Escape(k)] = struct{}{}
+ }
+ for _, k := range s.analyzer.AllDefinitionReferences() {
+ delete(expected, k)
+ }
+
+ if len(expected) == 0 {
+ return nil
+ }
+
+ result := new(Result)
+ for k := range expected {
+ result.AddWarnings(unusedDefinitionMsg(k))
+ }
+ return result
+}
+
+func (s *SpecValidator) validateRequiredDefinitions() *Result {
+ // Each property listed in the required array must be defined in the properties of the model
+ res := new(Result)
+
+DEFINITIONS:
+ for d, schema := range s.spec.Spec().Definitions {
+ if schema.Required != nil { // Safeguard
+ for _, pn := range schema.Required {
+ red := s.validateRequiredProperties(pn, d, &schema) //#nosec
+ res.Merge(red)
+ if !red.IsValid() && !s.Options.ContinueOnErrors {
+ break DEFINITIONS // there is an error, let's stop that bleeding
+ }
+ }
+ }
+ }
+ return res
+}
+
+func (s *SpecValidator) validateRequiredProperties(path, in string, v *spec.Schema) *Result {
+ // Takes care of recursive property definitions, which may be nested in additionalProperties schemas
+ res := new(Result)
+ propertyMatch := false
+ patternMatch := false
+ additionalPropertiesMatch := false
+ isReadOnly := false
+
+ // Regular properties
+ if _, ok := v.Properties[path]; ok {
+ propertyMatch = true
+ isReadOnly = v.Properties[path].ReadOnly
+ }
+
+ // NOTE: patternProperties are not supported in swagger. Even so, we continue validating here.
+ // We check all defined patterns: if one regexp is invalid, an error is reported
+ for pp, pv := range v.PatternProperties {
+ re, err := compileRegexp(pp)
+ if err != nil {
+ res.AddErrors(invalidPatternMsg(pp, in))
+ } else if re.MatchString(path) {
+ patternMatch = true
+ if !propertyMatch {
+ isReadOnly = pv.ReadOnly
+ }
+ }
+ }
+
+ if !(propertyMatch || patternMatch) {
+ if v.AdditionalProperties != nil {
+ if v.AdditionalProperties.Allows && v.AdditionalProperties.Schema == nil {
+ additionalPropertiesMatch = true
+ } else if v.AdditionalProperties.Schema != nil {
+ // additionalProperties given as a schema are supported in swagger
+ // recursively validates additionalProperties schema
+ // TODO : anyOf, allOf, oneOf like in schemaPropsValidator
+ red := s.validateRequiredProperties(path, in, v.AdditionalProperties.Schema)
+ if red.IsValid() {
+ additionalPropertiesMatch = true
+ if !propertyMatch && !patternMatch {
+ isReadOnly = v.AdditionalProperties.Schema.ReadOnly
+ }
+ }
+ res.Merge(red)
+ }
+ }
+ }
+
+ if !(propertyMatch || patternMatch || additionalPropertiesMatch) {
+ res.AddErrors(requiredButNotDefinedMsg(path, in))
+ }
+
+ if isReadOnly {
+ res.AddWarnings(readOnlyAndRequiredMsg(in, path))
+ }
+ return res
+}
+
+func (s *SpecValidator) validateParameters() *Result {
+ // - for each method, path is unique, regardless of path parameters
+ // e.g. GET:/petstore/{id}, GET:/petstore/{pet}, GET:/petstore are
+ // considered duplicate paths
+ // - each parameter should have a unique `name` and `type` combination
+ // - each operation should have only 1 parameter of type body
+ // - there must be at most 1 parameter in body
+ // - parameters with pattern property must specify valid patterns
+ // - $ref in parameters must resolve
+ // - path param must be required
+ res := new(Result)
+ rexGarbledPathSegment := mustCompileRegexp(`.*[{}\s]+.*`)
+ for method, pi := range s.expandedAnalyzer().Operations() {
+ methodPaths := make(map[string]map[string]string)
+ for path, op := range pi {
+ pathToAdd := pathHelp.stripParametersInPath(path)
+
+ // Warn on garbled path after param stripping
+ if rexGarbledPathSegment.MatchString(pathToAdd) {
+ res.AddWarnings(pathStrippedParamGarbledMsg(pathToAdd))
+ }
+
+ // Check uniqueness of stripped paths
+ if _, found := methodPaths[method][pathToAdd]; found {
+
+ // Sort names for stable, testable output
+ if strings.Compare(path, methodPaths[method][pathToAdd]) < 0 {
+ res.AddErrors(pathOverlapMsg(path, methodPaths[method][pathToAdd]))
+ } else {
+ res.AddErrors(pathOverlapMsg(methodPaths[method][pathToAdd], path))
+ }
+ } else {
+ if _, found := methodPaths[method]; !found {
+ methodPaths[method] = map[string]string{}
+ }
+ methodPaths[method][pathToAdd] = path // Original non stripped path
+
+ }
+
+ var bodyParams []string
+ var paramNames []string
+ var hasForm, hasBody bool
+
+ // Check parameters names uniqueness for operation
+ // TODO: should be done after param expansion
+ res.Merge(s.checkUniqueParams(path, method, op))
+
+ for _, pr := range paramHelp.safeExpandedParamsFor(path, method, op.ID, res, s) {
+ // Validate pattern regexp for parameters with a Pattern property
+ if _, err := compileRegexp(pr.Pattern); err != nil {
+ res.AddErrors(invalidPatternInParamMsg(op.ID, pr.Name, pr.Pattern))
+ }
+
+ // There must be at most one parameter in body: list them all
+ if pr.In == swaggerBody {
+ bodyParams = append(bodyParams, fmt.Sprintf("%q", pr.Name))
+ hasBody = true
+ }
+
+ if pr.In == "path" {
+ paramNames = append(paramNames, pr.Name)
+ // Parameters declared in path must have the required: true property
+ if !pr.Required {
+ res.AddErrors(pathParamRequiredMsg(op.ID, pr.Name))
+ }
+ }
+
+ if pr.In == "formData" {
+ hasForm = true
+ }
+
+ if !(pr.Type == numberType || pr.Type == integerType) &&
+ (pr.Maximum != nil || pr.Minimum != nil || pr.MultipleOf != nil) {
+ // A non-numeric parameter has validation keywords for numeric instances (number and integer)
+ res.AddWarnings(parameterValidationTypeMismatchMsg(pr.Name, path, pr.Type))
+ }
+
+ if !(pr.Type == stringType) &&
+ // A non-string parameter has validation keywords for strings
+ (pr.MaxLength != nil || pr.MinLength != nil || pr.Pattern != "") {
+ res.AddWarnings(parameterValidationTypeMismatchMsg(pr.Name, path, pr.Type))
+ }
+
+ if !(pr.Type == arrayType) &&
+ // A non-array parameter has validation keywords for arrays
+ (pr.MaxItems != nil || pr.MinItems != nil || pr.UniqueItems) {
+ res.AddWarnings(parameterValidationTypeMismatchMsg(pr.Name, path, pr.Type))
+ }
+ }
+
+ // In:formData and In:body are mutually exclusive
+ if hasBody && hasForm {
+ res.AddErrors(bothFormDataAndBodyMsg(op.ID))
+ }
+ // There must be at most one body param
+ // Accurately report situations when more than 1 body param is declared (possibly unnamed)
+ if len(bodyParams) > 1 {
+ sort.Strings(bodyParams)
+ res.AddErrors(multipleBodyParamMsg(op.ID, bodyParams))
+ }
+
+ // Check uniqueness of parameters in path
+ paramsInPath := pathHelp.extractPathParams(path)
+ for i, p := range paramsInPath {
+ for j, q := range paramsInPath {
+ if p == q && i > j {
+ res.AddErrors(pathParamNotUniqueMsg(path, p, q))
+ break
+ }
+ }
+ }
+
+ // Warn about possibly malformed params in path
+ rexGarbledParam := mustCompileRegexp(`{.*[{}\s]+.*}`)
+ for _, p := range paramsInPath {
+ if rexGarbledParam.MatchString(p) {
+ res.AddWarnings(pathParamGarbledMsg(path, p))
+ }
+ }
+
+ // Match params from path vs params from params section
+ res.Merge(s.validatePathParamPresence(path, paramsInPath, paramNames))
+ }
+ }
+ return res
+}
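+
+// Editor's illustration (a hedged sketch, not upstream code): paths that
+// differ only by parameter name strip to the same key and are reported as
+// overlapping:
+//
+//    GET /petstore/{id}   // stripped of its parameter
+//    GET /petstore/{pet}  // strips to the same path -> pathOverlapMsg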
+
+func (s *SpecValidator) validateReferencesValid() *Result {
+ // each reference must point to a valid object
+ res := new(Result)
+ for _, r := range s.analyzer.AllRefs() {
+ if !r.IsValidURI(s.spec.SpecFilePath()) { // Safeguard - spec should always yield a valid URI
+ res.AddErrors(invalidRefMsg(r.String()))
+ }
+ }
+ if !res.HasErrors() {
+ // NOTE: with default settings, loads.Document.Expanded()
+ // stops on first error. However, the expand option to continue
+ // on errors fails to report errors at all.
+ exp, err := s.spec.Expanded()
+ if err != nil {
+ res.AddErrors(unresolvedReferencesMsg(err))
+ }
+ s.expanded = exp
+ }
+ return res
+}
+
+func (s *SpecValidator) checkUniqueParams(path, method string, op *spec.Operation) *Result {
+ // Check for duplicate parameters declaration in param section.
+ // Each parameter should have a unique `name` and `type` combination
+ // NOTE: this could be factorized in analysis (when constructing the params map)
+ // However, there are some issues with such a factorization:
+ // - analysis does not seem to fully expand params
+ // - param keys may be altered by x-go-name
+ res := new(Result)
+ pnames := make(map[string]struct{})
+
+ if op.Parameters != nil { // Safeguard
+ for _, ppr := range op.Parameters {
+ var ok bool
+ pr, red := paramHelp.resolveParam(path, method, op.ID, &ppr, s) //#nosec
+ res.Merge(red)
+
+ if pr != nil && pr.Name != "" { // params with empty name does no participate the check
+ key := fmt.Sprintf("%s#%s", pr.In, pr.Name)
+
+ if _, ok = pnames[key]; ok {
+ res.AddErrors(duplicateParamNameMsg(pr.In, pr.Name, op.ID))
+ }
+ pnames[key] = struct{}{}
+ }
+ }
+ }
+ return res
+}
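+
+// Editor's illustration (a hedged sketch, not upstream code): the
+// uniqueness key combines In and Name, so the same name may legally recur
+// across locations:
+//
+//    {in: query, name: id} // key "query#id"
+//    {in: path,  name: id} // key "path#id": no conflict
+//    {in: query, name: id} // key "query#id" again -> duplicateParamNameMsg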
+
+// SetContinueOnErrors sets the ContinueOnErrors option for this validator.
+func (s *SpecValidator) SetContinueOnErrors(c bool) {
+ s.Options.ContinueOnErrors = c
+}
+
+// expandedAnalyzer returns expanded.Analyzer when it is available,
+// otherwise the plain analyzer.
+func (s *SpecValidator) expandedAnalyzer() *analysis.Spec {
+ if s.expanded != nil && s.expanded.Analyzer != nil {
+ return s.expanded.Analyzer
+ }
+ return s.analyzer
+}
diff --git a/vendor/github.com/go-openapi/validate/spec_messages.go b/vendor/github.com/go-openapi/validate/spec_messages.go
new file mode 100644
index 000000000..b3757addd
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/spec_messages.go
@@ -0,0 +1,360 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import (
+ "net/http"
+
+ "github.com/go-openapi/errors"
+)
+
+// Error messages related to spec validation and returned as results.
+const (
+ // ArrayRequiresItemsError ...
+ ArrayRequiresItemsError = "%s for %q is a collection without an element type (array requires items definition)"
+
+ // ArrayInParamRequiresItemsError ...
+ ArrayInParamRequiresItemsError = "param %q for %q is a collection without an element type (array requires item definition)"
+
+ // ArrayInHeaderRequiresItemsError ...
+ ArrayInHeaderRequiresItemsError = "header %q for %q is a collection without an element type (array requires items definition)"
+
+ // BothFormDataAndBodyError indicates that an operation specifies both a body and a formData parameter, which is forbidden
+ BothFormDataAndBodyError = "operation %q has both formData and body parameters. Only one such In: type may be used for a given operation"
+
+ // CannotResolveReferenceError when a $ref could not be resolved
+ CannotResolveReferenceError = "could not resolve reference in %s to $ref %s: %v"
+
+ // CircularAncestryDefinitionError ...
+ CircularAncestryDefinitionError = "definition %q has circular ancestry: %v"
+
+ // DefaultValueDoesNotValidateError results from an invalid default value provided
+ DefaultValueDoesNotValidateError = "default value for %s in %s does not validate its schema"
+
+ // DefaultValueItemsDoesNotValidateError results from an invalid default value provided for Items
+ DefaultValueItemsDoesNotValidateError = "default value for %s.items in %s does not validate its schema"
+
+ // DefaultValueHeaderDoesNotValidateError results from an invalid default value provided in header
+ DefaultValueHeaderDoesNotValidateError = "in operation %q, default value in header %s for %s does not validate its schema"
+
+ // DefaultValueHeaderItemsDoesNotValidateError results from an invalid default value provided in header.items
+ DefaultValueHeaderItemsDoesNotValidateError = "in operation %q, default value in header.items %s for %s does not validate its schema"
+
+ // DefaultValueInDoesNotValidateError ...
+ DefaultValueInDoesNotValidateError = "in operation %q, default value in %s does not validate its schema"
+
+ // DuplicateParamNameError ...
+ DuplicateParamNameError = "duplicate parameter name %q for %q in operation %q"
+
+ // DuplicatePropertiesError ...
+ DuplicatePropertiesError = "definition %q contains duplicate properties: %v"
+
+ // ExampleValueDoesNotValidateError results from an invalid example value provided
+ ExampleValueDoesNotValidateError = "example value for %s in %s does not validate its schema"
+
+ // ExampleValueItemsDoesNotValidateError results from an invalid example value provided for Items
+ ExampleValueItemsDoesNotValidateError = "example value for %s.items in %s does not validate its schema"
+
+ // ExampleValueHeaderDoesNotValidateError results from an invalid example value provided in header
+ ExampleValueHeaderDoesNotValidateError = "in operation %q, example value in header %s for %s does not validate its schema"
+
+ // ExampleValueHeaderItemsDoesNotValidateError results from an invalid example value provided in header.items
+ ExampleValueHeaderItemsDoesNotValidateError = "in operation %q, example value in header.items %s for %s does not validate its schema"
+
+ // ExampleValueInDoesNotValidateError ...
+ ExampleValueInDoesNotValidateError = "in operation %q, example value in %s does not validate its schema"
+
+ // EmptyPathParameterError means that a path parameter was found empty (e.g. "{}")
+ EmptyPathParameterError = "%q contains an empty path parameter"
+
+ // InvalidDocumentError states that spec validation only processes spec.Document objects
+ InvalidDocumentError = "spec validator can only validate spec.Document objects"
+
+ // InvalidItemsPatternError indicates an Items definition with invalid pattern
+ InvalidItemsPatternError = "%s for %q has invalid items pattern: %q"
+
+ // InvalidParameterDefinitionError indicates an error detected on a parameter definition
+ InvalidParameterDefinitionError = "invalid definition for parameter %s in %s in operation %q"
+
+ // InvalidParameterDefinitionAsSchemaError indicates an error detected on a parameter definition, which was mistaken for a schema definition.
+ // Most likely, this situation is encountered whenever a $ref has been added as a sibling of the parameter definition.
+ InvalidParameterDefinitionAsSchemaError = "invalid definition as Schema for parameter %s in %s in operation %q"
+
+ // InvalidPatternError ...
+ InvalidPatternError = "pattern %q is invalid in %s"
+
+ // InvalidPatternInError indicates an invalid pattern in a schema or items definition
+ InvalidPatternInError = "%s in %s has invalid pattern: %q"
+
+ // InvalidPatternInHeaderError indicates a header definition with an invalid pattern
+ InvalidPatternInHeaderError = "in operation %q, header %s for %s has invalid pattern %q: %v"
+
+ // InvalidPatternInParamError ...
+ InvalidPatternInParamError = "operation %q has invalid pattern in param %q: %q"
+
+ // InvalidReferenceError indicates that a $ref property could not be resolved
+ InvalidReferenceError = "invalid ref %q"
+
+ // InvalidResponseDefinitionAsSchemaError indicates an error detected on a response definition, which was mistaken for a schema definition.
+ // Most likely, this situation is encountered whenever a $ref has been added as a sibling of the response definition.
+ InvalidResponseDefinitionAsSchemaError = "invalid definition as Schema for response %s in %s"
+
+ // MultipleBodyParamError indicates that an operation specifies multiple parameters with in: body
+ MultipleBodyParamError = "operation %q has more than 1 body param: %v"
+
+ // NonUniqueOperationIDError indicates that the same operationId has been specified several times
+ NonUniqueOperationIDError = "%q is defined %d times"
+
+ // NoParameterInPathError indicates that a path was found without any parameter
+ NoParameterInPathError = "path param %q has no parameter definition"
+
+ // NoValidPathErrorOrWarning indicates that no single path could be validated. If Paths is empty, this message is only a warning.
+ NoValidPathErrorOrWarning = "spec has no valid path defined"
+
+ // NoValidResponseError indicates that no valid response description could be found for an operation
+ NoValidResponseError = "operation %q has no valid response"
+
+ // PathOverlapError ...
+ PathOverlapError = "path %s overlaps with %s"
+
+ // PathParamNotInPathError indicates that a parameter specified with in: path was not found in the path specification
+ PathParamNotInPathError = "path param %q is not present in path %q"
+
+ // PathParamNotUniqueError ...
+ PathParamNotUniqueError = "params in path %q must be unique: %q conflicts with %q"
+
+ // PathParamRequiredError ...
+ PathParamRequiredError = "in operation %q, path param %q must be declared as required"
+
+ // RefNotAllowedInHeaderError indicates a $ref was found in a header definition, which is not allowed by Swagger
+ RefNotAllowedInHeaderError = "IMPORTANT! In %q: $refs are not allowed in headers. In context for header %q%s"
+
+ // RequiredButNotDefinedError ...
+ RequiredButNotDefinedError = "%q is present in required but not defined as property in definition %q"
+
+ // SomeParametersBrokenError indicates that some parameters could not be resolved, which might result in only partial checks being carried out on parameters
+ SomeParametersBrokenError = "some parameters definitions are broken in %q.%s. Cannot carry on full checks on parameters for operation %s"
+
+ // UnresolvedReferencesError indicates that at least one $ref could not be resolved
+ UnresolvedReferencesError = "some references could not be resolved in spec. First found: %v"
+)
+
+// Warning messages related to spec validation and returned as results
+const (
+ // ExamplesWithoutSchemaWarning indicates that examples are provided for a response, but no schema to validate the example against
+ ExamplesWithoutSchemaWarning = "Examples provided without schema in operation %q, %s"
+
+ // ExamplesMimeNotSupportedWarning indicates that examples are provided with a mime type different from application/json, which
+ // the validator does not support yet
+ ExamplesMimeNotSupportedWarning = "No validation attempt for examples for media types other than application/json, in operation %q, %s"
+
+ // PathParamGarbledWarning ...
+ PathParamGarbledWarning = "in path %q, param %q contains {,} or white space. Albeit not strictly illegal, this is probably not what you want"
+
+ // ParamValidationTypeMismatch indicates that a parameter has validation keywords that do not match its type
+ ParamValidationTypeMismatch = "validation keywords of parameter %q in path %q don't match its type %s"
+
+ // PathStrippedParamGarbledWarning ...
+ PathStrippedParamGarbledWarning = "path stripped from path parameters %s contains {,} or white space. This is probably not what you want."
+
+ // ReadOnlyAndRequiredWarning ...
+ ReadOnlyAndRequiredWarning = "Required property %s in %q should not be marked as both required and readOnly"
+
+ // RefShouldNotHaveSiblingsWarning indicates that a $ref was found with a sibling definition. This results in the $ref taking over its siblings,
+ // which is most likely not wanted.
+ RefShouldNotHaveSiblingsWarning = "$ref property should have no sibling in %q.%s"
+
+ // RequiredHasDefaultWarning indicates that a required parameter property should not have a default
+ RequiredHasDefaultWarning = "%s in %s has a default value and is required as parameter"
+
+ // UnusedDefinitionWarning ...
+ UnusedDefinitionWarning = "definition %q is not used anywhere"
+
+ // UnusedParamWarning ...
+ UnusedParamWarning = "parameter %q is not used anywhere"
+
+ // UnusedResponseWarning ...
+ UnusedResponseWarning = "response %q is not used anywhere"
+)
+
+// Additional error codes
+const (
+ // InternalErrorCode reports an internal technical error
+ InternalErrorCode = http.StatusInternalServerError
+ // NotFoundErrorCode indicates that a resource (e.g. a $ref) could not be found
+ NotFoundErrorCode = http.StatusNotFound
+)
+
+func invalidDocumentMsg() errors.Error {
+ return errors.New(InternalErrorCode, InvalidDocumentError)
+}
+func invalidRefMsg(path string) errors.Error {
+ return errors.New(NotFoundErrorCode, InvalidReferenceError, path)
+}
+func unresolvedReferencesMsg(err error) errors.Error {
+ return errors.New(errors.CompositeErrorCode, UnresolvedReferencesError, err)
+}
+func noValidPathMsg() errors.Error {
+ return errors.New(errors.CompositeErrorCode, NoValidPathErrorOrWarning)
+}
+func emptyPathParameterMsg(path string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, EmptyPathParameterError, path)
+}
+func nonUniqueOperationIDMsg(path string, i int) errors.Error {
+ return errors.New(errors.CompositeErrorCode, NonUniqueOperationIDError, path, i)
+}
+func circularAncestryDefinitionMsg(path string, args interface{}) errors.Error {
+ return errors.New(errors.CompositeErrorCode, CircularAncestryDefinitionError, path, args)
+}
+func duplicatePropertiesMsg(path string, args interface{}) errors.Error {
+ return errors.New(errors.CompositeErrorCode, DuplicatePropertiesError, path, args)
+}
+func pathParamNotInPathMsg(path, param string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, PathParamNotInPathError, param, path)
+}
+func arrayRequiresItemsMsg(path, operation string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, ArrayRequiresItemsError, path, operation)
+}
+func arrayInParamRequiresItemsMsg(path, operation string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, ArrayInParamRequiresItemsError, path, operation)
+}
+func arrayInHeaderRequiresItemsMsg(path, operation string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, ArrayInHeaderRequiresItemsError, path, operation)
+}
+func invalidItemsPatternMsg(path, operation, pattern string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, InvalidItemsPatternError, path, operation, pattern)
+}
+func invalidPatternMsg(pattern, path string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, InvalidPatternError, pattern, path)
+}
+func requiredButNotDefinedMsg(path, definition string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, RequiredButNotDefinedError, path, definition)
+}
+func pathParamGarbledMsg(path, param string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, PathParamGarbledWarning, path, param)
+}
+func pathStrippedParamGarbledMsg(path string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, PathStrippedParamGarbledWarning, path)
+}
+func pathOverlapMsg(path, arg string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, PathOverlapError, path, arg)
+}
+func invalidPatternInParamMsg(operation, param, pattern string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, InvalidPatternInParamError, operation, param, pattern)
+}
+func pathParamRequiredMsg(operation, param string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, PathParamRequiredError, operation, param)
+}
+func bothFormDataAndBodyMsg(operation string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, BothFormDataAndBodyError, operation)
+}
+func multipleBodyParamMsg(operation string, args interface{}) errors.Error {
+ return errors.New(errors.CompositeErrorCode, MultipleBodyParamError, operation, args)
+}
+func pathParamNotUniqueMsg(path, param, arg string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, PathParamNotUniqueError, path, param, arg)
+}
+func duplicateParamNameMsg(path, param, operation string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, DuplicateParamNameError, param, path, operation)
+}
+func unusedParamMsg(arg string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, UnusedParamWarning, arg)
+}
+func unusedDefinitionMsg(arg string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, UnusedDefinitionWarning, arg)
+}
+func unusedResponseMsg(arg string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, UnusedResponseWarning, arg)
+}
+func readOnlyAndRequiredMsg(path, param string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, ReadOnlyAndRequiredWarning, param, path)
+}
+func noParameterInPathMsg(param string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, NoParameterInPathError, param)
+}
+func requiredHasDefaultMsg(param, path string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, RequiredHasDefaultWarning, param, path)
+}
+func defaultValueDoesNotValidateMsg(param, path string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, DefaultValueDoesNotValidateError, param, path)
+}
+func defaultValueItemsDoesNotValidateMsg(param, path string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, DefaultValueItemsDoesNotValidateError, param, path)
+}
+func noValidResponseMsg(operation string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, NoValidResponseError, operation)
+}
+func defaultValueHeaderDoesNotValidateMsg(operation, header, path string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, DefaultValueHeaderDoesNotValidateError, operation, header, path)
+}
+func defaultValueHeaderItemsDoesNotValidateMsg(operation, header, path string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, DefaultValueHeaderItemsDoesNotValidateError, operation, header, path)
+}
+func invalidPatternInHeaderMsg(operation, header, path, pattern string, args interface{}) errors.Error {
+ return errors.New(errors.CompositeErrorCode, InvalidPatternInHeaderError, operation, header, path, pattern, args)
+}
+func invalidPatternInMsg(path, in, pattern string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, InvalidPatternInError, path, in, pattern)
+}
+func defaultValueInDoesNotValidateMsg(operation, path string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, DefaultValueInDoesNotValidateError, operation, path)
+}
+func exampleValueDoesNotValidateMsg(param, path string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, ExampleValueDoesNotValidateError, param, path)
+}
+func exampleValueItemsDoesNotValidateMsg(param, path string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, ExampleValueItemsDoesNotValidateError, param, path)
+}
+func exampleValueHeaderDoesNotValidateMsg(operation, header, path string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, ExampleValueHeaderDoesNotValidateError, operation, header, path)
+}
+func exampleValueHeaderItemsDoesNotValidateMsg(operation, header, path string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, ExampleValueHeaderItemsDoesNotValidateError, operation, header, path)
+}
+func exampleValueInDoesNotValidateMsg(operation, path string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, ExampleValueInDoesNotValidateError, operation, path)
+}
+func examplesWithoutSchemaMsg(operation, response string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, ExamplesWithoutSchemaWarning, operation, response)
+}
+func examplesMimeNotSupportedMsg(operation, response string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, ExamplesMimeNotSupportedWarning, operation, response)
+}
+func refNotAllowedInHeaderMsg(path, header, ref string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, RefNotAllowedInHeaderError, path, header, ref)
+}
+func cannotResolveRefMsg(path, ref string, err error) errors.Error {
+ return errors.New(errors.CompositeErrorCode, CannotResolveReferenceError, path, ref, err)
+}
+func invalidParameterDefinitionMsg(path, method, operationID string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, InvalidParameterDefinitionError, path, method, operationID)
+}
+func invalidParameterDefinitionAsSchemaMsg(path, method, operationID string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, InvalidParameterDefinitionAsSchemaError, path, method, operationID)
+}
+func parameterValidationTypeMismatchMsg(param, path, typ string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, ParamValidationTypeMismatch, param, path, typ)
+}
+
+// disabled
+// func invalidResponseDefinitionAsSchemaMsg(path, method string) errors.Error {
+// return errors.New(errors.CompositeErrorCode, InvalidResponseDefinitionAsSchemaError, path, method)
+// }
+func someParametersBrokenMsg(path, method, operationID string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, SomeParametersBrokenError, path, method, operationID)
+}
+func refShouldNotHaveSiblingsMsg(path, operationID string) errors.Error {
+ return errors.New(errors.CompositeErrorCode, RefShouldNotHaveSiblingsWarning, operationID, path)
+}
diff --git a/vendor/github.com/go-openapi/validate/type.go b/vendor/github.com/go-openapi/validate/type.go
new file mode 100644
index 000000000..876467588
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/type.go
@@ -0,0 +1,177 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import (
+ "reflect"
+ "strings"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+)
+
+type typeValidator struct {
+ Type spec.StringOrArray
+ Nullable bool
+ Format string
+ In string
+ Path string
+}
+
+func (t *typeValidator) schemaInfoForType(data interface{}) (string, string) {
+ // internal type to JSON type with swagger 2.0 format (with go-openapi/strfmt extensions),
+ // see https://github.com/go-openapi/strfmt/blob/master/README.md
+ // TODO: this switch really is some sort of reverse lookup for formats. It should be provided by strfmt.
+ switch data.(type) {
+ case []byte, strfmt.Base64, *strfmt.Base64:
+ return stringType, stringFormatByte
+ case strfmt.CreditCard, *strfmt.CreditCard:
+ return stringType, stringFormatCreditCard
+ case strfmt.Date, *strfmt.Date:
+ return stringType, stringFormatDate
+ case strfmt.DateTime, *strfmt.DateTime:
+ return stringType, stringFormatDateTime
+ case strfmt.Duration, *strfmt.Duration:
+ return stringType, stringFormatDuration
+ case swag.File, *swag.File:
+ return fileType, ""
+ case strfmt.Email, *strfmt.Email:
+ return stringType, stringFormatEmail
+ case strfmt.HexColor, *strfmt.HexColor:
+ return stringType, stringFormatHexColor
+ case strfmt.Hostname, *strfmt.Hostname:
+ return stringType, stringFormatHostname
+ case strfmt.IPv4, *strfmt.IPv4:
+ return stringType, stringFormatIPv4
+ case strfmt.IPv6, *strfmt.IPv6:
+ return stringType, stringFormatIPv6
+ case strfmt.ISBN, *strfmt.ISBN:
+ return stringType, stringFormatISBN
+ case strfmt.ISBN10, *strfmt.ISBN10:
+ return stringType, stringFormatISBN10
+ case strfmt.ISBN13, *strfmt.ISBN13:
+ return stringType, stringFormatISBN13
+ case strfmt.MAC, *strfmt.MAC:
+ return stringType, stringFormatMAC
+ case strfmt.ObjectId, *strfmt.ObjectId:
+ return stringType, stringFormatBSONObjectID
+ case strfmt.Password, *strfmt.Password:
+ return stringType, stringFormatPassword
+ case strfmt.RGBColor, *strfmt.RGBColor:
+ return stringType, stringFormatRGBColor
+ case strfmt.SSN, *strfmt.SSN:
+ return stringType, stringFormatSSN
+ case strfmt.URI, *strfmt.URI:
+ return stringType, stringFormatURI
+ case strfmt.UUID, *strfmt.UUID:
+ return stringType, stringFormatUUID
+ case strfmt.UUID3, *strfmt.UUID3:
+ return stringType, stringFormatUUID3
+ case strfmt.UUID4, *strfmt.UUID4:
+ return stringType, stringFormatUUID4
+ case strfmt.UUID5, *strfmt.UUID5:
+ return stringType, stringFormatUUID5
+ // TODO: missing binary (io.ReadCloser)
+ // TODO: missing json.Number
+ default:
+ val := reflect.ValueOf(data)
+ tpe := val.Type()
+ switch tpe.Kind() {
+ case reflect.Bool:
+ return booleanType, ""
+ case reflect.String:
+ return stringType, ""
+ case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Uint8, reflect.Uint16, reflect.Uint32:
+ // NOTE: this is per the spec. With go-openapi, shouldn't unsigned integers map to uint32?
+ return integerType, integerFormatInt32
+ case reflect.Int, reflect.Int64, reflect.Uint, reflect.Uint64:
+ return integerType, integerFormatInt64
+ case reflect.Float32:
+ // NOTE: is that not numberFormatFloat?
+ return numberType, numberFormatFloat32
+ case reflect.Float64:
+ // NOTE: is that not "double"?
+ return numberType, numberFormatFloat64
+ // NOTE: go arrays (reflect.Array) are not supported (fixed length)
+ case reflect.Slice:
+ return arrayType, ""
+ case reflect.Map, reflect.Struct:
+ return objectType, ""
+ case reflect.Interface:
+ // What to do here?
+ panic("dunno what to do here")
+ case reflect.Ptr:
+ return t.schemaInfoForType(reflect.Indirect(val).Interface())
+ }
+ }
+ return "", ""
+}
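+
+// Editor's illustration (a hedged sketch, assuming the usual values of the
+// format constants): a few of the mappings performed above, from Go values
+// to (JSON type, swagger format) pairs:
+//
+//    strfmt.DateTime{} // -> ("string", "date-time")
+//    int32(42)         // -> ("integer", "int32")
+//    []string{"a"}     // -> ("array", "")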
+
+func (t *typeValidator) SetPath(path string) {
+ t.Path = path
+}
+
+func (t *typeValidator) Applies(source interface{}, kind reflect.Kind) bool {
+ // typeValidator applies to Schema, Parameter and Header objects
+ stpe := reflect.TypeOf(source)
+ r := (len(t.Type) > 0 || t.Format != "") && (stpe == specSchemaType || stpe == specParameterType || stpe == specHeaderType)
+ debugLog("type validator for %q applies %t for %T (kind: %v)\n", t.Path, r, source, kind)
+ return r
+}
+
+func (t *typeValidator) Validate(data interface{}) *Result {
+ result := new(Result)
+ result.Inc()
+ if data == nil {
+ // nil or a zero value for the passed structure requires Type: null
+ if len(t.Type) > 0 && !t.Type.Contains(nullType) && !t.Nullable { // TODO: if a property is not required it also passes this
+ return errorHelp.sErr(errors.InvalidType(t.Path, t.In, strings.Join(t.Type, ","), nullType))
+ }
+ return result
+ }
+
+ // check if the type matches, should be used in every validator chain as first item
+ val := reflect.Indirect(reflect.ValueOf(data))
+ kind := val.Kind()
+
+ // infer schema type (JSON) and format from passed data type
+ schType, format := t.schemaInfoForType(data)
+
+ debugLog("path: %s, schType: %s, format: %s, expType: %s, expFmt: %s, kind: %s", t.Path, schType, format, t.Type, t.Format, val.Kind().String())
+
+ // check numerical types
+ // TODO: check unsigned ints
+ // TODO: check json.Number (see schema.go)
+ isLowerInt := t.Format == integerFormatInt64 && format == integerFormatInt32
+ isLowerFloat := t.Format == numberFormatFloat64 && format == numberFormatFloat32
+ isFloatInt := schType == numberType && swag.IsFloat64AJSONInteger(val.Float()) && t.Type.Contains(integerType)
+ isIntFloat := schType == integerType && t.Type.Contains(numberType)
+
+ if kind != reflect.String && kind != reflect.Slice && t.Format != "" && !(t.Type.Contains(schType) || format == t.Format || isFloatInt || isIntFloat || isLowerInt || isLowerFloat) {
+ // TODO: test case
+ return errorHelp.sErr(errors.InvalidType(t.Path, t.In, t.Format, format))
+ }
+
+ if !(t.Type.Contains(numberType) || t.Type.Contains(integerType)) && t.Format != "" && (kind == reflect.String || kind == reflect.Slice) {
+ return result
+ }
+
+ if !(t.Type.Contains(schType) || isFloatInt || isIntFloat) {
+ return errorHelp.sErr(errors.InvalidType(t.Path, t.In, strings.Join(t.Type, ","), schType))
+ }
+ return result
+}
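+
+// Editor's illustration (a hedged sketch, not upstream code): the numeric
+// leniency flags above accept, for example, an int32 value against an int64
+// schema (isLowerInt), or a float64 holding an integral value against an
+// integer schema (isFloatInt):
+//
+//    {type: integer, format: int64} vs int32(7)   // accepted
+//    {type: integer}                vs float64(3) // accepted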
diff --git a/vendor/github.com/go-openapi/validate/update-fixtures.sh b/vendor/github.com/go-openapi/validate/update-fixtures.sh
new file mode 100644
index 000000000..21b06e2b0
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/update-fixtures.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+set -eu -o pipefail
+dir=$(git rev-parse --show-toplevel)
+scratch=$(mktemp -d -t tmp.XXXXXXXXXX)
+
+function finish {
+ rm -rf "$scratch"
+}
+trap finish EXIT SIGHUP SIGINT SIGTERM
+
+cd "$scratch"
+git clone https://github.com/json-schema-org/JSON-Schema-Test-Suite Suite
+cp -r Suite/tests/draft4/* "$dir/fixtures/jsonschema_suite"
+cp -a Suite/remotes "$dir/fixtures/jsonschema_suite"
diff --git a/vendor/github.com/go-openapi/validate/validator.go b/vendor/github.com/go-openapi/validate/validator.go
new file mode 100644
index 000000000..38cdb9bb6
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/validator.go
@@ -0,0 +1,645 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import (
+ "fmt"
+ "reflect"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+)
+
+// An EntityValidator is an interface for things that can validate entities
+type EntityValidator interface {
+ Validate(interface{}) *Result
+}
+
+type valueValidator interface {
+ SetPath(path string)
+ Applies(interface{}, reflect.Kind) bool
+ Validate(interface{}) *Result
+}
+
+type itemsValidator struct {
+ items *spec.Items
+ root interface{}
+ path string
+ in string
+ validators []valueValidator
+ KnownFormats strfmt.Registry
+}
+
+func newItemsValidator(path, in string, items *spec.Items, root interface{}, formats strfmt.Registry) *itemsValidator {
+ iv := &itemsValidator{path: path, in: in, items: items, root: root, KnownFormats: formats}
+ iv.validators = []valueValidator{
+ &typeValidator{
+ Type: spec.StringOrArray([]string{items.Type}),
+ Nullable: items.Nullable,
+ Format: items.Format,
+ In: in,
+ Path: path,
+ },
+ iv.stringValidator(),
+ iv.formatValidator(),
+ iv.numberValidator(),
+ iv.sliceValidator(),
+ iv.commonValidator(),
+ }
+ return iv
+}
+
+func (i *itemsValidator) Validate(index int, data interface{}) *Result {
+ tpe := reflect.TypeOf(data)
+ kind := tpe.Kind()
+ mainResult := new(Result)
+ path := fmt.Sprintf("%s.%d", i.path, index)
+
+ for _, validator := range i.validators {
+ validator.SetPath(path)
+ if validator.Applies(i.root, kind) {
+ result := validator.Validate(data)
+ mainResult.Merge(result)
+ mainResult.Inc()
+ if result != nil && result.HasErrors() {
+ return mainResult
+ }
+ }
+ }
+ return mainResult
+}
+
+func (i *itemsValidator) commonValidator() valueValidator {
+ return &basicCommonValidator{
+ In: i.in,
+ Default: i.items.Default,
+ Enum: i.items.Enum,
+ }
+}
+
+func (i *itemsValidator) sliceValidator() valueValidator {
+ return &basicSliceValidator{
+ In: i.in,
+ Default: i.items.Default,
+ MaxItems: i.items.MaxItems,
+ MinItems: i.items.MinItems,
+ UniqueItems: i.items.UniqueItems,
+ Source: i.root,
+ Items: i.items.Items,
+ KnownFormats: i.KnownFormats,
+ }
+}
+
+func (i *itemsValidator) numberValidator() valueValidator {
+ return &numberValidator{
+ In: i.in,
+ Default: i.items.Default,
+ MultipleOf: i.items.MultipleOf,
+ Maximum: i.items.Maximum,
+ ExclusiveMaximum: i.items.ExclusiveMaximum,
+ Minimum: i.items.Minimum,
+ ExclusiveMinimum: i.items.ExclusiveMinimum,
+ Type: i.items.Type,
+ Format: i.items.Format,
+ }
+}
+
+func (i *itemsValidator) stringValidator() valueValidator {
+ return &stringValidator{
+ In: i.in,
+ Default: i.items.Default,
+ MaxLength: i.items.MaxLength,
+ MinLength: i.items.MinLength,
+ Pattern: i.items.Pattern,
+ AllowEmptyValue: false,
+ }
+}
+
+func (i *itemsValidator) formatValidator() valueValidator {
+ return &formatValidator{
+ In: i.in,
+ //Default: i.items.Default,
+ Format: i.items.Format,
+ KnownFormats: i.KnownFormats,
+ }
+}
+
+type basicCommonValidator struct {
+ Path string
+ In string
+ Default interface{}
+ Enum []interface{}
+}
+
+func (b *basicCommonValidator) SetPath(path string) {
+ b.Path = path
+}
+
+func (b *basicCommonValidator) Applies(source interface{}, kind reflect.Kind) bool {
+ switch source.(type) {
+ case *spec.Parameter, *spec.Schema, *spec.Header:
+ return true
+ }
+ return false
+}
+
+func (b *basicCommonValidator) Validate(data interface{}) (res *Result) {
+ if len(b.Enum) > 0 {
+ for _, enumValue := range b.Enum {
+ actualType := reflect.TypeOf(enumValue)
+ if actualType != nil { // Safeguard
+ expectedValue := reflect.ValueOf(data)
+ if expectedValue.IsValid() && expectedValue.Type().ConvertibleTo(actualType) {
+ if reflect.DeepEqual(expectedValue.Convert(actualType).Interface(), enumValue) {
+ return nil
+ }
+ }
+ }
+ }
+ return errorHelp.sErr(errors.EnumFail(b.Path, b.In, data, b.Enum))
+ }
+ return nil
+}
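+
+// Editor's illustration (a hedged sketch, not upstream code): the
+// convertibility check above lets a value match an enum member of a
+// compatible type:
+//
+//    Enum: []interface{}{1.0, 2.0}
+//    Validate(1) // int 1 converts to float64 1.0 -> match, nil result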
+
+// A HeaderValidator has a very limited subset of validations to apply
+type HeaderValidator struct {
+ name string
+ header *spec.Header
+ validators []valueValidator
+ KnownFormats strfmt.Registry
+}
+
+// NewHeaderValidator creates a new header validator object
+func NewHeaderValidator(name string, header *spec.Header, formats strfmt.Registry) *HeaderValidator {
+ p := &HeaderValidator{name: name, header: header, KnownFormats: formats}
+ p.validators = []valueValidator{
+ &typeValidator{
+ Type: spec.StringOrArray([]string{header.Type}),
+ Nullable: header.Nullable,
+ Format: header.Format,
+ In: "header",
+ Path: name,
+ },
+ p.stringValidator(),
+ p.formatValidator(),
+ p.numberValidator(),
+ p.sliceValidator(),
+ p.commonValidator(),
+ }
+ return p
+}
+
+// Validate the value of the header against its schema
+func (p *HeaderValidator) Validate(data interface{}) *Result {
+ result := new(Result)
+ tpe := reflect.TypeOf(data)
+ kind := tpe.Kind()
+
+ for _, validator := range p.validators {
+ if validator.Applies(p.header, kind) {
+ if err := validator.Validate(data); err != nil {
+ result.Merge(err)
+ if err.HasErrors() {
+ return result
+ }
+ }
+ }
+ }
+ return nil
+}
+
+func (p *HeaderValidator) commonValidator() valueValidator {
+ return &basicCommonValidator{
+ Path: p.name,
+ In: "response",
+ Default: p.header.Default,
+ Enum: p.header.Enum,
+ }
+}
+
+func (p *HeaderValidator) sliceValidator() valueValidator {
+ return &basicSliceValidator{
+ Path: p.name,
+ In: "response",
+ Default: p.header.Default,
+ MaxItems: p.header.MaxItems,
+ MinItems: p.header.MinItems,
+ UniqueItems: p.header.UniqueItems,
+ Items: p.header.Items,
+ Source: p.header,
+ KnownFormats: p.KnownFormats,
+ }
+}
+
+func (p *HeaderValidator) numberValidator() valueValidator {
+ return &numberValidator{
+ Path: p.name,
+ In: "response",
+ Default: p.header.Default,
+ MultipleOf: p.header.MultipleOf,
+ Maximum: p.header.Maximum,
+ ExclusiveMaximum: p.header.ExclusiveMaximum,
+ Minimum: p.header.Minimum,
+ ExclusiveMinimum: p.header.ExclusiveMinimum,
+ Type: p.header.Type,
+ Format: p.header.Format,
+ }
+}
+
+func (p *HeaderValidator) stringValidator() valueValidator {
+ return &stringValidator{
+ Path: p.name,
+ In: "response",
+ Default: p.header.Default,
+ Required: true,
+ MaxLength: p.header.MaxLength,
+ MinLength: p.header.MinLength,
+ Pattern: p.header.Pattern,
+ AllowEmptyValue: false,
+ }
+}
+
+func (p *HeaderValidator) formatValidator() valueValidator {
+ return &formatValidator{
+ Path: p.name,
+ In: "response",
+ //Default: p.header.Default,
+ Format: p.header.Format,
+ KnownFormats: p.KnownFormats,
+ }
+}
+
+// A ParamValidator has a very limited subset of validations to apply
+type ParamValidator struct {
+ param *spec.Parameter
+ validators []valueValidator
+ KnownFormats strfmt.Registry
+}
+
+// NewParamValidator creates a new param validator object
+func NewParamValidator(param *spec.Parameter, formats strfmt.Registry) *ParamValidator {
+ p := &ParamValidator{param: param, KnownFormats: formats}
+ p.validators = []valueValidator{
+ &typeValidator{
+ Type: spec.StringOrArray([]string{param.Type}),
+ Nullable: param.Nullable,
+ Format: param.Format,
+ In: param.In,
+ Path: param.Name,
+ },
+ p.stringValidator(),
+ p.formatValidator(),
+ p.numberValidator(),
+ p.sliceValidator(),
+ p.commonValidator(),
+ }
+ return p
+}
+
+// Validate the data against the description of the parameter
+func (p *ParamValidator) Validate(data interface{}) *Result {
+ result := new(Result)
+ tpe := reflect.TypeOf(data)
+ kind := tpe.Kind()
+
+ // TODO: validate type
+ for _, validator := range p.validators {
+ if validator.Applies(p.param, kind) {
+ if err := validator.Validate(data); err != nil {
+ result.Merge(err)
+ if err.HasErrors() {
+ return result
+ }
+ }
+ }
+ }
+ return nil
+}
+
+func (p *ParamValidator) commonValidator() valueValidator {
+ return &basicCommonValidator{
+ Path: p.param.Name,
+ In: p.param.In,
+ Default: p.param.Default,
+ Enum: p.param.Enum,
+ }
+}
+
+func (p *ParamValidator) sliceValidator() valueValidator {
+ return &basicSliceValidator{
+ Path: p.param.Name,
+ In: p.param.In,
+ Default: p.param.Default,
+ MaxItems: p.param.MaxItems,
+ MinItems: p.param.MinItems,
+ UniqueItems: p.param.UniqueItems,
+ Items: p.param.Items,
+ Source: p.param,
+ KnownFormats: p.KnownFormats,
+ }
+}
+
+func (p *ParamValidator) numberValidator() valueValidator {
+ return &numberValidator{
+ Path: p.param.Name,
+ In: p.param.In,
+ Default: p.param.Default,
+ MultipleOf: p.param.MultipleOf,
+ Maximum: p.param.Maximum,
+ ExclusiveMaximum: p.param.ExclusiveMaximum,
+ Minimum: p.param.Minimum,
+ ExclusiveMinimum: p.param.ExclusiveMinimum,
+ Type: p.param.Type,
+ Format: p.param.Format,
+ }
+}
+
+func (p *ParamValidator) stringValidator() valueValidator {
+ return &stringValidator{
+ Path: p.param.Name,
+ In: p.param.In,
+ Default: p.param.Default,
+ AllowEmptyValue: p.param.AllowEmptyValue,
+ Required: p.param.Required,
+ MaxLength: p.param.MaxLength,
+ MinLength: p.param.MinLength,
+ Pattern: p.param.Pattern,
+ }
+}
+
+func (p *ParamValidator) formatValidator() valueValidator {
+ return &formatValidator{
+ Path: p.param.Name,
+ In: p.param.In,
+ //Default: p.param.Default,
+ Format: p.param.Format,
+ KnownFormats: p.KnownFormats,
+ }
+}
+
+type basicSliceValidator struct {
+ Path string
+ In string
+ Default interface{}
+ MaxItems *int64
+ MinItems *int64
+ UniqueItems bool
+ Items *spec.Items
+ Source interface{}
+ itemsValidator *itemsValidator
+ KnownFormats strfmt.Registry
+}
+
+func (s *basicSliceValidator) SetPath(path string) {
+ s.Path = path
+}
+
+func (s *basicSliceValidator) Applies(source interface{}, kind reflect.Kind) bool {
+ switch source.(type) {
+ case *spec.Parameter, *spec.Items, *spec.Header:
+ return kind == reflect.Slice
+ }
+ return false
+}
+
+func (s *basicSliceValidator) Validate(data interface{}) *Result {
+ val := reflect.ValueOf(data)
+
+ size := int64(val.Len())
+ if s.MinItems != nil {
+ if err := MinItems(s.Path, s.In, size, *s.MinItems); err != nil {
+ return errorHelp.sErr(err)
+ }
+ }
+
+ if s.MaxItems != nil {
+ if err := MaxItems(s.Path, s.In, size, *s.MaxItems); err != nil {
+ return errorHelp.sErr(err)
+ }
+ }
+
+ if s.UniqueItems {
+ if err := UniqueItems(s.Path, s.In, data); err != nil {
+ return errorHelp.sErr(err)
+ }
+ }
+
+ if s.itemsValidator == nil && s.Items != nil {
+ s.itemsValidator = newItemsValidator(s.Path, s.In, s.Items, s.Source, s.KnownFormats)
+ }
+
+ if s.itemsValidator != nil {
+ for i := 0; i < int(size); i++ {
+ ele := val.Index(i)
+ if err := s.itemsValidator.Validate(i, ele.Interface()); err != nil && err.HasErrors() {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+/* unused
+func (s *basicSliceValidator) hasDuplicates(value reflect.Value, size int) bool {
+ dict := make(map[interface{}]struct{})
+ for i := 0; i < size; i++ {
+ ele := value.Index(i)
+ if _, ok := dict[ele.Interface()]; ok {
+ return true
+ }
+ dict[ele.Interface()] = struct{}{}
+ }
+ return false
+}
+*/
+
+type numberValidator struct {
+ Path string
+ In string
+ Default interface{}
+ MultipleOf *float64
+ Maximum *float64
+ ExclusiveMaximum bool
+ Minimum *float64
+ ExclusiveMinimum bool
+ // Allows for more accurate behavior regarding integers
+ Type string
+ Format string
+}
+
+func (n *numberValidator) SetPath(path string) {
+ n.Path = path
+}
+
+func (n *numberValidator) Applies(source interface{}, kind reflect.Kind) bool {
+ switch source.(type) {
+ case *spec.Parameter, *spec.Schema, *spec.Items, *spec.Header:
+ isInt := kind >= reflect.Int && kind <= reflect.Uint64
+ isFloat := kind == reflect.Float32 || kind == reflect.Float64
+ r := isInt || isFloat
+ debugLog("schema props validator for %q applies %t for %T (kind: %v) isInt=%t, isFloat=%t\n", n.Path, r, source, kind, isInt, isFloat)
+ return r
+ }
+ debugLog("schema props validator for %q applies %t for %T (kind: %v)\n", n.Path, false, source, kind)
+ return false
+}
+
+// Validate provides a validator for generic JSON numbers.
+//
+// By default, numbers are internally represented as float64.
+// Formats float or float32 may alter this behavior by mapping to float32.
+// A special validation process is followed for integers, with optional "format":
+// this is an attempt to provide a validation with native types.
+//
+// NOTE: since the constraint specified (boundary, multipleOf) is unmarshalled
+// as float64, loss of information remains possible (e.g. on very large integers).
+//
+// Since this value directly comes from the unmarshalling, it is not possible
+// at this stage of processing to check further and guarantee the correctness of such values.
+//
+// Normally, a check against the JSON Number.MAX_SAFE_INTEGER (resp. Number.MIN_SAFE_INTEGER)
+// would ensure we do not incur such a loss.
+//
+// If this is the case, replace AddErrors() by AddWarnings() and IsValid() by !HasWarnings().
+//
+// TODO: consider replacing boundary check errors by simple warnings.
+//
+// TODO: default boundaries with MAX_SAFE_INTEGER are not checked (specific to json.Number?)
+func (n *numberValidator) Validate(val interface{}) *Result {
+ res := new(Result)
+
+ resMultiple := new(Result)
+ resMinimum := new(Result)
+ resMaximum := new(Result)
+
+ // Used only to attempt to validate the constraint on the value,
+ // even when the value or the specified constraint does not match the type and format
+ data := valueHelp.asFloat64(val)
+
+ // Is the provided value within the range of the specified numeric type and format?
+ res.AddErrors(IsValueValidAgainstRange(val, n.Type, n.Format, "Checked", n.Path))
+
+ if n.MultipleOf != nil {
+ // Is the constraint specifier within the range of the specific numeric type and format?
+ resMultiple.AddErrors(IsValueValidAgainstRange(*n.MultipleOf, n.Type, n.Format, "MultipleOf", n.Path))
+ if resMultiple.IsValid() {
+ // Constraint validated with compatible types
+ if err := MultipleOfNativeType(n.Path, n.In, val, *n.MultipleOf); err != nil {
+ resMultiple.Merge(errorHelp.sErr(err))
+ }
+ } else {
+ // Constraint nevertheless validated, converted as general number
+ if err := MultipleOf(n.Path, n.In, data, *n.MultipleOf); err != nil {
+ resMultiple.Merge(errorHelp.sErr(err))
+ }
+ }
+ }
+
+ // nolint: dupl
+ if n.Maximum != nil {
+ // Is the constraint specifier within the range of the specific numeric type and format?
+ resMaximum.AddErrors(IsValueValidAgainstRange(*n.Maximum, n.Type, n.Format, "Maximum boundary", n.Path))
+ if resMaximum.IsValid() {
+ // Constraint validated with compatible types
+ if err := MaximumNativeType(n.Path, n.In, val, *n.Maximum, n.ExclusiveMaximum); err != nil {
+ resMaximum.Merge(errorHelp.sErr(err))
+ }
+ } else {
+ // Constraint nevertheless validated, converted as general number
+ if err := Maximum(n.Path, n.In, data, *n.Maximum, n.ExclusiveMaximum); err != nil {
+ resMaximum.Merge(errorHelp.sErr(err))
+ }
+ }
+ }
+
+ // nolint: dupl
+ if n.Minimum != nil {
+ // Is the constraint specifier within the range of the specific numeric type and format?
+ resMinimum.AddErrors(IsValueValidAgainstRange(*n.Minimum, n.Type, n.Format, "Minimum boundary", n.Path))
+ if resMinimum.IsValid() {
+ // Constraint validated with compatible types
+ if err := MinimumNativeType(n.Path, n.In, val, *n.Minimum, n.ExclusiveMinimum); err != nil {
+ resMinimum.Merge(errorHelp.sErr(err))
+ }
+ } else {
+ // Constraint nevertheless validated, converted as general number
+ if err := Minimum(n.Path, n.In, data, *n.Minimum, n.ExclusiveMinimum); err != nil {
+ resMinimum.Merge(errorHelp.sErr(err))
+ }
+ }
+ }
+ res.Merge(resMultiple, resMinimum, resMaximum)
+ res.Inc()
+ return res
+}
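+
+// Editor's illustration (a hedged sketch, not upstream code): each
+// constraint is first checked against the declared type's range; when it
+// fits, the typed comparison is used, otherwise the mismatch is reported
+// and the comparison falls back to plain float64. E.g. with Type "integer",
+// Format "int32":
+//
+//    Maximum: 100   // within int32 range: MaximumNativeType is used
+//    Maximum: 1e10  // out of int32 range: reported, then Maximum (float64)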
+
+type stringValidator struct {
+ Default interface{}
+ Required bool
+ AllowEmptyValue bool
+ MaxLength *int64
+ MinLength *int64
+ Pattern string
+ Path string
+ In string
+}
+
+func (s *stringValidator) SetPath(path string) {
+ s.Path = path
+}
+
+func (s *stringValidator) Applies(source interface{}, kind reflect.Kind) bool {
+ switch source.(type) {
+ case *spec.Parameter, *spec.Schema, *spec.Items, *spec.Header:
+ r := kind == reflect.String
+ debugLog("string validator for %q applies %t for %T (kind: %v)\n", s.Path, r, source, kind)
+ return r
+ }
+ debugLog("string validator for %q applies %t for %T (kind: %v)\n", s.Path, false, source, kind)
+ return false
+}
+
+func (s *stringValidator) Validate(val interface{}) *Result {
+ data, ok := val.(string)
+ if !ok {
+ return errorHelp.sErr(errors.InvalidType(s.Path, s.In, stringType, val))
+ }
+
+ if s.Required && !s.AllowEmptyValue && (s.Default == nil || s.Default == "") {
+ if err := RequiredString(s.Path, s.In, data); err != nil {
+ return errorHelp.sErr(err)
+ }
+ }
+
+ if s.MaxLength != nil {
+ if err := MaxLength(s.Path, s.In, data, *s.MaxLength); err != nil {
+ return errorHelp.sErr(err)
+ }
+ }
+
+ if s.MinLength != nil {
+ if err := MinLength(s.Path, s.In, data, *s.MinLength); err != nil {
+ return errorHelp.sErr(err)
+ }
+ }
+
+ if s.Pattern != "" {
+ if err := Pattern(s.Path, s.In, data, s.Pattern); err != nil {
+ return errorHelp.sErr(err)
+ }
+ }
+ return nil
+}
diff --git a/vendor/github.com/go-openapi/validate/values.go b/vendor/github.com/go-openapi/validate/values.go
new file mode 100644
index 000000000..e7ad8c103
--- /dev/null
+++ b/vendor/github.com/go-openapi/validate/values.go
@@ -0,0 +1,450 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package validate
+
+import (
+ "context"
+ "fmt"
+ "reflect"
+ "strings"
+ "unicode/utf8"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+)
+
+// Enum validates if the data is a member of the enum
+func Enum(path, in string, data interface{}, enum interface{}) *errors.Validation {
+ return EnumCase(path, in, data, enum, true)
+}
+
+// EnumCase validates if the data is a member of the enum, optionally ignoring case for strings
+func EnumCase(path, in string, data interface{}, enum interface{}, caseSensitive bool) *errors.Validation {
+ val := reflect.ValueOf(enum)
+ if val.Kind() != reflect.Slice {
+ return nil
+ }
+
+ dataString := convertEnumCaseStringKind(data, caseSensitive)
+ var values []interface{}
+ for i := 0; i < val.Len(); i++ {
+ ele := val.Index(i)
+ enumValue := ele.Interface()
+ if data != nil {
+ if reflect.DeepEqual(data, enumValue) {
+ return nil
+ }
+ enumString := convertEnumCaseStringKind(enumValue, caseSensitive)
+ if dataString != nil && enumString != nil && strings.EqualFold(*dataString, *enumString) {
+ return nil
+ }
+ actualType := reflect.TypeOf(enumValue)
+ if actualType == nil { // Safeguard. Frankly, I don't know how we may get a nil
+ continue
+ }
+ expectedValue := reflect.ValueOf(data)
+ if expectedValue.IsValid() && expectedValue.Type().ConvertibleTo(actualType) {
+ // Attempt comparison after type conversion
+ if reflect.DeepEqual(expectedValue.Convert(actualType).Interface(), enumValue) {
+ return nil
+ }
+ }
+ }
+ values = append(values, enumValue)
+ }
+ return errors.EnumFail(path, in, data, values)
+}
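+
+// Editor's illustration (a hedged sketch, not upstream code): with
+// caseSensitive=false, string comparison falls back to strings.EqualFold:
+//
+//    EnumCase("p", "query", "RED", []interface{}{"red"}, false) // nil
+//    EnumCase("p", "query", "RED", []interface{}{"red"}, true)  // EnumFail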
+
+// convertEnumCaseStringKind converts the value to a string pointer when it is of kind string and case insensitivity is set
+func convertEnumCaseStringKind(value interface{}, caseSensitive bool) *string {
+ if caseSensitive {
+ return nil
+ }
+
+ val := reflect.ValueOf(value)
+ if val.Kind() != reflect.String {
+ return nil
+ }
+
+ str := fmt.Sprintf("%v", value)
+ return &str
+}
+
+// MinItems validates that there are at least n items in a slice
+func MinItems(path, in string, size, min int64) *errors.Validation {
+ if size < min {
+ return errors.TooFewItems(path, in, min, size)
+ }
+ return nil
+}
+
+// MaxItems validates that there are at most n items in a slice
+func MaxItems(path, in string, size, max int64) *errors.Validation {
+ if size > max {
+ return errors.TooManyItems(path, in, max, size)
+ }
+ return nil
+}
+
+// UniqueItems validates that the provided slice has unique elements
+func UniqueItems(path, in string, data interface{}) *errors.Validation {
+ val := reflect.ValueOf(data)
+ if val.Kind() != reflect.Slice {
+ return nil
+ }
+ var unique []interface{}
+ for i := 0; i < val.Len(); i++ {
+ v := val.Index(i).Interface()
+ for _, u := range unique {
+ if reflect.DeepEqual(v, u) {
+ return errors.DuplicateItems(path, in)
+ }
+ }
+ unique = append(unique, v)
+ }
+ return nil
+}
+
+// MinLength validates a string for minimum length
+func MinLength(path, in, data string, minLength int64) *errors.Validation {
+ strLen := int64(utf8.RuneCount([]byte(data)))
+ if strLen < minLength {
+ return errors.TooShort(path, in, minLength, data)
+ }
+ return nil
+}
+
+// MaxLength validates a string for maximum length
+func MaxLength(path, in, data string, maxLength int64) *errors.Validation {
+ strLen := int64(utf8.RuneCount([]byte(data)))
+ if strLen > maxLength {
+ return errors.TooLong(path, in, maxLength, data)
+ }
+ return nil
+}
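+
+// Editor's illustration (a hedged sketch, not upstream code): lengths are
+// counted in runes, not bytes, so multi-byte UTF-8 characters count once:
+//
+//    MaxLength("p", "query", "héllo", 5) // nil: 5 runes, though 6 bytes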
+
+// ReadOnly validates an interface for readonly
+func ReadOnly(ctx context.Context, path, in string, data interface{}) *errors.Validation {
+
+ // read only is only validated when operationType is request
+ if op := extractOperationType(ctx); op != request {
+ return nil
+ }
+
+ // data must be the zero value of its type
+ val := reflect.ValueOf(data)
+ if val.IsValid() {
+ if reflect.DeepEqual(reflect.Zero(val.Type()).Interface(), val.Interface()) {
+ return nil
+ }
+ } else {
+ return nil
+ }
+
+ return errors.ReadOnly(path, in, data)
+}
+
+// Required validates an interface for requiredness
+func Required(path, in string, data interface{}) *errors.Validation {
+ val := reflect.ValueOf(data)
+ if val.IsValid() {
+ if reflect.DeepEqual(reflect.Zero(val.Type()).Interface(), val.Interface()) {
+ return errors.Required(path, in, data)
+ }
+ return nil
+ }
+ return errors.Required(path, in, data)
+}
+
+// RequiredString validates a string for requiredness
+func RequiredString(path, in, data string) *errors.Validation {
+ if data == "" {
+ return errors.Required(path, in, data)
+ }
+ return nil
+}
+
+// RequiredNumber validates a number for requiredness
+func RequiredNumber(path, in string, data float64) *errors.Validation {
+ if data == 0 {
+ return errors.Required(path, in, data)
+ }
+ return nil
+}
+
+// Pattern validates a string against a regular expression
+func Pattern(path, in, data, pattern string) *errors.Validation {
+ re, err := compileRegexp(pattern)
+ if err != nil {
+ return errors.FailedPattern(path, in, fmt.Sprintf("%s, but pattern is invalid: %s", pattern, err.Error()), data)
+ }
+ if !re.MatchString(data) {
+ return errors.FailedPattern(path, in, pattern, data)
+ }
+ return nil
+}
+
+// MaximumInt validates if a number is smaller than a given maximum
+func MaximumInt(path, in string, data, max int64, exclusive bool) *errors.Validation {
+ if (!exclusive && data > max) || (exclusive && data >= max) {
+ return errors.ExceedsMaximumInt(path, in, max, exclusive, data)
+ }
+ return nil
+}
+
+// MaximumUint validates if a number is smaller than a given maximum
+func MaximumUint(path, in string, data, max uint64, exclusive bool) *errors.Validation {
+ if (!exclusive && data > max) || (exclusive && data >= max) {
+ return errors.ExceedsMaximumUint(path, in, max, exclusive, data)
+ }
+ return nil
+}
+
+// Maximum validates if a number is smaller than a given maximum
+func Maximum(path, in string, data, max float64, exclusive bool) *errors.Validation {
+ if (!exclusive && data > max) || (exclusive && data >= max) {
+ return errors.ExceedsMaximum(path, in, max, exclusive, data)
+ }
+ return nil
+}
+
+// Minimum validates if a number is larger than a given minimum
+func Minimum(path, in string, data, min float64, exclusive bool) *errors.Validation {
+ if (!exclusive && data < min) || (exclusive && data <= min) {
+ return errors.ExceedsMinimum(path, in, min, exclusive, data)
+ }
+ return nil
+}
+
+// MinimumInt validates if a number is larger than a given minimum
+func MinimumInt(path, in string, data, min int64, exclusive bool) *errors.Validation {
+ if (!exclusive && data < min) || (exclusive && data <= min) {
+ return errors.ExceedsMinimumInt(path, in, min, exclusive, data)
+ }
+ return nil
+}
+
+// MinimumUint validates if a number is larger than a given minimum
+func MinimumUint(path, in string, data, min uint64, exclusive bool) *errors.Validation {
+ if (!exclusive && data < min) || (exclusive && data <= min) {
+ return errors.ExceedsMinimumUint(path, in, min, exclusive, data)
+ }
+ return nil
+}
+
+// MultipleOf validates if the provided number is a multiple of the factor
+func MultipleOf(path, in string, data, factor float64) *errors.Validation {
+ // multipleOf factor must be positive
+ if factor <= 0 {
+ return errors.MultipleOfMustBePositive(path, in, factor)
+ }
+ var mult float64
+ if factor < 1 {
+ mult = 1 / factor * data
+ } else {
+ mult = data / factor
+ }
+ if !swag.IsFloat64AJSONInteger(mult) {
+ return errors.NotMultipleOf(path, in, factor, data)
+ }
+ return nil
+}
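+
+// Editor's illustration (a hedged sketch, not upstream code): the factor < 1
+// branch sidesteps a float64 division artifact:
+//
+//    0.3 / 0.1     // ~2.9999999999999996: would not read as an integer
+//    1 / 0.1 * 0.3 // 3.0: IsFloat64AJSONInteger passes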
+
+// MultipleOfInt validates if the provided integer is a multiple of the factor
+func MultipleOfInt(path, in string, data int64, factor int64) *errors.Validation {
+ // multipleOf factor must be positive
+ if factor <= 0 {
+ return errors.MultipleOfMustBePositive(path, in, factor)
+ }
+ mult := data / factor
+ if mult*factor != data {
+ return errors.NotMultipleOf(path, in, factor, data)
+ }
+ return nil
+}
+
+// MultipleOfUint validates if the provided unsigned integer is a multiple of the factor
+func MultipleOfUint(path, in string, data, factor uint64) *errors.Validation {
+ // multipleOf factor must be positive
+ if factor == 0 {
+ return errors.MultipleOfMustBePositive(path, in, factor)
+ }
+ mult := data / factor
+ if mult*factor != data {
+ return errors.NotMultipleOf(path, in, factor, data)
+ }
+ return nil
+}
+
+// FormatOf validates if a string matches a format in the format registry
+func FormatOf(path, in, format, data string, registry strfmt.Registry) *errors.Validation {
+ if registry == nil {
+ registry = strfmt.Default
+ }
+ if ok := registry.ContainsName(format); !ok {
+ return errors.InvalidTypeName(format)
+ }
+ if ok := registry.Validates(format, data); !ok {
+ return errors.InvalidType(path, in, format, data)
+ }
+ return nil
+}
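+
+// Illustrative usage (editor's sketch, not upstream code): passing a nil
+// registry falls back to strfmt.Default, which already knows common formats
+// such as "date-time"; the "created_at" field is hypothetical.
+//
+//	FormatOf("created_at", "body", "date-time", "2023-01-02T15:04:05Z", nil) // nil
+//	FormatOf("created_at", "body", "date-time", "not-a-date", nil)          // *errors.Validation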
+
+// MaximumNativeType provides native type constraint validation as a facade
+// to the various numeric-type versions of the Maximum constraint check.
+//
+// Assumes that any possible loss during conversion has been
+// checked beforehand.
+//
+// NOTE: currently, the max value is marshalled as a float64, no matter what,
+// which means there may be a loss during conversions (e.g. for very large integers)
+//
+// TODO: Normally, a JSON MAX_SAFE_INTEGER check would ensure conversion remains loss-free
+func MaximumNativeType(path, in string, val interface{}, max float64, exclusive bool) *errors.Validation {
+ kind := reflect.ValueOf(val).Type().Kind()
+ switch kind {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ value := valueHelp.asInt64(val)
+ return MaximumInt(path, in, value, int64(max), exclusive)
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ value := valueHelp.asUint64(val)
+ if max < 0 {
+ return errors.ExceedsMaximum(path, in, max, exclusive, val)
+ }
+ return MaximumUint(path, in, value, uint64(max), exclusive)
+ case reflect.Float32, reflect.Float64:
+ fallthrough
+ default:
+ value := valueHelp.asFloat64(val)
+ return Maximum(path, in, value, max, exclusive)
+ }
+}
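+
+// Illustrative usage (editor's sketch, not upstream code): the facade
+// dispatches on the dynamic kind of val, so signed, unsigned and float
+// values all route to the matching check; field names are hypothetical.
+//
+//	MaximumNativeType("count", "query", int64(10), 5, false) // exceeds maximum
+//	MaximumNativeType("count", "query", uint8(3), 5, false)  // nil
+//	MaximumNativeType("ratio", "query", 0.75, 1.0, true)     // nil: 0.75 < 1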
+
+// MinimumNativeType provides native type constraint validation as a facade
+// to the various numeric-type versions of the Minimum constraint check.
+//
+// Assumes that any possible loss during conversion has been
+// checked beforehand.
+//
+// NOTE: currently, the min value is marshalled as a float64, no matter what,
+// which means there may be a loss during conversions (e.g. for very large integers)
+//
+// TODO: Normally, a JSON MAX_SAFE_INTEGER check would ensure conversion remains loss-free
+func MinimumNativeType(path, in string, val interface{}, min float64, exclusive bool) *errors.Validation {
+ kind := reflect.ValueOf(val).Type().Kind()
+ switch kind {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ value := valueHelp.asInt64(val)
+ return MinimumInt(path, in, value, int64(min), exclusive)
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ value := valueHelp.asUint64(val)
+ if min < 0 {
+ return nil
+ }
+ return MinimumUint(path, in, value, uint64(min), exclusive)
+ case reflect.Float32, reflect.Float64:
+ fallthrough
+ default:
+ value := valueHelp.asFloat64(val)
+ return Minimum(path, in, value, min, exclusive)
+ }
+}
+
+// MultipleOfNativeType provides native type constraint validation as a facade
+// to the various numeric-type versions of the MultipleOf constraint check.
+//
+// Assumes that any possible loss during conversion has been
+// checked beforehand.
+//
+// NOTE: currently, the multipleOf factor is marshalled as a float64, no matter what,
+// which means there may be a loss during conversions (e.g. for very large integers)
+//
+// TODO: Normally, a JSON MAX_SAFE_INTEGER check would ensure conversion remains loss-free
+func MultipleOfNativeType(path, in string, val interface{}, multipleOf float64) *errors.Validation {
+ kind := reflect.ValueOf(val).Type().Kind()
+ switch kind {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ value := valueHelp.asInt64(val)
+ return MultipleOfInt(path, in, value, int64(multipleOf))
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ value := valueHelp.asUint64(val)
+ return MultipleOfUint(path, in, value, uint64(multipleOf))
+ case reflect.Float32, reflect.Float64:
+ fallthrough
+ default:
+ value := valueHelp.asFloat64(val)
+ return MultipleOf(path, in, value, multipleOf)
+ }
+}
+
+// IsValueValidAgainstRange checks that a numeric value is compatible with
+// the range defined by Type and Format, that is, may be converted without loss.
+//
+// NOTE: this check is about type capacity and not formal verification such as: 1.0 != 1L
+func IsValueValidAgainstRange(val interface{}, typeName, format, prefix, path string) error {
+ kind := reflect.ValueOf(val).Type().Kind()
+
+ // What is the string representation of val
+ var stringRep string
+ switch kind {
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ stringRep = swag.FormatUint64(valueHelp.asUint64(val))
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ stringRep = swag.FormatInt64(valueHelp.asInt64(val))
+ case reflect.Float32, reflect.Float64:
+ stringRep = swag.FormatFloat64(valueHelp.asFloat64(val))
+ default:
+ return fmt.Errorf("%s value number range checking called with invalid (non numeric) val type in %s", prefix, path)
+ }
+
+ var errVal error
+
+ switch typeName {
+ case integerType:
+ switch format {
+ case integerFormatInt32:
+ _, errVal = swag.ConvertInt32(stringRep)
+ case integerFormatUInt32:
+ _, errVal = swag.ConvertUint32(stringRep)
+ case integerFormatUInt64:
+ _, errVal = swag.ConvertUint64(stringRep)
+ case integerFormatInt64:
+ fallthrough
+ default:
+ _, errVal = swag.ConvertInt64(stringRep)
+ }
+ case numberType:
+ fallthrough
+ default:
+ switch format {
+ case numberFormatFloat, numberFormatFloat32:
+ _, errVal = swag.ConvertFloat32(stringRep)
+ case numberFormatDouble, numberFormatFloat64:
+ fallthrough
+ default:
+ // No check can be performed here since
+ // no number beyond float64 is supported
+ }
+ }
+ if errVal != nil { // We don't report the actual errVal from strconv
+ if format != "" {
+ errVal = fmt.Errorf("%s value must be of type %s with format %s in %s", prefix, typeName, format, path)
+ } else {
+ errVal = fmt.Errorf("%s value must be of type %s (default format) in %s", prefix, typeName, path)
+ }
+ }
+ return errVal
+}
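+
+// Illustrative usage (editor's sketch, not upstream code): a value that
+// overflows the target format is rejected with a descriptive error rather
+// than silently truncated; the "limit" path is hypothetical.
+//
+//	IsValueValidAgainstRange(int64(1<<40), "integer", "int32", "default", "limit")
+//	// error: default value must be of type integer with format int32 in limit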
diff --git a/vendor/github.com/go-swagger/go-swagger/LICENSE b/vendor/github.com/go-swagger/go-swagger/LICENSE
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/.gitignore b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/.gitignore
new file mode 100644
index 000000000..60607586b
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/.gitignore
@@ -0,0 +1,5 @@
+swagger
+swagger.json
+models
+operations
+cmd
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff.go
new file mode 100644
index 000000000..5999f4948
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff.go
@@ -0,0 +1,145 @@
+package commands
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "log"
+ "os"
+
+ "errors"
+
+ "github.com/go-openapi/loads"
+ "github.com/go-swagger/go-swagger/cmd/swagger/commands/diff"
+)
+
+// JSONFormat for json
+const JSONFormat = "json"
+
+// DiffCommand is a command that generates the diff of two swagger specs.
+//
+// Options control the report format, the output destination, and an optional ignore file of differences.
+type DiffCommand struct {
+ OnlyBreakingChanges bool `long:"break" short:"b" description:"When present, only shows incompatible changes"`
+ Format string `long:"format" short:"f" description:"When present, writes output as json" default:"txt" choice:"txt" choice:"json"`
+ IgnoreFile string `long:"ignore" short:"i" description:"Exception file of diffs to ignore (copy output from json diff format)" default:"none specified"`
+ Destination string `long:"dest" short:"d" description:"Output destination file or stdout" default:"stdout"`
+ Args struct {
+ OldSpec string `positional-arg-name:"{old spec}"`
+ NewSpec string `positional-arg-name:"{new spec}"`
+ } `required:"2" positional-args:"specs" description:"Input specs to be diff-ed"`
+}
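+
+// Illustrative invocations (editor's sketch, assuming the command is mounted
+// as `swagger diff`; not upstream documentation):
+//
+//	swagger diff --format=json --dest=report.json old.yaml new.yaml
+//	swagger diff -b old.yaml new.yaml   // text report, breaking changes only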
+
+// Execute diffs the two specs provided
+func (c *DiffCommand) Execute(_ []string) error {
+ if c.Args.OldSpec == "" || c.Args.NewSpec == "" {
+ return errors.New(`missing arguments for diff command (use --help for more info)`)
+ }
+
+ c.printInfo()
+
+ var (
+ output io.WriteCloser
+ err error
+ )
+ if c.Destination != "stdout" {
+ output, err = os.OpenFile(c.Destination, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0600)
+ if err != nil {
+ return fmt.Errorf("%s: %w", c.Destination, err)
+ }
+ defer func() {
+ _ = output.Close()
+ }()
+ } else {
+ output = os.Stdout
+ }
+
+ diffs, err := c.getDiffs()
+ if err != nil {
+ return err
+ }
+
+ ignores, err := c.readIgnores()
+ if err != nil {
+ return err
+ }
+
+ diffs = diffs.FilterIgnores(ignores)
+ if len(ignores) > 0 {
+ log.Printf("Diff Report Ignored Items from IgnoreFile")
+ for _, eachItem := range ignores {
+ log.Printf("%s", eachItem.String())
+ }
+ }
+
+ var (
+ input io.Reader
+ warn error
+ )
+ if c.Format != JSONFormat && c.OnlyBreakingChanges {
+ input, err, warn = diffs.ReportCompatibility()
+ } else {
+ input, err, warn = diffs.ReportAllDiffs(c.Format == JSONFormat)
+ }
+ if err != nil {
+ return err
+ }
+ _, err = io.Copy(output, input)
+ if err != nil {
+ return err
+ }
+ return warn
+}
+
+func (c *DiffCommand) readIgnores() (diff.SpecDifferences, error) {
+ ignoreFile := c.IgnoreFile
+ ignoreDiffs := diff.SpecDifferences{}
+
+ if ignoreFile == "none specified" || ignoreFile == "" {
+ return ignoreDiffs, nil
+ }
+ // Open our jsonFile
+ jsonFile, err := os.Open(ignoreFile)
+ if err != nil {
+ return nil, fmt.Errorf("%s: %w", ignoreFile, err)
+ }
+ defer func() {
+ _ = jsonFile.Close()
+ }()
+ byteValue, err := io.ReadAll(jsonFile)
+ if err != nil {
+ return nil, fmt.Errorf("reading %s: %w", ignoreFile, err)
+ }
+ err = json.Unmarshal(byteValue, &ignoreDiffs)
+ if err != nil {
+ return nil, err
+ }
+ return ignoreDiffs, nil
+}
+
+func (c *DiffCommand) getDiffs() (diff.SpecDifferences, error) {
+ oldSpecPath, newSpecPath := c.Args.OldSpec, c.Args.NewSpec
+ swaggerDoc1 := oldSpecPath
+ specDoc1, err := loads.Spec(swaggerDoc1)
+ if err != nil {
+ return nil, err
+ }
+
+ swaggerDoc2 := newSpecPath
+ specDoc2, err := loads.Spec(swaggerDoc2)
+ if err != nil {
+ return nil, err
+ }
+
+ return diff.Compare(specDoc1.Spec(), specDoc2.Spec())
+}
+
+func (c *DiffCommand) printInfo() {
+ log.Println("Run Config:")
+ log.Printf("Spec1: %s", c.Args.OldSpec)
+ log.Printf("Spec2: %s", c.Args.NewSpec)
+ log.Printf("ReportOnlyBreakingChanges (-c) :%v", c.OnlyBreakingChanges)
+ log.Printf("OutputFormat (-f) :%s", c.Format)
+ log.Printf("IgnoreFile (-i) :%s", c.IgnoreFile)
+ log.Printf("Diff Report Destination (-d) :%s", c.Destination)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/array_diff.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/array_diff.go
new file mode 100644
index 000000000..6e2fef3bc
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/array_diff.go
@@ -0,0 +1,106 @@
+package diff
+
+// This is a simple DSL for diffing arrays
+
+// fromArrayStruct utility struct to encompass diffing of string arrays
+type fromArrayStruct struct {
+ from []string
+}
+
+// fromStringArray starts a fluent diff expression
+func fromStringArray(from []string) fromArrayStruct {
+ return fromArrayStruct{from}
+}
+
+// DiffsTo completes a fluent diff expression
+func (f fromArrayStruct) DiffsTo(toArray []string) (added, deleted, common []string) {
+ inFrom := 1
+ inTo := 2
+
+ if f.from == nil {
+ return toArray, []string{}, []string{}
+ }
+
+ m := make(map[string]int, len(toArray))
+ added = make([]string, 0, len(toArray))
+ deleted = make([]string, 0, len(f.from))
+ common = make([]string, 0, len(f.from))
+
+ for _, item := range f.from {
+ m[item] = inFrom
+ }
+
+ for _, item := range toArray {
+ if _, ok := m[item]; ok {
+ m[item] |= inTo
+ } else {
+ m[item] = inTo
+ }
+ }
+ for key, val := range m {
+ switch val {
+ case inFrom:
+ deleted = append(deleted, key)
+ case inTo:
+ added = append(added, key)
+ default:
+ common = append(common, key)
+ }
+ }
+ return
+}
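+
+// Illustrative usage (editor's sketch, not upstream code): the fluent
+// expression classifies every element into exactly one bucket.
+//
+//	added, deleted, common := fromStringArray([]string{"a", "b"}).DiffsTo([]string{"b", "c"})
+//	// added: ["c"], deleted: ["a"], common: ["b"]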
+
+// fromMapStruct utility struct to encompass diffing of string maps
+type fromMapStruct struct {
+ srcMap map[string]interface{}
+}
+
+// fromStringMap starts a comparison by declaring a source map
+func fromStringMap(srcMap map[string]interface{}) fromMapStruct {
+ return fromMapStruct{srcMap}
+}
+
+// Pair stores a pair of items which share a key in two maps
+type Pair struct {
+ First interface{}
+ Second interface{}
+}
+
+// DiffsTo - generates diffs for a comparison
+func (f fromMapStruct) DiffsTo(destMap map[string]interface{}) (added, deleted, common map[string]interface{}) {
+ added = make(map[string]interface{})
+ deleted = make(map[string]interface{})
+ common = make(map[string]interface{})
+
+ inSrc := 1
+ inDest := 2
+
+ m := make(map[string]int)
+
+ // enter values for all items in the source array
+ for key := range f.srcMap {
+ m[key] = inSrc
+ }
+
+ // now either set or 'boolean or' a new flag if in the second collection
+ for key := range destMap {
+ if _, ok := m[key]; ok {
+ m[key] |= inDest
+ } else {
+ m[key] = inDest
+ }
+ }
+ // finally inspect the values and generate the left,right and shared collections
+ // for the shared items, store both values in case there's a diff
+ for key, val := range m {
+ switch val {
+ case inSrc:
+ deleted[key] = f.srcMap[key]
+ case inDest:
+ added[key] = destMap[key]
+ default:
+ common[key] = Pair{f.srcMap[key], destMap[key]}
+ }
+ }
+ return added, deleted, common
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/checks.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/checks.go
new file mode 100644
index 000000000..2ae1b8227
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/checks.go
@@ -0,0 +1,266 @@
+package diff
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+// CompareEnums returns added, deleted enum values
+func CompareEnums(left, right []interface{}) []TypeDiff {
+ diffs := []TypeDiff{}
+
+ leftStrs := []string{}
+ rightStrs := []string{}
+ for _, eachLeft := range left {
+ leftStrs = append(leftStrs, fmt.Sprintf("%v", eachLeft))
+ }
+ for _, eachRight := range right {
+ rightStrs = append(rightStrs, fmt.Sprintf("%v", eachRight))
+ }
+ added, deleted, _ := fromStringArray(leftStrs).DiffsTo(rightStrs)
+ if len(added) > 0 {
+ typeChange := strings.Join(added, ",")
+ diffs = append(diffs, TypeDiff{Change: AddedEnumValue, Description: typeChange})
+ }
+ if len(deleted) > 0 {
+ typeChange := strings.Join(deleted, ",")
+ diffs = append(diffs, TypeDiff{Change: DeletedEnumValue, Description: typeChange})
+ }
+
+ return diffs
+}
+
+// CompareProperties performs a recursive property comparison
+func CompareProperties(location DifferenceLocation, schema1 *spec.Schema, schema2 *spec.Schema, getRefFn1 SchemaFromRefFn, getRefFn2 SchemaFromRefFn, cmp CompareSchemaFn) []SpecDifference {
+ propDiffs := []SpecDifference{}
+
+ if schema1.Properties == nil && schema2.Properties == nil {
+ return propDiffs
+ }
+
+ schema1Props := propertiesFor(schema1, getRefFn1)
+ schema2Props := propertiesFor(schema2, getRefFn2)
+ // find deleted and changed properties
+
+ for eachProp1Name, eachProp1 := range schema1Props {
+ eachProp1 := eachProp1
+ childLoc := addChildDiffNode(location, eachProp1Name, eachProp1.Schema)
+
+ if eachProp2, ok := schema2Props[eachProp1Name]; ok {
+ diffs := CheckToFromRequired(eachProp1.Required, eachProp2.Required)
+ if len(diffs) > 0 {
+ for _, diff := range diffs {
+ propDiffs = append(propDiffs, SpecDifference{DifferenceLocation: childLoc, Code: diff.Change})
+ }
+ }
+ cmp(childLoc, eachProp1.Schema, eachProp2.Schema)
+ } else {
+ propDiffs = append(propDiffs, SpecDifference{DifferenceLocation: childLoc, Code: DeletedProperty})
+ }
+ }
+
+ // find added properties
+ for eachProp2Name, eachProp2 := range schema2.Properties {
+ eachProp2 := eachProp2
+ if _, ok := schema1.Properties[eachProp2Name]; !ok {
+ childLoc := addChildDiffNode(location, eachProp2Name, &eachProp2)
+ propDiffs = append(propDiffs, SpecDifference{DifferenceLocation: childLoc, Code: AddedProperty})
+ }
+ }
+ return propDiffs
+
+}
+
+// CompareFloatValues compares two float data items
+func CompareFloatValues(fieldName string, val1 *float64, val2 *float64, ifGreaterCode SpecChangeCode, ifLessCode SpecChangeCode) []TypeDiff {
+ diffs := []TypeDiff{}
+ if val1 != nil && val2 != nil {
+ if *val2 > *val1 {
+ diffs = append(diffs, TypeDiff{Change: ifGreaterCode, Description: fmt.Sprintf("%s %f->%f", fieldName, *val1, *val2)})
+ } else if *val2 < *val1 {
+ diffs = append(diffs, TypeDiff{Change: ifLessCode, Description: fmt.Sprintf("%s %f->%f", fieldName, *val1, *val2)})
+ }
+ } else {
+ if val1 != val2 {
+ if val1 != nil {
+ diffs = append(diffs, TypeDiff{Change: DeletedConstraint, Description: fmt.Sprintf("%s(%f)", fieldName, *val1)})
+ } else {
+ diffs = append(diffs, TypeDiff{Change: AddedConstraint, Description: fmt.Sprintf("%s(%f)", fieldName, *val2)})
+ }
+ }
+ }
+ return diffs
+}
+
+// CompareIntValues compares two int data items
+func CompareIntValues(fieldName string, val1 *int64, val2 *int64, ifGreaterCode SpecChangeCode, ifLessCode SpecChangeCode) []TypeDiff {
+ diffs := []TypeDiff{}
+ if val1 != nil && val2 != nil {
+ if *val2 > *val1 {
+ diffs = append(diffs, TypeDiff{Change: ifGreaterCode, Description: fmt.Sprintf("%s %d->%d", fieldName, *val1, *val2)})
+ } else if *val2 < *val1 {
+ diffs = append(diffs, TypeDiff{Change: ifLessCode, Description: fmt.Sprintf("%s %d->%d", fieldName, *val1, *val2)})
+ }
+ } else {
+ if val1 != val2 {
+ if val1 != nil {
+ diffs = append(diffs, TypeDiff{Change: DeletedConstraint, Description: fmt.Sprintf("%s(%d)", fieldName, *val1)})
+ } else {
+ diffs = append(diffs, TypeDiff{Change: AddedConstraint, Description: fmt.Sprintf("%s(%d)", fieldName, *val2)})
+ }
+ }
+ }
+ return diffs
+}
+
+// CheckToFromPrimitiveType checks for a diff to or from a primitive type
+func CheckToFromPrimitiveType(diffs []TypeDiff, type1, type2 interface{}) []TypeDiff {
+
+ type1IsPrimitive := isPrimitive(type1)
+ type2IsPrimitive := isPrimitive(type2)
+
+ // Primitive to Obj or Obj to Primitive
+ if type1IsPrimitive != type2IsPrimitive {
+ typeStr1, isarray1 := getSchemaType(type1)
+ typeStr2, isarray2 := getSchemaType(type2)
+ return addTypeDiff(diffs, TypeDiff{Change: ChangedType, FromType: formatTypeString(typeStr1, isarray1), ToType: formatTypeString(typeStr2, isarray2)})
+ }
+
+ return diffs
+}
+
+// CheckRefChange checks whether a property ref has changed
+func CheckRefChange(diffs []TypeDiff, type1, type2 interface{}) (diffReturn []TypeDiff) {
+
+ diffReturn = diffs
+ if isRefType(type1) && isRefType(type2) {
+ // both refs but to different objects (TODO detect renamed object)
+ ref1 := definitionFromRef(getRef(type1))
+ ref2 := definitionFromRef(getRef(type2))
+ if ref1 != ref2 {
+ diffReturn = addTypeDiff(diffReturn, TypeDiff{Change: RefTargetChanged, FromType: getSchemaTypeStr(type1), ToType: getSchemaTypeStr(type2)})
+ }
+ } else if isRefType(type1) != isRefType(type2) {
+ diffReturn = addTypeDiff(diffReturn, TypeDiff{Change: ChangedType, FromType: getSchemaTypeStr(type1), ToType: getSchemaTypeStr(type2)})
+ }
+ return
+}
+
+// checkNumericTypeChanges checks for changes to or from a numeric type
+func checkNumericTypeChanges(diffs []TypeDiff, type1, type2 *spec.SchemaProps) []TypeDiff {
+ // Number
+ _, type1IsNumeric := numberWideness[type1.Type[0]]
+ _, type2IsNumeric := numberWideness[type2.Type[0]]
+
+ if type1IsNumeric && type2IsNumeric {
+ foundDiff := false
+ if type1.ExclusiveMaximum && !type2.ExclusiveMaximum {
+ diffs = addTypeDiff(diffs, TypeDiff{Change: WidenedType, Description: fmt.Sprintf("Exclusive Maximum Removed:%v->%v", type1.ExclusiveMaximum, type2.ExclusiveMaximum)})
+ foundDiff = true
+ }
+ if !type1.ExclusiveMaximum && type2.ExclusiveMaximum {
+ diffs = addTypeDiff(diffs, TypeDiff{Change: NarrowedType, Description: fmt.Sprintf("Exclusive Maximum Added:%v->%v", type1.ExclusiveMaximum, type2.ExclusiveMaximum)})
+ foundDiff = true
+ }
+ if type1.ExclusiveMinimum && !type2.ExclusiveMinimum {
+ diffs = addTypeDiff(diffs, TypeDiff{Change: WidenedType, Description: fmt.Sprintf("Exclusive Minimum Removed:%v->%v", type1.ExclusiveMaximum, type2.ExclusiveMaximum)})
+ foundDiff = true
+ }
+ if !type1.ExclusiveMinimum && type2.ExclusiveMinimum {
+ diffs = addTypeDiff(diffs, TypeDiff{Change: NarrowedType, Description: fmt.Sprintf("Exclusive Minimum Added:%v->%v", type1.ExclusiveMinimum, type2.ExclusiveMinimum)})
+ foundDiff = true
+ }
+ if !foundDiff {
+ maxDiffs := CompareFloatValues("Maximum", type1.Maximum, type2.Maximum, WidenedType, NarrowedType)
+ diffs = append(diffs, maxDiffs...)
+ minDiffs := CompareFloatValues("Minimum", type1.Minimum, type2.Minimum, NarrowedType, WidenedType)
+ diffs = append(diffs, minDiffs...)
+ }
+ }
+ return diffs
+}
+
+// CheckStringTypeChanges checks for changes to or from a string type
+func CheckStringTypeChanges(diffs []TypeDiff, type1, type2 *spec.SchemaProps) []TypeDiff {
+ // string changes
+ if type1.Type[0] == StringType &&
+ type2.Type[0] == StringType {
+ minLengthDiffs := CompareIntValues("MinLength", type1.MinLength, type2.MinLength, NarrowedType, WidenedType)
+ diffs = append(diffs, minLengthDiffs...)
+ maxLengthDiffs := CompareIntValues("MaxLength", type1.MinLength, type2.MinLength, WidenedType, NarrowedType)
+ diffs = append(diffs, maxLengthDiffs...)
+ if type1.Pattern != type2.Pattern {
+ diffs = addTypeDiff(diffs, TypeDiff{Change: ChangedType, Description: fmt.Sprintf("Pattern Changed:%s->%s", type1.Pattern, type2.Pattern)})
+ }
+ if len(type1.Enum) > 0 {
+ enumDiffs := CompareEnums(type1.Enum, type2.Enum)
+ diffs = append(diffs, enumDiffs...)
+ }
+ }
+ return diffs
+}
+
+// CheckToFromRequired checks for changes to or from a required property
+func CheckToFromRequired(required1, required2 bool) (diffs []TypeDiff) {
+ if required1 != required2 {
+ code := ChangedOptionalToRequired
+ if required1 {
+ code = ChangedRequiredToOptional
+ }
+ diffs = addTypeDiff(diffs, TypeDiff{Change: code})
+ }
+ return diffs
+}
+
+const objType = "object"
+
+func getTypeHierarchyChange(type1, type2 string) TypeDiff {
+ fromType := type1
+ if fromType == "" {
+ fromType = objType
+ }
+ toType := type2
+ if toType == "" {
+ toType = objType
+ }
+ diffDescription := fmt.Sprintf("%s -> %s", fromType, toType)
+ if isStringType(type1) && !isStringType(type2) {
+ return TypeDiff{Change: NarrowedType, Description: diffDescription}
+ }
+ if !isStringType(type1) && isStringType(type2) {
+ return TypeDiff{Change: WidenedType, Description: diffDescription}
+ }
+ type1Wideness, type1IsNumeric := numberWideness[type1]
+ type2Wideness, type2IsNumeric := numberWideness[type2]
+ if type1IsNumeric && type2IsNumeric {
+ if type1Wideness == type2Wideness {
+ return TypeDiff{Change: ChangedToCompatibleType, Description: diffDescription}
+ }
+ if type1Wideness > type2Wideness {
+ return TypeDiff{Change: NarrowedType, Description: diffDescription}
+ }
+ if type1Wideness < type2Wideness {
+ return TypeDiff{Change: WidenedType, Description: diffDescription}
+ }
+ }
+ return TypeDiff{Change: ChangedType, Description: diffDescription}
+}
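+
+// Illustrative behaviour (editor's sketch, not upstream code): the
+// numberWideness ranking drives the verdict, so integer -> number widens and
+// the reverse narrows.
+//
+//	getTypeHierarchyChange("integer", "number") // TypeDiff{Change: WidenedType, ...}
+//	getTypeHierarchyChange("number", "integer") // TypeDiff{Change: NarrowedType, ...}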
+
+func isRefType(item interface{}) bool {
+ switch s := item.(type) {
+ case spec.Refable:
+ return s.Ref.String() != ""
+ case *spec.Schema:
+ return s.Ref.String() != ""
+ case *spec.SchemaProps:
+ return s.Ref.String() != ""
+ case *spec.SimpleSchema:
+ return false
+ default:
+ return false
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/compatibility.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/compatibility.go
new file mode 100644
index 000000000..d31c0e63a
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/compatibility.go
@@ -0,0 +1,111 @@
+package diff
+
+// CompatibilityPolicy decides which changes are breaking and which are not
+type CompatibilityPolicy struct {
+ ForResponse map[SpecChangeCode]Compatibility
+ ForRequest map[SpecChangeCode]Compatibility
+ ForChange map[SpecChangeCode]Compatibility
+}
+
+var compatibility CompatibilityPolicy
+
+func init() {
+ compatibility = CompatibilityPolicy{
+ ForResponse: map[SpecChangeCode]Compatibility{
+ AddedRequiredProperty: Breaking,
+ DeletedProperty: Breaking,
+ AddedProperty: NonBreaking,
+ DeletedResponse: Breaking,
+ AddedResponse: NonBreaking,
+ WidenedType: NonBreaking,
+ NarrowedType: NonBreaking,
+ ChangedType: Breaking,
+ ChangedToCompatibleType: NonBreaking,
+ AddedEnumValue: Breaking,
+ DeletedEnumValue: NonBreaking,
+ AddedResponseHeader: NonBreaking,
+ ChangedResponseHeader: Breaking,
+ DeletedResponseHeader: Breaking,
+ ChangedDescripton: NonBreaking,
+ AddedDescripton: NonBreaking,
+ DeletedDescripton: NonBreaking,
+ ChangedTag: NonBreaking,
+ AddedTag: NonBreaking,
+ DeletedTag: NonBreaking,
+ DeletedConstraint: Breaking,
+ AddedConstraint: NonBreaking,
+ DeletedExtension: Warning,
+ AddedExtension: Warning,
+ },
+ ForRequest: map[SpecChangeCode]Compatibility{
+ AddedRequiredProperty: Breaking,
+ DeletedProperty: Breaking,
+ AddedProperty: Breaking,
+ AddedOptionalParam: NonBreaking,
+ AddedRequiredParam: Breaking,
+ DeletedOptionalParam: NonBreaking,
+ DeletedRequiredParam: NonBreaking,
+ WidenedType: NonBreaking,
+ NarrowedType: Breaking,
+ ChangedType: Breaking,
+ ChangedToCompatibleType: NonBreaking,
+ ChangedOptionalToRequired: Breaking,
+ ChangedRequiredToOptional: NonBreaking,
+ AddedEnumValue: NonBreaking,
+ DeletedEnumValue: Breaking,
+ ChangedDescripton: NonBreaking,
+ AddedDescripton: NonBreaking,
+ DeletedDescripton: NonBreaking,
+ ChangedTag: NonBreaking,
+ AddedTag: NonBreaking,
+ DeletedTag: NonBreaking,
+ DeletedConstraint: NonBreaking,
+ AddedConstraint: Breaking,
+ ChangedDefault: Warning,
+ AddedDefault: Warning,
+ DeletedDefault: Warning,
+ ChangedExample: NonBreaking,
+ AddedExample: NonBreaking,
+ DeletedExample: NonBreaking,
+ ChangedCollectionFormat: Breaking,
+ DeletedExtension: Warning,
+ AddedExtension: Warning,
+ },
+ ForChange: map[SpecChangeCode]Compatibility{
+ NoChangeDetected: NonBreaking,
+ AddedEndpoint: NonBreaking,
+ DeletedEndpoint: Breaking,
+ DeletedDeprecatedEndpoint: NonBreaking,
+ AddedConsumesFormat: NonBreaking,
+ DeletedConsumesFormat: Breaking,
+ AddedProducesFormat: NonBreaking,
+ DeletedProducesFormat: Breaking,
+ AddedSchemes: NonBreaking,
+ DeletedSchemes: Breaking,
+ ChangedHostURL: Breaking,
+ ChangedBasePath: Breaking,
+ ChangedDescripton: NonBreaking,
+ AddedDescripton: NonBreaking,
+ DeletedDescripton: NonBreaking,
+ ChangedTag: NonBreaking,
+ AddedTag: NonBreaking,
+ DeletedTag: NonBreaking,
+ RefTargetChanged: Breaking,
+ RefTargetRenamed: NonBreaking,
+ AddedDefinition: NonBreaking,
+ DeletedDefinition: NonBreaking,
+ DeletedExtension: Warning,
+ AddedExtension: Warning,
+ },
+ }
+}
+
+func getCompatibilityForChange(diffCode SpecChangeCode, where DataDirection) Compatibility {
+ if compat, commonChange := compatibility.ForChange[diffCode]; commonChange {
+ return compat
+ }
+ if where == Request {
+ return compatibility.ForRequest[diffCode]
+ }
+ return compatibility.ForResponse[diffCode]
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difference_location.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difference_location.go
new file mode 100644
index 000000000..3bd700b53
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difference_location.go
@@ -0,0 +1,22 @@
+package diff
+
+// DifferenceLocation indicates where the difference occurs
+type DifferenceLocation struct {
+ URL string `json:"url"`
+ Method string `json:"method,omitempty"`
+ Response int `json:"response,omitempty"`
+ Node *Node `json:"node,omitempty"`
+}
+
+// AddNode returns a copy of this location with the leaf node added
+func (dl DifferenceLocation) AddNode(node *Node) DifferenceLocation {
+ newLoc := dl
+
+ if newLoc.Node != nil {
+ newLoc.Node = newLoc.Node.Copy()
+ newLoc.Node.AddLeafNode(node)
+ } else {
+ newLoc.Node = node
+ }
+ return newLoc
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difftypes.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difftypes.go
new file mode 100644
index 000000000..007862fb9
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/difftypes.go
@@ -0,0 +1,337 @@
+package diff
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "log"
+)
+
+// SpecChangeCode enumerates the various types of diffs from one spec to another
+type SpecChangeCode int
+
+const (
+ // NoChangeDetected - the specs have no changes
+ NoChangeDetected SpecChangeCode = iota
+ // DeletedProperty - A message property has been deleted in the new spec
+ DeletedProperty
+ // AddedProperty - A message property has been added in the new spec
+ AddedProperty
+ // AddedRequiredProperty - A required message property has been added in the new spec
+ AddedRequiredProperty
+ // DeletedOptionalParam - An endpoint parameter has been deleted in the new spec
+ DeletedOptionalParam
+ // ChangedDescripton - Changed a description
+ ChangedDescripton
+ // AddedDescripton - Added a description
+ AddedDescripton
+ // DeletedDescripton - Deleted a description
+ DeletedDescripton
+ // ChangedTag - Changed a tag
+ ChangedTag
+ // AddedTag - Added a tag
+ AddedTag
+ // DeletedTag - Deleted a tag
+ DeletedTag
+ // DeletedResponse - An endpoint response has been deleted in the new spec
+ DeletedResponse
+ // DeletedEndpoint - An endpoint has been deleted in the new spec
+ DeletedEndpoint
+ // DeletedDeprecatedEndpoint - A deprecated endpoint has been deleted in the new spec
+ DeletedDeprecatedEndpoint
+ // AddedRequiredParam - A required parameter has been added in the new spec
+ AddedRequiredParam
+ // DeletedRequiredParam - A required parameter has been deleted in the new spec
+ DeletedRequiredParam
+ // AddedEndpoint - An endpoint has been added in the new spec
+ AddedEndpoint
+ // WidenedType - A type has been changed to a more permissive type eg int->string
+ WidenedType
+ // NarrowedType - A type has been changed to a less permissive type eg string->int
+ NarrowedType
+ // ChangedToCompatibleType - A type has been changed to a compatible type eg password->string
+ ChangedToCompatibleType
+ // ChangedType - A type has been changed to a type whose relative compatibility cannot be determined
+ ChangedType
+ // AddedEnumValue - An enum type has had a new potential value added to it
+ AddedEnumValue
+ // DeletedEnumValue - An enum type has had an existing value removed from it
+ DeletedEnumValue
+ // AddedOptionalParam - A new optional parameter has been added to the new spec
+ AddedOptionalParam
+ // ChangedOptionalToRequired - An optional parameter is now required in the new spec
+ ChangedOptionalToRequired
+ // ChangedRequiredToOptional - A required parameter is now optional in the new spec
+ ChangedRequiredToOptional
+ // AddedResponse - An endpoint has a new response code in the new spec
+ AddedResponse
+ // AddedConsumesFormat - a new consumes format (json/xml/yaml etc) has been added in the new spec
+ AddedConsumesFormat
+ // DeletedConsumesFormat - an existing format has been removed in the new spec
+ DeletedConsumesFormat
+ // AddedProducesFormat - a new produces format (json/xml/yaml etc) has been added in the new spec
+ AddedProducesFormat
+ // DeletedProducesFormat - an existing produces format has been removed in the new spec
+ DeletedProducesFormat
+ // AddedSchemes - a new scheme has been added to the new spec
+ AddedSchemes
+ // DeletedSchemes - a scheme has been removed from the new spec
+ DeletedSchemes
+ // ChangedHostURL - the host url has been changed. If this is used in the client generation, then clients will break.
+ ChangedHostURL
+ // ChangedBasePath - the host base path has been changed. If this is used in the client generation, then clients will break.
+ ChangedBasePath
+ // AddedResponseHeader Added a header Item
+ AddedResponseHeader
+ // ChangedResponseHeader Changed a header Item
+ ChangedResponseHeader
+ // DeletedResponseHeader Deleted a header Item
+ DeletedResponseHeader
+ // RefTargetChanged Changed a ref to point to a different object
+ RefTargetChanged
+ // RefTargetRenamed Renamed a ref to point to the same object
+ RefTargetRenamed
+ // DeletedConstraint Deleted a schema constraint
+ DeletedConstraint
+ // AddedConstraint Added a schema constraint
+ AddedConstraint
+ // DeletedDefinition removed one of the definitions
+ DeletedDefinition
+ // AddedDefinition added one of the definitions
+ AddedDefinition
+ // ChangedDefault - Changed default value
+ ChangedDefault
+ // AddedDefault - Added a default value
+ AddedDefault
+ // DeletedDefault - Deleted a default value
+ DeletedDefault
+ // ChangedExample - Changed an example value
+ ChangedExample
+ // AddedExample - Added an example value
+ AddedExample
+ // DeletedExample - Deleted an example value
+ DeletedExample
+ // ChangedCollectionFormat - A collectionFormat has been changed to a collectionFormat whose relative compatibility cannot be determined
+ ChangedCollectionFormat
+ // DeletedExtension deleted an extension
+ DeletedExtension
+ // AddedExtension added an extension
+ AddedExtension
+)
+
+var toLongStringSpecChangeCode = map[SpecChangeCode]string{
+ NoChangeDetected: "No Change detected",
+ AddedEndpoint: "Added endpoint",
+ DeletedEndpoint: "Deleted endpoint",
+ DeletedDeprecatedEndpoint: "Deleted a deprecated endpoint",
+ AddedRequiredProperty: "Added required property",
+ DeletedProperty: "Deleted property",
+ ChangedDescripton: "Changed a description",
+ AddedDescripton: "Added a description",
+ DeletedDescripton: "Deleted a description",
+ ChangedTag: "Changed a tag",
+ AddedTag: "Added a tag",
+ DeletedTag: "Deleted a tag",
+ AddedProperty: "Added property",
+ AddedOptionalParam: "Added optional param",
+ AddedRequiredParam: "Added required param",
+ DeletedOptionalParam: "Deleted optional param",
+ DeletedRequiredParam: "Deleted required param",
+ DeletedResponse: "Deleted response",
+ AddedResponse: "Added response",
+ WidenedType: "Widened type",
+ NarrowedType: "Narrowed type",
+ ChangedType: "Changed type",
+ ChangedToCompatibleType: "Changed type to equivalent type",
+ ChangedOptionalToRequired: "Changed optional param to required",
+ ChangedRequiredToOptional: "Changed required param to optional",
+ AddedEnumValue: "Added possible enumeration(s)",
+ DeletedEnumValue: "Deleted possible enumeration(s)",
+ AddedConsumesFormat: "Added a consumes format",
+ DeletedConsumesFormat: "Deleted a consumes format",
+ AddedProducesFormat: "Added produces format",
+ DeletedProducesFormat: "Deleted produces format",
+ AddedSchemes: "Added schemes",
+ DeletedSchemes: "Deleted schemes",
+ ChangedHostURL: "Changed host URL",
+ ChangedBasePath: "Changed base path",
+ AddedResponseHeader: "Added response header",
+ ChangedResponseHeader: "Changed response header",
+ DeletedResponseHeader: "Deleted response header",
+ RefTargetChanged: "Changed ref to different object",
+ RefTargetRenamed: "Changed ref to renamed object",
+ DeletedConstraint: "Deleted a schema constraint",
+ AddedConstraint: "Added a schema constraint",
+ DeletedDefinition: "Deleted a schema definition",
+ AddedDefinition: "Added a schema definition",
+ ChangedDefault: "Default value is changed",
+ AddedDefault: "Default value is added",
+ DeletedDefault: "Default value is removed",
+ ChangedExample: "Example value is changed",
+ AddedExample: "Example value is added",
+ DeletedExample: "Example value is removed",
+ ChangedCollectionFormat: "Changed collection format",
+ DeletedExtension: "Deleted Extension",
+ AddedExtension: "Added Extension",
+}
+
+var toStringSpecChangeCode = map[SpecChangeCode]string{
+ AddedEndpoint: "AddedEndpoint",
+ NoChangeDetected: "NoChangeDetected",
+ DeletedEndpoint: "DeletedEndpoint",
+ DeletedDeprecatedEndpoint: "DeletedDeprecatedEndpoint",
+ AddedRequiredProperty: "AddedRequiredProperty",
+ DeletedProperty: "DeletedProperty",
+ AddedProperty: "AddedProperty",
+ ChangedDescripton: "ChangedDescription",
+ AddedDescripton: "AddedDescription",
+ DeletedDescripton: "DeletedDescription",
+ ChangedTag: "ChangedTag",
+ AddedTag: "AddedTag",
+ DeletedTag: "DeletedTag",
+ AddedOptionalParam: "AddedOptionalParam",
+ AddedRequiredParam: "AddedRequiredParam",
+ DeletedOptionalParam: "DeletedOptionalParam",
+ DeletedRequiredParam: "DeletedRequiredParam",
+ DeletedResponse: "DeletedResponse",
+ AddedResponse: "AddedResponse",
+ WidenedType: "WidenedType",
+ NarrowedType: "NarrowedType",
+ ChangedType: "ChangedType",
+ ChangedToCompatibleType: "ChangedToCompatibleType",
+ ChangedOptionalToRequired: "ChangedOptionalToRequiredParam",
+ ChangedRequiredToOptional: "ChangedRequiredToOptionalParam",
+ AddedEnumValue: "AddedEnumValue",
+ DeletedEnumValue: "DeletedEnumValue",
+ AddedConsumesFormat: "AddedConsumesFormat",
+ DeletedConsumesFormat: "DeletedConsumesFormat",
+ AddedProducesFormat: "AddedProducesFormat",
+ DeletedProducesFormat: "DeletedProducesFormat",
+ AddedSchemes: "AddedSchemes",
+ DeletedSchemes: "DeletedSchemes",
+ ChangedHostURL: "ChangedHostURL",
+ ChangedBasePath: "ChangedBasePath",
+ AddedResponseHeader: "AddedResponseHeader",
+ ChangedResponseHeader: "ChangedResponseHeader",
+ DeletedResponseHeader: "DeletedResponseHeader",
+ RefTargetChanged: "RefTargetChanged",
+ RefTargetRenamed: "RefTargetRenamed",
+ DeletedConstraint: "DeletedConstraint",
+ AddedConstraint: "AddedConstraint",
+ DeletedDefinition: "DeletedDefinition",
+ AddedDefinition: "AddedDefinition",
+ ChangedDefault: "ChangedDefault",
+ AddedDefault: "AddedDefault",
+ DeletedDefault: "DeletedDefault",
+ ChangedExample: "ChangedExample",
+ AddedExample: "AddedExample",
+ DeletedExample: "DeletedExample",
+ ChangedCollectionFormat: "ChangedCollectionFormat",
+ DeletedExtension: "DeletedExtension",
+ AddedExtension: "AddedExtension",
+}
+
+var toIDSpecChangeCode = map[string]SpecChangeCode{}
+
+// Description returns an English version of this change code
+func (s SpecChangeCode) Description() (result string) {
+ result, ok := toLongStringSpecChangeCode[s]
+ if !ok {
+ log.Printf("warning: No description for %v", s)
+ result = "UNDEFINED"
+ }
+ return
+}
+
+// MarshalJSON marshals the enum as a quoted json string
+func (s SpecChangeCode) MarshalJSON() ([]byte, error) {
+ return stringAsQuotedBytes(toStringSpecChangeCode[s])
+}
+
+// UnmarshalJSON unmarshals a quoted json string to the enum value
+func (s *SpecChangeCode) UnmarshalJSON(b []byte) error {
+ str, err := readStringFromByteStream(b)
+ if err != nil {
+ return err
+ }
+ // Note that if the string cannot be found then it will return an error to the caller.
+ val, ok := toIDSpecChangeCode[str]
+
+ if ok {
+ *s = val
+ } else {
+ return fmt.Errorf("unknown enum value. cannot unmarshal '%s'", str)
+ }
+ return nil
+}
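+
+// Illustrative round trip (editor's sketch, not upstream code): the enum
+// marshals to its quoted name and unmarshals back via the reverse lookup map
+// built in init.
+//
+//	b, _ := AddedEndpoint.MarshalJSON() // []byte(`"AddedEndpoint"`)
+//	var c SpecChangeCode
+//	_ = c.UnmarshalJSON(b) // c == AddedEndpoint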
+
+// Compatibility - whether this is a breaking or non-breaking change
+type Compatibility int
+
+const (
+ // Breaking this change could break existing clients
+ Breaking Compatibility = iota
+ // NonBreaking This is a backwards-compatible API change
+ NonBreaking
+ // Warning changes are technically non-breaking but can cause behavior changes in clients and thus should be reported differently
+ Warning
+)
+
+func (s Compatibility) String() string {
+ return toStringCompatibility[s]
+}
+
+var toStringCompatibility = map[Compatibility]string{
+ Breaking: "Breaking",
+ NonBreaking: "NonBreaking",
+ Warning: "Warning",
+}
+
+var toIDCompatibility = map[string]Compatibility{}
+
+// MarshalJSON marshals the enum as a quoted json string
+func (s Compatibility) MarshalJSON() ([]byte, error) {
+ return stringAsQuotedBytes(toStringCompatibility[s])
+}
+
+// UnmarshalJSON unmarshals a quoted json string to the enum value
+func (s *Compatibility) UnmarshalJSON(b []byte) error {
+ str, err := readStringFromByteStream(b)
+ if err != nil {
+ return err
+ }
+ // Note that if the string cannot be found then it will return an error to the caller.
+ val, ok := toIDCompatibility[str]
+
+ if ok {
+ *s = val
+ } else {
+ return fmt.Errorf("unknown enum value. cannot unmarshal '%s'", str)
+ }
+ return nil
+}
+
+func stringAsQuotedBytes(str string) ([]byte, error) {
+ buffer := bytes.NewBufferString(`"`)
+ buffer.WriteString(str)
+ buffer.WriteString(`"`)
+ return buffer.Bytes(), nil
+}
+
+func readStringFromByteStream(b []byte) (string, error) {
+ var j string
+ err := json.Unmarshal(b, &j)
+ if err != nil {
+ return "", err
+ }
+ return j, nil
+}
+
+func init() {
+ for key, val := range toStringSpecChangeCode {
+ toIDSpecChangeCode[val] = key
+ }
+ for key, val := range toStringCompatibility {
+ toIDCompatibility[val] = key
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/node.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/node.go
new file mode 100644
index 000000000..e1c7c95f1
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/node.go
@@ -0,0 +1,82 @@
+package diff
+
+import (
+ "fmt"
+
+ "github.com/go-openapi/spec"
+)
+
+// Node is the position of a diff in a spec
+type Node struct {
+ Field string `json:"name,omitempty"`
+ TypeName string `json:"type,omitempty"`
+ IsArray bool `json:"is_array,omitempty"`
+ ChildNode *Node `json:"child,omitempty"`
+}
+
+// String renders the node as a string
+func (n *Node) String() string {
+ name := n.Field
+ if n.IsArray {
+ name = fmt.Sprintf("%s<array[%s]>", name, n.TypeName)
+ } else if len(n.TypeName) > 0 {
+ name = fmt.Sprintf("%s<%s>", name, n.TypeName)
+ }
+ if n.ChildNode != nil {
+ return fmt.Sprintf("%s.%s", name, n.ChildNode.String())
+ }
+ return name
+}
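+
+// Illustrative rendering (editor's sketch, not upstream code): a nested chain
+// prints as dotted path segments with bracketed array types.
+//
+//	n := &Node{Field: "pet", TypeName: "Pet",
+//		ChildNode: &Node{Field: "tags", TypeName: "string", IsArray: true}}
+//	n.String() // "pet<Pet>.tags<array[string]>"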
+
+// AddLeafNode recursively walks the child chain and attaches the given node at the first nil child slot
+func (n *Node) AddLeafNode(toAdd *Node) *Node {
+
+ if n.ChildNode == nil {
+ n.ChildNode = toAdd
+ } else {
+ n.ChildNode.AddLeafNode(toAdd)
+ }
+
+ return n
+}
+
+// Copy returns a deep copy of this node and its children
+func (n Node) Copy() *Node {
+ newChild := n.ChildNode
+ if newChild != nil {
+ newChild = newChild.Copy()
+ }
+ newNode := Node{
+ Field: n.Field,
+ TypeName: n.TypeName,
+ IsArray: n.IsArray,
+ ChildNode: newChild,
+ }
+
+ return &newNode
+}
+
+func getSchemaDiffNode(name string, schema interface{}) *Node {
+ node := Node{
+ Field: name,
+ }
+ if schema != nil {
+ switch s := schema.(type) {
+ case spec.Refable:
+ node.TypeName, node.IsArray = getSchemaType(s)
+ case *spec.Schema:
+ node.TypeName, node.IsArray = getSchemaType(s.SchemaProps)
+ case spec.SimpleSchema:
+ node.TypeName, node.IsArray = getSchemaType(s)
+ case *spec.SimpleSchema:
+ node.TypeName, node.IsArray = getSchemaType(s)
+ case *spec.SchemaProps:
+ node.TypeName, node.IsArray = getSchemaType(s)
+ case spec.SchemaProps:
+ node.TypeName, node.IsArray = getSchemaType(&s)
+ default:
+ node.TypeName = fmt.Sprintf("Unknown type %v", schema)
+ }
+ }
+ return &node
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/reporting.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/reporting.go
new file mode 100644
index 000000000..7ef627226
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/reporting.go
@@ -0,0 +1,118 @@
+package diff
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "io"
+
+ "github.com/go-openapi/spec"
+)
+
+// ArrayType the type name for arrays
+var ArrayType = "array"
+
+// ObjectType the type name for objects
+var ObjectType = "object"
+
+// Compare returns the result of analysing breaking and non breaking changes
+// between two Swagger specs
+func Compare(spec1, spec2 *spec.Swagger) (diffs SpecDifferences, err error) {
+ analyser := NewSpecAnalyser()
+ err = analyser.Analyse(spec1, spec2)
+ if err != nil {
+ return nil, err
+ }
+ diffs = analyser.Diffs
+ return
+}
+
+// PathItemOp - combines path and operation into a single keyed entity
+type PathItemOp struct {
+ ParentPathItem *spec.PathItem `json:"pathitem"`
+ Operation *spec.Operation `json:"operation"`
+ Extensions spec.Extensions `json:"extensions"`
+}
+
+// URLMethod - combines url and method into a single keyed entity
+type URLMethod struct {
+ Path string `json:"path"`
+ Method string `json:"method"`
+}
+
+// DataDirection indicates the direction of change Request vs Response
+type DataDirection int
+
+const (
+ // Request Used for messages/param diffs in a request
+ Request DataDirection = iota
+ // Response Used for messages/param diffs in a response
+ Response
+)
+
+func getParams(pathParams, opParams []spec.Parameter, location string) map[string]spec.Parameter {
+ params := map[string]spec.Parameter{}
+ // add shared path params
+ for _, eachParam := range pathParams {
+ if eachParam.In == location {
+ params[eachParam.Name] = eachParam
+ }
+ }
+ // add any overridden params
+ for _, eachParam := range opParams {
+ if eachParam.In == location {
+ params[eachParam.Name] = eachParam
+ }
+ }
+ return params
+}
+
+func getNameOnlyDiffNode(forLocation string) *Node {
+ node := Node{
+ Field: forLocation,
+ }
+ return &node
+}
+
+func primitiveTypeString(typeName, typeFormat string) string {
+ if typeFormat != "" {
+ return fmt.Sprintf("%s.%s", typeName, typeFormat)
+ }
+ return typeName
+}
+
+// TypeDiff - describes a primitive type change
+type TypeDiff struct {
+ Change SpecChangeCode `json:"change-type,omitempty"`
+ Description string `json:"description,omitempty"`
+ FromType string `json:"from-type,omitempty"`
+ ToType string `json:"to-type,omitempty"`
+}
+
+// didn't use 'width' so as not to confuse with bit width
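+// The ranking reads low to high: e.g. integer.int32 (0) changing to
+// number.double (3) moves up the ranking and is a widening; the reverse
+// direction is a narrowing.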
+var numberWideness = map[string]int{
+ "number": 3,
+ "number.double": 3,
+ "double": 3,
+ "number.float": 2,
+ "float": 2,
+ "long": 1,
+ "integer.int64": 1,
+ "integer": 0,
+ "integer.int32": 0,
+}
+
+func prettyprint(b []byte) (io.ReadWriter, error) {
+ var out bytes.Buffer
+ err := json.Indent(&out, b, "", " ")
+ return &out, err
+}
+
+// JSONMarshal renders the item to JSON without escaping HTML
+func JSONMarshal(t interface{}) ([]byte, error) {
+ buffer := &bytes.Buffer{}
+ encoder := json.NewEncoder(buffer)
+ encoder.SetEscapeHTML(false)
+ err := encoder.Encode(t)
+ return buffer.Bytes(), err
+}
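A hedged end-to-end sketch (not part of the vendored files) of the Compare entry point above; the spec paths are hypothetical:

package main

import (
	"fmt"
	"io"
	"log"
	"os"

	"github.com/go-openapi/loads"
	"github.com/go-swagger/go-swagger/cmd/swagger/commands/diff"
)

func main() {
	// Load the two spec revisions to compare.
	oldDoc, err := loads.Spec("old/swagger.yaml")
	if err != nil {
		log.Fatal(err)
	}
	newDoc, err := loads.Spec("new/swagger.yaml")
	if err != nil {
		log.Fatal(err)
	}

	// Compare analyses both specs and collects every difference.
	diffs, err := diff.Compare(oldDoc.Spec(), newDoc.Spec())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("breaking changes:", diffs.BreakingChangeCount())

	// Render the full plain-text report (pass true for JSON output).
	out, err, warn := diffs.ReportAllDiffs(false)
	if err != nil {
		log.Fatal(err)
	}
	_, _ = io.Copy(os.Stdout, out)
	if warn != nil {
		os.Exit(1) // breaking changes were found
	}
}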
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/schema.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/schema.go
new file mode 100644
index 000000000..0874154bb
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/schema.go
@@ -0,0 +1,126 @@
+package diff
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+func getTypeFromSchema(schema *spec.Schema) (typeName string, isArray bool) {
+ refStr := definitionFromRef(schema.Ref)
+ if len(refStr) > 0 {
+ return refStr, false
+ }
+ typeName = schema.Type[0]
+ if typeName == ArrayType {
+ typeName, _ = getSchemaType(&schema.Items.Schema.SchemaProps)
+ return typeName, true
+ }
+ return typeName, false
+
+}
+
+func getTypeFromSimpleSchema(schema *spec.SimpleSchema) (typeName string, isArray bool) {
+ typeName = schema.Type
+ format := schema.Format
+ if len(format) > 0 {
+ typeName = fmt.Sprintf("%s.%s", typeName, format)
+ }
+ if typeName == ArrayType {
+ typeName, _ = getSchemaType(&schema.Items.SimpleSchema)
+ return typeName, true
+ }
+ return typeName, false
+
+}
+
+func getTypeFromSchemaProps(schema *spec.SchemaProps) (typeName string, isArray bool) {
+ refStr := definitionFromRef(schema.Ref)
+ if len(refStr) > 0 {
+ return refStr, false
+ }
+ if len(schema.Type) > 0 {
+ typeName = schema.Type[0]
+ format := schema.Format
+ if len(format) > 0 {
+ typeName = fmt.Sprintf("%s.%s", typeName, format)
+ }
+ if typeName == ArrayType {
+ typeName, _ = getSchemaType(&schema.Items.Schema.SchemaProps)
+ return typeName, true
+ }
+ }
+ return typeName, false
+
+}
+
+func getSchemaTypeStr(item interface{}) string {
+ typeStr, isArray := getSchemaType(item)
+ return formatTypeString(typeStr, isArray)
+}
+
+func getSchemaType(item interface{}) (typeName string, isArray bool) {
+
+ switch s := item.(type) {
+ case *spec.Schema:
+ typeName, isArray = getTypeFromSchema(s)
+ case *spec.SchemaProps:
+ typeName, isArray = getTypeFromSchemaProps(s)
+ case spec.SchemaProps:
+ typeName, isArray = getTypeFromSchemaProps(&s)
+ case spec.SimpleSchema:
+ typeName, isArray = getTypeFromSimpleSchema(&s)
+ case *spec.SimpleSchema:
+ typeName, isArray = getTypeFromSimpleSchema(s)
+ default:
+ typeName = "unknown"
+ }
+
+ return
+
+}
+
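+// formatTypeString renders a type for display, e.g. ("integer", true)
+// yields "<array[integer]>" and ("string", false) yields "<string>".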
+func formatTypeString(typ string, isarray bool) string {
+ if isarray {
+ return fmt.Sprintf("<array[%s]>", typ)
+ }
+ return fmt.Sprintf("<%s>", typ)
+}
+
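+// definitionFromRef extracts the definition name from a $ref, e.g.
+// "#/definitions/Pet" yields "Pet"; a ref without a URL yields "".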
+func definitionFromRef(ref spec.Ref) string {
+ url := ref.GetURL()
+ if url == nil {
+ return ""
+ }
+ fragmentParts := strings.Split(url.Fragment, "/")
+ numParts := len(fragmentParts)
+
+ return fragmentParts[numParts-1]
+}
+
+func isArray(item interface{}) bool {
+ switch s := item.(type) {
+ case *spec.Schema:
+ return isArrayType(s.Type)
+ case *spec.SchemaProps:
+ return isArrayType(s.Type)
+ case *spec.SimpleSchema:
+ return isArrayType(spec.StringOrArray{s.Type})
+ default:
+ return false
+ }
+}
+
+func isPrimitive(item interface{}) bool {
+ switch s := item.(type) {
+ case *spec.Schema:
+ return isPrimitiveType(s.Type)
+ case *spec.SchemaProps:
+ return isPrimitiveType(s.Type)
+ case spec.StringOrArray:
+ return isPrimitiveType(s)
+ default:
+ return false
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_analyser.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_analyser.go
new file mode 100644
index 000000000..8df44aeb2
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_analyser.go
@@ -0,0 +1,759 @@
+package diff
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+// StringType identifies string types
+const StringType = "string"
+
+// URLMethodResponse combines path, method and response code to act as a map key
+type URLMethodResponse struct {
+ Path string `json:"path"`
+ Method string `json:"method"`
+ Response string `json:"response"`
+}
+
+// MarshalText - for serializing as a map key
+func (p URLMethod) MarshalText() (text []byte, err error) {
+ return []byte(fmt.Sprintf("%s %s", p.Path, p.Method)), nil
+}
+
+// URLMethods allows iteration of endpoints based on url and method
+type URLMethods map[URLMethod]*PathItemOp
+
+// SpecAnalyser contains all the differences for a Spec
+type SpecAnalyser struct {
+ Diffs SpecDifferences
+ urlMethods1 URLMethods
+ urlMethods2 URLMethods
+ Definitions1 spec.Definitions
+ Definitions2 spec.Definitions
+ Info1 *spec.Info
+ Info2 *spec.Info
+ ReferencedDefinitions map[string]bool
+
+ schemasCompared map[string]struct{}
+}
+
+// NewSpecAnalyser returns a new, empty SpecAnalyser
+func NewSpecAnalyser() *SpecAnalyser {
+ return &SpecAnalyser{
+ Diffs: SpecDifferences{},
+ ReferencedDefinitions: map[string]bool{},
+ }
+}
+
+// Analyse the differences in two specs
+func (sd *SpecAnalyser) Analyse(spec1, spec2 *spec.Swagger) error {
+ sd.schemasCompared = make(map[string]struct{})
+ sd.Definitions1 = spec1.Definitions
+ sd.Definitions2 = spec2.Definitions
+ sd.Info1 = spec1.Info
+ sd.Info2 = spec2.Info
+ sd.urlMethods1 = getURLMethodsFor(spec1)
+ sd.urlMethods2 = getURLMethodsFor(spec2)
+
+ sd.analyseSpecMetadata(spec1, spec2)
+ sd.analyseEndpoints()
+ sd.analyseRequestParams()
+ sd.analyseEndpointData()
+ sd.analyseResponseParams()
+ sd.analyseExtensions(spec1, spec2)
+ sd.AnalyseDefinitions()
+
+ return nil
+}
+
+func (sd *SpecAnalyser) analyseSpecMetadata(spec1, spec2 *spec.Swagger) {
+ // breaking if it no longer consumes any formats
+ added, deleted, _ := fromStringArray(spec1.Consumes).DiffsTo(spec2.Consumes)
+
+ node := getNameOnlyDiffNode("Spec")
+ location := DifferenceLocation{Node: node}
+	consumesLocation := location.AddNode(getNameOnlyDiffNode("consumes"))
+
+	for _, eachAdded := range added {
+		sd.Diffs = sd.Diffs.addDiff(
+			SpecDifference{DifferenceLocation: consumesLocation, Code: AddedConsumesFormat, Compatibility: NonBreaking, DiffInfo: eachAdded})
+	}
+	for _, eachDeleted := range deleted {
+		sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: consumesLocation, Code: DeletedConsumesFormat, Compatibility: Breaking, DiffInfo: eachDeleted})
+ }
+
+	// breaking if it no longer produces any formats
+ added, deleted, _ = fromStringArray(spec1.Produces).DiffsTo(spec2.Produces)
+ producesLocation := location.AddNode(getNameOnlyDiffNode("produces"))
+ for _, eachAdded := range added {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: producesLocation, Code: AddedProducesFormat, Compatibility: NonBreaking, DiffInfo: eachAdded})
+ }
+ for _, eachDeleted := range deleted {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: producesLocation, Code: DeletedProducesFormat, Compatibility: Breaking, DiffInfo: eachDeleted})
+ }
+
+	// breaking if it no longer supports a scheme
+ added, deleted, _ = fromStringArray(spec1.Schemes).DiffsTo(spec2.Schemes)
+ schemesLocation := location.AddNode(getNameOnlyDiffNode("schemes"))
+
+ for _, eachAdded := range added {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: schemesLocation, Code: AddedSchemes, Compatibility: NonBreaking, DiffInfo: eachAdded})
+ }
+ for _, eachDeleted := range deleted {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: schemesLocation, Code: DeletedSchemes, Compatibility: Breaking, DiffInfo: eachDeleted})
+ }
+
+	// description changes are not breaking
+	sd.analyseMetaDataProperty(spec1.Info.Description, spec2.Info.Description, ChangedDescripton, NonBreaking)
+
+	// host should be able to change without any issues?
+	sd.analyseMetaDataProperty(spec1.Host, spec2.Host, ChangedHostURL, Breaking)
+ // sd.Host = compareStrings(spec1.Host, spec2.Host)
+
+	// Base Path change will break non-generated clients
+ sd.analyseMetaDataProperty(spec1.BasePath, spec2.BasePath, ChangedBasePath, Breaking)
+
+ // TODO: what to do about security?
+ // Missing security scheme will break a client
+ // Security []map[string][]string `json:"security,omitempty"`
+ // Tags []Tag `json:"tags,omitempty"`
+ // ExternalDocs *ExternalDocumentation `json:"externalDocs,omitempty"`
+}
+
+func (sd *SpecAnalyser) analyseEndpoints() {
+ sd.findDeletedEndpoints()
+ sd.findAddedEndpoints()
+}
+
+// AnalyseDefinitions checks for changes to definition objects not referenced in any endpoint
+func (sd *SpecAnalyser) AnalyseDefinitions() {
+ alreadyReferenced := map[string]bool{}
+ for k := range sd.ReferencedDefinitions {
+ alreadyReferenced[k] = true
+ }
+ location := DifferenceLocation{Node: &Node{Field: "Spec Definitions"}}
+ for name1, sch := range sd.Definitions1 {
+ schema1 := sch
+ if _, ok := alreadyReferenced[name1]; !ok {
+ childLocation := location.AddNode(&Node{Field: name1})
+ if schema2, ok := sd.Definitions2[name1]; ok {
+ sd.compareSchema(childLocation, &schema1, &schema2)
+ } else {
+ sd.addDiffs(childLocation, []TypeDiff{{Change: DeletedDefinition}})
+ }
+ }
+ }
+ for name2 := range sd.Definitions2 {
+ if _, ok := sd.Definitions1[name2]; !ok {
+ childLocation := location.AddNode(&Node{Field: name2})
+ sd.addDiffs(childLocation, []TypeDiff{{Change: AddedDefinition}})
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyseEndpointData() {
+
+ for URLMethod, op2 := range sd.urlMethods2 {
+ if op1, ok := sd.urlMethods1[URLMethod]; ok {
+ addedTags, deletedTags, _ := fromStringArray(op1.Operation.Tags).DiffsTo(op2.Operation.Tags)
+ location := DifferenceLocation{URL: URLMethod.Path, Method: URLMethod.Method}
+
+ for _, eachAddedTag := range addedTags {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location, Code: AddedTag, DiffInfo: fmt.Sprintf(`"%s"`, eachAddedTag)})
+ }
+ for _, eachDeletedTag := range deletedTags {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location, Code: DeletedTag, DiffInfo: fmt.Sprintf(`"%s"`, eachDeletedTag)})
+ }
+
+ sd.compareDescripton(location, op1.Operation.Description, op2.Operation.Description)
+
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyseRequestParams() {
+ locations := []string{"query", "path", "body", "header", "formData"}
+
+ for _, paramLocation := range locations {
+ rootNode := getNameOnlyDiffNode(strings.Title(paramLocation))
+ for URLMethod, op2 := range sd.urlMethods2 {
+ if op1, ok := sd.urlMethods1[URLMethod]; ok {
+
+ params1 := getParams(op1.ParentPathItem.Parameters, op1.Operation.Parameters, paramLocation)
+ params2 := getParams(op2.ParentPathItem.Parameters, op2.Operation.Parameters, paramLocation)
+
+ location := DifferenceLocation{URL: URLMethod.Path, Method: URLMethod.Method, Node: rootNode}
+
+ // detect deleted params
+ for paramName1, param1 := range params1 {
+ if _, ok := params2[paramName1]; !ok {
+ childLocation := location.AddNode(getSchemaDiffNode(paramName1, &param1.SimpleSchema))
+ code := DeletedOptionalParam
+ if param1.Required {
+ code = DeletedRequiredParam
+ }
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: childLocation, Code: code})
+ }
+ }
+ // detect added changed params
+ for paramName2, param2 := range params2 {
+ // changed?
+ if param1, ok := params1[paramName2]; ok {
+ sd.compareParams(URLMethod, paramLocation, paramName2, param1, param2)
+ } else {
+ // Added
+ childLocation := location.AddNode(getSchemaDiffNode(paramName2, &param2.SimpleSchema))
+ code := AddedOptionalParam
+ if param2.Required {
+ code = AddedRequiredParam
+ }
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: childLocation, Code: code})
+ }
+ }
+ }
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyseResponseParams() {
+ // Loop through url+methods in spec 2 - check deleted and changed
+ for eachURLMethodFrom2, op2 := range sd.urlMethods2 {
+
+ // present in both specs? Use key from spec 2 to lookup in spec 1
+ if op1, ok := sd.urlMethods1[eachURLMethodFrom2]; ok {
+ // compare responses for url and method
+ op1Responses := op1.Operation.Responses.StatusCodeResponses
+ op2Responses := op2.Operation.Responses.StatusCodeResponses
+
+ // deleted responses
+ for code1 := range op1Responses {
+ if _, ok := op2Responses[code1]; !ok {
+ location := DifferenceLocation{URL: eachURLMethodFrom2.Path, Method: eachURLMethodFrom2.Method, Response: code1, Node: getSchemaDiffNode("Body", op1Responses[code1].Schema)}
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location, Code: DeletedResponse})
+ }
+ }
+			// added or updated response codes
+ for code2, op2Response := range op2Responses {
+ if op1Response, ok := op1Responses[code2]; ok {
+ op1Headers := op1Response.ResponseProps.Headers
+ headerRootNode := getNameOnlyDiffNode("Headers")
+
+ // Iterate Spec2 Headers looking for added and updated
+ location := DifferenceLocation{URL: eachURLMethodFrom2.Path, Method: eachURLMethodFrom2.Method, Response: code2, Node: headerRootNode}
+ for op2HeaderName, op2Header := range op2Response.ResponseProps.Headers {
+ if op1Header, ok := op1Headers[op2HeaderName]; ok {
+ diffs := sd.CompareProps(forHeader(op1Header), forHeader(op2Header))
+ sd.addDiffs(location, diffs)
+ } else {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{
+ DifferenceLocation: location.AddNode(getSchemaDiffNode(op2HeaderName, &op2Header.SimpleSchema)),
+ Code: AddedResponseHeader})
+ }
+ }
+ for op1HeaderName := range op1Response.ResponseProps.Headers {
+ if _, ok := op2Response.ResponseProps.Headers[op1HeaderName]; !ok {
+ op1Header := op1Response.ResponseProps.Headers[op1HeaderName]
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{
+ DifferenceLocation: location.AddNode(getSchemaDiffNode(op1HeaderName, &op1Header.SimpleSchema)),
+ Code: DeletedResponseHeader})
+ }
+ }
+ schem := op1Response.Schema
+ node := getNameOnlyDiffNode("NoContent")
+ if schem != nil {
+ node = getSchemaDiffNode("Body", &schem.SchemaProps)
+ }
+ responseLocation := DifferenceLocation{URL: eachURLMethodFrom2.Path,
+ Method: eachURLMethodFrom2.Method,
+ Response: code2,
+ Node: node}
+ sd.compareDescripton(responseLocation, op1Response.Description, op2Response.Description)
+
+ if op1Response.Schema != nil {
+ sd.compareSchema(
+ DifferenceLocation{URL: eachURLMethodFrom2.Path, Method: eachURLMethodFrom2.Method, Response: code2, Node: getSchemaDiffNode("Body", op1Response.Schema)},
+ op1Response.Schema,
+ op2Response.Schema)
+ }
+ } else {
+ // op2Response
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{
+ DifferenceLocation: DifferenceLocation{URL: eachURLMethodFrom2.Path, Method: eachURLMethodFrom2.Method, Response: code2, Node: getSchemaDiffNode("Body", op2Response.Schema)},
+ Code: AddedResponse})
+ }
+ }
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyseExtensions(spec1, spec2 *spec.Swagger) {
+ // root
+ specLoc := DifferenceLocation{Node: &Node{Field: "Spec"}}
+ sd.checkAddedExtensions(spec1.Extensions, spec2.Extensions, specLoc, "")
+ sd.checkDeletedExtensions(spec1.Extensions, spec2.Extensions, specLoc, "")
+
+ sd.analyzeInfoExtensions()
+ sd.analyzeTagExtensions(spec1, spec2)
+ sd.analyzeSecurityDefinitionExtensions(spec1, spec2)
+
+ sd.analyzeOperationExtensions()
+}
+
+func (sd *SpecAnalyser) analyzeOperationExtensions() {
+ for urlMethod, op2 := range sd.urlMethods2 {
+ pathAndMethodLoc := DifferenceLocation{URL: urlMethod.Path, Method: urlMethod.Method}
+ if op1, ok := sd.urlMethods1[urlMethod]; ok {
+ sd.checkAddedExtensions(op1.Extensions, op2.Extensions, DifferenceLocation{URL: urlMethod.Path}, "")
+ sd.checkAddedExtensions(op1.Operation.Responses.Extensions, op2.Operation.Responses.Extensions, pathAndMethodLoc, "Responses")
+ sd.checkAddedExtensions(op1.Operation.Extensions, op2.Operation.Extensions, pathAndMethodLoc, "")
+
+ for code, resp := range op1.Operation.Responses.StatusCodeResponses {
+ for hdr, h := range resp.Headers {
+ op2StatusCode, ok := op2.Operation.Responses.StatusCodeResponses[code]
+ if ok {
+ if _, ok = op2StatusCode.Headers[hdr]; ok {
+ sd.checkAddedExtensions(h.Extensions, op2StatusCode.Headers[hdr].Extensions, DifferenceLocation{URL: urlMethod.Path, Method: urlMethod.Method, Node: getNameOnlyDiffNode("Headers")}, hdr)
+ }
+ }
+ }
+
+ resp2 := op2.Operation.Responses.StatusCodeResponses[code]
+ sd.analyzeSchemaExtensions(resp.Schema, resp2.Schema, code, urlMethod)
+ }
+
+ }
+ }
+
+ for urlMethod, op1 := range sd.urlMethods1 {
+ pathAndMethodLoc := DifferenceLocation{URL: urlMethod.Path, Method: urlMethod.Method}
+ if op2, ok := sd.urlMethods2[urlMethod]; ok {
+ sd.checkDeletedExtensions(op1.Extensions, op2.Extensions, DifferenceLocation{URL: urlMethod.Path}, "")
+ sd.checkDeletedExtensions(op1.Operation.Responses.Extensions, op2.Operation.Responses.Extensions, pathAndMethodLoc, "Responses")
+ sd.checkDeletedExtensions(op1.Operation.Extensions, op2.Operation.Extensions, pathAndMethodLoc, "")
+ for code, resp := range op1.Operation.Responses.StatusCodeResponses {
+ for hdr, h := range resp.Headers {
+ op2StatusCode, ok := op2.Operation.Responses.StatusCodeResponses[code]
+ if ok {
+ if _, ok = op2StatusCode.Headers[hdr]; ok {
+ sd.checkDeletedExtensions(h.Extensions, op2StatusCode.Headers[hdr].Extensions, DifferenceLocation{URL: urlMethod.Path, Method: urlMethod.Method, Node: getNameOnlyDiffNode("Headers")}, hdr)
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyzeSecurityDefinitionExtensions(spec1 *spec.Swagger, spec2 *spec.Swagger) {
+ securityDefLoc := DifferenceLocation{Node: &Node{Field: "Security Definitions"}}
+ for key, securityDef := range spec1.SecurityDefinitions {
+ if securityDef2, ok := spec2.SecurityDefinitions[key]; ok {
+ sd.checkAddedExtensions(securityDef.Extensions, securityDef2.Extensions, securityDefLoc, "")
+ }
+ }
+
+ for key, securityDef := range spec2.SecurityDefinitions {
+ if securityDef1, ok := spec1.SecurityDefinitions[key]; ok {
+ sd.checkDeletedExtensions(securityDef1.Extensions, securityDef.Extensions, securityDefLoc, "")
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyzeSchemaExtensions(schema1, schema2 *spec.Schema, code int, urlMethod URLMethod) {
+ if schema1 != nil && schema2 != nil {
+ diffLoc := DifferenceLocation{Response: code, URL: urlMethod.Path, Method: urlMethod.Method, Node: getSchemaDiffNode("Body", schema2)}
+ sd.checkAddedExtensions(schema1.Extensions, schema2.Extensions, diffLoc, "")
+ sd.checkDeletedExtensions(schema1.Extensions, schema2.Extensions, diffLoc, "")
+ if schema1.Items != nil && schema2.Items != nil {
+ sd.analyzeSchemaExtensions(schema1.Items.Schema, schema2.Items.Schema, code, urlMethod)
+ for i := range schema1.Items.Schemas {
+ s1 := schema1.Items.Schemas[i]
+ for j := range schema2.Items.Schemas {
+ s2 := schema2.Items.Schemas[j]
+ sd.analyzeSchemaExtensions(&s1, &s2, code, urlMethod)
+ }
+ }
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyzeInfoExtensions() {
+ if sd.Info1 != nil && sd.Info2 != nil {
+ diffLocation := DifferenceLocation{Node: &Node{Field: "Spec Info"}}
+ sd.checkAddedExtensions(sd.Info1.Extensions, sd.Info2.Extensions, diffLocation, "")
+ sd.checkDeletedExtensions(sd.Info1.Extensions, sd.Info2.Extensions, diffLocation, "")
+ if sd.Info1.Contact != nil && sd.Info2.Contact != nil {
+ diffLocation = DifferenceLocation{Node: &Node{Field: "Spec Info.Contact"}}
+ sd.checkAddedExtensions(sd.Info1.Contact.Extensions, sd.Info2.Contact.Extensions, diffLocation, "")
+ sd.checkDeletedExtensions(sd.Info1.Contact.Extensions, sd.Info2.Contact.Extensions, diffLocation, "")
+ }
+ if sd.Info1.License != nil && sd.Info2.License != nil {
+ diffLocation = DifferenceLocation{Node: &Node{Field: "Spec Info.License"}}
+ sd.checkAddedExtensions(sd.Info1.License.Extensions, sd.Info2.License.Extensions, diffLocation, "")
+ sd.checkDeletedExtensions(sd.Info1.License.Extensions, sd.Info2.License.Extensions, diffLocation, "")
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyzeTagExtensions(spec1 *spec.Swagger, spec2 *spec.Swagger) {
+ diffLocation := DifferenceLocation{Node: &Node{Field: "Spec Tags"}}
+ for _, spec2Tag := range spec2.Tags {
+ for _, spec1Tag := range spec1.Tags {
+ if spec2Tag.Name == spec1Tag.Name {
+ sd.checkAddedExtensions(spec1Tag.Extensions, spec2Tag.Extensions, diffLocation, "")
+ }
+ }
+ }
+ for _, spec1Tag := range spec1.Tags {
+ for _, spec2Tag := range spec2.Tags {
+ if spec1Tag.Name == spec2Tag.Name {
+ sd.checkDeletedExtensions(spec1Tag.Extensions, spec2Tag.Extensions, diffLocation, "")
+ }
+ }
+ }
+}
+
+func (sd *SpecAnalyser) checkAddedExtensions(extensions1 spec.Extensions, extensions2 spec.Extensions, diffLocation DifferenceLocation, fieldPrefix string) {
+ for extKey := range extensions2 {
+ if _, ok := extensions1[extKey]; !ok {
+ if fieldPrefix != "" {
+ extKey = fmt.Sprintf("%s.%s", fieldPrefix, extKey)
+ }
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{
+ DifferenceLocation: diffLocation.AddNode(&Node{Field: extKey}),
+ Code: AddedExtension,
+ Compatibility: Warning, // this could potentially be a breaking change
+ })
+ }
+ }
+}
+
+func (sd *SpecAnalyser) checkDeletedExtensions(extensions1 spec.Extensions, extensions2 spec.Extensions, diffLocation DifferenceLocation, fieldPrefix string) {
+ for extKey := range extensions1 {
+ if _, ok := extensions2[extKey]; !ok {
+ if fieldPrefix != "" {
+ extKey = fmt.Sprintf("%s.%s", fieldPrefix, extKey)
+ }
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{
+ DifferenceLocation: diffLocation.AddNode(&Node{Field: extKey}),
+ Code: DeletedExtension,
+ Compatibility: Warning, // this could potentially be a breaking change
+ })
+ }
+ }
+}
+
+func addTypeDiff(diffs []TypeDiff, diff TypeDiff) []TypeDiff {
+ if diff.Change != NoChangeDetected {
+ diffs = append(diffs, diff)
+ }
+ return diffs
+}
+
+// CompareProps computes type-specific property diffs
+func (sd *SpecAnalyser) CompareProps(type1, type2 *spec.SchemaProps) []TypeDiff {
+
+ diffs := []TypeDiff{}
+
+ diffs = CheckToFromPrimitiveType(diffs, type1, type2)
+
+ if len(diffs) > 0 {
+ return diffs
+ }
+
+ if isArray(type1) {
+ maxItemDiffs := CompareIntValues("MaxItems", type1.MaxItems, type2.MaxItems, WidenedType, NarrowedType)
+ diffs = append(diffs, maxItemDiffs...)
+ minItemsDiff := CompareIntValues("MinItems", type1.MinItems, type2.MinItems, NarrowedType, WidenedType)
+ diffs = append(diffs, minItemsDiff...)
+ }
+
+ if len(diffs) > 0 {
+ return diffs
+ }
+
+ diffs = CheckRefChange(diffs, type1, type2)
+ if len(diffs) > 0 {
+ return diffs
+ }
+
+ if !(isPrimitiveType(type1.Type) && isPrimitiveType(type2.Type)) {
+ return diffs
+ }
+
+ // check primitive type hierarchy change eg string -> integer = NarrowedChange
+ if type1.Type[0] != type2.Type[0] ||
+ type1.Format != type2.Format {
+ diff := getTypeHierarchyChange(primitiveTypeString(type1.Type[0], type1.Format), primitiveTypeString(type2.Type[0], type2.Format))
+ diffs = addTypeDiff(diffs, diff)
+ }
+
+ diffs = CheckStringTypeChanges(diffs, type1, type2)
+
+ if len(diffs) > 0 {
+ return diffs
+ }
+
+ diffs = checkNumericTypeChanges(diffs, type1, type2)
+
+ if len(diffs) > 0 {
+ return diffs
+ }
+
+ return diffs
+}
+
+func (sd *SpecAnalyser) compareParams(urlMethod URLMethod, location string, name string, param1, param2 spec.Parameter) {
+ diffLocation := DifferenceLocation{URL: urlMethod.Path, Method: urlMethod.Method}
+
+ childLocation := diffLocation.AddNode(getNameOnlyDiffNode(strings.Title(location)))
+ paramLocation := diffLocation.AddNode(getNameOnlyDiffNode(name))
+ sd.compareDescripton(paramLocation, param1.Description, param2.Description)
+
+ if param1.Schema != nil && param2.Schema != nil {
+ if len(name) > 0 {
+ childLocation = childLocation.AddNode(getSchemaDiffNode(name, param2.Schema))
+ }
+ sd.compareSchema(childLocation, param1.Schema, param2.Schema)
+ }
+
+ diffs := sd.CompareProps(forParam(param1), forParam(param2))
+
+ childLocation = childLocation.AddNode(getSchemaDiffNode(name, &param2.SimpleSchema))
+ if len(diffs) > 0 {
+ sd.addDiffs(childLocation, diffs)
+ }
+
+ diffs = CheckToFromRequired(param1.Required, param2.Required)
+ if len(diffs) > 0 {
+ sd.addDiffs(childLocation, diffs)
+ }
+
+ sd.compareSimpleSchema(childLocation, &param1.SimpleSchema, &param2.SimpleSchema)
+}
+
+func (sd *SpecAnalyser) addTypeDiff(location DifferenceLocation, diff *TypeDiff) {
+ diffCopy := diff
+ desc := diffCopy.Description
+ if len(desc) == 0 {
+ if diffCopy.FromType != diffCopy.ToType {
+ desc = fmt.Sprintf("%s -> %s", diffCopy.FromType, diffCopy.ToType)
+ }
+ }
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{
+ DifferenceLocation: location,
+ Code: diffCopy.Change,
+ DiffInfo: desc})
+}
+
+func (sd *SpecAnalyser) compareDescripton(location DifferenceLocation, desc1, desc2 string) {
+ if desc1 != desc2 {
+ code := ChangedDescripton
+ if len(desc1) > 0 {
+ code = DeletedDescripton
+ } else if len(desc2) > 0 {
+ code = AddedDescripton
+ }
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: location, Code: code})
+ }
+}
+
+func isPrimitiveType(item spec.StringOrArray) bool {
+ return len(item) > 0 && item[0] != ArrayType && item[0] != ObjectType
+}
+
+func isArrayType(item spec.StringOrArray) bool {
+ return len(item) > 0 && item[0] == ArrayType
+}
+
+func (sd *SpecAnalyser) getRefSchemaFromSpec1(ref spec.Ref) (*spec.Schema, string) {
+ return sd.schemaFromRef(ref, &sd.Definitions1)
+}
+
+func (sd *SpecAnalyser) getRefSchemaFromSpec2(ref spec.Ref) (*spec.Schema, string) {
+ return sd.schemaFromRef(ref, &sd.Definitions2)
+}
+
+// CompareSchemaFn is the signature of a function comparing two schemas at a location
+type CompareSchemaFn func(location DifferenceLocation, schema1, schema2 *spec.Schema)
+
+func (sd *SpecAnalyser) compareSchema(location DifferenceLocation, schema1, schema2 *spec.Schema) {
+
+ refDiffs := []TypeDiff{}
+ refDiffs = CheckRefChange(refDiffs, schema1, schema2)
+ if len(refDiffs) > 0 {
+ for _, d := range refDiffs {
+ diff := d
+ sd.addTypeDiff(location, &diff)
+ }
+ return
+ }
+
+ if isRefType(schema1) {
+ key := schemaLocationKey(location)
+ if _, ok := sd.schemasCompared[key]; ok {
+ return
+ }
+ sd.schemasCompared[key] = struct{}{}
+ schema1, _ = sd.schemaFromRef(getRef(schema1), &sd.Definitions1)
+ }
+
+ if isRefType(schema2) {
+ schema2, _ = sd.schemaFromRef(getRef(schema2), &sd.Definitions2)
+ }
+
+ sd.compareDescripton(location, schema1.Description, schema2.Description)
+
+ typeDiffs := sd.CompareProps(&schema1.SchemaProps, &schema2.SchemaProps)
+ if len(typeDiffs) > 0 {
+ sd.addDiffs(location, typeDiffs)
+ return
+ }
+
+ if isArray(schema1) {
+ if isArray(schema2) {
+ sd.compareSchema(location, schema1.Items.Schema, schema2.Items.Schema)
+ } else {
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: ChangedType, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ }
+ }
+
+ diffs := CompareProperties(location, schema1, schema2, sd.getRefSchemaFromSpec1, sd.getRefSchemaFromSpec2, sd.compareSchema)
+ for _, diff := range diffs {
+ sd.Diffs = sd.Diffs.addDiff(diff)
+ }
+}
+
+func (sd *SpecAnalyser) compareSimpleSchema(location DifferenceLocation, schema1, schema2 *spec.SimpleSchema) {
+ // check optional/required
+ if schema1.Nullable != schema2.Nullable {
+ // If optional is made required
+ if schema1.Nullable && !schema2.Nullable {
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: ChangedOptionalToRequired, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ } else if !schema1.Nullable && schema2.Nullable {
+ // If required is made optional
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: ChangedRequiredToOptional, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ }
+ }
+
+ if schema1.CollectionFormat != schema2.CollectionFormat {
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: ChangedCollectionFormat, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ }
+
+ if schema1.Default != schema2.Default {
+ switch {
+ case schema1.Default == nil && schema2.Default != nil:
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: AddedDefault, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ case schema1.Default != nil && schema2.Default == nil:
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: DeletedDefault, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ default:
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: ChangedDefault, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ }
+ }
+
+ if schema1.Example != schema2.Example {
+ switch {
+ case schema1.Example == nil && schema2.Example != nil:
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: AddedExample, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ case schema1.Example != nil && schema2.Example == nil:
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: DeletedExample, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ default:
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: ChangedExample, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ }
+ }
+
+ if isArray(schema1) {
+ if isArray(schema2) {
+ sd.compareSimpleSchema(location, &schema1.Items.SimpleSchema, &schema2.Items.SimpleSchema)
+ } else {
+ sd.addDiffs(location, addTypeDiff([]TypeDiff{}, TypeDiff{Change: ChangedType, FromType: getSchemaTypeStr(schema1), ToType: getSchemaTypeStr(schema2)}))
+ }
+ }
+}
+
+func (sd *SpecAnalyser) addDiffs(location DifferenceLocation, diffs []TypeDiff) {
+ for _, e := range diffs {
+ eachTypeDiff := e
+ if eachTypeDiff.Change != NoChangeDetected {
+ sd.addTypeDiff(location, &eachTypeDiff)
+ }
+ }
+}
+
+func addChildDiffNode(location DifferenceLocation, propName string, propSchema *spec.Schema) DifferenceLocation {
+ newNode := location.Node
+ childNode := fromSchemaProps(propName, &propSchema.SchemaProps)
+ if newNode != nil {
+ newNode = newNode.Copy()
+ newNode.AddLeafNode(&childNode)
+ } else {
+ newNode = &childNode
+ }
+ return DifferenceLocation{
+ URL: location.URL,
+ Method: location.Method,
+ Response: location.Response,
+ Node: newNode,
+ }
+}
+
+func fromSchemaProps(fieldName string, props *spec.SchemaProps) Node {
+ node := Node{}
+ node.TypeName, node.IsArray = getSchemaType(props)
+ node.Field = fieldName
+ return node
+}
+
+func (sd *SpecAnalyser) findAddedEndpoints() {
+ for URLMethod := range sd.urlMethods2 {
+ if _, ok := sd.urlMethods1[URLMethod]; !ok {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: DifferenceLocation{URL: URLMethod.Path, Method: URLMethod.Method}, Code: AddedEndpoint})
+ }
+ }
+}
+
+func (sd *SpecAnalyser) findDeletedEndpoints() {
+ for eachURLMethod, operation1 := range sd.urlMethods1 {
+ code := DeletedEndpoint
+ if (operation1.ParentPathItem.Options != nil && operation1.ParentPathItem.Options.Deprecated) ||
+ (operation1.Operation.Deprecated) {
+ code = DeletedDeprecatedEndpoint
+ }
+ if _, ok := sd.urlMethods2[eachURLMethod]; !ok {
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: DifferenceLocation{URL: eachURLMethod.Path, Method: eachURLMethod.Method}, Code: code})
+ }
+ }
+}
+
+func (sd *SpecAnalyser) analyseMetaDataProperty(item1, item2 string, codeIfDiff SpecChangeCode, compatIfDiff Compatibility) {
+ if item1 != item2 {
+ diffSpec := fmt.Sprintf("%s -> %s", item1, item2)
+ sd.Diffs = sd.Diffs.addDiff(SpecDifference{DifferenceLocation: DifferenceLocation{Node: &Node{Field: "Spec Metadata"}}, Code: codeIfDiff, Compatibility: compatIfDiff, DiffInfo: diffSpec})
+ }
+}
+
+func (sd *SpecAnalyser) schemaFromRef(ref spec.Ref, defns *spec.Definitions) (actualSchema *spec.Schema, definitionName string) {
+ definitionName = definitionFromRef(ref)
+ foundSchema, ok := (*defns)[definitionName]
+ if !ok {
+ return nil, definitionName
+ }
+ sd.ReferencedDefinitions[definitionName] = true
+ actualSchema = &foundSchema
+ return
+
+}
+
+func schemaLocationKey(location DifferenceLocation) string {
+ return location.Method + location.URL + location.Node.Field + location.Node.TypeName
+}
+
+// PropertyDefn combines a property with its required-ness
+type PropertyDefn struct {
+ Schema *spec.Schema
+ Required bool
+}
+
+// PropertyMap is a unified map including all AllOf fields
+type PropertyMap map[string]PropertyDefn
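The source comment in CompareProps above gives string -> integer as an example of a type-hierarchy narrowing; a small sketch (not vendored, behaviour assumed from that comment) probing it directly:

package diffprobe

import (
	"fmt"

	"github.com/go-openapi/spec"
	"github.com/go-swagger/go-swagger/cmd/swagger/commands/diff"
)

// probeNarrowing feeds two primitive SchemaProps to CompareProps and
// prints whatever type diffs come back.
func probeNarrowing() {
	sd := diff.NewSpecAnalyser()
	from := &spec.SchemaProps{Type: spec.StringOrArray{"string"}}
	to := &spec.SchemaProps{Type: spec.StringOrArray{"integer"}}
	for _, d := range sd.CompareProps(from, to) {
		fmt.Println(d.Change.Description())
	}
}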
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_difference.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_difference.go
new file mode 100644
index 000000000..73e38ce4e
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/spec_difference.go
@@ -0,0 +1,216 @@
+package diff
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "sort"
+ "strings"
+)
+
+// SpecDifference encapsulates the details of an individual diff in part of a spec
+type SpecDifference struct {
+ DifferenceLocation DifferenceLocation `json:"location"`
+ Code SpecChangeCode `json:"code"`
+ Compatibility Compatibility `json:"compatibility"`
+ DiffInfo string `json:"info,omitempty"`
+}
+
+// SpecDifferences list of differences
+type SpecDifferences []SpecDifference
+
+// Matches returns true if the diff matches another
+func (sd SpecDifference) Matches(other SpecDifference) bool {
+ return sd.Code == other.Code &&
+ sd.Compatibility == other.Compatibility &&
+ sd.DiffInfo == other.DiffInfo &&
+ equalLocations(sd.DifferenceLocation, other.DifferenceLocation)
+}
+
+func equalLocations(a, b DifferenceLocation) bool {
+ return a.Method == b.Method &&
+ a.Response == b.Response &&
+ a.URL == b.URL &&
+ equalNodes(a.Node, b.Node)
+}
+
+func equalNodes(a, b *Node) bool {
+ if a == nil && b == nil {
+ return true
+ }
+ if a == nil || b == nil {
+ return false
+ }
+ return a.Field == b.Field &&
+ a.IsArray == b.IsArray &&
+ a.TypeName == b.TypeName &&
+ equalNodes(a.ChildNode, b.ChildNode)
+
+}
+
+// BreakingChangeCount returns the number of breaking changes
+func (sd SpecDifferences) BreakingChangeCount() int {
+ count := 0
+ for _, eachDiff := range sd {
+ if eachDiff.Compatibility == Breaking {
+ count++
+ }
+ }
+ return count
+}
+
+// WarningChangeCount returns the number of warning-level changes
+func (sd SpecDifferences) WarningChangeCount() int {
+ count := 0
+ for _, eachDiff := range sd {
+ if eachDiff.Compatibility == Warning {
+ count++
+ }
+ }
+ return count
+}
+
+// FilterIgnores returns a copy of the list without the items in the specified ignore list
+func (sd SpecDifferences) FilterIgnores(ignores SpecDifferences) SpecDifferences {
+ newDiffs := SpecDifferences{}
+ for _, eachDiff := range sd {
+ if !ignores.Contains(eachDiff) {
+ newDiffs = newDiffs.addDiff(eachDiff)
+ }
+ }
+ return newDiffs
+}
+
+// Contains returns true if the list contains the specified difference
+func (sd SpecDifferences) Contains(diff SpecDifference) bool {
+ for _, eachDiff := range sd {
+ if eachDiff.Matches(diff) {
+ return true
+ }
+ }
+ return false
+}
+
+// String renders the difference as a human-readable string
+func (sd SpecDifference) String() string {
+ isResponse := sd.DifferenceLocation.Response > 0
+ hasMethod := len(sd.DifferenceLocation.Method) > 0
+ hasURL := len(sd.DifferenceLocation.URL) > 0
+
+ prefix := ""
+ direction := ""
+
+ if hasMethod {
+ if hasURL {
+ prefix = fmt.Sprintf("%s:%s", sd.DifferenceLocation.URL, sd.DifferenceLocation.Method)
+ }
+ if isResponse {
+ prefix += fmt.Sprintf(" -> %d", sd.DifferenceLocation.Response)
+ direction = "Response"
+ } else {
+ direction = "Request"
+ }
+ } else {
+ prefix = sd.DifferenceLocation.URL
+ }
+
+ paramOrPropertyLocation := ""
+ if sd.DifferenceLocation.Node != nil {
+ paramOrPropertyLocation = sd.DifferenceLocation.Node.String()
+ }
+ optionalInfo := ""
+ if sd.DiffInfo != "" {
+ optionalInfo = sd.DiffInfo
+ }
+
+ items := []string{}
+ for _, item := range []string{prefix, direction, paramOrPropertyLocation, sd.Code.Description(), optionalInfo} {
+ if item != "" {
+ items = append(items, item)
+ }
+ }
+ return strings.Join(items, " - ")
+ // return fmt.Sprintf("%s%s%s - %s%s", prefix, direction, paramOrPropertyLocation, sd.Code.Description(), optionalInfo)
+}
+
+func (sd SpecDifferences) addDiff(diff SpecDifference) SpecDifferences {
+ context := Request
+ if diff.DifferenceLocation.Response > 0 {
+ context = Response
+ }
+ diff.Compatibility = getCompatibilityForChange(diff.Code, context)
+
+ return append(sd, diff)
+}
+
+// ReportCompatibility reports breaking changes, returning an error when any are found
+func (sd *SpecDifferences) ReportCompatibility() (io.Reader, error, error) {
+ var out bytes.Buffer
+ breakingCount := sd.BreakingChangeCount()
+ if breakingCount > 0 {
+ if len(*sd) != breakingCount {
+ fmt.Fprintln(&out, "")
+ }
+ fmt.Fprintln(&out, "BREAKING CHANGES:\n=================")
+ _, _ = out.ReadFrom(sd.reportChanges(Breaking))
+ msg := fmt.Sprintf("compatibility test FAILED: %d breaking changes detected", breakingCount)
+ fmt.Fprintln(&out, msg)
+ return &out, nil, errors.New(msg)
+ }
+ fmt.Fprintf(&out, "compatibility test OK. No breaking changes identified.\n")
+ return &out, nil, nil
+}
+
+func (sd SpecDifferences) reportChanges(compat Compatibility) io.Reader {
+ toReportList := []string{}
+ var out bytes.Buffer
+
+ for _, diff := range sd {
+ if diff.Compatibility == compat {
+ toReportList = append(toReportList, diff.String())
+ }
+ }
+
+ sort.Slice(toReportList, func(i, j int) bool {
+ return toReportList[i] < toReportList[j]
+ })
+
+ for _, eachDiff := range toReportList {
+ fmt.Fprintln(&out, eachDiff)
+ }
+ return &out
+}
+
+// ReportAllDiffs lists all the diffs between two specs
+func (sd SpecDifferences) ReportAllDiffs(fmtJSON bool) (io.Reader, error, error) {
+ if fmtJSON {
+ b, err := JSONMarshal(sd)
+ if err != nil {
+ return nil, fmt.Errorf("couldn't print results: %v", err), nil
+ }
+ out, err := prettyprint(b)
+ return out, err, nil
+ }
+ numDiffs := len(sd)
+ if numDiffs == 0 {
+ return bytes.NewBuffer([]byte("No changes identified\n")), nil, nil
+ }
+
+ var out bytes.Buffer
+ if numDiffs != sd.BreakingChangeCount() {
+ fmt.Fprintln(&out, "NON-BREAKING CHANGES:\n=====================")
+ _, _ = out.ReadFrom(sd.reportChanges(NonBreaking))
+ if sd.WarningChangeCount() > 0 {
+ fmt.Fprintln(&out, "\nNON-BREAKING CHANGES WITH WARNING:\n==================================")
+ _, _ = out.ReadFrom(sd.reportChanges(Warning))
+ }
+ }
+
+ more, err, warn := sd.ReportCompatibility()
+ if err != nil {
+ return nil, err, warn
+ }
+ _, _ = out.ReadFrom(more)
+ return &out, nil, warn
+}
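A short sketch (not vendored) of summarising a SpecDifferences result by compatibility class, using only the methods defined above; Breaking is the compatibility constant used throughout the package:

package diffreport

import (
	"fmt"

	"github.com/go-swagger/go-swagger/cmd/swagger/commands/diff"
)

// summarize prints counts and then each breaking difference.
func summarize(diffs diff.SpecDifferences) {
	fmt.Println("breaking:", diffs.BreakingChangeCount())
	fmt.Println("warnings:", diffs.WarningChangeCount())
	for _, d := range diffs {
		if d.Compatibility == diff.Breaking {
			fmt.Println(" -", d.String())
		}
	}
}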
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/type_adapters.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/type_adapters.go
new file mode 100644
index 000000000..5679367fd
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/diff/type_adapters.go
@@ -0,0 +1,163 @@
+package diff
+
+import (
+ "github.com/go-openapi/spec"
+)
+
+func forItems(items *spec.Items) *spec.Schema {
+ if items == nil {
+ return nil
+ }
+ valids := items.CommonValidations
+ schema := spec.Schema{
+ SchemaProps: spec.SchemaProps{
+ Type: []string{items.SimpleSchema.Type},
+ Format: items.SimpleSchema.Format,
+ Maximum: valids.Maximum,
+ ExclusiveMaximum: valids.ExclusiveMaximum,
+ Minimum: valids.Minimum,
+ ExclusiveMinimum: valids.ExclusiveMinimum,
+ MaxLength: valids.MaxLength,
+ MinLength: valids.MinLength,
+ Pattern: valids.Pattern,
+ MaxItems: valids.MaxItems,
+ MinItems: valids.MinItems,
+ UniqueItems: valids.UniqueItems,
+ MultipleOf: valids.MultipleOf,
+ Enum: valids.Enum,
+ },
+ }
+ return &schema
+}
+
+func forHeader(header spec.Header) *spec.SchemaProps {
+ return &spec.SchemaProps{
+ Type: []string{header.Type},
+ Format: header.Format,
+ Items: &spec.SchemaOrArray{Schema: forItems(header.Items)},
+ Maximum: header.Maximum,
+ ExclusiveMaximum: header.ExclusiveMaximum,
+ Minimum: header.Minimum,
+ ExclusiveMinimum: header.ExclusiveMinimum,
+ MaxLength: header.MaxLength,
+ MinLength: header.MinLength,
+ Pattern: header.Pattern,
+ MaxItems: header.MaxItems,
+ MinItems: header.MinItems,
+ UniqueItems: header.UniqueItems,
+ MultipleOf: header.MultipleOf,
+ Enum: header.Enum,
+ }
+}
+
+func forParam(param spec.Parameter) *spec.SchemaProps {
+ return &spec.SchemaProps{
+ Type: []string{param.Type},
+ Format: param.Format,
+ Items: &spec.SchemaOrArray{Schema: forItems(param.Items)},
+ Maximum: param.Maximum,
+ ExclusiveMaximum: param.ExclusiveMaximum,
+ Minimum: param.Minimum,
+ ExclusiveMinimum: param.ExclusiveMinimum,
+ MaxLength: param.MaxLength,
+ MinLength: param.MinLength,
+ Pattern: param.Pattern,
+ MaxItems: param.MaxItems,
+ MinItems: param.MinItems,
+ UniqueItems: param.UniqueItems,
+ MultipleOf: param.MultipleOf,
+ Enum: param.Enum,
+ }
+}
+
+// OperationMap indexes the operations of a PathItem by HTTP method
+type OperationMap map[string]*spec.Operation
+
+func toMap(item *spec.PathItem) OperationMap {
+ m := make(OperationMap)
+
+ if item.Post != nil {
+ m["post"] = item.Post
+ }
+ if item.Get != nil {
+ m["get"] = item.Get
+ }
+ if item.Put != nil {
+ m["put"] = item.Put
+ }
+ if item.Patch != nil {
+ m["patch"] = item.Patch
+ }
+ if item.Head != nil {
+ m["head"] = item.Head
+ }
+ if item.Options != nil {
+ m["options"] = item.Options
+ }
+ if item.Delete != nil {
+ m["delete"] = item.Delete
+ }
+ return m
+}
+
+func getURLMethodsFor(spec *spec.Swagger) URLMethods {
+ returnURLMethods := URLMethods{}
+
+ for url, eachPath := range spec.Paths.Paths {
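+		// copy the loop variable: pointers to eachPath escape below
+		// (pre-Go 1.22 range semantics)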
+ eachPath := eachPath
+ opsMap := toMap(&eachPath)
+ for method, op := range opsMap {
+ returnURLMethods[URLMethod{url, method}] = &PathItemOp{&eachPath, op, eachPath.Extensions}
+ }
+ }
+ return returnURLMethods
+}
+
+func isStringType(typeName string) bool {
+ return typeName == "string" || typeName == "password"
+}
+
+// SchemaFromRefFn resolves a $ref into its schema and definition name
+type SchemaFromRefFn func(spec.Ref) (*spec.Schema, string)
+
+func propertiesFor(schema *spec.Schema, getRefFn SchemaFromRefFn) PropertyMap {
+ if isRefType(schema) {
+ schema, _ = getRefFn(schema.Ref)
+ }
+ props := PropertyMap{}
+
+ requiredProps := schema.Required
+ requiredMap := map[string]bool{}
+ for _, prop := range requiredProps {
+ requiredMap[prop] = true
+ }
+
+ if schema.Properties != nil {
+ for name, prop := range schema.Properties {
+ prop := prop
+ required := requiredMap[name]
+ props[name] = PropertyDefn{Schema: &prop, Required: required}
+ }
+ }
+ for _, e := range schema.AllOf {
+ eachAllOf := e
+ allOfMap := propertiesFor(&eachAllOf, getRefFn)
+ for name, prop := range allOfMap {
+ props[name] = prop
+ }
+ }
+ return props
+}
+
+func getRef(item interface{}) spec.Ref {
+ switch s := item.(type) {
+ case *spec.Refable:
+ return s.Ref
+ case *spec.Schema:
+ return s.Ref
+ case *spec.SchemaProps:
+ return s.Ref
+ default:
+ return spec.Ref{}
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/expand.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/expand.go
new file mode 100644
index 000000000..d8a704673
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/expand.go
@@ -0,0 +1,81 @@
+package commands
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log"
+ "os"
+
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+ flags "github.com/jessevdk/go-flags"
+)
+
+// ExpandSpec is a command that expands the $refs in a swagger document.
+//
+// There are no specific options for this expansion.
+type ExpandSpec struct {
+ Compact bool `long:"compact" description:"applies to JSON formatted specs. When present, doesn't prettify the json"`
+ Output flags.Filename `long:"output" short:"o" description:"the file to write to"`
+ Format string `long:"format" description:"the format for the spec document" default:"json" choice:"yaml" choice:"json"`
+}
+
+// Execute expands the spec
+func (c *ExpandSpec) Execute(args []string) error {
+ if len(args) != 1 {
+ return errors.New("expand command requires the single swagger document url to be specified")
+ }
+
+ swaggerDoc := args[0]
+ specDoc, err := loads.Spec(swaggerDoc)
+ if err != nil {
+ return err
+ }
+
+ exp, err := specDoc.Expanded()
+ if err != nil {
+ return err
+ }
+
+ return writeToFile(exp.Spec(), !c.Compact, c.Format, string(c.Output))
+}
+
+func writeToFile(swspec *spec.Swagger, pretty bool, format string, output string) error {
+ var b []byte
+ var err error
+ asJSON := format == "json"
+
+ log.Println("format = ", format)
+ switch {
+ case pretty && asJSON:
+ b, err = json.MarshalIndent(swspec, "", " ")
+ case asJSON:
+ b, err = json.Marshal(swspec)
+ default:
+ // marshals as YAML
+ b, err = json.Marshal(swspec)
+ if err == nil {
+ var data swag.JSONMapSlice
+ if erg := json.Unmarshal(b, &data); erg != nil {
+ log.Fatalln(erg)
+ }
+ var bb interface{}
+ bb, err = data.MarshalYAML()
+ b = bb.([]byte)
+ }
+
+ }
+
+ if err != nil {
+ return err
+ }
+
+ if output == "" {
+ fmt.Println(string(b))
+ return nil
+ }
+
+ return os.WriteFile(output, b, 0644) // #nosec
+}
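For reference, a minimal sketch (not vendored) of the same expansion done programmatically with go-openapi/loads, mirroring Execute above; the file names are hypothetical:

package main

import (
	"encoding/json"
	"log"
	"os"

	"github.com/go-openapi/loads"
)

func main() {
	doc, err := loads.Spec("swagger.yaml")
	if err != nil {
		log.Fatal(err)
	}
	exp, err := doc.Expanded() // resolve all $refs
	if err != nil {
		log.Fatal(err)
	}
	b, err := json.MarshalIndent(exp.Spec(), "", "  ")
	if err != nil {
		log.Fatal(err)
	}
	if err := os.WriteFile("expanded.json", b, 0644); err != nil {
		log.Fatal(err)
	}
}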
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/flatten.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/flatten.go
new file mode 100644
index 000000000..b30b50fd5
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/flatten.go
@@ -0,0 +1,48 @@
+package commands
+
+import (
+ "errors"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/loads"
+ "github.com/go-swagger/go-swagger/cmd/swagger/commands/generate"
+ flags "github.com/jessevdk/go-flags"
+)
+
+// FlattenSpec is a command that flattens a swagger document:
+// it expands the remote references in a spec and moves inline schemas to definitions,
+// so that after flattening there are no complex inline schemas anymore
+type FlattenSpec struct {
+ Compact bool `long:"compact" description:"applies to JSON formatted specs. When present, doesn't prettify the json"`
+ Output flags.Filename `long:"output" short:"o" description:"the file to write to"`
+ Format string `long:"format" description:"the format for the spec document" default:"json" choice:"yaml" choice:"json"`
+ generate.FlattenCmdOptions
+}
+
+// Execute flattens the spec
+func (c *FlattenSpec) Execute(args []string) error {
+ if len(args) != 1 {
+ return errors.New("flatten command requires the single swagger document url to be specified")
+ }
+
+ swaggerDoc := args[0]
+ specDoc, err := loads.Spec(swaggerDoc)
+ if err != nil {
+ return err
+ }
+
+ flattenOpts := c.FlattenCmdOptions.SetFlattenOptions(&analysis.FlattenOpts{
+ // defaults
+ Minimal: true,
+ Verbose: true,
+ Expand: false,
+ RemoveUnused: false,
+ })
+ flattenOpts.BasePath = specDoc.SpecFilePath()
+ flattenOpts.Spec = analysis.New(specDoc.Spec())
+ if err := analysis.Flatten(*flattenOpts); err != nil {
+ return err
+ }
+
+ return writeToFile(specDoc.Spec(), !c.Compact, c.Format, string(c.Output))
+}
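And the flattening counterpart as a sketch (not vendored), building the analysis options directly instead of going through FlattenCmdOptions; paths are hypothetical:

package main

import (
	"log"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/loads"
)

func main() {
	doc, err := loads.Spec("swagger.yaml")
	if err != nil {
		log.Fatal(err)
	}
	opts := analysis.FlattenOpts{
		BasePath: doc.SpecFilePath(),
		Spec:     analysis.New(doc.Spec()),
		Minimal:  true, // same default the command applies above
	}
	if err := analysis.Flatten(opts); err != nil {
		log.Fatal(err)
	}
	// doc.Spec() has now been flattened in place, as in FlattenSpec.Execute.
}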
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate.go
new file mode 100644
index 000000000..5f4b8598f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate.go
@@ -0,0 +1,29 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import "github.com/go-swagger/go-swagger/cmd/swagger/commands/generate"
+
+// Generate command to group all generator commands together
+type Generate struct {
+ Model *generate.Model `command:"model"`
+ Operation *generate.Operation `command:"operation"`
+ Support *generate.Support `command:"support"`
+ Server *generate.Server `command:"server"`
+ Spec *generate.SpecFile `command:"spec"`
+ Client *generate.Client `command:"client"`
+ Cli *generate.Cli `command:"cli"`
+ Markdown *generate.Markdown `command:"markdown"`
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/cli.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/cli.go
new file mode 100644
index 000000000..e8ea11c79
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/cli.go
@@ -0,0 +1,26 @@
+package generate
+
+import "github.com/go-swagger/go-swagger/generator"
+
+type Cli struct {
+ // generate a cli includes all client code
+ Client
+	// cmd/<cli-app-name>/main.go will be generated. This ensures that go install will compile the app with the desired name.
+ CliAppName string `long:"cli-app-name" description:"the app name for the cli executable. useful for go install." default:"cli"`
+}
+
+func (c Cli) apply(opts *generator.GenOpts) {
+ c.Client.apply(opts)
+ opts.IncludeCLi = true
+ opts.CliPackage = "cli" // hardcoded for now, can be exposed via cmd opt later
+ opts.CliAppName = c.CliAppName
+}
+
+func (c *Cli) generate(opts *generator.GenOpts) error {
+ return c.Client.generate(opts)
+}
+
+// Execute runs this command
+func (c *Cli) Execute(args []string) error {
+ return createSwagger(c)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/client.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/client.go
new file mode 100644
index 000000000..3a78b5622
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/client.go
@@ -0,0 +1,86 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generate
+
+import (
+ "log"
+
+ "github.com/go-swagger/go-swagger/generator"
+)
+
+type clientOptions struct {
+ ClientPackage string `long:"client-package" short:"c" description:"the package to save the client specific code" default:"client"`
+}
+
+func (co clientOptions) apply(opts *generator.GenOpts) {
+ opts.ClientPackage = co.ClientPackage
+}
+
+// Client the command to generate a swagger client
+type Client struct {
+ WithShared
+ WithModels
+ WithOperations
+
+ clientOptions
+ schemeOptions
+ mediaOptions
+
+ SkipModels bool `long:"skip-models" description:"no models will be generated when this flag is specified"`
+ SkipOperations bool `long:"skip-operations" description:"no operations will be generated when this flag is specified"`
+
+ Name string `long:"name" short:"A" description:"the name of the application, defaults to a mangled value of info.title"`
+}
+
+func (c Client) apply(opts *generator.GenOpts) {
+ c.Shared.apply(opts)
+ c.Models.apply(opts)
+ c.Operations.apply(opts)
+ c.clientOptions.apply(opts)
+ c.schemeOptions.apply(opts)
+ c.mediaOptions.apply(opts)
+
+ opts.IncludeModel = !c.SkipModels
+ opts.IncludeValidator = !c.SkipModels
+ opts.IncludeHandler = !c.SkipOperations
+ opts.IncludeParameters = !c.SkipOperations
+ opts.IncludeResponses = !c.SkipOperations
+ opts.Name = c.Name
+
+ opts.IsClient = true
+ opts.IncludeSupport = true
+}
+
+func (c *Client) generate(opts *generator.GenOpts) error {
+ return generator.GenerateClient(c.Name, c.Models.Models, c.Operations.Operations, opts)
+}
+
+func (c *Client) log(rp string) {
+ log.Println(`Generation completed!
+
+For this generation to compile you need to have some packages in your go.mod:
+
+ * github.com/go-openapi/errors
+ * github.com/go-openapi/runtime
+ * github.com/go-openapi/runtime/client
+ * github.com/go-openapi/strfmt
+
+You can get these now with: go mod tidy`)
+}
+
+// Execute runs this command
+func (c *Client) Execute(args []string) error {
+ return createSwagger(c)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/contrib.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/contrib.go
new file mode 100644
index 000000000..196558e70
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/contrib.go
@@ -0,0 +1,17 @@
+package generate
+
+import (
+ "github.com/go-swagger/go-swagger/generator"
+)
+
+// contribOptionsOverride gives contributed templates the ability to override the options if they need to
+func contribOptionsOverride(opts *generator.GenOpts) {
+ // nolint: gocritic
+ switch opts.Template {
+ case "stratoscale":
+ // Stratoscale template needs to regenerate the configureapi on every run.
+ opts.RegenerateConfigureAPI = true
+ // It also does not use the main.go
+ opts.IncludeMain = false
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/markdown.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/markdown.go
new file mode 100644
index 000000000..ba9df3812
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/markdown.go
@@ -0,0 +1,33 @@
+package generate
+
+import (
+ "github.com/go-swagger/go-swagger/generator"
+ "github.com/jessevdk/go-flags"
+)
+
+// Markdown generates a markdown representation of the spec
+type Markdown struct {
+ WithShared
+ WithModels
+ WithOperations
+
+ Output flags.Filename `long:"output" short:"" description:"the file to write the generated markdown." default:"markdown.md"`
+}
+
+func (m Markdown) apply(opts *generator.GenOpts) {
+ m.Shared.apply(opts)
+ m.Models.apply(opts)
+ m.Operations.apply(opts)
+}
+
+func (m *Markdown) generate(opts *generator.GenOpts) error {
+ return generator.GenerateMarkdown(string(m.Output), m.Models.Models, m.Operations.Operations, opts)
+}
+
+func (m Markdown) log(rp string) {
+}
+
+// Execute runs this command
+func (m *Markdown) Execute(args []string) error {
+ return createSwagger(m)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/model.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/model.go
new file mode 100644
index 000000000..fb8c14268
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/model.go
@@ -0,0 +1,98 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generate
+
+import (
+ "errors"
+ "log"
+
+ "github.com/go-swagger/go-swagger/generator"
+)
+
+type modelOptions struct {
+ ModelPackage string `long:"model-package" short:"m" description:"the package to save the models" default:"models"`
+ Models []string `long:"model" short:"M" description:"specify a model to include in generation, repeat for multiple (defaults to all)"`
+ ExistingModels string `long:"existing-models" description:"use pre-generated models e.g. github.com/foobar/model"`
+ StrictAdditionalProperties bool `long:"strict-additional-properties" description:"disallow extra properties when additionalProperties is set to false"`
+ KeepSpecOrder bool `long:"keep-spec-order" description:"keep schema properties order identical to spec file"`
+ AllDefinitions bool `long:"all-definitions" description:"generate all model definitions regardless of usage in operations" hidden:"deprecated"`
+ StructTags []string `long:"struct-tags" description:"the struct tags to generate, repeat for multiple (defaults to json)"`
+}
+
+func (mo modelOptions) apply(opts *generator.GenOpts) {
+ opts.ModelPackage = mo.ModelPackage
+ opts.Models = mo.Models
+ opts.ExistingModels = mo.ExistingModels
+ opts.StrictAdditionalProperties = mo.StrictAdditionalProperties
+ opts.PropertiesSpecOrder = mo.KeepSpecOrder
+ opts.IgnoreOperations = mo.AllDefinitions
+ opts.StructTags = mo.StructTags
+}
+
+// WithModels adds the model options group.
+//
+// This group is available to all commands that need some model generation.
+type WithModels struct {
+ Models modelOptions `group:"Options for model generation"`
+}
+
+// Model the generate model file command.
+//
+// Define the options that are specific to the "swagger generate model" command.
+type Model struct {
+ WithShared
+ WithModels
+
+ NoStruct bool `long:"skip-struct" description:"when present will not generate the model struct" hidden:"deprecated"`
+ Name []string `long:"name" short:"n" description:"the model to generate, repeat for multiple (defaults to all). Same as --model"`
+ AcceptDefinitionsOnly bool `long:"accept-definitions-only" description:"accepts a partial swagger spec with only the definitions key"`
+}
+
+func (m Model) apply(opts *generator.GenOpts) {
+ m.Shared.apply(opts)
+ m.Models.apply(opts)
+
+ opts.IncludeModel = !m.NoStruct
+ opts.IncludeValidator = !m.NoStruct
+ opts.AcceptDefinitionsOnly = m.AcceptDefinitionsOnly
+}
+
+func (m Model) log(rp string) {
+ log.Println(`Generation completed!
+
+For this generation to compile you need to have some packages in your go.mod:
+
+ * github.com/go-openapi/validate
+ * github.com/go-openapi/strfmt
+
+You can get these now with: go mod tidy`)
+}
+
+func (m *Model) generate(opts *generator.GenOpts) error {
+ return generator.GenerateModels(append(m.Name, m.Models.Models...), opts)
+}
+
+// Execute generates a model file
+func (m *Model) Execute(args []string) error {
+
+ if m.Shared.DumpData && len(append(m.Name, m.Models.Models...)) > 1 {
+ return errors.New("only 1 model at a time is supported for dumping data")
+ }
+
+ if m.Models.ExistingModels != "" {
+ log.Println("warning: Ignoring existing-models flag when generating models.")
+ }
+ return createSwagger(m)
+}
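The command ultimately delegates to generator.GenerateModels. A minimal programmatic sketch, assuming a local swagger.yaml; an empty name list keeps the command's generate-all default, and the Include flags mirror what apply() sets when --skip-struct is absent:

    package main

    import (
        "log"

        "github.com/go-swagger/go-swagger/generator"
    )

    func main() {
        opts := new(generator.GenOpts)
        opts.Spec = "swagger.yaml"   // assumed spec location
        opts.Target = "./"           // as with --target
        opts.ModelPackage = "models" // as with --model-package
        opts.IncludeModel = true
        opts.IncludeValidator = true
        if err := opts.EnsureDefaults(); err != nil {
            log.Fatal(err)
        }
        if err := generator.GenerateModels(nil, opts); err != nil {
            log.Fatal(err)
        }
    }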
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/operation.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/operation.go
new file mode 100644
index 000000000..ba554314a
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/operation.go
@@ -0,0 +1,104 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generate
+
+import (
+ "errors"
+ "log"
+
+ "github.com/go-swagger/go-swagger/generator"
+)
+
+type operationOptions struct {
+ Operations []string `long:"operation" short:"O" description:"specify an operation to include, repeat for multiple (defaults to all)"`
+ Tags []string `long:"tags" description:"the tags to include, if not specified defaults to all" group:"operations"`
+ APIPackage string `long:"api-package" short:"a" description:"the package to save the operations" default:"operations"`
+ WithEnumCI bool `long:"with-enum-ci" description:"allow case-insensitive enumerations"`
+
+ // tags handling
+ SkipTagPackages bool `long:"skip-tag-packages" description:"skips the generation of tag-based operation packages, resulting in a flat generation"`
+}
+
+func (oo operationOptions) apply(opts *generator.GenOpts) {
+ opts.Operations = oo.Operations
+ opts.Tags = oo.Tags
+ opts.APIPackage = oo.APIPackage
+ opts.AllowEnumCI = oo.WithEnumCI
+ opts.SkipTagPackages = oo.SkipTagPackages
+}
+
+// WithOperations adds the operations options group
+type WithOperations struct {
+ Operations operationOptions `group:"Options for operation generation"`
+}
+
+// Operation the generate operation files command
+type Operation struct {
+ WithShared
+ WithOperations
+
+ clientOptions
+ serverOptions
+ schemeOptions
+ mediaOptions
+
+ ModelPackage string `long:"model-package" short:"m" description:"the package to save the models" default:"models"`
+
+ NoHandler bool `long:"skip-handler" description:"when present will not generate an operation handler"`
+ NoStruct bool `long:"skip-parameters" description:"when present will not generate the parameter model struct"`
+ NoResponses bool `long:"skip-responses" description:"when present will not generate the response model struct"`
+ NoURLBuilder bool `long:"skip-url-builder" description:"when present will not generate a URL builder"`
+
+ Name []string `long:"name" short:"n" description:"the operations to generate, repeat for multiple (defaults to all). Same as --operation"`
+}
+
+func (o Operation) apply(opts *generator.GenOpts) {
+ o.Shared.apply(opts)
+ o.Operations.apply(opts)
+ o.clientOptions.apply(opts)
+ o.serverOptions.apply(opts)
+ o.schemeOptions.apply(opts)
+ o.mediaOptions.apply(opts)
+
+ opts.ModelPackage = o.ModelPackage
+ opts.IncludeHandler = !o.NoHandler
+ opts.IncludeResponses = !o.NoResponses
+ opts.IncludeParameters = !o.NoStruct
+ opts.IncludeURLBuilder = !o.NoURLBuilder
+}
+
+func (o *Operation) generate(opts *generator.GenOpts) error {
+ return generator.GenerateServerOperation(append(o.Name, o.Operations.Operations...), opts)
+}
+
+func (o Operation) log(rp string) {
+
+ log.Println(`Generation completed!
+
+For this generation to compile you need to have some packages in your go.mod:
+
+ * github.com/go-openapi/runtime
+
+You can get these now with: go mod tidy`)
+}
+
+// Execute generates operation files
+func (o *Operation) Execute(args []string) error {
+ if o.Shared.DumpData && len(append(o.Name, o.Operations.Operations...)) > 1 {
+ return errors.New("only 1 operation at a time is supported for dumping data")
+ }
+
+ return createSwagger(o)
+}
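As with models, the heavy lifting happens in the generator package. A minimal sketch, assuming a local swagger.yaml and a hypothetical operation id getUser; the Include flags mirror apply() with no skip flags set:

    package main

    import (
        "log"

        "github.com/go-swagger/go-swagger/generator"
    )

    func main() {
        opts := new(generator.GenOpts)
        opts.Spec = "swagger.yaml"     // assumed spec location
        opts.APIPackage = "operations" // as with --api-package
        opts.IncludeHandler = true
        opts.IncludeParameters = true
        opts.IncludeResponses = true
        opts.IncludeURLBuilder = true
        if err := opts.EnsureDefaults(); err != nil {
            log.Fatal(err)
        }
        // "getUser" is a placeholder; pass nil to generate every operation.
        if err := generator.GenerateServerOperation([]string{"getUser"}, opts); err != nil {
            log.Fatal(err)
        }
    }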
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/server.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/server.go
new file mode 100644
index 000000000..92495adde
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/server.go
@@ -0,0 +1,119 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generate
+
+import (
+ "log"
+ "strings"
+
+ "github.com/go-swagger/go-swagger/generator"
+)
+
+type serverOptions struct {
+ ServerPackage string `long:"server-package" short:"s" description:"the package to save the server specific code" default:"restapi"`
+ MainTarget string `long:"main-package" short:"" description:"the location of the generated main. Defaults to cmd/{name}-server" default:""`
+ ImplementationPackage string `long:"implementation-package" short:"" description:"the location of the backend implementation of the server, which will be autowired with the API" default:""`
+}
+
+func (cs serverOptions) apply(opts *generator.GenOpts) {
+ opts.ServerPackage = cs.ServerPackage
+}
+
+// Server the command to generate an entire server application
+type Server struct {
+ WithShared
+ WithModels
+ WithOperations
+
+ serverOptions
+ schemeOptions
+ mediaOptions
+
+ SkipModels bool `long:"skip-models" description:"no models will be generated when this flag is specified"`
+ SkipOperations bool `long:"skip-operations" description:"no operations will be generated when this flag is specified"`
+ SkipSupport bool `long:"skip-support" description:"no supporting files will be generated when this flag is specified"`
+ ExcludeMain bool `long:"exclude-main" description:"exclude main function, so just generate the library"`
+ ExcludeSpec bool `long:"exclude-spec" description:"don't embed the swagger specification"`
+ FlagStrategy string `long:"flag-strategy" description:"the strategy to provide flags for the server" default:"go-flags" choice:"go-flags" choice:"pflag" choice:"flag"` // nolint: staticcheck
+ CompatibilityMode string `long:"compatibility-mode" description:"the compatibility mode for the tls server" default:"modern" choice:"modern" choice:"intermediate"` // nolint: staticcheck
+ RegenerateConfigureAPI bool `long:"regenerate-configureapi" description:"Force regeneration of configureapi.go"`
+
+ Name string `long:"name" short:"A" description:"the name of the application, defaults to a mangled value of info.title"`
+ // TODO(fredbi): CmdName string `long:"cmd-name" short:"A" description:"the name of the server command, when main is generated (defaults to {name}-server)"`
+
+ // deprecated flags
+ WithContext bool `long:"with-context" description:"handlers get a context as first arg (deprecated)"`
+}
+
+func (s Server) apply(opts *generator.GenOpts) {
+ if s.WithContext {
+ log.Printf("warning: deprecated option --with-context is ignored")
+ }
+
+ s.Shared.apply(opts)
+ s.Models.apply(opts)
+ s.Operations.apply(opts)
+ s.serverOptions.apply(opts)
+ s.schemeOptions.apply(opts)
+ s.mediaOptions.apply(opts)
+
+ opts.IncludeModel = !s.SkipModels
+ opts.IncludeValidator = !s.SkipModels
+ opts.IncludeHandler = !s.SkipOperations
+ opts.IncludeParameters = !s.SkipOperations
+ opts.IncludeResponses = !s.SkipOperations
+ opts.IncludeURLBuilder = !s.SkipOperations
+ opts.IncludeSupport = !s.SkipSupport
+ opts.IncludeMain = !s.ExcludeMain
+ opts.ExcludeSpec = s.ExcludeSpec
+ opts.FlagStrategy = s.FlagStrategy
+ opts.CompatibilityMode = s.CompatibilityMode
+ opts.RegenerateConfigureAPI = s.RegenerateConfigureAPI
+
+ opts.Name = s.Name
+ opts.MainPackage = s.MainTarget
+
+ opts.ImplementationPackage = s.ImplementationPackage
+}
+
+func (s *Server) generate(opts *generator.GenOpts) error {
+ return generator.GenerateServer(s.Name, s.Models.Models, s.Operations.Operations, opts)
+}
+
+func (s Server) log(rp string) {
+ var flagsPackage string
+ switch {
+ case strings.HasPrefix(s.FlagStrategy, "pflag"):
+ flagsPackage = "github.com/spf13/pflag"
+ case strings.HasPrefix(s.FlagStrategy, "flag"):
+ flagsPackage = "flag"
+ default:
+ flagsPackage = "github.com/jessevdk/go-flags"
+ }
+
+ log.Println(`Generation completed!
+
+For this generation to compile you need to have some packages in your go.mod:
+
+ * github.com/go-openapi/runtime
+ * ` + flagsPackage + `
+
+You can get these now with: go mod tidy`)
+}
+
+// Execute runs this command
+func (s *Server) Execute(args []string) error {
+ return createSwagger(s)
+}
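A minimal programmatic counterpart to swagger generate server, assuming a local swagger.yaml; the empty name falls back to a mangled info.title, as described for --name above, and the Include flags mirror apply() with no skip flags set:

    package main

    import (
        "log"

        "github.com/go-swagger/go-swagger/generator"
    )

    func main() {
        opts := new(generator.GenOpts)
        opts.Spec = "swagger.yaml" // assumed spec location
        opts.IncludeModel = true
        opts.IncludeValidator = true
        opts.IncludeHandler = true
        opts.IncludeParameters = true
        opts.IncludeResponses = true
        opts.IncludeURLBuilder = true
        opts.IncludeSupport = true
        opts.IncludeMain = false // like --exclude-main: generate the library only
        if err := opts.EnsureDefaults(); err != nil {
            log.Fatal(err)
        }
        if err := generator.GenerateServer("", nil, nil, opts); err != nil {
            log.Fatal(err)
        }
    }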
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/shared.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/shared.go
new file mode 100644
index 000000000..ab9725a7c
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/shared.go
@@ -0,0 +1,240 @@
+package generate
+
+import (
+ "fmt"
+ "log"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/swag"
+ "github.com/go-swagger/go-swagger/generator"
+ flags "github.com/jessevdk/go-flags"
+ "github.com/spf13/viper"
+)
+
+// FlattenCmdOptions determines options to the flatten spec preprocessing
+type FlattenCmdOptions struct {
+ WithExpand bool `long:"with-expand" description:"expands all $ref's in spec prior to generation (shorthand to --with-flatten=expand)" group:"shared"`
+ WithFlatten []string `long:"with-flatten" description:"flattens all $ref's in spec prior to generation" choice:"minimal" choice:"full" choice:"expand" choice:"verbose" choice:"noverbose" choice:"remove-unused" default:"minimal" default:"verbose" group:"shared"` // nolint: staticcheck
+}
+
+// SetFlattenOptions builds flatten options from command line args
+func (f *FlattenCmdOptions) SetFlattenOptions(dflt *analysis.FlattenOpts) (res *analysis.FlattenOpts) {
+ res = &analysis.FlattenOpts{}
+ if dflt != nil {
+ *res = *dflt
+ }
+ if f == nil {
+ return
+ }
+ verboseIsSet := false
+ minimalIsSet := false
+ expandIsSet := false
+ if f.WithExpand {
+ res.Expand = true
+ expandIsSet = true
+ }
+ for _, opt := range f.WithFlatten {
+ switch opt {
+ case "verbose":
+ res.Verbose = true
+ verboseIsSet = true
+ case "noverbose":
+ if !verboseIsSet {
+ // verbose flag takes precedence
+ res.Verbose = false
+ verboseIsSet = true
+ }
+ case "remove-unused":
+ res.RemoveUnused = true
+ case "expand":
+ res.Expand = true
+ expandIsSet = true
+ case "full":
+ if !minimalIsSet && !expandIsSet {
+ // minimal flag takes precedence
+ res.Minimal = false
+ minimalIsSet = true
+ }
+ case "minimal":
+ if !expandIsSet {
+ // expand flag takes precedence
+ res.Minimal = true
+ minimalIsSet = true
+ }
+ }
+ }
+ return
+}
+
+type sharedCommand interface {
+ apply(*generator.GenOpts)
+ getConfigFile() string
+ generate(*generator.GenOpts) error
+ log(string)
+}
+
+type schemeOptions struct {
+ Principal string `short:"P" long:"principal" description:"the model to use for the security principal"`
+ DefaultScheme string `long:"default-scheme" description:"the default scheme for this API" default:"http"`
+
+ PrincipalIface bool `long:"principal-is-interface" description:"the security principal provided is an interface, not a struct"`
+}
+
+func (so schemeOptions) apply(opts *generator.GenOpts) {
+ opts.Principal = so.Principal
+ opts.PrincipalCustomIface = so.PrincipalIface
+ opts.DefaultScheme = so.DefaultScheme
+}
+
+type mediaOptions struct {
+ DefaultProduces string `long:"default-produces" description:"the default mime type that API operations produce" default:"application/json"`
+ DefaultConsumes string `long:"default-consumes" description:"the default mime type that API operations consume" default:"application/json"`
+}
+
+func (m mediaOptions) apply(opts *generator.GenOpts) {
+ opts.DefaultProduces = m.DefaultProduces
+ opts.DefaultConsumes = m.DefaultConsumes
+
+ const xmlIdentifier = "xml"
+ opts.WithXML = strings.Contains(opts.DefaultProduces, xmlIdentifier) || strings.Contains(opts.DefaultConsumes, xmlIdentifier)
+}
+
+// WithShared adds the shared options group
+type WithShared struct {
+ Shared sharedOptions `group:"Options common to all code generation commands"`
+}
+
+func (w WithShared) getConfigFile() string {
+ return string(w.Shared.ConfigFile)
+}
+
+type sharedOptionsCommon struct {
+ Spec flags.Filename `long:"spec" short:"f" description:"the spec file to use (default swagger.{json,yml,yaml})" group:"shared"`
+ Target flags.Filename `long:"target" short:"t" default:"./" description:"the base directory for generating the files" group:"shared"`
+ Template string `long:"template" description:"load contributed templates" choice:"stratoscale" group:"shared"`
+ TemplateDir flags.Filename `long:"template-dir" short:"T" description:"alternative template override directory" group:"shared"`
+ ConfigFile flags.Filename `long:"config-file" short:"C" description:"configuration file to use for overriding template options" group:"shared"`
+ CopyrightFile flags.Filename `long:"copyright-file" short:"r" description:"copyright file used to add copyright header" group:"shared"`
+ AdditionalInitialisms []string `long:"additional-initialism" description:"consecutive capitals that should be considered initialisms" group:"shared"`
+ AllowTemplateOverride bool `long:"allow-template-override" description:"allows overriding protected templates" group:"shared"`
+ SkipValidation bool `long:"skip-validation" description:"skips validation of spec prior to generation" group:"shared"`
+ DumpData bool `long:"dump-data" description:"when present dumps the json for the template generator instead of generating files" group:"shared"`
+ StrictResponders bool `long:"strict-responders" description:"Use strict type for the handler return value"`
+ FlattenCmdOptions
+}
+
+func (s sharedOptionsCommon) apply(opts *generator.GenOpts) {
+ opts.Spec = string(s.Spec)
+ opts.Target = string(s.Target)
+ opts.Template = s.Template
+ opts.TemplateDir = string(s.TemplateDir)
+ opts.AllowTemplateOverride = s.AllowTemplateOverride
+ opts.ValidateSpec = !s.SkipValidation
+ opts.DumpData = s.DumpData
+ opts.FlattenOpts = s.FlattenCmdOptions.SetFlattenOptions(opts.FlattenOpts)
+ opts.Copyright = string(s.CopyrightFile)
+ opts.StrictResponders = s.StrictResponders
+
+ swag.AddInitialisms(s.AdditionalInitialisms...)
+}
+
+func setCopyright(copyrightFile string) (string, error) {
+ // read the Copyright from file path in opts
+ if copyrightFile == "" {
+ return "", nil
+ }
+ bytebuffer, err := os.ReadFile(copyrightFile)
+ if err != nil {
+ return "", err
+ }
+ return string(bytebuffer), nil
+}
+
+func createSwagger(s sharedCommand) error {
+ cfg, err := readConfig(s.getConfigFile())
+ if err != nil {
+ return err
+ }
+ setDebug(cfg) // viper config Debug
+
+ opts := new(generator.GenOpts)
+ s.apply(opts)
+
+ opts.Copyright, err = setCopyright(opts.Copyright)
+ if err != nil {
+ return fmt.Errorf("could not load copyright file: %v", err)
+ }
+
+ if opts.Template != "" {
+ contribOptionsOverride(opts)
+ }
+
+ if err = opts.EnsureDefaults(); err != nil {
+ return err
+ }
+
+ if err = configureOptsFromConfig(cfg, opts); err != nil {
+ return err
+ }
+
+ if err = s.generate(opts); err != nil {
+ return err
+ }
+
+ basepath, err := filepath.Abs(".")
+ if err != nil {
+ return err
+ }
+
+ targetAbs, err := filepath.Abs(opts.Target)
+ if err != nil {
+ return err
+ }
+ rp, err := filepath.Rel(basepath, targetAbs)
+ if err != nil {
+ return err
+ }
+
+ s.log(rp)
+
+ return nil
+}
+
+func readConfig(filename string) (*viper.Viper, error) {
+ if filename == "" {
+ return nil, nil
+ }
+
+ abspath, err := filepath.Abs(filename)
+ if err != nil {
+ return nil, err
+ }
+ log.Println("trying to read config from", abspath)
+ return generator.ReadConfig(abspath)
+}
+
+func configureOptsFromConfig(cfg *viper.Viper, opts *generator.GenOpts) error {
+ if cfg == nil {
+ return nil
+ }
+
+ var def generator.LanguageDefinition
+ if err := cfg.Unmarshal(&def); err != nil {
+ return err
+ }
+ return def.ConfigureOpts(opts)
+}
+
+func setDebug(cfg *viper.Viper) {
+ // viper config debug
+ if os.Getenv("DEBUG") != "" || os.Getenv("SWAGGER_DEBUG") != "" {
+ if cfg != nil {
+ cfg.Debug()
+ } else {
+ log.Println("No config read")
+ }
+ }
+}
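The precedence rules above can be exercised directly, since FlattenCmdOptions and SetFlattenOptions are exported. A small sketch with arbitrarily chosen values:

    package main

    import (
        "fmt"

        "github.com/go-openapi/analysis"
        "github.com/go-swagger/go-swagger/cmd/swagger/commands/generate"
    )

    func main() {
        f := &generate.FlattenCmdOptions{WithFlatten: []string{"remove-unused", "noverbose"}}
        res := f.SetFlattenOptions(&analysis.FlattenOpts{Minimal: true})

        // Minimal comes from the defaults; the two list entries toggle the rest.
        fmt.Printf("minimal=%v verbose=%v remove-unused=%v\n",
            res.Minimal, res.Verbose, res.RemoveUnused)
        // Output: minimal=true verbose=false remove-unused=true
    }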
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/sharedopts_nonwin.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/sharedopts_nonwin.go
new file mode 100644
index 000000000..7f7c25187
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/sharedopts_nonwin.go
@@ -0,0 +1,19 @@
+//go:build !windows
+// +build !windows
+
+package generate
+
+import (
+ "github.com/go-swagger/go-swagger/generator"
+ "github.com/jessevdk/go-flags"
+)
+
+type sharedOptions struct {
+ sharedOptionsCommon
+ TemplatePlugin flags.Filename `long:"template-plugin" short:"p" description:"the template plugin to use" group:"shared"`
+}
+
+func (s sharedOptions) apply(opts *generator.GenOpts) {
+ opts.TemplatePlugin = string(s.TemplatePlugin)
+ s.sharedOptionsCommon.apply(opts)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/sharedopts_win.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/sharedopts_win.go
new file mode 100644
index 000000000..b2cf00f91
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/sharedopts_win.go
@@ -0,0 +1,8 @@
+//go:build windows
+// +build windows
+
+package generate
+
+type sharedOptions struct {
+ sharedOptionsCommon
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec.go
new file mode 100644
index 000000000..3e16789b6
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec.go
@@ -0,0 +1,125 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generate
+
+import (
+ "encoding/json"
+ "fmt"
+ "os"
+ "strings"
+
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/go-swagger/go-swagger/scan"
+ "github.com/jessevdk/go-flags"
+ "gopkg.in/yaml.v3"
+)
+
+// SpecFile command to generate a swagger spec from a go application
+type SpecFile struct {
+ BasePath string `long:"base-path" short:"b" description:"the base path to use" default:"."`
+ BuildTags string `long:"tags" short:"t" description:"build tags" default:""`
+ ScanModels bool `long:"scan-models" short:"m" description:"includes models that were annotated with 'swagger:model'"`
+ Compact bool `long:"compact" description:"when present, doesn't prettify the json"`
+ Output flags.Filename `long:"output" short:"o" description:"the file to write to"`
+ Input flags.Filename `long:"input" short:"i" description:"an input swagger file with which to merge"`
+ Include []string `long:"include" short:"c" description:"include packages matching pattern"`
+ Exclude []string `long:"exclude" short:"x" description:"exclude packages matching pattern"`
+ IncludeTags []string `long:"include-tag" short:"" description:"include routes having specified tags (can be specified many times)"`
+ ExcludeTags []string `long:"exclude-tag" short:"" description:"exclude routes having specified tags (can be specified many times)"`
+}
+
+// Execute runs this command
+func (s *SpecFile) Execute(args []string) error {
+ input, err := loadSpec(string(s.Input))
+ if err != nil {
+ return err
+ }
+
+ var opts scan.Opts
+ opts.BasePath = s.BasePath
+ opts.Input = input
+ opts.ScanModels = s.ScanModels
+ opts.BuildTags = s.BuildTags
+ opts.Include = s.Include
+ opts.Exclude = s.Exclude
+ opts.IncludeTags = s.IncludeTags
+ opts.ExcludeTags = s.ExcludeTags
+ swspec, err := scan.Application(opts)
+ if err != nil {
+ return err
+ }
+
+ return writeToFile(swspec, !s.Compact, string(s.Output))
+}
+
+func loadSpec(input string) (*spec.Swagger, error) {
+ if fi, err := os.Stat(input); err == nil {
+ if fi.IsDir() {
+ return nil, fmt.Errorf("expected %q to be a file not a directory", input)
+ }
+ sp, err := loads.Spec(input)
+ if err != nil {
+ return nil, err
+ }
+ return sp.Spec(), nil
+ }
+ return nil, nil
+}
+
+func writeToFile(swspec *spec.Swagger, pretty bool, output string) error {
+ var b []byte
+ var err error
+
+ if strings.HasSuffix(output, "yml") || strings.HasSuffix(output, "yaml") {
+ b, err = marshalToYAMLFormat(swspec)
+ } else {
+ b, err = marshalToJSONFormat(swspec, pretty)
+ }
+
+ if err != nil {
+ return err
+ }
+
+ if output == "" {
+ fmt.Println(string(b))
+ return nil
+ }
+ return os.WriteFile(output, b, 0644)
+}
+
+func marshalToJSONFormat(swspec *spec.Swagger, pretty bool) ([]byte, error) {
+ if pretty {
+ return json.MarshalIndent(swspec, "", " ")
+ }
+ return json.Marshal(swspec)
+}
+
+func marshalToYAMLFormat(swspec *spec.Swagger) ([]byte, error) {
+ b, err := json.Marshal(swspec)
+ if err != nil {
+ return nil, err
+ }
+
+ var jsonObj interface{}
+ if err := yaml.Unmarshal(b, &jsonObj); err != nil {
+ return nil, err
+ }
+
+ return yaml.Marshal(jsonObj)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec_go111.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec_go111.go
new file mode 100644
index 000000000..bf2295864
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/spec_go111.go
@@ -0,0 +1,119 @@
+//go:build go1.11
+// +build go1.11
+
+package generate
+
+import (
+ "encoding/json"
+ "fmt"
+ "os"
+ "strings"
+
+ "github.com/go-swagger/go-swagger/codescan"
+
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/jessevdk/go-flags"
+ "gopkg.in/yaml.v3"
+)
+
+// SpecFile command to generate a swagger spec from a go application
+type SpecFile struct {
+ WorkDir string `long:"work-dir" short:"w" description:"the base path to use" default:"."`
+ BuildTags string `long:"tags" short:"t" description:"build tags" default:""`
+ ScanModels bool `long:"scan-models" short:"m" description:"includes models that were annotated with 'swagger:model'"`
+ Compact bool `long:"compact" description:"when present, doesn't prettify the json"`
+ Output flags.Filename `long:"output" short:"o" description:"the file to write to"`
+ Input flags.Filename `long:"input" short:"i" description:"an input swagger file with which to merge"`
+ Include []string `long:"include" short:"c" description:"include packages matching pattern"`
+ Exclude []string `long:"exclude" short:"x" description:"exclude packages matching pattern"`
+ IncludeTags []string `long:"include-tag" short:"" description:"include routes having specified tags (can be specified many times)"`
+ ExcludeTags []string `long:"exclude-tag" short:"" description:"exclude routes having specified tags (can be specified many times)"`
+ ExcludeDeps bool `long:"exclude-deps" short:"" description:"exclude all dependencies of project"`
+}
+
+// Execute runs this command
+func (s *SpecFile) Execute(args []string) error {
+ if len(args) == 0 { // by default consider all the paths under the working directory
+ args = []string{"./..."}
+ }
+
+ input, err := loadSpec(string(s.Input))
+ if err != nil {
+ return err
+ }
+
+ var opts codescan.Options
+ opts.Packages = args
+ opts.WorkDir = s.WorkDir
+ opts.InputSpec = input
+ opts.ScanModels = s.ScanModels
+ opts.BuildTags = s.BuildTags
+ opts.Include = s.Include
+ opts.Exclude = s.Exclude
+ opts.IncludeTags = s.IncludeTags
+ opts.ExcludeTags = s.ExcludeTags
+ opts.ExcludeDeps = s.ExcludeDeps
+ swspec, err := codescan.Run(&opts)
+ if err != nil {
+ return err
+ }
+
+ return writeToFile(swspec, !s.Compact, string(s.Output))
+}
+
+func loadSpec(input string) (*spec.Swagger, error) {
+ if fi, err := os.Stat(input); err == nil {
+ if fi.IsDir() {
+ return nil, fmt.Errorf("expected %q to be a file not a directory", input)
+ }
+ sp, err := loads.Spec(input)
+ if err != nil {
+ return nil, err
+ }
+ return sp.Spec(), nil
+ }
+ return nil, nil
+}
+
+func writeToFile(swspec *spec.Swagger, pretty bool, output string) error {
+ var b []byte
+ var err error
+
+ if strings.HasSuffix(output, "yml") || strings.HasSuffix(output, "yaml") {
+ b, err = marshalToYAMLFormat(swspec)
+ } else {
+ b, err = marshalToJSONFormat(swspec, pretty)
+ }
+
+ if err != nil {
+ return err
+ }
+
+ if output == "" {
+ fmt.Println(string(b))
+ return nil
+ }
+ return os.WriteFile(output, b, 0644) // #nosec
+}
+
+func marshalToJSONFormat(swspec *spec.Swagger, pretty bool) ([]byte, error) {
+ if pretty {
+ return json.MarshalIndent(swspec, "", " ")
+ }
+ return json.Marshal(swspec)
+}
+
+func marshalToYAMLFormat(swspec *spec.Swagger) ([]byte, error) {
+ b, err := json.Marshal(swspec)
+ if err != nil {
+ return nil, err
+ }
+
+ var jsonObj interface{}
+ if err := yaml.Unmarshal(b, &jsonObj); err != nil {
+ return nil, err
+ }
+
+ return yaml.Marshal(jsonObj)
+}
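The command is a thin wrapper around the codescan package. A minimal sketch that scans the current module and prints the resulting spec, assuming it is run from the module root:

    package main

    import (
        "encoding/json"
        "log"
        "os"

        "github.com/go-swagger/go-swagger/codescan"
    )

    func main() {
        opts := codescan.Options{
            Packages:   []string{"./..."}, // same default as the command
            WorkDir:    ".",
            ScanModels: true, // like --scan-models
        }
        swspec, err := codescan.Run(&opts)
        if err != nil {
            log.Fatal(err)
        }
        enc := json.NewEncoder(os.Stdout)
        enc.SetIndent("", "  ")
        if err := enc.Encode(swspec); err != nil {
            log.Fatal(err)
        }
    }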
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/support.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/support.go
new file mode 100644
index 000000000..9e52f428c
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/generate/support.go
@@ -0,0 +1,67 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generate
+
+import (
+ "log"
+
+ "github.com/go-swagger/go-swagger/generator"
+)
+
+// Support generates the supporting files
+type Support struct {
+ WithShared
+ WithModels
+ WithOperations
+
+ clientOptions
+ serverOptions
+ schemeOptions
+ mediaOptions
+
+ Name string `long:"name" short:"A" description:"the name of the application, defaults to a mangled value of info.title"`
+}
+
+func (s *Support) apply(opts *generator.GenOpts) {
+ s.Shared.apply(opts)
+ s.Models.apply(opts)
+ s.Operations.apply(opts)
+ s.clientOptions.apply(opts)
+ s.serverOptions.apply(opts)
+ s.schemeOptions.apply(opts)
+ s.mediaOptions.apply(opts)
+}
+
+func (s *Support) generate(opts *generator.GenOpts) error {
+ return generator.GenerateSupport(s.Name, s.Models.Models, s.Operations.Operations, opts)
+}
+
+func (s Support) log(rp string) {
+
+ log.Println(`Generation completed!
+
+For this generation to compile you need to have some packages in go.mod:
+
+ * github.com/go-openapi/runtime
+ * github.com/asaskevich/govalidator
+ * github.com/jessevdk/go-flags
+
+You can get these now with: go mod tidy`)
+}
+
+// Execute generates the supporting files
+func (s *Support) Execute(args []string) error {
+ return createSwagger(s)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd.go
new file mode 100644
index 000000000..7a992f2b7
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd.go
@@ -0,0 +1,13 @@
+package commands
+
+import "github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd"
+
+// InitCmd is a command namespace for initializing things like a swagger spec.
+type InitCmd struct {
+ Model *initcmd.Spec `command:"spec"`
+}
+
+// Execute provides a default empty implementation
+func (i *InitCmd) Execute(args []string) error {
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd/spec.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd/spec.go
new file mode 100644
index 000000000..c540dc5b4
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd/spec.go
@@ -0,0 +1,111 @@
+package initcmd
+
+import (
+ "encoding/json"
+ "fmt"
+ "log"
+ "os"
+ "path/filepath"
+
+ "gopkg.in/yaml.v3"
+
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+// Spec a command struct for initializing a new swagger application.
+type Spec struct {
+ Format string `long:"format" description:"the format for the spec document" default:"yaml" choice:"yaml" choice:"json"`
+ Title string `long:"title" description:"the title of the API"`
+ Description string `long:"description" description:"the description of the API"`
+ Version string `long:"version" description:"the version of the API" default:"0.1.0"`
+ Terms string `long:"terms" description:"the terms of services"`
+ Consumes []string `long:"consumes" description:"add a content type to the global consumes definitions, can repeat" default:"application/json"`
+ Produces []string `long:"produces" description:"add a content type to the global produces definitions, can repeat" default:"application/json"`
+ Schemes []string `long:"scheme" description:"add a scheme to the global schemes definition, can repeat" default:"http"`
+ Contact struct {
+ Name string `long:"contact.name" description:"name of the primary contact for the API"`
+ URL string `long:"contact.url" description:"url of the primary contact for the API"`
+ Email string `long:"contact.email" description:"email of the primary contact for the API"`
+ }
+ License struct {
+ Name string `long:"license.name" description:"name of the license for the API"`
+ URL string `long:"license.url" description:"url of the license for the API"`
+ }
+}
+
+// Execute this command
+func (s *Spec) Execute(args []string) error {
+ targetPath := "."
+ if len(args) > 0 {
+ targetPath = args[0]
+ }
+ realPath, err := filepath.Abs(targetPath)
+ if err != nil {
+ return err
+ }
+ var file *os.File
+ switch s.Format {
+ case "json":
+ file, err = os.Create(filepath.Join(realPath, "swagger.json"))
+ if err != nil {
+ return err
+ }
+ case "yaml", "yml":
+ file, err = os.Create(filepath.Join(realPath, "swagger.yml"))
+ if err != nil {
+ return err
+ }
+ default:
+ return fmt.Errorf("invalid format: %s", s.Format)
+ }
+ defer file.Close()
+ log.Println("creating specification document in", filepath.Join(targetPath, file.Name()))
+
+ var doc spec.Swagger
+ info := new(spec.Info)
+ doc.Info = info
+
+ doc.Swagger = "2.0"
+ doc.Paths = new(spec.Paths)
+ doc.Definitions = make(spec.Definitions)
+
+ info.Title = s.Title
+ if info.Title == "" {
+ info.Title = swag.ToHumanNameTitle(filepath.Base(realPath))
+ }
+ info.Description = s.Description
+ info.Version = s.Version
+ info.TermsOfService = s.Terms
+ if s.Contact.Name != "" || s.Contact.Email != "" || s.Contact.URL != "" {
+ var contact spec.ContactInfo
+ contact.Name = s.Contact.Name
+ contact.Email = s.Contact.Email
+ contact.URL = s.Contact.URL
+ info.Contact = &contact
+ }
+ if s.License.Name != "" || s.License.URL != "" {
+ var license spec.License
+ license.Name = s.License.Name
+ license.URL = s.License.URL
+ info.License = &license
+ }
+
+ doc.Consumes = append(doc.Consumes, s.Consumes...)
+ doc.Produces = append(doc.Produces, s.Produces...)
+ doc.Schemes = append(doc.Schemes, s.Schemes...)
+
+ if s.Format == "json" {
+ enc := json.NewEncoder(file)
+ return enc.Encode(doc)
+ }
+
+ b, err := yaml.Marshal(swag.ToDynamicJSON(doc))
+ if err != nil {
+ return err
+ }
+ if _, err := file.Write(b); err != nil {
+ return err
+ }
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/mixin.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/mixin.go
new file mode 100644
index 000000000..79e26c440
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/mixin.go
@@ -0,0 +1,117 @@
+package commands
+
+import (
+ "errors"
+ "io"
+ "log"
+ "os"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ flags "github.com/jessevdk/go-flags"
+
+ "github.com/go-swagger/go-swagger/generator"
+)
+
+const (
+ // Output messages
+ nothingToDo = "nothing to do. Need some swagger files to merge.\nUSAGE: swagger mixin [-c <expected#Collisions>] <primary-swagger-file> <mixin-swagger-file...>"
+ ignoreConflictsAndCollisionsSpecified = "both the ignore-conflicts flag and an expected collision count were specified. These have conflicting meanings, so please specify only one"
+)
+
+// MixinSpec holds command line flag definitions specific to the mixin
+// command. The flags are defined using struct field tags with the
+// "github.com/jessevdk/go-flags" format.
+type MixinSpec struct {
+ ExpectedCollisionCount uint `short:"c" description:"expected number of rejected mixin paths, defs, etc. due to existing keys. Exits non-zero if the actual count does not match."`
+ Compact bool `long:"compact" description:"applies to JSON formatted specs. When present, doesn't prettify the json"`
+ Output flags.Filename `long:"output" short:"o" description:"the file to write to"`
+ KeepSpecOrder bool `long:"keep-spec-order" description:"Keep schema properties order identical to spec file"`
+ Format string `long:"format" description:"the format for the spec document" default:"json" choice:"yaml" choice:"json"`
+ IgnoreConflicts bool `long:"ignore-conflicts" description:"Ignore conflict"`
+}
+
+// Execute runs the mixin command which merges Swagger 2.0 specs into
+// one spec
+//
+// Use cases include adding independently versioned metadata APIs to
+// application APIs for microservices.
+//
+// Typically, multiple APIs to the same service instance are not a
+// problem for client generation, as you can create more than one
+// client to the service from the same calling process (one for each
+// API). However, merging specs can improve the clarity of client code
+// by producing a single client for a given service instead of several.
+//
+// Server skeleton generation, i.e. generating the model & marshaling
+// code, HTTP server instance etc. from Swagger, becomes easier with a
+// merged spec for some tools & target-languages. Server code
+// generation tools that natively support hosting multiple specs in
+// one server process will not need this tool.
+func (c *MixinSpec) Execute(args []string) error {
+
+ if len(args) < 2 {
+ return errors.New(nothingToDo)
+ }
+ if c.IgnoreConflicts && c.ExpectedCollisionCount != 0 {
+ return errors.New(ignoreConflictsAndCollisionsSpecified)
+ }
+
+ log.Printf("args[0] = %v\n", args[0])
+ log.Printf("args[1:] = %v\n", args[1:])
+ collisions, err := c.MixinFiles(args[0], args[1:], os.Stdout)
+
+ for _, warn := range collisions {
+ log.Println(warn)
+ }
+
+ if err != nil {
+ return err
+ }
+
+ if c.IgnoreConflicts {
+ return nil
+ }
+ if len(collisions) != int(c.ExpectedCollisionCount) {
+ if len(collisions) != 0 {
+ // use bash $? to get actual # collisions
+ // (but has to be non-zero)
+ os.Exit(len(collisions))
+ }
+ os.Exit(254)
+ }
+ return nil
+}
+
+// MixinFiles is a convenience function for Mixin that reads the given
+// swagger files, adds the mixins to primary, calls
+// FixEmptyResponseDescriptions on the primary, and writes the primary
+// with mixins to the given writer in JSON. Returns the warning
+// messages for collisions that occurred during mixin process and any
+// error.
+func (c *MixinSpec) MixinFiles(primaryFile string, mixinFiles []string, w io.Writer) ([]string, error) {
+
+ primaryDoc, err := loads.Spec(primaryFile)
+ if err != nil {
+ return nil, err
+ }
+ primary := primaryDoc.Spec()
+
+ var mixins []*spec.Swagger
+ for _, mixinFile := range mixinFiles {
+ if c.KeepSpecOrder {
+ mixinFile = generator.WithAutoXOrder(mixinFile)
+ }
+ mixin, lerr := loads.Spec(mixinFile)
+ if lerr != nil {
+ return nil, lerr
+ }
+ mixins = append(mixins, mixin.Spec())
+ }
+
+ collisions := analysis.Mixin(primary, mixins...)
+ analysis.FixEmptyResponseDescriptions(primary)
+
+ return collisions, writeToFile(primary, !c.Compact, c.Format, string(c.Output))
+}
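MixinFiles is itself a thin layer over go-openapi/analysis. A minimal sketch of the core merge, assuming two local spec files (primary.yaml and metadata.yaml are placeholder names):

    package main

    import (
        "log"

        "github.com/go-openapi/analysis"
        "github.com/go-openapi/loads"
    )

    func main() {
        primaryDoc, err := loads.Spec("primary.yaml")
        if err != nil {
            log.Fatal(err)
        }
        primary := primaryDoc.Spec()

        mixinDoc, err := loads.Spec("metadata.yaml")
        if err != nil {
            log.Fatal(err)
        }

        // Keys already present in primary are kept; each rejected path or
        // definition is reported as a collision warning, not overwritten.
        collisions := analysis.Mixin(primary, mixinDoc.Spec())
        for _, warn := range collisions {
            log.Println(warn)
        }
        analysis.FixEmptyResponseDescriptions(primary)
    }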
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/serve.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/serve.go
new file mode 100644
index 000000000..aeea4cedd
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/serve.go
@@ -0,0 +1,117 @@
+package commands
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log"
+ "net"
+ "net/http"
+ "path"
+ "strconv"
+
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/runtime/middleware"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+ "github.com/gorilla/handlers"
+ "github.com/toqueteos/webbrowser"
+)
+
+// ServeCmd to serve a swagger spec with docs ui
+type ServeCmd struct {
+ BasePath string `long:"base-path" description:"the base path to serve the spec and UI at"`
+ Flavor string `short:"F" long:"flavor" description:"the flavor of docs, can be swagger or redoc" default:"redoc" choice:"redoc" choice:"swagger"`
+ DocURL string `long:"doc-url" description:"override the url which takes a url query param to render the doc ui"`
+ NoOpen bool `long:"no-open" description:"when present won't open the browser to show the url"`
+ NoUI bool `long:"no-ui" description:"when present, only the swagger spec will be served"`
+ Flatten bool `long:"flatten" description:"when present, flatten the swagger spec before serving it"`
+ Port int `long:"port" short:"p" description:"the port to serve this site" env:"PORT"`
+ Host string `long:"host" description:"the interface to serve this site, defaults to 0.0.0.0" default:"0.0.0.0" env:"HOST"`
+ Path string `long:"path" description:"the uri path at which the docs will be served" default:"docs"`
+}
+
+// Execute the serve command
+func (s *ServeCmd) Execute(args []string) error {
+ if len(args) == 0 {
+ return errors.New("specify the spec to serve as argument to the serve command")
+ }
+
+ specDoc, err := loads.Spec(args[0])
+ if err != nil {
+ return err
+ }
+
+ if s.Flatten {
+ specDoc, err = specDoc.Expanded(&spec.ExpandOptions{
+ SkipSchemas: false,
+ ContinueOnError: true,
+ AbsoluteCircularRef: true,
+ })
+
+ if err != nil {
+ return err
+ }
+ }
+
+ b, err := json.MarshalIndent(specDoc.Spec(), "", " ")
+ if err != nil {
+ return err
+ }
+
+ basePath := s.BasePath
+ if basePath == "" {
+ basePath = "/"
+ }
+
+ listener, err := net.Listen("tcp4", net.JoinHostPort(s.Host, strconv.Itoa(s.Port)))
+ if err != nil {
+ return err
+ }
+ sh, sp, err := swag.SplitHostPort(listener.Addr().String())
+ if err != nil {
+ return err
+ }
+ if sh == "0.0.0.0" {
+ sh = "localhost"
+ }
+
+ visit := s.DocURL
+ handler := http.NotFoundHandler()
+ if !s.NoUI {
+ if s.Flavor == "redoc" {
+ handler = middleware.Redoc(middleware.RedocOpts{
+ BasePath: basePath,
+ SpecURL: path.Join(basePath, "swagger.json"),
+ Path: s.Path,
+ }, handler)
+ visit = fmt.Sprintf("http://%s:%d%s", sh, sp, path.Join(basePath, "docs"))
+ } else if visit != "" || s.Flavor == "swagger" {
+ handler = middleware.SwaggerUI(middleware.SwaggerUIOpts{
+ BasePath: basePath,
+ SpecURL: path.Join(basePath, "swagger.json"),
+ Path: s.Path,
+ }, handler)
+ visit = fmt.Sprintf("http://%s:%d%s", sh, sp, path.Join(basePath, s.Path))
+ }
+ }
+
+ handler = handlers.CORS()(middleware.Spec(basePath, b, handler))
+ errFuture := make(chan error)
+ go func() {
+ docServer := new(http.Server)
+ docServer.SetKeepAlivesEnabled(true)
+ docServer.Handler = handler
+
+ errFuture <- docServer.Serve(listener)
+ }()
+
+ if !s.NoOpen && !s.NoUI {
+ err := webbrowser.Open(visit)
+ if err != nil {
+ return err
+ }
+ }
+ log.Println("serving docs at", visit)
+ return <-errFuture
+}
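Stripped of its flag handling, the serving pipeline is a short middleware chain. A minimal sketch, assuming a local swagger.yaml and a fixed localhost port:

    package main

    import (
        "encoding/json"
        "log"
        "net/http"

        "github.com/go-openapi/loads"
        "github.com/go-openapi/runtime/middleware"
    )

    func main() {
        doc, err := loads.Spec("swagger.yaml") // assumed spec path
        if err != nil {
            log.Fatal(err)
        }
        b, err := json.MarshalIndent(doc.Spec(), "", "  ")
        if err != nil {
            log.Fatal(err)
        }

        // middleware.Spec serves the raw document at /swagger.json;
        // middleware.Redoc renders the documentation UI for it at /docs.
        handler := middleware.Redoc(middleware.RedocOpts{
            BasePath: "/",
            SpecURL:  "/swagger.json",
            Path:     "docs",
        }, http.NotFoundHandler())
        handler = middleware.Spec("/", b, handler)

        log.Fatal(http.ListenAndServe("localhost:8080", handler))
    }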
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/validate.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/validate.go
new file mode 100644
index 000000000..220c8b853
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/validate.go
@@ -0,0 +1,83 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package commands
+
+import (
+ "errors"
+ "fmt"
+ "log"
+
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/validate"
+)
+
+const (
+ // Output messages
+ missingArgMsg = "the validate command requires the swagger document url to be specified"
+ validSpecMsg = "\nThe swagger spec at %q is valid against swagger specification %s\n"
+ invalidSpecMsg = "\nThe swagger spec at %q is invalid against swagger specification %s.\nSee errors below:\n"
+ warningSpecMsg = "\nThe swagger spec at %q contains some valid but possibly unwanted constructs."
+)
+
+// ValidateSpec is a command that validates a swagger document
+// against the swagger specification
+type ValidateSpec struct {
+ // SchemaURL string `long:"schema" description:"The schema url to use" default:"http://swagger.io/v2/schema.json"`
+ SkipWarnings bool `long:"skip-warnings" description:"when present will not show warnings upon validation"`
+ StopOnError bool `long:"stop-on-error" description:"when present will not continue validation after critical errors are found"`
+}
+
+// Execute validates the spec
+func (c *ValidateSpec) Execute(args []string) error {
+ if len(args) == 0 {
+ return errors.New(missingArgMsg)
+ }
+
+ swaggerDoc := args[0]
+
+ specDoc, err := loads.Spec(swaggerDoc)
+ if err != nil {
+ return err
+ }
+
+ // Attempts to report about all errors
+ validate.SetContinueOnErrors(!c.StopOnError)
+
+ v := validate.NewSpecValidator(specDoc.Schema(), strfmt.Default)
+ result, _ := v.Validate(specDoc) // returns fully detailed result with errors and warnings
+
+ if result.IsValid() {
+ log.Printf(validSpecMsg, swaggerDoc, specDoc.Version())
+ }
+ if result.HasWarnings() {
+ log.Printf(warningSpecMsg, swaggerDoc)
+ if !c.SkipWarnings {
+ log.Printf("See warnings below:\n")
+ for _, desc := range result.Warnings {
+ log.Printf("- WARNING: %s\n", desc.Error())
+ }
+ }
+ }
+ if result.HasErrors() {
+ str := fmt.Sprintf(invalidSpecMsg, swaggerDoc, specDoc.Version())
+ for _, desc := range result.Errors {
+ str += fmt.Sprintf("- %s\n", desc.Error())
+ }
+ return errors.New(str)
+ }
+
+ return nil
+}
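The same validation is available as a library call. A minimal sketch, assuming a local swagger.yaml:

    package main

    import (
        "log"

        "github.com/go-openapi/loads"
        "github.com/go-openapi/strfmt"
        "github.com/go-openapi/validate"
    )

    func main() {
        specDoc, err := loads.Spec("swagger.yaml") // assumed document path
        if err != nil {
            log.Fatal(err)
        }

        validate.SetContinueOnErrors(true) // report all errors, not just the first
        v := validate.NewSpecValidator(specDoc.Schema(), strfmt.Default)
        result, _ := v.Validate(specDoc)

        for _, w := range result.Warnings {
            log.Println("WARNING:", w)
        }
        for _, e := range result.Errors {
            log.Println("ERROR:", e)
        }
    }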
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/version.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/version.go
new file mode 100644
index 000000000..9a860653b
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/commands/version.go
@@ -0,0 +1,37 @@
+package commands
+
+import (
+ "fmt"
+ "runtime/debug"
+)
+
+var (
+ // Version for the swagger command
+ Version string
+ // Commit for the swagger command
+ Commit string
+)
+
+// PrintVersion the command
+type PrintVersion struct {
+}
+
+// Execute this command
+func (p *PrintVersion) Execute(args []string) error {
+ if Version == "" {
+ if info, available := debug.ReadBuildInfo(); available && info.Main.Version != "(devel)" {
+ // built from source, with module (e.g. go get)
+ fmt.Println("version:", info.Main.Version)
+ fmt.Println("commit:", fmt.Sprintf("(unknown, mod sum: %q)", info.Main.Sum))
+ return nil
+ }
+ // built from source, local repo
+ fmt.Println("dev")
+ return nil
+ }
+ // released version
+ fmt.Println("version:", Version)
+ fmt.Println("commit:", Commit)
+
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/cmd/swagger/swagger.go b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/swagger.go
new file mode 100644
index 000000000..dfc89ba2e
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/cmd/swagger/swagger.go
@@ -0,0 +1,143 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+ "io"
+ "log"
+ "os"
+
+ "github.com/go-swagger/go-swagger/cmd/swagger/commands"
+ flags "github.com/jessevdk/go-flags"
+)
+
+var opts struct {
+ // General options applicable to all commands
+ Quiet func() `long:"quiet" short:"q" description:"silence logs"`
+ LogFile func(string) `long:"log-output" description:"redirect logs to file" value-name:"LOG-FILE"`
+ // Version bool `long:"version" short:"v" description:"print the version of the command"`
+}
+
+func main() {
+ // TODO: reactivate 'defer catch all' once product is stable
+ // Recovering from internal panics
+ // Stack may be printed in Debug mode
+ // Need import "runtime/debug".
+ // defer func() {
+ // r := recover()
+ // if r != nil {
+ // log.Printf("Fatal error:", r)
+ // if Debug {
+ // debug.PrintStack()
+ // }
+ // os.Exit(1)
+ // }
+ // }()
+
+ parser := flags.NewParser(&opts, flags.Default)
+ parser.ShortDescription = "helps you keep your API well described"
+ parser.LongDescription = `
+Swagger tries to support you as best as possible when building APIs.
+
+It aims to represent the contract of your API with a language-agnostic description of your application in JSON or YAML.
+`
+ _, err := parser.AddCommand("validate", "validate the swagger document", "validate the provided swagger document against a swagger spec", &commands.ValidateSpec{})
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ _, err = parser.AddCommand("init", "initialize a spec document", "initialize a swagger spec document", &commands.InitCmd{})
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ _, err = parser.AddCommand("version", "print the version", "print the version of the swagger command", &commands.PrintVersion{})
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ _, err = parser.AddCommand("serve", "serve spec and docs", "serve a spec and swagger or redoc documentation ui", &commands.ServeCmd{})
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ _, err = parser.AddCommand("expand", "expand $ref fields in a swagger spec", "expands the $refs in a swagger document to inline schemas", &commands.ExpandSpec{})
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ _, err = parser.AddCommand("flatten", "flattens a swagger document", "expand the remote references in a spec and move inline schemas to definitions, after flattening there are no complex inlined anymore", &commands.FlattenSpec{})
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ _, err = parser.AddCommand("mixin", "merge swagger documents", "merge additional specs into first/primary spec by copying their paths and definitions", &commands.MixinSpec{})
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ _, err = parser.AddCommand("diff", "diff swagger documents", "diff specs showing which changes will break existing clients", &commands.DiffCommand{})
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ genpar, err := parser.AddCommand("generate", "generate go code", "generate go code for the swagger spec file", &commands.Generate{})
+ if err != nil {
+ log.Fatalln(err)
+ }
+ for _, cmd := range genpar.Commands() {
+ switch cmd.Name {
+ case "spec":
+ cmd.ShortDescription = "generate a swagger spec document from a go application"
+ cmd.LongDescription = cmd.ShortDescription
+ case "client":
+ cmd.ShortDescription = "generate all the files for a client library"
+ cmd.LongDescription = cmd.ShortDescription
+ case "server":
+ cmd.ShortDescription = "generate all the files for a server application"
+ cmd.LongDescription = cmd.ShortDescription
+ case "model":
+ cmd.ShortDescription = "generate one or more models from the swagger spec"
+ cmd.LongDescription = cmd.ShortDescription
+ case "support":
+ cmd.ShortDescription = "generate supporting files like the main function and the api builder"
+ cmd.LongDescription = cmd.ShortDescription
+ case "operation":
+ cmd.ShortDescription = "generate one or more server operations from the swagger spec"
+ cmd.LongDescription = cmd.ShortDescription
+ case "markdown":
+ cmd.ShortDescription = "generate a markdown representation from the swagger spec"
+ cmd.LongDescription = cmd.ShortDescription
+ case "cli":
+ cmd.ShortDescription = "generate a command line client tool from the swagger spec"
+ cmd.LongDescription = cmd.ShortDescription
+ }
+ }
+
+ opts.Quiet = func() {
+ log.SetOutput(io.Discard)
+ }
+ opts.LogFile = func(logfile string) {
+ f, err := os.OpenFile(logfile, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0666)
+ if err != nil {
+ log.Fatalf("cannot write to file %s: %v", logfile, err)
+ }
+ log.SetOutput(f)
+ }
+
+ if _, err := parser.Parse(); err != nil {
+ os.Exit(1)
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/README.md b/vendor/github.com/go-swagger/go-swagger/codescan/README.md
new file mode 100644
index 000000000..7468cda5b
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/README.md
@@ -0,0 +1,3 @@
+# codescan
+
+Version of the Go source parser with support for Go modules, from go1.11 onwards.
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/application.go b/vendor/github.com/go-swagger/go-swagger/codescan/application.go
new file mode 100644
index 000000000..952d9fb1f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/application.go
@@ -0,0 +1,674 @@
+package codescan
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "log"
+ "os"
+ "strings"
+
+ "github.com/go-openapi/swag"
+
+ "golang.org/x/tools/go/packages"
+
+ "github.com/go-openapi/spec"
+)
+
+const pkgLoadMode = packages.NeedName | packages.NeedFiles | packages.NeedImports | packages.NeedDeps | packages.NeedTypes | packages.NeedSyntax | packages.NeedTypesInfo
+
+func safeConvert(str string) bool {
+ b, err := swag.ConvertBool(str)
+ if err != nil {
+ return false
+ }
+ return b
+}
+
+// Debug is true when process is run with DEBUG=1 env var
+var Debug = safeConvert(os.Getenv("DEBUG"))
+
+type node uint32
+
+const (
+ metaNode node = 1 << iota
+ routeNode
+ operationNode
+ modelNode
+ parametersNode
+ responseNode
+)
+
+// Options for the scanner
+type Options struct {
+ Packages []string
+ InputSpec *spec.Swagger
+ ScanModels bool
+ WorkDir string
+ BuildTags string
+ ExcludeDeps bool
+ Include []string
+ Exclude []string
+ IncludeTags []string
+ ExcludeTags []string
+}
+
+type scanCtx struct {
+ pkgs []*packages.Package
+ app *typeIndex
+}
+
+func sliceToSet(names []string) map[string]bool {
+ result := make(map[string]bool)
+ for _, v := range names {
+ result[v] = true
+ }
+ return result
+}
+
+// Run the scanner to produce a spec with the options provided
+func Run(opts *Options) (*spec.Swagger, error) {
+ sc, err := newScanCtx(opts)
+ if err != nil {
+ return nil, err
+ }
+ sb := newSpecBuilder(opts.InputSpec, sc, opts.ScanModels)
+ return sb.Build()
+}
+
+func newScanCtx(opts *Options) (*scanCtx, error) {
+ cfg := &packages.Config{
+ Dir: opts.WorkDir,
+ Mode: pkgLoadMode,
+ Tests: false,
+ }
+ if opts.BuildTags != "" {
+ cfg.BuildFlags = []string{"-tags", opts.BuildTags}
+ }
+
+ pkgs, err := packages.Load(cfg, opts.Packages...)
+ if err != nil {
+ return nil, err
+ }
+
+ app, err := newTypeIndex(pkgs, opts.ExcludeDeps,
+ sliceToSet(opts.IncludeTags), sliceToSet(opts.ExcludeTags),
+ opts.Include, opts.Exclude)
+ if err != nil {
+ return nil, err
+ }
+
+ return &scanCtx{
+ pkgs: pkgs,
+ app: app,
+ }, nil
+}
+
+type entityDecl struct {
+ Comments *ast.CommentGroup
+ Type *types.Named
+ Ident *ast.Ident
+ Spec *ast.TypeSpec
+ File *ast.File
+ Pkg *packages.Package
+ hasModelAnnotation bool
+ hasResponseAnnotation bool
+ hasParameterAnnotation bool
+}
+
+func (d *entityDecl) Names() (name, goName string) {
+ goName = d.Ident.Name
+ name = goName
+ if d.Comments == nil {
+ return
+ }
+
+DECLS:
+ for _, cmt := range d.Comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxModelOverride.FindStringSubmatch(ln)
+ if len(matches) > 0 {
+ d.hasModelAnnotation = true
+ }
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ name = matches[1]
+ break DECLS
+ }
+ }
+ }
+ return
+}
+
+func (d *entityDecl) ResponseNames() (name, goName string) {
+ goName = d.Ident.Name
+ name = goName
+ if d.Comments == nil {
+ return
+ }
+
+DECLS:
+ for _, cmt := range d.Comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxResponseOverride.FindStringSubmatch(ln)
+ if len(matches) > 0 {
+ d.hasResponseAnnotation = true
+ }
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ name = matches[1]
+ break DECLS
+ }
+ }
+ }
+ return
+}
+
+func (d *entityDecl) OperationIDS() (result []string) {
+ if d == nil || d.Comments == nil {
+ return nil
+ }
+
+ for _, cmt := range d.Comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxParametersOverride.FindStringSubmatch(ln)
+ if len(matches) > 0 {
+ d.hasParameterAnnotation = true
+ }
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ for _, pt := range strings.Split(matches[1], " ") {
+ tr := strings.TrimSpace(pt)
+ if len(tr) > 0 {
+ result = append(result, tr)
+ }
+ }
+ }
+ }
+ }
+ return
+}
+
+func (d *entityDecl) HasModelAnnotation() bool {
+ if d.hasModelAnnotation {
+ return true
+ }
+ if d.Comments == nil {
+ return false
+ }
+ for _, cmt := range d.Comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxModelOverride.FindStringSubmatch(ln)
+ if len(matches) > 0 {
+ d.hasModelAnnotation = true
+ return true
+ }
+ }
+ }
+ return false
+}
+
+func (d *entityDecl) HasResponseAnnotation() bool {
+ if d.hasResponseAnnotation {
+ return true
+ }
+ if d.Comments == nil {
+ return false
+ }
+ for _, cmt := range d.Comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxResponseOverride.FindStringSubmatch(ln)
+ if len(matches) > 0 {
+ d.hasResponseAnnotation = true
+ return true
+ }
+ }
+ }
+ return false
+}
+
+func (d *entityDecl) HasParameterAnnotation() bool {
+ if d.hasParameterAnnotation {
+ return true
+ }
+ if d.Comments == nil {
+ return false
+ }
+ for _, cmt := range d.Comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxParametersOverride.FindStringSubmatch(ln)
+ if len(matches) > 0 {
+ d.hasParameterAnnotation = true
+ return true
+ }
+ }
+ }
+ return false
+}
+
+func (s *scanCtx) FindDecl(pkgPath, name string) (*entityDecl, bool) {
+ if pkg, ok := s.app.AllPackages[pkgPath]; ok {
+ for _, file := range pkg.Syntax {
+ for _, d := range file.Decls {
+ gd, ok := d.(*ast.GenDecl)
+ if !ok {
+ continue
+ }
+
+ for _, sp := range gd.Specs {
+ if ts, ok := sp.(*ast.TypeSpec); ok && ts.Name.Name == name {
+ def, ok := pkg.TypesInfo.Defs[ts.Name]
+ if !ok {
+ debugLog("couldn't find type info for %s", ts.Name)
+ continue
+ }
+ nt, isNamed := def.Type().(*types.Named)
+ if !isNamed {
+ debugLog("%s is not a named type but a %T", ts.Name, def.Type())
+ continue
+ }
+
+ comments := ts.Doc // type ( /* doc */ Foo struct{} )
+ if comments == nil {
+ comments = gd.Doc // /* doc */ type ( Foo struct{} )
+ }
+
+ decl := &entityDecl{
+ Comments: comments,
+ Type: nt,
+ Ident: ts.Name,
+ Spec: ts,
+ File: file,
+ Pkg: pkg,
+ }
+ return decl, true
+ }
+
+ }
+ }
+ }
+ }
+ return nil, false
+}
+
+func (s *scanCtx) FindModel(pkgPath, name string) (*entityDecl, bool) {
+ for _, cand := range s.app.Models {
+ ct := cand.Type.Obj()
+ if ct.Name() == name && ct.Pkg().Path() == pkgPath {
+ return cand, true
+ }
+ }
+ if decl, found := s.FindDecl(pkgPath, name); found {
+ s.app.ExtraModels[decl.Ident] = decl
+ return decl, true
+ }
+ return nil, false
+}
+
+func (s *scanCtx) PkgForPath(pkgPath string) (*packages.Package, bool) {
+ v, ok := s.app.AllPackages[pkgPath]
+ return v, ok
+}
+
+func (s *scanCtx) DeclForType(t types.Type) (*entityDecl, bool) {
+ switch tpe := t.(type) {
+ case *types.Pointer:
+ return s.DeclForType(tpe.Elem())
+ case *types.Named:
+ return s.FindDecl(tpe.Obj().Pkg().Path(), tpe.Obj().Name())
+
+ default:
+ log.Printf("unknown type to find the package for [%T]: %s", t, t.String())
+ return nil, false
+ }
+}
+
+func (s *scanCtx) PkgForType(t types.Type) (*packages.Package, bool) {
+ switch tpe := t.(type) {
+ // case *types.Basic:
+ // case *types.Struct:
+ // case *types.Pointer:
+ // case *types.Interface:
+ // case *types.Array:
+ // case *types.Slice:
+ // case *types.Map:
+ case *types.Named:
+ v, ok := s.app.AllPackages[tpe.Obj().Pkg().Path()]
+ return v, ok
+ default:
+ log.Printf("unknown type to find the package for [%T]: %s", t, t.String())
+ return nil, false
+ }
+}
+
+func (s *scanCtx) FindComments(pkg *packages.Package, name string) (*ast.CommentGroup, bool) {
+ for _, f := range pkg.Syntax {
+ for _, d := range f.Decls {
+ gd, ok := d.(*ast.GenDecl)
+ if !ok {
+ continue
+ }
+
+ for _, s := range gd.Specs {
+ if ts, ok := s.(*ast.TypeSpec); ok {
+ if ts.Name.Name == name {
+ return gd.Doc, true
+ }
+ }
+ }
+ }
+ }
+ return nil, false
+}
+
+func (s *scanCtx) FindEnumValues(pkg *packages.Package, enumName string) (list []interface{}, descList []string, _ bool) {
+ for _, f := range pkg.Syntax {
+ for _, d := range f.Decls {
+ gd, ok := d.(*ast.GenDecl)
+ if !ok {
+ continue
+ }
+
+ if gd.Tok != token.CONST {
+ continue
+ }
+
+ for _, s := range gd.Specs {
+ if vs, ok := s.(*ast.ValueSpec); ok {
+ if vsIdent, ok := vs.Type.(*ast.Ident); ok {
+ if vsIdent.Name == enumName {
+ if len(vs.Values) > 0 {
+ if bl, ok := vs.Values[0].(*ast.BasicLit); ok {
+ blValue := getEnumBasicLitValue(bl)
+ list = append(list, blValue)
+
+ // build the enum description
+ var (
+ desc = &strings.Builder{}
+ namesLen = len(vs.Names)
+ )
+ desc.WriteString(fmt.Sprintf("%v ", blValue))
+ for i, name := range vs.Names {
+ desc.WriteString(name.Name)
+ if i < namesLen-1 {
+ desc.WriteString(" ")
+ }
+ }
+ if vs.Doc != nil {
+ docListLen := len(vs.Doc.List)
+ if docListLen > 0 {
+ desc.WriteString(" ")
+ }
+ for i, doc := range vs.Doc.List {
+ if doc.Text != "" {
+ var text = strings.TrimPrefix(doc.Text, "//")
+ desc.WriteString(text)
+ if i < docListLen-1 {
+ desc.WriteString(" ")
+ }
+ }
+ }
+ }
+ descList = append(descList, desc.String())
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ return list, descList, true
+}
+
+func newTypeIndex(pkgs []*packages.Package,
+ excludeDeps bool, includeTags, excludeTags map[string]bool,
+ includePkgs, excludePkgs []string) (*typeIndex, error) {
+
+ ac := &typeIndex{
+ AllPackages: make(map[string]*packages.Package),
+ Models: make(map[*ast.Ident]*entityDecl),
+ ExtraModels: make(map[*ast.Ident]*entityDecl),
+ excludeDeps: excludeDeps,
+ includeTags: includeTags,
+ excludeTags: excludeTags,
+ includePkgs: includePkgs,
+ excludePkgs: excludePkgs,
+ }
+ if err := ac.build(pkgs); err != nil {
+ return nil, err
+ }
+ return ac, nil
+}
+
+type typeIndex struct {
+ AllPackages map[string]*packages.Package
+ Models map[*ast.Ident]*entityDecl
+ ExtraModels map[*ast.Ident]*entityDecl
+ Meta []metaSection
+ Routes []parsedPathContent
+ Operations []parsedPathContent
+ Parameters []*entityDecl
+ Responses []*entityDecl
+ excludeDeps bool
+ includeTags map[string]bool
+ excludeTags map[string]bool
+ includePkgs []string
+ excludePkgs []string
+}
+
+func (a *typeIndex) build(pkgs []*packages.Package) error {
+ for _, pkg := range pkgs {
+ if _, known := a.AllPackages[pkg.PkgPath]; known {
+ continue
+ }
+ a.AllPackages[pkg.PkgPath] = pkg
+ if err := a.processPackage(pkg); err != nil {
+ return err
+ }
+ if err := a.walkImports(pkg); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (a *typeIndex) processPackage(pkg *packages.Package) error {
+ if !shouldAcceptPkg(pkg.PkgPath, a.includePkgs, a.excludePkgs) {
+ debugLog("package %s is ignored due to rules", pkg.Name)
+ return nil
+ }
+
+ for _, file := range pkg.Syntax {
+ n, err := a.detectNodes(file)
+ if err != nil {
+ return err
+ }
+
+ if n&metaNode != 0 {
+ a.Meta = append(a.Meta, metaSection{Comments: file.Doc})
+ }
+
+ if n&operationNode != 0 {
+ for _, cmts := range file.Comments {
+ pp := parsePathAnnotation(rxOperation, cmts.List)
+ if pp.Method == "" {
+ continue // not a valid operation
+ }
+ if !shouldAcceptTag(pp.Tags, a.includeTags, a.excludeTags) {
+ debugLog("operation %s %s is ignored due to tag rules", pp.Method, pp.Path)
+ continue
+ }
+ a.Operations = append(a.Operations, pp)
+ }
+ }
+
+ if n&routeNode != 0 {
+ for _, cmts := range file.Comments {
+ pp := parsePathAnnotation(rxRoute, cmts.List)
+ if pp.Method == "" {
+ continue // not a valid operation
+ }
+ if !shouldAcceptTag(pp.Tags, a.includeTags, a.excludeTags) {
+ debugLog("operation %s %s is ignored due to tag rules", pp.Method, pp.Path)
+ continue
+ }
+ a.Routes = append(a.Routes, pp)
+ }
+ }
+
+ for _, dt := range file.Decls {
+ switch fd := dt.(type) {
+ case *ast.BadDecl:
+ continue
+ case *ast.FuncDecl:
+ if fd.Body == nil {
+ continue
+ }
+ for _, stmt := range fd.Body.List {
+ if dstm, ok := stmt.(*ast.DeclStmt); ok {
+ if gd, isGD := dstm.Decl.(*ast.GenDecl); isGD {
+ a.processDecl(pkg, file, n, gd)
+ }
+ }
+ }
+ case *ast.GenDecl:
+ a.processDecl(pkg, file, n, fd)
+ }
+ }
+ }
+ return nil
+}
+
+func (a *typeIndex) processDecl(pkg *packages.Package, file *ast.File, n node, gd *ast.GenDecl) {
+ for _, sp := range gd.Specs {
+ switch ts := sp.(type) {
+ case *ast.ValueSpec:
+ debugLog("saw value spec: %v", ts.Names)
+ return
+ case *ast.ImportSpec:
+ debugLog("saw import spec: %v", ts.Name)
+ return
+ case *ast.TypeSpec:
+ def, ok := pkg.TypesInfo.Defs[ts.Name]
+ if !ok {
+ debugLog("couldn't find type info for %s", ts.Name)
+ continue
+ }
+ nt, isNamed := def.Type().(*types.Named)
+ if !isNamed {
+ debugLog("%s is not a named type but a %T", ts.Name, def.Type())
+ continue
+ }
+
+ comments := ts.Doc // type ( /* doc */ Foo struct{} )
+ if comments == nil {
+ comments = gd.Doc // /* doc */ type ( Foo struct{} )
+ }
+
+ decl := &entityDecl{
+ Comments: comments,
+ Type: nt,
+ Ident: ts.Name,
+ Spec: ts,
+ File: file,
+ Pkg: pkg,
+ }
+ key := ts.Name
+ if n&modelNode != 0 && decl.HasModelAnnotation() {
+ a.Models[key] = decl
+ }
+ if n&parametersNode != 0 && decl.HasParameterAnnotation() {
+ a.Parameters = append(a.Parameters, decl)
+ }
+ if n&responseNode != 0 && decl.HasResponseAnnotation() {
+ a.Responses = append(a.Responses, decl)
+ }
+ }
+ }
+}
+
+func (a *typeIndex) walkImports(pkg *packages.Package) error {
+ if a.excludeDeps {
+ return nil
+ }
+ for _, v := range pkg.Imports {
+ if _, known := a.AllPackages[v.PkgPath]; known {
+ continue
+ }
+
+ a.AllPackages[v.PkgPath] = v
+ if err := a.processPackage(v); err != nil {
+ return err
+ }
+ if err := a.walkImports(v); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
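+// detectNodes reports, as a bitmask, which swagger annotation kinds (meta,
+// route, operation, model, parameters, response) occur in the file's
+// comments; a comment group annotated with more than one of
+// model/parameters/response yields an error.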
+func (a *typeIndex) detectNodes(file *ast.File) (node, error) {
+ var n node
+ for _, comments := range file.Comments {
+ var seenStruct string
+
+ for _, cline := range comments.List {
+ if cline == nil {
+ continue
+ }
+
+ matches := rxSwaggerAnnotation.FindStringSubmatch(cline.Text)
+ if len(matches) < 2 {
+ continue
+ }
+
+ switch matches[1] {
+ case "route":
+ n |= routeNode
+ case "operation":
+ n |= operationNode
+ case "model":
+ n |= modelNode
+ if seenStruct == "" || seenStruct == matches[1] {
+ seenStruct = matches[1]
+ } else {
+ return 0, fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s", seenStruct, matches[1], cline.Text)
+ }
+ case "meta":
+ n |= metaNode
+ case "parameters":
+ n |= parametersNode
+ if seenStruct == "" || seenStruct == matches[1] {
+ seenStruct = matches[1]
+ } else {
+ return 0, fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s", seenStruct, matches[1], cline.Text)
+ }
+ case "response":
+ n |= responseNode
+ if seenStruct == "" || seenStruct == matches[1] {
+ seenStruct = matches[1]
+ } else {
+ return 0, fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s", seenStruct, matches[1], cline.Text)
+ }
+ case "strfmt", "name", "discriminated", "file", "enum", "default", "alias", "type":
+ // TODO: perhaps collect these and pass along to avoid lookups later on
+ case "allOf":
+ case "ignore":
+ default:
+ return 0, fmt.Errorf("classifier: unknown swagger annotation %q", matches[1])
+ }
+ }
+ }
+ return n, nil
+}
+
+func debugLog(format string, args ...interface{}) {
+ if Debug {
+ log.Printf(format, args...)
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/doc.go b/vendor/github.com/go-swagger/go-swagger/codescan/doc.go
new file mode 100644
index 000000000..3d4c3539c
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/doc.go
@@ -0,0 +1,6 @@
+/*
+Package codescan provides a scanner for Go files that produces a swagger spec document.
+
+This package is intended for Go 1.11 onwards, and supports Go modules.
+*/
+package codescan
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/enum.go b/vendor/github.com/go-swagger/go-swagger/codescan/enum.go
new file mode 100644
index 000000000..bc1bb52e7
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/enum.go
@@ -0,0 +1,32 @@
+package codescan
+
+import (
+ "go/ast"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+func getEnumBasicLitValue(basicLit *ast.BasicLit) interface{} {
+ switch basicLit.Kind.String() {
+ case "INT":
+ if result, err := strconv.ParseInt(basicLit.Value, 10, 64); err == nil {
+ return result
+ }
+ case "FLOAT":
+ if result, err := strconv.ParseFloat(basicLit.Value, 64); err == nil {
+ return result
+ }
+ default:
+ return strings.Trim(basicLit.Value, "\"")
+ }
+ return nil
+}
+
+const extEnumDesc = "x-go-enum-desc"
+
+func getEnumDesc(extensions spec.Extensions) (desc string) {
+ desc, _ = extensions.GetString(extEnumDesc)
+ return
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/meta.go b/vendor/github.com/go-swagger/go-swagger/codescan/meta.go
new file mode 100644
index 000000000..20dbb7cb8
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/meta.go
@@ -0,0 +1,252 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package codescan
+
+import (
+ "encoding/json"
+ "fmt"
+ "go/ast"
+ "net/mail"
+ "regexp"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+type metaSection struct {
+ Comments *ast.CommentGroup
+}
+
+func metaTOSSetter(meta *spec.Info) func([]string) {
+ return func(lines []string) {
+ meta.TermsOfService = joinDropLast(lines)
+ }
+}
+
+func metaConsumesSetter(meta *spec.Swagger) func([]string) {
+ return func(consumes []string) { meta.Consumes = consumes }
+}
+
+func metaProducesSetter(meta *spec.Swagger) func([]string) {
+ return func(produces []string) { meta.Produces = produces }
+}
+
+func metaSchemeSetter(meta *spec.Swagger) func([]string) {
+ return func(schemes []string) { meta.Schemes = schemes }
+}
+
+func metaSecuritySetter(meta *spec.Swagger) func([]map[string][]string) {
+ return func(secDefs []map[string][]string) { meta.Security = secDefs }
+}
+
+func metaSecurityDefinitionsSetter(meta *spec.Swagger) func(json.RawMessage) error {
+ return func(jsonValue json.RawMessage) error {
+ var jsonData spec.SecurityDefinitions
+ err := json.Unmarshal(jsonValue, &jsonData)
+ if err != nil {
+ return err
+ }
+ meta.SecurityDefinitions = jsonData
+ return nil
+ }
+}
+
+func metaVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error {
+ return func(jsonValue json.RawMessage) error {
+ var jsonData spec.Extensions
+ err := json.Unmarshal(jsonValue, &jsonData)
+ if err != nil {
+ return err
+ }
+ for k := range jsonData {
+ if !rxAllowedExtensions.MatchString(k) {
+ return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k)
+ }
+ }
+ meta.Extensions = jsonData
+ return nil
+ }
+}
+
+func infoVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error {
+ return func(jsonValue json.RawMessage) error {
+ var jsonData spec.Extensions
+ err := json.Unmarshal(jsonValue, &jsonData)
+ if err != nil {
+ return err
+ }
+ for k := range jsonData {
+ if !rxAllowedExtensions.MatchString(k) {
+ return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k)
+ }
+ }
+ meta.Info.Extensions = jsonData
+ return nil
+ }
+}
+
+func newMetaParser(swspec *spec.Swagger) *sectionedParser {
+ sp := new(sectionedParser)
+ if swspec.Info == nil {
+ swspec.Info = new(spec.Info)
+ }
+ info := swspec.Info
+ sp.setTitle = func(lines []string) {
+ tosave := joinDropLast(lines)
+ if len(tosave) > 0 {
+ tosave = rxStripTitleComments.ReplaceAllString(tosave, "")
+ }
+ info.Title = tosave
+ }
+ sp.setDescription = func(lines []string) { info.Description = joinDropLast(lines) }
+ sp.taggers = []tagParser{
+ newMultiLineTagParser("TOS", newMultilineDropEmptyParser(rxTOS, metaTOSSetter(info)), false),
+ newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, metaConsumesSetter(swspec)), false),
+ newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, metaProducesSetter(swspec)), false),
+ newSingleLineTagParser("Schemes", newSetSchemes(metaSchemeSetter(swspec))),
+ newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, metaSecuritySetter(swspec)), false),
+ newMultiLineTagParser("SecurityDefinitions", newYamlParser(rxSecurity, metaSecurityDefinitionsSetter(swspec)), true),
+ newSingleLineTagParser("Version", &setMetaSingle{swspec, rxVersion, setInfoVersion}),
+ newSingleLineTagParser("Host", &setMetaSingle{swspec, rxHost, setSwaggerHost}),
+ newSingleLineTagParser("BasePath", &setMetaSingle{swspec, rxBasePath, setSwaggerBasePath}),
+ newSingleLineTagParser("Contact", &setMetaSingle{swspec, rxContact, setInfoContact}),
+ newSingleLineTagParser("License", &setMetaSingle{swspec, rxLicense, setInfoLicense}),
+ newMultiLineTagParser("YAMLInfoExtensionsBlock", newYamlParser(rxInfoExtensions, infoVendorExtensibleSetter(swspec)), true),
+ newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, metaVendorExtensibleSetter(swspec)), true),
+ }
+ return sp
+}
+
+type setMetaSingle struct {
+ spec *spec.Swagger
+ rx *regexp.Regexp
+ set func(spec *spec.Swagger, lines []string) error
+}
+
+func (s *setMetaSingle) Matches(line string) bool {
+ return s.rx.MatchString(line)
+}
+
+func (s *setMetaSingle) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := s.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ return s.set(s.spec, []string{matches[1]})
+ }
+ return nil
+}
+
+func setSwaggerHost(swspec *spec.Swagger, lines []string) error {
+ lns := lines
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ lns = []string{"localhost"}
+ }
+ swspec.Host = lns[0]
+ return nil
+}
+
+func setSwaggerBasePath(swspec *spec.Swagger, lines []string) error {
+ var ln string
+ if len(lines) > 0 {
+ ln = lines[0]
+ }
+ swspec.BasePath = ln
+ return nil
+}
+
+func setInfoVersion(swspec *spec.Swagger, lines []string) error {
+ if len(lines) == 0 {
+ return nil
+ }
+ info := safeInfo(swspec)
+ info.Version = strings.TrimSpace(lines[0])
+ return nil
+}
+
+func setInfoContact(swspec *spec.Swagger, lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ contact, err := parseContactInfo(lines[0])
+ if err != nil {
+ return err
+ }
+ info := safeInfo(swspec)
+ info.Contact = contact
+ return nil
+}
+
+func parseContactInfo(line string) (*spec.ContactInfo, error) {
+ nameEmail, url := splitURL(line)
+ var name, email string
+ if len(nameEmail) > 0 {
+ addr, err := mail.ParseAddress(nameEmail)
+ if err != nil {
+ return nil, err
+ }
+ name, email = addr.Name, addr.Address
+ }
+ return &spec.ContactInfo{
+ ContactInfoProps: spec.ContactInfoProps{
+ URL: url,
+ Name: name,
+ Email: email,
+ },
+ }, nil
+}
+
+func setInfoLicense(swspec *spec.Swagger, lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ info := safeInfo(swspec)
+ line := lines[0]
+ name, url := splitURL(line)
+ info.License = &spec.License{
+ LicenseProps: spec.LicenseProps{
+ Name: name,
+ URL: url,
+ },
+ }
+ return nil
+}
+
+func safeInfo(swspec *spec.Swagger) *spec.Info {
+ if swspec.Info == nil {
+ swspec.Info = new(spec.Info)
+ }
+ return swspec.Info
+}
+
+// httpFTPScheme matches http://, https://, ftp://, ftps://, ws://, wss://
+var httpFTPScheme = regexp.MustCompile("(?:(?:ht|f)tp|ws)s?://")
+
+func splitURL(line string) (notURL, url string) {
+ str := strings.TrimSpace(line)
+ parts := httpFTPScheme.FindStringIndex(str)
+ if len(parts) == 0 {
+ if len(str) > 0 {
+ notURL = str
+ }
+ return
+ }
+ if len(parts) > 0 {
+ notURL = strings.TrimSpace(str[:parts[0]])
+ url = strings.TrimSpace(str[parts[0]:])
+ }
+ return
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/operations.go b/vendor/github.com/go-swagger/go-swagger/codescan/operations.go
new file mode 100644
index 000000000..c6a194526
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/operations.go
@@ -0,0 +1,170 @@
+package codescan
+
+import (
+ "fmt"
+ "go/ast"
+ "regexp"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+type operationsBuilder struct {
+ ctx *scanCtx
+ path parsedPathContent
+ operations map[string]*spec.Operation
+}
+
+func (o *operationsBuilder) Build(tgt *spec.Paths) error {
+ pthObj := tgt.Paths[o.path.Path]
+
+ op := setPathOperation(
+ o.path.Method, o.path.ID,
+ &pthObj, o.operations[o.path.ID])
+
+ op.Tags = o.path.Tags
+
+ sp := new(yamlSpecScanner)
+ sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) }
+ sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) }
+
+ if err := sp.Parse(o.path.Remaining); err != nil {
+ return fmt.Errorf("operation (%s): %v", op.ID, err)
+ }
+ if err := sp.UnmarshalSpec(op.UnmarshalJSON); err != nil {
+ return fmt.Errorf("operation (%s): %v", op.ID, err)
+ }
+
+ if tgt.Paths == nil {
+ tgt.Paths = make(map[string]spec.PathItem)
+ }
+
+ tgt.Paths[o.path.Path] = pthObj
+ return nil
+}
+
+type parsedPathContent struct {
+ Method, Path, ID string
+ Tags []string
+ Remaining *ast.CommentGroup
+}
+
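+// parsePathAnnotation extracts the method, path, tags and operation id from
+// comment lines matching the given annotation regexp; subsequent comment
+// lines are collected into Remaining.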
+func parsePathAnnotation(annotation *regexp.Regexp, lines []*ast.Comment) (cnt parsedPathContent) {
+ var justMatched bool
+
+ for _, cmt := range lines {
+ txt := cmt.Text
+ for _, line := range strings.Split(txt, "\n") {
+ matches := annotation.FindStringSubmatch(line)
+ if len(matches) > 3 {
+ cnt.Method, cnt.Path, cnt.ID = matches[1], matches[2], matches[len(matches)-1]
+ cnt.Tags = rxSpace.Split(matches[3], -1)
+ if len(matches[3]) == 0 {
+ cnt.Tags = nil
+ }
+ justMatched = true
+ } else if cnt.Method != "" {
+ if cnt.Remaining == nil {
+ cnt.Remaining = new(ast.CommentGroup)
+ }
+ if !justMatched || strings.TrimSpace(rxStripComments.ReplaceAllString(line, "")) != "" {
+ cc := new(ast.Comment)
+ cc.Slash = cmt.Slash
+ cc.Text = line
+ cnt.Remaining.List = append(cnt.Remaining.List, cc)
+ justMatched = false
+ }
+ }
+ }
+ }
+
+ return
+}
+
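+// setPathOperation stores op in the path item's slot for the given method;
+// when that slot already holds an operation with the same id, the existing
+// operation is returned instead.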
+func setPathOperation(method, id string, pthObj *spec.PathItem, op *spec.Operation) *spec.Operation {
+ if op == nil {
+ op = new(spec.Operation)
+ op.ID = id
+ }
+
+ switch strings.ToUpper(method) {
+ case "GET":
+ if pthObj.Get != nil {
+ if id == pthObj.Get.ID {
+ op = pthObj.Get
+ } else {
+ pthObj.Get = op
+ }
+ } else {
+ pthObj.Get = op
+ }
+
+ case "POST":
+ if pthObj.Post != nil {
+ if id == pthObj.Post.ID {
+ op = pthObj.Post
+ } else {
+ pthObj.Post = op
+ }
+ } else {
+ pthObj.Post = op
+ }
+
+ case "PUT":
+ if pthObj.Put != nil {
+ if id == pthObj.Put.ID {
+ op = pthObj.Put
+ } else {
+ pthObj.Put = op
+ }
+ } else {
+ pthObj.Put = op
+ }
+
+ case "PATCH":
+ if pthObj.Patch != nil {
+ if id == pthObj.Patch.ID {
+ op = pthObj.Patch
+ } else {
+ pthObj.Patch = op
+ }
+ } else {
+ pthObj.Patch = op
+ }
+
+ case "HEAD":
+ if pthObj.Head != nil {
+ if id == pthObj.Head.ID {
+ op = pthObj.Head
+ } else {
+ pthObj.Head = op
+ }
+ } else {
+ pthObj.Head = op
+ }
+
+ case "DELETE":
+ if pthObj.Delete != nil {
+ if id == pthObj.Delete.ID {
+ op = pthObj.Delete
+ } else {
+ pthObj.Delete = op
+ }
+ } else {
+ pthObj.Delete = op
+ }
+
+ case "OPTIONS":
+ if pthObj.Options != nil {
+ if id == pthObj.Options.ID {
+ op = pthObj.Options
+ } else {
+ pthObj.Options = op
+ }
+ } else {
+ pthObj.Options = op
+ }
+ }
+
+ return op
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/parameters.go b/vendor/github.com/go-swagger/go-swagger/codescan/parameters.go
new file mode 100644
index 000000000..b00916825
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/parameters.go
@@ -0,0 +1,518 @@
+package codescan
+
+import (
+ "fmt"
+ "go/ast"
+ "go/types"
+ "strings"
+
+ "golang.org/x/tools/go/ast/astutil"
+
+ "github.com/pkg/errors"
+
+ "github.com/go-openapi/spec"
+)
+
+type paramTypable struct {
+ param *spec.Parameter
+}
+
+func (pt paramTypable) Level() int { return 0 }
+
+func (pt paramTypable) Typed(tpe, format string) {
+ pt.param.Typed(tpe, format)
+}
+
+func (pt paramTypable) SetRef(ref spec.Ref) {
+ pt.param.Ref = ref
+}
+
+func (pt paramTypable) Items() swaggerTypable {
+ bdt, schema := bodyTypable(pt.param.In, pt.param.Schema)
+ if bdt != nil {
+ pt.param.Schema = schema
+ return bdt
+ }
+
+ if pt.param.Items == nil {
+ pt.param.Items = new(spec.Items)
+ }
+ pt.param.Type = "array"
+ return itemsTypable{pt.param.Items, 1}
+}
+
+func (pt paramTypable) Schema() *spec.Schema {
+ if pt.param.In != "body" {
+ return nil
+ }
+ if pt.param.Schema == nil {
+ pt.param.Schema = new(spec.Schema)
+ }
+ return pt.param.Schema
+}
+
+func (pt paramTypable) AddExtension(key string, value interface{}) {
+ if pt.param.In == "body" {
+ pt.Schema().AddExtension(key, value)
+ } else {
+ pt.param.AddExtension(key, value)
+ }
+}
+
+func (pt paramTypable) WithEnum(values ...interface{}) {
+ pt.param.WithEnum(values...)
+}
+
+func (pt paramTypable) WithEnumDescription(desc string) {
+ if desc == "" {
+ return
+ }
+ pt.param.AddExtension(extEnumDesc, desc)
+}
+
+type itemsTypable struct {
+ items *spec.Items
+ level int
+}
+
+func (pt itemsTypable) Level() int { return pt.level }
+
+func (pt itemsTypable) Typed(tpe, format string) {
+ pt.items.Typed(tpe, format)
+}
+
+func (pt itemsTypable) SetRef(ref spec.Ref) {
+ pt.items.Ref = ref
+}
+
+func (pt itemsTypable) Schema() *spec.Schema {
+ return nil
+}
+
+func (pt itemsTypable) Items() swaggerTypable {
+ if pt.items.Items == nil {
+ pt.items.Items = new(spec.Items)
+ }
+ pt.items.Type = "array"
+ return itemsTypable{pt.items.Items, pt.level + 1}
+}
+
+func (pt itemsTypable) AddExtension(key string, value interface{}) {
+ pt.items.AddExtension(key, value)
+}
+
+func (pt itemsTypable) WithEnum(values ...interface{}) {
+ pt.items.WithEnum(values...)
+}
+
+func (pt itemsTypable) WithEnumDescription(_ string) {
+ // enum descriptions are only recorded at the parameter level, not on items
+}
+
+type paramValidations struct {
+ current *spec.Parameter
+}
+
+func (sv paramValidations) SetMaximum(val float64, exclusive bool) {
+ sv.current.Maximum = &val
+ sv.current.ExclusiveMaximum = exclusive
+}
+func (sv paramValidations) SetMinimum(val float64, exclusive bool) {
+ sv.current.Minimum = &val
+ sv.current.ExclusiveMinimum = exclusive
+}
+func (sv paramValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
+func (sv paramValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
+func (sv paramValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
+func (sv paramValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
+func (sv paramValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
+func (sv paramValidations) SetPattern(val string) { sv.current.Pattern = val }
+func (sv paramValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
+func (sv paramValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
+func (sv paramValidations) SetEnum(val string) {
+ sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
+}
+func (sv paramValidations) SetDefault(val interface{}) { sv.current.Default = val }
+func (sv paramValidations) SetExample(val interface{}) { sv.current.Example = val }
+
+type itemsValidations struct {
+ current *spec.Items
+}
+
+func (sv itemsValidations) SetMaximum(val float64, exclusive bool) {
+ sv.current.Maximum = &val
+ sv.current.ExclusiveMaximum = exclusive
+}
+func (sv itemsValidations) SetMinimum(val float64, exclusive bool) {
+ sv.current.Minimum = &val
+ sv.current.ExclusiveMinimum = exclusive
+}
+func (sv itemsValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
+func (sv itemsValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
+func (sv itemsValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
+func (sv itemsValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
+func (sv itemsValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
+func (sv itemsValidations) SetPattern(val string) { sv.current.Pattern = val }
+func (sv itemsValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
+func (sv itemsValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
+func (sv itemsValidations) SetEnum(val string) {
+ sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
+}
+func (sv itemsValidations) SetDefault(val interface{}) { sv.current.Default = val }
+func (sv itemsValidations) SetExample(val interface{}) { sv.current.Example = val }
+
+type parameterBuilder struct {
+ ctx *scanCtx
+ decl *entityDecl
+ postDecls []*entityDecl
+}
+
+func (p *parameterBuilder) Build(operations map[string]*spec.Operation) error {
+
+ // Check whether there is a swagger:parameters annotation followed by one or
+ // more words; these words are the ids of the operations this parameter
+ // struct applies to. Once the type name is found it is converted to a
+ // schema, by looking up the schema in the parameters dictionary that was
+ // passed into this parse method.
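+ //
+ // For example, a (hypothetical) declaration such as:
+ //
+ //   // swagger:parameters listWidgets createWidget
+ //   type widgetParams struct{ ... }
+ //
+ // contributes its fields as parameters of the operations with ids
+ // "listWidgets" and "createWidget".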
+ for _, opid := range p.decl.OperationIDS() {
+ operation, ok := operations[opid]
+ if !ok {
+ operation = new(spec.Operation)
+ operations[opid] = operation
+ operation.ID = opid
+ }
+ debugLog("building parameters for: %s", opid)
+
+ // analyze struct body for fields etc
+ // each exported struct field:
+ // * gets a type mapped to a go primitive
+ // * perhaps gets a format
+ // * has to document the validations that apply for the type and the field
+ // * when the struct field points to a model it becomes a ref: #/definitions/ModelName
+ // * comments that aren't tags are used as the description
+ if err := p.buildFromType(p.decl.Type, operation, make(map[string]spec.Parameter)); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (p *parameterBuilder) buildFromType(otpe types.Type, op *spec.Operation, seen map[string]spec.Parameter) error {
+ switch tpe := otpe.(type) {
+ case *types.Pointer:
+ return p.buildFromType(tpe.Elem(), op, seen)
+ case *types.Named:
+ o := tpe.Obj()
+ switch stpe := o.Type().Underlying().(type) {
+ case *types.Struct:
+ debugLog("build from type %s: %T", tpe.Obj().Name(), otpe)
+ if decl, found := p.ctx.DeclForType(o.Type()); found {
+ return p.buildFromStruct(decl, stpe, op, seen)
+ }
+ return p.buildFromStruct(p.decl, stpe, op, seen)
+ default:
+ return errors.Errorf("unhandled type (%T): %s", stpe, o.Type().Underlying().String())
+ }
+ default:
+ return errors.Errorf("unhandled type (%T): %s", otpe, tpe.String())
+ }
+}
+
+func (p *parameterBuilder) buildFromField(fld *types.Var, tpe types.Type, typable swaggerTypable, seen map[string]spec.Parameter) error {
+ debugLog("build from field %s: %T", fld.Name(), tpe)
+ switch ftpe := tpe.(type) {
+ case *types.Basic:
+ return swaggerSchemaForType(ftpe.Name(), typable)
+ case *types.Struct:
+ sb := schemaBuilder{
+ decl: p.decl,
+ ctx: p.ctx,
+ }
+ if err := sb.buildFromType(tpe, typable); err != nil {
+ return err
+ }
+ p.postDecls = append(p.postDecls, sb.postDecls...)
+ return nil
+ case *types.Pointer:
+ return p.buildFromField(fld, ftpe.Elem(), typable, seen)
+ case *types.Interface:
+ sb := schemaBuilder{
+ decl: p.decl,
+ ctx: p.ctx,
+ }
+ if err := sb.buildFromType(tpe, typable); err != nil {
+ return err
+ }
+ p.postDecls = append(p.postDecls, sb.postDecls...)
+ return nil
+ case *types.Array:
+ return p.buildFromField(fld, ftpe.Elem(), typable.Items(), seen)
+ case *types.Slice:
+ return p.buildFromField(fld, ftpe.Elem(), typable.Items(), seen)
+ case *types.Map:
+ schema := new(spec.Schema)
+ typable.Schema().Typed("object", "").AdditionalProperties = &spec.SchemaOrBool{
+ Schema: schema,
+ }
+ sb := schemaBuilder{
+ decl: p.decl,
+ ctx: p.ctx,
+ }
+ if err := sb.buildFromType(ftpe.Elem(), schemaTypable{schema, typable.Level() + 1}); err != nil {
+ return err
+ }
+ return nil
+ case *types.Named:
+ if decl, found := p.ctx.DeclForType(ftpe.Obj().Type()); found {
+ if decl.Type.Obj().Pkg().Path() == "time" && decl.Type.Obj().Name() == "Time" {
+ typable.Typed("string", "date-time")
+ return nil
+ }
+ if sfnm, isf := strfmtName(decl.Comments); isf {
+ typable.Typed("string", sfnm)
+ return nil
+ }
+ sb := &schemaBuilder{ctx: p.ctx, decl: decl}
+ sb.inferNames()
+ if err := sb.buildFromType(decl.Type, typable); err != nil {
+ return err
+ }
+ p.postDecls = append(p.postDecls, sb.postDecls...)
+ return nil
+ }
+ return errors.Errorf("unable to find package and source file for: %s", ftpe.String())
+ default:
+ return errors.Errorf("unknown type for %s: %T", fld.String(), fld.Type())
+ }
+}
+
+func spExtensionsSetter(ps *spec.Parameter) func(*spec.Extensions) {
+ return func(exts *spec.Extensions) {
+ for name, value := range *exts {
+ addExtension(&ps.VendorExtensible, name, value)
+ }
+ }
+}
+
+func (p *parameterBuilder) buildFromStruct(decl *entityDecl, tpe *types.Struct, op *spec.Operation, seen map[string]spec.Parameter) error {
+ if tpe.NumFields() == 0 {
+ return nil
+ }
+
+ var sequence []string
+
+ for i := 0; i < tpe.NumFields(); i++ {
+ fld := tpe.Field(i)
+
+ if fld.Embedded() {
+ if err := p.buildFromType(fld.Type(), op, seen); err != nil {
+ return err
+ }
+ continue
+ }
+
+ if !fld.Exported() {
+ debugLog("skipping field %s because it's not exported", fld.Name())
+ continue
+ }
+
+ tg := tpe.Tag(i)
+
+ var afld *ast.Field
+ ans, _ := astutil.PathEnclosingInterval(decl.File, fld.Pos(), fld.Pos())
+ for _, an := range ans {
+ at, valid := an.(*ast.Field)
+ if !valid {
+ continue
+ }
+
+ debugLog("field %s: %s(%T) [%q] ==> %s", fld.Name(), fld.Type().String(), fld.Type(), tg, at.Doc.Text())
+ afld = at
+ break
+ }
+
+ if afld == nil {
+ debugLog("can't find source associated with %s for %s", fld.String(), tpe.String())
+ continue
+ }
+
+ // if the field is annotated with swagger:ignore, ignore it
+ if ignored(afld.Doc) {
+ continue
+ }
+
+ name, ignore, _, err := parseJSONTag(afld)
+ if err != nil {
+ return err
+ }
+ if ignore {
+ continue
+ }
+
+ in := "query"
+ // scan for the param location first; this changes some behavior down the line
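+ // (e.g. a doc comment line such as "in: path" or "in: formData")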
+ if afld.Doc != nil {
+ for _, cmt := range afld.Doc.List {
+ for _, line := range strings.Split(cmt.Text, "\n") {
+ matches := rxIn.FindStringSubmatch(line)
+ if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 {
+ in = strings.TrimSpace(matches[1])
+ }
+ }
+ }
+ }
+
+ ps := seen[name]
+ ps.In = in
+ var pty swaggerTypable = paramTypable{&ps}
+ if in == "body" {
+ pty = schemaTypable{pty.Schema(), 0}
+ }
+ if in == "formData" && afld.Doc != nil && fileParam(afld.Doc) {
+ pty.Typed("file", "")
+ } else if err := p.buildFromField(fld, fld.Type(), pty, seen); err != nil {
+ return err
+ }
+
+ if strfmtName, ok := strfmtName(afld.Doc); ok {
+ ps.Typed("string", strfmtName)
+ ps.Ref = spec.Ref{}
+ ps.Items = nil
+ }
+
+ sp := new(sectionedParser)
+ sp.setDescription = func(lines []string) {
+ ps.Description = joinDropLast(lines)
+ enumDesc := getEnumDesc(ps.Extensions)
+ if enumDesc != "" {
+ ps.Description += "\n" + enumDesc
+ }
+ }
+ if ps.Ref.String() == "" {
+ sp.taggers = []tagParser{
+ newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}),
+ newSingleLineTagParser("maximum", &setMaximum{paramValidations{&ps}, rxf(rxMaximumFmt, "")}),
+ newSingleLineTagParser("minimum", &setMinimum{paramValidations{&ps}, rxf(rxMinimumFmt, "")}),
+ newSingleLineTagParser("multipleOf", &setMultipleOf{paramValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
+ newSingleLineTagParser("minLength", &setMinLength{paramValidations{&ps}, rxf(rxMinLengthFmt, "")}),
+ newSingleLineTagParser("maxLength", &setMaxLength{paramValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
+ newSingleLineTagParser("pattern", &setPattern{paramValidations{&ps}, rxf(rxPatternFmt, "")}),
+ newSingleLineTagParser("collectionFormat", &setCollectionFormat{paramValidations{&ps}, rxf(rxCollectionFormatFmt, "")}),
+ newSingleLineTagParser("minItems", &setMinItems{paramValidations{&ps}, rxf(rxMinItemsFmt, "")}),
+ newSingleLineTagParser("maxItems", &setMaxItems{paramValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
+ newSingleLineTagParser("unique", &setUnique{paramValidations{&ps}, rxf(rxUniqueFmt, "")}),
+ newSingleLineTagParser("enum", &setEnum{paramValidations{&ps}, rxf(rxEnumFmt, "")}),
+ newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, paramValidations{&ps}, rxf(rxDefaultFmt, "")}),
+ newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, paramValidations{&ps}, rxf(rxExampleFmt, "")}),
+ newSingleLineTagParser("required", &setRequiredParam{&ps}),
+ newMultiLineTagParser("Extensions", newSetExtensions(spExtensionsSetter(&ps)), true),
+ }
+
+ itemsTaggers := func(items *spec.Items, level int) []tagParser {
+ // the expression is 1-indexed, not 0-indexed
+ itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
+
+ return []tagParser{
+ newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
+ }
+ }
+
+ var parseArrayTypes func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error)
+ parseArrayTypes = func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) {
+ if items == nil {
+ return []tagParser{}, nil
+ }
+ switch iftpe := expr.(type) {
+ case *ast.ArrayType:
+ eleTaggers := itemsTaggers(items, level)
+ sp.taggers = append(eleTaggers, sp.taggers...)
+ otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ case *ast.SelectorExpr:
+ otherTaggers, err := parseArrayTypes(iftpe.Sel, items.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ case *ast.Ident:
+ taggers := []tagParser{}
+ if iftpe.Obj == nil {
+ taggers = itemsTaggers(items, level)
+ }
+ otherTaggers, err := parseArrayTypes(expr, items.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return append(taggers, otherTaggers...), nil
+ case *ast.StarExpr:
+ otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ default:
+ return nil, fmt.Errorf("unknown field element type for %q", name)
+ }
+ }
+
+ // check if this is a primitive; if so, parse the validations from the
+ // doc comments of the slice declaration.
+ if ftped, ok := afld.Type.(*ast.ArrayType); ok {
+ taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
+ if err != nil {
+ return err
+ }
+ sp.taggers = append(taggers, sp.taggers...)
+ }
+
+ } else {
+ sp.taggers = []tagParser{
+ newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}),
+ newSingleLineTagParser("required", &matchOnlyParam{&ps, rxRequired}),
+ newMultiLineTagParser("Extensions", newSetExtensions(spExtensionsSetter(&ps)), true),
+ }
+ }
+ if err := sp.Parse(afld.Doc); err != nil {
+ return err
+ }
+ if ps.In == "path" {
+ ps.Required = true
+ }
+
+ if ps.Name == "" {
+ ps.Name = name
+ }
+
+ if name != fld.Name() {
+ addExtension(&ps.VendorExtensible, "x-go-name", fld.Name())
+ }
+ seen[name] = ps
+ sequence = append(sequence, name)
+ }
+
+ for _, k := range sequence {
+ p := seen[k]
+ for i, v := range op.Parameters {
+ if v.Name == k {
+ op.Parameters = append(op.Parameters[:i], op.Parameters[i+1:]...)
+ break
+ }
+ }
+ op.Parameters = append(op.Parameters, p)
+ }
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/parser.go b/vendor/github.com/go-swagger/go-swagger/codescan/parser.go
new file mode 100644
index 000000000..9637e6c22
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/parser.go
@@ -0,0 +1,1667 @@
+package codescan
+
+import (
+ "encoding/json"
+ "fmt"
+ "go/ast"
+ "reflect"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/loads/fmts"
+ "github.com/go-openapi/spec"
+ "github.com/pkg/errors"
+ "gopkg.in/yaml.v3"
+)
+
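+// shouldAcceptTag accepts an operation when one of its tags appears in
+// includeTags; when includeTags is empty, it rejects operations carrying a
+// tag from excludeTags and accepts everything else.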
+func shouldAcceptTag(tags []string, includeTags map[string]bool, excludeTags map[string]bool) bool {
+ for _, tag := range tags {
+ if len(includeTags) > 0 {
+ if includeTags[tag] {
+ return true
+ }
+ } else if len(excludeTags) > 0 {
+ if excludeTags[tag] {
+ return false
+ }
+ }
+ }
+ return len(includeTags) == 0
+}
+
+func shouldAcceptPkg(path string, includePkgs, excludePkgs []string) bool {
+ if len(includePkgs) == 0 && len(excludePkgs) == 0 {
+ return true
+ }
+ for _, pkgName := range includePkgs {
+ matched, _ := regexp.MatchString(pkgName, path)
+ if matched {
+ return true
+ }
+ }
+ for _, pkgName := range excludePkgs {
+ matched, _ := regexp.MatchString(pkgName, path)
+ if matched {
+ return false
+ }
+ }
+ return len(includePkgs) == 0
+}
+
+// Many thanks go to https://github.com/yvasiyarov/swagger;
+// this is loosely based on that implementation, but for swagger 2.0.
+
+func joinDropLast(lines []string) string {
+ l := len(lines)
+ lns := lines
+ if l > 0 && len(strings.TrimSpace(lines[l-1])) == 0 {
+ lns = lines[:l-1]
+ }
+ return strings.Join(lns, "\n")
+}
+
+func removeEmptyLines(lines []string) (notEmpty []string) {
+ for _, l := range lines {
+ if len(strings.TrimSpace(l)) > 0 {
+ notEmpty = append(notEmpty, l)
+ }
+ }
+ return
+}
+
+func rxf(rxp, ar string) *regexp.Regexp {
+ return regexp.MustCompile(fmt.Sprintf(rxp, ar))
+}
+
+func allOfMember(comments *ast.CommentGroup) bool {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ if rxAllOf.MatchString(ln) {
+ return true
+ }
+ }
+ }
+ }
+ return false
+}
+
+func fileParam(comments *ast.CommentGroup) bool {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ if rxFileUpload.MatchString(ln) {
+ return true
+ }
+ }
+ }
+ }
+ return false
+}
+
+func strfmtName(comments *ast.CommentGroup) (string, bool) {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxStrFmt.FindStringSubmatch(ln)
+ if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 {
+ return strings.TrimSpace(matches[1]), true
+ }
+ }
+ }
+ }
+ return "", false
+}
+
+func ignored(comments *ast.CommentGroup) bool {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ if rxIgnoreOverride.MatchString(ln) {
+ return true
+ }
+ }
+ }
+ }
+ return false
+}
+
+func enumName(comments *ast.CommentGroup) (string, bool) {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxEnum.FindStringSubmatch(ln)
+ if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 {
+ return strings.TrimSpace(matches[1]), true
+ }
+ }
+ }
+ }
+ return "", false
+}
+
+func aliasParam(comments *ast.CommentGroup) bool {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ if rxAlias.MatchString(ln) {
+ return true
+ }
+ }
+ }
+ }
+ return false
+}
+
+func isAliasParam(prop swaggerTypable) bool {
+ var isParam bool
+ if param, ok := prop.(paramTypable); ok {
+ isParam = param.param.In == "query" ||
+ param.param.In == "path" ||
+ param.param.In == "formData"
+ }
+ return isParam
+}
+
+func defaultName(comments *ast.CommentGroup) (string, bool) {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxDefault.FindStringSubmatch(ln)
+ if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 {
+ return strings.TrimSpace(matches[1]), true
+ }
+ }
+ }
+ }
+ return "", false
+}
+
+func typeName(comments *ast.CommentGroup) (string, bool) {
+ var typ string
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxType.FindStringSubmatch(ln)
+ if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 {
+ typ = strings.TrimSpace(matches[1])
+ return typ, true
+ }
+ }
+ }
+ }
+ return "", false
+}
+
+type swaggerTypable interface {
+ Typed(string, string)
+ SetRef(spec.Ref)
+ Items() swaggerTypable
+ Schema() *spec.Schema
+ Level() int
+ AddExtension(key string, value interface{})
+ WithEnum(...interface{})
+ WithEnumDescription(desc string)
+}
+
+// Map all Go builtin types that have a JSON representation to Swagger/JSON types.
+// See https://golang.org/pkg/builtin/ and http://swagger.io/specification/
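+//
+// For example, swaggerSchemaForType("float32", prop) yields
+// prop.Typed("number", "float"), and "int" maps to ("integer", "int64").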
+func swaggerSchemaForType(typeName string, prop swaggerTypable) error {
+ switch typeName {
+ case "bool":
+ prop.Typed("boolean", "")
+ case "byte":
+ prop.Typed("integer", "uint8")
+ case "complex128", "complex64":
+ return fmt.Errorf("unsupported builtin %q (no JSON marshaller)", typeName)
+ case "error":
+ // TODO: error is often marshalled into a string but not always (e.g. errors package creates
+ // errors that are marshalled into an empty object), this could be handled the same way
+ // custom JSON marshallers are handled (in future)
+ prop.Typed("string", "")
+ case "float32":
+ prop.Typed("number", "float")
+ case "float64":
+ prop.Typed("number", "double")
+ case "int":
+ prop.Typed("integer", "int64")
+ case "int16":
+ prop.Typed("integer", "int16")
+ case "int32":
+ prop.Typed("integer", "int32")
+ case "int64":
+ prop.Typed("integer", "int64")
+ case "int8":
+ prop.Typed("integer", "int8")
+ case "rune":
+ prop.Typed("integer", "int32")
+ case "string":
+ prop.Typed("string", "")
+ case "uint":
+ prop.Typed("integer", "uint64")
+ case "uint16":
+ prop.Typed("integer", "uint16")
+ case "uint32":
+ prop.Typed("integer", "uint32")
+ case "uint64":
+ prop.Typed("integer", "uint64")
+ case "uint8":
+ prop.Typed("integer", "uint8")
+ case "uintptr":
+ prop.Typed("integer", "uint64")
+ case "object":
+ prop.Typed("object", "")
+ default:
+ return fmt.Errorf("unsupported type %q", typeName)
+ }
+ return nil
+}
+
+func newMultiLineTagParser(name string, parser valueParser, skipCleanUp bool) tagParser {
+ return tagParser{
+ Name: name,
+ MultiLine: true,
+ SkipCleanUp: skipCleanUp,
+ Parser: parser,
+ }
+}
+
+func newSingleLineTagParser(name string, parser valueParser) tagParser {
+ return tagParser{
+ Name: name,
+ MultiLine: false,
+ SkipCleanUp: false,
+ Parser: parser,
+ }
+}
+
+type tagParser struct {
+ Name string
+ MultiLine bool
+ SkipCleanUp bool
+ Lines []string
+ Parser valueParser
+}
+
+func (st *tagParser) Matches(line string) bool {
+ return st.Parser.Matches(line)
+}
+
+func (st *tagParser) Parse(lines []string) error {
+ return st.Parser.Parse(lines)
+}
+
+func newYamlParser(rx *regexp.Regexp, setter func(json.RawMessage) error) valueParser {
+ return &yamlParser{
+ set: setter,
+ rx: rx,
+ }
+}
+
+type yamlParser struct {
+ set func(json.RawMessage) error
+ rx *regexp.Regexp
+}
+
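+// Parse strips the YAML indentation from the comment lines, unmarshals the
+// result as YAML and hands the equivalent JSON to the setter.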
+func (y *yamlParser) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+
+ var uncommented []string
+ uncommented = append(uncommented, removeYamlIndent(lines)...)
+
+ yamlContent := strings.Join(uncommented, "\n")
+ var yamlValue interface{}
+ err := yaml.Unmarshal([]byte(yamlContent), &yamlValue)
+ if err != nil {
+ return err
+ }
+
+ var jsonValue json.RawMessage
+ jsonValue, err = fmts.YAMLToJSON(yamlValue)
+ if err != nil {
+ return err
+ }
+
+ return y.set(jsonValue)
+}
+
+func (y *yamlParser) Matches(line string) bool {
+ return y.rx.MatchString(line)
+}
+
+// aggregates lines in header until it sees `---`,
+// the beginning of a YAML spec
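+//
+// For example, given the comment lines that follow a route annotation
+// (hypothetical response name):
+//
+//   Lists widgets.
+//   ---
+//   responses:
+//     200: widgetsResponse
+//
+// "Lists widgets." is collected into header, while everything from `---`
+// onwards becomes the yamlSpec.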
+type yamlSpecScanner struct {
+ header []string
+ yamlSpec []string
+ setTitle func([]string)
+ setDescription func([]string)
+ workedOutTitle bool
+ title []string
+ skipHeader bool
+}
+
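+// cleanupScannerLines strips the comment markers matched by ur, keeps blocks
+// delimited by yamlBlock (when non-nil) after de-indenting them, and trims
+// leading and trailing empty lines from the result.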
+func cleanupScannerLines(lines []string, ur *regexp.Regexp, yamlBlock *regexp.Regexp) []string {
+ // bail early when there is nothing to parse
+ if len(lines) == 0 {
+ return lines
+ }
+ seenLine := -1
+ var lastContent int
+ var uncommented []string
+ var startBlock bool
+ var yamlLines []string
+ for i, v := range lines {
+ if yamlBlock != nil && yamlBlock.MatchString(v) && !startBlock {
+ startBlock = true
+ if seenLine < 0 {
+ seenLine = i
+ }
+ continue
+ }
+ if startBlock {
+ if yamlBlock != nil && yamlBlock.MatchString(v) {
+ startBlock = false
+ uncommented = append(uncommented, removeIndent(yamlLines)...)
+ continue
+ }
+ yamlLines = append(yamlLines, v)
+ if v != "" {
+ if seenLine < 0 {
+ seenLine = i
+ }
+ lastContent = i
+ }
+ continue
+ }
+ str := ur.ReplaceAllString(v, "")
+ uncommented = append(uncommented, str)
+ if str != "" {
+ if seenLine < 0 {
+ seenLine = i
+ }
+ lastContent = i
+ }
+ }
+
+ // fixes issue #50
+ if seenLine == -1 {
+ return nil
+ }
+ return uncommented[seenLine : lastContent+1]
+}
+
+func (sp *yamlSpecScanner) collectTitleDescription() {
+ if sp.workedOutTitle {
+ return
+ }
+ if sp.setTitle == nil {
+ sp.header = cleanupScannerLines(sp.header, rxUncommentHeaders, nil)
+ return
+ }
+
+ sp.workedOutTitle = true
+ sp.title, sp.header = collectScannerTitleDescription(sp.header)
+}
+
+func (sp *yamlSpecScanner) Title() []string {
+ sp.collectTitleDescription()
+ return sp.title
+}
+
+func (sp *yamlSpecScanner) Description() []string {
+ sp.collectTitleDescription()
+ return sp.header
+}
+
+func (sp *yamlSpecScanner) Parse(doc *ast.CommentGroup) error {
+ if doc == nil {
+ return nil
+ }
+ var startedYAMLSpec bool
+COMMENTS:
+ for _, c := range doc.List {
+ for _, line := range strings.Split(c.Text, "\n") {
+ if rxSwaggerAnnotation.MatchString(line) {
+ break COMMENTS // a new swagger: annotation terminates this parser
+ }
+
+ if !startedYAMLSpec {
+ if rxBeginYAMLSpec.MatchString(line) {
+ startedYAMLSpec = true
+ sp.yamlSpec = append(sp.yamlSpec, line)
+ continue
+ }
+
+ if !sp.skipHeader {
+ sp.header = append(sp.header, line)
+ }
+
+ // no YAML spec yet, moving on
+ continue
+ }
+
+ sp.yamlSpec = append(sp.yamlSpec, line)
+ }
+ }
+ if sp.setTitle != nil {
+ sp.setTitle(sp.Title())
+ }
+ if sp.setDescription != nil {
+ sp.setDescription(sp.Description())
+ }
+ return nil
+}
+
+func (sp *yamlSpecScanner) UnmarshalSpec(u func([]byte) error) (err error) {
+ specYaml := cleanupScannerLines(sp.yamlSpec, rxUncommentYAML, nil)
+ if len(specYaml) == 0 {
+ return errors.New("no spec available to unmarshal")
+ }
+
+ if !strings.Contains(specYaml[0], "---") {
+ return errors.New("yaml spec has to start with `---`")
+ }
+
+ // remove indentation
+ specYaml = removeIndent(specYaml)
+
+ // 1. parse yaml lines
+ yamlValue := make(map[interface{}]interface{})
+
+ yamlContent := strings.Join(specYaml, "\n")
+ err = yaml.Unmarshal([]byte(yamlContent), &yamlValue)
+ if err != nil {
+ return
+ }
+
+ // 2. convert to json
+ var jsonValue json.RawMessage
+ jsonValue, err = fmts.YAMLToJSON(yamlValue)
+ if err != nil {
+ return
+ }
+
+ // 3. unmarshal the json into an interface
+ var data []byte
+ data, err = jsonValue.MarshalJSON()
+ if err != nil {
+ return
+ }
+ err = u(data)
+ if err != nil {
+ return
+ }
+
+ // all parsed, returning...
+ sp.yamlSpec = nil // spec is now consumed, so let's erase the parsed lines
+ return
+}
+
+// removes indentation based on the first line
+func removeIndent(spec []string) []string {
+ loc := rxIndent.FindStringIndex(spec[0])
+ if loc[1] == 0 {
+ return spec
+ }
+ for i := range spec {
+ if len(spec[i]) >= loc[1] {
+ spec[i] = spec[i][loc[1]-1:]
+ start := rxNotIndent.FindStringIndex(spec[i])
+ if start[1] == 0 {
+ continue
+ }
+
+ spec[i] = strings.Replace(spec[i], "\t", " ", start[1])
+ }
+ }
+ return spec
+}
+
+// removes YAML indentation based on the first line
+func removeYamlIndent(spec []string) []string {
+ loc := rxIndent.FindStringIndex(spec[0])
+ if loc[1] == 0 {
+ return nil
+ }
+ var s []string
+ for i := range spec {
+ if len(spec[i]) >= loc[1] {
+ s = append(s, spec[i][loc[1]-1:])
+ }
+ }
+ return s
+}
+
+// aggregates lines in header until it sees a tag.
+type sectionedParser struct {
+ header []string
+ matched map[string]tagParser
+ annotation valueParser
+
+ seenTag bool
+ skipHeader bool
+ setTitle func([]string)
+ setDescription func([]string)
+ workedOutTitle bool
+ taggers []tagParser
+ currentTagger *tagParser
+ title []string
+ ignored bool
+}
+
+func (st *sectionedParser) collectTitleDescription() {
+ if st.workedOutTitle {
+ return
+ }
+ if st.setTitle == nil {
+ st.header = cleanupScannerLines(st.header, rxUncommentHeaders, nil)
+ return
+ }
+
+ st.workedOutTitle = true
+ st.title, st.header = collectScannerTitleDescription(st.header)
+}
+
+func (st *sectionedParser) Title() []string {
+ st.collectTitleDescription()
+ return st.title
+}
+
+func (st *sectionedParser) Description() []string {
+ st.collectTitleDescription()
+ return st.header
+}
+
+func (st *sectionedParser) Parse(doc *ast.CommentGroup) error {
+ if doc == nil {
+ return nil
+ }
+COMMENTS:
+ for _, c := range doc.List {
+ for _, line := range strings.Split(c.Text, "\n") {
+ if rxSwaggerAnnotation.MatchString(line) {
+ if rxIgnoreOverride.MatchString(line) {
+ st.ignored = true
+ break COMMENTS // an explicit ignore terminates this parser
+ }
+ if st.annotation == nil || !st.annotation.Matches(line) {
+ break COMMENTS // a new swagger: annotation terminates this parser
+ }
+
+ _ = st.annotation.Parse([]string{line})
+ if len(st.header) > 0 {
+ st.seenTag = true
+ }
+ continue
+ }
+
+ var matched bool
+ for _, tg := range st.taggers {
+ tagger := tg
+ if tagger.Matches(line) {
+ st.seenTag = true
+ st.currentTagger = &tagger
+ matched = true
+ break
+ }
+ }
+
+ if st.currentTagger == nil {
+ if !st.skipHeader && !st.seenTag {
+ st.header = append(st.header, line)
+ }
+ // didn't match a tag, moving on
+ continue
+ }
+
+ if st.currentTagger.MultiLine && matched {
+ // the first line of a multiline tagger doesn't count
+ continue
+ }
+
+ ts, ok := st.matched[st.currentTagger.Name]
+ if !ok {
+ ts = *st.currentTagger
+ }
+ ts.Lines = append(ts.Lines, line)
+ if st.matched == nil {
+ st.matched = make(map[string]tagParser)
+ }
+ st.matched[st.currentTagger.Name] = ts
+
+ if !st.currentTagger.MultiLine {
+ st.currentTagger = nil
+ }
+ }
+ }
+ if st.setTitle != nil {
+ st.setTitle(st.Title())
+ }
+ if st.setDescription != nil {
+ st.setDescription(st.Description())
+ }
+ for _, mt := range st.matched {
+ if !mt.SkipCleanUp {
+ mt.Lines = cleanupScannerLines(mt.Lines, rxUncommentHeaders, nil)
+ }
+ if err := mt.Parse(mt.Lines); err != nil {
+ return err
+ }
+ }
+ return nil
+}
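+
+// A minimal sketch of the intended wiring, mirroring how the builders in
+// this package use it (vb and decl are hypothetical placeholders):
+//
+//	sp := new(sectionedParser)
+//	sp.setDescription = func(lines []string) { desc = joinDropLast(lines) }
+//	sp.taggers = []tagParser{
+//		newSingleLineTagParser("pattern", &setPattern{vb, rxf(rxPatternFmt, "")}),
+//	}
+//	_ = sp.Parse(decl.Doc) // decl.Doc is an *ast.CommentGroup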
+
+type validationBuilder interface {
+ SetMaximum(float64, bool)
+ SetMinimum(float64, bool)
+ SetMultipleOf(float64)
+
+ SetMinItems(int64)
+ SetMaxItems(int64)
+
+ SetMinLength(int64)
+ SetMaxLength(int64)
+ SetPattern(string)
+
+ SetUnique(bool)
+ SetEnum(string)
+ SetDefault(interface{})
+ SetExample(interface{})
+}
+
+type valueParser interface {
+ Parse([]string) error
+ Matches(string) bool
+}
+
+type operationValidationBuilder interface {
+ validationBuilder
+ SetCollectionFormat(string)
+}
+
+type setMaximum struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMaximum) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 2 && len(matches[2]) > 0 {
+ max, err := strconv.ParseFloat(matches[2], 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMaximum(max, matches[1] == "<")
+ }
+ return nil
+}
+
+func (sm *setMaximum) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
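+
+// Annotation lines matched by rxMaximumFmt (see regexprs.go) look like the
+// hypothetical examples below; a leading "<" marks the bound as exclusive:
+//
+//	"maximum: <10.5" => builder.SetMaximum(10.5, true)
+//	"Max: 100"       => builder.SetMaximum(100, false)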
+
+type setMinimum struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMinimum) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+func (sm *setMinimum) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 2 && len(matches[2]) > 0 {
+ min, err := strconv.ParseFloat(matches[2], 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMinimum(min, matches[1] == ">")
+ }
+ return nil
+}
+
+type setMultipleOf struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMultipleOf) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+func (sm *setMultipleOf) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 2 && len(matches[1]) > 0 {
+ multipleOf, err := strconv.ParseFloat(matches[1], 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMultipleOf(multipleOf)
+ }
+ return nil
+}
+
+type setMaxItems struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMaxItems) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+func (sm *setMaxItems) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ maxItems, err := strconv.ParseInt(matches[1], 10, 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMaxItems(maxItems)
+ }
+ return nil
+}
+
+type setMinItems struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMinItems) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+func (sm *setMinItems) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ minItems, err := strconv.ParseInt(matches[1], 10, 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMinItems(minItems)
+ }
+ return nil
+}
+
+type setMaxLength struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMaxLength) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ maxLength, err := strconv.ParseInt(matches[1], 10, 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMaxLength(maxLength)
+ }
+ return nil
+}
+
+func (sm *setMaxLength) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+type setMinLength struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMinLength) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ minLength, err := strconv.ParseInt(matches[1], 10, 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMinLength(minLength)
+ }
+ return nil
+}
+
+func (sm *setMinLength) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+type setPattern struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setPattern) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ sm.builder.SetPattern(matches[1])
+ }
+ return nil
+}
+
+func (sm *setPattern) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+type setCollectionFormat struct {
+ builder operationValidationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setCollectionFormat) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ sm.builder.SetCollectionFormat(matches[1])
+ }
+ return nil
+}
+
+func (sm *setCollectionFormat) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+type setUnique struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (su *setUnique) Matches(line string) bool {
+ return su.rx.MatchString(line)
+}
+
+func (su *setUnique) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := su.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ req, err := strconv.ParseBool(matches[1])
+ if err != nil {
+ return err
+ }
+ su.builder.SetUnique(req)
+ }
+ return nil
+}
+
+type setEnum struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (se *setEnum) Matches(line string) bool {
+ return se.rx.MatchString(line)
+}
+
+func (se *setEnum) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := se.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ se.builder.SetEnum(matches[1])
+ }
+ return nil
+}
+
+func parseValueFromSchema(s string, schema *spec.SimpleSchema) (interface{}, error) {
+ if schema != nil {
+ switch strings.Trim(schema.TypeName(), "\"") {
+ case "integer", "int", "int64", "int32", "int16":
+ return strconv.Atoi(s)
+ case "bool", "boolean":
+ return strconv.ParseBool(s)
+ case "number", "float64", "float32":
+ return strconv.ParseFloat(s, 64)
+ case "object":
+ var obj map[string]interface{}
+ if err := json.Unmarshal([]byte(s), &obj); err != nil {
+ // If we can't parse it, just return the string.
+ return s, nil
+ }
+ return obj, nil
+ case "array":
+ var slice []interface{}
+ if err := json.Unmarshal([]byte(s), &slice); err != nil {
+ // If we can't parse it, just return the string.
+ return s, nil
+ }
+ return slice, nil
+ default:
+ return s, nil
+ }
+ } else {
+ return s, nil
+ }
+}
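+
+// Hypothetical conversions, to show the shape of the result:
+//
+//	parseValueFromSchema("42", &spec.SimpleSchema{Type: "integer"})   // => 42
+//	parseValueFromSchema("true", &spec.SimpleSchema{Type: "boolean"}) // => true
+//	parseValueFromSchema("[1,2]", &spec.SimpleSchema{Type: "array"})  // => []interface{}{1, 2} (numbers as float64)
+//	parseValueFromSchema("oops", nil)                                 // => "oops", unchanged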
+
+type setDefault struct {
+ scheme *spec.SimpleSchema
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sd *setDefault) Matches(line string) bool {
+ return sd.rx.MatchString(line)
+}
+
+func (sd *setDefault) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sd.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ d, err := parseValueFromSchema(matches[1], sd.scheme)
+ if err != nil {
+ return err
+ }
+ sd.builder.SetDefault(d)
+ }
+ return nil
+}
+
+type setExample struct {
+ scheme *spec.SimpleSchema
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (se *setExample) Matches(line string) bool {
+ return se.rx.MatchString(line)
+}
+
+func (se *setExample) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := se.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ d, err := parseValueFromSchema(matches[1], se.scheme)
+ if err != nil {
+ return err
+ }
+ se.builder.SetExample(d)
+ }
+ return nil
+}
+
+type matchOnlyParam struct {
+ tgt *spec.Parameter
+ rx *regexp.Regexp
+}
+
+func (mo *matchOnlyParam) Matches(line string) bool {
+ return mo.rx.MatchString(line)
+}
+
+func (mo *matchOnlyParam) Parse(_ []string) error {
+ return nil
+}
+
+type setRequiredParam struct {
+ tgt *spec.Parameter
+}
+
+func (su *setRequiredParam) Matches(line string) bool {
+ return rxRequired.MatchString(line)
+}
+
+func (su *setRequiredParam) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := rxRequired.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ req, err := strconv.ParseBool(matches[1])
+ if err != nil {
+ return err
+ }
+ su.tgt.Required = req
+ }
+ return nil
+}
+
+type setReadOnlySchema struct {
+ tgt *spec.Schema
+}
+
+func (su *setReadOnlySchema) Matches(line string) bool {
+ return rxReadOnly.MatchString(line)
+}
+
+func (su *setReadOnlySchema) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := rxReadOnly.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ req, err := strconv.ParseBool(matches[1])
+ if err != nil {
+ return err
+ }
+ su.tgt.ReadOnly = req
+ }
+ return nil
+}
+
+type setDeprecatedOp struct {
+ tgt *spec.Operation
+}
+
+func (su *setDeprecatedOp) Matches(line string) bool {
+ return rxDeprecated.MatchString(line)
+}
+
+func (su *setDeprecatedOp) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := rxDeprecated.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ req, err := strconv.ParseBool(matches[1])
+ if err != nil {
+ return err
+ }
+ su.tgt.Deprecated = req
+ }
+ return nil
+}
+
+type setDiscriminator struct {
+ schema *spec.Schema
+ field string
+}
+
+func (su *setDiscriminator) Matches(line string) bool {
+ return rxDiscriminator.MatchString(line)
+}
+
+func (su *setDiscriminator) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := rxDiscriminator.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ req, err := strconv.ParseBool(matches[1])
+ if err != nil {
+ return err
+ }
+ if req {
+ su.schema.Discriminator = su.field
+ } else if su.schema.Discriminator == su.field {
+ su.schema.Discriminator = ""
+ }
+ }
+ return nil
+}
+
+type setRequiredSchema struct {
+ schema *spec.Schema
+ field string
+}
+
+func (su *setRequiredSchema) Matches(line string) bool {
+ return rxRequired.MatchString(line)
+}
+
+func (su *setRequiredSchema) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := rxRequired.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ req, err := strconv.ParseBool(matches[1])
+ if err != nil {
+ return err
+ }
+ midx := -1
+ for i, nm := range su.schema.Required {
+ if nm == su.field {
+ midx = i
+ break
+ }
+ }
+ if req {
+ if midx < 0 {
+ su.schema.Required = append(su.schema.Required, su.field)
+ }
+ } else if midx >= 0 {
+ su.schema.Required = append(su.schema.Required[:midx], su.schema.Required[midx+1:]...)
+ }
+ }
+ return nil
+}
+
+func newMultilineDropEmptyParser(rx *regexp.Regexp, set func([]string)) *multiLineDropEmptyParser {
+ return &multiLineDropEmptyParser{
+ rx: rx,
+ set: set,
+ }
+}
+
+type multiLineDropEmptyParser struct {
+ set func([]string)
+ rx *regexp.Regexp
+}
+
+func (m *multiLineDropEmptyParser) Matches(line string) bool {
+ return m.rx.MatchString(line)
+}
+
+func (m *multiLineDropEmptyParser) Parse(lines []string) error {
+ m.set(removeEmptyLines(lines))
+ return nil
+}
+
+func newSetSchemes(set func([]string)) *setSchemes {
+ return &setSchemes{
+ set: set,
+ rx: rxSchemes,
+ }
+}
+
+type setSchemes struct {
+ set func([]string)
+ rx *regexp.Regexp
+}
+
+func (ss *setSchemes) Matches(line string) bool {
+ return ss.rx.MatchString(line)
+}
+
+func (ss *setSchemes) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := ss.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ sch := strings.Split(matches[1], ", ")
+
+ schemes := []string{}
+ for _, s := range sch {
+ ts := strings.TrimSpace(s)
+ if ts != "" {
+ schemes = append(schemes, ts)
+ }
+ }
+ ss.set(schemes)
+ }
+ return nil
+}
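+
+// For example, a (hypothetical) comment line such as
+//
+//	Schemes: http, https
+//
+// results in set([]string{"http", "https"}).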
+
+func newSetSecurity(rx *regexp.Regexp, setter func([]map[string][]string)) *setSecurity {
+ return &setSecurity{
+ set: setter,
+ rx: rx,
+ }
+}
+
+type setSecurity struct {
+ set func([]map[string][]string)
+ rx *regexp.Regexp
+}
+
+func (ss *setSecurity) Matches(line string) bool {
+ return ss.rx.MatchString(line)
+}
+
+func (ss *setSecurity) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+
+ var result []map[string][]string
+ for _, line := range lines {
+ kv := strings.SplitN(line, ":", 2)
+ scopes := []string{}
+ var key string
+
+ if len(kv) > 1 {
+ scs := strings.Split(kv[1], ",")
+ for _, scope := range scs {
+ tr := strings.TrimSpace(scope)
+ if tr != "" {
+ tr = strings.SplitAfter(tr, " ")[0]
+ scopes = append(scopes, strings.TrimSpace(tr))
+ }
+ }
+
+ key = strings.TrimSpace(kv[0])
+
+ result = append(result, map[string][]string{key: scopes})
+ }
+ }
+ ss.set(result)
+ return nil
+}
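+
+// A hypothetical security block and the value it produces:
+//
+//	Security:
+//	  api_key:
+//	  oauth: read, write
+//
+// => []map[string][]string{{"api_key": {}}, {"oauth": {"read", "write"}}}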
+
+func newSetResponses(definitions map[string]spec.Schema, responses map[string]spec.Response, setter func(*spec.Response, map[int]spec.Response)) *setOpResponses {
+ return &setOpResponses{
+ set: setter,
+ rx: rxResponses,
+ definitions: definitions,
+ responses: responses,
+ }
+}
+
+type setOpResponses struct {
+ set func(*spec.Response, map[int]spec.Response)
+ rx *regexp.Regexp
+ definitions map[string]spec.Schema
+ responses map[string]spec.Response
+}
+
+func (ss *setOpResponses) Matches(line string) bool {
+ return ss.rx.MatchString(line)
+}
+
+// ResponseTag is used when a response should point to a defined swagger:response.
+const ResponseTag = "response"
+
+// BodyTag is used when a response should point to a model/schema.
+const BodyTag = "body"
+
+// DescriptionTag is used when a response provides a description of the response.
+const DescriptionTag = "description"
+
+func parseTags(line string) (modelOrResponse string, arrays int, isDefinitionRef bool, description string, err error) {
+ tags := strings.Split(line, " ")
+ parsedModelOrResponse := false
+
+ for i, tagAndValue := range tags {
+ tagValList := strings.SplitN(tagAndValue, ":", 2)
+ var tag, value string
+ if len(tagValList) > 1 {
+ tag = tagValList[0]
+ value = tagValList[1]
+ } else {
+ // TODO: print a warning and, in the long term, stop supporting untagged values.
+ // Add a default tag if none is supplied
+ if i == 0 {
+ tag = ResponseTag
+ } else {
+ tag = DescriptionTag
+ }
+ value = tagValList[0]
+ }
+
+ foundModelOrResponse := false
+ if !parsedModelOrResponse {
+ if tag == BodyTag {
+ foundModelOrResponse = true
+ isDefinitionRef = true
+ }
+ if tag == ResponseTag {
+ foundModelOrResponse = true
+ isDefinitionRef = false
+ }
+ }
+ if foundModelOrResponse {
+ // Read the model or response tag
+ parsedModelOrResponse = true
+ // Check for nested arrays
+ arrays = 0
+ for strings.HasPrefix(value, "[]") {
+ arrays++
+ value = value[2:]
+ }
+ // What's left over is the model name
+ modelOrResponse = value
+ } else {
+ foundDescription := false
+ if tag == DescriptionTag {
+ foundDescription = true
+ }
+ if foundDescription {
+ // Descriptions are special: they consume the rest of the line
+ descriptionWords := []string{value}
+ if i < len(tags)-1 {
+ descriptionWords = append(descriptionWords, tags[i+1:]...)
+ }
+ description = strings.Join(descriptionWords, " ")
+ break
+ }
+ if tag == ResponseTag || tag == BodyTag || tag == DescriptionTag {
+ err = fmt.Errorf("valid tag %s, but not in a valid position", tag)
+ } else {
+ err = fmt.Errorf("invalid tag: %s", tag)
+ }
+ // return error
+ return
+ }
+ }
+
+ // TODO: maybe return an error if !parsedModelOrResponse
+ return
+}
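+
+// For instance, parseTags on the hypothetical line
+//
+//	"body:[]User the users found"
+//
+// returns modelOrResponse="User", arrays=1, isDefinitionRef=true and
+// description="the users found".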
+
+func (ss *setOpResponses) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+
+ var def *spec.Response
+ var scr map[int]spec.Response
+
+ for _, line := range lines {
+ kv := strings.SplitN(line, ":", 2)
+ var key, value string
+
+ if len(kv) > 1 {
+ key = strings.TrimSpace(kv[0])
+ if key == "" {
+ // this must be some weird empty line
+ continue
+ }
+ value = strings.TrimSpace(kv[1])
+ if value == "" {
+ var resp spec.Response
+ if strings.EqualFold("default", key) {
+ if def == nil {
+ def = &resp
+ }
+ } else {
+ if sc, err := strconv.Atoi(key); err == nil {
+ if scr == nil {
+ scr = make(map[int]spec.Response)
+ }
+ scr[sc] = resp
+ }
+ }
+ continue
+ }
+ refTarget, arrays, isDefinitionRef, description, err := parseTags(value)
+ if err != nil {
+ return err
+ }
+ // if the target is not a known response but is a known definition, treat it as a definition ref
+ if _, ok := ss.responses[refTarget]; !ok {
+ if _, ok := ss.definitions[refTarget]; ok {
+ isDefinitionRef = true
+ }
+ }
+
+ var ref spec.Ref
+ if isDefinitionRef {
+ if description == "" {
+ description = refTarget
+ }
+ ref, err = spec.NewRef("#/definitions/" + refTarget)
+ } else {
+ ref, err = spec.NewRef("#/responses/" + refTarget)
+ }
+ if err != nil {
+ return err
+ }
+
+ // the description should be set in any case
+ resp := spec.Response{ResponseProps: spec.ResponseProps{Description: description}}
+
+ if isDefinitionRef {
+ resp.Schema = new(spec.Schema)
+ resp.Description = description
+ if arrays == 0 {
+ resp.Schema.Ref = ref
+ } else {
+ cs := resp.Schema
+ for i := 0; i < arrays; i++ {
+ cs.Typed("array", "")
+ cs.Items = new(spec.SchemaOrArray)
+ cs.Items.Schema = new(spec.Schema)
+ cs = cs.Items.Schema
+ }
+ cs.Ref = ref
+ }
+ // the ref may be empty when only a description tag is used
+ } else if len(refTarget) > 0 {
+ resp.Ref = ref
+ }
+
+ if strings.EqualFold("default", key) {
+ if def == nil {
+ def = &resp
+ }
+ } else {
+ if sc, err := strconv.Atoi(key); err == nil {
+ if scr == nil {
+ scr = make(map[int]spec.Response)
+ }
+ scr[sc] = resp
+ }
+ }
+ }
+ }
+ ss.set(def, scr)
+ return nil
+}
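+
+// A hypothetical responses block in a swagger:route comment, and how it is
+// interpreted (assuming genericError names a declared swagger:response):
+//
+//	Responses:
+//	  200: body:User
+//	  404: genericError
+//
+// 200 gets a schema referencing #/definitions/User, while 404 references
+// #/responses/genericError.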
+
+func parseEnum(val string, s *spec.SimpleSchema) []interface{} {
+ list := strings.Split(val, ",")
+ interfaceSlice := make([]interface{}, len(list))
+ for i, d := range list {
+ v, err := parseValueFromSchema(d, s)
+ if err != nil {
+ interfaceSlice[i] = d
+ continue
+ }
+
+ interfaceSlice[i] = v
+ }
+ return interfaceSlice
+}
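+
+// For example (hypothetical input):
+//
+//	parseEnum("1,2,3", &spec.SimpleSchema{Type: "integer"})
+//	// => []interface{}{1, 2, 3}
+//
+// Values that fail to parse for the schema type are kept as strings.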
+
+// AlphaChars is the set of characters used when parsing for Vendor Extensions
+const AlphaChars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
+
+func newSetExtensions(setter func(*spec.Extensions)) *setOpExtensions {
+ return &setOpExtensions{
+ set: setter,
+ rx: rxExtensions,
+ }
+}
+
+type setOpExtensions struct {
+ set func(*spec.Extensions)
+ rx *regexp.Regexp
+}
+
+type extensionObject struct {
+ Extension string
+ Root interface{}
+}
+
+type extensionParsingStack []interface{}
+
+// Helper function to walk back through extensions until the proper nest level is reached
+func (stack *extensionParsingStack) walkBack(rawLines []string, lineIndex int) {
+ indent := strings.IndexAny(rawLines[lineIndex], AlphaChars)
+ nextIndent := strings.IndexAny(rawLines[lineIndex+1], AlphaChars)
+ if nextIndent < indent {
+ // Pop elements off the stack until we're back where we need to be
+ runbackIndex := 0
+ poppedIndent := 1000
+ for {
+ checkIndent := strings.IndexAny(rawLines[lineIndex-runbackIndex], AlphaChars)
+ if nextIndent == checkIndent {
+ break
+ }
+ if checkIndent < poppedIndent {
+ *stack = (*stack)[:len(*stack)-1]
+ poppedIndent = checkIndent
+ }
+ runbackIndex++
+ }
+ }
+}
+
+// Recursively parses through the given extension lines, building and adding extension objects as it goes.
+// Extensions may be key:value pairs, arrays, or objects.
+func buildExtensionObjects(rawLines []string, cleanLines []string, lineIndex int, extObjs *[]extensionObject, stack *extensionParsingStack) {
+ if lineIndex >= len(rawLines) {
+ if stack != nil {
+ if ext, ok := (*stack)[0].(extensionObject); ok {
+ *extObjs = append(*extObjs, ext)
+ }
+ }
+ return
+ }
+ kv := strings.SplitN(cleanLines[lineIndex], ":", 2)
+ key := strings.TrimSpace(kv[0])
+ if key == "" {
+ // Some odd empty line
+ return
+ }
+
+ nextIsList := false
+ if lineIndex < len(rawLines)-1 {
+ next := strings.SplitAfterN(cleanLines[lineIndex+1], ":", 2)
+ nextIsList = len(next) == 1
+ }
+
+ if len(kv) > 1 {
+ // Should be the start of a map or a key:value pair
+ value := strings.TrimSpace(kv[1])
+
+ if rxAllowedExtensions.MatchString(key) {
+ // New extension started
+ if stack != nil {
+ if ext, ok := (*stack)[0].(extensionObject); ok {
+ *extObjs = append(*extObjs, ext)
+ }
+ }
+
+ if value != "" {
+ ext := extensionObject{
+ Extension: key,
+ }
+ // Extension is simple key:value pair, no stack
+ ext.Root = make(map[string]string)
+ ext.Root.(map[string]string)[key] = value
+ *extObjs = append(*extObjs, ext)
+ buildExtensionObjects(rawLines, cleanLines, lineIndex+1, extObjs, nil)
+ } else {
+ ext := extensionObject{
+ Extension: key,
+ }
+ if nextIsList {
+ // Extension is an array
+ ext.Root = make(map[string]*[]string)
+ rootList := make([]string, 0)
+ ext.Root.(map[string]*[]string)[key] = &rootList
+ stack = &extensionParsingStack{}
+ *stack = append(*stack, ext)
+ *stack = append(*stack, ext.Root.(map[string]*[]string)[key])
+ } else {
+ // Extension is an object
+ ext.Root = make(map[string]interface{})
+ rootMap := make(map[string]interface{})
+ ext.Root.(map[string]interface{})[key] = rootMap
+ stack = &extensionParsingStack{}
+ *stack = append(*stack, ext)
+ *stack = append(*stack, rootMap)
+ }
+ buildExtensionObjects(rawLines, cleanLines, lineIndex+1, extObjs, stack)
+ }
+ } else if stack != nil && len(*stack) != 0 {
+ stackIndex := len(*stack) - 1
+ if value == "" {
+ if nextIsList {
+ // start of new list
+ newList := make([]string, 0)
+ (*stack)[stackIndex].(map[string]interface{})[key] = &newList
+ *stack = append(*stack, &newList)
+ } else {
+ // start of new map
+ newMap := make(map[string]interface{})
+ (*stack)[stackIndex].(map[string]interface{})[key] = newMap
+ *stack = append(*stack, newMap)
+ }
+ } else {
+ // key:value
+ if reflect.TypeOf((*stack)[stackIndex]).Kind() == reflect.Map {
+ (*stack)[stackIndex].(map[string]interface{})[key] = value
+ }
+ if lineIndex < len(rawLines)-1 && !rxAllowedExtensions.MatchString(cleanLines[lineIndex+1]) {
+ stack.walkBack(rawLines, lineIndex)
+ }
+ }
+ buildExtensionObjects(rawLines, cleanLines, lineIndex+1, extObjs, stack)
+ }
+ } else if stack != nil && len(*stack) != 0 {
+ // Should be a list item
+ stackIndex := len(*stack) - 1
+ list := (*stack)[stackIndex].(*[]string)
+ *list = append(*list, key)
+ (*stack)[stackIndex] = list
+ if lineIndex < len(rawLines)-1 && !rxAllowedExtensions.MatchString(cleanLines[lineIndex+1]) {
+ stack.walkBack(rawLines, lineIndex)
+ }
+ buildExtensionObjects(rawLines, cleanLines, lineIndex+1, extObjs, stack)
+ }
+}
+
+func (ss *setOpExtensions) Matches(line string) bool {
+ return ss.rx.MatchString(line)
+}
+
+func (ss *setOpExtensions) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+
+ cleanLines := cleanupScannerLines(lines, rxUncommentHeaders, nil)
+
+ exts := new(spec.VendorExtensible)
+ extList := make([]extensionObject, 0)
+ buildExtensionObjects(lines, cleanLines, 0, &extList, nil)
+
+ // Extensions can be one of the following:
+ // key:value pair
+ // list/array
+ // object
+ for _, ext := range extList {
+ if _, ok := ext.Root.(map[string]string); ok {
+ exts.AddExtension(ext.Extension, ext.Root.(map[string]string)[ext.Extension])
+ } else if _, ok := ext.Root.(map[string]*[]string); ok {
+ exts.AddExtension(ext.Extension, *(ext.Root.(map[string]*[]string)[ext.Extension]))
+ } else if _, ok := ext.Root.(map[string]interface{}); ok {
+ exts.AddExtension(ext.Extension, ext.Root.(map[string]interface{})[ext.Extension])
+ } else {
+ debugLog("Unknown Extension type: %s", fmt.Sprint(reflect.TypeOf(ext.Root)))
+ }
+ }
+
+ ss.set(&exts.Extensions)
+ return nil
+}
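+
+// A hypothetical extensions block and the resulting vendor extensions:
+//
+//	Extensions:
+//	  x-example-flag: true
+//	  x-example-list:
+//	    item-a
+//	    item-b
+//
+// x-example-flag is added as the string "true", and x-example-list as the
+// slice []string{"item-a", "item-b"}.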
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers.go b/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers.go
new file mode 100644
index 000000000..6ffac76af
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers.go
@@ -0,0 +1,51 @@
+//go:build go1.19
+// +build go1.19
+
+package codescan
+
+import (
+ "strings"
+)
+
+// a shared function that can be used to split given headers
+// into a title and description
+func collectScannerTitleDescription(headers []string) (title, desc []string) {
+ hdrs := cleanupScannerLines(headers, rxUncommentHeaders, nil)
+
+ idx := -1
+ for i, line := range hdrs {
+ if strings.TrimSpace(line) == "" {
+ idx = i
+ break
+ }
+ }
+
+ if idx > -1 {
+ title = hdrs[:idx]
+ if len(title) > 0 {
+ title[0] = rxTitleStart.ReplaceAllString(title[0], "")
+ }
+ if len(hdrs) > idx+1 {
+ desc = hdrs[idx+1:]
+ } else {
+ desc = nil
+ }
+ return
+ }
+
+ if len(hdrs) > 0 {
+ line := hdrs[0]
+ switch {
+ case rxPunctuationEnd.MatchString(line):
+ title = []string{line}
+ desc = hdrs[1:]
+ case rxTitleStart.MatchString(line):
+ title = []string{rxTitleStart.ReplaceAllString(line, "")}
+ desc = hdrs[1:]
+ default:
+ desc = hdrs
+ }
+ }
+
+ return
+}
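+
+// For example, with the hypothetical header lines
+//
+//	{"Fetch a user.", "", "Returns a single user by ID."}
+//
+// the block before the blank line becomes the title and the remainder the
+// description; without a blank line, a single line ending in punctuation
+// is treated as the title on its own.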
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers_go118.go b/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers_go118.go
new file mode 100644
index 000000000..62eb59a96
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/parser_helpers_go118.go
@@ -0,0 +1,42 @@
+//go:build !go1.19
+// +build !go1.19
+
+package codescan
+
+import "strings"
+
+// a shared function that can be used to split given headers
+// into a title and description
+func collectScannerTitleDescription(headers []string) (title, desc []string) {
+ hdrs := cleanupScannerLines(headers, rxUncommentHeaders, nil)
+
+ idx := -1
+ for i, line := range hdrs {
+ if strings.TrimSpace(line) == "" {
+ idx = i
+ break
+ }
+ }
+
+ if idx > -1 {
+ title = hdrs[:idx]
+ if len(hdrs) > idx+1 {
+ desc = hdrs[idx+1:]
+ } else {
+ desc = nil
+ }
+ return
+ }
+
+ if len(hdrs) > 0 {
+ line := hdrs[0]
+ if rxPunctuationEnd.MatchString(line) {
+ title = []string{line}
+ desc = hdrs[1:]
+ } else {
+ desc = hdrs
+ }
+ }
+
+ return
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/regexprs.go b/vendor/github.com/go-swagger/go-swagger/codescan/regexprs.go
new file mode 100644
index 000000000..365b56f53
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/regexprs.go
@@ -0,0 +1,96 @@
+package codescan
+
+import "regexp"
+
+const (
+ rxMethod = "(\\p{L}+)"
+ rxPath = "((?:/[\\p{L}\\p{N}\\p{Pd}\\p{Pc}{}\\-\\.\\?_~%!$&'()*+,;=:@/]*)+/?)"
+ rxOpTags = "(\\p{L}[\\p{L}\\p{N}\\p{Pd}\\.\\p{Pc}\\p{Zs}]+)"
+ rxOpID = "((?:\\p{L}[\\p{L}\\p{N}\\p{Pd}\\p{Pc}]+)+)"
+
+ rxMaximumFmt = "%s[Mm]ax(?:imum)?\\p{Zs}*:\\p{Zs}*([\\<=])?\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
+ rxMinimumFmt = "%s[Mm]in(?:imum)?\\p{Zs}*:\\p{Zs}*([\\>=])?\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
+ rxMultipleOfFmt = "%s[Mm]ultiple\\p{Zs}*[Oo]f\\p{Zs}*:\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
+
+ rxMaxLengthFmt = "%s[Mm]ax(?:imum)?(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ll]en(?:gth)?)\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
+ rxMinLengthFmt = "%s[Mm]in(?:imum)?(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ll]en(?:gth)?)\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
+ rxPatternFmt = "%s[Pp]attern\\p{Zs}*:\\p{Zs}*(.*)$"
+ rxCollectionFormatFmt = "%s[Cc]ollection(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ff]ormat)\\p{Zs}*:\\p{Zs}*(.*)$"
+ rxEnumFmt = "%s[Ee]num\\p{Zs}*:\\p{Zs}*(.*)$"
+ rxDefaultFmt = "%s[Dd]efault\\p{Zs}*:\\p{Zs}*(.*)$"
+ rxExampleFmt = "%s[Ee]xample\\p{Zs}*:\\p{Zs}*(.*)$"
+
+ rxMaxItemsFmt = "%s[Mm]ax(?:imum)?(?:\\p{Zs}*|[\\p{Pd}\\p{Pc}]|\\.)?[Ii]tems\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
+ rxMinItemsFmt = "%s[Mm]in(?:imum)?(?:\\p{Zs}*|[\\p{Pd}\\p{Pc}]|\\.)?[Ii]tems\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
+ rxUniqueFmt = "%s[Uu]nique\\p{Zs}*:\\p{Zs}*(true|false)$"
+
+ rxItemsPrefixFmt = "(?:[Ii]tems[\\.\\p{Zs}]*){%d}"
+)
+
+var (
+ rxSwaggerAnnotation = regexp.MustCompile(`swagger:([\p{L}\p{N}\p{Pd}\p{Pc}]+)`)
+ rxFileUpload = regexp.MustCompile(`swagger:file`)
+ rxStrFmt = regexp.MustCompile(`swagger:strfmt\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
+ rxAlias = regexp.MustCompile(`swagger:alias`)
+ rxName = regexp.MustCompile(`swagger:name\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\.]+)$`)
+ rxAllOf = regexp.MustCompile(`swagger:allOf\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\.]+)?$`)
+ rxModelOverride = regexp.MustCompile(`swagger:model\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
+ rxResponseOverride = regexp.MustCompile(`swagger:response\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
+ rxParametersOverride = regexp.MustCompile(`swagger:parameters\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\p{Zs}]+)$`)
+ rxEnum = regexp.MustCompile(`swagger:enum\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
+ rxIgnoreOverride = regexp.MustCompile(`swagger:ignore\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
+ rxDefault = regexp.MustCompile(`swagger:default\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
+ rxType = regexp.MustCompile(`swagger:type\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
+ rxRoute = regexp.MustCompile(
+ "swagger:route\\p{Zs}*" +
+ rxMethod +
+ "\\p{Zs}*" +
+ rxPath +
+ "(?:\\p{Zs}+" +
+ rxOpTags +
+ ")?\\p{Zs}+" +
+ rxOpID + "\\p{Zs}*$")
+ rxBeginYAMLSpec = regexp.MustCompile(`---\p{Zs}*$`)
+ rxUncommentHeaders = regexp.MustCompile(`^[\p{Zs}\t/\*-]*\|?`)
+ rxUncommentYAML = regexp.MustCompile(`^[\p{Zs}\t]*/*`)
+ rxOperation = regexp.MustCompile(
+ "swagger:operation\\p{Zs}*" +
+ rxMethod +
+ "\\p{Zs}*" +
+ rxPath +
+ "(?:\\p{Zs}+" +
+ rxOpTags +
+ ")?\\p{Zs}+" +
+ rxOpID + "\\p{Zs}*$")
+
+ rxSpace = regexp.MustCompile(`\p{Zs}+`)
+ rxIndent = regexp.MustCompile(`[\p{Zs}\t]*/*[\p{Zs}\t]*[^\p{Zs}\t]`)
+ rxNotIndent = regexp.MustCompile(`[^\p{Zs}\t]`)
+ rxPunctuationEnd = regexp.MustCompile(`\p{Po}$`)
+ rxTitleStart = regexp.MustCompile(`^[#]+\p{Zs}+`)
+ rxStripComments = regexp.MustCompile(`^[^\p{L}\p{N}\p{Pd}\p{Pc}\+]*`)
+ rxStripTitleComments = regexp.MustCompile(`^[^\p{L}]*[Pp]ackage\p{Zs}+[^\p{Zs}]+\p{Zs}*`)
+ rxAllowedExtensions = regexp.MustCompile(`^[Xx]-`)
+
+ rxIn = regexp.MustCompile(`[Ii]n\p{Zs}*:\p{Zs}*(query|path|header|body|formData)$`)
+ rxRequired = regexp.MustCompile(`[Rr]equired\p{Zs}*:\p{Zs}*(true|false)$`)
+ rxDiscriminator = regexp.MustCompile(`[Dd]iscriminator\p{Zs}*:\p{Zs}*(true|false)$`)
+ rxReadOnly = regexp.MustCompile(`[Rr]ead(?:\p{Zs}*|[\p{Pd}\p{Pc}])?[Oo]nly\p{Zs}*:\p{Zs}*(true|false)$`)
+ rxConsumes = regexp.MustCompile(`[Cc]onsumes\p{Zs}*:`)
+ rxProduces = regexp.MustCompile(`[Pp]roduces\p{Zs}*:`)
+ rxSecuritySchemes = regexp.MustCompile(`[Ss]ecurity\p{Zs}*:`)
+ rxSecurity = regexp.MustCompile(`[Ss]ecurity\p{Zs}*[Dd]efinitions:`)
+ rxResponses = regexp.MustCompile(`[Rr]esponses\p{Zs}*:`)
+ rxParameters = regexp.MustCompile(`[Pp]arameters\p{Zs}*:`)
+ rxSchemes = regexp.MustCompile(`[Ss]chemes\p{Zs}*:\p{Zs}*((?:(?:https?|HTTPS?|wss?|WSS?)[\p{Zs},]*)+)$`)
+ rxVersion = regexp.MustCompile(`[Vv]ersion\p{Zs}*:\p{Zs}*(.+)$`)
+ rxHost = regexp.MustCompile(`[Hh]ost\p{Zs}*:\p{Zs}*(.+)$`)
+ rxBasePath = regexp.MustCompile(`[Bb]ase\p{Zs}*-*[Pp]ath\p{Zs}*:\p{Zs}*` + rxPath + "$")
+ rxLicense = regexp.MustCompile(`[Ll]icense\p{Zs}*:\p{Zs}*(.+)$`)
+ rxContact = regexp.MustCompile(`[Cc]ontact\p{Zs}*-?(?:[Ii]nfo\p{Zs}*)?:\p{Zs}*(.+)$`)
+ rxTOS = regexp.MustCompile(`[Tt](?:erms)?\p{Zs}*-?[Oo]f?\p{Zs}*-?[Ss](?:ervice)?\p{Zs}*:`)
+ rxExtensions = regexp.MustCompile(`[Ee]xtensions\p{Zs}*:`)
+ rxInfoExtensions = regexp.MustCompile(`[Ii]nfo\p{Zs}*[Ee]xtensions:`)
+ rxDeprecated = regexp.MustCompile(`[Dd]eprecated\p{Zs}*:\p{Zs}*(true|false)$`)
+ // currently unused: rxExample = regexp.MustCompile(`[Ex]ample\p{Zs}*:\p{Zs}*(.*)$`)
+)
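+
+// As an illustration, rxRoute accepts lines of the documented
+// swagger:route form, e.g. the hypothetical
+//
+//	swagger:route GET /users/{id} users getUser
+//
+// capturing the method, the path, the optional space-separated tags and
+// the operation id.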
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/responses.go b/vendor/github.com/go-swagger/go-swagger/codescan/responses.go
new file mode 100644
index 000000000..350cd3a7b
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/responses.go
@@ -0,0 +1,454 @@
+package codescan
+
+import (
+ "fmt"
+ "go/ast"
+ "go/types"
+ "strings"
+
+ "github.com/pkg/errors"
+
+ "golang.org/x/tools/go/ast/astutil"
+
+ "github.com/go-openapi/spec"
+)
+
+type responseTypable struct {
+ in string
+ header *spec.Header
+ response *spec.Response
+}
+
+func (ht responseTypable) Level() int { return 0 }
+
+func (ht responseTypable) Typed(tpe, format string) {
+ ht.header.Typed(tpe, format)
+}
+
+func bodyTypable(in string, schema *spec.Schema) (swaggerTypable, *spec.Schema) {
+ if in == "body" {
+ // get the schema for items on the schema property
+ if schema == nil {
+ schema = new(spec.Schema)
+ }
+ if schema.Items == nil {
+ schema.Items = new(spec.SchemaOrArray)
+ }
+ if schema.Items.Schema == nil {
+ schema.Items.Schema = new(spec.Schema)
+ }
+ schema.Typed("array", "")
+ return schemaTypable{schema.Items.Schema, 1}, schema
+ }
+ return nil, nil
+}
+
+func (ht responseTypable) Items() swaggerTypable {
+ bdt, schema := bodyTypable(ht.in, ht.response.Schema)
+ if bdt != nil {
+ ht.response.Schema = schema
+ return bdt
+ }
+
+ if ht.header.Items == nil {
+ ht.header.Items = new(spec.Items)
+ }
+ ht.header.Type = "array"
+ return itemsTypable{ht.header.Items, 1}
+}
+
+func (ht responseTypable) SetRef(ref spec.Ref) {
+ // having trouble seeing the usefulness of this one here
+ ht.Schema().Ref = ref
+}
+
+func (ht responseTypable) Schema() *spec.Schema {
+ if ht.response.Schema == nil {
+ ht.response.Schema = new(spec.Schema)
+ }
+ return ht.response.Schema
+}
+
+func (ht responseTypable) SetSchema(schema *spec.Schema) {
+ ht.response.Schema = schema
+}
+
+func (ht responseTypable) CollectionOf(items *spec.Items, format string) {
+ ht.header.CollectionOf(items, format)
+}
+
+func (ht responseTypable) AddExtension(key string, value interface{}) {
+ ht.response.AddExtension(key, value)
+}
+
+func (ht responseTypable) WithEnum(values ...interface{}) {
+ ht.header.WithEnum(values)
+}
+
+func (ht responseTypable) WithEnumDescription(_ string) {
+ // intentionally a no-op: headers have nowhere to store an enum description
+}
+
+type headerValidations struct {
+ current *spec.Header
+}
+
+func (sv headerValidations) SetMaximum(val float64, exclusive bool) {
+ sv.current.Maximum = &val
+ sv.current.ExclusiveMaximum = exclusive
+}
+func (sv headerValidations) SetMinimum(val float64, exclusive bool) {
+ sv.current.Minimum = &val
+ sv.current.ExclusiveMinimum = exclusive
+}
+func (sv headerValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
+func (sv headerValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
+func (sv headerValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
+func (sv headerValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
+func (sv headerValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
+func (sv headerValidations) SetPattern(val string) { sv.current.Pattern = val }
+func (sv headerValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
+func (sv headerValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
+func (sv headerValidations) SetEnum(val string) {
+ sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
+}
+func (sv headerValidations) SetDefault(val interface{}) { sv.current.Default = val }
+func (sv headerValidations) SetExample(val interface{}) { sv.current.Example = val }
+
+type responseBuilder struct {
+ ctx *scanCtx
+ decl *entityDecl
+ postDecls []*entityDecl
+}
+
+func (r *responseBuilder) Build(responses map[string]spec.Response) error {
+ // check if there is a swagger:response tag followed by a name;
+ // that name is the key under which this response struct is registered.
+ // Once the type name is found, it is converted to a schema by looking it
+ // up in the responses dictionary that was passed into this method.
+
+ name, _ := r.decl.ResponseNames()
+ response := responses[name]
+ debugLog("building response: %s", name)
+
+ // analyze doc comment for the model
+ sp := new(sectionedParser)
+ sp.setDescription = func(lines []string) { response.Description = joinDropLast(lines) }
+ if err := sp.Parse(r.decl.Comments); err != nil {
+ return err
+ }
+
+ // analyze struct body for fields etc
+ // each exported struct field:
+ // * gets a type mapped to a go primitive
+ // * perhaps gets a format
+ // * has to document the validations that apply for the type and the field
+ // * when the struct field points to a model it becomes a ref: #/definitions/ModelName
+ // * comments that aren't tags are used as the description
+ if err := r.buildFromType(r.decl.Type, &response, make(map[string]bool)); err != nil {
+ return err
+ }
+ responses[name] = response
+ return nil
+}
+
+func (r *responseBuilder) buildFromField(fld *types.Var, tpe types.Type, typable swaggerTypable, seen map[string]bool) error {
+ debugLog("build from field %s: %T", fld.Name(), tpe)
+ switch ftpe := tpe.(type) {
+ case *types.Basic:
+ return swaggerSchemaForType(ftpe.Name(), typable)
+ case *types.Struct:
+ sb := schemaBuilder{
+ decl: r.decl,
+ ctx: r.ctx,
+ }
+ if err := sb.buildFromType(tpe, typable); err != nil {
+ return err
+ }
+ r.postDecls = append(r.postDecls, sb.postDecls...)
+ return nil
+ case *types.Pointer:
+ return r.buildFromField(fld, ftpe.Elem(), typable, seen)
+ case *types.Interface:
+ sb := schemaBuilder{
+ decl: r.decl,
+ ctx: r.ctx,
+ }
+ if err := sb.buildFromType(tpe, typable); err != nil {
+ return err
+ }
+ r.postDecls = append(r.postDecls, sb.postDecls...)
+ return nil
+ case *types.Array:
+ return r.buildFromField(fld, ftpe.Elem(), typable.Items(), seen)
+ case *types.Slice:
+ return r.buildFromField(fld, ftpe.Elem(), typable.Items(), seen)
+ case *types.Map:
+ schema := new(spec.Schema)
+ typable.Schema().Typed("object", "").AdditionalProperties = &spec.SchemaOrBool{
+ Schema: schema,
+ }
+ sb := schemaBuilder{
+ decl: r.decl,
+ ctx: r.ctx,
+ }
+ if err := sb.buildFromType(ftpe.Elem(), schemaTypable{schema, typable.Level() + 1}); err != nil {
+ return err
+ }
+ r.postDecls = append(r.postDecls, sb.postDecls...)
+ return nil
+ case *types.Named:
+ if decl, found := r.ctx.DeclForType(ftpe.Obj().Type()); found {
+ if decl.Type.Obj().Pkg().Path() == "time" && decl.Type.Obj().Name() == "Time" {
+ typable.Typed("string", "date-time")
+ return nil
+ }
+ if sfnm, isf := strfmtName(decl.Comments); isf {
+ typable.Typed("string", sfnm)
+ return nil
+ }
+ sb := &schemaBuilder{ctx: r.ctx, decl: decl}
+ sb.inferNames()
+ if err := sb.buildFromType(decl.Type, typable); err != nil {
+ return err
+ }
+ r.postDecls = append(r.postDecls, sb.postDecls...)
+ return nil
+ }
+ return errors.Errorf("unable to find package and source file for: %s", ftpe.String())
+ default:
+ return errors.Errorf("unknown type for %s: %T", fld.String(), fld.Type())
+ }
+}
+
+func (r *responseBuilder) buildFromType(otpe types.Type, resp *spec.Response, seen map[string]bool) error {
+ switch tpe := otpe.(type) {
+ case *types.Pointer:
+ return r.buildFromType(tpe.Elem(), resp, seen)
+ case *types.Named:
+ o := tpe.Obj()
+ switch stpe := o.Type().Underlying().(type) {
+ case *types.Struct:
+ debugLog("build from type %s: %T", tpe.Obj().Name(), otpe)
+ if decl, found := r.ctx.DeclForType(o.Type()); found {
+ return r.buildFromStruct(decl, stpe, resp, seen)
+ }
+ return r.buildFromStruct(r.decl, stpe, resp, seen)
+ default:
+ if decl, found := r.ctx.DeclForType(o.Type()); found {
+ var schema spec.Schema
+ typable := schemaTypable{schema: &schema, level: 0}
+
+ if decl.Type.Obj().Pkg().Path() == "time" && decl.Type.Obj().Name() == "Time" {
+ typable.Typed("string", "date-time")
+ return nil
+ }
+ if sfnm, isf := strfmtName(decl.Comments); isf {
+ typable.Typed("string", sfnm)
+ return nil
+ }
+ sb := &schemaBuilder{ctx: r.ctx, decl: decl}
+ sb.inferNames()
+ if err := sb.buildFromType(tpe.Underlying(), typable); err != nil {
+ return err
+ }
+ resp.WithSchema(&schema)
+ r.postDecls = append(r.postDecls, sb.postDecls...)
+ return nil
+ }
+ return errors.Errorf("responses can only be structs, did you mean for %s to be the response body?", otpe.String())
+ }
+ default:
+ return errors.New("anonymous types are currently not supported for responses")
+ }
+}
+
+func (r *responseBuilder) buildFromStruct(decl *entityDecl, tpe *types.Struct, resp *spec.Response, seen map[string]bool) error {
+ if tpe.NumFields() == 0 {
+ return nil
+ }
+
+ for i := 0; i < tpe.NumFields(); i++ {
+ fld := tpe.Field(i)
+ if fld.Embedded() {
+
+ if err := r.buildFromType(fld.Type(), resp, seen); err != nil {
+ return err
+ }
+ continue
+ }
+ if fld.Anonymous() {
+ debugLog("skipping anonymous field")
+ continue
+ }
+
+ tg := tpe.Tag(i)
+
+ var afld *ast.Field
+ ans, _ := astutil.PathEnclosingInterval(decl.File, fld.Pos(), fld.Pos())
+ for _, an := range ans {
+ at, valid := an.(*ast.Field)
+ if !valid {
+ continue
+ }
+
+ debugLog("field %s: %s(%T) [%q] ==> %s", fld.Name(), fld.Type().String(), fld.Type(), tg, at.Doc.Text())
+ afld = at
+ break
+ }
+
+ if afld == nil {
+ debugLog("can't find source associated with %s for %s", fld.String(), tpe.String())
+ continue
+ }
+
+ // if the field is annotated with swagger:ignore, ignore it
+ if ignored(afld.Doc) {
+ continue
+ }
+
+ name, ignore, _, err := parseJSONTag(afld)
+ if err != nil {
+ return err
+ }
+ if ignore {
+ continue
+ }
+
+ var in string
+ // scan for param location first, this changes some behavior down the line
+ if afld.Doc != nil {
+ for _, cmt := range afld.Doc.List {
+ for _, line := range strings.Split(cmt.Text, "\n") {
+ matches := rxIn.FindStringSubmatch(line)
+ if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 {
+ in = strings.TrimSpace(matches[1])
+ }
+ }
+ }
+ }
+
+ ps := resp.Headers[name]
+
+ // support swagger:file for response
+ // An API operation can return a file, such as an image or PDF. In this case,
+ // define the response schema with type: file and specify the appropriate MIME types in the produces section.
+ if afld.Doc != nil && fileParam(afld.Doc) {
+ resp.Schema = &spec.Schema{}
+ resp.Schema.Typed("file", "")
+ } else if err := r.buildFromField(fld, fld.Type(), responseTypable{in, &ps, resp}, seen); err != nil {
+ return err
+ }
+
+ if strfmtName, ok := strfmtName(afld.Doc); ok {
+ ps.Typed("string", strfmtName)
+ }
+
+ sp := new(sectionedParser)
+ sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
+ sp.taggers = []tagParser{
+ newSingleLineTagParser("maximum", &setMaximum{headerValidations{&ps}, rxf(rxMaximumFmt, "")}),
+ newSingleLineTagParser("minimum", &setMinimum{headerValidations{&ps}, rxf(rxMinimumFmt, "")}),
+ newSingleLineTagParser("multipleOf", &setMultipleOf{headerValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
+ newSingleLineTagParser("minLength", &setMinLength{headerValidations{&ps}, rxf(rxMinLengthFmt, "")}),
+ newSingleLineTagParser("maxLength", &setMaxLength{headerValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
+ newSingleLineTagParser("pattern", &setPattern{headerValidations{&ps}, rxf(rxPatternFmt, "")}),
+ newSingleLineTagParser("collectionFormat", &setCollectionFormat{headerValidations{&ps}, rxf(rxCollectionFormatFmt, "")}),
+ newSingleLineTagParser("minItems", &setMinItems{headerValidations{&ps}, rxf(rxMinItemsFmt, "")}),
+ newSingleLineTagParser("maxItems", &setMaxItems{headerValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
+ newSingleLineTagParser("unique", &setUnique{headerValidations{&ps}, rxf(rxUniqueFmt, "")}),
+ newSingleLineTagParser("enum", &setEnum{headerValidations{&ps}, rxf(rxEnumFmt, "")}),
+ newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, headerValidations{&ps}, rxf(rxDefaultFmt, "")}),
+ newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, headerValidations{&ps}, rxf(rxExampleFmt, "")}),
+ }
+ itemsTaggers := func(items *spec.Items, level int) []tagParser {
+ // the expression is 1-indexed, not 0-indexed
+ itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
+
+ return []tagParser{
+ newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
+ }
+ }
+
+ var parseArrayTypes func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error)
+ parseArrayTypes = func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) {
+ if items == nil {
+ return []tagParser{}, nil
+ }
+ switch iftpe := expr.(type) {
+ case *ast.ArrayType:
+ eleTaggers := itemsTaggers(items, level)
+ sp.taggers = append(eleTaggers, sp.taggers...)
+ otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ case *ast.Ident:
+ taggers := []tagParser{}
+ if iftpe.Obj == nil {
+ taggers = itemsTaggers(items, level)
+ }
+ otherTaggers, err := parseArrayTypes(expr, items.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return append(taggers, otherTaggers...), nil
+ case *ast.SelectorExpr:
+ otherTaggers, err := parseArrayTypes(iftpe.Sel, items.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ case *ast.StarExpr:
+ otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ default:
+ return nil, fmt.Errorf("unknown field type element for %q", name)
+ }
+ }
+ // check if this is a primitive, if so parse the validations from the
+ // doc comments of the slice declaration.
+ if ftped, ok := afld.Type.(*ast.ArrayType); ok {
+ taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
+ if err != nil {
+ return err
+ }
+ sp.taggers = append(taggers, sp.taggers...)
+ }
+
+ if err := sp.Parse(afld.Doc); err != nil {
+ return err
+ }
+
+ if in != "body" {
+ seen[name] = true
+ if resp.Headers == nil {
+ resp.Headers = make(map[string]spec.Header)
+ }
+ resp.Headers[name] = ps
+ }
+ }
+
+ for k := range resp.Headers {
+ if !seen[k] {
+ delete(resp.Headers, k)
+ }
+ }
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/route_params.go b/vendor/github.com/go-swagger/go-swagger/codescan/route_params.go
new file mode 100644
index 000000000..7ca5b0237
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/route_params.go
@@ -0,0 +1,263 @@
+package codescan
+
+import (
+ "errors"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+const (
+ // ParamDescriptionKey indicates the tag used to define a parameter description in swagger:route
+ ParamDescriptionKey = "description"
+ // ParamNameKey indicates the tag used to define a parameter name in swagger:route
+ ParamNameKey = "name"
+ // ParamInKey indicates the tag used to define a parameter location in swagger:route
+ ParamInKey = "in"
+ // ParamRequiredKey indicates the tag used to declare whether a parameter is required in swagger:route
+ ParamRequiredKey = "required"
+ // ParamTypeKey indicates the tag used to define the parameter type in swagger:route
+ ParamTypeKey = "type"
+ // ParamAllowEmptyKey indicates the tag used to indicate whether a parameter allows empty values in swagger:route
+ ParamAllowEmptyKey = "allowempty"
+
+ // SchemaMinKey indicates the tag used to indicate the minimum value allowed for this type in swagger:route
+ SchemaMinKey = "min"
+ // SchemaMaxKey indicates the tag used to indicate the maximum value allowed for this type in swagger:route
+ SchemaMaxKey = "max"
+ // SchemaEnumKey indicates the tag used to specify the allowed values for this type in swagger:route
+ SchemaEnumKey = "enum"
+ // SchemaFormatKey indicates the expected format for this field in swagger:route
+ SchemaFormatKey = "format"
+ // SchemaDefaultKey indicates the default value for this field in swagger:route
+ SchemaDefaultKey = "default"
+ // SchemaMinLenKey indicates the minimum length of this field in swagger:route
+ SchemaMinLenKey = "minlength"
+ // SchemaMaxLenKey indicates the maximum length of this field in swagger:route
+ SchemaMaxLenKey = "maxlength"
+
+ // TypeArray is the identifier for an array type in swagger:route
+ TypeArray = "array"
+ // TypeNumber is the identifier for a number type in swagger:route
+ TypeNumber = "number"
+ // TypeInteger is the identifier for an integer type in swagger:route
+ TypeInteger = "integer"
+ // TypeBoolean is the identifier for a boolean type in swagger:route
+ TypeBoolean = "boolean"
+ // TypeBool is the identifier for a boolean type in swagger:route
+ TypeBool = "bool"
+ // TypeObject is the identifier for an object type in swagger:route
+ TypeObject = "object"
+ // TypeString is the identifier for a string type in swagger:route
+ TypeString = "string"
+)
+
+var (
+ validIn = []string{"path", "query", "header", "body", "form"}
+ basicTypes = []string{TypeInteger, TypeNumber, TypeString, TypeBoolean, TypeBool, TypeArray}
+)
+
+func newSetParams(params []*spec.Parameter, setter func([]*spec.Parameter)) *setOpParams {
+ return &setOpParams{
+ set: setter,
+ parameters: params,
+ }
+}
+
+type setOpParams struct {
+ set func([]*spec.Parameter)
+ parameters []*spec.Parameter
+}
+
+func (s *setOpParams) Matches(line string) bool {
+ return rxParameters.MatchString(line)
+}
+
+func (s *setOpParams) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+
+ var current *spec.Parameter
+ var extraData map[string]string
+
+ for _, line := range lines {
+ l := strings.TrimSpace(line)
+
+ if strings.HasPrefix(l, "+") {
+ s.finalizeParam(current, extraData)
+ current = new(spec.Parameter)
+ extraData = make(map[string]string)
+ l = strings.TrimPrefix(l, "+")
+ }
+
+ kv := strings.SplitN(l, ":", 2)
+
+ if len(kv) <= 1 {
+ continue
+ }
+
+ key := strings.ToLower(strings.TrimSpace(kv[0]))
+ value := strings.TrimSpace(kv[1])
+
+ if current == nil {
+ return errors.New("invalid route/operation schema provided")
+ }
+
+ switch key {
+ case ParamDescriptionKey:
+ current.Description = value
+ case ParamNameKey:
+ current.Name = value
+ case ParamInKey:
+ v := strings.ToLower(value)
+ if contains(validIn, v) {
+ current.In = v
+ }
+ case ParamRequiredKey:
+ if v, err := strconv.ParseBool(value); err == nil {
+ current.Required = v
+ }
+ case ParamTypeKey:
+ if current.Schema == nil {
+ current.Schema = new(spec.Schema)
+ }
+ if contains(basicTypes, value) {
+ current.Type = strings.ToLower(value)
+ if current.Type == TypeBool {
+ current.Type = TypeBoolean
+ }
+ } else if ref, err := spec.NewRef("#/definitions/" + value); err == nil {
+ current.Type = TypeObject
+ current.Schema.Ref = ref
+ }
+ current.Schema.Type = spec.StringOrArray{current.Type}
+ case ParamAllowEmptyKey:
+ if v, err := strconv.ParseBool(value); err == nil {
+ current.AllowEmptyValue = v
+ }
+ default:
+ extraData[key] = value
+ }
+ }
+
+ s.finalizeParam(current, extraData)
+ s.set(s.parameters)
+ return nil
+}
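+
+// A hypothetical parameters block in the format parsed above; each "+"
+// starts a new parameter:
+//
+//	Parameters:
+//	  + name: id
+//	    in: path
+//	    description: the user id
+//	    required: true
+//	    type: integer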
+
+func (s *setOpParams) finalizeParam(param *spec.Parameter, data map[string]string) {
+ if param == nil {
+ return
+ }
+
+ processSchema(data, param)
+
+ // schema is only allowed for parameters in "body"
+ // see https://swagger.io/specification/v2/#parameterObject
+ switch {
+ case param.In == "body":
+ param.Type = ""
+
+ case param.Schema != nil:
+ // convert schema into validations
+ param.SetValidations(param.Schema.Validations())
+ param.Default = param.Schema.Default
+ param.Format = param.Schema.Format
+ param.Schema = nil
+ }
+
+ s.parameters = append(s.parameters, param)
+}
+
+func processSchema(data map[string]string, param *spec.Parameter) {
+ if param.Schema == nil {
+ return
+ }
+
+ var enumValues []string
+
+ for key, value := range data {
+ switch key {
+ case SchemaMinKey:
+ if t := getType(param.Schema); t == TypeNumber || t == TypeInteger {
+ v, _ := strconv.ParseFloat(value, 64)
+ param.Schema.Minimum = &v
+ }
+ case SchemaMaxKey:
+ if t := getType(param.Schema); t == TypeNumber || t == TypeInteger {
+ v, _ := strconv.ParseFloat(value, 64)
+ param.Schema.Maximum = &v
+ }
+ case SchemaMinLenKey:
+ if getType(param.Schema) == TypeArray {
+ v, _ := strconv.ParseInt(value, 10, 64)
+ param.Schema.MinLength = &v
+ }
+ case SchemaMaxLenKey:
+ if getType(param.Schema) == TypeArray {
+ v, _ := strconv.ParseInt(value, 10, 64)
+ param.Schema.MaxLength = &v
+ }
+ case SchemaEnumKey:
+ enumValues = strings.Split(value, ",")
+ case SchemaFormatKey:
+ param.Schema.Format = value
+ case SchemaDefaultKey:
+ param.Schema.Default = convert(param.Type, value)
+ }
+ }
+
+ if param.Description != "" {
+ param.Schema.Description = param.Description
+ }
+
+ convertEnum(param.Schema, enumValues)
+}
+
+func convertEnum(schema *spec.Schema, enumValues []string) {
+ if len(enumValues) == 0 {
+ return
+ }
+
+ var finalEnum []interface{}
+ for _, v := range enumValues {
+ finalEnum = append(finalEnum, convert(schema.Type[0], strings.TrimSpace(v)))
+ }
+ schema.Enum = finalEnum
+}
+
+func convert(typeStr, valueStr string) interface{} {
+ switch typeStr {
+ case TypeInteger:
+ fallthrough
+ case TypeNumber:
+ if num, err := strconv.ParseFloat(valueStr, 64); err == nil {
+ return num
+ }
+ case TypeBoolean:
+ fallthrough
+ case TypeBool:
+ if b, err := strconv.ParseBool(valueStr); err == nil {
+ return b
+ }
+ }
+ return valueStr
+}
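+
+// Note that both integer and number values go through ParseFloat, so a
+// hypothetical convert(TypeInteger, "42") yields float64(42); anything
+// that fails to parse falls through and is returned as the original string.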
+
+func getType(schema *spec.Schema) string {
+ if len(schema.Type) == 0 {
+ return ""
+ }
+ return schema.Type[0]
+}
+
+func contains(arr []string, obj string) bool {
+ for _, v := range arr {
+ if v == obj {
+ return true
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/routes.go b/vendor/github.com/go-swagger/go-swagger/codescan/routes.go
new file mode 100644
index 000000000..af58e43f3
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/routes.go
@@ -0,0 +1,93 @@
+package codescan
+
+import (
+ "fmt"
+
+ "github.com/go-openapi/spec"
+)
+
+func opConsumesSetter(op *spec.Operation) func([]string) {
+ return func(consumes []string) { op.Consumes = consumes }
+}
+
+func opProducesSetter(op *spec.Operation) func([]string) {
+ return func(produces []string) { op.Produces = produces }
+}
+
+func opSchemeSetter(op *spec.Operation) func([]string) {
+ return func(schemes []string) { op.Schemes = schemes }
+}
+
+func opSecurityDefsSetter(op *spec.Operation) func([]map[string][]string) {
+ return func(securityDefs []map[string][]string) { op.Security = securityDefs }
+}
+
+func opResponsesSetter(op *spec.Operation) func(*spec.Response, map[int]spec.Response) {
+ return func(def *spec.Response, scr map[int]spec.Response) {
+ if op.Responses == nil {
+ op.Responses = new(spec.Responses)
+ }
+ op.Responses.Default = def
+ op.Responses.StatusCodeResponses = scr
+ }
+}
+
+func opParamSetter(op *spec.Operation) func([]*spec.Parameter) {
+ return func(params []*spec.Parameter) {
+ for _, v := range params {
+ op.AddParam(v)
+ }
+ }
+}
+
+func opExtensionsSetter(op *spec.Operation) func(*spec.Extensions) {
+ return func(exts *spec.Extensions) {
+ for name, value := range *exts {
+ op.AddExtension(name, value)
+ }
+ }
+}
+
+type routesBuilder struct {
+ ctx *scanCtx
+ route parsedPathContent
+ definitions map[string]spec.Schema
+ operations map[string]*spec.Operation
+ responses map[string]spec.Response
+ parameters []*spec.Parameter
+}
+
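+// Build parses the comment section attached to a swagger:route and fills in
+// the corresponding operation: summary, description, consumes, produces,
+// schemes, security, parameters, responses, deprecation and extensions,
+// then registers the resulting path item on the target paths.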
+func (r *routesBuilder) Build(tgt *spec.Paths) error {
+
+ pthObj := tgt.Paths[r.route.Path]
+ op := setPathOperation(
+ r.route.Method, r.route.ID,
+ &pthObj, r.operations[r.route.ID])
+
+ op.Tags = r.route.Tags
+
+ sp := new(sectionedParser)
+ sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) }
+ sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) }
+ sr := newSetResponses(r.definitions, r.responses, opResponsesSetter(op))
+ spa := newSetParams(r.parameters, opParamSetter(op))
+ sp.taggers = []tagParser{
+ newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, opConsumesSetter(op)), false),
+ newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, opProducesSetter(op)), false),
+ newSingleLineTagParser("Schemes", newSetSchemes(opSchemeSetter(op))),
+ newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, opSecurityDefsSetter(op)), false),
+ newMultiLineTagParser("Parameters", spa, false),
+ newMultiLineTagParser("Responses", sr, false),
+ newSingleLineTagParser("Deprecated", &setDeprecatedOp{op}),
+ newMultiLineTagParser("Extensions", newSetExtensions(opExtensionsSetter(op)), true),
+ }
+ if err := sp.Parse(r.route.Remaining); err != nil {
+ return fmt.Errorf("operation (%s): %v", op.ID, err)
+ }
+
+ if tgt.Paths == nil {
+ tgt.Paths = make(map[string]spec.PathItem)
+ }
+ tgt.Paths[r.route.Path] = pthObj
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/schema.go b/vendor/github.com/go-swagger/go-swagger/codescan/schema.go
new file mode 100644
index 000000000..8c6723040
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/schema.go
@@ -0,0 +1,1155 @@
+package codescan
+
+import (
+ "encoding/json"
+ "fmt"
+ "go/ast"
+ "go/importer"
+ "go/types"
+ "log"
+ "os"
+ "reflect"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/go/ast/astutil"
+
+ "github.com/go-openapi/spec"
+ "github.com/pkg/errors"
+)
+
+func addExtension(ve *spec.VendorExtensible, key string, value interface{}) {
+ if os.Getenv("SWAGGER_GENERATE_EXTENSION") == "false" {
+ return
+ }
+
+ ve.AddExtension(key, value)
+}
+
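+// schemaTypable adapts a *spec.Schema to the swaggerTypable interface used by
+// the schema builders; level tracks how deeply the schema is nested inside
+// items or additionalProperties.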
+type schemaTypable struct {
+ schema *spec.Schema
+ level int
+}
+
+func (st schemaTypable) Typed(tpe, format string) {
+ st.schema.Typed(tpe, format)
+}
+
+func (st schemaTypable) SetRef(ref spec.Ref) {
+ st.schema.Ref = ref
+}
+
+func (st schemaTypable) Schema() *spec.Schema {
+ return st.schema
+}
+
+func (st schemaTypable) Items() swaggerTypable {
+ if st.schema.Items == nil {
+ st.schema.Items = new(spec.SchemaOrArray)
+ }
+ if st.schema.Items.Schema == nil {
+ st.schema.Items.Schema = new(spec.Schema)
+ }
+
+ st.schema.Typed("array", "")
+ return schemaTypable{st.schema.Items.Schema, st.level + 1}
+}
+
+func (st schemaTypable) AdditionalProperties() swaggerTypable {
+ if st.schema.AdditionalProperties == nil {
+ st.schema.AdditionalProperties = new(spec.SchemaOrBool)
+ }
+ if st.schema.AdditionalProperties.Schema == nil {
+ st.schema.AdditionalProperties.Schema = new(spec.Schema)
+ }
+
+ st.schema.Typed("object", "")
+ return schemaTypable{st.schema.AdditionalProperties.Schema, st.level + 1}
+}
+
+func (st schemaTypable) Level() int { return st.level }
+
+func (st schemaTypable) AddExtension(key string, value interface{}) {
+ addExtension(&st.schema.VendorExtensible, key, value)
+}
+
+func (st schemaTypable) WithEnum(values ...interface{}) {
+ st.schema.WithEnum(values...)
+}
+
+func (st schemaTypable) WithEnumDescription(desc string) {
+ if desc == "" {
+ return
+ }
+ st.AddExtension(extEnumDesc, desc)
+}
+
+type schemaValidations struct {
+ current *spec.Schema
+}
+
+func (sv schemaValidations) SetMaximum(val float64, exclusive bool) {
+ sv.current.Maximum = &val
+ sv.current.ExclusiveMaximum = exclusive
+}
+func (sv schemaValidations) SetMinimum(val float64, exclusive bool) {
+ sv.current.Minimum = &val
+ sv.current.ExclusiveMinimum = exclusive
+}
+func (sv schemaValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
+func (sv schemaValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
+func (sv schemaValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
+func (sv schemaValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
+func (sv schemaValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
+func (sv schemaValidations) SetPattern(val string) { sv.current.Pattern = val }
+func (sv schemaValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
+func (sv schemaValidations) SetDefault(val interface{}) { sv.current.Default = val }
+func (sv schemaValidations) SetExample(val interface{}) { sv.current.Example = val }
+func (sv schemaValidations) SetEnum(val string) {
+ sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Format: sv.current.Format, Type: sv.current.Type[0]})
+}
+
+type schemaBuilder struct {
+ ctx *scanCtx
+ decl *entityDecl
+ GoName string
+ Name string
+ annotated bool
+ discovered []*entityDecl
+ postDecls []*entityDecl
+}
+
+func (s *schemaBuilder) inferNames() (goName string, name string) {
+ if s.GoName != "" {
+ goName, name = s.GoName, s.Name
+ return
+ }
+
+ goName = s.decl.Ident.Name
+ name = goName
+ defer func() {
+ s.GoName = goName
+ s.Name = name
+ }()
+ if s.decl.Comments == nil {
+ return
+ }
+
+DECLS:
+ for _, cmt := range s.decl.Comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxModelOverride.FindStringSubmatch(ln)
+ if len(matches) > 0 {
+ s.annotated = true
+ }
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ name = matches[1]
+ break DECLS
+ }
+ }
+ }
+ return
+}
+
+func (s *schemaBuilder) Build(definitions map[string]spec.Schema) error {
+ s.inferNames()
+
+ schema := definitions[s.Name]
+ err := s.buildFromDecl(s.decl, &schema)
+ if err != nil {
+ return err
+ }
+ definitions[s.Name] = schema
+ return nil
+}
+
+func (s *schemaBuilder) buildFromDecl(_ *entityDecl, schema *spec.Schema) error {
+ // analyze doc comment for the model
+ sp := new(sectionedParser)
+ sp.setTitle = func(lines []string) { schema.Title = joinDropLast(lines) }
+ sp.setDescription = func(lines []string) {
+ schema.Description = joinDropLast(lines)
+ enumDesc := getEnumDesc(schema.VendorExtensible.Extensions)
+ if enumDesc != "" {
+ schema.Description += "\n" + enumDesc
+ }
+ }
+ if err := sp.Parse(s.decl.Comments); err != nil {
+ return err
+ }
+
+ // if the type is marked to ignore, just return
+ if sp.ignored {
+ return nil
+ }
+
+ switch tpe := s.decl.Type.Obj().Type().(type) {
+ case *types.Basic:
+ debugLog("basic: %v", tpe.Name())
+ case *types.Struct:
+ if err := s.buildFromStruct(s.decl, tpe, schema, make(map[string]string)); err != nil {
+ return err
+ }
+ case *types.Interface:
+ if err := s.buildFromInterface(s.decl, tpe, schema, make(map[string]string)); err != nil {
+ return err
+ }
+ case *types.Array:
+ debugLog("array: %v -> %v", s.decl.Ident.Name, tpe.Elem().String())
+ case *types.Slice:
+ debugLog("slice: %v -> %v", s.decl.Ident.Name, tpe.Elem().String())
+ case *types.Map:
+ debugLog("map: %v -> [%v]%v", s.decl.Ident.Name, tpe.Key().String(), tpe.Elem().String())
+ case *types.Named:
+ o := tpe.Obj()
+ if o != nil {
+ debugLog("got the named type object: %s.%s | isAlias: %t | exported: %t", o.Pkg().Path(), o.Name(), o.IsAlias(), o.Exported())
+ if o.Pkg().Name() == "time" && o.Name() == "Time" {
+ schema.Typed("string", "date-time")
+ return nil
+ }
+
+ ps := schemaTypable{schema, 0}
+ for {
+ ti := s.decl.Pkg.TypesInfo.Types[s.decl.Spec.Type]
+ if ti.IsBuiltin() {
+ break
+ }
+ if ti.IsType() {
+ if err := s.buildFromType(ti.Type, ps); err != nil {
+ return err
+ }
+ break
+ }
+ }
+ }
+ default:
+ log.Printf("WARNING: Missing parser for a %T, skipping model: %s\n", tpe, s.Name)
+ return nil
+ }
+
+ if schema.Ref.String() == "" {
+ if s.Name != s.GoName {
+ addExtension(&schema.VendorExtensible, "x-go-name", s.GoName)
+ }
+ addExtension(&schema.VendorExtensible, "x-go-package", s.decl.Type.Obj().Pkg().Path())
+ }
+ return nil
+}
+
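+// buildFromTextMarshal renders a type implementing encoding.TextMarshaler as a
+// string schema, special-casing the error builtin, UUID-named types, time.Time
+// (date-time), json.RawMessage (object) and swagger:strfmt annotations.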
+func (s *schemaBuilder) buildFromTextMarshal(tpe types.Type, tgt swaggerTypable) error {
+ if typePtr, ok := tpe.(*types.Pointer); ok {
+ return s.buildFromTextMarshal(typePtr.Elem(), tgt)
+ }
+
+ typeNamed, ok := tpe.(*types.Named)
+ if !ok {
+ tgt.Typed("string", "")
+ return nil
+ }
+
+ tio := typeNamed.Obj()
+ if tio.Pkg() == nil && tio.Name() == "error" {
+ return swaggerSchemaForType(tio.Name(), tgt)
+ }
+
+ debugLog("named refined type %s.%s", tio.Pkg().Path(), tio.Name())
+ pkg, found := s.ctx.PkgForType(tpe)
+
+ if strings.ToLower(tio.Name()) == "uuid" {
+ tgt.Typed("string", "uuid")
+ return nil
+ }
+
+ if !found {
+ // this must be a builtin
+ debugLog("skipping because package is nil: %s", tpe.String())
+ return nil
+ }
+ if pkg.Name == "time" && tio.Name() == "Time" {
+ tgt.Typed("string", "date-time")
+ return nil
+ }
+ if pkg.PkgPath == "encoding/json" && tio.Name() == "RawMessage" {
+ tgt.Typed("object", "")
+ return nil
+ }
+ cmt, hasComments := s.ctx.FindComments(pkg, tio.Name())
+ if !hasComments {
+ cmt = new(ast.CommentGroup)
+ }
+
+ if sfnm, isf := strfmtName(cmt); isf {
+ tgt.Typed("string", sfnm)
+ return nil
+ }
+
+ tgt.Typed("string", "")
+ return nil
+}
+
+func (s *schemaBuilder) buildFromType(tpe types.Type, tgt swaggerTypable) error {
+ pkg, err := importer.Default().Import("encoding")
+ if err != nil {
+ return nil
+ }
+ ifc := pkg.Scope().Lookup("TextMarshaler").Type().Underlying().(*types.Interface)
+
+ // check if the type implements encoding.TextMarshaler interface
+ isTextMarshaler := types.Implements(tpe, ifc)
+ if isTextMarshaler {
+ return s.buildFromTextMarshal(tpe, tgt)
+ }
+
+ switch titpe := tpe.(type) {
+ case *types.Basic:
+ return swaggerSchemaForType(titpe.String(), tgt)
+ case *types.Pointer:
+ return s.buildFromType(titpe.Elem(), tgt)
+ case *types.Struct:
+ return s.buildFromStruct(s.decl, titpe, tgt.Schema(), make(map[string]string))
+ case *types.Interface:
+ return s.buildFromInterface(s.decl, titpe, tgt.Schema(), make(map[string]string))
+ case *types.Slice:
+ return s.buildFromType(titpe.Elem(), tgt.Items())
+ case *types.Array:
+ return s.buildFromType(titpe.Elem(), tgt.Items())
+ case *types.Map:
+ // debugLog("map: %v -> [%v]%v", fld.Name(), ftpe.Key().String(), ftpe.Elem().String())
+		// Check that the key is a string type (or implements encoding.TextMarshaler);
+		// only such maps can be expressed as additionalProperties. Other maps are skipped.
+ sch := tgt.Schema()
+ if sch == nil {
+ return errors.New("items doesn't support maps")
+ }
+ eleProp := schemaTypable{sch, tgt.Level()}
+ key := titpe.Key()
+ isTextMarshaler := types.Implements(key, ifc)
+ if key.Underlying().String() == "string" || isTextMarshaler {
+ return s.buildFromType(titpe.Elem(), eleProp.AdditionalProperties())
+ }
+ case *types.Named:
+ tio := titpe.Obj()
+ if tio.Pkg() == nil && tio.Name() == "error" {
+ return swaggerSchemaForType(tio.Name(), tgt)
+ }
+ debugLog("named refined type %s.%s", tio.Pkg().Path(), tio.Name())
+ pkg, found := s.ctx.PkgForType(tpe)
+ if !found {
+ // this must be a builtin
+ debugLog("skipping because package is nil: %s", tpe.String())
+ return nil
+ }
+ if pkg.Name == "time" && tio.Name() == "Time" {
+ tgt.Typed("string", "date-time")
+ return nil
+ }
+ if pkg.PkgPath == "encoding/json" && tio.Name() == "RawMessage" {
+ tgt.Typed("object", "")
+ return nil
+ }
+ cmt, hasComments := s.ctx.FindComments(pkg, tio.Name())
+ if !hasComments {
+ cmt = new(ast.CommentGroup)
+ }
+
+ if typeName, ok := typeName(cmt); ok {
+ _ = swaggerSchemaForType(typeName, tgt)
+ return nil
+ }
+
+ switch utitpe := tpe.Underlying().(type) {
+ case *types.Struct:
+
+ if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok {
+ if decl.Type.Obj().Pkg().Path() == "time" && decl.Type.Obj().Name() == "Time" {
+ tgt.Typed("string", "date-time")
+ return nil
+ }
+ if sfnm, isf := strfmtName(cmt); isf {
+ tgt.Typed("string", sfnm)
+ return nil
+ }
+ if typeName, ok := typeName(cmt); ok {
+ _ = swaggerSchemaForType(typeName, tgt)
+ return nil
+ }
+
+ return s.makeRef(decl, tgt)
+ }
+ case *types.Interface:
+ if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok {
+ return s.makeRef(decl, tgt)
+ }
+ case *types.Basic:
+ if sfnm, isf := strfmtName(cmt); isf {
+ tgt.Typed("string", sfnm)
+ return nil
+ }
+
+ if enumName, ok := enumName(cmt); ok {
+ enumValues, enumDesces, _ := s.ctx.FindEnumValues(pkg, enumName)
+ if len(enumValues) > 0 {
+ tgt.WithEnum(enumValues...)
+ enumTypeName := reflect.TypeOf(enumValues[0]).String()
+ _ = swaggerSchemaForType(enumTypeName, tgt)
+ }
+ if len(enumDesces) > 0 {
+ tgt.WithEnumDescription(strings.Join(enumDesces, "\n"))
+ }
+ return nil
+ }
+
+ if defaultName, ok := defaultName(cmt); ok {
+ debugLog(defaultName)
+ return nil
+ }
+
+ if typeName, ok := typeName(cmt); ok {
+ _ = swaggerSchemaForType(typeName, tgt)
+ return nil
+
+ }
+
+ if isAliasParam(tgt) || aliasParam(cmt) {
+ err := swaggerSchemaForType(utitpe.Name(), tgt)
+ if err == nil {
+ return nil
+ }
+ }
+ if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok {
+ return s.makeRef(decl, tgt)
+ }
+ return swaggerSchemaForType(utitpe.String(), tgt)
+ case *types.Array:
+ if sfnm, isf := strfmtName(cmt); isf {
+ if sfnm == "byte" {
+ tgt.Typed("string", sfnm)
+ return nil
+ }
+ if sfnm == "bsonobjectid" {
+ tgt.Typed("string", sfnm)
+ return nil
+ }
+
+ tgt.Items().Typed("string", sfnm)
+ return nil
+ }
+ if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok {
+ return s.makeRef(decl, tgt)
+ }
+ return s.buildFromType(utitpe.Elem(), tgt.Items())
+ case *types.Slice:
+ if sfnm, isf := strfmtName(cmt); isf {
+ if sfnm == "byte" {
+ tgt.Typed("string", sfnm)
+ return nil
+ }
+ tgt.Items().Typed("string", sfnm)
+ return nil
+ }
+ if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok {
+ return s.makeRef(decl, tgt)
+ }
+ return s.buildFromType(utitpe.Elem(), tgt.Items())
+ case *types.Map:
+ if decl, ok := s.ctx.FindModel(tio.Pkg().Path(), tio.Name()); ok {
+ return s.makeRef(decl, tgt)
+ }
+ return nil
+
+ default:
+ log.Printf("WARNING: can't figure out object type for named type (%T): %v [alias: %t]", tpe.Underlying(), tpe.Underlying(), titpe.Obj().IsAlias())
+
+ return nil
+ }
+ default:
+ panic(fmt.Sprintf("WARNING: can't determine refined type %s (%T)", titpe.String(), titpe))
+ }
+
+ return nil
+}
+
+func (s *schemaBuilder) buildFromInterface(decl *entityDecl, it *types.Interface, schema *spec.Schema, seen map[string]string) error {
+ if it.Empty() {
+ return nil
+ }
+
+ var (
+ tgt *spec.Schema
+ hasAllOf bool
+ )
+
+ var flist []*ast.Field
+ if specType, ok := decl.Spec.Type.(*ast.InterfaceType); ok {
+ flist = make([]*ast.Field, it.NumEmbeddeds()+it.NumExplicitMethods())
+ copy(flist, specType.Methods.List)
+ // for i := range specType.Methods.List {
+ // flist[i] = specType.Methods.List[i]
+ // }
+ }
+
+ // First collect the embedded interfaces
+ // create refs when the embedded interface is decorated with an allOf annotation
+ for i := 0; i < it.NumEmbeddeds(); i++ {
+ fld := it.EmbeddedType(i)
+
+ switch ftpe := fld.(type) {
+ case *types.Named:
+ o := ftpe.Obj()
+ var afld *ast.Field
+ for _, an := range flist {
+ if len(an.Names) != 0 {
+ continue
+ }
+
+ tpp := decl.Pkg.TypesInfo.Types[an.Type]
+ if tpp.Type.String() != o.Type().String() {
+ continue
+ }
+
+ debugLog("maybe interface field %s: %s(%T)", o.Name(), o.Type().String(), o.Type())
+ afld = an
+ break
+ }
+
+ if afld == nil {
+ debugLog("can't find source associated with %s for %s", fld.String(), it.String())
+ continue
+ }
+
+ // if the field is annotated with swagger:ignore, ignore it
+ if ignored(afld.Doc) {
+ continue
+ }
+
+ if !allOfMember(afld.Doc) {
+ var newSch spec.Schema
+ if err := s.buildEmbedded(o.Type(), &newSch, seen); err != nil {
+ return err
+ }
+ schema.AllOf = append(schema.AllOf, newSch)
+ hasAllOf = true
+ continue
+ }
+
+ hasAllOf = true
+ if tgt == nil {
+ tgt = &spec.Schema{}
+ }
+ var newSch spec.Schema
+		// when the embedded interface is annotated with swagger:allOf it will be used as an allOf property,
+		// otherwise its members will just be included as normal properties
+ if err := s.buildAllOf(o.Type(), &newSch); err != nil {
+ return err
+ }
+ if afld.Doc != nil {
+ for _, cmt := range afld.Doc.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxAllOf.FindStringSubmatch(ln)
+ ml := len(matches)
+ if ml > 1 {
+ mv := matches[ml-1]
+ if mv != "" {
+ schema.AddExtension("x-class", mv)
+ }
+ }
+ }
+ }
+ }
+
+ schema.AllOf = append(schema.AllOf, newSch)
+ default:
+ log.Printf("WARNING: can't figure out object type for allOf named type (%T): %v", ftpe, ftpe.Underlying())
+ }
+ debugLog("got embedded interface: %s {%T}", fld.String(), fld)
+ }
+
+ if tgt == nil {
+ tgt = schema
+ }
+	// We can finally build the actual schema for the interface
+ if tgt.Properties == nil {
+ tgt.Properties = make(map[string]spec.Schema)
+ }
+ tgt.Typed("object", "")
+
+ for i := 0; i < it.NumExplicitMethods(); i++ {
+ fld := it.ExplicitMethod(i)
+ if !fld.Exported() {
+ continue
+ }
+ sig, isSignature := fld.Type().(*types.Signature)
+ if !isSignature {
+ continue
+ }
+ if sig.Params().Len() > 0 {
+ continue
+ }
+ if sig.Results() == nil || sig.Results().Len() != 1 {
+ continue
+ }
+
+ var afld *ast.Field
+ ans, _ := astutil.PathEnclosingInterval(decl.File, fld.Pos(), fld.Pos())
+ // debugLog("got %d nodes (exact: %t)", len(ans), isExact)
+ for _, an := range ans {
+ at, valid := an.(*ast.Field)
+ if !valid {
+ continue
+ }
+
+ debugLog("maybe interface field %s: %s(%T)", fld.Name(), fld.Type().String(), fld.Type())
+ afld = at
+ break
+ }
+
+ if afld == nil {
+ debugLog("can't find source associated with %s for %s", fld.String(), it.String())
+ continue
+ }
+
+ // if the field is annotated with swagger:ignore, ignore it
+ if ignored(afld.Doc) {
+ continue
+ }
+
+ name := fld.Name()
+ if afld.Doc != nil {
+ for _, cmt := range afld.Doc.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxName.FindStringSubmatch(ln)
+ ml := len(matches)
+ if ml > 1 {
+ name = matches[ml-1]
+ }
+ }
+ }
+ }
+ ps := tgt.Properties[name]
+ if err := s.buildFromType(sig.Results().At(0).Type(), schemaTypable{&ps, 0}); err != nil {
+ return err
+ }
+ if sfName, isStrfmt := strfmtName(afld.Doc); isStrfmt {
+ ps.Typed("string", sfName)
+ ps.Ref = spec.Ref{}
+ ps.Items = nil
+ }
+
+ if err := s.createParser(name, tgt, &ps, afld).Parse(afld.Doc); err != nil {
+ return err
+ }
+
+ if ps.Ref.String() == "" && name != fld.Name() {
+ ps.AddExtension("x-go-name", fld.Name())
+ }
+
+ seen[name] = fld.Name()
+ tgt.Properties[name] = ps
+ }
+
+ if tgt == nil {
+ return nil
+ }
+ if hasAllOf && len(tgt.Properties) > 0 {
+ schema.AllOf = append(schema.AllOf, *tgt)
+ }
+ for k := range tgt.Properties {
+ if _, ok := seen[k]; !ok {
+ delete(tgt.Properties, k)
+ }
+ }
+ return nil
+}
+
+func (s *schemaBuilder) buildFromStruct(decl *entityDecl, st *types.Struct, schema *spec.Schema, seen map[string]string) error {
+	cmt, hasComments := s.ctx.FindComments(decl.Pkg, decl.Type.Obj().Name())
+ if !hasComments {
+ cmt = new(ast.CommentGroup)
+ }
+ if typeName, ok := typeName(cmt); ok {
+ _ = swaggerSchemaForType(typeName, schemaTypable{schema: schema})
+ return nil
+ }
+	// First check for allOf schemas
+ var tgt *spec.Schema
+ hasAllOf := false
+
+ for i := 0; i < st.NumFields(); i++ {
+ fld := st.Field(i)
+ if !fld.Anonymous() {
+ debugLog("skipping field %q for allOf scan because not anonymous", fld.Name())
+ continue
+ }
+ tg := st.Tag(i)
+
+ debugLog("maybe allof field(%t) %s: %s (%T) [%q](anon: %t, embedded: %t)", fld.IsField(), fld.Name(), fld.Type().String(), fld.Type(), tg, fld.Anonymous(), fld.Embedded())
+ var afld *ast.Field
+ ans, _ := astutil.PathEnclosingInterval(decl.File, fld.Pos(), fld.Pos())
+ // debugLog("got %d nodes (exact: %t)", len(ans), isExact)
+ for _, an := range ans {
+ at, valid := an.(*ast.Field)
+ if !valid {
+ continue
+ }
+
+ debugLog("maybe allof field %s: %s(%T) [%q]", fld.Name(), fld.Type().String(), fld.Type(), tg)
+ afld = at
+ break
+ }
+
+ if afld == nil {
+ debugLog("can't find source associated with %s for %s", fld.String(), st.String())
+ continue
+ }
+
+ // if the field is annotated with swagger:ignore, ignore it
+ if ignored(afld.Doc) {
+ continue
+ }
+
+ _, ignore, _, err := parseJSONTag(afld)
+ if err != nil {
+ return err
+ }
+ if ignore {
+ continue
+ }
+
+ if !allOfMember(afld.Doc) {
+ if tgt == nil {
+ tgt = schema
+ }
+ if err := s.buildEmbedded(fld.Type(), tgt, seen); err != nil {
+ return err
+ }
+ continue
+ }
+ // if this created an allOf property then we have to rejig the schema var
+ // because all the fields collected that aren't from embedded structs should go in
+ // their own proper schema
+ // first process embedded structs in order of embedding
+ hasAllOf = true
+ if tgt == nil {
+ tgt = &spec.Schema{}
+ }
+ var newSch spec.Schema
+ // when the embedded struct is annotated with swagger:allOf it will be used as allOf property
+ // otherwise the fields will just be included as normal properties
+ if err := s.buildAllOf(fld.Type(), &newSch); err != nil {
+ return err
+ }
+
+ if afld.Doc != nil {
+ for _, cmt := range afld.Doc.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxAllOf.FindStringSubmatch(ln)
+ ml := len(matches)
+ if ml > 1 {
+ mv := matches[ml-1]
+ if mv != "" {
+ schema.AddExtension("x-class", mv)
+ }
+ }
+ }
+ }
+ }
+
+ schema.AllOf = append(schema.AllOf, newSch)
+ }
+
+ if tgt == nil {
+ if schema != nil {
+ tgt = schema
+ } else {
+ tgt = &spec.Schema{}
+ }
+ }
+ // We can finally build the actual schema for the struct
+ if tgt.Properties == nil {
+ tgt.Properties = make(map[string]spec.Schema)
+ }
+ tgt.Typed("object", "")
+
+ for i := 0; i < st.NumFields(); i++ {
+ fld := st.Field(i)
+ tg := st.Tag(i)
+
+ if fld.Embedded() {
+ continue
+ }
+
+ if !fld.Exported() {
+ debugLog("skipping field %s because it's not exported", fld.Name())
+ continue
+ }
+
+ var afld *ast.Field
+ ans, _ := astutil.PathEnclosingInterval(decl.File, fld.Pos(), fld.Pos())
+ // debugLog("got %d nodes (exact: %t)", len(ans), isExact)
+ for _, an := range ans {
+ at, valid := an.(*ast.Field)
+ if !valid {
+ continue
+ }
+
+ debugLog("field %s: %s(%T) [%q] ==> %s", fld.Name(), fld.Type().String(), fld.Type(), tg, at.Doc.Text())
+ afld = at
+ break
+ }
+
+ if afld == nil {
+ debugLog("can't find source associated with %s", fld.String())
+ continue
+ }
+
+ // if the field is annotated with swagger:ignore, ignore it
+ if ignored(afld.Doc) {
+ continue
+ }
+
+ name, ignore, isString, err := parseJSONTag(afld)
+ if err != nil {
+ return err
+ }
+ if ignore {
+ for seenTagName, seenFieldName := range seen {
+ if seenFieldName == fld.Name() {
+ delete(tgt.Properties, seenTagName)
+ break
+ }
+ }
+ continue
+ }
+
+ ps := tgt.Properties[name]
+ if err = s.buildFromType(fld.Type(), schemaTypable{&ps, 0}); err != nil {
+ return err
+ }
+ if isString {
+ ps.Typed("string", ps.Format)
+ ps.Ref = spec.Ref{}
+ ps.Items = nil
+ }
+ if sfName, isStrfmt := strfmtName(afld.Doc); isStrfmt {
+ ps.Typed("string", sfName)
+ ps.Ref = spec.Ref{}
+ ps.Items = nil
+ }
+
+ if err = s.createParser(name, tgt, &ps, afld).Parse(afld.Doc); err != nil {
+ return err
+ }
+
+ if ps.Ref.String() == "" && name != fld.Name() {
+ addExtension(&ps.VendorExtensible, "x-go-name", fld.Name())
+ }
+
+		// There are 2 cases:
+		// 1. a json tag overrides the field with a different property name
+		// 2. a json tag removes the field altogether
+		// so we need to remember both the tag name and the field name
+ seen[name] = fld.Name()
+ tgt.Properties[name] = ps
+ }
+
+ if tgt == nil {
+ return nil
+ }
+ if hasAllOf && len(tgt.Properties) > 0 {
+ schema.AllOf = append(schema.AllOf, *tgt)
+ }
+ for k := range tgt.Properties {
+ if _, ok := seen[k]; !ok {
+ delete(tgt.Properties, k)
+ }
+ }
+ return nil
+}
+
+func (s *schemaBuilder) buildAllOf(tpe types.Type, schema *spec.Schema) error {
+ debugLog("allOf %s", tpe.Underlying())
+ switch ftpe := tpe.(type) {
+ case *types.Pointer:
+ return s.buildAllOf(ftpe.Elem(), schema)
+ case *types.Named:
+ switch utpe := ftpe.Underlying().(type) {
+ case *types.Struct:
+ decl, found := s.ctx.FindModel(ftpe.Obj().Pkg().Path(), ftpe.Obj().Name())
+ if found {
+ if ftpe.Obj().Pkg().Path() == "time" && ftpe.Obj().Name() == "Time" {
+ schema.Typed("string", "date-time")
+ return nil
+ }
+ if sfnm, isf := strfmtName(decl.Comments); isf {
+ schema.Typed("string", sfnm)
+ return nil
+ }
+ if decl.HasModelAnnotation() {
+ return s.makeRef(decl, schemaTypable{schema, 0})
+ }
+ return s.buildFromStruct(decl, utpe, schema, make(map[string]string))
+ }
+ return errors.Errorf("can't find source file for struct: %s", ftpe.String())
+ case *types.Interface:
+ decl, found := s.ctx.FindModel(ftpe.Obj().Pkg().Path(), ftpe.Obj().Name())
+ if found {
+ if sfnm, isf := strfmtName(decl.Comments); isf {
+ schema.Typed("string", sfnm)
+ return nil
+ }
+ if decl.HasModelAnnotation() {
+ return s.makeRef(decl, schemaTypable{schema, 0})
+ }
+ return s.buildFromInterface(decl, utpe, schema, make(map[string]string))
+ }
+ return errors.Errorf("can't find source file for interface: %s", ftpe.String())
+ default:
+ log.Printf("WARNING: can't figure out object type for allOf named type (%T): %v", ftpe, ftpe.Underlying())
+ return fmt.Errorf("unable to locate source file for allOf %s", utpe.String())
+ }
+ default:
+ log.Printf("WARNING: Missing allOf parser for a %T, skipping field", ftpe)
+ return fmt.Errorf("unable to resolve allOf member for: %v", ftpe)
+ }
+}
+
+func (s *schemaBuilder) buildEmbedded(tpe types.Type, schema *spec.Schema, seen map[string]string) error {
+ debugLog("embedded %s", tpe.Underlying())
+ switch ftpe := tpe.(type) {
+ case *types.Pointer:
+ return s.buildEmbedded(ftpe.Elem(), schema, seen)
+ case *types.Named:
+ debugLog("embedded named type: %T", ftpe.Underlying())
+ switch utpe := ftpe.Underlying().(type) {
+ case *types.Struct:
+ decl, found := s.ctx.FindModel(ftpe.Obj().Pkg().Path(), ftpe.Obj().Name())
+ if found {
+ return s.buildFromStruct(decl, utpe, schema, seen)
+ }
+ return errors.Errorf("can't find source file for struct: %s", ftpe.String())
+ case *types.Interface:
+ decl, found := s.ctx.FindModel(ftpe.Obj().Pkg().Path(), ftpe.Obj().Name())
+ if found {
+ return s.buildFromInterface(decl, utpe, schema, seen)
+ }
+			return errors.Errorf("can't find source file for interface: %s", ftpe.String())
+ default:
+ log.Printf("WARNING: can't figure out object type for embedded named type (%T): %v", ftpe, ftpe.Underlying())
+ }
+ default:
+ log.Printf("WARNING: Missing embedded parser for a %T, skipping model\n", ftpe)
+ return nil
+ }
+ return nil
+}
+
+func (s *schemaBuilder) makeRef(decl *entityDecl, prop swaggerTypable) error {
+ nm, _ := decl.Names()
+ ref, err := spec.NewRef("#/definitions/" + nm)
+ if err != nil {
+ return err
+ }
+ prop.SetRef(ref)
+ s.postDecls = append(s.postDecls, decl)
+ return nil
+}
+
+func (s *schemaBuilder) createParser(nm string, schema, ps *spec.Schema, fld *ast.Field) *sectionedParser {
+ sp := new(sectionedParser)
+
+ schemeType, err := ps.Type.MarshalJSON()
+ if err != nil {
+ return nil
+ }
+
+ if ps.Ref.String() == "" {
+ sp.setDescription = func(lines []string) {
+ ps.Description = joinDropLast(lines)
+ enumDesc := getEnumDesc(ps.VendorExtensible.Extensions)
+ if enumDesc != "" {
+ ps.Description += "\n" + enumDesc
+ }
+ }
+ sp.taggers = []tagParser{
+ newSingleLineTagParser("maximum", &setMaximum{schemaValidations{ps}, rxf(rxMaximumFmt, "")}),
+ newSingleLineTagParser("minimum", &setMinimum{schemaValidations{ps}, rxf(rxMinimumFmt, "")}),
+ newSingleLineTagParser("multipleOf", &setMultipleOf{schemaValidations{ps}, rxf(rxMultipleOfFmt, "")}),
+ newSingleLineTagParser("minLength", &setMinLength{schemaValidations{ps}, rxf(rxMinLengthFmt, "")}),
+ newSingleLineTagParser("maxLength", &setMaxLength{schemaValidations{ps}, rxf(rxMaxLengthFmt, "")}),
+ newSingleLineTagParser("pattern", &setPattern{schemaValidations{ps}, rxf(rxPatternFmt, "")}),
+ newSingleLineTagParser("minItems", &setMinItems{schemaValidations{ps}, rxf(rxMinItemsFmt, "")}),
+ newSingleLineTagParser("maxItems", &setMaxItems{schemaValidations{ps}, rxf(rxMaxItemsFmt, "")}),
+ newSingleLineTagParser("unique", &setUnique{schemaValidations{ps}, rxf(rxUniqueFmt, "")}),
+ newSingleLineTagParser("enum", &setEnum{schemaValidations{ps}, rxf(rxEnumFmt, "")}),
+ newSingleLineTagParser("default", &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxDefaultFmt, "")}),
+ newSingleLineTagParser("type", &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxDefaultFmt, "")}),
+ newSingleLineTagParser("example", &setExample{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxExampleFmt, "")}),
+ newSingleLineTagParser("required", &setRequiredSchema{schema, nm}),
+ newSingleLineTagParser("readOnly", &setReadOnlySchema{ps}),
+ newSingleLineTagParser("discriminator", &setDiscriminator{schema, nm}),
+ newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, schemaVendorExtensibleSetter(ps)), true),
+ }
+
+ itemsTaggers := func(items *spec.Schema, level int) []tagParser {
+ schemeType, err := items.Type.MarshalJSON()
+ if err != nil {
+ return nil
+ }
+			// the expression is 1-indexed, not 0-indexed
+ itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
+ return []tagParser{
+ newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{schemaValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{schemaValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{schemaValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{schemaValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{schemaValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{schemaValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{schemaValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{schemaValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{schemaValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{schemaValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
+ }
+ }
+
+ var parseArrayTypes func(expr ast.Expr, items *spec.SchemaOrArray, level int) ([]tagParser, error)
+ parseArrayTypes = func(expr ast.Expr, items *spec.SchemaOrArray, level int) ([]tagParser, error) {
+ if items == nil || items.Schema == nil {
+ return []tagParser{}, nil
+ }
+ switch iftpe := expr.(type) {
+ case *ast.ArrayType:
+ eleTaggers := itemsTaggers(items.Schema, level)
+ sp.taggers = append(eleTaggers, sp.taggers...)
+ otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Schema.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ case *ast.Ident:
+ taggers := []tagParser{}
+ if iftpe.Obj == nil {
+ taggers = itemsTaggers(items.Schema, level)
+ }
+ otherTaggers, err := parseArrayTypes(expr, items.Schema.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return append(taggers, otherTaggers...), nil
+ case *ast.StarExpr:
+ otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ default:
+			return nil, fmt.Errorf("unknown element type for field %q", nm)
+ }
+ }
+ // check if this is a primitive, if so parse the validations from the
+ // doc comments of the slice declaration.
+ if ftped, ok := fld.Type.(*ast.ArrayType); ok {
+ taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
+ if err != nil {
+ return sp
+ }
+ sp.taggers = append(taggers, sp.taggers...)
+ }
+
+ } else {
+ sp.taggers = []tagParser{
+ newSingleLineTagParser("required", &setRequiredSchema{schema, nm}),
+ }
+ }
+ return sp
+}
+
+func schemaVendorExtensibleSetter(meta *spec.Schema) func(json.RawMessage) error {
+ return func(jsonValue json.RawMessage) error {
+ var jsonData spec.Extensions
+ err := json.Unmarshal(jsonValue, &jsonData)
+ if err != nil {
+ return err
+ }
+ for k := range jsonData {
+ if !rxAllowedExtensions.MatchString(k) {
+				return fmt.Errorf("invalid schema extension name, should start with `x-`: %s", k)
+ }
+ }
+ meta.Extensions = jsonData
+ return nil
+ }
+}
+
+type tagOptions []string
+
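+// Contain reports whether the given option appears among the tag options;
+// index 0 is skipped because it holds the name, not an option.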
+func (t tagOptions) Contain(option string) bool {
+ for i := 1; i < len(t); i++ {
+ if t[i] == option {
+ return true
+ }
+ }
+ return false
+}
+
+func (t tagOptions) Name() string {
+ return t[0]
+}
+
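+// parseJSONTag extracts the effective property name and the ignore/",string"
+// flags from a field's json struct tag. For example (illustrative), a field
+// tagged `json:"title,omitempty"` yields the name "title", while `json:"-"`
+// marks the field as ignored.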
+func parseJSONTag(field *ast.Field) (name string, ignore bool, isString bool, err error) {
+ if len(field.Names) > 0 {
+ name = field.Names[0].Name
+ }
+ if field.Tag == nil || len(strings.TrimSpace(field.Tag.Value)) == 0 {
+ return name, false, false, nil
+ }
+
+ tv, err := strconv.Unquote(field.Tag.Value)
+ if err != nil {
+ return name, false, false, err
+ }
+
+ if strings.TrimSpace(tv) != "" {
+ st := reflect.StructTag(tv)
+ jsonParts := tagOptions(strings.Split(st.Get("json"), ","))
+
+ if jsonParts.Contain("string") {
+ // Need to check if the field type is a scalar. Otherwise, the
+ // ",string" directive doesn't apply.
+ isString = isFieldStringable(field.Type)
+ }
+
+ switch jsonParts.Name() {
+ case "-":
+ return name, true, isString, nil
+ case "":
+ return name, false, isString, nil
+ default:
+ return jsonParts.Name(), false, isString, nil
+ }
+ }
+ return name, false, false, nil
+}
+
+// isFieldStringable checks whether the field type is a scalar. If the field type
+// is an *ast.StarExpr (a pointer), it checks whether the pointee is a scalar.
+// Otherwise, the ",string" directive doesn't apply.
+func isFieldStringable(tpe ast.Expr) bool {
+ if ident, ok := tpe.(*ast.Ident); ok {
+ switch ident.Name {
+ case "int", "int8", "int16", "int32", "int64",
+ "uint", "uint8", "uint16", "uint32", "uint64",
+ "float64", "string", "bool":
+ return true
+ }
+ } else if starExpr, ok := tpe.(*ast.StarExpr); ok {
+ return isFieldStringable(starExpr.X)
+ } else {
+ return false
+ }
+ return false
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/codescan/spec.go b/vendor/github.com/go-swagger/go-swagger/codescan/spec.go
new file mode 100644
index 000000000..726787c11
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/codescan/spec.go
@@ -0,0 +1,258 @@
+package codescan
+
+import (
+ "go/ast"
+
+ "github.com/go-openapi/spec"
+)
+
+func newSpecBuilder(input *spec.Swagger, sc *scanCtx, scanModels bool) *specBuilder {
+ if input == nil {
+ input = new(spec.Swagger)
+ input.Swagger = "2.0"
+ }
+
+ if input.Paths == nil {
+ input.Paths = new(spec.Paths)
+ }
+ if input.Definitions == nil {
+ input.Definitions = make(map[string]spec.Schema)
+ }
+ if input.Responses == nil {
+ input.Responses = make(map[string]spec.Response)
+ }
+ if input.Extensions == nil {
+ input.Extensions = make(spec.Extensions)
+ }
+
+ return &specBuilder{
+ ctx: sc,
+ input: input,
+ scanModels: scanModels,
+ operations: collectOperationsFromInput(input),
+ definitions: input.Definitions,
+ responses: input.Responses,
+ }
+}
+
+type specBuilder struct {
+ scanModels bool
+ input *spec.Swagger
+ ctx *scanCtx
+ discovered []*entityDecl
+ definitions map[string]spec.Schema
+ responses map[string]spec.Response
+ operations map[string]*spec.Operation
+}
+
+func (s *specBuilder) Build() (*spec.Swagger, error) {
+ if err := s.buildModels(); err != nil {
+ return nil, err
+ }
+
+ if err := s.buildParameters(); err != nil {
+ return nil, err
+ }
+
+	if err := s.buildResponses(); err != nil {
+ return nil, err
+ }
+
+ // build definitions dictionary
+ if err := s.buildDiscovered(); err != nil {
+ return nil, err
+ }
+
+ if err := s.buildRoutes(); err != nil {
+ return nil, err
+ }
+
+ if err := s.buildOperations(); err != nil {
+ return nil, err
+ }
+
+ if err := s.buildMeta(); err != nil {
+ return nil, err
+ }
+
+ if s.input.Swagger == "" {
+ s.input.Swagger = "2.0"
+ }
+
+ return s.input, nil
+}
+
+func (s *specBuilder) buildDiscovered() error {
+ // loop over discovered until all the items are in definitions
+ keepGoing := len(s.discovered) > 0
+ for keepGoing {
+ var queue []*entityDecl
+ for _, d := range s.discovered {
+ nm, _ := d.Names()
+ if _, ok := s.definitions[nm]; !ok {
+ queue = append(queue, d)
+ }
+ }
+ s.discovered = nil
+ for _, sd := range queue {
+ if err := s.buildDiscoveredSchema(sd); err != nil {
+ return err
+ }
+ }
+ keepGoing = len(s.discovered) > 0
+ }
+
+ return nil
+}
+
+func (s *specBuilder) buildDiscoveredSchema(decl *entityDecl) error {
+ sb := &schemaBuilder{
+ ctx: s.ctx,
+ decl: decl,
+ discovered: s.discovered,
+ }
+ if err := sb.Build(s.definitions); err != nil {
+ return err
+ }
+ s.discovered = append(s.discovered, sb.postDecls...)
+ return nil
+}
+
+func (s *specBuilder) buildMeta() error {
+ // build swagger object
+ for _, decl := range s.ctx.app.Meta {
+ if err := newMetaParser(s.input).Parse(decl.Comments); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (s *specBuilder) buildOperations() error {
+ for _, pp := range s.ctx.app.Operations {
+ ob := &operationsBuilder{
+ operations: s.operations,
+ ctx: s.ctx,
+ path: pp,
+ }
+ if err := ob.Build(s.input.Paths); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (s *specBuilder) buildRoutes() error {
+ // build paths dictionary
+ for _, pp := range s.ctx.app.Routes {
+ rb := &routesBuilder{
+ ctx: s.ctx,
+ route: pp,
+ responses: s.responses,
+ operations: s.operations,
+ definitions: s.definitions,
+ }
+ if err := rb.Build(s.input.Paths); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (s *specBuilder) buildResponses() error {
+ // build responses dictionary
+ for _, decl := range s.ctx.app.Responses {
+ rb := &responseBuilder{
+ ctx: s.ctx,
+ decl: decl,
+ }
+ if err := rb.Build(s.responses); err != nil {
+ return err
+ }
+ s.discovered = append(s.discovered, rb.postDecls...)
+ }
+ return nil
+}
+
+func (s *specBuilder) buildParameters() error {
+ // build parameters dictionary
+ for _, decl := range s.ctx.app.Parameters {
+ pb := &parameterBuilder{
+ ctx: s.ctx,
+ decl: decl,
+ }
+ if err := pb.Build(s.operations); err != nil {
+ return err
+ }
+ s.discovered = append(s.discovered, pb.postDecls...)
+ }
+ return nil
+}
+
+func (s *specBuilder) buildModels() error {
+ // build models dictionary
+ if !s.scanModels {
+ return nil
+ }
+
+ for _, decl := range s.ctx.app.Models {
+ if err := s.buildDiscoveredSchema(decl); err != nil {
+ return err
+ }
+ }
+
+ return s.joinExtraModels()
+}
+
+func (s *specBuilder) joinExtraModels() error {
+ tmp := make(map[*ast.Ident]*entityDecl, len(s.ctx.app.ExtraModels))
+ for k, v := range s.ctx.app.ExtraModels {
+ tmp[k] = v
+ s.ctx.app.Models[k] = v
+ delete(s.ctx.app.ExtraModels, k)
+ }
+
+ // process extra models and see if there is any reference to a new extra one
+ for _, decl := range tmp {
+ if err := s.buildDiscoveredSchema(decl); err != nil {
+ return err
+ }
+ }
+
+ if len(s.ctx.app.ExtraModels) > 0 {
+ return s.joinExtraModels()
+ }
+
+ return nil
+}
+
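+// collectOperationsFromInput indexes, by operation ID, the operations already
+// present on the input spec's paths.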
+func collectOperationsFromInput(input *spec.Swagger) map[string]*spec.Operation {
+ operations := make(map[string]*spec.Operation)
+ if input != nil && input.Paths != nil {
+ for _, pth := range input.Paths.Paths {
+ if pth.Get != nil {
+ operations[pth.Get.ID] = pth.Get
+ }
+ if pth.Post != nil {
+ operations[pth.Post.ID] = pth.Post
+ }
+ if pth.Put != nil {
+ operations[pth.Put.ID] = pth.Put
+ }
+ if pth.Patch != nil {
+ operations[pth.Patch.ID] = pth.Patch
+ }
+ if pth.Delete != nil {
+ operations[pth.Delete.ID] = pth.Delete
+ }
+ if pth.Head != nil {
+ operations[pth.Head.ID] = pth.Head
+ }
+ if pth.Options != nil {
+ operations[pth.Options.ID] = pth.Options
+ }
+ }
+ }
+ return operations
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/.gitignore b/vendor/github.com/go-swagger/go-swagger/generator/.gitignore
new file mode 100644
index 000000000..9ab870da8
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/.gitignore
@@ -0,0 +1 @@
+generated/
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/bindata.go b/vendor/github.com/go-swagger/go-swagger/generator/bindata.go
new file mode 100644
index 000000000..379362734
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/bindata.go
@@ -0,0 +1,40 @@
+package generator
+
+import (
+ "embed"
+ "io/fs"
+)
+
+//go:embed templates
+var _bindata embed.FS
+
+// AssetNames returns the names of the assets.
+func AssetNames() []string {
+ names := make([]string, 0)
+ _ = fs.WalkDir(_bindata, "templates", func(path string, d fs.DirEntry, err error) error {
+ if err != nil {
+ return err
+ }
+ names = append(names, path)
+ return nil
+ })
+ return names
+}
+
+// Asset loads and returns the asset for the given name.
+// It returns an error if the asset could not be found or
+// could not be loaded.
+func Asset(name string) ([]byte, error) {
+ return _bindata.ReadFile(name)
+}
+
+// MustAsset is like Asset but panics when Asset would return an error.
+// It simplifies safe initialization of global variables.
+func MustAsset(name string) []byte {
+ a, err := Asset(name)
+ if err != nil {
+ panic("asset: Asset(" + name + "): " + err.Error())
+ }
+
+ return a
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/client.go b/vendor/github.com/go-swagger/go-swagger/generator/client.go
new file mode 100644
index 000000000..037938e35
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/client.go
@@ -0,0 +1,120 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generator
+
+import (
+ "errors"
+
+ "github.com/go-openapi/swag"
+)
+
+// GenerateClient generates a client library for a swagger spec document.
+func GenerateClient(name string, modelNames, operationIDs []string, opts *GenOpts) error {
+ if err := opts.CheckOpts(); err != nil {
+ return err
+ }
+
+ if err := opts.setTemplates(); err != nil {
+ return err
+ }
+
+ specDoc, analyzed, err := opts.analyzeSpec()
+ if err != nil {
+ return err
+ }
+
+ models, err := gatherModels(specDoc, modelNames)
+ if err != nil {
+ return err
+ }
+
+ operations := gatherOperations(analyzed, operationIDs)
+ if len(operations) == 0 {
+ return errors.New("no operations were selected")
+ }
+
+ generator := appGenerator{
+ Name: appNameOrDefault(specDoc, name, defaultClientName),
+ SpecDoc: specDoc,
+ Analyzed: analyzed,
+ Models: models,
+ Operations: operations,
+ Target: opts.Target,
+ DumpData: opts.DumpData,
+ Package: opts.LanguageOpts.ManglePackageName(opts.ClientPackage, defaultClientTarget),
+ APIPackage: opts.LanguageOpts.ManglePackagePath(opts.APIPackage, defaultOperationsTarget),
+ ModelsPackage: opts.LanguageOpts.ManglePackagePath(opts.ModelPackage, defaultModelsTarget),
+ ServerPackage: opts.LanguageOpts.ManglePackagePath(opts.ServerPackage, defaultServerTarget),
+ ClientPackage: opts.LanguageOpts.ManglePackagePath(opts.ClientPackage, defaultClientTarget),
+ OperationsPackage: opts.LanguageOpts.ManglePackagePath(opts.ClientPackage, defaultClientTarget),
+ Principal: opts.PrincipalAlias(),
+ DefaultScheme: opts.DefaultScheme,
+ DefaultProduces: opts.DefaultProduces,
+ DefaultConsumes: opts.DefaultConsumes,
+ GenOpts: opts,
+ }
+ generator.Receiver = "o"
+ return (&clientGenerator{generator}).Generate()
+}
+
+type clientGenerator struct {
+ appGenerator
+}
+
+func (c *clientGenerator) Generate() error {
+ app, err := c.makeCodegenApp()
+ if err != nil {
+ return err
+ }
+
+ if c.DumpData {
+ return dumpData(swag.ToDynamicJSON(app))
+ }
+
+ if c.GenOpts.IncludeModel {
+ for _, m := range app.Models {
+ if m.IsStream {
+ continue
+ }
+ mod := m
+ if err := c.GenOpts.renderDefinition(&mod); err != nil {
+ return err
+ }
+ }
+ }
+
+ if c.GenOpts.IncludeHandler {
+ for _, g := range app.OperationGroups {
+ opg := g
+ for _, o := range opg.Operations {
+ op := o
+ if err := c.GenOpts.renderOperation(&op); err != nil {
+ return err
+ }
+ }
+ if err := c.GenOpts.renderOperationGroup(&opg); err != nil {
+ return err
+ }
+ }
+ }
+
+ if c.GenOpts.IncludeSupport {
+ if err := c.GenOpts.renderApplication(&app); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/config.go b/vendor/github.com/go-swagger/go-swagger/generator/config.go
new file mode 100644
index 000000000..2d9413218
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/config.go
@@ -0,0 +1,61 @@
+package generator
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+
+ "github.com/spf13/viper"
+)
+
+// LanguageDefinition in the configuration file.
+type LanguageDefinition struct {
+ Layout SectionOpts `mapstructure:"layout"`
+}
+
+// ConfigureOpts for generation
+func (d *LanguageDefinition) ConfigureOpts(opts *GenOpts) error {
+ opts.Sections = d.Layout
+ if opts.LanguageOpts == nil {
+ opts.LanguageOpts = GoLangOpts()
+ }
+ return nil
+}
+
+// LanguageConfig structure that is obtained from parsing a config file
+type LanguageConfig map[string]LanguageDefinition
+
+// ReadConfig reads the configuration file at the specified path. When no path is
+// specified, it looks in the current directory for a
+// .swagger.{yml,json,hcl,toml,properties} file.
+// It returns a viper config or an error.
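+//
+// Illustrative usage (not from the original docs):
+//
+//	v, err := ReadConfig("")                // falls back to ./.swagger.*
+//	v, err = ReadConfig("etc/swagger.yaml") // explicit path: the file must exist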
+func ReadConfig(fpath string) (*viper.Viper, error) {
+ v := viper.New()
+ if fpath != "" {
+ if !fileExists(fpath, "") {
+ return nil, fmt.Errorf("can't find file for %q", fpath)
+ }
+ file, err := os.Open(fpath)
+ if err != nil {
+ return nil, err
+ }
+ defer func() { _ = file.Close() }()
+ ext := filepath.Ext(fpath)
+ if len(ext) > 0 {
+ ext = ext[1:]
+ }
+ v.SetConfigType(ext)
+ if err := v.ReadConfig(file); err != nil {
+ return nil, err
+ }
+ return v, nil
+ }
+
+ v.SetConfigName(".swagger")
+ v.AddConfigPath(".")
+ if err := v.ReadInConfig(); err != nil {
+ if _, ok := err.(viper.UnsupportedConfigError); !ok && v.ConfigFileUsed() != "" {
+ return nil, err
+ }
+ }
+ return v, nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/debug.go b/vendor/github.com/go-swagger/go-swagger/generator/debug.go
new file mode 100644
index 000000000..61b4b8d48
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/debug.go
@@ -0,0 +1,64 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generator
+
+import (
+ "encoding/json"
+ "fmt"
+ "log"
+ "os"
+ "path/filepath"
+ "runtime"
+)
+
+var (
+	// Debug is true when the env var DEBUG or SWAGGER_DEBUG is not empty.
+	// When enabled, the generators are very noisy about what they are doing.
+ Debug = os.Getenv("DEBUG") != "" || os.Getenv("SWAGGER_DEBUG") != ""
+ // generatorLogger is a debug logger for this package
+ generatorLogger *log.Logger
+)
+
+func debugOptions() {
+ generatorLogger = log.New(os.Stdout, "generator:", log.LstdFlags)
+}
+
+// debugLog wraps log.Printf with a debug-specific logger
+func debugLog(frmt string, args ...interface{}) {
+ if Debug {
+ _, file, pos, _ := runtime.Caller(1)
+ generatorLogger.Printf("%s:%d: %s", filepath.Base(file), pos,
+ fmt.Sprintf(frmt, args...))
+ }
+}
+
+// debugLogAsJSON marshals its last arg as pretty JSON and appends it to the log line
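+// For example (illustrative): debugLogAsJSON("operation %s:", name, op) logs
+// the formatted message followed by op pretty-printed as JSON, when Debug is set.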
+func debugLogAsJSON(frmt string, args ...interface{}) {
+ if Debug {
+ var dfrmt string
+ _, file, pos, _ := runtime.Caller(1)
+ dargs := make([]interface{}, 0, len(args)+2)
+ dargs = append(dargs, filepath.Base(file), pos)
+ if len(args) > 0 {
+ dfrmt = "%s:%d: " + frmt + "\n%s"
+ bbb, _ := json.MarshalIndent(args[len(args)-1], "", " ")
+ dargs = append(dargs, args[0:len(args)-1]...)
+ dargs = append(dargs, string(bbb))
+ } else {
+ dfrmt = "%s:%d: " + frmt
+ }
+ generatorLogger.Printf(dfrmt, dargs...)
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/discriminators.go b/vendor/github.com/go-swagger/go-swagger/generator/discriminators.go
new file mode 100644
index 000000000..244a2ba56
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/discriminators.go
@@ -0,0 +1,75 @@
+package generator
+
+import (
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+type discInfo struct {
+ Discriminators map[string]discor
+ Discriminated map[string]discee
+}
+
+type discor struct {
+ FieldName string `json:"fieldName"`
+ GoType string `json:"goType"`
+ JSONName string `json:"jsonName"`
+ Children []discee `json:"children"`
+}
+
+type discee struct {
+ FieldName string `json:"fieldName"`
+ FieldValue string `json:"fieldValue"`
+ GoType string `json:"goType"`
+ JSONName string `json:"jsonName"`
+ Ref spec.Ref `json:"ref"`
+ ParentRef spec.Ref `json:"parentRef"`
+}
+
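+// discriminatorInfo walks all definitions to index discriminator base types
+// (schemas that declare a discriminator field) together with their
+// discriminated subtypes (schemas whose allOf references a base type).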
+func discriminatorInfo(doc *analysis.Spec) *discInfo {
+ baseTypes := make(map[string]discor)
+ for _, sch := range doc.AllDefinitions() {
+ if sch.Schema.Discriminator != "" {
+ tpe, _ := sch.Schema.Extensions.GetString(xGoName)
+ if tpe == "" {
+ tpe = swag.ToGoName(sch.Name)
+ }
+ baseTypes[sch.Ref.String()] = discor{
+ FieldName: sch.Schema.Discriminator,
+ GoType: tpe,
+ JSONName: sch.Name,
+ }
+ }
+ }
+
+ subTypes := make(map[string]discee)
+ for _, sch := range doc.SchemasWithAllOf() {
+ for _, ao := range sch.Schema.AllOf {
+ if ao.Ref.String() != "" {
+ if bt, ok := baseTypes[ao.Ref.String()]; ok {
+ name, _ := sch.Schema.Extensions.GetString(xClass)
+ if name == "" {
+ name = sch.Name
+ }
+ tpe, _ := sch.Schema.Extensions.GetString(xGoName)
+ if tpe == "" {
+ tpe = swag.ToGoName(sch.Name)
+ }
+ dce := discee{
+ FieldName: bt.FieldName,
+ FieldValue: name,
+ Ref: sch.Ref,
+ ParentRef: ao.Ref,
+ JSONName: sch.Name,
+ GoType: tpe,
+ }
+ subTypes[sch.Ref.String()] = dce
+ bt.Children = append(bt.Children, dce)
+ baseTypes[ao.Ref.String()] = bt
+ }
+ }
+ }
+ }
+ return &discInfo{Discriminators: baseTypes, Discriminated: subTypes}
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/doc.go b/vendor/github.com/go-swagger/go-swagger/generator/doc.go
new file mode 100644
index 000000000..49cbf3950
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/doc.go
@@ -0,0 +1,78 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package generator provides the code generation library for go-swagger.
+
+# Generating data types
+
+The general idea is that you should rarely see interface{} in the generated code.
+You get a complete representation of a swagger document in somewhat idiomatic go.
+
+To do so, there is a set of mapping patterns that are applied,
+to map a Swagger specification to go types:
+
+ definition of primitive => type alias/name
+ definition of array => type alias/name
+ definition of map => type alias/name
+
+ definition of object
+ with properties => struct
+ definition of $ref => type alias/name
+
+ object with only
+ additional properties => map[string]T
+
+ object with additional
+ properties and properties => custom serializer
+
+ schema with schema array
+ in items => tuple (struct with properties, custom serializer)
+
+ schema with all of => struct
+
+ * allOf schema with $ref => embedded value
+ * allOf schema with properties => properties are included in struct
+ * adding an allOf schema with just "x-isnullable": true or
+ "x-nullable": true turns the schema into a pointer when
+ there are only other extension properties provided
+
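+For illustration only (a sketch, not verbatim generator output), a definition
+such as:
+
+	"Pet": {
+	  "type": "object",
+	  "properties": {
+	    "name": {"type": "string"},
+	    "tags": {"type": "array", "items": {"type": "string"}}
+	  }
+	}
+
+could map to a struct along these lines:
+
+	// Pet pet
+	type Pet struct {
+
+		// name
+		Name string `json:"name,omitempty"`
+
+		// tags
+		Tags []string `json:"tags,omitempty"`
+	}
+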
+NOTE: anyOf and oneOf JSON-schema constructs are not supported by Swagger 2.0
+
+A property on a definition is a pointer when any one of the following conditions is met:
+
+ it is an object schema (struct)
+ it has x-nullable or x-isnullable as vendor extension
+ it is a primitive where the zero value is valid but would fail validation
+	otherwise: strings with minLength > 0 or marked required, and numbers with
+	min > 0 or max < 0 (and min < max), result in a non-pointer
+
+JSONSchema and by extension Swagger allow for items that have a fixed size array,
+with the schema describing the items at each index. This can be combined with additional items
+to form some kind of tuple with varargs.
+
+To map this to go it creates a struct that has fixed names and a custom json serializer.
+
+NOTE: the additionalItems keyword is not supported by Swagger 2.0. However, the
+generator and validator parts of go-swagger do support it.
+
+# Documenting the generated code
+
+The code that is generated also gets the doc comments that are used by the scanner
+to generate a spec from go code, so after generation you should be able to
+reverse-generate a spec from the code that was generated from your spec.
+
+It should be equivalent to the original spec but might miss some default values and examples.
+*/
+package generator
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/formats.go b/vendor/github.com/go-swagger/go-swagger/generator/formats.go
new file mode 100644
index 000000000..3d127333f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/formats.go
@@ -0,0 +1,226 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generator
+
+// TODO: we could probably find a way to register most of this dynamically from strfmt
+
+// map of function calls to be generated to get the zero value of a given type
+var zeroes = map[string]string{
+ "bool": "false",
+ "float32": "0",
+ "float64": "0",
+ "int": "0",
+ "int8": "0",
+ "int16": "0",
+ "int32": "0",
+ "int64": "0",
+ "string": "\"\"",
+ "uint": "0",
+ "uint8": "0",
+ "uint16": "0",
+ "uint32": "0",
+ "uint64": "0",
+ // Extended formats (23 formats corresponding to the Default registry
+ // provided by go-openapi/strfmt)
+ "strfmt.Base64": "strfmt.Base64([]byte(nil))",
+ "strfmt.CreditCard": "strfmt.CreditCard(\"\")",
+ "strfmt.Date": "strfmt.Date{}",
+ "strfmt.DateTime": "strfmt.DateTime{}",
+ "strfmt.Duration": "strfmt.Duration(0)",
+ "strfmt.Email": "strfmt.Email(\"\")",
+ "strfmt.HexColor": "strfmt.HexColor(\"#000000\")",
+ "strfmt.Hostname": "strfmt.Hostname(\"\")",
+ "strfmt.IPv4": "strfmt.IPv4(\"\")",
+ "strfmt.IPv6": "strfmt.IPv6(\"\")",
+ "strfmt.ISBN": "strfmt.ISBN(\"\")",
+ "strfmt.ISBN10": "strfmt.ISBN10(\"\")",
+ "strfmt.ISBN13": "strfmt.ISBN13(\"\")",
+ "strfmt.MAC": "strfmt.MAC(\"\")",
+ "strfmt.ObjectId": "strfmt.ObjectId{}",
+ "strfmt.Password": "strfmt.Password(\"\")",
+ "strfmt.RGBColor": "strfmt.RGBColor(\"rgb(0,0,0)\")",
+ "strfmt.SSN": "strfmt.SSN(\"\")",
+ "strfmt.URI": "strfmt.URI(\"\")",
+ "strfmt.UUID": "strfmt.UUID(\"\")",
+ "strfmt.UUID3": "strfmt.UUID3(\"\")",
+ "strfmt.UUID4": "strfmt.UUID4(\"\")",
+ "strfmt.UUID5": "strfmt.UUID5(\"\")",
+ // "file": "runtime.File",
+}
+
+// conversion functions from string representation to a numerical or boolean
+// primitive type
+var stringConverters = map[string]string{
+ "bool": "swag.ConvertBool",
+ "float32": "swag.ConvertFloat32",
+ "float64": "swag.ConvertFloat64",
+ "int8": "swag.ConvertInt8",
+ "int16": "swag.ConvertInt16",
+ "int32": "swag.ConvertInt32",
+ "int64": "swag.ConvertInt64",
+ "uint8": "swag.ConvertUint8",
+ "uint16": "swag.ConvertUint16",
+ "uint32": "swag.ConvertUint32",
+ "uint64": "swag.ConvertUint64",
+}
+
+// formatting (string representation) functions from a native representation
+// of a numerical or boolean primitive type
+var stringFormatters = map[string]string{
+ "bool": "swag.FormatBool",
+ "float32": "swag.FormatFloat32",
+ "float64": "swag.FormatFloat64",
+ "int8": "swag.FormatInt8",
+ "int16": "swag.FormatInt16",
+ "int32": "swag.FormatInt32",
+ "int64": "swag.FormatInt64",
+ "uint8": "swag.FormatUint8",
+ "uint16": "swag.FormatUint16",
+ "uint32": "swag.FormatUint32",
+ "uint64": "swag.FormatUint64",
+}
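+
+// As an illustration of how the three lookup tables above are meant to be
+// consumed by templates (the variable names below are ours, not part of this
+// package's API):
+//
+//	zero := zeroes["strfmt.Date"]     // yields the literal `strfmt.Date{}`
+//	conv := stringConverters["int64"] // yields `swag.ConvertInt64`
+//	fmtr := stringFormatters["int64"] // yields `swag.FormatInt64`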
+
+// typeMapping contains a mapping of type name to go type
+var typeMapping = map[string]string{
+ // Standard formats with native, straightforward, mapping
+ "string": "string",
+ "boolean": "bool",
+ "integer": "int64",
+ "number": "float64",
+ // For file producers
+ "file": "runtime.File",
+}
+
+// formatMapping contains a type-specific version of mapping of format to go type
+var formatMapping = map[string]map[string]string{
+ "number": {
+ "double": "float64",
+ "float": "float32",
+ "int": "int64",
+ "int8": "int8",
+ "int16": "int16",
+ "int32": "int32",
+ "int64": "int64",
+ "uint": "uint64",
+ "uint8": "uint8",
+ "uint16": "uint16",
+ "uint32": "uint32",
+ "uint64": "uint64",
+ },
+ "integer": {
+ "int": "int64",
+ "int8": "int8",
+ "int16": "int16",
+ "int32": "int32",
+ "int64": "int64",
+ "uint": "uint64",
+ "uint8": "uint8",
+ "uint16": "uint16",
+ "uint32": "uint32",
+ "uint64": "uint64",
+ },
+ "string": {
+ "char": "rune",
+ // Extended format registry from go-openapi/strfmt.
+ // Currently, 23 such formats are supported (default strfmt registry),
+ // plus the following aliases:
+ // - "datetime" alias for the more official "date-time"
+ // - "objectid" and "ObjectId" aliases for "bsonobjectid"
+ "binary": "io.ReadCloser",
+ "byte": "strfmt.Base64",
+ "creditcard": "strfmt.CreditCard",
+ "date": "strfmt.Date",
+ "date-time": "strfmt.DateTime",
+ "datetime": "strfmt.DateTime",
+ "duration": "strfmt.Duration",
+ "email": "strfmt.Email",
+ "hexcolor": "strfmt.HexColor",
+ "hostname": "strfmt.Hostname",
+ "ipv4": "strfmt.IPv4",
+ "ipv6": "strfmt.IPv6",
+ "isbn": "strfmt.ISBN",
+ "isbn10": "strfmt.ISBN10",
+ "isbn13": "strfmt.ISBN13",
+ "mac": "strfmt.MAC",
+ "bsonobjectid": "strfmt.ObjectId",
+ "objectid": "strfmt.ObjectId",
+ "ObjectId": "strfmt.ObjectId", // NOTE: does it work with uppercase?
+ "password": "strfmt.Password",
+ "rgbcolor": "strfmt.RGBColor",
+ "ssn": "strfmt.SSN",
+ "uri": "strfmt.URI",
+ "uuid": "strfmt.UUID",
+ "uuid3": "strfmt.UUID3",
+ "uuid4": "strfmt.UUID4",
+ "uuid5": "strfmt.UUID5",
+ // For file producers
+ "file": "runtime.File",
+ },
+}
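+
+// To sketch the intended resolution order (a hedged illustration, not an API
+// exposed by this file): a swagger (type, format) pair is first looked up in
+// formatMapping, then falls back to typeMapping, e.g.:
+//
+//	formatMapping["string"]["date-time"] // "strfmt.DateTime"
+//	formatMapping["integer"]["int32"]    // "int32"
+//	typeMapping["boolean"]               // "bool" (no format-specific entry)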
+
+// go primitive types
+var primitives = map[string]struct{}{
+ "bool": {},
+ "byte": {},
+ "[]byte": {},
+ "complex64": {},
+ "complex128": {},
+ "float32": {},
+ "float64": {},
+ "int": {},
+ "int8": {},
+ "int16": {},
+ "int32": {},
+ "int64": {},
+ "rune": {},
+ "string": {},
+ "uint": {},
+ "uint8": {},
+ "uint16": {},
+ "uint32": {},
+ "uint64": {},
+}
+
+// Formats with a custom formatter.
+// Currently, 23 such formats are supported
+var customFormatters = map[string]struct{}{
+ "strfmt.Base64": {},
+ "strfmt.CreditCard": {},
+ "strfmt.Date": {},
+ "strfmt.DateTime": {},
+ "strfmt.Duration": {},
+ "strfmt.Email": {},
+ "strfmt.HexColor": {},
+ "strfmt.Hostname": {},
+ "strfmt.IPv4": {},
+ "strfmt.IPv6": {},
+ "strfmt.ISBN": {},
+ "strfmt.ISBN10": {},
+ "strfmt.ISBN13": {},
+ "strfmt.MAC": {},
+ "strfmt.ObjectId": {},
+ "strfmt.Password": {},
+ "strfmt.RGBColor": {},
+ "strfmt.SSN": {},
+ "strfmt.URI": {},
+ "strfmt.UUID": {},
+ "strfmt.UUID3": {},
+ "strfmt.UUID4": {},
+ "strfmt.UUID5": {},
+ // the following interfaces do not generate validations
+ "io.ReadCloser": {}, // for "format": "binary" (server side)
+ "io.Writer": {}, // for "format": "binary" (client side)
+ // NOTE: runtime.File is not a customFormatter
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/genopts_nonwin.go b/vendor/github.com/go-swagger/go-swagger/generator/genopts_nonwin.go
new file mode 100644
index 000000000..7e2a4f1c0
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/genopts_nonwin.go
@@ -0,0 +1,50 @@
+//go:build !windows
+// +build !windows
+
+package generator
+
+import (
+ "log"
+ "plugin"
+ "text/template"
+)
+
+type GenOpts struct {
+ GenOptsCommon
+ TemplatePlugin string
+}
+
+func (g *GenOpts) setTemplates() error {
+ if g.TemplatePlugin != "" {
+ if err := g.templates.LoadPlugin(g.TemplatePlugin); err != nil {
+ return err
+ }
+ }
+
+ return g.GenOptsCommon.setTemplates()
+}
+
+// LoadPlugin will load the named plugin and inject its functions into the funcMap
+//
+// The plugin must implement a function matching the signature:
+// `func AddFuncs(f template.FuncMap)`
+// which can add any number of functions to the template repository funcMap.
+// Any existing sprig or go-swagger templates with the same name will be overridden.
+func (t *Repository) LoadPlugin(pluginPath string) error {
+ log.Printf("Attempting to load template plugin: %s", pluginPath)
+
+ p, err := plugin.Open(pluginPath)
+ if err != nil {
+ return err
+ }
+
+ f, err := p.Lookup("AddFuncs")
+ if err != nil {
+ return err
+ }
+
+ f.(func(template.FuncMap))(t.funcs)
+ return nil
+}
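+
+// As a hedged sketch, a plugin satisfying the AddFuncs contract could be built
+// from a file like the following (hypothetical; compiled with
+// `go build -buildmode=plugin`):
+//
+//	package main
+//
+//	import (
+//		"strings"
+//		"text/template"
+//	)
+//
+//	// AddFuncs is looked up by LoadPlugin and invoked with the repository funcMap.
+//	func AddFuncs(f template.FuncMap) {
+//		f["shout"] = strings.ToUpper
+//	}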
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/genopts_win.go b/vendor/github.com/go-swagger/go-swagger/generator/genopts_win.go
new file mode 100644
index 000000000..6dfbc1b27
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/genopts_win.go
@@ -0,0 +1,12 @@
+//go:build windows
+// +build windows
+
+package generator
+
+type GenOpts struct {
+ GenOptsCommon
+}
+
+func (g *GenOpts) setTemplates() error {
+ return g.GenOptsCommon.setTemplates()
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/language.go b/vendor/github.com/go-swagger/go-swagger/generator/language.go
new file mode 100644
index 000000000..01c7a318e
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/language.go
@@ -0,0 +1,440 @@
+package generator
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "log"
+ "os"
+ "path"
+ "path/filepath"
+ "regexp"
+ goruntime "runtime"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/swag"
+ "golang.org/x/tools/imports"
+)
+
+var (
+ // DefaultLanguageFunc defines the default generation language
+ DefaultLanguageFunc func() *LanguageOpts
+
+ moduleRe *regexp.Regexp
+)
+
+func initLanguage() {
+ DefaultLanguageFunc = GoLangOpts
+
+ moduleRe = regexp.MustCompile(`module[ \t]+([^\s]+)`)
+}
+
+// LanguageOpts to describe a language to the code generator
+type LanguageOpts struct {
+ ReservedWords []string
+ BaseImportFunc func(string) string `json:"-"`
+ ImportsFunc func(map[string]string) string `json:"-"`
+ ArrayInitializerFunc func(interface{}) (string, error) `json:"-"`
+ reservedWordsSet map[string]struct{}
+ initialized bool
+ formatFunc func(string, []byte) ([]byte, error)
+ fileNameFunc func(string) string // language specific source file naming rules
+ dirNameFunc func(string) string // language specific directory naming rules
+}
+
+// Init the language option
+func (l *LanguageOpts) Init() {
+ if l.initialized {
+ return
+ }
+ l.initialized = true
+ l.reservedWordsSet = make(map[string]struct{})
+ for _, rw := range l.ReservedWords {
+ l.reservedWordsSet[rw] = struct{}{}
+ }
+}
+
+// MangleName makes sure a reserved word gets a safe name
+func (l *LanguageOpts) MangleName(name, suffix string) string {
+ if _, ok := l.reservedWordsSet[swag.ToFileName(name)]; !ok {
+ return name
+ }
+ return strings.Join([]string{name, suffix}, "_")
+}
+
+// MangleVarName makes sure a reserved word gets a safe name
+func (l *LanguageOpts) MangleVarName(name string) string {
+ nm := swag.ToVarName(name)
+ if _, ok := l.reservedWordsSet[nm]; !ok {
+ return nm
+ }
+ return nm + "Var"
+}
+
+// MangleFileName makes sure a file name gets a safe name
+func (l *LanguageOpts) MangleFileName(name string) string {
+ if l.fileNameFunc != nil {
+ return l.fileNameFunc(name)
+ }
+ return swag.ToFileName(name)
+}
+
+// ManglePackageName makes sure a package gets a safe name.
+// In case of a file system path (e.g. name contains "/" or "\" on Windows), this returns only the last element.
+func (l *LanguageOpts) ManglePackageName(name, suffix string) string {
+ if name == "" {
+ return suffix
+ }
+ if l.dirNameFunc != nil {
+ name = l.dirNameFunc(name)
+ }
+ pth := filepath.ToSlash(filepath.Clean(name)) // preserve path
+ pkg := importAlias(pth) // drop path
+ return l.MangleName(swag.ToFileName(prefixForName(pkg)+pkg), suffix)
+}
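+
+// For instance (an illustrative sketch, assuming the go reserved-words set
+// defined below in GoLangOpts): ManglePackageName("a/b/type", "definitions")
+// keeps only the last path element and, since "type" is a reserved word,
+// mangles it to "type_definitions".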
+
+// ManglePackagePath makes sure a full package path gets a safe name.
+// Only the last part of the path is altered.
+func (l *LanguageOpts) ManglePackagePath(name string, suffix string) string {
+ if name == "" {
+ return suffix
+ }
+ target := filepath.ToSlash(filepath.Clean(name)) // preserve path
+ parts := strings.Split(target, "/")
+ parts[len(parts)-1] = l.ManglePackageName(parts[len(parts)-1], suffix)
+ return strings.Join(parts, "/")
+}
+
+// FormatContent formats a file with a language specific formatter
+func (l *LanguageOpts) FormatContent(name string, content []byte) ([]byte, error) {
+ if l.formatFunc != nil {
+ return l.formatFunc(name, content)
+ }
+ return content, nil
+}
+
+// imports generate the code to import some external packages, possibly aliased
+func (l *LanguageOpts) imports(imports map[string]string) string {
+ if l.ImportsFunc != nil {
+ return l.ImportsFunc(imports)
+ }
+ return ""
+}
+
+// arrayInitializer builds a literal array
+func (l *LanguageOpts) arrayInitializer(data interface{}) (string, error) {
+ if l.ArrayInitializerFunc != nil {
+ return l.ArrayInitializerFunc(data)
+ }
+ return "", nil
+}
+
+// baseImport figures out the base path to generate import statements
+func (l *LanguageOpts) baseImport(tgt string) string {
+ if l.BaseImportFunc != nil {
+ return l.BaseImportFunc(tgt)
+ }
+ debugLog("base import func is nil")
+ return ""
+}
+
+// GoLangOpts for rendering items as golang code
+func GoLangOpts() *LanguageOpts {
+ var goOtherReservedSuffixes = map[string]bool{
+ // see:
+ // https://golang.org/src/go/build/syslist.go
+ // https://golang.org/doc/install/source#environment
+
+ // goos
+ "aix": true,
+ "android": true,
+ "darwin": true,
+ "dragonfly": true,
+ "freebsd": true,
+ "hurd": true,
+ "illumos": true,
+ "js": true,
+ "linux": true,
+ "nacl": true,
+ "netbsd": true,
+ "openbsd": true,
+ "plan9": true,
+ "solaris": true,
+ "windows": true,
+ "zos": true,
+
+ // arch
+ "386": true,
+ "amd64": true,
+ "amd64p32": true,
+ "arm": true,
+ "armbe": true,
+ "arm64": true,
+ "arm64be": true,
+ "mips": true,
+ "mipsle": true,
+ "mips64": true,
+ "mips64le": true,
+ "mips64p32": true,
+ "mips64p32le": true,
+ "ppc": true,
+ "ppc64": true,
+ "ppc64le": true,
+ "riscv": true,
+ "riscv64": true,
+ "s390": true,
+ "s390x": true,
+ "sparc": true,
+ "sparc64": true,
+ "wasm": true,
+
+ // other reserved suffixes
+ "test": true,
+ }
+
+ opts := new(LanguageOpts)
+ opts.ReservedWords = []string{
+ "break", "default", "func", "interface", "select",
+ "case", "defer", "go", "map", "struct",
+ "chan", "else", "goto", "package", "switch",
+ "const", "fallthrough", "if", "range", "type",
+ "continue", "for", "import", "return", "var",
+ }
+
+ opts.formatFunc = func(ffn string, content []byte) ([]byte, error) {
+ opts := new(imports.Options)
+ opts.TabIndent = true
+ opts.TabWidth = 2
+ opts.Fragment = true
+ opts.Comments = true
+ return imports.Process(ffn, content, opts)
+ }
+
+ opts.fileNameFunc = func(name string) string {
+ // whenever a generated file name ends with a suffix
+ // that is meaningful to go build, append a "swagger"
+ // suffix
+ parts := strings.Split(swag.ToFileName(name), "_")
+ if goOtherReservedSuffixes[parts[len(parts)-1]] {
+ // file names ending with a reserved arch or os name
+ // get an innocuous "swagger" suffix appended
+ parts = append(parts, "swagger")
+ }
+ return strings.Join(parts, "_")
+ }
+
+ opts.dirNameFunc = func(name string) string {
+ // whenever a generated directory name is a special
+ // golang directory, append an innocuous suffix
+ switch name {
+ case "vendor", "internal":
+ return strings.Join([]string{name, "swagger"}, "_")
+ }
+ return name
+ }
+
+ opts.ImportsFunc = func(imports map[string]string) string {
+ if len(imports) == 0 {
+ return ""
+ }
+ result := make([]string, 0, len(imports))
+ for k, v := range imports {
+ _, name := path.Split(v)
+ if name != k {
+ result = append(result, fmt.Sprintf("\t%s %q", k, v))
+ } else {
+ result = append(result, fmt.Sprintf("\t%q", v))
+ }
+ }
+ sort.Strings(result)
+ return strings.Join(result, "\n")
+ }
+
+ opts.ArrayInitializerFunc = func(data interface{}) (string, error) {
+ // ArrayInitializer constructs a Go literal initializer from interface{} literals.
+ // e.g. []interface{}{"a", "b"} is transformed into {"a","b",}
+ // e.g. map[string]interface{}{ "a": "x", "b": "y"} is transformed into {"a":"x","b":"y",}.
+ //
+ // NOTE: this is currently used to construct simple slice initializers for default values.
+ // This allows for nicer slice initializers for slices of primitive types and avoids systematic use of json.Unmarshal().
+ b, err := json.Marshal(data)
+ if err != nil {
+ return "", err
+ }
+ return strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(strings.ReplaceAll(string(b), "}", ",}"), "[", "{"), "]", ",}"), "{,}", "{}"), nil
+ }
+
+ opts.BaseImportFunc = func(tgt string) string {
+ tgt = filepath.Clean(tgt)
+ // On Windows, filepath.Abs("") behaves differently than on Unix.
+ // Windows: yields an error, since Abs() does not know the volume.
+ // UNIX: returns current working directory
+ if tgt == "" {
+ tgt = "."
+ }
+ tgtAbsPath, err := filepath.Abs(tgt)
+ if err != nil {
+ log.Fatalf("could not evaluate base import path with target \"%s\": %v", tgt, err)
+ }
+
+ var tgtAbsPathExtended string
+ tgtAbsPathExtended, err = filepath.EvalSymlinks(tgtAbsPath)
+ if err != nil {
+ log.Fatalf("could not evaluate base import path with target \"%s\" (with symlink resolution): %v", tgtAbsPath, err)
+ }
+
+ gopath := os.Getenv("GOPATH")
+ if gopath == "" {
+ homeDir, herr := os.UserHomeDir()
+ if herr != nil {
+ log.Fatalln(herr)
+ }
+ gopath = filepath.Join(homeDir, "go")
+ }
+
+ var pth string
+ for _, gp := range filepath.SplitList(gopath) {
+ if _, derr := os.Stat(filepath.Join(gp, "src")); os.IsNotExist(derr) {
+ continue
+ }
+ // EvalSymlinks also calls Clean
+ gopathExtended, er := filepath.EvalSymlinks(gp)
+ if er != nil {
+ panic(er)
+ }
+ gopathExtended = filepath.Join(gopathExtended, "src")
+ gp = filepath.Join(gp, "src")
+
+ // At this stage we have both the expanded and unexpanded target paths. GOPATH is fully expanded.
+ // Expanded means symlink-free.
+ // We compare both flavors of the target path with gopath.
+ // If either of them coincides with gopath, the target path necessarily
+ // lies inside gopath. How?
+ // - Case 1: the paths coincide irrespective of symlinks (both non-expanded paths).
+ // - Case 2: a symlink in the target path points to a location inside GOPATH (expanded target path).
+ // - Case 3: a symlink in the target path points to a directory outside GOPATH (unexpanded target path).
+
+ // Case 1: do-nothing case. If the non-expanded paths match, just generate the base import path as if
+ // there were no symlinks.
+
+ // Case 2: a symlink in the target path points to a location inside GOPATH (expanded target path).
+ // The first if below will fail; the second will succeed.
+
+ // Case 3: a symlink in the target path points to a directory outside GOPATH (unexpanded target path).
+ // The first if below will succeed and break.
+
+ // compares the non-expanded paths for both
+ if ok, relativepath := checkPrefixAndFetchRelativePath(tgtAbsPath, gp); ok {
+ pth = relativepath
+ break
+ }
+
+ // compares the non-expanded target path with the expanded gopath
+ if ok, relativepath := checkPrefixAndFetchRelativePath(tgtAbsPath, gopathExtended); ok {
+ pth = relativepath
+ break
+ }
+
+ // compares the expanded target path with the expanded gopath.
+ if ok, relativepath := checkPrefixAndFetchRelativePath(tgtAbsPathExtended, gopathExtended); ok {
+ pth = relativepath
+ break
+ }
+
+ }
+
+ mod, goModuleAbsPath, err := tryResolveModule(tgtAbsPath)
+ switch {
+ case err != nil:
+ log.Fatalf("Failed to resolve module using go.mod file: %s", err)
+ case mod != "":
+ relTgt := relPathToRelGoPath(goModuleAbsPath, tgtAbsPath)
+ if !strings.HasSuffix(mod, relTgt) {
+ return filepath.ToSlash(mod + relTgt)
+ }
+ return filepath.ToSlash(mod)
+ }
+
+ if pth == "" {
+ log.Fatalln("target must reside inside a location in the $GOPATH/src or be a module")
+ }
+ return filepath.ToSlash(pth)
+ }
+ opts.Init()
+ return opts
+}
+
+// resolveGoModFile walks up the directory tree starting from 'dir' until it
+// finds a go.mod file. If go.mod is found it will return the related file
+// object. If no go.mod file is found it will return an error.
+func resolveGoModFile(dir string) (*os.File, string, error) {
+ goModPath := filepath.Join(dir, "go.mod")
+ f, err := os.Open(goModPath)
+ if err != nil {
+ if os.IsNotExist(err) && dir != filepath.Dir(dir) {
+ return resolveGoModFile(filepath.Dir(dir))
+ }
+ return nil, "", err
+ }
+ return f, dir, nil
+}
+
+// relPathToRelGoPath strips the module root prefix from an absolute os path and
+// returns the remainder as a go package path. On unix the separators do not
+// change, but on windows "\" is converted to "/".
+func relPathToRelGoPath(modAbsPath, absPath string) string {
+ if absPath == "." {
+ return ""
+ }
+
+ path := strings.TrimPrefix(absPath, modAbsPath)
+ pathItems := strings.Split(path, string(filepath.Separator))
+ return strings.Join(pathItems, "/")
+}
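+
+// For example (unix paths, illustrative):
+//
+//	relPathToRelGoPath("/home/user/mod", "/home/user/mod/pkg/api") // "/pkg/api"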
+
+func tryResolveModule(baseTargetPath string) (string, string, error) {
+ f, goModAbsPath, err := resolveGoModFile(baseTargetPath)
+ switch {
+ case os.IsNotExist(err):
+ return "", "", nil
+ case err != nil:
+ return "", "", err
+ }
+
+ src, err := io.ReadAll(f)
+ if err != nil {
+ return "", "", err
+ }
+
+ match := moduleRe.FindSubmatch(src)
+ if len(match) != 2 {
+ return "", "", nil
+ }
+
+ return string(match[1]), goModAbsPath, nil
+}
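+
+// For a go.mod whose first matching line reads `module github.com/example/repo`
+// (hypothetical path), tryResolveModule returns
+// ("github.com/example/repo", <dir containing go.mod>, nil).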
+
+// 1. Checks whether the child path and parent path coincide.
+// 2. If they do, returns the child path relative to the parent path.
+// 3. Otherwise, returns false.
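+//
+// For example (illustrative):
+//
+//	checkPrefixAndFetchRelativePath("/go/src/app", "/go/src") // (true, "app")
+//	checkPrefixAndFetchRelativePath("/tmp/app", "/go/src")    // (false, "")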
+func checkPrefixAndFetchRelativePath(childpath string, parentpath string) (bool, string) {
+ // Windows (local) file systems - NTFS, as well as FAT and variants
+ // are case insensitive.
+ cp, pp := childpath, parentpath
+ if goruntime.GOOS == "windows" {
+ cp = strings.ToLower(cp)
+ pp = strings.ToLower(pp)
+ }
+
+ if strings.HasPrefix(cp, pp) {
+ pth, err := filepath.Rel(parentpath, childpath)
+ if err != nil {
+ log.Fatalln(err)
+ }
+ return true, pth
+ }
+
+ return false, ""
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/media.go b/vendor/github.com/go-swagger/go-swagger/generator/media.go
new file mode 100644
index 000000000..f9dad9fa4
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/media.go
@@ -0,0 +1,191 @@
+package generator
+
+import (
+ "regexp"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/swag"
+)
+
+const jsonSerializer = "json"
+
+var mediaTypeNames = map[*regexp.Regexp]string{
+ regexp.MustCompile("application/.*json"): jsonSerializer,
+ regexp.MustCompile("application/.*yaml"): "yaml",
+ regexp.MustCompile("application/.*protobuf"): "protobuf",
+ regexp.MustCompile("application/.*capnproto"): "capnproto",
+ regexp.MustCompile("application/.*thrift"): "thrift",
+ regexp.MustCompile("(?:application|text)/.*xml"): "xml",
+ regexp.MustCompile("text/.*markdown"): "markdown",
+ regexp.MustCompile("text/.*html"): "html",
+ regexp.MustCompile("text/.*csv"): "csv",
+ regexp.MustCompile("text/.*tsv"): "tsv",
+ regexp.MustCompile("text/.*javascript"): "js",
+ regexp.MustCompile("text/.*css"): "css",
+ regexp.MustCompile("text/.*plain"): "txt",
+ regexp.MustCompile("application/.*octet-stream"): "bin",
+ regexp.MustCompile("application/.*tar"): "tar",
+ regexp.MustCompile("application/.*gzip"): "gzip",
+ regexp.MustCompile("application/.*gz"): "gzip",
+ regexp.MustCompile("application/.*raw-stream"): "bin",
+ regexp.MustCompile("application/x-www-form-urlencoded"): "urlform",
+ regexp.MustCompile("application/javascript"): "txt",
+ regexp.MustCompile("multipart/form-data"): "multipartform",
+ regexp.MustCompile("image/.*"): "bin",
+ regexp.MustCompile("audio/.*"): "bin",
+ regexp.MustCompile("application/pdf"): "bin",
+}
+
+var knownProducers = map[string]string{
+ jsonSerializer: "runtime.JSONProducer()",
+ "yaml": "yamlpc.YAMLProducer()",
+ "xml": "runtime.XMLProducer()",
+ "txt": "runtime.TextProducer()",
+ "bin": "runtime.ByteStreamProducer()",
+ "urlform": "runtime.DiscardProducer",
+ "multipartform": "runtime.DiscardProducer",
+}
+
+var knownConsumers = map[string]string{
+ jsonSerializer: "runtime.JSONConsumer()",
+ "yaml": "yamlpc.YAMLConsumer()",
+ "xml": "runtime.XMLConsumer()",
+ "txt": "runtime.TextConsumer()",
+ "bin": "runtime.ByteStreamConsumer()",
+ "urlform": "runtime.DiscardConsumer",
+ "multipartform": "runtime.DiscardConsumer",
+}
+
+func wellKnownMime(tn string) (string, bool) {
+ for k, v := range mediaTypeNames {
+ if k.MatchString(tn) {
+ return v, true
+ }
+ }
+ return "", false
+}
+
+func mediaMime(orig string) string {
+ return strings.SplitN(orig, ";", 2)[0]
+}
+
+func mediaParameters(orig string) string {
+ parts := strings.SplitN(orig, ";", 2)
+ if len(parts) < 2 {
+ return ""
+ }
+ return parts[1]
+}
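+
+// As an illustration of the three helpers above (the inputs are ours; the
+// results are what the code yields):
+//
+//	wellKnownMime("application/vnd.api+json")          // ("json", true)
+//	mediaMime("application/json; charset=utf-8")       // "application/json"
+//	mediaParameters("application/json; charset=utf-8") // " charset=utf-8" (raw remainder)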
+
+func (a *appGenerator) makeSerializers(mediaTypes []string, known func(string) (string, bool)) (GenSerGroups, bool) {
+ supportsJSON := false
+ uniqueSerializers := make(map[string]*GenSerializer, len(mediaTypes))
+ uniqueSerializerGroups := make(map[string]*GenSerGroup, len(mediaTypes))
+
+ // build all required serializers
+ for _, media := range mediaTypes {
+ key := mediaMime(media)
+ nm, ok := wellKnownMime(key)
+ if !ok {
+ // keep this serializer named, even though its implementation is empty (cf. #1557)
+ nm = key
+ }
+ name := swag.ToJSONName(nm)
+ impl, _ := known(name)
+
+ ser, ok := uniqueSerializers[key]
+ if !ok {
+ ser = &GenSerializer{
+ AppName: a.Name,
+ ReceiverName: a.Receiver,
+ Name: name,
+ MediaType: key,
+ Implementation: impl,
+ Parameters: []string{},
+ }
+ uniqueSerializers[key] = ser
+ }
+ // provide all known parameters (currently unused by codegen templates)
+ if params := strings.TrimSpace(mediaParameters(media)); params != "" {
+ found := false
+ for _, p := range ser.Parameters {
+ if params == p {
+ found = true
+ break
+ }
+ }
+ if !found {
+ ser.Parameters = append(ser.Parameters, params)
+ }
+ }
+
+ uniqueSerializerGroups[name] = &GenSerGroup{
+ GenSerializer: GenSerializer{
+ AppName: a.Name,
+ ReceiverName: a.Receiver,
+ Name: name,
+ Implementation: impl,
+ },
+ }
+ }
+
+ if len(uniqueSerializers) == 0 {
+ impl, _ := known(jsonSerializer)
+ uniqueSerializers[runtime.JSONMime] = &GenSerializer{
+ AppName: a.Name,
+ ReceiverName: a.Receiver,
+ Name: jsonSerializer,
+ MediaType: runtime.JSONMime,
+ Implementation: impl,
+ Parameters: []string{},
+ }
+ uniqueSerializerGroups[jsonSerializer] = &GenSerGroup{
+ GenSerializer: GenSerializer{
+ AppName: a.Name,
+ ReceiverName: a.Receiver,
+ Name: jsonSerializer,
+ Implementation: impl,
+ },
+ }
+ supportsJSON = true
+ }
+
+ // group serializers by consumer/producer to serve several mime media types
+ serializerGroups := make(GenSerGroups, 0, len(uniqueSerializers))
+
+ for _, group := range uniqueSerializerGroups {
+ if group.Name == jsonSerializer {
+ supportsJSON = true
+ }
+ serializers := make(GenSerializers, 0, len(uniqueSerializers))
+ for _, ser := range uniqueSerializers {
+ if group.Name == ser.Name {
+ sort.Strings(ser.Parameters)
+ serializers = append(serializers, *ser)
+ }
+ }
+ sort.Sort(serializers)
+ group.AllSerializers = serializers // provides the full list of mime media types for this serializer group
+ serializerGroups = append(serializerGroups, *group)
+ }
+ sort.Sort(serializerGroups)
+ return serializerGroups, supportsJSON
+}
+
+func (a *appGenerator) makeConsumes() (GenSerGroups, bool) {
+ // builds a codegen struct from all consumes in the spec
+ return a.makeSerializers(a.Analyzed.RequiredConsumes(), func(media string) (string, bool) {
+ c, ok := knownConsumers[media]
+ return c, ok
+ })
+}
+
+func (a *appGenerator) makeProduces() (GenSerGroups, bool) {
+ // builds a codegen struct from all produces in the spec
+ return a.makeSerializers(a.Analyzed.RequiredProduces(), func(media string) (string, bool) {
+ p, ok := knownProducers[media]
+ return p, ok
+ })
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/model.go b/vendor/github.com/go-swagger/go-swagger/generator/model.go
new file mode 100644
index 000000000..132927d48
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/model.go
@@ -0,0 +1,2118 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generator
+
+import (
+ "errors"
+ "fmt"
+ "log"
+ "path"
+ "path/filepath"
+ "sort"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+const asMethod = "()"
+
+/*
+Rewrite specification document first:
+
+* anonymous objects
+* tuples
+* extensible objects (properties + additionalProperties)
+* AllOfs when they match the rewrite criteria (not a nullable allOf)
+
+Find string enums and generate specialized idiomatic enums for them.
+
+Every action that happens tracks the path, which is a linked list of refs.
+*/
+
+// GenerateModels generates all model files for some schema definitions
+func GenerateModels(modelNames []string, opts *GenOpts) error {
+ // override any default or incompatible option settings
+ opts.IncludeModel = true
+ opts.IgnoreOperations = true
+ opts.ExistingModels = ""
+ opts.IncludeHandler = false
+ opts.IncludeMain = false
+ opts.IncludeSupport = false
+ generator, err := newAppGenerator("", modelNames, nil, opts)
+ if err != nil {
+ return err
+ }
+ return generator.Generate()
+}
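+
+// As a hedged usage sketch (Spec and Target are fields used elsewhere in this
+// package; real invocations would normally populate the remaining defaults
+// first):
+//
+//	opts := &GenOpts{}
+//	opts.Spec = "swagger.yaml" // assumed spec location
+//	opts.Target = "./models"   // assumed output directory
+//	if err := GenerateModels([]string{"Pet"}, opts); err != nil {
+//		log.Fatal(err)
+//	}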
+
+// GenerateDefinition generates a single model file for some schema definitions
+func GenerateDefinition(modelNames []string, opts *GenOpts) error {
+ if err := opts.CheckOpts(); err != nil {
+ return err
+ }
+
+ if err := opts.setTemplates(); err != nil {
+ return err
+ }
+
+ specDoc, _, err := opts.analyzeSpec()
+ if err != nil {
+ return err
+ }
+
+ modelNames = pruneEmpty(modelNames)
+ if len(modelNames) == 0 {
+ for k := range specDoc.Spec().Definitions {
+ modelNames = append(modelNames, k)
+ }
+ }
+
+ for _, modelName := range modelNames {
+ // lookup schema
+ model, ok := specDoc.Spec().Definitions[modelName]
+ if !ok {
+ return fmt.Errorf("model %q not found in definitions given by %q", modelName, opts.Spec)
+ }
+
+ // generate files
+ generator := definitionGenerator{
+ Name: modelName,
+ Model: model,
+ SpecDoc: specDoc,
+ Target: filepath.Join(
+ opts.Target,
+ filepath.FromSlash(opts.LanguageOpts.ManglePackagePath(opts.ModelPackage, ""))),
+ opts: opts,
+ }
+
+ if err := generator.Generate(); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+type definitionGenerator struct {
+ Name string
+ Model spec.Schema
+ SpecDoc *loads.Document
+ Target string
+ opts *GenOpts
+}
+
+func (m *definitionGenerator) Generate() error {
+
+ mod, err := makeGenDefinition(m.Name, m.Target, m.Model, m.SpecDoc, m.opts)
+ if err != nil {
+ return fmt.Errorf("could not generate definitions for model %s on target %s: %v", m.Name, m.Target, err)
+ }
+
+ if m.opts.DumpData {
+ return dumpData(swag.ToDynamicJSON(mod))
+ }
+
+ if m.opts.IncludeModel {
+ log.Println("including additional model")
+ if err := m.generateModel(mod); err != nil {
+ return fmt.Errorf("could not generate model: %v", err)
+ }
+ }
+ log.Println("generated model", m.Name)
+
+ return nil
+}
+
+func (m *definitionGenerator) generateModel(g *GenDefinition) error {
+ debugLog("rendering definitions for %+v", *g)
+ return m.opts.renderDefinition(g)
+}
+
+func makeGenDefinition(name, pkg string, schema spec.Schema, specDoc *loads.Document, opts *GenOpts) (*GenDefinition, error) {
+ gd, err := makeGenDefinitionHierarchy(name, pkg, "", schema, specDoc, opts)
+
+ if err == nil && gd != nil {
+ // before yielding the schema to the renderer, we check whether the top-level Validate method gets any content:
+ // this means that the immediate content of the top-level definition has at least one validation.
+ //
+ // If none is found at this level, and we are not in one of the special cases where no Validate() method
+ // is exposed at all (e.g. io.ReadCloser and interface{} types and their aliases), then an empty Validate() method
+ // is generated, which just returns nil (the object abides by the runtime.Validatable interface, but knows it has nothing to validate).
+ //
+ // We do this at the top level because of the possibility of aliased types which always bubble up validation to types which
+ // are referring to them. This results in correct but inelegant code with empty validations.
+ gd.GenSchema.HasValidations = shallowValidationLookup(gd.GenSchema)
+ }
+ return gd, err
+}
+
+func shallowValidationLookup(sch GenSchema) bool {
+ // scan top level need for validations
+ //
+ // NOTE: this supersedes the previous NeedsValidation flag
+ // With the introduction of this shallow lookup, it is no longer necessary
+ // to maintain a distinction between HasValidations (i.e. carries validations)
+ // and NeedsValidation (i.e. should have a Validate method with something in it).
+ // The latter was almost never used anyhow.
+
+ if sch.HasAdditionalProperties && sch.AdditionalProperties == nil {
+ log.Printf("warning: schema for additional properties in schema %q is empty. skipped", sch.Name)
+ }
+
+ if sch.IsArray && sch.HasValidations {
+ return true
+ }
+ if sch.IsStream || sch.IsInterface { // these types have no validation - aliased types on those do not implement the Validatable interface
+ return false
+ }
+ if sch.Required || hasFormatValidation(sch.resolvedType) {
+ return true
+ }
+ if sch.HasStringValidations() || sch.HasNumberValidations() || sch.HasEnum() || len(sch.ItemsEnum) > 0 || sch.HasObjectValidations() {
+ return true
+ }
+ for _, a := range sch.AllOf {
+ if a.HasValidations {
+ return true
+ }
+ }
+ for _, p := range sch.Properties {
+ // Using a base type within another structure triggers validation of the base type.
+ // The discriminator property in the base type definition itself does not.
+ if (p.HasValidations || p.Required) && !(sch.IsBaseType && p.Name == sch.DiscriminatorField) || (p.IsAliased || p.IsComplexObject) && !(p.IsInterface || p.IsStream) {
+ return true
+ }
+ }
+ if sch.IsTuple && (sch.AdditionalItems != nil && (sch.AdditionalItems.HasValidations || sch.AdditionalItems.Required)) {
+ return true
+ }
+ if sch.HasAdditionalProperties && sch.AdditionalProperties != nil && (sch.AdditionalProperties.IsInterface || sch.AdditionalProperties.IsStream) {
+ return false
+ }
+
+ if sch.HasAdditionalProperties && sch.AdditionalProperties != nil && (sch.AdditionalProperties.HasValidations || sch.AdditionalProperties.Required || sch.AdditionalProperties.IsAliased && !(sch.AdditionalProperties.IsInterface || sch.AdditionalProperties.IsStream)) {
+ return true
+ }
+
+ if sch.IsAliased && (sch.IsPrimitive && sch.HasValidations) { // non-primitive aliased types either have other attributes with validations (above) or shall not validate
+ return true
+ }
+ if sch.HasBaseType || sch.IsSubType {
+ return true
+ }
+ return false
+}
+
+func isExternal(schema spec.Schema) bool {
+ extType, ok := hasExternalType(schema.Extensions)
+ return ok && !extType.Embedded
+}
+
+func makeGenDefinitionHierarchy(name, pkg, container string, schema spec.Schema, specDoc *loads.Document, opts *GenOpts) (*GenDefinition, error) {
+ // Check if model is imported from external package using x-go-type
+ receiver := "m"
+ // models are resolved in the current package
+ modelPkg := opts.LanguageOpts.ManglePackageName(path.Base(filepath.ToSlash(pkg)), "definitions")
+ resolver := newTypeResolver("", "", specDoc).withDefinitionPackage(modelPkg)
+ resolver.ModelName = name
+ analyzed := analysis.New(specDoc.Spec())
+
+ di := discriminatorInfo(analyzed)
+
+ pg := schemaGenContext{
+ Path: "",
+ Name: name,
+ Receiver: receiver,
+ IndexVar: "i",
+ ValueExpr: receiver,
+ Schema: schema,
+ Required: false,
+ TypeResolver: resolver,
+ Named: true,
+ ExtraSchemas: make(map[string]GenSchema),
+ Discrimination: di,
+ Container: container,
+ IncludeValidator: opts.IncludeValidator,
+ IncludeModel: opts.IncludeModel,
+ StrictAdditionalProperties: opts.StrictAdditionalProperties,
+ WithXML: opts.WithXML,
+ StructTags: opts.StructTags,
+ }
+ if err := pg.makeGenSchema(); err != nil {
+ return nil, fmt.Errorf("could not generate schema for %s: %v", name, err)
+ }
+ dsi, ok := di.Discriminators["#/definitions/"+name]
+ if ok {
+ // when these 2 are true then the schema will render as an interface
+ pg.GenSchema.IsBaseType = true
+ pg.GenSchema.IsExported = true
+ pg.GenSchema.DiscriminatorField = dsi.FieldName
+
+ if pg.GenSchema.Discriminates == nil {
+ pg.GenSchema.Discriminates = make(map[string]string)
+ }
+ pg.GenSchema.Discriminates[name] = dsi.GoType
+ pg.GenSchema.DiscriminatorValue = name
+
+ for _, v := range dsi.Children {
+ pg.GenSchema.Discriminates[v.FieldValue] = v.GoType
+ }
+
+ for j := range pg.GenSchema.Properties {
+ if !strings.HasSuffix(pg.GenSchema.Properties[j].ValueExpression, asMethod) {
+ pg.GenSchema.Properties[j].ValueExpression += asMethod
+ }
+ }
+ }
+
+ dse, ok := di.Discriminated["#/definitions/"+name]
+ if ok {
+ pg.GenSchema.DiscriminatorField = dse.FieldName
+ pg.GenSchema.DiscriminatorValue = dse.FieldValue
+ pg.GenSchema.IsSubType = true
+ knownProperties := make(map[string]struct{})
+
+ // find the referenced definitions
+ // check if it has a discriminator defined
+ // when it has a discriminator get the schema and run makeGenSchema for it.
+ // replace the ref with this new genschema
+ swsp := specDoc.Spec()
+ for i, ss := range schema.AllOf {
+ if pg.GenSchema.AllOf == nil {
+ log.Printf("warning: resolved schema for subtype %q.AllOf[%d] is empty. skipped", name, i)
+ continue
+ }
+ ref := ss.Ref
+ for ref.String() != "" {
+ var rsch *spec.Schema
+ var err error
+ rsch, err = spec.ResolveRef(swsp, &ref)
+ if err != nil {
+ return nil, err
+ }
+ if rsch != nil && rsch.Ref.String() != "" {
+ ref = rsch.Ref
+ continue
+ }
+ ref = spec.Ref{}
+ if rsch != nil && rsch.Discriminator != "" {
+ gs, err := makeGenDefinitionHierarchy(strings.TrimPrefix(ss.Ref.String(), "#/definitions/"), pkg, pg.GenSchema.Name, *rsch, specDoc, opts)
+ if err != nil {
+ return nil, err
+ }
+ gs.GenSchema.IsBaseType = true
+ gs.GenSchema.IsExported = true
+ pg.GenSchema.AllOf[i] = gs.GenSchema
+ schPtr := &(pg.GenSchema.AllOf[i])
+ if schPtr.AdditionalItems != nil {
+ schPtr.AdditionalItems.IsBaseType = true
+ }
+ if schPtr.AdditionalProperties != nil {
+ schPtr.AdditionalProperties.IsBaseType = true
+ }
+ for j := range schPtr.Properties {
+ schPtr.Properties[j].IsBaseType = true
+ knownProperties[schPtr.Properties[j].Name] = struct{}{}
+ }
+ }
+ }
+ }
+
+ // dedupe the fields
+ alreadySeen := make(map[string]struct{})
+ for i, ss := range pg.GenSchema.AllOf {
+ var remainingProperties GenSchemaList
+ for _, p := range ss.Properties {
+ if _, ok := knownProperties[p.Name]; !ok || ss.IsBaseType {
+ if _, seen := alreadySeen[p.Name]; !seen {
+ remainingProperties = append(remainingProperties, p)
+ alreadySeen[p.Name] = struct{}{}
+ }
+ }
+ }
+ pg.GenSchema.AllOf[i].Properties = remainingProperties
+ }
+
+ }
+
+ defaultImports := map[string]string{
+ "errors": "github.com/go-openapi/errors",
+ "runtime": "github.com/go-openapi/runtime",
+ "swag": "github.com/go-openapi/swag",
+ "validate": "github.com/go-openapi/validate",
+ }
+
+ return &GenDefinition{
+ GenCommon: GenCommon{
+ Copyright: opts.Copyright,
+ TargetImportPath: opts.LanguageOpts.baseImport(opts.Target),
+ },
+ Package: modelPkg,
+ GenSchema: pg.GenSchema,
+ DependsOn: pg.Dependencies,
+ DefaultImports: defaultImports,
+ ExtraSchemas: gatherExtraSchemas(pg.ExtraSchemas),
+ Imports: findImports(&pg.GenSchema),
+ External: isExternal(schema),
+ }, nil
+}
+
+func findImports(sch *GenSchema) map[string]string {
+ imp := make(map[string]string, 20)
+ t := sch.resolvedType
+ if t.Pkg != "" && t.PkgAlias != "" {
+ imp[t.PkgAlias] = t.Pkg
+ }
+ if t.IsEmbedded && t.ElemType != nil {
+ if t.ElemType.Pkg != "" && t.ElemType.PkgAlias != "" {
+ imp[t.ElemType.PkgAlias] = t.ElemType.Pkg
+ }
+ }
+ if sch.Items != nil {
+ sub := findImports(sch.Items)
+ for k, v := range sub {
+ imp[k] = v
+ }
+ }
+ if sch.AdditionalItems != nil {
+ sub := findImports(sch.AdditionalItems)
+ for k, v := range sub {
+ imp[k] = v
+ }
+ }
+ if sch.Object != nil {
+ sub := findImports(sch.Object)
+ for k, v := range sub {
+ imp[k] = v
+ }
+ }
+ if sch.Properties != nil {
+ for _, props := range sch.Properties {
+ p := props
+ sub := findImports(&p)
+ for k, v := range sub {
+ imp[k] = v
+ }
+ }
+ }
+ if sch.AdditionalProperties != nil {
+ sub := findImports(sch.AdditionalProperties)
+ for k, v := range sub {
+ imp[k] = v
+ }
+ }
+ if sch.AllOf != nil {
+ for _, props := range sch.AllOf {
+ p := props
+ sub := findImports(&p)
+ for k, v := range sub {
+ imp[k] = v
+ }
+ }
+ }
+ for k, v := range sch.ExtraImports {
+ if k != "" && v != "" {
+ imp[k] = v
+ }
+ }
+
+ return imp
+}
+
+type schemaGenContext struct {
+ Required bool
+ AdditionalProperty bool
+ Untyped bool
+ Named bool
+ RefHandled bool
+ IsVirtual bool
+ IsTuple bool
+ IncludeValidator bool
+ IncludeModel bool
+ StrictAdditionalProperties bool
+ WithXML bool
+ Index int
+
+ Path string
+ Name string
+ ParamName string
+ Accessor string
+ Receiver string
+ IndexVar string
+ KeyVar string
+ ValueExpr string
+ Container string
+ Schema spec.Schema
+ TypeResolver *typeResolver
+ StructTags []string
+
+ GenSchema GenSchema
+ Dependencies []string // NOTE: Dependencies is actually set nowhere
+ ExtraSchemas map[string]GenSchema
+ Discriminator *discor
+ Discriminated *discee
+ Discrimination *discInfo
+
+ // force to use container in inlined definitions (for deconflicting)
+ UseContainerInName bool
+}
+
+func (sg *schemaGenContext) NewSliceBranch(schema *spec.Schema) *schemaGenContext {
+ debugLog("new slice branch %s (model: %s)", sg.Name, sg.TypeResolver.ModelName)
+ pg := sg.shallowClone()
+ indexVar := pg.IndexVar
+ if pg.Path == "" {
+ pg.Path = "strconv.Itoa(" + indexVar + ")"
+ } else {
+ pg.Path = pg.Path + "+ \".\" + strconv.Itoa(" + indexVar + ")"
+ }
+ // check who is parent, if it's a base type then rewrite the value expression
+ if sg.Discrimination != nil && sg.Discrimination.Discriminators != nil {
+ _, rewriteValueExpr := sg.Discrimination.Discriminators["#/definitions/"+sg.TypeResolver.ModelName]
+ if (pg.IndexVar == "i" && rewriteValueExpr) || sg.GenSchema.ElemType.IsBaseType {
+ if !sg.GenSchema.IsAliased {
+ pg.ValueExpr = sg.Receiver + "." + swag.ToJSONName(sg.GenSchema.Name) + "Field"
+ } else {
+ pg.ValueExpr = sg.Receiver
+ }
+ }
+ }
+ sg.GenSchema.IsBaseType = sg.GenSchema.ElemType.HasDiscriminator
+ pg.IndexVar = indexVar + "i"
+ pg.ValueExpr = pg.ValueExpr + "[" + indexVar + "]"
+ pg.Schema = *schema
+ pg.Required = false
+ if sg.IsVirtual {
+ pg.TypeResolver = sg.TypeResolver.NewWithModelName(sg.TypeResolver.ModelName)
+ }
+
+ // when this is an anonymous complex object, this needs to become a ref
+ return pg
+}
+
+func (sg *schemaGenContext) NewAdditionalItems(schema *spec.Schema) *schemaGenContext {
+ debugLog("new additional items\n")
+
+ pg := sg.shallowClone()
+ indexVar := pg.IndexVar
+ pg.Name = sg.Name + " items"
+ itemsLen := 0
+ if sg.Schema.Items != nil {
+ itemsLen = sg.Schema.Items.Len()
+ }
+ var mod string
+ if itemsLen > 0 {
+ mod = "+" + strconv.Itoa(itemsLen)
+ }
+ if pg.Path == "" {
+ pg.Path = "strconv.Itoa(" + indexVar + mod + ")"
+ } else {
+ pg.Path = pg.Path + "+ \".\" + strconv.Itoa(" + indexVar + mod + ")"
+ }
+ pg.IndexVar = indexVar
+ pg.ValueExpr = sg.ValueExpr + "." + pascalize(sg.GoName()) + "Items[" + indexVar + "]"
+ pg.Schema = spec.Schema{}
+ if schema != nil {
+ pg.Schema = *schema
+ }
+ pg.Required = false
+ return pg
+}
+
+func (sg *schemaGenContext) NewTupleElement(schema *spec.Schema, index int) *schemaGenContext {
+ debugLog("New tuple element\n")
+
+ pg := sg.shallowClone()
+ if pg.Path == "" {
+ pg.Path = "\"" + strconv.Itoa(index) + "\""
+ } else {
+ pg.Path = pg.Path + "+ \".\"+\"" + strconv.Itoa(index) + "\""
+ }
+ pg.ValueExpr = pg.ValueExpr + ".P" + strconv.Itoa(index)
+
+ pg.Required = true
+ pg.IsTuple = true
+ pg.Schema = *schema
+
+ return pg
+}
+
+func (sg *schemaGenContext) NewStructBranch(name string, schema spec.Schema) *schemaGenContext {
+ debugLog("new struct branch %s (parent %s)", sg.Name, sg.Container)
+ pg := sg.shallowClone()
+ if sg.Path == "" {
+ pg.Path = fmt.Sprintf("%q", name)
+ } else {
+ pg.Path = pg.Path + "+\".\"+" + fmt.Sprintf("%q", name)
+ }
+ pg.Name = name
+ pg.ValueExpr = pg.ValueExpr + "." + pascalize(goName(&schema, name))
+ pg.Schema = schema
+ for _, fn := range sg.Schema.Required {
+ if name == fn {
+ pg.Required = true
+ break
+ }
+ }
+ debugLog("made new struct branch %s (parent %s)", pg.Name, pg.Container)
+ return pg
+}
+
+func (sg *schemaGenContext) shallowClone() *schemaGenContext {
+ debugLog("cloning context %s\n", sg.Name)
+ pg := new(schemaGenContext)
+ *pg = *sg
+ if pg.Container == "" {
+ pg.Container = sg.Name
+ }
+ pg.GenSchema = GenSchema{StructTags: sg.StructTags}
+ pg.Dependencies = nil
+ pg.Named = false
+ pg.Index = 0
+ pg.IsTuple = false
+ pg.IncludeValidator = sg.IncludeValidator
+ pg.IncludeModel = sg.IncludeModel
+ pg.StrictAdditionalProperties = sg.StrictAdditionalProperties
+ return pg
+}
+
+func (sg *schemaGenContext) NewCompositionBranch(schema spec.Schema, index int) *schemaGenContext {
+ debugLog("new composition branch %s (parent: %s, index: %d)", sg.Name, sg.Container, index)
+ pg := sg.shallowClone()
+ pg.Schema = schema
+ pg.Name = "AO" + strconv.Itoa(index)
+ if sg.Name != sg.TypeResolver.ModelName {
+ pg.Name = sg.Name + pg.Name
+ }
+ pg.Index = index
+ debugLog("made new composition branch %s (parent: %s)", pg.Name, pg.Container)
+ return pg
+}
+
+func (sg *schemaGenContext) NewAdditionalProperty(schema spec.Schema) *schemaGenContext {
+ debugLog("new additional property %s (expr: %s)", sg.Name, sg.ValueExpr)
+ pg := sg.shallowClone()
+ pg.Schema = schema
+ if pg.KeyVar == "" {
+ pg.ValueExpr = sg.ValueExpr
+ }
+ pg.KeyVar += "k"
+ pg.ValueExpr += "[" + pg.KeyVar + "]"
+ pg.Path = pg.KeyVar
+ pg.GenSchema.Suffix = "Value"
+ if sg.Path != "" {
+ pg.Path = sg.Path + "+\".\"+" + pg.KeyVar
+ }
+ // propagates the special IsNullable override for maps of slices and
+ // maps of aliased types.
+ pg.GenSchema.IsMapNullOverride = sg.GenSchema.IsMapNullOverride
+ return pg
+}
+
+func hasContextValidations(model *spec.Schema) bool {
+ // always assume a ref needs context validation
+ // TODO: find a way to determine whether a ref needs context validation or not
+ if model.ReadOnly || model.Ref.String() != "" {
+ return true
+ }
+ return false
+}
+
+func hasValidations(model *spec.Schema, isRequired bool) bool {
+ if isRequired {
+ return true
+ }
+
+ v := model.Validations()
+ if v.HasNumberValidations() || v.HasStringValidations() || v.HasArrayValidations() || v.HasEnum() || v.HasObjectValidations() {
+ return true
+ }
+
+ // since this was added to deal with discriminator, we'll fix this when testing discriminated types
+ if len(model.Properties) > 0 && model.Discriminator == "" {
+ return true
+ }
+
+ // lift validations from allOf branches
+ for _, s := range model.AllOf {
+ schema := s
+ if s.Ref.String() != "" || hasValidations(&schema, false) {
+ return true
+ }
+ }
+
+ return false
+}
+
+func hasFormatValidation(tpe resolvedType) bool {
+ if tpe.IsCustomFormatter && !tpe.IsStream && !tpe.IsBase64 {
+ return true
+ }
+ if tpe.IsArray && tpe.ElemType != nil {
+ return hasFormatValidation(*tpe.ElemType)
+ }
+ return false
+}
+
+func (sg *schemaGenContext) schemaValidations() sharedValidations {
+ model := sg.Schema
+
+ isRequired := sg.Required
+ if model.Default != nil || model.ReadOnly {
+ // when readOnly or default is specified, this disables Required validation (Swagger-specific)
+ isRequired = false
+ if sg.Required {
+ log.Printf("warn: properties with a default value or readOnly should not be required [%s]", sg.Name)
+ }
+ }
+
+ v := model.Validations()
+ return sharedValidations{
+ Required: sg.Required, /* TODO(fred): guard for cases with discriminator field, default and readOnly*/
+ SchemaValidations: v,
+ HasSliceValidations: v.HasArrayValidations() || v.HasEnum(),
+ HasValidations: hasValidations(&model, isRequired),
+ }
+}
+
+func mergeValidation(other *schemaGenContext) bool {
+ // NOTE: NeedsRequired and NeedsValidation are deprecated
+ if other.GenSchema.AdditionalProperties != nil && other.GenSchema.AdditionalProperties.HasValidations {
+ return true
+ }
+ if other.GenSchema.AdditionalItems != nil && other.GenSchema.AdditionalItems.HasValidations {
+ return true
+ }
+ for _, sch := range other.GenSchema.AllOf {
+ if sch.HasValidations {
+ return true
+ }
+ }
+ return other.GenSchema.HasValidations
+}
+
+func (sg *schemaGenContext) MergeResult(other *schemaGenContext, liftsRequired bool) {
+ sg.GenSchema.HasValidations = sg.GenSchema.HasValidations || mergeValidation(other)
+ sg.GenSchema.HasContextValidations = sg.GenSchema.HasContextValidations || other.GenSchema.HasContextValidations
+
+ if liftsRequired && other.GenSchema.AdditionalProperties != nil && other.GenSchema.AdditionalProperties.Required {
+ sg.GenSchema.Required = true
+ }
+ if liftsRequired && other.GenSchema.Required {
+ sg.GenSchema.Required = other.GenSchema.Required
+ }
+
+ if other.GenSchema.HasBaseType {
+ sg.GenSchema.HasBaseType = other.GenSchema.HasBaseType
+ }
+
+ sg.Dependencies = append(sg.Dependencies, other.Dependencies...)
+
+ // lift extra schemas
+ for k, v := range other.ExtraSchemas {
+ sg.ExtraSchemas[k] = v
+ }
+ if other.GenSchema.IsMapNullOverride {
+ sg.GenSchema.IsMapNullOverride = true
+ }
+
+ // lift extra imports
+ if other.GenSchema.Pkg != "" && other.GenSchema.PkgAlias != "" {
+ sg.GenSchema.ExtraImports[other.GenSchema.PkgAlias] = other.GenSchema.Pkg
+ }
+ for k, v := range other.GenSchema.ExtraImports {
+ sg.GenSchema.ExtraImports[k] = v
+ }
+}
+
+func (sg *schemaGenContext) buildProperties() error {
+ debugLog("building properties %s (parent: %s)", sg.Name, sg.Container)
+
+ for k, v := range sg.Schema.Properties {
+ debugLogAsJSON("building property %s[%q] (IsTuple: %t) (IsBaseType: %t) (HasValidations: %t)",
+ sg.Name, k, sg.IsTuple, sg.GenSchema.IsBaseType, sg.GenSchema.HasValidations, v)
+
+ vv := v
+
+ // check if this requires de-anonymizing, if so lift this as a new struct and extra schema
+ tpe, err := sg.TypeResolver.ResolveSchema(&vv, true, sg.IsTuple || swag.ContainsStrings(sg.Schema.Required, k))
+ if err != nil {
+ return err
+ }
+ if sg.Schema.Discriminator == k {
+ tpe.IsNullable = false
+ }
+
+ var hasValidation bool
+ if tpe.IsComplexObject && tpe.IsAnonymous && len(v.Properties) > 0 {
+ // this is an anonymous complex construct: build a new type for it
+ pg := sg.makeNewStruct(sg.makeRefName()+swag.ToGoName(k), v)
+ pg.IsTuple = sg.IsTuple
+ if sg.Path != "" {
+ pg.Path = sg.Path + "+ \".\"+" + fmt.Sprintf("%q", k)
+ } else {
+ pg.Path = fmt.Sprintf("%q", k)
+ }
+ if err := pg.makeGenSchema(); err != nil {
+ return err
+ }
+ if v.Discriminator != "" {
+ pg.GenSchema.IsBaseType = true
+ pg.GenSchema.IsExported = true
+ pg.GenSchema.HasBaseType = true
+ }
+
+ vv = *spec.RefProperty("#/definitions/" + pg.Name)
+ hasValidation = pg.GenSchema.HasValidations
+ sg.ExtraSchemas[pg.Name] = pg.GenSchema
+ // NOTE: MergeResult lifts validation status and extra schemas
+ sg.MergeResult(pg, false)
+ }
+
+ emprop := sg.NewStructBranch(k, vv)
+ emprop.IsTuple = sg.IsTuple
+
+ if err := emprop.makeGenSchema(); err != nil {
+ return err
+ }
+
+ // whatever the validations say, if we have an interface{}, do not validate
+ // NOTE: this may be the case when the type is left empty and we get an Enum validation.
+ if emprop.GenSchema.IsInterface || emprop.GenSchema.IsStream {
+ emprop.GenSchema.HasValidations = false
+ } else if hasValidation || emprop.GenSchema.HasValidations || emprop.GenSchema.Required || emprop.GenSchema.IsAliased || len(emprop.GenSchema.AllOf) > 0 {
+ emprop.GenSchema.HasValidations = true
+ sg.GenSchema.HasValidations = true
+ }
+
+ // generates format validation on property
+ emprop.GenSchema.HasValidations = emprop.GenSchema.HasValidations || hasFormatValidation(tpe)
+
+ if emprop.Schema.Ref.String() != "" {
+ // expand the schema of this property, so we take informed decisions about its type
+ ref := emprop.Schema.Ref
+ var sch *spec.Schema
+ for ref.String() != "" {
+ var rsch *spec.Schema
+ var err error
+ specDoc := sg.TypeResolver.Doc
+ rsch, err = spec.ResolveRef(specDoc.Spec(), &ref)
+ if err != nil {
+ return err
+ }
+ if rsch == nil {
+ return errors.New("spec.ResolveRef returned nil schema")
+ }
+ if rsch.Ref.String() != "" {
+ ref = rsch.Ref
+ continue
+ }
+ ref = spec.Ref{}
+ sch = rsch
+ }
+
+ if emprop.Discrimination != nil {
+ if _, ok := emprop.Discrimination.Discriminators[emprop.Schema.Ref.String()]; ok {
+ emprop.GenSchema.IsBaseType = true
+ emprop.GenSchema.IsNullable = false
+ emprop.GenSchema.HasBaseType = true
+ }
+ if _, ok := emprop.Discrimination.Discriminated[emprop.Schema.Ref.String()]; ok {
+ emprop.GenSchema.IsSubType = true
+ }
+ }
+
+ // set property name
+ var nm = filepath.Base(emprop.Schema.Ref.GetURL().Fragment)
+
+ tr := sg.TypeResolver.NewWithModelName(goName(&emprop.Schema, swag.ToGoName(nm)))
+ ttpe, err := tr.ResolveSchema(sch, false, true)
+ if err != nil {
+ return err
+ }
+ if ttpe.IsAliased {
+ emprop.GenSchema.IsAliased = true
+ }
+
+ // lift validations
+ hv := hasValidations(sch, false)
+
+ // include format validation, excluding binary
+ hv = hv || hasFormatValidation(ttpe)
+
+ // a base type property is always validated against the base type
+ // exception: for the base type definition itself (see shallowValidationLookup())
+ if (hv || emprop.GenSchema.IsBaseType) && !(emprop.GenSchema.IsInterface || emprop.GenSchema.IsStream) {
+ emprop.GenSchema.HasValidations = true
+ }
+ if ttpe.HasAdditionalItems && sch.AdditionalItems.Schema != nil {
+ // when AdditionalItems specifies a Schema, there is a validation
+ // check if we stepped upon an exception
+ child, err := tr.ResolveSchema(sch.AdditionalItems.Schema, false, true)
+ if err != nil {
+ return err
+ }
+ if !child.IsInterface && !child.IsStream {
+ emprop.GenSchema.HasValidations = true
+ }
+ }
+ if ttpe.IsMap && sch.AdditionalProperties != nil && sch.AdditionalProperties.Schema != nil {
+ // when AdditionalProperties specifies a Schema, there is a validation
+ // check if we stepped upon an exception
+ child, err := tr.ResolveSchema(sch.AdditionalProperties.Schema, false, true)
+ if err != nil {
+ return err
+ }
+ if !child.IsInterface && !child.IsStream {
+ emprop.GenSchema.HasValidations = true
+ }
+ }
+ }
+
+ if sg.Schema.Discriminator == k {
+ // this is the discriminator property:
+ // it is required, but forced as non-nullable,
+ // since we never fill it with a zero-value
+ // TODO: when there is no property other than the discriminator, there is no validation
+ emprop.GenSchema.IsNullable = false
+ }
+ if emprop.GenSchema.IsBaseType {
+ sg.GenSchema.HasBaseType = true
+ }
+ sg.MergeResult(emprop, false)
+
+ // when discriminated, data is accessed via a getter func
+ if emprop.GenSchema.HasDiscriminator {
+ emprop.GenSchema.ValueExpression += asMethod
+ }
+
+ emprop.GenSchema.Extensions = emprop.Schema.Extensions
+
+ // set custom serializer tag
+ if customTag, found := tpe.Extensions[xGoCustomTag]; found {
+ tagAsStr, ok := customTag.(string)
+ if ok {
+ emprop.GenSchema.CustomTag = tagAsStr
+ } else {
+ log.Printf("warning: expect %s extension to be a string, got: %v. Skipped", xGoCustomTag, customTag)
+ }
+ }
+ sg.GenSchema.Properties = append(sg.GenSchema.Properties, emprop.GenSchema)
+ }
+ sort.Sort(sg.GenSchema.Properties)
+
+ return nil
+}
+
+func (sg *schemaGenContext) buildAllOf() error {
+ if len(sg.Schema.AllOf) == 0 {
+ return nil
+ }
+
+ var hasArray, hasNonArray int
+
+ sort.Sort(sg.GenSchema.AllOf)
+ if sg.Container == "" {
+ sg.Container = sg.Name
+ }
+ debugLogAsJSON("building all of for %d entries", len(sg.Schema.AllOf), sg.Schema)
+ for i, schema := range sg.Schema.AllOf {
+ sch := schema
+ tpe, ert := sg.TypeResolver.ResolveSchema(&sch, sch.Ref.String() == "", false)
+ if ert != nil {
+ return ert
+ }
+
+ // check for multiple arrays in allOf branches.
+ // Although a valid JSON-Schema construct, it is not suited for serialization.
+ // The same problem arises if we attempt to serialize an array together with another object.
+ // We issue a generation warning on this.
+ if tpe.IsArray {
+ hasArray++
+ } else {
+ hasNonArray++
+ }
+ debugLogAsJSON("trying", sch)
+ if (tpe.IsAnonymous && len(sch.AllOf) > 0) || (sch.Ref.String() == "" && !tpe.IsComplexObject && (tpe.IsArray || tpe.IsInterface || tpe.IsPrimitive)) {
+ // cases where anonymous structures cause the creation of a new type:
+ // - nested allOf: this one is itself an allOf: build a new type for it
+ // - anonymous simple types for edge cases: array, primitive, interface{}
+ // NOTE: when branches are aliased or anonymous, the nullable property in the branch type is lost.
+ name := swag.ToVarName(goName(&sch, sg.makeRefName()+"AllOf"+strconv.Itoa(i)))
+ debugLog("building anonymous nested allOf in %s: %s", sg.Name, name)
+ ng := sg.makeNewStruct(name, sch)
+ if err := ng.makeGenSchema(); err != nil {
+ return err
+ }
+
+ newsch := spec.RefProperty("#/definitions/" + ng.Name)
+ sg.Schema.AllOf[i] = *newsch
+
+ pg := sg.NewCompositionBranch(*newsch, i)
+ if err := pg.makeGenSchema(); err != nil {
+ return err
+ }
+
+ // lift extra schemas & validations from new type
+ pg.MergeResult(ng, true)
+
+ // lift validations when complex or ref'ed:
+ // - parent always calls its Validatable child
+ // - child may or may not have validations
+ //
+ // Exception: child is not Validatable when interface or stream
+ if !pg.GenSchema.IsInterface && !pg.GenSchema.IsStream {
+ sg.GenSchema.HasValidations = true
+ }
+
+ // add the newly created type to the list of schemas to be rendered inline
+ pg.ExtraSchemas[ng.Name] = ng.GenSchema
+
+ sg.MergeResult(pg, true)
+
+ sg.GenSchema.AllOf = append(sg.GenSchema.AllOf, pg.GenSchema)
+
+ continue
+ }
+
+ comprop := sg.NewCompositionBranch(sch, i)
+ if err := comprop.makeGenSchema(); err != nil {
+ return err
+ }
+ if comprop.GenSchema.IsMap && comprop.GenSchema.HasAdditionalProperties && comprop.GenSchema.AdditionalProperties != nil && !comprop.GenSchema.IsInterface {
+ // the anonymous branch is a map for AdditionalProperties: rewrite value expression
+ comprop.GenSchema.ValueExpression = comprop.GenSchema.ValueExpression + "." + comprop.Name
+ comprop.GenSchema.AdditionalProperties.ValueExpression = comprop.GenSchema.ValueExpression + "[" + comprop.GenSchema.AdditionalProperties.KeyVar + "]"
+ }
+
+ // lift validations when complex or ref'ed
+ if (comprop.GenSchema.IsComplexObject || comprop.Schema.Ref.String() != "") && !(comprop.GenSchema.IsInterface || comprop.GenSchema.IsStream) {
+ comprop.GenSchema.HasValidations = true
+ }
+ sg.MergeResult(comprop, true)
+ sg.GenSchema.AllOf = append(sg.GenSchema.AllOf, comprop.GenSchema)
+ }
+
+ if hasArray > 1 || (hasArray > 0 && hasNonArray > 0) {
+ log.Printf("warning: cannot generate serializable allOf with conflicting array definitions in %s", sg.Container)
+ }
+
+ // AllOf types are always considered nullable, except when an extension says otherwise
+ if override, ok := sg.TypeResolver.isNullableOverride(&sg.Schema); ok {
+ sg.GenSchema.IsNullable = override
+ } else {
+ sg.GenSchema.IsNullable = true
+ }
+
+ // prevent IsAliased from bubbling up (e.g. when a single branch is itself aliased)
+ sg.GenSchema.IsAliased = sg.GenSchema.IsAliased && len(sg.GenSchema.AllOf) < 2
+
+ return nil
+}
+
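+// mapStack links the levels of a chain of nested additionalProperties.
+// For example, a schema like:
+//
+//	type: object
+//	additionalProperties:
+//	  type: object
+//	  additionalProperties:
+//	    type: string
+//
+// resolves to map[string]map[string]string; each nesting level pushes one
+// entry onto this stack.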
+type mapStack struct {
+ Type *spec.Schema
+ Next *mapStack
+ Previous *mapStack
+ ValueRef *schemaGenContext
+ Context *schemaGenContext
+ NewObj *schemaGenContext
+}
+
+func newMapStack(context *schemaGenContext) (first, last *mapStack, err error) {
+ ms := &mapStack{
+ Type: &context.Schema,
+ Context: context,
+ }
+
+ l := ms
+ for l.HasMore() {
+ tpe, err := l.Context.TypeResolver.ResolveSchema(l.Type.AdditionalProperties.Schema, true, true)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ if !tpe.IsMap {
+ // reached the end of the rabbit hole
+ if tpe.IsComplexObject && tpe.IsAnonymous {
+ // found an anonymous object: create the struct from a newly created definition
+ nw := l.Context.makeNewStruct(l.Context.makeRefName()+" Anon", *l.Type.AdditionalProperties.Schema)
+ sch := spec.RefProperty("#/definitions/" + nw.Name)
+ l.NewObj = nw
+
+ l.Type.AdditionalProperties.Schema = sch
+ l.ValueRef = l.Context.NewAdditionalProperty(*sch)
+ }
+
+ // the other cases where we stop are a $ref or a simple object
+ break
+ }
+
+ // continue digging for maps
+ l.Next = &mapStack{
+ Previous: l,
+ Type: l.Type.AdditionalProperties.Schema,
+ Context: l.Context.NewAdditionalProperty(*l.Type.AdditionalProperties.Schema),
+ }
+ l = l.Next
+ }
+
+ // return top and bottom entries of this stack of AdditionalProperties
+ return ms, l, nil
+}
+
+// Build rewinds the stack of additional properties, building schemas from bottom to top
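+//
+// A single-level map (no deeper nesting and no anonymous object) takes the
+// early-exit path below; deeper stacks are unwound from the innermost element
+// back up, merging results and lifting validations at each level.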
+func (mt *mapStack) Build() error {
+ if mt.NewObj == nil && mt.ValueRef == nil && mt.Next == nil && mt.Previous == nil {
+ csch := mt.Type.AdditionalProperties.Schema
+ cp := mt.Context.NewAdditionalProperty(*csch)
+ d := mt.Context.TypeResolver.Doc
+
+ asch, err := analysis.Schema(analysis.SchemaOpts{
+ Root: d.Spec(),
+ BasePath: d.SpecFilePath(),
+ Schema: csch,
+ })
+ if err != nil {
+ return err
+ }
+ cp.Required = !asch.IsSimpleSchema && !asch.IsMap
+
+ // when the schema is an array or an alias, this may result in inconsistent
+ // nullable status between the map element and the array element (resp. the aliased type).
+ //
+ // Example: when an object has no property and only additionalProperties,
+ // which turn out to be arrays of some other object.
+
+ // save the initial override
+ hadOverride := cp.GenSchema.IsMapNullOverride
+ if err := cp.makeGenSchema(); err != nil {
+ return err
+ }
+
+ // if we have an override at the top of the stack, propagate it down to nested arrays
+ if hadOverride && cp.GenSchema.IsArray {
+ // do it for nested arrays: override is also about map[string][][]... constructs
+ it := &cp.GenSchema
+ for it.Items != nil && it.IsArray {
+ it.Items.IsMapNullOverride = hadOverride
+ it = it.Items
+ }
+ }
+ // cover other cases than arrays (aliased types)
+ cp.GenSchema.IsMapNullOverride = hadOverride
+
+ mt.Context.MergeResult(cp, false)
+ mt.Context.GenSchema.AdditionalProperties = &cp.GenSchema
+
+ // lift validations
+ if (csch.Ref.String() != "" || cp.GenSchema.IsAliased) && !(cp.GenSchema.IsInterface || cp.GenSchema.IsStream) {
+ // - we stopped on a ref, or anything else that requires we call its Validate() method
+ // - if the alias / ref is on an interface (or stream) type: no validation
+ mt.Context.GenSchema.HasValidations = true
+ mt.Context.GenSchema.AdditionalProperties.HasValidations = true
+ }
+
+ debugLog("early mapstack exit, nullable: %t for %s", cp.GenSchema.IsNullable, cp.GenSchema.Name)
+ return nil
+ }
+ cur := mt
+ for cur != nil {
+ if cur.NewObj != nil {
+ // a new model has been created during the stack construction (new ref on anonymous object)
+ if err := cur.NewObj.makeGenSchema(); err != nil {
+ return err
+ }
+ }
+
+ if cur.ValueRef != nil {
+ if err := cur.ValueRef.makeGenSchema(); err != nil {
+ return err
+ }
+ }
+
+ if cur.NewObj != nil {
+ // newly created model from anonymous object is declared as extra schema
+ cur.Context.MergeResult(cur.NewObj, false)
+
+ // propagates extra schemas
+ cur.Context.ExtraSchemas[cur.NewObj.Name] = cur.NewObj.GenSchema
+ }
+
+ if cur.ValueRef != nil {
+ // this is the genSchema for this new anonymous AdditionalProperty
+ if err := cur.Context.makeGenSchema(); err != nil {
+ return err
+ }
+
+ // if there is a ValueRef, we must have a NewObj (from newMapStack() construction)
+ cur.ValueRef.GenSchema.HasValidations = cur.NewObj.GenSchema.HasValidations
+ cur.Context.MergeResult(cur.ValueRef, false)
+ cur.Context.GenSchema.AdditionalProperties = &cur.ValueRef.GenSchema
+ }
+
+ if cur.Previous != nil {
+ // we have a parent schema: build a schema for current AdditionalProperties
+ if err := cur.Context.makeGenSchema(); err != nil {
+ return err
+ }
+ }
+ if cur.Next != nil {
+ // we previously made a child schema: lift things from that one
+ // - Required is not lifted (in a cascade of maps, only the last element is actually checked for Required)
+ cur.Context.MergeResult(cur.Next.Context, false)
+ cur.Context.GenSchema.AdditionalProperties = &cur.Next.Context.GenSchema
+
+ // lift validations
+ c := &cur.Next.Context.GenSchema
+ if (cur.Next.Context.Schema.Ref.String() != "" || c.IsAliased) && !(c.IsInterface || c.IsStream) {
+ // - we stopped on a ref, or anything else that requires we call its Validate()
+ // - if the alias / ref is on an interface (or stream) type: no validation
+ cur.Context.GenSchema.HasValidations = true
+ cur.Context.GenSchema.AdditionalProperties.HasValidations = true
+ }
+ }
+ if cur.ValueRef != nil {
+ cur.Context.MergeResult(cur.ValueRef, false)
+ cur.Context.GenSchema.AdditionalProperties = &cur.ValueRef.GenSchema
+ }
+
+ if cur.Context.GenSchema.AdditionalProperties != nil {
+ // propagate overrides up the resolved schemas, but leave any ExtraSchema untouched
+ cur.Context.GenSchema.AdditionalProperties.IsMapNullOverride = cur.Context.GenSchema.IsMapNullOverride
+ }
+ cur = cur.Previous
+ }
+
+ return nil
+}
+
+func (mt *mapStack) HasMore() bool {
+ return mt.Type.AdditionalProperties != nil && (mt.Type.AdditionalProperties.Schema != nil || mt.Type.AdditionalProperties.Allows)
+}
+
+/* currently unused:
+func (mt *mapStack) Dict() map[string]interface{} {
+ res := make(map[string]interface{})
+ res["context"] = mt.Context.Schema
+ if mt.Next != nil {
+ res["next"] = mt.Next.Dict()
+ }
+ if mt.NewObj != nil {
+ res["obj"] = mt.NewObj.Schema
+ }
+ if mt.ValueRef != nil {
+ res["value"] = mt.ValueRef.Schema
+ }
+ return res
+}
+*/
+
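+// buildAdditionalProperties handles the shapes additionalProperties may take:
+// the boolean form (additionalProperties: true renders as map[string]interface{}),
+// a schema on a named object (accessed through a dedicated map member), and
+// nested maps (delegated to mapStack). For instance, an object with no
+// properties and additionalProperties: { type: integer } typically renders as
+// a map[string]int64 alias.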
+func (sg *schemaGenContext) buildAdditionalProperties() error {
+ if sg.Schema.AdditionalProperties == nil {
+ return nil
+ }
+ addp := *sg.Schema.AdditionalProperties
+
+ wantsAdditional := addp.Schema != nil || addp.Allows
+ sg.GenSchema.HasAdditionalProperties = wantsAdditional
+ if !wantsAdditional {
+ return nil
+ }
+
+ // flag swap
+ if sg.GenSchema.IsComplexObject {
+ sg.GenSchema.IsAdditionalProperties = true
+ sg.GenSchema.IsComplexObject = false
+ sg.GenSchema.IsMap = false
+ }
+
+ if addp.Schema == nil {
+ // this is for AdditionalProperties:true|false
+ if addp.Allows {
+ // additionalProperties: true is rendered as: map[string]interface{}
+ addp.Schema = &spec.Schema{}
+
+ addp.Schema.Typed("object", "")
+ sg.GenSchema.HasAdditionalProperties = true
+ sg.GenSchema.IsComplexObject = false
+ sg.GenSchema.IsMap = true
+
+ sg.GenSchema.ValueExpression += "." + swag.ToGoName(sg.Name+" additionalProperties")
+ cp := sg.NewAdditionalProperty(*addp.Schema)
+ cp.Name += "AdditionalProperties"
+ cp.Required = false
+ if err := cp.makeGenSchema(); err != nil {
+ return err
+ }
+ sg.MergeResult(cp, false)
+ sg.GenSchema.AdditionalProperties = &cp.GenSchema
+ debugLog("added interface{} schema for additionalProperties[allows == true], IsInterface=%t", cp.GenSchema.IsInterface)
+ }
+ return nil
+ }
+
+ if !sg.GenSchema.IsMap && (sg.GenSchema.IsAdditionalProperties && sg.Named) {
+ // we have a complex object with an AdditionalProperties schema
+
+ tpe, ert := sg.TypeResolver.ResolveSchema(addp.Schema, addp.Schema.Ref.String() == "", false)
+ if ert != nil {
+ return ert
+ }
+
+ if tpe.IsComplexObject && tpe.IsAnonymous {
+ // if the AdditionalProperties is an anonymous complex object, generate a new type for it
+ pg := sg.makeNewStruct(sg.makeRefName()+" Anon", *addp.Schema)
+ if err := pg.makeGenSchema(); err != nil {
+ return err
+ }
+ sg.MergeResult(pg, false)
+ sg.ExtraSchemas[pg.Name] = pg.GenSchema
+
+ sg.Schema.AdditionalProperties.Schema = spec.RefProperty("#/definitions/" + pg.Name)
+ sg.IsVirtual = true
+
+ comprop := sg.NewAdditionalProperty(*sg.Schema.AdditionalProperties.Schema)
+ if err := comprop.makeGenSchema(); err != nil {
+ return err
+ }
+
+ comprop.GenSchema.Required = true
+ comprop.GenSchema.HasValidations = true
+
+ comprop.GenSchema.ValueExpression = sg.GenSchema.ValueExpression + "." + swag.ToGoName(sg.GenSchema.Name) + "[" + comprop.KeyVar + "]"
+
+ sg.GenSchema.AdditionalProperties = &comprop.GenSchema
+ sg.GenSchema.HasAdditionalProperties = true
+ sg.GenSchema.ValueExpression += "." + swag.ToGoName(sg.GenSchema.Name)
+
+ sg.MergeResult(comprop, false)
+
+ return nil
+ }
+
+ // this is a regular named schema for AdditionalProperties
+ sg.GenSchema.ValueExpression += "." + swag.ToGoName(sg.GenSchema.Name)
+ comprop := sg.NewAdditionalProperty(*addp.Schema)
+ d := sg.TypeResolver.Doc
+ asch, err := analysis.Schema(analysis.SchemaOpts{
+ Root: d.Spec(),
+ BasePath: d.SpecFilePath(),
+ Schema: addp.Schema,
+ })
+ if err != nil {
+ return err
+ }
+ comprop.Required = !asch.IsSimpleSchema && !asch.IsMap
+ if err := comprop.makeGenSchema(); err != nil {
+ return err
+ }
+
+ sg.MergeResult(comprop, false)
+ sg.GenSchema.AdditionalProperties = &comprop.GenSchema
+ sg.GenSchema.AdditionalProperties.ValueExpression = sg.GenSchema.ValueExpression + "[" + comprop.KeyVar + "]"
+
+ // rewrite value expression for arrays and arrays of arrays in maps (rendered as map[string][][]...)
+ if sg.GenSchema.AdditionalProperties.IsArray {
+ // maps of slices are where an override may take effect
+ sg.GenSchema.AdditionalProperties.Items.IsMapNullOverride = sg.GenSchema.AdditionalProperties.IsMapNullOverride
+ sg.GenSchema.AdditionalProperties.Items.ValueExpression = sg.GenSchema.ValueExpression + "[" + comprop.KeyVar + "]" + "[" + sg.GenSchema.AdditionalProperties.IndexVar + "]"
+ ap := sg.GenSchema.AdditionalProperties.Items
+ for ap != nil && ap.IsArray {
+ ap.Items.IsMapNullOverride = ap.IsMapNullOverride
+ ap.Items.ValueExpression = ap.ValueExpression + "[" + ap.IndexVar + "]"
+ ap = ap.Items
+ }
+ }
+
+ // lift validation
+ if (sg.GenSchema.AdditionalProperties.IsComplexObject || sg.GenSchema.AdditionalProperties.IsAliased || sg.GenSchema.AdditionalProperties.Required) && !(sg.GenSchema.AdditionalProperties.IsInterface || sg.GenSchema.IsStream) {
+ sg.GenSchema.HasValidations = true
+ }
+ return nil
+ }
+
+ if sg.GenSchema.IsMap && wantsAdditional {
+ // this is itself an AdditionalProperties schema with some AdditionalProperties.
+ // this also runs for aliased map types (with zero properties save additionalProperties)
+ //
+ // find out how deep this rabbit hole goes
+ // descend, unwind and rewrite
+ // This needs to be depth first, so it first goes as deep as it can and then
+ // builds the result in reverse order.
+ _, ls, err := newMapStack(sg)
+ if err != nil {
+ return err
+ }
+ return ls.Build()
+ }
+
+ if sg.GenSchema.IsAdditionalProperties && !sg.Named {
+ // for an anonymous object, first build the new object
+ // and then replace the current one with a $ref to the
+ // new object
+ newObj := sg.makeNewStruct(sg.GenSchema.Name+" P"+strconv.Itoa(sg.Index), sg.Schema)
+ if err := newObj.makeGenSchema(); err != nil {
+ return err
+ }
+
+ hasMapNullOverride := sg.GenSchema.IsMapNullOverride
+ sg.GenSchema = GenSchema{StructTags: sg.StructTags}
+ sg.Schema = *spec.RefProperty("#/definitions/" + newObj.Name)
+ if err := sg.makeGenSchema(); err != nil {
+ return err
+ }
+ sg.MergeResult(newObj, false)
+
+ sg.GenSchema.IsMapNullOverride = hasMapNullOverride
+ if sg.GenSchema.IsArray {
+ sg.GenSchema.Items.IsMapNullOverride = hasMapNullOverride
+ }
+
+ sg.GenSchema.HasValidations = newObj.GenSchema.HasValidations
+ sg.ExtraSchemas[newObj.Name] = newObj.GenSchema
+ return nil
+ }
+ return nil
+}
+
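+// makeNewStruct promotes an anonymous schema to a named definition: the schema is
+// registered under spec.Definitions so that subsequent "#/definitions/<name>"
+// rewrites resolve, and a fresh generation context is returned for it. For
+// example, an anonymous object used as array items may become a definition
+// named like "<Parent>Items0".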
+func (sg *schemaGenContext) makeNewStruct(name string, schema spec.Schema) *schemaGenContext {
+ debugLog("making new struct: name: %s, container: %s", name, sg.Container)
+ sp := sg.TypeResolver.Doc.Spec()
+ name = swag.ToGoName(name)
+ if sg.TypeResolver.ModelName != sg.Name {
+ name = swag.ToGoName(sg.TypeResolver.ModelName + " " + name)
+ }
+ if sp.Definitions == nil {
+ sp.Definitions = make(spec.Definitions)
+ }
+ sp.Definitions[name] = schema
+ pg := schemaGenContext{
+ Path: "",
+ Name: name,
+ Receiver: sg.Receiver,
+ IndexVar: "i",
+ ValueExpr: sg.Receiver,
+ Schema: schema,
+ Required: false,
+ Named: true,
+ ExtraSchemas: make(map[string]GenSchema),
+ Discrimination: sg.Discrimination,
+ Container: sg.Container,
+ IncludeValidator: sg.IncludeValidator,
+ IncludeModel: sg.IncludeModel,
+ StrictAdditionalProperties: sg.StrictAdditionalProperties,
+ StructTags: sg.StructTags,
+ }
+ if schema.Ref.String() == "" {
+ pg.TypeResolver = sg.TypeResolver.NewWithModelName(name)
+ }
+ pg.GenSchema.IsVirtual = true
+
+ sg.ExtraSchemas[name] = pg.GenSchema
+ return &pg
+}
+
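+// buildArray produces the generation schema for array types: anonymous complex
+// items are first promoted to their own definition, then the element schema is
+// built. Nullable elements yield a slice of pointers, e.g. roughly []*Thing
+// for items referencing a complex object, versus []string for plain strings.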
+func (sg *schemaGenContext) buildArray() error {
+ tpe, err := sg.TypeResolver.ResolveSchema(sg.Schema.Items.Schema, true, false)
+ if err != nil {
+ return err
+ }
+
+ // check if the element is a complex object, if so generate a new type for it
+ if tpe.IsComplexObject && tpe.IsAnonymous {
+ pg := sg.makeNewStruct(sg.makeRefName()+" items"+strconv.Itoa(sg.Index), *sg.Schema.Items.Schema)
+ if err := pg.makeGenSchema(); err != nil {
+ return err
+ }
+ sg.MergeResult(pg, false)
+ sg.ExtraSchemas[pg.Name] = pg.GenSchema
+ sg.Schema.Items.Schema = spec.RefProperty("#/definitions/" + pg.Name)
+ sg.IsVirtual = true
+ return sg.makeGenSchema()
+ }
+
+ // create the generation schema for items
+ elProp := sg.NewSliceBranch(sg.Schema.Items.Schema)
+
+ // when building a slice of maps, the map item is not required;
+ // items from maps of aliased or nullable type remain required
+
+ // NOTE(fredbi): since this is reset below, this Required = true serves the obscure purpose
+ // of indirectly lifting validations from the slice. This is carried out differently now.
+ // elProp.Required = true
+
+ if err := elProp.makeGenSchema(); err != nil {
+ return err
+ }
+
+ sg.MergeResult(elProp, false)
+
+ sg.GenSchema.IsBaseType = elProp.GenSchema.IsBaseType
+ sg.GenSchema.ItemsEnum = elProp.GenSchema.Enum
+ elProp.GenSchema.Suffix = "Items"
+
+ elProp.GenSchema.IsNullable = tpe.IsNullable && !tpe.HasDiscriminator
+ if elProp.GenSchema.IsNullable {
+ sg.GenSchema.GoType = "[]*" + elProp.GenSchema.GoType
+ } else {
+ sg.GenSchema.GoType = "[]" + elProp.GenSchema.GoType
+ }
+
+ sg.GenSchema.IsArray = true
+
+ schemaCopy := elProp.GenSchema
+
+ schemaCopy.Required = false
+
+ // validations of items
+ // include format validation, excluding binary and base64 format validation
+ hv := hasValidations(sg.Schema.Items.Schema, false) || hasFormatValidation(schemaCopy.resolvedType)
+
+ // base types of polymorphic types must be validated
+ // NOTE: IsNullable is not useful to figure out a validation: we use Refed and IsAliased below instead
+ if hv || elProp.GenSchema.IsBaseType {
+ schemaCopy.HasValidations = true
+ }
+
+ if (elProp.Schema.Ref.String() != "" || elProp.GenSchema.IsAliased) && !(elProp.GenSchema.IsInterface || elProp.GenSchema.IsStream) {
+ schemaCopy.HasValidations = true
+ }
+
+ // lift validations
+ sg.GenSchema.HasValidations = sg.GenSchema.HasValidations || schemaCopy.HasValidations
+ sg.GenSchema.HasSliceValidations = sg.Schema.Validations().HasArrayValidations() || sg.Schema.Validations().HasEnum()
+
+ // prevent the custom formatter flag from bubbling up
+ sg.GenSchema.IsCustomFormatter = false
+
+ sg.GenSchema.Items = &schemaCopy
+ if sg.Named {
+ sg.GenSchema.AliasedType = sg.GenSchema.GoType
+ }
+
+ return nil
+}
+
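+// buildItems dispatches between the two forms of the items keyword: a single
+// schema (a plain array, handled by buildArray) and a list of schemas (a tuple).
+// A named tuple becomes a struct with one numbered member per element, roughly:
+//
+//	type Coordinate struct {
+//		P0 float64
+//		P1 float64
+//	}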
+func (sg *schemaGenContext) buildItems() error {
+ if sg.Schema.Items == nil {
+ // in swagger, arrays MUST have an items schema
+ return nil
+ }
+
+ // in Items spec, we have either Schema (array) or Schemas (tuple)
+ presentsAsSingle := sg.Schema.Items.Schema != nil
+ if presentsAsSingle && sg.Schema.AdditionalItems != nil { // unsure if this a valid of invalid schema
+ return fmt.Errorf("single schema (%s) can't have additional items", sg.Name)
+ }
+ if presentsAsSingle {
+ return sg.buildArray()
+ }
+
+ // This is a tuple, build a new model that represents this
+ if sg.Named {
+ sg.GenSchema.Name = sg.Name
+ sg.GenSchema.GoType = sg.TypeResolver.goTypeName(sg.Name)
+ for i, sch := range sg.Schema.Items.Schemas {
+ s := sch
+ elProp := sg.NewTupleElement(&s, i)
+
+ if s.Ref.String() == "" {
+ tpe, err := sg.TypeResolver.ResolveSchema(&s, s.Ref.String() == "", true)
+ if err != nil {
+ return err
+ }
+ if tpe.IsComplexObject && tpe.IsAnonymous {
+ // if the tuple element is an anonymous complex object, build a new type for it
+ pg := sg.makeNewStruct(sg.makeRefName()+" Items"+strconv.Itoa(i), s)
+ if err := pg.makeGenSchema(); err != nil {
+ return err
+ }
+ elProp.Schema = *spec.RefProperty("#/definitions/" + pg.Name)
+ elProp.MergeResult(pg, false)
+ elProp.ExtraSchemas[pg.Name] = pg.GenSchema
+ }
+ }
+
+ if err := elProp.makeGenSchema(); err != nil {
+ return err
+ }
+ if elProp.GenSchema.IsInterface || elProp.GenSchema.IsStream {
+ elProp.GenSchema.HasValidations = false
+ }
+ sg.MergeResult(elProp, false)
+
+ elProp.GenSchema.Name = "p" + strconv.Itoa(i)
+ sg.GenSchema.Properties = append(sg.GenSchema.Properties, elProp.GenSchema)
+ sg.GenSchema.IsTuple = true
+ }
+ return nil
+ }
+
+ // for an anonymous object, first build the new object
+ // and then replace the current one with a $ref to the
+ // new tuple object
+ var sch spec.Schema
+ sch.Typed("object", "")
+ sch.Properties = make(map[string]spec.Schema, len(sg.Schema.Items.Schemas))
+ for i, v := range sg.Schema.Items.Schemas {
+ sch.Required = append(sch.Required, "P"+strconv.Itoa(i))
+ sch.Properties["P"+strconv.Itoa(i)] = v
+ }
+ sch.AdditionalItems = sg.Schema.AdditionalItems
+ tup := sg.makeNewStruct(sg.GenSchema.Name+"Tuple"+strconv.Itoa(sg.Index), sch)
+ tup.IsTuple = true
+ if err := tup.makeGenSchema(); err != nil {
+ return err
+ }
+ tup.GenSchema.IsTuple = true
+ tup.GenSchema.IsComplexObject = false
+ tup.GenSchema.Title = tup.GenSchema.Name + " a representation of an anonymous Tuple type"
+ tup.GenSchema.Description = ""
+ sg.ExtraSchemas[tup.Name] = tup.GenSchema
+
+ sg.Schema = *spec.RefProperty("#/definitions/" + tup.Name)
+ if err := sg.makeGenSchema(); err != nil {
+ return err
+ }
+ sg.MergeResult(tup, false)
+ return nil
+}
+
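+// buildAdditionalItems resolves additionalItems on tuples: elements beyond the
+// declared tuple members are collected according to the additionalItems schema,
+// with anonymous complex objects first promoted to their own definition.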
+func (sg *schemaGenContext) buildAdditionalItems() error {
+ wantsAdditionalItems :=
+ sg.Schema.AdditionalItems != nil &&
+ (sg.Schema.AdditionalItems.Allows || sg.Schema.AdditionalItems.Schema != nil)
+
+ sg.GenSchema.HasAdditionalItems = wantsAdditionalItems
+ if wantsAdditionalItems {
+ // check if the element is a complex object, if so generate a new type for it
+ tpe, err := sg.TypeResolver.ResolveSchema(sg.Schema.AdditionalItems.Schema, true, true)
+ if err != nil {
+ return err
+ }
+ if tpe.IsComplexObject && tpe.IsAnonymous {
+ pg := sg.makeNewStruct(sg.makeRefName()+" Items", *sg.Schema.AdditionalItems.Schema)
+ if err := pg.makeGenSchema(); err != nil {
+ return err
+ }
+ sg.Schema.AdditionalItems.Schema = spec.RefProperty("#/definitions/" + pg.Name)
+ pg.GenSchema.HasValidations = true
+ sg.MergeResult(pg, false)
+ sg.ExtraSchemas[pg.Name] = pg.GenSchema
+ }
+
+ it := sg.NewAdditionalItems(sg.Schema.AdditionalItems.Schema)
+ // if AdditionalItems are themselves arrays, bump the index var
+ if tpe.IsArray {
+ it.IndexVar += "i"
+ }
+
+ if tpe.IsInterface {
+ it.Untyped = true
+ }
+
+ if err := it.makeGenSchema(); err != nil {
+ return err
+ }
+
+ // lift validations when the schema is a complex object or a $ref (but not an interface or stream)
+ if (tpe.IsComplexObject || it.Schema.Ref.String() != "") && !(tpe.IsInterface || tpe.IsStream) {
+ it.GenSchema.HasValidations = true
+ }
+
+ sg.MergeResult(it, true)
+ sg.GenSchema.AdditionalItems = &it.GenSchema
+ }
+ return nil
+}
+
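+// buildXMLNameWithTags computes the XMLName used to render "xml" struct tags;
+// e.g. an XML object with name "item" and attribute: true would yield a tag
+// like xml:"item,attr".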
+func (sg *schemaGenContext) buildXMLNameWithTags() error {
+ // render some "xml" struct tag under one the following conditions:
+ // - consumes/produces in spec contains xml
+ // - struct tags CLI option contains xml
+ // - XML object present in spec for this schema
+ if sg.WithXML || swag.ContainsStrings(sg.StructTags, "xml") || sg.Schema.XML != nil {
+ sg.GenSchema.XMLName = sg.Name
+
+ if sg.Schema.XML != nil {
+ if sg.Schema.XML.Name != "" {
+ sg.GenSchema.XMLName = sg.Schema.XML.Name
+ }
+ if sg.Schema.XML.Attribute {
+ sg.GenSchema.XMLName += ",attr"
+ }
+ }
+ }
+ return nil
+}
+
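+// shortCircuitNamedRef handles named definitions that consist of a single $ref.
+// A simple aliased type such as:
+//
+//	MyList:
+//	  $ref: "#/definitions/SomeArray"
+//
+// is re-aliased directly (roughly: type MyList = SomeArray, assuming SomeArray
+// resolves to a simple type), while an aliased object falls through to the
+// struct composition rendered below.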
+func (sg *schemaGenContext) shortCircuitNamedRef() (bool, error) {
+ // This if block ensures that a struct gets
+ // rendered with the ref as embedded ref.
+ //
+ // NOTE: this assumes that all $ref point to a definition,
+ // i.e. the spec is canonical, as guaranteed by minimal flattening.
+ //
+ // TODO: RefHandled is actually set nowhere
+ if sg.RefHandled || !sg.Named || sg.Schema.Ref.String() == "" {
+ return false, nil
+ }
+ debugLogAsJSON("short circuit named ref: %q", sg.Schema.Ref.String(), sg.Schema)
+
+ // Simple aliased types (arrays, maps and primitives)
+ //
+ // Before deciding to make a struct with a composition branch (below),
+ // check if the $ref points to a simple type or polymorphic (base) type.
+ //
+ // If this is the case, just realias this simple type, without creating a struct.
+ asch, era := analysis.Schema(analysis.SchemaOpts{
+ Root: sg.TypeResolver.Doc.Spec(),
+ BasePath: sg.TypeResolver.Doc.SpecFilePath(),
+ Schema: &sg.Schema,
+ })
+ if era != nil {
+ return false, era
+ }
+
+ if asch.IsArray || asch.IsMap || asch.IsKnownType || asch.IsBaseType {
+ tpx, ers := sg.TypeResolver.ResolveSchema(&sg.Schema, false, true)
+ if ers != nil {
+ return false, ers
+ }
+ tpe := resolvedType{}
+ tpe.IsMap = asch.IsMap
+ tpe.IsArray = asch.IsArray
+ tpe.IsPrimitive = asch.IsKnownType
+
+ tpe.IsAliased = true
+ tpe.AliasedType = ""
+ tpe.IsComplexObject = false
+ tpe.IsAnonymous = false
+ tpe.IsCustomFormatter = false
+ tpe.IsBaseType = tpx.IsBaseType
+
+ tpe.GoType = sg.TypeResolver.goTypeName(path.Base(sg.Schema.Ref.String()))
+ tpe.Pkg = sg.TypeResolver.definitionPkg
+
+ tpe.IsNullable = tpx.IsNullable // TODO
+ tpe.IsInterface = tpx.IsInterface
+ tpe.IsStream = tpx.IsStream
+ tpe.IsEmbedded = tpx.IsEmbedded
+
+ tpe.SwaggerType = tpx.SwaggerType
+ sch := spec.Schema{}
+ pg := sg.makeNewStruct(sg.Name, sch)
+ if err := pg.makeGenSchema(); err != nil {
+ return true, err
+ }
+ sg.MergeResult(pg, true)
+ sg.GenSchema = pg.GenSchema
+ sg.GenSchema.resolvedType = tpe
+ sg.GenSchema.resolvedType.IsSuperAlias = true
+ sg.GenSchema.IsBaseType = tpe.IsBaseType
+
+ return true, nil
+ }
+
+ // Aliased object: use golang struct composition.
+ // This is rendered as a struct with an embedded field, i.e.:
+ // Alias struct {
+ // AliasedType
+ // }
+ nullableOverride := sg.GenSchema.IsNullable
+
+ tpe := resolvedType{}
+ tpe.GoType = sg.TypeResolver.goTypeName(sg.Name)
+ tpe.Pkg = sg.TypeResolver.definitionPkg
+ tpe.SwaggerType = "object"
+ tpe.IsComplexObject = true
+ tpe.IsMap = false
+ tpe.IsArray = false
+ tpe.IsAnonymous = false
+ tpe.IsNullable = sg.TypeResolver.isNullable(&sg.Schema)
+
+ item := sg.NewCompositionBranch(sg.Schema, 0)
+ if err := item.makeGenSchema(); err != nil {
+ return true, err
+ }
+ sg.GenSchema.resolvedType = tpe
+ sg.GenSchema.IsNullable = sg.GenSchema.IsNullable || nullableOverride
+ // prevent format from bubbling up in composed type
+ item.GenSchema.IsCustomFormatter = false
+
+ sg.MergeResult(item, true)
+ sg.GenSchema.AllOf = append(sg.GenSchema.AllOf, item.GenSchema)
+ return true, nil
+}
+
+// liftSpecialAllOf attempts to simplify the rendering of allOf constructs by lifting simple things into the current schema.
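+// A typical candidate is the nullability idiom:
+//
+//	allOf:
+//	  - $ref: "#/definitions/Thing"
+//	  - x-isnullable: true
+//
+// which lifts to a plain nullable Thing rather than a composed struct.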
+func (sg *schemaGenContext) liftSpecialAllOf() error {
+ // If there is only a $ref, or a primitive plus an x-isnullable schema, then this is a nullable pointer:
+ // it should not compose several objects, just one.
+ // If there is a ref with a discriminator, we look for x-class on the current definition to know
+ // the value of the discriminator used to instantiate the class.
+ if len(sg.Schema.AllOf) < 2 {
+ return nil
+ }
+ var seenSchema int
+ var seenNullable bool
+ var schemaToLift spec.Schema
+
+ for _, schema := range sg.Schema.AllOf {
+ sch := schema
+ tpe, err := sg.TypeResolver.ResolveSchema(&sch, true, true)
+ if err != nil {
+ return err
+ }
+ if sg.TypeResolver.isNullable(&sch) {
+ seenNullable = true
+ }
+ if len(sch.Type) > 0 || len(sch.Properties) > 0 || sch.Ref.GetURL() != nil || len(sch.AllOf) > 0 {
+ seenSchema++
+ if seenSchema > 1 {
+ // won't do anything if there are several candidates for a lift
+ break
+ }
+ if (!tpe.IsAnonymous && tpe.IsComplexObject) || tpe.IsPrimitive {
+ // lifting complex objects here results in inlined structs in the model
+ schemaToLift = sch
+ }
+ }
+ }
+
+ if seenSchema == 1 {
+ // when there is only a single schema to lift in allOf, replace the schema with its allOf definition
+ debugLog("lifted schema in allOf for %s", sg.Name)
+ sg.Schema = schemaToLift
+ sg.GenSchema.IsNullable = seenNullable
+ }
+ return nil
+}
+
+func (sg *schemaGenContext) buildAliased() error {
+ if !sg.GenSchema.IsPrimitive && !sg.GenSchema.IsMap && !sg.GenSchema.IsArray && !sg.GenSchema.IsInterface {
+ return nil
+ }
+
+ if sg.GenSchema.IsPrimitive {
+ if sg.GenSchema.SwaggerType == "string" && sg.GenSchema.SwaggerFormat == "" {
+ sg.GenSchema.IsAliased = sg.GenSchema.GoType != sg.GenSchema.SwaggerType
+ }
+ if sg.GenSchema.IsNullable && sg.Named {
+ sg.GenSchema.IsNullable = false
+ }
+ }
+
+ if sg.GenSchema.IsInterface {
+ sg.GenSchema.IsAliased = sg.GenSchema.GoType != iface
+ }
+
+ if sg.GenSchema.IsMap {
+ sg.GenSchema.IsAliased = !strings.HasPrefix(sg.GenSchema.GoType, "map[")
+ }
+
+ if sg.GenSchema.IsArray {
+ sg.GenSchema.IsAliased = !strings.HasPrefix(sg.GenSchema.GoType, "[]")
+ }
+ return nil
+}
+
+func (sg schemaGenContext) makeRefName() string {
+ // figure out a longer name for deconflicting anonymous models.
+ // This is used when makeNewStruct() is followed by the creation of a new ref to definitions
+ if sg.UseContainerInName && sg.Container != sg.Name {
+ return sg.Container + swag.ToGoName(sg.Name)
+ }
+ return sg.Name
+}
+
+func (sg *schemaGenContext) GoName() string {
+ return goName(&sg.Schema, sg.Name)
+}
+
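+// goName returns the value of the x-go-name extension when set, or the original name.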
+func goName(sch *spec.Schema, orig string) string {
+ name, _ := sch.Extensions.GetString(xGoName)
+ if name != "" {
+ return name
+ }
+ return orig
+}
+
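+// derefMapElement strips the pointer from a map element's Go type and rewrites
+// the enclosing (possibly aliased) map type accordingly.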
+func (sg *schemaGenContext) derefMapElement(outer *GenSchema, _ *GenSchema, elem *GenSchema) {
+ derefType := strings.TrimPrefix(elem.GoType, "*")
+
+ if outer.IsAliased {
+ nesting := strings.TrimSuffix(strings.TrimSuffix(outer.AliasedType, elem.GoType), "*")
+ outer.AliasedType = nesting + derefType
+ outer.GoType = derefType
+ } else {
+ nesting := strings.TrimSuffix(strings.TrimSuffix(outer.GoType, elem.GoType), "*")
+ outer.GoType = nesting + derefType
+ }
+
+ elem.GoType = derefType
+}
+
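+// checkNeedsPointer renders nullable primitive elements of aliased or anonymous
+// maps as pointers, e.g. map[string]*int64 rather than map[string]int64.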
+func (sg *schemaGenContext) checkNeedsPointer(outer *GenSchema, sch *GenSchema, elem *GenSchema) {
+ derefType := strings.TrimPrefix(elem.GoType, "*")
+ switch {
+ case outer.IsAliased && !strings.HasSuffix(outer.AliasedType, "*"+derefType):
+ // override nullability of map of primitive elements: render element of aliased or anonymous map as a pointer
+ outer.AliasedType = strings.TrimSuffix(outer.AliasedType, derefType) + "*" + derefType
+ case sch != nil:
+ // nullable primitive
+ if sch.IsAnonymous && !strings.HasSuffix(outer.GoType, "*"+derefType) {
+ sch.GoType = strings.TrimSuffix(sch.GoType, derefType) + "*" + derefType
+ }
+ case outer.IsAnonymous && !strings.HasSuffix(outer.GoType, "*"+derefType):
+ outer.GoType = strings.TrimSuffix(outer.GoType, derefType) + "*" + derefType
+ }
+}
+
+// buildMapOfNullable aligns the nullability status of aliased and anonymous maps of simple things
+// with the nullability of their innermost element.
+//
+// NOTE: at the moment, we decide to align the type of the outer element (map) with the type of the inner element.
+// The opposite could be done, resulting in non-nullable primitive elements. If we did so, the validation
+// code would need to be adapted by removing IsZero() and Required() calls in codegen.
+func (sg *schemaGenContext) buildMapOfNullable(sch *GenSchema) {
+ outer := &sg.GenSchema
+ if sch == nil {
+ sch = outer
+ }
+ if sch.IsMap && (outer.IsAliased || outer.IsAnonymous) {
+ elem := sch.AdditionalProperties
+ for elem != nil {
+ if elem.IsPrimitive && elem.IsNullable {
+ sg.checkNeedsPointer(outer, nil, elem)
+ } else if elem.IsArray {
+ // override nullability of array of primitive elements:
+ // render element of aliased or anonymous map as a pointer
+ it := elem.Items
+ for it != nil {
+ switch {
+ case it.IsPrimitive && it.IsNullable:
+ sg.checkNeedsPointer(outer, sch, it)
+ case it.IsMap:
+ sg.buildMapOfNullable(it)
+ case !it.IsPrimitive && !it.IsArray && it.IsComplexObject && it.IsNullable:
+ // structs in map are not rendered as pointer by default
+ // unless some x-nullable overrides says so
+ _, forced := it.Extensions[xNullable]
+ if !forced {
+ _, forced = it.Extensions[xIsNullable]
+ }
+ if !forced {
+ sg.derefMapElement(outer, sch, it)
+ }
+ }
+ it = it.Items
+ }
+ }
+ elem = elem.AdditionalProperties
+ }
+ }
+}
+
+func (sg *schemaGenContext) makeGenSchema() error {
+ debugLogAsJSON("making gen schema (anon: %t, req: %t, tuple: %t) %s\n",
+ !sg.Named, sg.Required, sg.IsTuple, sg.Name, sg.Schema)
+
+ sg.GenSchema.Example = ""
+ if sg.Schema.Example != nil {
+ data, err := asJSON(sg.Schema.Example)
+ if err != nil {
+ return err
+ }
+ // Strip the unnecessary surrounding double quotes for string types,
+ // otherwise the generated spec would render the example as "\"foo\""
+ sg.GenSchema.Example = strings.Trim(data, "\"")
+ }
+ sg.GenSchema.ExternalDocs = trimExternalDoc(sg.Schema.ExternalDocs)
+ sg.GenSchema.IsExported = true
+ sg.GenSchema.Path = sg.Path
+ sg.GenSchema.IndexVar = sg.IndexVar
+ sg.GenSchema.Location = body
+ sg.GenSchema.ValueExpression = sg.ValueExpr
+ sg.GenSchema.KeyVar = sg.KeyVar
+ sg.GenSchema.OriginalName = sg.Name
+ sg.GenSchema.Name = sg.GoName()
+ sg.GenSchema.Title = sg.Schema.Title
+ sg.GenSchema.Description = trimBOM(sg.Schema.Description)
+ sg.GenSchema.ReceiverName = sg.Receiver
+ sg.GenSchema.sharedValidations = sg.schemaValidations()
+ sg.GenSchema.ReadOnly = sg.Schema.ReadOnly
+ sg.GenSchema.IncludeValidator = sg.IncludeValidator
+ sg.GenSchema.IncludeModel = sg.IncludeModel
+ sg.GenSchema.StrictAdditionalProperties = sg.StrictAdditionalProperties
+ sg.GenSchema.Default = sg.Schema.Default
+ sg.GenSchema.StructTags = sg.StructTags
+ sg.GenSchema.ExtraImports = make(map[string]string)
+
+ var err error
+ returns, err := sg.shortCircuitNamedRef()
+ if err != nil {
+ return err
+ }
+ if returns {
+ return nil
+ }
+ debugLogAsJSON("after short circuit named ref", sg.Schema)
+
+ if e := sg.liftSpecialAllOf(); e != nil {
+ return e
+ }
+ nullableOverride := sg.GenSchema.IsNullable
+ debugLogAsJSON("after lifting special all of", sg.Schema)
+
+ if sg.Container == "" {
+ sg.Container = sg.GenSchema.Name
+ }
+ if e := sg.buildAllOf(); e != nil {
+ return e
+ }
+
+ var tpe resolvedType
+ if sg.Untyped {
+ tpe, err = sg.TypeResolver.ResolveSchema(nil, !sg.Named, sg.IsTuple || sg.Required || sg.GenSchema.Required)
+ } else {
+ tpe, err = sg.TypeResolver.ResolveSchema(&sg.Schema, !sg.Named, sg.IsTuple || sg.Required || sg.GenSchema.Required)
+ }
+ if err != nil {
+ return err
+ }
+
+ debugLog("gschema rrequired: %t, nullable: %t", sg.GenSchema.Required, sg.GenSchema.IsNullable)
+ tpe.IsNullable = tpe.IsNullable || nullableOverride
+ sg.GenSchema.resolvedType = tpe
+ sg.GenSchema.IsBaseType = tpe.IsBaseType
+ sg.GenSchema.HasDiscriminator = tpe.HasDiscriminator
+
+ // include format validations, excluding binary
+ sg.GenSchema.HasValidations = sg.GenSchema.HasValidations || hasFormatValidation(tpe)
+
+ // include context validations
+ sg.GenSchema.HasContextValidations = sg.GenSchema.HasContextValidations || hasContextValidations(&sg.Schema) && !tpe.IsInterface && !tpe.IsStream && !tpe.SkipExternalValidation
+
+ // usage of a polymorphic base type is rendered with getter funcs on private properties.
+ // In the case of aliased types, the value expression remains the receiver, unchanged.
+ if tpe.IsArray && tpe.ElemType != nil && tpe.ElemType.IsBaseType && sg.GenSchema.ValueExpression != sg.GenSchema.ReceiverName {
+ sg.GenSchema.ValueExpression += asMethod
+ }
+
+ if tpe.IsExternal { // anonymous external types
+ extType, pkg, alias := sg.TypeResolver.knownDefGoType(sg.GenSchema.Name, sg.Schema, sg.TypeResolver.goTypeName)
+ if pkg != "" && alias != "" {
+ sg.GenSchema.ExtraImports[alias] = pkg
+ }
+
+ if !tpe.IsEmbedded {
+ sg.GenSchema.resolvedType = tpe
+ sg.GenSchema.Required = sg.Required
+ // assume we validate everything but interface and io.Reader - validation may be disabled by using the noValidation hint
+ sg.GenSchema.HasValidations = !(tpe.IsInterface || tpe.IsStream || tpe.SkipExternalValidation)
+ sg.GenSchema.IsAliased = sg.GenSchema.HasValidations
+
+ log.Printf("INFO: type %s is external, with inferred spec type %s, referred to as %s", sg.GenSchema.Name, sg.GenSchema.GoType, extType)
+ sg.GenSchema.GoType = extType
+ sg.GenSchema.AliasedType = extType
+ return nil
+ }
+ // TODO: case for embedded types as anonymous definitions
+ return fmt.Errorf("ERROR: inline definitions embedded types are not supported")
+ }
+
+ debugLog("gschema nullable: %t", sg.GenSchema.IsNullable)
+ if e := sg.buildAdditionalProperties(); e != nil {
+ return e
+ }
+
+ // rewrite value expression from top-down
+ cur := &sg.GenSchema
+ for cur.AdditionalProperties != nil {
+ cur.AdditionalProperties.ValueExpression = cur.ValueExpression + "[" + cur.AdditionalProperties.KeyVar + "]"
+ cur = cur.AdditionalProperties
+ }
+
+ prev := sg.GenSchema
+ if sg.Untyped {
+ debugLogAsJSON("untyped resolve:%t", sg.Named || sg.IsTuple || sg.Required || sg.GenSchema.Required, sg.Schema)
+ tpe, err = sg.TypeResolver.ResolveSchema(nil, !sg.Named, sg.Named || sg.IsTuple || sg.Required || sg.GenSchema.Required)
+ } else {
+ debugLogAsJSON("typed resolve, isAnonymous(%t), n: %t, t: %t, sgr: %t, sr: %t, isRequired(%t), BaseType(%t)",
+ !sg.Named, sg.Named, sg.IsTuple, sg.Required, sg.GenSchema.Required,
+ sg.Named || sg.IsTuple || sg.Required || sg.GenSchema.Required, sg.GenSchema.IsBaseType, sg.Schema)
+ tpe, err = sg.TypeResolver.ResolveSchema(&sg.Schema, !sg.Named, sg.Named || sg.IsTuple || sg.Required || sg.GenSchema.Required)
+ }
+ if err != nil {
+ return err
+ }
+ otn := tpe.IsNullable // for debug only
+ tpe.IsNullable = tpe.IsNullable || nullableOverride
+ sg.GenSchema.resolvedType = tpe
+ sg.GenSchema.IsComplexObject = prev.IsComplexObject
+ sg.GenSchema.IsMap = prev.IsMap
+ sg.GenSchema.IsAdditionalProperties = prev.IsAdditionalProperties
+ sg.GenSchema.IsBaseType = sg.GenSchema.HasDiscriminator
+
+ debugLogAsJSON("gschema nnullable:IsNullable:%t,resolver.IsNullable:%t,nullableOverride:%t",
+ sg.GenSchema.IsNullable, otn, nullableOverride, sg.Schema)
+ if err := sg.buildProperties(); err != nil {
+ return err
+ }
+
+ if err := sg.buildXMLNameWithTags(); err != nil {
+ return err
+ }
+
+ if err := sg.buildAdditionalItems(); err != nil {
+ return err
+ }
+
+ if err := sg.buildItems(); err != nil {
+ return err
+ }
+
+ if err := sg.buildAliased(); err != nil {
+ return err
+ }
+
+ sg.buildMapOfNullable(nil)
+
+ // extra serializers & interfaces
+
+ // generate MarshalBinary for:
+ // - tuple
+ // - struct
+ // - map
+ // - aliased primitive of a formatter type which is not a stringer
+ //
+ // but not for:
+ // - interface{}
+ // - io.Reader
+ gs := sg.GenSchema
+ sg.GenSchema.WantsMarshalBinary = !(gs.IsInterface || gs.IsStream || gs.IsBaseType) &&
+ (gs.IsTuple || gs.IsComplexObject || gs.IsAdditionalProperties || (gs.IsPrimitive && gs.IsAliased && gs.IsCustomFormatter && !strings.Contains(gs.Zero(), `("`)))
+
+ debugLog("finished gen schema for %q", sg.Name)
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/operation.go b/vendor/github.com/go-swagger/go-swagger/generator/operation.go
new file mode 100644
index 000000000..8f4b8b2f6
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/operation.go
@@ -0,0 +1,1303 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generator
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+type respSort struct {
+ Code int
+ Response spec.Response
+}
+
+type responses []respSort
+
+func (s responses) Len() int { return len(s) }
+func (s responses) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
+func (s responses) Less(i, j int) bool { return s[i].Code < s[j].Code }
+
+// sortedResponses produces a sorted list of responses.
+// TODO: this is redundant with the definition given in struct.go
+func sortedResponses(input map[int]spec.Response) responses {
+ var res responses
+ for k, v := range input {
+ if k > 0 {
+ res = append(res, respSort{k, v})
+ }
+ }
+ sort.Sort(res)
+ return res
+}
+
+// GenerateServerOperation generates a parameter model, a parameter validator, and http handler implementations for a given operation.
+//
+// It also generates an operation handler interface that uses the parameter model for handling a valid request.
+// A list of tags may be specified to restrict generation to operations carrying those tags.
+func GenerateServerOperation(operationNames []string, opts *GenOpts) error {
+ if err := opts.CheckOpts(); err != nil {
+ return err
+ }
+
+ if err := opts.setTemplates(); err != nil {
+ return err
+ }
+
+ specDoc, analyzed, err := opts.analyzeSpec()
+ if err != nil {
+ return err
+ }
+
+ ops := gatherOperations(analyzed, operationNames)
+
+ if len(ops) == 0 {
+ return errors.New("no operations were selected")
+ }
+
+ for operationName, opRef := range ops {
+ method, path, operation := opRef.Method, opRef.Path, opRef.Op
+
+ serverPackage := opts.LanguageOpts.ManglePackagePath(opts.ServerPackage, defaultServerTarget)
+ generator := operationGenerator{
+ Name: operationName,
+ Method: method,
+ Path: path,
+ BasePath: specDoc.BasePath(),
+ APIPackage: opts.LanguageOpts.ManglePackagePath(opts.APIPackage, defaultOperationsTarget),
+ ModelsPackage: opts.LanguageOpts.ManglePackagePath(opts.ModelPackage, defaultModelsTarget),
+ ClientPackage: opts.LanguageOpts.ManglePackagePath(opts.ClientPackage, defaultClientTarget),
+ ServerPackage: serverPackage,
+ Operation: *operation,
+ SecurityRequirements: analyzed.SecurityRequirementsFor(operation),
+ SecurityDefinitions: analyzed.SecurityDefinitionsFor(operation),
+ Principal: opts.PrincipalAlias(),
+ Target: filepath.Join(opts.Target, filepath.FromSlash(serverPackage)),
+ Base: opts.Target,
+ Tags: opts.Tags,
+ IncludeHandler: opts.IncludeHandler,
+ IncludeParameters: opts.IncludeParameters,
+ IncludeResponses: opts.IncludeResponses,
+ IncludeValidator: opts.IncludeValidator,
+ DumpData: opts.DumpData,
+ DefaultScheme: opts.DefaultScheme,
+ DefaultProduces: opts.DefaultProduces,
+ DefaultConsumes: opts.DefaultConsumes,
+ Doc: specDoc,
+ Analyzed: analyzed,
+ GenOpts: opts,
+ }
+ if err := generator.Generate(); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+type operationGenerator struct {
+ Authorized bool
+ IncludeHandler bool
+ IncludeParameters bool
+ IncludeResponses bool
+ IncludeValidator bool
+ DumpData bool
+
+ Principal string
+ Target string
+ Base string
+ Name string
+ Method string
+ Path string
+ BasePath string
+ APIPackage string
+ ModelsPackage string
+ ServerPackage string
+ ClientPackage string
+ Operation spec.Operation
+ SecurityRequirements [][]analysis.SecurityRequirement
+ SecurityDefinitions map[string]spec.SecurityScheme
+ Tags []string
+ DefaultScheme string
+ DefaultProduces string
+ DefaultConsumes string
+ Doc *loads.Document
+ Analyzed *analysis.Spec
+ GenOpts *GenOpts
+}
+
+// Generate a single operation
+func (o *operationGenerator) Generate() error {
+
+ defaultImports := o.GenOpts.defaultImports()
+
+ apiPackage := o.GenOpts.LanguageOpts.ManglePackagePath(o.GenOpts.APIPackage, defaultOperationsTarget)
+ imports := o.GenOpts.initImports(
+ filepath.Join(o.GenOpts.LanguageOpts.ManglePackagePath(o.GenOpts.ServerPackage, defaultServerTarget), apiPackage))
+
+ bldr := codeGenOpBuilder{
+ ModelsPackage: o.ModelsPackage,
+ Principal: o.GenOpts.PrincipalAlias(),
+ Target: o.Target,
+ DefaultImports: defaultImports,
+ Imports: imports,
+ DefaultScheme: o.DefaultScheme,
+ Doc: o.Doc,
+ Analyzed: o.Analyzed,
+ BasePath: o.BasePath,
+ GenOpts: o.GenOpts,
+ Name: o.Name,
+ Operation: o.Operation,
+ Method: o.Method,
+ Path: o.Path,
+ IncludeValidator: o.IncludeValidator,
+ APIPackage: o.APIPackage, // defaults to main operations package
+ DefaultProduces: o.DefaultProduces,
+ DefaultConsumes: o.DefaultConsumes,
+ Authed: len(o.Analyzed.SecurityRequirementsFor(&o.Operation)) > 0,
+ Security: o.Analyzed.SecurityRequirementsFor(&o.Operation),
+ SecurityDefinitions: o.Analyzed.SecurityDefinitionsFor(&o.Operation),
+ RootAPIPackage: o.GenOpts.LanguageOpts.ManglePackageName(o.ServerPackage, defaultServerTarget),
+ }
+
+ _, tags, _ := bldr.analyzeTags()
+
+ op, err := bldr.MakeOperation()
+ if err != nil {
+ return err
+ }
+
+ op.Tags = tags
+ operations := make(GenOperations, 0, 1)
+ operations = append(operations, op)
+ sort.Sort(operations)
+
+ for _, pp := range operations {
+ op := pp
+ if o.GenOpts.DumpData {
+ _ = dumpData(swag.ToDynamicJSON(op))
+ continue
+ }
+ if err := o.GenOpts.renderOperation(&op); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+type codeGenOpBuilder struct {
+ Authed bool
+ IncludeValidator bool
+
+ Name string
+ Method string
+ Path string
+ BasePath string
+ APIPackage string
+ APIPackageAlias string
+ RootAPIPackage string
+ ModelsPackage string
+ Principal string
+ Target string
+ Operation spec.Operation
+ Doc *loads.Document
+ PristineDoc *loads.Document
+ Analyzed *analysis.Spec
+ DefaultImports map[string]string
+ Imports map[string]string
+ DefaultScheme string
+ DefaultProduces string
+ DefaultConsumes string
+ Security [][]analysis.SecurityRequirement
+ SecurityDefinitions map[string]spec.SecurityScheme
+ ExtraSchemas map[string]GenSchema
+ GenOpts *GenOpts
+}
+
+// paramMappings yields a map of safe parameter names for an operation
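+// Parameters sharing the same name across locations (e.g. "id" both in path and
+// query) are renamed from their unique key so that the generated accessors do
+// not collide.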
+func paramMappings(params map[string]spec.Parameter) (map[string]map[string]string, string) {
+ idMapping := map[string]map[string]string{
+ "query": make(map[string]string, len(params)),
+ "path": make(map[string]string, len(params)),
+ "formData": make(map[string]string, len(params)),
+ "header": make(map[string]string, len(params)),
+ "body": make(map[string]string, len(params)),
+ }
+
+ // In order to avoid unstable generation, adopt the same naming convention
+ // for all parameters with the same name across locations.
+ seenIds := make(map[string]interface{}, len(params))
+ for id, p := range params {
+ if val, ok := seenIds[p.Name]; ok {
+ previous := val.(struct{ id, in string })
+ idMapping[p.In][p.Name] = swag.ToGoName(id)
+ // rewrite the previously found one
+ idMapping[previous.in][p.Name] = swag.ToGoName(previous.id)
+ } else {
+ idMapping[p.In][p.Name] = swag.ToGoName(p.Name)
+ }
+ seenIds[strings.ToLower(idMapping[p.In][p.Name])] = struct{ id, in string }{id: id, in: p.In}
+ }
+
+ // pick a deconflicted private name for timeout for this operation
+ timeoutName := renameTimeout(seenIds, "timeout")
+
+ return idMapping, timeoutName
+}
+
+// renameTimeout renames the variable used by the client template, to avoid conflicts
+// with param names.
+//
+// NOTE: this merely protects the timeout field in the client parameter struct,
+// fields "Context" and "HTTPClient" remain exposed to name conflicts.
+func renameTimeout(seenIds map[string]interface{}, timeoutName string) string {
+ if seenIds == nil {
+ return timeoutName
+ }
+ current := strings.ToLower(timeoutName)
+ if _, ok := seenIds[current]; !ok {
+ return timeoutName
+ }
+ var next string
+ switch current {
+ case "timeout":
+ next = "requestTimeout"
+ case "requesttimeout":
+ next = "httpRequestTimeout"
+ case "httprequesttimeout":
+ next = "swaggerTimeout"
+ case "swaggertimeout":
+ next = "operationTimeout"
+ case "operationtimeout":
+ next = "opTimeout"
+ case "optimeout":
+ next = "operTimeout"
+ default:
+ next = timeoutName + "1"
+ }
+ return renameTimeout(seenIds, next)
+}
+
+func (b *codeGenOpBuilder) MakeOperation() (GenOperation, error) {
+ debugLog("[%s %s] parsing operation (id: %q)", b.Method, b.Path, b.Operation.ID)
+ // NOTE: we assume flatten is enabled by default (i.e. complex constructs are resolved from the models package),
+ // but do not assume the spec is necessarily fully flattened (i.e. all schemas moved to definitions).
+ //
+ // Fully flattened means that all complex constructs are present as
+ // definitions and models produced accordingly in ModelsPackage,
+ // whereas minimal flatten simply ensures that there are no weird $ref's in the spec.
+ //
+ // When some complex anonymous constructs are specified, extra schemas are produced in the operations package.
+ //
+ // In all cases, resetting definitions to the _original_ (untransformed) spec is not an option:
+ // we take from there the spec possibly already transformed by the GenDefinitions stage.
+ resolver := newTypeResolver(b.GenOpts.LanguageOpts.ManglePackageName(b.ModelsPackage, defaultModelsTarget), b.DefaultImports[b.ModelsPackage], b.Doc)
+ receiver := "o"
+
+ operation := b.Operation
+ var params, qp, pp, hp, fp GenParameters
+ var hasQueryParams, hasPathParams, hasHeaderParams, hasFormParams, hasFileParams, hasFormValueParams, hasBodyParams bool
+ paramsForOperation := b.Analyzed.ParamsFor(b.Method, b.Path)
+
+ idMapping, timeoutName := paramMappings(paramsForOperation)
+
+ for _, p := range paramsForOperation {
+ cp, err := b.MakeParameter(receiver, resolver, p, idMapping)
+
+ if err != nil {
+ return GenOperation{}, err
+ }
+ if cp.IsQueryParam() {
+ hasQueryParams = true
+ qp = append(qp, cp)
+ }
+ if cp.IsFormParam() {
+ if p.Type == file {
+ hasFileParams = true
+ }
+ if p.Type != file {
+ hasFormValueParams = true
+ }
+ hasFormParams = true
+ fp = append(fp, cp)
+ }
+ if cp.IsPathParam() {
+ hasPathParams = true
+ pp = append(pp, cp)
+ }
+ if cp.IsHeaderParam() {
+ hasHeaderParams = true
+ hp = append(hp, cp)
+ }
+ if cp.IsBodyParam() {
+ hasBodyParams = true
+ }
+ params = append(params, cp)
+ }
+ sort.Sort(params)
+ sort.Sort(qp)
+ sort.Sort(pp)
+ sort.Sort(hp)
+ sort.Sort(fp)
+
+ var srs responses
+ if operation.Responses != nil {
+ srs = sortedResponses(operation.Responses.StatusCodeResponses)
+ }
+ responses := make([]GenResponse, 0, len(srs))
+ var defaultResponse *GenResponse
+ var successResponses []GenResponse
+ if operation.Responses != nil {
+ for _, v := range srs {
+ name, ok := v.Response.Extensions.GetString(xGoName)
+ if !ok {
+ // look up the name of well-known status codes
+ name = runtime.Statuses[v.Code]
+ if name == "" {
+ // non-standard codes deserve some name
+ name = fmt.Sprintf("Status %d", v.Code)
+ }
+ }
+ name = swag.ToJSONName(b.Name + " " + name)
+ isSuccess := v.Code/100 == 2
+ gr, err := b.MakeResponse(receiver, name, isSuccess, resolver, v.Code, v.Response)
+ if err != nil {
+ return GenOperation{}, err
+ }
+ if isSuccess {
+ successResponses = append(successResponses, gr)
+ }
+ responses = append(responses, gr)
+ }
+
+ if operation.Responses.Default != nil {
+ gr, err := b.MakeResponse(receiver, b.Name+" default", false, resolver, -1, *operation.Responses.Default)
+ if err != nil {
+ return GenOperation{}, err
+ }
+ defaultResponse = &gr
+ }
+ }
+
+ // Always render a default response, even when no responses were defined
+ if operation.Responses == nil || (operation.Responses.Default == nil && len(srs) == 0) {
+ gr, err := b.MakeResponse(receiver, b.Name+" default", false, resolver, -1, spec.Response{})
+ if err != nil {
+ return GenOperation{}, err
+ }
+ defaultResponse = &gr
+ }
+
+ swsp := resolver.Doc.Spec()
+
+ schemes, extraSchemes := gatherURISchemes(swsp, operation)
+ originalSchemes := operation.Schemes
+ originalExtraSchemes := getExtraSchemes(operation.Extensions)
+
+ produces := producesOrDefault(operation.Produces, swsp.Produces, b.DefaultProduces)
+ sort.Strings(produces)
+
+ consumes := producesOrDefault(operation.Consumes, swsp.Consumes, b.DefaultConsumes)
+ sort.Strings(consumes)
+
+ var successResponse *GenResponse
+ for _, resp := range successResponses {
+ sr := resp
+ if sr.IsSuccess {
+ successResponse = &sr
+ break
+ }
+ }
+
+ var hasStreamingResponse bool
+ if defaultResponse != nil && defaultResponse.Schema != nil && defaultResponse.Schema.IsStream {
+ hasStreamingResponse = true
+ }
+
+ if !hasStreamingResponse {
+ for _, sr := range successResponses {
+ if !hasStreamingResponse && sr.Schema != nil && sr.Schema.IsStream {
+ hasStreamingResponse = true
+ break
+ }
+ }
+ }
+
+ if !hasStreamingResponse {
+ for _, r := range responses {
+ if r.Schema != nil && r.Schema.IsStream {
+ hasStreamingResponse = true
+ break
+ }
+ }
+ }
+
+ return GenOperation{
+ GenCommon: GenCommon{
+ Copyright: b.GenOpts.Copyright,
+ TargetImportPath: b.GenOpts.LanguageOpts.baseImport(b.GenOpts.Target),
+ },
+ Package: b.GenOpts.LanguageOpts.ManglePackageName(b.APIPackage, defaultOperationsTarget),
+ PackageAlias: b.APIPackageAlias,
+ RootPackage: b.RootAPIPackage,
+ Name: b.Name,
+ Method: b.Method,
+ Path: b.Path,
+ BasePath: b.BasePath,
+ Tags: operation.Tags,
+ UseTags: len(operation.Tags) > 0 && !b.GenOpts.SkipTagPackages,
+ Description: trimBOM(operation.Description),
+ ReceiverName: receiver,
+ DefaultImports: b.DefaultImports,
+ Imports: b.Imports,
+ Params: params,
+ Summary: trimBOM(operation.Summary),
+ QueryParams: qp,
+ PathParams: pp,
+ HeaderParams: hp,
+ FormParams: fp,
+ HasQueryParams: hasQueryParams,
+ HasPathParams: hasPathParams,
+ HasHeaderParams: hasHeaderParams,
+ HasFormParams: hasFormParams,
+ HasFormValueParams: hasFormValueParams,
+ HasFileParams: hasFileParams,
+ HasBodyParams: hasBodyParams,
+ HasStreamingResponse: hasStreamingResponse,
+ Authorized: b.Authed,
+ Security: b.makeSecurityRequirements(receiver), // resolved security requirements, for codegen
+ SecurityDefinitions: b.makeSecuritySchemes(receiver),
+ SecurityRequirements: securityRequirements(operation.Security), // raw security requirements, for doc
+ Principal: b.Principal,
+ Responses: responses,
+ DefaultResponse: defaultResponse,
+ SuccessResponse: successResponse,
+ SuccessResponses: successResponses,
+ ExtraSchemas: gatherExtraSchemas(b.ExtraSchemas),
+ Schemes: schemeOrDefault(schemes, b.DefaultScheme),
+ SchemeOverrides: originalSchemes, // raw operation schemes, for doc
+ ProducesMediaTypes: produces, // resolved produces, for codegen
+ ConsumesMediaTypes: consumes, // resolved consumes, for codegen
+ Produces: operation.Produces, // for doc
+ Consumes: operation.Consumes, // for doc
+ ExtraSchemes: extraSchemes, // resolved schemes, for codegen
+ ExtraSchemeOverrides: originalExtraSchemes, // raw operation extra schemes, for doc
+ TimeoutName: timeoutName,
+ Extensions: operation.Extensions,
+ StrictResponders: b.GenOpts.StrictResponders,
+
+ PrincipalIsNullable: b.GenOpts.PrincipalIsNullable(),
+ ExternalDocs: trimExternalDoc(operation.ExternalDocs),
+ }, nil
+}
+
+func producesOrDefault(produces []string, fallback []string, defaultProduces string) []string {
+ if len(produces) > 0 {
+ return produces
+ }
+ if len(fallback) > 0 {
+ return fallback
+ }
+ return []string{defaultProduces}
+}
+
+func schemeOrDefault(schemes []string, defaultScheme string) []string {
+ if len(schemes) == 0 {
+ return []string{defaultScheme}
+ }
+ return schemes
+}
+
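+// MakeResponse builds the codegen model for a single response: examples and
+// headers are collected and sorted, and any response schema is resolved into a
+// GenSchema named after the response (e.g. "<name>Body").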
+func (b *codeGenOpBuilder) MakeResponse(receiver, name string, isSuccess bool, resolver *typeResolver, code int, resp spec.Response) (GenResponse, error) {
+ debugLog("[%s %s] making id %q", b.Method, b.Path, b.Operation.ID)
+
+ // assume minimal flattening has been carried out, so there is no $ref in the response (but some may remain in the response schema)
+ examples := make(GenResponseExamples, 0, len(resp.Examples))
+ for k, v := range resp.Examples {
+ examples = append(examples, GenResponseExample{MediaType: k, Example: v})
+ }
+ sort.Sort(examples)
+
+ res := GenResponse{
+ Package: b.GenOpts.LanguageOpts.ManglePackageName(b.APIPackage, defaultOperationsTarget),
+ ModelsPackage: b.ModelsPackage,
+ ReceiverName: receiver,
+ Name: name,
+ Description: trimBOM(resp.Description),
+ DefaultImports: b.DefaultImports,
+ Imports: b.Imports,
+ IsSuccess: isSuccess,
+ Code: code,
+ Method: b.Method,
+ Path: b.Path,
+ Extensions: resp.Extensions,
+ StrictResponders: b.GenOpts.StrictResponders,
+ OperationName: b.Name,
+ Examples: examples,
+ }
+
+ // prepare response headers
+ for hName, header := range resp.Headers {
+ hdr, err := b.MakeHeader(receiver, hName, header)
+ if err != nil {
+ return GenResponse{}, err
+ }
+ res.Headers = append(res.Headers, hdr)
+ }
+ sort.Sort(res.Headers)
+
+ if resp.Schema != nil {
+ // resolve schema model
+ schema, ers := b.buildOperationSchema(fmt.Sprintf("%q", name), name+"Body", swag.ToGoName(name+"Body"), receiver, "i", resp.Schema, resolver)
+ if ers != nil {
+ return GenResponse{}, ers
+ }
+ res.Schema = &schema
+ }
+ return res, nil
+}
+
+func (b *codeGenOpBuilder) MakeHeader(receiver, name string, hdr spec.Header) (GenHeader, error) {
+ tpe := simpleResolvedType(hdr.Type, hdr.Format, hdr.Items, &hdr.CommonValidations)
+
+ id := swag.ToGoName(name)
+ res := GenHeader{
+ sharedValidations: sharedValidations{
+ Required: true,
+ SchemaValidations: hdr.Validations(), // NOTE: Required is not defined by the Swagger schema for header. Set arbitrarily to true for convenience in templates.
+ },
+ resolvedType: tpe,
+ Package: b.GenOpts.LanguageOpts.ManglePackageName(b.APIPackage, defaultOperationsTarget),
+ ReceiverName: receiver,
+ ID: id,
+ Name: name,
+ Path: fmt.Sprintf("%q", name),
+ ValueExpression: fmt.Sprintf("%s.%s", receiver, id),
+ Description: trimBOM(hdr.Description),
+ Default: hdr.Default,
+ HasDefault: hdr.Default != nil,
+ Converter: stringConverters[tpe.GoType],
+ Formatter: stringFormatters[tpe.GoType],
+ ZeroValue: tpe.Zero(),
+ CollectionFormat: hdr.CollectionFormat,
+ IndexVar: "i",
+ }
+ res.HasValidations, res.HasSliceValidations = b.HasValidations(hdr.CommonValidations, res.resolvedType)
+
+ hasChildValidations := false
+ if hdr.Items != nil {
+ pi, err := b.MakeHeaderItem(receiver, name+" "+res.IndexVar, res.IndexVar+"i", "fmt.Sprintf(\"%s.%v\", \"header\", "+res.IndexVar+")", res.Name+"I", hdr.Items, nil)
+ if err != nil {
+ return GenHeader{}, err
+ }
+ res.Child = &pi
+ hasChildValidations = pi.HasValidations
+ }
+ // we feed the GenHeader structure the same way as we do for
+ // GenParameter, even though there is currently no actual validation
+ // for response headers.
+ res.HasValidations = res.HasValidations || hasChildValidations
+
+ return res, nil
+}
+
+func (b *codeGenOpBuilder) MakeHeaderItem(receiver, paramName, indexVar, path, valueExpression string, items, _ *spec.Items) (GenItems, error) {
+ var res GenItems
+ res.resolvedType = simpleResolvedType(items.Type, items.Format, items.Items, &items.CommonValidations)
+
+ res.sharedValidations = sharedValidations{
+ Required: false,
+ SchemaValidations: items.Validations(),
+ }
+ res.Name = paramName
+ res.Path = path
+ res.Location = "header"
+ res.ValueExpression = swag.ToVarName(valueExpression)
+ res.CollectionFormat = items.CollectionFormat
+ res.Converter = stringConverters[res.GoType]
+ res.Formatter = stringFormatters[res.GoType]
+ res.IndexVar = indexVar
+ res.HasValidations, res.HasSliceValidations = b.HasValidations(items.CommonValidations, res.resolvedType)
+ res.IsEnumCI = b.GenOpts.AllowEnumCI || hasEnumCI(items.Extensions)
+
+ if items.Items != nil {
+ // Recursively follows nested arrays
+ // IMPORTANT! transmit a ValueExpression consistent with the parent's
+ hi, err := b.MakeHeaderItem(receiver, paramName+" "+indexVar, indexVar+"i", "fmt.Sprintf(\"%s.%v\", \"header\", "+indexVar+")", res.ValueExpression+"I", items.Items, items)
+ if err != nil {
+ return GenItems{}, err
+ }
+ res.Child = &hi
+ hi.Parent = &res
+ // Propagates HasValidations flag to outer Items definition (currently not in use: done to remain consistent with parameters)
+ res.HasValidations = res.HasValidations || hi.HasValidations
+ }
+
+ return res, nil
+}
+
+// HasValidations resolves the validation status for simple schema objects
+func (b *codeGenOpBuilder) HasValidations(sh spec.CommonValidations, rt resolvedType) (hasValidations bool, hasSliceValidations bool) {
+ hasSliceValidations = sh.HasArrayValidations() || sh.HasEnum()
+ hasValidations = sh.HasNumberValidations() || sh.HasStringValidations() || hasSliceValidations || hasFormatValidation(rt)
+ return
+}
+
+func (b *codeGenOpBuilder) MakeParameterItem(receiver, paramName, indexVar, path, valueExpression, location string, resolver *typeResolver, items, _ *spec.Items) (GenItems, error) {
+ debugLog("making parameter item recv=%s param=%s index=%s valueExpr=%s path=%s location=%s", receiver, paramName, indexVar, valueExpression, path, location)
+ var res GenItems
+ res.resolvedType = simpleResolvedType(items.Type, items.Format, items.Items, &items.CommonValidations)
+
+ res.sharedValidations = sharedValidations{
+ Required: false,
+ SchemaValidations: items.Validations(),
+ }
+ res.Name = paramName
+ res.Path = path
+ res.Location = location
+ res.ValueExpression = swag.ToVarName(valueExpression)
+ res.CollectionFormat = items.CollectionFormat
+ res.Converter = stringConverters[res.GoType]
+ res.Formatter = stringFormatters[res.GoType]
+ res.IndexVar = indexVar
+
+ res.HasValidations, res.HasSliceValidations = b.HasValidations(items.CommonValidations, res.resolvedType)
+ res.IsEnumCI = b.GenOpts.AllowEnumCI || hasEnumCI(items.Extensions)
+ res.NeedsIndex = res.HasValidations || res.Converter != "" || (res.IsCustomFormatter && !res.SkipParse)
+
+ if items.Items != nil {
+ // Recursively follows nested arrays
+ // IMPORTANT! transmit a ValueExpression consistent with the parent's
+ pi, err := b.MakeParameterItem(receiver, paramName+" "+indexVar, indexVar+"i", "fmt.Sprintf(\"%s.%v\", "+path+", "+indexVar+")", res.ValueExpression+"I", location, resolver, items.Items, items)
+ if err != nil {
+ return GenItems{}, err
+ }
+ res.Child = &pi
+ pi.Parent = &res
+ // Propagates HasValidations flag to outer Items definition
+ res.HasValidations = res.HasValidations || pi.HasValidations
+ res.NeedsIndex = res.NeedsIndex || pi.NeedsIndex
+ }
+
+ return res, nil
+}
+
+func (b *codeGenOpBuilder) MakeParameter(receiver string, resolver *typeResolver, param spec.Parameter, idMapping map[string]map[string]string) (GenParameter, error) {
+ debugLog("[%s %s] making parameter %q", b.Method, b.Path, param.Name)
+
+ // assume minimal flattening has been carried out, so there is no $ref left in the parameter (though some may remain in the parameter's schema)
+
+ var child *GenItems
+ id := swag.ToGoName(param.Name)
+ if goName, ok := param.Extensions["x-go-name"]; ok {
+ id, ok = goName.(string)
+ if !ok {
+ return GenParameter{}, fmt.Errorf(`%s %s, parameter %q: "x-go-name" field must be a string, not a %T`,
+ b.Method, b.Path, param.Name, goName)
+ }
+ } else if len(idMapping) > 0 {
+ id = idMapping[param.In][param.Name]
+ }
+
+ res := GenParameter{
+ ID: id,
+ Name: param.Name,
+ ModelsPackage: b.ModelsPackage,
+ Path: fmt.Sprintf("%q", param.Name),
+ ValueExpression: fmt.Sprintf("%s.%s", receiver, id),
+ IndexVar: "i",
+ Default: param.Default,
+ HasDefault: param.Default != nil,
+ Description: trimBOM(param.Description),
+ ReceiverName: receiver,
+ CollectionFormat: param.CollectionFormat,
+ Child: child,
+ Location: param.In,
+ AllowEmptyValue: (param.In == "query" || param.In == "formData") && param.AllowEmptyValue,
+ Extensions: param.Extensions,
+ }
+
+ if param.In == "body" {
+ // Process parameters declared in body (i.e. have a Schema)
+ res.Required = param.Required
+ if err := b.MakeBodyParameter(&res, resolver, param.Schema); err != nil {
+ return GenParameter{}, err
+ }
+ } else {
+ // Process parameters declared in other inputs: path, query, header (SimpleSchema)
+ res.resolvedType = simpleResolvedType(param.Type, param.Format, param.Items, &param.CommonValidations)
+ res.sharedValidations = sharedValidations{
+ Required: param.Required,
+ SchemaValidations: param.Validations(),
+ }
+
+ res.ZeroValue = res.resolvedType.Zero()
+
+ hasChildValidations := false
+ if param.Items != nil {
+ // Follow Items definition for array parameters
+ pi, err := b.MakeParameterItem(receiver, param.Name+" "+res.IndexVar, res.IndexVar+"i", "fmt.Sprintf(\"%s.%v\", "+res.Path+", "+res.IndexVar+")", res.Name+"I", param.In, resolver, param.Items, nil)
+ if err != nil {
+ return GenParameter{}, err
+ }
+ res.Child = &pi
+ // Propagates HasValidations from child array
+ hasChildValidations = pi.HasValidations
+ }
+ res.IsNullable = !param.Required && !param.AllowEmptyValue
+ res.HasValidations, res.HasSliceValidations = b.HasValidations(param.CommonValidations, res.resolvedType)
+ res.HasValidations = res.HasValidations || hasChildValidations
+ res.IsEnumCI = b.GenOpts.AllowEnumCI || hasEnumCI(param.Extensions)
+ }
+
+ // Select codegen strategy for body param validation
+ res.Converter = stringConverters[res.GoType]
+ res.Formatter = stringFormatters[res.GoType]
+ b.setBodyParamValidation(&res)
+
+ return res, nil
+}
+
+// MakeBodyParameter constructs a body parameter schema
+func (b *codeGenOpBuilder) MakeBodyParameter(res *GenParameter, resolver *typeResolver, sch *spec.Schema) error {
+ // resolve schema model
+ schema, ers := b.buildOperationSchema(res.Path, b.Operation.ID+"ParamsBody", swag.ToGoName(b.Operation.ID+" Body"), res.ReceiverName, res.IndexVar, sch, resolver)
+ if ers != nil {
+ return ers
+ }
+ res.Schema = &schema
+ res.Schema.Required = res.Required // Required in body is managed independently from validations
+
+ // build Child items for nested slices and maps
+ var items *GenItems
+ res.KeyVar = "k"
+ res.Schema.KeyVar = "k"
+ switch {
+ case schema.IsMap && !schema.IsInterface:
+ items = b.MakeBodyParameterItemsAndMaps(res, res.Schema.AdditionalProperties)
+ case schema.IsArray:
+ items = b.MakeBodyParameterItemsAndMaps(res, res.Schema.Items)
+ default:
+ items = new(GenItems)
+ }
+
+ // templates assume at least one .Child != nil
+ res.Child = items
+ schema.HasValidations = schema.HasValidations || items.HasValidations
+
+ res.resolvedType = schema.resolvedType
+
+ // simple and schema views share the same validations
+ res.sharedValidations = schema.sharedValidations
+ res.ZeroValue = schema.Zero()
+ return nil
+}
+
+// MakeBodyParameterItemsAndMaps clones the .Items schema structure (or .AdditionalProperties, respectively) as a .GenItems structure
+// for compatibility with simple param templates.
+//
+// Constructed children assume simple structures: any complex object is assumed to be resolved by a model or extra schema definition
+func (b *codeGenOpBuilder) MakeBodyParameterItemsAndMaps(res *GenParameter, it *GenSchema) *GenItems {
+ items := new(GenItems)
+ if it != nil {
+ var prev *GenItems
+ next := items
+ if res.Schema.IsArray {
+ next.Path = "fmt.Sprintf(\"%s.%v\", " + res.Path + ", " + res.IndexVar + ")"
+ } else if res.Schema.IsMap {
+ next.Path = "fmt.Sprintf(\"%s.%v\", " + res.Path + ", " + res.KeyVar + ")"
+ }
+ next.Name = res.Name + " " + res.Schema.IndexVar
+ next.IndexVar = res.Schema.IndexVar + "i"
+ next.KeyVar = res.Schema.KeyVar + "k"
+ next.ValueExpression = swag.ToVarName(res.Name + "I")
+ next.Location = "body"
+ for it != nil {
+ next.resolvedType = it.resolvedType
+ next.sharedValidations = it.sharedValidations
+ next.Formatter = stringFormatters[it.SwaggerFormat]
+ next.Converter = stringConverters[res.GoType]
+ next.Parent = prev
+ _, next.IsCustomFormatter = customFormatters[it.GoType]
+ next.IsCustomFormatter = next.IsCustomFormatter && !it.IsStream
+
+ // special instruction to avoid using CollectionFormat for body params
+ next.SkipParse = true
+
+ if prev != nil {
+ if prev.IsArray {
+ next.Path = "fmt.Sprintf(\"%s.%v\", " + prev.Path + ", " + prev.IndexVar + ")"
+ } else if prev.IsMap {
+ next.Path = "fmt.Sprintf(\"%s.%v\", " + prev.Path + ", " + prev.KeyVar + ")"
+ }
+ next.Name = prev.Name + prev.IndexVar
+ next.IndexVar = prev.IndexVar + "i"
+ next.KeyVar = prev.KeyVar + "k"
+ next.ValueExpression = swag.ToVarName(prev.ValueExpression + "I")
+ prev.Child = next
+ }
+
+ // found a complex or aliased thing
+ // hide details from the aliased type and stop recursing
+ if next.IsAliased || next.IsComplexObject {
+ next.IsArray = false
+ next.IsMap = false
+ next.IsCustomFormatter = false
+ next.IsComplexObject = true
+ next.IsAliased = true
+ break
+ }
+ if next.IsInterface || next.IsStream || next.IsBase64 {
+ next.HasValidations = false
+ }
+ next.NeedsIndex = next.HasValidations || next.Converter != "" || (next.IsCustomFormatter && !next.SkipParse)
+ prev = next
+ next = new(GenItems)
+
+ switch {
+ case it.Items != nil:
+ it = it.Items
+ case it.AdditionalProperties != nil:
+ it = it.AdditionalProperties
+ default:
+ it = nil
+ }
+ }
+ // propagate HasValidations
+ var propag func(child *GenItems) (bool, bool)
+ propag = func(child *GenItems) (bool, bool) {
+ if child == nil {
+ return false, false
+ }
+ cValidations, cIndex := propag(child.Child)
+ child.HasValidations = child.HasValidations || cValidations
+ child.NeedsIndex = child.HasValidations || child.Converter != "" || (child.IsCustomFormatter && !child.SkipParse) || cIndex
+ return child.HasValidations, child.NeedsIndex
+ }
+ items.HasValidations, items.NeedsIndex = propag(items)
+
+ // resolve nullability conflicts when declaring body as a map of array of an anonymous complex object
+ // (e.g. refer to an extra schema type, which is nullable, but not rendered as a pointer in arrays or maps)
+ // Rule: outer type rules (with IsMapNullOverride), inner types are fixed
+ var fixNullable func(child *GenItems) string
+ fixNullable = func(child *GenItems) string {
+ if !child.IsArray && !child.IsMap {
+ if child.IsComplexObject {
+ return child.GoType
+ }
+ return ""
+ }
+ if innerType := fixNullable(child.Child); innerType != "" {
+ if child.IsMapNullOverride && child.IsArray {
+ child.GoType = "[]" + innerType
+ return child.GoType
+ }
+ }
+ return ""
+ }
+ fixNullable(items)
+ }
+ return items
+}
+
+func (b *codeGenOpBuilder) setBodyParamValidation(p *GenParameter) {
+ // Determine validation strategy for body param.
+ //
+ // Here are the distinct strategies:
+ // - the body parameter is a model object => delegates
+ // - the body parameter is an array of model objects => carry on slice validations, then iterate and delegate
+ // - the body parameter is a map of model objects => iterate and delegate
+ // - the body parameter is an array of simple objects (including maps)
+ // - the body parameter is a map of simple objects (including arrays)
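+ //
+ // For illustration (hypothetical models): a body declared as a $ref to a Pet model
+ // delegates validation to the generated Pet type, while a body declared as []Pet
+ // first runs slice validations, then iterates and delegates each element to Pet.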
+ if p.IsBodyParam() {
+ var hasSimpleBodyParams, hasSimpleBodyItems, hasSimpleBodyMap, hasModelBodyParams, hasModelBodyItems, hasModelBodyMap bool
+ s := p.Schema
+ if s != nil {
+ doNot := s.IsInterface || s.IsStream || s.IsBase64
+ // composition of primitive fields must be properly identified: hack this through
+ _, isPrimitive := primitives[s.GoType]
+ _, isFormatter := customFormatters[s.GoType]
+ isComposedPrimitive := s.IsPrimitive && !(isPrimitive || isFormatter)
+
+ hasSimpleBodyParams = !s.IsComplexObject && !s.IsAliased && !isComposedPrimitive && !doNot
+ hasModelBodyParams = (s.IsComplexObject || s.IsAliased || isComposedPrimitive) && !doNot
+
+ if s.IsArray && s.Items != nil {
+ it := s.Items
+ doNot = it.IsInterface || it.IsStream || it.IsBase64
+ hasSimpleBodyItems = !it.IsComplexObject && !(it.IsAliased || doNot)
+ hasModelBodyItems = (it.IsComplexObject || it.IsAliased) && !doNot
+ }
+ if s.IsMap && s.AdditionalProperties != nil {
+ it := s.AdditionalProperties
+ hasSimpleBodyMap = !it.IsComplexObject && !(it.IsAliased || doNot)
+ hasModelBodyMap = !hasSimpleBodyMap && !doNot
+ }
+ }
+ // set validation strategy for body param
+ p.HasSimpleBodyParams = hasSimpleBodyParams
+ p.HasSimpleBodyItems = hasSimpleBodyItems
+ p.HasModelBodyParams = hasModelBodyParams
+ p.HasModelBodyItems = hasModelBodyItems
+ p.HasModelBodyMap = hasModelBodyMap
+ p.HasSimpleBodyMap = hasSimpleBodyMap
+ }
+
+}
+
+// makeSecuritySchemes produces a sorted list of security schemes for this operation
+func (b *codeGenOpBuilder) makeSecuritySchemes(receiver string) GenSecuritySchemes {
+ return gatherSecuritySchemes(b.SecurityDefinitions, b.Name, b.Principal, receiver, b.GenOpts.PrincipalIsNullable())
+}
+
+// makeSecurityRequirements produces a sorted list of security requirements for this operation.
+// For now, these requirements are not used by codegen (the security requirement is determined at runtime).
+// We keep the order of the slice from the original spec, but sort the inner slice which comes from a map,
+// as well as the map of scopes.
+func (b *codeGenOpBuilder) makeSecurityRequirements(_ string) []GenSecurityRequirements {
+ if b.Security == nil {
+ // nil (default requirement) is different from [] (no requirement)
+ return nil
+ }
+
+ securityRequirements := make([]GenSecurityRequirements, 0, len(b.Security))
+ for _, req := range b.Security {
+ jointReq := make(GenSecurityRequirements, 0, len(req))
+ for _, j := range req {
+ scopes := j.Scopes
+ sort.Strings(scopes)
+ jointReq = append(jointReq, GenSecurityRequirement{
+ Name: j.Name,
+ Scopes: scopes,
+ })
+ }
+ // sort joint requirements (come from a map in spec)
+ sort.Sort(jointReq)
+ securityRequirements = append(securityRequirements, jointReq)
+ }
+ return securityRequirements
+}
+
+// cloneSchema returns a deep copy of a schema
+func (b *codeGenOpBuilder) cloneSchema(schema *spec.Schema) *spec.Schema {
+ savedSchema := &spec.Schema{}
+ schemaRep, _ := json.Marshal(schema)
+ _ = json.Unmarshal(schemaRep, savedSchema)
+ return savedSchema
+}
+
+// saveResolveContext keeps a copy of known definitions and schema to properly roll back on a makeGenSchema() call
+// This uses a deep clone of the spec document to construct a type resolver which knows about the definitions present when the making of this operation started,
+// and only these definitions. We are not interested in the "original spec", but in the already transformed spec.
+func (b *codeGenOpBuilder) saveResolveContext(resolver *typeResolver, schema *spec.Schema) (*typeResolver, *spec.Schema) {
+ if b.PristineDoc == nil {
+ b.PristineDoc = b.Doc.Pristine()
+ }
+ rslv := newTypeResolver(b.GenOpts.LanguageOpts.ManglePackageName(resolver.ModelsPackage, defaultModelsTarget), b.DefaultImports[b.ModelsPackage], b.PristineDoc)
+
+ return rslv, b.cloneSchema(schema)
+}
+
+// liftExtraSchemas constructs the schema for an anonymous construct with some ExtraSchemas.
+//
+// When some ExtraSchemas are produced from something other than a definition,
+// this indicates we are not running in fully flattened mode and we need to render
+// these ExtraSchemas in the operation's package.
+// We need to rebuild the schema with a new type resolver to reflect this change in the
+// models package.
+func (b *codeGenOpBuilder) liftExtraSchemas(resolver, rslv *typeResolver, bs *spec.Schema, sc *schemaGenContext) (schema *GenSchema, err error) {
+ // restore resolving state before previous call to makeGenSchema()
+ sc.Schema = *bs
+
+ pg := sc.shallowClone()
+ pkg := b.GenOpts.LanguageOpts.ManglePackageName(resolver.ModelsPackage, defaultModelsTarget)
+
+ // make a resolver for current package (i.e. operations)
+ pg.TypeResolver = newTypeResolver("", b.DefaultImports[b.APIPackage], rslv.Doc).
+ withKeepDefinitionsPackage(pkg).
+ withDefinitionPackage(b.APIPackageAlias) // all new extra schemas are going to be in api pkg
+ pg.ExtraSchemas = make(map[string]GenSchema, len(sc.ExtraSchemas))
+ pg.UseContainerInName = true
+
+ // rebuild schema within local package
+ if err = pg.makeGenSchema(); err != nil {
+ return
+ }
+
+ // lift nested extra schemas (inlined types)
+ if b.ExtraSchemas == nil {
+ b.ExtraSchemas = make(map[string]GenSchema, len(pg.ExtraSchemas))
+ }
+ for _, v := range pg.ExtraSchemas {
+ vv := v
+ if !v.IsStream {
+ b.ExtraSchemas[vv.Name] = vv
+ }
+ }
+ schema = &pg.GenSchema
+ return
+}
+
+// buildOperationSchema constructs a schema for an operation (for body params or responses).
+// It determines if the schema is readily available from the models package,
+// or if a schema has to be generated in the operations package (i.e. is anonymous).
+// Whenever an anonymous schema needs some extra schemas, we also determine if these extras are
+// available from models or must be generated alongside the schema in the operations package.
+//
+// Duplicate extra schemas are pruned later on, when operations are grouped into packages (e.g. from tags).
+func (b *codeGenOpBuilder) buildOperationSchema(schemaPath, containerName, schemaName, receiverName, indexVar string, sch *spec.Schema, resolver *typeResolver) (GenSchema, error) {
+ var schema GenSchema
+
+ if sch == nil {
+ sch = &spec.Schema{}
+ }
+ shallowClonedResolver := *resolver
+ shallowClonedResolver.ModelsFullPkg = b.DefaultImports[b.ModelsPackage]
+ rslv := &shallowClonedResolver
+
+ sc := schemaGenContext{
+ Path: schemaPath,
+ Name: containerName,
+ Receiver: receiverName,
+ ValueExpr: receiverName,
+ IndexVar: indexVar,
+ Schema: *sch,
+ Required: false,
+ TypeResolver: rslv,
+ Named: false,
+ IncludeModel: true,
+ IncludeValidator: b.GenOpts.IncludeValidator,
+ StrictAdditionalProperties: b.GenOpts.StrictAdditionalProperties,
+ ExtraSchemas: make(map[string]GenSchema),
+ StructTags: b.GenOpts.StructTags,
+ }
+
+ var (
+ br *typeResolver
+ bs *spec.Schema
+ )
+
+ if sch.Ref.String() == "" {
+ // backup the type resolver context
+ // (not needed when the schema has a name)
+ br, bs = b.saveResolveContext(rslv, sch)
+ }
+
+ if err := sc.makeGenSchema(); err != nil {
+ return GenSchema{}, err
+ }
+ for alias, pkg := range findImports(&sc.GenSchema) {
+ b.Imports[alias] = pkg
+ }
+
+ if sch.Ref.String() == "" && len(sc.ExtraSchemas) > 0 {
+ newSchema, err := b.liftExtraSchemas(resolver, br, bs, &sc)
+ if err != nil {
+ return GenSchema{}, err
+ }
+ if newSchema != nil {
+ schema = *newSchema
+ }
+ } else {
+ schema = sc.GenSchema
+ }
+
+ // new schemas will be in api pkg
+ schemaPkg := b.GenOpts.LanguageOpts.ManglePackageName(b.APIPackage, "")
+ schema.Pkg = schemaPkg
+
+ if schema.IsAnonymous {
+ // a generated name for anonymous schema
+ // TODO: support x-go-name
+ hasProperties := len(schema.Properties) > 0
+ isAllOf := len(schema.AllOf) > 0
+ isInterface := schema.IsInterface
+ hasValidations := schema.HasValidations
+
+ // for complex anonymous objects, produce an extra schema
+ if hasProperties || isAllOf {
+ if b.ExtraSchemas == nil {
+ b.ExtraSchemas = make(map[string]GenSchema)
+ }
+ schema.Name = schemaName
+ schema.GoType = schemaName
+ schema.IsAnonymous = false
+ b.ExtraSchemas[schemaName] = schema
+
+ // constructs new schema to refer to the newly created type
+ schema = GenSchema{}
+ schema.IsAnonymous = false
+ schema.IsComplexObject = true
+ schema.SwaggerType = schemaName
+ schema.HasValidations = hasValidations
+ schema.GoType = schemaName
+ schema.Pkg = schemaPkg
+ } else if isInterface {
+ schema = GenSchema{}
+ schema.IsAnonymous = false
+ schema.IsComplexObject = false
+ schema.IsInterface = true
+ schema.HasValidations = false
+ schema.GoType = iface
+ }
+ }
+
+ return schema, nil
+}
+
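+// intersectTags returns the tags in left that are also in right (all of left when
+// right is empty), deduplicated and preserving the original order, e.g. (illustratively)
+// intersectTags([]string{"pets", "store", "pets"}, []string{"pets"}) yields ["pets"].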
+func intersectTags(left, right []string) []string {
+ // dedupe
+ uniqueTags := make(map[string]struct{}, maxInt(len(left), len(right)))
+ for _, l := range left {
+ if len(right) == 0 || swag.ContainsStrings(right, l) {
+ uniqueTags[l] = struct{}{}
+ }
+ }
+ filtered := make([]string, 0, len(uniqueTags))
+ // stable output across generations, preserving original order
+ for _, k := range left {
+ if _, ok := uniqueTags[k]; !ok {
+ continue
+ }
+ filtered = append(filtered, k)
+ delete(uniqueTags, k)
+ }
+ return filtered
+}
+
+// analyze tags for an operation
+func (b *codeGenOpBuilder) analyzeTags() (string, []string, bool) {
+ var (
+ filter []string
+ tag string
+ hasTagOverride bool
+ )
+ if b.GenOpts != nil {
+ filter = b.GenOpts.Tags
+ }
+ intersected := intersectTags(pruneEmpty(b.Operation.Tags), filter)
+ if !b.GenOpts.SkipTagPackages && len(intersected) > 0 {
+ // override generation with: x-go-operation-tag
+ tag, hasTagOverride = b.Operation.Extensions.GetString(xGoOperationTag)
+ if !hasTagOverride {
+ // TODO(fred): this part should be delegated to some new TagsFor(operation) in go-openapi/analysis
+ tag = intersected[0]
+ gtags := b.Doc.Spec().Tags
+ for _, gtag := range gtags {
+ if gtag.Name != tag {
+ continue
+ }
+ // honor x-go-name in tag
+ if name, hasGoName := gtag.Extensions.GetString(xGoName); hasGoName {
+ tag = name
+ break
+ }
+ // honor x-go-operation-tag in tag
+ if name, hasOpName := gtag.Extensions.GetString(xGoOperationTag); hasOpName {
+ tag = name
+ break
+ }
+ }
+ }
+ }
+ if tag == b.APIPackage {
+ // conflict with "operations" package is handled separately
+ tag = renameOperationPackage(intersected, tag)
+ }
+ b.APIPackage = b.GenOpts.LanguageOpts.ManglePackageName(tag, b.APIPackage) // actual package name
+ b.APIPackageAlias = deconflictTag(intersected, b.APIPackage) // deconflicted import alias
+ return tag, intersected, len(filter) == 0 || len(filter) > 0 && len(intersected) > 0
+}
+
+func maxInt(a, b int) int {
+ if a > b {
+ return a
+ }
+ return b
+}
+
+// deconflictTag ensures generated packages for operations based on tags do not conflict
+// with other imports
+func deconflictTag(seenTags []string, pkg string) string {
+ return deconflictPkg(pkg, func(pkg string) string { return renameOperationPackage(seenTags, pkg) })
+}
+
+// deconflictPrincipal ensures that whenever an external principal package is added, it doesn't conflict
+// with standard imports
+func deconflictPrincipal(pkg string) string {
+ switch pkg {
+ case "principal":
+ return renamePrincipalPackage(pkg)
+ default:
+ return deconflictPkg(pkg, renamePrincipalPackage)
+ }
+}
+
+// deconflictPkg renames package names which conflict with standard imports
+func deconflictPkg(pkg string, renamer func(string) string) string {
+ switch pkg {
+ // package conflict with variables
+ case "api", "httptransport", "formats", "server":
+ fallthrough
+ // package conflict with go-openapi imports
+ case "errors", "runtime", "middleware", "security", "spec", "strfmt", "loads", "swag", "validate":
+ fallthrough
+ // package conflict with stdlib/other lib imports
+ case "tls", "http", "fmt", "strings", "log", "flags", "pflag", "json", "time":
+ return renamer(pkg)
+ }
+ return pkg
+}
+
+func renameOperationPackage(seenTags []string, pkg string) string {
+ current := strings.ToLower(pkg) + "ops"
+ if len(seenTags) == 0 {
+ return current
+ }
+ for swag.ContainsStringsCI(seenTags, current) {
+ current += "1"
+ }
+ return current
+}
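+
+// For illustration (hypothetical input): renameOperationPackage([]string{"fooops"}, "foo")
+// yields "fooops1", since the candidate "fooops" is already taken by an existing tag.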
+
+func renamePrincipalPackage(_ string) string {
+ // favors readability over perfect deconfliction
+ return "auth"
+}
+
+func renameServerPackage(pkg string) string {
+ // favors readability over perfect deconfliction
+ return "swagger" + pkg + "srv"
+}
+
+func renameAPIPackage(pkg string) string {
+ // favors readability over perfect deconfliction
+ return "swagger" + pkg
+}
+
+func renameImplementationPackage(pkg string) string {
+ // favors readability over perfect deconfliction
+ return "swagger" + pkg + "impl"
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/shared.go b/vendor/github.com/go-swagger/go-swagger/generator/shared.go
new file mode 100644
index 000000000..5e2c2cee2
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/shared.go
@@ -0,0 +1,1096 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generator
+
+import (
+ "bytes"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log"
+ "os"
+ "path"
+ "path/filepath"
+ "reflect"
+ "sort"
+ "strings"
+ "text/template"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+const (
+ // default generation targets structure
+ defaultModelsTarget = "models"
+ defaultServerTarget = "restapi"
+ defaultClientTarget = "client"
+ defaultOperationsTarget = "operations"
+ defaultClientName = "rest"
+ defaultServerName = "swagger"
+ defaultScheme = "http"
+ defaultImplementationTarget = "implementation"
+)
+
+func init() {
+ // all initializations for the generator package
+ debugOptions()
+ initLanguage()
+ initTemplateRepo()
+ initTypes()
+}
+
+// DefaultSectionOpts sets the default section options for the given opts. It is used when no config file is passed,
+// and falls back to the embedded templates when no local override can be found.
+func DefaultSectionOpts(gen *GenOpts) {
+ sec := gen.Sections
+ if len(sec.Models) == 0 {
+ opts := []TemplateOpts{
+ {
+ Name: "definition",
+ Source: "asset:model",
+ Target: "{{ joinFilePath .Target (toPackagePath .ModelPackage) }}",
+ FileName: "{{ (snakize (pascalize .Name)) }}.go",
+ },
+ }
+ if gen.IncludeCLi {
+ opts = append(opts, TemplateOpts{
+ Name: "clidefinitionhook",
+ Source: "asset:cliModelcli",
+ Target: "{{ joinFilePath .Target (toPackagePath .CliPackage) }}",
+ FileName: "{{ (snakize (pascalize .Name)) }}_model.go",
+ })
+ }
+ sec.Models = opts
+ }
+
+ if len(sec.Operations) == 0 {
+ if gen.IsClient {
+ opts := []TemplateOpts{
+ {
+ Name: "parameters",
+ Source: "asset:clientParameter",
+ Target: "{{ joinFilePath .Target (toPackagePath .ClientPackage) (toPackagePath .Package) }}",
+ FileName: "{{ (snakize (pascalize .Name)) }}_parameters.go",
+ },
+ {
+ Name: "responses",
+ Source: "asset:clientResponse",
+ Target: "{{ joinFilePath .Target (toPackagePath .ClientPackage) (toPackagePath .Package) }}",
+ FileName: "{{ (snakize (pascalize .Name)) }}_responses.go",
+ },
+ }
+ if gen.IncludeCLi {
+ opts = append(opts, TemplateOpts{
+ Name: "clioperation",
+ Source: "asset:cliOperation",
+ Target: "{{ joinFilePath .Target (toPackagePath .CliPackage) }}",
+ FileName: "{{ (snakize (pascalize .Name)) }}_operation.go",
+ })
+ }
+ sec.Operations = opts
+ } else {
+ ops := []TemplateOpts{}
+ if gen.IncludeParameters {
+ ops = append(ops, TemplateOpts{
+ Name: "parameters",
+ Source: "asset:serverParameter",
+ Target: "{{ if .UseTags }}{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .APIPackage) (toPackagePath .Package) }}{{ else }}{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .Package) }}{{ end }}",
+ FileName: "{{ (snakize (pascalize .Name)) }}_parameters.go",
+ })
+ }
+ if gen.IncludeURLBuilder {
+ ops = append(ops, TemplateOpts{
+ Name: "urlbuilder",
+ Source: "asset:serverUrlbuilder",
+ Target: "{{ if .UseTags }}{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .APIPackage) (toPackagePath .Package) }}{{ else }}{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .Package) }}{{ end }}",
+ FileName: "{{ (snakize (pascalize .Name)) }}_urlbuilder.go",
+ })
+ }
+ if gen.IncludeResponses {
+ ops = append(ops, TemplateOpts{
+ Name: "responses",
+ Source: "asset:serverResponses",
+ Target: "{{ if .UseTags }}{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .APIPackage) (toPackagePath .Package) }}{{ else }}{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .Package) }}{{ end }}",
+ FileName: "{{ (snakize (pascalize .Name)) }}_responses.go",
+ })
+ }
+ if gen.IncludeHandler {
+ ops = append(ops, TemplateOpts{
+ Name: "handler",
+ Source: "asset:serverOperation",
+ Target: "{{ if .UseTags }}{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .APIPackage) (toPackagePath .Package) }}{{ else }}{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .Package) }}{{ end }}",
+ FileName: "{{ (snakize (pascalize .Name)) }}.go",
+ })
+ }
+ sec.Operations = ops
+ }
+ }
+
+ if len(sec.OperationGroups) == 0 {
+ if gen.IsClient {
+ sec.OperationGroups = []TemplateOpts{
+ {
+ Name: "client",
+ Source: "asset:clientClient",
+ Target: "{{ joinFilePath .Target (toPackagePath .ClientPackage) (toPackagePath .Name)}}",
+ FileName: "{{ (snakize (pascalize .Name)) }}_client.go",
+ },
+ }
+ } else {
+ sec.OperationGroups = []TemplateOpts{}
+ }
+ }
+
+ if len(sec.Application) == 0 {
+ if gen.IsClient {
+ opts := []TemplateOpts{
+ {
+ Name: "facade",
+ Source: "asset:clientFacade",
+ Target: "{{ joinFilePath .Target (toPackagePath .ClientPackage) }}",
+ FileName: "{{ snakize .Name }}Client.go",
+ },
+ }
+ if gen.IncludeCLi {
+ // include a commandline tool app
+ opts = append(opts, []TemplateOpts{{
+ Name: "commandline",
+ Source: "asset:cliCli",
+ Target: "{{ joinFilePath .Target (toPackagePath .CliPackage) }}",
+ FileName: "cli.go",
+ }, {
+ Name: "climain",
+ Source: "asset:cliMain",
+ Target: "{{ joinFilePath .Target \"cmd\" (toPackagePath .CliAppName) }}",
+ FileName: "main.go",
+ }, {
+ Name: "cliAutoComplete",
+ Source: "asset:cliCompletion",
+ Target: "{{ joinFilePath .Target (toPackagePath .CliPackage) }}",
+ FileName: "autocomplete.go",
+ }}...)
+ }
+ sec.Application = opts
+ } else {
+ opts := []TemplateOpts{
+ {
+ Name: "main",
+ Source: "asset:serverMain",
+ Target: "{{ joinFilePath .Target \"cmd\" .MainPackage }}",
+ FileName: "main.go",
+ },
+ {
+ Name: "embedded_spec",
+ Source: "asset:swaggerJsonEmbed",
+ Target: "{{ joinFilePath .Target (toPackagePath .ServerPackage) }}",
+ FileName: "embedded_spec.go",
+ },
+ {
+ Name: "server",
+ Source: "asset:serverServer",
+ Target: "{{ joinFilePath .Target (toPackagePath .ServerPackage) }}",
+ FileName: "server.go",
+ },
+ {
+ Name: "builder",
+ Source: "asset:serverBuilder",
+ Target: "{{ joinFilePath .Target (toPackagePath .ServerPackage) (toPackagePath .APIPackage) }}",
+ FileName: "{{ snakize (pascalize .Name) }}_api.go",
+ },
+ {
+ Name: "doc",
+ Source: "asset:serverDoc",
+ Target: "{{ joinFilePath .Target (toPackagePath .ServerPackage) }}",
+ FileName: "doc.go",
+ },
+ }
+ if gen.ImplementationPackage != "" {
+ // Use auto configure template
+ opts = append(opts, TemplateOpts{
+ Name: "autoconfigure",
+ Source: "asset:serverAutoconfigureapi",
+ Target: "{{ joinFilePath .Target (toPackagePath .ServerPackage) }}",
+ FileName: "auto_configure_{{ (snakize (pascalize .Name)) }}.go",
+ })
+
+ } else {
+ opts = append(opts, TemplateOpts{
+ Name: "configure",
+ Source: "asset:serverConfigureapi",
+ Target: "{{ joinFilePath .Target (toPackagePath .ServerPackage) }}",
+ FileName: "configure_{{ (snakize (pascalize .Name)) }}.go",
+ SkipExists: !gen.RegenerateConfigureAPI,
+ })
+ }
+ sec.Application = opts
+ }
+ }
+ gen.Sections = sec
+
+}
+
+// MarkdownOpts for rendering a spec as markdown
+func MarkdownOpts() *LanguageOpts {
+ opts := &LanguageOpts{}
+ opts.Init()
+ return opts
+}
+
+// MarkdownSectionOpts sets the section options to render a spec as markdown into the given output file.
+func MarkdownSectionOpts(gen *GenOpts, output string) {
+ gen.Sections.Models = nil
+ gen.Sections.OperationGroups = nil
+ gen.Sections.Operations = nil
+ gen.LanguageOpts = MarkdownOpts()
+ gen.Sections.Application = []TemplateOpts{
+ {
+ Name: "markdowndocs",
+ Source: "asset:markdownDocs",
+ Target: filepath.Dir(output),
+ FileName: filepath.Base(output),
+ },
+ }
+}
+
+// TemplateOpts allows for codegen customization
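+//
+// In a generation config file, each template section entry maps to these fields,
+// e.g. (illustrative snippet):
+//
+//	name: handler
+//	source: asset:serverOperation
+//	target: '{{ joinFilePath .Target (toPackagePath .ServerPackage) }}'
+//	file_name: '{{ (snakize (pascalize .Name)) }}.go'
+//	skip_exists: false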
+type TemplateOpts struct {
+ Name string `mapstructure:"name"`
+ Source string `mapstructure:"source"`
+ Target string `mapstructure:"target"`
+ FileName string `mapstructure:"file_name"`
+ SkipExists bool `mapstructure:"skip_exists"`
+ SkipFormat bool `mapstructure:"skip_format"`
+}
+
+// SectionOpts allows for specifying options to customize the templates used for generation
+type SectionOpts struct {
+ Application []TemplateOpts `mapstructure:"application"`
+ Operations []TemplateOpts `mapstructure:"operations"`
+ OperationGroups []TemplateOpts `mapstructure:"operation_groups"`
+ Models []TemplateOpts `mapstructure:"models"`
+}
+
+// GenOptsCommon holds the options for the generator
+type GenOptsCommon struct {
+ IncludeModel bool
+ IncludeValidator bool
+ IncludeHandler bool
+ IncludeParameters bool
+ IncludeResponses bool
+ IncludeURLBuilder bool
+ IncludeMain bool
+ IncludeSupport bool
+ IncludeCLi bool
+ ExcludeSpec bool
+ DumpData bool
+ ValidateSpec bool
+ FlattenOpts *analysis.FlattenOpts
+ IsClient bool
+ defaultsEnsured bool
+ PropertiesSpecOrder bool
+ StrictAdditionalProperties bool
+ AllowTemplateOverride bool
+
+ Spec string
+ APIPackage string
+ ModelPackage string
+ ServerPackage string
+ ClientPackage string
+ CliPackage string
+ CliAppName string // name of cli app. For example "dockerctl"
+ ImplementationPackage string
+ Principal string
+ PrincipalCustomIface bool // user-provided interface for Principal (non-nullable)
+ Target string // dir location where generated code is written to
+ Sections SectionOpts
+ LanguageOpts *LanguageOpts
+ TypeMapping map[string]string
+ Imports map[string]string
+ DefaultScheme string
+ DefaultProduces string
+ DefaultConsumes string
+ WithXML bool
+ TemplateDir string
+ Template string
+ RegenerateConfigureAPI bool
+ Operations []string
+ Models []string
+ Tags []string
+ StructTags []string
+ Name string
+ FlagStrategy string
+ CompatibilityMode string
+ ExistingModels string
+ Copyright string
+ SkipTagPackages bool
+ MainPackage string
+ IgnoreOperations bool
+ AllowEnumCI bool
+ StrictResponders bool
+ AcceptDefinitionsOnly bool
+
+ templates *Repository // a shallow clone of the global template repository
+}
+
+// CheckOpts carries out some global consistency checks on options.
+func (g *GenOpts) CheckOpts() error {
+ if g == nil {
+ return errors.New("gen opts are required")
+ }
+
+ if !filepath.IsAbs(g.Target) {
+ if _, err := filepath.Abs(g.Target); err != nil {
+ return fmt.Errorf("could not locate target %s: %v", g.Target, err)
+ }
+ }
+
+ if filepath.IsAbs(g.ServerPackage) {
+ return fmt.Errorf("you shouldn't specify an absolute path in --server-package: %s", g.ServerPackage)
+ }
+
+ if strings.HasPrefix(g.Spec, "http://") || strings.HasPrefix(g.Spec, "https://") {
+ return nil
+ }
+
+ pth, err := findSwaggerSpec(g.Spec)
+ if err != nil {
+ return err
+ }
+
+ // ensure spec path is absolute
+ g.Spec, err = filepath.Abs(pth)
+ if err != nil {
+ return fmt.Errorf("could not locate spec: %s", g.Spec)
+ }
+
+ return nil
+}
+
+// TargetPath returns the target generation path relative to the server package.
+// This method is used by templates, e.g. with {{ .TargetPath }}
+//
+// Error cases are prevented by calling CheckOpts beforehand.
+//
+// Example:
+// Target: ${PWD}/tmp
+// ServerPackage: abc/efg
+//
+// Server is generated in ${PWD}/tmp/abc/efg
+// relative TargetPath returned: ../../../tmp
+func (g *GenOpts) TargetPath() string {
+ var tgt string
+ if g.Target == "" {
+ tgt = "." // That's for windows
+ } else {
+ tgt = g.Target
+ }
+ tgtAbs, _ := filepath.Abs(tgt)
+ srvPkg := filepath.FromSlash(g.LanguageOpts.ManglePackagePath(g.ServerPackage, "server"))
+ srvrAbs := filepath.Join(tgtAbs, srvPkg)
+ tgtRel, _ := filepath.Rel(srvrAbs, filepath.Dir(tgtAbs))
+ tgtRel = filepath.Join(tgtRel, filepath.Base(tgtAbs))
+ return tgtRel
+}
+
+// SpecPath returns the path to the spec relative to the server package.
+// If the spec is remote, keep its absolute location.
+//
+// If spec is not relative to server (e.g. lives on a different drive on windows),
+// then the resolved path is absolute.
+//
+// This method is used by templates, e.g. with {{ .SpecPath }}
+//
+// Error cases are prevented by calling CheckOpts beforehand.
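+//
+// For illustration (hypothetical layout): with Target ${PWD}/tmp, ServerPackage "restapi"
+// and a local spec at ${PWD}/swagger.yaml, the returned path is "../../swagger.yaml".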
+func (g *GenOpts) SpecPath() string {
+ if strings.HasPrefix(g.Spec, "http://") || strings.HasPrefix(g.Spec, "https://") {
+ return g.Spec
+ }
+ // Local specifications
+ specAbs, _ := filepath.Abs(g.Spec)
+ var tgt string
+ if g.Target == "" {
+ tgt = "." // That's for windows
+ } else {
+ tgt = g.Target
+ }
+ tgtAbs, _ := filepath.Abs(tgt)
+ srvPkg := filepath.FromSlash(g.LanguageOpts.ManglePackagePath(g.ServerPackage, "server"))
+ srvAbs := filepath.Join(tgtAbs, srvPkg)
+ specRel, err := filepath.Rel(srvAbs, specAbs)
+ if err != nil {
+ return specAbs
+ }
+ return specRel
+}
+
+// PrincipalIsNullable indicates whether the principal type used for authentication
+// may be used as a pointer
+func (g *GenOpts) PrincipalIsNullable() bool {
+ debugLog("Principal: %s, %t, isnullable: %t", g.Principal, g.PrincipalCustomIface, g.Principal != iface && !g.PrincipalCustomIface)
+ return g.Principal != iface && !g.PrincipalCustomIface
+}
+
+// EnsureDefaults for these gen opts
+func (g *GenOpts) EnsureDefaults() error {
+ if g.defaultsEnsured {
+ return nil
+ }
+
+ g.templates = templates.ShallowClone()
+
+ g.templates.LoadDefaults()
+
+ if g.LanguageOpts == nil {
+ g.LanguageOpts = DefaultLanguageFunc()
+ }
+
+ DefaultSectionOpts(g)
+
+ // set defaults for flattening options
+ if g.FlattenOpts == nil {
+ g.FlattenOpts = &analysis.FlattenOpts{
+ Minimal: true,
+ Verbose: true,
+ RemoveUnused: false,
+ Expand: false,
+ }
+ }
+
+ if g.DefaultScheme == "" {
+ g.DefaultScheme = defaultScheme
+ }
+
+ if g.DefaultConsumes == "" {
+ g.DefaultConsumes = runtime.JSONMime
+ }
+
+ if g.DefaultProduces == "" {
+ g.DefaultProduces = runtime.JSONMime
+ }
+
+ // always include validator with models
+ g.IncludeValidator = true
+
+ if g.Principal == "" {
+ g.Principal = iface
+ g.PrincipalCustomIface = false
+ }
+
+ g.defaultsEnsured = true
+ return nil
+}
+
+func (g *GenOpts) location(t *TemplateOpts, data interface{}) (string, string, error) {
+ v := reflect.Indirect(reflect.ValueOf(data))
+ fld := v.FieldByName("Name")
+ var name string
+ if fld.IsValid() {
+ log.Println("name field", fld.String())
+ name = fld.String()
+ }
+
+ fldpack := v.FieldByName("Package")
+ pkg := g.APIPackage
+ if fldpack.IsValid() {
+ log.Println("package field", fldpack.String())
+ pkg = fldpack.String()
+ }
+
+ var tags []string
+ tagsF := v.FieldByName("Tags")
+ if tagsF.IsValid() {
+ if tt, ok := tagsF.Interface().([]string); ok {
+ tags = tt
+ }
+ }
+
+ var useTags bool
+ useTagsF := v.FieldByName("UseTags")
+ if useTagsF.IsValid() {
+ useTags = useTagsF.Interface().(bool)
+ }
+
+ funcMap := FuncMapFunc(g.LanguageOpts)
+
+ pthTpl, err := template.New(t.Name + "-target").Funcs(funcMap).Parse(t.Target)
+ if err != nil {
+ return "", "", err
+ }
+
+ fNameTpl, err := template.New(t.Name + "-filename").Funcs(funcMap).Parse(t.FileName)
+ if err != nil {
+ return "", "", err
+ }
+
+ d := struct {
+ Name, CliAppName, Package, APIPackage, ServerPackage, ClientPackage, CliPackage, ModelPackage, MainPackage, Target string
+ Tags []string
+ UseTags bool
+ Context interface{}
+ }{
+ Name: name,
+ CliAppName: g.CliAppName,
+ Package: pkg,
+ APIPackage: g.APIPackage,
+ ServerPackage: g.ServerPackage,
+ ClientPackage: g.ClientPackage,
+ CliPackage: g.CliPackage,
+ ModelPackage: g.ModelPackage,
+ MainPackage: g.MainPackage,
+ Target: g.Target,
+ Tags: tags,
+ UseTags: useTags,
+ Context: data,
+ }
+
+ var pthBuf bytes.Buffer
+ if e := pthTpl.Execute(&pthBuf, d); e != nil {
+ return "", "", e
+ }
+
+ var fNameBuf bytes.Buffer
+ if e := fNameTpl.Execute(&fNameBuf, d); e != nil {
+ return "", "", e
+ }
+ return pthBuf.String(), fileName(fNameBuf.String()), nil
+}
+
+func (g *GenOpts) render(t *TemplateOpts, data interface{}) ([]byte, error) {
+ var templ *template.Template
+
+ if strings.HasPrefix(strings.ToLower(t.Source), "asset:") {
+ tt, err := g.templates.Get(strings.TrimPrefix(t.Source, "asset:"))
+ if err != nil {
+ return nil, err
+ }
+ templ = tt
+ }
+
+ if templ == nil {
+ // try to load from repository (and enable dependencies)
+ name := swag.ToJSONName(strings.TrimSuffix(t.Source, ".gotmpl"))
+ tt, err := g.templates.Get(name)
+ if err == nil {
+ templ = tt
+ }
+ }
+
+ if templ == nil {
+ // try to load template from disk, in TemplateDir if specified
+ // (dependencies resolution is limited to preloaded assets)
+ var templateFile string
+ if g.TemplateDir != "" {
+ templateFile = filepath.Join(g.TemplateDir, t.Source)
+ } else {
+ templateFile = t.Source
+ }
+ content, err := os.ReadFile(templateFile)
+ if err != nil {
+ return nil, fmt.Errorf("error while opening %s template file: %v", templateFile, err)
+ }
+ tt, err := template.New(t.Source).Funcs(FuncMapFunc(g.LanguageOpts)).Parse(string(content))
+ if err != nil {
+ return nil, fmt.Errorf("template parsing failed on template %s: %v", t.Name, err)
+ }
+ templ = tt
+ }
+
+ if templ == nil {
+ return nil, fmt.Errorf("template %q not found", t.Source)
+ }
+
+ var tBuf bytes.Buffer
+ if err := templ.Execute(&tBuf, data); err != nil {
+ return nil, fmt.Errorf("template execution failed for template %s: %v", t.Name, err)
+ }
+ log.Printf("executed template %s", t.Source)
+
+ return tBuf.Bytes(), nil
+}
+
+// Render template and write generated source code.
+// Generated code is reformatted ("linted"), which gives an
+// additional level of checking. If this step fails, the generated
+// code is still dumped, for template debugging purposes.
+func (g *GenOpts) write(t *TemplateOpts, data interface{}) error {
+ dir, fname, err := g.location(t, data)
+ if err != nil {
+ return fmt.Errorf("failed to resolve template location for template %s: %v", t.Name, err)
+ }
+
+ if t.SkipExists && fileExists(dir, fname) {
+ debugLog("skipping generation of %s because it already exists and skip_exist directive is set for %s",
+ filepath.Join(dir, fname), t.Name)
+ return nil
+ }
+
+ log.Printf("creating generated file %q in %q as %s", fname, dir, t.Name)
+ content, err := g.render(t, data)
+ if err != nil {
+ return fmt.Errorf("failed rendering template data for %s: %v", t.Name, err)
+ }
+
+ if dir != "" {
+ _, exists := os.Stat(dir)
+ if os.IsNotExist(exists) {
+ debugLog("creating directory %q for \"%s\"", dir, t.Name)
+ // Directory settings consistent with file privileges.
+ // Environment's umask may alter this setup
+ if e := os.MkdirAll(dir, 0755); e != nil {
+ return e
+ }
+ }
+ }
+
+ // Conditionally format the code, unless the user wants to skip
+ formatted := content
+ var writeerr error
+
+ if !t.SkipFormat {
+ formatted, err = g.LanguageOpts.FormatContent(filepath.Join(dir, fname), content)
+ if err != nil {
+ log.Printf("source formatting failed on template-generated source (%q for %s). Check that your template produces valid code", filepath.Join(dir, fname), t.Name)
+ writeerr = os.WriteFile(filepath.Join(dir, fname), content, 0644) // #nosec
+ if writeerr != nil {
+ return fmt.Errorf("failed to write (unformatted) file %q in %q: %v", fname, dir, writeerr)
+ }
+ log.Printf("unformatted generated source %q has been dumped for template debugging purposes. DO NOT build on this source!", fname)
+ return fmt.Errorf("source formatting on generated source %q failed: %v", t.Name, err)
+ }
+ }
+
+ writeerr = os.WriteFile(filepath.Join(dir, fname), formatted, 0644) // #nosec
+ if writeerr != nil {
+ return fmt.Errorf("failed to write file %q in %q: %v", fname, dir, writeerr)
+ }
+ return err
+}
+
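+// fileName snake-cases the rendered file name while preserving its extension,
+// e.g. "MyOperation.go" becomes "my_operation.go" (illustrative input).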
+func fileName(in string) string {
+ ext := filepath.Ext(in)
+ return swag.ToFileName(strings.TrimSuffix(in, ext)) + ext
+}
+
+func (g *GenOpts) shouldRenderApp(t *TemplateOpts, _ *GenApp) bool {
+ switch swag.ToFileName(swag.ToGoName(t.Name)) {
+ case "main":
+ return g.IncludeMain
+ case "embedded_spec":
+ return !g.ExcludeSpec
+ default:
+ return true
+ }
+}
+
+func (g *GenOpts) shouldRenderOperations() bool {
+ return g.IncludeHandler || g.IncludeParameters || g.IncludeResponses
+}
+
+func (g *GenOpts) renderApplication(app *GenApp) error {
+ log.Printf("rendering %d templates for application %s", len(g.Sections.Application), app.Name)
+ for _, tp := range g.Sections.Application {
+ templ := tp
+ if !g.shouldRenderApp(&templ, app) {
+ continue
+ }
+ if err := g.write(&templ, app); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (g *GenOpts) renderOperationGroup(gg *GenOperationGroup) error {
+ log.Printf("rendering %d templates for operation group %s", len(g.Sections.OperationGroups), g.Name)
+ for _, tp := range g.Sections.OperationGroups {
+ templ := tp
+ if !g.shouldRenderOperations() {
+ continue
+ }
+
+ if err := g.write(&templ, gg); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (g *GenOpts) renderOperation(gg *GenOperation) error {
+ log.Printf("rendering %d templates for operation %s", len(g.Sections.Operations), g.Name)
+ for _, tp := range g.Sections.Operations {
+ templ := tp
+ if !g.shouldRenderOperations() {
+ continue
+ }
+
+ if err := g.write(&templ, gg); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (g *GenOpts) renderDefinition(gg *GenDefinition) error {
+ log.Printf("rendering %d templates for model %s", len(g.Sections.Models), gg.Name)
+ for _, tp := range g.Sections.Models {
+ templ := tp
+ if !g.IncludeModel {
+ continue
+ }
+
+ if err := g.write(&templ, gg); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (g *GenOptsCommon) setTemplates() error {
+ if g.Template != "" {
+ // set contrib templates
+ if err := g.templates.LoadContrib(g.Template); err != nil {
+ return err
+ }
+ }
+
+ g.templates.SetAllowOverride(g.AllowTemplateOverride)
+
+ if g.TemplateDir != "" {
+ // set custom templates
+ if err := g.templates.LoadDir(g.TemplateDir); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// defaultImports produces a default imports map, including the models package
+func (g *GenOpts) defaultImports() map[string]string {
+ baseImport := g.LanguageOpts.baseImport(g.Target)
+ defaultImports := make(map[string]string, 50)
+
+ var modelsAlias, importPath string
+ if g.ExistingModels == "" {
+ // generated models
+ importPath = path.Join(
+ baseImport,
+ g.LanguageOpts.ManglePackagePath(g.ModelPackage, defaultModelsTarget))
+ modelsAlias = g.LanguageOpts.ManglePackageName(g.ModelPackage, defaultModelsTarget)
+ } else {
+ // external models
+ importPath = g.LanguageOpts.ManglePackagePath(g.ExistingModels, "")
+ modelsAlias = path.Base(defaultModelsTarget)
+ }
+ defaultImports[modelsAlias] = importPath
+
+ // resolve model representing an authenticated principal
+ alias, _, target := g.resolvePrincipal()
+ if alias == "" || target == g.ModelPackage || path.Base(target) == modelsAlias {
+ // if principal is specified with the models generation package, do not import any extra package
+ return defaultImports
+ }
+
+ if pth, _ := path.Split(target); pth != "" {
+ // if principal is specified with a path, assume this is a fully qualified package and generate this import
+ defaultImports[alias] = target
+ } else {
+ // if principal is specified with a relative path (no "/", e.g. internal.Principal), assume it is located in generated target
+ defaultImports[alias] = path.Join(baseImport, target)
+ }
+ return defaultImports
+}
+
+// initImports produces a default imports map with the specified root package for operations
+func (g *GenOpts) initImports(operationsPackage string) map[string]string {
+ baseImport := g.LanguageOpts.baseImport(g.Target)
+
+ imports := make(map[string]string, 50)
+ imports[g.LanguageOpts.ManglePackageName(operationsPackage, defaultOperationsTarget)] = path.Join(
+ baseImport,
+ g.LanguageOpts.ManglePackagePath(operationsPackage, defaultOperationsTarget))
+ return imports
+}
+
+// PrincipalAlias returns an aliased type to the principal
+func (g *GenOpts) PrincipalAlias() string {
+ _, principal, _ := g.resolvePrincipal()
+ return principal
+}
+
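+// resolvePrincipal splits the configured Principal into an import alias, an aliased
+// type expression and a package path, e.g. (hypothetical principal)
+// "github.com/example/auth.User" yields ("auth", "auth.User", "github.com/example/auth").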
+func (g *GenOpts) resolvePrincipal() (string, string, string) {
+ dotLocation := strings.LastIndex(g.Principal, ".")
+ if dotLocation < 0 {
+ return "", g.Principal, ""
+ }
+
+ // handle possible conflicts with injected principal package
+ // NOTE(fred): we do not check here for conflicts with packages created from operation tags, only standard imports
+ alias := deconflictPrincipal(importAlias(g.Principal[:dotLocation]))
+ return alias, alias + g.Principal[dotLocation:], g.Principal[:dotLocation]
+}
+
+func fileExists(target, name string) bool {
+ _, err := os.Stat(filepath.Join(target, name))
+ return !os.IsNotExist(err)
+}
+
+func gatherModels(specDoc *loads.Document, modelNames []string) (map[string]spec.Schema, error) {
+ modelNames = pruneEmpty(modelNames)
+ models, mnc := make(map[string]spec.Schema), len(modelNames)
+ defs := specDoc.Spec().Definitions
+
+ if mnc > 0 {
+ var unknownModels []string
+ for _, m := range modelNames {
+ _, ok := defs[m]
+ if !ok {
+ unknownModels = append(unknownModels, m)
+ }
+ }
+ if len(unknownModels) != 0 {
+ return nil, fmt.Errorf("unknown models: %s", strings.Join(unknownModels, ", "))
+ }
+ }
+ for k, v := range defs {
+ if mnc == 0 {
+ models[k] = v
+ }
+ for _, nm := range modelNames {
+ if k == nm {
+ models[k] = v
+ }
+ }
+ }
+ return models, nil
+}
+
+// titleOrDefault infers a name for the app from the title of the spec
+func titleOrDefault(specDoc *loads.Document, name, defaultName string) string {
+ if strings.TrimSpace(name) == "" {
+ if specDoc.Spec().Info != nil && strings.TrimSpace(specDoc.Spec().Info.Title) != "" {
+ name = specDoc.Spec().Info.Title
+ } else {
+ name = defaultName
+ }
+ }
+ return swag.ToGoName(name)
+}
+
+func mainNameOrDefault(specDoc *loads.Document, name, defaultName string) string {
+ // *_test won't do as main server name
+ return strings.TrimSuffix(titleOrDefault(specDoc, name, defaultName), "Test")
+}
+
+func appNameOrDefault(specDoc *loads.Document, name, defaultName string) string {
+ // *_test won't do as app names
+ name = strings.TrimSuffix(titleOrDefault(specDoc, name, defaultName), "Test")
+ if name == "" {
+ name = swag.ToGoName(defaultName)
+ }
+ return name
+}
+
+type opRef struct {
+ Method string
+ Path string
+ Key string
+ ID string
+ Op *spec.Operation
+}
+
+type opRefs []opRef
+
+func (o opRefs) Len() int { return len(o) }
+func (o opRefs) Swap(i, j int) { o[i], o[j] = o[j], o[i] }
+func (o opRefs) Less(i, j int) bool { return o[i].Key < o[j].Key }
+
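+// gatherOperations collects operations from the spec in a stable order, keyed by
+// operation ID. Operations without an ID, or whose ID is already taken by an
+// operation with a different method and path, fall back to a name derived from
+// the method and path.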
+func gatherOperations(specDoc *analysis.Spec, operationIDs []string) map[string]opRef {
+ operationIDs = pruneEmpty(operationIDs)
+ var oprefs opRefs
+
+ for method, pathItem := range specDoc.Operations() {
+ for path, operation := range pathItem {
+ vv := *operation
+ oprefs = append(oprefs, opRef{
+ Key: swag.ToGoName(strings.ToLower(method) + " " + swag.ToHumanNameTitle(path)),
+ Method: method,
+ Path: path,
+ ID: vv.ID,
+ Op: &vv,
+ })
+ }
+ }
+
+ sort.Sort(oprefs)
+
+ operations := make(map[string]opRef)
+ for _, opr := range oprefs {
+ nm := opr.ID
+ if nm == "" {
+ nm = opr.Key
+ }
+
+ oo, found := operations[nm]
+ if found && oo.Method != opr.Method && oo.Path != opr.Path {
+ nm = opr.Key
+ }
+ if len(operationIDs) == 0 || swag.ContainsStrings(operationIDs, opr.ID) || swag.ContainsStrings(operationIDs, nm) {
+ opr.ID = nm
+ opr.Op.ID = nm
+ operations[nm] = opr
+ }
+ }
+
+ return operations
+}
+
+func pruneEmpty(in []string) (out []string) {
+ for _, v := range in {
+ if v != "" {
+ out = append(out, v)
+ }
+ }
+ return
+}
+
+func trimBOM(in string) string {
+ return strings.Trim(in, "\xef\xbb\xbf")
+}
+
+// gatherSecuritySchemes produces a sorted representation from a map of spec security schemes
+func gatherSecuritySchemes(securitySchemes map[string]spec.SecurityScheme, appName, principal, receiver string, nullable bool) (security GenSecuritySchemes) {
+ for scheme, req := range securitySchemes {
+ isOAuth2 := strings.ToLower(req.Type) == "oauth2"
+ scopes := make([]string, 0, len(req.Scopes))
+ genScopes := make([]GenSecurityScope, 0, len(req.Scopes))
+ if isOAuth2 {
+ for k, v := range req.Scopes {
+ scopes = append(scopes, k)
+ genScopes = append(genScopes, GenSecurityScope{Name: k, Description: v})
+ }
+ sort.Strings(scopes)
+ }
+
+ security = append(security, GenSecurityScheme{
+ AppName: appName,
+ ID: scheme,
+ ReceiverName: receiver,
+ Name: req.Name,
+ IsBasicAuth: strings.ToLower(req.Type) == "basic",
+ IsAPIKeyAuth: strings.ToLower(req.Type) == "apikey",
+ IsOAuth2: isOAuth2,
+ Scopes: scopes,
+ ScopesDesc: genScopes,
+ Principal: principal,
+ Source: req.In,
+ // from original spec
+ Description: req.Description,
+ Type: strings.ToLower(req.Type),
+ In: req.In,
+ Flow: req.Flow,
+ AuthorizationURL: req.AuthorizationURL,
+ TokenURL: req.TokenURL,
+ Extensions: req.Extensions,
+
+ PrincipalIsNullable: nullable,
+ })
+ }
+ sort.Sort(security)
+ return
+}
+
+// securityRequirements just clones the original SecurityRequirements from either the spec
+// or an operation, without any modification. This is used to generate documentation.
+func securityRequirements(orig []map[string][]string) (result []analysis.SecurityRequirement) {
+ for _, r := range orig {
+ for k, v := range r {
+ result = append(result, analysis.SecurityRequirement{Name: k, Scopes: v})
+ }
+ }
+ // TODO(fred): sort this for stable generation
+ return
+}
+
+// gatherExtraSchemas produces a sorted list of extra schemas.
+//
+// ExtraSchemas are inlined types rendered in the same model file.
+func gatherExtraSchemas(extraMap map[string]GenSchema) (extras GenSchemaList) {
+ var extraKeys []string
+ for k := range extraMap {
+ extraKeys = append(extraKeys, k)
+ }
+ sort.Strings(extraKeys)
+ for _, k := range extraKeys {
+ // figure out if top level validations are needed
+ p := extraMap[k]
+ p.HasValidations = shallowValidationLookup(p)
+ extras = append(extras, p)
+ }
+ return
+}
+
+func getExtraSchemes(ext spec.Extensions) []string {
+ if ess, ok := ext.GetStringSlice(xSchemes); ok {
+ return ess
+ }
+ return nil
+}
+
+func gatherURISchemes(swsp *spec.Swagger, operation spec.Operation) ([]string, []string) {
+ var extraSchemes []string
+ extraSchemes = append(extraSchemes, getExtraSchemes(operation.Extensions)...)
+ extraSchemes = concatUnique(getExtraSchemes(swsp.Extensions), extraSchemes)
+ sort.Strings(extraSchemes)
+
+ schemes := concatUnique(swsp.Schemes, operation.Schemes)
+ sort.Strings(schemes)
+
+ return schemes, extraSchemes
+}
+
+func dumpData(data interface{}) error {
+ bb, err := json.MarshalIndent(data, "", " ")
+ if err != nil {
+ return err
+ }
+ fmt.Fprintln(os.Stdout, string(bb))
+ return nil
+}
+
+func importAlias(pkg string) string {
+ _, k := path.Split(pkg)
+ return k
+}
+
+// concatUnique concatenates collections of strings with deduplication
+func concatUnique(collections ...[]string) []string {
+ resultSet := make(map[string]struct{})
+ for _, c := range collections {
+ for _, i := range c {
+ if _, ok := resultSet[i]; !ok {
+ resultSet[i] = struct{}{}
+ }
+ }
+ }
+ result := make([]string, 0, len(resultSet))
+ for k := range resultSet {
+ result = append(result, k)
+ }
+ return result
+}
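+
+// Illustrative sketch (not from the upstream source): concatUnique only
+// deduplicates, it does not sort, because map iteration order is random;
+// callers such as gatherURISchemes sort the result themselves:
+//
+//	schemes := concatUnique([]string{"https", "http"}, []string{"http"})
+//	sort.Strings(schemes) // always yields [http https]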
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/spec.go b/vendor/github.com/go-swagger/go-swagger/generator/spec.go
new file mode 100644
index 000000000..e7399bb95
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/spec.go
@@ -0,0 +1,273 @@
+package generator
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log"
+ "os"
+ "path/filepath"
+
+ "github.com/go-openapi/analysis"
+ swaggererrors "github.com/go-openapi/errors"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/validate"
+
+ yamlv2 "gopkg.in/yaml.v2"
+)
+
+func (g *GenOpts) validateAndFlattenSpec() (*loads.Document, error) {
+ // Load spec document
+ specDoc, err := loads.Spec(g.Spec)
+ if err != nil {
+ return nil, err
+ }
+
+	// If the generator accepts definitions only, add a dummy swagger header to pass validation
+ if g.AcceptDefinitionsOnly {
+ specDoc, err = applyDefaultSwagger(specDoc)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ // Validate if needed
+ if g.ValidateSpec {
+ log.Printf("validating spec %v", g.Spec)
+ validationErrors := validate.Spec(specDoc, strfmt.Default)
+ if validationErrors != nil {
+			str := fmt.Sprintf("The swagger spec at %q is invalid against swagger specification %s. See errors:\n",
+ g.Spec, specDoc.Version())
+ for _, desc := range validationErrors.(*swaggererrors.CompositeError).Errors {
+ str += fmt.Sprintf("- %s\n", desc)
+ }
+ return nil, errors.New(str)
+ }
+ // TODO(fredbi): due to uncontrolled $ref state in spec, we need to reload the spec atm, or flatten won't
+ // work properly (validate expansion alters the $ref cache in go-openapi/spec)
+ specDoc, _ = loads.Spec(g.Spec)
+ }
+
+ // Flatten spec
+ //
+ // Some preprocessing is required before codegen
+ //
+ // This ensures at least that $ref's in the spec document are canonical,
+ // i.e all $ref are local to this file and point to some uniquely named definition.
+ //
+ // Default option is to ensure minimal flattening of $ref, bundling remote $refs and relocating arbitrary JSON
+ // pointers as definitions.
+ // This preprocessing may introduce duplicate names (e.g. remote $ref with same name). In this case, a definition
+ // suffixed with "OAIGen" is produced.
+ //
+	// Full flattening option further transforms the spec by moving every complex object (e.g. with some properties)
+ // as a standalone definition.
+ //
+	// Finally, an "expand spec" option is available. It is mostly useful for testing purposes.
+ //
+ // NOTE(fredbi): spec expansion may produce some unsupported constructs and is not yet protected against the
+ // following cases:
+	// - polymorphic types generation may fail with expansion (expansion defeats the reuse intent of the $ref in allOf)
+ // - name duplicates may occur and result in compilation failures
+ //
+ // The right place to fix these shortcomings is go-openapi/analysis.
+
+ g.FlattenOpts.BasePath = specDoc.SpecFilePath()
+ g.FlattenOpts.Spec = analysis.New(specDoc.Spec())
+
+ g.printFlattenOpts()
+
+ if err = analysis.Flatten(*g.FlattenOpts); err != nil {
+ return nil, err
+ }
+
+ // yields the preprocessed spec document
+ return specDoc, nil
+}
+
+func (g *GenOpts) analyzeSpec() (*loads.Document, *analysis.Spec, error) {
+ // load, validate and flatten
+ specDoc, err := g.validateAndFlattenSpec()
+ if err != nil {
+ return nil, nil, err
+ }
+
+ // spec preprocessing option
+ if g.PropertiesSpecOrder {
+ g.Spec = WithAutoXOrder(g.Spec)
+ specDoc, err = loads.Spec(g.Spec)
+ if err != nil {
+ return nil, nil, err
+ }
+ }
+
+ // analyze the spec
+ analyzed := analysis.New(specDoc.Spec())
+
+ return specDoc, analyzed, nil
+}
+
+func (g *GenOpts) printFlattenOpts() {
+ var preprocessingOption string
+ switch {
+ case g.FlattenOpts.Expand:
+ preprocessingOption = "expand"
+ case g.FlattenOpts.Minimal:
+ preprocessingOption = "minimal flattening"
+ default:
+ preprocessingOption = "full flattening"
+ }
+ log.Printf("preprocessing spec with option: %s", preprocessingOption)
+}
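+
+// Illustrative sketch (not from the upstream source): the three preprocessing
+// modes logged above correspond to fields on analysis.FlattenOpts:
+//
+//	minimal := &analysis.FlattenOpts{Minimal: true} // default: bundle remote $refs, relocate JSON pointers
+//	full := &analysis.FlattenOpts{Minimal: false}   // also hoist complex objects to standalone definitions
+//	expand := &analysis.FlattenOpts{Expand: true}   // expand all $refs (mostly for testing)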
+
+// findSwaggerSpec fetches a default swagger spec if none is provided
+func findSwaggerSpec(nm string) (string, error) {
+ specs := []string{"swagger.json", "swagger.yml", "swagger.yaml"}
+ if nm != "" {
+ specs = []string{nm}
+ }
+ var name string
+ for _, nn := range specs {
+ f, err := os.Stat(nn)
+ if err != nil {
+ if os.IsNotExist(err) {
+ continue
+ }
+ return "", err
+ }
+ if f.IsDir() {
+ return "", fmt.Errorf("%s is a directory", nn)
+ }
+ name = nn
+ break
+ }
+ if name == "" {
+ return "", errors.New("couldn't find a swagger spec")
+ }
+ return name, nil
+}
+
+// WithAutoXOrder amends the spec with x-order extensions that record the
+// order in which properties appear in the document (supports yaml documents only).
+func WithAutoXOrder(specPath string) string {
+ lookFor := func(ele interface{}, key string) (yamlv2.MapSlice, bool) {
+ if slice, ok := ele.(yamlv2.MapSlice); ok {
+ for _, v := range slice {
+ if v.Key == key {
+ if slice, ok := v.Value.(yamlv2.MapSlice); ok {
+ return slice, ok
+ }
+ }
+ }
+ }
+ return nil, false
+ }
+
+ var addXOrder func(interface{})
+ addXOrder = func(element interface{}) {
+ if props, ok := lookFor(element, "properties"); ok {
+ for i, prop := range props {
+ if pSlice, ok := prop.Value.(yamlv2.MapSlice); ok {
+ isObject := false
+ xOrderIndex := -1 // find if x-order already exists
+
+ for i, v := range pSlice {
+ if v.Key == "type" && v.Value == object {
+ isObject = true
+ }
+ if v.Key == xOrder {
+ xOrderIndex = i
+ break
+ }
+ }
+
+ if xOrderIndex > -1 { // override existing x-order
+ pSlice[xOrderIndex] = yamlv2.MapItem{Key: xOrder, Value: i}
+ } else { // append new x-order
+ pSlice = append(pSlice, yamlv2.MapItem{Key: xOrder, Value: i})
+ }
+ prop.Value = pSlice
+ props[i] = prop
+
+ if isObject {
+ addXOrder(pSlice)
+ }
+ }
+ }
+ }
+ }
+
+ data, err := swag.LoadFromFileOrHTTP(specPath)
+ if err != nil {
+ panic(err)
+ }
+
+ yamlDoc, err := BytesToYAMLv2Doc(data)
+ if err != nil {
+ panic(err)
+ }
+
+ if defs, ok := lookFor(yamlDoc, "definitions"); ok {
+ for _, def := range defs {
+ addXOrder(def.Value)
+ }
+ }
+
+ addXOrder(yamlDoc)
+
+ out, err := yamlv2.Marshal(yamlDoc)
+ if err != nil {
+ panic(err)
+ }
+
+ tmpDir, err := os.MkdirTemp("", "go-swagger-")
+ if err != nil {
+ panic(err)
+ }
+
+ tmpFile := filepath.Join(tmpDir, filepath.Base(specPath))
+ if err := os.WriteFile(tmpFile, out, 0600); err != nil {
+ panic(err)
+ }
+ return tmpFile
+}
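+
+// Hypothetical usage (not from the upstream source): WithAutoXOrder returns the
+// path to a temporary, annotated copy of the spec, which is then reloaded, as
+// analyzeSpec does above:
+//
+//	annotated := WithAutoXOrder("swagger.yaml")
+//	specDoc, err := loads.Spec(annotated)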
+
+// BytesToYAMLv2Doc converts a byte slice into a YAML document
+func BytesToYAMLv2Doc(data []byte) (interface{}, error) {
+ var canary map[interface{}]interface{} // validate this is an object and not a different type
+ if err := yamlv2.Unmarshal(data, &canary); err != nil {
+ return nil, err
+ }
+
+ var document yamlv2.MapSlice // preserve order that is present in the document
+ if err := yamlv2.Unmarshal(data, &document); err != nil {
+ return nil, err
+ }
+ return document, nil
+}
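+
+// Illustrative note (not from the upstream source): yamlv2.MapSlice preserves
+// key order, unlike a plain Go map, which is what makes x-order annotation work:
+//
+//	doc, _ := BytesToYAMLv2Doc([]byte("b: 1\na: 2\n"))
+//	slice := doc.(yamlv2.MapSlice) // slice[0].Key == "b", slice[1].Key == "a"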
+
+func applyDefaultSwagger(doc *loads.Document) (*loads.Document, error) {
+ // bake a minimal swagger spec to pass validation
+ swspec := doc.Spec()
+ if swspec.Swagger == "" {
+ swspec.Swagger = "2.0"
+ }
+ if swspec.Info == nil {
+ info := new(spec.Info)
+ info.Version = "0.0.0"
+ info.Title = "minimal"
+ swspec.Info = info
+ }
+ if swspec.Paths == nil {
+ swspec.Paths = &spec.Paths{}
+ }
+ // rewrite the document with the new addition
+ jazon, err := json.Marshal(swspec)
+ if err != nil {
+ return nil, err
+ }
+ return loads.Analyzed(jazon, swspec.Swagger)
+}
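+
+// For illustration (not from the upstream source): with AcceptDefinitionsOnly
+// set, a definitions-only document is padded to roughly this shape before
+// validation:
+//
+//	{
+//	  "swagger": "2.0",
+//	  "info": {"title": "minimal", "version": "0.0.0"},
+//	  "paths": {},
+//	  "definitions": {...}
+//	}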
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/structs.go b/vendor/github.com/go-swagger/go-swagger/generator/structs.go
new file mode 100644
index 000000000..522be1446
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/structs.go
@@ -0,0 +1,803 @@
+package generator
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "sort"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/spec"
+)
+
+// GenCommon contains common properties needed across
+// definitions, app and operations
+// TargetImportPath may be used by templates to import other (possibly
+// generated) packages in the generation path (e.g. relative to GOPATH).
+// TargetImportPath is NOT used by standard templates.
+type GenCommon struct {
+ Copyright string
+ TargetImportPath string
+}
+
+// GenDefinition contains all the properties to generate a
+// definition from a swagger spec
+type GenDefinition struct {
+ GenCommon
+ GenSchema
+ Package string
+ Imports map[string]string
+ DefaultImports map[string]string
+ ExtraSchemas GenSchemaList
+ DependsOn []string
+ External bool
+}
+
+// GenDefinitions represents a list of definitions to generate.
+// This implements a sort by definition name.
+type GenDefinitions []GenDefinition
+
+func (g GenDefinitions) Len() int { return len(g) }
+func (g GenDefinitions) Less(i, j int) bool { return g[i].Name < g[j].Name }
+func (g GenDefinitions) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+
+// GenSchemaList is a list of schemas for generation.
+//
+// It can be sorted by name to get a stable struct layout for
+// version control and such
+type GenSchemaList []GenSchema
+
+// GenSchema contains all the information needed to generate the code
+// for a schema
+type GenSchema struct {
+ resolvedType
+ sharedValidations
+ Example string
+ OriginalName string
+ Name string
+ Suffix string
+ Path string
+ ValueExpression string
+ IndexVar string
+ KeyVar string
+ Title string
+ Description string
+ Location string
+ ReceiverName string
+ Items *GenSchema
+ AllowsAdditionalItems bool
+ HasAdditionalItems bool
+ AdditionalItems *GenSchema
+ Object *GenSchema
+ XMLName string
+ CustomTag string
+ Properties GenSchemaList
+ AllOf GenSchemaList
+ HasAdditionalProperties bool
+ IsAdditionalProperties bool
+ AdditionalProperties *GenSchema
+ StrictAdditionalProperties bool
+ ReadOnly bool
+ IsVirtual bool
+ IsBaseType bool
+ HasBaseType bool
+ IsSubType bool
+ IsExported bool
+ DiscriminatorField string
+ DiscriminatorValue string
+ Discriminates map[string]string
+ Parents []string
+ IncludeValidator bool
+ IncludeModel bool
+ Default interface{}
+ WantsMarshalBinary bool // do we generate MarshalBinary interface?
+ StructTags []string
+ ExtraImports map[string]string // non-standard imports detected when using external types
+ ExternalDocs *spec.ExternalDocumentation
+}
+
+func (g GenSchema) renderMarshalTag() string {
+ if g.HasBaseType {
+ return "-"
+ }
+
+ var result strings.Builder
+
+ result.WriteString(g.OriginalName)
+
+ if !g.Required && g.IsEmptyOmitted {
+ result.WriteString(",omitempty")
+ }
+
+ if g.IsJSONString {
+ result.WriteString(",string")
+ }
+
+ return result.String()
+}
+
+// PrintTags takes care of rendering tags for a struct field
+func (g GenSchema) PrintTags() string {
+ tags := make(map[string]string, 3)
+ orderedTags := make([]string, 0, 3)
+
+ tags["json"] = g.renderMarshalTag()
+ orderedTags = append(orderedTags, "json")
+
+ if len(g.XMLName) > 0 {
+ if !g.Required && g.IsEmptyOmitted {
+ tags["xml"] = g.XMLName + ",omitempty"
+ } else {
+ tags["xml"] = g.XMLName
+ }
+ orderedTags = append(orderedTags, "xml")
+ }
+
+	// Add extra struct tags, but only if the tag hasn't already been set (e.g. example).
+	// Extra struct tags have the same value as the `json` tag.
+ for _, tag := range g.StructTags {
+ if _, exists := tags[tag]; exists {
+ // dedupe
+ continue
+ }
+
+ switch {
+ case tag == "example" && len(g.Example) > 0:
+ // only add example tag if it's contained in the struct tags
+ tags["example"] = g.Example // json representation of the example object
+ case tag == "description" && len(g.Description) > 0:
+ tags["description"] = g.Description
+ default:
+ tags[tag] = tags["json"]
+ }
+
+ orderedTags = append(orderedTags, tag)
+ }
+
+ // Assemble the tags in key value pairs with the value properly quoted.
+ kvPairs := make([]string, 0, len(orderedTags)+1)
+ for _, key := range orderedTags {
+ kvPairs = append(kvPairs, fmt.Sprintf("%s:%s", key, strconv.Quote(tags[key])))
+ }
+
+ if len(g.CustomTag) > 0 {
+ kvPairs = append(kvPairs, g.CustomTag)
+ }
+
+ // Join the key value pairs by a space.
+ completeTag := strings.Join(kvPairs, " ")
+
+ // If the values contain a backtick, we cannot render the tag using backticks because Go does not support
+ // escaping backticks in raw string literals.
+ valuesHaveBacktick := false
+ for _, value := range tags {
+ if !strconv.CanBackquote(value) {
+ valuesHaveBacktick = true
+ break
+ }
+ }
+
+ if !valuesHaveBacktick {
+ return fmt.Sprintf("`%s`", completeTag)
+ }
+
+ // We have to escape the tag again to put it in a literal with double quotes as the tag format uses double quotes.
+ return strconv.Quote(completeTag)
+}
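+
+// Illustrative sketch (not from the upstream source): a required property
+// "name" with an XML name and an extra "yaml" struct tag renders as:
+//
+//	g := GenSchema{OriginalName: "name", XMLName: "name", StructTags: []string{"yaml"}}
+//	g.Required = true
+//	// g.PrintTags() renders: `json:"name" xml:"name" yaml:"name"`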
+
+// UnderlyingType tells the go type or the aliased go type
+func (g GenSchema) UnderlyingType() string {
+ if g.IsAliased {
+ return g.AliasedType
+ }
+ return g.GoType
+}
+
+// ToString returns a string conversion expression for the schema
+func (g GenSchema) ToString() string {
+ return g.resolvedType.ToString(g.ValueExpression)
+}
+
+func (g GenSchemaList) Len() int { return len(g) }
+func (g GenSchemaList) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenSchemaList) Less(i, j int) bool {
+ a, okA := g[i].Extensions[xOrder].(float64)
+ b, okB := g[j].Extensions[xOrder].(float64)
+
+ // If both properties have x-order defined, then the one with lower x-order is smaller
+ if okA && okB {
+ return a < b
+ }
+
+ // If only the first property has x-order defined, then it is smaller
+ if okA {
+ return true
+ }
+
+ // If only the second property has x-order defined, then it is smaller
+ if okB {
+ return false
+ }
+
+ // If neither property has x-order defined, then the one with lower lexicographic name is smaller
+ return g[i].Name < g[j].Name
+}
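+
+// Illustrative note (not from the upstream source): given properties A with
+// x-order 1, B with x-order 0 and C without x-order, sort.Sort yields B, A, C;
+// x-order wins over the lexicographic fallback on Name.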
+
+type sharedValidations struct {
+ spec.SchemaValidations
+
+ HasValidations bool
+ HasContextValidations bool
+ Required bool
+ HasSliceValidations bool
+ ItemsEnum []interface{}
+
+ // NOTE: "patternProperties" and "dependencies" not supported by Swagger 2.0
+}
+
+// GenResponse represents a response object for code generation
+type GenResponse struct {
+ Package string
+ ModelsPackage string
+ ReceiverName string
+ Name string
+ Description string
+
+ IsSuccess bool
+
+ Code int
+ Method string
+ Path string
+ Headers GenHeaders
+ Schema *GenSchema
+ AllowsForStreaming bool
+
+ Imports map[string]string
+ DefaultImports map[string]string
+
+ Extensions map[string]interface{}
+
+ StrictResponders bool
+ OperationName string
+ Examples GenResponseExamples
+}
+
+// GenResponseExamples is a sortable collection []GenResponseExample
+type GenResponseExamples []GenResponseExample
+
+func (g GenResponseExamples) Len() int { return len(g) }
+func (g GenResponseExamples) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenResponseExamples) Less(i, j int) bool { return g[i].MediaType < g[j].MediaType }
+
+// GenResponseExample captures an example provided for a response for some mime type
+type GenResponseExample struct {
+ MediaType string
+ Example interface{}
+}
+
+// GenHeader represents a header on a response for code generation
+type GenHeader struct {
+ resolvedType
+ sharedValidations
+
+ Package string
+ ReceiverName string
+ IndexVar string
+
+ ID string
+ Name string
+ Path string
+ ValueExpression string
+
+ Title string
+ Description string
+ Default interface{}
+ HasDefault bool
+
+ CollectionFormat string
+
+ Child *GenItems
+ Parent *GenItems
+
+ Converter string
+ Formatter string
+
+ ZeroValue string
+}
+
+// ItemsDepth returns a string "items.items..." with as many items as the level of nesting of the array.
+// For a header object, it always returns "".
+func (h *GenHeader) ItemsDepth() string {
+ // NOTE: this is currently used by templates to generate explicit comments in nested structures
+ return ""
+}
+
+// ToString returns a string conversion expression for the header
+func (h GenHeader) ToString() string {
+ return h.resolvedType.ToString(h.ValueExpression)
+}
+
+// GenHeaders is a sorted collection of headers for codegen
+type GenHeaders []GenHeader
+
+func (g GenHeaders) Len() int { return len(g) }
+func (g GenHeaders) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenHeaders) Less(i, j int) bool { return g[i].Name < g[j].Name }
+
+// HasSomeDefaults returns true if at least one header has a default value set
+func (g GenHeaders) HasSomeDefaults() bool {
+ // NOTE: this is currently used by templates to avoid empty constructs
+ for _, header := range g {
+ if header.HasDefault {
+ return true
+ }
+ }
+ return false
+}
+
+// GenParameter is used to represent
+// a parameter or a header for code generation.
+type GenParameter struct {
+ resolvedType
+ sharedValidations
+
+ ID string
+ Name string
+ ModelsPackage string
+ Path string
+ ValueExpression string
+ IndexVar string
+ KeyVar string
+ ReceiverName string
+ Location string
+ Title string
+ Description string
+ Converter string
+ Formatter string
+
+ Schema *GenSchema
+
+ CollectionFormat string
+
+ Child *GenItems
+ Parent *GenItems
+
+ // Unused
+ // BodyParam *GenParameter
+
+ Default interface{}
+ HasDefault bool
+ ZeroValue string
+ AllowEmptyValue bool
+
+ // validation strategy for Body params, which may mix model and simple constructs.
+ // Distinguish the following cases:
+ // - HasSimpleBodyParams: body is an inline simple type
+	// - HasModelBodyParams: body is a model object
+ // - HasSimpleBodyItems: body is an inline array of simple type
+ // - HasModelBodyItems: body is an array of model objects
+ // - HasSimpleBodyMap: body is a map of simple objects (possibly arrays)
+ // - HasModelBodyMap: body is a map of model objects
+ HasSimpleBodyParams bool
+ HasModelBodyParams bool
+ HasSimpleBodyItems bool
+ HasModelBodyItems bool
+ HasSimpleBodyMap bool
+ HasModelBodyMap bool
+
+ Extensions map[string]interface{}
+}
+
+// IsQueryParam returns true when this parameter is a query param
+func (g *GenParameter) IsQueryParam() bool {
+ return g.Location == "query"
+}
+
+// IsPathParam returns true when this parameter is a path param
+func (g *GenParameter) IsPathParam() bool {
+ return g.Location == "path"
+}
+
+// IsFormParam returns true when this parameter is a form param
+func (g *GenParameter) IsFormParam() bool {
+ return g.Location == "formData"
+}
+
+// IsHeaderParam returns true when this parameter is a header param
+func (g *GenParameter) IsHeaderParam() bool {
+ return g.Location == "header"
+}
+
+// IsBodyParam returns true when this parameter is a body param
+func (g *GenParameter) IsBodyParam() bool {
+ return g.Location == "body"
+}
+
+// IsFileParam returns true when this parameter is a file param
+func (g *GenParameter) IsFileParam() bool {
+ return g.SwaggerType == "file"
+}
+
+// ItemsDepth returns a string "items.items..." with as many items as the level of nesting of the array.
+// For a parameter object, it always returns "".
+func (g *GenParameter) ItemsDepth() string {
+ // NOTE: this is currently used by templates to generate explicit comments in nested structures
+ return ""
+}
+
+// UnderlyingType tells the go type or the aliased go type
+func (g GenParameter) UnderlyingType() string {
+ return g.GoType
+}
+
+// ToString returns a string conversion expression for the parameter
+func (g GenParameter) ToString() string {
+ return g.resolvedType.ToString(g.ValueExpression)
+}
+
+// GenParameters represents a sorted parameter collection
+type GenParameters []GenParameter
+
+func (g GenParameters) Len() int { return len(g) }
+func (g GenParameters) Less(i, j int) bool { return g[i].Name < g[j].Name }
+func (g GenParameters) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+
+// HasSomeDefaults returns true if at least one parameter has a default value set
+func (g GenParameters) HasSomeDefaults() bool {
+ // NOTE: this is currently used by templates to avoid empty constructs
+ for _, param := range g {
+ if param.HasDefault {
+ return true
+ }
+ }
+ return false
+}
+
+// GenItems represents the collection items for a collection parameter
+type GenItems struct {
+ sharedValidations
+ resolvedType
+
+ Name string
+ Path string
+ ValueExpression string
+ CollectionFormat string
+ Child *GenItems
+ Parent *GenItems
+ Converter string
+ Formatter string
+
+ Location string
+ IndexVar string
+ KeyVar string
+
+	// instructs the generator to skip the splitting and parsing from CollectionFormat
+	SkipParse bool
+	// instructs the generator that some nested structure needs a higher-level loop index
+	NeedsIndex bool
+}
+
+// ItemsDepth returns a string "items.items..." with as many items as the level of nesting of the array.
+func (g *GenItems) ItemsDepth() string {
+ // NOTE: this is currently used by templates to generate explicit comments in nested structures
+ current := g
+ i := 1
+ for current.Parent != nil {
+ i++
+ current = current.Parent
+ }
+ return strings.Repeat("items.", i)
+}
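+
+// For illustration (not from the upstream source): two levels of nesting give
+// two "items." segments:
+//
+//	inner := &GenItems{Parent: &GenItems{}}
+//	_ = inner.ItemsDepth() // "items.items."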
+
+// UnderlyingType tells the go type or the aliased go type
+func (g GenItems) UnderlyingType() string {
+ return g.GoType
+}
+
+// ToString returns a string conversion expression for the item
+func (g GenItems) ToString() string {
+ return g.resolvedType.ToString(g.ValueExpression)
+}
+
+// GenOperationGroup represents a named (tagged) group of operations
+type GenOperationGroup struct {
+ GenCommon
+ Name string
+ Operations GenOperations
+
+ Summary string
+ Description string
+ Imports map[string]string
+ DefaultImports map[string]string
+ RootPackage string
+ GenOpts *GenOpts
+ PackageAlias string
+}
+
+// GenOperationGroups is a sorted collection of operation groups
+type GenOperationGroups []GenOperationGroup
+
+func (g GenOperationGroups) Len() int { return len(g) }
+func (g GenOperationGroups) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenOperationGroups) Less(i, j int) bool { return g[i].Name < g[j].Name }
+
+// GenStatusCodeResponses a container for status code responses
+type GenStatusCodeResponses []GenResponse
+
+func (g GenStatusCodeResponses) Len() int { return len(g) }
+func (g GenStatusCodeResponses) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenStatusCodeResponses) Less(i, j int) bool { return g[i].Code < g[j].Code }
+
+// MarshalJSON marshals these responses to json
+//
+// This is used by DumpData.
+func (g GenStatusCodeResponses) MarshalJSON() ([]byte, error) {
+ if g == nil {
+ return nil, nil
+ }
+ responses := make(GenStatusCodeResponses, len(g))
+ copy(responses, g)
+ // order marshalled output
+ sort.Sort(responses)
+
+ var buf bytes.Buffer
+ buf.WriteRune('{')
+ for i, v := range responses {
+ rb, err := json.Marshal(v)
+ if err != nil {
+ return nil, err
+ }
+ if i > 0 {
+ buf.WriteRune(',')
+ }
+ buf.WriteString(fmt.Sprintf("%q:", strconv.Itoa(v.Code)))
+ buf.Write(rb)
+ }
+ buf.WriteRune('}')
+ return buf.Bytes(), nil
+}
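+
+// Illustrative sketch (not from the upstream source): the output object is
+// keyed by status code, in ascending order:
+//
+//	g := GenStatusCodeResponses{{Code: 404}, {Code: 200}}
+//	b, _ := g.MarshalJSON() // {"200":{...},"404":{...}}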
+
+// UnmarshalJSON unmarshals this GenStatusCodeResponses from json
+func (g *GenStatusCodeResponses) UnmarshalJSON(data []byte) error {
+ var dd map[string]GenResponse
+ if err := json.Unmarshal(data, &dd); err != nil {
+ return err
+ }
+ var gg GenStatusCodeResponses
+ for _, v := range dd {
+ gg = append(gg, v)
+ }
+ sort.Sort(gg)
+ *g = gg
+ return nil
+}
+
+// GenOperation represents an operation for code generation
+type GenOperation struct {
+ GenCommon
+ Package string
+ ReceiverName string
+ Name string
+ Summary string
+ Description string
+ Method string
+ Path string
+ BasePath string
+ Tags []string
+ UseTags bool
+ RootPackage string
+
+ Imports map[string]string
+ DefaultImports map[string]string
+ ExtraSchemas GenSchemaList
+ PackageAlias string
+
+ Authorized bool
+ Security []GenSecurityRequirements // resolved security requirements for the operation
+ SecurityDefinitions GenSecuritySchemes
+ SecurityRequirements []analysis.SecurityRequirement // original security requirements as per the spec (for doc)
+ Principal string
+ PrincipalIsNullable bool
+
+ SuccessResponse *GenResponse
+ SuccessResponses []GenResponse
+ Responses GenStatusCodeResponses
+ DefaultResponse *GenResponse
+
+ Params GenParameters
+ QueryParams GenParameters
+ PathParams GenParameters
+ HeaderParams GenParameters
+ FormParams GenParameters
+ HasQueryParams bool
+ HasPathParams bool
+ HasHeaderParams bool
+ HasFormParams bool
+ HasFormValueParams bool
+ HasFileParams bool
+ HasBodyParams bool
+ HasStreamingResponse bool
+
+ Schemes []string
+ ExtraSchemes []string
+ SchemeOverrides []string // original scheme overrides for operation, as per spec (for doc)
+ ExtraSchemeOverrides []string // original extra scheme overrides for operation, as per spec (for doc)
+ ProducesMediaTypes []string
+ ConsumesMediaTypes []string
+ TimeoutName string
+
+ Extensions map[string]interface{}
+
+ StrictResponders bool
+ ExternalDocs *spec.ExternalDocumentation
+ Produces []string // original produces for operation (for doc)
+ Consumes []string // original consumes for operation (for doc)
+}
+
+// GenOperations represents a list of operations to generate.
+// This implements a sort by operation name.
+type GenOperations []GenOperation
+
+func (g GenOperations) Len() int { return len(g) }
+func (g GenOperations) Less(i, j int) bool { return g[i].Name < g[j].Name }
+func (g GenOperations) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+
+// GenApp represents all the meta data needed to generate an application
+// from a swagger spec
+type GenApp struct {
+ GenCommon
+ APIPackage string
+ ServerPackageAlias string
+ ImplementationPackageAlias string
+ APIPackageAlias string
+ Package string
+ ReceiverName string
+ Name string
+ Principal string
+ PrincipalIsNullable bool
+ DefaultConsumes string
+ DefaultProduces string
+ Host string
+ BasePath string
+ Info *spec.Info
+ ExternalDocs *spec.ExternalDocumentation
+ Tags []spec.Tag
+ Imports map[string]string
+ DefaultImports map[string]string
+ Schemes []string
+ ExtraSchemes []string
+ Consumes GenSerGroups
+ Produces GenSerGroups
+ SecurityDefinitions GenSecuritySchemes
+ SecurityRequirements []analysis.SecurityRequirement // original security requirements as per the spec (for doc)
+ Models []GenDefinition
+ Operations GenOperations
+ OperationGroups GenOperationGroups
+ SwaggerJSON string
+ // Embedded specs: this is important for when the generated server adds routes.
+ // NOTE: there is a distinct advantage to having this in runtime rather than generated code.
+ // We are not ever going to generate the router.
+ // If embedding spec is an issue (e.g. memory usage), this can be excluded with the --exclude-spec
+ // generation option. Alternative methods to serve spec (e.g. from disk, ...) may be implemented by
+ // adding a middleware to the generated API.
+ FlatSwaggerJSON string
+ ExcludeSpec bool
+ GenOpts *GenOpts
+}
+
+// UseGoStructFlags returns true when no strategy is specified or it is set to "go-flags"
+func (g *GenApp) UseGoStructFlags() bool {
+ if g.GenOpts == nil {
+ return true
+ }
+ return g.GenOpts.FlagStrategy == "" || g.GenOpts.FlagStrategy == "go-flags"
+}
+
+// UsePFlags returns true when the flag strategy is set to pflag
+func (g *GenApp) UsePFlags() bool {
+ return g.GenOpts != nil && strings.HasPrefix(g.GenOpts.FlagStrategy, "pflag")
+}
+
+// UseFlags returns true when the flag strategy is set to flag
+func (g *GenApp) UseFlags() bool {
+ return g.GenOpts != nil && strings.HasPrefix(g.GenOpts.FlagStrategy, "flag")
+}
+
+// UseIntermediateMode for https://wiki.mozilla.org/Security/Server_Side_TLS#Intermediate_compatibility_.28default.29
+func (g *GenApp) UseIntermediateMode() bool {
+ return g.GenOpts != nil && g.GenOpts.CompatibilityMode == "intermediate"
+}
+
+// UseModernMode for https://wiki.mozilla.org/Security/Server_Side_TLS#Modern_compatibility
+func (g *GenApp) UseModernMode() bool {
+ return g.GenOpts == nil || g.GenOpts.CompatibilityMode == "" || g.GenOpts.CompatibilityMode == "modern"
+}
+
+// GenSerGroups is a sorted representation of serializer groups
+type GenSerGroups []GenSerGroup
+
+func (g GenSerGroups) Len() int { return len(g) }
+func (g GenSerGroups) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenSerGroups) Less(i, j int) bool { return g[i].Name < g[j].Name }
+
+// GenSerGroup represents a group of serializers: this links a serializer to a list of
+// prioritized media types (mime).
+type GenSerGroup struct {
+ GenSerializer
+
+ // All media types for this serializer. The redundant representation allows for easier use in templates
+ AllSerializers GenSerializers
+}
+
+// GenSerializers is a sorted representation of serializers
+type GenSerializers []GenSerializer
+
+func (g GenSerializers) Len() int { return len(g) }
+func (g GenSerializers) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenSerializers) Less(i, j int) bool { return g[i].MediaType < g[j].MediaType }
+
+// GenSerializer represents a single serializer for a particular media type
+type GenSerializer struct {
+ AppName string // Application name
+ ReceiverName string
+ Name string // Name of the Producer/Consumer (e.g. json, yaml, txt, bin)
+ MediaType string // mime
+ Implementation string // func implementing the Producer/Consumer
+ Parameters []string // parameters supported by this serializer
+}
+
+// GenSecurityScheme represents a security scheme for code generation
+type GenSecurityScheme struct {
+ AppName string
+ ID string
+ Name string
+ ReceiverName string
+ IsBasicAuth bool
+ IsAPIKeyAuth bool
+ IsOAuth2 bool
+ Scopes []string
+ Source string
+ Principal string
+ PrincipalIsNullable bool
+
+ // from spec.SecurityScheme
+ Description string
+ Type string
+ In string
+ Flow string
+ AuthorizationURL string
+ TokenURL string
+ Extensions map[string]interface{}
+ ScopesDesc []GenSecurityScope
+}
+
+// GenSecuritySchemes is a sorted representation of security schemes
+type GenSecuritySchemes []GenSecurityScheme
+
+func (g GenSecuritySchemes) Len() int { return len(g) }
+func (g GenSecuritySchemes) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenSecuritySchemes) Less(i, j int) bool { return g[i].ID < g[j].ID }
+
+// GenSecurityRequirement represents a security requirement for an operation
+type GenSecurityRequirement struct {
+ Name string
+ Scopes []string
+}
+
+// GenSecurityScope represents a scope descriptor for an OAuth2 security scheme
+type GenSecurityScope struct {
+ Name string
+ Description string
+}
+
+// GenSecurityRequirements represents a compounded security requirement specification.
+// In a []GenSecurityRequirements complete requirements specification,
+// outer elements are interpreted as optional requirements (OR), and
+// inner elements are interpreted as jointly required (AND).
+type GenSecurityRequirements []GenSecurityRequirement
+
+func (g GenSecurityRequirements) Len() int { return len(g) }
+func (g GenSecurityRequirements) Swap(i, j int) { g[i], g[j] = g[j], g[i] }
+func (g GenSecurityRequirements) Less(i, j int) bool { return g[i].Name < g[j].Name }
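+
+// Illustrative example (not from the upstream source): "(apiKey AND oauth) OR
+// basic" would be represented as an outer slice (OR) of inner groups (AND):
+//
+//	requirements := []GenSecurityRequirements{
+//		{{Name: "apiKey"}, {Name: "oauth", Scopes: []string{"read"}}},
+//		{{Name: "basic"}},
+//	}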
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/support.go b/vendor/github.com/go-swagger/go-swagger/generator/support.go
new file mode 100644
index 000000000..df3996df4
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/support.go
@@ -0,0 +1,546 @@
+// Copyright 2015 go-swagger maintainers
+
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generator
+
+import (
+ "bytes"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log"
+ "path"
+ "path/filepath"
+ "sort"
+
+ "github.com/go-openapi/analysis"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+// GenerateServer generates a server application
+func GenerateServer(name string, modelNames, operationIDs []string, opts *GenOpts) error {
+ generator, err := newAppGenerator(name, modelNames, operationIDs, opts)
+ if err != nil {
+ return err
+ }
+ return generator.Generate()
+}
+
+// GenerateSupport generates the supporting files for an API
+func GenerateSupport(name string, modelNames, operationIDs []string, opts *GenOpts) error {
+ generator, err := newAppGenerator(name, modelNames, operationIDs, opts)
+ if err != nil {
+ return err
+ }
+ return generator.GenerateSupport(nil)
+}
+
+// GenerateMarkdown generates markdown documentation for a swagger specification
+func GenerateMarkdown(output string, modelNames, operationIDs []string, opts *GenOpts) error {
+ if output == "." || output == "" {
+ output = "markdown.md"
+ }
+
+ if err := opts.EnsureDefaults(); err != nil {
+ return err
+ }
+ MarkdownSectionOpts(opts, output)
+
+ generator, err := newAppGenerator("", modelNames, operationIDs, opts)
+ if err != nil {
+ return err
+ }
+
+ return generator.GenerateMarkdown()
+}
+
+func newAppGenerator(name string, modelNames, operationIDs []string, opts *GenOpts) (*appGenerator, error) {
+ if err := opts.CheckOpts(); err != nil {
+ return nil, err
+ }
+
+ if err := opts.setTemplates(); err != nil {
+ return nil, err
+ }
+
+ specDoc, analyzed, err := opts.analyzeSpec()
+ if err != nil {
+ return nil, err
+ }
+
+ models, err := gatherModels(specDoc, modelNames)
+ if err != nil {
+ return nil, err
+ }
+
+ operations := gatherOperations(analyzed, operationIDs)
+
+ if len(operations) == 0 && !opts.IgnoreOperations {
+ return nil, errors.New("no operations were selected")
+ }
+
+ opts.Name = appNameOrDefault(specDoc, name, defaultServerName)
+ if opts.IncludeMain && opts.MainPackage == "" {
+ // default target for the generated main
+ opts.MainPackage = swag.ToCommandName(mainNameOrDefault(specDoc, name, defaultServerName) + "-server")
+ }
+
+ apiPackage := opts.LanguageOpts.ManglePackagePath(opts.APIPackage, defaultOperationsTarget)
+ return &appGenerator{
+ Name: opts.Name,
+ Receiver: "o",
+ SpecDoc: specDoc,
+ Analyzed: analyzed,
+ Models: models,
+ Operations: operations,
+ Target: opts.Target,
+ DumpData: opts.DumpData,
+ Package: opts.LanguageOpts.ManglePackageName(apiPackage, defaultOperationsTarget),
+ APIPackage: apiPackage,
+ ModelsPackage: opts.LanguageOpts.ManglePackagePath(opts.ModelPackage, defaultModelsTarget),
+ ServerPackage: opts.LanguageOpts.ManglePackagePath(opts.ServerPackage, defaultServerTarget),
+ ClientPackage: opts.LanguageOpts.ManglePackagePath(opts.ClientPackage, defaultClientTarget),
+ OperationsPackage: filepath.Join(opts.LanguageOpts.ManglePackagePath(opts.ServerPackage, defaultServerTarget), apiPackage),
+ Principal: opts.PrincipalAlias(),
+ DefaultScheme: opts.DefaultScheme,
+ DefaultProduces: opts.DefaultProduces,
+ DefaultConsumes: opts.DefaultConsumes,
+ GenOpts: opts,
+ }, nil
+}
+
+type appGenerator struct {
+ Name string
+ Receiver string
+ SpecDoc *loads.Document
+ Analyzed *analysis.Spec
+ Package string
+ APIPackage string
+ ModelsPackage string
+ ServerPackage string
+ ClientPackage string
+ OperationsPackage string
+ MainPackage string
+ Principal string
+ Models map[string]spec.Schema
+ Operations map[string]opRef
+ Target string
+ DumpData bool
+ DefaultScheme string
+ DefaultProduces string
+ DefaultConsumes string
+ GenOpts *GenOpts
+}
+
+func (a *appGenerator) Generate() error {
+ app, err := a.makeCodegenApp()
+ if err != nil {
+ return err
+ }
+
+ if a.DumpData {
+ return dumpData(app)
+ }
+
+	// NOTE: relative to the previous implementation, which used channel-based IPC,
+	// concurrent execution was removed: the FuncMap is shared and templates are now
+	// lazily loaded, which would otherwise cause unguarded concurrent map access.
+ if a.GenOpts.IncludeModel {
+ log.Printf("rendering %d models", len(app.Models))
+ for _, md := range app.Models {
+ mod := md
+ mod.IncludeModel = true
+ mod.IncludeValidator = a.GenOpts.IncludeValidator
+ if err := a.GenOpts.renderDefinition(&mod); err != nil {
+ return err
+ }
+ }
+ }
+
+ if a.GenOpts.IncludeHandler {
+ log.Printf("rendering %d operation groups (tags)", app.OperationGroups.Len())
+ for _, g := range app.OperationGroups {
+ opg := g
+ log.Printf("rendering %d operations for %s", opg.Operations.Len(), opg.Name)
+ for _, p := range opg.Operations {
+ op := p
+ if err := a.GenOpts.renderOperation(&op); err != nil {
+ return err
+ }
+ }
+ // optional OperationGroups templates generation
+ if err := a.GenOpts.renderOperationGroup(&opg); err != nil {
+ return fmt.Errorf("error while rendering operation group: %v", err)
+ }
+ }
+ }
+
+ if a.GenOpts.IncludeSupport {
+ log.Printf("rendering support")
+ if err := a.GenerateSupport(&app); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (a *appGenerator) GenerateSupport(ap *GenApp) error {
+ app := ap
+ if ap == nil {
+ // allows for calling GenerateSupport standalone
+ ca, err := a.makeCodegenApp()
+ if err != nil {
+ return err
+ }
+ app = &ca
+ }
+
+ baseImport := a.GenOpts.LanguageOpts.baseImport(a.Target)
+ serverPath := path.Join(baseImport,
+ a.GenOpts.LanguageOpts.ManglePackagePath(a.ServerPackage, defaultServerTarget))
+
+ pkgAlias := deconflictPkg(importAlias(serverPath), renameServerPackage)
+ app.DefaultImports[pkgAlias] = serverPath
+ app.ServerPackageAlias = pkgAlias
+
+ // add client import for cli generation
+ clientPath := path.Join(baseImport,
+ a.GenOpts.LanguageOpts.ManglePackagePath(a.ClientPackage, defaultClientTarget))
+ clientPkgAlias := importAlias(clientPath)
+ app.DefaultImports[clientPkgAlias] = clientPath
+
+ return a.GenOpts.renderApplication(app)
+}
+
+func (a *appGenerator) GenerateMarkdown() error {
+ app, err := a.makeCodegenApp()
+ if err != nil {
+ return err
+ }
+
+ return a.GenOpts.renderApplication(&app)
+}
+
+func (a *appGenerator) makeSecuritySchemes() GenSecuritySchemes {
+ requiredSecuritySchemes := make(map[string]spec.SecurityScheme, len(a.Analyzed.RequiredSecuritySchemes()))
+ for _, scheme := range a.Analyzed.RequiredSecuritySchemes() {
+ if req, ok := a.SpecDoc.Spec().SecurityDefinitions[scheme]; ok && req != nil {
+ requiredSecuritySchemes[scheme] = *req
+ }
+ }
+ return gatherSecuritySchemes(requiredSecuritySchemes, a.Name, a.Principal, a.Receiver, a.GenOpts.PrincipalIsNullable())
+}
+
+func (a *appGenerator) makeCodegenApp() (GenApp, error) {
+ log.Println("building a plan for generation")
+
+ sw := a.SpecDoc.Spec()
+ receiver := a.Receiver
+
+ consumes, _ := a.makeConsumes()
+ produces, _ := a.makeProduces()
+ security := a.makeSecuritySchemes()
+
+ log.Println("generation target", a.Target)
+
+ baseImport := a.GenOpts.LanguageOpts.baseImport(a.Target)
+ defaultImports := a.GenOpts.defaultImports()
+
+ imports := make(map[string]string, 50)
+ alias := deconflictPkg(a.GenOpts.LanguageOpts.ManglePackageName(a.OperationsPackage, defaultOperationsTarget), renameAPIPackage)
+ imports[alias] = path.Join(
+ baseImport,
+ a.GenOpts.LanguageOpts.ManglePackagePath(a.OperationsPackage, defaultOperationsTarget))
+
+ implAlias := ""
+ if a.GenOpts.ImplementationPackage != "" {
+ implAlias = deconflictPkg(a.GenOpts.LanguageOpts.ManglePackageName(a.GenOpts.ImplementationPackage, defaultImplementationTarget), renameImplementationPackage)
+ imports[implAlias] = a.GenOpts.ImplementationPackage
+ }
+
+ log.Printf("planning definitions (found: %d)", len(a.Models))
+
+ genModels := make(GenDefinitions, 0, len(a.Models))
+ for mn, m := range a.Models {
+ model, err := makeGenDefinition(
+ mn,
+ a.ModelsPackage,
+ m,
+ a.SpecDoc,
+ a.GenOpts,
+ )
+ if err != nil {
+ return GenApp{}, fmt.Errorf("error in model %s while planning definitions: %v", mn, err)
+ }
+ if model != nil {
+ if !model.External {
+ genModels = append(genModels, *model)
+ }
+
+ // Copy model imports to operation imports
+ // TODO(fredbi): mangle model pkg aliases
+ for alias, pkg := range model.Imports {
+ target := a.GenOpts.LanguageOpts.ManglePackageName(alias, "")
+ imports[target] = pkg
+ }
+ }
+ }
+ sort.Sort(genModels)
+
+ log.Printf("planning operations (found: %d)", len(a.Operations))
+
+ genOps := make(GenOperations, 0, len(a.Operations))
+ for operationName, opp := range a.Operations {
+ o := opp.Op
+ o.ID = operationName
+
+ bldr := codeGenOpBuilder{
+ ModelsPackage: a.ModelsPackage,
+ Principal: a.GenOpts.PrincipalAlias(),
+ Target: a.Target,
+ DefaultImports: defaultImports,
+ Imports: imports,
+ DefaultScheme: a.DefaultScheme,
+ Doc: a.SpecDoc,
+ Analyzed: a.Analyzed,
+ BasePath: a.SpecDoc.BasePath(),
+ GenOpts: a.GenOpts,
+ Name: operationName,
+ Operation: *o,
+ Method: opp.Method,
+ Path: opp.Path,
+ IncludeValidator: a.GenOpts.IncludeValidator,
+ APIPackage: a.APIPackage, // defaults to main operations package
+ DefaultProduces: a.DefaultProduces,
+ DefaultConsumes: a.DefaultConsumes,
+ }
+
+ tag, tags, ok := bldr.analyzeTags()
+ if !ok {
+ continue // operation filtered according to CLI params
+ }
+
+ bldr.Authed = len(a.Analyzed.SecurityRequirementsFor(o)) > 0
+ bldr.Security = a.Analyzed.SecurityRequirementsFor(o)
+ bldr.SecurityDefinitions = a.Analyzed.SecurityDefinitionsFor(o)
+ bldr.RootAPIPackage = a.GenOpts.LanguageOpts.ManglePackageName(a.ServerPackage, defaultServerTarget)
+
+ st := o.Tags
+ if a.GenOpts != nil {
+ st = a.GenOpts.Tags
+ }
+ intersected := intersectTags(o.Tags, st)
+ if len(st) > 0 && len(intersected) == 0 {
+ continue
+ }
+
+ op, err := bldr.MakeOperation()
+ if err != nil {
+ return GenApp{}, err
+ }
+
+ op.ReceiverName = receiver
+ op.Tags = tags // ordered tags for this operation, possibly filtered by CLI params
+ genOps = append(genOps, op)
+
+ if !a.GenOpts.SkipTagPackages && tag != "" {
+ importPath := filepath.ToSlash(
+ path.Join(
+ baseImport,
+ a.GenOpts.LanguageOpts.ManglePackagePath(a.OperationsPackage, defaultOperationsTarget),
+ a.GenOpts.LanguageOpts.ManglePackageName(bldr.APIPackage, defaultOperationsTarget),
+ ))
+ defaultImports[bldr.APIPackageAlias] = importPath
+ }
+ }
+ sort.Sort(genOps)
+
+ opsGroupedByPackage := make(map[string]GenOperations, len(genOps))
+ for _, operation := range genOps {
+ opsGroupedByPackage[operation.PackageAlias] = append(opsGroupedByPackage[operation.PackageAlias], operation)
+ }
+
+ log.Printf("grouping operations into packages (packages: %d)", len(opsGroupedByPackage))
+
+ opGroups := make(GenOperationGroups, 0, len(opsGroupedByPackage))
+ for k, v := range opsGroupedByPackage {
+		log.Printf("operations for package %q (found: %d)", k, len(v))
+ sort.Sort(v)
+ // trim duplicate extra schemas within the same package
+ vv := make(GenOperations, 0, len(v))
+ seenExtraSchema := make(map[string]bool)
+ for _, op := range v {
+ uniqueExtraSchemas := make(GenSchemaList, 0, len(op.ExtraSchemas))
+ for _, xs := range op.ExtraSchemas {
+ if _, alreadyThere := seenExtraSchema[xs.Name]; !alreadyThere {
+ seenExtraSchema[xs.Name] = true
+ uniqueExtraSchemas = append(uniqueExtraSchemas, xs)
+ }
+ }
+ op.ExtraSchemas = uniqueExtraSchemas
+ vv = append(vv, op)
+ }
+ var pkg string
+ if len(vv) > 0 {
+ pkg = vv[0].Package
+ } else {
+ pkg = k
+ }
+
+ opGroup := GenOperationGroup{
+ GenCommon: GenCommon{
+ Copyright: a.GenOpts.Copyright,
+ TargetImportPath: baseImport,
+ },
+ Name: pkg,
+ PackageAlias: k,
+ Operations: vv,
+ DefaultImports: defaultImports,
+ Imports: imports,
+ RootPackage: a.APIPackage,
+ GenOpts: a.GenOpts,
+ }
+ opGroups = append(opGroups, opGroup)
+ }
+ sort.Sort(opGroups)
+
+	log.Println("planning metadata and facades")
+
+ var collectedSchemes, extraSchemes []string
+ for _, op := range genOps {
+ collectedSchemes = concatUnique(collectedSchemes, op.Schemes)
+ extraSchemes = concatUnique(extraSchemes, op.ExtraSchemes)
+ }
+ sort.Strings(collectedSchemes)
+ sort.Strings(extraSchemes)
+
+ host := "localhost"
+ if sw.Host != "" {
+ host = sw.Host
+ }
+
+ basePath := "/"
+ if sw.BasePath != "" {
+ basePath = sw.BasePath
+ }
+
+ jsonb, _ := json.MarshalIndent(a.SpecDoc.OrigSpec(), "", " ")
+ flatjsonb, _ := json.MarshalIndent(a.SpecDoc.Spec(), "", " ")
+
+ return GenApp{
+ GenCommon: GenCommon{
+ Copyright: a.GenOpts.Copyright,
+ TargetImportPath: baseImport,
+ },
+ APIPackage: a.GenOpts.LanguageOpts.ManglePackageName(a.ServerPackage, defaultServerTarget),
+ APIPackageAlias: alias,
+ ImplementationPackageAlias: implAlias,
+ Package: a.Package,
+ ReceiverName: receiver,
+ Name: a.Name,
+ Host: host,
+ BasePath: basePath,
+ Schemes: schemeOrDefault(collectedSchemes, a.DefaultScheme),
+ ExtraSchemes: extraSchemes,
+ ExternalDocs: trimExternalDoc(sw.ExternalDocs),
+ Tags: trimTags(sw.Tags),
+ Info: trimInfo(sw.Info),
+ Consumes: consumes,
+ Produces: produces,
+ DefaultConsumes: a.DefaultConsumes,
+ DefaultProduces: a.DefaultProduces,
+ DefaultImports: defaultImports,
+ Imports: imports,
+ SecurityDefinitions: security,
+ SecurityRequirements: securityRequirements(a.SpecDoc.Spec().Security), // top level securityRequirements
+ Models: genModels,
+ Operations: genOps,
+ OperationGroups: opGroups,
+ Principal: a.GenOpts.PrincipalAlias(),
+ SwaggerJSON: generateReadableSpec(jsonb),
+ FlatSwaggerJSON: generateReadableSpec(flatjsonb),
+ ExcludeSpec: a.GenOpts.ExcludeSpec,
+ GenOpts: a.GenOpts,
+
+ PrincipalIsNullable: a.GenOpts.PrincipalIsNullable(),
+ }, nil
+}
+
+// generateReadableSpec renders the swagger JSON spec as a string instead of bytes.
+// The only character that needs to be escaped is the '`' symbol, since it cannot be
+// escaped in a Go raw string literal quoted as `string data`. The function doesn't
+// care about the beginning or the end of the string it escapes, since all data that
+// needs escaping is always in the middle of the swagger spec.
+func generateReadableSpec(spec []byte) string {
+ buf := &bytes.Buffer{}
+ for _, b := range string(spec) {
+ if b == '`' {
+ buf.WriteString("`+\"`\"+`")
+ } else {
+ buf.WriteRune(b)
+ }
+ }
+ return buf.String()
+}
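+
+// For illustration (not from the upstream source): each backtick becomes a
+// quoted concatenation so the spec can live inside a raw string literal:
+//
+//	s := generateReadableSpec([]byte("a `b` c"))
+//	// s == "a `+\"`\"+`b`+\"`\"+` c"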
+
+func trimExternalDoc(in *spec.ExternalDocumentation) *spec.ExternalDocumentation {
+ if in == nil {
+ return nil
+ }
+
+ return &spec.ExternalDocumentation{
+ URL: in.URL,
+ Description: trimBOM(in.Description),
+ }
+}
+
+func trimInfo(in *spec.Info) *spec.Info {
+ if in == nil {
+ return nil
+ }
+
+ return &spec.Info{
+ InfoProps: spec.InfoProps{
+ Contact: in.Contact,
+ Title: trimBOM(in.Title),
+ Description: trimBOM(in.Description),
+ TermsOfService: trimBOM(in.TermsOfService),
+ License: in.License,
+ Version: in.Version,
+ },
+ VendorExtensible: in.VendorExtensible,
+ }
+}
+
+func trimTags(in []spec.Tag) []spec.Tag {
+ if in == nil {
+ return nil
+ }
+
+ tags := make([]spec.Tag, 0, len(in))
+
+ for _, tag := range in {
+ tags = append(tags, spec.Tag{
+ TagProps: spec.TagProps{
+ Name: tag.Name,
+ Description: trimBOM(tag.Description),
+ ExternalDocs: trimExternalDoc(tag.ExternalDocs),
+ },
+ })
+ }
+
+ return tags
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/template_repo.go b/vendor/github.com/go-swagger/go-swagger/generator/template_repo.go
new file mode 100644
index 000000000..e78ae602a
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/template_repo.go
@@ -0,0 +1,855 @@
+package generator
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "math"
+ "os"
+ "path"
+ "path/filepath"
+ "reflect"
+ "strconv"
+ "strings"
+ "sync"
+ "text/template"
+ "text/template/parse"
+ "unicode"
+
+ "log"
+
+ "github.com/Masterminds/sprig/v3"
+ "github.com/go-openapi/inflect"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/swag"
+ "github.com/kr/pretty"
+)
+
+var (
+ assets map[string][]byte
+ protectedTemplates map[string]bool
+
+ // FuncMapFunc yields a map with all functions for templates
+ FuncMapFunc func(*LanguageOpts) template.FuncMap
+
+ templates *Repository
+
+ docFormat map[string]string
+)
+
+func initTemplateRepo() {
+ FuncMapFunc = DefaultFuncMap
+
+ // this makes the ToGoName func behave with the special
+ // prefixing rule above
+ swag.GoNamePrefixFunc = prefixForName
+
+ assets = defaultAssets()
+ protectedTemplates = defaultProtectedTemplates()
+ templates = NewRepository(FuncMapFunc(DefaultLanguageFunc()))
+
+ docFormat = map[string]string{
+ "binary": "binary (byte stream)",
+ "byte": "byte (base64 string)",
+ }
+}
+
+// DefaultFuncMap yields a map with default functions for use in the templates.
+// These are available in every template
+func DefaultFuncMap(lang *LanguageOpts) template.FuncMap {
+ f := sprig.TxtFuncMap()
+ extra := template.FuncMap{
+ "pascalize": pascalize,
+ "camelize": swag.ToJSONName,
+ "varname": lang.MangleVarName,
+ "humanize": swag.ToHumanNameLower,
+ "snakize": lang.MangleFileName,
+ "toPackagePath": func(name string) string {
+ return filepath.FromSlash(lang.ManglePackagePath(name, ""))
+ },
+ "toPackage": func(name string) string {
+ return lang.ManglePackagePath(name, "")
+ },
+ "toPackageName": func(name string) string {
+ return lang.ManglePackageName(name, "")
+ },
+ "dasherize": swag.ToCommandName,
+ "pluralizeFirstWord": pluralizeFirstWord,
+ "json": asJSON,
+ "prettyjson": asPrettyJSON,
+ "hasInsecure": func(arg []string) bool {
+ return swag.ContainsStringsCI(arg, "http") || swag.ContainsStringsCI(arg, "ws")
+ },
+ "hasSecure": func(arg []string) bool {
+ return swag.ContainsStringsCI(arg, "https") || swag.ContainsStringsCI(arg, "wss")
+ },
+ "dropPackage": dropPackage,
+ "containsPkgStr": containsPkgStr,
+ "contains": swag.ContainsStrings,
+ "padSurround": padSurround,
+ "joinFilePath": filepath.Join,
+ "joinPath": path.Join,
+ "comment": padComment,
+ "blockcomment": blockComment,
+ "inspect": pretty.Sprint,
+ "cleanPath": path.Clean,
+ "mediaTypeName": mediaMime,
+ "arrayInitializer": lang.arrayInitializer,
+ "hasPrefix": strings.HasPrefix,
+ "stringContains": strings.Contains,
+ "imports": lang.imports,
+ "dict": dict,
+ "isInteger": isInteger,
+ "escapeBackticks": func(arg string) string {
+ return strings.ReplaceAll(arg, "`", "`+\"`\"+`")
+ },
+ "paramDocType": func(param GenParameter) string {
+ return resolvedDocType(param.SwaggerType, param.SwaggerFormat, param.Child)
+ },
+ "headerDocType": func(header GenHeader) string {
+ return resolvedDocType(header.SwaggerType, header.SwaggerFormat, header.Child)
+ },
+ "schemaDocType": func(in interface{}) string {
+ switch schema := in.(type) {
+ case GenSchema:
+ return resolvedDocSchemaType(schema.SwaggerType, schema.SwaggerFormat, schema.Items)
+ case *GenSchema:
+ if schema == nil {
+ return ""
+ }
+ return resolvedDocSchemaType(schema.SwaggerType, schema.SwaggerFormat, schema.Items)
+ case GenDefinition:
+ return resolvedDocSchemaType(schema.SwaggerType, schema.SwaggerFormat, schema.Items)
+ case *GenDefinition:
+ if schema == nil {
+ return ""
+ }
+ return resolvedDocSchemaType(schema.SwaggerType, schema.SwaggerFormat, schema.Items)
+ default:
+ panic("dev error: schemaDocType should be called with GenSchema or GenDefinition")
+ }
+ },
+ "schemaDocMapType": func(schema GenSchema) string {
+ return resolvedDocElemType("object", schema.SwaggerFormat, &schema.resolvedType)
+ },
+ "docCollectionFormat": resolvedDocCollectionFormat,
+ "trimSpace": strings.TrimSpace,
+ "httpStatus": httpStatus,
+ "cleanupEnumVariant": cleanupEnumVariant,
+ "gt0": gt0,
+ }
+
+ for k, v := range extra {
+ f[k] = v
+ }
+
+ return f
+}
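+
+// Hypothetical usage (not from the upstream source): the func map plugs
+// directly into text/template:
+//
+//	tpl := template.Must(template.New("t").
+//		Funcs(DefaultFuncMap(DefaultLanguageFunc())).
+//		Parse(`{{ pascalize "user id" }}`))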
+
+func defaultAssets() map[string][]byte {
+ return map[string][]byte{
+ // schema validation templates
+ "validation/primitive.gotmpl": MustAsset("templates/validation/primitive.gotmpl"),
+ "validation/customformat.gotmpl": MustAsset("templates/validation/customformat.gotmpl"),
+ "validation/structfield.gotmpl": MustAsset("templates/validation/structfield.gotmpl"),
+ "structfield.gotmpl": MustAsset("templates/structfield.gotmpl"),
+ "schemavalidator.gotmpl": MustAsset("templates/schemavalidator.gotmpl"),
+ "schemapolymorphic.gotmpl": MustAsset("templates/schemapolymorphic.gotmpl"),
+ "schemaembedded.gotmpl": MustAsset("templates/schemaembedded.gotmpl"),
+ "validation/minimum.gotmpl": MustAsset("templates/validation/minimum.gotmpl"),
+ "validation/maximum.gotmpl": MustAsset("templates/validation/maximum.gotmpl"),
+ "validation/multipleOf.gotmpl": MustAsset("templates/validation/multipleOf.gotmpl"),
+
+ // schema serialization templates
+ "additionalpropertiesserializer.gotmpl": MustAsset("templates/serializers/additionalpropertiesserializer.gotmpl"),
+ "aliasedserializer.gotmpl": MustAsset("templates/serializers/aliasedserializer.gotmpl"),
+ "allofserializer.gotmpl": MustAsset("templates/serializers/allofserializer.gotmpl"),
+ "basetypeserializer.gotmpl": MustAsset("templates/serializers/basetypeserializer.gotmpl"),
+ "marshalbinaryserializer.gotmpl": MustAsset("templates/serializers/marshalbinaryserializer.gotmpl"),
+ "schemaserializer.gotmpl": MustAsset("templates/serializers/schemaserializer.gotmpl"),
+ "subtypeserializer.gotmpl": MustAsset("templates/serializers/subtypeserializer.gotmpl"),
+ "tupleserializer.gotmpl": MustAsset("templates/serializers/tupleserializer.gotmpl"),
+
+ // schema generation template
+ "docstring.gotmpl": MustAsset("templates/docstring.gotmpl"),
+ "schematype.gotmpl": MustAsset("templates/schematype.gotmpl"),
+ "schemabody.gotmpl": MustAsset("templates/schemabody.gotmpl"),
+ "schema.gotmpl": MustAsset("templates/schema.gotmpl"),
+ "model.gotmpl": MustAsset("templates/model.gotmpl"),
+ "header.gotmpl": MustAsset("templates/header.gotmpl"),
+
+ // simple schema generation helpers templates
+ "simpleschema/defaultsvar.gotmpl": MustAsset("templates/simpleschema/defaultsvar.gotmpl"),
+ "simpleschema/defaultsinit.gotmpl": MustAsset("templates/simpleschema/defaultsinit.gotmpl"),
+
+ "swagger_json_embed.gotmpl": MustAsset("templates/swagger_json_embed.gotmpl"),
+
+ // server templates
+ "server/parameter.gotmpl": MustAsset("templates/server/parameter.gotmpl"),
+ "server/urlbuilder.gotmpl": MustAsset("templates/server/urlbuilder.gotmpl"),
+ "server/responses.gotmpl": MustAsset("templates/server/responses.gotmpl"),
+ "server/operation.gotmpl": MustAsset("templates/server/operation.gotmpl"),
+ "server/builder.gotmpl": MustAsset("templates/server/builder.gotmpl"),
+ "server/server.gotmpl": MustAsset("templates/server/server.gotmpl"),
+ "server/configureapi.gotmpl": MustAsset("templates/server/configureapi.gotmpl"),
+ "server/autoconfigureapi.gotmpl": MustAsset("templates/server/autoconfigureapi.gotmpl"),
+ "server/main.gotmpl": MustAsset("templates/server/main.gotmpl"),
+ "server/doc.gotmpl": MustAsset("templates/server/doc.gotmpl"),
+
+ // client templates
+ "client/parameter.gotmpl": MustAsset("templates/client/parameter.gotmpl"),
+ "client/response.gotmpl": MustAsset("templates/client/response.gotmpl"),
+ "client/client.gotmpl": MustAsset("templates/client/client.gotmpl"),
+ "client/facade.gotmpl": MustAsset("templates/client/facade.gotmpl"),
+
+ "markdown/docs.gotmpl": MustAsset("templates/markdown/docs.gotmpl"),
+
+ // cli templates
+ "cli/cli.gotmpl": MustAsset("templates/cli/cli.gotmpl"),
+ "cli/main.gotmpl": MustAsset("templates/cli/main.gotmpl"),
+ "cli/modelcli.gotmpl": MustAsset("templates/cli/modelcli.gotmpl"),
+ "cli/operation.gotmpl": MustAsset("templates/cli/operation.gotmpl"),
+ "cli/registerflag.gotmpl": MustAsset("templates/cli/registerflag.gotmpl"),
+ "cli/retrieveflag.gotmpl": MustAsset("templates/cli/retrieveflag.gotmpl"),
+ "cli/schema.gotmpl": MustAsset("templates/cli/schema.gotmpl"),
+ "cli/completion.gotmpl": MustAsset("templates/cli/completion.gotmpl"),
+ }
+}
+
+func defaultProtectedTemplates() map[string]bool {
+ return map[string]bool{
+ "dereffedSchemaType": true,
+ "docstring": true,
+ "header": true,
+ "mapvalidator": true,
+ "model": true,
+ "modelvalidator": true,
+ "objectvalidator": true,
+ "primitivefieldvalidator": true,
+ "privstructfield": true,
+ "privtuplefield": true,
+ "propertyValidationDocString": true,
+ "propertyvalidator": true,
+ "schema": true,
+ "schemaBody": true,
+ "schemaType": true,
+ "schemabody": true,
+ "schematype": true,
+ "schemavalidator": true,
+ "serverDoc": true,
+ "slicevalidator": true,
+ "structfield": true,
+ "structfieldIface": true,
+ "subTypeBody": true,
+ "swaggerJsonEmbed": true,
+ "tuplefield": true,
+ "tuplefieldIface": true,
+ "typeSchemaType": true,
+ "simpleschemaDefaultsvar": true,
+ "simpleschemaDefaultsinit": true,
+
+ // validation helpers
+ "validationCustomformat": true,
+ "validationPrimitive": true,
+ "validationStructfield": true,
+ "withBaseTypeBody": true,
+ "withoutBaseTypeBody": true,
+ "validationMinimum": true,
+ "validationMaximum": true,
+ "validationMultipleOf": true,
+
+ // all serializers
+ "additionalPropertiesSerializer": true,
+ "tupleSerializer": true,
+ "schemaSerializer": true,
+ "hasDiscriminatedSerializer": true,
+ "discriminatedSerializer": true,
+ }
+}
+
+// AddFile adds a file to the default repository. It will create a new template based on the filename.
+// It trims the .gotmpl suffix from the end and converts the name using swag.ToJSONName. This will strip
+// directory separators and camel-case the next letter.
+// e.g. validation/primitive.gotmpl will become validationPrimitive
+//
+// If the file contains a definition for a template that is protected, the whole file will not be added.
+func AddFile(name, data string) error {
+ return templates.addFile(name, data, false)
+}
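+
+// An illustrative sketch of the naming rule above (not upstream code; the
+// path and template body are hypothetical):
+//
+//	src := `custom content for {{ .Name }}`
+//	if err := AddFile("custom/helper.gotmpl", src); err != nil {
+//		log.Fatal(err) // e.g. the file redefines a protected template
+//	}
+//	// the template is now registered under the name "customHelper"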
+
+// NewRepository creates a new template repository with the provided functions defined
+func NewRepository(funcs template.FuncMap) *Repository {
+ repo := Repository{
+ files: make(map[string]string),
+ templates: make(map[string]*template.Template),
+ funcs: funcs,
+ }
+
+ if repo.funcs == nil {
+ repo.funcs = make(template.FuncMap)
+ }
+
+ return &repo
+}
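+
+// A minimal usage sketch (assumed, not upstream code): construct a repository
+// with a custom function map, then load the embedded defaults into it:
+//
+//	repo := NewRepository(template.FuncMap{"upper": strings.ToUpper})
+//	repo.LoadDefaults()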
+
+// Repository is the repository for the generator templates
+type Repository struct {
+ files map[string]string
+ templates map[string]*template.Template
+ funcs template.FuncMap
+ allowOverride bool
+ mux sync.Mutex
+}
+
+// ShallowClone a repository.
+//
+// Clones the maps of files and templates, so as to be able to use
+// the cloned repo concurrently.
+func (t *Repository) ShallowClone() *Repository {
+ clone := &Repository{
+ files: make(map[string]string, len(t.files)),
+ templates: make(map[string]*template.Template, len(t.templates)),
+ funcs: t.funcs,
+ allowOverride: t.allowOverride,
+ }
+
+ t.mux.Lock()
+ defer t.mux.Unlock()
+
+ for k, file := range t.files {
+ clone.files[k] = file
+ }
+ for k, tpl := range t.templates {
+ clone.templates[k] = tpl
+ }
+ return clone
+}
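+
+// Illustrative use (assumed): hand each worker its own clone so map lookups
+// do not race on the shared repository:
+//
+//	clone := repo.ShallowClone()
+//	go func() { _ = clone.MustGet("model") }()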
+
+// LoadDefaults will load the embedded templates
+func (t *Repository) LoadDefaults() {
+
+ for name, asset := range assets {
+ if err := t.addFile(name, string(asset), true); err != nil {
+ log.Fatal(err)
+ }
+ }
+}
+
+// LoadDir will walk the specified path and add each .gotmpl file it finds to the repository
+func (t *Repository) LoadDir(templatePath string) error {
+ err := filepath.Walk(templatePath, func(path string, info os.FileInfo, err error) error {
+
+ if strings.HasSuffix(path, ".gotmpl") {
+ if assetName, e := filepath.Rel(templatePath, path); e == nil {
+ if data, e := os.ReadFile(path); e == nil {
+ if ee := t.AddFile(assetName, string(data)); ee != nil {
+ return fmt.Errorf("could not add template: %v", ee)
+ }
+ }
+ // Non-readable files are skipped
+ }
+ }
+ if err != nil {
+ return err
+ }
+ // Non-template files are skipped
+ return nil
+ })
+ if err != nil {
+ return fmt.Errorf("could not complete template processing in directory \"%s\": %v", templatePath, err)
+ }
+ return nil
+}
+
+// LoadContrib loads template from contrib directory
+func (t *Repository) LoadContrib(name string) error {
+ log.Printf("loading contrib %s", name)
+ const pathPrefix = "templates/contrib/"
+ basePath := pathPrefix + name
+ filesAdded := 0
+ for _, aname := range AssetNames() {
+ if !strings.HasSuffix(aname, ".gotmpl") {
+ continue
+ }
+ if strings.HasPrefix(aname, basePath) {
+ target := aname[len(basePath)+1:]
+ err := t.addFile(target, string(MustAsset(aname)), true)
+ if err != nil {
+ return err
+ }
+ log.Printf("added contributed template %s from %s", target, aname)
+ filesAdded++
+ }
+ }
+ if filesAdded == 0 {
+ return fmt.Errorf("no files added from template: %s", name)
+ }
+ return nil
+}
+
+func (t *Repository) addFile(name, data string, allowOverride bool) error {
+ fileName := name
+ name = swag.ToJSONName(strings.TrimSuffix(name, ".gotmpl"))
+
+ templ, err := template.New(name).Funcs(t.funcs).Parse(data)
+
+ if err != nil {
+ return fmt.Errorf("failed to load template %s: %v", name, err)
+ }
+
+ // check if any protected templates are defined
+ if !allowOverride && !t.allowOverride {
+		for _, tmpl := range templ.Templates() {
+			if protectedTemplates[tmpl.Name()] {
+				return fmt.Errorf("cannot overwrite protected template %s", tmpl.Name())
+			}
+		}
+ }
+
+ // Add each defined template into the cache
+	for _, tmpl := range templ.Templates() {
+		t.files[tmpl.Name()] = fileName
+		t.templates[tmpl.Name()] = tmpl.Lookup(tmpl.Name())
+	}
+
+ return nil
+}
+
+// MustGet a template by name, panics when fails
+func (t *Repository) MustGet(name string) *template.Template {
+ tpl, err := t.Get(name)
+ if err != nil {
+ panic(err)
+ }
+ return tpl
+}
+
+// AddFile adds a file to the repository. It will create a new template based on the filename.
+// It trims the .gotmpl suffix from the end and converts the name using swag.ToJSONName. This will strip
+// directory separators and camel-case the next letter.
+// e.g. validation/primitive.gotmpl will become validationPrimitive
+//
+// If the file contains a definition for a template that is protected, the whole file will not be added.
+func (t *Repository) AddFile(name, data string) error {
+ return t.addFile(name, data, false)
+}
+
+// SetAllowOverride allows setting allowOverride after the Repository was initialized
+func (t *Repository) SetAllowOverride(value bool) {
+ t.allowOverride = value
+}
+
+func findDependencies(n parse.Node) []string {
+
+ var deps []string
+ depMap := make(map[string]bool)
+
+ if n == nil {
+ return deps
+ }
+
+ switch node := n.(type) {
+ case *parse.ListNode:
+ if node != nil && node.Nodes != nil {
+ for _, nn := range node.Nodes {
+ for _, dep := range findDependencies(nn) {
+ depMap[dep] = true
+ }
+ }
+ }
+ case *parse.IfNode:
+ for _, dep := range findDependencies(node.BranchNode.List) {
+ depMap[dep] = true
+ }
+ for _, dep := range findDependencies(node.BranchNode.ElseList) {
+ depMap[dep] = true
+ }
+
+ case *parse.RangeNode:
+ for _, dep := range findDependencies(node.BranchNode.List) {
+ depMap[dep] = true
+ }
+ for _, dep := range findDependencies(node.BranchNode.ElseList) {
+ depMap[dep] = true
+ }
+
+ case *parse.WithNode:
+ for _, dep := range findDependencies(node.BranchNode.List) {
+ depMap[dep] = true
+ }
+ for _, dep := range findDependencies(node.BranchNode.ElseList) {
+ depMap[dep] = true
+ }
+
+ case *parse.TemplateNode:
+ depMap[node.Name] = true
+ }
+
+ for dep := range depMap {
+ deps = append(deps, dep)
+ }
+
+ return deps
+
+}
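+
+// For example, walking the parse tree of
+// `{{ if .HasItems }}{{ template "schemaBody" . }}{{ end }}{{ template "docstring" . }}`
+// yields the dependencies "schemaBody" and "docstring" (order is not
+// guaranteed, since they are collected through a map).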
+
+func (t *Repository) flattenDependencies(templ *template.Template, dependencies map[string]bool) map[string]bool {
+ if dependencies == nil {
+ dependencies = make(map[string]bool)
+ }
+
+ deps := findDependencies(templ.Tree.Root)
+
+	for _, d := range deps {
+		if _, found := dependencies[d]; !found {
+			dependencies[d] = true
+
+			// recurse into known templates to collect transitive dependencies
+			if tt := t.templates[d]; tt != nil {
+				dependencies = t.flattenDependencies(tt, dependencies)
+			}
+		}
+	}
+
+ return dependencies
+
+}
+
+func (t *Repository) addDependencies(templ *template.Template) (*template.Template, error) {
+
+ name := templ.Name()
+
+ deps := t.flattenDependencies(templ, nil)
+
+ for dep := range deps {
+
+ if dep == "" {
+ continue
+ }
+
+ tt := templ.Lookup(dep)
+
+ // Check if we have it
+ if tt == nil {
+ tt = t.templates[dep]
+
+ // Still don't have it, return an error
+ if tt == nil {
+ return templ, fmt.Errorf("could not find template %s", dep)
+ }
+ var err error
+
+ // Add it to the parse tree
+ templ, err = templ.AddParseTree(dep, tt.Tree)
+
+ if err != nil {
+ return templ, fmt.Errorf("dependency error: %v", err)
+ }
+
+ }
+ }
+ return templ.Lookup(name), nil
+}
+
+// Get will return the named template from the repository, ensuring that all dependent templates are loaded.
+// It will return an error if a dependent template is not defined in the repository.
+func (t *Repository) Get(name string) (*template.Template, error) {
+ templ, found := t.templates[name]
+
+ if !found {
+ return templ, fmt.Errorf("template doesn't exist %s", name)
+ }
+
+ return t.addDependencies(templ)
+}
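+
+// A usage sketch (illustrative): fetch a template together with its
+// dependency closure, then execute it; genData is a hypothetical payload.
+//
+//	tpl, err := repo.Get("model")
+//	if err != nil {
+//		return err
+//	}
+//	return tpl.Execute(os.Stdout, genData)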
+
+// DumpTemplates prints out a dump of all the defined templates, where they are defined and what their dependencies are.
+func (t *Repository) DumpTemplates() {
+ buf := bytes.NewBuffer(nil)
+ fmt.Fprintln(buf, "\n# Templates")
+ for name, templ := range t.templates {
+ fmt.Fprintf(buf, "## %s\n", name)
+ fmt.Fprintf(buf, "Defined in `%s`\n", t.files[name])
+
+ if deps := findDependencies(templ.Tree.Root); len(deps) > 0 {
+
+			fmt.Fprintf(buf, "#### requires\n - %v\n\n\n", strings.Join(deps, "\n - "))
+ }
+ fmt.Fprintln(buf, "\n---")
+ }
+ log.Println(buf.String())
+}
+
+// FuncMap functions
+
+func asJSON(data interface{}) (string, error) {
+ b, err := json.Marshal(data)
+ if err != nil {
+ return "", err
+ }
+ return string(b), nil
+}
+
+func asPrettyJSON(data interface{}) (string, error) {
+ b, err := json.MarshalIndent(data, "", " ")
+ if err != nil {
+ return "", err
+ }
+ return string(b), nil
+}
+
+func pluralizeFirstWord(arg string) string {
+ sentence := strings.Split(arg, " ")
+ if len(sentence) == 1 {
+ return inflect.Pluralize(arg)
+ }
+
+ return inflect.Pluralize(sentence[0]) + " " + strings.Join(sentence[1:], " ")
+}
+
+func dropPackage(str string) string {
+ parts := strings.Split(str, ".")
+ return parts[len(parts)-1]
+}
+
+// containsPkgStr returns true if the GoType str contains a package prefix,
+// e.g. "model.MyType" -> true, "MyType" -> false.
+func containsPkgStr(str string) bool {
+	dropped := dropPackage(str)
+	return dropped != str
+}
+
+func padSurround(entry, padWith string, i, ln int) string {
+ var res []string
+ if i > 0 {
+ for j := 0; j < i; j++ {
+ res = append(res, padWith)
+ }
+ }
+ res = append(res, entry)
+ tot := ln - i - 1
+ for j := 0; j < tot; j++ {
+ res = append(res, padWith)
+ }
+ return strings.Join(res, ",")
+}
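+
+// For example, padSurround("value", "nil", 1, 3) yields "nil,value,nil":
+// i pads are placed before the entry and ln-i-1 pads after it.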
+
+func padComment(str string, pads ...string) string {
+	// pads specifies the padding used to indent multi-line comments; defaults to one space
+	pad := " "
+	lines := strings.Split(str, "\n")
+	if len(pads) > 0 {
+		pad = strings.Join(pads, "")
+	}
+	return strings.Join(lines, "\n//"+pad)
+}
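+
+// For example, padComment("line one\nline two") yields
+// "line one\n// line two": continuation lines are re-prefixed so the whole
+// string stays inside a Go line comment.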
+
+func blockComment(str string) string {
+ return strings.ReplaceAll(str, "*/", "[*]/")
+}
+
+func pascalize(arg string) string {
+ runes := []rune(arg)
+ switch len(runes) {
+ case 0:
+ return "Empty"
+ case 1: // handle special case when we have a single rune that is not handled by swag.ToGoName
+ switch runes[0] {
+ case '+', '-', '#', '_', '*', '/', '=': // those cases are handled differently than swag utility
+ return prefixForName(arg)
+ }
+ }
+ return swag.ToGoName(swag.ToGoName(arg)) // want to remove spaces
+}
+
+func prefixForName(arg string) string {
+	if len(arg) == 0 {
+		return ""
+	}
+	first := []rune(arg)[0] // safe: arg is guaranteed non-empty here
+	if unicode.IsLetter(first) {
+		return ""
+	}
+ switch first {
+ case '+':
+ return "Plus"
+ case '-':
+ return "Minus"
+ case '#':
+ return "HashTag"
+ case '*':
+ return "Asterisk"
+ case '/':
+ return "ForwardSlash"
+ case '=':
+ return "EqualSign"
+ // other cases ($,@ etc..) handled by swag.ToGoName
+ }
+ return "Nr"
+}
+
+func replaceSpecialChar(in rune) string {
+ switch in {
+ case '.':
+ return "-Dot-"
+ case '+':
+ return "-Plus-"
+ case '-':
+ return "-Dash-"
+ case '#':
+ return "-Hashtag-"
+ }
+ return string(in)
+}
+
+func cleanupEnumVariant(in string) string {
+ replaced := ""
+ for _, char := range in {
+ replaced += replaceSpecialChar(char)
+ }
+ return replaced
+}
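+
+// For example, cleanupEnumVariant("1.5+") yields "1-Dot-5-Plus-", keeping
+// enum-derived names free of characters that are invalid in identifiers.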
+
+func dict(values ...interface{}) (map[string]interface{}, error) {
+ if len(values)%2 != 0 {
+ return nil, fmt.Errorf("expected even number of arguments, got %d", len(values))
+ }
+ dict := make(map[string]interface{}, len(values)/2)
+ for i := 0; i < len(values); i += 2 {
+ key, ok := values[i].(string)
+ if !ok {
+ return nil, fmt.Errorf("expected string key, got %+v", values[i])
+ }
+ dict[key] = values[i+1]
+ }
+ return dict, nil
+}
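+
+// dict exists because text/template calls can pass only a single argument.
+// An illustrative template usage (the helper name and keys are hypothetical):
+//
+//	{{ template "someHelper" (dict "Type" .GoType "Required" true) }}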
+
+// isInteger determines if a value may be represented by an integer.
+func isInteger(arg interface{}) bool {
+ switch val := arg.(type) {
+ case int8, int16, int32, int, int64, uint8, uint16, uint32, uint, uint64:
+ return true
+ case *int8, *int16, *int32, *int, *int64, *uint8, *uint16, *uint32, *uint, *uint64:
+ v := reflect.ValueOf(arg)
+ return !v.IsNil()
+ case float64:
+ return math.Round(val) == val
+ case *float64:
+ return val != nil && math.Round(*val) == *val
+ case float32:
+ return math.Round(float64(val)) == float64(val)
+ case *float32:
+ return val != nil && math.Round(float64(*val)) == float64(*val)
+ case string:
+ _, err := strconv.ParseInt(val, 10, 64)
+ return err == nil
+ case *string:
+ if val == nil {
+ return false
+ }
+ _, err := strconv.ParseInt(*val, 10, 64)
+ return err == nil
+ default:
+ return false
+ }
+}
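+
+// For example, isInteger(float64(2)) and isInteger("12") are true, while
+// isInteger(2.5) and isInteger("12.3") are false; nil pointers are never
+// integers.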
+
+func resolvedDocCollectionFormat(cf string, child *GenItems) string {
+ if child == nil {
+ return cf
+ }
+ ccf := cf
+ if ccf == "" {
+ ccf = "csv"
+ }
+ rcf := resolvedDocCollectionFormat(child.CollectionFormat, child.Child)
+ if rcf == "" {
+ return ccf
+ }
+ return ccf + "|" + rcf
+}
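+
+// For example, a pipe-delimited array whose items are csv arrays resolves to
+// "pipes|csv"; an empty collection format defaults to "csv" at each level.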
+
+func resolvedDocType(tn, ft string, child *GenItems) string {
+ if tn == "array" {
+ if child == nil {
+ return "[]any"
+ }
+ return "[]" + resolvedDocType(child.SwaggerType, child.SwaggerFormat, child.Child)
+ }
+
+ if ft != "" {
+ if doc, ok := docFormat[ft]; ok {
+ return doc
+ }
+ return fmt.Sprintf("%s (formatted %s)", ft, tn)
+ }
+
+ return tn
+}
+
+func resolvedDocSchemaType(tn, ft string, child *GenSchema) string {
+ if tn == "array" {
+ if child == nil {
+ return "[]any"
+ }
+ return "[]" + resolvedDocSchemaType(child.SwaggerType, child.SwaggerFormat, child.Items)
+ }
+
+ if tn == "object" {
+ if child == nil || child.ElemType == nil {
+ return "map of any"
+ }
+ if child.IsMap {
+ return "map of " + resolvedDocElemType(child.SwaggerType, child.SwaggerFormat, &child.resolvedType)
+ }
+
+ return child.GoType
+ }
+
+ if ft != "" {
+ if doc, ok := docFormat[ft]; ok {
+ return doc
+ }
+ return fmt.Sprintf("%s (formatted %s)", ft, tn)
+ }
+
+ return tn
+}
+
+func resolvedDocElemType(tn, ft string, schema *resolvedType) string {
+ if schema == nil {
+ return ""
+ }
+ if schema.IsMap {
+ return "map of " + resolvedDocElemType(schema.ElemType.SwaggerType, schema.ElemType.SwaggerFormat, schema.ElemType)
+ }
+
+ if schema.IsArray {
+ return "[]" + resolvedDocElemType(schema.ElemType.SwaggerType, schema.ElemType.SwaggerFormat, schema.ElemType)
+ }
+
+ if ft != "" {
+ if doc, ok := docFormat[ft]; ok {
+ return doc
+ }
+ return fmt.Sprintf("%s (formatted %s)", ft, tn)
+ }
+
+ return tn
+}
+
+func httpStatus(code int) string {
+ if name, ok := runtime.Statuses[code]; ok {
+ return name
+ }
+ // non-standard codes deserve some name
+ return fmt.Sprintf("Status %d", code)
+}
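+
+// For example, httpStatus(200) yields "OK" (assuming the standard
+// go-openapi/runtime status table), and httpStatus(599) yields "Status 599".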
+
+// gt0 returns true if the *int64 points to a value > 0.
+// NOTE: plain {{ gt .MinProperties 0 }} just refuses to work normally
+// with a pointer.
+func gt0(in *int64) bool {
+	return in != nil && *in > 0
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/cli.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/cli.gotmpl
new file mode 100644
index 000000000..3d88c5beb
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/cli.gotmpl
@@ -0,0 +1,242 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .GenOpts.CliPackage }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+
+ "github.com/spf13/cobra"
+ "github.com/spf13/viper"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/swag"
+ httptransport "github.com/go-openapi/runtime/client"
+ homedir "github.com/mitchellh/go-homedir"
+)
+
+// debug flag indicating that cli should output debug logs
+var debug bool
+// config file location
+var configFile string
+// dry run flag
+var dryRun bool
+
+// name of the executable
+var exeName string = filepath.Base(os.Args[0])
+
+// logDebugf writes debug log to stdout
+func logDebugf(format string, v ...interface{}) {
+ if !debug{
+ return
+ }
+ log.Printf(format, v...)
+}
+
+{{/*TODO: make this a swagger cli option*/}}
+// depth of recursion to construct model flags
+var maxDepth int = 5
+
+// makeClient constructs a client object
+func makeClient(cmd *cobra.Command, args []string) (*client.{{ pascalize .Name }}, error) {
+ hostname := viper.GetString("hostname")
+ viper.SetDefault("base_path", client.DefaultBasePath)
+ basePath := viper.GetString("base_path")
+ scheme := viper.GetString("scheme")
+
+ r := httptransport.New(hostname, basePath, []string{scheme})
+ r.SetDebug(debug)
+
+	{{- /* user might define a custom mediatype like xxx/json, and there are no registered serializers to handle it. */}}
+ // set custom producer and consumer to use the default ones
+ {{ range .Consumes }}
+ {{ range .AllSerializers }}
+ {{- if stringContains .MediaType "json" }}
+ r.Consumers["{{ .MediaType }}"] = runtime.JSONConsumer()
+ {{- else }}
+ // warning: consumes {{ .MediaType }} is not supported by go-swagger cli yet
+ {{- end }}
+ {{- end }}
+ {{ end }}
+ {{ range .Produces }}
+ {{- range .AllSerializers }}
+ {{- if stringContains .MediaType "json" }}
+ r.Producers["{{ .MediaType }}"] = runtime.JSONProducer()
+ {{- else }}
+ // warning: produces {{ .MediaType }} is not supported by go-swagger cli yet
+ {{- end }}
+ {{- end }}
+ {{ end }}
+
+ {{- if .SecurityDefinitions }}
+ auth, err := makeAuthInfoWriter(cmd)
+ if err != nil {
+ return nil, err
+ }
+ r.DefaultAuthentication = auth
+ {{ end }}
+ appCli := client.New(r, strfmt.Default)
+ logDebugf("Server url: %v://%v", scheme, hostname)
+ return appCli, nil
+}
+
+// MakeRootCmd returns the root cmd
+func MakeRootCmd() (*cobra.Command, error) {
+ cobra.OnInitialize(initViperConfigs)
+
+ // Use executable name as the command name
+ rootCmd := &cobra.Command{
+ Use: exeName,
+ }
+	{{/*note: viper-bound flag values must be retrieved from viper rather than from cmd*/}}
+ // register basic flags
+ rootCmd.PersistentFlags().String("hostname", client.DefaultHost, "hostname of the service")
+ viper.BindPFlag("hostname", rootCmd.PersistentFlags().Lookup("hostname"))
+ rootCmd.PersistentFlags().String("scheme", client.DefaultSchemes[0], fmt.Sprintf("Choose from: %v", client.DefaultSchemes))
+ viper.BindPFlag("scheme", rootCmd.PersistentFlags().Lookup("scheme"))
+ rootCmd.PersistentFlags().String("base-path", client.DefaultBasePath, fmt.Sprintf("For example: %v", client.DefaultBasePath))
+ viper.BindPFlag("base_path", rootCmd.PersistentFlags().Lookup("base-path"))
+
+ // configure debug flag
+ rootCmd.PersistentFlags().BoolVar(&debug, "debug", false, "output debug logs")
+ // configure config location
+ rootCmd.PersistentFlags().StringVar(&configFile, "config", "", "config file path")
+ // configure dry run flag
+ rootCmd.PersistentFlags().BoolVar(&dryRun, "dry-run", false, "do not send the request to server")
+
+ // register security flags
+ {{- if .SecurityDefinitions }}
+	if err := registerAuthInfoWriterFlags(rootCmd); err != nil {
+ return nil, err
+ }
+ {{- end }}
+ // add all operation groups
+{{- range .OperationGroups -}}
+ {{- $operationGroupCmdVarName := printf "operationGroup%vCmd" (pascalize .Name) }}
+ {{ $operationGroupCmdVarName }}, err := makeOperationGroup{{ pascalize .Name }}Cmd()
+ if err != nil {
+ return nil, err
+ }
+ rootCmd.AddCommand({{ $operationGroupCmdVarName }})
+{{ end }}
+
+ // add cobra completion
+ rootCmd.AddCommand(makeGenCompletionCmd())
+
+ return rootCmd, nil
+}
+
+// initViperConfigs initializes the viper config, using the config file at '$HOME/.config/<cli name>/config.<json|yaml|...>'.
+// Currently hostname, scheme and auth tokens can be specified in this config file.
+func initViperConfigs() {
+ if configFile != "" {
+ // use user specified config file location
+ viper.SetConfigFile(configFile)
+	} else {
+ // look for default config
+ // Find home directory.
+ home, err := homedir.Dir()
+ cobra.CheckErr(err)
+
+		// Search for a config file named "config" (without extension) in "$HOME/.config/<exe name>".
+ viper.AddConfigPath(path.Join(home, ".config", exeName))
+ viper.SetConfigName("config")
+ }
+
+ if err := viper.ReadInConfig(); err != nil {
+ logDebugf("Error: loading config file: %v", err)
+ return
+ }
+ logDebugf("Using config file: %v", viper.ConfigFileUsed())
+}
+
+{{- if .SecurityDefinitions }}
+{{- /*youyuan: rework this, since the spec may define multiple auth schemes.
+	The CLI needs to detect which one the user passed rather than add all of them.*/}}
+// registerAuthInfoWriterFlags registers all flags needed to perform authentication
+func registerAuthInfoWriterFlags(cmd *cobra.Command) error {
+{{- range .SecurityDefinitions }}
+ /*{{.Name}} {{.Description}}*/
+ {{- if .IsBasicAuth }}
+ cmd.PersistentFlags().String("username", "", "username for basic auth")
+ viper.BindPFlag("username", cmd.PersistentFlags().Lookup("username"))
+ cmd.PersistentFlags().String("password", "", "password for basic auth")
+ viper.BindPFlag("password", cmd.PersistentFlags().Lookup("password"))
+ {{- end }}
+ {{- if .IsAPIKeyAuth }}
+ cmd.PersistentFlags().String("{{.Name}}", "", `{{.Description}}`)
+ viper.BindPFlag("{{.Name}}", cmd.PersistentFlags().Lookup("{{.Name}}"))
+ {{- end }}
+ {{- if .IsOAuth2 }}
+ // oauth2: let user provide the token in a flag, rather than implement the logic to fetch the token.
+ cmd.PersistentFlags().String("oauth2-token", "", `{{.Description}}`)
+ viper.BindPFlag("oauth2-token", cmd.PersistentFlags().Lookup("oauth2-token"))
+ {{- end }}
+{{- end }}
+ return nil
+}
+
+// makeAuthInfoWriter retrieves cmd flags and constructs an auth info writer
+func makeAuthInfoWriter(cmd *cobra.Command) (runtime.ClientAuthInfoWriter, error) {
+ auths := []runtime.ClientAuthInfoWriter{}
+{{- range .SecurityDefinitions }}
+ /*{{.Name}} {{.Description}}*/
+ {{- if .IsBasicAuth }}
+ if viper.IsSet("username") {
+ usr := viper.GetString("username")
+ if !viper.IsSet("password"){
+ return nil, fmt.Errorf("Basic Auth password for user [%v] is not provided.", usr)
+ }
+ pwd := viper.GetString("password")
+ auths = append(auths, httptransport.BasicAuth(usr,pwd))
+ }
+ {{- end }}
+ {{- if .IsAPIKeyAuth }}
+ if viper.IsSet("{{.Name}}") {
+ {{ pascalize .Name }}Key := viper.GetString("{{.Name}}")
+ auths = append(auths, httptransport.APIKeyAuth("{{.Name}}", "{{.In}}", {{ pascalize .Name }}Key))
+ }
+ {{- end }}
+ {{- if .IsOAuth2 }}
+ if viper.IsSet("oauth2-token") {
+ // oauth2 workflow for generated CLI is not ideal.
+ // If you have suggestions on how to support it, raise an issue here: https://github.com/go-swagger/go-swagger/issues
+ // This will be added to header: "Authorization: Bearer {oauth2-token value}"
+ token := viper.GetString("oauth2-token")
+ auths = append(auths, httptransport.BearerToken(token))
+ }
+ {{- end }}
+{{- end }}
+ if len(auths) == 0 {
+ logDebugf("Warning: No auth params detected.")
+ return nil, nil
+ }
+ // compose all auths together
+ return httptransport.Compose(auths...), nil
+}
+{{- end }}
+
+{{ range .OperationGroups -}}
+func makeOperationGroup{{ pascalize .Name }}Cmd() (*cobra.Command, error) {
+ {{- $operationGroupCmdVarName := printf "operationGroup%vCmd" (pascalize .Name) }}
+ {{ $operationGroupCmdVarName }} := &cobra.Command{
+ Use: "{{ .Name }}",
+ Long: `{{ .Description }}`,
+ }
+{{ range .Operations }}
+ {{- $operationCmdVarName := printf "operation%vCmd" (pascalize .Name) }}
+ {{ $operationCmdVarName }}, err := makeOperation{{pascalize .Package}}{{ pascalize .Name }}Cmd()
+ if err != nil {
+ return nil, err
+ }
+ {{ $operationGroupCmdVarName }}.AddCommand({{ $operationCmdVarName }})
+{{ end }}
+ return {{ $operationGroupCmdVarName }}, nil
+}
+{{ end }} {{/*operation group*/}}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/completion.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/completion.gotmpl
new file mode 100644
index 000000000..c115cc1a9
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/completion.gotmpl
@@ -0,0 +1,77 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .GenOpts.CliPackage }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import(
+ "github.com/spf13/cobra"
+)
+
+func makeGenCompletionCmd() *cobra.Command{
+
+ var completionCmd = &cobra.Command{
+ Use: "completion [bash|zsh|fish|powershell]",
+ Short: "Generate completion script",
+ Long: `To load completions:
+
+Bash:
+
+ $ source <(yourprogram completion bash)
+
+ # To load completions for each session, execute once:
+ # Linux:
+ $ yourprogram completion bash > /etc/bash_completion.d/yourprogram
+ # macOS:
+ $ yourprogram completion bash > /usr/local/etc/bash_completion.d/yourprogram
+
+Zsh:
+
+ # If shell completion is not already enabled in your environment,
+ # you will need to enable it. You can execute the following once:
+
+ $ echo "autoload -U compinit; compinit" >> ~/.zshrc
+
+ # To load completions for each session, execute once:
+ $ yourprogram completion zsh > "${fpath[1]}/_yourprogram"
+
+ # You will need to start a new shell for this setup to take effect.
+
+fish:
+
+ $ yourprogram completion fish | source
+
+ # To load completions for each session, execute once:
+ $ yourprogram completion fish > ~/.config/fish/completions/yourprogram.fish
+
+PowerShell:
+
+ PS> yourprogram completion powershell | Out-String | Invoke-Expression
+
+ # To load completions for every new session, run:
+ PS> yourprogram completion powershell > yourprogram.ps1
+ # and source this file from your PowerShell profile.
+`,
+ DisableFlagsInUseLine: true,
+ ValidArgs: []string{"bash", "zsh", "fish", "powershell"},
+ Args: cobra.ExactValidArgs(1),
+ Run: func(cmd *cobra.Command, args []string) {
+ switch args[0] {
+ case "bash":
+ cmd.Root().GenBashCompletion(os.Stdout)
+ case "zsh":
+ cmd.Root().GenZshCompletion(os.Stdout)
+ case "fish":
+ cmd.Root().GenFishCompletion(os.Stdout, true)
+ case "powershell":
+ cmd.Root().GenPowerShellCompletionWithDesc(os.Stdout)
+ }
+ },
+ }
+ return completionCmd
+} \ No newline at end of file
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/main.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/main.gotmpl
new file mode 100644
index 000000000..6cc470a2f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/main.gotmpl
@@ -0,0 +1,28 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package main
+
+import (
+ "encoding/json"
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+func main() {
+	rootCmd, err := cli.MakeRootCmd()
+ if err != nil {
+ fmt.Println("Cmd construction error: ", err)
+ os.Exit(1)
+ }
+
+ if err := rootCmd.Execute(); err != nil {
+ os.Exit(1)
+ }
+} \ No newline at end of file
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/modelcli.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/modelcli.gotmpl
new file mode 100644
index 000000000..d93e91d41
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/modelcli.gotmpl
@@ -0,0 +1,25 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package cli
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+
+import (
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+ "github.com/spf13/cobra"
+)
+
+// Schema cli for {{.GoType}}
+{{ template "modelschemacli" .}}
+
+{{ range .ExtraSchemas }}
+// Extra schema cli for {{.GoType}}
+ {{ template "modelschemacli" .}}
+{{ end }} \ No newline at end of file
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/operation.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/operation.gotmpl
new file mode 100644
index 000000000..10666ed78
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/operation.gotmpl
@@ -0,0 +1,230 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+{{- /*TODO: do not hardcode cli pkg*/}}
+package cli
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+
+ "github.com/spf13/cobra"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/swag"
+ httptransport "github.com/go-openapi/runtime/client"
+)
+
+// makeOperation{{pascalize .Package}}{{ pascalize .Name }}Cmd returns a cmd to handle operation {{ camelize .Name }}
+func makeOperation{{pascalize .Package}}{{ pascalize .Name }}Cmd() (*cobra.Command, error) {
+ cmd := &cobra.Command{
+ Use: "{{ .Name }}",
+ Short: `{{ escapeBackticks .Description}}`,
+ RunE: runOperation{{pascalize .Package}}{{ pascalize .Name }},
+ }
+
+ if err := registerOperation{{pascalize .Package}}{{ pascalize .Name }}ParamFlags(cmd); err != nil{
+ return nil, err
+ }
+
+ return cmd, nil
+}
+
+{{ $operationGroup := .Package }}
+{{ $operation := .Name }}
+{{ $operationPkgAlias := .PackageAlias }}
+// runOperation{{pascalize $operationGroup }}{{ pascalize $operation }} uses cmd flags to call endpoint api
+func runOperation{{pascalize $operationGroup }}{{ pascalize $operation }}(cmd *cobra.Command, args []string) error{
+ appCli, err := makeClient(cmd, args)
+ if err != nil {
+ return err
+ }
+ // retrieve flag values from cmd and fill params
+ params := {{ .PackageAlias }}.New{{ pascalize .Name}}Params()
+{{- range .Params }}
+ if err, _ := retrieveOperation{{pascalize $operationGroup }}{{ pascalize $operation }}{{ pascalize .Name }}Flag(params, "", cmd); err != nil{
+ return err
+ }
+{{- end }} {{/*Params*/}}
+ if dryRun {
+	{{/* Note: dry run is not very useful for now, but it will be useful once validation is added in the future */}}
+ logDebugf("dry-run flag specified. Skip sending request.")
+ return nil
+ }
+ // make request and then print result
+{{- /*Package string is the operation name*/}}
+ msgStr, err := parseOperation{{pascalize .Package}}{{ pascalize .Name }}Result(appCli.{{- pascalize .Package }}.{{ pascalize .Name }}(params {{- if .Authorized}}, nil{{ end }}{{ if .HasStreamingResponse }}, &bytes.Buffer{}{{ end }}))
+ if err != nil {
+ return err
+ }
+ if !debug{
+ {{/* In debug mode content should have been printed in transport layer, so do not print again*/}}
+ fmt.Println(msgStr)
+ }
+ return nil
+}
+
+// registerOperation{{pascalize $operationGroup }}{{ pascalize $operation }}ParamFlags registers all flags needed to fill params
+func registerOperation{{pascalize $operationGroup }}{{ pascalize $operation }}ParamFlags(cmd *cobra.Command) error {
+{{- range .Params }}
+ if err := registerOperation{{pascalize $operationGroup }}{{ pascalize $operation }}{{pascalize .Name }}ParamFlags("", cmd); err != nil{
+ return err
+ }
+{{- end }}
+ return nil
+}
+
+{{/*register functions for each fields in this operation*/}}
+{{- range .Params }}
+func registerOperation{{pascalize $operationGroup }}{{ pascalize $operation }}{{pascalize .Name }}ParamFlags(cmdPrefix string, cmd *cobra.Command) error{
+ {{- if .IsPrimitive }}
+ {{ template "primitiveregistrator" . }}
+ {{- else if .IsArray }}
+ {{ template "arrayregistrator" . }}
+ {{- else if and .IsBodyParam .Schema (not .IsArray) (not .IsMap) (not .IsStream) }}
+ {{ template "modelparamstringregistrator" . }}
+ {{ template "modelparamregistrator" . }}
+	{{/* Do not mark the body flag as required, since individual flags for body fields will be added separately */}}
+ {{- else }}
+ // warning: go type {{ .GoType }} is not supported by go-swagger cli yet.
+ {{- end }}
+ return nil
+}
+{{- end }}
+
+{{/*functions to retrieve each field of params*/}}
+{{- range .Params }}
+func retrieveOperation{{pascalize $operationGroup }}{{ pascalize $operation }}{{ pascalize .Name }}Flag(m *{{ $operationPkgAlias }}.{{ pascalize $operation }}Params, cmdPrefix string, cmd *cobra.Command) (error,bool){
+ retAdded := false
+ {{- $flagStr := .Name }}
+ {{- $flagValueVar := printf "%vValue" (camelize .Name) }}
+ {{- /*only set the param if user set the flag*/}}
+ if cmd.Flags().Changed("{{ $flagStr }}") {
+ {{- if .IsPrimitive }}
+ {{ template "primitiveretriever" . }}
+ {{- else if .IsArray }}
+ {{ template "arrayretriever" . }}
+ {{- else if .IsMap }}
+ // warning: {{ .Name }} map type {{.GoType}} is not supported by go-swagger cli yet
+ {{- else if and .IsBodyParam .Schema .IsComplexObject (not .IsStream) }}
+ {{- /*schema payload can be passed in cmd as a string and here is unmarshalled to model struct and attached in params*/}}
+ // Read {{ $flagStr }} string from cmd and unmarshal
+ {{ $flagValueVar }}Str, err := cmd.Flags().GetString("{{ $flagStr }}")
+ if err != nil {
+ return err, false
+ }
+	{{/*Note: an anonymous body schema is not a pointer*/}}
+ {{ $flagValueVar }} := {{if containsPkgStr .GoType}}{{ .GoType }}{{else}}{{ .Pkg }}.{{.GoType}}{{ end }}{}
+ if err := json.Unmarshal([]byte({{ $flagValueVar }}Str), &{{ $flagValueVar }}); err!= nil{
+ return fmt.Errorf("cannot unmarshal {{ $flagStr }} string in {{.GoType}}: %v", err), false
+ }
+ m.{{ .ID }} = {{- if .IsNullable }}&{{- end }}{{ $flagValueVar }}
+ {{- else }}
+ // warning: {{.GoType}} is not supported by go-swagger cli yet
+ {{- end }} {{/*end go type case*/}}
+ }
+ {{- if and .IsBodyParam .Schema .IsComplexObject (not .IsArray) (not .IsMap) (not .IsStream) }}
+	{{- /* Add flags to capture fields in Body. If the Body struct was previously constructed when
+	   unmarshalling the body string, reuse it; otherwise construct an empty value struct to fill.
+	   Body field flags overwrite values unmarshalled from the body string. */}}
+ {{- $flagModelVar := printf "%vModel" (camelize $flagValueVar) }}
+ {{ $flagModelVar }} := m.{{ .ID }}
+ if swag.IsZero({{ $flagModelVar }}){
+ {{ $flagModelVar }} = {{- if .IsNullable }}&{{- end }}{{if containsPkgStr .GoType}}{{ .GoType }}{{else}}{{ .Pkg }}.{{.GoType}}{{ end }}{}
+ }
+ {{- /*Only attach the body struct in params if user passed some flag filling some body fields.*/}}
+ {{- /* add "&" to $flagModelVar when it is not nullable because the retrieve method always expects a pointer */}}
+ err, added := retrieveModel{{ pascalize (dropPackage .GoType) }}Flags(0, {{if not .IsNullable}}&{{end}}{{ $flagModelVar }}, "{{ camelize (dropPackage .GoType) }}", cmd)
+ if err != nil{
+ return err, false
+ }
+ if added {
+ m.{{.ID}} = {{ $flagModelVar }}
+ }
+ if dryRun && debug {
+		{{/* in dry-run mode we don't get transport debug strings, so print the payload here*/}}
+ {{- $bodyDebugVar := printf "%vDebugBytes" (camelize $flagValueVar) }}
+ {{ $bodyDebugVar }}, err := json.Marshal(m.{{.ID}})
+ if err != nil{
+ return err, false
+ }
+ logDebugf("{{.ID }} dry-run payload: %v", string({{ $bodyDebugVar }}))
+ }
+ retAdded = retAdded || added
+ {{/*body debug string will be printed in transport layer*/}}
+ {{- end }}
+ return nil, retAdded
+}
+{{- end }} {{/*Params*/}}
+
+// parseOperation{{pascalize .Package}}{{ pascalize .Name }}Result parses request result and return the string content
+{{- /*TODO: handle multiple success response case*/}}
+func parseOperation{{pascalize .Package}}{{ pascalize .Name }}Result({{- if .SuccessResponse }}{{ range $i, $v := .SuccessResponses }} resp{{$i}} *{{$v.Package}}.{{pascalize $v.Name}},{{- end }}{{- end }} respErr error) (string, error){
+ if respErr != nil {
+ {{- /*error is of type default model. If we can cast, then print the resp.*/}}
+ {{ if .DefaultResponse }} {{with .DefaultResponse}}
+ {{ if .Schema }}
+ var iRespD interface{} = respErr
+ respD, ok := iRespD.(*{{ .Package }}.{{ pascalize .Name }})
+ if ok {
+ if !swag.IsZero(respD) && !swag.IsZero(respD.Payload) {
+ msgStr,err := json.Marshal(respD.Payload)
+ if err != nil{
+ return "", err
+ }
+ return string(msgStr), nil
+ }
+ }
+ {{ else }}
+ // Non schema case: warning {{.Name}} is not supported
+ {{ end }}
+ {{ end }} {{ end }}
+ {{- range $i, $v := .Responses }}
+ {{ if .Schema }}
+ var iResp{{$i}} interface{} = respErr
+ resp{{$i}}, ok := iResp{{$i}}.(*{{ .Package }}.{{ pascalize .Name }})
+ if ok {
+ if !swag.IsZero(resp{{$i}}) && !swag.IsZero(resp{{$i}}.Payload) {
+ msgStr,err := json.Marshal(resp{{$i}}.Payload)
+ if err != nil{
+ return "", err
+ }
+ return string(msgStr), nil
+ }
+ }
+ {{ else }}
+ // Non schema case: warning {{.Name}} is not supported
+ {{ end }}
+ {{ end }}
+ return "", respErr
+ }
+ {{- range $i, $v := .SuccessResponses }}
+ {{ if .Schema }}
+ {{- with .Schema}}
+ if !swag.IsZero(resp{{$i}}) && !swag.IsZero(resp{{$i}}.Payload) {
+ {{- if or .IsComplexObject .IsArray .IsMap }}
+ msgStr,err := json.Marshal(resp{{$i}}.Payload)
+ if err != nil{
+ return "", err
+ }
+ {{- else }}
+ msgStr := fmt.Sprintf("%v", resp{{$i}}.Payload)
+ {{- end }}
+ return string(msgStr), nil
+ }
+ {{- end }}
+ {{ else }}
+ // warning: non schema response {{.Name}} is not supported by go-swagger cli yet.
+ {{ end }}
+ {{ end }}
+ return "", nil
+}
+
+{{/*for models defined in params, generate their register and retrieve flags functions*/}}
+{{- range .ExtraSchemas }}
+ {{ template "modelschemacli" . }}
+{{- end}}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/registerflag.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/registerflag.gotmpl
new file mode 100644
index 000000000..637811155
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/registerflag.gotmpl
@@ -0,0 +1,97 @@
+{{/*util functions to run or register cmd flags*/}}
+
+{{ define "flagdescriptionvar" }}
+ {{- $fullDescription := (escapeBackticks .Description) }}
+ {{- if .Required}}
+ {{- $fullDescription = printf "Required. %v" $fullDescription}}
+ {{- end}}
+ {{- if .Enum }}
+ {{- $fullDescription = printf "Enum: %v. %v" (json .Enum) $fullDescription}}
+ {{- end }}
+ {{ camelize .Name }}Description := `{{ $fullDescription }}`
+{{ end }}
+
+{{ define "flagnamevar" }}
+ {{- $flagNameVar := printf "%vFlagName" (camelize .Name) }}
+ var {{ $flagNameVar }} string
+ if cmdPrefix == "" {
+ {{ $flagNameVar }} = "{{ .Name }}"
+	} else {
+ {{ $flagNameVar }} = fmt.Sprintf("%v.{{ .Name }}", cmdPrefix)
+ }
+{{ end }}
+
+{{ define "flagdefaultvar" }}
+ {{ $defaultVar := printf "%vFlagDefault" (camelize .Name) }}
+ var {{ $defaultVar}} {{ .GoType }} {{ if .Default }}= {{ printf "%#v" .Default }}{{ end }}
+{{ end }}
+
+{{/* Not used. The CLI does not mark flags as required; required flags will be checked by validation in the future */}}
+{{/* {{ define "requiredregistrator" }}
+ if err := cmd.MarkPersistentFlagRequired({{ camelize .Name }}FlagName); err != nil{
+ return err
+ }
+{{ end }} */}}
+
+{{ define "enumcompletion" }} {{/*only used for primitive types. completion type is always string.*/}}
+{{ if .Enum }}
+if err := cmd.RegisterFlagCompletionFunc({{ camelize .Name }}FlagName,
+ func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+ var res []string
+ if err := json.Unmarshal([]byte(`{{ json .Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ return res, cobra.ShellCompDirectiveDefault
+ }); err != nil{
+ return err
+}
+{{ end }}
+{{ end }}
+
+{{/* intended to be used on struct GenSchema with .IsPrimitive */}}
+{{ define "primitiveregistrator" }}
+ {{- if or (eq .GoType "int64") (eq .GoType "int32") (eq .GoType "string") (eq .GoType "float64") (eq .GoType "float32") (eq .GoType "bool") }}
+ {{ template "flagdescriptionvar" . }}
+ {{ template "flagnamevar" . }}
+ {{ template "flagdefaultvar" . }}
+ _ = cmd.PersistentFlags().{{ pascalize .GoType }}({{ camelize .Name }}FlagName, {{ camelize .Name }}FlagDefault, {{ (camelize .Name) }}Description)
+ {{ template "enumcompletion" . }}
+ {{- else if or (eq .GoType "strfmt.DateTime") (eq .GoType "strfmt.UUID") (eq .GoType "strfmt.ObjectId") }} {{/* read as string */}}
+ {{ template "flagdescriptionvar" . }}
+ {{ template "flagnamevar" . }}
+ _ = cmd.PersistentFlags().String({{ camelize .Name }}FlagName, "", {{ (camelize .Name) }}Description)
+ {{ template "enumcompletion" . }}
+ {{- else }}
+ // warning: primitive {{.Name}} {{.GoType }} is not supported by go-swagger cli yet
+ {{- end }}
+{{ end }}
+
+{{ define "arrayregistrator" }}
+ {{- if or (eq .GoType "[]int64") (eq .GoType "[]int32") (eq .GoType "[]string") (eq .GoType "[]float64") (eq .GoType "[]float32") (eq .GoType "[]bool") }}
+ {{ template "flagdescriptionvar" . }}
+ {{ template "flagnamevar" . }}
+ {{ template "flagdefaultvar" . }}
+ _ = cmd.PersistentFlags().{{ pascalize .GoType }}Slice({{ camelize .Name }}FlagName, {{ camelize .Name }}FlagDefault, {{ (camelize .Name) }}Description)
+ {{ template "enumcompletion" . }}
+ {{- else if or (eq .GoType "[]strfmt.DateTime") (eq .GoType "[]strfmt.UUID") (eq .GoType "[]strfmt.ObjectId") }} {{/* read as string */}}
+ {{ template "flagdescriptionvar" . }}
+ {{ template "flagnamevar" . }}
+ _ = cmd.PersistentFlags().StringSlice({{ camelize .Name }}FlagName, []string{}, {{ (camelize .Name) }}Description)
+ {{- else }}
+ // warning: array {{.Name}} {{.GoType }} is not supported by go-swagger cli yet
+ {{- end }}
+{{ end }}
+
+
+{{/* each body parameter gets a string flag to input a raw JSON string */}}
+{{ define "modelparamstringregistrator" }}
+ {{ template "flagnamevar" . }}
+ _ = cmd.PersistentFlags().String({{ camelize .Name }}FlagName, "", "Optional json string for [{{ .Name }}]. {{ .Description }}")
+{{ end }}
+
+{{ define "modelparamregistrator" }} {{/* register a param that has a schema */}}
+ // add flags for body {{/*use go type as the flag prefix. There is no good way to determine the original str case in spec*/}}
+ if err := registerModel{{ pascalize (dropPackage .GoType) }}Flags(0, "{{ camelize (dropPackage .GoType) }}", cmd); err != nil {
+ return err
+ }
+{{ end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/retrieveflag.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/retrieveflag.gotmpl
new file mode 100644
index 000000000..a1ff1e5de
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/retrieveflag.gotmpl
@@ -0,0 +1,59 @@
+{{/*util functions to retrieve flags*/}}
+
+{{ define "primitiveretriever" }}
+ {{- $flagValueVar := printf "%vFlagValue" (camelize .Name) }}
+ {{- $flagNameVar := printf "%vFlagName" (camelize .Name )}}
+ {{- if or (eq .GoType "int64") (eq .GoType "int32") (eq .GoType "string") (eq .GoType "float64") (eq .GoType "float32") (eq .GoType "bool") }}
+ {{ template "flagnamevar" . }}
+ {{ $flagValueVar }}, err := cmd.Flags().Get{{pascalize .GoType}}({{ $flagNameVar }})
+ if err != nil{
+ return err, false
+ }
+	{{- /* receiver by convention is m for CLI */}}
+ m.{{ pascalize .Name }} = {{- if .IsNullable }}&{{- end }}{{ $flagValueVar }}
+ {{- else if or (eq .GoType "strfmt.DateTime") (eq .GoType "strfmt.ObjectId") (eq .GoType "strfmt.UUID" ) }} {{/*Get flag value as string, then parse it*/}}
+ {{/*Many of the strfmt types can be added here*/}}
+ {{ template "flagnamevar" . }}
+ {{ $flagValueVar }}Str, err := cmd.Flags().GetString({{ $flagNameVar }})
+ if err != nil{
+ return err, false
+ }
+ var {{ $flagValueVar }} {{ .GoType }}
+ if err := {{ $flagValueVar }}.UnmarshalText([]byte({{ $flagValueVar }}Str)); err != nil{
+ return err, false
+ }
+ m.{{ pascalize .Name }} = {{- if .IsNullable }}&{{- end }}{{ $flagValueVar }}
+ {{- else }}
+ // warning: primitive {{.Name}} {{.GoType }} is not supported by go-swagger cli yet
+ {{- end }}
+{{ end }}
+
+{{ define "arrayretriever" }}
+ {{- $flagValueVar := printf "%vFlagValues" (camelize .Name) }}
+ {{- $flagNameVar := printf "%vFlagName" (camelize .Name )}}
+ {{- if or (eq .GoType "[]int64") (eq .GoType "[]int32") (eq .GoType "[]string") (eq .GoType "[]float64") (eq .GoType "[]float32") (eq .GoType "[]bool") }}
+ {{ template "flagnamevar" . }}
+ {{ $flagValueVar }}, err := cmd.Flags().Get{{pascalize .GoType}}Slice({{ $flagNameVar }})
+ if err != nil{
+ return err, false
+ }
+	{{- /* receiver by convention is m for CLI */}}
+ m.{{ pascalize .Name }} = {{ $flagValueVar }}
+ {{- else if or (eq .GoType "[]strfmt.DateTime") (eq .GoType "[]strfmt.ObjectId") (eq .GoType "[]strfmt.UUID") }} {{/*Get flag value as string, then parse it*/}}
+ {{ template "flagnamevar" . }}
+ {{ $flagValueVar }}Str, err := cmd.Flags().GetStringSlice({{ $flagNameVar }})
+ if err != nil{
+ return err, false
+ }
+
+ {{ $flagValueVar }} := make({{ .GoType }}, len({{ $flagValueVar }}Str))
+ for i, v := range {{ $flagValueVar }}Str {
+ if err := {{ $flagValueVar }}[i].UnmarshalText([]byte(v)); err != nil{
+ return err, false
+ }
+ }
+ m.{{ pascalize .Name }} = {{- if .IsNullable }}&{{- end }}{{ $flagValueVar }}
+ {{- else }}
+ // warning: array {{.Name}} {{.GoType }} is not supported by go-swagger cli yet
+ {{- end }}
+{{ end }} \ No newline at end of file
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/schema.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/schema.gotmpl
new file mode 100644
index 000000000..2dc42aebc
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/cli/schema.gotmpl
@@ -0,0 +1,193 @@
+{{/*util functions to generate register and retrieve functions for a model*/}}
+
+{{ define "modelschemacli" }}
+	{{/*some guards to prevent rendering unsupported model types. TODO: remove this guard*/}}
+ {{if or .IsPrimitive .IsComplexObject }}
+ {{ template "modelschemacliinternal" . }}
+ {{ else }}
+ // Name: [{{.Name}}], Type:[{{ .GoType }}], register and retrieve functions are not rendered by go-swagger cli
+ {{ end }}
+{{ end }}
+
+{{/*since register and retrieve are the same for properties and allOf members, share them here*/}}
+{{ define "propertyregistor" }}
+ {{- if .IsPrimitive }}
+ {{ template "primitiveregistrator" . }}
+ {{- else if .IsArray }}
+ // warning: {{.Name}} {{ .GoType }} array type is not supported by go-swagger cli yet
+ {{- else if .IsMap }}
+ // warning: {{.Name}} {{ .GoType }} map type is not supported by go-swagger cli yet
+ {{- else if .IsComplexObject }} {{/* struct case */}}
+ {{ template "flagnamevar" . }}
+ if err := registerModel{{pascalize (dropPackage .GoType) }}Flags(depth + 1, {{ camelize .Name }}FlagName, cmd); err != nil{
+ return err
+ }
+ {{- else }}
+	// warning: {{.Name}} {{ .GoType }} unknown type is not supported by go-swagger cli yet
+ {{- end }}
+{{ end }}
+
+{{ define "propertyretriever" }}
+ {{- $flagNameVar := printf "%vFlagName" (camelize .Name) }}
+ {{- $flagValueVar := printf "%vFlagValue" (camelize .Name) }}
+ {{ $flagNameVar }} := fmt.Sprintf("%v.{{ .Name }}", cmdPrefix)
+ if cmd.Flags().Changed({{ $flagNameVar }}) {
+ {{- if .IsPrimitive }}
+ {{ template "primitiveretriever" . }}
+ retAdded = true
+ {{- else if .IsArray }}
+ // warning: {{ .Name }} array type {{ .GoType }} is not supported by go-swagger cli yet
+ {{- else if .IsMap }}
+ // warning: {{ .Name }} map type {{ .GoType }} is not supported by go-swagger cli yet
+ {{- else if .IsComplexObject }}
+ // info: complex object {{.Name}} {{.GoType}} is retrieved outside this Changed() block
+ {{- else }}
+	// warning: {{.Name}} {{ .GoType }} unknown type is not supported by go-swagger cli yet
+ {{- end }}
+ }
+ {{- if and .IsComplexObject (not .IsArray) (not .IsMap) (not .IsStream) }}
+ {{ $flagValueVar }} := m.{{pascalize .Name}}
+ if swag.IsZero({{ $flagValueVar }}){
+ {{ $flagValueVar }} = {{if .IsNullable }}&{{end}}{{if containsPkgStr .GoType}}{{ .GoType }}{{else}}{{ .Pkg }}.{{.GoType}}{{ end }}{}
+ }
+ {{/* always lift the payload to pointer and pass to model retrieve function. If .GoType has pkg str, use it, else use .Pkg+.GoType */}}
+ err, {{camelize .Name }}Added := retrieveModel{{pascalize (dropPackage .GoType) }}Flags(depth + 1, {{if not .IsNullable }}&{{end}}{{ $flagValueVar }}, {{ $flagNameVar }}, cmd)
+ if err != nil{
+ return err, false
+ }
+ retAdded = retAdded || {{camelize .Name }}Added
+ if {{camelize .Name }}Added {
+ m.{{pascalize .Name}} = {{ $flagValueVar }}
+ }
+ {{- end }}
+{{ end }}
+
+{{ define "modelschemacliinternal" }} {{/*used by model definition and in params model*/}}
+{{- $modelName := .Name }}
+{{/*model package is filled by generator*/}}
+{{ $modelPkg := toPackageName .Pkg}}
+{{ $modelType := .GoType }}
+
+// register flags to command
+func registerModel{{pascalize .Name}}Flags(depth int, cmdPrefix string, cmd *cobra.Command) error {
+ {{ range .AllOf }}
+ {{- if not .IsAnonymous }}{{/* named type composition */}}
+ {{ if or .IsPrimitive .IsComplexObject }}
+ // register embedded {{ .GoType }} flags
+	{{/*defer allOf registration to the model's register method. embedded types should not lift cmdPrefix */}}
+ if err := registerModel{{ pascalize (dropPackage .GoType) }}Flags(depth, cmdPrefix, cmd); err != nil{
+ return err
+ }
+ {{ else }}
+ // {{ .Name }} {{ .GoType }} register is skipped
+ {{ end }}
+ {{ else }}{{/*inline definition. assume only properties are used*/}}
+ // register anonymous fields for {{.Name}}
+ {{ $anonName := .Name }}
+ {{ range .Properties }}
+ if err := register{{ pascalize $modelName }}Anon{{pascalize $anonName }}{{ pascalize .Name }}(depth, cmdPrefix, cmd); err != nil{
+ return err
+ }
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ range .Properties }}
+ if err := register{{ pascalize $modelName }}{{ pascalize .Name }}(depth, cmdPrefix, cmd); err != nil{
+ return err
+ }
+ {{ end }}
+ return nil
+}
+
+{{ range .AllOf }}
+	{{- if .IsAnonymous }}{{/* inline definition. schema case is deferred. */}}
+// inline definition name {{ .Name }}, type {{.GoType}}
+ {{ $anonName := .Name }}
+ {{ range .Properties }}
+func register{{ pascalize $modelName }}Anon{{pascalize $anonName }}{{ pascalize .Name }}(depth int, cmdPrefix string, cmd *cobra.Command) error {
+ if depth > maxDepth {
+ return nil
+ }
+ {{ template "propertyregistor" . }}
+ return nil
+}
+ {{ end }}
+ {{ end }}
+{{ end }}
+
+{{/*register functions for each fields in this model */}}
+{{ range .Properties }}
+func register{{ pascalize $modelName }}{{ pascalize .Name }}(depth int, cmdPrefix string, cmd *cobra.Command) error{
+ if depth > maxDepth {
+ return nil
+ }
+ {{ template "propertyregistor" .}}
+ return nil
+}
+{{ end }} {{/*Properties*/}}
+
+// retrieve flags from the command and set values in the model. Returns true if any flag passed by the user filled a model field.
+func retrieveModel{{pascalize $modelName }}Flags(depth int, m *{{if containsPkgStr .GoType}}{{ .GoType }}{{else}}{{ .Pkg }}.{{.GoType}}{{ end }}, cmdPrefix string, cmd *cobra.Command) (error, bool) {
+ retAdded := false
+ {{ range .AllOf }}
+ {{- if not .IsAnonymous }}{{/* named type composition */}}
+ {{ if or .IsPrimitive .IsComplexObject }}
+ // retrieve model {{.GoType}}
+ err, {{camelize .Name }}Added := retrieveModel{{ pascalize (dropPackage .GoType) }}Flags(depth, &m.{{pascalize (dropPackage .GoType) }}, cmdPrefix, cmd)
+ if err != nil{
+ return err, false
+ }
+ retAdded = retAdded || {{camelize .Name }}Added
+ {{ else }} {{/*inline anonymous case*/}}
+
+ {{ end }}
+ {{- else }}
+ // retrieve allOf {{.Name}} fields
+ {{ $anonName := .Name }}
+ {{ range .Properties }}
+ err, {{camelize .Name}}Added := retrieve{{ pascalize $modelName }}Anon{{pascalize $anonName }}{{ pascalize .Name }}Flags(depth, m, cmdPrefix, cmd)
+ if err != nil{
+ return err, false
+ }
+ retAdded = retAdded || {{ camelize .Name }}Added
+ {{ end }}
+ {{- end }}
+ {{ end }}
+ {{ range .Properties }}
+ err, {{ camelize .Name }}Added := retrieve{{pascalize $modelName }}{{pascalize .Name }}Flags(depth, m, cmdPrefix, cmd)
+ if err != nil{
+ return err, false
+ }
+ retAdded = retAdded || {{ camelize .Name }}Added
+ {{ end }}
+ return nil, retAdded
+}
+
+{{ range .AllOf }}
+	{{- if .IsAnonymous }}{{/* inline definition. schema case is deferred. */}}
+// define retrieve functions for fields for inline definition name {{ .Name }}
+ {{ $anonName := .Name }}
+ {{ range .Properties }} {{/*anonymous fields will be registered directly on parent model*/}}
+func retrieve{{ pascalize $modelName }}Anon{{pascalize $anonName }}{{ pascalize .Name }}Flags(depth int, m *{{if containsPkgStr $modelType}}{{ $modelType }}{{else}}{{ $modelPkg }}.{{$modelType}}{{ end }},cmdPrefix string, cmd *cobra.Command) (error,bool) {
+ if depth > maxDepth {
+ return nil, false
+ }
+ retAdded := false
+ {{ template "propertyretriever" . }}
+ return nil, retAdded
+}
+ {{ end }}
+ {{ end }}
+{{ end }}
+
+{{ range .Properties }}
+func retrieve{{pascalize $modelName }}{{pascalize .Name }}Flags(depth int, m *{{if $modelPkg}}{{$modelPkg}}.{{ dropPackage $modelType }}{{else}}{{ $modelType }}{{end}}, cmdPrefix string, cmd *cobra.Command) (error, bool) {
+ if depth > maxDepth {
+ return nil, false
+ }
+ retAdded := false
+ {{ template "propertyretriever" . }}
+ return nil, retAdded
+}
+{{ end }} {{/*properties*/}}
+{{ end }} {{/*define*/}} \ No newline at end of file
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/client/client.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/client.gotmpl
new file mode 100644
index 000000000..3d01e9dcc
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/client.gotmpl
@@ -0,0 +1,127 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Name }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ "fmt"
+ "io"
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/validate"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+// New creates a new {{ humanize .Name }} API client.
+func New(transport runtime.ClientTransport, formats strfmt.Registry) ClientService {
+ return &Client{transport: transport, formats: formats}
+}
+
+/*
+Client {{ if .Summary }}{{ .Summary }}{{ if .Description }}
+
+{{ blockcomment .Description }}{{ end }}{{ else if .Description}}{{ blockcomment .Description }}{{ else }}for {{ humanize .Name }} API{{ end }}
+*/
+type Client struct {
+ transport runtime.ClientTransport
+ formats strfmt.Registry
+}
+
+// ClientOption is the option for Client methods
+type ClientOption func(*runtime.ClientOperation)
+
+// ClientService is the interface for Client methods
+type ClientService interface {
+ {{ range .Operations }}
+ {{ pascalize .Name }}(params *{{ pascalize .Name }}Params{{ if .Authorized }}, authInfo runtime.ClientAuthInfoWriter{{end}}{{ if .HasStreamingResponse }}, writer io.Writer{{ end }}, opts ...ClientOption) {{ if .SuccessResponse }}({{ range .SuccessResponses }}*{{ pascalize .Name }}, {{ end }}{{ end }}error{{ if .SuccessResponse }}){{ end }}
+ {{ end }}
+
+ SetTransport(transport runtime.ClientTransport)
+}
+
+{{ range .Operations }}
+/*
+ {{ pascalize .Name }} {{ if .Summary }}{{ pluralizeFirstWord (humanize .Summary) }}{{ if .Description }}
+
+ {{ blockcomment .Description }}{{ end }}{{ else if .Description}}{{ blockcomment .Description }}{{ else }}{{ humanize .Name }} API{{ end }}
+*/
+func (a *Client) {{ pascalize .Name }}(params *{{ pascalize .Name }}Params{{ if .Authorized }}, authInfo runtime.ClientAuthInfoWriter{{end}}{{ if .HasStreamingResponse }}, writer io.Writer{{ end }}, opts ...ClientOption) {{ if .SuccessResponse }}({{ range .SuccessResponses }}*{{ pascalize .Name }}, {{ end }}{{ end }}error{{ if .SuccessResponse }}){{ end }} {
+ // TODO: Validate the params before sending
+ if params == nil {
+ params = New{{ pascalize .Name }}Params()
+ }
+ op := &runtime.ClientOperation{
+ ID: {{ printf "%q" .Name }},
+ Method: {{ printf "%q" .Method }},
+ PathPattern: {{ printf "%q" .Path }},
+ ProducesMediaTypes: {{ printf "%#v" .ProducesMediaTypes }},
+ ConsumesMediaTypes: {{ printf "%#v" .ConsumesMediaTypes }},
+ Schemes: {{ printf "%#v" .Schemes }},
+ Params: params,
+ Reader: &{{ pascalize .Name }}Reader{formats: a.formats{{ if .HasStreamingResponse }}, writer: writer{{ end }}},{{ if .Authorized }}
+ AuthInfo: authInfo,{{ end}}
+ Context: params.Context,
+ Client: params.HTTPClient,
+ }
+ for _, opt := range opts {
+ opt(op)
+ }
+ {{ $length := len .SuccessResponses }}
+ {{ if .SuccessResponse }}result{{else}}_{{ end }}, err := a.transport.Submit(op)
+ if err != nil {
+ return {{ if .SuccessResponse }}{{ padSurround "nil" "nil" 0 $length }}, {{ end }}err
+ }
+ {{- if .SuccessResponse }}
+ {{- if eq $length 1 }}
+ success, ok := result.(*{{ pascalize .SuccessResponse.Name }})
+ if ok {
+ return success, nil
+ }
+ // unexpected success response
+ {{- if .DefaultResponse }}{{/* if a default response is provided, fill this and return an error */}}
+ unexpectedSuccess := result.(*{{ pascalize .DefaultResponse.Name }})
+ return nil, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code())
+ {{- else }}
+ // safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue
+ msg := fmt.Sprintf("unexpected success response for {{ .Name }}: API contract not enforced by server. Client expected to get an error, but got: %T", result)
+ panic(msg)
+ {{- end }}
+ {{- else }}{{/* several possible success responses */}}
+ switch value := result.(type) {
+ {{- range $i, $v := .SuccessResponses }}
+ case *{{ pascalize $v.Name }}:
+ return {{ padSurround "value" "nil" $i $length }}, nil
+ {{- end }}
+ }
+ {{- if .DefaultResponse }}{{/* if a default response is provided, fill this and return an error */}}
+ // unexpected success response
+ unexpectedSuccess := result.(*{{ pascalize .DefaultResponse.Name }})
+ return {{ padSurround "nil" "nil" 0 $length }}, runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code())
+ {{- else }}
+ // safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue
+ msg := fmt.Sprintf("unexpected success response for {{ $.Name }}: API contract not enforced by server. Client expected to get an error, but got: %T", result)
+ panic(msg)
+ {{- end }}
+ {{- end }}
+ {{- else }}
+ return nil
+ {{- end }}
+}
+{{- end }}
+
+// SetTransport changes the transport on the client
+func (a *Client) SetTransport(transport runtime.ClientTransport) {
+ a.transport = transport
+} \ No newline at end of file
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/client/facade.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/facade.gotmpl
new file mode 100644
index 000000000..287a75f92
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/facade.gotmpl
@@ -0,0 +1,129 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+
+import (
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ httptransport "github.com/go-openapi/runtime/client"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+// Default {{ humanize .Name }} HTTP client.
+var Default = NewHTTPClient(nil)
+
+const (
+ // DefaultHost is the default Host
+ // found in Meta (info) section of spec file
+ DefaultHost string = {{ printf "%#v" .Host }}
+ // DefaultBasePath is the default BasePath
+ // found in Meta (info) section of spec file
+ DefaultBasePath string = {{ printf "%#v" .BasePath }}
+)
+
+// DefaultSchemes are the default schemes found in Meta (info) section of spec file
+var DefaultSchemes = {{ printf "%#v" .Schemes }}
+
+// NewHTTPClient creates a new {{ humanize .Name }} HTTP client.
+func NewHTTPClient(formats strfmt.Registry) *{{ pascalize .Name }} {
+ return NewHTTPClientWithConfig(formats, nil)
+}
+
+// NewHTTPClientWithConfig creates a new {{ humanize .Name }} HTTP client,
+// using a customizable transport config.
+func NewHTTPClientWithConfig(formats strfmt.Registry, cfg *TransportConfig) *{{ pascalize .Name }} {
+ // ensure nullable parameters have default
+ if cfg == nil {
+ cfg = DefaultTransportConfig()
+ }
+
+ // create transport and client
+ transport := httptransport.New(cfg.Host, cfg.BasePath, cfg.Schemes)
+ return New(transport, formats)
+}
+
+// New creates a new {{ humanize .Name }} client
+func New(transport runtime.ClientTransport, formats strfmt.Registry) *{{ pascalize .Name }} {
+ // ensure nullable parameters have default
+ if formats == nil {
+ formats = strfmt.Default
+ }
+
+ cli := new({{ pascalize .Name }})
+ cli.Transport = transport
+ {{- range .OperationGroups }}
+ cli.{{ pascalize .Name }} = {{ .PackageAlias }}.New(transport, formats)
+ {{- end }}
+ return cli
+}
+
+// DefaultTransportConfig creates a TransportConfig with the
+// default settings taken from the meta section of the spec file.
+func DefaultTransportConfig() *TransportConfig {
+ return &TransportConfig {
+ Host: DefaultHost,
+ BasePath: DefaultBasePath,
+ Schemes: DefaultSchemes,
+ }
+}
+
+// TransportConfig contains the transport related info,
+// found in the meta section of the spec file.
+type TransportConfig struct {
+ Host string
+ BasePath string
+ Schemes []string
+}
+
+// WithHost overrides the default host,
+// provided by the meta section of the spec file.
+func (cfg *TransportConfig) WithHost(host string) *TransportConfig {
+ cfg.Host = host
+ return cfg
+}
+
+// WithBasePath overrides the default basePath,
+// provided by the meta section of the spec file.
+func (cfg *TransportConfig) WithBasePath(basePath string) *TransportConfig {
+ cfg.BasePath = basePath
+ return cfg
+}
+
+// WithSchemes overrides the default schemes,
+// provided by the meta section of the spec file.
+func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig {
+ cfg.Schemes = schemes
+ return cfg
+}
+
+// {{ pascalize .Name }} is a client for {{ humanize .Name }}
+type {{ pascalize .Name }} struct {
+ {{ range .OperationGroups }}
+ {{ pascalize .Name }} {{ .PackageAlias }}.ClientService
+ {{ end }}
+ Transport runtime.ClientTransport
+}
+
+
+// SetTransport changes the transport on the client and all its subresources
+func (c *{{pascalize .Name}}) SetTransport(transport runtime.ClientTransport) {
+ c.Transport = transport
+ {{- range .OperationGroups }}
+ c.{{ pascalize .Name }}.SetTransport(transport)
+ {{- end }}
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/client/parameter.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/parameter.gotmpl
new file mode 100644
index 000000000..3546fa273
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/parameter.gotmpl
@@ -0,0 +1,406 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "time"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ cr "github.com/go-openapi/runtime/client"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/validate"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+// New{{ pascalize .Name }}Params creates a new {{ pascalize .Name }}Params object,
+// with the default timeout for this client.
+//
+// Default values are not hydrated, since defaults are normally applied by the API server side.
+//
+// To enforce default values in parameter, use SetDefaults or WithDefaults.
+func New{{ pascalize .Name }}Params() *{{ pascalize .Name }}Params {
+ return &{{ pascalize .Name}}Params{
+ {{ camelize .TimeoutName }}: cr.DefaultTimeout,
+ }
+}
+
+// New{{ pascalize .Name }}ParamsWithTimeout creates a new {{ pascalize .Name }}Params object
+// with the ability to set a timeout on a request.
+func New{{ pascalize .Name }}ParamsWithTimeout(timeout time.Duration) *{{ pascalize .Name }}Params {
+ return &{{ pascalize .Name}}Params{
+ {{ camelize .TimeoutName }}: timeout,
+ }
+}
+
+// New{{ pascalize .Name }}ParamsWithContext creates a new {{ pascalize .Name }}Params object
+// with the ability to set a context for a request.
+func New{{ pascalize .Name }}ParamsWithContext(ctx context.Context) *{{ pascalize .Name }}Params {
+ return &{{ pascalize .Name}}Params{
+ Context: ctx,
+ }
+}
+
+// New{{ pascalize .Name }}ParamsWithHTTPClient creates a new {{ pascalize .Name }}Params object
+// with the ability to set a custom HTTPClient for a request.
+func New{{ pascalize .Name }}ParamsWithHTTPClient(client *http.Client) *{{ pascalize .Name }}Params {
+ return &{{ pascalize .Name}}Params{
+ HTTPClient: client,
+ }
+}
+
+/* {{ pascalize .Name }}Params contains all the parameters to send to the API endpoint
+ for the {{ humanize .Name }} operation.
+
+   Typically these are written to an http.Request.
+*/
+type {{ pascalize .Name }}Params struct {
+ {{- range .Params }}
+ {{- if .Description }}
+
+ /* {{ pascalize .Name }}.
+
+ {{ blockcomment .Description }}
+ {{- if or .SwaggerFormat .Default }}
+ {{ print "" }}
+ {{- if .SwaggerFormat }}
+ Format: {{ .SwaggerFormat }}
+ {{- end }}
+ {{- if .Default }}
+ Default: {{ json .Default }}
+ {{- end }}
+ {{- end }}
+ */
+ {{- else }}
+
+ // {{ pascalize .Name }}.
+ {{- if or .SwaggerFormat .Default }}
+ //
+ {{- if .SwaggerFormat }}
+ // Format: {{ .SwaggerFormat }}
+ {{- end }}
+ {{- if .Default }}
+ // Default: {{ json .Default }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{ pascalize .ID }} {{ if and (not .IsArray) (not .IsMap) (not .HasDiscriminator) (not .IsInterface) (not .IsStream) (or .IsNullable ) }}*{{ end }}{{ if not .IsFileParam }}{{ .GoType }}{{ else }}runtime.NamedReadCloser{{ end }}
+ {{- end }}
+
+ {{ camelize .TimeoutName }} time.Duration
+ Context context.Context
+ HTTPClient *http.Client
+}
+
+// WithDefaults hydrates default values in the {{ humanize .Name }} params (not the query body).
+//
+// All values with no default are reset to their zero value.
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) WithDefaults() *{{ pascalize .Name }}Params {
+ {{ .ReceiverName }}.SetDefaults()
+ return {{ .ReceiverName }}
+}
+
+// SetDefaults hydrates default values in the {{ humanize .Name }} params (not the query body).
+//
+// All values with no default are reset to their zero value.
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) SetDefaults() {
+{{- if .Params.HasSomeDefaults }}
+ var (
+ {{- range .Params }}
+ {{- if .HasDefault }}
+ {{- if .IsFileParam }}
+ // NOTE: no default supported for file parameter {{ .ID }}
+ {{- else if .IsStream }}
+ // NOTE: no default supported for stream parameter {{ .ID }}
+ {{- else if not .IsBodyParam }}
+ {{ template "simpleschemaDefaultsvar" . }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ )
+ {{- range .Params }}
+ {{- if and .HasDefault (not .IsFileParam) (not .IsStream) (not .IsBodyParam) }}
+ {{ template "simpleschemaDefaultsinit" . }}
+ {{- end }}
+ {{- end }}
+
+ val := {{ pascalize .Name }}Params{
+ {{- range .Params }}
+ {{- if and .HasDefault (not .IsFileParam) (not .IsStream) (not .IsBodyParam) }}
+ {{ pascalize .ID }}: {{ if and (not .IsArray) (not .IsMap) (not .HasDiscriminator) (or .IsNullable ) }}&{{ end }}{{ varname .ID }}Default,
+ {{- end }}
+ {{- end }}
+ }
+
+ val.{{ camelize .TimeoutName }} = {{ .ReceiverName }}.{{ camelize .TimeoutName }}
+ val.Context = {{ .ReceiverName }}.Context
+ val.HTTPClient = {{ .ReceiverName }}.HTTPClient
+ *{{ .ReceiverName }} = val
+{{- else }}
+ // no default values defined for this parameter
+{{- end }}
+}
+
+// With{{ pascalize .TimeoutName }} adds the timeout to the {{ humanize .Name }} params
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) With{{ pascalize .TimeoutName }}(timeout time.Duration) *{{ pascalize .Name }}Params {
+ {{ .ReceiverName }}.Set{{ pascalize .TimeoutName }}(timeout)
+ return {{ .ReceiverName }}
+}
+
+// Set{{ pascalize .TimeoutName }} adds the timeout to the {{ humanize .Name }} params
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) Set{{ pascalize .TimeoutName }}(timeout time.Duration) {
+ {{ .ReceiverName }}.{{ camelize .TimeoutName }} = timeout
+}
+
+// WithContext adds the context to the {{ humanize .Name }} params
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) WithContext(ctx context.Context) *{{ pascalize .Name }}Params {
+ {{ .ReceiverName }}.SetContext(ctx)
+ return {{ .ReceiverName }}
+}
+
+// SetContext adds the context to the {{ humanize .Name }} params
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) SetContext(ctx context.Context) {
+ {{ .ReceiverName }}.Context = ctx
+}
+
+// WithHTTPClient adds the HTTPClient to the {{ humanize .Name }} params
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) WithHTTPClient(client *http.Client) *{{ pascalize .Name }}Params {
+ {{ .ReceiverName }}.SetHTTPClient(client)
+ return {{ .ReceiverName }}
+}
+
+// SetHTTPClient adds the HTTPClient to the {{ humanize .Name }} params
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) SetHTTPClient(client *http.Client) {
+ {{ .ReceiverName }}.HTTPClient = client
+}
+
+{{- range .Params }}
+
+// With{{ pascalize .ID }} adds the {{ varname .Name }} to the {{ humanize $.Name }} params
+func ({{ $.ReceiverName }} *{{ pascalize $.Name }}Params) With{{ pascalize .ID }}({{ varname .Name }} {{ if and (not .IsArray) (not .IsMap) (not .HasDiscriminator) (not .IsStream) (or .IsNullable ) }}*{{ end }}{{ if not .IsFileParam }}{{ .GoType }}{{ else }}runtime.NamedReadCloser{{ end }}) *{{ pascalize $.Name }}Params {
+ {{ $.ReceiverName }}.Set{{ pascalize .ID }}({{ varname .Name }})
+ return {{ .ReceiverName }}
+}
+
+// Set{{ pascalize .ID }} adds the {{ camelize .Name }} to the {{ humanize $.Name }} params
+func ({{ $.ReceiverName }} *{{ pascalize $.Name }}Params) Set{{ pascalize .ID }}({{ varname .Name }} {{ if and (not .IsArray) (not .IsMap) (not .HasDiscriminator) (not .IsStream) (or .IsNullable ) }}*{{ end }}{{ if not .IsFileParam }}{{ .GoType }}{{ else }}runtime.NamedReadCloser{{ end }}) {
+ {{ $.ReceiverName }}.{{ pascalize .ID }} = {{ varname .Name }}
+}
+{{- end }}
+
+// WriteToRequest writes these params to a swagger request
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
+
+ if err := r.SetTimeout({{ .ReceiverName }}.{{ camelize .TimeoutName }}); err != nil {
+ return err
+ }
+ var res []error
+ {{- range .Params }}
+ {{- if not (or .IsArray .IsMap .IsBodyParam) }}
+ {{- if and .IsNullable (not .AllowEmptyValue) }}
+
+ if {{ .ValueExpression }} != nil {
+ {{- end}}
+
+ {{- if .IsQueryParam }}
+
+ // query param {{ .Name }}
+ {{- if .IsNullable }}
+ var qr{{ pascalize .Name }} {{ .GoType }}
+
+ if {{ .ValueExpression }} != nil {
+ qr{{ pascalize .Name }} = *{{ .ValueExpression }}
+ }
+ {{- else }}
+ qr{{ pascalize .Name }} := {{ .ValueExpression }}
+ {{- end}}
+ q{{ pascalize .Name}} := {{ if .Formatter }}{{ .Formatter }}(qr{{ pascalize .Name }}){{ else }}qr{{ pascalize .Name }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+ {{- if not .AllowEmptyValue }}
+ if q{{ pascalize .Name }} != "" {
+ {{- end }}
+
+ if err := r.SetQueryParam({{ printf "%q" .Name }}, q{{ pascalize .Name }}); err != nil {
+ return err
+ }
+ {{- if not .AllowEmptyValue }}
+ }
+ {{- end }}
+
+ {{- else if .IsPathParam }}
+
+ // path param {{ .Name }}
+ if err := r.SetPathParam({{ printf "%q" .Name }}, {{ if .Formatter }}{{ .Formatter }}({{ if .IsNullable }}*{{end}}{{ .ValueExpression }}){{ else }}{{ if and (not .IsCustomFormatter) .IsNullable }}*{{end}}{{ .ValueExpression }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}); err != nil {
+ return err
+ }
+
+ {{- else if .IsHeaderParam }}
+
+ // header param {{ .Name }}
+ if err := r.SetHeaderParam({{ printf "%q" .Name }}, {{ if .Formatter }}{{ .Formatter }}({{ if .IsNullable }}*{{end}}{{ .ValueExpression }}){{ else }}{{ if and (not .IsCustomFormatter) .IsNullable }}*{{end}}{{ .ValueExpression }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}); err != nil {
+ return err
+ }
+ {{- else if .IsFormParam }}
+ {{- if .IsFileParam }}
+ {{- if .IsNullable }}
+
+ if {{ .ValueExpression }} != nil {
+ {{- end }}
+ // form file param {{ .Name }}
+ if err := r.SetFileParam({{ printf "%q" .Name }}, {{ .ValueExpression }}); err != nil {
+ return err
+ }
+ {{- if .IsNullable}}
+ }
+ {{- end }}
+ {{- else }}
+
+ // form param {{ .Name }}
+ {{- if .IsNullable }}
+ var fr{{ pascalize .Name }} {{ .GoType }}
+ if {{ .ValueExpression }} != nil {
+ fr{{ pascalize .Name }} = *{{ .ValueExpression }}
+ }
+ {{- else }}
+ fr{{ pascalize .Name }} := {{ .ValueExpression }}
+ {{- end}}
+ f{{ pascalize .Name}} := {{ if .Formatter }}{{ .Formatter }}(fr{{ pascalize .Name }}){{ else }}fr{{ pascalize .Name }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+ {{- if not .AllowEmptyValue }}
+ if f{{ pascalize .Name }} != "" {
+ {{- end }}
+ if err := r.SetFormParam({{ printf "%q" .Name }}, f{{ pascalize .Name }}); err != nil {
+ return err
+ }
+ {{- if not .AllowEmptyValue }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- if and .IsNullable (not .AllowEmptyValue) }}
+ }
+ {{- end }}
+ {{- else if .IsArray }}
+
+ {{- if not .IsBodyParam }}
+
+ if {{ .ValueExpression }} != nil {
+
+ {{- if .Child }}{{/* bind complex parameters (arrays and nested structures) */}}
+
+ // binding items for {{ .Name }}
+ joined{{ pascalize .Name }} := {{ .ReceiverName }}.bindParam{{ pascalize .Name }}(reg)
+ {{- else }}
+ values{{ pascalize .Name }} := {{ if and (not .IsArray) (not .IsStream) (not .IsMap) (.IsNullable) }}*{{end}}{{ .ValueExpression }}
+ joined{{ pascalize .Name}} := swag.JoinByFormat(values{{ pascalize .Name }}, "{{.CollectionFormat}}")
+ {{- end }}
+
+ {{- if .IsQueryParam }}
+
+ // query array param {{ .Name }}
+ if err := r.SetQueryParam({{ printf "%q" .Name }}, joined{{ pascalize .Name }}...); err != nil {
+ return err
+ }
+
+ {{- else if and .IsFormParam }}
+
+ // form array param {{ .Name }}
+ if err := r.SetFormParam({{ printf "%q" .Name }}, joined{{ pascalize .Name }}...); err != nil {
+ return err
+ }
+
+ {{- else if and .IsPathParam }}
+
+ // path array param {{ .Name }}
+ // SetPathParam does not support variadic arguments, since we used JoinByFormat
+ // we can send the first item in the array as it's all the items of the previous
+ // array joined together
+ if len(joined{{ pascalize .Name }}) > 0 {
+ if err := r.SetPathParam({{ printf "%q" .Name }}, joined{{ pascalize .Name }}[0]); err != nil {
+ return err
+ }
+ }
+
+ {{- else if .IsHeaderParam }}
+
+ // header array param {{ .Name }}
+ if len(joined{{ pascalize .Name }}) > 0 {
+ if err := r.SetHeaderParam({{ printf "%q" .Name }}, joined{{ pascalize .Name }}[0]); err != nil {
+ return err
+ }
+ }
+ {{- end }}
+ }
+ {{- end }}
+ {{- end }}
+
+ {{- if .IsBodyParam }}
+ {{- if or .Schema.IsInterface .Schema.IsStream (and .Schema.IsArray .Child) (and .Schema.IsMap .Child) (and .Schema.IsNullable (not .HasDiscriminator)) }}
+ if {{ .ValueExpression }} != nil {
+ {{- end }}
+ if err := r.SetBodyParam({{ .ValueExpression }}); err != nil {
+ return err
+ }
+ {{- if or .Schema.IsInterface .Schema.IsStream (and .Schema.IsArray .Child) (and .Schema.IsMap .Child) (and .Schema.IsNullable (not .HasDiscriminator)) }}
+ }
+ {{- end }}
+ {{- end }}
+
+ {{- end }}
+
+ if len(res) > 0 {
+ return errors.CompositeValidationError(res...)
+ }
+ return nil
+}
+
+{{- range .Params }}
+ {{- if and (not .IsBodyParam) (not .IsFileParam) .IsArray }}
+// bindParam{{ pascalize $.Name }} binds the parameter {{ .Name }}
+func ({{ .ReceiverName }} *{{ pascalize $.Name }}Params) bindParam{{ pascalize .Name }}(formats strfmt.Registry) []string {
+ {{ varname .Child.ValueExpression }}R := {{ if and (not .IsArray) (not .IsStream) (not .IsMap) (.IsNullable) }}*{{end}}{{ .ValueExpression }}
+
+ {{ template "sliceclientparambinder" . }}
+
+ return {{ varname .Child.ValueExpression }}S
+}
+ {{- end }}
+{{- end }}
+{{- define "sliceclientparambinder" }}
+ {{- if .IsArray }}
+ var {{ varname .Child.ValueExpression }}C []string
+ for _, {{ varname .Child.ValueExpression }}IR := range {{ varname .Child.ValueExpression }}R { // explode {{ .GoType }}
+ {{ template "sliceclientparambinder" .Child }}
+ {{ varname .Child.ValueExpression }}C = append({{ varname .Child.ValueExpression }}C, {{ varname .Child.ValueExpression }}IV)
+ }
+
+ // {{ .Child.ItemsDepth }}CollectionFormat: {{ printf "%q" .CollectionFormat }}
+ {{ varname .Child.ValueExpression }}S := swag.JoinByFormat({{ varname .Child.ValueExpression }}C, {{ printf "%q" .CollectionFormat }})
+ {{- if .Child.Parent }}{{/* NOTE: we cannot expect a "multi" CollectionFormat within an inner array */}}
+ {{ varname .Child.ValueExpression }}V := {{ varname .Child.ValueExpression }}S[0]
+ {{- end }}
+
+ {{- else }}
+ {{ varname .ValueExpression }}IV :=
+ {{- if .IsCustomFormatter }}
+ {{- print " " }}{{ varname .ValueExpression }}IR.String()
+ {{- else if eq .GoType "string" }}
+ {{- print " " }}{{ varname .ValueExpression }}IR
+ {{- else if .Formatter }}
+ {{- print " "}}{{ .Formatter }}({{ varname .ValueExpression }}IR)
+ {{- else }}
+ {{- print " " }}fmt.Sprintf("%v", {{ varname .ValueExpression }}IR)
+ {{- end }} // {{ .GoType }} as string
+ {{- end }}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/client/response.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/response.gotmpl
new file mode 100644
index 000000000..d62238540
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/client/response.gotmpl
@@ -0,0 +1,346 @@
+{{- define "clientresponse" }}
+// New{{ pascalize .Name }} creates a {{ pascalize .Name }} with default header values
+func New{{ pascalize .Name }}({{ if eq .Code -1 }}code int{{ end }}{{ if .Schema }}{{ if and (eq .Code -1) .Schema.IsStream }}, {{end}}{{ if .Schema.IsStream }}writer io.Writer{{ end }}{{ end }}) *{{ pascalize .Name }} {
+ {{- if .Headers.HasSomeDefaults }}
+ var (
+ // initialize headers with default values
+ {{- range .Headers }}
+ {{- if .HasDefault }}
+ {{ template "simpleschemaDefaultsvar" . }}
+ {{- end }}
+ {{- end }}
+ )
+ {{- range .Headers }}
+ {{- if .HasDefault }}
+ {{ template "simpleschemaDefaultsinit" . }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ return &{{ pascalize .Name }}{
+ {{- if eq .Code -1 }}
+ _statusCode: code,
+ {{- end }}
+ {{ range .Headers }}
+ {{- if .HasDefault }}
+ {{ pascalize .Name}}: {{ if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) (not .IsStream) .IsNullable }}&{{ end }}{{ varname .ID }}Default,
+ {{- end }}
+ {{- end }}
+ {{- if .Schema }}
+ {{- if .Schema.IsStream }}
+ Payload: writer,
+ {{- end }}
+ {{- end }}
+ }
+}
+
+/* {{ pascalize .Name}} describes a response with status code {{ .Code }}, with default header values.
+
+ {{ if .Description }}{{ blockcomment .Description }}{{else}}{{ pascalize .Name }} {{ humanize .Name }}{{end}}
+ */
+type {{ pascalize .Name }} struct {
+ {{- if eq .Code -1 }}
+ _statusCode int
+ {{- end }}
+ {{- range .Headers }}
+ {{- if .Description }}
+
+ /* {{ blockcomment .Description }}
+ {{- if or .SwaggerFormat .Default }}
+ {{ print "" }}
+ {{- if .SwaggerFormat }}
+ Format: {{ .SwaggerFormat }}
+ {{- end }}
+ {{- if .Default }}
+ Default: {{ json .Default }}
+ {{- end }}
+ {{- end }}
+ */
+ {{- end }}
+ {{ pascalize .Name }} {{ .GoType }}
+ {{- end }}
+ {{- if .Schema }}
+
+ Payload {{ if and (not .Schema.IsBaseType) (not .Schema.IsInterface) .Schema.IsComplexObject (not .Schema.IsStream) }}*{{ end }}{{ if (not .Schema.IsStream) }}{{ .Schema.GoType }}{{ else }}io.Writer{{end}}
+ {{- end }}
+}
+
+// IsSuccess returns true when this {{ humanize .Name }} response has a 2xx status code
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) IsSuccess() bool {
+ {{- if eq .Code -1 }}
+ return {{ .ReceiverName }}._statusCode/100 == 2
+ {{- else }}
+ return {{ and (ge .Code 200) (lt .Code 300) }}
+ {{- end }}
+}
+
+// IsRedirect returns true when this {{ humanize .Name }} response has a 3xx status code
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) IsRedirect() bool {
+ {{- if eq .Code -1 }}
+ return {{ .ReceiverName }}._statusCode/100 == 3
+ {{- else }}
+ return {{ and (ge .Code 300) (lt .Code 400) }}
+ {{- end }}
+}
+
+// IsClientError returns true when this {{ humanize .Name }} response has a 4xx status code
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) IsClientError() bool {
+ {{- if eq .Code -1 }}
+ return {{ .ReceiverName }}._statusCode/100 == 4
+ {{- else }}
+ return {{ and (ge .Code 400) (lt .Code 500) }}
+ {{- end }}
+}
+
+// IsServerError returns true when this {{ humanize .Name }} response has a 5xx status code
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) IsServerError() bool {
+ {{- if eq .Code -1 }}
+ return {{ .ReceiverName }}._statusCode/100 == 5
+ {{- else }}
+ return {{ and (ge .Code 500) (lt .Code 600) }}
+ {{- end }}
+}
+
+// IsCode returns true when this {{ humanize .Name }} response has a status code equal to that given
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) IsCode(code int) bool {
+ {{- if eq .Code -1 }}
+ return {{ .ReceiverName }}._statusCode == code
+ {{- else }}
+ return code == {{ .Code }}
+ {{- end }}
+}
+
+// Code gets the status code for the {{ humanize .Name }} response
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) Code() int {
+ {{- if eq .Code -1 }}
+ return {{ .ReceiverName }}._statusCode
+ {{- else }}
+ return {{ .Code }}
+ {{- end }}
+}
+
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) Error() string {
+ return fmt.Sprintf("[{{ upper .Method }} {{ .Path }}][%d] {{ if .Name }}{{ .Name }} {{ else }}unknown error {{ end }}{{ if .Schema }} %+v{{ end }}", {{ if eq .Code -1 }}{{ .ReceiverName }}._statusCode{{ else }}{{ .Code }}{{ end }}{{ if .Schema }}, o.Payload{{ end }})
+}
+
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) String() string {
+ return fmt.Sprintf("[{{ upper .Method }} {{ .Path }}][%d] {{ if .Name }}{{ .Name }} {{ else }}unknown response {{ end }}{{ if .Schema }} %+v{{ end }}", {{ if eq .Code -1 }}{{ .ReceiverName }}._statusCode{{ else }}{{ .Code }}{{ end }}{{ if .Schema }}, o.Payload{{ end }})
+}
+
+{{ if .Schema }}
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) GetPayload() {{ if and (not .Schema.IsBaseType) (not .Schema.IsInterface) .Schema.IsComplexObject (not .Schema.IsStream) }}*{{ end }}{{ if (not .Schema.IsStream) }}{{ .Schema.GoType }}{{ else }}io.Writer{{end}} {
+ return o.Payload
+}
+{{- end }}
+
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
+ {{- range .Headers }}
+
+ // hydrates response header {{.Name}}
+ hdr{{ pascalize .Name }} := response.GetHeader("{{ .Name }}")
+
+ if hdr{{ pascalize .Name }} != "" {
+ {{- if .Converter }}
+ val{{ camelize .Name }}, err := {{ .Converter }}(hdr{{ pascalize .Name }})
+ if err != nil {
+ return errors.InvalidType({{ .Path }}, "header", "{{ .GoType }}", hdr{{ pascalize .Name }})
+ }
+ {{ .ReceiverName }}.{{ pascalize .Name }} = val{{ camelize .Name }}
+ {{- else if .Child }}
+
+ // binding header items for {{ .Name }}
+ val{{ pascalize .Name }}, err := {{ .ReceiverName }}.bindHeader{{ pascalize .Name }}(hdr{{ pascalize .Name }}, formats)
+ if err != nil {
+ return err
+ }
+
+ {{ .ReceiverName }}.{{ pascalize .Name }} = val{{ pascalize .Name }}
+ {{- else if .IsCustomFormatter }}
+ val{{ camelize .Name }}, err := formats.Parse({{ printf "%q" .SwaggerFormat }}, hdr{{ pascalize .Name }})
+ if err != nil {
+ return errors.InvalidType({{ .Path }}, "header", "{{ .GoType }}", hdr{{ pascalize .Name }})
+ }
+ {{- if .IsNullable }}
+ v := (val{{ camelize .Name }}.({{ .GoType }}))
+ {{ .ReceiverName }}.{{ pascalize .Name }} = &v
+ {{- else }}
+ {{ .ReceiverName }}.{{ pascalize .Name }} = *(val{{ camelize .Name }}.(*{{ .GoType }}))
+ {{- end }}
+ {{- else }}
+ {{- if eq .GoType "string" }}
+ {{ .ReceiverName }}.{{ pascalize .Name }} = hdr{{ pascalize .Name }}
+ {{- else }}
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ .GoType }}(hdr{{ pascalize .Name }})
+ {{- end }}
+ {{- end }}
+ }
+ {{- end }}
+
+ {{- if .Schema }}
+ {{- if .Schema.IsBaseType }}
+
+ // response payload as interface type
+ payload, err := {{ toPackageName .ModelsPackage }}.Unmarshal{{ dropPackage .Schema.GoType }}{{ if .Schema.IsArray}}Slice{{ end }}(response.Body(), consumer)
+ if err != nil {
+ return err
+ }
+ {{ .ReceiverName }}.Payload = payload
+ {{- else if .Schema.IsComplexObject }}
+
+ {{ .ReceiverName }}.Payload = new({{ .Schema.GoType }})
+ {{- end }}
+ {{- if not .Schema.IsBaseType }}
+
+ // response payload
+ if err := consumer.Consume(response.Body(), {{ if not (or .Schema.IsComplexObject .Schema.IsStream) }}&{{ end}}{{ .ReceiverName }}.Payload); err != nil && err != io.EOF {
+ return err
+ }
+ {{- end }}
+ {{- end }}
+
+ return nil
+}
+ {{- range .Headers }}
+ {{- if .Child }}
+
+// bindHeader{{ pascalize $.Name }} binds the response header {{ .Name }}
+func ({{ .ReceiverName }} *{{ pascalize $.Name }}) bindHeader{{ pascalize .Name }}(hdr string, formats strfmt.Registry) ({{ .GoType }}, error) {
+ {{ varname .Child.ValueExpression }}V := hdr
+
+ {{ template "sliceclientheaderbinder" . }}
+
+ return {{ varname .Child.ValueExpression }}C, nil
+}
+ {{- end }}
+ {{- end }}
+{{- end }}
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+
+import (
+ "io"
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/validate"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+// {{ pascalize .Name }}Reader is a Reader for the {{ pascalize .Name }} structure.
+type {{ pascalize .Name }}Reader struct {
+ formats strfmt.Registry
+{{- if .HasStreamingResponse }}
+ writer io.Writer
+{{- end }}
+}
+
+// ReadResponse reads a server response into the received {{ .ReceiverName }}.
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Reader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
+ {{- if .Responses}}
+ switch response.Code() {
+ {{- end }}
+ {{- range .Responses }}
+ case {{ .Code }}:
+ result := New{{ pascalize .Name }}({{ if .Schema }}{{ if .Schema.IsStream }}{{ $.ReceiverName }}.writer{{ end }}{{ end }})
+ if err := result.readResponse(response, consumer, {{ $.ReceiverName }}.formats); err != nil {
+ return nil, err
+ }
+ return {{ if .IsSuccess }}result, nil{{else}}nil, result{{ end }}
+ {{- end }}
+ {{- if .DefaultResponse }}
+ {{- with .DefaultResponse }}
+ {{- if $.Responses}}
+ default:
+ {{- end }}
+ result := New{{ pascalize .Name }}(response.Code(){{ if .Schema }}{{ if .Schema.IsStream }}, {{ $.ReceiverName }}.writer{{ end }}{{ end }})
+ if err := result.readResponse(response, consumer, {{ $.ReceiverName }}.formats); err != nil {
+ return nil, err
+ }
+ if response.Code() / 100 == 2 {
+ return result, nil
+ }
+ return nil, result
+ {{- end }}
+ {{- else }}
+ {{- if $.Responses}}
+ default:
+ {{- end }}
+ return nil, runtime.NewAPIError("[{{ upper .Method }} {{ .Path }}]{{ if .Name }} {{ .Name }}{{ end }}", response, response.Code())
+ {{- end }}
+ {{- if .Responses}}
+ }
+ {{- end }}
+}
+
+{{ range .Responses }}
+ {{ template "clientresponse" . }}
+{{ end }}
+{{ if .DefaultResponse }}
+ {{ template "clientresponse" .DefaultResponse }}
+{{ end }}
+
+{{ range .ExtraSchemas }}
+/*{{ pascalize .Name }} {{ template "docstring" . }}
+swagger:model {{ .Name }}
+*/
+ {{- template "schema" . }}
+{{- end }}
+
+{{- define "sliceclientheaderbinder" }}
+ {{- if .IsArray }}
+ var (
+ {{ varname .Child.ValueExpression }}C {{ .GoType }}
+ )
+ // {{ .Child.ItemsDepth }}CollectionFormat: {{ printf "%q" .CollectionFormat }}
+ {{ varname .Child.ValueExpression }}R := swag.SplitByFormat({{ varname .Child.ValueExpression }}V, {{ printf "%q" .CollectionFormat }})
+
+ for {{ if or .Child.IsCustomFormatter .Child.Converter }}{{ .IndexVar }}{{ else }}_{{ end }}, {{ varname .Child.ValueExpression }}IV := range {{ varname .Child.ValueExpression }}R {
+ {{ template "sliceclientheaderbinder" .Child }}
+ {{ varname .Child.ValueExpression }}C = append({{ varname .Child.ValueExpression }}C, {{ varname .Child.ValueExpression }}IC) // roll-up {{ .Child.GoType }} into {{ .GoType }}
+ }
+
+ {{- else }}
+ // convert split string to {{ .GoType }}
+ {{- if .IsCustomFormatter }}
+ val, err := formats.Parse({{ printf "%q" .SwaggerFormat }}, {{ varname .ValueExpression }}IV)
+ if err != nil {
+ return nil, errors.InvalidType({{ .Path }}, "header{{ .ItemsDepth }}", "{{ .GoType }}", {{ varname .ValueExpression }}IV)
+ }
+ {{- if .IsNullable }}
+ {{ varname .ValueExpression }}IC := (&val).(*{{ .GoType }})
+ {{- else }}
+ {{ varname .ValueExpression }}IC := val.({{ .GoType }})
+ {{- end }}
+ {{- else if .Converter }}
+ val, err := {{- print " "}}{{ .Converter }}({{ varname .ValueExpression }}IV)
+ if err != nil {
+ return nil, errors.InvalidType({{ .Path }}, "header{{ .ItemsDepth }}", "{{ .GoType }}", {{ varname .ValueExpression }}IV)
+ }
+ {{- if .IsNullable }}
+ {{ varname .ValueExpression }}IC := &val
+ {{- else }}
+ {{ varname .ValueExpression }}IC := val
+ {{- end }}
+ {{- else }}
+ {{ varname .ValueExpression }}IC :=
+ {{- if eq .GoType "string" }}
+ {{- print " " }}{{ varname .ValueExpression }}IV
+ {{- else }}
+ {{- print " " }}fmt.Sprintf("%v", {{ varname .ValueExpression }}IV)
+ {{- end }} // string as {{ .GoType }}
+ {{- end }}
+ {{- end }}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/README.md b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/README.md
new file mode 100644
index 000000000..1d36d66f5
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/README.md
@@ -0,0 +1,311 @@
+# swagger
+
+At Stratoscale, we really like the idea of API-first services, and we also really like Go.
+We saw the go-swagger library and thought that most of it could really help us. Generating code from
+swagger files is a big problem with a lot of corner cases, and go-swagger does a great job.
+
+The one thing we felt was missing is customization of the server to follow our design principles:
+
+* A custom `main` function
+* Dependency injection
+* Limited scopes that can be unit tested.
+
+Also:
+
+* Adding your functions to the generated `configure_swagger_*.go` seems to be a burden.
+* There is no interface that the service implements.
+* The HTTP clients and runtime are complicated and custom.
+
+These are the changes that these contrib templates provide:
+
+## Server
+
+### The new `restapi` package exposes interfaces
+
+* These interfaces can be implemented by the developer and contain the business logic of the service.
+* The implementation of these interfaces is extensible.
+* The implementation is separated from the generated code.
+
+### The `restapi` returns an `http.Handler`
+
+The `restapi.Handler` function (see [example](./example/restapi/configure_swagger_petstore.go)) returns
+a standard `http.Handler`.
+
+* Given objects that implement the business logic, we can create a simple HTTP handler.
+* This handler is a standard Go `http.Handler`, so we can now use any other middleware, library, or framework
+  that supports it (see the sketch below).
+* This handler is standard, so we understand it better.
+
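+Because the returned handler is a plain `http.Handler`, wrapping it with middleware is ordinary Go
+composition. A minimal sketch (the logging wrapper is our own illustration, not generated code;
+`h` stands for the handler returned by `restapi.Handler`):
+
+```go
+// withLogging logs each request before delegating to the wrapped handler.
+func withLogging(next http.Handler) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		log.Printf("%s %s", r.Method, r.URL.Path)
+		next.ServeHTTP(w, r)
+	})
+}
+
+// usage: http.ListenAndServe(":8080", withLogging(h))
+```
+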
+## Client
+
+* The new client package exposes interfaces, so functions in our code can receive those
+ interfaces which can be mocked for testing.
+* The new client has a config that gets an `*url.URL` to customize the endpoint.
+* The new client has a config that gets an `http.RoundTripper` to customize client with libraries, middleware or
+ frameworks that support the standard library's objects.
+
+# Example Walk-Through
+
+In the [example package](https://github.com/Stratoscale/swagger/tree/master/example) you'll find generated code and usage of the pet-store
+[swagger file](./example/swagger.yaml).
+
+* The `restapi`, `models` and `client` packages are auto-generated by the stratoscale/swagger Docker image.
+* The `internal` package was manually added and contains the server's business logic.
+* The `main.go` file is the entry point; it contains the project's initialization and dependency injection.
+
+## Server
+
+### [restapi](https://github.com/Stratoscale/swagger/tree/master/example/restapi)
+
+This package is auto-generated and contains the server routing and parameter parsing.
+
+The modified version contains the auto-generated `restapi.PetAPI` and `restapi.StoreAPI` interfaces.
+
+```go
+// PetAPI
+type PetAPI interface {
+ PetCreate(ctx context.Context, params pet.PetCreateParams) middleware.Responder
+ PetDelete(ctx context.Context, params pet.PetDeleteParams) middleware.Responder
+ PetGet(ctx context.Context, params pet.PetGetParams) middleware.Responder
+ PetList(ctx context.Context, params pet.PetListParams) middleware.Responder
+ PetUpdate(ctx context.Context, params pet.PetUpdateParams) middleware.Responder
+}
+
+//go:generate mockery -name StoreAPI -inpkg
+
+// StoreAPI
+type StoreAPI interface {
+ InventoryGet(ctx context.Context, params store.InventoryGetParams) middleware.Responder
+ OrderCreate(ctx context.Context, params store.OrderCreateParams) middleware.Responder
+ // OrderDelete is For valid response try integer IDs with positive integer value. Negative or non-integer values will generate API errors
+ OrderDelete(ctx context.Context, params store.OrderDeleteParams) middleware.Responder
+ // OrderGet is For valid response try integer IDs with value >= 1 and <= 10. Other values will generated exceptions
+ OrderGet(ctx context.Context, params store.OrderGetParams) middleware.Responder
+}
+```
+
+Each function matches an `operationId` in the swagger file, and the functions are grouped according to
+the operation `tags`.
+
+There is also a `restapi.Config`:
+
+```go
+// Config is configuration for Handler
+type Config struct {
+ PetAPI
+ StoreAPI
+ Logger func(string, ...interface{})
+ // InnerMiddleware is for the handler executors. These do not apply to the swagger.json document.
+ // The middleware executes after routing but before authentication, binding and validation
+ InnerMiddleware func(http.Handler) http.Handler
+}
+```
+
+This config is auto-generated and contains all the interfaces declared above.
+It is used to initialize an `http.Handler` with the `Handler` function:
+
+```go
+// Handler returns an http.Handler given the handler configuration
+// It mounts all the business logic implementers in the right routing.
+func Handler(c Config) (http.Handler, error) {
+ ...
+```
+
+Let's look at how we use this generated code to build our server.
+
+### [internal](https://github.com/Stratoscale/swagger/tree/master/example/internal)
+
+The `internal` package is **not** auto-generated and contains the business logic of our server.
+We can see two structs that implement the `restapi.PetAPI` and `restapi.StoreAPI` interfaces,
+which are needed to make our server work.
+
+When adding or removing functions from our REST API, we can just add or remove functions in those
+business logic units. We can also create new logical units when they are added to our REST API.
+
+### [main.go](./example/main.go)
+
+The main function is pretty straightforward. We initialize our business logic units and
+then create a config for our REST API. We then create a standard `http.Handler`, which we can
+wrap with middleware, test with `httptest` (see the sketch after the example below), or use with other standard tools.
+The last piece is to run the handler with `http.ListenAndServe`, or to use it with an `http.Server` -
+it is all very customizable.
+
+```go
+func main() {
+ // Initiate business logic implementers.
+ // This is the main function, so here the implementers' dependencies can be
+ // injected, such as database, parameters from environment variables, or different
+ // clients for different APIs.
+ p := internal.Pet{}
+ s := internal.Store{}
+
+ // Initiate the http handler, with the objects that are implementing the business logic.
+ h, err := restapi.Handler(restapi.Config{
+ PetAPI: &p,
+ StoreAPI: &s,
+ Logger: log.Printf,
+ })
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Run the standard http server
+ log.Fatal(http.ListenAndServe(":8080", h))
+}
+```
+
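+As mentioned above, the handler can be exercised without running a server. A sketch using
+`httptest` (the request path and status assertion are illustrative and depend on the spec's
+`basePath` and routes):
+
+```go
+func TestPetList(t *testing.T) {
+	h, err := restapi.Handler(restapi.Config{
+		PetAPI:   &internal.Pet{},
+		StoreAPI: &internal.Store{},
+		Logger:   t.Logf,
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	rec := httptest.NewRecorder()
+	req := httptest.NewRequest(http.MethodGet, "/api/pets", nil)
+	h.ServeHTTP(rec, req)
+
+	if rec.Code != http.StatusOK {
+		t.Fatalf("unexpected status: %d", rec.Code)
+	}
+}
+```
+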
+## Client
+
+The client code is in the [client package](https://github.com/Stratoscale/swagger/tree/master/example/client) and is auto-generated.
+
+To create a new client we use the `client.Config` struct:
+
+```go
+type Config struct {
+ // URL is the base URL of the upstream server
+ URL *url.URL
+ // Transport is an inner transport for the client
+ Transport http.RoundTripper
+}
+```
+
+This enables us to use a custom server endpoint or custom client middleware, easily, with
+standard components and with any library that accepts them.
+
+The client is then generated with the New method:
+
+```go
+// New creates a new swagger petstore HTTP client.
+func New(c Config) *SwaggerPetstore { ... }
+```
+
+This method returns an object that has two important fields:
+
+```go
+type SwaggerPetstore struct {
+ ...
+ Pet *pet.Client
+ Store *store.Client
+}
+```
+
+Those fields are objects which implement the interfaces declared in the [pet](./example/client/pet) and
+[store](./example/client/store) packages:
+
+For example:
+
+```go
+// API is the interface of the pet client
+type API interface {
+ // PetCreate adds a new pet to the store
+ PetCreate(ctx context.Context, params *PetCreateParams) (*PetCreateCreated, error)
+ // PetDelete deletes a pet
+ PetDelete(ctx context.Context, params *PetDeleteParams) (*PetDeleteNoContent, error)
+	// PetGet gets a pet by its ID
+ PetGet(ctx context.Context, params *PetGetParams) (*PetGetOK, error)
+ // PetList lists pets
+ PetList(ctx context.Context, params *PetListParams) (*PetListOK, error)
+ // PetUpdate updates an existing pet
+ PetUpdate(ctx context.Context, params *PetUpdateParams) (*PetUpdateCreated, error)
+}
+```
+
+They are very similar to the server interfaces, and can be used by consumers of those APIs
+(instead of using the actual client or the `*Pet` struct). A short usage sketch follows.
+
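+A minimal consumer sketch (the URL and base path are illustrative, and the generated
+`pet.NewPetListParams` constructor and `Payload` field are assumed from go-swagger's usual output):
+
+```go
+u, _ := url.Parse("http://localhost:8080/api")
+c := client.New(client.Config{URL: u})
+
+// pets is the interface type, so it can be swapped for a mock in tests.
+var pets pet.API = c.Pet
+
+ok, err := pets.PetList(context.Background(), pet.NewPetListParams())
+if err != nil {
+	log.Fatal(err)
+}
+log.Printf("got %d pets", len(ok.Payload))
+```
+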
+# Authentication
+
+Authentication and policy enforcement of the application are done in several stages, described below.
+
+## Define security in swagger.yaml
+
+Add the `security` and `securityDefinitions` sections to the root of the swagger.yaml file.
+
+```yaml
+securityDefinitions:
+ token:
+ type: apiKey
+ in: header
+ name: Cookie
+
+security:
+ - token: []
+```
+
+The `securityDefinitions` section defines the different security types that your application can handle.
+The types supported by go-swagger are:
+* `apiKey` - a token that should be processed.
+* `oauth2` - a token and scopes that should be processed.
+* `basic` - a user/password pair that should be processed.
+
+Here we defined an `apiKey` that is passed through the Cookie header.
+
+The `security` section defines the default security enforcement for the application. You can select
+different securityDefinitions as the keys and apply "scopes" as the values. Those default definitions
+can be overridden in each route by a section with the same name:
+
+```yaml
+paths:
+ /pets:
+ post:
+ [...]
+ security:
+ - token: [admin]
+```
+
+Here we override the token scope for the POST /pets route so that only an admin can use this API.
+
+Let's see how we can use this functionality.
+
+## Writing Security Handlers
+
+Once we have created a security definition named "token", a function called "AuthToken" is added to the `restapi.Config`:
+
+```go
+type Config struct {
+ ...
+ // AuthToken Applies when the "Cookie" header is set
+ AuthToken func(token string) (interface{}, error)
+}
+```
+
+This function gets the content of the Cookie header and should return an `interface{}` and an `error`.
+The `interface{}` is the object that represents the user who performed the request; return nil
+to produce a 401 Unauthorized HTTP response. If the returned `error` is not nil, an HTTP 500
+Internal Server Error will be returned.
+
+The returned object will be stored in the request context under the `restapi.AuthKey` key, as in
+the sketch below.
+
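+A sketch of such a handler, assuming a hypothetical `sessions` store that resolves cookie
+tokens to user objects:
+
+```go
+cfg := restapi.Config{
+	// ...
+	AuthToken: func(token string) (interface{}, error) {
+		u, err := sessions.Lookup(token) // hypothetical session store
+		if err != nil {
+			return nil, err // results in a 500 internal server error
+		}
+		if u == nil {
+			return nil, nil // results in a 401 unauthorized response
+		}
+		return u, nil // stored under restapi.AuthKey for later stages
+	},
+}
+```
+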
+There is another function we should know about in the `restapi.Config` struct:
+
+```go
+type Config struct {
+ ...
+ // Authorizer is used to authorize a request after the Auth function was called using the "Auth*" functions
+ // and the principal was stored in the context in the "AuthKey" context value.
+ Authorizer func(*http.Request) error
+}
+```
+
+This is a custom-defined function that gets the request and can return an error.
+If the returned error is not nil, a 403 HTTP error will be returned to the client - this is where
+policy enforcement comes into play.
+There are two things that this function should be aware of:
+
+1. The user - it can retrieve the user information from the context: `ctx.Value(restapi.AuthKey).(MyUserType)`.
+   Usually, a server will have a function for extracting this user information that returns a concrete
+   type which can be used by all the routes.
+2. The route - it can retrieve the route using the go-swagger function `middleware.MatchedRouteFrom(*http.Request)`,
+   so there is no need to parse the URL and test the request method.
+   This route struct contains the route information. If, for example, we want to check the scopes that were
+   defined for the current route in the swagger.yaml, we can use the code below:
+
+```go
+for _, auth := range route.Authenticators {
+ for scopeName, scopeValues := range auth.Scopes {
+ for _, scopeValue := range scopeValues {
+ ...
+ }
+ }
+}
+```
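+
+Putting both together, a complete `Authorizer` might look like the following sketch (the `User`
+type, its `IsAdmin` field, and the `admin` scope check are illustrative assumptions):
+
+```go
+cfg.Authorizer = func(r *http.Request) error {
+	u, ok := r.Context().Value(restapi.AuthKey).(*User) // hypothetical user type
+	if !ok {
+		return fmt.Errorf("no authenticated user in context")
+	}
+	route := middleware.MatchedRouteFrom(r)
+	if route == nil {
+		return fmt.Errorf("no matched route")
+	}
+	for _, auth := range route.Authenticators {
+		for _, scopeValues := range auth.Scopes {
+			for _, scope := range scopeValues {
+				if scope == "admin" && !u.IsAdmin {
+					return fmt.Errorf("admin scope required")
+				}
+			}
+		}
+	}
+	return nil
+}
+```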
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/client/client.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/client/client.gotmpl
new file mode 100644
index 000000000..3398815ec
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/client/client.gotmpl
@@ -0,0 +1,111 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Name }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ "fmt"
+ "net/http"
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/validate"
+
+ strfmt "github.com/go-openapi/strfmt"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+//go:generate mockery --name API --keeptree --with-expecter --case underscore
+
+// API is the interface of the {{ humanize .Name }} client
+type API interface {
+{{ range .Operations -}}
+/*
+{{ pascalize .Name }} {{ if .Summary }}{{ pluralizeFirstWord (humanize .Summary) }}{{ if .Description }}
+
+{{ blockcomment .Description }}{{ end }}{{ else if .Description}}{{ blockcomment .Description }}{{ else }}{{ humanize .Name }} API{{ end -}}
+*/
+ {{ pascalize .Name }}(ctx context.Context, params *{{ pascalize .Name }}Params{{ if .HasStreamingResponse }}, writer io.Writer{{ end }}) {{ if .SuccessResponse }}({{ range .SuccessResponses }}*{{ pascalize .Name }}, {{ end }}{{ end }}error{{ if .SuccessResponse }}){{ end }}
+{{ end -}}
+}
+
+// New creates a new {{ humanize .Name }} API client.
+func New(transport runtime.ClientTransport, formats strfmt.Registry, authInfo runtime.ClientAuthInfoWriter) *Client {
+ return &Client{
+ transport: transport,
+ formats: formats,
+ authInfo: authInfo,
+ }
+}
+
+/*
+Client {{ if .Summary }}{{ .Summary }}{{ if .Description }}
+
+{{ blockcomment .Description }}{{ end }}{{ else if .Description}}{{ blockcomment .Description }}{{ else }}for {{ humanize .Name }} API{{ end }}
+*/
+type Client struct {
+ transport runtime.ClientTransport
+ formats strfmt.Registry
+ authInfo runtime.ClientAuthInfoWriter
+}
+
+{{ range .Operations -}}
+/*
+{{ pascalize .Name }} {{ if .Summary }}{{ pluralizeFirstWord (humanize .Summary) }}{{ if .Description }}
+
+{{ blockcomment .Description }}{{ end }}{{ else if .Description}}{{ blockcomment .Description }}{{ else }}{{ humanize .Name }} API{{ end }}
+*/
+func (a *Client) {{ pascalize .Name }}(ctx context.Context, params *{{ pascalize .Name }}Params{{ if .HasStreamingResponse }}, writer io.Writer{{ end }}) {{ if .SuccessResponse }}({{ range .SuccessResponses }}*{{ pascalize .Name }}, {{ end }}{{ end }}error{{ if .SuccessResponse }}){{ end }} {
+ {{ $length := len .SuccessResponses }}
+ {{ $success := .SuccessResponses }}
+ {{ if .Responses }}result{{else}}_{{end}}, err := a.transport.Submit(&runtime.ClientOperation{
+ ID: {{ printf "%q" .Name }},
+ Method: {{ printf "%q" .Method }},
+ PathPattern: {{ printf "%q" .Path }},
+ ProducesMediaTypes: {{ printf "%#v" .ProducesMediaTypes }},
+ ConsumesMediaTypes: {{ printf "%#v" .ConsumesMediaTypes }},
+ Schemes: {{ printf "%#v" .Schemes }},
+ Params: params,
+ Reader: &{{ pascalize .Name }}Reader{formats: a.formats{{ if .HasStreamingResponse }}, writer: writer{{ end }}},
+ {{ if .Authorized -}}
+ AuthInfo: a.authInfo,
+ {{ end -}}
+ Context: ctx,
+ Client: params.HTTPClient,
+ })
+ if err != nil {
+ return {{ if $success }}{{ padSurround "nil" "nil" 0 $length }}, {{ end }}err
+ }
+ {{- if .Responses }}
+ switch value := result.(type) {
+ {{- range $i, $v := .Responses }}
+ case *{{ pascalize $v.Name }}:
+ {{- if $v.IsSuccess }}
+ return {{ if $success }}{{ padSurround "value" "nil" $i $length }},{{ end }}nil
+ {{- else }}
+ return {{ if $success }}{{ padSurround "nil" "nil" 0 $length }},{{ end }}runtime.NewAPIError("unsuccessful response", value, value.Code())
+ {{- end }}
+ {{- end }}
+ }
+ {{- if .DefaultResponse }}
+ // unexpected success response
+ unexpectedSuccess := result.(*{{ pascalize .DefaultResponse.Name }})
+ return {{ if $success }}{{ padSurround "nil" "nil" 0 $length }}, {{ end }}runtime.NewAPIError("unexpected success response: content available as default response in error", unexpectedSuccess, unexpectedSuccess.Code())
+ {{- else }}
+ // safeguard: normally, absent a default response, unknown success responses return an error above: so this is a codegen issue
+ msg := fmt.Sprintf("unexpected success response for {{ .Name }}: API contract not enforced by server. Client expected to get an error, but got: %T", result)
+ panic(msg)
+ {{- end }}
+ {{- else }}
+ return nil
+ {{- end }}
+}
+{{ end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/client/facade.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/client/facade.gotmpl
new file mode 100644
index 000000000..1d658978b
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/client/facade.gotmpl
@@ -0,0 +1,83 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+
+import (
+ "net/url"
+ "net/http"
+
+ rtclient "github.com/go-openapi/runtime/client"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/strfmt"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+const (
+ // DefaultHost is the default Host
+ // found in Meta (info) section of spec file
+ DefaultHost string = {{ printf "%#v" .Host }}
+ // DefaultBasePath is the default BasePath
+ // found in Meta (info) section of spec file
+ DefaultBasePath string = {{ printf "%#v" .BasePath }}
+)
+
+// DefaultSchemes are the default schemes found in Meta (info) section of spec file
+var DefaultSchemes = {{ printf "%#v" .Schemes }}
+
+type Config struct {
+ // URL is the base URL of the upstream server
+ URL *url.URL
+ // Transport is an inner transport for the client
+ Transport http.RoundTripper
+ // AuthInfo is for authentication
+ AuthInfo runtime.ClientAuthInfoWriter
+}
+
+// New creates a new {{ humanize .Name }} HTTP client.
+func New(c Config) *{{ pascalize .Name }} {
+ var (
+ host = DefaultHost
+ basePath = DefaultBasePath
+ schemes = DefaultSchemes
+ )
+
+ if c.URL != nil {
+ host = c.URL.Host
+ basePath = c.URL.Path
+ schemes = []string{c.URL.Scheme}
+ }
+
+ transport := rtclient.New(host, basePath, schemes)
+ if c.Transport != nil {
+ transport.Transport = c.Transport
+ }
+
+ cli := new({{ pascalize .Name }})
+ cli.Transport = transport
+ {{ range .OperationGroups -}}
+ cli.{{ pascalize .Name }} = {{ .PackageAlias }}.New(transport, strfmt.Default, c.AuthInfo)
+ {{ end -}}
+
+ return cli
+}
+
+// {{ pascalize .Name }} is a client for {{ humanize .Name }}
+type {{ pascalize .Name }} struct {
+ {{ range .OperationGroups -}}
+ {{ pascalize .Name }} {{ .PackageAlias }}.API
+ {{ end -}}
+ Transport runtime.ClientTransport
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/configureapi.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/configureapi.gotmpl
new file mode 100644
index 000000000..eaee9701f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/configureapi.gotmpl
@@ -0,0 +1,222 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .APIPackage }}
+
+import (
+ "context"
+ "crypto/tls"
+ "net/http"
+ "log"
+ "fmt"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/runtime/middleware"
+ "github.com/go-openapi/runtime/security"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+{{ $package := .Package }}
+
+type contextKey string
+
+const AuthKey contextKey = "Auth"
+
+{{ range .OperationGroups -}}
+//go:generate mockery -name {{ pascalize .Name}}API -inpkg
+
+/* {{ pascalize .Name }}API {{ .Description }} */
+type {{ pascalize .Name }}API interface {
+{{ range .Operations -}}
+ {{ if .Summary -}}
+ /* {{ pascalize .Name }} {{ .Summary }} */
+ {{ else if .Description -}}
+ /* {{ pascalize .Name }} {{ .Description }} */
+ {{ end -}}
+ {{ pascalize .Name }}(ctx context.Context, params {{.Package}}.{{ pascalize .Name }}Params) middleware.Responder
+
+{{ end -}}
+}
+{{ end }}
+
+// Config is configuration for Handler
+type Config struct {
+ {{ range .OperationGroups -}}
+ {{ pascalize .Name }}API
+ {{ end -}}
+ Logger func(string, ...interface{})
+ // InnerMiddleware is for the handler executors. These do not apply to the swagger.json document.
+ // The middleware executes after routing but before authentication, binding and validation
+ InnerMiddleware func(http.Handler) http.Handler
+
+ // Authorizer is used to authorize a request after the Auth function was called using the "Auth*" functions
+ // and the principal was stored in the context in the "AuthKey" context value.
+ Authorizer func(*http.Request) error
+
+ {{ range .SecurityDefinitions -}}
+ {{ if .IsBasicAuth -}}
+ // Auth{{ pascalize .ID }} for basic authentication
+ Auth{{ pascalize .ID }} func(user string, pass string) ({{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}, error)
+ {{ end -}}
+ {{ if .IsAPIKeyAuth -}}
+ // Auth{{ pascalize .ID }} Applies when the "{{ .Name }}" {{ .Source }} is set
+ Auth{{ pascalize .ID }} func(token string) ({{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}, error)
+ {{ end }}
+ {{ if .IsOAuth2 -}}
+ // Auth{{ pascalize .ID }} For OAuth2 authentication
+ Auth{{ pascalize .ID }} func(token string, scopes []string) ({{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}, error)
+ {{ end -}}
+ {{ end -}}
+
+ // Authenticator to use for all APIKey authentication
+ APIKeyAuthenticator func(string, string, security.TokenAuthentication) runtime.Authenticator
+ // Authenticator to use for all Basic authentication
+ BasicAuthenticator func(security.UserPassAuthentication) runtime.Authenticator
+ // Authenticator to use for all Bearer authentication
+ BearerAuthenticator func(string, security.ScopedTokenAuthentication) runtime.Authenticator
+
+ {{ range .Consumes -}}
+ {{ if .Implementation -}}
+ // {{ pascalize .Name }}Consumer is a {{ .Name }} consumer that will replace the default if not nil.
+ {{ pascalize .Name }}Consumer runtime.Consumer
+ {{ end -}}
+ {{ end -}}
+}
+
+// Handler returns an http.Handler given the handler configuration
+// It mounts all the business logic implementations on the right routes.
+func Handler(c Config) (http.Handler, error) {
+ h, _, err := HandlerAPI(c)
+ return h, err
+}
+
+// HandlerAPI returns an http.Handler given the handler configuration
+// and the corresponding *{{ pascalize .Name }}API instance.
+// It mounts all the business logic implementations on the right routes.
+func HandlerAPI(c Config) (http.Handler, *{{.Package}}.{{ pascalize .Name }}API, error) {
+ spec, err := loads.Analyzed(swaggerCopy(SwaggerJSON), "")
+ if err != nil {
+ return nil, nil, fmt.Errorf("analyze swagger: %v", err)
+ }
+ api := {{.Package}}.New{{ pascalize .Name }}API(spec)
+ api.ServeError = errors.ServeError
+ api.Logger = c.Logger
+
+ if c.APIKeyAuthenticator != nil {
+ api.APIKeyAuthenticator = c.APIKeyAuthenticator
+ }
+ if c.BasicAuthenticator != nil {
+ api.BasicAuthenticator = c.BasicAuthenticator
+ }
+ if c.BearerAuthenticator != nil {
+ api.BearerAuthenticator = c.BearerAuthenticator
+ }
+
+ {{ range .Consumes -}}
+ if c.{{ pascalize .Name }}Consumer != nil {
+ api.{{ pascalize .Name }}Consumer = c.{{ pascalize .Name }}Consumer
+ } else {
+ {{ if .Implementation -}}
+ api.{{ pascalize .Name }}Consumer = {{ .Implementation }}
+ {{ else }}
+ api.{{ pascalize .Name }}Consumer = runtime.ConsumerFunc(func(r io.Reader, target interface{}) error {
+ return errors.NotImplemented("{{.Name}} consumer has not yet been implemented")
+ })
+ {{ end -}}
+ }
+ {{ end -}}
+ {{ range .Produces -}}
+ {{ if .Implementation -}}
+ api.{{ pascalize .Name }}Producer = {{ .Implementation }}
+ {{ else -}}
+ api.{{ pascalize .Name }}Producer = runtime.ProducerFunc(func(w io.Writer, data interface{}) error {
+ return errors.NotImplemented("{{.Name}} producer has not yet been implemented")
+ })
+ {{ end -}}
+ {{ end -}}
+
+ {{ range .SecurityDefinitions -}}
+ {{ if .IsBasicAuth -}}
+ api.{{ pascalize .ID }}Auth = func(user string, pass string) ({{if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ if c.Auth{{ pascalize .ID }} == nil {
+ {{- if eq .Principal "interface{}" }}
+ return "", nil
+ {{- else }}
+ panic("you specified a custom principal type, but did not provide the authenticator to provide this")
+ {{- end }}
+ }
+ return c.Auth{{ pascalize .ID }}(user, pass)
+ }
+ {{ end -}}
+ {{ if .IsAPIKeyAuth -}}
+ api.{{ pascalize .ID }}Auth = func(token string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ if c.Auth{{ pascalize .ID }} == nil {
+ {{- if eq .Principal "interface{}" }}
+ return token, nil
+ {{- else }}
+ panic("you specified a custom principal type, but did not provide the authenticator to provide this")
+ {{- end }}
+ }
+ return c.Auth{{ pascalize .ID }}(token)
+ }
+ {{ end }}
+ {{ if .IsOAuth2 -}}
+ api.{{ pascalize .ID }}Auth = func(token string, scopes []string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ if c.Auth{{ pascalize .ID }} == nil {
+ {{- if eq .Principal "interface{}" }}
+ return token, nil
+ {{- else }}
+ panic("you specified a custom principal type, but did not provide the authenticator to provide this")
+ {{- end }}
+ }
+ return c.Auth{{ pascalize .ID }}(token, scopes)
+ }
+ {{ end -}}
+ {{ end -}}
+
+ {{ if .SecurityDefinitions -}}
+ api.APIAuthorizer = authorizer(c.Authorizer)
+ {{ end -}}
+
+ {{ range .Operations -}}
+ api.{{if ne .Package $package}}{{pascalize .Package}}{{end}}{{ pascalize .Name }}Handler =
+ {{- .PackageAlias }}.{{ pascalize .Name }}HandlerFunc(func(params {{.PackageAlias}}.{{ pascalize .Name }}Params{{if .Authorized}}, principal {{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}{{end}}) middleware.Responder {
+ ctx := params.HTTPRequest.Context()
+ {{ if .Authorized -}}
+ ctx = storeAuth(ctx, principal)
+ {{ end -}}
+ return c.{{pascalize .Package}}API.{{pascalize .Name}}(ctx, params)
+ })
+ {{ end -}}
+
+ api.ServerShutdown = func() { }
+ return api.Serve(c.InnerMiddleware), api, nil
+}
+
+// swaggerCopy copies the swagger JSON to prevent data races at runtime.
+func swaggerCopy(orig json.RawMessage) json.RawMessage {
+ c := make(json.RawMessage, len(orig))
+ copy(c, orig)
+ return c
+}
+
+// authorizer is a helper function type that implements the runtime.Authorizer interface.
+type authorizer func(*http.Request) error
+
+func (a authorizer) Authorize(req *http.Request, principal interface{}) error {
+ if a == nil {
+ return nil
+ }
+ ctx := storeAuth(req.Context(), principal)
+ return a(req.WithContext(ctx))
+}
+
+func storeAuth(ctx context.Context, principal interface{}) context.Context {
+ return context.WithValue(ctx, AuthKey, principal)
+}
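
As a reading aid (not part of the vendored diff): the configureapi template above threads the authenticated principal through the request context under the AuthKey key via storeAuth, and business handlers read it back out with a plain context lookup. A minimal, self-contained Go sketch of that pattern; the string principal and the handler are assumptions for illustration:

```go
package main

import (
	"context"
	"fmt"
	"net/http"
	"net/http/httptest"
)

type contextKey string

const AuthKey contextKey = "Auth"

// storeAuth mirrors the generated helper: it stashes the authenticated
// principal in the request context under AuthKey.
func storeAuth(ctx context.Context, principal interface{}) context.Context {
	return context.WithValue(ctx, AuthKey, principal)
}

// handler shows the retrieval side: business logic reads the principal
// back with a context lookup and a type assertion.
func handler(w http.ResponseWriter, r *http.Request) {
	principal, ok := r.Context().Value(AuthKey).(string)
	if !ok {
		http.Error(w, "unauthenticated", http.StatusUnauthorized)
		return
	}
	fmt.Fprintf(w, "hello, %s\n", principal)
}

func main() {
	rec := httptest.NewRecorder()
	req := httptest.NewRequest(http.MethodGet, "/", nil)
	req = req.WithContext(storeAuth(req.Context(), "alice"))
	handler(rec, req)
	fmt.Print(rec.Body.String()) // hello, alice
}
```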
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/server.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/server.gotmpl
new file mode 100644
index 000000000..0330309c5
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/contrib/stratoscale/server/server.gotmpl
@@ -0,0 +1,9 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .APIPackage }}
+
+// This file is intentionally empty; otherwise go-swagger would generate a server, which we don't want.
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/docstring.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/docstring.gotmpl
new file mode 100644
index 000000000..8e7108be1
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/docstring.gotmpl
@@ -0,0 +1,25 @@
+{{ define "docstring" }}
+ {{- if .Title }}
+ {{- comment .Title }}
+ {{- if .Description }}
+//
+// {{ comment .Description }}
+ {{- end }}
+ {{- else if .Description}}
+ {{- comment .Description }}
+ {{- else }}
+ {{- humanize .Name }}
+ {{- end }}
+ {{- if or .MinProperties .MaxProperties }}
+//
+ {{- if .MinProperties }}
+// Min Properties: {{ .MinProperties }}
+ {{- end }}
+ {{- if .MaxProperties }}
+// Max Properties: {{ .MaxProperties }}
+ {{- end }}
+ {{- end }}
+ {{- if .Example }}
+// Example: {{ print .Example }}
+ {{- end }}
+{{- end }}
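
For context, the heavy use of `{{-` in docstring.gotmpl is Go text/template whitespace trimming: it is what lets a multi-line template emit a single-line comment. A small standalone demo (template and data invented for illustration, not vendored code):

```go
package main

import (
	"os"
	"text/template"
)

func main() {
	// Without the leading dashes, each action would leave its surrounding
	// newlines and indentation in the output.
	const tmpl = `{{ define "docstring" }}
  {{- if .Title }}
    {{- .Title }}
  {{- else }}
    {{- .Name }}
  {{- end }}
{{- end }}// {{ template "docstring" . }}`

	t := template.Must(template.New("demo").Parse(tmpl))
	// Prints `// Widget` on one line, despite the multi-line template.
	_ = t.Execute(os.Stdout, map[string]string{"Title": "Widget", "Name": "widget"})
}
```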
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/header.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/header.gotmpl
new file mode 100644
index 000000000..a60cae1ea
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/header.gotmpl
@@ -0,0 +1,20 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+{{ if .Copyright -}}
+// {{ comment .Copyright }}
+{{- end }}
+
+package {{.Package}}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ "github.com/go-openapi/strfmt"
+{{- if .DefaultImports }}
+ {{ imports .DefaultImports }}
+{{- end }}
+{{- if .Imports }}
+ {{ imports .Imports }}
+{{- end }}
+)
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/markdown/docs.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/markdown/docs.gotmpl
new file mode 100644
index 000000000..8b7c6b3dd
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/markdown/docs.gotmpl
@@ -0,0 +1,527 @@
+{{- define "externalDoc" }}{{/* renders external documentation */}}
+ {{- with .ExternalDocs }}
+ {{- if .URL }}
+ {{- if .Description }}
+> [{{ trimSpace .Description }}]({{ .URL }})
+ {{- else }}
+> [Read more]({{ .URL }})
+ {{- end }}
+ {{- else }}
+> {{ trimSpace .Description }}
+ {{- end }}
+ {{- end }}
+{{- end }}
+
+{{- define "docParam" }}{{/* renders a parameter with simple schema */}}
+| {{ .Name }} | `{{ .Location }}` | {{ paramDocType . }} | `{{ .GoType }}` | {{ if .CollectionFormat }}`{{ docCollectionFormat .CollectionFormat .Child }}`{{ end }} | {{ if .Required }}✓{{ end }} | {{ if .Default }}`{{ json .Default }}`{{ end }} | {{ trimSpace .Description }} |
+{{- end }}
+
+{{- define "docModelSchema" }}{{/* renders a schema */}}
+ {{- if .IsArray }}
+ {{- if .IsAliased }}
+ [{{- dropPackage .GoType }}](#{{ dasherize (dropPackage .GoType) -}})
+ {{- else if .Items }}
+ {{- if and .Items.IsPrimitive (not .Items.IsAliased) -}}
+ {{- schemaDocType . -}}
+ {{- else -}}
+ [][{{- dropPackage .Items.GoType }}](#{{ dasherize (dropPackage .Items.GoType) -}})
+ {{- end -}}
+ {{- else -}}
+ []any{{ printf " " -}}
+ {{- end -}}
+ {{- else if and .IsMap (not .IsAdditionalProperties) -}}
+ {{- if .IsAliased -}}
+ [{{- dropPackage .GoType }}](#{{ dasherize (dropPackage .GoType) -}})
+ {{- else if .ElemType }}
+ {{- if and .ElemType.IsPrimitive (not .ElemType.IsAliased) (not .ElemType.IsInterface) -}}
+ {{ schemaDocMapType . -}}
+ {{- else if .ElemType.IsInterface -}}
+ map of any{{ printf " " -}}
+ {{- else -}}
+ map of [{{- dropPackage .ElemType.GoType }}](#{{ dasherize (dropPackage .ElemType.GoType) -}})
+ {{- end -}}
+ {{- else -}}
+ map of any{{ printf " " -}}
+ {{- end -}}
+ {{- else if and .IsAliased .IsPrimitive (not .IsSuperAlias) -}}
+| Name | Type | Go type | Default | Description | Example |
+|------|------|---------| ------- |-------------|---------|
+| {{ .Name }} | {{ schemaDocType . }}| {{ .AliasedType }} | {{ if .Default }}`{{ json .Default }}`{{ end }}| {{ trimSpace .Description }} | {{ if .Example }}`{{ .Example }}`{{ end }} |
+{{ printf "\n" }}
+ {{- else if or (and .IsAliased (not (.IsAdditionalProperties))) (and .IsComplexObject (not .Properties) (not .AllOf)) -}}
+[{{- dropPackage .GoType }}](#{{ dasherize (dropPackage .GoType) -}})
+ {{- else if and .IsInterface (not .IsAliased) (not .IsMap) -}}
+any
+ {{- else -}}
+ {{- range .AllOf }}
+ {{- if .IsAnonymous }}
+* inlined member (*{{ .Name }}*)
+
+{{ template "docModelSchema" . }}
+ {{- else if or .IsComplexObject .IsPrimitive }}
+* composed type [{{- dropPackage .GoType }}](#{{ dasherize (dropPackage .GoType) -}})
+ {{- else }}
+* {{ template "docModelSchema" . }}
+ {{- end }}
+ {{- end }}
+ {{- if .Properties }}
+
+**{{ if .IsTuple }}Tuple members{{ else }}Properties{{ end }}**
+
+| Name | Type | Go type | Required | Default | Description | Example |
+|------|------|---------|:--------:| ------- |-------------|---------|
+ {{- range .Properties }}
+| {{ .Name }} | {{ template "docSchemaSimple" . }}| `{{ .GoType }}` | {{ if .Required }}✓{{ end }} | {{ if .Default }}`{{ json .Default }}`{{ end }}| {{ trimSpace .Description }} | {{ if .Example }}`{{ .Example }}`{{ end }} |
+ {{- end }}
+{{ printf "\n" }}
+ {{- end }}
+ {{- if .HasAdditionalProperties }}
+
+**Additional Properties**
+ {{- with .AdditionalProperties }}
+ {{- if .IsInterface }}
+
+any
+ {{- else if .IsPrimitive }}
+
+| Type | Go type | Default | Description | Example |
+|------|---------| ------- |-------------|---------|
+| {{ template "docSchemaSimple" . }} | `{{ .GoType }}` |{{ if .Default }}`{{ json .Default }}`{{ end }}| {{ trimSpace .Description }} | {{ if .Example }}`{{ .Example }}`{{ end }} |
+ {{- else }}
+
+{{ template "docModelSchema" . }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- if and .IsTuple .HasAdditionalItems }}
+ {{- with .AdditionalItems }}
+
+**Additional Items**
+ {{- if .IsInterface }}
+
+any
+ {{- else if .IsPrimitive }}
+
+| Type | Go type | Default | Description | Example |
+|------|---------| ------- |-------------|---------|
+| {{ template "docSchemaSimple" . }} | `{{ .GoType }}` |{{ if .Default }}`{{ json .Default }}`{{ end }}| {{ trimSpace .Description }} | {{ if .Example }}`{{ .Example }}`{{ end }} |
+ {{- else }}
+
+{{ template "docModelSchema" . }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end -}}
+{{- end }}
+
+{{- define "docModel" }}{{/* renders a definition */}}
+ {{- with .Description }}
+> {{ .}}
+ {{- end }}
+ {{- if .ExternalDocs }}
+{{ template "externalDoc" . }}
+ {{- end }}
+ {{ if or .Description .ExternalDocs }}
+{{ printf "\n" }}
+ {{- end }}
+
+{{ template "docModelSchema" .}}
+{{- end }}
+
+{{- define "docSchemaSimple" }}{{/* renders a simple property */}}
+ {{- if .IsAliased -}}
+ [{{- dropPackage .GoType }}](#{{ dasherize (dropPackage .GoType) -}})
+ {{- else if .IsArray }}
+ {{- if .Items }}
+ {{- if and .Items.IsPrimitive (not .Items.IsAliased) -}}
+ {{- schemaDocType . -}}
+ {{- else -}}
+ [][{{- dropPackage .Items.GoType }}](#{{ dasherize (dropPackage .Items.GoType) -}})
+ {{- end -}}
+ {{- else -}}
+ []any{{ printf " " -}}
+ {{- end -}}
+ {{- else if .IsMap -}}
+ {{- if .ElemType }}
+ {{- if and .ElemType.IsPrimitive (not .ElemType.IsAliased) (not .ElemType.IsInterface) -}}
+ {{ schemaDocMapType . -}}
+ {{- else if .ElemType.IsInterface -}}
+ map of any{{ printf " " -}}
+ {{- else -}}
+ map of [{{- dropPackage .ElemType.GoType }}](#{{ dasherize (dropPackage .ElemType.GoType) -}})
+ {{- end -}}
+ {{- else -}}
+ map of any{{ printf " " -}}
+ {{- end -}}
+ {{- else if .IsPrimitive -}}
+ {{- schemaDocType . -}}
+ {{- else -}}
+ [{{- dropPackage .GoType }}](#{{ dasherize (dropPackage .GoType) -}})
+ {{- end -}}
+{{- end }}
+
+{{- define "docModelBodyParam" }}{{/* layout for body param schema */}}
+| {{ .Name }} | `body` | {{ template "docSchemaSimple" .Schema }} | `{{ .Schema.GoType }}` | | {{ if .Required }}✓{{ end }} | {{ if .Default }}`{{ json .Default }}`{{ end }}| {{ trimSpace .Description }} |
+{{- end }}
+
+{{- define "docHeaders" }}{{/* renders response headers */}}
+ {{- if .Headers }}
+| Name | Type | Go type | Separator | Default | Description |
+|------|------|---------|-----------|---------|-------------|
+ {{- range .Headers }}
+| {{ .Name }} | {{ headerDocType . }} | `{{ .GoType }}` | {{ if .CollectionFormat }}`{{ docCollectionFormat .CollectionFormat .Child }}`{{ end }} | {{ if .Default }}`{{ json .Default }}`{{ end }} | {{ trimSpace .Description }} |
+ {{- end }}
+ {{- end }}
+{{- end }}
+
+{{/* spec top-level information block */}}
+{{- if .Info }}
+ {{- with .Info.Title }}
+# {{ . }}
+ {{- end }}
+
+ {{- with .Info.Description }}
+{{ . }}
+ {{- end }}
+ {{ template "externalDoc" . }}
+
+ {{- if or .Info.Version .Info.License .Info.Contact .Info.TermsOfService }}
+
+## Information
+ {{- end }}
+
+ {{- with .Info.Version }}
+
+### Version
+
+{{ . }}
+ {{- end }}
+
+ {{- with .Info.License }}
+
+### License
+
+{{ if .Name }}[{{ .Name }}]({{ end}}{{ .URL }}{{ if .Name }}){{ end }}
+ {{- end }}
+
+ {{- with .Info.Contact }}
+
+### Contact
+
+{{ .Name }} {{ .Email }} {{ .URL }}
+ {{- end }}
+
+ {{- with .Info.TermsOfService }}
+
+### Terms of Service
+
+{{ . }}
+ {{- end }}
+{{- else }}
+ {{ template "externalDoc" . }}
+{{- end }}
+
+{{- if .Tags }}
+
+## Tags
+ {{- range .Tags }}
+
+ ### <span id="tag-{{ dasherize .Name }}"></span>{{ if .ExternalDocs }}[{{ .Name }}]({{ .ExternalDocs.URL }}{{ if .ExternalDocs.Description }} {{ printf "%q" .ExternalDocs.Description }}{{ end }}){{ else }}{{ .Name }}{{ end }}
+ {{- if .Description }}
+
+{{ .Description }}
+ {{- end }}
+ {{- end }}
+{{- end }}
+
+{{- if or .Schemes .Consumes .Produces }}
+
+## Content negotiation
+{{- end }}
+{{- if .Schemes }}
+
+### URI Schemes
+ {{- range .Schemes }}
+ * {{ . }}
+ {{- end }}
+ {{- range .ExtraSchemes }}
+ * {{ . }}
+ {{- end }}
+{{- end }}
+
+{{- if .Consumes }}
+
+### Consumes
+ {{- range .Consumes }}
+ {{- range .AllSerializers }}
+ * {{ .MediaType }}
+ {{- end }}
+ {{- end }}
+{{- end }}{{/* end .Consumes */}}
+
+{{- if .Produces }}
+
+### Produces
+ {{- range .Produces }}
+ {{- range .AllSerializers }}
+ * {{ .MediaType }}
+ {{- end }}
+ {{- end }}
+{{- end }}
+
+{{- if or .SecurityDefinitions .SecurityRequirements }}
+
+## Access control
+{{- end }}
+{{- if .SecurityDefinitions }}
+
+### Security Schemes
+ {{- range .SecurityDefinitions }}
+
+#### {{ .ID }}{{ if .Source }} ({{ .Source }}{{ with .Name }}: {{ . }}{{ end }}){{ end }}
+
+{{ .Description }}
+
+ {{- with .Type }}
+
+> **Type**: {{ . }}
+ {{- end }}
+ {{- if .IsOAuth2}}
+ {{- with .Flow }}
+>
+> **Flow**: {{ . }}
+ {{- end }}
+ {{- with .AuthorizationURL }}
+>
+> **Authorization URL**: {{ . }}
+ {{- end }}
+ {{- with .TokenURL }}
+>
+> **Token URL**: {{ . }}
+ {{- end }}
+ {{ if .ScopesDesc }}
+
+##### Scopes
+
+Name | Description
+-----|-------------
+ {{- range .ScopesDesc }}
+{{ .Name }} | {{ .Description }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{- end }}{{/* end .SecurityDefinitions */}}
+
+{{- if .SecurityRequirements }}
+
+### Security Requirements
+
+ {{- range .SecurityRequirements }}
+ * {{ .Name }}{{ if .Scopes }}: {{ range $idx, $scope := .Scopes }}{{ if gt $idx 0 }}, {{ end }}{{ $scope }}{{ end }}
+ {{- end }}
+ {{- end }}
+{{- end }}{{/* end .SecurityRequirements */}}
+
+## All endpoints{{/* an index of all API endpoints */}}
+
+{{- $alltags := .Tags }}
+{{- range .OperationGroups }}
+
+### {{ .PackageAlias }}
+ {{- $pkg := .PackageAlias }}
+ {{- range $alltags }}
+ {{- if eq .Name $pkg }}
+
+ {{ template "externalDoc" . }}
+ {{- end }}
+ {{- end }}
+
+| Method | URI | Name | Summary |
+|---------|---------|--------|---------|
+ {{- range .Operations }}
+| {{ upper .Method }} | {{ joinPath .BasePath .Path }} | [{{ humanize .Name }}](#{{ dasherize .Name }}) | {{ .Summary }} |
+ {{- end }}
+ {{ printf "\n" }}
+{{- end }}
+
+## Paths{{/* all paths to operations */}}
+
+{{- range .Operations }}
+ {{- $opname := .Name }}
+
+### <span id="{{ dasherize .Name }}"></span> {{ if .Summary }}{{ trimSpace .Summary }}{{ else }}{{ humanize .Name }}{{ end }} (*{{ .Name }}*)
+
+```
+{{ upper .Method }} {{ joinPath .BasePath .Path }}
+```
+ {{- with .Description }}
+
+{{ . }}
+ {{- end }}
+
+ {{- with .ExternalDocs }}
+
+> {{ if .URL }}[Read more]({{ .URL }} "{{ .Description }}"){{ end }}
+ {{- end }}
+
+ {{- if or (gt (len .SchemeOverrides) 0) (gt (len .ExtraSchemeOverrides) 0) }}
+
+#### URI Schemes
+
+ {{- range .SchemeOverrides }}
+ * {{ . }}
+ {{- end }}
+ {{- range .ExtraSchemeOverrides }}
+ * {{ . }}
+ {{- end }}
+ {{- end }}
+
+ {{- if .Consumes }}
+
+#### Consumes
+
+ {{- range .Consumes }}
+ * {{ . }}
+ {{- end }}
+ {{- end }}
+
+ {{- if .Produces }}
+
+#### Produces
+
+ {{- range .Produces }}
+ * {{ . }}
+ {{- end }}
+ {{- end }}
+
+ {{- if .SecurityRequirements }}
+
+#### Security Requirements
+ {{- range .SecurityRequirements }}
+ * {{ .Name }}{{ if .Scopes }}: {{ range $idx, $scope := .Scopes }}{{ if gt $idx 0 }}, {{ end }}{{ $scope }}{{ end }}{{ end }}
+ {{- end }}
+ {{- end }}
+
+ {{- if .Params }}
+
+#### Parameters
+
+| Name | Source | Type | Go type | Separator | Required | Default | Description |
+|------|--------|------|---------|-----------| :------: |---------|-------------|
+{{- range .PathParams }}{{ template "docParam" . }}{{ end }}
+{{- range .HeaderParams }}{{ template "docParam" . }}{{ end }}
+{{- range .QueryParams }}{{ template "docParam" . }}{{ end }}
+{{- range .FormParams }}{{ template "docParam" . }}{{ end }}
+{{- range .Params }}
+{{- if .IsBodyParam }}
+{{- template "docModelBodyParam" . }}
+ {{- end }}
+{{- end }}
+{{- end }}{{/* end .Params */}}
+
+#### All responses
+| Code | Status | Description | Has headers | Schema |
+|------|--------|-------------|:-----------:|--------|
+{{- range .Responses }}
+| [{{.Code}}](#{{ dasherize $opname }}-{{ .Code }}) | {{ httpStatus .Code }} | {{ trimSpace .Description }} | {{ if .Headers }}✓{{ end }} | [schema](#{{ dasherize $opname }}-{{ .Code }}-schema) |
+{{- end }}
+{{- with .DefaultResponse }}
+| [default](#{{ dasherize $opname }}-default) | | {{ trimSpace .Description }} | {{ if .Headers }}✓{{ end }} | [schema](#{{ dasherize $opname }}-default-schema) |
+{{- end }}
+
+#### Responses
+{{ range .Responses }}
+
+##### <span id="{{ dasherize $opname }}-{{ .Code }}"></span> {{.Code}}{{ if .Description }} - {{ trimSpace .Description }}{{ end }}
+Status: {{ httpStatus .Code }}
+
+###### <span id="{{ dasherize $opname }}-{{ .Code }}-schema"></span> Schema
+ {{- if .Schema }}
+ {{ template "docModel" .Schema }}
+ {{- end }}
+
+ {{- if .Examples }}
+
+###### Examples
+ {{ range .Examples }}
+**{{ .MediaType }}**
+```json
+{{ prettyjson .Example }}
+```
+ {{- end }}
+ {{- end }}
+
+ {{- if .Headers }}
+
+###### Response headers
+{{ template "docHeaders" . }}
+ {{- end }}
+{{- end }}
+
+{{- with .DefaultResponse }}
+
+##### <span id="{{ dasherize $opname }}-default"></span> Default Response
+{{ trimSpace .Description }}
+
+###### <span id="{{ dasherize $opname }}-default-schema"></span> Schema
+ {{- if .Schema }}
+{{ template "docModel" .Schema }}
+ {{- else }}
+empty schema
+ {{- end }}
+
+ {{- if .Examples }}
+
+###### Examples
+ {{ range .Examples }}
+**{{ .MediaType }}**
+```json
+{{ .Example }}
+```
+ {{- end }}
+ {{- end }}
+
+ {{- if .Headers }}
+
+###### Response headers
+{{ template "docHeaders" . }}
+ {{- end }}
+{{- end }}
+
+ {{- if .ExtraSchemas }}
+
+###### Inlined models
+ {{- range .ExtraSchemas }}
+ {{- if ne .Name "" }}
+
+**<span id="{{ dasherize .Name }}"></span> {{ .Name }}**
+
+{{ template "docModel" . }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+
+{{- end }}{{/* end .Operations */}}
+
+## Models
+
+{{- range .Models }}
+
+### <span id="{{ dasherize .Name }}"></span> {{ .Name }}
+
+{{ template "docModel" . }}
+
+ {{- if .ExtraSchemas }}
+
+#### Inlined models
+ {{- range .ExtraSchemas }}
+ {{- if ne .Name "" }}
+
+**<span id="{{ dasherize .Name }}"></span> {{ .Name }}**
+
+{{ template "docModel" . }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{- end }}
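
The docs template above composes named templates recursively (for example, "docModelSchema" re-invokes itself for nested schemas). A toy sketch of the same text/template mechanism, using a hypothetical tree type rather than the template's real schema data:

```go
package main

import (
	"os"
	"text/template"
)

type node struct {
	Name     string
	Children []node
}

func main() {
	// "tree" calls itself for each child, just as docModelSchema recurses
	// into nested schemas.
	const tmpl = `{{ define "tree" }}{{ .Name }}{{ range .Children }} ({{ template "tree" . }}){{ end }}{{ end }}{{ template "tree" . }}`
	t := template.Must(template.New("docs").Parse(tmpl))
	root := node{Name: "Pet", Children: []node{{Name: "Tag"}, {Name: "Category"}}}
	// Prints: Pet (Tag) (Category)
	_ = t.Execute(os.Stdout, root)
}
```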
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/model.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/model.gotmpl
new file mode 100644
index 000000000..e107a1ee1
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/model.gotmpl
@@ -0,0 +1,27 @@
+{{ template "header" . }}
+{{- if .IncludeModel }}
+ {{- if .IsExported }}
+// {{ pascalize .Name }} {{ template "docstring" . }}
+ {{- template "annotations" . }}
+ {{- end }}
+ {{- template "schema" . }}
+{{- end }}
+
+{{ range .ExtraSchemas }}
+ {{- if .IncludeModel }}
+ {{- if .IsExported }}
+// {{ pascalize .Name }} {{ template "docstring" . }}
+ {{- template "annotations" . }}
+ {{- end }}
+ {{- template "schema" . }}
+ {{- end }}
+{{- end }}
+{{- define "annotations" }}{{/* annotations to generate spec from source */}}
+ {{- if not .IsBaseType }}
+//
+// swagger:model {{ .Name }}
+ {{- else }}
+//
+// swagger:discriminator {{ .Name }} {{ .DiscriminatorField }}
+ {{- end }}
+{{- end }}
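
For orientation, a hand-written sketch (hypothetical model and fields) of the output shape model.gotmpl produces: a docstring comment for the type, followed by the swagger:model annotation that lets go-swagger round-trip the spec from source:

```go
package models

// Pet A pet in the store
//
// swagger:model Pet
type Pet struct {
	// the name of the pet
	// Required: true
	Name *string `json:"name"`
}
```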
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/schema.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/schema.gotmpl
new file mode 100644
index 000000000..39339d728
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/schema.gotmpl
@@ -0,0 +1,131 @@
+{{- if and .IsBaseType .IsExported (not .IsSuperAlias) }}
+ {{- template "schemaPolymorphic" . }}
+{{- else if .IsSuperAlias }}
+ type {{ pascalize .Name }} {{ template "typeSchemaType" . }}{{/* For types declared as $ref on some other type, just declare the type as a golang _aliased_ type, e.g. type A = B. No method shall be redeclared. */}}
+ {{- if .IsBaseType }}
+ {{ template "baseTypeSerializer" . }}{{/* When the alias redeclares a polymorphic type, define factory methods with this alias. */}}
+ {{- end }}
+{{- else if .IsEmbedded }}
+ {{- template "schemaEmbedded" . }}
+{{- else }}
+ {{- if or .IsComplexObject .IsTuple .IsAdditionalProperties }}{{/* TODO(fred): handle case of subtype inheriting from base type with AdditionalProperties, issue #2220 */}}
+ {{ if .Name }}type {{ if not .IsExported }}{{ .Name }}{{ else }}{{ pascalize .Name }}{{ end }}{{ end }} {{ template "schemaBody" . }}
+ {{- range .Properties }}
+ {{- if .IsBaseType }}
+ // {{ pascalize .Name}} gets the {{ humanize .Name }} of this base type{{/* all properties which are of a base type propagate its interface */}}
+ func ({{ $.ReceiverName}} *{{ pascalize $.Name}}) {{ pascalize .Name}}() {{ template "schemaType" . }}{
+ {{- if eq $.DiscriminatorField .Name }}
+ return {{ printf "%q" $.DiscriminatorValue }}
+ {{- else }}
+ return {{ $.ReceiverName }}.{{camelize .Name}}Field
+ {{- end }}
+ }
+
+ // Set{{ pascalize .Name}} sets the {{ humanize .Name }} of this base type
+ func ({{ $.ReceiverName}} *{{ pascalize $.Name}}) Set{{ pascalize .Name}}(val {{ template "schemaType" . }}) {
+ {{- if ne $.DiscriminatorField .Name }}
+ {{ $.ReceiverName }}.{{camelize .Name}}Field = val
+ {{- end }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- if .Default }}{{/* TODO(fred) - issue #2189 */}}
+ func ({{.ReceiverName}} *{{ pascalize .Name }}) UnmarshalJSON(b []byte) error {
+ type {{ pascalize .Name }}Alias {{ pascalize .Name }}
+ var t {{ pascalize .Name }}Alias
+ if err := json.Unmarshal([]byte({{printf "%q" (json .Default)}}), &t); err != nil {
+ return err
+ }
+ if err := json.Unmarshal(b, &t); err != nil {
+ return err
+ }
+ *{{.ReceiverName}} = {{ pascalize .Name }}(t)
+ return nil
+ }
+ {{- end }}
+ {{- else }}
+ type {{ pascalize .Name }} {{ template "typeSchemaType" . }}
+ {{- end }}
+ {{- if (and .IsPrimitive .IsAliased .IsCustomFormatter (not (stringContains .Zero "(\""))) }}
+ {{ template "aliasedSerializer" . }}
+ {{- end }}
+ {{- if .IsSubType }}
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{- if .IsBaseType }}
+
+ // {{ pascalize .Name}} gets the {{ humanize .Name }} of this subtype
+ func ({{$.ReceiverName}} *{{ pascalize $.Name}}) {{ pascalize .Name}}() {{ template "schemaType" . }}{
+ {{- if eq $.DiscriminatorField .Name }}
+ return {{ printf "%q" $.DiscriminatorValue }}
+ {{- else }}
+ return {{ $.ReceiverName }}.{{camelize .Name}}Field
+ {{- end }}
+ }
+
+ // Set{{ pascalize .Name}} sets the {{ humanize .Name }} of this subtype
+ func ({{$.ReceiverName}} *{{ pascalize $.Name}}) Set{{ pascalize .Name}}(val {{ template "schemaType" . }}) {
+ {{- if ne $.DiscriminatorField .Name }}
+ {{ $.ReceiverName }}.{{camelize .Name}}Field = val
+ {{- end }}
+ }
+ {{- end }}
+ {{- end }}{{/* TODO(fred): handle AdditionalProperties in base type */}}
+ {{- end }}
+ {{ template "mapOrSliceGetter" . }}
+ {{- end }}
+ {{ template "schemaSerializer" . }}
+{{- end }}
+{{- if and .IncludeValidator (not .IsSuperAlias) (not .IsEmbedded) }}{{/* aliased types type A = B do not redefine methods */}}
+ {{- if and (not (or .IsInterface .IsStream)) (or .Required .HasValidations .HasBaseType) }}
+ {{- if (eq .SwaggerType "string") }}{{/* Enum factory for enums for which we generate const (at the moment, only strings) */}}
+ {{- if .Enum }}
+
+func New{{ pascalize .Name }}(value {{ .GoType }}) *{{ .GoType }} {
+ return &value
+}
+
+// Pointer returns a pointer to a freshly-allocated {{ .GoType }}.
+func ({{ .ReceiverName }} {{ .GoType }}) Pointer() *{{ .GoType }} {
+ return &{{ .ReceiverName }}
+}
+ {{- end }}
+ {{- end }}
+ {{ template "schemavalidator" . }}
+ {{- else if not (or .IsInterface .IsStream) }}
+// Validate validates this {{ humanize .Name }}{{/* this schema implements the runtime.Validatable interface but has no validations to check */}}
+func ({{.ReceiverName}} {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ if or (not .IsExported) .Discriminates }}{{ camelize .Name }}{{ else }}{{ pascalize .Name }}{{ end }}) Validate(formats strfmt.Registry) error {
+ return nil
+}
+ {{- else }}{{/* {{ .Name }} does not implement the runtime.Validatable interface: noop */}}
+ {{- end }}
+ {{- if and (not (or .IsInterface .IsStream)) .HasContextValidations }}
+ {{ template "schemacontextvalidator" . }}
+ {{- else if not (or .IsInterface .IsStream) }}
+// ContextValidate validates this {{ humanize .Name }} based on the context it is used in {{/* this schema implements the runtime.ContextValidatable interface but has no validations to check */}}
+func ({{.ReceiverName}} {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ if or (not .IsExported) .Discriminates }}{{ camelize .Name }}{{ else }}{{ pascalize .Name }}{{ end }}) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
+ return nil
+}
+ {{- else }}{{/* {{ .Name }} does not implement the runtime.ContextValidatable interface: noop */}}
+ {{- end }}
+{{- end }}
+{{- if .WantsMarshalBinary }}
+ {{ template "marshalBinarySerializer" . }}
+{{- end }}
+{{- define "mapOrSliceGetter" }}{{/* signature for AdditionalProperties and AdditionalItems getter funcs */}}
+ {{- if not .IsBaseType }}
+ {{- if .HasAdditionalProperties }}
+ {{- with .AdditionalProperties }}
+ // {{- template "docstring" . }}{{- template "propertyValidationDocString" . }}
+ {{ pascalize .Name }}() map[string]{{ template "schemaType" . }}
+ {{- end }}
+ {{- end }}
+ {{- with .AdditionalItems }}
+ // {{- template "docstring" . }}{{- template "propertyValidationDocString" . }}
+ {{ pascalize .Name }}() []{{ template "schemaType" . }}
+ {{- end }}
+ {{- else }}
+ // AdditionalProperties in base type should be handled just like regular properties{{/* TODO(fred): add full support for AdditionalProperties in base type */}}
+ // At this moment, the base type property is pushed down to the subtype
+ {{- end }}
+{{- end }}
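
The Default branch in schema.gotmpl above generates a two-pass UnmarshalJSON: decode the schema's default document into an alias type first, then decode the incoming bytes over it, so absent fields keep their defaults while present fields win. A self-contained sketch of the same trick, with an assumed Config type and default values:

```go
package main

import (
	"encoding/json"
	"fmt"
)

type Config struct {
	Host string `json:"host"`
	Port int    `json:"port"`
}

func (c *Config) UnmarshalJSON(b []byte) error {
	// ConfigAlias has Config's fields but not its methods, which avoids
	// recursing back into this UnmarshalJSON.
	type ConfigAlias Config
	var t ConfigAlias
	// First pass: apply the defaults document.
	if err := json.Unmarshal([]byte(`{"host":"localhost","port":8080}`), &t); err != nil {
		return err
	}
	// Second pass: overlay whatever the caller actually sent.
	if err := json.Unmarshal(b, &t); err != nil {
		return err
	}
	*c = Config(t)
	return nil
}

func main() {
	var c Config
	_ = json.Unmarshal([]byte(`{"port":9090}`), &c)
	fmt.Printf("%+v\n", c) // {Host:localhost Port:9090}
}
```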
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/schemabody.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemabody.gotmpl
new file mode 100644
index 000000000..947e8c01b
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemabody.gotmpl
@@ -0,0 +1,330 @@
+{{ define "schemaBody" }}struct {
+ {{ range .AllOf }}
+ {{ if or (and $.IsSubType .IsBaseType .IsExported) .IsAnonymous }}
+ {{ range .Properties }}
+ {{ if ne $.DiscriminatorField .Name }}
+ {{ if or (not $.IsExported) (and $.IsSubType .IsBaseType) }}
+ {{ if $.IsTuple }}
+ {{ template "privtuplefield" . }}
+ {{ else }}
+ {{template "privstructfield" . }}
+ {{ end }}
+ {{ else }}
+ {{ if $.IsTuple }}
+ {{ template "tuplefield" . }}
+ {{ else }}
+ {{template "structfield" . }}
+ {{ end }}
+ {{ end}}
+ {{ end }}
+ {{ end }}
+ {{- if .HasAdditionalProperties }}
+ {{- if .AdditionalProperties }}
+ // {{ template "docstring" .AdditionalProperties }}
+ {{- template "propertyValidationDocString" .AdditionalProperties}}
+ {{- if and .IsExported (not .IsSubType) }}
+ {{ pascalize .AdditionalProperties.Name }}
+ {{- else if or (not .AdditionalProperties.IsExported) (.AdditionalProperties.IsBaseType) }}
+ {{ camelize .AdditionalProperties.Name }}Field
+ {{- else }}
+ {{ .AdditionalProperties.Name }}
+ {{- end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{- end }}
+ {{- end }}
+ {{- if .AdditionalItems }}
+ // {{ template "docstring" .AdditionalItems }}
+ {{- template "propertyValidationDocString" .AdditionalItems}}
+ {{- if and .IsExported (not $.IsSubType) }}{{/* TODO(fred): make sure inherited AdditionalItems are camelized */}}
+ {{ pascalize .AdditionalItems.Name }}
+ {{- else }}
+ {{ .AdditionalItems.Name }}
+ {{- end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{- end }}
+ {{ else }}{{/* named type composition */}}
+ {{ if not (and $.IsBaseType .IsExported) }}{{ .GoType }}{{ end }}
+ {{ end }}
+ {{ end }}
+ {{range .Properties}}
+ {{ if or (not $.IsExported) ($.IsBaseType) (.IsBaseType) }}
+ {{ if $.IsTuple }}{{ template "privtuplefield" . }}{{ else }}{{template "privstructfield" . }}{{ end }}{{ else }}{{ if $.IsTuple }}{{ template "tuplefield" . }}{{ else }}{{template "structfield" . }}{{ end }}{{ end}}
+ {{ end }}
+ {{ if .HasAdditionalProperties }}
+ {{- if .AdditionalProperties }}
+ // {{ template "docstring" .AdditionalProperties }}
+ {{- template "propertyValidationDocString" .AdditionalProperties}}
+ {{- if and .IsExported (not .IsSubType) }}
+ {{ pascalize .AdditionalProperties.Name }}
+ {{- else }}
+ {{ pascalize .AdditionalProperties.Name }}Field
+ {{- end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{ end }}
+ {{- end }}
+ {{- if .AdditionalItems }}
+ // {{ template "docstring" .AdditionalItems }}
+ {{- template "propertyValidationDocString" .AdditionalItems}}
+ {{ if and .IsExported (not .IsSubType) }}{{ pascalize .AdditionalItems.Name }}{{ else }}{{ pascalize .AdditionalItems.Name }}Field{{ end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{ end }}
+}
+{{- end }}
+
+{{ define "subTypeBody" }}struct {
+ {{- range .AllOf }}
+ {{- if or (and .IsBaseType .IsExported) .IsAnonymous }}
+ {{- range .Properties }}
+ {{- if not $.IsExported }}
+ {{- if $.IsTuple }}
+ {{- template "privtuplefield" . }}
+ {{- else }}
+ {{- template "privstructfield" . }}
+ {{- end }}
+ {{- else }}
+ {{- if $.IsTuple }}
+ {{- template "tuplefield" . }}
+ {{- else }}
+ {{- template "structfield" . }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- if .HasAdditionalProperties }}
+ {{- if .AdditionalProperties }}
+ {{- if .IsExported }}
+ {{ pascalize .AdditionalProperties.Name }}
+ {{- else }}
+ {{ .AdditionalProperties.Name }}
+ {{- end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{- end }}
+ {{- end }}
+ {{- if .AdditionalItems }}
+ {{- if .IsExported }}
+ {{ pascalize .AdditionalItems.Name }}
+ {{- else }}
+ {{ .AdditionalItems.Name }}
+ {{- end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{- end }}
+ {{- else }}
+ {{- if not (and .IsBaseType .IsExported) }}
+ {{ .GoType }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{ range .Properties }}
+ {{- if not $.IsExported }}
+ {{- if $.IsTuple }}
+ {{ template "privtuplefield" . }}
+ {{- else }}
+ {{ template "privstructfield" . }}
+ {{- end }}
+ {{- else }}
+ {{- if $.IsTuple }}
+ {{ template "tuplefield" . }}
+ {{- else }}
+ {{ template "structfield" . }}
+ {{- end }}
+ {{- end}}
+ {{- end }}
+ {{- if .HasAdditionalProperties }}
+ {{- if .AdditionalProperties }}
+ {{- if and .IsExported }}
+ {{ pascalize .AdditionalProperties.Name }}
+ {{- else }}
+ {{ pascalize .AdditionalProperties.Name }}Field
+ {{- end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{- end }}
+ {{- end }}
+ {{- if .AdditionalItems }}
+ {{- if and .IsExported (not .IsSubType) }}
+ {{ pascalize .AdditionalItems.Name }}
+ {{- else }}
+ {{ pascalize .AdditionalItems.Name }}Field
+ {{- end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{- end }}
+}
+{{- end }}
+
+{{ define "withBaseTypeBody" }}struct {
+ {{ range .AllOf }}
+ {{ if or (and .IsBaseType .IsExported) .IsAnonymous }}{{ range .Properties }}
+ {{ if not .IsExported }}{{ if .IsTuple }}{{ template "privtuplefield" . }}{{ else }}{{template "privstructfield" . }}{{ end }}{{ else }}{{ if $.IsTuple }}{{ template "tuplefield" . }}{{ else }}{{template "structfield" . }}{{ end }}{{ end}}
+ {{ end }}{{ if .HasAdditionalProperties }}{{ if .IsExported }}{{ pascalize .AdditionalProperties.Name }}{{ else }}{{ .AdditionalProperties.Name }}{{ end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"` {{end}}
+ {{ if .AdditionalItems }}{{ if and .IsExported }}{{ pascalize .AdditionalItems.Name }}{{ else }}{{ .AdditionalItems.Name }}{{ end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{ end }}
+ {{ else }}
+ {{ if not (and .IsBaseType .IsExported) }}{{ .GoType }}{{ end }}{{ end }}
+ {{ end }}
+ {{range .Properties}}{{ if .IsBaseType }}
+ {{ if not $.IsExported }}{{ else }}{{ pascalize .Name}} {{ template "schemaType" . }} `json:"{{ .Name }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`{{ end}}
+ {{end}}{{ end }}
+ {{ if .HasAdditionalProperties }}{{ if and .IsExported }}{{ pascalize .AdditionalProperties.Name }}{{ else }}{{ pascalize .AdditionalProperties.Name }}Field{{ end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{ end }}
+ {{ if .AdditionalItems }}{{ if and .IsExported (not .IsSubType) }}{{ pascalize .AdditionalItems.Name }}{{ else }}{{ pascalize .AdditionalItems.Name }}Field{{ end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{ end }}
+}
+{{- end }}
+
+{{ define "withoutBaseTypeBody" }}struct {
+ {{ range .AllOf }}
+ {{ if .IsAnonymous }}
+ {{ range .Properties }}
+ {{ if and .IsExported (not .IsBaseType) }}
+ {{ if .IsTuple }}
+ {{ template "tuplefield" . }}
+ {{ else }}
+ {{template "structfield" . }}
+ {{ end }}
+ {{ else }}
+ {{ pascalize .Name }} json.RawMessage `json:"{{ .Name }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{ end}}
+ {{ end }}
+ {{ if .HasAdditionalProperties }}
+ {{ if .AdditionalProperties }}
+ {{ if .IsExported }}{{ pascalize .AdditionalProperties.Name }}{{ else }}{{ .AdditionalProperties.Name }}{{ end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{end}}
+ {{ end }}
+ {{ if .AdditionalItems }}
+ {{ if .IsExported }}{{ pascalize .AdditionalItems.Name }}{{ else }}{{ .AdditionalItems.Name }}{{ end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{ end }}
+ {{ else }}
+ {{ if not (and .IsBaseType .IsExported) }}
+ {{ .GoType }}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{range .Properties}}
+ {{ if not .IsBaseType }}
+ {{ if not $.IsExported }}
+ {{template "privstructfield" . }}
+ {{ else }}
+ {{ pascalize .Name}} {{ template "schemaType" . }} `json:"{{ .Name }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{ end}}
+ {{ else }}
+ {{ pascalize .Name }} json.RawMessage `json:"{{ .Name }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{end}}
+ {{ end }}
+ {{ if .HasAdditionalProperties }}
+ {{ pascalize .AdditionalProperties.Name }}{{ if .IsExported }}Field{{ end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{ end }}
+}
+{{- end }}
+
+{{ define "withoutBaseTypeBodyOrNonExported" }}struct {
+ {{ range .AllOf }}
+ {{ if .IsAnonymous }}
+ {{ range .Properties }}
+ {{ if and .IsExported (not .IsBaseType) }}
+ {{ if .IsTuple }}
+ {{ template "tuplefield" . }}
+ {{ else }}
+ {{template "structfield" . }}
+ {{ end }}
+ {{ end}}
+ {{ end }}
+ {{ if .HasAdditionalProperties }}
+ {{ if .AdditionalProperties }}
+ {{ if .IsExported }}{{ pascalize .AdditionalProperties.Name }}{{ else }}{{ .AdditionalProperties.Name }}{{ end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{end}}
+ {{ end }}
+ {{ if .AdditionalItems }}
+ {{ if .IsExported }}{{ pascalize .AdditionalItems.Name }}{{ else }}{{ .AdditionalItems.Name }}{{ end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{ end }}
+ {{ else }}
+ {{ if not (and .IsBaseType .IsExported) }}
+ {{ .GoType }}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{range .Properties}}
+ {{ if not .IsBaseType }}
+ {{ if not .IsExported }}
+ {{template "privstructfield" . }}
+ {{ else }}
+ {{ pascalize .Name}} {{ template "schemaType" . }} `json:"{{ .Name }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{ end}}
+ {{end}}
+ {{ end }}
+ {{ if .HasAdditionalProperties }}
+ {{ pascalize .AdditionalProperties.Name }}{{ if .IsExported }}Field{{ end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{ end }}}{
+ {{ range .AllOf }}
+ {{ if .IsAnonymous }}
+ {{ range .Properties }}
+ {{ if not .IsBaseType }}
+ {{ pascalize .Name }}: {{ .ReceiverName}}.{{ pascalize .Name }},
+ {{ end }}
+ {{ end }}
+ {{ else }}
+ {{ if not (and .IsBaseType .IsExported) }}
+ {{ .GoType }}: {{ .ReceiverName }}.{{ .GoType }},
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ range .Properties }}
+ {{ if and (not .IsBaseType) .IsExported }}
+ {{ pascalize .Name }}: {{ .ReceiverName }}.{{ pascalize .Name }},
+ {{ end }}
+ {{ end }}
+ },
+{{- end }}
+
+{{ define "withBaseTypeBodyAndNonExported" }}struct{
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ if .IsBaseType }}
+ {{ pascalize .Name }} {{ template "schemaType" . }} `json:"{{ .Name }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ range .Properties }}
+ {{ if or (not .IsExported) .IsBaseType }}
+ {{ pascalize .Name }} {{ template "schemaType" . }} `json:"{{ .Name }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{ end }}
+ {{end}}} {
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ if .IsBaseType }}
+ {{ pascalize .Name }}:
+ {{ if ne .DiscriminatorField .Name }}
+ {{ .ReceiverName }}.{{ if .IsSubType}}{{ camelize .Name }}Field{{ else }}{{ pascalize .Name }}(){{ end }},
+ {{ else }}
+ {{ .ReceiverName }}.{{pascalize .Name}}(),
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ range .Properties }}
+ {{ if or (not .IsExported) .IsBaseType }}
+ {{ pascalize .Name }}: {{ .ReceiverName }}.{{ if .IsBaseType}}{{ camelize .Name }}Field{{ else }}{{ pascalize .Name }}{{ end }},
+ {{ end }}
+ {{ end }} },
+{{- end }}
+
+{{ define "withoutAdditionalBody" }}struct {
+ {{ range .AllOf }}
+ {{ if or (and $.IsSubType .IsBaseType .IsExported) .IsAnonymous }}{{ range .Properties }}
+ {{ if ne $.DiscriminatorField .Name }}{{ if or (not $.IsExported) (and $.IsSubType .IsBaseType) }}{{ if $.IsTuple }}{{ template "privtuplefield" . }}{{ else }}{{template "privstructfield" . }}{{ end }}{{ else }}{{ if $.IsTuple }}{{ template "tuplefield" . }}{{ else }}{{template "structfield" . }}{{ end }}{{ end}}{{ end }}
+ {{ end }}
+ {{ else }}
+ {{ if not (and .IsBaseType .IsExported) }}{{ .GoType }}{{ end }}{{ end }}
+ {{ end }}
+ {{range .Properties}}
+ {{ if or (not $.IsExported) (and $.IsSubType .IsBaseType) }}{{ if $.IsTuple }}{{ template "privtuplefield" . }}{{ else }}{{template "privstructfield" . }}{{ end }}{{ else }}{{ if $.IsTuple }}{{ template "tuplefield" . }}{{ else }}{{template "structfield" . }}{{ end }}{{ end}}
+ {{end}}
+}
+{{- end }}
+
+{{ define "JustBaseTypeBody" }}struct {
+ /* Just the base type fields. Used for unmarshalling polymorphic types. */
+ {{ range .AllOf }}
+ {{ if .IsBaseType }}
+ {{ range .Properties }}
+ {{ if .IsExported }}
+ {{ if .IsTuple }}
+ {{ template "tuplefield" . }}
+ {{ else }}
+ {{template "structfield" . }}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+}
+{{- end }}
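
Throughout schemabody.gotmpl, the additionalProperties overflow fields are emitted with a `json:"-"` tag, because the generated serializers (defined in other templates) fold them into the same JSON object as the declared fields. A compact sketch of why, using a hypothetical type and a hand-written stand-in for the generated marshaler:

```go
package main

import (
	"encoding/json"
	"fmt"
)

type Labels struct {
	Name string `json:"name"`

	// captured additionalProperties; hidden from the default encoder
	Extra map[string]string `json:"-"`
}

// MarshalJSON plays the role of the generated serializer: it merges the
// named field and the overflow map into one flat JSON object.
func (l Labels) MarshalJSON() ([]byte, error) {
	out := map[string]string{"name": l.Name}
	for k, v := range l.Extra {
		out[k] = v
	}
	return json.Marshal(out)
}

func main() {
	b, _ := json.Marshal(Labels{Name: "db", Extra: map[string]string{"env": "prod"}})
	fmt.Println(string(b)) // {"env":"prod","name":"db"}
}
```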
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/schemaembedded.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemaembedded.gotmpl
new file mode 100644
index 000000000..f86c27bc6
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemaembedded.gotmpl
@@ -0,0 +1,21 @@
+{{ define "schemaEmbedded" }}
+type {{ pascalize .Name }} struct {
+ {{ if .ElemType.IsNullable }}*{{ end }}{{ .ElemType.GoType }}
+}
+
+func ({{.ReceiverName }} {{ if or .IsTuple .IsComplexObject }}*{{ end }}{{ if .Discriminates }}{{ camelize .Name }}{{ else if .IsExported }}{{ pascalize .Name }}{{ else }}{{ .Name }}{{ end }}) Validate(formats strfmt.Registry) error {
+ var f interface{} = {{ .ReceiverName }}.{{ dropPackage .ElemType.GoType }}
+ if v, ok := f.(runtime.Validatable) ; ok {
+ return v.Validate(formats)
+ }
+ return nil
+}
+
+func ({{.ReceiverName }} {{ if or .IsTuple .IsComplexObject }}*{{ end }}{{ if .Discriminates }}{{ camelize .Name }}{{ else if .IsExported }}{{ pascalize .Name }}{{ else }}{{ .Name }}{{ end }}) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
+ var f interface{} = {{ .ReceiverName }}.{{ dropPackage .ElemType.GoType }}
+ if v, ok := f.(runtime.ContextValidatable) ; ok {
+ return v.ContextValidate(ctx, formats)
+ }
+ return nil
+}
+{{- end }}
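
schemaembedded.gotmpl delegates validation through an `interface{}` assignment plus a type assertion, so the generated code compiles even when the embedded element type is not validatable. The same pattern as a standalone sketch; Validatable here is a simplified stand-in for the go-openapi runtime interface (whose Validate also takes a strfmt.Registry):

```go
package main

import "fmt"

// Validatable stands in for the runtime.Validatable interface.
type Validatable interface {
	Validate() error
}

type Inner struct{ ID string }

func (i Inner) Validate() error {
	if i.ID == "" {
		return fmt.Errorf("id is required")
	}
	return nil
}

type Embedded struct {
	Inner
}

func (e Embedded) Validate() error {
	// Assign to an interface{} first so the assertion compiles (and simply
	// fails at runtime) for element types that are not Validatable.
	var f interface{} = e.Inner
	if v, ok := f.(Validatable); ok {
		return v.Validate()
	}
	return nil
}

func main() {
	fmt.Println(Embedded{}.Validate()) // id is required
}
```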
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/schemapolymorphic.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemapolymorphic.gotmpl
new file mode 100644
index 000000000..67b6a4fe0
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemapolymorphic.gotmpl
@@ -0,0 +1,53 @@
+{{ define "schemaPolymorphic" }}
+ type {{ pascalize .Name }} interface {
+ {{- if not (or .IsInterface .IsStream) }}{{/*
+ A base type is always Validatable.
+ Under normal conditions, we can't have a base type rendered as .IsStream or .IsInterface: this check is just a sanity check.
+
+ In the definition of the base type itself, this means that the unexported struct holding
+ the definition of the base type has a Validate() func and a ContextValidate() func.
+ */}}
+ runtime.Validatable
+ runtime.ContextValidatable
+ {{- end }}
+ {{ range .AllOf }}
+ {{- if .IsAnonymous }}
+ {{ range .Properties }}
+ {{ if $.IsTuple }}{{ template "tuplefieldIface" . }}{{ else }}{{template "structfieldIface" . }}{{ end }}
+ {{- end }}
+ {{ template "mapOrSliceGetter" . }}
+ {{- else }}
+ {{ .GoType }}
+ {{- end }}
+ {{- end }}
+ {{ range .Properties }}
+ {{- if $.IsTuple }}
+ {{ template "tuplefieldIface" . }}
+ {{- else }}
+ {{ template "structfieldIface" . }}
+ {{- end }}
+ {{- end }}
+ {{ template "mapOrSliceGetter" . }}
+ }
+
+ type {{ camelize .Name }} {{ template "schemaBody" . }}{{/* unexported implementation of the interface (TODO(fred): atm, this is not used, issue #232) */}}
+ {{- range .Properties }}
+
+ // {{ pascalize .Name}} gets the {{ humanize .Name }} of this polymorphic type
+ func ({{ $.ReceiverName}} *{{ camelize $.Name}}) {{ pascalize .Name}}() {{ template "schemaType" . }}{
+ {{- if eq $.DiscriminatorField .Name }}
+ return {{ printf "%q" $.DiscriminatorValue }}
+ {{- else }}
+ return {{ $.ReceiverName }}.{{camelize .Name}}Field
+ {{- end }}
+ }
+
+ // Set{{ pascalize .Name}} sets the {{ humanize .Name }} of this polymorphic type
+ func ({{ $.ReceiverName}} *{{ camelize $.Name}}) Set{{ pascalize .Name}}(val {{ template "schemaType" . }}) {
+ {{- if ne $.DiscriminatorField .Name }}
+ {{ $.ReceiverName }}.{{camelize .Name}}Field = val
+ {{- end }}
+ }
+ {{- end }}{{/* TODO(fred): AdditionalProperties */}}
+ {{ template "polymorphicSerializer" . }}
+{{- end }}
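
A hand-rolled miniature (names assumed) of what schemapolymorphic.gotmpl generates: an exported interface, an unexported struct with `...Field` members, and getters/setters in which the discriminator is effectively read-only, always answering with the subtype's fixed discriminator value:

```go
package main

import "fmt"

type Pet interface {
	PetType() string
	SetPetType(string)
	Name() string
	SetName(string)
}

type dog struct {
	nameField string
}

// PetType is the discriminator: it returns a constant, never a stored field.
func (d *dog) PetType() string { return "Dog" }

// SetPetType is intentionally a no-op; the discriminator cannot be changed.
func (d *dog) SetPetType(string) {}

func (d *dog) Name() string       { return d.nameField }
func (d *dog) SetName(val string) { d.nameField = val }

func main() {
	var p Pet = &dog{}
	p.SetName("Rex")
	p.SetPetType("Cat")                // silently ignored, as in the generated code
	fmt.Println(p.PetType(), p.Name()) // Dog Rex
}
```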
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/schematype.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/schematype.gotmpl
new file mode 100644
index 000000000..cd5ef8d16
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/schematype.gotmpl
@@ -0,0 +1,29 @@
+{{ define "schemaType" }}
+ {{- if and (or (gt (len .AllOf) 0) .IsAnonymous) ( not .IsMap) }}
+ {{- template "schemaBody" . }}
+ {{- else }}
+ {{- if and (not .IsMap) .IsNullable (not .IsSuperAlias) }}*{{ end }}
+ {{- if .IsSuperAlias }} = {{ end }}
+ {{- .GoType }}
+ {{- end}}
+{{- end }}
+
+{{ define "dereffedSchemaType" }}
+ {{- if and (or (gt (len .AllOf) 0) .IsAnonymous) ( not .IsMap) }}
+ {{- template "schemaBody" . }}
+ {{- else }}
+ {{- .GoType }}
+ {{- end}}
+{{- end }}
+
+{{ define "typeSchemaType" }}
+ {{- if and (or (gt (len .AllOf) 0) .IsAnonymous) ( not .IsMap) ( not .IsSuperAlias ) }}
+ {{- template "schemaBody" . }}
+ {{- else if and .IsSubType ( not .IsSuperAlias ) }}
+ {{- template "subTypeBody" . }}
+ {{- else }}
+ {{- if and (not .IsMap) .IsNullable (not .IsSuperAlias) }}*{{ end }}
+ {{- if .IsSuperAlias }} = {{ end }}
+ {{- if .AliasedType }}{{ .AliasedType }}{{ else }}{{ .GoType }}{{ end }}
+ {{- end}}
+{{- end }}
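
schematype.gotmpl chooses between three spellings: a plain named type, an `=` alias when .IsSuperAlias is set, and a pointer when the schema is nullable. A small sketch (hypothetical types) of what each spelling means in Go:

```go
package main

import "fmt"

type Base struct{ N int }

// Named type ("type A B"): a distinct type; methods are not inherited.
type Named Base

// Alias ("type A = B", emitted for .IsSuperAlias): the very same type
// under another name, so no methods may be redeclared on it.
type Alias = Base

// Nullable rendering: a pointer, so "absent" is representable as nil.
type Nullable = *Base

func main() {
	var a Alias = Base{N: 1}        // assignable without conversion: same type
	var n Named = Named(Base{N: 2}) // needs an explicit conversion
	var p Nullable                  // nil means "not set"
	fmt.Println(a.N, n.N, p == nil) // 1 2 true
}
```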
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/schemavalidator.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemavalidator.gotmpl
new file mode 100644
index 000000000..61684acd0
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/schemavalidator.gotmpl
@@ -0,0 +1,1194 @@
+{{ define "primitivefieldcontextvalidator" }}
+ {{ if .ReadOnly }}
+ if err := validate.ReadOnly(ctx, {{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil{
+ return err
+ }
+ {{ end }}
+{{ end }}
+{{ define "primitivefieldvalidator" }}
+ {{ if .Required }}
+ {{- if and (eq .GoType "string") (not .IsNullable) }}
+ if err := validate.RequiredString({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsAliased }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if .IsAliased }}){{ end }}); err != nil {
+ {{- else }}
+ if err := validate.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ {{- end }}
+ return err
+ }
+ {{- end }}
+ {{ if .MinLength }}
+ if err := validate.MinLength({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ToString }}, {{.MinLength }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{ if .MaxLength }}
+ if err := validate.MaxLength({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ToString }}, {{.MaxLength }}); err != nil {
+ return err
+ }
+ {{ end }}
+ {{ if .Pattern }}
+ if err := validate.Pattern({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ToString }}, `{{ escapeBackticks .Pattern }}`); err != nil {
+ return err
+ }
+ {{- end }}
+ {{ if .Minimum }}
+ {{ template "validationMinimum" . }}
+ {{ end }}
+ {{ if .Maximum }}
+ {{ template "validationMaximum" . }}
+ {{ end }}
+ {{ if .MultipleOf }}
+ {{ template "validationMultipleOf" . }}
+ {{ end }}
+ {{ if .Enum }}
+ // value enum
+ if err := {{.ReceiverName }}.validate{{ pascalize .Name }}{{ .Suffix }}Enum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{ if and .IsCustomFormatter (not .IsStream) (not .IsBase64) }}
+ {{ template "validationCustomformat" . }}
+ {{- end }}
+{{ end }}
+
+{{ define "slicecontextvalidator" }}
+ {{ if .ReadOnly }}
+ if err := validate.ReadOnly(ctx, {{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil{
+ return err
+ }
+ {{ end }}
+ {{ if .Items }}
+ {{- if and (or .Items.ReadOnly .Items.HasContextValidations) (not .Items.IsInterface) (not .Items.IsStream) }}
+ for {{.IndexVar }} := 0; {{.IndexVar }} < len({{.ValueExpression }}); {{.IndexVar }}++ {
+ {{- with .Items }}
+ {{ template "propertycontextvalidator" . }}
+ {{- end }}
+ }
+ {{- end }}
+ {{- else }}
+ {{- if or .IsAliased (ne .ValueExpression .ReceiverName) }}{{/* prevents generated code from calling itself: this is reserved for aliased types */}}
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ if {{ .ValueExpression }} != nil {
+ {{- end }}
+ if err := {{.ValueExpression }}.ContextValidate(ctx, formats); err != nil {
+ if ve, ok := err.(*errors.Validation); ok {
+ return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ } else if ce, ok := err.(*errors.CompositeError); ok {
+ return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ }
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{ end }}
+
+{{define "slicevalidator" }}
+ {{ if .Required }}
+ if err := validate.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ValueExpression }}); err != nil {
+ return err
+ }
+ {{ end }}
+ {{ if or .MinItems .MaxItems }}
+ {{ .IndexVar }}{{ pascalize .Name }}Size := int64(len({{.ValueExpression }}))
+ {{ end }}
+ {{ if .MinItems }}
+ if err := validate.MinItems({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .IndexVar }}{{ pascalize .Name }}Size, {{.MinItems }}); err != nil {
+ return err
+ }
+ {{ end }}
+ {{ if .MaxItems }}
+ if err := validate.MaxItems({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .IndexVar }}{{ pascalize .Name }}Size, {{.MaxItems }}); err != nil {
+ return err
+ }
+ {{ end }}
+ {{ if .UniqueItems }}
+ if err := validate.UniqueItems({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{.ValueExpression }}); err != nil {
+ return err
+ }
+ {{ end }}
+ {{ if .Enum }}
+ // for slice
+ if err := {{.ReceiverName }}.validate{{ pascalize .Name }}Enum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{.ValueExpression }}); err != nil {
+ return err
+ }
+ {{ end }}
+ {{ if .Items }}
+ {{- if and (or .Items.Required .Items.HasValidations .Items.IsBaseType .Items.IsAliased) (not .Items.IsInterface) (not .Items.IsStream) (not .Items.SkipExternalValidation) }}
+ for {{.IndexVar }} := 0; {{.IndexVar }} < len({{.ValueExpression }}); {{.IndexVar }}++ {
+ {{- with .Items }}
+ {{- if and .IsNullable (not .Required) (not .IsMapNullOverride) }}
+ {{- if .IsInterface }}
+ if {{ .ValueExpression }} == nil { // not required
+ {{- else }}
+ if swag.IsZero({{ .ValueExpression }}) { // not required
+ {{- end }}
+ continue
+ }
+ {{- end }}
+ {{ template "propertyvalidator" . }}
+ {{- end }}
+ }
+ {{- end }}
+ {{- else }}
+ {{- if and (or .IsAliased (ne .ValueExpression .ReceiverName) (not .SkipExternalValidation)) }}{{/* prevents generated code from calling itself: this is reserved for aliased types */}}
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ if {{ .ValueExpression }} != nil {
+ {{- end }}
+ if err := {{.ValueExpression }}.Validate(formats); err != nil {
+ if ve, ok := err.(*errors.Validation); ok {
+ return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ } else if ce, ok := err.(*errors.CompositeError); ok {
+ return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ }
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{ end }}
+{{ define "mapcontextvalidator" }}
+ {{- if and .Required }}
+ {{- if or .IsNullable .IsInterface }}
+ if {{ .ReceiverName }}.{{ pascalize .Name }} == nil {
+ return errors.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, nil)
+ }
+ {{- else }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{ .ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- end }}
+ {{ if .HasAdditionalProperties }}
+ {{- if .AdditionalProperties.HasContextValidations }}
+ {{- $validatedValues := .ValueExpression }}{{ $keyVar := .AdditionalProperties.KeyVar }}
+ for {{ .AdditionalProperties.KeyVar }} := range {{ .ValueExpression }} {
+ {{ with .AdditionalProperties }}
+ {{/* No need to call ContextValidate directly here, since we are recursing */}}
+ {{- if .IsPrimitive }}
+ {{- if .IsAliased }}
+ {{- if not .IsAnonymous }}
+ if val, ok := {{ $validatedValues }}[{{ $keyVar }}]; ok {
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ if val != nil {
+ {{- end }}
+ if err := val.ContextValidate(ctx, formats); err != nil {
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ }
+ {{- else }}{{/* validation of anonymous objects */}}
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ template "propertycontextvalidator" . }}
+ {{ end }}
+ {{- end }}
+ {{ range .Properties }}
+ {{ template "propertycontextvalidator" . }}
+ {{ end }}
+ {{- end }}
+ {{ if and .IsTuple .AdditionalItems }}
+ // TODO: context validating additional items should go here; if you see this, raise an issue{{/* TODO(fred): investigate the case to remove that comment: AdditionalItems shouldn't come in maps. Upstream validation is needed to guard against this */}}
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{ end }}
+ {{ else }}
+ {{ template "primitivefieldcontextvalidator" . }}
+ {{ end }}
+ {{- else if and .IsCustomFormatter (or .HasValidations .Required) }}{{/* custom format not captured as primitive */}}
+ {{- if and (not .IsStream) (not .IsBase64) }}{{/* TODO: IsStream and CustomFormattershould be mutually exclusive in type resolver */}}
+ // TODO: context validating custom formatter items should go here; if you see this, raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{/*
+ {{ template "validationCustomformat" . }}
+ */}}
+ {{- end }}
+ {{- else if .IsArray }}
+ {{ template "slicecontextvalidator" . }}
+ {{- else if and .IsMap (not .IsInterface) }}
+ {{ template "mapcontextvalidator" . }}
+ {{- else if and .IsMap .IsInterface }}
+ {{ if .Enum }}
+ if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}ValueEnum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ $validatedValues }}[{{ $keyVar }}]); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- else if or .IsComplexObject .IsTuple .IsAdditionalProperties .IsAliased }}
+ {{- if not .IsAnonymous }}
+ if val, ok := {{ $validatedValues }}[{{ $keyVar }}]; ok {
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ if val != nil {
+ {{- end }}
+ if err := val.ContextValidate(ctx, formats); err != nil {
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ }
+ {{- else }}
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ template "propertycontextvalidator" . }}
+ {{ end }}
+ {{- end }}
+ {{ range .Properties }}
+ {{ template "propertycontextvalidator" . }}
+ {{- end }}
+ {{- end }}
+ {{ if and .IsTuple .AdditionalItems }}
+ // TODO: context validating additional items should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{ end }}
+ {{- end }}
+ }
+ {{ end }}
+ {{ end }}
+ {{- else if .IsAliased }}
+ {{- if and .IsMap .HasValidations }}{{/* validation of aliased maps but does not know about AdditionalProperties: e.g. it comes from a $ref */}}
+ {{- if not .IsAnonymous }}
+ {{- if $.IsMap }}{{/* we come from a map range */}}
+ if val, ok := {{ .ValueExpression }}; ok {
+ {{- end }}
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ {{- if $.IsMap }}
+ if val != nil {
+ {{- else }}
+ if {{ .ValueExpression }} != nil {
+ {{- end }}
+ {{- end }}
+ if err := {{ if $.IsMap }}val{{ else }}{{ .ValueExpression }}{{ end }}.ContextValidate(ctx, formats); err != nil {
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ {{- if or $.IsMap }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{ end }} {{/*mapcontextvalidator*/}}
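+
+{{/*
+  Illustrative expansion of "mapcontextvalidator", assuming a hypothetical
+  generated model with a property Labels map[string]*Label (both names are
+  assumptions for illustration only). For a nullable, context-validated
+  value type the template above renders roughly:
+
+    for k := range m.Labels {
+        if val, ok := m.Labels[k]; ok {
+            if val != nil {
+                if err := val.ContextValidate(ctx, formats); err != nil {
+                    return err
+                }
+            }
+        }
+    }
+*/}}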
+{{ define "mapvalidator" }}{{/* validates additionalProperties */}}
+ {{- if and .Required }}
+ {{- if or .IsNullable .IsInterface }}
+ if {{ .ReceiverName }}.{{ pascalize .Name }} == nil {
+ return errors.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, nil)
+ }
+ {{- else }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{ .ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- end }}
+ {{ if .HasAdditionalProperties }}
+ {{- if and .AdditionalProperties.HasValidations (not .AdditionalProperties.SkipExternalValidation) }}
+ {{- $validatedValues := .ValueExpression }}{{ $keyVar := .AdditionalProperties.KeyVar }}
+ for {{ .AdditionalProperties.KeyVar }} := range {{ .ValueExpression }} {
+ {{ with .AdditionalProperties }}
+ {{- if and (not .Required) .IsNullable }}{{/* skip when null type is accepted */}}
+ {{- if .IsInterface }}
+ if {{ $validatedValues }}[{{ $keyVar }}] == nil { // not required
+ {{- else }}
+ if swag.IsZero({{ $validatedValues }}[{{ $keyVar }}]) { // not required
+ {{- end }}
+ continue
+ }
+ {{- else if and (.Required) (not .IsArray) }}{{/* Required slice is processed below */}}
+ if err := validate.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ $validatedValues }}[{{ $keyVar }}]); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- if and .IsPrimitive (not .SkipExternalValidation ) }}
+ {{- if .IsAliased }}
+ {{- if not .IsAnonymous }}
+ if val, ok := {{ $validatedValues }}[{{ $keyVar }}]; ok {
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ if val != nil {
+ {{- end }}
+ if err := val.Validate(formats); err != nil {
+ if ve, ok := err.(*errors.Validation); ok {
+ return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ } else if ce, ok := err.(*errors.CompositeError); ok {
+ return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ }
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ }
+ {{- else }}{{/* validation of anonymous objects */}}
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ template "propertyvalidator" . }}
+ {{ end }}
+ {{- end }}
+ {{ range .Properties }}
+ {{ template "propertyvalidator" . }}
+ {{ end }}
+ {{- end }}
+ {{ if and .IsTuple .AdditionalItems }}
+ // TODO: validating additional items should go here, if you see this raise an issue{{/* TODO(fred): investigate the case to remove that comment: AdditionalItems shouldn't come in maps. Upstream validation is needed to guard against this */}}
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{ end }}
+ {{- else }}
+ {{ template "primitivefieldvalidator" . }}
+ {{- end }}
+ {{- else if and .IsCustomFormatter (or .HasValidations .Required) }}{{/* custom format not captured as primitive */}}
+ {{- if .Required }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- if and (not .IsStream) (not .IsBase64) (not .SkipExternalValidation) }}{{/* TODO: IsStream and CustomFormatter should be mutually exclusive in the type resolver */}}
+ {{ template "validationCustomformat" . }}
+ {{- end }}
+ {{- else if .IsArray }}
+ {{ template "slicevalidator" . }}
+ {{- else if and .IsMap (not .IsInterface) }}
+ {{ template "mapvalidator" . }}
+ {{ if .Enum }}
+ if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}ValueEnum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ $validatedValues }}[{{ $keyVar }}]); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- else if and .IsMap .IsInterface }}
+ {{ if .Enum }}
+ if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}ValueEnum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ $validatedValues }}[{{ $keyVar }}]); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- else if or .IsComplexObject .IsTuple .IsAdditionalProperties .IsAliased }}
+ {{- if and (not .IsAnonymous) (not .SkipExternalValidation) }}
+ if val, ok := {{ $validatedValues }}[{{ $keyVar }}]; ok {
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ if val != nil {
+ {{- end }}
+ if err := val.Validate(formats); err != nil {
+ if ve, ok := err.(*errors.Validation); ok {
+ return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ } else if ce, ok := err.(*errors.CompositeError); ok {
+ return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ }
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ }
+ {{- else }}
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ template "propertyvalidator" . }}
+ {{ end }}
+ {{- end }}
+ {{ range .Properties }}
+ {{ template "propertyvalidator" . }}
+ {{- end }}
+ {{- end }}
+ {{ if and .IsTuple .AdditionalItems }}
+ // TODO: validating additional items should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{ end }}
+ {{ end }}
+ }
+ {{- end }}
+ {{ end }}
+ {{ if .Enum }}
+ // from map
+ if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}Enum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ValueExpression }}); err != nil {
+ return err
+ }
+ {{ end }}
+ {{- else if .IsAliased }}
+ {{- if and .IsMap .HasValidations (not .SkipExternalValidation) }}{{/* validation of aliased maps but does not know about AdditionalProperties: e.g. it comes from a $ref */}}
+ {{- if not .IsAnonymous }}
+ {{- if $.IsMap }}{{/* we come from a map range */}}
+ if val, ok := {{ .ValueExpression }}; ok {
+ {{- end }}
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ {{- if $.IsMap }}
+ if val != nil {
+ {{- else }}
+ if {{ .ValueExpression }} != nil {
+ {{- end }}
+ {{- end }}
+ if err := {{ if $.IsMap }}val{{ else }}{{ .ValueExpression }}{{ end }}.Validate(formats); err != nil {
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ {{- if or $.IsMap }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{ end }}
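+
+{{/*
+  Illustrative expansion of "mapvalidator" for the same hypothetical
+  Labels map[string]*Label property (names assumed for illustration).
+  For a non-required, nullable value type the generated code looks
+  roughly like:
+
+    for k := range m.Labels {
+        if swag.IsZero(m.Labels[k]) { // not required
+            continue
+        }
+        if val, ok := m.Labels[k]; ok {
+            if val != nil {
+                if err := val.Validate(formats); err != nil {
+                    if ve, ok := err.(*errors.Validation); ok {
+                        return ve.ValidateName("labels" + "." + k)
+                    } else if ce, ok := err.(*errors.CompositeError); ok {
+                        return ce.ValidateName("labels" + "." + k)
+                    }
+                    return err
+                }
+            }
+        }
+    }
+*/}}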
+
+{{define "objectcontextvalidator" }}
+ {{/* Debug
+ // .Name: {{ .Name }}
+ // .IsAliased: {{ .IsAliased }}
+ // .IsAnonymous: {{ .IsAnonymous }}
+ // .IsNullable: {{ .IsNullable }}
+ // .Required: {{ .Required }}
+ // .ReadOnly: {{ .ReadOnly }}
+ // .HasContextValidations {{ .HasContextValidations }}
+ // .IsBaseType: {{ .IsBaseType }}
+ // .ValueExpression: {{ .ValueExpression }}
+ // .ReceiverName: {{ .ReceiverName }}
+ */}}
+ {{- if not .IsAnonymous }}
+ {{- if or .IsAliased (ne .ValueExpression .ReceiverName) }}{{/* prevents generated code from calling itself: case of aliased types */}}
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ if {{ .ValueExpression }} != nil {
+ {{- end }}
+ {{ if not .Required }}
+ {{- if .IsInterface }}
+ if {{ .ValueExpression }} == nil { // not required
+ {{- else }}
+ if swag.IsZero({{ .ValueExpression }}) { // not required
+ {{- end }}
+ return nil
+ }
+ {{ end }}
+ if err := {{.ValueExpression }}.ContextValidate(ctx, formats); err != nil {
+ if ve, ok := err.(*errors.Validation); ok {
+ return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ } else if ce, ok := err.(*errors.CompositeError); ok {
+ return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ }
+ return err
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- else }}
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ template "propertycontextvalidator" . }}
+ {{ end }}
+ {{ end }}
+ {{ range .Properties }}
+ {{ template "propertycontextvalidator" . }}
+ {{ end }}
+ {{- end }}
+ {{- if and .IsTuple .AdditionalItems }}
+ // TODO: context validating additional items should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{- end }}
+{{ end }}
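+
+{{/*
+  Sketch of what "objectcontextvalidator" produces for a hypothetical
+  nullable, non-required property Profile *Profile (names assumed for
+  illustration only):
+
+    if m.Profile != nil {
+
+        if swag.IsZero(m.Profile) { // not required
+            return nil
+        }
+
+        if err := m.Profile.ContextValidate(ctx, formats); err != nil {
+            if ve, ok := err.(*errors.Validation); ok {
+                return ve.ValidateName("profile")
+            } else if ce, ok := err.(*errors.CompositeError); ok {
+                return ce.ValidateName("profile")
+            }
+            return err
+        }
+    }
+*/}}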
+{{ define "minmaxProperties" }}
+ {{- if and (or .IsMap (and .IsAdditionalProperties .HasAdditionalProperties)) (or .MinProperties .MaxProperties) }}
+ {{- if and (not .IsAdditionalProperties) (not .IsInterface) (eq (len .Properties) 0) }}{{/* map only */}}
+ nprops := len({{ if and (not .IsAliased) .HasAdditionalProperties }}{{ .ReceiverName }}{{ else }}{{ .ValueExpression }}{{ end }})
+ {{- else }}{{/* object with properties */}}
+ {{- if and .IsNullable .MinProperties }}
+ {{- if gt0 .MinProperties }}
+
+ // short circuits minProperties > 0
+ if {{ .ReceiverName }} == nil {
+ return errors.TooFewProperties({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .MinProperties }})
+ }
+ {{- end }}
+ {{- end }}
+
+ props := make(map[string]json.RawMessage, {{ len .Properties }}{{ if .HasAdditionalProperties }}+ 10{{ end }})
+ j, err := swag.WriteJSON({{ .ReceiverName }})
+ if err != nil {
+ return err
+ }
+
+ if err = swag.ReadJSON(j, &props) ; err != nil {
+ return err
+ }
+
+ nprops := len(props)
+ {{- end }}
+ {{ if .MinProperties }}
+ // minProperties: {{ .MinProperties }}
+ if nprops < {{ .MinProperties }} {
+ return errors.TooFewProperties({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .MinProperties }})
+ }
+ {{- end }}
+ {{ if .MaxProperties }}
+ // maxProperties: {{ .MaxProperties }}
+ if nprops > {{ .MaxProperties }} {
+ return errors.TooManyProperties({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .MaxProperties }})
+ }
+ {{- end }}
+ {{- end }}
+{{- end }}
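+
+{{/*
+  Sketch of the "minmaxProperties" output for a plain map schema with
+  minProperties: 1 and maxProperties: 10 (bounds assumed for illustration):
+
+    nprops := len(m)
+
+    // minProperties: 1
+    if nprops < 1 {
+        return errors.TooFewProperties("", "body", 1)
+    }
+
+    // maxProperties: 10
+    if nprops > 10 {
+        return errors.TooManyProperties("", "body", 10)
+    }
+
+  For objects with declared properties, the template instead serializes
+  the receiver to JSON and counts the keys of the resulting map.
+*/}}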
+{{define "objectvalidator" }}{{/* // DEBUG
+ // .Name: {{ .Name }}
+ // .IsAliased: {{ .IsAliased }}
+ // .IsAnonymous: {{ .IsAnonymous }}
+ // .IsNullable: {{ .IsNullable }}
+ // .Required: {{ .Required }}
+ // .ReadOnly: {{ .ReadOnly }}
+ // .HasValidations {{ .HasValidations }}
+ // .HasContextValidations {{ .HasContextValidations }}
+ // .IsBaseType: {{ .IsBaseType }}
+ // .ValueExpression: {{ .ValueExpression }}
+ // .ReceiverName: {{ .ReceiverName }}
+ // .IsAdditionalProperties: {{ .IsAdditionalProperties }}
+ // .IsInterface: {{ .IsInterface }}
+ // .IsMap: {{ .IsMap }}
+ // .IsArray: {{ .IsArray }}
+ // .IsMapNullOverride: {{ .IsMapNullOverride }}
+ */}}
+ {{- if not .IsAnonymous }}
+ {{- if and .Required (or .IsNullable .IsBaseType .IsMap) }}
+ if err := validate.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{.ValueExpression }}); err != nil {
+ return err
+ }
+ {{- if and (not .Required) .IsBaseType }}
+ if {{ .ValueExpression }} == nil {
+ return nil
+ }
+ {{- end }}
+ {{ end }}
+ {{- if and (or .IsAliased (ne .ValueExpression .ReceiverName)) (not .SkipExternalValidation) }}{{/* prevents generated code from calling itself: case of aliased types */}}
+ {{- if or (and (or .IsNullable) (not .IsMapNullOverride)) .IsMap .IsArray }}
+ if {{ .ValueExpression }} != nil {
+ {{- end }}
+ if err := {{.ValueExpression }}.Validate(formats); err != nil {
+ if ve, ok := err.(*errors.Validation); ok {
+ return ve.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ } else if ce, ok := err.(*errors.CompositeError); ok {
+ return ce.ValidateName({{ if .Path }}{{ .Path }}{{ else }}""{{ end }})
+ }
+ return err
+ }
+ {{- if or (and (or .IsNullable) (not .IsMapNullOverride)) .IsMap .IsArray }}
+ }
+ {{- end }}
+ {{- end }}
+ {{- else }}
+ {{ template "minmaxProperties" .}}
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ template "propertyvalidator" . }}
+ {{ end }}
+ {{ end }}
+ {{ range .Properties }}
+ {{ template "propertyvalidator" . }}
+ {{ end }}
+ {{- end }}
+ {{- if and .IsTuple .AdditionalItems }}
+ // TODO: validating additional items should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{- end }}
+{{ end }}
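+
+{{/*
+  Sketch of "objectvalidator" output for a hypothetical required, nullable
+  property Profile *Profile (names assumed for illustration only):
+
+    if err := validate.Required("profile", "body", m.Profile); err != nil {
+        return err
+    }
+
+    if m.Profile != nil {
+        if err := m.Profile.Validate(formats); err != nil {
+            if ve, ok := err.(*errors.Validation); ok {
+                return ve.ValidateName("profile")
+            } else if ce, ok := err.(*errors.CompositeError); ok {
+                return ce.ValidateName("profile")
+            }
+            return err
+        }
+    }
+*/}}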
+
+{{define "propertycontextvalidator"}}
+ {{- if .IsPrimitive }}
+ {{- if .IsAliased }}
+ {{ template "objectcontextvalidator" . }}
+ {{- else }}
+ {{ template "primitivefieldcontextvalidator" . }}
+ {{- end }}
+ {{- else if and .IsCustomFormatter (or .HasValidations .Required) }}{{/* custom format not captured as primitive */}}
+ // TODO: context validating primitive with custom formatter should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{- if .ReadOnly }}
+
+ if err := validate.ReadOnly{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- if and (not .IsStream) (not .IsBase64) }}
+ // TODO: context validating properties with custom formatter should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{/*{{ template "validationCustomformat" . }}*/}}
+ {{- end }}
+ {{- else if .IsArray }}
+ {{ template "slicecontextvalidator" . }}
+ {{- else if and .IsMap (or (not .IsAliased) (and .IsAliased .IsInterface)) }}{{/* except for interfaces, the rendering for aliased maps is performed by objectvalidator */}}
+ {{ template "mapcontextvalidator" . }}
+ {{- else if or .IsComplexObject .IsTuple .IsAdditionalProperties .IsAliased }}
+ {{ template "objectcontextvalidator" . }}
+ {{- end }}
+
+{{end}}
+
+{{define "propertyvalidator" }}
+ {{- if .IsPrimitive }}
+ {{- if .IsAliased }}
+ {{- if and .Required (not .IsAnonymous) }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{ template "objectvalidator" . }}
+ {{- else }}
+ {{ template "primitivefieldvalidator" . }}
+ {{- end }}
+ {{- else if and .IsCustomFormatter (or .HasValidations .Required) }}{{/* custom format not captured as primitive */}}
+ {{- if .Required }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- if and (not .IsStream) (not .IsBase64) }}
+ {{ template "validationCustomformat" . }}
+ {{- end }}
+ {{- else if .IsArray }}
+ {{ template "slicevalidator" . }}
+ {{- else if and .IsMap (or (not .IsAliased) (and .IsAliased .IsInterface)) }}
+ {{ template "minmaxProperties" . }}
+ {{ template "mapvalidator" . }}
+ {{- else if or .IsComplexObject .IsTuple .IsAdditionalProperties .IsAliased }}
+ {{- if and .IsAdditionalProperties .Required (not .IsAliased) }}
+ {{- if or .IsNullable .IsInterface }}
+ if {{ .ValueExpression }} == nil {
+ return errors.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ValueExpression }})
+ }
+ {{- else }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- end }}
+ {{ template "objectvalidator" . }}
+ {{- else if and .IsExternal .Required }}
+ {{- if or .IsNullable .IsInterface }}
+ if {{ .ValueExpression }} == nil {
+ return errors.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ValueExpression }})
+ }
+ {{- else }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- end }}
+{{ end }}
+
+{{define "fieldcontextvalidator" }}
+ {{- if .IsPrimitive }}
+ {{ template "primitivefieldcontextvalidator" . }}
+ {{- else if and .IsCustomFormatter (or .HasValidations .Required) }}{{/* custom format not captured as primitive */}}
+ {{- if and (not .IsStream) (not .IsBase64) }}
+ // TODO: context validating properties with custom formatter should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{/*
+ {{ template "validationCustomformat" . }}
+ */}}
+ {{- end }}
+ {{- else if .IsArray }}
+ {{ template "slicecontextvalidator" . }}
+ {{- else if .IsMap }}
+ {{ template "mapcontextvalidator" . }}
+ {{- end }}
+
+{{ end }}
+
+{{ define "fieldvalidator"}}
+ {{- if .IsPrimitive }}
+ {{ template "primitivefieldvalidator" . }}
+ {{- else if and .IsCustomFormatter (or .HasValidations .Required) }}{{/* custom format not captured as primitive */}}
+ {{- if .Required }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if not (or .IsAnonymous .IsNullable) }}{{ .GoType }}({{ end }}{{.ValueExpression }}{{ if not (or .IsAnonymous .IsNullable) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- if and (not .IsStream) (not .IsBase64) }}
+ {{ template "validationCustomformat" . }}
+ {{- end }}
+ {{- else if .IsArray }}
+ {{ template "slicevalidator" . }}
+ {{- else if .IsMap }}
+ {{ template "mapvalidator" . }}
+ {{- end }}
+{{ end }}
+
+{{define "schemacontextvalidator" }}
+// ContextValidate validates this {{ humanize .Name }} based on the context it is used in
+func ({{.ReceiverName }} {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ if .Discriminates }}{{ camelize .Name }}{{ else if .IsExported }}{{ pascalize .Name }}{{ else }}{{ .Name }}{{ end }}) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
+ var res []error
+ {{ range .AllOf }}
+ {{- if not .Properties }}
+ // validation for a type composition with {{ .GoType }}
+ {{- end }}
+ {{- if and (or .IsInterface .IsAnonymous .IsBaseType) (or .HasContextValidations) }}
+ {{ template "fieldcontextvalidator" . }}
+ {{ range .Properties }}
+ {{ if and (ne $.DiscriminatorField .Name) (or .HasContextValidations) }}
+ if err := {{.ReceiverName }}.contextValidate{{ pascalize .Name }}(ctx, formats); err != nil {
+ res = append(res, err)
+ }
+ {{- end }}
+ {{- end }}
+ {{- if and .HasAdditionalProperties (not .IsMap) }}{{/* validates additionalProperties in an object which is not itself a map */}}
+ {{ template "mapcontextvalidator" . }}
+ {{- end }}
+ {{ if and .IsTuple .AdditionalItems }}
+ // TODO: context validating additional items should go here, if you see this raise an issue
+ // at https://github.com/go-swagger/go-swagger/issues
+ {{/*
+ if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}Items(formats); err != nil {
+ res = append(res, err)
+ }
+ */}}
+ {{ end }}
+ {{- else if (or .HasContextValidations) }}
+ if err := {{ .ReceiverName }}.{{ pascalize (dropPackage .GoType) }}.ContextValidate(ctx, formats); err != nil {
+ res = append(res, err)
+ }
+ {{- end }}
+ {{- end }} {{/*end AllOf*/}}
+ {{ template "fieldcontextvalidator" . }}
+ {{ range .Properties }}
+ {{ if .HasContextValidations }} {{/* complex obj always has cv*/}}
+ if err := {{.ReceiverName }}.contextValidate{{ pascalize .Name }}(ctx, formats); err != nil {
+ res = append(res, err)
+ }
+ {{ end }}
+ {{ end }}
+ {{- if and .HasAdditionalProperties (not .IsMap) }}{{/* validates additionalProperties in an object which is not itself a map */}}
+ {{ template "mapcontextvalidator" . }}
+ {{- end }}
+ if len(res) > 0 {
+ return errors.CompositeValidationError(res...)
+ }
+ return nil
+}
+
+
+ {{ range .Properties }}
+ {{ if .HasContextValidations }}
+func ({{.ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) contextValidate{{ pascalize .Name }}(ctx context.Context, formats strfmt.Registry) error {
+ {{template "propertycontextvalidator" . }}
+ return nil
+}
+ {{ end }}
+ {{ end }} {{/*Properties*/}}
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ if .HasContextValidations }}
+func ({{.ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) contextValidate{{ pascalize .Name }}(ctx context.Context, formats strfmt.Registry) error {
+ {{template "propertycontextvalidator" . }}
+ return nil
+}
+ {{ end }}
+ {{ end }}
+ {{ end }} {{/*AllOf*/}}
+{{end}} {{/*schemacontextvalidator*/}}
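+
+{{/*
+  Sketch of a complete ContextValidate generated by "schemacontextvalidator"
+  for a hypothetical User model with one context-validated property
+  (names assumed for illustration only):
+
+    func (m *User) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
+        var res []error
+
+        if err := m.contextValidateProfile(ctx, formats); err != nil {
+            res = append(res, err)
+        }
+
+        if len(res) > 0 {
+            return errors.CompositeValidationError(res...)
+        }
+        return nil
+    }
+*/}}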
+
+{{define "schemavalidator" }}
+ {{ if .Enum }}
+ {{ if (eq .SwaggerType "string") }}
+ {{ $gotype := .GoType }}
+const (
+ {{ range .Enum }}
+ {{- $variant := print $gotype (pascalize (cleanupEnumVariant .)) }}
+ // {{ $variant }} captures enum value {{ printf "%q" . }}
+ {{ $variant }} {{ $gotype }} = {{ printf "%q" . }}
+ {{ end }}
+)
+ {{ end }}
+
+// for schema
+var {{ camelize .Name }}Enum []interface{}
+func init() {
+ var res []{{ template "dereffedSchemaType" . }}
+ if err := json.Unmarshal([]byte(`{{ json .Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize .Name }}Enum = append({{ camelize .Name }}Enum, v)
+ }
+}
+
+func ({{ .ReceiverName }} {{ if not .IsPrimitive }}*{{ end }}{{ if .IsExported }}{{ pascalize .Name }}{{ else }}{{ .Name }}{{ end }}) validate{{ pascalize .Name }}Enum(path, location string, value {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" . }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize .Name }}Enum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ if .ItemsEnum }}
+var {{ camelize .Name }}ItemsEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" .Items }}
+ if err := json.Unmarshal([]byte(`{{ json .ItemsEnum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize .Name }}ItemsEnum = append({{ camelize .Name }}ItemsEnum, v)
+ }
+}
+
+func ({{ .ReceiverName }} *{{ if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}ItemsEnum(path, location string, value {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" .Items }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize .Name }}ItemsEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ with .AdditionalProperties }}
+ {{ if .Enum }}
+// for additional props
+var {{ camelize .Name }}ValueEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" . }}
+ if err := json.Unmarshal([]byte(`{{ json .Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize .Name }}ValueEnum = append({{ camelize .Name }}ValueEnum, v)
+ }
+}
+
+func ({{ .ReceiverName }} *{{ if .IsExported }}{{ pascalize .Name }}{{ else }}{{ .Name }}{{ end }}) validate{{ pascalize .Name }}ValueEnum(path, location string, value {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" . }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize .Name }}ValueEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{- end }}
+ {{ end }}
+// Validate validates this {{ humanize .Name }}
+func ({{.ReceiverName }} {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ if .Discriminates }}{{ camelize .Name }}{{ else if .IsExported }}{{ pascalize .Name }}{{ else }}{{ .Name }}{{ end }}) Validate(formats strfmt.Registry) error {
+ var res []error
+ {{ template "minmaxProperties" .}}
+ {{ range .AllOf }}
+ {{- if not .Properties }}
+ // validation for a type composition with {{ .GoType }}
+ {{- end }}
+ {{- if and (or .IsInterface .IsAnonymous .IsBaseType) (or .Required .HasValidations) }}
+ {{ template "fieldvalidator" . }}
+
+ {{ range .Properties }}
+ {{ if and (ne $.DiscriminatorField .Name) (or .Required .HasValidations) }}
+ if err := {{.ReceiverName }}.validate{{ pascalize .Name }}(formats); err != nil {
+ res = append(res, err)
+ }
+ {{- end }}
+ {{- end }}
+ {{- if and .HasAdditionalProperties (not .IsMap) }}{{/* validates additionalProperties in an object which is not itself a map */}}
+ {{ template "mapvalidator" . }}
+ {{- end }}
+ {{ if and .IsTuple .AdditionalItems }}
+ if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}Items(formats); err != nil {
+ res = append(res, err)
+ }
+ {{ end }}
+ {{- else if (or .Required .HasValidations) }}
+ if err := {{ .ReceiverName }}.{{ pascalize (dropPackage .GoType) }}.Validate(formats); err != nil {
+ res = append(res, err)
+ }
+ {{- end }}
+ {{- end }}
+ {{ template "fieldvalidator" . }}
+ {{ range .Properties }}
+ {{ if and (ne $.DiscriminatorField .Name) (or .Required .HasValidations) }}
+ if err := {{.ReceiverName }}.validate{{ pascalize .Name }}(formats); err != nil {
+ res = append(res, err)
+ }
+ {{ end }}
+ {{ end }}
+ {{- if and .HasAdditionalProperties (not .IsMap) }}{{/* validates additionalProperties in an object which is not itself a map */}}
+ {{ template "mapvalidator" . }}
+ {{- end }}
+ {{ if and .IsTuple .AdditionalItems }}{{/* validates additionalItems in a tuple */}}
+ if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}Items(formats); err != nil {
+ res = append(res, err)
+ }
+ {{ end }}
+ {{ if and .Enum (not .IsPrimitive) (not .IsMap) }}
+ // value enum
+ if err := {{ .ReceiverName }}.validate{{ pascalize .Name }}Enum("", "body", {{ .ReceiverName }}); err != nil {
+ res = append(res, err)
+ }
+ {{ end }}
+
+ if len(res) > 0 {
+ return errors.CompositeValidationError(res...)
+ }
+ return nil
+}
+
+ {{ range .Properties }}
+ {{ if or .Required .HasValidations }}
+ {{ if .Enum }}
+var {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" . }}
+ if err := json.Unmarshal([]byte(`{{ json .Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum = append({{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum, v)
+ }
+}
+
+ {{ if (eq .SwaggerType "string") }}
+ {{ $gotype := .GoType }}
+ {{ $propname := .Name }}
+const (
+ {{ range .Enum }}
+ {{- $variant := print (pascalize $.Name) (pascalize $propname) (pascalize (cleanupEnumVariant .)) }}
+ // {{ $variant }} captures enum value {{ printf "%q" . }}
+ {{ $variant }} {{ $gotype }} = {{ printf "%q" . }}
+ {{ end }}
+)
+ {{ end }}
+
+// prop value enum
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}Enum(path, location string, value {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" . }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ if .ItemsEnum }}
+var {{ camelize $.Name }}{{ pascalize .Name }}ItemsEnum []interface{}
+func init() {
+ var res []{{ template "dereffedSchemaType" .Items }}
+ if err := json.Unmarshal([]byte(`{{ json .ItemsEnum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize $.Name }}{{ pascalize .Name }}ItemsEnum = append({{ camelize $.Name }}{{ pascalize .Name }}ItemsEnum, v)
+ }
+}
+
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}ItemsEnum(path, location string, value {{ if or .Items.IsTuple .Items.IsComplexObject .Items.IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" .Items }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize $.Name }}{{ pascalize .Name }}ItemsEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ if .AdditionalItems }}
+ {{ if .AdditionalItems.Enum }}
+var {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" .AdditionalItems }}
+ if err := json.Unmarshal([]byte(`{{ json .AdditionalItems.Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum = append({{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum, v)
+ }
+}
+
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}Enum(path, location string, value {{ if or .AdditionalItems.IsTuple .AdditionalItems.IsComplexObject .AdditionalItems.IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" .AdditionalItems }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ end }}
+ {{ with .AdditionalProperties }}
+ {{ if .Enum }}
+// additional properties value enum
+var {{ camelize $.Name }}{{ pascalize .Name }}ValueEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" . }}
+ if err := json.Unmarshal([]byte(`{{ json .Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize $.Name }}{{ pascalize .Name }}ValueEnum = append({{ camelize $.Name }}{{ pascalize .Name }}ValueEnum, v)
+ }
+}
+
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}ValueEnum(path, location string, value {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" . }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize $.Name }}{{ pascalize .Name }}ValueEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ end }}
+
+ {{ if and (ne $.DiscriminatorField .Name) (or .Required .HasValidations) }}
+func ({{.ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}(formats strfmt.Registry) error {
+ {{- if not .Required }}
+ {{- if .IsInterface }}
+ if {{ .ValueExpression }} == nil { // not required
+ {{- else }}
+ if swag.IsZero({{ .ValueExpression }}) { // not required
+ {{- end }}
+ return nil
+ }
+ {{- end }}
+ {{- if and $.IsTuple .IsMap .Required }}
+ {{- if .IsInterface }}
+ if {{ .ValueExpression }} == nil {
+ return errors.Required({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ .ValueExpression }})
+ }
+ {{- else }}
+ if err := validate.Required{{ if and (eq .GoType "string") (not .IsNullable) }}String{{ end }}(
+ {{- if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }},
+ {{- if and (eq .GoType "string") (not (or .IsAnonymous .IsNullable)) }}{{ .GoType }}({{ end }}
+ {{- .ValueExpression }}
+ {{- if and (eq .GoType "string") (not (or .IsAnonymous .IsNullable)) }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- end }}
+ {{template "propertyvalidator" . }}
+
+ return nil
+}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+ {{ range .AllOf }}
+ {{ range .Properties }}
+ {{ if and (ne $.DiscriminatorField .Name) (or .Required .HasValidations) }}
+ {{ if .Enum }}
+var {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" . }}
+ if err := json.Unmarshal([]byte(`{{ json .Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum = append({{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum, v)
+ }
+}
+
+// property enum
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}Enum(path, location string, value {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" . }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ if .ItemsEnum }}
+var {{ camelize $.Name }}{{ pascalize .Name }}ItemsEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" .Items }}
+ if err := json.Unmarshal([]byte(`{{ json .ItemsEnum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize $.Name }}{{ pascalize .Name }}ItemsEnum = append({{ camelize $.Name }}{{ pascalize .Name }}ItemsEnum, v)
+ }
+}
+
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}ItemsEnum(path, location string, value {{ if or .Items.IsTuple .Items.IsComplexObject .Items.IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" .Items }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize $.Name }}{{ pascalize .Name }}ItemsEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ if .AdditionalItems }}
+ {{ if .AdditionalItems.Enum }}
+var {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" .AdditionalItems }}
+ if err := json.Unmarshal([]byte(`{{ json .AdditionalItems.Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum = append({{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum, v)
+ }
+}
+
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}Enum(path, location string, value {{ if or .AdditionalItems.IsTuple .AdditionalItems.IsComplexObject .AdditionalItems.IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" .AdditionalItems }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize $.Name }}Type{{ pascalize .Name }}PropEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ end }}
+ {{ with .AdditionalProperties }}
+ {{ if .Enum }}
+var {{ camelize $.Name }}{{ pascalize .Name }}ValueEnum []interface{}
+func init() {
+ var res []{{ template "dereffedSchemaType" . }}
+ if err := json.Unmarshal([]byte(`{{ json .Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize $.Name }}{{ pascalize .Name }}ValueEnum = append({{ camelize $.Name }}{{ pascalize .Name }}ValueEnum, v)
+ }
+}
+
+// additional properties value enum
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}ValueEnum(path, location string, value {{ if or .IsTuple .IsComplexObject .IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" . }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize $.Name }}{{ pascalize .Name }}ValueEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+ {{ end }}
+
+
+func ({{.ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}(formats strfmt.Registry) error {
+ {{ if not .Required }}
+ {{- if .IsInterface }}
+ if {{ .ValueExpression }} == nil { // not required
+ {{- else }}
+ if swag.IsZero({{ .ValueExpression }}) { // not required
+ {{- end }}
+ return nil
+ }
+ {{ end }}
+ {{template "propertyvalidator" . }}
+
+ return nil
+}
+ {{ end }}
+ {{ end }}
+ {{ end }}
+
+ {{ if .HasAdditionalItems }}
+ {{ if .AdditionalItems.Enum }}
+var {{ camelize .Name }}ItemsEnum []interface{}
+
+func init() {
+ var res []{{ template "dereffedSchemaType" .AdditionalItems }}
+ if err := json.Unmarshal([]byte(`{{ json .AdditionalItems.Enum }}`), &res); err != nil {
+ panic(err)
+ }
+ for _, v := range res {
+ {{ camelize .Name }}ItemsEnum = append({{ camelize .Name }}ItemsEnum, v)
+ }
+}
+
+func ({{ .ReceiverName }} *{{ if $.Discriminates }}{{ camelize $.Name }}{{ else if $.IsExported }}{{ pascalize $.Name }}{{ else }}{{ $.Name }}{{ end }}) validate{{ pascalize .Name }}ItemsEnum(path, location string, value {{ if or .AdditionalItems.IsTuple .AdditionalItems.IsComplexObject .AdditionalItems.IsAdditionalProperties }}*{{ end }}{{ template "dereffedSchemaType" .AdditionalItems }}) error {
+ if err := validate.EnumCase(path, location, value, {{ camelize .Name }}ItemsEnum, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ return nil
+}
+ {{ end }}
+func ({{.ReceiverName }} *{{ pascalize .Name }}) validate{{ pascalize .Name }}Items(formats strfmt.Registry) error {
+ {{ if and (or .AdditionalItems.Required .AdditionalItems.HasValidations) (not .AdditionalItems.SkipExternalValidation) }}
+ for {{ .IndexVar }} := range {{ .ValueExpression }}.{{ pascalize .Name }}Items {
+ {{template "propertyvalidator" .AdditionalItems }}
+ }
+ {{ end }}
+ return nil
+}
+ {{ end }}
+{{ end }}
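+
+{{/*
+  Sketch of the enum scaffolding "schemavalidator" emits for a hypothetical
+  string property visibility with enum ["public","private"] on a Status
+  model (names and values assumed for illustration only):
+
+    var statusTypeVisibilityPropEnum []interface{}
+
+    func init() {
+        var res []string
+        if err := json.Unmarshal([]byte(`["public","private"]`), &res); err != nil {
+            panic(err)
+        }
+        for _, v := range res {
+            statusTypeVisibilityPropEnum = append(statusTypeVisibilityPropEnum, v)
+        }
+    }
+
+    func (m *Status) validateVisibilityEnum(path, location string, value string) error {
+        if err := validate.EnumCase(path, location, value, statusTypeVisibilityPropEnum, true); err != nil {
+            return err
+        }
+        return nil
+    }
+*/}}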
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/additionalpropertiesserializer.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/additionalpropertiesserializer.gotmpl
new file mode 100644
index 000000000..a09058683
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/additionalpropertiesserializer.gotmpl
@@ -0,0 +1,94 @@
+{{ define "additionalPropertiesSerializer" }}
+// UnmarshalJSON unmarshals this object with additional properties from JSON
+func ({{.ReceiverName}} *{{ pascalize .Name }}) UnmarshalJSON(data []byte) error {
+ // stage 1, bind the properties
+ var stage1 {{ template "withoutAdditionalBody" . }}
+ if err := json.Unmarshal(data, &stage1); err != nil {
+ return err
+ }
+ var rcv {{ pascalize .Name }}
+ {{ range .Properties }}
+ rcv.{{ pascalize .Name }} = stage1.{{ pascalize .Name }}
+ {{- end }}
+ *{{ .ReceiverName }} = rcv
+
+ // stage 2, remove properties and add to map
+ stage2 := make(map[string]{{ if .AdditionalProperties }}json.RawMessage{{ else }}interface{}{{ end }})
+ if err := json.Unmarshal(data, &stage2); err != nil {
+ return err
+ }
+
+ {{ range .Properties }}
+ delete(stage2, {{ printf "%q" .Name }})
+ {{- end }}
+
+ {{- if .AdditionalProperties }}
+ // stage 3, add additional properties values
+ if len(stage2) > 0 {
+ result := make(map[string]{{ template "schemaType" .AdditionalProperties }})
+ for k, v := range stage2 {
+ var toadd {{ template "schemaType" .AdditionalProperties }}
+ if err := json.Unmarshal(v, {{if not .AdditionalProperties.IsNullable }}&{{ end }}toadd); err != nil {
+ return err
+ }
+ result[k] = toadd
+ }
+ {{ .ValueExpression }} = result
+ }
+ {{- else }}
+ {{ .ValueExpression }} = stage2
+ {{- end }}
+
+ return nil
+}
+
+// MarshalJSON marshals this object with additional properties into a JSON object
+func ({{.ReceiverName}} {{ pascalize .Name }}) MarshalJSON() ([]byte, error) {
+ var stage1 {{ template "withoutAdditionalBody" . }}
+ {{ range .Properties }}
+ stage1.{{ pascalize .Name }} = {{ .ValueExpression }}
+ {{- end }}
+
+ // make JSON object for known properties
+ props, err := json.Marshal(stage1)
+ if err != nil {
+ return nil, err
+ }
+
+ if len({{ .ValueExpression }}) == 0 { // no additional properties
+ return props, nil
+ }
+
+ // make JSON object for the additional properties
+ additional, err := json.Marshal({{ .ValueExpression }})
+ if err != nil {
+ return nil, err
+ }
+
+ if len(props) < 3 { // "{}": only additional properties
+ return additional, nil
+ }
+
+ // concatenate the 2 objects
+ return swag.ConcatJSON(props, additional), nil
+}
+{{- end }}
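+{{/*
+  Usage sketch for the staged unmarshalling above, with a hypothetical
+  generated type Settings that has one declared property Name plus
+  string-valued additional properties (all names assumed for illustration):
+
+    var s Settings
+    raw := []byte(`{"name":"prod","region":"eu","tier":"basic"}`)
+    if err := s.UnmarshalJSON(raw); err != nil {
+        return err
+    }
+    // s.Name == "prod"; "region" and "tier" land in the additional
+    // properties map, since stage 2 deletes the declared keys first.
+*/}}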
+
+{{ define "noAdditionalPropertiesSerializer" }}
+// UnmarshalJSON unmarshals this object while disallowing additional properties from JSON
+func ({{.ReceiverName}} *{{ pascalize .Name }}) UnmarshalJSON(data []byte) error {
+ var props {{ template "withoutAdditionalBody" . }}
+
+ dec := json.NewDecoder(bytes.NewReader(data))
+ dec.DisallowUnknownFields()
+ if err := dec.Decode(&props); err != nil {
+ return err
+ }
+
+ {{- $rcv := .ReceiverName }}
+ {{ range .Properties }}
+ {{ .ReceiverName }}.{{ pascalize .Name }} = props.{{ pascalize .Name }}
+ {{- end }}
+ return nil
+}
+{{- end }}
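+
+{{/*
+  Usage sketch (names assumed for illustration): because the decoder is
+  configured with DisallowUnknownFields, any undeclared key is rejected.
+
+    var p StrictParams
+    err := p.UnmarshalJSON([]byte(`{"known":"x","bogus":true}`))
+    // err != nil: json: unknown field "bogus"
+*/}}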
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/aliasedserializer.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/aliasedserializer.gotmpl
new file mode 100644
index 000000000..efdf2718a
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/aliasedserializer.gotmpl
@@ -0,0 +1,11 @@
+{{ define "aliasedSerializer" }}
+// UnmarshalJSON sets a {{ pascalize .Name }} value from JSON input
+func ({{.ReceiverName}} *{{ pascalize .Name }}) UnmarshalJSON(b []byte) error {
+ return ((*{{ .AliasedType }})({{ .ReceiverName}})).UnmarshalJSON(b)
+}
+
+// MarshalJSON retrieves a {{ pascalize .Name }} value as JSON output
+func ({{.ReceiverName}} {{ pascalize .Name }}) MarshalJSON() ([]byte, error) {
+ return ({{ .AliasedType }}({{ .ReceiverName}})).MarshalJSON()
+}
+{{- end }}
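+
+{{/*
+  Sketch of the generated delegation for a hypothetical alias
+  type APIKey strfmt.UUID (names assumed for illustration): the receiver
+  is cast to the aliased type so its existing JSON methods are reused.
+
+    func (m *APIKey) UnmarshalJSON(b []byte) error {
+        return ((*strfmt.UUID)(m)).UnmarshalJSON(b)
+    }
+*/}}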
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/allofserializer.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/allofserializer.gotmpl
new file mode 100644
index 000000000..4359faa7f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/allofserializer.gotmpl
@@ -0,0 +1,180 @@
+{{ define "allOfSerializer" }}
+ {{- $receiverName := .ReceiverName }}
+// UnmarshalJSON unmarshals this object from a JSON structure
+func ({{.ReceiverName}} *{{ pascalize .Name }}) UnmarshalJSON(raw []byte) error {
+ {{- range .AllOf }}
+ // {{ pascalize .Name }}
+ {{- if and .IsAnonymous .Properties }}{{/* unmarshalling properties in allOf anonymous objects */}}
+ {{- $part := pascalize .Name }}
+ var data{{ $part }} struct {
+ {{- range .Properties }}
+ {{- if not .IsBaseType }}
+ {{- if not $.IsExported }}
+ {{ template "privstructfield" . }}
+ {{- else }}
+ {{ pascalize .Name}} {{ template "schemaType" . }} `json:"{{ .OriginalName }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{- end }}
+ {{ else }}
+ {{ if not $.IsExported }}
+ {{ template "privstructfield" . }}
+ {{ else }}
+ {{ pascalize .Name}} json.RawMessage `json:"{{ .OriginalName }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{ end }}
+ {{ end }}
+ {{- end }}
+ {{- if .HasAdditionalProperties }}
+ {{ pascalize .AdditionalProperties.Name }}{{ if not .IsExported }}Field{{ end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{- end }}
+ {{- if .AdditionalItems }}
+ {{ pascalize .AdditionalItems.Name }}{{ if or (not .IsExported) .IsSubType }}Field{{ end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{- end }}
+ }
+ if err := swag.ReadJSON(raw, &data{{ $part }}); err != nil {
+ return err
+ }
+ {{ range .Properties }}
+ {{ $receiverName }}.{{ pascalize .Name }} = data{{ $part }}.{{ pascalize .Name }}
+ {{ end }}
+ {{- else if .IsAnonymous }}
+ var {{ varname .Name }} {{ .GoType }}
+ if err := {{ if .IsBaseType}}Unmarshal{{ .GoType }}(bytes.NewBuffer(raw), &{{ varname .Name }}){{ else }} swag.ReadJSON(raw, &{{ varname .Name }}){{ end }}; err != nil {
+ return err
+ }
+ {{ .ValueExpression }} = {{ varname .Name }}
+ {{- end }}
+ {{- if not .IsAnonymous }}{{/* unmarshalling allOf named objects */}}
+ var {{ varname .Name }} {{ .GoType }}
+ if err := {{ if .IsBaseType}}Unmarshal{{ .GoType }}(bytes.NewBuffer(raw), &{{ varname .Name }}){{ else }} swag.ReadJSON(raw, &{{ varname .Name }}){{ end }}; err != nil {
+ return err
+ }
+ {{ .ReceiverName }}.{{ dropPackage .GoType }} = {{ varname .Name }}
+ {{ end }}
+ {{ end }}
+ {{- if .Properties }}
+ // now for regular properties
+ {{- $part := pascalize .Name }}
+ var props{{ $part }} struct {
+ {{- range .Properties }}
+ {{- if not .IsBaseType }}
+ {{- if not $.IsExported }}
+ {{ template "privstructfield" . }}
+ {{- else }}
+ {{ pascalize .Name}} {{ template "schemaType" . }} `json:"{{ .OriginalName }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{- end }}
+ {{- else }}
+ {{- if not $.IsExported }}
+ {{ template "privstructfield" . }}
+ {{- else }}
+ {{ pascalize .Name}} json.RawMessage `json:"{{ .OriginalName }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{- end }}
+ {{- end }}
+ {{ end }}
+ }
+ if err := swag.ReadJSON(raw, &props{{ $part }}); err != nil {
+ return err
+ }
+ {{- range .Properties }}
+ {{ $receiverName }}.{{ pascalize .Name }} = props{{ $part }}.{{ pascalize .Name }}
+ {{ end }}
+ {{- end }}
+ {{ if .HasAdditionalProperties }}
+ // TODO: AdditionalProperties
+ {{- end }}
+ {{- if .AdditionalItems }}
+ // TODO: AdditionalItems
+ {{- end }}
+ return nil
+}
+
+// MarshalJSON marshals this object to a JSON structure
+func ({{.ReceiverName}} {{ pascalize .Name }}) MarshalJSON() ([]byte, error) {
+ _parts := make([][]byte, 0, {{ len .AllOf }})
+ {{ range .AllOf }}
+ {{- if and .IsAnonymous .Properties }}
+ {{- $part := pascalize .Name }}
+ var data{{ $part }} struct {
+ {{- range .Properties }}
+ {{- if not .IsBaseType }}
+ {{- if not $.IsExported }}
+ {{ template "privstructfield" . }}
+ {{- else }}
+ {{ pascalize .Name}} {{ template "schemaType" . }} `json:"{{ .OriginalName }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{- end }}
+ {{- else }}
+ {{- if not $.IsExported }}
+ {{ template "privstructfield" . }}
+ {{- else }}
+ {{ pascalize .Name}} json.RawMessage `json:"{{ .OriginalName }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{- end }}
+ {{- end }}
+ {{ end }}
+ {{- if .HasAdditionalProperties }}
+ {{ pascalize .AdditionalProperties.Name }}{{ if not .IsExported }}Field{{ end }} map[string]{{ template "schemaType" .AdditionalProperties }} `json:"-"`
+ {{- end }}
+ {{- if .AdditionalItems }}
+ {{ pascalize .AdditionalItems.Name }}{{ if or (not .IsExported) .IsSubType }}Field{{ end }} []{{ template "schemaType" .AdditionalItems }} `json:"-"`
+ {{- end }}
+ }
+
+ {{ range .Properties }}
+ data{{ $part }}.{{ pascalize .Name }} = {{ $receiverName }}.{{ pascalize .Name }}
+ {{ end }}
+
+ jsonData{{ $part }}, err{{ $part }} := swag.WriteJSON(data{{ $part }})
+ if err{{ $part }} != nil {
+ return nil, err{{ $part }}
+ }
+ _parts = append(_parts, jsonData{{ $part }})
+ {{- else if .IsAnonymous }}{{/* marshalling anonymous type composition */}}
+ {{ varname .Name }}, err := swag.WriteJSON({{ .ValueExpression }})
+ if err != nil {
+ return nil, err
+ }
+ _parts = append(_parts, {{ varname .Name }})
+ {{- end }}
+ {{- if not .IsAnonymous }}
+
+ {{ varname .Name }}, err := swag.WriteJSON({{ $receiverName }}.{{ dropPackage .GoType }})
+ if err != nil {
+ return nil, err
+ }
+ _parts = append(_parts, {{ varname .Name }})
+ {{- end }}
+ {{- end }}
+ {{- if .Properties }}
+
+ // now for regular properties
+ {{- $part := pascalize .Name }}
+ var props{{ $part }} struct {
+ {{- range .Properties }}
+ {{- if not .IsBaseType }}
+ {{- if not $.IsExported }}
+ {{ template "privstructfield" . }}
+ {{- else }}
+ {{ pascalize .Name}} {{ template "schemaType" . }} `json:"{{ .OriginalName }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{- end }}
+ {{- else }}
+ {{- if not $.IsExported }}
+ {{ template "privstructfield" . }}
+ {{- else }}
+ {{ pascalize .Name}} json.RawMessage `json:"{{ .OriginalName }}{{ if and (not .Required) .IsEmptyOmitted }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{- end }}
+ {{- end }}
+ {{ end }}
+ }
+ {{- range .Properties }}
+ props{{ $part }}.{{ pascalize .Name }} = {{ $receiverName }}.{{ pascalize .Name }}
+ {{ end }}
+ jsonDataProps{{ $part }}, err{{ $part }} := swag.WriteJSON(props{{ $part }})
+ if err{{ $part }} != nil {
+ return nil, err{{ $part }}
+ }
+ _parts = append(_parts, jsonDataProps{{ $part }})
+ {{- end }}
+ {{- if .HasAdditionalProperties }}
+ {{- end }}
+ {{- if .HasAdditionalItems }}
+ {{- end }}
+ return swag.ConcatJSON(_parts...), nil
+}
+{{- end }}
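+
+{{/*
+  Sketch of the generated MarshalJSON for a hypothetical
+  Employee = allOf(Person) plus a salary property (names assumed for
+  illustration only). Each part is serialized separately and the
+  resulting JSON objects are concatenated:
+
+    func (m Employee) MarshalJSON() ([]byte, error) {
+        _parts := make([][]byte, 0, 1)
+
+        person, err := swag.WriteJSON(m.Person)
+        if err != nil {
+            return nil, err
+        }
+        _parts = append(_parts, person)
+
+        // now for regular properties
+        var propsEmployee struct {
+            Salary int64 `json:"salary,omitempty"`
+        }
+        propsEmployee.Salary = m.Salary
+        jsonDataPropsEmployee, errEmployee := swag.WriteJSON(propsEmployee)
+        if errEmployee != nil {
+            return nil, errEmployee
+        }
+        _parts = append(_parts, jsonDataPropsEmployee)
+
+        return swag.ConcatJSON(_parts...), nil
+    }
+*/}}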
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/basetypeserializer.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/basetypeserializer.gotmpl
new file mode 100644
index 000000000..5a7e9f44c
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/basetypeserializer.gotmpl
@@ -0,0 +1,69 @@
+{{ define "polymorphicSerializer" }}
+// Unmarshal{{ pascalize .Name }}Slice unmarshals polymorphic slices of {{ pascalize .Name }}
+func Unmarshal{{ pascalize .Name }}Slice(reader io.Reader, consumer runtime.Consumer) ([]{{ pascalize .Name }}, error) {
+ var elements []json.RawMessage
+ if err := consumer.Consume(reader, &elements); err != nil {
+ return nil, err
+ }
+
+ var result []{{ pascalize .Name }}
+ for _, element := range elements {
+ obj, err := unmarshal{{ pascalize .Name }}(element, consumer)
+ if err != nil {
+ return nil, err
+ }
+ result = append(result, obj)
+ }
+ return result, nil
+}
+
+// Unmarshal{{ pascalize .Name }} unmarshals polymorphic {{ pascalize .Name }}
+func Unmarshal{{ pascalize .Name }}(reader io.Reader, consumer runtime.Consumer) ({{ pascalize .Name }}, error) {
+ // we need to read this twice, so first into a buffer
+ data, err := io.ReadAll(reader)
+ if err != nil {
+ return nil, err
+ }
+ return unmarshal{{ pascalize .Name }}(data, consumer)
+}
+
+func unmarshal{{ pascalize .Name }}(data []byte, consumer runtime.Consumer) ({{ pascalize .Name }}, error) {
+ buf := bytes.NewBuffer(data)
+ {{ if .Discriminates }} buf2 := bytes.NewBuffer(data) {{ end }}
+
+ // the first read is only there to fetch the value of the {{ .DiscriminatorField }} property.
+ var getType struct { {{ pascalize .DiscriminatorField }} string `json:{{ printf "%q" .DiscriminatorField }}` }
+ if err := consumer.Consume(buf, &getType); err != nil {
+ return nil, err
+ }
+
+ if err := validate.RequiredString({{ printf "%q" .DiscriminatorField }}, "body", getType.{{ pascalize .DiscriminatorField }}); err != nil {
+ return nil, err
+ }
+
+ // The value of {{ .DiscriminatorField }} is used to determine which type to create and unmarshal the data into
+ switch getType.{{ pascalize .DiscriminatorField }} {
+ {{- range $k, $v := .Discriminates }}
+ case {{ printf "%q" $k }}:
+ var result {{ if eq (upper (pascalize $.Name)) (upper $v) }}{{ camelize $.Name }}{{ else }}{{ $v }}{{ end }}
+ if err := consumer.Consume(buf2, &result); err != nil {
+ return nil, err
+ }
+ return &result, nil
+ {{- end }}
+ }
+ return nil, errors.New(422, "invalid {{ .DiscriminatorField }} value: %q", getType.{{ pascalize .DiscriminatorField }})
+}
+{{- end }}
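+{{/*
+  Usage sketch for the discriminator-based factory above, assuming a
+  hypothetical base type Pet with discriminator petType and a Dog
+  subtype (all names assumed for illustration only):
+
+    pet, err := UnmarshalPet(
+        bytes.NewReader([]byte(`{"petType":"dog","name":"rex"}`)),
+        runtime.JSONConsumer(),
+    )
+    // pet is a *Dog; an unknown petType yields a 422 "invalid petType value" error
+*/}}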
+
+{{ define "baseTypeSerializer" }}
+// Unmarshal{{ pascalize .Name }} unmarshals polymorphic {{ pascalize .Name }}
+func Unmarshal{{ pascalize .Name }}(reader io.Reader, consumer runtime.Consumer) ({{ pascalize .Name }}, error) {
+ return Unmarshal{{ pascalize .GoType }}(reader, consumer)
+}
+
+// Unmarshal{{ pascalize .Name }}Slice unmarshals polymorphic slices of {{ pascalize .Name }}
+func Unmarshal{{ pascalize .Name }}Slice(reader io.Reader, consumer runtime.Consumer) ([]{{ pascalize .Name }}, error) {
+ return Unmarshal{{ pascalize .GoType }}Slice(reader, consumer)
+}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/marshalbinaryserializer.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/marshalbinaryserializer.gotmpl
new file mode 100644
index 000000000..17c36cd06
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/marshalbinaryserializer.gotmpl
@@ -0,0 +1,19 @@
+{{ define "marshalBinarySerializer" }}
+// MarshalBinary interface implementation
+func ({{.ReceiverName}} *{{ pascalize .Name }}) MarshalBinary() ([]byte, error) {
+ if {{ .ReceiverName }} == nil {
+ return nil, nil
+ }
+ return swag.WriteJSON({{ .ReceiverName }})
+}
+
+// UnmarshalBinary interface implementation
+func ({{.ReceiverName}} *{{ pascalize .Name }}) UnmarshalBinary(b []byte) error {
+ var res {{ pascalize .Name }}
+ if err := swag.ReadJSON(b, &res); err != nil {
+ return err
+ }
+ *{{ .ReceiverName }} = res
+ return nil
+}
+{{- end }}
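+
+{{/*
+  Usage sketch (type name assumed for illustration): MarshalBinary and
+  UnmarshalBinary round-trip the model through JSON via swag, which also
+  makes them a convenient deep copy:
+
+    var dst Model
+    b, err := src.MarshalBinary()
+    if err == nil {
+        err = dst.UnmarshalBinary(b)
+    }
+*/}}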
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/schemaserializer.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/schemaserializer.gotmpl
new file mode 100644
index 000000000..76d814779
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/schemaserializer.gotmpl
@@ -0,0 +1,15 @@
+{{ define "schemaSerializer" }}{{/* switches to the appropriate serializer for any given type */}}
+ {{- if and .IsSubType (not .HasBaseType) }}
+ {{ template "hasDiscriminatedSerializer" . }}
+ {{- else if .IsTuple }}
+ {{ template "tupleSerializer" . }}
+ {{- else if .HasBaseType }}
+ {{ template "hasDiscriminatedSerializer" . }}
+ {{- else if .IsAdditionalProperties }}
+ {{ template "additionalPropertiesSerializer" . }}
+ {{- else if and (gt (len .AllOf) 0) (not .IsSubType ) }}
+ {{ template "allOfSerializer" . }}
+ {{- else if and .IsComplexObject .StrictAdditionalProperties }}
+ {{ template "noAdditionalPropertiesSerializer" . }}
+ {{- end }}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/subtypeserializer.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/subtypeserializer.gotmpl
new file mode 100644
index 000000000..b15613efc
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/subtypeserializer.gotmpl
@@ -0,0 +1,172 @@
+{{ define "hasDiscriminatedSerializer" }}
+// UnmarshalJSON unmarshals this object with a polymorphic type from a JSON structure
+func ({{.ReceiverName}} *{{ pascalize .Name }}) UnmarshalJSON(raw []byte) error {
+ var data {{ template "withoutBaseTypeBody" . }}
+ buf := bytes.NewBuffer(raw)
+ dec := json.NewDecoder(buf)
+ dec.UseNumber()
+
+ if err := dec.Decode(&data); err != nil {
+ return err
+ }
+ {{ if or .IsBaseType .IsSubType }}
+ var base {{ template "JustBaseTypeBody" . }}
+ buf = bytes.NewBuffer(raw)
+ dec = json.NewDecoder(buf)
+ dec.UseNumber()
+
+ if err := dec.Decode(&base); err != nil {
+ return err
+ }
+ {{- end }}
+
+ {{ range .AllOf }}
+ {{- if not .IsBaseType }}
+ {{ range .Properties }}
+ {{- if or .IsBaseType (not .IsExported) }}
+ {{- if not .Required }}
+ var allOf{{ pascalize .Name }} {{ if .IsArray }}[]{{ pascalize .Items.GoType }}{{ else }}{{ pascalize .GoType }}{{ end }}
+ if string(data.{{ pascalize .Name }}) != "null" {
+ {{ camelize .Name }}, err := Unmarshal{{ if .IsArray }}{{ pascalize .Items.GoType }}Slice{{ else }}{{ pascalize .GoType }}{{ end }}(bytes.NewBuffer(data.{{ pascalize .Name }}), runtime.JSONConsumer())
+ if err != nil && err != io.EOF {
+ return err
+ }
+ allOf{{ pascalize .Name }} = {{ camelize .Name }}
+ }
+ {{- else }}
+ allOf{{ pascalize .Name }}, err := Unmarshal{{ if .IsArray }}{{ pascalize .Items.GoType }}Slice{{ else }}{{ pascalize .GoType }}{{ end }}(bytes.NewBuffer(data.{{ pascalize .Name }}), runtime.JSONConsumer())
+ if err != nil && err != io.EOF {
+ return err
+ }
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{ range .Properties }}
+ {{- if or .IsBaseType (not .IsExported) }}
+ {{- if not .Required }}
+ var prop{{ pascalize .Name }} {{ if .IsArray }}[]{{ pascalize .Items.GoType }}{{ else }}{{ pascalize .GoType }}{{ end }}
+ if string(data.{{ pascalize .Name }}) != "null" {
+ {{ camelize .Name }}, err := Unmarshal{{ if .IsArray }}{{ pascalize .Items.GoType }}Slice{{ else }}{{ pascalize .GoType }}{{ end }}(bytes.NewBuffer(data.{{ pascalize .Name }}), runtime.JSONConsumer())
+ if err != nil && err != io.EOF {
+ return err
+ }
+ prop{{ pascalize .Name }} = {{ camelize .Name }}
+ }
+ {{- else }}
+ prop{{ pascalize .Name }}, err := Unmarshal{{ if .IsArray }}{{ pascalize .Items.GoType }}Slice{{ else }}{{ pascalize .GoType }}{{ end }}(bytes.NewBuffer(data.{{ pascalize .Name }}), runtime.JSONConsumer())
+ if err != nil && err != io.EOF {
+ return err
+ }
+ {{- end }}
+ {{- end }}
+ {{- end }}
+
+ var result {{ pascalize .Name }}
+ {{ range $_, $parent := .AllOf }}
+ {{- if $parent.IsAnonymous }}
+ {{- if $parent.IsBaseType }}
+ {{ range $idx, $val := $parent.Properties }}
+ {{- if ne $parent.DiscriminatorField $val.Name }}
+ {{- if $val.IsExported }}
+ result.{{ camelize $val.Name }}Field = base.{{ pascalize $val.Name }}
+ {{- else }}
+ result.{{ camelize $val.Name }}Field = allOf{{ pascalize $val.Name }}
+ {{- end }}
+ {{- else }}
+ if base.{{ pascalize $val.Name }} != result.{{ pascalize $val.Name }}() {
+ /* Not the type we're looking for. */
+ return errors.New(422, "invalid {{$val.Name}} value: %q", base.{{ pascalize $val.Name }})
+ }
+ {{- end }}
+ {{- end }}
+ {{- else }}
+ {{ range $idx, $val := $parent.Properties }}
+ {{- if $val.IsBaseType }}
+ result.{{ camelize $val.Name }}Field = allOf{{ pascalize $val.Name }}
+ {{- else }}
+ result.{{ pascalize $val.Name }} = data.{{ pascalize $val.Name }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- else }}
+ {{- if and $parent.IsBaseType $parent.IsExported }}
+ {{ range $idx, $val := $parent.Properties }}
+ {{- if ne $parent.DiscriminatorField $val.Name }}
+ {{- if $val.IsExported }}
+ result.{{ camelize $val.Name }}Field = base.{{ pascalize $val.Name }}
+ {{ else }}
+ result.{{ camelize $val.Name }}Field = allOf{{ pascalize $val.Name }}
+ {{- end }}
+ {{- else }}
+ if base.{{ pascalize $val.Name }} != result.{{ pascalize $val.Name }}() {
+ /* Not the type we're looking for. */
+ return errors.New(422, "invalid {{$val.Name}} value: %q", base.{{ pascalize $val.Name }})
+ }
+ {{- end }}
+ {{- end }}
+ {{- else }}
+ result.{{ $parent.GoType }} = data.{{ $parent.GoType }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{ range .Properties }}
+ // {{ .Name }}
+ result.{{ if .IsBaseType }}{{ camelize .Name }}Field{{ else }}{{ pascalize .Name }}{{ end }} = {{ if .IsBaseType }}prop{{ pascalize .Name }}{{ else }}data.{{ pascalize .Name}}{{ end }}
+ {{ end }}
+ *{{ .ReceiverName }} = result
+
+ {{ if .IsAdditionalProperties }}
+ // Additional Properties: read raw, remove named properties, and add to map
+ rawProps := make(map[string]{{ if .AdditionalProperties }}json.RawMessage{{ else }}interface{}{{ end }})
+ if err := json.Unmarshal(raw, &rawProps); err != nil {
+ return err
+ }
+ {{ range .Properties }}
+ delete(rawProps, {{ printf "%q" .Name }})
+ {{- end }}
+ {{ if .AdditionalProperties }}
+ if len(rawProps) > 0 {
+ {{ .ValueExpression }} = make(map[string]{{ template "schemaType" .AdditionalProperties }})
+ for k, v := range rawProps {
+ var toadd {{ template "schemaType" .AdditionalProperties }}
+ if err := json.Unmarshal(v, {{if not .AdditionalProperties.IsNullable }}&{{ end }}toadd); err != nil {
+ return err
+ }
+ {{ .ValueExpression }}[k] = toadd
+ }
+ }
+ {{- else }}
+ {{ .ValueExpression }} = rawProps
+ {{- end }}
+ {{- end }}
+
+ return nil
+}
+
+// MarshalJSON marshals this object with a polymorphic type to a JSON structure
+func ({{.ReceiverName}} {{ pascalize .Name }}) MarshalJSON() ([]byte, error) { {{ $receiverName := .ReceiverName }}
+ var b1, b2, b3 []byte
+ var err error
+ b1, err = json.Marshal({{ template "withoutBaseTypeBodyOrNonExported" . }})
+ if err != nil {
+ return nil, err
+ }
+ b2, err = json.Marshal({{ template "withBaseTypeBodyAndNonExported" . }})
+ if err != nil {
+ return nil, err
+ }
+ {{ if .IsAdditionalProperties }}
+ if len({{ .ValueExpression }}) > 0 {
+ // make JSON object for the additional properties
+ b3, err = json.Marshal({{ .ValueExpression }})
+ if err != nil {
+ return nil, err
+ }
+ }
+ {{- end }}
+
+ return swag.ConcatJSON(b1, b2, b3), nil
+}
+{{- end }}
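
The MarshalJSON above serializes a subtype in up to three slices and splices them together with swag.ConcatJSON, which merges JSON objects into one. A tiny standalone illustration with made-up values:

```go
package main

import (
	"fmt"

	"github.com/go-openapi/swag"
)

func main() {
	// b1: the subtype's own properties, b2: base-type/discriminator fields,
	// b3: additional properties -- mirroring the three buffers above.
	b1 := []byte(`{"name":"Rex"}`)
	b2 := []byte(`{"petType":"dog"}`)
	b3 := []byte(`{"x-extra":"value"}`)
	fmt.Println(string(swag.ConcatJSON(b1, b2, b3)))
	// {"name":"Rex","petType":"dog","x-extra":"value"}
}
```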
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/tupleserializer.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/tupleserializer.gotmpl
new file mode 100644
index 000000000..c05e844bb
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/serializers/tupleserializer.gotmpl
@@ -0,0 +1,66 @@
+{{ define "tupleSerializer" }}
+// UnmarshalJSON unmarshals this tuple type from a JSON array
+func ({{.ReceiverName}} *{{ pascalize .Name }}) UnmarshalJSON(raw []byte) error {
+ // stage 1: decode the array itself, leaving the elements raw
+ var stage1 []json.RawMessage
+ buf := bytes.NewBuffer(raw)
+ dec := json.NewDecoder(buf)
+ dec.UseNumber()
+
+ if err := dec.Decode(&stage1); err != nil {
+ return err
+ }
+
+ // stage 2: hydrates struct members with tuple elements
+ {{- if .AdditionalItems }}
+ var lastIndex int
+ {{ end }}
+ {{ range $idx, $val := .Properties }}if len(stage1) > {{ $idx }} {
+ var data{{ pascalize .Name }} {{ template "dereffedSchemaType" . }}
+ buf = bytes.NewBuffer(stage1[{{ $idx }}])
+ dec := json.NewDecoder(buf)
+ dec.UseNumber()
+ if err := dec.Decode(&data{{ pascalize .Name }}); err != nil {
+ return err
+ }
+ {{ .ReceiverName }}.{{ if .IsExported }}{{ pascalize .Name }}{{ else }}{{ camelize .Name }}{{ end }} = {{ if .IsNullable }}&{{ end }}data{{ pascalize .Name }}
+ {{ if $.AdditionalItems }}
+ lastIndex = {{ $idx }}
+ {{ end }}
+ }
+ {{ end }}
+ {{ if .AdditionalItems }}
+ // stage 3: hydrates AdditionalItems
+ if len(stage1) > lastIndex+1 {
+ for _, val := range stage1[lastIndex+1:] {
+ var toadd {{ template "schemaType" .AdditionalItems }}
+ buf = bytes.NewBuffer(val)
+ dec := json.NewDecoder(buf)
+ dec.UseNumber()
+ if err := dec.Decode({{ if not .AdditionalItems.IsNullable }}&{{ end }}toadd); err != nil {
+ return err
+ }
+ {{- with .AdditionalItems }}
+ {{ $.ValueExpression }}.{{- if .IsExported }}{{ pascalize .Name }}{{ else }}{{ camelize .Name }}{{ end }} = append({{ $.ValueExpression }}.{{- if .IsExported }}{{ pascalize .Name }}{{ else }}{{ camelize .Name }}{{ end }}, toadd)
+ {{- end }}
+ }
+ }
+ {{- end }}
+ return nil
+}
+
+// MarshalJSON marshals this tuple type into a JSON array
+func ({{.ReceiverName}} {{ pascalize .Name }}) MarshalJSON() ([]byte, error) {
+ data := []interface{}{
+ {{ range .Properties -}}
+ {{.ReceiverName}}.{{ pascalize .Name }},
+ {{- end }}
+ }
+ {{ with .AdditionalItems }}
+ for _, v := range {{ $.ValueExpression }}.{{ if .IsExported }}{{ pascalize .Name }}{{ else }}{{ camelize .Name }}{{ end }} {
+ data = append(data, v)
+ }
+ {{- end }}
+ return json.Marshal(data)
+}
+{{- end }}
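
The same staged decode, rendered by hand for a hypothetical [string, int] tuple with integer additionalItems, using only encoding/json (the generated code also sets UseNumber on its decoders, elided here):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Pair is a hypothetical tuple type: [string, int] plus additional int items.
type Pair struct {
	Label string
	Count int
	Rest  []int // additionalItems
}

func (p *Pair) UnmarshalJSON(raw []byte) error {
	// stage 1: decode the array itself, leaving the elements raw
	var stage1 []json.RawMessage
	if err := json.Unmarshal(raw, &stage1); err != nil {
		return err
	}
	// stage 2: hydrate the fixed members by index
	if len(stage1) > 0 {
		if err := json.Unmarshal(stage1[0], &p.Label); err != nil {
			return err
		}
	}
	if len(stage1) > 1 {
		if err := json.Unmarshal(stage1[1], &p.Count); err != nil {
			return err
		}
	}
	// stage 3: everything past the fixed members becomes additional items
	if len(stage1) > 2 {
		for _, v := range stage1[2:] {
			var n int
			if err := json.Unmarshal(v, &n); err != nil {
				return err
			}
			p.Rest = append(p.Rest, n)
		}
	}
	return nil
}

// MarshalJSON writes the members back out as a JSON array, fixed items first.
func (p Pair) MarshalJSON() ([]byte, error) {
	data := []interface{}{p.Label, p.Count}
	for _, v := range p.Rest {
		data = append(data, v)
	}
	return json.Marshal(data)
}

func main() {
	var p Pair
	if err := json.Unmarshal([]byte(`["apples",3,1,2]`), &p); err != nil {
		panic(err)
	}
	out, _ := json.Marshal(p)
	fmt.Printf("%+v %s\n", p, out) // {Label:apples Count:3 Rest:[1 2]} ["apples",3,1,2]
}
```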
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/autoconfigureapi.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/autoconfigureapi.gotmpl
new file mode 100644
index 000000000..629b4b22b
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/autoconfigureapi.gotmpl
@@ -0,0 +1,205 @@
+// Code generated by go-swagger; DO NOT EDIT.
+// Auto-configures API handler implementations.
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .APIPackage }}
+
+import (
+ "context"
+ "crypto/tls"
+ "io"
+ "log"
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/runtime/middleware"
+ "github.com/go-openapi/runtime/security"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+{{ with .GenOpts }}
+//go:generate swagger generate server --target {{ .TargetPath }} --name {{ .Name }} --spec {{ .SpecPath }}
+{{- if .APIPackage }}{{ if ne .APIPackage "operations" }} --api-package {{ .APIPackage }}{{ end }}{{ end }}
+{{- if .ModelPackage }}{{ if ne .ModelPackage "models" }} --model-package {{ .ModelPackage }}{{ end }}{{ end }}
+{{- if .ServerPackage }}{{ if ne .ServerPackage "restapi"}} --server-package {{ .ServerPackage }}{{ end }}{{ end }}
+{{- if .ClientPackage }}{{ if ne .ClientPackage "client" }} --client-package {{ .ClientPackage }}{{ end }}{{ end }}
+{{- if .ImplementationPackage }} --implementation-package {{ .ImplementationPackage }}{{ end }}
+{{- if .TemplateDir }} --template-dir {{ .TemplateDir }}{{ end }}
+{{- range .Operations }} --operation {{ . }}{{ end }}
+{{- range .Tags }} --tags {{ . }}{{ end }}
+{{- if .Principal }} --principal {{ .Principal }}{{ end }}
+{{- if .DefaultScheme }}{{ if ne .DefaultScheme "http" }} --default-scheme {{ .DefaultScheme }}{{ end }}{{ end }}
+{{- range .Models }} --model {{ . }}{{ end }}
+{{- if or (not .IncludeModel) (not .IncludeValidator) }} --skip-models{{ end }}
+{{- if or (not .IncludeHandler) (not .IncludeParameters ) (not .IncludeResponses) }} --skip-operations{{ end }}
+{{- if not .IncludeSupport }} --skip-support{{ end }}
+{{- if not .IncludeMain }} --exclude-main{{ end }}
+{{- if .ExcludeSpec }} --exclude-spec{{ end }}
+{{- if .DumpData }} --dump-data{{ end }}
+{{- if .StrictResponders }} --strict-responders{{ end }}
+{{ end }}
+
+// This file auto-configures the API backend implementation.
+// The {{.ImplementationPackageAlias}} package must already exist.
+// {{.ImplementationPackageAlias}}.New() is implemented by the user and must return an object
+// or interface that implements the Handler interface defined below.
+var Impl Handler = {{.ImplementationPackageAlias}}.New()
+
+// Handler handles all api server backend configurations and requests
+type Handler interface{
+{{- if .SecurityDefinitions }}
+ Authable
+{{- end }}
+ Configurable
+{{ range .OperationGroups -}}
+ {{ pascalize .Name }}Handler
+{{ end -}}
+}
+
+// Configurable handles all server configurations
+type Configurable interface {
+ ConfigureFlags(api *{{.APIPackageAlias}}.{{ pascalize .Name }}API)
+ ConfigureTLS(tlsConfig *tls.Config)
+ ConfigureServer(s *http.Server, scheme, addr string)
+ CustomConfigure(api *{{.APIPackageAlias}}.{{ pascalize .Name }}API)
+ SetupMiddlewares(handler http.Handler) http.Handler
+ SetupGlobalMiddleware(handler http.Handler) http.Handler
+}
+
+{{- if .SecurityDefinitions }}
+// Authable handles server authentication
+type Authable interface{
+ {{- range .SecurityDefinitions }}
+ {{- if .IsBasicAuth }}
+ // Applies when the Authorization header is set with the Basic scheme
+ {{ pascalize .ID }}Auth(user string, pass string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error)
+ {{- else if .IsAPIKeyAuth }}
+ // Applies when the "{{ .Name }}" {{ .Source }} is set
+ {{ pascalize .ID }}Auth(token string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error)
+ {{- else if .IsOAuth2 }}
+ {{ pascalize .ID }}Auth(token string, scopes []string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error)
+ {{- end }}
+ {{- end }}
+}
+{{- end }}
+
+{{- $package := .Package }}
+{{- $apipackagealias := .APIPackageAlias }}
+{{ range .OperationGroups -}}
+/* {{ pascalize .Name }}Handler {{ .Description }} */
+type {{ pascalize .Name }}Handler interface {
+{{ range .Operations -}}
+ {{ if .Summary -}}
+ /* {{ pascalize .Name }} {{ .Summary }} */
+ {{ else if .Description -}}
+ /* {{ pascalize .Name }} {{ .Description }} */
+ {{ end -}}
+ {{ pascalize .Name }}(params {{ if ne .Package $package }}{{ .PackageAlias }}{{ else }}{{- $apipackagealias }}{{ end }}.
+ {{- pascalize .Name }}Params {{- if .Authorized}}, principal {{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}{{end}})
+ {{- if $.GenOpts.StrictResponders }} {{.Package}}.{{ pascalize .Name }}Responder {{ else }} middleware.Responder {{ end }}
+{{ end -}}
+}
+{{ end }}
+
+func configureFlags(api *{{.APIPackageAlias}}.{{ pascalize .Name }}API) {
+ Impl.ConfigureFlags(api)
+}
+
+func configureAPI(api *{{.APIPackageAlias}}.{{ pascalize .Name }}API) http.Handler {
+
+ api.ServeError = errors.ServeError
+
+ api.UseSwaggerUI()
+
+ {{ range .Consumes }}
+ {{- if .Implementation }}
+ api.{{ pascalize .Name }}Consumer = {{ .Implementation }}
+ {{- else }}
+ api.{{ pascalize .Name }}Consumer = runtime.ConsumerFunc(func(r io.Reader, target interface{}) error {
+ return Impl.{{ pascalize .Name }}Consume(r, target)
+ })
+ {{- end }}
+ {{- end }}
+ {{ range .Produces }}
+ {{- if .Implementation }}
+ api.{{ pascalize .Name }}Producer = {{ .Implementation }}
+ {{- else }}
+ api.{{ pascalize .Name }}Producer = runtime.ProducerFunc(func(w io.Writer, data interface{}) error {
+ return Impl.{{ pascalize .Name }}Produce(w, data)
+ })
+ {{- end }}
+ {{- end}}
+ {{ range .SecurityDefinitions }}
+ {{- if .IsBasicAuth }}
+ // Applies when the Authorization header is set with the Basic scheme
+ api.{{ pascalize .ID }}Auth = func(user string, pass string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ return Impl.{{ pascalize .ID }}Auth(user, pass)
+ }
+
+ {{- else if .IsAPIKeyAuth }}
+ // Applies when the "{{ .Name }}" {{ .Source }} is set
+ api.{{ pascalize .ID }}Auth = func(token string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ return Impl.{{ pascalize .ID }}Auth(token)
+ }
+ {{- else if .IsOAuth2 }}
+ api.{{ pascalize .ID }}Auth = func(token string, scopes []string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ return Impl.{{ pascalize .ID }}Auth(token, scopes)
+ }
+ {{- end }}
+ {{- end }}
+
+ {{- $package := .Package }}
+ {{- $apipackagealias := .APIPackageAlias }}
+ {{ range .Operations }}
+ api.{{ if ne .Package $package }}{{pascalize .Package}}{{ end }}{{ pascalize .Name }}Handler =
+ {{- if ne .Package $package }}
+ {{- .PackageAlias }}.{{- pascalize .Name }}HandlerFunc(func(params {{ .PackageAlias }}.{{- pascalize .Name }}Params
+ {{- else }}
+ {{- $apipackagealias }}.{{- pascalize .Name }}HandlerFunc(func(params {{ $apipackagealias }}.{{- pascalize .Name }}Params
+ {{- end }}
+ {{- if .Authorized}}, principal {{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}{{end}})
+ {{- if $.GenOpts.StrictResponders }} {{.Package}}.{{ pascalize .Name }}Responder { {{ else }} middleware.Responder { {{ end }}
+ return Impl.{{ pascalize .Name }}(params {{- if .Authorized}}, principal {{ end }})
+ })
+ {{- end }}
+
+ api.PreServerShutdown = func() { }
+
+ api.ServerShutdown = func() { }
+
+ // CustomConfigure can override or add to configurations set above
+ Impl.CustomConfigure(api)
+
+ return setupGlobalMiddleware(api.Serve(setupMiddlewares))
+}
+
+// The TLS configuration before the HTTPS server starts.
+func configureTLS(tlsConfig *tls.Config) {
+ // Make all necessary changes to the TLS configuration here.
+ Impl.ConfigureTLS(tlsConfig)
+}
+
+// As soon as the server is initialized, but before it runs, this function will be called.
+// If you need to modify the config, or store the server instance to stop it individually later, this is the place.
+// This function can be called multiple times, depending on the number of serving schemes.
+// scheme value will be set accordingly: "http", "https" or "unix".
+func configureServer(s *http.Server, scheme, addr string) {
+ Impl.ConfigureServer(s, scheme, addr)
+}
+
+// The middleware configuration is for the handler executors. These do not apply to the swagger.json document.
+// The middleware executes after routing but before authentication, binding and validation.
+func setupMiddlewares(handler http.Handler) http.Handler {
+ return Impl.SetupMiddlewares(handler)
+}
+
+// The middleware configuration happens before anything else; this middleware also applies to serving the swagger.json document.
+// So this is a good place to plug in a panic handling middleware, logging and metrics.
+func setupGlobalMiddleware(handler http.Handler) http.Handler {
+ return Impl.SetupGlobalMiddleware(handler)
+}
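
For orientation, a hedged sketch of the user-side package this template expects: <implementation-package>.New() must return something satisfying Handler. Only the generic Configurable hooks are shown, since ConfigureFlags, CustomConfigure, the Auth methods, and the per-operation-group handlers are typed against packages that only exist after generation. The package name impl and all bodies are illustrative.

```go
package impl

import (
	"crypto/tls"
	"log"
	"net/http"
)

// Implementation is the user-side type returned by New().
type Implementation struct{}

func New() *Implementation { return &Implementation{} }

// ConfigureTLS tweaks the TLS config before the HTTPS server starts.
func (i *Implementation) ConfigureTLS(cfg *tls.Config) {
	cfg.MinVersion = tls.VersionTLS12
}

// ConfigureServer runs once per serving scheme before the server starts.
func (i *Implementation) ConfigureServer(s *http.Server, scheme, addr string) {
	log.Printf("serving on %s://%s", scheme, addr)
}

// SetupMiddlewares wraps the routed handlers (after routing, before auth).
func (i *Implementation) SetupMiddlewares(h http.Handler) http.Handler { return h }

// SetupGlobalMiddleware wraps everything, including the swagger.json document.
func (i *Implementation) SetupGlobalMiddleware(h http.Handler) http.Handler { return h }
```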
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/builder.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/builder.gotmpl
new file mode 100644
index 000000000..fda11859a
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/builder.gotmpl
@@ -0,0 +1,446 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{.Package}}
+{{ $package := .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "net/http"
+ "strings"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/runtime/middleware"
+ "github.com/go-openapi/runtime/security"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+// New{{ pascalize .Name }}API creates a new {{ pascalize .Name }} instance
+func New{{ pascalize .Name }}API(spec *loads.Document) *{{ pascalize .Name }}API {
+ return &{{ pascalize .Name }}API{
+ handlers: make(map[string]map[string]http.Handler),
+ formats: strfmt.Default,
+ defaultConsumes: "{{ .DefaultConsumes }}",
+ defaultProduces: "{{ .DefaultProduces }}",
+ customConsumers: make(map[string]runtime.Consumer),
+ customProducers: make(map[string]runtime.Producer),
+ PreServerShutdown: func() { },
+ ServerShutdown: func() { },
+ spec: spec,
+ useSwaggerUI: false,
+ ServeError: errors.ServeError,
+ BasicAuthenticator: security.BasicAuth,
+ APIKeyAuthenticator: security.APIKeyAuth,
+ BearerAuthenticator: security.BearerAuth,
+ {{ range .Consumes }}
+ {{- if .Implementation }}
+ {{ pascalize .Name }}Consumer: {{ .Implementation }},
+ {{- else }}
+ {{ pascalize .Name }}Consumer: runtime.ConsumerFunc(func(r io.Reader, target interface{}) error {
+ return errors.NotImplemented("{{.Name}} consumer has not yet been implemented")
+ }),
+ {{- end }}
+ {{- end }}
+ {{ range .Produces }}
+ {{- if .Implementation }}
+ {{ pascalize .Name }}Producer: {{ .Implementation }},
+ {{- else }}
+ {{ pascalize .Name }}Producer: runtime.ProducerFunc(func(w io.Writer, data interface{}) error {
+ return errors.NotImplemented("{{.Name}} producer has not yet been implemented")
+ }),
+ {{- end }}
+ {{- end }}
+ {{ range .Operations }}
+ {{ if ne .Package $package }}{{ pascalize .Package }}{{ end }}{{ pascalize .Name }}Handler:
+ {{- if ne .Package $package }}{{ .PackageAlias }}.{{ end }}{{ pascalize .Name }}HandlerFunc(func(params {{ if ne .Package $package }}{{ .PackageAlias }}.{{end }}
+ {{- if $.GenOpts.StrictResponders}}
+ {{- pascalize .Name }}Params{{if .Authorized}}, principal {{if .PrincipalIsNullable }}*{{ end }}{{.Principal}}{{end}}) {{if ne .Package $package }}{{ .Package }}.{{ end }}{{ pascalize .Name }}Responder {
+ return {{if ne .Package $package }}{{ .Package }}.{{ end }}{{ pascalize .Name }}NotImplemented()
+ {{else}}
+ {{- pascalize .Name }}Params{{if .Authorized}}, principal {{if .PrincipalIsNullable }}*{{ end }}{{.Principal}}{{end}}) middleware.Responder {
+ return middleware.NotImplemented("operation {{ if ne .Package $package }}{{ .Package }}.{{ end }}{{pascalize .Name}} has not yet been implemented")
+ {{ end -}}
+ }),
+ {{- end }}
+ {{ range .SecurityDefinitions }}
+ {{- if .IsBasicAuth }}
+ // Applies when the Authorization header is set with the Basic scheme
+ {{ pascalize .ID }}Auth: func(user string, pass string) ({{if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ return nil, errors.NotImplemented("basic auth ({{ .ID }}) has not yet been implemented")
+ },
+ {{- end }}
+ {{- if .IsAPIKeyAuth }}
+ // Applies when the "{{ .Name }}" {{ .Source }} is set
+ {{ pascalize .ID }}Auth: func(token string) ({{if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ return nil, errors.NotImplemented("api key auth ({{ .ID }}) {{.Name}} from {{.Source}} param [{{ .Name }}] has not yet been implemented")
+ },
+ {{- end }}
+ {{- if .IsOAuth2 }}
+ {{ pascalize .ID }}Auth: func(token string, scopes []string) ({{if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ return nil, errors.NotImplemented("oauth2 bearer auth ({{ .ID }}) has not yet been implemented")
+ },
+ {{- end }}
+ {{- end }}
+ {{- if .SecurityDefinitions }}
+ // the default authorizer is security.Authorized(), meaning no requests are blocked
+ APIAuthorizer: security.Authorized(),
+ {{- end }}
+ }
+}
+
+/*{{ pascalize .Name }}API {{ if .Info }}{{ if .Info.Description }}{{.Info.Description}}{{ else }}the {{ humanize .Name }} API{{ end }}{{ end }} */
+type {{ pascalize .Name }}API struct {
+ spec *loads.Document
+ context *middleware.Context
+ handlers map[string]map[string]http.Handler
+ formats strfmt.Registry
+ customConsumers map[string]runtime.Consumer
+ customProducers map[string]runtime.Producer
+ defaultConsumes string
+ defaultProduces string
+ Middleware func(middleware.Builder) http.Handler
+ useSwaggerUI bool
+
+ // BasicAuthenticator generates a runtime.Authenticator from the supplied basic auth function.
+ // It has a default implementation in the security package, however you can replace it for your particular usage.
+ BasicAuthenticator func(security.UserPassAuthentication) runtime.Authenticator
+
+ // APIKeyAuthenticator generates a runtime.Authenticator from the supplied token auth function.
+ // It has a default implementation in the security package, however you can replace it for your particular usage.
+ APIKeyAuthenticator func(string, string, security.TokenAuthentication) runtime.Authenticator
+
+ // BearerAuthenticator generates a runtime.Authenticator from the supplied bearer token auth function.
+ // It has a default implementation in the security package, however you can replace it for your particular usage.
+ BearerAuthenticator func(string, security.ScopedTokenAuthentication) runtime.Authenticator
+ {{ range .Consumes }}
+ // {{ pascalize .Name }}Consumer registers a consumer for the following mime types:
+ {{- range .AllSerializers }}
+ // - {{ .MediaType }}
+ {{- end }}
+ {{ pascalize .Name }}Consumer runtime.Consumer
+ {{- end }}
+ {{ range .Produces}}
+ // {{ pascalize .Name }}Producer registers a producer for the following mime types:
+ {{- range .AllSerializers }}
+ // - {{ .MediaType }}
+ {{- end }}
+ {{ pascalize .Name }}Producer runtime.Producer
+ {{- end }}
+ {{ range .SecurityDefinitions}}
+ {{- if .IsBasicAuth}}
+
+ // {{ pascalize .ID }}Auth registers a function that takes username and password and returns a principal
+ // it performs authentication with basic auth
+ {{ pascalize .ID }}Auth func(string, string) ({{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}, error)
+ {{- end }}
+ {{- if .IsAPIKeyAuth}}
+
+ // {{ pascalize .ID }}Auth registers a function that takes a token and returns a principal
+ // it performs authentication based on an api key {{ .Name }} provided in the {{.Source}}
+ {{ pascalize .ID }}Auth func(string) ({{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}, error)
+ {{- end }}
+ {{- if .IsOAuth2 }}
+
+ // {{ pascalize .ID }}Auth registers a function that takes an access token and a collection of required scopes and returns a principal
+ // it performs authentication based on an oauth2 bearer token provided in the request
+ {{ pascalize .ID }}Auth func(string, []string) ({{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}, error)
+ {{- end }}
+ {{- end }}
+ {{- if .SecurityDefinitions }}
+
+ // APIAuthorizer provides access control (ACL/RBAC/ABAC) by providing access to the request and authenticated principal
+ APIAuthorizer runtime.Authorizer
+ {{- end }}
+ {{- $package := .Package }}
+ {{ range .Operations }}
+ // {{ if ne .Package $package }}{{ pascalize .Package }}{{ end }}{{ pascalize .Name }}Handler sets the operation handler for the {{ humanize .Name }} operation
+ {{ if ne .Package $package }}{{ pascalize .Package }}{{ end }}{{ pascalize .Name }}Handler {{ if ne .Package $package }}{{ .PackageAlias }}.{{ end }}{{ pascalize .Name }}Handler
+ {{- end }}
+
+ // ServeError is called when an error is received; there is a default handler,
+ // but you can set your own with this
+ ServeError func(http.ResponseWriter, *http.Request, error)
+
+ // PreServerShutdown is called before the HTTP(S) server is shutdown
+ // This allows for custom functions to get executed before the HTTP(S) server stops accepting traffic
+ PreServerShutdown func()
+
+ // ServerShutdown is called when the HTTP(S) server has shut down, has finished
+ // handling all active connections, and no longer accepts new ones
+ ServerShutdown func()
+
+ // Custom command line argument groups with their descriptions
+ CommandLineOptionsGroups []swag.CommandLineOptionsGroup
+
+ // User defined logger function.
+ Logger func(string, ...interface{})
+}
+
+// UseRedoc for documentation at /docs
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) UseRedoc() {
+ {{.ReceiverName}}.useSwaggerUI = false
+}
+
+// UseSwaggerUI for documentation at /docs
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) UseSwaggerUI() {
+ {{.ReceiverName}}.useSwaggerUI = true
+}
+
+// SetDefaultProduces sets the default produces media type
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) SetDefaultProduces(mediaType string) {
+ {{.ReceiverName}}.defaultProduces = mediaType
+}
+
+// SetDefaultConsumes sets the default consumes media type
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) SetDefaultConsumes(mediaType string) {
+ {{.ReceiverName}}.defaultConsumes = mediaType
+}
+
+// SetSpec sets a spec that will be served for the clients.
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) SetSpec(spec *loads.Document) {
+ {{.ReceiverName}}.spec = spec
+}
+
+// DefaultProduces returns the default produces media type
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) DefaultProduces() string {
+ return {{.ReceiverName}}.defaultProduces
+}
+
+// DefaultConsumes returns the default consumes media type
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) DefaultConsumes() string {
+ return {{.ReceiverName}}.defaultConsumes
+}
+
+// Formats returns the registered string formats
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) Formats() strfmt.Registry {
+ return {{.ReceiverName}}.formats
+}
+
+// RegisterFormat registers a custom format validator
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) RegisterFormat(name string, format strfmt.Format, validator strfmt.Validator) {
+ {{.ReceiverName}}.formats.Add(name, format, validator)
+}
+
+// Validate validates the registrations in the {{ pascalize .Name }}API
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) Validate() error {
+ var unregistered []string
+ {{ range .Consumes }}
+ if {{.ReceiverName}}.{{ pascalize .Name }}Consumer == nil {
+ unregistered = append(unregistered, "{{ pascalize .Name }}Consumer")
+ }
+ {{- end }}
+ {{ range .Produces }}
+ if {{.ReceiverName}}.{{ pascalize .Name }}Producer == nil {
+ unregistered = append(unregistered, "{{ pascalize .Name }}Producer")
+ }
+ {{- end }}
+ {{ range .SecurityDefinitions }}
+ if {{.ReceiverName}}.{{ pascalize .ID }}Auth == nil {
+ unregistered = append(unregistered, "{{if .IsAPIKeyAuth }}{{ pascalize .Name }}{{ else }}{{ pascalize .ID }}{{ end }}Auth")
+ }
+ {{- end }}
+ {{ range .Operations }}
+ if {{.ReceiverName}}.{{ if ne .Package $package }}{{ pascalize .Package }}{{ end }}{{ pascalize .Name }}Handler == nil {
+ unregistered = append(unregistered, "{{ if ne .Package $package }}{{ .Package }}.{{ end }}{{ pascalize .Name }}Handler")
+ }
+ {{- end }}
+
+ if len(unregistered) > 0 {
+ return fmt.Errorf("missing registration: %s", strings.Join(unregistered, ", "))
+ }
+
+ return nil
+}
+// ServeErrorFor gets an error handler for a given operation id
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) ServeErrorFor(operationID string) func(http.ResponseWriter, *http.Request, error) {
+ return {{.ReceiverName}}.ServeError
+}
+// AuthenticatorsFor gets the authenticators for the specified security schemes
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) AuthenticatorsFor(schemes map[string]spec.SecurityScheme) map[string]runtime.Authenticator {
+ {{- if .SecurityDefinitions }}
+ result := make(map[string]runtime.Authenticator)
+ for name := range schemes {
+ switch name {
+ {{- range .SecurityDefinitions }}
+ case "{{.ID}}":
+ {{- if .IsBasicAuth }}
+ result[name] = {{.ReceiverName}}.BasicAuthenticator({{ if not ( eq .Principal "interface{}" ) }}func(username, password string) (interface{}, error) {
+ return {{ end }}{{.ReceiverName}}.{{ pascalize .ID }}Auth{{ if not ( eq .Principal "interface{}" ) }}(username, password)
+ }{{ end }})
+ {{- end }}
+ {{- if .IsAPIKeyAuth }}
+ scheme := schemes[name]
+ result[name] = {{.ReceiverName}}.APIKeyAuthenticator(scheme.Name, scheme.In, {{ if not ( eq .Principal "interface{}" ) }}func(token string) (interface{}, error) {
+ return {{ end }}{{.ReceiverName}}.{{ pascalize .ID }}Auth{{ if not ( eq .Principal "interface{}" ) }}(token)
+ }{{ end }})
+ {{- end }}
+ {{- if .IsOAuth2 }}
+ result[name] = {{.ReceiverName}}.BearerAuthenticator(name, {{ if not ( eq .Principal "interface{}" ) }}func(token string, scopes []string) (interface{}, error) {
+ return {{ end }}{{.ReceiverName}}.{{ pascalize .ID }}Auth{{ if not ( eq .Principal "interface{}" ) }}(token, scopes)
+ }{{ end }})
+ {{- end }}
+ {{end}}
+ }
+ }
+ return result
+ {{- else }}
+ return nil
+ {{- end }}
+}
+
+// Authorizer returns the registered authorizer
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) Authorizer() runtime.Authorizer {
+ {{- if .SecurityDefinitions }}
+ return {{.ReceiverName}}.APIAuthorizer
+ {{- else }}
+ return nil
+ {{- end }}
+}
+
+// ConsumersFor gets the consumers for the specified media types.
+// MIME type parameters are ignored here.
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) ConsumersFor(mediaTypes []string) map[string]runtime.Consumer {
+ {{- if .Consumes }}
+ result := make(map[string]runtime.Consumer, len(mediaTypes))
+ for _, mt := range mediaTypes {
+ switch mt {
+ {{- range .Consumes }}
+ {{- range .AllSerializers }}
+ case "{{ .MediaType }}":
+ result["{{ .MediaType }}"] = {{.ReceiverName}}.{{ pascalize .Name }}Consumer
+ {{- end }}
+ {{- end }}
+ }
+
+ if c, ok := {{.ReceiverName}}.customConsumers[mt]; ok {
+ result[mt] = c
+ }
+ }
+ return result
+ {{- else }}
+ return nil
+ {{- end }}
+}
+
+// ProducersFor gets the producers for the specified media types.
+// MIME type parameters are ignored here.
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) ProducersFor(mediaTypes []string) map[string]runtime.Producer {
+ {{- if .Produces }}
+ result := make(map[string]runtime.Producer, len(mediaTypes))
+ for _, mt := range mediaTypes {
+ switch mt {
+ {{- range .Produces }}
+ {{- range .AllSerializers }}
+ case "{{ .MediaType }}":
+ result["{{ .MediaType }}"] = {{.ReceiverName}}.{{ pascalize .Name }}Producer
+ {{- end }}
+ {{- end }}
+ }
+
+ if p, ok := {{.ReceiverName}}.customProducers[mt]; ok {
+ result[mt] = p
+ }
+ }
+ return result
+ {{- else }}
+ return nil
+ {{- end }}
+}
+
+// HandlerFor gets a http.Handler for the provided operation method and path
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) HandlerFor(method, path string) (http.Handler, bool) {
+ if {{.ReceiverName}}.handlers == nil {
+ return nil, false
+ }
+ um := strings.ToUpper(method)
+ if _, ok := {{.ReceiverName}}.handlers[um]; !ok {
+ return nil, false
+ }
+ if path == "/" {
+ path = ""
+ }
+ h, ok := {{.ReceiverName}}.handlers[um][path]
+ return h, ok
+}
+
+// Context returns the middleware context for the {{ humanize .Name }} API
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) Context() *middleware.Context {
+ if {{.ReceiverName}}.context == nil {
+ {{.ReceiverName}}.context = middleware.NewRoutableContext({{.ReceiverName}}.spec, {{.ReceiverName}}, nil)
+ }
+
+ return {{ .ReceiverName }}.context
+}
+
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) initHandlerCache() {
+ {{.ReceiverName}}.Context() // don't care about the result, just that the initialization happened
+ {{- if .Operations }}
+ if {{ .ReceiverName }}.handlers == nil {
+ {{.ReceiverName}}.handlers = make(map[string]map[string]http.Handler)
+ }
+ {{ range .Operations }}
+ if {{ .ReceiverName }}.handlers[{{ printf "%q" (upper .Method) }}] == nil {
+ {{ .ReceiverName }}.handlers[{{ printf "%q" (upper .Method) }}] = make(map[string]http.Handler)
+ }
+ {{.ReceiverName}}.handlers[{{ printf "%q" (upper .Method) }}][{{ if eq .Path "/" }}""{{ else }}{{ printf "%q" (cleanPath .Path) }}{{ end }}] = {{ if ne .Package $package }}{{ .PackageAlias }}.{{ end }}New{{ pascalize .Name }}({{.ReceiverName}}.context, {{.ReceiverName}}.{{if ne .Package $package}}{{ pascalize .Package }}{{end}}{{ pascalize .Name }}Handler)
+ {{- end }}
+ {{- end }}
+}
+
+// Serve creates an http handler to serve the API over HTTP
+// can be used directly in http.ListenAndServe(":8000", api.Serve(nil))
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) Serve(builder middleware.Builder) http.Handler {
+ {{ .ReceiverName }}.Init()
+
+ if {{ .ReceiverName}}.Middleware != nil {
+ return {{ .ReceiverName }}.Middleware(builder)
+ }
+ if {{.ReceiverName}}.useSwaggerUI {
+ return {{.ReceiverName}}.context.APIHandlerSwaggerUI(builder)
+ }
+ return {{.ReceiverName}}.context.APIHandler(builder)
+}
+
+// Init allows you to just initialize the handler cache; you can then recompose the middleware as you see fit
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) Init() {
+ if len({{.ReceiverName}}.handlers) == 0 {
+ {{.ReceiverName}}.initHandlerCache()
+ }
+}
+
+// RegisterConsumer allows you to add (or override) a consumer for a media type.
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) RegisterConsumer(mediaType string, consumer runtime.Consumer) {
+ {{.ReceiverName}}.customConsumers[mediaType] = consumer
+}
+
+// RegisterProducer allows you to add (or override) a producer for a media type.
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) RegisterProducer(mediaType string, producer runtime.Producer) {
+ {{.ReceiverName}}.customProducers[mediaType] = producer
+}
+
+// AddMiddlewareFor adds an http middleware to an existing handler
+func ({{.ReceiverName}} *{{ pascalize .Name }}API) AddMiddlewareFor(method, path string, builder middleware.Builder) {
+ um := strings.ToUpper(method)
+ if path == "/" {
+ path = ""
+ }
+ {{.ReceiverName}}.Init()
+ if h, ok := {{.ReceiverName}}.handlers[um][path]; ok {
+ {{.ReceiverName}}.handlers[um][path] = builder(h)
+ }
+}
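
The RegisterConsumer/RegisterProducer and AddMiddlewareFor hooks above are meant to be called on the generated API value. A usage fragment, not a full program: api is a hypothetical generated *TodoListAPI, while runtime.ByteStreamConsumer and the middleware.Builder shape func(http.Handler) http.Handler are real go-openapi types.

```go
// assumes: api *operations.TodoListAPI (hypothetical generated type), plus
// imports "net/http" and "github.com/go-openapi/runtime"

// add (or override) a consumer for a media type the spec does not declare
api.RegisterConsumer("application/octet-stream", runtime.ByteStreamConsumer())

// wrap a single route with extra middleware (the builder is a func(http.Handler) http.Handler)
api.AddMiddlewareFor("GET", "/healthz", func(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Cache-Control", "no-store")
		next.ServeHTTP(w, r)
	})
})
```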
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/configureapi.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/configureapi.gotmpl
new file mode 100644
index 000000000..cbbb0bfd1
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/configureapi.gotmpl
@@ -0,0 +1,167 @@
+// This file is safe to edit. Once it exists it will not be overwritten
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .APIPackage }}
+
+import (
+ "context"
+ "crypto/tls"
+ "io"
+ "log"
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/runtime/middleware"
+ "github.com/go-openapi/runtime/security"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+{{ with .GenOpts }}
+//go:generate swagger generate server --target {{ .TargetPath }} --name {{ .Name }} --spec {{ .SpecPath }}
+{{- if .APIPackage }}{{ if ne .APIPackage "operations" }} --api-package {{ .APIPackage }}{{ end }}{{ end }}
+{{- if .ModelPackage }}{{ if ne .ModelPackage "models" }} --model-package {{ .ModelPackage }}{{ end }}{{ end }}
+{{- if .ServerPackage }}{{ if ne .ServerPackage "restapi"}} --server-package {{ .ServerPackage }}{{ end }}{{ end }}
+{{- if .ClientPackage }}{{ if ne .ClientPackage "client" }} --client-package {{ .ClientPackage }}{{ end }}{{ end }}
+{{- if .TemplateDir }} --template-dir {{ .TemplateDir }}{{ end }}
+{{- range .Operations }} --operation {{ . }}{{ end }}
+{{- range .Tags }} --tags {{ . }}{{ end }}
+{{- if .Principal }} --principal {{ .Principal }}{{ end }}
+{{- if .DefaultScheme }}{{ if ne .DefaultScheme "http" }} --default-scheme {{ .DefaultScheme }}{{ end }}{{ end }}
+{{- range .Models }} --model {{ . }}{{ end }}
+{{- if or (not .IncludeModel) (not .IncludeValidator) }} --skip-models{{ end }}
+{{- if or (not .IncludeHandler) (not .IncludeParameters ) (not .IncludeResponses) }} --skip-operations{{ end }}
+{{- if not .IncludeSupport }} --skip-support{{ end }}
+{{- if not .IncludeMain }} --exclude-main{{ end }}
+{{- if .ExcludeSpec }} --exclude-spec{{ end }}
+{{- if .DumpData }} --dump-data{{ end }}
+{{- if .StrictResponders }} --strict-responders{{ end }}
+{{ end }}
+func configureFlags(api *{{.APIPackageAlias}}.{{ pascalize .Name }}API) {
+ // api.CommandLineOptionsGroups = []swag.CommandLineOptionsGroup{ ... }
+}
+
+func configureAPI(api *{{.APIPackageAlias}}.{{ pascalize .Name }}API) http.Handler {
+ // configure the api here
+ api.ServeError = errors.ServeError
+
+ // Set your custom logger if needed. Default one is log.Printf
+ // Expected interface func(string, ...interface{})
+ //
+ // Example:
+ // api.Logger = log.Printf
+
+ api.UseSwaggerUI()
+ // To continue using redoc as your UI, uncomment the following line
+ // api.UseRedoc()
+
+ {{ range .Consumes }}
+ {{- if .Implementation }}
+ api.{{ pascalize .Name }}Consumer = {{ .Implementation }}
+ {{- else }}
+ api.{{ pascalize .Name }}Consumer = runtime.ConsumerFunc(func(r io.Reader, target interface{}) error {
+ return errors.NotImplemented("{{.Name}} consumer has not yet been implemented")
+ })
+ {{- end }}
+ {{- end }}
+ {{ range .Produces }}
+ {{- if .Implementation }}
+ api.{{ pascalize .Name }}Producer = {{ .Implementation }}
+ {{- else }}
+ api.{{ pascalize .Name }}Producer = runtime.ProducerFunc(func(w io.Writer, data interface{}) error {
+ return errors.NotImplemented("{{.Name}} producer has not yet been implemented")
+ })
+ {{- end }}
+ {{- end}}
+ {{ range .SecurityDefinitions }}
+ {{- if .IsBasicAuth }}
+ // Applies when the Authorization header is set with the Basic scheme
+ if api.{{ pascalize .ID }}Auth == nil {
+ api.{{ pascalize .ID }}Auth = func(user string, pass string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ return nil, errors.NotImplemented("basic auth ({{ .ID }}) has not yet been implemented")
+ }
+ }
+ {{- else if .IsAPIKeyAuth }}
+ // Applies when the "{{ .Name }}" {{ .Source }} is set
+ if api.{{ pascalize .ID }}Auth == nil {
+ api.{{ pascalize .ID }}Auth = func(token string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ return nil, errors.NotImplemented("api key auth ({{ .ID }}) {{.Name}} from {{.Source}} param [{{ .Name }}] has not yet been implemented")
+ }
+ }
+ {{- else if .IsOAuth2 }}
+ if api.{{ pascalize .ID }}Auth == nil {
+ api.{{ pascalize .ID }}Auth = func(token string, scopes []string) ({{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}, error) {
+ return nil, errors.NotImplemented("oauth2 bearer auth ({{ .ID }}) has not yet been implemented")
+ }
+ }
+ {{- end }}
+ {{- end }}
+ {{- if .SecurityDefinitions }}
+
+ // Set your custom authorizer if needed. Default one is security.Authorized()
+ // Expected interface runtime.Authorizer
+ //
+ // Example:
+ // api.APIAuthorizer = security.Authorized()
+ {{- end }}
+ {{- $package := .Package }}
+ {{- $apipackagealias := .APIPackageAlias }}
+ {{- range .Operations }}
+ {{- if .HasFormParams }}
+ // You may change the memory limit for this multipart form parser here. Below is the default (32 MB).
+ // {{ if ne .Package $package }}{{ .PackageAlias }}{{ else }}{{ $apipackagealias }}{{ end }}.{{ pascalize .Name }}MaxParseMemory = 32 << 20
+ {{- end }}
+ {{- end }}
+ {{ range .Operations }}
+ if api.{{ if ne .Package $package }}{{ pascalize .Package }}{{ end }}{{ pascalize .Name }}Handler == nil {
+ api.{{ if ne .Package $package }}{{pascalize .Package}}{{ end }}{{ pascalize .Name }}Handler =
+ {{- if ne .Package $package }}
+ {{- .PackageAlias }}.{{- pascalize .Name }}HandlerFunc(func(params {{ .PackageAlias }}.{{- pascalize .Name }}Params
+ {{- else }}
+ {{- $apipackagealias }}.{{- pascalize .Name }}HandlerFunc(func(params {{ $apipackagealias }}.{{- pascalize .Name }}Params
+ {{- end }}
+ {{- if $.GenOpts.StrictResponders }}
+ {{- if .Authorized}}, principal {{ if .PrincipalIsNullable }}*{{ end }}{{.Principal}}{{end}}) {{.Package}}.{{ pascalize .Name }}Responder {
+ return {{.Package}}.{{ pascalize .Name }}NotImplemented()
+ {{ else }}
+ {{- if .Authorized}}, principal {{if .PrincipalIsNullable }}*{{ end }}{{.Principal}}{{end}}) middleware.Responder {
+ return middleware.NotImplemented("operation {{ .Package}}.{{pascalize .Name}} has not yet been implemented")
+ {{ end -}}
+ })
+ }
+ {{- end }}
+
+ api.PreServerShutdown = func() { }
+
+ api.ServerShutdown = func() { }
+
+ return setupGlobalMiddleware(api.Serve(setupMiddlewares))
+}
+
+// The TLS configuration before the HTTPS server starts.
+func configureTLS(tlsConfig *tls.Config) {
+ // Make all necessary changes to the TLS configuration here.
+}
+
+// As soon as the server is initialized, but before it runs, this function will be called.
+// If you need to modify the config, or store the server instance to stop it individually later, this is the place.
+// This function can be called multiple times, depending on the number of serving schemes.
+// scheme value will be set accordingly: "http", "https" or "unix".
+func configureServer(s *http.Server, scheme, addr string) {
+}
+
+// The middleware configuration is for the handler executors. These do not apply to the swagger.json document.
+// The middleware executes after routing but before authentication, binding and validation.
+func setupMiddlewares(handler http.Handler) http.Handler {
+ return handler
+}
+
+// The middleware configuration happens before anything else; this middleware also applies to serving the swagger.json document.
+// So this is a good place to plug in a panic handling middleware, logging and metrics.
+func setupGlobalMiddleware(handler http.Handler) http.Handler {
+ return handler
+}
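
setupGlobalMiddleware above returns the handler unchanged; per its comment, it is the natural place for panic recovery. A minimal, self-contained sketch (the function name is illustrative; in practice you would edit the stub in place):

```go
package restapi // hypothetical: the generated server package this file lives in

import (
	"log"
	"net/http"
)

// setupGlobalMiddlewareWithRecovery recovers from panics in any downstream
// handler and returns a 500 instead of killing the connection.
func setupGlobalMiddlewareWithRecovery(handler http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		defer func() {
			if rec := recover(); rec != nil {
				log.Printf("panic serving %s: %v", r.URL.Path, rec)
				http.Error(w, "internal server error", http.StatusInternalServerError)
			}
		}()
		handler.ServeHTTP(w, r)
	})
}
```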
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/doc.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/doc.gotmpl
new file mode 100644
index 000000000..b51734aa4
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/doc.gotmpl
@@ -0,0 +1,63 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{- if .Copyright }}
+// {{ comment .Copyright }}
+{{- end }}
+
+
+// Package {{ .APIPackage }} {{ if .Info.Title }}{{ comment .Info.Title }}{{ else }}{{ comment (humanize .Name) }}{{end}}
+//
+{{- if .Info.Description }}
+// {{ comment .Info.Description " " }}
+{{- end }}
+{{- if .Info.TermsOfService }}
+// Terms Of Service:
+// {{ comment .Info.TermsOfService " " }}
+{{- end }}
+{{- if or .Schemes .Host .BasePath .Info }}
+ {{- if .Schemes }}
+// Schemes:
+ {{- range .Schemes }}
+// {{ . }}
+ {{- end }}
+ {{- end }}
+ {{- if .Host }}
+// Host: {{ .Host }}
+ {{- end }}
+ {{- if .BasePath }}
+// BasePath: {{ .BasePath }}
+ {{- end}}
+ {{- with .Info }}
+ {{- if .Version }}
+// Version: {{ .Version }}
+ {{- end }}
+ {{- if .License }}
+// License: {{ if .License.Name }}{{ .License.Name}} {{ end }}{{ if .License.URL }}{{ .License.URL }}{{ end }}
+ {{- end }}
+ {{- if .Contact }}
+// Contact: {{ if .Contact.Name }}{{ .Contact.Name }}{{ end }}{{ if .Contact.Email }}<{{ .Contact.Email }}>{{ end }}{{ if .Contact.URL }} {{ .Contact.URL }}{{ end }}
+ {{- end }}
+ {{- end }}
+{{- end }}
+{{- if .Consumes }}
+//
+// Consumes:
+ {{- range .Consumes }}
+ {{- range .AllSerializers }}
+// - {{ .MediaType -}}
+ {{- end }}
+ {{- end }}
+{{- end }}
+{{- if .Produces }}
+//
+// Produces:
+ {{- range .Produces }}
+ {{- range .AllSerializers }}
+// - {{ .MediaType -}}
+ {{- end }}
+ {{- end }}
+{{- end }}
+//
+// swagger:meta
+package {{ .APIPackage }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/main.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/main.gotmpl
new file mode 100644
index 000000000..a6447ede7
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/main.gotmpl
@@ -0,0 +1,186 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+ {{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package main
+
+import (
+ "fmt"
+ "log"
+ "net/http"
+ "os"
+
+ "github.com/go-openapi/loads"
+ {{- if .UseGoStructFlags }}
+ flags "github.com/jessevdk/go-flags"
+ {{- end }}
+ {{- if .UsePFlags }}
+ flag "github.com/spf13/pflag"
+ {{- end }}
+ {{- if .UseFlags }}
+ "flag"
+ {{- end }}
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+// This file was generated by the swagger tool.
+// Make sure not to overwrite this file after you generate it, because all your edits would be lost!
+{{ if .ExcludeSpec }}
+func init() {
+ loads.AddLoader(fmts.YAMLMatcher, fmts.YAMLDoc)
+}
+{{ end }}
+
+func main() {
+ {{ if .UsePFlags }}
+ {{- if not .ExcludeSpec }}
+ swaggerSpec, err := loads.Embedded({{ .ServerPackageAlias }}.SwaggerJSON, {{ .ServerPackageAlias }}.FlatSwaggerJSON)
+ if err != nil {
+ log.Fatalln(err)
+ }
+ {{- end }}
+
+ var server *{{ .ServerPackageAlias }}.Server // make sure init is called
+
+ flag.Usage = func() {
+ fmt.Fprint(os.Stderr, "Usage:\n")
+ fmt.Fprint(os.Stderr, " {{ dasherize .Name }}-server [OPTIONS]\n\n")
+
+ title := {{ if .Info }}{{ if .Info.Title }}{{ printf "%q" .Info.Title }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Title{{ end }}{{ end }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Title{{ end }}{{ end}}
+ fmt.Fprint(os.Stderr, title+"\n\n")
+ desc := {{ if .Info }}{{ if .Info.Description }}{{ printf "%q" .Info.Description }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Description{{ end }}{{ end }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Description{{ end }}{{ end}}
+ if desc != "" {
+ fmt.Fprintf(os.Stderr, desc+"\n\n")
+ }
+ fmt.Fprintln(os.Stderr, flag.CommandLine.FlagUsages())
+ }
+ // parse the CLI flags
+ flag.Parse()
+ {{- if .ExcludeSpec }}
+
+ server = {{ .ServerPackageAlias }}.NewServer(nil)
+ swaggerSpec, err := loads.Spec(string(server.Spec))
+ if err != nil {
+ log.Fatalln(err)
+ }
+ api := {{.APIPackageAlias}}.New{{ pascalize .Name }}API(swaggerSpec)
+ server.SetAPI(api)
+ {{- else }}
+
+ api := {{.APIPackageAlias}}.New{{ pascalize .Name }}API(swaggerSpec)
+ // get server with flag values filled out
+ server = {{ .ServerPackageAlias }}.NewServer(api)
+ {{- end }}
+ defer server.Shutdown()
+
+ server.ConfigureAPI()
+ if err := server.Serve(); err != nil {
+ log.Fatalln(err)
+ }
+ {{ end }}
+ {{ if .UseGoStructFlags}}
+ {{- if .ExcludeSpec }}
+ server := {{ .ServerPackageAlias }}.NewServer(nil)
+ {{- else }}
+ swaggerSpec, err := loads.Embedded({{ .ServerPackageAlias }}.SwaggerJSON, {{ .ServerPackageAlias }}.FlatSwaggerJSON)
+ if err != nil {
+ log.Fatalln(err)
+ }
+
+ api := {{.APIPackageAlias}}.New{{ pascalize .Name }}API(swaggerSpec)
+ server := {{ .ServerPackageAlias }}.NewServer(api)
+ defer server.Shutdown()
+ {{- end }}
+
+ parser := flags.NewParser(server, flags.Default)
+ parser.ShortDescription = {{ if .Info }}{{ if .Info.Title }}{{ printf "%q" .Info.Title }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Title{{ end }}{{ end }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Title{{ end }}{{ end}}
+ parser.LongDescription = {{ if .Info }}{{ if .Info.Description }}{{ printf "%q" .Info.Description }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Description{{ end }}{{ end }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Description{{ end }}{{ end}}
+
+ {{- if not .ExcludeSpec }}
+ server.ConfigureFlags()
+ for _, optsGroup := range api.CommandLineOptionsGroups {
+ _, err := parser.AddGroup(optsGroup.ShortDescription, optsGroup.LongDescription, optsGroup.Options)
+ if err != nil {
+ log.Fatalln(err)
+ }
+ }
+ {{- end }}
+
+ if _, err := parser.Parse(); err != nil {
+ code := 1
+ if fe, ok := err.(*flags.Error); ok {
+ if fe.Type == flags.ErrHelp {
+ code = 0
+ }
+ }
+ os.Exit(code)
+ }
+ {{- if .ExcludeSpec }}
+ swaggerSpec, err := loads.Spec(string(server.Spec))
+ if err != nil {
+ log.Fatalln(err)
+ }
+
+ api := {{.APIPackageAlias}}.New{{ pascalize .Name }}API(swaggerSpec)
+ server.SetAPI(api)
+ defer server.Shutdown()
+ {{- end }}
+
+ server.ConfigureAPI()
+
+ if err := server.Serve(); err != nil {
+ log.Fatalln(err)
+ }
+ {{ end }}
+ {{ if .UseFlags}}
+ {{- if not .ExcludeSpec }}
+
+ swaggerSpec, err := loads.Embedded({{ .ServerPackageAlias }}.SwaggerJSON, {{ .ServerPackageAlias }}.FlatSwaggerJSON)
+ if err != nil {
+ log.Fatalln(err)
+ }
+ {{- end }}
+ var server *{{ .ServerPackageAlias }}.Server // make sure init is called
+
+ flag.Usage = func() {
+ fmt.Fprint(os.Stderr, "Usage:\n")
+ fmt.Fprint(os.Stderr, " {{ dasherize .Name }}-server [OPTIONS]\n\n")
+
+ title := {{ if .Info }}{{ if .Info.Title }}{{ printf "%q" .Info.Title }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Title{{ end }}{{ end }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Title{{ end }}{{ end}}
+ fmt.Fprint(os.Stderr, title+"\n\n")
+ desc := {{ if .Info }}{{ if .Info.Description }}{{ printf "%q" .Info.Description }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Description{{ end }}{{ end }}{{ else }}{{ if .ExcludeSpec }}""{{ else }}swaggerSpec.Spec().Info.Description{{ end }}{{ end}}
+ if desc != "" {
+ fmt.Fprintf(os.Stderr, desc+"\n\n")
+ }
+ flag.CommandLine.SetOutput(os.Stderr)
+ flag.PrintDefaults()
+ }
+ // parse the CLI flags
+ flag.Parse()
+
+ {{- if .ExcludeSpec }}
+
+ server = {{ .ServerPackageAlias }}.NewServer(nil)
+ swaggerSpec, err := loads.Spec(string(server.Spec))
+ if err != nil {
+ log.Fatalln(err)
+ }
+ api := {{.APIPackageAlias}}.New{{ pascalize .Name }}API(swaggerSpec)
+ server.SetAPI(api)
+ {{- else }}
+ api := {{.APIPackageAlias}}.New{{ pascalize .Name }}API(swaggerSpec)
+ // get server with flag values filled out
+ server = {{ .ServerPackageAlias }}.NewServer(api)
+ {{- end }}
+ defer server.Shutdown()
+
+ server.ConfigureAPI()
+ if err := server.Serve(); err != nil {
+ log.Fatalln(err)
+ }
+ {{ end }}
+}
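
Rendered with the pflag strategy for a hypothetical todo-list service, the template above boils down to this main; the example.com import paths and TodoList names are placeholders for the generated packages.

```go
package main

import (
	"log"

	"github.com/go-openapi/loads"
	flag "github.com/spf13/pflag"

	"example.com/todo/restapi"            // hypothetical generated package
	"example.com/todo/restapi/operations" // hypothetical generated package
)

func main() {
	// load the spec embedded at generation time
	swaggerSpec, err := loads.Embedded(restapi.SwaggerJSON, restapi.FlatSwaggerJSON)
	if err != nil {
		log.Fatalln(err)
	}

	// parse the CLI flags
	flag.Parse()

	api := operations.NewTodoListAPI(swaggerSpec)
	server := restapi.NewServer(api) // picks up the parsed flag values
	defer server.Shutdown()

	server.ConfigureAPI()
	if err := server.Serve(); err != nil {
		log.Fatalln(err)
	}
}
```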
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/operation.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/operation.gotmpl
new file mode 100644
index 000000000..041c00e44
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/operation.gotmpl
@@ -0,0 +1,92 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the generate command
+
+import (
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime/middleware"
+ "github.com/go-openapi/runtime/security"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/validate"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+// {{ pascalize .Name }}HandlerFunc turns a function with the right signature into a {{ humanize .Name }} handler
+type {{ pascalize .Name }}HandlerFunc func({{ pascalize .Name }}Params{{ if .Authorized }}, {{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}{{ end }}) {{ if $.StrictResponders }} {{ pascalize .Name }}Responder {{else}} middleware.Responder {{end}}
+
+// Handle executing the request and returning a response
+func (fn {{ pascalize .Name }}HandlerFunc) Handle(params {{ pascalize .Name }}Params{{ if .Authorized }}, principal {{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}{{ end }}) {{ if $.StrictResponders }} {{ pascalize .Name }}Responder {{else}} middleware.Responder {{end}} {
+ return fn(params{{ if .Authorized }}, principal{{ end }})
+}
+
+// {{ pascalize .Name }}Handler is the interface that can handle valid {{ humanize .Name }} params
+type {{ pascalize .Name }}Handler interface {
+ Handle({{ pascalize .Name }}Params{{ if .Authorized }}, {{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}{{ end }}) {{ if $.StrictResponders }} {{ pascalize .Name }}Responder {{else}} middleware.Responder {{end}}
+}
+
+// New{{ pascalize .Name }} creates a new http.Handler for the {{ humanize .Name }} operation
+func New{{ pascalize .Name }}(ctx *middleware.Context, handler {{ pascalize .Name }}Handler) *{{ pascalize .Name }} {
+ return &{{ pascalize .Name }}{Context: ctx, Handler: handler}
+}
+
+/* {{ pascalize .Name }} swagger:route {{ .Method }} {{ .Path }}{{ range .Tags }} {{ . }}{{ end }} {{ camelize .Name }}
+
+{{ if .Summary }}{{ .Summary }}{{ if .Description }}
+
+{{ blockcomment .Description }}{{ end }}{{ else if .Description}}{{ blockcomment .Description }}{{ else }}{{ pascalize .Name }} {{ humanize .Name }} API{{ end }}
+
+*/
+type {{ pascalize .Name }} struct {
+ Context *middleware.Context
+ Handler {{ pascalize .Name }}Handler
+}
+
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) ServeHTTP(rw http.ResponseWriter, r *http.Request) {
+ route, rCtx, _ := {{ .ReceiverName }}.Context.RouteInfo(r)
+ if rCtx != nil {
+ *r = *rCtx
+ }
+ var Params = New{{ pascalize .Name }}Params()
+
+ {{- if .Authorized }}
+ uprinc, aCtx, err := {{ .ReceiverName }}.Context.Authorize(r, route)
+ if err != nil {
+ {{ .ReceiverName }}.Context.Respond(rw, r, route.Produces, route, err)
+ return
+ }
+ if aCtx != nil {
+ *r = *aCtx
+ }
+ var principal {{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}
+ if uprinc != nil {
+ principal = {{ if eq .Principal "interface{}" }}uprinc{{ else }}uprinc.({{ if .PrincipalIsNullable }}*{{ end }}{{ .Principal }}) // this is really a {{ .Principal }}, I promise{{ end }}
+ }
+ {{ end }}
+ if err := {{ .ReceiverName }}.Context.BindValidRequest(r, route, &Params); err != nil { // bind params
+ {{ .ReceiverName }}.Context.Respond(rw, r, route.Produces, route, err)
+ return
+ }
+
+ res := {{ .ReceiverName }}.Handler.Handle(Params{{ if .Authorized }}, principal{{ end }}) // actually handle the request
+ {{ .ReceiverName }}.Context.Respond(rw, r, route.Produces, route, res)
+
+}
+
+{{ range .ExtraSchemas }}
+// {{ .Name }} {{ template "docstring" . }}
+//
+// swagger:model {{ .Name }}
+ {{- template "schema" . }}
+{{- end }}
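
To make the wiring above concrete, here is a minimal, dependency-free sketch of the code shape this template emits for a hypothetical `getUser` operation. It is an illustration only: the real generated code routes through go-swagger's `middleware.Context` (route lookup, authorization, content negotiation), which is replaced here by plain `net/http`, and every name (`GetUserParams`, `GetUser`, `user_id`) is assumed for the example.

```go
package main

import (
	"encoding/json"
	"net/http"
)

// GetUserParams stands in for the generated Params struct bound from the request.
type GetUserParams struct {
	HTTPRequest *http.Request `json:"-"`
	UserID      string
}

// GetUserHandlerFunc turns a function with the right signature into a handler,
// mirroring the {{ pascalize .Name }}HandlerFunc pattern above.
type GetUserHandlerFunc func(GetUserParams) any

// Handle executes the request and returns a response value.
func (fn GetUserHandlerFunc) Handle(params GetUserParams) any { return fn(params) }

// GetUser pairs a router with a handler, like the generated operation struct
// pairs middleware.Context with its Handler.
type GetUser struct {
	Handler GetUserHandlerFunc
}

func (o *GetUser) ServeHTTP(rw http.ResponseWriter, r *http.Request) {
	// bind params (the generated code calls BindValidRequest here)
	params := GetUserParams{HTTPRequest: r, UserID: r.URL.Query().Get("user_id")}
	res := o.Handler.Handle(params) // actually handle the request
	rw.Header().Set("Content-Type", "application/json")
	_ = json.NewEncoder(rw).Encode(res) // the generated code negotiates a producer instead
}

func main() {
	http.Handle("/users", &GetUser{
		Handler: func(p GetUserParams) any {
			return map[string]string{"id": p.UserID}
		},
	})
	_ = http.ListenAndServe(":8080", nil)
}
```

The shape is the point: a function type satisfying a one-method handler interface, wrapped by a struct whose `ServeHTTP` binds parameters, optionally authorizes a principal, and delegates.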
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/parameter.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/parameter.gotmpl
new file mode 100644
index 000000000..1000a9f95
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/parameter.gotmpl
@@ -0,0 +1,720 @@
+{{ define "bindprimitiveparam" }}{{/* an empty test definition to test template repo dependencies resolution - DO NOT CHANGE THIS */}}
+{{ end }}
+{{ define "bodyvalidator" }}
+ {{- if .HasModelBodyParams }}
+ // validate body object{{/* delegate validation to model object */}}
+ if err := body.Validate(route.Formats); err != nil {
+ res = append(res, err)
+ }
+
+ ctx := validate.WithOperationRequest(r.Context())
+ if err := body.ContextValidate(ctx, route.Formats); err != nil {
+ res = append(res, err)
+ }
+
+ if len(res) == 0 {
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ }
+ {{- else if and .HasSimpleBodyParams .HasModelBodyItems }}
+
+ {{- if or .Schema.HasSliceValidations .Schema.Items.HasValidations }}
+
+ // validate array of body objects
+ {{- end }}
+
+ {{- if .Schema.HasSliceValidations }}
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ {{ template "sliceparamvalidator" . }}
+ {{- end }}
+
+ {{- if and .Schema.Items.HasValidations (not (or .Schema.Items.IsInterface .Schema.Items.IsStream)) }}
+ for {{ .IndexVar }} := range body {
+ {{- if .Schema.Items.IsNullable }}
+ if body[{{ .IndexVar }}] == nil {
+ {{- if .Schema.Items.Required }}
+ res = append(res, errors.Required({{ .Child.Path }}, {{ printf "%q" .Child.Location }}, body[{{ .IndexVar }}]))
+ break
+ {{- else }}
+ continue
+ {{- end }}
+ }
+ {{- end }}
+ if err := body[{{ .IndexVar }}].Validate(route.Formats); err != nil {
+ res = append(res, err)
+ break
+ }
+ }
+
+ {{- if not .Schema.HasSliceValidations }}
+
+ if len(res) == 0 {
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ }
+ {{- end }}
+ {{- else }}
+ // no validation for items in this slice
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ {{- end }}
+
+ {{- else if and .HasSimpleBodyParams .HasModelBodyMap }}
+
+ {{- if and .Schema.HasValidations (not (or .Schema.AdditionalProperties.IsInterface .Schema.AdditionalProperties.IsStream)) }}
+ // validate map of body objects
+ for {{ .KeyVar }} := range body {
+ {{- if .Schema.AdditionalProperties.Required }}
+ if err := validate.Required({{ if .Child.Path }}{{ .Child.Path }}{{ else }}""{{ end }}, {{ printf "%q" .Child.Location }}, {{ if not .IsAnonymous }}{{ .Schema.GoType }}({{ end }}body[{{ .KeyVar }}]{{ if not .IsAnonymous }}){{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+ {{- if and .Schema.AdditionalProperties.IsNullable (not .IsMapNullOverride) }}
+ if body[{{ .KeyVar }}] == nil {
+ {{- if .Schema.AdditionalProperties.Required }}
+ res = append(res, errors.Required({{ .Path }}, {{ printf "%q" .Location }}, body[{{ .KeyVar }}]))
+ break
+ {{- else }}
+ continue
+ {{- end }}
+ }
+ {{- end }}
+ if val, ok := body[{{ .KeyVar }}]; ok {
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ if val != nil {
+ {{- end }}
+ if err := val.Validate(route.Formats); err != nil {
+ res = append(res, err)
+ break
+ }
+ {{- if and .IsNullable (not .IsMapNullOverride) }}
+ }
+ {{- end }}
+ }
+ }
+
+ if len(res) == 0 {
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ }
+ {{- else }}
+ // no validation for this map
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ {{- end }}
+ {{- else if .HasSimpleBodyParams }}
+ {{- if and (not .IsArray) (not .IsMap) .Schema.HasValidations }}
+ // validate inline body
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ if err := {{ .ReceiverName }}.validate{{ pascalize .ID }}Body(route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- else if and (or .IsArray .IsMap) .Schema.HasValidations }}
+ // validate inline body {{ if .IsArray }}array{{ else }}map{{ end }}
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ if err := {{ .ReceiverName }}.validate{{ pascalize .ID }}Body(route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- else }}
+ // no validation required on inline body
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ {{- end}}
+ {{- else }}
+ {{- if .IsInterface }}
+ // no validation on generic interface
+ {{ .ReceiverName }}.{{ pascalize .Name }} = {{ if and (not .Schema.IsBaseType) .IsNullable }}&{{ end }}body
+ {{- end }}
+ {{- end }}
+{{- end }}
+
+{{ define "sliceparamvalidator"}}
+ {{- if or .MinItems .MaxItems }}
+
+ {{ camelize .Name }}Size := int64(len({{ if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) (not .IsStream) .IsNullable }}*{{ end }}{{ if and .Child (not (hasPrefix .ValueExpression "o.")) }}{{ .Child.ValueExpression }}C{{ else }}{{ .ValueExpression }}{{ end }}))
+ {{- end }}
+ {{- if .MinItems }}
+
+// {{ .ItemsDepth }}minItems: {{ .MinItems }}
+if err := validate.MinItems({{ .Path }}, {{ printf "%q" .Location }}, {{ camelize .Name }}Size, {{ .MinItems }}); err != nil {
+ return err
+}
+ {{- end }}
+ {{- if .MaxItems }}
+
+// {{ .ItemsDepth }}maxItems: {{ .MaxItems }}
+if err := validate.MaxItems({{ .Path }}, {{ printf "%q" .Location }}, {{ camelize .Name }}Size, {{.MaxItems}}); err != nil {
+ return err
+}
+ {{- end }}
+ {{- if .UniqueItems }}
+
+// {{ .ItemsDepth }}uniqueItems: true
+if err := validate.UniqueItems({{ .Path }}, {{ printf "%q" .Location }}, {{ if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) (not .IsStream) .IsNullable }}*{{ end }}{{ if and .Child (not ( hasPrefix .ValueExpression "o." )) }}{{ .Child.ValueExpression }}C{{ else }}{{ .ValueExpression }}{{ end }}); err != nil {
+ return err
+}
+ {{- end }}
+ {{- if .Enum }}
+
+// {{ .ItemsDepth }}Enum: {{ .Enum }}
+if err := validate.EnumCase(
+ {{- .Path }}, {{ printf "%q" .Location }},
+ {{- if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) (not .IsStream) .IsNullable }}*{{ end -}}
+ {{- if .Child -}}
+ {{- if not ( hasPrefix .ValueExpression "o." ) -}}
+ {{- .Child.ValueExpression }}C{{- if .IsCustomFormatter }}.String(){{ end -}}
+ {{- else -}}
+ {{- .ValueExpression -}}{{- if .Child.IsCustomFormatter }}.String(){{ end -}}
+ {{- end -}}
+ {{- end -}},
+ {{- printf "%#v" .Enum -}}, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+ }
+ {{- end }}
+{{- end }}
+
+{{- define "childvalidator" }}
+ {{- if .Converter }}
+ {{- if ne .SwaggerFormat "" }}
+ // {{ .ItemsDepth }}Format: {{ printf "%q" .SwaggerFormat }}
+ {{- end }}
+ {{ varname .ValueExpression }}, err := {{ .Converter }}({{ varname .ValueExpression }}V)
+ if err != nil {
+ return errors.InvalidType({{ .Path }}, {{ printf "%q" .Location }}, "{{ .GoType }}", {{ varname .ValueExpression }})
+ }
+ {{- else if and .IsCustomFormatter (not .SkipParse) }}{{/* parsing is skipped for simple body items */}}
+ // {{ .ItemsDepth }}Format: {{ printf "%q" .SwaggerFormat }}
+ value, err := formats.Parse({{ printf "%q" .SwaggerFormat }},{{ varname .ValueExpression }}V)
+ if err != nil {
+ return errors.InvalidType({{ .Path }}, {{ printf "%q" .Location }}, "{{ .GoType }}", value)
+ }
+ {{ varname .ValueExpression }} := *(value.(*{{.GoType}}))
+ {{- else if and .IsComplexObject .HasValidations }}{{/* dedicated to nested body params */}}
+ {{ varname .ValueExpression }} := {{ varname .ValueExpression }}V
+ if err := {{ .ValueExpression }}.Validate(formats) ; err != nil {
+ if ve, ok := err.(*errors.Validation); ok {
+ return ve.ValidateName({{ .Path }})
+ } else if ce, ok := err.(*errors.CompositeError); ok {
+ return ce.ValidateName({{ .Path }})
+ }
+ return err
+ }
+ {{- else }}
+ {{ varname .ValueExpression }} := {{ varname .ValueExpression }}V
+ {{- end }}
+ {{ template "propertyparamvalidator" . }}
+{{- end }}
+
+{{- define "mapparamvalidator" }}
+ {{- if and .Child.HasValidations (not (or .Child.IsInterface .Child.IsStream)) }}
+
+ // validations for map
+ {{- else }}
+
+ // map has no validations: copying all elements
+ {{- end }}
+ {{ varname .Child.ValueExpression }}R := make({{ .GoType }},len({{ .Child.ValueExpression }}C))
+ for {{ .KeyVar }}, {{ .Child.ValueExpression }}V := range {{ .Child.ValueExpression}}C {
+ {{- if .Child.IsArray }}
+ {{ .Child.Child.ValueExpression }}C := {{ varname .Child.ValueExpression }}V
+ {{- if .Child.HasSliceValidations }}
+ {{- template "sliceparamvalidator" .Child }}
+ {{- end }}
+ {{- template "sliceparambinder" .Child }}
+ {{- else if .Child.IsMap }}
+ {{ .Child.Child.ValueExpression }}C := {{ varname .Child.ValueExpression }}V
+ {{ template "mapparamvalidator" .Child }}
+ {{- else }}
+ {{- if and .Child.IsNullable }}
+ if {{ varname .Child.ValueExpression }}V == nil {
+ {{- if .Child.Required }}
+ return errors.Required({{ .Child.Path }}, {{ printf "%q" .Child.Location }}, {{ varname .Child.ValueExpression }}V)
+ {{- else }}
+ continue
+ {{- end }}
+ }
+ {{- end }}
+ {{- template "childvalidator" .Child }}
+ {{- end }}
+ {{ varname .Child.ValueExpression }}R[{{.KeyVar}}] = {{ varname .Child.ValueExpression }}{{ if or .Child.IsArray .Child.IsMap}}IR{{end}}
+ }
+{{- end }}
+
+{{- define "propertyparamvalidator" }}
+ {{- if .IsPrimitive }}
+ {{ template "validationPrimitive" . }}
+ {{- end }}
+ {{- if and .IsCustomFormatter (not .IsStream) (not .IsBase64) }}
+
+if err := validate.FormatOf({{.Path}}, "{{.Location}}", "{{.SwaggerFormat}}", {{ .ValueExpression}}.String(), formats); err != nil {
+ return err
+}
+ {{- end }}
+ {{- if .IsArray }}{{/* slice validations */}}
+ {{ template "sliceparamvalidator" . }}
+ {{- else if .IsMap }}
+ {{ .Child.ValueExpression }}C := {{ varname .Child.ValueExpression }}V
+ {{ template "mapparamvalidator" . }}
+ {{- end }}
+{{- end }}
+
+{{ define "sliceparambinder" }}
+var {{ varname .Child.ValueExpression }}R {{ .GoType }}
+for {{ if .Child.NeedsIndex }}{{ .IndexVar }}{{ else }}_{{ end }}, {{ varname .Child.ValueExpression }}V := range {{ varname .Child.ValueExpression }}C {
+ {{- if .Child.IsArray }}{{/* recursive resolution of arrays in params */}}
+ {{- if not .Child.SkipParse }}
+ // {{ .Child.ItemsDepth }}CollectionFormat: {{ .Child.CollectionFormat }}
+ {{- end }}
+ {{ .Child.Child.ValueExpression }}C := {{ if .Child.SkipParse }}{{ varname .Child.ValueExpression }}V{{ else }}swag.SplitByFormat({{ varname .Child.ValueExpression }}V, {{ printf "%q" .Child.CollectionFormat }}){{ end }}
+ {{- if .Child.HasSliceValidations }}
+ {{- template "sliceparamvalidator" .Child }}
+ {{- end }}
+ if len({{ varname .Child.Child.ValueExpression }}C) > 0 {
+ {{ template "sliceparambinder" .Child }}
+ {{ varname .Child.ValueExpression }}R = append({{ varname .Child.ValueExpression }}R, {{ varname .Child.ValueExpression }}{{ if or .Child.IsArray .Child.IsMap }}IR{{end}})
+ }
+ {{- else if .Child.IsMap }}{{/* simple map in items (possible with body params)*/}}
+ {{ .Child.Child.ValueExpression }}C := {{ varname .Child.ValueExpression }}V
+ {{- template "mapparamvalidator" .Child }}
+ {{ varname .Child.ValueExpression }}R = append({{ varname .Child.ValueExpression }}R, {{ varname .Child.ValueExpression }}{{ if or .Child.IsArray .Child.IsMap }}IR{{end}})
+ {{- else }}{{/* non-array && non-map type in items */}}
+ {{- if and .Child.IsNullable (not .IsMapNullOverride) }}
+ if {{ varname .Child.ValueExpression }}V == nil {
+ {{- if .Child.Required }}
+ return errors.Required({{ .Child.Path }}, {{ printf "%q" .Child.Location }}, {{ varname .Child.ValueExpression }}V)
+ {{- else }}
+ continue
+ {{- end }}
+ }
+ {{- end }}
+ {{- template "childvalidator" .Child }}
+ {{ varname .Child.ValueExpression }}R = append({{ varname .Child.ValueExpression }}R, {{ varname .Child.ValueExpression }}{{ if or .Child.IsArray .Child.IsMap }}IR{{end}})
+ {{- end }}
+}
+{{ end }}
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ "fmt"
+ "io"
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/runtime/security"
+ "github.com/go-openapi/runtime/middleware"
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/validate"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+{{- if .HasFormParams }}
+
+// {{ pascalize .Name }}MaxParseMemory sets the maximum size in bytes for
+// the multipart form parser for this operation.
+//
+// The default value is 32 MB.
+// The multipart parser stores up to this + 10MB.
+var {{ pascalize .Name }}MaxParseMemory int64 = 32 << 20
+{{- end }}
+
+// New{{ pascalize .Name }}Params creates a new {{ pascalize .Name }}Params object
+{{- if .Params.HasSomeDefaults }}
+// with the default values initialized.
+{{- else }}
+//
+// There are no default values defined in the spec.
+{{- end }}
+func New{{ pascalize .Name }}Params() {{ pascalize .Name }}Params {
+{{ if .Params.HasSomeDefaults }}
+ var (
+ // initialize parameters with default values
+ {{ range .Params }}
+ {{ if .HasDefault -}}
+ {{ if not .IsFileParam }}{{ varname .ID}}Default =
+ {{- if and .IsPrimitive .IsCustomFormatter (not (stringContains .Zero "(\"" )) }}{{ .Zero }}{{/* strfmt type initializer requires UnmarshalText(), e.g. Date, Datetime, Duration */}}
+ {{- else if and .IsPrimitive .IsCustomFormatter (stringContains .Zero "(\"" ) }}{{.GoType}}({{- printf "%#v" .Default }}){{/* strfmt type initializer takes string */}}
+ {{- else if and .IsPrimitive (not .IsCustomFormatter) -}}{{.GoType}}({{- printf "%#v" .Default }}){{/* regular go primitive type initializer */}}
+ {{- else if .IsArray -}}{{- /* Do not initialize from possible defaults in nested arrays */ -}}
+ {{- if and .Child.IsPrimitive .Child.IsCustomFormatter }}{{ .Zero }}{{/* initialization strategy with UnmarshalText() */}}
+ {{- else if .Child.IsArray -}}{{ .Zero }}{{/* initialization strategy with json.Unmarshal() */}}
+ {{- else if and .Child.IsPrimitive (not .Child.IsCustomFormatter) -}}{{.GoType}}{{- arrayInitializer .Default }}{{/* regular go primitive type initializer: simple slice initializer */}}
+ {{- else }}{{ printf "%#v" .Default }}{{/* all other cases (e.g. schema) [should not occur] */}}
+ {{- end }}
+ {{- else }}{{ printf "%#v" .Default }}{{/* case .Schema */}}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ )
+
+{{ range .Params }}{{ if .HasDefault -}}{{- /* carry out UnmarshalText initialization strategy */ -}}
+ {{ if and .IsPrimitive .IsCustomFormatter (not (stringContains .Zero "(\"")) }}{{ varname .ID}}Default.UnmarshalText([]byte({{ printf "%q" .Default }}))
+ {{ else if .IsArray -}}
+ {{ if or ( and .Child.IsPrimitive .Child.IsCustomFormatter ) .Child.IsArray -}}
+ if err := json.Unmarshal([]byte(`{{printf "%s" (json .Default)}}`), &{{ varname .ID }}Default); err != nil {
+ // panics if specification is invalid
+ msg := fmt.Sprintf("invalid default value for parameter {{ varname .ID }}: %v",err)
+ panic(msg)
+ }
+ {{ end -}}
+ {{- end }}
+ {{ end -}}
+{{- end }}
+{{ end }}
+ return {{ pascalize .Name }}Params{ {{ range .Params }}{{ if .HasDefault }}
+ {{ pascalize .ID}}: {{ if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) (not .IsStream) .IsNullable }}&{{ end }}{{ varname .ID }}Default,
+ {{ end }}{{ end }} }
+}
+
+// {{ pascalize .Name }}Params contains all the bound params for the {{ humanize .Name }} operation
+// typically these are obtained from an http.Request
+//
+// swagger:parameters {{ .Name }}
+type {{ pascalize .Name }}Params struct {
+
+ // HTTP Request Object
+ HTTPRequest *http.Request `json:"-"`
+
+ {{ range .Params }}/*{{ if .Description }}{{ blockcomment .Description }}{{ end }}{{ if .Required }}
+ Required: true{{ end }}{{ if .Maximum }}
+ Maximum: {{ if .ExclusiveMaximum }}< {{ end }}{{ .Maximum }}{{ end }}{{ if .Minimum }}
+ Minimum: {{ if .ExclusiveMinimum }}> {{ end }}{{ .Minimum }}{{ end }}{{ if .MultipleOf }}
+ Multiple Of: {{ .MultipleOf }}{{ end }}{{ if .MaxLength }}
+ Max Length: {{ .MaxLength }}{{ end }}{{ if .MinLength }}
+ Min Length: {{ .MinLength }}{{ end }}{{ if .Pattern }}
+ Pattern: {{ .Pattern }}{{ end }}{{ if .MaxItems }}
+ Max Items: {{ .MaxItems }}{{ end }}{{ if .MinItems }}
+ Min Items: {{ .MinItems }}{{ end }}{{ if .UniqueItems }}
+ Unique: true{{ end }}{{ if .Location }}
+ In: {{ .Location }}{{ end }}{{ if .CollectionFormat }}
+ Collection Format: {{ .CollectionFormat }}{{ end }}{{ if .HasDefault }}
+ Default: {{ printf "%#v" .Default }}{{ end }}
+ */
+ {{ if not .Schema }}{{ pascalize .ID }} {{ if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) (not .IsStream) .IsNullable }}*{{ end }}{{.GoType}}{{ else }}{{ pascalize .Name }} {{ if and (not .Schema.IsBaseType) .IsNullable (not .Schema.IsStream) }}*{{ end }}{{.GoType}}{{ end }}
+ {{ end}}
+}
+
+// BindRequest both binds and validates a request. It assumes that complex things implement a Validatable(strfmt.Registry) error interface;
+// for simple values it uses straight method calls.
+//
+// To ensure default values, the struct must have been initialized with New{{ pascalize .Name }}Params() beforehand.
+func ({{ .ReceiverName }} *{{ pascalize .Name }}Params) BindRequest(r *http.Request, route *middleware.MatchedRoute) error {
+ var res []error
+
+ {{ .ReceiverName }}.HTTPRequest = r
+
+{{- if .HasQueryParams }}
+
+ qs := runtime.Values(r.URL.Query())
+{{- end }}
+
+{{- if .HasFormParams }}
+
+ if err := r.ParseMultipartForm({{ pascalize .Name }}MaxParseMemory); err != nil {
+ if err != http.ErrNotMultipart {
+ return errors.New(400,"%v",err)
+ } else if err := r.ParseForm(); err != nil {
+ return errors.New(400,"%v",err)
+ }
+ }
+ {{- if .HasFormValueParams }}
+ fds := runtime.Values(r.Form)
+ {{- end }}
+{{- end }}
+{{ range .Params }}
+ {{- if not .IsArray }}
+ {{- if .IsQueryParam }}
+
+ q{{ pascalize .Name }}, qhk{{ pascalize .Name }}, _ := qs.GetOK({{ .Path }})
+ if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}(q{{ pascalize .Name }}, qhk{{ pascalize .Name }}, route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- else if .IsPathParam }}
+
+ r{{ pascalize .Name }}, rhk{{ pascalize .Name }}, _ := route.Params.GetOK({{ .Path }})
+ if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}(r{{ pascalize .Name }}, rhk{{ pascalize .Name }}, route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- else if .IsHeaderParam }}
+
+ if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}(r.Header[http.CanonicalHeaderKey({{ .Path }})], true, route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- else if .IsFormParam }}
+ {{- if .IsFileParam }}
+
+ {{ camelize .Name }}, {{ camelize .Name }}Header, err := r.FormFile({{ .Path }})
+ if err != nil {{ if .IsNullable }}&& err != http.ErrMissingFile{{ end }}{
+ res = append(res, errors.New(400, "reading file %q failed: %v", {{ printf "%q" (camelize .Name) }}, err))
+ {{- if .IsNullable }}
+ } else if err == http.ErrMissingFile {
+ // no-op for missing but optional file parameter
+ {{- end }}
+ } else if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}({{ camelize .Name }}, {{ camelize .Name }}Header); err != nil {
+ {{- if .Required }}
+ // Required: true
+ {{- end }}
+ res = append(res, err)
+ } else {
+ {{ .ReceiverName }}.{{ pascalize .Name }} = &runtime.File{Data: {{ camelize .Name }}, Header: {{ camelize .Name }}Header}
+ }
+ {{- else }}
+
+ fd{{ pascalize .Name }}, fdhk{{ pascalize .Name }}, _ := fds.GetOK({{ .Path }})
+ if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}(fd{{ pascalize .Name }}, fdhk{{ pascalize .Name }}, route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- end }}
+ {{- end }}
+ {{- else if .IsArray }}
+ {{- if .IsQueryParam }}
+
+ q{{ pascalize .Name }}, qhk{{ pascalize .Name }}, _ := qs.GetOK({{ .Path }})
+ if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}(q{{ pascalize .Name }}, qhk{{ pascalize .Name }}, route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- else if .IsPathParam }}
+
+ r{{ pascalize .Name }}, rhk{{ pascalize .Name }}, _ := route.Params.GetOK({{ .Path }})
+ if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}(r{{ pascalize .Name }}, rhk{{ pascalize .Name }}, route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- else if .IsHeaderParam }}
+
+ if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}(r.Header[http.CanonicalHeaderKey({{ .Path }})], true, route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- else if and .IsFormParam }}
+
+ fd{{ pascalize .Name }}, fdhk{{ pascalize .Name }}, _ := fds.GetOK({{ .Path }})
+ if err := {{ .ReceiverName }}.bind{{ pascalize .ID }}(fd{{ pascalize .Name }}, fdhk{{ pascalize .Name }}, route.Formats); err != nil {
+ res = append(res, err)
+ }
+ {{- end }}
+ {{- end }}
+
+ {{- if and .IsBodyParam .Schema }}
+
+ if runtime.HasBody(r) {
+ {{- if .Schema.IsStream }}
+ {{ .ReceiverName }}.{{ pascalize .Name }} = r.Body
+ {{- else }}
+ defer r.Body.Close()
+ {{- if and .Schema.IsBaseType .Schema.IsExported }}
+ body, err := {{ toPackageName .ModelsPackage }}.Unmarshal{{ dropPackage .GoType }}{{ if .IsArray }}Slice{{ end }}(r.Body, route.Consumer)
+ if err != nil {
+ {{- if .Required }}
+ if err == io.EOF {
+ err = errors.Required({{ .Path }}, {{ printf "%q" .Location }}, "")
+ }
+ {{- end }}
+ res = append(res, err)
+ {{- else }}
+ var body {{ .GoType }}
+ if err := route.Consumer.Consume(r.Body, &body); err != nil {
+ {{- if .Required }}
+ if err == io.EOF {
+ res = append(res, errors.Required({{ printf "%q" (camelize .Name) }}, {{ printf "%q" .Location }}, ""))
+ } else {
+ {{- end }}
+ res = append(res, errors.NewParseError({{ printf "%q" (camelize .Name) }}, {{ printf "%q" .Location }}, "", err))
+ {{- if .Required }}
+ }
+ {{- end }}
+ {{- end }}
+ } else {
+ {{- template "bodyvalidator" . }}
+ }
+ {{- end }}
+ }
+ {{- if .Required }} else {
+ res = append(res, errors.Required({{ printf "%q" (camelize .Name) }}, {{ printf "%q" .Location }}, ""))
+ }
+ {{- end }}
+ {{- end }}
+{{- end }}
+ if len(res) > 0 {
+ return errors.CompositeValidationError(res...)
+ }
+ return nil
+}
+
+{{- $className := (pascalize .Name) }}
+{{ range .Params }}
+ {{- if .IsFileParam }}
+// bind{{ pascalize .ID }} binds file parameter {{ .ID }}.
+//
+// The only supported validations on files are MinLength and MaxLength
+func ({{ .ReceiverName }} *{{ $className }}Params) bind{{ pascalize .ID }}(file multipart.File, header *multipart.FileHeader) error {
+ {{- if or .MinLength .MaxLength }}
+ size, _ := file.Seek(0, io.SeekEnd)
+ file.Seek(0, io.SeekStart)
+ {{- end }}
+ {{- if .MinLength}}
+ if size < {{.MinLength}} {
+ return errors.ExceedsMinimum({{ .Path }}, {{ printf "%q" .Location }}, {{ .MinLength }}, false, size)
+ }
+ {{- end }}
+ {{- if .MaxLength}}
+ if size > {{.MaxLength}} {
+ return errors.ExceedsMaximum({{ .Path }}, {{ printf "%q" .Location }}, {{ .MaxLength }}, false, size)
+ }
+ {{- end }}
+ return nil
+}
+ {{- else if not .IsBodyParam }}
+ {{- if or .IsPrimitive .IsCustomFormatter }}
+
+// bind{{ pascalize .ID }} binds and validates parameter {{ .ID }} from {{ .Location }}.
+func ({{ .ReceiverName }} *{{ $className }}Params) bind{{ pascalize .ID }}(rawData []string, hasKey bool, formats strfmt.Registry) error {
+ {{- if and (not .IsPathParam) .Required }}
+ if !hasKey {
+ return errors.Required({{ .Path }}, {{ printf "%q" .Location }}, rawData)
+ }
+ {{- end }}
+ var raw string
+ if len(rawData) > 0 {
+ raw = rawData[len(rawData)-1]
+ }
+
+ // Required: {{ .Required }}
+ {{- if .IsQueryParam }}
+ // AllowEmptyValue: {{ .AllowEmptyValue }}
+ {{- end }}
+ {{- if .IsPathParam }}
+ // Parameter is provided by construction from the route
+ {{- end }}
+
+ {{- if and (not .IsPathParam) .Required (not .AllowEmptyValue) }}
+
+ if err := validate.RequiredString({{ .Path }}, {{ printf "%q" .Location }}, raw); err != nil {
+ return err
+ }
+ {{- else if and ( not .IsPathParam ) (or (not .Required) .AllowEmptyValue) }}
+
+ if raw == "" { // empty values pass all other validations
+ {{- if .HasDefault }}
+ // Default values have been previously initialized by New{{ $className }}Params()
+ {{- end }}
+ return nil
+ }
+ {{- end }}
+
+ {{- if .Converter }}
+
+ value, err := {{ .Converter }}(raw)
+ if err != nil {
+ return errors.InvalidType({{ .Path }}, {{ printf "%q" .Location }}, {{ printf "%q" .GoType }}, raw)
+ }
+ {{ .ValueExpression }} = {{ if .IsNullable }}&{{ end }}value
+ {{- else if .IsCustomFormatter }}
+
+ // Format: {{ .SwaggerFormat }}
+ value, err := formats.Parse({{ printf "%q" .SwaggerFormat }}, raw)
+ if err != nil {
+ return errors.InvalidType({{ .Path }}, {{ printf "%q" .Location }}, {{ printf "%q" .GoType }}, raw)
+ }
+ {{ .ValueExpression }} = {{ if or .IsArray .HasDiscriminator .IsFileParam .IsStream (not .IsNullable) }}*{{ end }}(value.(*{{ .GoType }}))
+ {{- else}}
+ {{ .ValueExpression }} = {{ if .IsNullable }}&{{ end }}raw
+ {{- end }}
+
+ {{- if .HasValidations }}
+
+ if err := {{ .ReceiverName }}.validate{{ pascalize .ID }}(formats); err != nil {
+ return err
+ }
+ {{- end }}
+
+ return nil
+}
+ {{- else if .IsArray }}
+
+// bind{{ pascalize .ID }} binds and validates array parameter {{ .ID }} from {{ .Location }}.
+//
+// Arrays are parsed according to CollectionFormat: "{{ .CollectionFormat }}" (defaults to "csv" when empty).
+func ({{ .ReceiverName }} *{{ $className }}Params) bind{{ pascalize .ID }}(rawData []string, hasKey bool, formats strfmt.Registry) error {
+ {{- if .Required }}
+ if !hasKey {
+ return errors.Required({{ .Path }}, {{ printf "%q" .Location }}, rawData)
+ }
+ {{- end }}
+ {{- if eq .CollectionFormat "multi" }}
+ // CollectionFormat: {{ .CollectionFormat }}
+ {{ varname .Child.ValueExpression }}C := rawData
+ {{- else }}
+ var qv{{ pascalize .Name }} string
+ if len(rawData) > 0 {
+ qv{{ pascalize .Name }} = rawData[len(rawData)-1]
+ }
+
+ // CollectionFormat: {{ .CollectionFormat }}
+ {{ varname .Child.ValueExpression }}C := swag.SplitByFormat(qv{{ pascalize .Name }}, {{ printf "%q" .CollectionFormat }})
+ {{- end }}
+ {{- if and .Required (not .AllowEmptyValue) }}
+ if len({{ varname .Child.ValueExpression }}C) == 0 {
+ return errors.Required({{ .Path }}, {{ printf "%q" .Location }}, {{ varname .Child.ValueExpression }}C)
+ }
+ {{- else }}
+ if len({{ varname .Child.ValueExpression }}C) == 0 {
+ {{- if .HasDefault }}
+ // Default values have been previously initialized by New{{ $className }}Params()
+ {{- end }}
+ return nil
+ } {{- end }}
+ {{ template "sliceparambinder" . }}
+ {{ .ValueExpression }} = {{ varname .Child.ValueExpression }}R
+ {{- if .HasSliceValidations }}
+ if err := {{ .ReceiverName }}.validate{{ pascalize .ID }}(formats); err != nil {
+ return err
+ }
+ {{- end }}
+
+ return nil
+}
+ {{- end }}
+
+ {{- if or (and (not .IsArray) .HasValidations) (and .IsArray .HasSliceValidations) }}
+
+// validate{{ pascalize .ID }} carries on validations for parameter {{ .ID }}
+func ({{ .ReceiverName }} *{{ $className }}Params) validate{{ pascalize .ID }}(formats strfmt.Registry) error {
+ {{ template "propertyparamvalidator" . }}
+ return nil
+}
+ {{- end }}
+
+ {{- else if .IsBodyParam }}{{/* validation method for inline body parameters with validations */}}
+ {{- if and .HasSimpleBodyParams (not .HasModelBodyItems) (not .HasModelBodyMap) }}
+ {{- if .Schema.HasValidations }}
+
+// validate{{ pascalize .ID }}Body validates an inline body parameter
+func ({{ .ReceiverName }} *{{ $className }}Params) validate{{ pascalize .ID }}Body(formats strfmt.Registry) error {
+ {{- if .IsArray }}
+ {{- if .HasSliceValidations }}
+ {{- template "sliceparamvalidator" . }}
+ {{- end }}
+ {{- if .Child.HasValidations }}
+ {{ varname .Child.ValueExpression }}C := {{ .ValueExpression }}
+ {{ template "sliceparambinder" . }}
+ {{ .ValueExpression }} = {{ varname .Child.ValueExpression }}R
+ {{- end }}
+ {{- else if .IsMap }}
+ {{ varname .Child.ValueExpression }}C := {{ .ValueExpression }}
+ {{ template "mapparamvalidator" . }}
+ {{ .ValueExpression }} = {{ varname .Child.ValueExpression }}R
+ {{- else }}
+ {{ template "propertyparamvalidator" . }}
+ {{- end }}
+ return nil
+}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+{{- end }}
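
The bind/validate split generated by this template is easier to see on a concrete case. Below is a hand-written approximation for a single required `limit` query parameter of type `int64` with `maximum: 80`. This is a sketch under assumptions: the names (`ListStatusesParams`, `bindLimit`) are invented, and the real generated code threads `strfmt.Registry` through the bind methods and returns `go-openapi/errors` values rather than plain `fmt` errors.

```go
package main

import (
	"fmt"
	"net/http"
	"strconv"
)

// ListStatusesParams stands in for a generated Params struct.
type ListStatusesParams struct {
	HTTPRequest *http.Request
	Limit       int64
}

// bindLimit binds and validates parameter limit from query, following the
// template's rule that the last occurrence of a repeated key wins.
func (o *ListStatusesParams) bindLimit(rawData []string, hasKey bool) error {
	if !hasKey { // Required: true
		return fmt.Errorf("limit in query is required")
	}
	var raw string
	if len(rawData) > 0 {
		raw = rawData[len(rawData)-1] // last value wins
	}
	if raw == "" { // AllowEmptyValue: false
		return fmt.Errorf("limit in query must not be empty")
	}
	value, err := strconv.ParseInt(raw, 10, 64)
	if err != nil {
		return fmt.Errorf("limit in query must be an int64, got %q", raw)
	}
	o.Limit = value
	return o.validateLimit()
}

// validateLimit carries on validations for parameter limit (maximum: 80).
func (o *ListStatusesParams) validateLimit() error {
	if o.Limit > 80 {
		return fmt.Errorf("limit in query must be <= 80, got %d", o.Limit)
	}
	return nil
}

func main() {
	p := ListStatusesParams{}
	query := map[string][]string{"limit": {"20", "40"}}
	raw, ok := query["limit"]
	if err := p.bindLimit(raw, ok); err != nil {
		fmt.Println("bind error:", err)
		return
	}
	fmt.Println("bound limit:", p.Limit) // prints 40: the last value won
}
```

Note how `bindLimit` mirrors the template's flow: required check on the key, last-value extraction, empty-value short circuit, type conversion, then a hand-off to a separate `validateXxx` method when the parameter carries validations.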
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/responses.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/responses.gotmpl
new file mode 100644
index 000000000..1d844a890
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/responses.gotmpl
@@ -0,0 +1,271 @@
+{{ define "serverheaderbuilder" }}
+{{ if not .IsArray }}{{ template "simpleserverheaderbuilder" . }}{{ else }}{{ template "sliceserverheaderbuilder" . }}{{ end }}
+{{- end }}
+{{ define "simpleserverheaderbuilder" }}
+{{ if .IsNullable -}}
+var {{ varname .ID }} string
+if {{ .ReceiverName }}.{{ pascalize .ID }} != nil {
+ {{ varname .ID }} = {{ if .Formatter }}{{ .Formatter }}(*{{ .ReceiverName }}.{{ pascalize .ID }}){{ else }}{{ if not .IsCustomFormatter }}*{{ end }}{{ .ReceiverName }}.{{ pascalize .ID }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+}
+{{ else }}{{ varname .ID }} := {{ if .Formatter }}{{ .Formatter }}({{ .ReceiverName }}.{{ pascalize .ID }}){{ else }}{{ .ReceiverName }}.{{ pascalize .ID }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+{{ end -}}
+if {{ varname .ID }} != "" {
+ rw.Header().Set({{ printf "%q" .Name }}, {{ varname .ID }})
+}
+{{ end }}
+{{ define "sliceitemserverheaderbuilder" }}
+{{ if .IsNullable -}}
+var {{ .ValueExpression }}S string
+if {{ .ValueExpression }} != nil {
+ {{ .ValueExpression }}S = {{ if .Formatter }}{{ .Formatter }}(*{{ .ValueExpression }}){{ else }}*{{ .ValueExpression }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+}
+{{ else -}}
+{{ .ValueExpression }}S := {{ if .Formatter }}{{ .Formatter }}({{ .ValueExpression }}){{ else }}{{ .ValueExpression }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+{{ end -}}
+if {{ .ValueExpression }}S != "" {
+ {{ .ValueExpression }}R = append({{ .ValueExpression }}R, {{ .ValueExpression }}S)
+}
+{{ end }}
+{{define "sliceserverheaderbuilder" }}
+var {{ varname .Child.ValueExpression }}R []string
+for _, {{ varname .Child.ValueExpression }} := range {{ .ValueExpression }} {
+ {{- if not .Child.IsArray }}{{ template "sliceitemserverheaderbuilder" .Child }}{{ else }}{{ template "sliceserverheaderbuilder" .Child }}{{ end -}}
+}
+{{ if not .Child.Parent -}}
+{{ varname .ID }} := swag.JoinByFormat({{ varname .Child.ValueExpression }}R, {{ printf "%q" .CollectionFormat }})
+if len({{ varname .ID }}) > 0 {
+ hv := {{ varname .ID }}[0]
+ if hv != "" {
+ rw.Header().Set({{ printf "%q" .Name }}, hv)
+ }
+}
+{{ else -}}
+{{ .ValueExpression }}S := swag.JoinByFormat({{ varname .Child.ValueExpression }}R, {{ printf "%q" .CollectionFormat }})
+if len({{ .ValueExpression }}S) > 0 {
+ {{ .ValueExpression }}Ss := {{ .ValueExpression }}S[0]
+ if {{ .ValueExpression }}Ss != "" {
+ {{ .ValueExpression }}R = append({{ .ValueExpression }}R, {{ .ValueExpression }}Ss)
+ }
+}
+{{ end -}}
+{{ end -}}
+{{ define "serverresponse" }}
+{{ if ne .Code -1 }}// {{ pascalize .Name }}Code is the HTTP code returned for type {{ pascalize .Name }}
+const {{ pascalize .Name }}Code int = {{ .Code }}{{ end }}
+
+/*{{ if .Description }}{{ pascalize .Name }} {{ blockcomment .Description }}{{else}}{{ pascalize .Name }} {{ humanize .Name }}{{end}}
+
+swagger:response {{ camelize .Name }}
+*/
+type {{ pascalize .Name }} struct {
+ {{ if eq .Code -1 }}
+ _statusCode int
+ {{ end }}{{ range .Headers }}/*{{if .Description }}{{ blockcomment .Description }}{{ end }}
+ {{ if .Maximum }}
+ Maximum: {{ if .ExclusiveMaximum }}< {{ end }}{{ .Maximum }}{{ end }}{{ if .Minimum }}
+ Minimum: {{ if .ExclusiveMinimum }}> {{ end }}{{ .Minimum }}{{ end }}{{ if .MultipleOf }}
+ Multiple Of: {{ .MultipleOf }}{{ end }}{{ if .MaxLength }}
+ Max Length: {{ .MaxLength }}{{ end }}{{ if .MinLength }}
+ Min Length: {{ .MinLength }}{{ end }}{{ if .Pattern }}
+ Pattern: {{ .Pattern }}{{ end }}{{ if .MaxItems }}
+ Max Items: {{ .MaxItems }}{{ end }}{{ if .MinItems }}
+ Min Items: {{ .MinItems }}{{ end }}{{ if .UniqueItems }}
+ Unique: true{{ end }}{{ if .HasDefault }}
+ Default: {{ printf "%#v" .Default }}{{ end }}
+ */
+ {{ pascalize .Name }} {{ .GoType }} `json:"{{.Name}}{{ if not .Required }},omitempty{{ end }}{{ if .IsJSONString }},string{{ end }}"`
+ {{ end }}
+ {{ if .Schema }}{{ with .Schema }}
+ /*{{if .Description }}{{ blockcomment .Description }}{{ end }}{{ if .Maximum }}
+ Maximum: {{ if .ExclusiveMaximum }}< {{ end }}{{ .Maximum }}{{ end }}{{ if .Minimum }}
+ Minimum: {{ if .ExclusiveMinimum }}> {{ end }}{{ .Minimum }}{{ end }}{{ if .MultipleOf }}
+ Multiple Of: {{ .MultipleOf }}{{ end }}{{ if .MaxLength }}
+ Max Length: {{ .MaxLength }}{{ end }}{{ if .MinLength }}
+ Min Length: {{ .MinLength }}{{ end }}{{ if .Pattern }}
+ Pattern: {{ .Pattern }}{{ end }}{{ if .MaxItems }}
+ Max Items: {{ .MaxItems }}{{ end }}{{ if .MinItems }}
+ Min Items: {{ .MinItems }}{{ end }}{{ if .UniqueItems }}
+ Unique: true{{ end }}
+ In: Body
+ */{{ end }}
+ Payload {{ if and (not .Schema.IsBaseType) .Schema.IsComplexObject }}*{{ end }}{{ .Schema.GoType }} `json:"body,omitempty"`
+ {{ end }}
+}
+
+// New{{ pascalize .Name }} creates {{ pascalize .Name }} with default header values
+func New{{ pascalize .Name }}({{ if eq .Code -1 }}code int{{ end }}) *{{ pascalize .Name }} { {{ if eq .Code -1 }}
+if code <= 0 {
+ code = 500
+ }
+{{ end }}
+{{ if .Headers.HasSomeDefaults }}
+ var (
+ // initialize headers with default values
+ {{ range .Headers }}
+ {{ if .HasDefault -}}
+ {{ varname .ID}}Default =
+ {{- if and .IsPrimitive .IsCustomFormatter (not (stringContains .Zero "(\"" )) }}{{ .Zero }}{{/* strfmt type initializer requires UnmarshalText(), e.g. Date, Datetime, Duration */}}
+ {{- else if and .IsPrimitive .IsCustomFormatter (stringContains .Zero "(\"" ) }}{{.GoType}}({{- printf "%#v" .Default }}){{/* strfmt type initializer takes string */}}
+ {{- else if and .IsPrimitive (not .IsCustomFormatter) -}}{{.GoType}}({{- printf "%#v" .Default }}){{/* regular go primitive type initializer */}}
+ {{- else if .IsArray -}}{{- /* Do not initialize from possible defaults in nested arrays */ -}}
+ {{- if and .Child.IsPrimitive .Child.IsCustomFormatter }}{{ .Zero }}{{/* initialization strategy with UnmarshalText() */}}
+ {{- else if .Child.IsArray -}}{{ .Zero }}{{/* initialization strategy with json.Unmarshal() */}}
+ {{- else if and .Child.IsPrimitive (not .Child.IsCustomFormatter) -}}{{.GoType}}{{- arrayInitializer .Default }}{{/* regular go primitive type initializer: simple slice initializer */}}
+ {{- else }}{{ printf "%#v" .Default }}{{/* all other cases (e.g. schema) [should not occur] */}}
+ {{- end }}
+ {{- else }}{{ printf "%#v" .Default }}{{/* case .Schema */}}
+ {{- end }}
+ {{- end }}
+ {{- end }}
+ )
+
+{{ range .Headers }}{{ if .HasDefault -}}{{- /* carry out UnmarshalText initialization strategy */ -}}
+ {{ if and .IsPrimitive .IsCustomFormatter (not (stringContains .Zero "(\"")) }}{{ varname .ID}}Default.UnmarshalText([]byte({{ printf "%q" .Default }}))
+ {{ else if .IsArray -}}
+ {{ if or ( and .Child.IsPrimitive .Child.IsCustomFormatter ) .Child.IsArray -}}
+ if err := json.Unmarshal([]byte(`{{printf "%s" (json .Default)}}`), &{{ varname .ID }}Default); err != nil {
+ // panics if specification is invalid
+ msg := fmt.Sprintf("invalid default value for header {{ varname .ID }}: %v",err)
+ panic(msg)
+ }
+ {{ end -}}
+ {{- end }}
+ {{- end }}
+{{- end }}
+{{ end }}
+ return &{{ pascalize .Name }}{
+ {{ if eq .Code -1 }}_statusCode: code,{{ end }}
+ {{ range .Headers }}{{ if .HasDefault }}
+ {{ pascalize .Name}}: {{ if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) (not .IsStream) .IsNullable }}&{{ end }}{{ varname .ID }}Default,
+ {{ end }}
+ {{ end -}}
+ }
+}
+
+{{ if eq .Code -1 }}
+// WithStatusCode adds the status to the {{ humanize .Name }} response
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) WithStatusCode(code int) *{{ pascalize .Name }} {
+ {{ .ReceiverName }}._statusCode = code
+ return {{ .ReceiverName }}
+}
+
+// SetStatusCode sets the status to the {{ humanize .Name }} response
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) SetStatusCode(code int) {
+ {{ .ReceiverName }}._statusCode = code
+}
+{{ end }}{{ range .Headers }}
+// With{{ pascalize .Name }} adds the {{ camelize .Name }} to the {{ humanize $.Name }} response
+func ({{ $.ReceiverName }} *{{ pascalize $.Name }}) With{{ pascalize .Name }}({{ varname .Name }} {{ .GoType}}) *{{ pascalize $.Name }} {
+ {{ $.ReceiverName }}.{{ pascalize .Name }} = {{ varname .Name }}
+ return {{ .ReceiverName }}
+}
+
+// Set{{ pascalize .Name }} sets the {{ camelize .Name }} to the {{ humanize $.Name }} response
+func ({{ $.ReceiverName }} *{{ pascalize $.Name }}) Set{{ pascalize .Name }}({{ varname .Name }} {{ .GoType}}) {
+ {{ $.ReceiverName }}.{{ pascalize .Name }} = {{ varname .Name }}
+}
+{{ end }}{{ if .Schema }}
+// WithPayload adds the payload to the {{ humanize .Name }} response
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) WithPayload(payload {{ if and .Schema.IsComplexObject (not .Schema.IsBaseType) }}*{{ end }}{{ .Schema.GoType }}) *{{ pascalize .Name }} {
+ {{ .ReceiverName }}.Payload = payload
+ return {{ .ReceiverName }}
+}
+
+// SetPayload sets the payload to the {{ humanize .Name }} response
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) SetPayload(payload {{ if and .Schema.IsComplexObject (not .Schema.IsBaseType) }}*{{ end }}{{ .Schema.GoType }}) {
+ {{ .ReceiverName }}.Payload = payload
+}
+{{ end }}
+
+// WriteResponse to the client
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {
+ {{ range .Headers }}
+ // response header {{.Name}}
+ {{ template "serverheaderbuilder" . -}}
+ {{ end }}
+ {{ if not .Schema }}
+ rw.Header().Del(runtime.HeaderContentType) // Remove Content-Type on empty responses
+ {{ end }}
+ rw.WriteHeader({{ if eq .Code -1 }}{{ .ReceiverName }}._statusCode{{ else }}{{ .Code }}{{ end }})
+ {{- if .Schema }}
+ {{- if .Schema.IsComplexObject }}
+ if {{ .ReceiverName }}.Payload != nil {
+ {{- end }}
+ payload := {{ .ReceiverName }}.Payload
+ {{- if and (not .Schema.IsInterface) (or .Schema.IsArray .Schema.IsMap) }}
+ if payload == nil {
+ // return empty {{ if .Schema.IsArray }}array{{ else if .Schema.IsMap }}map{{ end }}
+ payload =
+ {{- if or .Schema.IsAliased .Schema.IsComplexObject }}
+ {{- if and (not .Schema.IsBaseType) .Schema.IsComplexObject }}&{{ end }}{{ .Schema.GoType -}} {}
+ {{- else }}
+ {{- .Schema.Zero }}
+ {{- end }}
+ }
+ {{ end }}
+ if err := producer.Produce(rw, payload); err != nil {
+ panic(err) // let the recovery middleware deal with this
+ }
+ {{- if .Schema.IsComplexObject }}
+ }
+ {{- end }}
+ {{- end }}
+}
+
+{{ if $.StrictResponders }}
+func ({{ .ReceiverName }} *{{ pascalize .Name }}) {{ pascalize .OperationName }}Responder() {}
+{{- end }}
+{{ end }}// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+
+import (
+ "fmt"
+ "net/http"
+
+ "github.com/go-openapi/errors"
+ "github.com/go-openapi/runtime"
+ "github.com/go-openapi/runtime/security"
+ "github.com/go-openapi/swag"
+ "github.com/go-openapi/validate"
+ "github.com/go-openapi/runtime/middleware"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+{{ range .Responses }}
+{{ template "serverresponse" . }}
+{{ end }}
+{{ if .DefaultResponse }}
+{{ template "serverresponse" .DefaultResponse }}
+{{ end }}
+
+{{ if $.StrictResponders }}
+type {{ pascalize .Name }}NotImplementedResponder struct {
+ middleware.Responder
+}
+
+func (*{{ pascalize .Name }}NotImplementedResponder) {{ pascalize .Name }}Responder() {}
+
+func {{ pascalize .Name }}NotImplemented() {{ pascalize .Name }}Responder {
+ return &{{ pascalize .Name }}NotImplementedResponder{
+ middleware.NotImplemented(
+ "operation authentication.{{ pascalize .Name }} has not yet been implemented",
+ ),
+ }
+}
+
+type {{ pascalize .Name }}Responder interface {
+ middleware.Responder
+ {{ pascalize .Name }}Responder()
+}
+{{ end }}
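
For orientation, the following stdlib-only sketch mirrors the response shape this template produces: a constant status code, fluent `WithPayload`/builder methods, and a `WriteResponse` method that substitutes an empty value for a nil map payload, just as the template does for array and map schemas. The operation name (`GetUserOK`) and payload type are illustrative, and go-swagger's `runtime.Producer` is reduced here to a plain JSON encoder.

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/http/httptest"
)

// GetUserOKCode is the HTTP code returned for type GetUserOK.
const GetUserOKCode int = 200

// GetUserOK stands in for a generated 200 response with a JSON body.
type GetUserOK struct {
	Payload map[string]string
}

// NewGetUserOK creates GetUserOK with default header values.
func NewGetUserOK() *GetUserOK { return &GetUserOK{} }

// WithPayload adds the payload to the get user OK response.
func (o *GetUserOK) WithPayload(payload map[string]string) *GetUserOK {
	o.Payload = payload
	return o
}

// WriteResponse writes the response to the client.
func (o *GetUserOK) WriteResponse(rw http.ResponseWriter) {
	rw.Header().Set("Content-Type", "application/json")
	rw.WriteHeader(GetUserOKCode)
	payload := o.Payload
	if payload == nil {
		payload = map[string]string{} // return empty map, as the template does
	}
	if err := json.NewEncoder(rw).Encode(payload); err != nil {
		panic(err) // let the recovery middleware deal with this
	}
}

func main() {
	rec := httptest.NewRecorder()
	NewGetUserOK().WithPayload(map[string]string{"id": "1"}).WriteResponse(rec)
	fmt.Println(rec.Code, rec.Body.String()) // 200 {"id":"1"}
}
```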
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/server.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/server.gotmpl
new file mode 100644
index 000000000..c78d22051
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/server.gotmpl
@@ -0,0 +1,660 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .APIPackage }}
+
+import (
+ "context"
+ "crypto/tls"
+ "crypto/x509"
+ "errors"
+ "fmt"
+ "io/ioutil"
+ "log"
+ "net"
+ "net/http"
+ "os"
+ "os/signal"
+ "strconv"
+ "sync"
+ "sync/atomic"
+ "syscall"
+ "time"
+
+ "github.com/go-openapi/swag"
+ {{ if .UseGoStructFlags }}flags "github.com/jessevdk/go-flags"
+ {{ end -}}
+ "github.com/go-openapi/runtime/flagext"
+ {{ if .UsePFlags }}flag "github.com/spf13/pflag"
+ {{ end -}}
+ {{ if .UseFlags }}"flag"
+ "strings"
+ {{ end -}}
+ "golang.org/x/net/netutil"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+const (
+ schemeHTTP = "http"
+ schemeHTTPS = "https"
+ schemeUnix = "unix"
+)
+
+var defaultSchemes []string
+
+func init() {
+ defaultSchemes = []string{ {{ if (hasInsecure .Schemes) }}
+ schemeHTTP,{{ end}}{{ if (hasSecure .Schemes) }}
+ schemeHTTPS,{{ end }}{{ if (contains .ExtraSchemes "unix") }}
+ schemeUnix,{{ end }}
+ }
+}
+
+{{ if not .UseGoStructFlags}}
+var ({{ if .ExcludeSpec }}
+ specFile string
+ {{ end }}enabledListeners []string
+ cleanupTimeout time.Duration
+ gracefulTimeout time.Duration
+ maxHeaderSize flagext.ByteSize
+
+ socketPath string
+
+ host string
+ port int
+ listenLimit int
+ keepAlive time.Duration
+ readTimeout time.Duration
+ writeTimeout time.Duration
+
+ tlsHost string
+ tlsPort int
+ tlsListenLimit int
+ tlsKeepAlive time.Duration
+ tlsReadTimeout time.Duration
+ tlsWriteTimeout time.Duration
+ tlsCertificate string
+ tlsCertificateKey string
+ tlsCACertificate string
+)
+
+{{ if .UseFlags}}
+// StringSliceVar support for flag
+type sliceValue []string
+
+func newSliceValue(vals []string, p *[]string) *sliceValue {
+ *p = vals
+ return (*sliceValue)(p)
+}
+
+func (s *sliceValue) Set(val string) error {
+ *s = sliceValue(strings.Split(val, ","))
+ return nil
+}
+
+func (s *sliceValue) Get() interface{} { return []string(*s) }
+
+func (s *sliceValue) String() string { return strings.Join([]string(*s), ",") }
+// end StringSliceVar support for flag
+{{ end }}
+
+func init() {
+ maxHeaderSize = flagext.ByteSize(1000000){{ if .ExcludeSpec }}
+ flag.StringVarP(&specFile, "spec", "", "", "the swagger specification to serve")
+ {{ end }}
+ {{ if .UseFlags }}
+ flag.Var(newSliceValue(defaultSchemes, &enabledListeners), "schema", "the listeners to enable, this can be repeated and defaults to the schemes in the swagger spec")
+ {{ end }}
+ {{ if .UsePFlags }}
+ flag.StringSliceVar(&enabledListeners, "scheme", defaultSchemes, "the listeners to enable, this can be repeated and defaults to the schemes in the swagger spec")
+ {{ end }}
+ flag.DurationVar(&cleanupTimeout, "cleanup-timeout", 10*time.Second, "grace period for which to wait before killing idle connections")
+ flag.DurationVar(&gracefulTimeout, "graceful-timeout", 15*time.Second, "grace period for which to wait before shutting down the server")
+ flag.Var(&maxHeaderSize, "max-header-size", "controls the maximum number of bytes the server will read parsing the request header's keys and values, including the request line. It does not limit the size of the request body")
+
+ flag.StringVar(&socketPath, "socket-path", "/var/run/todo-list.sock", "the unix socket to listen on")
+
+ flag.StringVar(&host, "host", "localhost", "the IP to listen on")
+ flag.IntVar(&port, "port", 0, "the port to listen on for insecure connections, defaults to a random value")
+ flag.IntVar(&listenLimit, "listen-limit", 0, "limit the number of outstanding requests")
+ flag.DurationVar(&keepAlive, "keep-alive", 3*time.Minute, "sets the TCP keep-alive timeouts on accepted connections. It prunes dead TCP connections ( e.g. closing laptop mid-download)")
+ flag.DurationVar(&readTimeout, "read-timeout", 30*time.Second, "maximum duration before timing out read of the request")
+ flag.DurationVar(&writeTimeout, "write-timeout", 30*time.Second, "maximum duration before timing out write of the response")
+
+ flag.StringVar(&tlsHost, "tls-host", "localhost", "the IP to listen on")
+ flag.IntVar(&tlsPort, "tls-port", 0, "the port to listen on for secure connections, defaults to a random value")
+ flag.StringVar(&tlsCertificate, "tls-certificate", "", "the certificate file to use for secure connections")
+ flag.StringVar(&tlsCertificateKey, "tls-key", "", "the private key file to use for secure connections (without passphrase)")
+ flag.StringVar(&tlsCACertificate, "tls-ca", "", "the certificate authority certificate file to be used with mutual tls auth")
+ flag.IntVar(&tlsListenLimit, "tls-listen-limit", 0, "limit the number of outstanding requests")
+ flag.DurationVar(&tlsKeepAlive, "tls-keep-alive", 3*time.Minute, "sets the TCP keep-alive timeouts on accepted connections. It prunes dead TCP connections ( e.g. closing laptop mid-download)")
+ flag.DurationVar(&tlsReadTimeout, "tls-read-timeout", 30*time.Second, "maximum duration before timing out read of the request")
+ flag.DurationVar(&tlsWriteTimeout, "tls-write-timeout", 30*time.Second, "maximum duration before timing out write of the response")
+}
+
+func stringEnvOverride(orig string, def string, keys ...string) string {
+ for _, k := range keys {
+ if os.Getenv(k) != "" {
+ return os.Getenv(k)
+ }
+ }
+ if def != "" && orig == "" {
+ return def
+ }
+ return orig
+}
+
+func intEnvOverride(orig int, def int, keys ...string) int {
+ for _, k := range keys {
+ if os.Getenv(k) != "" {
+ v, err := strconv.Atoi(os.Getenv(k))
+ if err != nil {
+ fmt.Fprintln(os.Stderr, k, "is not a valid number")
+ os.Exit(1)
+ }
+ return v
+ }
+ }
+ if def != 0 && orig == 0 {
+ return def
+ }
+ return orig
+}
+{{ end }}
+
+// NewServer creates a new api {{ humanize .Name }} server but does not configure it
+func NewServer(api *{{ .APIPackageAlias }}.{{ pascalize .Name }}API) *Server {
+ s := new(Server)
+ {{ if not .UseGoStructFlags }}
+ s.EnabledListeners = enabledListeners
+ s.CleanupTimeout = cleanupTimeout
+ s.GracefulTimeout = gracefulTimeout
+ s.MaxHeaderSize = maxHeaderSize
+ s.SocketPath = socketPath
+ s.Host = stringEnvOverride(host, "", "HOST")
+ s.Port = intEnvOverride(port, 0, "PORT")
+ s.ListenLimit = listenLimit
+ s.KeepAlive = keepAlive
+ s.ReadTimeout = readTimeout
+ s.WriteTimeout = writeTimeout
+ s.TLSHost = stringEnvOverride(tlsHost, s.Host, "TLS_HOST", "HOST")
+ s.TLSPort = intEnvOverride(tlsPort, 0, "TLS_PORT")
+ s.TLSCertificate = stringEnvOverride(tlsCertificate, "", "TLS_CERTIFICATE")
+ s.TLSCertificateKey = stringEnvOverride(tlsCertificateKey, "", "TLS_PRIVATE_KEY")
+ s.TLSCACertificate = stringEnvOverride(tlsCACertificate, "", "TLS_CA_CERTIFICATE")
+ s.TLSListenLimit = tlsListenLimit
+ s.TLSKeepAlive = tlsKeepAlive
+ s.TLSReadTimeout = tlsReadTimeout
+ s.TLSWriteTimeout = tlsWriteTimeout
+ {{- if .ExcludeSpec }}
+ s.Spec = specFile
+ {{- end }}
+ {{- end }}
+ s.shutdown = make(chan struct{})
+ s.api = api
+ s.interrupt = make(chan os.Signal, 1)
+ return s
+}
+
+// ConfigureAPI configures the API and handlers.
+func (s *Server) ConfigureAPI() {
+ if s.api != nil {
+ s.handler = configureAPI(s.api)
+ }
+}
+
+// ConfigureFlags configures the additional flags defined by the handlers. Needs to be called before the parser.Parse
+func (s *Server) ConfigureFlags() {
+ if s.api != nil {
+ configureFlags(s.api)
+ }
+}
+
+// Server for the {{ humanize .Name }} API
+type Server struct {
+ EnabledListeners []string{{ if .UseGoStructFlags }} `long:"scheme" description:"the listeners to enable, this can be repeated and defaults to the schemes in the swagger spec"`{{ end }}
+ CleanupTimeout time.Duration{{ if .UseGoStructFlags }} `long:"cleanup-timeout" description:"grace period for which to wait before killing idle connections" default:"10s"`{{ end }}
+ GracefulTimeout time.Duration{{ if .UseGoStructFlags }} `long:"graceful-timeout" description:"grace period for which to wait before shutting down the server" default:"15s"`{{ end }}
+ MaxHeaderSize flagext.ByteSize{{ if .UseGoStructFlags }} `long:"max-header-size" description:"controls the maximum number of bytes the server will read parsing the request header's keys and values, including the request line. It does not limit the size of the request body." default:"1MiB"`{{ end }}
+
+ SocketPath {{ if not .UseGoStructFlags }}string{{ else }}flags.Filename `long:"socket-path" description:"the unix socket to listen on" default:"/var/run/{{ dasherize .Name }}.sock"`{{ end }}
+ domainSocketL net.Listener
+
+ Host string{{ if .UseGoStructFlags }} `long:"host" description:"the IP to listen on" default:"localhost" env:"HOST"`{{ end }}
+ Port int{{ if .UseGoStructFlags }} `long:"port" description:"the port to listen on for insecure connections, defaults to a random value" env:"PORT"`{{ end }}
+ ListenLimit int{{ if .UseGoStructFlags }} `long:"listen-limit" description:"limit the number of outstanding requests"`{{ end }}
+ KeepAlive time.Duration{{ if .UseGoStructFlags }} `long:"keep-alive" description:"sets the TCP keep-alive timeouts on accepted connections. It prunes dead TCP connections (e.g. closing laptop mid-download)" default:"3m"`{{ end }}
+ ReadTimeout time.Duration{{ if .UseGoStructFlags }} `long:"read-timeout" description:"maximum duration before timing out read of the request" default:"30s"`{{ end }}
+ WriteTimeout time.Duration{{ if .UseGoStructFlags }} `long:"write-timeout" description:"maximum duration before timing out write of the response" default:"60s"`{{ end }}
+ httpServerL net.Listener
+
+ TLSHost string{{ if .UseGoStructFlags }} `long:"tls-host" description:"the IP to listen on for tls, when not specified it's the same as --host" env:"TLS_HOST"`{{ end }}
+ TLSPort int{{ if .UseGoStructFlags }} `long:"tls-port" description:"the port to listen on for secure connections, defaults to a random value" env:"TLS_PORT"`{{ end }}
+ TLSCertificate {{ if not .UseGoStructFlags }}string{{ else }}flags.Filename `long:"tls-certificate" description:"the certificate to use for secure connections" env:"TLS_CERTIFICATE"`{{ end }}
+ TLSCertificateKey {{ if not .UseGoStructFlags }}string{{ else }}flags.Filename `long:"tls-key" description:"the private key to use for secure connections" env:"TLS_PRIVATE_KEY"`{{ end }}
+ TLSCACertificate {{ if not .UseGoStructFlags }}string{{ else }}flags.Filename `long:"tls-ca" description:"the certificate authority file to be used with mutual tls auth" env:"TLS_CA_CERTIFICATE"`{{ end }}
+ TLSListenLimit int{{ if .UseGoStructFlags }} `long:"tls-listen-limit" description:"limit the number of outstanding requests"`{{ end }}
+ TLSKeepAlive time.Duration{{ if .UseGoStructFlags }} `long:"tls-keep-alive" description:"sets the TCP keep-alive timeouts on accepted connections. It prunes dead TCP connections (e.g. closing laptop mid-download)"`{{ end }}
+ TLSReadTimeout time.Duration{{ if .UseGoStructFlags }} `long:"tls-read-timeout" description:"maximum duration before timing out read of the request"`{{ end }}
+ TLSWriteTimeout time.Duration{{ if .UseGoStructFlags }} `long:"tls-write-timeout" description:"maximum duration before timing out write of the response"`{{ end }}
+ httpsServerL net.Listener
+
+ {{ if .ExcludeSpec }}Spec {{ if not .UseGoStructFlags }}string{{ else }}flags.Filename `long:"spec" description:"the swagger specification to serve"`{{ end }}{{ end }}
+ api *{{ .APIPackageAlias }}.{{ pascalize .Name }}API
+ handler http.Handler
+ hasListeners bool
+ shutdown chan struct{}
+ shuttingDown int32
+ interrupted bool
+ interrupt chan os.Signal
+}
+
+// Logf logs a message either via the defined user logger or via the system logger if no user logger is defined.
+func (s *Server) Logf(f string, args ...interface{}) {
+ if s.api != nil && s.api.Logger != nil {
+ s.api.Logger(f, args...)
+ } else {
+ log.Printf(f, args...)
+ }
+}
+
+// Fatalf logs a message either via the defined user logger or via the system logger if no user logger is defined.
+// It exits with a non-zero status after printing.
+func (s *Server) Fatalf(f string, args ...interface{}) {
+ if s.api != nil && s.api.Logger != nil {
+ s.api.Logger(f, args...)
+ os.Exit(1)
+ } else {
+ log.Fatalf(f, args...)
+ }
+}
+
+// SetAPI configures the server with the specified API. Needs to be called before Serve
+func (s *Server) SetAPI(api *{{ .APIPackageAlias }}.{{ pascalize .Name }}API) {
+ if api == nil {
+ s.api = nil
+ s.handler = nil
+ return
+ }
+
+ s.api = api
+ s.handler = configureAPI(api)
+}
+
+func (s *Server) hasScheme(scheme string) bool {
+ schemes := s.EnabledListeners
+ if len(schemes) == 0 {
+ schemes = defaultSchemes
+ }
+
+ for _, v := range schemes {
+ if v == scheme {
+ return true
+ }
+ }
+ return false
+}
+
+// Serve the api
+func (s *Server) Serve() (err error) {
+ if !s.hasListeners {
+ if err = s.Listen(); err != nil {
+ return err
+ }
+ }
+
+ // set default handler, if none is set
+ if s.handler == nil {
+ if s.api == nil {
+ return errors.New("can't create the default handler, as no api is set")
+ }
+
+ s.SetHandler(s.api.Serve(nil))
+ }
+
+ wg := new(sync.WaitGroup)
+ once := new(sync.Once)
+ signalNotify(s.interrupt)
+ go handleInterrupt(once, s)
+
+ servers := []*http.Server{}
+
+ if s.hasScheme(schemeUnix) {
+ domainSocket := new(http.Server)
+ domainSocket.MaxHeaderBytes = int(s.MaxHeaderSize)
+ domainSocket.Handler = s.handler
+ if int64(s.CleanupTimeout) > 0 {
+ domainSocket.IdleTimeout = s.CleanupTimeout
+ }
+
+ configureServer(domainSocket, "unix", string(s.SocketPath))
+
+ servers = append(servers, domainSocket)
+ wg.Add(1)
+ s.Logf("Serving {{ humanize .Name }} at unix://%s", s.SocketPath)
+ go func(l net.Listener){
+ defer wg.Done()
+ if err := domainSocket.Serve(l); err != nil && err != http.ErrServerClosed {
+ s.Fatalf("%v", err)
+ }
+ s.Logf("Stopped serving {{ humanize .Name }} at unix://%s", s.SocketPath)
+ }(s.domainSocketL)
+ }
+
+ if s.hasScheme(schemeHTTP) {
+ httpServer := new(http.Server)
+ httpServer.MaxHeaderBytes = int(s.MaxHeaderSize)
+ httpServer.ReadTimeout = s.ReadTimeout
+ httpServer.WriteTimeout = s.WriteTimeout
+ httpServer.SetKeepAlivesEnabled(int64(s.KeepAlive) > 0)
+ if s.ListenLimit > 0 {
+ s.httpServerL = netutil.LimitListener(s.httpServerL, s.ListenLimit)
+ }
+
+ if int64(s.CleanupTimeout) > 0 {
+ httpServer.IdleTimeout = s.CleanupTimeout
+ }
+
+ httpServer.Handler = s.handler
+
+ configureServer(httpServer, "http", s.httpServerL.Addr().String())
+
+ servers = append(servers, httpServer)
+ wg.Add(1)
+ s.Logf("Serving {{ humanize .Name }} at http://%s", s.httpServerL.Addr())
+ go func(l net.Listener) {
+ defer wg.Done()
+ if err := httpServer.Serve(l); err != nil && err != http.ErrServerClosed {
+ s.Fatalf("%v", err)
+ }
+ s.Logf("Stopped serving {{ humanize .Name }} at http://%s", l.Addr())
+ }(s.httpServerL)
+ }
+
+ if s.hasScheme(schemeHTTPS) {
+ httpsServer := new(http.Server)
+ httpsServer.MaxHeaderBytes = int(s.MaxHeaderSize)
+ httpsServer.ReadTimeout = s.TLSReadTimeout
+ httpsServer.WriteTimeout = s.TLSWriteTimeout
+ httpsServer.SetKeepAlivesEnabled(int64(s.TLSKeepAlive) > 0)
+ if s.TLSListenLimit > 0 {
+ s.httpsServerL = netutil.LimitListener(s.httpsServerL, s.TLSListenLimit)
+ }
+ if int64(s.CleanupTimeout) > 0 {
+ httpsServer.IdleTimeout = s.CleanupTimeout
+ }
+ httpsServer.Handler = s.handler
+
+ // Inspired by https://blog.bracebin.com/achieving-perfect-ssl-labs-score-with-go
+ httpsServer.TLSConfig = &tls.Config{
+ // Causes servers to use Go's default ciphersuite preferences,
+ // which are tuned to avoid attacks. Does nothing on clients.
+ PreferServerCipherSuites: true,
+ // Only use curves which have assembly implementations
+ // https://github.com/golang/go/tree/master/src/crypto/elliptic
+ CurvePreferences: []tls.CurveID{tls.CurveP256},
+ {{- if .UseModernMode }}
+ // Use modern tls mode https://wiki.mozilla.org/Security/Server_Side_TLS#Modern_compatibility
+ NextProtos: []string{"h2", "http/1.1"},
+ // https://www.owasp.org/index.php/Transport_Layer_Protection_Cheat_Sheet#Rule_-_Only_Support_Strong_Protocols
+ MinVersion: tls.VersionTLS12,
+ // These ciphersuites support Forward Secrecy: https://en.wikipedia.org/wiki/Forward_secrecy
+ CipherSuites: []uint16{
+ tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
+ tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
+ tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
+ tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
+ tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305,
+ tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,
+ },
+ {{- end }}
+ }
+
+ // build standard config from server options
+ if s.TLSCertificate != "" && s.TLSCertificateKey != "" {
+ httpsServer.TLSConfig.Certificates = make([]tls.Certificate, 1)
+ httpsServer.TLSConfig.Certificates[0], err = tls.LoadX509KeyPair({{ if .UseGoStructFlags }}string({{ end }}s.TLSCertificate{{ if .UseGoStructFlags }}){{ end }}, {{ if .UseGoStructFlags }}string({{ end }}s.TLSCertificateKey{{ if .UseGoStructFlags }}){{ end }})
+ if err != nil {
+ return err
+ }
+ }
+
+ if s.TLSCACertificate != "" {
+ // include specified CA certificate
+ caCert, caCertErr := os.ReadFile({{ if .UseGoStructFlags }}string({{ end }}s.TLSCACertificate{{ if .UseGoStructFlags }}){{ end }})
+ if caCertErr != nil {
+ return caCertErr
+ }
+ caCertPool := x509.NewCertPool()
+ ok := caCertPool.AppendCertsFromPEM(caCert)
+ if !ok {
+ return fmt.Errorf("cannot parse CA certificate")
+ }
+ httpsServer.TLSConfig.ClientCAs = caCertPool
+ httpsServer.TLSConfig.ClientAuth = tls.RequireAndVerifyClientCert
+ }
+
+ // call custom TLS configurator
+ configureTLS(httpsServer.TLSConfig)
+
+ if len(httpsServer.TLSConfig.Certificates) == 0 && httpsServer.TLSConfig.GetCertificate == nil {
+ // after standard and custom config are passed, this ends up with no certificate
+ if s.TLSCertificate == "" {
+ if s.TLSCertificateKey == "" {
+ s.Fatalf("the required flags `--tls-certificate` and `--tls-key` were not specified")
+ }
+ s.Fatalf("the required flag `--tls-certificate` was not specified")
+ }
+ if s.TLSCertificateKey == "" {
+ s.Fatalf("the required flag `--tls-key` was not specified")
+ }
+ // this happens with a wrong custom TLS configurator
+ s.Fatalf("no certificate was configured for TLS")
+ }
+
+ configureServer(httpsServer, "https", s.httpsServerL.Addr().String())
+
+ servers = append(servers, httpsServer)
+ wg.Add(1)
+ s.Logf("Serving {{ humanize .Name }} at https://%s", s.httpsServerL.Addr())
+ go func(l net.Listener) {
+ defer wg.Done()
+ if err := httpsServer.Serve(l); err != nil && err != http.ErrServerClosed {
+ s.Fatalf("%v", err)
+ }
+ s.Logf("Stopped serving {{ humanize .Name }} at https://%s", l.Addr())
+ }(tls.NewListener(s.httpsServerL, httpsServer.TLSConfig))
+ }
+
+ wg.Add(1)
+ go s.handleShutdown(wg, &servers)
+
+ wg.Wait()
+ return nil
+}
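+
+// A typical caller wires this up roughly as follows (a minimal sketch only:
+// the NewServer constructor and the operations API constructor are assumed
+// from the surrounding generated code, which is not shown here):
+//
+//	api := operations.NewXxxAPI(swaggerSpec)
+//	server := NewServer(api)
+//	defer server.Shutdown()
+//	server.Port = 8080
+//	if err := server.Serve(); err != nil {
+//		log.Fatalln(err)
+//	}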
+
+// Listen creates the listeners for the server
+func (s *Server) Listen() error {
+ if s.hasListeners { // already done this
+ return nil
+ }
+
+ if s.hasScheme(schemeHTTPS) {
+ // Use http host if https host wasn't defined
+ if s.TLSHost == "" {
+ s.TLSHost = s.Host
+ }
+ // Use http listen limit if https listen limit wasn't defined
+ if s.TLSListenLimit == 0 {
+ s.TLSListenLimit = s.ListenLimit
+ }
+ // Use http tcp keep alive if https tcp keep alive wasn't defined
+ if int64(s.TLSKeepAlive) == 0 {
+ s.TLSKeepAlive = s.KeepAlive
+ }
+ // Use http read timeout if https read timeout wasn't defined
+ if int64(s.TLSReadTimeout) == 0 {
+ s.TLSReadTimeout = s.ReadTimeout
+ }
+ // Use http write timeout if https write timeout wasn't defined
+ if int64(s.TLSWriteTimeout) == 0 {
+ s.TLSWriteTimeout = s.WriteTimeout
+ }
+ }
+
+ if s.hasScheme(schemeUnix) {
+ domSockListener, err := net.Listen("unix", string(s.SocketPath))
+ if err != nil {
+ return err
+ }
+ s.domainSocketL = domSockListener
+ }
+
+ if s.hasScheme(schemeHTTP) {
+ listener, err := net.Listen("tcp", net.JoinHostPort(s.Host, strconv.Itoa(s.Port)))
+ if err != nil {
+ return err
+ }
+
+ h, p, err := swag.SplitHostPort(listener.Addr().String())
+ if err != nil {
+ return err
+ }
+ s.Host = h
+ s.Port = p
+ s.httpServerL = listener
+ }
+
+ if s.hasScheme(schemeHTTPS) {
+ tlsListener, err := net.Listen("tcp", net.JoinHostPort(s.TLSHost, strconv.Itoa(s.TLSPort)))
+ if err != nil {
+ return err
+ }
+
+ sh, sp, err := swag.SplitHostPort(tlsListener.Addr().String())
+ if err != nil {
+ return err
+ }
+ s.TLSHost = sh
+ s.TLSPort = sp
+ s.httpsServerL = tlsListener
+ }
+
+ s.hasListeners = true
+ return nil
+}
+
+// Shutdown server and clean up resources
+func (s *Server) Shutdown() error {
+ if atomic.CompareAndSwapInt32(&s.shuttingDown, 0, 1) {
+ close(s.shutdown)
+ }
+ return nil
+}
+
+func (s *Server) handleShutdown(wg *sync.WaitGroup, serversPtr *[]*http.Server) {
+ // wg.Done must occur last, after s.api.ServerShutdown()
+ // (to preserve old behaviour)
+ defer wg.Done()
+
+ <-s.shutdown
+
+ servers := *serversPtr
+
+ ctx, cancel := context.WithTimeout(context.TODO(), s.GracefulTimeout)
+ defer cancel()
+
+ // first execute the pre-shutdown hook
+ s.api.PreServerShutdown()
+
+ shutdownChan := make(chan bool)
+ for i := range servers {
+ server := servers[i]
+ go func() {
+ var success bool
+ defer func() {
+ shutdownChan <- success
+ }()
+ if err := server.Shutdown(ctx); err != nil {
+ // Error from closing listeners, or context timeout:
+ s.Logf("HTTP server Shutdown: %v", err)
+ } else {
+ success = true
+ }
+ }()
+ }
+
+ // Wait until all listeners have successfully shut down before calling ServerShutdown
+ success := true
+ for range servers {
+ success = success && <-shutdownChan
+ }
+ if success {
+ s.api.ServerShutdown()
+ }
+}
+
+// GetHandler returns a handler useful for testing
+func (s *Server) GetHandler() http.Handler {
+ return s.handler
+}
+
+// SetHandler allows for setting a http handler on this server
+func (s *Server) SetHandler(handler http.Handler) {
+ s.handler = handler
+}
+
+// UnixListener returns the domain socket listener
+func (s *Server) UnixListener() (net.Listener, error) {
+ if !s.hasListeners {
+ if err := s.Listen(); err != nil {
+ return nil, err
+ }
+ }
+ return s.domainSocketL, nil
+}
+
+// HTTPListener returns the http listener
+func (s *Server) HTTPListener() (net.Listener, error) {
+ if !s.hasListeners {
+ if err := s.Listen(); err != nil {
+ return nil, err
+ }
+ }
+ return s.httpServerL, nil
+}
+
+// TLSListener returns the https listener
+func (s *Server) TLSListener() (net.Listener, error) {
+ if !s.hasListeners {
+ if err := s.Listen(); err != nil {
+ return nil, err
+ }
+ }
+ return s.httpsServerL, nil
+}
+
+func handleInterrupt(once *sync.Once, s *Server) {
+ once.Do(func() {
+ for range s.interrupt {
+ if s.interrupted {
+ s.Logf("Server already shutting down")
+ continue
+ }
+ s.interrupted = true
+ s.Logf("Shutting down... ")
+ if err := s.Shutdown(); err != nil {
+ s.Logf("HTTP server Shutdown: %v", err)
+ }
+ }
+ })
+}
+
+func signalNotify(interrupt chan<- os.Signal) {
+ signal.Notify(interrupt, syscall.SIGINT, syscall.SIGTERM)
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/server/urlbuilder.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/urlbuilder.gotmpl
new file mode 100644
index 000000000..5d6010c0c
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/server/urlbuilder.gotmpl
@@ -0,0 +1,213 @@
+{{ define "queryparambuilder" }}
+{{ if not .IsArray }}{{ template "simplequeryparambuilder" . }}{{ else }}{{ template "slicequeryparambuilder" . }}{{ end }}
+{{- end }}
+{{ define "simplequeryparambuilder" }}
+{{ if .IsNullable -}}
+var {{ varname .ID }}Q string
+if {{ .ReceiverName }}.{{ pascalize .ID }} != nil {
+ {{ varname .ID }}Q = {{ if .Formatter }}{{ .Formatter }}(*{{ .ReceiverName }}.{{ pascalize .ID }}){{ else }}{{ if not .IsCustomFormatter }}*{{ end }}{{ .ReceiverName }}.{{ pascalize .ID }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+}
+{{ else }}{{ varname .ID }}Q := {{ if .Formatter }}{{ .Formatter }}({{ .ReceiverName }}.{{ pascalize .ID }}){{ else }}{{ .ReceiverName }}.{{ pascalize .ID }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+{{ end -}}
+if {{ varname .ID }}Q != "" {
+ qs.Set({{ printf "%q" .Name }}, {{ varname .ID }}Q)
+}
+{{ end }}
+{{ define "sliceitemqueryparambuilder" }}
+{{ if .IsNullable -}}
+var {{ .ValueExpression }}S string
+if {{ .ValueExpression }} != nil {
+ {{ .ValueExpression }}S = {{ if .Formatter }}{{ .Formatter }}(*{{ .ValueExpression }}){{ else }}*{{ .ValueExpression }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+}
+{{ else -}}
+{{ .ValueExpression }}S := {{ if .Formatter }}{{ .Formatter }}({{ .ValueExpression }}){{ else }}{{ .ValueExpression }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+{{ end -}}
+if {{ .ValueExpression }}S != "" {
+ {{ .ValueExpression }}R = append({{ .ValueExpression }}R, {{ .ValueExpression }}S)
+}
+{{ end }}
+{{ define "slicequeryparambuilder" }}
+var {{ .Child.ValueExpression }}R []string
+for _, {{ .Child.ValueExpression }} := range {{ .ValueExpression }} {
+ {{- if not .Child.IsArray }}{{ template "sliceitemqueryparambuilder" .Child }}{{ else }}{{ template "slicequeryparambuilder" .Child }}{{ end -}}
+}
+
+{{ if not .Child.Parent -}}
+{{ varname .ID }} := swag.JoinByFormat({{ .Child.ValueExpression }}R, {{ printf "%q" .CollectionFormat }})
+{{ if eq .CollectionFormat "multi" }}
+for _, qsv := range {{ varname .ID }} {
+ qs.Add({{ printf "%q" .Name }}, qsv)
+}
+{{ else }}
+if len({{ varname .ID }}) > 0 {
+ qsv := {{ varname .ID }}[0]
+ if qsv != "" {
+ qs.Set({{ printf "%q" .Name }}, qsv)
+ }
+}
+{{ end }}
+{{ else -}}
+{{ .ValueExpression }}S := swag.JoinByFormat({{ .Child.ValueExpression }}R, {{ printf "%q" .CollectionFormat }})
+if len({{ .ValueExpression }}S) > 0 {
+ {{ .ValueExpression }}Ss := {{ .ValueExpression }}S[0]
+ if {{ .ValueExpression }}Ss != "" {
+ {{ .ValueExpression }}R = append({{ .ValueExpression }}R, {{ .ValueExpression }}Ss)
+ }
+}
+{{ end -}}
+{{ end -}}
+{{ define "slicepathparambuilder" }}
+var {{ .Child.ValueExpression }}R []string
+for _, {{ .Child.ValueExpression }} := range {{ .ValueExpression }} {
+ {{- if not .Child.IsArray }}{{ template "sliceitemqueryparambuilder" .Child }}{{ else }}{{ template "slicepathparambuilder" .Child }}{{ end -}}
+}
+
+{{ if not .Child.Parent -}}
+{{ varname .ID }} := swag.JoinByFormat({{ .Child.ValueExpression }}R, {{ printf "%q" .CollectionFormat }})
+if len({{ varname .ID }}) > 0 {
+ psv := {{ varname .ID }}[0]
+ if psv != "" {
+ _path = strings.Replace(_path, "{{ printf "{%s}" .Name }}", psv, -1)
+ } else {
+ return nil, errors.New("{{ camelize .ID }} is required on {{ pascalize $.Name }}URL")
+ }
+}
+{{ else -}}
+{{ .ValueExpression }}S := swag.JoinByFormat({{ .Child.ValueExpression }}R, {{ printf "%q" .CollectionFormat }})
+if len({{ .ValueExpression }}S) > 0 {
+ {{ .ValueExpression }}Ss := {{ .ValueExpression }}S[0]
+ if {{ .ValueExpression }}Ss != "" {
+ {{ .ValueExpression }}R = append({{ .ValueExpression }}R, {{ .ValueExpression }}Ss)
+ }
+}
+{{ end -}}
+{{ end }}
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .Package }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the generate command
+
+import (
+ "fmt"
+ "errors"
+ "net/url"
+ golangswaggerpaths "path"
+ "strings"
+
+ "github.com/go-openapi/strfmt"
+ "github.com/go-openapi/swag"
+)
+
+// {{ pascalize .Name }}URL generates a URL for the {{ humanize .Name }} operation
+type {{ pascalize .Name }}URL struct {
+ {{ range .PathParams }}
+ {{ pascalize .ID }} {{.GoType}}
+ {{- end }}
+ {{ range .QueryParams }}
+ {{ pascalize .ID }} {{ if and (not .IsArray) .IsNullable }}*{{ end }}{{.GoType}}
+ {{- end }}
+
+ _basePath string
+ {{ if or (gt (len .PathParams ) 0) (gt (len .QueryParams) 0) -}}
+ // avoid unkeyed usage
+ _ struct{}
+ {{- end }}
+
+}
+
+// WithBasePath sets the base path for this url builder, only required when it's different from the
+// base path specified in the swagger spec.
+// When the value of the base path is an empty string, the base path specified in the swagger spec is used instead.
+func ({{ .ReceiverName }} *{{ pascalize .Name }}URL) WithBasePath(bp string) *{{pascalize .Name}}URL {
+ {{ .ReceiverName }}.SetBasePath(bp)
+ return {{ .ReceiverName }}
+}
+
+
+// SetBasePath sets the base path for this url builder, only required when it's different from the
+// base path specified in the swagger spec.
+// When the value of the base path is an empty string, the base path specified in the swagger spec is used instead.
+func ({{ .ReceiverName }} *{{ pascalize .Name }}URL) SetBasePath(bp string) {
+ {{ .ReceiverName }}._basePath = bp
+}
+
+// Build a url path and query string
+func ({{ .ReceiverName }} *{{ pascalize .Name }}URL) Build() (*url.URL, error) {
+ var _result url.URL
+
+ var _path = {{ printf "%q" .Path }}
+ {{ range .PathParams }}{{ if .IsArray }}
+ {{ template "slicepathparambuilder" . -}}
+ {{ else }}
+ {{ varname .ID }} := {{ if .Formatter }}{{ .Formatter }}({{ .ReceiverName }}.{{ pascalize .ID }}){{ else }}{{ .ReceiverName }}.{{ pascalize .ID }}{{ if .IsCustomFormatter }}.String(){{end}}{{end}}
+ if {{ varname .ID }} != "" {
+ _path = strings.Replace(_path, "{{ printf "{%s}" .Name }}", {{ varname .ID }}, -1)
+ } else {
+ return nil, errors.New("{{ camelize .ID }} is required on {{ pascalize $.Name }}URL")
+ }
+ {{ end }}
+
+ {{- end }}
+ _basePath := {{ .ReceiverName }}._basePath
+ {{ if .BasePath }}if _basePath == "" {
+ _basePath = {{ printf "%q" .BasePath }}
+ }
+ {{ end -}}
+ _result.Path = golangswaggerpaths.Join(_basePath, _path)
+
+ {{ if gt (len .QueryParams) 0 -}}
+ qs := make(url.Values)
+ {{ range .QueryParams }}
+ {{ template "queryparambuilder" . -}}
+ {{- end }}
+ _result.RawQuery = qs.Encode()
+ {{- end }}
+
+ return &_result, nil
+}
+
+// Must is a helper function to panic when the url builder returns an error
+func ({{ .ReceiverName }} *{{ pascalize .Name }}URL) Must(u *url.URL, err error) *url.URL {
+ if err != nil {
+ panic(err)
+ }
+ if u == nil {
+ panic("url can't be nil")
+ }
+ return u
+}
+
+// String returns the string representation of the path with query string
+func ({{ .ReceiverName }} *{{ pascalize .Name }}URL) String() string {
+ return {{ .ReceiverName }}.Must({{ .ReceiverName }}.Build()).String()
+}
+
+// BuildFull builds a full url with scheme, host, path and query string
+func ({{ .ReceiverName }} *{{ pascalize .Name }}URL) BuildFull(scheme, host string) (*url.URL, error) {
+ if scheme == "" {
+ return nil, errors.New("scheme is required for a full url on {{ pascalize .Name }}URL")
+ }
+ if host == "" {
+ return nil, errors.New("host is required for a full url on {{ pascalize .Name }}URL")
+ }
+
+ base, err := {{ .ReceiverName }}.Build()
+ if err != nil {
+ return nil, err
+ }
+
+ base.Scheme = scheme
+ base.Host = host
+ return base, nil
+}
+
+// StringFull returns the string representation of a complete url
+func ({{ .ReceiverName }} *{{ pascalize .Name }}URL) StringFull(scheme, host string) string {
+ return {{ .ReceiverName }}.Must({{ .ReceiverName }}.BuildFull(scheme, host)).String()
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/simpleschema/defaultsinit.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/simpleschema/defaultsinit.gotmpl
new file mode 100644
index 000000000..a42f1cf2f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/simpleschema/defaultsinit.gotmpl
@@ -0,0 +1,23 @@
+{{- if and .IsPrimitive .IsCustomFormatter (not (stringContains .Zero "(\"")) }}
+ if err := {{ varname .ID}}Default.UnmarshalText([]byte({{ printf "%q" .Default }})) ; err != nil {
+ msg := fmt.Sprintf("invalid default value for {{ varname .ID }}: %v",err)
+ panic(msg) // panics if the specification is invalid
+ }
+{{- else if .IsArray }}
+ {{- if or ( and .Child.IsPrimitive .Child.IsCustomFormatter ) .Child.IsArray }}
+ if err := json.Unmarshal([]byte(`{{printf "%s" (json .Default)}}`), &{{ varname .ID }}Default); err != nil {
+ msg := fmt.Sprintf("invalid default value for {{ varname .ID }}: %v",err)
+ panic(msg) // panics if the specification is invalid
+ }
+ {{- else if and (not .Child.IsPrimitive) (not .Child.IsArray) }} {{/* shouldn't get there: guard */}}
+ if err := json.Unmarshal([]byte(`{{printf "%s" (json .Default)}}`), &{{ varname .ID }}Default); err != nil {
+ msg := fmt.Sprintf("invalid default value for {{ varname .ID }}: %v",err)
+ panic(msg) // panics if the specification is invalid
+ }
+ {{- end }}
+{{- else if not .IsPrimitive }} {{/* shouldn't get there: guard (defaults to letting json figure out) */}}
+ if err := json.Unmarshal([]byte(`{{printf "%s" (json .Default)}}`), &{{ varname .ID }}Default); err != nil {
+ msg := fmt.Sprintf("invalid default value for {{ varname .ID }}: %v",err)
+ panic(msg) // panics if the specification is invalid
+ }
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/simpleschema/defaultsvar.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/simpleschema/defaultsvar.gotmpl
new file mode 100644
index 000000000..cfb9f80e6
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/simpleschema/defaultsvar.gotmpl
@@ -0,0 +1,29 @@
+{{- varname .ID}}Default
+{{- if .IsPrimitive }}
+ {{- print " " }}={{ print " " }}
+ {{- if .IsCustomFormatter }}
+ {{- if stringContains .Zero "(\"" }}
+ {{- .GoType }}({{ printf "%#v" .Default }}){{/* strfmt type initializer that takes string */}}
+ {{- else }}
+ {{- .Zero }}{{/* strfmt type initializer that requires UnmarshalText(), e.g. Date, Datetime, Duration */}}
+ {{- end }}
+ {{- else }}
+ {{- .GoType }}({{ printf "%#v" .Default }}){{/* regular go primitive type initializer */}}
+ {{- end }}
+{{- else if .IsArray }}{{/* do not initialize from possible defaults in nested arrays */}}
+ {{- if .Child.IsPrimitive }}
+ {{- print " " }}={{ print " " }}
+ {{- if .Child.IsCustomFormatter }}
+ {{- .Zero }}{{/* initialization strategy with UnmarshalText() */}}
+ {{- else }}
+ {{- .GoType }}{{ arrayInitializer .Default }}{{/* regular go primitive type initializer: simple slice initializer */}}
+ {{- end }}
+ {{- else if .Child.IsArray }}
+ {{- print " " }}={{ print " " }}
+ {{- .Zero }}{{/* initialization strategy with json.Unmarshal() */}}
+ {{- else }}
+ {{- print " " }}{{ .GoType }}{{/* shouldn't have that: simple schema is either primitive or array */}}
+ {{- end }}
+{{- else }}
+ {{- print " " }}{{ .GoType }}{{/* shouldn't have that: simple schema is either primitive or array */}}
+{{- end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/structfield.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/structfield.gotmpl
new file mode 100644
index 000000000..c8e235dc3
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/structfield.gotmpl
@@ -0,0 +1,41 @@
+{{ define "structfield" }}
+ {{- if not $.IsBaseType -}}
+ // {{ template "docstring" . }}
+ {{- template "propertyValidationDocString" .}}
+ {{- end}}
+{{ pascalize .Name}} {{ template "schemaType" . }} {{ .PrintTags }}
+{{ end }}
+
+{{- define "tuplefield" }}
+ {{- if not $.IsBaseType -}}
+ // {{ template "docstring" . }}
+ {{- template "propertyValidationDocString" .}}
+{{ end }}
+{{- pascalize .Name}} {{ template "schemaType" . }} `json:"-"
+{{- if .CustomTag }} {{ .CustomTag }}{{ end }}` // custom serializer
+{{ end }}
+
+{{- define "structfieldIface" }}
+ {{- if not $.IsBaseType -}}
+ // {{ template "docstring" . }}
+ {{- template "propertyValidationDocString" .}}
+ {{- end }}
+{{ pascalize .Name}}() {{ template "schemaType" . }}
+Set{{ pascalize .Name}}({{ template "schemaType" . }})
+{{ end }}
+{{ define "tuplefieldIface" }}
+ {{- if not $.IsBaseType -}}
+ // {{ template "docstring" . }}
+ {{- template "propertyValidationDocString" . }}
+{{ end }}
+{{- pascalize .Name}}() {{ template "schemaType" . }}
+Set{{ pascalize .Name}}({{ template "schemaType" . }})
+{{ end }}
+
+{{- define "privstructfield" }}
+ {{- camelize .Name}}Field {{ template "schemaType" . }}
+{{ end }}
+
+{{- define "privtuplefield" }}
+ {{- camelize .Name}}Field {{ template "schemaType" . }}
+{{ end }}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/swagger_json_embed.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/swagger_json_embed.gotmpl
new file mode 100644
index 000000000..5bb9f1a4e
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/swagger_json_embed.gotmpl
@@ -0,0 +1,30 @@
+// Code generated by go-swagger; DO NOT EDIT.
+
+
+{{ if .Copyright -}}// {{ comment .Copyright -}}{{ end }}
+
+
+package {{ .APIPackage }}
+
+// This file was generated by the swagger tool.
+// Editing this file might prove futile when you re-run the swagger generate command
+
+import (
+ "encoding/json"
+
+ {{ imports .DefaultImports }}
+ {{ imports .Imports }}
+)
+
+
+var (
+ // SwaggerJSON embedded version of the swagger document used at generation time
+ SwaggerJSON json.RawMessage
+ // FlatSwaggerJSON embedded flattened version of the swagger document used at generation time
+ FlatSwaggerJSON json.RawMessage
+)
+
+func init() {
+ SwaggerJSON = json.RawMessage([]byte(`{{ .SwaggerJSON }}`))
+ FlatSwaggerJSON = json.RawMessage([]byte(`{{ .FlatSwaggerJSON }}`))
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/customformat.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/customformat.gotmpl
new file mode 100644
index 000000000..354075a90
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/customformat.gotmpl
@@ -0,0 +1,3 @@
+if err := validate.FormatOf({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ printf "%q" .SwaggerFormat }}, {{ .ToString }}, formats); err != nil {
+ return err
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/maximum.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/maximum.gotmpl
new file mode 100644
index 000000000..993f7344f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/maximum.gotmpl
@@ -0,0 +1,23 @@
+{{- if or (hasPrefix .UnderlyingType "int") }}
+ {{- if and (hasPrefix .UnderlyingType "int64") (not .IsAliased) }}
+if err := validate.MaximumInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil {
+ {{- else }}
+if err := validate.MaximumInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, int64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil {
+ {{- end }}
+{{- else }}
+ {{- if hasPrefix .UnderlyingType "uint" }}
+ {{- if and (hasPrefix .UnderlyingType "uint64") (not .IsAliased) }}
+if err := validate.MaximumUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil {
+ {{- else }}
+if err := validate.MaximumUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, uint64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil {
+ {{- end }}
+ {{- else }}
+ {{- if and (eq .UnderlyingType "float64") (not .IsAliased) }}
+if err := validate.Maximum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil {
+ {{- else }}
+if err := validate.Maximum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, float64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Maximum }}, {{.ExclusiveMaximum }}); err != nil {
+ {{- end }}
+ {{- end }}
+{{- end }}
+ return err
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/minimum.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/minimum.gotmpl
new file mode 100644
index 000000000..626c207cb
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/minimum.gotmpl
@@ -0,0 +1,23 @@
+{{- if hasPrefix .UnderlyingType "int" }}
+ {{- if and (hasPrefix .UnderlyingType "int64") (not .IsAliased) }}
+if err := validate.MinimumInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil {
+ {{- else }}
+if err := validate.MinimumInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, int64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil {
+ {{- end }}
+{{- else }}
+ {{- if hasPrefix .UnderlyingType "uint" }}
+ {{- if and (hasPrefix .UnderlyingType "uint64") (not .IsAliased) }}
+if err := validate.MinimumUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil {
+ {{- else }}
+if err := validate.MinimumUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, uint64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil {
+ {{- end }}
+ {{- else }}
+ {{- if and (eq .UnderlyingType "float64") (not .IsAliased) }}
+if err := validate.Minimum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil {
+ {{- else }}
+if err := validate.Minimum({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, float64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.Minimum }}, {{.ExclusiveMinimum }}); err != nil {
+ {{- end }}
+ {{- end }}
+{{- end }}
+ return err
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/multipleOf.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/multipleOf.gotmpl
new file mode 100644
index 000000000..28796852d
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/multipleOf.gotmpl
@@ -0,0 +1,23 @@
+{{- if and (hasPrefix .UnderlyingType "int") (isInteger .MultipleOf) }}{{/* if the type is an integer, but the multiple factor is not, fall back to the float64 version of the validator */}}
+ {{- if and (hasPrefix .UnderlyingType "int64") (not .IsAliased) }}
+if err := validate.MultipleOfInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.MultipleOf }}); err != nil {
+ {{- else }}
+if err := validate.MultipleOfInt({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, int64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.MultipleOf }}); err != nil {
+ {{- end }}
+{{- else }}
+ {{- if and (hasPrefix .UnderlyingType "uint") (isInteger .MultipleOf) }}
+ {{- if and (hasPrefix .UnderlyingType "uint64") (not .IsAliased) }}
+if err := validate.MultipleOfUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.MultipleOf }}); err != nil {
+ {{- else }}
+if err := validate.MultipleOfUint({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, uint64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.MultipleOf }}); err != nil {
+ {{- end }}
+ {{- else }}
+ {{- if and (eq .UnderlyingType "float64") (not .IsAliased) }}
+if err := validate.MultipleOf({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, {{ if .IsNullable }}*{{ end }}{{.ValueExpression }}, {{.MultipleOf }}); err != nil {
+ {{- else }}
+if err := validate.MultipleOf({{ if .Path }}{{ .Path }}{{ else }}""{{ end }}, {{ printf "%q" .Location }}, float64({{ if .IsNullable }}*{{ end }}{{.ValueExpression }}), {{.MultipleOf }}); err != nil {
+ {{- end }}
+ {{- end }}
+{{- end }}
+ return err
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/primitive.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/primitive.gotmpl
new file mode 100644
index 000000000..35238d784
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/primitive.gotmpl
@@ -0,0 +1,29 @@
+{{if .MinLength}}
+if err := validate.MinLength({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .ToString }}, {{.MinLength}}); err != nil {
+ return err
+}
+{{end}}
+{{if .MaxLength}}
+if err := validate.MaxLength({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .ToString }}, {{.MaxLength}}); err != nil {
+ return err
+}
+{{end}}
+{{if .Pattern}}
+if err := validate.Pattern({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ .ToString }}, `{{escapeBackticks .Pattern}}`); err != nil {
+ return err
+}
+{{end}}
+{{if .Minimum}}
+ {{ template "validationMinimum" . }}
+{{end}}
+{{if .Maximum}}
+ {{ template "validationMaximum" . }}
+{{end}}
+{{if .MultipleOf}}
+ {{ template "validationMultipleOf" . }}
+{{end}}
+{{if .Enum}}
+if err := validate.EnumCase({{ if .Path }}{{ .Path }}{{else}}""{{end}}, {{ printf "%q" .Location }}, {{ if and (not .IsArray) (not .HasDiscriminator) (not .IsInterface) .IsNullable }}*{{ end }}{{.ValueExpression}}{{ if .IsCustomFormatter }}.String(){{ end }}, {{ printf "%#v" .Enum}}, {{ if .IsEnumCI }}false{{ else }}true{{ end }}); err != nil {
+ return err
+}
+{{end}}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/structfield.gotmpl b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/structfield.gotmpl
new file mode 100644
index 000000000..8378c4615
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/templates/validation/structfield.gotmpl
@@ -0,0 +1,62 @@
+{{ define "propertyValidationDocString" }}
+
+{{- if .Required }}
+// Required: true
+{{- end }}
+
+{{- if .ReadOnly }}
+// Read Only: true
+{{- end }}
+
+{{- if .Maximum }}
+// Maximum: {{ if .ExclusiveMaximum }}< {{ end }}{{ .Maximum }}
+{{- end }}
+
+{{- if .Minimum }}
+// Minimum: {{ if .ExclusiveMinimum }}> {{ end }}{{ .Minimum }}
+{{- end }}
+
+{{- if .MultipleOf }}
+// Multiple Of: {{ .MultipleOf }}
+{{- end }}
+
+{{- if .MaxLength }}
+// Max Length: {{ .MaxLength }}
+{{- end }}
+
+{{- if .MinLength }}
+// Min Length: {{ .MinLength }}
+{{- end }}
+
+{{- if .Pattern }}
+// Pattern: {{ .Pattern }}
+{{- end }}
+
+{{- if .MaxItems }}
+// Max Items: {{ .MaxItems }}
+{{- end }}
+
+{{- if .MinItems }}
+// Min Items: {{ .MinItems }}
+{{- end }}
+
+{{- if .MinProperties }}
+// Min Properties: {{ .MinProperties }}
+{{- end }}
+
+{{- if .MaxProperties }}
+// Max Properties: {{ .MaxProperties }}
+{{- end }}
+
+{{- if .UniqueItems }}
+// Unique: true
+{{- end }}
+
+{{- if .IsCustomFormatter }}
+// Format: {{ .SwaggerFormat }}
+{{- end }}
+
+{{- if .Enum }}
+// Enum: {{ printf "%v" .Enum }}
+{{- end }}
+{{- end}}
diff --git a/vendor/github.com/go-swagger/go-swagger/generator/types.go b/vendor/github.com/go-swagger/go-swagger/generator/types.go
new file mode 100644
index 000000000..d2a6a4f5e
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/generator/types.go
@@ -0,0 +1,1284 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package generator
+
+import (
+ "fmt"
+ "log"
+ "path/filepath"
+ "reflect"
+ "strings"
+
+ "github.com/go-openapi/loads"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+ "github.com/kr/pretty"
+ "github.com/mitchellh/mapstructure"
+)
+
+const (
+ iface = "interface{}"
+ array = "array"
+ file = "file"
+ number = "number"
+ integer = "integer"
+ boolean = "boolean"
+ str = "string"
+ object = "object"
+ binary = "binary"
+ body = "body"
+ b64 = "byte"
+)
+
+// Extensions supported by go-swagger
+const (
+ xClass = "x-class" // class name used by discriminator
+ xGoCustomTag = "x-go-custom-tag" // additional tag for serializers on struct fields
+ xGoName = "x-go-name" // name of the generated go variable
+ xGoType = "x-go-type" // reuse existing type (do not generate)
+ xIsNullable = "x-isnullable"
+ xNullable = "x-nullable" // turns the schema into a pointer
+ xOmitEmpty = "x-omitempty"
+ xSchemes = "x-schemes" // additional schemes supported for operations (server generation)
+ xOrder = "x-order" // sort order for properties (or any schema)
+ xGoJSONString = "x-go-json-string"
+ xGoEnumCI = "x-go-enum-ci" // make string enumeration case-insensitive
+
+ xGoOperationTag = "x-go-operation-tag" // additional tag to override generation in operation groups
+)
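+
+// As an illustration (a hypothetical spec snippet, not taken from this
+// repository), a property might combine several of these extensions:
+//
+//	phone_number:
+//	  type: string
+//	  x-go-name: PhoneNumber
+//	  x-go-custom-tag: gorm:"index"
+//	  x-omitempty: true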
+
+// swaggerTypeName contains a mapping from go type to swagger type or format
+var swaggerTypeName map[string]string
+
+func initTypes() {
+ swaggerTypeName = make(map[string]string)
+ for k, v := range typeMapping {
+ swaggerTypeName[v] = k
+ }
+}
+
+func simpleResolvedType(tn, fmt string, items *spec.Items, v *spec.CommonValidations) (result resolvedType) {
+ result.SwaggerType = tn
+ result.SwaggerFormat = fmt
+
+ defer func() {
+ guardValidations(result.SwaggerType, v)
+ }()
+
+ if tn == file {
+ // special case of swagger type "file", rendered as io.ReadCloser interface
+ result.IsPrimitive = true
+ result.GoType = formatMapping[str][binary]
+ result.IsStream = true
+ return
+ }
+
+ if fmt != "" {
+ defer func() {
+ guardFormatConflicts(result.SwaggerFormat, v)
+ }()
+
+ fmtn := strings.ReplaceAll(fmt, "-", "")
+ if fmm, ok := formatMapping[tn]; ok {
+ if tpe, ok := fmm[fmtn]; ok {
+ result.GoType = tpe
+ result.IsPrimitive = true
+ _, result.IsCustomFormatter = customFormatters[tpe]
+ // special case of swagger format "binary", rendered as io.ReadCloser interface
+ // TODO(fredbi): should set IsCustomFormatter=false when binary
+ result.IsStream = fmt == binary
+ // special case of swagger format "byte", rendered as a strfmt.Base64 type: no validation
+ result.IsBase64 = fmt == b64
+ return
+ }
+ }
+ }
+
+ if tpe, ok := typeMapping[tn]; ok {
+ result.GoType = tpe
+ _, result.IsPrimitive = primitives[tpe]
+ result.IsPrimitive = ok
+ return
+ }
+
+ if tn == array {
+ result.IsArray = true
+ result.IsPrimitive = false
+ result.IsCustomFormatter = false
+ result.IsNullable = false
+ if items == nil {
+ result.GoType = "[]" + iface
+ return
+ }
+ res := simpleResolvedType(items.Type, items.Format, items.Items, &items.CommonValidations)
+ result.GoType = "[]" + res.GoType
+ return
+ }
+ result.GoType = tn
+ _, result.IsPrimitive = primitives[tn]
+ return
+}
+
+func newTypeResolver(pkg, _ string, doc *loads.Document) *typeResolver {
+ resolver := typeResolver{ModelsPackage: pkg, Doc: doc}
+ resolver.KnownDefs = make(map[string]struct{}, len(doc.Spec().Definitions))
+ for k, sch := range doc.Spec().Definitions {
+ tpe, _, _ := resolver.knownDefGoType(k, sch, nil)
+ resolver.KnownDefs[tpe] = struct{}{}
+ }
+ return &resolver
+}
+
+// knownDefGoType returns go type, package and package alias for definition
+func (t typeResolver) knownDefGoType(def string, schema spec.Schema, clear func(string) string) (string, string, string) {
+ debugLog("known def type: %q", def)
+ ext := schema.Extensions
+ nm, hasGoName := ext.GetString(xGoName)
+
+ if hasGoName {
+ debugLog("known def type %s named from %s as %q", def, xGoName, nm)
+ def = nm
+ }
+ extType, isExternalType := t.resolveExternalType(ext)
+ if !isExternalType || extType.Embedded {
+ if clear == nil {
+ debugLog("known def type no clear: %q", def)
+ return def, t.definitionPkg, ""
+ }
+ debugLog("known def type clear: %q -> %q", def, clear(def))
+ return clear(def), t.definitionPkg, ""
+ }
+
+ // external type definition trumps regular type resolution
+ if extType.Import.Alias == "" {
+ debugLog("type %s imported as external type %s, assumed in current package", def, extType.Type)
+ return extType.Type, extType.Import.Package, extType.Import.Alias
+ }
+ debugLog("type %s imported as external type from %s as %s.%s", def, extType.Import.Package, extType.Import.Alias, extType.Type)
+ return extType.Import.Alias + "." + extType.Type, extType.Import.Package, extType.Import.Alias
+}
+
+// x-go-type:
+//
+// type: mytype
+// import:
+// package:
+// alias:
+// hints:
+// kind: map|object|array|interface|primitive|stream|tuple
+// nullable: true|false
+// embedded: true
+type externalTypeDefinition struct {
+ Type string
+ Import struct {
+ Package string
+ Alias string
+ }
+ Hints struct {
+ Kind string
+ Nullable *bool
+ NoValidation *bool
+ }
+ Embedded bool
+}
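+
+// For illustration (hypothetical values, not from this repository), the
+// extension above could appear in a spec as:
+//
+//	definitions:
+//	  MyModel:
+//	    type: object
+//	    x-go-type:
+//	      type: MyType
+//	      import:
+//	        package: github.com/example/types
+//	        alias: extypes
+//	      hints:
+//	        kind: object
+//	        nullable: true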
+
+func hasExternalType(ext spec.Extensions) (*externalTypeDefinition, bool) {
+ v, ok := ext[xGoType]
+ if !ok {
+ return nil, false
+ }
+
+ var extType externalTypeDefinition
+ err := mapstructure.Decode(v, &extType)
+ if err != nil {
+ log.Printf("warning: x-go-type extension could not be decoded (%v). Skipped", v)
+ return nil, false
+ }
+
+ return &extType, true
+}
+
+func (t typeResolver) resolveExternalType(ext spec.Extensions) (*externalTypeDefinition, bool) {
+ extType, hasExt := hasExternalType(ext)
+ if !hasExt {
+ return nil, false
+ }
+
+ // NOTE:
+ // * basic deconfliction of the default alias
+ // * if no package is specified, defaults to models (as provided from CLI or default generation location for models)
+ toAlias := func(pkg string) string {
+ mangled := GoLangOpts().ManglePackageName(pkg, "")
+ return deconflictPkg(mangled, func(in string) string {
+ return in + "ext"
+ })
+ }
+
+ switch {
+ case extType.Import.Package != "" && extType.Import.Alias == "":
+ extType.Import.Alias = toAlias(extType.Import.Package)
+ case extType.Import.Package == "" && extType.Import.Alias != "":
+ extType.Import.Package = t.ModelsFullPkg
+ case extType.Import.Package == "" && extType.Import.Alias == "":
+ // in this case, the external type is assumed to be present in the current package.
+ // For completion, whenever this type is used in anonymous types declared by operations,
+ // we assume this is the package where models are expected to be found.
+ extType.Import.Package = t.ModelsFullPkg
+ if extType.Import.Package != "" {
+ extType.Import.Alias = toAlias(extType.Import.Package)
+ }
+ }
+
+ debugLogAsJSON("known def external %s type", xGoType, extType)
+
+ return extType, true
+}
+
+type typeResolver struct {
+ Doc *loads.Document
+ ModelsPackage string // package alias (e.g. "models")
+ ModelsFullPkg string // fully qualified package (e.g. "github.com/example/models")
+ ModelName string
+ KnownDefs map[string]struct{}
+ // unexported fields
+ keepDefinitionsPkg string
+ knownDefsKept map[string]struct{}
+ definitionPkg string // pkg alias to fill in GenSchema.Pkg
+}
+
+// NewWithModelName clones a type resolver and specifies a new model name
+func (t *typeResolver) NewWithModelName(name string) *typeResolver {
+ tt := newTypeResolver(t.ModelsPackage, t.ModelsFullPkg, t.Doc)
+ tt.ModelName = name
+
+ // propagates kept definitions
+ tt.keepDefinitionsPkg = t.keepDefinitionsPkg
+ tt.knownDefsKept = t.knownDefsKept
+ tt.definitionPkg = t.definitionPkg
+ return tt
+}
+
+// withKeepDefinitionsPackage instructs the type resolver to keep the previously resolved package name for
+// definitions known at the moment it is first called.
+func (t *typeResolver) withKeepDefinitionsPackage(definitionsPackage string) *typeResolver {
+ t.keepDefinitionsPkg = definitionsPackage
+ t.knownDefsKept = make(map[string]struct{}, len(t.KnownDefs))
+ for k := range t.KnownDefs {
+ t.knownDefsKept[k] = struct{}{}
+ }
+ return t
+}
+
+// withDefinitionPackage sets the definition pkg used to fill in the GenSchema.Pkg field
+// for generated object/struct types.
+// The ModelsPackage field cannot replace definitionPkg, since ModelsPackage is prepended to .GoType,
+// while definitionPkg is only used to fill in the .Pkg field of GenSchema.
+func (t *typeResolver) withDefinitionPackage(pkg string) *typeResolver {
+ t.definitionPkg = pkg
+ return t
+}
+
+func (t *typeResolver) resolveSchemaRef(schema *spec.Schema, isRequired bool) (returns bool, result resolvedType, err error) {
+ if schema.Ref.String() == "" {
+ return
+ }
+ debugLog("resolving ref (anon: %t, req: %t) %s", false, isRequired, schema.Ref.String())
+
+ returns = true
+ var ref *spec.Schema
+ var er error
+
+ ref, er = spec.ResolveRef(t.Doc.Spec(), &schema.Ref)
+ if er != nil {
+ debugLog("error resolving ref %s: %v", schema.Ref.String(), er)
+ err = er
+ return
+ }
+
+ extType, isExternalType := t.resolveExternalType(schema.Extensions)
+ if isExternalType {
+ // deal with validations for an aliased external type
+ result.SkipExternalValidation = swag.BoolValue(extType.Hints.NoValidation)
+ }
+
+ res, er := t.ResolveSchema(ref, false, isRequired)
+ if er != nil {
+ err = er
+ return
+ }
+ result = res
+
+ tn := filepath.Base(schema.Ref.GetURL().Fragment)
+ tpe, pkg, alias := t.knownDefGoType(tn, *ref, t.goTypeName)
+ debugLog("type name %s, package %s, alias %s", tpe, pkg, alias)
+ if tpe != "" {
+ result.GoType = tpe
+ result.Pkg = pkg
+ result.PkgAlias = alias
+ }
+ result.HasDiscriminator = res.HasDiscriminator
+ result.IsBaseType = result.HasDiscriminator
+ result.IsNullable = result.IsNullable || t.isNullable(ref) // this has to be overridden for slices and maps
+ result.IsEnumCI = false
+ return
+}
+
+func (t *typeResolver) inferAliasing(result *resolvedType, _ *spec.Schema, isAnonymous bool, _ bool) {
+ if !isAnonymous && t.ModelName != "" {
+ result.AliasedType = result.GoType
+ result.IsAliased = true
+ result.GoType = t.goTypeName(t.ModelName)
+ result.Pkg = t.definitionPkg
+ }
+}
+
+func (t *typeResolver) resolveFormat(schema *spec.Schema, isAnonymous bool, isRequired bool) (returns bool, result resolvedType, err error) {
+
+ if schema.Format != "" {
+ // defaults to string
+ result.SwaggerType = str
+ if len(schema.Type) > 0 {
+ result.SwaggerType = schema.Type[0]
+ }
+
+ debugLog("resolving format (anon: %t, req: %t)", isAnonymous, isRequired)
+ schFmt := strings.ReplaceAll(schema.Format, "-", "")
+ if fmm, ok := formatMapping[result.SwaggerType]; ok {
+ if tpe, ok := fmm[schFmt]; ok {
+ returns = true
+ result.GoType = tpe
+ _, result.IsCustomFormatter = customFormatters[tpe]
+ }
+ }
+ if tpe, ok := typeMapping[schFmt]; !returns && ok {
+ returns = true
+ result.GoType = tpe
+ _, result.IsCustomFormatter = customFormatters[tpe]
+ }
+
+ result.SwaggerFormat = schema.Format
+ t.inferAliasing(&result, schema, isAnonymous, isRequired)
+ // special case of swagger format "binary", rendered as io.ReadCloser interface and is therefore not a primitive type
+ // TODO: should set IsCustomFormatter=false in this case.
+ result.IsPrimitive = schFmt != binary
+ result.IsStream = schFmt == binary
+ result.IsBase64 = schFmt == b64
+ // propagate extensions in resolvedType
+ result.Extensions = schema.Extensions
+
+ switch result.SwaggerType {
+ case str:
+ result.IsNullable = nullableStrfmt(schema, isRequired)
+ case number, integer:
+ result.IsNullable = nullableNumber(schema, isRequired)
+ default:
+ result.IsNullable = t.isNullable(schema)
+ }
+ }
+
+ guardFormatConflicts(schema.Format, schema)
+ return
+}
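+
+// For example (illustrative): "type: string, format: date-time" resolves to
+// the custom formatter strfmt.DateTime, while "format: binary" resolves to a
+// stream (io.ReadCloser) and "format: byte" to the base64 type strfmt.Base64.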
+
+// isNullable hints the generator as to whether to render the type with a pointer or not.
+//
+// A schema is deemed nullable (i.e. rendered by a pointer) when:
+// - a custom extension says it has to be so
+// - it is an object with properties
+// - it is a composed object (allOf)
+//
+// The interpretation of Required as a means to make a type nullable is carried out elsewhere.
+func (t *typeResolver) isNullable(schema *spec.Schema) bool {
+
+ if nullable, ok := t.isNullableOverride(schema); ok {
+ return nullable
+ }
+
+ return len(schema.Properties) > 0 || len(schema.AllOf) > 0
+}
+
+// isNullableOverride determines a nullable flag forced by an extension
+func (t *typeResolver) isNullableOverride(schema *spec.Schema) (bool, bool) {
+ check := func(extension string) (bool, bool) {
+ v, found := schema.Extensions[extension]
+ nullable, cast := v.(bool)
+ return nullable, found && cast
+ }
+
+ if nullable, ok := check(xIsNullable); ok {
+ return nullable, ok
+ }
+
+ if nullable, ok := check(xNullable); ok {
+ return nullable, ok
+ }
+
+ return false, false
+}
+
+func (t *typeResolver) firstType(schema *spec.Schema) string {
+ if len(schema.Type) == 0 || schema.Type[0] == "" {
+ return object
+ }
+ if len(schema.Type) > 1 {
+ // JSON-Schema multiple types, e.g. {"type": [ "object", "array" ]} are not supported.
+ // TODO: should keep the first _supported_ type, e.g. skip null
+ log.Printf("warning: JSON-Schema type definition as array with several types is not supported in %#v. Taking the first type: %s", schema.Type, schema.Type[0])
+ }
+ return schema.Type[0]
+}
+
+func (t *typeResolver) resolveArray(schema *spec.Schema, isAnonymous, isRequired bool) (result resolvedType, err error) {
+ debugLog("resolving array (anon: %t, req: %t)", isAnonymous, isRequired)
+
+ result.IsArray = true
+ result.IsNullable = false
+
+ if schema.AdditionalItems != nil {
+ result.HasAdditionalItems = (schema.AdditionalItems.Allows || schema.AdditionalItems.Schema != nil)
+ }
+
+ if schema.Items == nil {
+ result.GoType = "[]" + iface
+ result.SwaggerType = array
+ result.SwaggerFormat = ""
+ t.inferAliasing(&result, schema, isAnonymous, isRequired)
+
+ return
+ }
+
+ if len(schema.Items.Schemas) > 0 {
+ result.IsArray = false
+ result.IsTuple = true
+ result.SwaggerType = array
+ result.SwaggerFormat = ""
+ t.inferAliasing(&result, schema, isAnonymous, isRequired)
+
+ return
+ }
+
+ rt, er := t.ResolveSchema(schema.Items.Schema, true, false)
+ if er != nil {
+ err = er
+ return
+ }
+
+ // Override the general nullability rule from ResolveSchema() in array elements:
+ // - only complex items are nullable (when not discriminated, not forced by x-nullable)
+ // - arrays of allOf have non nullable elements when not forced by x-nullable
+ elem := schema.Items.Schema
+ if elem.Ref.String() != "" {
+ // drill into $ref to figure out whether we want the element type to be nullable or not
+ resolved, erf := spec.ResolveRef(t.Doc.Spec(), &elem.Ref)
+ if erf != nil {
+ debugLog("error resolving ref %s: %v", schema.Ref.String(), erf)
+ }
+ elem = resolved
+ }
+
+ debugLogAsJSON("resolved item for %s", rt.GoType, elem)
+ if nullable, ok := t.isNullableOverride(elem); ok {
+ debugLog("found nullable override in element %s: %t", rt.GoType, nullable)
+ rt.IsNullable = nullable
+ } else {
+ // this differs from isNullable for elements with AllOf
+ debugLog("no nullable override in element %s: Properties: %t, HasDiscriminator: %t", rt.GoType, len(elem.Properties) > 0, rt.HasDiscriminator)
+ rt.IsNullable = len(elem.Properties) > 0 && !rt.HasDiscriminator
+ }
+
+ result.GoType = "[]" + rt.GoType
+ if rt.IsNullable && !strings.HasPrefix(rt.GoType, "*") {
+ result.GoType = "[]*" + rt.GoType
+ }
+
+ result.ElemType = &rt
+ result.SwaggerType = array
+ result.SwaggerFormat = ""
+ result.IsEnumCI = hasEnumCI(schema.Extensions)
+ t.inferAliasing(&result, schema, isAnonymous, isRequired)
+ result.Extensions = schema.Extensions
+
+ return
+}
+
+func (t *typeResolver) goTypeName(nm string) string {
+ if len(t.knownDefsKept) > 0 {
+ // if a definitions package has been defined, already resolved definitions are
+ // always resolved against their original package (e.g. "models"), and not the
+ // current package.
+ // This allows complex anonymous extra schemas to reuse known definitions generated in another package.
+ if _, ok := t.knownDefsKept[nm]; ok {
+ return strings.Join([]string{t.keepDefinitionsPkg, swag.ToGoName(nm)}, ".")
+ }
+ }
+
+ if t.ModelsPackage == "" {
+ return swag.ToGoName(nm)
+ }
+ if _, ok := t.KnownDefs[nm]; ok {
+ return strings.Join([]string{t.ModelsPackage, swag.ToGoName(nm)}, ".")
+ }
+ return swag.ToGoName(nm)
+}
+
+func (t *typeResolver) resolveObject(schema *spec.Schema, isAnonymous bool) (result resolvedType, err error) {
+ debugLog("resolving object %s (anon: %t, req: %t)", t.ModelName, isAnonymous, false)
+
+ result.IsAnonymous = isAnonymous
+
+ result.IsBaseType = schema.Discriminator != ""
+ if !isAnonymous {
+ result.SwaggerType = object
+ tpe, pkg, alias := t.knownDefGoType(t.ModelName, *schema, t.goTypeName)
+ result.GoType = tpe
+ result.Pkg = pkg
+ result.PkgAlias = alias
+ }
+ if len(schema.AllOf) > 0 {
+ result.GoType = t.goTypeName(t.ModelName)
+ result.IsComplexObject = true
+ var isNullable bool
+ for _, sch := range schema.AllOf {
+ p := sch
+ if t.isNullable(&p) {
+ isNullable = true
+ }
+ }
+ if override, ok := t.isNullableOverride(schema); ok {
+ // prioritize x-nullable extensions
+ result.IsNullable = override
+ } else {
+ result.IsNullable = isNullable
+ }
+ result.SwaggerType = object
+ return
+ }
+
+ // if this schema has properties, build a map of property name to
+ // resolved type. This should also flag the object as anonymous;
+ // when a ref is found, the anonymous flag will be reset
+ if len(schema.Properties) > 0 {
+ result.IsNullable = t.isNullable(schema)
+ result.IsComplexObject = true
+ // no return here, still need to check for additional properties
+ }
+
+ // account for additional properties
+ if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
+ sch := schema.AdditionalProperties.Schema
+ et, er := t.ResolveSchema(sch, sch.Ref.String() == "", false)
+ if er != nil {
+ err = er
+ return
+ }
+
+ result.IsMap = !result.IsComplexObject
+
+ result.SwaggerType = object
+
+ if et.IsExternal {
+ // external AdditionalProperties are a special case because we look ahead into schemas
+ extType, _, _ := t.knownDefGoType(t.ModelName, *sch, t.goTypeName)
+ et.GoType = extType
+ }
+
+ // only complex map elements are nullable (when not forced by x-nullable)
+ // TODO: figure out if required to check when not discriminated like arrays?
+ et.IsNullable = !et.IsArray && t.isNullable(schema.AdditionalProperties.Schema)
+ if et.IsNullable {
+ result.GoType = "map[string]*" + et.GoType
+ } else {
+ result.GoType = "map[string]" + et.GoType
+ }
+
+ // Resolving nullability conflicts for:
+ // - map[][]...[]{items}
+ // - map[]{aliased type}
+ //
+ // when IsMap is true and the type is a distinct definition,
+ // aliased type or anonymous construct generated independently.
+ //
+ // IsMapNullOverride is to be handled by the generator for special cases
+ // where the map element is considered non nullable and the element itself is.
+ //
+ // This allows nullability to be assessed according to the context
+ needsOverride := result.IsMap && (et.IsArray || (sch.Ref.String() != "" || et.IsAliased || et.IsAnonymous))
+
+ if needsOverride {
+ var er error
+ if et.IsArray {
+ var it resolvedType
+ s := sch
+ // resolve the last items after nested arrays
+ for s.Items != nil && s.Items.Schema != nil {
+ it, er = t.ResolveSchema(s.Items.Schema, sch.Ref.String() == "", false)
+ if er != nil {
+ return
+ }
+ s = s.Items.Schema
+ }
+ // mark an override when nullable status conflicts, i.e. when the original type is not already nullable
+ if !it.IsAnonymous || it.IsAnonymous && it.IsNullable {
+ result.IsMapNullOverride = true
+ }
+ } else {
+ // this locks the generator on the local nullability status
+ result.IsMapNullOverride = true
+ }
+ }
+
+ t.inferAliasing(&result, schema, isAnonymous, false)
+ result.ElemType = &et
+ return
+ }
+
+ if len(schema.Properties) > 0 {
+ return
+ }
+
+ // an object without property and without AdditionalProperties schema is rendered as interface{}
+ result.IsMap = true
+ result.SwaggerType = object
+ result.IsNullable = false
+ // an object without properties but with MinProperties or MaxProperties is rendered as map[string]interface{}
+ result.IsInterface = len(schema.Properties) == 0 && !schema.Validations().HasObjectValidations()
+ if result.IsInterface {
+ result.GoType = iface
+ } else {
+ result.GoType = "map[string]interface{}"
+ }
+ return
+}
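+
+// For instance (illustrative): an additionalProperties schema that is an
+// object with properties resolves to map[string]*Elem, since complex map
+// elements are nullable, whereas a primitive element such as a plain string
+// resolves to map[string]string.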
+
+// nullableBool makes a boolean a pointer when we want to distinguish the zero value from no value set.
+// This is the case when:
+// - an x-nullable extension says so in the spec
+// - it is **not** a read-only property
+// - it is a required property
+// - it has a default value
+func nullableBool(schema *spec.Schema, isRequired bool) bool {
+ if nullable := nullableExtension(schema.Extensions); nullable != nil {
+ return *nullable
+ }
+ required := isRequired && schema.Default == nil && !schema.ReadOnly
+ optional := !isRequired && (schema.Default != nil || schema.ReadOnly)
+
+ return required || optional
+}
+
+// nullableNumber makes a number a pointer when we want to distinguish the zero value from no value set.
+// This is the case when:
+// - an x-nullable extension says so in the spec
+// - it is **not** a read-only property
+// - it is a required property
+// - boundaries defines the zero value as a valid value:
+// - there is a non-exclusive boundary set at the zero value of the type
+// - the [min,max] range crosses the zero value of the type
+func nullableNumber(schema *spec.Schema, isRequired bool) bool {
+ if nullable := nullableExtension(schema.Extensions); nullable != nil {
+ return *nullable
+ }
+ hasDefault := schema.Default != nil && !swag.IsZero(schema.Default)
+
+ isMin := schema.Minimum != nil && (*schema.Minimum != 0 || schema.ExclusiveMinimum)
+ bcMin := schema.Minimum != nil && *schema.Minimum == 0 && !schema.ExclusiveMinimum
+ isMax := schema.Minimum == nil && (schema.Maximum != nil && (*schema.Maximum != 0 || schema.ExclusiveMaximum))
+ bcMax := schema.Maximum != nil && *schema.Maximum == 0 && !schema.ExclusiveMaximum
+ isMinMax := (schema.Minimum != nil && schema.Maximum != nil && *schema.Minimum < *schema.Maximum)
+ bcMinMax := (schema.Minimum != nil && schema.Maximum != nil && (*schema.Minimum < 0 && 0 < *schema.Maximum))
+
+ nullable := !schema.ReadOnly && (isRequired || (hasDefault && !(isMin || isMax || isMinMax)) || bcMin || bcMax || bcMinMax)
+ return nullable
+}
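+
+// For example (illustrative): an optional integer with "minimum: 0"
+// (non-exclusive) admits zero as a valid value, so bcMin is true and the
+// property is rendered as a pointer; with "minimum: 1" the zero value is
+// invalid, so a plain integer suffices unless the property is required.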
+
+// nullableString makes a string nullable when we want to distinguish the zero value from no value set.
+// This is the case when:
+// - an x-nullable extension says so in the spec
+// - it is **not** a read-only property
+// - it is a required property
+// - it has a MinLength property set to 0
+// - it has a default other than "" (the zero for strings) and no MinLength or zero MinLength
+func nullableString(schema *spec.Schema, isRequired bool) bool {
+ if nullable := nullableExtension(schema.Extensions); nullable != nil {
+ return *nullable
+ }
+ hasDefault := schema.Default != nil && !swag.IsZero(schema.Default)
+
+ isMin := schema.MinLength != nil && *schema.MinLength != 0
+ bcMin := schema.MinLength != nil && *schema.MinLength == 0
+
+ nullable := !schema.ReadOnly && (isRequired || (hasDefault && !isMin) || bcMin)
+ return nullable
+}
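+
+// For example (illustrative): a string with "minLength: 0" explicitly allows
+// the empty string as a valid value, so bcMin is true and the field becomes
+// *string to distinguish "" from unset; with "minLength: 1" a plain string
+// suffices unless the property is required.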
+
+func nullableStrfmt(schema *spec.Schema, isRequired bool) bool {
+ notBinary := schema.Format != binary
+ if nullable := nullableExtension(schema.Extensions); nullable != nil && notBinary {
+ return *nullable
+ }
+ hasDefault := schema.Default != nil && !swag.IsZero(schema.Default)
+
+ nullable := !schema.ReadOnly && (isRequired || hasDefault)
+ return notBinary && nullable
+}
+
+func nullableExtension(ext spec.Extensions) *bool {
+ if ext == nil {
+ return nil
+ }
+
+ if boolPtr := boolExtension(ext, xNullable); boolPtr != nil {
+ return boolPtr
+ }
+
+ return boolExtension(ext, xIsNullable)
+}
+
+func boolExtension(ext spec.Extensions, key string) *bool {
+ if v, ok := ext[key]; ok {
+ if bb, ok := v.(bool); ok {
+ return &bb
+ }
+ }
+ return nil
+}
+
+func hasEnumCI(ve spec.Extensions) bool {
+ v, ok := ve[xGoEnumCI]
+ if !ok {
+ return false
+ }
+
+ isEnumCI, ok := v.(bool)
+ // All enumeration types are case-sensitive by default
+ return ok && isEnumCI
+}
+
+func (t *typeResolver) shortCircuitResolveExternal(tpe, pkg, alias string, extType *externalTypeDefinition, schema *spec.Schema, isRequired bool) resolvedType {
+ // short circuit type resolution for external types
+ debugLogAsJSON("shortCircuitResolveExternal", extType)
+
+ var result resolvedType
+ result.Extensions = schema.Extensions
+ result.GoType = tpe
+ result.Pkg = pkg
+ result.PkgAlias = alias
+ result.IsInterface = false
+ // by default consider that we have a type with validations. Use hint "interface" or "noValidation" to disable validations
+ result.SkipExternalValidation = swag.BoolValue(extType.Hints.NoValidation)
+ result.IsNullable = isRequired
+
+ result.setKind(extType.Hints.Kind)
+ if result.IsInterface || result.IsStream {
+ result.IsNullable = false
+ }
+ if extType.Hints.Nullable != nil {
+ result.IsNullable = swag.BoolValue(extType.Hints.Nullable)
+ }
+
+ if nullable, ok := t.isNullableOverride(schema); ok {
+ result.IsNullable = nullable // x-nullable directive rules them all
+ }
+
+ // other extensions
+ if result.IsArray {
+ result.IsEmptyOmitted = false
+ tpe = "array"
+ }
+
+ result.setExtensions(schema, tpe)
+ return result
+}
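+// Example (illustrative): a definition carrying an x-go-type extension such as
+// {type: Duration, import: {package: time}, hints: {kind: primitive}} is
+// short-circuited here to GoType "time.Duration" with primitive semantics,
+// without inspecting the remainder of the schema.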
+
+func (t *typeResolver) ResolveSchema(schema *spec.Schema, isAnonymous, isRequired bool) (result resolvedType, err error) {
+ debugLog("resolving schema (anon: %t, req: %t) %s", isAnonymous, isRequired, t.ModelName)
+ defer func() {
+ debugLog("returning after resolve schema: %s", pretty.Sprint(result))
+ }()
+
+ if schema == nil {
+ result.IsInterface = true
+ result.GoType = iface
+ return
+ }
+
+ extType, isExternalType := t.resolveExternalType(schema.Extensions)
+ if isExternalType {
+ tpe, pkg, alias := t.knownDefGoType(t.ModelName, *schema, t.goTypeName)
+ debugLog("found type %s declared as external, imported from %s as %s. Has type hints? %t, rendered has embedded? %t",
+ t.ModelName, pkg, tpe, extType.Hints.Kind != "", extType.Embedded)
+
+ if extType.Hints.Kind != "" && !extType.Embedded {
+ // use hint to qualify type
+ debugLog("short circuits external type resolution with hint for %s", tpe)
+ result = t.shortCircuitResolveExternal(tpe, pkg, alias, extType, schema, isRequired)
+ result.IsExternal = isAnonymous // mark anonymous external types only, not definitions
+ return
+ }
+
+ // use spec to qualify type
+ debugLog("marking type %s as external embedded: %t", tpe, extType.Embedded)
+ defer func() { // enforce bubbling up decisions taken about being an external type
+ // mark this type as an embedded external definition if requested
+ result.IsEmbedded = extType.Embedded
+ result.IsExternal = isAnonymous // for non-embedded, mark anonymous external types only, not definitions
+
+ result.IsAnonymous = false
+ result.IsAliased = true
+ result.IsNullable = isRequired
+ if extType.Hints.Nullable != nil {
+ result.IsNullable = swag.BoolValue(extType.Hints.Nullable)
+ }
+
+ result.IsMap = false
+ result.AliasedType = result.GoType
+ result.IsInterface = false
+
+ if result.IsEmbedded {
+ result.ElemType = &resolvedType{
+ IsExternal: isAnonymous, // mark anonymous external types only, not definitions
+ IsInterface: false,
+ Pkg: extType.Import.Package,
+ PkgAlias: extType.Import.Alias,
+ SkipExternalValidation: swag.BoolValue(extType.Hints.NoValidation),
+ }
+ if extType.Import.Alias != "" {
+ result.ElemType.GoType = extType.Import.Alias + "." + extType.Type
+ } else {
+ result.ElemType.GoType = extType.Type
+ }
+ result.ElemType.setKind(extType.Hints.Kind)
+ if result.IsInterface || result.IsStream {
+ result.ElemType.IsNullable = false
+ }
+ if extType.Hints.Nullable != nil {
+ result.ElemType.IsNullable = swag.BoolValue(extType.Hints.Nullable)
+ }
+ // embedded external: by default consider validation is skipped for the external type
+ //
+ // NOTE: at this moment the template generates a type assertion, so this setting does not really matter
+ // for embedded types.
+ if extType.Hints.NoValidation != nil {
+ result.ElemType.SkipExternalValidation = swag.BoolValue(extType.Hints.NoValidation)
+ } else {
+ result.ElemType.SkipExternalValidation = true
+ }
+ } else {
+ // non-embedded external type: by default consider that validation is enabled (SkipExternalValidation: false)
+ result.SkipExternalValidation = swag.BoolValue(extType.Hints.NoValidation)
+ }
+
+ if nullable, ok := t.isNullableOverride(schema); ok {
+ result.IsNullable = nullable
+ }
+ }()
+ }
+
+ tpe := t.firstType(schema)
+ var returns bool
+
+ guardValidations(tpe, schema, schema.Type...)
+
+ returns, result, err = t.resolveSchemaRef(schema, isRequired)
+
+ if returns {
+ if !isAnonymous {
+ result.IsMap = false
+ result.IsComplexObject = true
+ }
+
+ return
+ }
+
+ defer func() {
+ result.setExtensions(schema, tpe)
+ }()
+
+ // special case of swagger type "file", rendered as io.ReadCloser interface
+ if tpe == file {
+ result.SwaggerType = file
+ result.IsPrimitive = true
+ result.IsNullable = false
+ result.GoType = formatMapping[str][binary]
+ result.IsStream = true
+ return
+ }
+
+ returns, result, err = t.resolveFormat(schema, isAnonymous, isRequired)
+ if returns {
+ return
+ }
+
+ result.IsNullable = t.isNullable(schema) || isRequired
+
+ switch tpe {
+ case array:
+ result, err = t.resolveArray(schema, isAnonymous, false)
+
+ case file, number, integer, boolean:
+ result.Extensions = schema.Extensions
+ result.GoType = typeMapping[tpe]
+ result.SwaggerType = tpe
+ t.inferAliasing(&result, schema, isAnonymous, isRequired)
+
+ switch tpe {
+ case boolean:
+ result.IsPrimitive = true
+ result.IsCustomFormatter = false
+ result.IsNullable = nullableBool(schema, isRequired)
+ case number, integer:
+ result.IsPrimitive = true
+ result.IsCustomFormatter = false
+ result.IsNullable = nullableNumber(schema, isRequired)
+ case file:
+ }
+
+ case str:
+ result.GoType = str
+ result.SwaggerType = str
+ t.inferAliasing(&result, schema, isAnonymous, isRequired)
+
+ result.IsPrimitive = true
+ result.IsNullable = nullableString(schema, isRequired)
+ result.Extensions = schema.Extensions
+
+ case object:
+ result, err = t.resolveObject(schema, isAnonymous)
+ if err != nil {
+ result = resolvedType{}
+ break
+ }
+ result.HasDiscriminator = schema.Discriminator != ""
+
+ case "null":
+ if schema.Validations().HasObjectValidations() {
+ // no explicit object type, but inferred from object validations:
+ // this makes the type a map[string]interface{} instead of interface{}
+ result, err = t.resolveObject(schema, isAnonymous)
+ if err != nil {
+ result = resolvedType{}
+ break
+ }
+ result.HasDiscriminator = schema.Discriminator != ""
+ break
+ }
+
+ result.GoType = iface
+ result.SwaggerType = object
+ result.IsNullable = false
+ result.IsInterface = true
+
+ default:
+ err = fmt.Errorf("unresolvable: %v (format %q)", schema.Type, schema.Format)
+ }
+
+ return
+}
+
+func warnSkipValidation(types interface{}) func(string, interface{}) {
+ return func(validation string, value interface{}) {
+ value = reflect.Indirect(reflect.ValueOf(value)).Interface()
+ log.Printf("warning: validation %s (value: %v) not compatible with type %v. Skipped", validation, value, types)
+ }
+}
+
+// guardValidations removes (with a warning) validations that don't fit with the schema type.
+//
+// Notice that the "enum" validation is allowed on any type but file.
+func guardValidations(tpe string, schema interface {
+ Validations() spec.SchemaValidations
+ SetValidations(spec.SchemaValidations)
+}, types ...string) {
+
+ v := schema.Validations()
+ if len(types) == 0 {
+ types = []string{tpe}
+ }
+ defer func() {
+ schema.SetValidations(v)
+ }()
+
+ if tpe != array {
+ v.ClearArrayValidations(warnSkipValidation(types))
+ }
+
+ if tpe != str && tpe != file {
+ v.ClearStringValidations(warnSkipValidation(types))
+ }
+
+ if tpe != object {
+ v.ClearObjectValidations(warnSkipValidation(types))
+ }
+
+ if tpe != number && tpe != integer {
+ v.ClearNumberValidations(warnSkipValidation(types))
+ }
+
+ if tpe == file {
+ // keep MinLength/MaxLength on file
+ if v.Pattern != "" {
+ warnSkipValidation(types)("pattern", v.Pattern)
+ v.Pattern = ""
+ }
+ if v.HasEnum() {
+ warnSkipValidation(types)("enum", v.Enum)
+ v.Enum = nil
+ }
+ }
+
+ // other cases: mapped as interface{}: no validations allowed but Enum
+}
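+// Example (illustrative): a schema declared as type "integer" carrying a stray
+// maxLength gets that validation cleared, with a logged warning that it is not
+// compatible with the type.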
+
+// guardFormatConflicts handles all conflicting properties
+// (for schema model or simple schema) when a format is set.
+//
+// At this moment, validation guards already handle all known conflicts, except for
+// the special case of binary (i.e. io.Reader).
+func guardFormatConflicts(format string, schema interface {
+ Validations() spec.SchemaValidations
+ SetValidations(spec.SchemaValidations)
+}) {
+ v := schema.Validations()
+ msg := fmt.Sprintf("for format %q", format)
+
+ // for this format, no additional validations are supported
+ if format == "binary" {
+ // no validations supported on binary fields at this moment (io.Reader)
+ v.ClearStringValidations(warnSkipValidation(msg))
+ if v.HasEnum() {
+ warnSkipValidation(msg)("enum", v.Enum)
+ v.Enum = nil
+ }
+ schema.SetValidations(v)
+ }
+ // more cases should be inserted here if they arise
+}
+
+// resolvedType is a swagger type that has been resolved and analyzed for usage
+// in a template
+type resolvedType struct {
+ IsAnonymous bool
+ IsArray bool
+ IsMap bool
+ IsInterface bool
+ IsPrimitive bool
+ IsCustomFormatter bool
+ IsAliased bool
+ IsNullable bool
+ IsStream bool
+ IsEmptyOmitted bool
+ IsJSONString bool
+ IsEnumCI bool
+ IsBase64 bool
+ IsExternal bool
+
+ // A tuple gets rendered as an anonymous struct with P{index} as property name
+ IsTuple bool
+ HasAdditionalItems bool
+
+ // A complex object gets rendered as a struct
+ IsComplexObject bool
+
+ // A polymorphic type
+ IsBaseType bool
+ HasDiscriminator bool
+
+ GoType string
+ Pkg string
+ PkgAlias string
+ AliasedType string
+ SwaggerType string
+ SwaggerFormat string
+ Extensions spec.Extensions
+
+ // The type of the element in a slice or map
+ ElemType *resolvedType
+
+ // IsMapNullOverride indicates that a nullable object is used within an
+ // aliased map. In this case, the reference is not rendered with a pointer
+ IsMapNullOverride bool
+
+ // IsSuperAlias indicates that the aliased type is really the same type,
+ // e.g. in golang, this translates to: type A = B
+ IsSuperAlias bool
+
+ // IsEmbedded applies to externally defined types. When embedded, a type
+ // is generated in models that embeds the external type, with the Validate
+ // method.
+ IsEmbedded bool
+
+ SkipExternalValidation bool
+}
+
+// Zero returns an initializer for the type
+func (rt resolvedType) Zero() string {
+ // if type is aliased, provide zero from the aliased type
+ if rt.IsAliased {
+ if zr, ok := zeroes[rt.AliasedType]; ok {
+ return rt.GoType + "(" + zr + ")"
+ }
+ }
+ // zero function provided as native or by strfmt function
+ if zr, ok := zeroes[rt.GoType]; ok {
+ return zr
+ }
+ // map and slice initializer
+ if rt.IsMap {
+ return "make(" + rt.GoType + ", 50)"
+ } else if rt.IsArray {
+ return "make(" + rt.GoType + ", 0, 50)"
+ }
+ // object initializer
+ if rt.IsTuple || rt.IsComplexObject {
+ if rt.IsNullable {
+ return "new(" + rt.GoType + ")"
+ }
+ return rt.GoType + "{}"
+ }
+ // interface initializer
+ if rt.IsInterface {
+ return "nil"
+ }
+
+ return ""
+}
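+// Example (illustrative): a nullable complex object yields "new(Model)" (with a
+// hypothetical model name), a non-nullable one yields "Model{}", and maps and
+// slices yield make(...) initializers with a small preallocated capacity.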
+
+// ToString returns a string conversion for a type akin to a string
+func (rt resolvedType) ToString(value string) string {
+ if !rt.IsPrimitive || rt.SwaggerType != "string" || rt.IsStream {
+ return ""
+ }
+ if rt.IsCustomFormatter {
+ if rt.IsAliased {
+ return fmt.Sprintf("%s(%s).String()", rt.AliasedType, value)
+ }
+ return fmt.Sprintf("%s.String()", value)
+ }
+ var deref string
+ if rt.IsNullable {
+ deref = "*"
+ }
+ if rt.GoType == "string" || rt.GoType == "*string" {
+ return fmt.Sprintf("%s%s", deref, value)
+ }
+
+ return fmt.Sprintf("string(%s%s)", deref, value)
+}
+
+func (rt *resolvedType) setExtensions(schema *spec.Schema, origType string) {
+ rt.IsEnumCI = hasEnumCI(schema.Extensions)
+ rt.setIsEmptyOmitted(schema, origType)
+ rt.setIsJSONString(schema, origType)
+
+ if customTag, found := schema.Extensions[xGoCustomTag]; found {
+ if rt.Extensions == nil {
+ rt.Extensions = make(spec.Extensions)
+ }
+ rt.Extensions[xGoCustomTag] = customTag
+ }
+}
+
+func (rt *resolvedType) setIsEmptyOmitted(schema *spec.Schema, tpe string) {
+ if v, found := schema.Extensions[xOmitEmpty]; found {
+ omitted, cast := v.(bool)
+ rt.IsEmptyOmitted = omitted && cast
+ return
+ }
+ // arrays of primitives are by default not empty-omitted, but arrays of aliased types are
+ rt.IsEmptyOmitted = (tpe != array) || (tpe == array && rt.IsAliased)
+}
+
+func (rt *resolvedType) setIsJSONString(schema *spec.Schema, _ string) {
+ _, found := schema.Extensions[xGoJSONString]
+ if !found {
+ rt.IsJSONString = false
+ return
+ }
+ rt.IsJSONString = true
+}
+
+func (rt *resolvedType) setKind(kind string) {
+ if kind != "" {
+ debugLog("overriding kind for %s as %s", rt.GoType, kind)
+ }
+ switch kind {
+ case "map":
+ rt.IsMap = true
+ rt.IsArray = false
+ rt.IsComplexObject = false
+ rt.IsInterface = false
+ rt.IsStream = false
+ rt.IsTuple = false
+ rt.IsPrimitive = false
+ rt.SwaggerType = object
+ case "array":
+ rt.IsMap = false
+ rt.IsArray = true
+ rt.IsComplexObject = false
+ rt.IsInterface = false
+ rt.IsStream = false
+ rt.IsTuple = false
+ rt.IsPrimitive = false
+ rt.SwaggerType = array
+ case "object":
+ rt.IsMap = false
+ rt.IsArray = false
+ rt.IsComplexObject = true
+ rt.IsInterface = false
+ rt.IsStream = false
+ rt.IsTuple = false
+ rt.IsPrimitive = false
+ rt.SwaggerType = object
+ case "interface", "null":
+ rt.IsMap = false
+ rt.IsArray = false
+ rt.IsComplexObject = false
+ rt.IsInterface = true
+ rt.IsStream = false
+ rt.IsTuple = false
+ rt.IsPrimitive = false
+ rt.SwaggerType = iface
+ case "stream":
+ rt.IsMap = false
+ rt.IsArray = false
+ rt.IsComplexObject = false
+ rt.IsInterface = false
+ rt.IsStream = true
+ rt.IsTuple = false
+ rt.IsPrimitive = false
+ rt.SwaggerType = file
+ case "tuple":
+ rt.IsMap = false
+ rt.IsArray = false
+ rt.IsComplexObject = false
+ rt.IsInterface = false
+ rt.IsStream = false
+ rt.IsTuple = true
+ rt.IsPrimitive = false
+ rt.SwaggerType = array
+ case "primitive":
+ rt.IsMap = false
+ rt.IsArray = false
+ rt.IsComplexObject = false
+ rt.IsInterface = false
+ rt.IsStream = false
+ rt.IsTuple = false
+ rt.IsPrimitive = true
+ case "":
+ break
+ default:
+ log.Printf("warning: unsupported hint value for external type: %q. Skipped", kind)
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/README.md b/vendor/github.com/go-swagger/go-swagger/scan/README.md
new file mode 100644
index 000000000..1ae6f766f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/README.md
@@ -0,0 +1,3 @@
+# scan
+
+Pre go1.11 version of the go source parser, without support for go modules.
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/classifier.go b/vendor/github.com/go-swagger/go-swagger/scan/classifier.go
new file mode 100644
index 000000000..e674272d0
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/classifier.go
@@ -0,0 +1,166 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "fmt"
+ "go/ast"
+ "log"
+ "regexp"
+
+ "golang.org/x/tools/go/loader"
+)
+
+type packageFilter struct {
+ Name string
+}
+
+func (pf *packageFilter) Matches(path string) bool {
+ matched, err := regexp.MatchString(pf.Name, path)
+ if err != nil {
+ log.Fatal(err)
+ }
+ return matched
+}
+
+type packageFilters []packageFilter
+
+func (pf packageFilters) HasFilters() bool {
+ return len(pf) > 0
+}
+
+func (pf packageFilters) Matches(path string) bool {
+ for _, mod := range pf {
+ if mod.Matches(path) {
+ return true
+ }
+ }
+ return false
+}
+
+type classifiedProgram struct {
+ Meta []*ast.File
+ Models []*ast.File
+ Routes []*ast.File
+ Operations []*ast.File
+ Parameters []*ast.File
+ Responses []*ast.File
+}
+
+// programClassifier classifies the files of a program into buckets
+// for processing by a swagger spec generator. Files are bucketed into
+// groups: Meta, Models, Routes, Operations, Parameters and Responses.
+//
+// Each of these buckets is then processed with an appropriate parsing strategy.
+//
+// When Include or Exclude filters are provided, they are used to limit the
+// candidates prior to parsing.
+// The include filters take precedence over the excludes: when include filters are
+// present, the excludes are ignored, so a package appearing in both is included.
+type programClassifier struct {
+ Includes packageFilters
+ Excludes packageFilters
+}
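+
+// Example (illustrative): with Includes set to a pattern such as
+// "github.com/example/api" (a hypothetical import path), only packages whose
+// path matches it are scanned, and any Excludes are then ignored entirely.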
+
+func (pc *programClassifier) Classify(prog *loader.Program) (*classifiedProgram, error) {
+ var cp classifiedProgram
+ for pkg, pkgInfo := range prog.AllPackages {
+ if Debug {
+ log.Printf("analyzing: %s\n", pkg.Path())
+ }
+ if pc.Includes.HasFilters() {
+ if !pc.Includes.Matches(pkg.Path()) {
+ continue
+ }
+ } else if pc.Excludes.HasFilters() {
+ if pc.Excludes.Matches(pkg.Path()) {
+ continue
+ }
+ }
+
+ for _, file := range pkgInfo.Files {
+ var ro, op, mt, pm, rs, mm bool // only add a particular file once
+ for _, comments := range file.Comments {
+ var seenStruct string
+ for _, cline := range comments.List {
+ if cline != nil {
+ matches := rxSwaggerAnnotation.FindStringSubmatch(cline.Text)
+ if len(matches) > 1 {
+ switch matches[1] {
+ case "route":
+ if !ro {
+ cp.Routes = append(cp.Routes, file)
+ ro = true
+ }
+ case "operation":
+ if !op {
+ cp.Operations = append(cp.Operations, file)
+ op = true
+ }
+ case "model":
+ if !mm {
+ cp.Models = append(cp.Models, file)
+ mm = true
+ }
+ if seenStruct == "" || seenStruct == matches[1] {
+ seenStruct = matches[1]
+ } else {
+ return nil, fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s", seenStruct, matches[1], cline.Text)
+ }
+ case "meta":
+ if !mt {
+ cp.Meta = append(cp.Meta, file)
+ mt = true
+ }
+ case "parameters":
+ if !pm {
+ cp.Parameters = append(cp.Parameters, file)
+ pm = true
+ }
+ if seenStruct == "" || seenStruct == matches[1] {
+ seenStruct = matches[1]
+ } else {
+ return nil, fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s", seenStruct, matches[1], cline.Text)
+ }
+ case "response":
+ if !rs {
+ cp.Responses = append(cp.Responses, file)
+ rs = true
+ }
+ if seenStruct == "" || seenStruct == matches[1] {
+ seenStruct = matches[1]
+ } else {
+ return nil, fmt.Errorf("classifier: already annotated as %s, can't also be %q - %s", seenStruct, matches[1], cline.Text)
+ }
+ case "strfmt", "name", "discriminated", "file", "enum", "default", "alias", "type":
+ // TODO: perhaps collect these and pass along to avoid lookups later on
+ case "allOf":
+ case "ignore":
+ default:
+ return nil, fmt.Errorf("classifier: unknown swagger annotation %q", matches[1])
+ }
+ }
+
+ }
+ }
+ }
+ }
+ }
+
+ return &cp, nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/doc.go b/vendor/github.com/go-swagger/go-swagger/scan/doc.go
new file mode 100644
index 000000000..2bc415a8f
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/doc.go
@@ -0,0 +1,89 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package scan provides a scanner for go files that produces a swagger spec document.
+
+This package is intended for pre-go1.11 versions, and does not support go modules.
+
+You give it a main file and it will parse all the files that are required by that main
+package to produce a swagger specification.
+
+To use you can add a go:generate comment to your main file for example:
+
+ //go:generate swagger generate spec
+
+The following annotations exist:
+
+swagger:meta
+
+The swagger:meta annotation flags a file as source for metadata about the API.
+This is typically a doc.go file with your package documentation.
+
+You can specify a Consumes and Produces key which has a new content type on each line
+Schemes is a tag that is required and allows for a comma separated string composed of:
+http, https, ws or wss
+
+Host and BasePath can be specified but those values will be defaults,
+they should get substituted when serving the swagger spec.
+
+Default parameters and responses are not supported at this stage, for those you can edit the template json.
+
+swagger:strfmt [name]
+
+A swagger:strfmt annotation names a type as a string formatter. The name is mandatory and that is
+what will be used as format name for this particular string format.
+String formats should only be used for very well known formats.
+
+swagger:model [?model name]
+
+A swagger:model annotation optionally gets a model name as extra data on the line.
+When this appears anywhere in a comment for a struct, that struct becomes a schema
+in the definitions object of swagger.
+
+The struct gets analyzed and all the collected models are added to the tree.
+The refs are tracked separately so that they can be renamed later on.
+
+When this annotation is found to be on an interface instead of a struct, the properties are provided
+through exported nullary methods.
+
+A property of an interface model can have a Discriminator: true annotation to mark that field as
+the field that will contain the discriminator value.
+
+swagger:route [method] [path pattern] [?tag1 tag2 tag3] [operation id]
+
+A swagger:route annotation links a path to a method.
+This operation gets a unique id, which is used in various places as method name.
+One such usage is as the method name in generated clients.
+
+Because there are many routers available, this tool does not try to parse the paths
+you provide to your routing library of choice. So you have to specify your path pattern
+yourself in valid swagger syntax.
+
+swagger:params [operationid1 operationid2]
+
+Links a struct to one or more operations. The params in the resulting swagger spec can be composed of several structs.
+There are no guarantees given on how property name overlaps are resolved when several structs apply to the same operation.
+This tag works very similarly to the swagger:model tag except that it produces valid parameter objects instead of schema
+objects.
+
+swagger:response [?response name]
+
+Reads a struct decorated with swagger:response and uses that information to fill up the headers and the schema for a response.
+A swagger:route can specify a response name for a status code and then the matching response will be used for that operation in the swagger definition.
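+
+As an illustrative example (with hypothetical names), a complete route annotation
+could look like:
+
+ // swagger:route GET /pets pets listPets
+ //
+ // Lists all pets known to the store.
+ //
+ // Responses:
+ // 200: petsResponse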
+*/
+package scan
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/enum.go b/vendor/github.com/go-swagger/go-swagger/scan/enum.go
new file mode 100644
index 000000000..d1ecc9c87
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/enum.go
@@ -0,0 +1,84 @@
+//go:build !go1.11
+// +build !go1.11
+
+package scan
+
+import (
+ "go/ast"
+ "strconv"
+ "strings"
+ "unicode"
+)
+
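+// upperSnakeCase converts a Go identifier to UPPER_SNAKE_CASE:
+// for example, upperSnakeCase("HTTPServer") yields "HTTP_SERVER".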
+func upperSnakeCase(s string) string {
+ in := []rune(s)
+ isLower := func(idx int) bool {
+ return idx >= 0 && idx < len(in) && unicode.IsLower(in[idx])
+ }
+
+ out := make([]rune, 0, len(in)+len(in)/2)
+
+ for i, r := range in {
+ if unicode.IsUpper(r) {
+ r = unicode.ToLower(r)
+ if i > 0 && in[i-1] != '_' && (isLower(i-1) || isLower(i+1)) {
+ out = append(out, '_')
+ }
+ }
+ out = append(out, r)
+ }
+
+ return strings.ToUpper(string(out))
+}
+
+func getEnumBasicLitValue(basicLit *ast.BasicLit) interface{} {
+ switch basicLit.Kind.String() {
+ case "INT":
+ if result, err := strconv.ParseInt(basicLit.Value, 10, 64); err == nil {
+ return result
+ }
+ case "FLOAT":
+ if result, err := strconv.ParseFloat(basicLit.Value, 64); err == nil {
+ return result
+ }
+ default:
+ return strings.Trim(basicLit.Value, "\"")
+ }
+ return nil
+}
+
+func getEnumValues(file *ast.File, typeName string) (list []interface{}) {
+ for _, decl := range file.Decls {
+ genDecl, ok := decl.(*ast.GenDecl)
+
+ if !ok {
+ continue
+ }
+
+ if genDecl.Tok.String() == "const" {
+ for _, spec := range genDecl.Specs {
+ if valueSpec, ok := spec.(*ast.ValueSpec); ok {
+ switch valueSpec.Type.(type) {
+ case *ast.Ident:
+ if valueSpec.Type.(*ast.Ident).Name == typeName {
+ if basicLit, ok := valueSpec.Values[0].(*ast.BasicLit); ok {
+ list = append(list, getEnumBasicLitValue(basicLit))
+ }
+ }
+ default:
+ var name = valueSpec.Names[0].Name
+ if strings.HasPrefix(name, upperSnakeCase(typeName)) {
+ var values = strings.SplitN(name, "__", 2)
+ if len(values) == 2 {
+ list = append(list, values[1])
+ }
+ }
+ }
+
+ }
+
+ }
+ }
+ }
+ return
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/meta.go b/vendor/github.com/go-swagger/go-swagger/scan/meta.go
new file mode 100644
index 000000000..f5b5ed5dd
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/meta.go
@@ -0,0 +1,246 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "encoding/json"
+ "fmt"
+ "net/mail"
+ "regexp"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+func metaTOSSetter(meta *spec.Info) func([]string) {
+ return func(lines []string) {
+ meta.TermsOfService = joinDropLast(lines)
+ }
+}
+
+func metaConsumesSetter(meta *spec.Swagger) func([]string) {
+ return func(consumes []string) { meta.Consumes = consumes }
+}
+
+func metaProducesSetter(meta *spec.Swagger) func([]string) {
+ return func(produces []string) { meta.Produces = produces }
+}
+
+func metaSchemeSetter(meta *spec.Swagger) func([]string) {
+ return func(schemes []string) { meta.Schemes = schemes }
+}
+
+func metaSecuritySetter(meta *spec.Swagger) func([]map[string][]string) {
+ return func(secDefs []map[string][]string) { meta.Security = secDefs }
+}
+
+func metaSecurityDefinitionsSetter(meta *spec.Swagger) func(json.RawMessage) error {
+ return func(jsonValue json.RawMessage) error {
+ var jsonData spec.SecurityDefinitions
+ err := json.Unmarshal(jsonValue, &jsonData)
+ if err != nil {
+ return err
+ }
+ meta.SecurityDefinitions = jsonData
+ return nil
+ }
+}
+
+func metaVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error {
+ return func(jsonValue json.RawMessage) error {
+ var jsonData spec.Extensions
+ err := json.Unmarshal(jsonValue, &jsonData)
+ if err != nil {
+ return err
+ }
+ for k := range jsonData {
+ if !rxAllowedExtensions.MatchString(k) {
+ return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k)
+ }
+ }
+ meta.Extensions = jsonData
+ return nil
+ }
+}
+
+func infoVendorExtensibleSetter(meta *spec.Swagger) func(json.RawMessage) error {
+ return func(jsonValue json.RawMessage) error {
+ var jsonData spec.Extensions
+ err := json.Unmarshal(jsonValue, &jsonData)
+ if err != nil {
+ return err
+ }
+ for k := range jsonData {
+ if !rxAllowedExtensions.MatchString(k) {
+ return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k)
+ }
+ }
+ meta.Info.Extensions = jsonData
+ return nil
+ }
+}
+
+func newMetaParser(swspec *spec.Swagger) *sectionedParser {
+ sp := new(sectionedParser)
+ if swspec.Info == nil {
+ swspec.Info = new(spec.Info)
+ }
+ info := swspec.Info
+ sp.setTitle = func(lines []string) {
+ tosave := joinDropLast(lines)
+ if len(tosave) > 0 {
+ tosave = rxStripTitleComments.ReplaceAllString(tosave, "")
+ }
+ info.Title = tosave
+ }
+ sp.setDescription = func(lines []string) { info.Description = joinDropLast(lines) }
+ sp.taggers = []tagParser{
+ newMultiLineTagParser("TOS", newMultilineDropEmptyParser(rxTOS, metaTOSSetter(info)), false),
+ newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, metaConsumesSetter(swspec)), false),
+ newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, metaProducesSetter(swspec)), false),
+ newSingleLineTagParser("Schemes", newSetSchemes(metaSchemeSetter(swspec))),
+ newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, metaSecuritySetter(swspec)), false),
+ newMultiLineTagParser("SecurityDefinitions", newYamlParser(rxSecurity, metaSecurityDefinitionsSetter(swspec)), true),
+ newSingleLineTagParser("Version", &setMetaSingle{swspec, rxVersion, setInfoVersion}),
+ newSingleLineTagParser("Host", &setMetaSingle{swspec, rxHost, setSwaggerHost}),
+ newSingleLineTagParser("BasePath", &setMetaSingle{swspec, rxBasePath, setSwaggerBasePath}),
+ newSingleLineTagParser("Contact", &setMetaSingle{swspec, rxContact, setInfoContact}),
+ newSingleLineTagParser("License", &setMetaSingle{swspec, rxLicense, setInfoLicense}),
+ newMultiLineTagParser("YAMLInfoExtensionsBlock", newYamlParser(rxInfoExtensions, infoVendorExtensibleSetter(swspec)), true),
+ newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, metaVendorExtensibleSetter(swspec)), true),
+ }
+ return sp
+}
+
+type setMetaSingle struct {
+ spec *spec.Swagger
+ rx *regexp.Regexp
+ set func(spec *spec.Swagger, lines []string) error
+}
+
+func (s *setMetaSingle) Matches(line string) bool {
+ return s.rx.MatchString(line)
+}
+
+func (s *setMetaSingle) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := s.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ return s.set(s.spec, []string{matches[1]})
+ }
+ return nil
+}
+
+func setSwaggerHost(swspec *spec.Swagger, lines []string) error {
+ lns := lines
+ if len(lns) == 0 || (len(lns) == 1 && len(lns[0]) == 0) {
+ lns = []string{"localhost"}
+ }
+ swspec.Host = lns[0]
+ return nil
+}
+
+func setSwaggerBasePath(swspec *spec.Swagger, lines []string) error {
+ var ln string
+ if len(lines) > 0 {
+ ln = lines[0]
+ }
+ swspec.BasePath = ln
+ return nil
+}
+
+func setInfoVersion(swspec *spec.Swagger, lines []string) error {
+ if len(lines) == 0 {
+ return nil
+ }
+ info := safeInfo(swspec)
+ info.Version = strings.TrimSpace(lines[0])
+ return nil
+}
+
+func setInfoContact(swspec *spec.Swagger, lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ contact, err := parseContactInfo(lines[0])
+ if err != nil {
+ return err
+ }
+ info := safeInfo(swspec)
+ info.Contact = contact
+ return nil
+}
+
+func parseContactInfo(line string) (*spec.ContactInfo, error) {
+ nameEmail, url := splitURL(line)
+ var name, email string
+ if len(nameEmail) > 0 {
+ addr, err := mail.ParseAddress(nameEmail)
+ if err != nil {
+ return nil, err
+ }
+ name, email = addr.Name, addr.Address
+ }
+ return &spec.ContactInfo{
+ URL: url,
+ Name: name,
+ Email: email,
+ }, nil
+}
+
+func setInfoLicense(swspec *spec.Swagger, lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ info := safeInfo(swspec)
+ line := lines[0]
+ name, url := splitURL(line)
+ info.License = &spec.License{
+ Name: name,
+ URL: url,
+ }
+ return nil
+}
+
+func safeInfo(swspec *spec.Swagger) *spec.Info {
+ if swspec.Info == nil {
+ swspec.Info = new(spec.Info)
+ }
+ return swspec.Info
+}
+
+// httpFTPScheme matches http://, https://, ftp://, ftps://, ws:// and wss://
+var httpFTPScheme = regexp.MustCompile("(?:(?:ht|f)tp|ws)s?://")
+
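+// splitURL separates a free-form prefix from a trailing URL. Example
+// (illustrative): splitURL("Apache 2.0 https://www.apache.org/licenses/LICENSE-2.0")
+// returns ("Apache 2.0", "https://www.apache.org/licenses/LICENSE-2.0").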
+func splitURL(line string) (notURL, url string) {
+ str := strings.TrimSpace(line)
+ parts := httpFTPScheme.FindStringIndex(str)
+ if len(parts) == 0 {
+ if len(str) > 0 {
+ notURL = str
+ }
+ return
+ }
+ if len(parts) > 0 {
+ notURL = strings.TrimSpace(str[:parts[0]])
+ url = strings.TrimSpace(str[parts[0]:])
+ }
+ return
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/operations.go b/vendor/github.com/go-swagger/go-swagger/scan/operations.go
new file mode 100644
index 000000000..31e2ea5a9
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/operations.go
@@ -0,0 +1,85 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "fmt"
+ "go/ast"
+
+ "github.com/go-openapi/spec"
+
+ "golang.org/x/tools/go/loader"
+)
+
+func newOperationsParser(prog *loader.Program) *operationsParser {
+ return &operationsParser{
+ program: prog,
+ }
+}
+
+type operationsParser struct {
+ program *loader.Program
+ definitions map[string]spec.Schema
+ operations map[string]*spec.Operation
+ responses map[string]spec.Response
+}
+
+func (op *operationsParser) Parse(gofile *ast.File, target interface{}, includeTags map[string]bool, excludeTags map[string]bool) error {
+ tgt := target.(*spec.Paths)
+ for _, comsec := range gofile.Comments {
+ content := parsePathAnnotation(rxOperation, comsec.List)
+
+ if content.Method == "" {
+ continue // no method found: not an operation annotation, move on
+ }
+
+ if !shouldAcceptTag(content.Tags, includeTags, excludeTags) {
+ if Debug {
+ fmt.Printf("operation %s %s is ignored due to tag rules\n", content.Method, content.Path)
+ }
+ continue
+ }
+
+ pthObj := tgt.Paths[content.Path]
+
+ op := setPathOperation(
+ content.Method, content.ID,
+ &pthObj, op.operations[content.ID])
+
+ op.Tags = content.Tags
+
+ sp := new(yamlSpecScanner)
+ sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) }
+ sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) }
+
+ if err := sp.Parse(content.Remaining); err != nil {
+ return fmt.Errorf("operation (%s): %v", op.ID, err)
+ }
+ if err := sp.UnmarshalSpec(op.UnmarshalJSON); err != nil {
+ return fmt.Errorf("operation (%s): %v", op.ID, err)
+ }
+
+ if tgt.Paths == nil {
+ tgt.Paths = make(map[string]spec.PathItem)
+ }
+
+ tgt.Paths[content.Path] = pthObj
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/parameters.go b/vendor/github.com/go-swagger/go-swagger/scan/parameters.go
new file mode 100644
index 000000000..58d96ebe3
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/parameters.go
@@ -0,0 +1,515 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "fmt"
+ "go/ast"
+ "strings"
+
+ "github.com/go-openapi/spec"
+ "golang.org/x/tools/go/loader"
+)
+
+type operationValidationBuilder interface {
+ validationBuilder
+ SetCollectionFormat(string)
+}
+
+type paramTypable struct {
+ param *spec.Parameter
+}
+
+func (pt paramTypable) Level() int { return 0 }
+
+func (pt paramTypable) Typed(tpe, format string) {
+ pt.param.Typed(tpe, format)
+}
+
+func (pt paramTypable) WithEnum(values ...interface{}) {
+ pt.param.WithEnum(values...)
+}
+
+func (pt paramTypable) SetRef(ref spec.Ref) {
+ pt.param.Ref = ref
+}
+
+func (pt paramTypable) Items() swaggerTypable {
+ bdt, schema := bodyTypable(pt.param.In, pt.param.Schema)
+ if bdt != nil {
+ pt.param.Schema = schema
+ return bdt
+ }
+
+ if pt.param.Items == nil {
+ pt.param.Items = new(spec.Items)
+ }
+ pt.param.Type = "array"
+ return itemsTypable{pt.param.Items, 1}
+}
+
+func (pt paramTypable) Schema() *spec.Schema {
+ if pt.param.In != "body" {
+ return nil
+ }
+ if pt.param.Schema == nil {
+ pt.param.Schema = new(spec.Schema)
+ }
+ return pt.param.Schema
+}
+
+type itemsTypable struct {
+ items *spec.Items
+ level int
+}
+
+func (pt itemsTypable) Level() int { return pt.level }
+
+func (pt itemsTypable) Typed(tpe, format string) {
+ pt.items.Typed(tpe, format)
+}
+
+func (pt itemsTypable) SetRef(ref spec.Ref) {
+ pt.items.Ref = ref
+}
+
+func (pt itemsTypable) WithEnum(values ...interface{}) {
+ pt.items.WithEnum(values...)
+}
+
+func (pt itemsTypable) Schema() *spec.Schema {
+ return nil
+}
+
+func (pt itemsTypable) Items() swaggerTypable {
+ if pt.items.Items == nil {
+ pt.items.Items = new(spec.Items)
+ }
+ pt.items.Type = "array"
+ return itemsTypable{pt.items.Items, pt.level + 1}
+}
+
+type paramValidations struct {
+ current *spec.Parameter
+}
+
+func (sv paramValidations) SetMaximum(val float64, exclusive bool) {
+ sv.current.Maximum = &val
+ sv.current.ExclusiveMaximum = exclusive
+}
+func (sv paramValidations) SetMinimum(val float64, exclusive bool) {
+ sv.current.Minimum = &val
+ sv.current.ExclusiveMinimum = exclusive
+}
+func (sv paramValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
+func (sv paramValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
+func (sv paramValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
+func (sv paramValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
+func (sv paramValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
+func (sv paramValidations) SetPattern(val string) { sv.current.Pattern = val }
+func (sv paramValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
+func (sv paramValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
+func (sv paramValidations) SetEnum(val string) {
+ sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
+}
+func (sv paramValidations) SetDefault(val interface{}) { sv.current.Default = val }
+func (sv paramValidations) SetExample(val interface{}) { sv.current.Example = val }
+
+type itemsValidations struct {
+ current *spec.Items
+}
+
+func (sv itemsValidations) SetMaximum(val float64, exclusive bool) {
+ sv.current.Maximum = &val
+ sv.current.ExclusiveMaximum = exclusive
+}
+func (sv itemsValidations) SetMinimum(val float64, exclusive bool) {
+ sv.current.Minimum = &val
+ sv.current.ExclusiveMinimum = exclusive
+}
+func (sv itemsValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
+func (sv itemsValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
+func (sv itemsValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
+func (sv itemsValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
+func (sv itemsValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
+func (sv itemsValidations) SetPattern(val string) { sv.current.Pattern = val }
+func (sv itemsValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
+func (sv itemsValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
+func (sv itemsValidations) SetEnum(val string) {
+ sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
+}
+func (sv itemsValidations) SetDefault(val interface{}) { sv.current.Default = val }
+func (sv itemsValidations) SetExample(val interface{}) { sv.current.Example = val }
+
+type paramDecl struct {
+ File *ast.File
+ Decl *ast.GenDecl
+ TypeSpec *ast.TypeSpec
+ OperationIDs []string
+}
+
+func (sd *paramDecl) inferOperationIDs() (opids []string) {
+ if len(sd.OperationIDs) > 0 {
+ opids = sd.OperationIDs
+ return
+ }
+
+ if sd.Decl.Doc != nil {
+ for _, cmt := range sd.Decl.Doc.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxParametersOverride.FindStringSubmatch(ln)
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ for _, pt := range strings.Split(matches[1], " ") {
+ tr := strings.TrimSpace(pt)
+ if len(tr) > 0 {
+ opids = append(opids, tr)
+ }
+ }
+ }
+ }
+ }
+ }
+ sd.OperationIDs = append(sd.OperationIDs, opids...)
+ return
+}
+
+func newParameterParser(prog *loader.Program) *paramStructParser {
+ scp := new(paramStructParser)
+ scp.program = prog
+ scp.scp = newSchemaParser(prog)
+ return scp
+}
+
+type paramStructParser struct {
+ program *loader.Program
+ postDecls []schemaDecl
+ scp *schemaParser
+}
+
+// Parse will traverse a file and look for parameters.
+func (pp *paramStructParser) Parse(gofile *ast.File, target interface{}) error {
+ tgt := target.(map[string]*spec.Operation)
+ for _, decl := range gofile.Decls {
+ switch x1 := decl.(type) {
+ // Check for parameters at the package level.
+ case *ast.GenDecl:
+ for _, spc := range x1.Specs {
+ switch x2 := spc.(type) {
+ case *ast.TypeSpec:
+ sd := paramDecl{gofile, x1, x2, nil}
+ sd.inferOperationIDs()
+ if err := pp.parseDecl(tgt, sd); err != nil {
+ return err
+ }
+ }
+ }
+ // Check for parameters inside functions.
+ case *ast.FuncDecl:
+ for _, b := range x1.Body.List {
+ switch x2 := b.(type) {
+ case *ast.DeclStmt:
+ switch x3 := x2.Decl.(type) {
+ case *ast.GenDecl:
+ for _, spc := range x3.Specs {
+ switch x4 := spc.(type) {
+ case *ast.TypeSpec:
+ sd := paramDecl{gofile, x3, x4, nil}
+ sd.inferOperationIDs()
+ if err := pp.parseDecl(tgt, sd); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ return nil
+}
+
+func (pp *paramStructParser) parseDecl(operations map[string]*spec.Operation, decl paramDecl) error {
+ // check if there is a swagger:parameters tag that is followed by one or more words,
+ // these words are the ids of the operations this parameter struct applies to
+ // once type name is found convert it to a schema, by looking up the schema in the
+ // parameters dictionary that got passed into this parse method
+ for _, opid := range decl.inferOperationIDs() {
+ operation, ok := operations[opid]
+ if !ok {
+ operation = new(spec.Operation)
+ operations[opid] = operation
+ operation.ID = opid
+ }
+
+ // analyze struct body for fields etc
+ // each exported struct field:
+ // * gets a type mapped to a go primitive
+ // * perhaps gets a format
+ // * has to document the validations that apply for the type and the field
+ // * when the struct field points to a model it becomes a ref: #/definitions/ModelName
+ // * comments that aren't tags is used as the description
+ if tpe, ok := decl.TypeSpec.Type.(*ast.StructType); ok {
+ if err := pp.parseStructType(decl.File, operation, tpe, make(map[string]spec.Parameter)); err != nil {
+ return err
+ }
+ }
+
+ //operations[opid] = operation
+ }
+ return nil
+}
+
+func (pp *paramStructParser) parseEmbeddedStruct(gofile *ast.File, operation *spec.Operation, expr ast.Expr, seenPreviously map[string]spec.Parameter) error {
+ switch tpe := expr.(type) {
+ case *ast.Ident:
+ // do lookup of type
+ // take primitives into account, they should result in an error for swagger
+ pkg, err := pp.scp.packageForFile(gofile, tpe)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ file, _, ts, err := findSourceFile(pkg, tpe.Name)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ if st, ok := ts.Type.(*ast.StructType); ok {
+ return pp.parseStructType(file, operation, st, seenPreviously)
+ }
+ case *ast.SelectorExpr:
+ // look up package, file and then type
+ pkg, err := pp.scp.packageForSelector(gofile, tpe.X)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ file, _, ts, err := findSourceFile(pkg, tpe.Sel.Name)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ if st, ok := ts.Type.(*ast.StructType); ok {
+ return pp.parseStructType(file, operation, st, seenPreviously)
+ }
+ case *ast.StarExpr:
+ return pp.parseEmbeddedStruct(gofile, operation, tpe.X, seenPreviously)
+ }
+ fmt.Printf("3%#v\n", expr)
+ return fmt.Errorf("unable to resolve embedded struct for: %v", expr)
+}
+
+func (pp *paramStructParser) parseStructType(gofile *ast.File, operation *spec.Operation, tpe *ast.StructType, seenPreviously map[string]spec.Parameter) error {
+ if tpe.Fields != nil {
+ pt := seenPreviously
+
+ for _, fld := range tpe.Fields.List {
+ if len(fld.Names) == 0 {
+ // when the embedded struct is annotated with swagger:allOf it will be used as allOf property
+ // otherwise the fields will just be included as normal properties
+ if err := pp.parseEmbeddedStruct(gofile, operation, fld.Type, pt); err != nil {
+ return err
+ }
+ }
+ }
+
+ // a slice used to keep track of the sequence of the map keys, as maps do not preserve any specific ordering (since Go 1.4)
+ sequence := []string{}
+
+ for _, fld := range tpe.Fields.List {
+ if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() {
+ gnm := fld.Names[0].Name
+ nm, ignore, _, err := parseJSONTag(fld)
+ if err != nil {
+ return err
+ }
+ if ignore {
+ continue
+ }
+
+ in := "query"
+ // scan for param location first, this changes some behavior down the line
+ if fld.Doc != nil {
+ for _, cmt := range fld.Doc.List {
+ for _, line := range strings.Split(cmt.Text, "\n") {
+ matches := rxIn.FindStringSubmatch(line)
+ if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 {
+ in = strings.TrimSpace(matches[1])
+ }
+ }
+ }
+ }
+
+ ps := pt[nm]
+ ps.In = in
+ var pty swaggerTypable = paramTypable{&ps}
+ if in == "body" {
+ pty = schemaTypable{pty.Schema(), 0}
+ }
+ if in == "formData" && fld.Doc != nil && fileParam(fld.Doc) {
+ pty.Typed("file", "")
+ } else {
+ if err := pp.scp.parseNamedType(gofile, fld.Type, pty); err != nil {
+ return err
+ }
+ }
+
+ if strfmtName, ok := strfmtName(fld.Doc); ok {
+ ps.Typed("string", strfmtName)
+ ps.Ref = spec.Ref{}
+ }
+
+ sp := new(sectionedParser)
+ sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
+ if ps.Ref.String() == "" {
+ sp.taggers = []tagParser{
+ newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}),
+ newSingleLineTagParser("maximum", &setMaximum{paramValidations{&ps}, rxf(rxMaximumFmt, "")}),
+ newSingleLineTagParser("minimum", &setMinimum{paramValidations{&ps}, rxf(rxMinimumFmt, "")}),
+ newSingleLineTagParser("multipleOf", &setMultipleOf{paramValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
+ newSingleLineTagParser("minLength", &setMinLength{paramValidations{&ps}, rxf(rxMinLengthFmt, "")}),
+ newSingleLineTagParser("maxLength", &setMaxLength{paramValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
+ newSingleLineTagParser("pattern", &setPattern{paramValidations{&ps}, rxf(rxPatternFmt, "")}),
+ newSingleLineTagParser("collectionFormat", &setCollectionFormat{paramValidations{&ps}, rxf(rxCollectionFormatFmt, "")}),
+ newSingleLineTagParser("minItems", &setMinItems{paramValidations{&ps}, rxf(rxMinItemsFmt, "")}),
+ newSingleLineTagParser("maxItems", &setMaxItems{paramValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
+ newSingleLineTagParser("unique", &setUnique{paramValidations{&ps}, rxf(rxUniqueFmt, "")}),
+ newSingleLineTagParser("enum", &setEnum{paramValidations{&ps}, rxf(rxEnumFmt, "")}),
+ newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, paramValidations{&ps}, rxf(rxDefaultFmt, "")}),
+ newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, paramValidations{&ps}, rxf(rxExampleFmt, "")}),
+ newSingleLineTagParser("required", &setRequiredParam{&ps}),
+ }
+
+ itemsTaggers := func(items *spec.Items, level int) []tagParser {
+ // the expression is 1-indexed, not 0-indexed
+ itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
+
+ return []tagParser{
+ newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
+ }
+ }
+
+ var parseArrayTypes func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error)
+ parseArrayTypes = func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) {
+ if items == nil {
+ return []tagParser{}, nil
+ }
+ switch iftpe := expr.(type) {
+ case *ast.ArrayType:
+ eleTaggers := itemsTaggers(items, level)
+ sp.taggers = append(eleTaggers, sp.taggers...)
+ otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ case *ast.SelectorExpr:
+ otherTaggers, err := parseArrayTypes(iftpe.Sel, items.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ case *ast.Ident:
+ taggers := []tagParser{}
+ if iftpe.Obj == nil {
+ taggers = itemsTaggers(items, level)
+ }
+ otherTaggers, err := parseArrayTypes(expr, items.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return append(taggers, otherTaggers...), nil
+ case *ast.StarExpr:
+ otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ default:
+ return nil, fmt.Errorf("unknown field type ele for %q", nm)
+ }
+ }
+
+ // check if this is a primitive, if so parse the validations from the
+ // doc comments of the slice declaration.
+ if ftped, ok := fld.Type.(*ast.ArrayType); ok {
+ taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
+ if err != nil {
+ return err
+ }
+ sp.taggers = append(taggers, sp.taggers...)
+ }
+
+ } else {
+
+ sp.taggers = []tagParser{
+ newSingleLineTagParser("in", &matchOnlyParam{&ps, rxIn}),
+ newSingleLineTagParser("required", &matchOnlyParam{&ps, rxRequired}),
+ }
+ }
+ if err := sp.Parse(fld.Doc); err != nil {
+ return err
+ }
+ if ps.In == "path" {
+ ps.Required = true
+ }
+
+ if ps.Name == "" {
+ ps.Name = nm
+ }
+
+ if nm != gnm {
+ addExtension(&ps.VendorExtensible, "x-go-name", gnm)
+ }
+ pt[nm] = ps
+ sequence = append(sequence, nm)
+ }
+ }
+
+ for _, k := range sequence {
+ p := pt[k]
+ for i, v := range operation.Parameters {
+ if v.Name == k {
+ operation.Parameters = append(operation.Parameters[:i], operation.Parameters[i+1:]...)
+ break
+ }
+ }
+ operation.Parameters = append(operation.Parameters, p)
+ }
+ }
+
+ return nil
+}
+
+func isAliasParam(prop swaggerTypable) bool {
+ var isParam bool
+ if param, ok := prop.(paramTypable); ok {
+ isParam = param.param.In == "query" ||
+ param.param.In == "path" ||
+ param.param.In == "formData"
+ }
+ return isParam
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/path.go b/vendor/github.com/go-swagger/go-swagger/scan/path.go
new file mode 100644
index 000000000..7302d41c3
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/path.go
@@ -0,0 +1,151 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "go/ast"
+ "regexp"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+type parsedPathContent struct {
+ Method, Path, ID string
+ Tags []string
+ Remaining *ast.CommentGroup
+}
+
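+// parsePathAnnotation extracts the method, path and trailing operation id from a
+// path annotation line; any words between the path and the operation id become
+// tags, and subsequent comment lines are collected into Remaining.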
+func parsePathAnnotation(annotation *regexp.Regexp, lines []*ast.Comment) (cnt parsedPathContent) {
+ var justMatched bool
+
+ for _, cmt := range lines {
+ for _, line := range strings.Split(cmt.Text, "\n") {
+ matches := annotation.FindStringSubmatch(line)
+ if len(matches) > 3 {
+ cnt.Method, cnt.Path, cnt.ID = matches[1], matches[2], matches[len(matches)-1]
+ cnt.Tags = rxSpace.Split(matches[3], -1)
+ if len(matches[3]) == 0 {
+ cnt.Tags = nil
+ }
+ justMatched = true
+ } else if cnt.Method != "" {
+ if cnt.Remaining == nil {
+ cnt.Remaining = new(ast.CommentGroup)
+ }
+ if !justMatched || strings.TrimSpace(rxStripComments.ReplaceAllString(line, "")) != "" {
+ cc := new(ast.Comment)
+ cc.Slash = cmt.Slash
+ cc.Text = line
+ cnt.Remaining.List = append(cnt.Remaining.List, cc)
+ justMatched = false
+ }
+ }
+ }
+ }
+
+ return
+}
+
+func setPathOperation(method, id string, pthObj *spec.PathItem, op *spec.Operation) *spec.Operation {
+ if op == nil {
+ op = new(spec.Operation)
+ op.ID = id
+ }
+
+ switch strings.ToUpper(method) {
+ case "GET":
+ if pthObj.Get != nil {
+ if id == pthObj.Get.ID {
+ op = pthObj.Get
+ } else {
+ pthObj.Get = op
+ }
+ } else {
+ pthObj.Get = op
+ }
+
+ case "POST":
+ if pthObj.Post != nil {
+ if id == pthObj.Post.ID {
+ op = pthObj.Post
+ } else {
+ pthObj.Post = op
+ }
+ } else {
+ pthObj.Post = op
+ }
+
+ case "PUT":
+ if pthObj.Put != nil {
+ if id == pthObj.Put.ID {
+ op = pthObj.Put
+ } else {
+ pthObj.Put = op
+ }
+ } else {
+ pthObj.Put = op
+ }
+
+ case "PATCH":
+ if pthObj.Patch != nil {
+ if id == pthObj.Patch.ID {
+ op = pthObj.Patch
+ } else {
+ pthObj.Patch = op
+ }
+ } else {
+ pthObj.Patch = op
+ }
+
+ case "HEAD":
+ if pthObj.Head != nil {
+ if id == pthObj.Head.ID {
+ op = pthObj.Head
+ } else {
+ pthObj.Head = op
+ }
+ } else {
+ pthObj.Head = op
+ }
+
+ case "DELETE":
+ if pthObj.Delete != nil {
+ if id == pthObj.Delete.ID {
+ op = pthObj.Delete
+ } else {
+ pthObj.Delete = op
+ }
+ } else {
+ pthObj.Delete = op
+ }
+
+ case "OPTIONS":
+ if pthObj.Options != nil {
+ if id == pthObj.Options.ID {
+ op = pthObj.Options
+ } else {
+ pthObj.Options = op
+ }
+ } else {
+ pthObj.Options = op
+ }
+ }
+
+ return op
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/responses.go b/vendor/github.com/go-swagger/go-swagger/scan/responses.go
new file mode 100644
index 000000000..327b8a488
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/responses.go
@@ -0,0 +1,453 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "fmt"
+ "go/ast"
+ "strings"
+
+ "golang.org/x/tools/go/loader"
+
+ "github.com/go-openapi/spec"
+)
+
+type responseTypable struct {
+ in string
+ header *spec.Header
+ response *spec.Response
+}
+
+func (ht responseTypable) Level() int { return 0 }
+
+func (ht responseTypable) Typed(tpe, format string) {
+ ht.header.Typed(tpe, format)
+}
+
+func (ht responseTypable) WithEnum(values ...interface{}) {
+ ht.header.WithEnum(values)
+}
+
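+// bodyTypable handles in:body parameters and responses: it ensures the body
+// schema describes an array and returns a typable for its items, so that
+// collection payloads can be typed element by element.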
+func bodyTypable(in string, schema *spec.Schema) (swaggerTypable, *spec.Schema) {
+ if in == "body" {
+ // get the schema for items on the schema property
+ if schema == nil {
+ schema = new(spec.Schema)
+ }
+ if schema.Items == nil {
+ schema.Items = new(spec.SchemaOrArray)
+ }
+ if schema.Items.Schema == nil {
+ schema.Items.Schema = new(spec.Schema)
+ }
+ schema.Typed("array", "")
+ return schemaTypable{schema.Items.Schema, 0}, schema
+ }
+ return nil, nil
+}
+
+func (ht responseTypable) Items() swaggerTypable {
+ bdt, schema := bodyTypable(ht.in, ht.response.Schema)
+ if bdt != nil {
+ ht.response.Schema = schema
+ return bdt
+ }
+
+ if ht.header.Items == nil {
+ ht.header.Items = new(spec.Items)
+ }
+ ht.header.Type = "array"
+ return itemsTypable{ht.header.Items, 1}
+}
+
+func (ht responseTypable) SetRef(ref spec.Ref) {
+ // having trouble seeing the usefulness of this one here
+ ht.Schema().Ref = ref
+}
+
+func (ht responseTypable) Schema() *spec.Schema {
+ if ht.response.Schema == nil {
+ ht.response.Schema = new(spec.Schema)
+ }
+ return ht.response.Schema
+}
+
+func (ht responseTypable) SetSchema(schema *spec.Schema) {
+ ht.response.Schema = schema
+}
+
+func (ht responseTypable) CollectionOf(items *spec.Items, format string) {
+ ht.header.CollectionOf(items, format)
+}
+
+type headerValidations struct {
+ current *spec.Header
+}
+
+func (sv headerValidations) SetMaximum(val float64, exclusive bool) {
+ sv.current.Maximum = &val
+ sv.current.ExclusiveMaximum = exclusive
+}
+func (sv headerValidations) SetMinimum(val float64, exclusive bool) {
+ sv.current.Minimum = &val
+ sv.current.ExclusiveMinimum = exclusive
+}
+func (sv headerValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
+func (sv headerValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
+func (sv headerValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
+func (sv headerValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
+func (sv headerValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
+func (sv headerValidations) SetPattern(val string) { sv.current.Pattern = val }
+func (sv headerValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
+func (sv headerValidations) SetCollectionFormat(val string) { sv.current.CollectionFormat = val }
+func (sv headerValidations) SetEnum(val string) {
+ sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Type: sv.current.Type, Format: sv.current.Format})
+}
+func (sv headerValidations) SetDefault(val interface{}) { sv.current.Default = val }
+func (sv headerValidations) SetExample(val interface{}) { sv.current.Example = val }
+
+func newResponseDecl(file *ast.File, decl *ast.GenDecl, ts *ast.TypeSpec) responseDecl {
+ var rd responseDecl
+ rd.File = file
+ rd.Decl = decl
+ rd.TypeSpec = ts
+ rd.inferNames()
+ return rd
+}
+
+type responseDecl struct {
+ File *ast.File
+ Decl *ast.GenDecl
+ TypeSpec *ast.TypeSpec
+ GoName string
+ Name string
+ annotated bool
+}
+
+func (sd *responseDecl) hasAnnotation() bool {
+ sd.inferNames()
+ return sd.annotated
+}
+
+func (sd *responseDecl) inferNames() (goName string, name string) {
+ if sd.GoName != "" {
+ goName, name = sd.GoName, sd.Name
+ return
+ }
+ goName = sd.TypeSpec.Name.Name
+ name = goName
+ if sd.Decl.Doc != nil {
+ DECLS:
+ for _, cmt := range sd.Decl.Doc.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxResponseOverride.FindStringSubmatch(ln)
+ if len(matches) > 0 {
+ sd.annotated = true
+ }
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ name = matches[1]
+ break DECLS
+ }
+ }
+ }
+ }
+ sd.GoName = goName
+ sd.Name = name
+ return
+}
+
+func newResponseParser(prog *loader.Program) *responseParser {
+ return &responseParser{prog, nil, newSchemaParser(prog)}
+}
+
+type responseParser struct {
+ program *loader.Program
+ postDecls []schemaDecl
+ scp *schemaParser
+}
+
+func (rp *responseParser) Parse(gofile *ast.File, target interface{}) error {
+ tgt := target.(map[string]spec.Response)
+ for _, decl := range gofile.Decls {
+ switch x1 := decl.(type) {
+ // Check for response declarations at the package level.
+ case *ast.GenDecl:
+ for _, spc := range x1.Specs {
+ switch x2 := spc.(type) {
+ case *ast.TypeSpec:
+ sd := newResponseDecl(gofile, x1, x2)
+ if sd.hasAnnotation() {
+ if err := rp.parseDecl(tgt, sd); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ // Check for response declarations inside functions.
+ case *ast.FuncDecl:
+ for _, b := range x1.Body.List {
+ switch x2 := b.(type) {
+ case *ast.DeclStmt:
+ switch x3 := x2.Decl.(type) {
+ case *ast.GenDecl:
+ for _, spc := range x3.Specs {
+ switch x4 := spc.(type) {
+ case *ast.TypeSpec:
+ sd := newResponseDecl(gofile, x3, x4)
+ if sd.hasAnnotation() {
+ if err := rp.parseDecl(tgt, sd); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ return nil
+}
+
+func (rp *responseParser) parseDecl(responses map[string]spec.Response, decl responseDecl) error {
+ // check if there is a swagger:response tag that is optionally followed by a word;
+ // that word is the name under which this response is registered.
+ // once the name is found, the struct is analyzed and converted to a response,
+ // updating the entry in the responses dictionary that got passed into this parse method
+ response := responses[decl.Name]
+ resPtr := &response
+
+ // analyze doc comment for the model
+ sp := new(sectionedParser)
+ sp.setDescription = func(lines []string) { resPtr.Description = joinDropLast(lines) }
+ if err := sp.Parse(decl.Decl.Doc); err != nil {
+ return err
+ }
+
+ // analyze struct body for fields etc.
+ // each exported struct field:
+ // * gets a type mapped to a go primitive
+ // * perhaps gets a format
+ // * has to document the validations that apply for the type and the field
+ // * when the struct field points to a model it becomes a ref: #/definitions/ModelName
+ // * comments that aren't tags are used as the description
+ if tpe, ok := decl.TypeSpec.Type.(*ast.StructType); ok {
+ if err := rp.parseStructType(decl.File, resPtr, tpe, make(map[string]struct{})); err != nil {
+ return err
+ }
+ }
+
+ responses[decl.Name] = response
+ return nil
+}
+
+func (rp *responseParser) parseEmbeddedStruct(gofile *ast.File, response *spec.Response, expr ast.Expr, seenPreviously map[string]struct{}) error {
+ switch tpe := expr.(type) {
+ case *ast.Ident:
+ // do lookup of type
+ // take primitives into account, they should result in an error for swagger
+ pkg, err := rp.scp.packageForFile(gofile, tpe)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ file, _, ts, err := findSourceFile(pkg, tpe.Name)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ if st, ok := ts.Type.(*ast.StructType); ok {
+ return rp.parseStructType(file, response, st, seenPreviously)
+ }
+ case *ast.SelectorExpr:
+ // look up package, file and then type
+ pkg, err := rp.scp.packageForSelector(gofile, tpe.X)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ file, _, ts, err := findSourceFile(pkg, tpe.Sel.Name)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ if st, ok := ts.Type.(*ast.StructType); ok {
+ return rp.parseStructType(file, response, st, seenPreviously)
+ }
+ case *ast.StarExpr:
+ return rp.parseEmbeddedStruct(gofile, response, tpe.X, seenPreviously)
+ }
+ fmt.Printf("1%#v\n", expr)
+ return fmt.Errorf("unable to resolve embedded struct for: %v", expr)
+}
+
+func (rp *responseParser) parseStructType(gofile *ast.File, response *spec.Response, tpe *ast.StructType, seenPreviously map[string]struct{}) error {
+ if tpe.Fields != nil {
+
+ seenProperties := seenPreviously
+
+ for _, fld := range tpe.Fields.List {
+ if len(fld.Names) == 0 {
+ // when the embedded struct is annotated with swagger:allOf it will be used as allOf property
+ // otherwise the fields will just be included as normal properties
+ if err := rp.parseEmbeddedStruct(gofile, response, fld.Type, seenProperties); err != nil {
+ return err
+ }
+ }
+ }
+
+ for _, fld := range tpe.Fields.List {
+ if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() {
+ nm, ignore, _, err := parseJSONTag(fld)
+ if err != nil {
+ return err
+ }
+ if ignore {
+ continue
+ }
+
+ var in string
+ // scan for param location first, this changes some behavior down the line
+ if fld.Doc != nil {
+ for _, cmt := range fld.Doc.List {
+ for _, line := range strings.Split(cmt.Text, "\n") {
+ matches := rxIn.FindStringSubmatch(line)
+ if len(matches) > 0 && len(strings.TrimSpace(matches[1])) > 0 {
+ in = strings.TrimSpace(matches[1])
+ }
+ }
+ }
+ }
+
+ ps := response.Headers[nm]
+
+ // support swagger:file for response
+ // An API operation can return a file, such as an image or PDF. In this case,
+ // define the response schema with type: file and specify the appropriate MIME types in the produces section.
+ if fld.Doc != nil && fileParam(fld.Doc) {
+ response.Schema = &spec.Schema{}
+ response.Schema.Typed("file", "")
+ } else if err := rp.scp.parseNamedType(gofile, fld.Type, responseTypable{in, &ps, response}); err != nil {
+ return err
+ }
+
+ if strfmtName, ok := strfmtName(fld.Doc); ok {
+ ps.Typed("string", strfmtName)
+ }
+
+ sp := new(sectionedParser)
+ sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
+ sp.taggers = []tagParser{
+ newSingleLineTagParser("maximum", &setMaximum{headerValidations{&ps}, rxf(rxMaximumFmt, "")}),
+ newSingleLineTagParser("minimum", &setMinimum{headerValidations{&ps}, rxf(rxMinimumFmt, "")}),
+ newSingleLineTagParser("multipleOf", &setMultipleOf{headerValidations{&ps}, rxf(rxMultipleOfFmt, "")}),
+ newSingleLineTagParser("minLength", &setMinLength{headerValidations{&ps}, rxf(rxMinLengthFmt, "")}),
+ newSingleLineTagParser("maxLength", &setMaxLength{headerValidations{&ps}, rxf(rxMaxLengthFmt, "")}),
+ newSingleLineTagParser("pattern", &setPattern{headerValidations{&ps}, rxf(rxPatternFmt, "")}),
+ newSingleLineTagParser("collectionFormat", &setCollectionFormat{headerValidations{&ps}, rxf(rxCollectionFormatFmt, "")}),
+ newSingleLineTagParser("minItems", &setMinItems{headerValidations{&ps}, rxf(rxMinItemsFmt, "")}),
+ newSingleLineTagParser("maxItems", &setMaxItems{headerValidations{&ps}, rxf(rxMaxItemsFmt, "")}),
+ newSingleLineTagParser("unique", &setUnique{headerValidations{&ps}, rxf(rxUniqueFmt, "")}),
+ newSingleLineTagParser("enum", &setEnum{headerValidations{&ps}, rxf(rxEnumFmt, "")}),
+ newSingleLineTagParser("default", &setDefault{&ps.SimpleSchema, headerValidations{&ps}, rxf(rxDefaultFmt, "")}),
+ newSingleLineTagParser("example", &setExample{&ps.SimpleSchema, headerValidations{&ps}, rxf(rxExampleFmt, "")}),
+ }
+ itemsTaggers := func(items *spec.Items, level int) []tagParser {
+ // the expression is 1-indexed, not 0-indexed
+ itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
+
+ return []tagParser{
+ newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{itemsValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{itemsValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{itemsValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{itemsValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{itemsValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{itemsValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dCollectionFormat", level), &setCollectionFormat{itemsValidations{items}, rxf(rxCollectionFormatFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{itemsValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{itemsValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{itemsValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{itemsValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&items.SimpleSchema, itemsValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&items.SimpleSchema, itemsValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
+ }
+ }
+
+ var parseArrayTypes func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error)
+ parseArrayTypes = func(expr ast.Expr, items *spec.Items, level int) ([]tagParser, error) {
+ if items == nil {
+ return []tagParser{}, nil
+ }
+ switch iftpe := expr.(type) {
+ case *ast.ArrayType:
+ eleTaggers := itemsTaggers(items, level)
+ sp.taggers = append(eleTaggers, sp.taggers...)
+ otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ case *ast.Ident:
+ taggers := []tagParser{}
+ if iftpe.Obj == nil {
+ taggers = itemsTaggers(items, level)
+ }
+ otherTaggers, err := parseArrayTypes(expr, items.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return append(taggers, otherTaggers...), nil
+ case *ast.StarExpr:
+ otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ default:
+ return nil, fmt.Errorf("unknown field type ele for %q", nm)
+ }
+ }
+ // check if this is a primitive, if so parse the validations from the
+ // doc comments of the slice declaration.
+ if ftped, ok := fld.Type.(*ast.ArrayType); ok {
+ taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
+ if err != nil {
+ return err
+ }
+ sp.taggers = append(taggers, sp.taggers...)
+ }
+
+ if err := sp.Parse(fld.Doc); err != nil {
+ return err
+ }
+
+ if in != "body" {
+ seenProperties[nm] = struct{}{}
+ if response.Headers == nil {
+ response.Headers = make(map[string]spec.Header)
+ }
+ response.Headers[nm] = ps
+ }
+ }
+ }
+
+ for k := range response.Headers {
+ if _, ok := seenProperties[k]; !ok {
+ delete(response.Headers, k)
+ }
+ }
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/route_params.go b/vendor/github.com/go-swagger/go-swagger/scan/route_params.go
new file mode 100644
index 000000000..6dd17f6b4
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/route_params.go
@@ -0,0 +1,253 @@
+//go:build !go1.11
+// +build !go1.11
+
+package scan
+
+import (
+ "errors"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+const (
+ // ParamDescriptionKey indicates the tag used to define a parameter description in swagger:route
+ ParamDescriptionKey = "description"
+ // ParamNameKey indicates the tag used to define a parameter name in swagger:route
+ ParamNameKey = "name"
+ // ParamInKey indicates the tag used to define a parameter location in swagger:route
+ ParamInKey = "in"
+ // ParamRequiredKey indicates the tag used to declare whether a parameter is required in swagger:route
+ ParamRequiredKey = "required"
+ // ParamTypeKey indicates the tag used to define the parameter type in swagger:route
+ ParamTypeKey = "type"
+ // ParamAllowEmptyKey indicates the tag used to indicate whether a parameter allows empty values in swagger:route
+ ParamAllowEmptyKey = "allowempty"
+
+ // SchemaMinKey indicates the tag used to indicate the minimum value allowed for this type in swagger:route
+ SchemaMinKey = "min"
+ // SchemaMaxKey indicates the tag used to indicate the maximum value allowed for this type in swagger:route
+ SchemaMaxKey = "max"
+ // SchemaEnumKey indicates the tag used to specify the allowed values for this type in swagger:route
+ SchemaEnumKey = "enum"
+ // SchemaFormatKey indicates the expected format for this field in swagger:route
+ SchemaFormatKey = "format"
+ // SchemaDefaultKey indicates the default value for this field in swagger:route
+ SchemaDefaultKey = "default"
+ // SchemaMinLenKey indicates the minimum length of this field in swagger:route
+ SchemaMinLenKey = "minlength"
+ // SchemaMaxLenKey indicates the maximum length of this field in swagger:route
+ SchemaMaxLenKey = "maxlength"
+
+ // TypeArray is the identifier for an array type in swagger:route
+ TypeArray = "array"
+ // TypeNumber is the identifier for a number type in swagger:route
+ TypeNumber = "number"
+ // TypeInteger is the identifier for an integer type in swagger:route
+ TypeInteger = "integer"
+ // TypeBoolean is the identifier for a boolean type in swagger:route
+ TypeBoolean = "boolean"
+ // TypeBool is the identifier for a boolean type in swagger:route
+ TypeBool = "bool"
+ // TypeObject is the identifier for an object type in swagger:route
+ TypeObject = "object"
+ // TypeString is the identifier for a string type in swagger:route
+ TypeString = "string"
+)
+
+var (
+ validIn = []string{"path", "query", "header", "body", "form"}
+ basicTypes = []string{TypeInteger, TypeNumber, TypeString, TypeBoolean, TypeBool, TypeArray}
+)
+
+func newSetParams(params []*spec.Parameter, setter func([]*spec.Parameter)) *setOpParams {
+ return &setOpParams{
+ set: setter,
+ parameters: params,
+ }
+}
+
+type setOpParams struct {
+ set func([]*spec.Parameter)
+ parameters []*spec.Parameter
+}
+
+func (s *setOpParams) Matches(line string) bool {
+ return rxParameters.MatchString(line)
+}
+
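+// Parse reads the lines that follow a parameters: section. Each parameter
+// starts on a line prefixed with "+" and continues with "key: value" pairs,
+// e.g. (an illustrative sketch of the format this parser accepts, not taken
+// from the source):
+//
+//   + name: id
+//     in: path
+//     description: the identifier
+//     required: true
+//     type: integer
+//
+// Recognized keys are set directly on the parameter; any other keys are
+// collected as extra data and applied to the schema by processSchema.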
+func (s *setOpParams) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+
+ var current *spec.Parameter
+ var extraData map[string]string
+
+ for _, line := range lines {
+ l := strings.TrimSpace(line)
+
+ if strings.HasPrefix(l, "+") {
+ s.finalizeParam(current, extraData)
+ current = new(spec.Parameter)
+ extraData = make(map[string]string)
+ l = strings.TrimPrefix(l, "+")
+ }
+
+ kv := strings.SplitN(l, ":", 2)
+
+ if len(kv) <= 1 {
+ continue
+ }
+
+ key := strings.ToLower(strings.TrimSpace(kv[0]))
+ value := strings.TrimSpace(kv[1])
+
+ if current == nil {
+ return errors.New("invalid route/operation schema provided")
+ }
+
+ switch key {
+ case ParamDescriptionKey:
+ current.Description = value
+ case ParamNameKey:
+ current.Name = value
+ case ParamInKey:
+ v := strings.ToLower(value)
+ if contains(validIn, v) {
+ current.In = v
+ }
+ case ParamRequiredKey:
+ if v, err := strconv.ParseBool(value); err == nil {
+ current.Required = v
+ }
+ case ParamTypeKey:
+ if current.Schema == nil {
+ current.Schema = new(spec.Schema)
+ }
+ if contains(basicTypes, value) {
+ current.Type = strings.ToLower(value)
+ if current.Type == TypeBool {
+ current.Type = TypeBoolean
+ }
+ } else {
+ if ref, err := spec.NewRef("#/definitions/" + value); err == nil {
+ current.Type = TypeObject
+ current.Schema.Ref = ref
+ }
+ }
+ current.Schema.Type = spec.StringOrArray{current.Type}
+ case ParamAllowEmptyKey:
+ if v, err := strconv.ParseBool(value); err == nil {
+ current.AllowEmptyValue = v
+ }
+ default:
+ extraData[key] = value
+ }
+ }
+
+ s.finalizeParam(current, extraData)
+ s.set(s.parameters)
+ return nil
+}
+
+func (s *setOpParams) finalizeParam(param *spec.Parameter, data map[string]string) {
+ if param == nil {
+ return
+ }
+
+ processSchema(data, param)
+ s.parameters = append(s.parameters, param)
+}
+
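+// processSchema applies the collected extra data (min, max, minlength,
+// maxlength, enum, format, default) to the parameter's schema, converting
+// values according to the declared type, and copies the parameter
+// description onto the schema.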
+func processSchema(data map[string]string, param *spec.Parameter) {
+ if param.Schema == nil {
+ return
+ }
+
+ var enumValues []string
+
+ for key, value := range data {
+ switch key {
+ case SchemaMinKey:
+ if t := getType(param.Schema); t == TypeNumber || t == TypeInteger {
+ v, _ := strconv.ParseFloat(value, 64)
+ param.Schema.Minimum = &v
+ }
+ case SchemaMaxKey:
+ if t := getType(param.Schema); t == TypeNumber || t == TypeInteger {
+ v, _ := strconv.ParseFloat(value, 64)
+ param.Schema.Maximum = &v
+ }
+ case SchemaMinLenKey:
+ if getType(param.Schema) == TypeArray {
+ v, _ := strconv.ParseInt(value, 10, 64)
+ param.Schema.MinLength = &v
+ }
+ case SchemaMaxLenKey:
+ if getType(param.Schema) == TypeArray {
+ v, _ := strconv.ParseInt(value, 10, 64)
+ param.Schema.MaxLength = &v
+ }
+ case SchemaEnumKey:
+ enumValues = strings.Split(value, ",")
+ case SchemaFormatKey:
+ param.Schema.Format = value
+ case SchemaDefaultKey:
+ param.Schema.Default = convert(param.Type, value)
+ }
+ }
+
+ if param.Description != "" {
+ param.Schema.Description = param.Description
+ }
+
+ convertEnum(param.Schema, enumValues)
+}
+
+func convertEnum(schema *spec.Schema, enumValues []string) {
+ if len(enumValues) == 0 {
+ return
+ }
+
+ var finalEnum []interface{}
+ for _, v := range enumValues {
+ finalEnum = append(finalEnum, convert(schema.Type[0], strings.TrimSpace(v)))
+ }
+ schema.Enum = finalEnum
+}
+
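+// convert coerces a raw string value to the Go type implied by the swagger
+// type identifier; unparseable values fall back to the string itself.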
+func convert(typeStr, valueStr string) interface{} {
+ switch typeStr {
+ case TypeInteger:
+ fallthrough
+ case TypeNumber:
+ if num, err := strconv.ParseFloat(valueStr, 64); err == nil {
+ return num
+ }
+ case TypeBoolean:
+ fallthrough
+ case TypeBool:
+ if b, err := strconv.ParseBool(valueStr); err == nil {
+ return b
+ }
+ }
+ return valueStr
+}
+
+func getType(schema *spec.Schema) string {
+ if len(schema.Type) == 0 {
+ return ""
+ }
+ return schema.Type[0]
+}
+
+func contains(arr []string, obj string) bool {
+ for _, v := range arr {
+ if v == obj {
+ return true
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/routes.go b/vendor/github.com/go-swagger/go-swagger/scan/routes.go
new file mode 100644
index 000000000..644d61900
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/routes.go
@@ -0,0 +1,146 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "fmt"
+ "go/ast"
+
+ "github.com/go-openapi/spec"
+
+ "golang.org/x/tools/go/loader"
+)
+
+func opConsumesSetter(op *spec.Operation) func([]string) {
+ return func(consumes []string) { op.Consumes = consumes }
+}
+
+func opProducesSetter(op *spec.Operation) func([]string) {
+ return func(produces []string) { op.Produces = produces }
+}
+
+func opSchemeSetter(op *spec.Operation) func([]string) {
+ return func(schemes []string) { op.Schemes = schemes }
+}
+
+func opSecurityDefsSetter(op *spec.Operation) func([]map[string][]string) {
+ return func(securityDefs []map[string][]string) { op.Security = securityDefs }
+}
+
+func opResponsesSetter(op *spec.Operation) func(*spec.Response, map[int]spec.Response) {
+ return func(def *spec.Response, scr map[int]spec.Response) {
+ if op.Responses == nil {
+ op.Responses = new(spec.Responses)
+ }
+ op.Responses.Default = def
+ op.Responses.StatusCodeResponses = scr
+ }
+}
+
+func opParamSetter(op *spec.Operation) func([]*spec.Parameter) {
+ return func(params []*spec.Parameter) {
+ for _, v := range params {
+ op.AddParam(v)
+ }
+ }
+}
+
+func newRoutesParser(prog *loader.Program) *routesParser {
+ return &routesParser{
+ program: prog,
+ }
+}
+
+type routesParser struct {
+ program *loader.Program
+ definitions map[string]spec.Schema
+ operations map[string]*spec.Operation
+ responses map[string]spec.Response
+ parameters []*spec.Parameter
+}
+
+var routeVendorExtensibleParser = vendorExtensibleParser{
+ setExtensions: func(ext spec.Extensions, dest interface{}) {
+ dest.(*spec.Operation).Extensions = ext
+ },
+}
+
+func (rp *routesParser) Parse(gofile *ast.File, target interface{}, includeTags map[string]bool, excludeTags map[string]bool) error {
+ tgt := target.(*spec.Paths)
+ for _, comsec := range gofile.Comments {
+ content := parsePathAnnotation(rxRoute, comsec.List)
+
+ if content.Method == "" {
+ continue // not a swagger:route annotation, next!
+ }
+
+ if !shouldAcceptTag(content.Tags, includeTags, excludeTags) {
+ if Debug {
+ fmt.Printf("route %s %s is ignored due to tag rules\n", content.Method, content.Path)
+ }
+ continue
+ }
+
+ pthObj := tgt.Paths[content.Path]
+ op := setPathOperation(
+ content.Method, content.ID,
+ &pthObj, rp.operations[content.ID])
+
+ op.Tags = content.Tags
+
+ sp := new(sectionedParser)
+ sp.setTitle = func(lines []string) { op.Summary = joinDropLast(lines) }
+ sp.setDescription = func(lines []string) { op.Description = joinDropLast(lines) }
+ sr := newSetResponses(rp.definitions, rp.responses, opResponsesSetter(op))
+ spa := newSetParams(rp.parameters, opParamSetter(op))
+ sp.taggers = []tagParser{
+ newMultiLineTagParser("Consumes", newMultilineDropEmptyParser(rxConsumes, opConsumesSetter(op)), false),
+ newMultiLineTagParser("Produces", newMultilineDropEmptyParser(rxProduces, opProducesSetter(op)), false),
+ newSingleLineTagParser("Schemes", newSetSchemes(opSchemeSetter(op))),
+ newMultiLineTagParser("Security", newSetSecurity(rxSecuritySchemes, opSecurityDefsSetter(op)), false),
+ newMultiLineTagParser("Parameters", spa, false),
+ newMultiLineTagParser("Responses", sr, false),
+ newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, routeVendorExtensibleParser.ParseInto(op)), true),
+ }
+ if err := sp.Parse(content.Remaining); err != nil {
+ return fmt.Errorf("operation (%s): %v", op.ID, err)
+ }
+
+ if tgt.Paths == nil {
+ tgt.Paths = make(map[string]spec.PathItem)
+ }
+ tgt.Paths[content.Path] = pthObj
+ }
+
+ return nil
+}
+
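+// shouldAcceptTag decides whether a route with the given tags is kept: with
+// an include list only listed tags are accepted, with an exclude list listed
+// tags are dropped, and untagged routes are kept unless includes are set.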
+func shouldAcceptTag(tags []string, includeTags map[string]bool, excludeTags map[string]bool) bool {
+ for _, tag := range tags {
+ if len(includeTags) > 0 {
+ if includeTags[tag] {
+ return true
+ }
+ } else if len(excludeTags) > 0 {
+ if excludeTags[tag] {
+ return false
+ }
+ }
+ }
+ return len(includeTags) <= 0
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/scanner.go b/vendor/github.com/go-swagger/go-swagger/scan/scanner.go
new file mode 100644
index 000000000..b07616735
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/scanner.go
@@ -0,0 +1,974 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/build"
+ goparser "go/parser"
+ "go/types"
+ "log"
+ "os"
+ "regexp"
+ "strings"
+
+ "github.com/go-openapi/loads/fmts"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+ "golang.org/x/tools/go/loader"
+ yaml "gopkg.in/yaml.v3"
+)
+
+const (
+ rxMethod = "(\\p{L}+)"
+ rxPath = "((?:/[\\p{L}\\p{N}\\p{Pd}\\p{Pc}{}\\-\\.\\?_~%!$&'()*+,;=:@/]*)+/?)"
+ rxOpTags = "(\\p{L}[\\p{L}\\p{N}\\p{Pd}\\.\\p{Pc}\\p{Zs}]+)"
+ rxOpID = "((?:\\p{L}[\\p{L}\\p{N}\\p{Pd}\\p{Pc}]+)+)"
+
+ rxMaximumFmt = "%s[Mm]ax(?:imum)?\\p{Zs}*:\\p{Zs}*([\\<=])?\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
+ rxMinimumFmt = "%s[Mm]in(?:imum)?\\p{Zs}*:\\p{Zs}*([\\>=])?\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
+ rxMultipleOfFmt = "%s[Mm]ultiple\\p{Zs}*[Oo]f\\p{Zs}*:\\p{Zs}*([\\+-]?(?:\\p{N}+\\.)?\\p{N}+)$"
+
+ rxMaxLengthFmt = "%s[Mm]ax(?:imum)?(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ll]en(?:gth)?)\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
+ rxMinLengthFmt = "%s[Mm]in(?:imum)?(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ll]en(?:gth)?)\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
+ rxPatternFmt = "%s[Pp]attern\\p{Zs}*:\\p{Zs}*(.*)$"
+ rxCollectionFormatFmt = "%s[Cc]ollection(?:\\p{Zs}*[\\p{Pd}\\p{Pc}]?[Ff]ormat)\\p{Zs}*:\\p{Zs}*(.*)$"
+ rxEnumFmt = "%s[Ee]num\\p{Zs}*:\\p{Zs}*(.*)$"
+ rxDefaultFmt = "%s[Dd]efault\\p{Zs}*:\\p{Zs}*(.*)$"
+ rxExampleFmt = "%s[Ee]xample\\p{Zs}*:\\p{Zs}*(.*)$"
+
+ rxMaxItemsFmt = "%s[Mm]ax(?:imum)?(?:\\p{Zs}*|[\\p{Pd}\\p{Pc}]|\\.)?[Ii]tems\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
+ rxMinItemsFmt = "%s[Mm]in(?:imum)?(?:\\p{Zs}*|[\\p{Pd}\\p{Pc}]|\\.)?[Ii]tems\\p{Zs}*:\\p{Zs}*(\\p{N}+)$"
+ rxUniqueFmt = "%s[Uu]nique\\p{Zs}*:\\p{Zs}*(true|false)$"
+
+ rxItemsPrefixFmt = "(?:[Ii]tems[\\.\\p{Zs}]*){%d}"
+)
+
+var (
+ rxSwaggerAnnotation = regexp.MustCompile(`swagger:([\p{L}\p{N}\p{Pd}\p{Pc}]+)`)
+ rxFileUpload = regexp.MustCompile(`swagger:file`)
+ rxStrFmt = regexp.MustCompile(`swagger:strfmt\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
+ rxAlias = regexp.MustCompile(`swagger:alias`)
+ rxName = regexp.MustCompile(`swagger:name\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\.]+)$`)
+ rxAllOf = regexp.MustCompile(`swagger:allOf\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\.]+)?$`)
+ rxModelOverride = regexp.MustCompile(`swagger:model\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
+ rxResponseOverride = regexp.MustCompile(`swagger:response\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
+ rxParametersOverride = regexp.MustCompile(`swagger:parameters\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}\p{Zs}]+)$`)
+ rxEnum = regexp.MustCompile(`swagger:enum\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
+ rxIgnoreOverride = regexp.MustCompile(`swagger:ignore\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)?$`)
+ rxDefault = regexp.MustCompile(`swagger:default\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
+ rxType = regexp.MustCompile(`swagger:type\p{Zs}*(\p{L}[\p{L}\p{N}\p{Pd}\p{Pc}]+)$`)
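+ // matches annotations such as "swagger:route GET /users/{id} users getUser"
+ // (method, path, optional tags, then the operation id; names illustrative)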
+ rxRoute = regexp.MustCompile(
+ "swagger:route\\p{Zs}*" +
+ rxMethod +
+ "\\p{Zs}*" +
+ rxPath +
+ "(?:\\p{Zs}+" +
+ rxOpTags +
+ ")?\\p{Zs}+" +
+ rxOpID + "\\p{Zs}*$")
+ rxBeginYAMLSpec = regexp.MustCompile(`---\p{Zs}*$`)
+ rxUncommentHeaders = regexp.MustCompile(`^[\p{Zs}\t/\*-]*\|?`)
+ rxUncommentYAML = regexp.MustCompile(`^[\p{Zs}\t]*/*`)
+ rxOperation = regexp.MustCompile(
+ "swagger:operation\\p{Zs}*" +
+ rxMethod +
+ "\\p{Zs}*" +
+ rxPath +
+ "(?:\\p{Zs}+" +
+ rxOpTags +
+ ")?\\p{Zs}+" +
+ rxOpID + "\\p{Zs}*$")
+
+ rxSpace = regexp.MustCompile(`\p{Zs}+`)
+ rxIndent = regexp.MustCompile(`\p{Zs}*/*\p{Zs}*[^\p{Zs}]`)
+ rxPunctuationEnd = regexp.MustCompile(`\p{Po}$`)
+ rxStripComments = regexp.MustCompile(`^[^\p{L}\p{N}\p{Pd}\p{Pc}\+]*`)
+ rxStripTitleComments = regexp.MustCompile(`^[^\p{L}]*[Pp]ackage\p{Zs}+[^\p{Zs}]+\p{Zs}*`)
+ rxAllowedExtensions = regexp.MustCompile(`^[Xx]-`)
+
+ rxIn = regexp.MustCompile(`[Ii]n\p{Zs}*:\p{Zs}*(query|path|header|body|formData)$`)
+ rxRequired = regexp.MustCompile(`[Rr]equired\p{Zs}*:\p{Zs}*(true|false)$`)
+ rxDiscriminator = regexp.MustCompile(`[Dd]iscriminator\p{Zs}*:\p{Zs}*(true|false)$`)
+ rxReadOnly = regexp.MustCompile(`[Rr]ead(?:\p{Zs}*|[\p{Pd}\p{Pc}])?[Oo]nly\p{Zs}*:\p{Zs}*(true|false)$`)
+ rxConsumes = regexp.MustCompile(`[Cc]onsumes\p{Zs}*:`)
+ rxProduces = regexp.MustCompile(`[Pp]roduces\p{Zs}*:`)
+ rxSecuritySchemes = regexp.MustCompile(`[Ss]ecurity\p{Zs}*:`)
+ rxSecurity = regexp.MustCompile(`[Ss]ecurity\p{Zs}*[Dd]efinitions:`)
+ rxResponses = regexp.MustCompile(`[Rr]esponses\p{Zs}*:`)
+ rxParameters = regexp.MustCompile(`[Pp]arameters\p{Zs}*:`)
+ rxSchemes = regexp.MustCompile(`[Ss]chemes\p{Zs}*:\p{Zs}*((?:(?:https?|HTTPS?|wss?|WSS?)[\p{Zs},]*)+)$`)
+ rxVersion = regexp.MustCompile(`[Vv]ersion\p{Zs}*:\p{Zs}*(.+)$`)
+ rxHost = regexp.MustCompile(`[Hh]ost\p{Zs}*:\p{Zs}*(.+)$`)
+ rxBasePath = regexp.MustCompile(`[Bb]ase\p{Zs}*-*[Pp]ath\p{Zs}*:\p{Zs}*` + rxPath + "$")
+ rxLicense = regexp.MustCompile(`[Ll]icense\p{Zs}*:\p{Zs}*(.+)$`)
+ rxContact = regexp.MustCompile(`[Cc]ontact\p{Zs}*-?(?:[Ii]nfo\p{Zs}*)?:\p{Zs}*(.+)$`)
+ rxTOS = regexp.MustCompile(`[Tt](?:erms)?\p{Zs}*-?[Oo]f?\p{Zs}*-?[Ss](?:ervice)?\p{Zs}*:`)
+ rxExtensions = regexp.MustCompile(`[Ee]xtensions\p{Zs}*:`)
+ rxInfoExtensions = regexp.MustCompile(`[Ii]nfo\p{Zs}*[Ee]xtensions:`)
+ // currently unused: rxExample = regexp.MustCompile(`[Ex]ample\p{Zs}*:\p{Zs}*(.*)$`)
+)
+
+// Many thanks go to https://github.com/yvasiyarov/swagger
+// this is loosely based on that implementation but for swagger 2.0
+
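+// joinDropLast joins the lines with newlines, dropping a single trailing
+// blank line if present.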
+func joinDropLast(lines []string) string {
+ l := len(lines)
+ lns := lines
+ if l > 0 && len(strings.TrimSpace(lines[l-1])) == 0 {
+ lns = lines[:l-1]
+ }
+ return strings.Join(lns, "\n")
+}
+
+func removeEmptyLines(lines []string) (notEmpty []string) {
+ for _, l := range lines {
+ if len(strings.TrimSpace(l)) > 0 {
+ notEmpty = append(notEmpty, l)
+ }
+ }
+ return
+}
+
+func rxf(rxp, ar string) *regexp.Regexp {
+ return regexp.MustCompile(fmt.Sprintf(rxp, ar))
+}
+
+// Opts are the options for the application scanner.
+type Opts struct {
+ BasePath string
+ Input *spec.Swagger
+ ScanModels bool
+ BuildTags string
+ Include []string
+ Exclude []string
+ IncludeTags []string
+ ExcludeTags []string
+}
+
+func safeConvert(str string) bool {
+ b, err := swag.ConvertBool(str)
+ if err != nil {
+ return false
+ }
+ return b
+}
+
+// Debug is true when process is run with DEBUG=1 env var
+var Debug = safeConvert(os.Getenv("DEBUG"))
+
+// Application scans the application and builds a swagger spec based on the information from the code files.
+// When includes are provided, only those files are considered for the initial discovery.
+// Similarly, the excludes will exclude an item from initial discovery through scanning for annotations.
+// When something in the discovered items requires a type that is contained in the includes or excludes,
+// it will still end up in the spec.
+func Application(opts Opts) (*spec.Swagger, error) {
+ parser, err := newAppScanner(&opts)
+
+ if err != nil {
+ return nil, err
+ }
+ return parser.Parse()
+}
+
+// appScanner is the global context for scanning a go application
+// into a swagger specification
+type appScanner struct {
+ loader *loader.Config
+ prog *loader.Program
+ classifier *programClassifier
+ discovered []schemaDecl
+ input *spec.Swagger
+ definitions map[string]spec.Schema
+ responses map[string]spec.Response
+ operations map[string]*spec.Operation
+ scanModels bool
+ includeTags map[string]bool
+ excludeTags map[string]bool
+
+ // MainPackage the path to find the main class in
+ MainPackage string
+}
+
+// newAppScanner creates a new API parser
+func newAppScanner(opts *Opts) (*appScanner, error) {
+ if Debug {
+ log.Println("scanning packages discovered through entrypoint @ ", opts.BasePath)
+ }
+ var ldr loader.Config
+ ldr.ParserMode = goparser.ParseComments
+ ldr.Import(opts.BasePath)
+ if opts.BuildTags != "" {
+ ldr.Build = &build.Default
+ ldr.Build.BuildTags = strings.Split(opts.BuildTags, ",")
+ }
+ ldr.TypeChecker = types.Config{FakeImportC: true}
+ prog, err := ldr.Load()
+ if err != nil {
+ return nil, err
+ }
+
+ var includes, excludes packageFilters
+ if len(opts.Include) > 0 {
+ for _, include := range opts.Include {
+ includes = append(includes, packageFilter{Name: include})
+ }
+ }
+ if len(opts.Exclude) > 0 {
+ for _, exclude := range opts.Exclude {
+ excludes = append(excludes, packageFilter{Name: exclude})
+ }
+ }
+ includeTags := make(map[string]bool)
+ for _, includeTag := range opts.IncludeTags {
+ includeTags[includeTag] = true
+ }
+ excludeTags := make(map[string]bool)
+ for _, excludeTag := range opts.ExcludeTags {
+ excludeTags[excludeTag] = true
+ }
+
+ input := opts.Input
+ if input == nil {
+ input = new(spec.Swagger)
+ input.Swagger = "2.0"
+ }
+
+ if input.Paths == nil {
+ input.Paths = new(spec.Paths)
+ }
+ if input.Definitions == nil {
+ input.Definitions = make(map[string]spec.Schema)
+ }
+ if input.Responses == nil {
+ input.Responses = make(map[string]spec.Response)
+ }
+ if input.Extensions == nil {
+ input.Extensions = make(spec.Extensions)
+ }
+
+ return &appScanner{
+ MainPackage: opts.BasePath,
+ prog: prog,
+ input: input,
+ loader: &ldr,
+ operations: collectOperationsFromInput(input),
+ definitions: input.Definitions,
+ responses: input.Responses,
+ scanModels: opts.ScanModels,
+ classifier: &programClassifier{
+ Includes: includes,
+ Excludes: excludes,
+ },
+ includeTags: includeTags,
+ excludeTags: excludeTags,
+ }, nil
+}
+
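+// collectOperationsFromInput indexes the operations of an existing spec by
+// operation id, so the route and operation parsers can reuse and extend them.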
+func collectOperationsFromInput(input *spec.Swagger) map[string]*spec.Operation {
+ operations := make(map[string]*spec.Operation)
+ if input != nil && input.Paths != nil {
+ for _, pth := range input.Paths.Paths {
+ if pth.Get != nil {
+ operations[pth.Get.ID] = pth.Get
+ }
+ if pth.Post != nil {
+ operations[pth.Post.ID] = pth.Post
+ }
+ if pth.Put != nil {
+ operations[pth.Put.ID] = pth.Put
+ }
+ if pth.Patch != nil {
+ operations[pth.Patch.ID] = pth.Patch
+ }
+ if pth.Delete != nil {
+ operations[pth.Delete.ID] = pth.Delete
+ }
+ if pth.Head != nil {
+ operations[pth.Head.ID] = pth.Head
+ }
+ if pth.Options != nil {
+ operations[pth.Options.ID] = pth.Options
+ }
+ }
+ }
+ return operations
+}
+
+// Parse produces a swagger object for an application
+func (a *appScanner) Parse() (*spec.Swagger, error) {
+ // classification still includes files that are completely commented out
+ cp, err := a.classifier.Classify(a.prog)
+ if err != nil {
+ return nil, err
+ }
+
+ // build models dictionary
+ if a.scanModels {
+ for _, modelsFile := range cp.Models {
+ if err := a.parseSchema(modelsFile); err != nil {
+ return nil, err
+ }
+ }
+ }
+
+ // build parameters dictionary
+ for _, paramsFile := range cp.Parameters {
+ if err := a.parseParameters(paramsFile); err != nil {
+ return nil, err
+ }
+ }
+
+ // build responses dictionary
+ for _, responseFile := range cp.Responses {
+ if err := a.parseResponses(responseFile); err != nil {
+ return nil, err
+ }
+ }
+
+ // build definitions dictionary
+ if err := a.processDiscovered(); err != nil {
+ return nil, err
+ }
+
+ // build paths dictionary
+ for _, routeFile := range cp.Routes {
+ if err := a.parseRoutes(routeFile); err != nil {
+ return nil, err
+ }
+ }
+ for _, operationFile := range cp.Operations {
+ if err := a.parseOperations(operationFile); err != nil {
+ return nil, err
+ }
+ }
+
+ // build swagger object
+ for _, metaFile := range cp.Meta {
+ if err := a.parseMeta(metaFile); err != nil {
+ return nil, err
+ }
+ }
+
+ if a.input.Swagger == "" {
+ a.input.Swagger = "2.0"
+ }
+
+ return a.input, nil
+}
+
+func (a *appScanner) processDiscovered() error {
+ // loop over discovered until all the items are in definitions
+ keepGoing := len(a.discovered) > 0
+ for keepGoing {
+ var queue []schemaDecl
+ for _, d := range a.discovered {
+ if _, ok := a.definitions[d.Name]; !ok {
+ queue = append(queue, d)
+ }
+ }
+ a.discovered = nil
+ for _, sd := range queue {
+ if err := a.parseDiscoveredSchema(sd); err != nil {
+ return err
+ }
+ }
+ keepGoing = len(a.discovered) > 0
+ }
+
+ return nil
+}
+
+func (a *appScanner) parseSchema(file *ast.File) error {
+ sp := newSchemaParser(a.prog)
+ if err := sp.Parse(file, a.definitions); err != nil {
+ return err
+ }
+ a.discovered = append(a.discovered, sp.postDecls...)
+ return nil
+}
+
+func (a *appScanner) parseDiscoveredSchema(sd schemaDecl) error {
+ sp := newSchemaParser(a.prog)
+ sp.discovered = &sd
+
+ if err := sp.Parse(sd.File, a.definitions); err != nil {
+ return err
+ }
+ a.discovered = append(a.discovered, sp.postDecls...)
+ return nil
+}
+
+func (a *appScanner) parseRoutes(file *ast.File) error {
+ rp := newRoutesParser(a.prog)
+ rp.operations = a.operations
+ rp.definitions = a.definitions
+ rp.responses = a.responses
+
+ return rp.Parse(file, a.input.Paths, a.includeTags, a.excludeTags)
+}
+
+func (a *appScanner) parseOperations(file *ast.File) error {
+ op := newOperationsParser(a.prog)
+ op.operations = a.operations
+ op.definitions = a.definitions
+ op.responses = a.responses
+ return op.Parse(file, a.input.Paths, a.includeTags, a.excludeTags)
+}
+
+func (a *appScanner) parseParameters(file *ast.File) error {
+ rp := newParameterParser(a.prog)
+ if err := rp.Parse(file, a.operations); err != nil {
+ return err
+ }
+ a.discovered = append(a.discovered, rp.postDecls...)
+ a.discovered = append(a.discovered, rp.scp.postDecls...)
+ return nil
+}
+
+func (a *appScanner) parseResponses(file *ast.File) error {
+ rp := newResponseParser(a.prog)
+ if err := rp.Parse(file, a.responses); err != nil {
+ return err
+ }
+ a.discovered = append(a.discovered, rp.postDecls...)
+ a.discovered = append(a.discovered, rp.scp.postDecls...)
+ return nil
+}
+
+func (a *appScanner) parseMeta(file *ast.File) error {
+ return newMetaParser(a.input).Parse(file.Doc)
+}
+
+// MustExpandPackagePath gets the real package path on disk
+func (a *appScanner) MustExpandPackagePath(packagePath string) string {
+ pkgRealpath := swag.FindInGoSearchPath(packagePath)
+ if pkgRealpath == "" {
+ log.Fatalf("Can't find package %s \n", packagePath)
+ }
+
+ return pkgRealpath
+}
+
+type swaggerTypable interface {
+ Typed(string, string)
+ SetRef(spec.Ref)
+ Items() swaggerTypable
+ WithEnum(...interface{})
+ Schema() *spec.Schema
+ Level() int
+}
+
+// Map all Go builtin types that have a JSON representation to Swagger/JSON types.
+// See https://golang.org/pkg/builtin/ and http://swagger.io/specification/
+func swaggerSchemaForType(typeName string, prop swaggerTypable) error {
+ switch typeName {
+ case "bool":
+ prop.Typed("boolean", "")
+ case "byte":
+ prop.Typed("integer", "uint8")
+ case "complex128", "complex64":
+ return fmt.Errorf("unsupported builtin %q (no JSON marshaller)", typeName)
+ case "error":
+ // TODO: error is often marshalled into a string but not always (e.g. errors package creates
+ // errors that are marshalled into an empty object), this could be handled the same way
+ // custom JSON marshallers are handled (in future)
+ prop.Typed("string", "")
+ case "float32":
+ prop.Typed("number", "float")
+ case "float64":
+ prop.Typed("number", "double")
+ case "int":
+ prop.Typed("integer", "int64")
+ case "int16":
+ prop.Typed("integer", "int16")
+ case "int32":
+ prop.Typed("integer", "int32")
+ case "int64":
+ prop.Typed("integer", "int64")
+ case "int8":
+ prop.Typed("integer", "int8")
+ case "rune":
+ prop.Typed("integer", "int32")
+ case "string":
+ prop.Typed("string", "")
+ case "uint":
+ prop.Typed("integer", "uint64")
+ case "uint16":
+ prop.Typed("integer", "uint16")
+ case "uint32":
+ prop.Typed("integer", "uint32")
+ case "uint64":
+ prop.Typed("integer", "uint64")
+ case "uint8":
+ prop.Typed("integer", "uint8")
+ case "uintptr":
+ prop.Typed("integer", "uint64")
+ default:
+ return fmt.Errorf("unsupported type %q", typeName)
+ }
+ return nil
+}
+
+func newMultiLineTagParser(name string, parser valueParser, skipCleanUp bool) tagParser {
+ return tagParser{
+ Name: name,
+ MultiLine: true,
+ SkipCleanUp: skipCleanUp,
+ Parser: parser,
+ }
+}
+
+func newSingleLineTagParser(name string, parser valueParser) tagParser {
+ return tagParser{
+ Name: name,
+ MultiLine: false,
+ SkipCleanUp: false,
+ Parser: parser,
+ }
+}
+
+type tagParser struct {
+ Name string
+ MultiLine bool
+ SkipCleanUp bool
+ Lines []string
+ Parser valueParser
+}
+
+func (st *tagParser) Matches(line string) bool {
+ return st.Parser.Matches(line)
+}
+
+func (st *tagParser) Parse(lines []string) error {
+ return st.Parser.Parse(lines)
+}
+
+func newYamlParser(rx *regexp.Regexp, setter func(json.RawMessage) error) valueParser {
+ return &yamlParser{
+ set: setter,
+ rx: rx,
+ }
+}
+
+type yamlParser struct {
+ set func(json.RawMessage) error
+ rx *regexp.Regexp
+}
+
+func (y *yamlParser) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+
+ var uncommented []string
+ uncommented = append(uncommented, removeYamlIndent(lines)...)
+
+ yamlContent := strings.Join(uncommented, "\n")
+ var yamlValue interface{}
+ err := yaml.Unmarshal([]byte(yamlContent), &yamlValue)
+ if err != nil {
+ return err
+ }
+
+ var jsonValue json.RawMessage
+ jsonValue, err = fmts.YAMLToJSON(yamlValue)
+ if err != nil {
+ return err
+ }
+
+ return y.set(jsonValue)
+}
+
+func (y *yamlParser) Matches(line string) bool {
+ return y.rx.MatchString(line)
+}
+
+// aggregates lines in header until it sees `---`,
+// the beginning of a YAML spec
+type yamlSpecScanner struct {
+ header []string
+ yamlSpec []string
+ setTitle func([]string)
+ setDescription func([]string)
+ workedOutTitle bool
+ title []string
+ skipHeader bool
+}
+
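+// cleanupScannerLines strips comment markers from the given lines using the
+// supplied regexp, preserves any YAML block delimited by yamlBlock verbatim
+// (minus indentation), and trims leading and trailing empty lines.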
+func cleanupScannerLines(lines []string, ur *regexp.Regexp, yamlBlock *regexp.Regexp) []string {
+ // bail early when there is nothing to parse
+ if len(lines) == 0 {
+ return lines
+ }
+ seenLine := -1
+ var lastContent int
+ var uncommented []string
+ var startBlock bool
+ var yaml []string
+ for i, v := range lines {
+ if yamlBlock != nil && yamlBlock.MatchString(v) && !startBlock {
+ startBlock = true
+ if seenLine < 0 {
+ seenLine = i
+ }
+ continue
+ }
+ if startBlock {
+ if yamlBlock.MatchString(v) {
+ startBlock = false
+ uncommented = append(uncommented, removeIndent(yaml)...)
+ continue
+ }
+ yaml = append(yaml, v)
+ if v != "" {
+ if seenLine < 0 {
+ seenLine = i
+ }
+ lastContent = i
+ }
+ continue
+ }
+ str := ur.ReplaceAllString(v, "")
+ uncommented = append(uncommented, str)
+ if str != "" {
+ if seenLine < 0 {
+ seenLine = i
+ }
+ lastContent = i
+ }
+ }
+
+ // fixes issue #50
+ if seenLine == -1 {
+ return nil
+ }
+ return uncommented[seenLine : lastContent+1]
+}
+
+// a shared function that can be used to split given headers
+// into a title and description
+func collectScannerTitleDescription(headers []string) (title, desc []string) {
+ hdrs := cleanupScannerLines(headers, rxUncommentHeaders, nil)
+
+ idx := -1
+ for i, line := range hdrs {
+ if strings.TrimSpace(line) == "" {
+ idx = i
+ break
+ }
+ }
+
+ if idx > -1 {
+ title = hdrs[:idx]
+ if len(hdrs) > idx+1 {
+ desc = hdrs[idx+1:]
+ } else {
+ desc = nil
+ }
+ return
+ }
+
+ if len(hdrs) > 0 {
+ line := hdrs[0]
+ if rxPunctuationEnd.MatchString(line) {
+ title = []string{line}
+ desc = hdrs[1:]
+ } else {
+ desc = hdrs
+ }
+ }
+
+ return
+}
+
+func (sp *yamlSpecScanner) collectTitleDescription() {
+ if sp.workedOutTitle {
+ return
+ }
+ if sp.setTitle == nil {
+ sp.header = cleanupScannerLines(sp.header, rxUncommentHeaders, nil)
+ return
+ }
+
+ sp.workedOutTitle = true
+ sp.title, sp.header = collectScannerTitleDescription(sp.header)
+}
+
+func (sp *yamlSpecScanner) Title() []string {
+ sp.collectTitleDescription()
+ return sp.title
+}
+
+func (sp *yamlSpecScanner) Description() []string {
+ sp.collectTitleDescription()
+ return sp.header
+}
+
+func (sp *yamlSpecScanner) Parse(doc *ast.CommentGroup) error {
+ if doc == nil {
+ return nil
+ }
+ var startedYAMLSpec bool
+COMMENTS:
+ for _, c := range doc.List {
+ for _, line := range strings.Split(c.Text, "\n") {
+ if rxSwaggerAnnotation.MatchString(line) {
+ break COMMENTS // a new swagger: annotation terminates this parser
+ }
+
+ if !startedYAMLSpec {
+ if rxBeginYAMLSpec.MatchString(line) {
+ startedYAMLSpec = true
+ sp.yamlSpec = append(sp.yamlSpec, line)
+ continue
+ }
+
+ if !sp.skipHeader {
+ sp.header = append(sp.header, line)
+ }
+
+ // no YAML spec yet, moving on
+ continue
+ }
+
+ sp.yamlSpec = append(sp.yamlSpec, line)
+ }
+ }
+ if sp.setTitle != nil {
+ sp.setTitle(sp.Title())
+ }
+ if sp.setDescription != nil {
+ sp.setDescription(sp.Description())
+ }
+ return nil
+}
+
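+// UnmarshalSpec strips comment markers from the collected YAML lines,
+// verifies the block starts with `---`, converts the YAML to JSON, and hands
+// the JSON document to the supplied unmarshal function; the buffered lines
+// are cleared on success.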
+func (sp *yamlSpecScanner) UnmarshalSpec(u func([]byte) error) (err error) {
+ spec := cleanupScannerLines(sp.yamlSpec, rxUncommentYAML, nil)
+ if len(spec) == 0 {
+ return errors.New("no spec available to unmarshal")
+ }
+
+ if !strings.Contains(spec[0], "---") {
+ return errors.New("yaml spec has to start with `---`")
+ }
+
+ // remove indentation
+ spec = removeIndent(spec)
+
+ // 1. parse yaml lines
+ yamlValue := make(map[interface{}]interface{})
+
+ yamlContent := strings.Join(spec, "\n")
+ err = yaml.Unmarshal([]byte(yamlContent), &yamlValue)
+ if err != nil {
+ return
+ }
+
+ // 2. convert to json
+ var jsonValue json.RawMessage
+ jsonValue, err = fmts.YAMLToJSON(yamlValue)
+ if err != nil {
+ return
+ }
+
+ // 3. unmarshal the json into an interface
+ var data []byte
+ data, err = jsonValue.MarshalJSON()
+ if err != nil {
+ return
+ }
+ err = u(data)
+ if err != nil {
+ return
+ }
+
+ // all parsed, returning...
+ sp.yamlSpec = nil // spec is now consumed, so let's erase the parsed lines
+ return
+}
+
+// removes indentation based on the first line
+func removeIndent(spec []string) []string {
+ loc := rxIndent.FindStringIndex(spec[0])
+ if loc[1] > 0 {
+ for i := range spec {
+ if len(spec[i]) >= loc[1] {
+ spec[i] = spec[i][loc[1]-1:]
+ }
+ }
+ }
+ return spec
+}
+
+// removes indentation based on the first line
+func removeYamlIndent(spec []string) []string {
+ loc := rxIndent.FindStringIndex(spec[0])
+ var s []string
+ if loc[1] > 0 {
+ for i := range spec {
+ if len(spec[i]) >= loc[1] {
+ s = append(s, spec[i][loc[1]-1:])
+ }
+ }
+ }
+ return s
+}
+
+// aggregates lines in header until it sees a tag.
+type sectionedParser struct {
+ header []string
+ matched map[string]tagParser
+ annotation valueParser
+
+ seenTag bool
+ skipHeader bool
+ setTitle func([]string)
+ setDescription func([]string)
+ workedOutTitle bool
+ taggers []tagParser
+ currentTagger *tagParser
+ title []string
+ ignored bool
+}
+
+func (st *sectionedParser) collectTitleDescription() {
+ if st.workedOutTitle {
+ return
+ }
+ if st.setTitle == nil {
+ st.header = cleanupScannerLines(st.header, rxUncommentHeaders, nil)
+ return
+ }
+
+ st.workedOutTitle = true
+ st.title, st.header = collectScannerTitleDescription(st.header)
+}
+
+func (st *sectionedParser) Title() []string {
+ st.collectTitleDescription()
+ return st.title
+}
+
+func (st *sectionedParser) Description() []string {
+ st.collectTitleDescription()
+ return st.header
+}
+
+func (st *sectionedParser) Parse(doc *ast.CommentGroup) error {
+ if doc == nil {
+ return nil
+ }
+COMMENTS:
+ for _, c := range doc.List {
+ for _, line := range strings.Split(c.Text, "\n") {
+ if rxSwaggerAnnotation.MatchString(line) {
+ if rxIgnoreOverride.MatchString(line) {
+ st.ignored = true
+ break COMMENTS // an explicit ignore terminates this parser
+ }
+ if st.annotation == nil || !st.annotation.Matches(line) {
+ break COMMENTS // a new swagger: annotation terminates this parser
+ }
+
+ _ = st.annotation.Parse([]string{line})
+ if len(st.header) > 0 {
+ st.seenTag = true
+ }
+ continue
+ }
+
+ var matched bool
+ for _, tagger := range st.taggers {
+ if tagger.Matches(line) {
+ st.seenTag = true
+ st.currentTagger = &tagger
+ matched = true
+ break
+ }
+ }
+
+ if st.currentTagger == nil {
+ if !st.skipHeader && !st.seenTag {
+ st.header = append(st.header, line)
+ }
+ // didn't match a tag, moving on
+ continue
+ }
+
+ if st.currentTagger.MultiLine && matched {
+ // the first line of a multiline tagger doesn't count
+ continue
+ }
+
+ ts, ok := st.matched[st.currentTagger.Name]
+ if !ok {
+ ts = *st.currentTagger
+ }
+ ts.Lines = append(ts.Lines, line)
+ if st.matched == nil {
+ st.matched = make(map[string]tagParser)
+ }
+ st.matched[st.currentTagger.Name] = ts
+
+ if !st.currentTagger.MultiLine {
+ st.currentTagger = nil
+ }
+ }
+ }
+ if st.setTitle != nil {
+ st.setTitle(st.Title())
+ }
+ if st.setDescription != nil {
+ st.setDescription(st.Description())
+ }
+ for _, mt := range st.matched {
+ if !mt.SkipCleanUp {
+ mt.Lines = cleanupScannerLines(mt.Lines, rxUncommentHeaders, nil)
+ }
+ if err := mt.Parse(mt.Lines); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+type vendorExtensibleParser struct {
+ setExtensions func(ext spec.Extensions, dest interface{})
+}
+
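+// ParseInto returns a setter that validates a JSON object of vendor
+// extensions (keys must start with "x-") and stores it on dest via the
+// configured setExtensions callback.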
+func (extParser vendorExtensibleParser) ParseInto(dest interface{}) func(json.RawMessage) error {
+ return func(jsonValue json.RawMessage) error {
+ var jsonData spec.Extensions
+ err := json.Unmarshal(jsonValue, &jsonData)
+ if err != nil {
+ return err
+ }
+ for k := range jsonData {
+ if !rxAllowedExtensions.MatchString(k) {
+ return fmt.Errorf("invalid schema extension name, should start from `x-`: %s", k)
+ }
+ }
+ extParser.setExtensions(jsonData, dest)
+ return nil
+ }
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/schema.go b/vendor/github.com/go-swagger/go-swagger/scan/schema.go
new file mode 100644
index 000000000..37ce6cf25
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/schema.go
@@ -0,0 +1,1358 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "fmt"
+ "go/ast"
+ "log"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "reflect"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/go/loader"
+
+ "github.com/go-openapi/spec"
+)
+
+func addExtension(ve *spec.VendorExtensible, key string, value interface{}) {
+ if os.Getenv("SWAGGER_GENERATE_EXTENSION") == "false" {
+ return
+ }
+
+ ve.AddExtension(key, value)
+}
+
+type schemaTypable struct {
+ schema *spec.Schema
+ level int
+}
+
+func (st schemaTypable) Typed(tpe, format string) {
+ st.schema.Typed(tpe, format)
+}
+
+func (st schemaTypable) SetRef(ref spec.Ref) {
+ st.schema.Ref = ref
+}
+
+func (st schemaTypable) Schema() *spec.Schema {
+ return st.schema
+}
+
+func (st schemaTypable) Items() swaggerTypable {
+ if st.schema.Items == nil {
+ st.schema.Items = new(spec.SchemaOrArray)
+ }
+ if st.schema.Items.Schema == nil {
+ st.schema.Items.Schema = new(spec.Schema)
+ }
+
+ st.schema.Typed("array", "")
+ return schemaTypable{st.schema.Items.Schema, st.level + 1}
+}
+
+func (st schemaTypable) AdditionalProperties() swaggerTypable {
+ if st.schema.AdditionalProperties == nil {
+ st.schema.AdditionalProperties = new(spec.SchemaOrBool)
+ }
+ if st.schema.AdditionalProperties.Schema == nil {
+ st.schema.AdditionalProperties.Schema = new(spec.Schema)
+ }
+
+ st.schema.Typed("object", "")
+ return schemaTypable{st.schema.AdditionalProperties.Schema, st.level + 1}
+}
+
+func (st schemaTypable) Level() int { return st.level }
+
+func (st schemaTypable) WithEnum(values ...interface{}) {
+ st.schema.WithEnum(values...)
+}
+
+type schemaValidations struct {
+ current *spec.Schema
+}
+
+func (sv schemaValidations) SetMaximum(val float64, exclusive bool) {
+ sv.current.Maximum = &val
+ sv.current.ExclusiveMaximum = exclusive
+}
+func (sv schemaValidations) SetMinimum(val float64, exclusive bool) {
+ sv.current.Minimum = &val
+ sv.current.ExclusiveMinimum = exclusive
+}
+func (sv schemaValidations) SetMultipleOf(val float64) { sv.current.MultipleOf = &val }
+func (sv schemaValidations) SetMinItems(val int64) { sv.current.MinItems = &val }
+func (sv schemaValidations) SetMaxItems(val int64) { sv.current.MaxItems = &val }
+func (sv schemaValidations) SetMinLength(val int64) { sv.current.MinLength = &val }
+func (sv schemaValidations) SetMaxLength(val int64) { sv.current.MaxLength = &val }
+func (sv schemaValidations) SetPattern(val string) { sv.current.Pattern = val }
+func (sv schemaValidations) SetUnique(val bool) { sv.current.UniqueItems = val }
+func (sv schemaValidations) SetDefault(val interface{}) { sv.current.Default = val }
+func (sv schemaValidations) SetExample(val interface{}) { sv.current.Example = val }
+func (sv schemaValidations) SetEnum(val string) {
+ sv.current.Enum = parseEnum(val, &spec.SimpleSchema{Format: sv.current.Format, Type: sv.current.Type[0]})
+}
+
+type schemaDecl struct {
+ File *ast.File
+ Decl *ast.GenDecl
+ TypeSpec *ast.TypeSpec
+ GoName string
+ Name string
+ annotated bool
+}
+
+func newSchemaDecl(file *ast.File, decl *ast.GenDecl, ts *ast.TypeSpec) *schemaDecl {
+ sd := &schemaDecl{
+ File: file,
+ Decl: decl,
+ TypeSpec: ts,
+ }
+ sd.inferNames()
+ return sd
+}
+
+func (sd *schemaDecl) hasAnnotation() bool {
+ sd.inferNames()
+ return sd.annotated
+}
+
+func (sd *schemaDecl) inferNames() (goName string, name string) {
+ if sd.GoName != "" {
+ goName, name = sd.GoName, sd.Name
+ return
+ }
+ goName = sd.TypeSpec.Name.Name
+ name = goName
+ if sd.Decl.Doc != nil {
+ DECLS:
+ for _, cmt := range sd.Decl.Doc.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxModelOverride.FindStringSubmatch(ln)
+ if len(matches) > 0 {
+ sd.annotated = true
+ }
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ name = matches[1]
+ break DECLS
+ }
+ }
+ }
+ }
+ sd.GoName = goName
+ sd.Name = name
+ return
+}
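+
+// For illustration, inferNames on a declaration such as
+//
+//  // User represents an account holder.
+//  // swagger:model user
+//  type User struct{ Name string }
+//
+// yields GoName "User" and Name "user"; a bare "swagger:model" line still
+// marks the declaration as annotated but keeps the Go name.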
+
+type schemaParser struct {
+ program *loader.Program
+ postDecls []schemaDecl
+ known map[string]spec.Schema
+ discovered *schemaDecl
+}
+
+func newSchemaParser(prog *loader.Program) *schemaParser {
+ scp := new(schemaParser)
+ scp.program = prog
+ scp.known = make(map[string]spec.Schema)
+ return scp
+}
+
+func (scp *schemaParser) Parse(gofile *ast.File, target interface{}) error {
+ tgt := target.(map[string]spec.Schema)
+ for _, decl := range gofile.Decls {
+ gd, ok := decl.(*ast.GenDecl)
+ if !ok {
+ continue
+ }
+ for _, spc := range gd.Specs {
+ if ts, ok := spc.(*ast.TypeSpec); ok {
+ sd := newSchemaDecl(gofile, gd, ts)
+ if err := scp.parseDecl(tgt, sd); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ return nil
+}
+
+func (scp *schemaParser) parseDecl(definitions map[string]spec.Schema, decl *schemaDecl) error {
+ // check if there is a swagger:model tag that is followed by a word;
+ // that word is the type name for swagger.
+ // The package and type are recorded in the extensions.
+ // Once the type name is found, convert it to a schema by looking up the
+ // schema in the definitions dictionary that was passed into this parse method.
+
+ // if our schemaParser is parsing a discovered schemaDecl and it does not match
+ // the current schemaDecl, we can skip parsing.
+ if scp.discovered != nil && scp.discovered.Name != decl.Name {
+ return nil
+ }
+
+ decl.inferNames()
+ schema := definitions[decl.Name]
+ schPtr := &schema
+
+ // analyze doc comment for the model
+ sp := new(sectionedParser)
+ sp.setTitle = func(lines []string) { schema.Title = joinDropLast(lines) }
+ sp.setDescription = func(lines []string) { schema.Description = joinDropLast(lines) }
+ if err := sp.Parse(decl.Decl.Doc); err != nil {
+ return err
+ }
+
+ // if the type is marked to ignore, just return
+ if sp.ignored {
+ return nil
+ }
+
+ // analyze the struct body for fields etc.
+ // each exported struct field:
+ // * gets a type mapped to a go primitive
+ // * perhaps gets a format
+ // * has to document the validations that apply for the type and the field
+ // * when the struct field points to a model it becomes a ref: #/definitions/ModelName
+ // * the first line of the comment is the title
+ // * the following lines are the description
+ switch tpe := decl.TypeSpec.Type.(type) {
+ case *ast.StructType:
+ if err := scp.parseStructType(decl.File, schPtr, tpe, make(map[string]string)); err != nil {
+ return err
+ }
+ case *ast.InterfaceType:
+ if err := scp.parseInterfaceType(decl.File, schPtr, tpe, make(map[string]string)); err != nil {
+ return err
+ }
+ case *ast.Ident:
+ prop := &schemaTypable{schPtr, 0}
+ if strfmtName, ok := strfmtName(decl.Decl.Doc); ok {
+ prop.Typed("string", strfmtName)
+ } else {
+ if err := scp.parseNamedType(decl.File, tpe, prop); err != nil {
+ return err
+ }
+ }
+ if enumName, ok := enumName(decl.Decl.Doc); ok {
+ var enumValues = getEnumValues(decl.File, enumName)
+ if len(enumValues) > 0 {
+ var typeName = reflect.TypeOf(enumValues[0]).String()
+ prop.WithEnum(enumValues...)
+
+ err := swaggerSchemaForType(typeName, prop)
+ if err != nil {
+ return fmt.Errorf("file %s, error is: %v", decl.File.Name, err)
+ }
+ }
+ }
+ case *ast.SelectorExpr:
+ prop := &schemaTypable{schPtr, 0}
+ if strfmtName, ok := strfmtName(decl.Decl.Doc); ok {
+ prop.Typed("string", strfmtName)
+ } else {
+ if err := scp.parseNamedType(decl.File, tpe, prop); err != nil {
+ return err
+ }
+ }
+
+ case *ast.ArrayType:
+ prop := &schemaTypable{schPtr, 0}
+ if strfmtName, ok := strfmtName(decl.Decl.Doc); ok {
+ prop.Items().Typed("string", strfmtName)
+ } else {
+ if err := scp.parseNamedType(decl.File, tpe, &schemaTypable{schPtr, 0}); err != nil {
+ return err
+ }
+ }
+
+ case *ast.MapType:
+ prop := &schemaTypable{schPtr, 0}
+ if strfmtName, ok := strfmtName(decl.Decl.Doc); ok {
+ prop.AdditionalProperties().Typed("string", strfmtName)
+ } else {
+ if err := scp.parseNamedType(decl.File, tpe, &schemaTypable{schPtr, 0}); err != nil {
+ return err
+ }
+ }
+ default:
+ log.Printf("WARNING: Missing parser for a %T, skipping model: %s\n", tpe, decl.Name)
+ return nil
+ }
+
+ if schPtr.Ref.String() == "" {
+ if decl.Name != decl.GoName {
+ addExtension(&schPtr.VendorExtensible, "x-go-name", decl.GoName)
+ }
+ for _, pkgInfo := range scp.program.AllPackages {
+ if pkgInfo.Importable {
+ for _, fil := range pkgInfo.Files {
+ if fil.Pos() == decl.File.Pos() {
+ addExtension(&schPtr.VendorExtensible, "x-go-package", pkgInfo.Pkg.Path())
+ }
+ }
+ }
+ }
+ }
+ definitions[decl.Name] = schema
+ return nil
+}
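+
+// A sketch of the comment layout parseDecl consumes, assuming the usual
+// sectionedParser behavior (the first paragraph becomes the title, the rest
+// the description):
+//
+//  // User is the schema title.
+//  //
+//  // Everything after the first paragraph becomes
+//  // the schema description.
+//  //
+//  // swagger:model user
+//  type User struct {
+//      Name string `json:"name"`
+//  }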
+
+func (scp *schemaParser) parseNamedType(gofile *ast.File, expr ast.Expr, prop swaggerTypable) error {
+ switch ftpe := expr.(type) {
+ case *ast.Ident: // simple value
+ pkg, err := scp.packageForFile(gofile, ftpe)
+ if err != nil {
+ return err
+ }
+ return scp.parseIdentProperty(pkg, ftpe, prop)
+
+ case *ast.StarExpr: // pointer to something, optional by default
+ if err := scp.parseNamedType(gofile, ftpe.X, prop); err != nil {
+ return err
+ }
+
+ case *ast.ArrayType: // slice type
+ if err := scp.parseNamedType(gofile, ftpe.Elt, prop.Items()); err != nil {
+ return err
+ }
+
+ case *ast.StructType:
+ schema := prop.Schema()
+ if schema == nil {
+ return fmt.Errorf("items doesn't support embedded structs")
+ }
+ return scp.parseStructType(gofile, prop.Schema(), ftpe, make(map[string]string))
+
+ case *ast.SelectorExpr:
+ err := scp.typeForSelector(gofile, ftpe, prop)
+ return err
+
+ case *ast.MapType:
+ // check if the key is a string type; if not, print a message
+ // and skip the map property. Only maps with string keys can go into additional properties.
+ sch := prop.Schema()
+ if sch == nil {
+ return fmt.Errorf("items doesn't support maps")
+ }
+ if keyIdent, ok := ftpe.Key.(*ast.Ident); sch != nil && ok {
+ if keyIdent.Name == "string" {
+ if sch.AdditionalProperties == nil {
+ sch.AdditionalProperties = new(spec.SchemaOrBool)
+ }
+ sch.AdditionalProperties.Allows = false
+ if sch.AdditionalProperties.Schema == nil {
+ sch.AdditionalProperties.Schema = new(spec.Schema)
+ }
+ if err := scp.parseNamedType(gofile, ftpe.Value, schemaTypable{sch.AdditionalProperties.Schema, 0}); err != nil {
+ return err
+ }
+ sch.Typed("object", "")
+ }
+ }
+
+ case *ast.InterfaceType:
+ prop.Schema().Typed("object", "")
+ default:
+ pos := "unknown file:unknown position"
+ if scp != nil && scp.program != nil && scp.program.Fset != nil {
+ pos = scp.program.Fset.Position(expr.Pos()).String()
+ }
+ return fmt.Errorf("expr (%s) is unsupported for a schema", pos)
+ }
+ return nil
+}
+
+func (scp *schemaParser) parseEmbeddedType(gofile *ast.File, schema *spec.Schema, expr ast.Expr, seenPreviously map[string]string) error {
+ switch tpe := expr.(type) {
+ case *ast.Ident:
+ // do lookup of type
+ // take primitives into account, they should result in an error for swagger
+ pkg, err := scp.packageForFile(gofile, tpe)
+ if err != nil {
+ return err
+ }
+ file, _, ts, err := findSourceFile(pkg, tpe.Name)
+ if err != nil {
+ return err
+ }
+
+ switch st := ts.Type.(type) {
+ case *ast.StructType:
+ return scp.parseStructType(file, schema, st, seenPreviously)
+ case *ast.InterfaceType:
+ return scp.parseInterfaceType(file, schema, st, seenPreviously)
+ default:
+ prop := &schemaTypable{schema, 0}
+ return scp.parseNamedType(gofile, st, prop)
+ }
+
+ case *ast.SelectorExpr:
+ // look up package, file and then type
+ pkg, err := scp.packageForSelector(gofile, tpe.X)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ file, _, ts, err := findSourceFile(pkg, tpe.Sel.Name)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ if st, ok := ts.Type.(*ast.StructType); ok {
+ return scp.parseStructType(file, schema, st, seenPreviously)
+ }
+ if st, ok := ts.Type.(*ast.InterfaceType); ok {
+ return scp.parseInterfaceType(file, schema, st, seenPreviously)
+ }
+ case *ast.StarExpr:
+ return scp.parseEmbeddedType(gofile, schema, tpe.X, seenPreviously)
+ default:
+ return fmt.Errorf(
+ "parseEmbeddedType: unsupported type %v at position %#v",
+ expr,
+ scp.program.Fset.Position(tpe.Pos()),
+ )
+ }
+ return fmt.Errorf("unable to resolve embedded struct for: %v", expr)
+}
+
+func (scp *schemaParser) parseAllOfMember(gofile *ast.File, schema *spec.Schema, expr ast.Expr, seenPreviously map[string]string) error {
+ // TODO: check if the struct is annotated with swagger:model, or otherwise known in the definitions
+ var pkg *loader.PackageInfo
+ var file *ast.File
+ var gd *ast.GenDecl
+ var ts *ast.TypeSpec
+ var err error
+
+ switch tpe := expr.(type) {
+ case *ast.Ident:
+ // do lookup of type
+ // take primitives into account, they should result in an error for swagger
+ pkg, err = scp.packageForFile(gofile, tpe)
+ if err != nil {
+ return err
+ }
+ file, gd, ts, err = findSourceFile(pkg, tpe.Name)
+ if err != nil {
+ return err
+ }
+
+ case *ast.SelectorExpr:
+ // look up package, file and then type
+ pkg, err = scp.packageForSelector(gofile, tpe.X)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ file, gd, ts, err = findSourceFile(pkg, tpe.Sel.Name)
+ if err != nil {
+ return fmt.Errorf("embedded struct: %v", err)
+ }
+ default:
+ return fmt.Errorf("unable to resolve allOf member for: %v", expr)
+ }
+
+ sd := newSchemaDecl(file, gd, ts)
+ if sd.hasAnnotation() && pkg.String() != "time" && ts.Name.Name != "Time" {
+ ref, err := spec.NewRef("#/definitions/" + sd.Name)
+ if err != nil {
+ return err
+ }
+ schema.Ref = ref
+ scp.postDecls = append(scp.postDecls, *sd)
+ } else {
+ switch st := ts.Type.(type) {
+ case *ast.StructType:
+ return scp.parseStructType(file, schema, st, seenPreviously)
+ case *ast.InterfaceType:
+ return scp.parseInterfaceType(file, schema, st, seenPreviously)
+ }
+ }
+
+ return nil
+}
+func (scp *schemaParser) parseInterfaceType(gofile *ast.File, bschema *spec.Schema, tpe *ast.InterfaceType, seenPreviously map[string]string) error {
+ if tpe.Methods == nil {
+ return nil
+ }
+
+ // first check if this has embedded interfaces; if so, make sure to refer to those by ref
+ // when they are decorated with an allOf annotation.
+ // then go over the method list again, collect the nullary methods, and parse their comments
+ // as if they were properties on a struct
+ var schema *spec.Schema
+ seenProperties := seenPreviously
+ hasAllOf := false
+
+ for _, fld := range tpe.Methods.List {
+ if len(fld.Names) == 0 {
+ // if this created an allOf property then we have to rejig the schema var,
+ // because all the fields collected that aren't from embedded structs should go
+ // into their own proper schema.
+ // first process embedded structs in order of embedding
+ if allOfMember(fld.Doc) {
+ hasAllOf = true
+ if schema == nil {
+ schema = new(spec.Schema)
+ }
+ var newSch spec.Schema
+ // when the embedded struct is annotated with swagger:allOf it will be used as allOf property
+ // otherwise the fields will just be included as normal properties
+ if err := scp.parseAllOfMember(gofile, &newSch, fld.Type, seenProperties); err != nil {
+ return err
+ }
+
+ if fld.Doc != nil {
+ for _, cmt := range fld.Doc.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxAllOf.FindStringSubmatch(ln)
+ ml := len(matches)
+ if ml > 1 {
+ mv := matches[ml-1]
+ if mv != "" {
+ addExtension(&bschema.VendorExtensible, "x-class", mv)
+ }
+ }
+ }
+ }
+ }
+
+ bschema.AllOf = append(bschema.AllOf, newSch)
+ continue
+ }
+
+ var newSch spec.Schema
+ // when the embedded struct is annotated with swagger:allOf it will be used as allOf property
+ // otherwise the fields will just be included as normal properties
+ if err := scp.parseEmbeddedType(gofile, &newSch, fld.Type, seenProperties); err != nil {
+ return err
+ }
+ bschema.AllOf = append(bschema.AllOf, newSch)
+ hasAllOf = true
+ }
+ }
+
+ if schema == nil {
+ schema = bschema
+ }
+ // then add and possibly override values
+ if schema.Properties == nil {
+ schema.Properties = make(map[string]spec.Schema)
+ }
+ schema.Typed("object", "")
+ for _, fld := range tpe.Methods.List {
+ if mtpe, ok := fld.Type.(*ast.FuncType); ok && mtpe.Params.NumFields() == 0 && mtpe.Results.NumFields() == 1 {
+ gnm := fld.Names[0].Name
+ nm := gnm
+ if fld.Doc != nil {
+ for _, cmt := range fld.Doc.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxName.FindStringSubmatch(ln)
+ ml := len(matches)
+ if ml > 1 {
+ nm = matches[ml-1]
+ }
+ }
+ }
+ }
+
+ ps := schema.Properties[nm]
+ if err := parseProperty(scp, gofile, mtpe.Results.List[0].Type, schemaTypable{&ps, 0}); err != nil {
+ return err
+ }
+
+ if err := scp.createParser(nm, schema, &ps, fld).Parse(fld.Doc); err != nil {
+ return err
+ }
+
+ if ps.Ref.String() == "" && nm != gnm {
+ addExtension(&ps.VendorExtensible, "x-go-name", gnm)
+ }
+ seenProperties[nm] = gnm
+ schema.Properties[nm] = ps
+ }
+
+ }
+ if schema != nil && hasAllOf && len(schema.Properties) > 0 {
+ bschema.AllOf = append(bschema.AllOf, *schema)
+ }
+ for k := range schema.Properties {
+ if _, ok := seenProperties[k]; !ok {
+ delete(schema.Properties, k)
+ }
+ }
+ return nil
+}
+
+func (scp *schemaParser) parseStructType(gofile *ast.File, bschema *spec.Schema, tpe *ast.StructType, seenPreviously map[string]string) error {
+ if tpe.Fields == nil {
+ return nil
+ }
+ var schema *spec.Schema
+ seenProperties := seenPreviously
+ hasAllOf := false
+
+ for _, fld := range tpe.Fields.List {
+ if len(fld.Names) == 0 {
+ // if the field is annotated with swagger:ignore, ignore it
+ if ignored(fld.Doc) {
+ continue
+ }
+
+ _, ignore, _, err := parseJSONTag(fld)
+ if err != nil {
+ return err
+ }
+ if ignore {
+ continue
+ }
+
+ // if this created an allOf property then we have to rejig the schema var,
+ // because all the fields collected that aren't from embedded structs should go
+ // into their own proper schema.
+ // first process embedded structs in order of embedding
+ if allOfMember(fld.Doc) {
+ hasAllOf = true
+ if schema == nil {
+ schema = new(spec.Schema)
+ }
+ var newSch spec.Schema
+ // when the embedded struct is annotated with swagger:allOf it will be used as allOf property
+ // otherwise the fields will just be included as normal properties
+ if err := scp.parseAllOfMember(gofile, &newSch, fld.Type, seenProperties); err != nil {
+ return err
+ }
+
+ if fld.Doc != nil {
+ for _, cmt := range fld.Doc.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxAllOf.FindStringSubmatch(ln)
+ ml := len(matches)
+ if ml > 1 {
+ mv := matches[ml-1]
+ if mv != "" {
+ addExtension(&bschema.VendorExtensible, "x-class", mv)
+ }
+ }
+ }
+ }
+ }
+
+ bschema.AllOf = append(bschema.AllOf, newSch)
+ continue
+ }
+ if schema == nil {
+ schema = bschema
+ }
+
+ // when the embedded struct is annotated with swagger:allOf it will be used as allOf property
+ // otherwise the fields will just be included as normal properties
+ if err := scp.parseEmbeddedType(gofile, schema, fld.Type, seenProperties); err != nil {
+ return err
+ }
+ }
+ }
+ if schema == nil {
+ schema = bschema
+ }
+
+ // then add and possibly override values
+ if schema.Properties == nil {
+ schema.Properties = make(map[string]spec.Schema)
+ }
+ schema.Typed("object", "")
+ for _, fld := range tpe.Fields.List {
+ if len(fld.Names) > 0 && fld.Names[0] != nil && fld.Names[0].IsExported() {
+ // if the field is annotated with swagger:ignore, ignore it
+ if ignored(fld.Doc) {
+ continue
+ }
+
+ gnm := fld.Names[0].Name
+ nm, ignore, isString, err := parseJSONTag(fld)
+ if err != nil {
+ return err
+ }
+ if ignore {
+ for seenTagName, seenFieldName := range seenPreviously {
+ if seenFieldName == gnm {
+ delete(schema.Properties, seenTagName)
+ break
+ }
+ }
+ continue
+ }
+
+ ps := schema.Properties[nm]
+ if err := parseProperty(scp, gofile, fld.Type, schemaTypable{&ps, 0}); err != nil {
+ return err
+ }
+ if isString {
+ ps.Typed("string", ps.Format)
+ ps.Ref = spec.Ref{}
+ }
+ if strfmtName, ok := strfmtName(fld.Doc); ok {
+ ps.Typed("string", strfmtName)
+ ps.Ref = spec.Ref{}
+ }
+
+ if err := scp.createParser(nm, schema, &ps, fld).Parse(fld.Doc); err != nil {
+ return err
+ }
+
+ if ps.Ref.String() == "" && nm != gnm {
+ addExtension(&ps.VendorExtensible, "x-go-name", gnm)
+ }
+ // we have 2 cases:
+ // 1. a field with a different name overrides the tag
+ // 2. a field with a different name removes the tag
+ // so we need to save both the tag and the name
+ seenProperties[nm] = gnm
+ schema.Properties[nm] = ps
+ }
+ }
+ if schema != nil && hasAllOf && len(schema.Properties) > 0 {
+ bschema.AllOf = append(bschema.AllOf, *schema)
+ }
+ for k := range schema.Properties {
+ if _, ok := seenProperties[k]; !ok {
+ delete(schema.Properties, k)
+ }
+ }
+ return nil
+}
+
+var schemaVendorExtensibleParser = vendorExtensibleParser{
+ setExtensions: func(ext spec.Extensions, dest interface{}) {
+ dest.(*spec.Schema).Extensions = ext
+ },
+}
+
+func (scp *schemaParser) createParser(nm string, schema, ps *spec.Schema, fld *ast.Field) *sectionedParser {
+ sp := new(sectionedParser)
+
+ schemeType, err := ps.Type.MarshalJSON()
+ if err != nil {
+ return nil
+ }
+
+ if ps.Ref.String() == "" {
+ sp.setDescription = func(lines []string) { ps.Description = joinDropLast(lines) }
+ sp.taggers = []tagParser{
+ newSingleLineTagParser("maximum", &setMaximum{schemaValidations{ps}, rxf(rxMaximumFmt, "")}),
+ newSingleLineTagParser("minimum", &setMinimum{schemaValidations{ps}, rxf(rxMinimumFmt, "")}),
+ newSingleLineTagParser("multipleOf", &setMultipleOf{schemaValidations{ps}, rxf(rxMultipleOfFmt, "")}),
+ newSingleLineTagParser("minLength", &setMinLength{schemaValidations{ps}, rxf(rxMinLengthFmt, "")}),
+ newSingleLineTagParser("maxLength", &setMaxLength{schemaValidations{ps}, rxf(rxMaxLengthFmt, "")}),
+ newSingleLineTagParser("pattern", &setPattern{schemaValidations{ps}, rxf(rxPatternFmt, "")}),
+ newSingleLineTagParser("minItems", &setMinItems{schemaValidations{ps}, rxf(rxMinItemsFmt, "")}),
+ newSingleLineTagParser("maxItems", &setMaxItems{schemaValidations{ps}, rxf(rxMaxItemsFmt, "")}),
+ newSingleLineTagParser("unique", &setUnique{schemaValidations{ps}, rxf(rxUniqueFmt, "")}),
+ newSingleLineTagParser("enum", &setEnum{schemaValidations{ps}, rxf(rxEnumFmt, "")}),
+ newSingleLineTagParser("default", &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxDefaultFmt, "")}),
+ newSingleLineTagParser("type", &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxDefaultFmt, "")}),
+ newSingleLineTagParser("example", &setExample{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{ps}, rxf(rxExampleFmt, "")}),
+ newSingleLineTagParser("required", &setRequiredSchema{schema, nm}),
+ newSingleLineTagParser("readOnly", &setReadOnlySchema{ps}),
+ newSingleLineTagParser("discriminator", &setDiscriminator{schema, nm}),
+ newMultiLineTagParser("YAMLExtensionsBlock", newYamlParser(rxExtensions, schemaVendorExtensibleParser.ParseInto(ps)), true),
+ }
+
+ itemsTaggers := func(items *spec.Schema, level int) []tagParser {
+ schemeType, err := items.Type.MarshalJSON()
+ if err != nil {
+ return nil
+ }
+ // the expression is 1-indexed, not 0-indexed
+ itemsPrefix := fmt.Sprintf(rxItemsPrefixFmt, level+1)
+ return []tagParser{
+ newSingleLineTagParser(fmt.Sprintf("items%dMaximum", level), &setMaximum{schemaValidations{items}, rxf(rxMaximumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinimum", level), &setMinimum{schemaValidations{items}, rxf(rxMinimumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMultipleOf", level), &setMultipleOf{schemaValidations{items}, rxf(rxMultipleOfFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinLength", level), &setMinLength{schemaValidations{items}, rxf(rxMinLengthFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMaxLength", level), &setMaxLength{schemaValidations{items}, rxf(rxMaxLengthFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dPattern", level), &setPattern{schemaValidations{items}, rxf(rxPatternFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMinItems", level), &setMinItems{schemaValidations{items}, rxf(rxMinItemsFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dMaxItems", level), &setMaxItems{schemaValidations{items}, rxf(rxMaxItemsFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dUnique", level), &setUnique{schemaValidations{items}, rxf(rxUniqueFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dEnum", level), &setEnum{schemaValidations{items}, rxf(rxEnumFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dDefault", level), &setDefault{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{items}, rxf(rxDefaultFmt, itemsPrefix)}),
+ newSingleLineTagParser(fmt.Sprintf("items%dExample", level), &setExample{&spec.SimpleSchema{Type: string(schemeType)}, schemaValidations{items}, rxf(rxExampleFmt, itemsPrefix)}),
+ }
+ }
+
+ var parseArrayTypes func(expr ast.Expr, items *spec.SchemaOrArray, level int) ([]tagParser, error)
+ parseArrayTypes = func(expr ast.Expr, items *spec.SchemaOrArray, level int) ([]tagParser, error) {
+ if items == nil || items.Schema == nil {
+ return []tagParser{}, nil
+ }
+ switch iftpe := expr.(type) {
+ case *ast.ArrayType:
+ eleTaggers := itemsTaggers(items.Schema, level)
+ sp.taggers = append(eleTaggers, sp.taggers...)
+ otherTaggers, err := parseArrayTypes(iftpe.Elt, items.Schema.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ case *ast.Ident:
+ taggers := []tagParser{}
+ if iftpe.Obj == nil {
+ taggers = itemsTaggers(items.Schema, level)
+ }
+ otherTaggers, err := parseArrayTypes(expr, items.Schema.Items, level+1)
+ if err != nil {
+ return nil, err
+ }
+ return append(taggers, otherTaggers...), nil
+ case *ast.StarExpr:
+ otherTaggers, err := parseArrayTypes(iftpe.X, items, level)
+ if err != nil {
+ return nil, err
+ }
+ return otherTaggers, nil
+ default:
+ return nil, fmt.Errorf("unknown field type ele for %q", nm)
+ }
+ }
+ // check if this is a primitive; if so, parse the validations from the
+ // doc comments of the slice declaration.
+ if ftped, ok := fld.Type.(*ast.ArrayType); ok {
+ taggers, err := parseArrayTypes(ftped.Elt, ps.Items, 0)
+ if err != nil {
+ return sp
+ }
+ sp.taggers = append(taggers, sp.taggers...)
+ }
+
+ } else {
+ sp.taggers = []tagParser{
+ newSingleLineTagParser("required", &setRequiredSchema{schema, nm}),
+ }
+ }
+ return sp
+}
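+
+// For illustration, the validation tags createParser recognizes in a
+// property's doc comment; per setMinimum/setMaximum in validators.go, a
+// leading ">" or "<" marks the bound as exclusive:
+//
+//  // The age of the user.
+//  //
+//  // minimum: > 0
+//  // maximum: 130
+//  // required: true
+//  Age int `json:"age"`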
+
+// hasFilePathPrefix reports whether the filesystem path s begins with the
+// elements in prefix.
+//
+// taken from: https://github.com/golang/go/blob/c87520c5981ecdeaa99e7ba636a6088f900c0c75/src/cmd/go/internal/load/path.go#L60-L80
+func hasFilePathPrefix(s, prefix string) bool {
+ sv := strings.ToUpper(filepath.VolumeName(s))
+ pv := strings.ToUpper(filepath.VolumeName(prefix))
+ s = s[len(sv):]
+ prefix = prefix[len(pv):]
+ switch {
+ default:
+ return false
+ case sv != pv:
+ return false
+ case len(s) == len(prefix):
+ return s == prefix
+ case len(s) > len(prefix):
+ if prefix != "" && prefix[len(prefix)-1] == filepath.Separator {
+ return strings.HasPrefix(s, prefix)
+ }
+ return s[len(prefix)] == filepath.Separator && s[:len(prefix)] == prefix
+ }
+}
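+
+// For example, on a system with '/' as the separator:
+//
+//  hasFilePathPrefix("/go/src/app/main.go", "/go/src") // true
+//  hasFilePathPrefix("/go/srcdir/main.go", "/go/src")  // false: "srcdir" is a different element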
+
+func goroot() string {
+ cmd := exec.Command("go", "env", "GOROOT")
+ out, err := cmd.Output()
+ if err != nil {
+ panic("Could not detect GOROOT")
+ }
+ // trim the trailing newline that `go env` appends, so path joins work
+ return strings.TrimSpace(string(out))
+}
+
+func (scp *schemaParser) packageForFile(gofile *ast.File, tpe *ast.Ident) (*loader.PackageInfo, error) {
+ fn := scp.program.Fset.File(gofile.Pos()).Name()
+ if Debug {
+ log.Println("trying for", fn, tpe.Name, tpe.String())
+ }
+ fa, err := filepath.Abs(fn)
+ if err != nil {
+ return nil, err
+ }
+ if Debug {
+ log.Println("absolute path", fa)
+ }
+ var fgp string
+ gopath := os.Getenv("GOPATH")
+ if gopath == "" {
+ gopath = filepath.Join(os.Getenv("HOME"), "go")
+ }
+ for _, p := range append(filepath.SplitList(gopath), goroot()) {
+ pref := filepath.Join(p, "src")
+ if hasFilePathPrefix(fa, pref) {
+ fgp = filepath.Dir(strings.TrimPrefix(fa, pref))[1:]
+ break
+ }
+ }
+ if Debug {
+ log.Println("package in gopath", fgp)
+ }
+ for pkg, pkgInfo := range scp.program.AllPackages {
+ if Debug {
+ log.Println("inferring for", tpe.Name, "with", gofile.Name.Name, "at", pkg.Path(), "against", filepath.ToSlash(fgp))
+ }
+ if pkg.Name() == gofile.Name.Name && filepath.ToSlash(fgp) == pkg.Path() {
+ return pkgInfo, nil
+ }
+ }
+
+ return nil, fmt.Errorf("unable to determine package for %s", fn)
+}
+
+func (scp *schemaParser) packageForSelector(gofile *ast.File, expr ast.Expr) (*loader.PackageInfo, error) {
+
+ if pth, ok := expr.(*ast.Ident); ok {
+ // lookup import
+ var selPath string
+ for _, imp := range gofile.Imports {
+ pv, err := strconv.Unquote(imp.Path.Value)
+ if err != nil {
+ pv = imp.Path.Value
+ }
+ if imp.Name != nil {
+ if imp.Name.Name == pth.Name {
+ selPath = pv
+ break
+ }
+ } else {
+ pkg := scp.program.Package(pv)
+ if pkg != nil && pth.Name == pkg.Pkg.Name() {
+ selPath = pv
+ break
+ } else {
+ parts := strings.Split(pv, "/")
+ if len(parts) > 0 && parts[len(parts)-1] == pth.Name {
+ selPath = pv
+ break
+ }
+ }
+ }
+ }
+ // find actual struct
+ if selPath == "" {
+ return nil, fmt.Errorf("no import found for %s", pth.Name)
+ }
+
+ pkg := scp.program.Package(selPath)
+ if pkg != nil {
+ return pkg, nil
+ }
+ // TODO: I must admit this made me cry; it's not even a great solution.
+ pkg = scp.program.Package("github.com/go-swagger/go-swagger/vendor/" + selPath)
+ if pkg != nil {
+ return pkg, nil
+ }
+ for _, info := range scp.program.AllPackages {
+ n := info.String()
+ path := "/vendor/" + selPath
+ if strings.HasSuffix(n, path) {
+ pkg = scp.program.Package(n)
+ return pkg, nil
+ }
+ }
+ }
+ return nil, fmt.Errorf("can't determine selector path from %v", expr)
+}
+
+func (scp *schemaParser) makeRef(file *ast.File, pkg *loader.PackageInfo, gd *ast.GenDecl, ts *ast.TypeSpec, prop swaggerTypable) error {
+ sd := newSchemaDecl(file, gd, ts)
+ sd.inferNames()
+ // make an exception for time.Time because this is a well-known string format
+ if sd.Name == "Time" && pkg.String() == "time" {
+ return nil
+ }
+ ref, err := spec.NewRef("#/definitions/" + sd.Name)
+ if err != nil {
+ return err
+ }
+ prop.SetRef(ref)
+ scp.postDecls = append(scp.postDecls, *sd)
+ return nil
+}
+
+func (scp *schemaParser) parseIdentProperty(pkg *loader.PackageInfo, expr *ast.Ident, prop swaggerTypable) error {
+ // before proceeding, make an exception for time.Time because it is a well-known string format
+ if pkg.String() == "time" && expr.String() == "Time" {
+ prop.Typed("string", "date-time")
+ return nil
+ }
+
+ // find the file this selector points to
+ file, gd, ts, err := findSourceFile(pkg, expr.Name)
+
+ if err != nil {
+ err := swaggerSchemaForType(expr.Name, prop)
+ if err != nil {
+ return fmt.Errorf("package %s, error is: %v", pkg.String(), err)
+ }
+ return nil
+ }
+
+ if at, ok := ts.Type.(*ast.ArrayType); ok {
+ // the swagger spec defines strfmt base64 as []byte.
+ // in that case we don't actually want to turn it into an array;
+ // we want to turn it into a string instead
+ if _, ok := at.Elt.(*ast.Ident); ok {
+ if strfmtName, ok := strfmtName(gd.Doc); ok {
+ prop.Typed("string", strfmtName)
+ return nil
+ }
+ }
+ // this is a selector, so most likely not base64
+ if strfmtName, ok := strfmtName(gd.Doc); ok {
+ prop.Items().Typed("string", strfmtName)
+ return nil
+ }
+ }
+
+ // look at doc comments for swagger:strfmt [name];
+ // when found, this is the format name: create a schema with that name
+ if strfmtName, ok := strfmtName(gd.Doc); ok {
+ prop.Typed("string", strfmtName)
+ return nil
+ }
+
+ if enumName, ok := enumName(gd.Doc); ok {
+ var enumValues = getEnumValues(file, enumName)
+ if len(enumValues) > 0 {
+ prop.WithEnum(enumValues...)
+ var typeName = reflect.TypeOf(enumValues[0]).String()
+ err := swaggerSchemaForType(typeName, prop)
+ if err != nil {
+ return fmt.Errorf("file %s, error is: %v", file.Name, err)
+ }
+ }
+ }
+
+ if defaultName, ok := defaultName(gd.Doc); ok {
+ log.Println(defaultName)
+ return nil
+ }
+
+ if typeName, ok := typeName(gd.Doc); ok {
+ _ = swaggerSchemaForType(typeName, prop)
+ return nil
+ }
+
+ if isAliasParam(prop) || aliasParam(gd.Doc) {
+ itype, ok := ts.Type.(*ast.Ident)
+ if ok {
+ err := swaggerSchemaForType(itype.Name, prop)
+ if err == nil {
+ return nil
+ }
+ }
+ }
+ switch tpe := ts.Type.(type) {
+ case *ast.ArrayType:
+ return scp.makeRef(file, pkg, gd, ts, prop)
+ case *ast.StructType:
+ return scp.makeRef(file, pkg, gd, ts, prop)
+
+ case *ast.Ident:
+ return scp.makeRef(file, pkg, gd, ts, prop)
+
+ case *ast.StarExpr:
+ return parseProperty(scp, file, tpe.X, prop)
+
+ case *ast.SelectorExpr:
+ // return scp.refForSelector(file, gd, tpe, ts, prop)
+ return scp.makeRef(file, pkg, gd, ts, prop)
+
+ case *ast.InterfaceType:
+ return scp.makeRef(file, pkg, gd, ts, prop)
+
+ case *ast.MapType:
+ return scp.makeRef(file, pkg, gd, ts, prop)
+
+ default:
+ err := swaggerSchemaForType(expr.Name, prop)
+ if err != nil {
+ return fmt.Errorf("package %s, error is: %v", pkg.String(), err)
+ }
+ return nil
+ }
+
+}
+
+func (scp *schemaParser) typeForSelector(gofile *ast.File, expr *ast.SelectorExpr, prop swaggerTypable) error {
+ pkg, err := scp.packageForSelector(gofile, expr.X)
+ if err != nil {
+ return err
+ }
+
+ return scp.parseIdentProperty(pkg, expr.Sel, prop)
+}
+
+func findSourceFile(pkg *loader.PackageInfo, typeName string) (*ast.File, *ast.GenDecl, *ast.TypeSpec, error) {
+ for _, file := range pkg.Files {
+ for _, decl := range file.Decls {
+ if gd, ok := decl.(*ast.GenDecl); ok {
+ for _, gs := range gd.Specs {
+ if ts, ok := gs.(*ast.TypeSpec); ok {
+ strfmtNme, isStrfmt := strfmtName(gd.Doc)
+ if (isStrfmt && strfmtNme == typeName) || ts.Name != nil && ts.Name.Name == typeName {
+ return file, gd, ts, nil
+ }
+ }
+ }
+ }
+ }
+ }
+ return nil, nil, nil, fmt.Errorf("unable to find %s in %s", typeName, pkg.String())
+}
+
+func allOfMember(comments *ast.CommentGroup) bool {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ if rxAllOf.MatchString(ln) {
+ return true
+ }
+ }
+ }
+ }
+ return false
+}
+
+func fileParam(comments *ast.CommentGroup) bool {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ if rxFileUpload.MatchString(ln) {
+ return true
+ }
+ }
+ }
+ }
+ return false
+}
+
+func strfmtName(comments *ast.CommentGroup) (string, bool) {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxStrFmt.FindStringSubmatch(ln)
+ if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 {
+ return strings.TrimSpace(matches[1]), true
+ }
+ }
+ }
+ }
+ return "", false
+}
+
+func ignored(comments *ast.CommentGroup) bool {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ if rxIgnoreOverride.MatchString(ln) {
+ return true
+ }
+ }
+ }
+ }
+ return false
+}
+
+func enumName(comments *ast.CommentGroup) (string, bool) {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxEnum.FindStringSubmatch(ln)
+ if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 {
+ return strings.TrimSpace(matches[1]), true
+ }
+ }
+ }
+ }
+ return "", false
+}
+
+func aliasParam(comments *ast.CommentGroup) bool {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ if rxAlias.MatchString(ln) {
+ return true
+ }
+ }
+ }
+ }
+ return false
+}
+
+func defaultName(comments *ast.CommentGroup) (string, bool) {
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxDefault.FindStringSubmatch(ln)
+ if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 {
+ return strings.TrimSpace(matches[1]), true
+ }
+ }
+ }
+ }
+ return "", false
+}
+
+func typeName(comments *ast.CommentGroup) (string, bool) {
+
+ var typ string
+ if comments != nil {
+ for _, cmt := range comments.List {
+ for _, ln := range strings.Split(cmt.Text, "\n") {
+ matches := rxType.FindStringSubmatch(ln)
+ if len(matches) > 1 && len(strings.TrimSpace(matches[1])) > 0 {
+ typ = strings.TrimSpace(matches[1])
+ return typ, true
+ }
+ }
+ }
+ }
+ return "", false
+}
+
+func parseProperty(scp *schemaParser, gofile *ast.File, fld ast.Expr, prop swaggerTypable) error {
+ switch ftpe := fld.(type) {
+ case *ast.Ident: // simple value
+ pkg, err := scp.packageForFile(gofile, ftpe)
+ if err != nil {
+ return err
+ }
+ return scp.parseIdentProperty(pkg, ftpe, prop)
+
+ case *ast.StarExpr: // pointer to something, optional by default
+ if err := parseProperty(scp, gofile, ftpe.X, prop); err != nil {
+ return err
+ }
+
+ case *ast.ArrayType: // slice type
+ if err := parseProperty(scp, gofile, ftpe.Elt, prop.Items()); err != nil {
+ return err
+ }
+
+ case *ast.StructType:
+ schema := prop.Schema()
+ if schema == nil {
+ return fmt.Errorf("items doesn't support embedded structs")
+ }
+ return scp.parseStructType(gofile, prop.Schema(), ftpe, make(map[string]string))
+
+ case *ast.SelectorExpr:
+ err := scp.typeForSelector(gofile, ftpe, prop)
+ return err
+
+ case *ast.MapType:
+ // check if the key is a string type; if not, print a message
+ // and skip the map property. Only maps with string keys can go into additional properties.
+ sch := prop.Schema()
+ if sch == nil {
+ return fmt.Errorf("items doesn't support maps")
+ }
+ if keyIdent, ok := ftpe.Key.(*ast.Ident); sch != nil && ok {
+ if keyIdent.Name == "string" {
+ if sch.AdditionalProperties == nil {
+ sch.AdditionalProperties = new(spec.SchemaOrBool)
+ }
+ sch.AdditionalProperties.Allows = false
+ if sch.AdditionalProperties.Schema == nil {
+ sch.AdditionalProperties.Schema = new(spec.Schema)
+ }
+ if err := parseProperty(scp, gofile, ftpe.Value, schemaTypable{sch.AdditionalProperties.Schema, 0}); err != nil {
+ return err
+ }
+ sch.Typed("object", "")
+ }
+ }
+
+ case *ast.InterfaceType:
+ prop.Schema().Typed("object", "")
+ default:
+ pos := "unknown file:unknown position"
+ if scp != nil && scp.program != nil && scp.program.Fset != nil {
+ pos = scp.program.Fset.Position(fld.Pos()).String()
+ }
+ return fmt.Errorf("Expr (%s) is unsupported for a schema", pos)
+ }
+ return nil
+}
+
+func parseJSONTag(field *ast.Field) (name string, ignore bool, isString bool, err error) {
+ if len(field.Names) > 0 {
+ name = field.Names[0].Name
+ }
+ if field.Tag != nil && len(strings.TrimSpace(field.Tag.Value)) > 0 {
+ tv, err := strconv.Unquote(field.Tag.Value)
+ if err != nil {
+ return name, false, false, err
+ }
+
+ if strings.TrimSpace(tv) != "" {
+ st := reflect.StructTag(tv)
+ jsonParts := strings.Split(st.Get("json"), ",")
+ jsonName := jsonParts[0]
+
+ if len(jsonParts) > 1 && jsonParts[1] == "string" {
+ isString = isFieldStringable(field.Type)
+ }
+
+ if jsonName == "-" {
+ return name, true, isString, nil
+ } else if jsonName != "" {
+ return jsonName, false, isString, nil
+ }
+ }
+ }
+ return name, false, false, nil
+}
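+
+// Examples of how parseJSONTag maps struct tags to (name, ignore, isString):
+//
+//  Name string `json:"name"`      // ("name", false, false)
+//  Omit string `json:"-"`         // ("Omit", true, false)
+//  ID   int64  `json:"id,string"` // ("id", false, true): int64 is stringable
+//  Bare string                    // ("Bare", false, false): falls back to the Go name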
+
+ // isFieldStringable checks whether the field type is a scalar. If the field
+ // type is an *ast.StarExpr, i.e. a pointer type, it checks whether the
+ // pointee is a scalar. Otherwise, the ",string" directive doesn't apply.
+func isFieldStringable(tpe ast.Expr) bool {
+ if ident, ok := tpe.(*ast.Ident); ok {
+ switch ident.Name {
+ case "int", "int8", "int16", "int32", "int64",
+ "uint", "uint8", "uint16", "uint32", "uint64",
+ "float64", "string", "bool":
+ return true
+ }
+ } else if starExpr, ok := tpe.(*ast.StarExpr); ok {
+ return isFieldStringable(starExpr.X)
+ } else {
+ return false
+ }
+ return false
+}
diff --git a/vendor/github.com/go-swagger/go-swagger/scan/validators.go b/vendor/github.com/go-swagger/go-swagger/scan/validators.go
new file mode 100644
index 000000000..45caf8783
--- /dev/null
+++ b/vendor/github.com/go-swagger/go-swagger/scan/validators.go
@@ -0,0 +1,829 @@
+//go:build !go1.11
+// +build !go1.11
+
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package scan
+
+import (
+ "encoding/json"
+ "fmt"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+type validationBuilder interface {
+ SetMaximum(float64, bool)
+ SetMinimum(float64, bool)
+ SetMultipleOf(float64)
+
+ SetMinItems(int64)
+ SetMaxItems(int64)
+
+ SetMinLength(int64)
+ SetMaxLength(int64)
+ SetPattern(string)
+
+ SetUnique(bool)
+ SetEnum(string)
+ SetDefault(interface{})
+ SetExample(interface{})
+}
+
+type valueParser interface {
+ Parse([]string) error
+ Matches(string) bool
+}
+
+type setMaximum struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMaximum) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 2 && len(matches[2]) > 0 {
+ max, err := strconv.ParseFloat(matches[2], 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMaximum(max, matches[1] == "<")
+ }
+ return nil
+}
+
+func (sm *setMaximum) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+type setMinimum struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMinimum) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+func (sm *setMinimum) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 2 && len(matches[2]) > 0 {
+ min, err := strconv.ParseFloat(matches[2], 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMinimum(min, matches[1] == ">")
+ }
+ return nil
+}
+
+type setMultipleOf struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMultipleOf) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+func (sm *setMultipleOf) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 2 && len(matches[1]) > 0 {
+ multipleOf, err := strconv.ParseFloat(matches[1], 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMultipleOf(multipleOf)
+ }
+ return nil
+}
+
+type setMaxItems struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMaxItems) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+func (sm *setMaxItems) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ maxItems, err := strconv.ParseInt(matches[1], 10, 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMaxItems(maxItems)
+ }
+ return nil
+}
+
+type setMinItems struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMinItems) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+func (sm *setMinItems) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ minItems, err := strconv.ParseInt(matches[1], 10, 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMinItems(minItems)
+ }
+ return nil
+}
+
+type setMaxLength struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMaxLength) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ maxLength, err := strconv.ParseInt(matches[1], 10, 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMaxLength(maxLength)
+ }
+ return nil
+}
+
+func (sm *setMaxLength) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+type setMinLength struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setMinLength) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ minLength, err := strconv.ParseInt(matches[1], 10, 64)
+ if err != nil {
+ return err
+ }
+ sm.builder.SetMinLength(minLength)
+ }
+ return nil
+}
+
+func (sm *setMinLength) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+type setPattern struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setPattern) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ sm.builder.SetPattern(matches[1])
+ }
+ return nil
+}
+
+func (sm *setPattern) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+type setCollectionFormat struct {
+ builder operationValidationBuilder
+ rx *regexp.Regexp
+}
+
+func (sm *setCollectionFormat) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sm.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ sm.builder.SetCollectionFormat(matches[1])
+ }
+ return nil
+}
+
+func (sm *setCollectionFormat) Matches(line string) bool {
+ return sm.rx.MatchString(line)
+}
+
+type setUnique struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (su *setUnique) Matches(line string) bool {
+ return su.rx.MatchString(line)
+}
+
+func (su *setUnique) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := su.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ req, err := strconv.ParseBool(matches[1])
+ if err != nil {
+ return err
+ }
+ su.builder.SetUnique(req)
+ }
+ return nil
+}
+
+type setEnum struct {
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (se *setEnum) Matches(line string) bool {
+ return se.rx.MatchString(line)
+}
+
+func (se *setEnum) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := se.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ se.builder.SetEnum(matches[1])
+ }
+ return nil
+}
+
+func parseValueFromSchema(s string, schema *spec.SimpleSchema) (interface{}, error) {
+ if schema != nil {
+ switch strings.Trim(schema.TypeName(), "\"") {
+ case "integer", "int", "int64", "int32", "int16":
+ return strconv.Atoi(s)
+ case "bool", "boolean":
+ return strconv.ParseBool(s)
+ case "number", "float64", "float32":
+ return strconv.ParseFloat(s, 64)
+ case "object":
+ var obj map[string]interface{}
+ if err := json.Unmarshal([]byte(s), &obj); err != nil {
+ // If we can't parse it, just return the string.
+ return s, nil
+ }
+ return obj, nil
+ case "array":
+ var slice []interface{}
+ if err := json.Unmarshal([]byte(s), &slice); err != nil {
+ // If we can't parse it, just return the string.
+ return s, nil
+ }
+ return slice, nil
+ default:
+ return s, nil
+ }
+ } else {
+ return s, nil
+ }
+}
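+
+// For example:
+//
+//  parseValueFromSchema("42", &spec.SimpleSchema{Type: "integer"})   // 42 (int)
+//  parseValueFromSchema("true", &spec.SimpleSchema{Type: "boolean"}) // true (bool)
+//  parseValueFromSchema("[1,2]", &spec.SimpleSchema{Type: "array"})  // decoded JSON slice
+//  parseValueFromSchema("free text", nil)                            // "free text", unchanged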
+
+type setDefault struct {
+ scheme *spec.SimpleSchema
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (sd *setDefault) Matches(line string) bool {
+ return sd.rx.MatchString(line)
+}
+
+func (sd *setDefault) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := sd.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ d, err := parseValueFromSchema(matches[1], sd.scheme)
+ if err != nil {
+ return err
+ }
+ sd.builder.SetDefault(d)
+ }
+ return nil
+}
+
+type setExample struct {
+ scheme *spec.SimpleSchema
+ builder validationBuilder
+ rx *regexp.Regexp
+}
+
+func (se *setExample) Matches(line string) bool {
+ return se.rx.MatchString(line)
+}
+
+func (se *setExample) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := se.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ d, err := parseValueFromSchema(matches[1], se.scheme)
+ if err != nil {
+ return err
+ }
+ se.builder.SetExample(d)
+ }
+ return nil
+}
+
+type matchOnlyParam struct {
+ tgt *spec.Parameter
+ rx *regexp.Regexp
+}
+
+func (mo *matchOnlyParam) Matches(line string) bool {
+ return mo.rx.MatchString(line)
+}
+
+func (mo *matchOnlyParam) Parse(lines []string) error {
+ return nil
+}
+
+type setRequiredParam struct {
+ tgt *spec.Parameter
+}
+
+func (su *setRequiredParam) Matches(line string) bool {
+ return rxRequired.MatchString(line)
+}
+
+func (su *setRequiredParam) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := rxRequired.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ req, err := strconv.ParseBool(matches[1])
+ if err != nil {
+ return err
+ }
+ su.tgt.Required = req
+ }
+ return nil
+}
+
+type setReadOnlySchema struct {
+ tgt *spec.Schema
+}
+
+func (su *setReadOnlySchema) Matches(line string) bool {
+ return rxReadOnly.MatchString(line)
+}
+
+func (su *setReadOnlySchema) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := rxReadOnly.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ req, err := strconv.ParseBool(matches[1])
+ if err != nil {
+ return err
+ }
+ su.tgt.ReadOnly = req
+ }
+ return nil
+}
+
+type setDiscriminator struct {
+ schema *spec.Schema
+ field string
+}
+
+func (su *setDiscriminator) Matches(line string) bool {
+ return rxDiscriminator.MatchString(line)
+}
+
+func (su *setDiscriminator) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := rxDiscriminator.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ req, err := strconv.ParseBool(matches[1])
+ if err != nil {
+ return err
+ }
+ if req {
+ su.schema.Discriminator = su.field
+ } else {
+ if su.schema.Discriminator == su.field {
+ su.schema.Discriminator = ""
+ }
+ }
+ }
+ return nil
+}
+
+type setRequiredSchema struct {
+ schema *spec.Schema
+ field string
+}
+
+func (su *setRequiredSchema) Matches(line string) bool {
+ return rxRequired.MatchString(line)
+}
+
+func (su *setRequiredSchema) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := rxRequired.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ req, err := strconv.ParseBool(matches[1])
+ if err != nil {
+ return err
+ }
+ midx := -1
+ for i, nm := range su.schema.Required {
+ if nm == su.field {
+ midx = i
+ break
+ }
+ }
+ if req {
+ if midx < 0 {
+ su.schema.Required = append(su.schema.Required, su.field)
+ }
+ } else if midx >= 0 {
+ su.schema.Required = append(su.schema.Required[:midx], su.schema.Required[midx+1:]...)
+ }
+ }
+ return nil
+}
+
+func newMultilineDropEmptyParser(rx *regexp.Regexp, set func([]string)) *multiLineDropEmptyParser {
+ return &multiLineDropEmptyParser{
+ rx: rx,
+ set: set,
+ }
+}
+
+type multiLineDropEmptyParser struct {
+ set func([]string)
+ rx *regexp.Regexp
+}
+
+func (m *multiLineDropEmptyParser) Matches(line string) bool {
+ return m.rx.MatchString(line)
+}
+
+func (m *multiLineDropEmptyParser) Parse(lines []string) error {
+ m.set(removeEmptyLines(lines))
+ return nil
+}
+
+func newSetSchemes(set func([]string)) *setSchemes {
+ return &setSchemes{
+ set: set,
+ rx: rxSchemes,
+ }
+}
+
+type setSchemes struct {
+ set func([]string)
+ rx *regexp.Regexp
+}
+
+func (ss *setSchemes) Matches(line string) bool {
+ return ss.rx.MatchString(line)
+}
+
+func (ss *setSchemes) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+ matches := ss.rx.FindStringSubmatch(lines[0])
+ if len(matches) > 1 && len(matches[1]) > 0 {
+ sch := strings.Split(matches[1], ", ")
+
+ var schemes []string
+ for _, s := range sch {
+ ts := strings.TrimSpace(s)
+ if ts != "" {
+ schemes = append(schemes, ts)
+ }
+ }
+ ss.set(schemes)
+ }
+ return nil
+}
+
+func newSetSecurity(rx *regexp.Regexp, setter func([]map[string][]string)) *setSecurity {
+ return &setSecurity{
+ set: setter,
+ rx: rx,
+ }
+}
+
+type setSecurity struct {
+ set func([]map[string][]string)
+ rx *regexp.Regexp
+}
+
+func (ss *setSecurity) Matches(line string) bool {
+ return ss.rx.MatchString(line)
+}
+
+func (ss *setSecurity) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+
+ var result []map[string][]string
+ for _, line := range lines {
+ kv := strings.SplitN(line, ":", 2)
+ scopes := []string{}
+ var key string
+
+ if len(kv) > 1 {
+ scs := strings.Split(kv[1], ",")
+ for _, scope := range scs {
+ tr := strings.TrimSpace(scope)
+ if tr != "" {
+ tr = strings.SplitAfter(tr, " ")[0]
+ scopes = append(scopes, strings.TrimSpace(tr))
+ }
+ }
+
+ key = strings.TrimSpace(kv[0])
+
+ result = append(result, map[string][]string{key: scopes})
+ }
+ }
+ ss.set(result)
+ return nil
+}
+
+func newSetResponses(definitions map[string]spec.Schema, responses map[string]spec.Response, setter func(*spec.Response, map[int]spec.Response)) *setOpResponses {
+ return &setOpResponses{
+ set: setter,
+ rx: rxResponses,
+ definitions: definitions,
+ responses: responses,
+ }
+}
+
+type setOpResponses struct {
+ set func(*spec.Response, map[int]spec.Response)
+ rx *regexp.Regexp
+ definitions map[string]spec.Schema
+ responses map[string]spec.Response
+}
+
+func (ss *setOpResponses) Matches(line string) bool {
+ return ss.rx.MatchString(line)
+}
+
+// ResponseTag is used when specifying a response that points to a defined swagger:response
+const ResponseTag = "response"
+
+// BodyTag is used when specifying a response that points to a model/schema
+const BodyTag = "body"
+
+// DescriptionTag is used when specifying a description of the response
+const DescriptionTag = "description"
+
+func parseTags(line string) (modelOrResponse string, arrays int, isDefinitionRef bool, description string, err error) {
+ tags := strings.Split(line, " ")
+ parsedModelOrResponse := false
+
+ for i, tagAndValue := range tags {
+ tagValList := strings.SplitN(tagAndValue, ":", 2)
+ var tag, value string
+ if len(tagValList) > 1 {
+ tag = tagValList[0]
+ value = tagValList[1]
+ } else {
+ // TODO: print a warning and, in the long term, stop supporting untagged values.
+ // Add a default tag if none is supplied.
+ if i == 0 {
+ tag = ResponseTag
+ } else {
+ tag = DescriptionTag
+ }
+ value = tagValList[0]
+ }
+
+ foundModelOrResponse := false
+ if !parsedModelOrResponse {
+ if tag == BodyTag {
+ foundModelOrResponse = true
+ isDefinitionRef = true
+ }
+ if tag == ResponseTag {
+ foundModelOrResponse = true
+ isDefinitionRef = false
+ }
+ }
+ if foundModelOrResponse {
+ // Read the model or response tag
+ parsedModelOrResponse = true
+ // Check for nested arrays
+ arrays = 0
+ for strings.HasPrefix(value, "[]") {
+ arrays++
+ value = value[2:]
+ }
+ //What's left over is the model name
+ modelOrResponse = value
+ } else {
+ foundDescription := false
+ if tag == DescriptionTag {
+ foundDescription = true
+ }
+ if foundDescription {
+ // Descriptions are special: they consume the rest of the line
+ descriptionWords := []string{value}
+ if i < len(tags)-1 {
+ descriptionWords = append(descriptionWords, tags[i+1:]...)
+ }
+ description = strings.Join(descriptionWords, " ")
+ break
+ } else {
+ if tag == ResponseTag || tag == BodyTag || tag == DescriptionTag {
+ err = fmt.Errorf("Found valid tag %s, but not in a valid position", tag)
+ } else {
+ err = fmt.Errorf("Found invalid tag: %s", tag)
+ }
+ //return error
+ return
+ }
+ }
+ }
+
+ // TODO: maybe return an error if !parsedModelOrResponse
+ return
+}
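+
+// For illustration, some annotation values and what parseTags returns for them
+// as (modelOrResponse, arrays, isDefinitionRef, description):
+//
+//  "body:[]User a list of users" // ("User", 1, true, "a list of users")
+//  "response:genericError"       // ("genericError", 0, false, "")
+//  "genericError"                // same as above: an untagged first value defaults to response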
+
+func (ss *setOpResponses) Parse(lines []string) error {
+ if len(lines) == 0 || (len(lines) == 1 && len(lines[0]) == 0) {
+ return nil
+ }
+
+ var def *spec.Response
+ var scr map[int]spec.Response
+
+ for _, line := range lines {
+ kv := strings.SplitN(line, ":", 2)
+ var key, value string
+
+ if len(kv) > 1 {
+ key = strings.TrimSpace(kv[0])
+ if key == "" {
+ // this must be some weird empty line
+ continue
+ }
+ value = strings.TrimSpace(kv[1])
+ if value == "" {
+ var resp spec.Response
+ if strings.EqualFold("default", key) {
+ if def == nil {
+ def = &resp
+ }
+ } else {
+ if sc, err := strconv.Atoi(key); err == nil {
+ if scr == nil {
+ scr = make(map[int]spec.Response)
+ }
+ scr[sc] = resp
+ }
+ }
+ continue
+ }
+ refTarget, arrays, isDefinitionRef, description, err := parseTags(value)
+ if err != nil {
+ return err
+ }
+ // make an exception when the ref target is a known definition rather than a response
+ if _, ok := ss.responses[refTarget]; !ok {
+ if _, ok := ss.definitions[refTarget]; ok {
+ isDefinitionRef = true
+ }
+ }
+
+ var ref spec.Ref
+ if isDefinitionRef {
+ if description == "" {
+ description = refTarget
+ }
+ ref, err = spec.NewRef("#/definitions/" + refTarget)
+ } else {
+ ref, err = spec.NewRef("#/responses/" + refTarget)
+ }
+ if err != nil {
+ return err
+ }
+
+ // the description should be used in any case
+ resp := spec.Response{ResponseProps: spec.ResponseProps{Description: description}}
+
+ if isDefinitionRef {
+ resp.Schema = new(spec.Schema)
+ resp.Description = description
+ if arrays == 0 {
+ resp.Schema.Ref = ref
+ } else {
+ cs := resp.Schema
+ for i := 0; i < arrays; i++ {
+ cs.Typed("array", "")
+ cs.Items = new(spec.SchemaOrArray)
+ cs.Items.Schema = new(spec.Schema)
+ cs = cs.Items.Schema
+ }
+ cs.Ref = ref
+ }
+ // the ref may be empty when only a description tag is used
+ } else if len(refTarget) > 0 {
+ resp.Ref = ref
+ }
+
+ if strings.EqualFold("default", key) {
+ if def == nil {
+ def = &resp
+ }
+ } else {
+ if sc, err := strconv.Atoi(key); err == nil {
+ if scr == nil {
+ scr = make(map[int]spec.Response)
+ }
+ scr[sc] = resp
+ }
+ }
+ }
+ }
+ ss.set(def, scr)
+ return nil
+}
+
+func parseEnum(val string, s *spec.SimpleSchema) []interface{} {
+ list := strings.Split(val, ",")
+ interfaceSlice := make([]interface{}, len(list))
+ for i, d := range list {
+ v, err := parseValueFromSchema(d, s)
+ if err != nil {
+ interfaceSlice[i] = d
+ continue
+ }
+
+ interfaceSlice[i] = v
+ }
+ return interfaceSlice
+}
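Taken together, `parseTags` and `setOpResponses.Parse` accept response annotation lines of the form `<status or "default">: <value>`, where the value is a bare response name, a `body:` reference to a definition (with one `[]` prefix per level of array nesting), and/or a trailing free-text description. A hedged sketch of that grammar in a doc comment; the route and the `user`, `notFound`, `apiError` and `genericError` names are hypothetical stand-ins for models declared elsewhere:

```go
package docs

// swagger:route GET /users/{id} users getUser
//
// Responses:
//   200: body:user the requested user
//   404: notFound
//   500: body:[]apiError
//   default: genericError
```

Per the parser above, `200` becomes a schema ref to `#/definitions/user` with the description "the requested user", the bare `notFound` becomes a `#/responses/notFound` ref, and the `[]` prefix wraps the `apiError` schema in an array.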
diff --git a/vendor/github.com/gorilla/handlers/LICENSE b/vendor/github.com/gorilla/handlers/LICENSE
new file mode 100644
index 000000000..66ea3c8ae
--- /dev/null
+++ b/vendor/github.com/gorilla/handlers/LICENSE
@@ -0,0 +1,22 @@
+Copyright (c) 2013 The Gorilla Handlers Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+ Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/gorilla/handlers/README.md b/vendor/github.com/gorilla/handlers/README.md
new file mode 100644
index 000000000..6eba66bf3
--- /dev/null
+++ b/vendor/github.com/gorilla/handlers/README.md
@@ -0,0 +1,56 @@
+gorilla/handlers
+================
+[![GoDoc](https://godoc.org/github.com/gorilla/handlers?status.svg)](https://godoc.org/github.com/gorilla/handlers)
+[![CircleCI](https://circleci.com/gh/gorilla/handlers.svg?style=svg)](https://circleci.com/gh/gorilla/handlers)
+[![Sourcegraph](https://sourcegraph.com/github.com/gorilla/handlers/-/badge.svg)](https://sourcegraph.com/github.com/gorilla/handlers?badge)
+
+
+Package handlers is a collection of handlers (aka "HTTP middleware") for use
+with Go's `net/http` package (or any framework supporting `http.Handler`), including:
+
+* [**LoggingHandler**](https://godoc.org/github.com/gorilla/handlers#LoggingHandler) for logging HTTP requests in the Apache [Common Log
+ Format](http://httpd.apache.org/docs/2.2/logs.html#common).
+* [**CombinedLoggingHandler**](https://godoc.org/github.com/gorilla/handlers#CombinedLoggingHandler) for logging HTTP requests in the Apache [Combined Log
+ Format](http://httpd.apache.org/docs/2.2/logs.html#combined) commonly used by
+ both Apache and nginx.
+* [**CompressHandler**](https://godoc.org/github.com/gorilla/handlers#CompressHandler) for gzipping responses.
+* [**ContentTypeHandler**](https://godoc.org/github.com/gorilla/handlers#ContentTypeHandler) for validating requests against a list of accepted
+ content types.
+* [**MethodHandler**](https://godoc.org/github.com/gorilla/handlers#MethodHandler) for matching HTTP methods against handlers in a
+ `map[string]http.Handler`
+* [**ProxyHeaders**](https://godoc.org/github.com/gorilla/handlers#ProxyHeaders) for populating `r.RemoteAddr` and `r.URL.Scheme` based on the
+ `X-Forwarded-For`, `X-Real-IP`, `X-Forwarded-Proto` and RFC7239 `Forwarded`
+ headers when running a Go server behind an HTTP reverse proxy.
+* [**CanonicalHost**](https://godoc.org/github.com/gorilla/handlers#CanonicalHost) for re-directing to the preferred host when handling multiple
+ domains (i.e. multiple CNAME aliases).
+* [**RecoveryHandler**](https://godoc.org/github.com/gorilla/handlers#RecoveryHandler) for recovering from unexpected panics.
+
+Other handlers are documented [on the Gorilla
+website](https://www.gorillatoolkit.org/pkg/handlers).
+
+## Example
+
+A simple example using `handlers.LoggingHandler` and `handlers.CompressHandler`:
+
+```go
+import (
+ "net/http"
+ "os"
+
+ "github.com/gorilla/handlers"
+)
+
+func main() {
+ r := http.NewServeMux()
+
+ // Only log requests to our admin dashboard to stdout
+ r.Handle("/admin", handlers.LoggingHandler(os.Stdout, http.HandlerFunc(ShowAdminDashboard)))
+ r.HandleFunc("/", ShowIndex)
+
+ // Wrap our server with our gzip handler to gzip compress all responses.
+ http.ListenAndServe(":8000", handlers.CompressHandler(r))
+}
+```
+
+## License
+
+BSD licensed. See the included LICENSE file for details.
+
diff --git a/vendor/github.com/gorilla/handlers/canonical.go b/vendor/github.com/gorilla/handlers/canonical.go
new file mode 100644
index 000000000..8437fefc1
--- /dev/null
+++ b/vendor/github.com/gorilla/handlers/canonical.go
@@ -0,0 +1,74 @@
+package handlers
+
+import (
+ "net/http"
+ "net/url"
+ "strings"
+)
+
+type canonical struct {
+ h http.Handler
+ domain string
+ code int
+}
+
+// CanonicalHost is HTTP middleware that re-directs requests to the canonical
+// domain. It accepts a domain and a status code (e.g. 301 or 302) and
+// re-directs clients to this domain. The existing request path is maintained.
+//
+// Note: If the provided domain is considered invalid by url.Parse or otherwise
+// returns an empty scheme or host, clients are not re-directed.
+//
+// Example:
+//
+// r := mux.NewRouter()
+// canonical := handlers.CanonicalHost("http://www.gorillatoolkit.org", 302)
+// r.HandleFunc("/route", YourHandler)
+//
+// log.Fatal(http.ListenAndServe(":7000", canonical(r)))
+//
+func CanonicalHost(domain string, code int) func(h http.Handler) http.Handler {
+ fn := func(h http.Handler) http.Handler {
+ return canonical{h, domain, code}
+ }
+
+ return fn
+}
+
+func (c canonical) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+ dest, err := url.Parse(c.domain)
+ if err != nil {
+ // Call the next handler if the provided domain fails to parse.
+ c.h.ServeHTTP(w, r)
+ return
+ }
+
+ if dest.Scheme == "" || dest.Host == "" {
+ // Call the next handler if the scheme or host are empty.
+ // Note that url.Parse won't fail in this case.
+ c.h.ServeHTTP(w, r)
+ return
+ }
+
+ if !strings.EqualFold(cleanHost(r.Host), dest.Host) {
+ // Re-build the destination URL
+ dest := dest.Scheme + "://" + dest.Host + r.URL.Path
+ if r.URL.RawQuery != "" {
+ dest += "?" + r.URL.RawQuery
+ }
+ http.Redirect(w, r, dest, c.code)
+ return
+ }
+
+ c.h.ServeHTTP(w, r)
+}
+
+// cleanHost cleans invalid Host headers by stripping anything after '/' or ' '.
+// This is backported from Go 1.5 (in response to issue #11206) and attempts to
+// mitigate malformed Host headers that do not match the format in RFC7230.
+func cleanHost(in string) string {
+ if i := strings.IndexAny(in, " /"); i != -1 {
+ return in[:i]
+ }
+ return in
+}
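A minimal, hedged sketch of the redirect behaviour using `net/http/httptest`; the hosts are placeholders:

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"

	"github.com/gorilla/handlers"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "ok")
	})

	// Redirect any non-canonical host to www.example.com with a 301.
	wrapped := handlers.CanonicalHost("http://www.example.com", http.StatusMovedPermanently)(mux)

	req := httptest.NewRequest("GET", "http://example.com/route?x=1", nil)
	rec := httptest.NewRecorder()
	wrapped.ServeHTTP(rec, req)

	// Path and query are preserved, per ServeHTTP above:
	// 301 http://www.example.com/route?x=1
	fmt.Println(rec.Code, rec.Header().Get("Location"))
}
```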
diff --git a/vendor/github.com/gorilla/handlers/compress.go b/vendor/github.com/gorilla/handlers/compress.go
new file mode 100644
index 000000000..1e95f1ccb
--- /dev/null
+++ b/vendor/github.com/gorilla/handlers/compress.go
@@ -0,0 +1,143 @@
+// Copyright 2013 The Gorilla Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package handlers
+
+import (
+ "compress/flate"
+ "compress/gzip"
+ "io"
+ "net/http"
+ "strings"
+
+ "github.com/felixge/httpsnoop"
+)
+
+const acceptEncoding string = "Accept-Encoding"
+
+type compressResponseWriter struct {
+ compressor io.Writer
+ w http.ResponseWriter
+}
+
+func (cw *compressResponseWriter) WriteHeader(c int) {
+ cw.w.Header().Del("Content-Length")
+ cw.w.WriteHeader(c)
+}
+
+func (cw *compressResponseWriter) Write(b []byte) (int, error) {
+ h := cw.w.Header()
+ if h.Get("Content-Type") == "" {
+ h.Set("Content-Type", http.DetectContentType(b))
+ }
+ h.Del("Content-Length")
+
+ return cw.compressor.Write(b)
+}
+
+func (cw *compressResponseWriter) ReadFrom(r io.Reader) (int64, error) {
+ return io.Copy(cw.compressor, r)
+}
+
+type flusher interface {
+ Flush() error
+}
+
+func (cw *compressResponseWriter) Flush() {
+ // Flush compressed data if the compressor supports it.
+ if f, ok := cw.compressor.(flusher); ok {
+ f.Flush()
+ }
+ // Flush the HTTP response.
+ if f, ok := cw.w.(http.Flusher); ok {
+ f.Flush()
+ }
+}
+
+// CompressHandler gzip compresses HTTP responses for clients that support it
+// via the 'Accept-Encoding' header.
+//
+// Compressing TLS traffic may leak the page contents to an attacker if the
+// page contains user input: http://security.stackexchange.com/a/102015/12208
+func CompressHandler(h http.Handler) http.Handler {
+ return CompressHandlerLevel(h, gzip.DefaultCompression)
+}
+
+// CompressHandlerLevel gzip compresses HTTP responses with specified compression level
+// for clients that support it via the 'Accept-Encoding' header.
+//
+// The compression level should be gzip.DefaultCompression, gzip.NoCompression,
+// or any integer value between gzip.BestSpeed and gzip.BestCompression inclusive.
+// gzip.DefaultCompression is used in case of invalid compression level.
+func CompressHandlerLevel(h http.Handler, level int) http.Handler {
+ if level < gzip.DefaultCompression || level > gzip.BestCompression {
+ level = gzip.DefaultCompression
+ }
+
+ const (
+ gzipEncoding = "gzip"
+ flateEncoding = "deflate"
+ )
+
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ // detect what encoding to use
+ var encoding string
+ for _, curEnc := range strings.Split(r.Header.Get(acceptEncoding), ",") {
+ curEnc = strings.TrimSpace(curEnc)
+ if curEnc == gzipEncoding || curEnc == flateEncoding {
+ encoding = curEnc
+ break
+ }
+ }
+
+ // always add Accept-Encoding to Vary so intermediate caches don't serve the wrong encoding
+ w.Header().Add("Vary", acceptEncoding)
+
+ // if we weren't able to identify an encoding we're familiar with, pass on the
+ // request to the handler and return
+ if encoding == "" {
+ h.ServeHTTP(w, r)
+ return
+ }
+
+ if r.Header.Get("Upgrade") != "" {
+ h.ServeHTTP(w, r)
+ return
+ }
+
+ // wrap the ResponseWriter with the writer for the chosen encoding
+ var encWriter io.WriteCloser
+ if encoding == gzipEncoding {
+ encWriter, _ = gzip.NewWriterLevel(w, level)
+ } else if encoding == flateEncoding {
+ encWriter, _ = flate.NewWriter(w, level)
+ }
+ defer encWriter.Close()
+
+ w.Header().Set("Content-Encoding", encoding)
+ r.Header.Del(acceptEncoding)
+
+ cw := &compressResponseWriter{
+ w: w,
+ compressor: encWriter,
+ }
+
+ w = httpsnoop.Wrap(w, httpsnoop.Hooks{
+ Write: func(httpsnoop.WriteFunc) httpsnoop.WriteFunc {
+ return cw.Write
+ },
+ WriteHeader: func(httpsnoop.WriteHeaderFunc) httpsnoop.WriteHeaderFunc {
+ return cw.WriteHeader
+ },
+ Flush: func(httpsnoop.FlushFunc) httpsnoop.FlushFunc {
+ return cw.Flush
+ },
+ ReadFrom: func(rff httpsnoop.ReadFromFunc) httpsnoop.ReadFromFunc {
+ return cw.ReadFrom
+ },
+ })
+
+ h.ServeHTTP(w, r)
+ })
+}
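A hedged usage sketch: negotiate gzip via the `Accept-Encoding` header and decompress the recorded body to verify the round trip. The hostname and payload are placeholders.

```go
package main

import (
	"compress/gzip"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"

	"github.com/gorilla/handlers"
)

func main() {
	h := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		io.WriteString(w, "hello, compressed world")
	})

	// Out-of-range levels fall back to gzip.DefaultCompression, per
	// CompressHandlerLevel above.
	wrapped := handlers.CompressHandlerLevel(h, gzip.BestSpeed)

	req := httptest.NewRequest("GET", "http://example.com/", nil)
	req.Header.Set("Accept-Encoding", "gzip")
	rec := httptest.NewRecorder()
	wrapped.ServeHTTP(rec, req)

	fmt.Println(rec.Header().Get("Content-Encoding")) // gzip
	fmt.Println(rec.Header().Get("Vary"))             // Accept-Encoding

	zr, _ := gzip.NewReader(rec.Body)
	body, _ := io.ReadAll(zr)
	fmt.Println(string(body)) // hello, compressed world
}
```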
diff --git a/vendor/github.com/gorilla/handlers/cors.go b/vendor/github.com/gorilla/handlers/cors.go
new file mode 100644
index 000000000..0dcdffb3d
--- /dev/null
+++ b/vendor/github.com/gorilla/handlers/cors.go
@@ -0,0 +1,355 @@
+package handlers
+
+import (
+ "net/http"
+ "strconv"
+ "strings"
+)
+
+// CORSOption represents a functional option for configuring the CORS middleware.
+type CORSOption func(*cors) error
+
+type cors struct {
+ h http.Handler
+ allowedHeaders []string
+ allowedMethods []string
+ allowedOrigins []string
+ allowedOriginValidator OriginValidator
+ exposedHeaders []string
+ maxAge int
+ ignoreOptions bool
+ allowCredentials bool
+ optionStatusCode int
+}
+
+// OriginValidator takes an origin string and returns whether or not that origin is allowed.
+type OriginValidator func(string) bool
+
+var (
+ defaultCorsOptionStatusCode = 200
+ defaultCorsMethods = []string{"GET", "HEAD", "POST"}
+ defaultCorsHeaders = []string{"Accept", "Accept-Language", "Content-Language", "Origin"}
+ // (WebKit/Safari v9 sends the Origin header by default in AJAX requests)
+)
+
+const (
+ corsOptionMethod string = "OPTIONS"
+ corsAllowOriginHeader string = "Access-Control-Allow-Origin"
+ corsExposeHeadersHeader string = "Access-Control-Expose-Headers"
+ corsMaxAgeHeader string = "Access-Control-Max-Age"
+ corsAllowMethodsHeader string = "Access-Control-Allow-Methods"
+ corsAllowHeadersHeader string = "Access-Control-Allow-Headers"
+ corsAllowCredentialsHeader string = "Access-Control-Allow-Credentials"
+ corsRequestMethodHeader string = "Access-Control-Request-Method"
+ corsRequestHeadersHeader string = "Access-Control-Request-Headers"
+ corsOriginHeader string = "Origin"
+ corsVaryHeader string = "Vary"
+ corsOriginMatchAll string = "*"
+)
+
+func (ch *cors) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+ origin := r.Header.Get(corsOriginHeader)
+ if !ch.isOriginAllowed(origin) {
+ if r.Method != corsOptionMethod || ch.ignoreOptions {
+ ch.h.ServeHTTP(w, r)
+ }
+
+ return
+ }
+
+ if r.Method == corsOptionMethod {
+ if ch.ignoreOptions {
+ ch.h.ServeHTTP(w, r)
+ return
+ }
+
+ if _, ok := r.Header[corsRequestMethodHeader]; !ok {
+ w.WriteHeader(http.StatusBadRequest)
+ return
+ }
+
+ method := r.Header.Get(corsRequestMethodHeader)
+ if !ch.isMatch(method, ch.allowedMethods) {
+ w.WriteHeader(http.StatusMethodNotAllowed)
+ return
+ }
+
+ requestHeaders := strings.Split(r.Header.Get(corsRequestHeadersHeader), ",")
+ allowedHeaders := []string{}
+ for _, v := range requestHeaders {
+ canonicalHeader := http.CanonicalHeaderKey(strings.TrimSpace(v))
+ if canonicalHeader == "" || ch.isMatch(canonicalHeader, defaultCorsHeaders) {
+ continue
+ }
+
+ if !ch.isMatch(canonicalHeader, ch.allowedHeaders) {
+ w.WriteHeader(http.StatusForbidden)
+ return
+ }
+
+ allowedHeaders = append(allowedHeaders, canonicalHeader)
+ }
+
+ if len(allowedHeaders) > 0 {
+ w.Header().Set(corsAllowHeadersHeader, strings.Join(allowedHeaders, ","))
+ }
+
+ if ch.maxAge > 0 {
+ w.Header().Set(corsMaxAgeHeader, strconv.Itoa(ch.maxAge))
+ }
+
+ if !ch.isMatch(method, defaultCorsMethods) {
+ w.Header().Set(corsAllowMethodsHeader, method)
+ }
+ } else {
+ if len(ch.exposedHeaders) > 0 {
+ w.Header().Set(corsExposeHeadersHeader, strings.Join(ch.exposedHeaders, ","))
+ }
+ }
+
+ if ch.allowCredentials {
+ w.Header().Set(corsAllowCredentialsHeader, "true")
+ }
+
+ if len(ch.allowedOrigins) > 1 {
+ w.Header().Set(corsVaryHeader, corsOriginHeader)
+ }
+
+ returnOrigin := origin
+ if ch.allowedOriginValidator == nil && len(ch.allowedOrigins) == 0 {
+ returnOrigin = "*"
+ } else {
+ for _, o := range ch.allowedOrigins {
+ // A configuration of * is different than explicitly setting an allowed
+ // origin. Returning arbitrary origin headers in an access control allow
+ // origin header is unsafe and is not required by any use case.
+ if o == corsOriginMatchAll {
+ returnOrigin = "*"
+ break
+ }
+ }
+ }
+ w.Header().Set(corsAllowOriginHeader, returnOrigin)
+
+ if r.Method == corsOptionMethod {
+ w.WriteHeader(ch.optionStatusCode)
+ return
+ }
+ ch.h.ServeHTTP(w, r)
+}
+
+// CORS provides Cross-Origin Resource Sharing middleware.
+// Example:
+//
+// import (
+// "net/http"
+//
+// "github.com/gorilla/handlers"
+// "github.com/gorilla/mux"
+// )
+//
+// func main() {
+// r := mux.NewRouter()
+// r.HandleFunc("/users", UserEndpoint)
+// r.HandleFunc("/projects", ProjectEndpoint)
+//
+// // Apply the CORS middleware to our top-level router, with the defaults.
+// http.ListenAndServe(":8000", handlers.CORS()(r))
+// }
+//
+func CORS(opts ...CORSOption) func(http.Handler) http.Handler {
+ return func(h http.Handler) http.Handler {
+ ch := parseCORSOptions(opts...)
+ ch.h = h
+ return ch
+ }
+}
+
+func parseCORSOptions(opts ...CORSOption) *cors {
+ ch := &cors{
+ allowedMethods: defaultCorsMethods,
+ allowedHeaders: defaultCorsHeaders,
+ allowedOrigins: []string{},
+ optionStatusCode: defaultCorsOptionStatusCode,
+ }
+
+ for _, option := range opts {
+ option(ch)
+ }
+
+ return ch
+}
+
+//
+// Functional options for configuring CORS.
+//
+
+// AllowedHeaders adds the provided headers to the list of allowed headers in a
+// CORS request.
+// This is an append operation so the headers Accept, Accept-Language,
+// and Content-Language are always allowed.
+// Content-Type must be explicitly declared if accepting Content-Types other than
+// application/x-www-form-urlencoded, multipart/form-data, or text/plain.
+func AllowedHeaders(headers []string) CORSOption {
+ return func(ch *cors) error {
+ for _, v := range headers {
+ normalizedHeader := http.CanonicalHeaderKey(strings.TrimSpace(v))
+ if normalizedHeader == "" {
+ continue
+ }
+
+ if !ch.isMatch(normalizedHeader, ch.allowedHeaders) {
+ ch.allowedHeaders = append(ch.allowedHeaders, normalizedHeader)
+ }
+ }
+
+ return nil
+ }
+}
+
+// AllowedMethods can be used to explicitly allow methods in the
+// Access-Control-Allow-Methods header.
+// This is a replacement operation so you must also
+// pass GET, HEAD, and POST if you wish to support those methods.
+func AllowedMethods(methods []string) CORSOption {
+ return func(ch *cors) error {
+ ch.allowedMethods = []string{}
+ for _, v := range methods {
+ normalizedMethod := strings.ToUpper(strings.TrimSpace(v))
+ if normalizedMethod == "" {
+ continue
+ }
+
+ if !ch.isMatch(normalizedMethod, ch.allowedMethods) {
+ ch.allowedMethods = append(ch.allowedMethods, normalizedMethod)
+ }
+ }
+
+ return nil
+ }
+}
+
+// AllowedOrigins sets the allowed origins for CORS requests, as used in the
+// 'Allow-Access-Control-Origin' HTTP header.
+// Note: Passing in a []string{"*"} will allow any domain.
+func AllowedOrigins(origins []string) CORSOption {
+ return func(ch *cors) error {
+ for _, v := range origins {
+ if v == corsOriginMatchAll {
+ ch.allowedOrigins = []string{corsOriginMatchAll}
+ return nil
+ }
+ }
+
+ ch.allowedOrigins = origins
+ return nil
+ }
+}
+
+// AllowedOriginValidator sets a function for evaluating allowed origins in CORS requests, represented by the
+// 'Allow-Access-Control-Origin' HTTP header.
+func AllowedOriginValidator(fn OriginValidator) CORSOption {
+ return func(ch *cors) error {
+ ch.allowedOriginValidator = fn
+ return nil
+ }
+}
+
+// OptionStatusCode sets a custom status code for OPTIONS requests.
+// The default of 200 reflects best practices. This option is not mandatory
+// and can be used if you need a custom status code (e.g. 204).
+//
+// More information in the spec:
+// https://fetch.spec.whatwg.org/#cors-preflight-fetch
+func OptionStatusCode(code int) CORSOption {
+ return func(ch *cors) error {
+ ch.optionStatusCode = code
+ return nil
+ }
+}
+
+// ExposedHeaders can be used to specify headers that are available
+// and will not be stripped out by the user-agent.
+func ExposedHeaders(headers []string) CORSOption {
+ return func(ch *cors) error {
+ ch.exposedHeaders = []string{}
+ for _, v := range headers {
+ normalizedHeader := http.CanonicalHeaderKey(strings.TrimSpace(v))
+ if normalizedHeader == "" {
+ continue
+ }
+
+ if !ch.isMatch(normalizedHeader, ch.exposedHeaders) {
+ ch.exposedHeaders = append(ch.exposedHeaders, normalizedHeader)
+ }
+ }
+
+ return nil
+ }
+}
+
+// MaxAge determines the maximum age (in seconds) between preflight requests. A
+// maximum of 10 minutes is allowed. An age above this value will default to 10
+// minutes.
+func MaxAge(age int) CORSOption {
+ return func(ch *cors) error {
+ // Maximum of 10 minutes.
+ if age > 600 {
+ age = 600
+ }
+
+ ch.maxAge = age
+ return nil
+ }
+}
+
+// IgnoreOptions causes the CORS middleware to ignore OPTIONS requests, instead
+// passing them through to the next handler. This is useful when your application
+// or framework has a pre-existing mechanism for responding to OPTIONS requests.
+func IgnoreOptions() CORSOption {
+ return func(ch *cors) error {
+ ch.ignoreOptions = true
+ return nil
+ }
+}
+
+// AllowCredentials can be used to specify that the user agent may pass
+// authentication details along with the request.
+func AllowCredentials() CORSOption {
+ return func(ch *cors) error {
+ ch.allowCredentials = true
+ return nil
+ }
+}
+
+func (ch *cors) isOriginAllowed(origin string) bool {
+ if origin == "" {
+ return false
+ }
+
+ if ch.allowedOriginValidator != nil {
+ return ch.allowedOriginValidator(origin)
+ }
+
+ if len(ch.allowedOrigins) == 0 {
+ return true
+ }
+
+ for _, allowedOrigin := range ch.allowedOrigins {
+ if allowedOrigin == origin || allowedOrigin == corsOriginMatchAll {
+ return true
+ }
+ }
+
+ return false
+}
+
+func (ch *cors) isMatch(needle string, haystack []string) bool {
+ for _, v := range haystack {
+ if v == needle {
+ return true
+ }
+ }
+
+ return false
+}
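A hedged sketch composing the functional options above; the origin, headers and port are illustrative, not package defaults:

```go
package main

import (
	"net/http"

	"github.com/gorilla/handlers"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/api/users", func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("[]"))
	})

	cors := handlers.CORS(
		handlers.AllowedOrigins([]string{"https://app.example.com"}),
		handlers.AllowedMethods([]string{"GET", "POST", "PUT", "DELETE", "OPTIONS"}),
		handlers.AllowedHeaders([]string{"Content-Type", "Authorization"}),
		handlers.AllowCredentials(),
		handlers.MaxAge(600), // capped at 10 minutes by MaxAge above
	)

	http.ListenAndServe(":8000", cors(mux))
}
```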
diff --git a/vendor/github.com/gorilla/handlers/doc.go b/vendor/github.com/gorilla/handlers/doc.go
new file mode 100644
index 000000000..944e5a8ae
--- /dev/null
+++ b/vendor/github.com/gorilla/handlers/doc.go
@@ -0,0 +1,9 @@
+/*
+Package handlers is a collection of handlers (aka "HTTP middleware") for use
+with Go's net/http package (or any framework supporting http.Handler).
+
+The package includes handlers for logging in standardised formats, compressing
+HTTP responses, validating content types and other useful tools for manipulating
+requests and responses.
+*/
+package handlers
diff --git a/vendor/github.com/gorilla/handlers/handlers.go b/vendor/github.com/gorilla/handlers/handlers.go
new file mode 100644
index 000000000..0509482ad
--- /dev/null
+++ b/vendor/github.com/gorilla/handlers/handlers.go
@@ -0,0 +1,147 @@
+// Copyright 2013 The Gorilla Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package handlers
+
+import (
+ "bufio"
+ "fmt"
+ "net"
+ "net/http"
+ "sort"
+ "strings"
+)
+
+// MethodHandler is an http.Handler that dispatches to a handler whose key in the
+// MethodHandler's map matches the name of the HTTP request's method, e.g. GET.
+//
+// If the request's method is OPTIONS and OPTIONS is not a key in the map then
+// the handler responds with a status of 200 and sets the Allow header to a
+// comma-separated list of available methods.
+//
+// If the request's method doesn't match any of its keys the handler responds
+// with a status of HTTP 405 "Method Not Allowed" and sets the Allow header to a
+// comma-separated list of available methods.
+type MethodHandler map[string]http.Handler
+
+func (h MethodHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
+ if handler, ok := h[req.Method]; ok {
+ handler.ServeHTTP(w, req)
+ } else {
+ allow := []string{}
+ for k := range h {
+ allow = append(allow, k)
+ }
+ sort.Strings(allow)
+ w.Header().Set("Allow", strings.Join(allow, ", "))
+ if req.Method == "OPTIONS" {
+ w.WriteHeader(http.StatusOK)
+ } else {
+ http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
+ }
+ }
+}
+
+// responseLogger is wrapper of http.ResponseWriter that keeps track of its HTTP
+// status code and body size
+type responseLogger struct {
+ w http.ResponseWriter
+ status int
+ size int
+}
+
+func (l *responseLogger) Write(b []byte) (int, error) {
+ size, err := l.w.Write(b)
+ l.size += size
+ return size, err
+}
+
+func (l *responseLogger) WriteHeader(s int) {
+ l.w.WriteHeader(s)
+ l.status = s
+}
+
+func (l *responseLogger) Status() int {
+ return l.status
+}
+
+func (l *responseLogger) Size() int {
+ return l.size
+}
+
+func (l *responseLogger) Hijack() (net.Conn, *bufio.ReadWriter, error) {
+ conn, rw, err := l.w.(http.Hijacker).Hijack()
+ if err == nil && l.status == 0 {
+ // The status will be StatusSwitchingProtocols if there was no error and
+ // WriteHeader has not been called yet
+ l.status = http.StatusSwitchingProtocols
+ }
+ return conn, rw, err
+}
+
+// isContentType validates the Content-Type header matches the supplied
+// contentType. That is, its type and subtype match.
+func isContentType(h http.Header, contentType string) bool {
+ ct := h.Get("Content-Type")
+ if i := strings.IndexRune(ct, ';'); i != -1 {
+ ct = ct[0:i]
+ }
+ return ct == contentType
+}
+
+// ContentTypeHandler wraps and returns an http.Handler, validating that the
+// request content type is compatible with the contentTypes list. It writes an
+// HTTP 415 error if that fails.
+//
+// Only PUT, POST, and PATCH requests are considered.
+func ContentTypeHandler(h http.Handler, contentTypes ...string) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ if !(r.Method == "PUT" || r.Method == "POST" || r.Method == "PATCH") {
+ h.ServeHTTP(w, r)
+ return
+ }
+
+ for _, ct := range contentTypes {
+ if isContentType(r.Header, ct) {
+ h.ServeHTTP(w, r)
+ return
+ }
+ }
+ http.Error(w, fmt.Sprintf("Unsupported content type %q; expected one of %q", r.Header.Get("Content-Type"), contentTypes), http.StatusUnsupportedMediaType)
+ })
+}
+
+const (
+ // HTTPMethodOverrideHeader is a commonly used
+ // http header to override a request method.
+ HTTPMethodOverrideHeader = "X-HTTP-Method-Override"
+ // HTTPMethodOverrideFormKey is a commonly used
+ // HTML form key to override a request method.
+ HTTPMethodOverrideFormKey = "_method"
+)
+
+// HTTPMethodOverrideHandler wraps and returns an http.Handler which checks for
+// the X-HTTP-Method-Override header or the _method form key, and overrides (if
+// valid) request.Method with its value.
+//
+// This is especially useful for HTTP clients that don't support many HTTP verbs.
+// It isn't secure to override e.g. a GET with a POST, so only POST requests are
+// considered. Likewise, the override method can only be a "write" method: PUT,
+// PATCH or DELETE.
+//
+// Form method takes precedence over header method.
+func HTTPMethodOverrideHandler(h http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ if r.Method == "POST" {
+ om := r.FormValue(HTTPMethodOverrideFormKey)
+ if om == "" {
+ om = r.Header.Get(HTTPMethodOverrideHeader)
+ }
+ if om == "PUT" || om == "PATCH" || om == "DELETE" {
+ r.Method = om
+ }
+ }
+ h.ServeHTTP(w, r)
+ })
+}
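A hedged sketch combining the two handlers above; the path, port and handler bodies are placeholders:

```go
package main

import (
	"io"
	"net/http"

	"github.com/gorilla/handlers"
)

func main() {
	get := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		io.WriteString(w, "list")
	})
	post := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		io.WriteString(w, "created")
	})

	mux := http.NewServeMux()
	// Unknown methods get a 405 with an Allow header; OPTIONS gets a
	// 200, per MethodHandler.ServeHTTP above.
	mux.Handle("/things", handlers.MethodHandler{
		"GET":  get,
		"POST": post,
	})

	// Let form-only clients tunnel PUT/PATCH/DELETE through POST via
	// the X-HTTP-Method-Override header or the _method form key.
	http.ListenAndServe(":8000", handlers.HTTPMethodOverrideHandler(mux))
}
```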
diff --git a/vendor/github.com/gorilla/handlers/logging.go b/vendor/github.com/gorilla/handlers/logging.go
new file mode 100644
index 000000000..228465eba
--- /dev/null
+++ b/vendor/github.com/gorilla/handlers/logging.go
@@ -0,0 +1,244 @@
+// Copyright 2013 The Gorilla Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package handlers
+
+import (
+ "io"
+ "net"
+ "net/http"
+ "net/url"
+ "strconv"
+ "time"
+ "unicode/utf8"
+
+ "github.com/felixge/httpsnoop"
+)
+
+// Logging
+
+// LogFormatterParams is the structure any formatter will be handed when it is time to log.
+type LogFormatterParams struct {
+ Request *http.Request
+ URL url.URL
+ TimeStamp time.Time
+ StatusCode int
+ Size int
+}
+
+// LogFormatter gives the signature of the formatter function passed to CustomLoggingHandler
+type LogFormatter func(writer io.Writer, params LogFormatterParams)
+
+// loggingHandler is the http.Handler implementation for LoggingHandler and
+// its friends.
+type loggingHandler struct {
+ writer io.Writer
+ handler http.Handler
+ formatter LogFormatter
+}
+
+func (h loggingHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
+ t := time.Now()
+ logger, w := makeLogger(w)
+ url := *req.URL
+
+ h.handler.ServeHTTP(w, req)
+ if req.MultipartForm != nil {
+ req.MultipartForm.RemoveAll()
+ }
+
+ params := LogFormatterParams{
+ Request: req,
+ URL: url,
+ TimeStamp: t,
+ StatusCode: logger.Status(),
+ Size: logger.Size(),
+ }
+
+ h.formatter(h.writer, params)
+}
+
+func makeLogger(w http.ResponseWriter) (*responseLogger, http.ResponseWriter) {
+ logger := &responseLogger{w: w, status: http.StatusOK}
+ return logger, httpsnoop.Wrap(w, httpsnoop.Hooks{
+ Write: func(httpsnoop.WriteFunc) httpsnoop.WriteFunc {
+ return logger.Write
+ },
+ WriteHeader: func(httpsnoop.WriteHeaderFunc) httpsnoop.WriteHeaderFunc {
+ return logger.WriteHeader
+ },
+ })
+}
+
+const lowerhex = "0123456789abcdef"
+
+func appendQuoted(buf []byte, s string) []byte {
+ var runeTmp [utf8.UTFMax]byte
+ for width := 0; len(s) > 0; s = s[width:] {
+ r := rune(s[0])
+ width = 1
+ if r >= utf8.RuneSelf {
+ r, width = utf8.DecodeRuneInString(s)
+ }
+ if width == 1 && r == utf8.RuneError {
+ buf = append(buf, `\x`...)
+ buf = append(buf, lowerhex[s[0]>>4])
+ buf = append(buf, lowerhex[s[0]&0xF])
+ continue
+ }
+ if r == rune('"') || r == '\\' { // always backslashed
+ buf = append(buf, '\\')
+ buf = append(buf, byte(r))
+ continue
+ }
+ if strconv.IsPrint(r) {
+ n := utf8.EncodeRune(runeTmp[:], r)
+ buf = append(buf, runeTmp[:n]...)
+ continue
+ }
+ switch r {
+ case '\a':
+ buf = append(buf, `\a`...)
+ case '\b':
+ buf = append(buf, `\b`...)
+ case '\f':
+ buf = append(buf, `\f`...)
+ case '\n':
+ buf = append(buf, `\n`...)
+ case '\r':
+ buf = append(buf, `\r`...)
+ case '\t':
+ buf = append(buf, `\t`...)
+ case '\v':
+ buf = append(buf, `\v`...)
+ default:
+ switch {
+ case r < ' ':
+ buf = append(buf, `\x`...)
+ buf = append(buf, lowerhex[s[0]>>4])
+ buf = append(buf, lowerhex[s[0]&0xF])
+ case r > utf8.MaxRune:
+ r = 0xFFFD
+ fallthrough
+ case r < 0x10000:
+ buf = append(buf, `\u`...)
+ for s := 12; s >= 0; s -= 4 {
+ buf = append(buf, lowerhex[r>>uint(s)&0xF])
+ }
+ default:
+ buf = append(buf, `\U`...)
+ for s := 28; s >= 0; s -= 4 {
+ buf = append(buf, lowerhex[r>>uint(s)&0xF])
+ }
+ }
+ }
+ }
+ return buf
+}
+
+// buildCommonLogLine builds a log entry for req in Apache Common Log Format.
+// ts is the timestamp with which the entry should be logged.
+// status and size are used to provide the response HTTP status and size.
+func buildCommonLogLine(req *http.Request, url url.URL, ts time.Time, status int, size int) []byte {
+ username := "-"
+ if url.User != nil {
+ if name := url.User.Username(); name != "" {
+ username = name
+ }
+ }
+
+ host, _, err := net.SplitHostPort(req.RemoteAddr)
+ if err != nil {
+ host = req.RemoteAddr
+ }
+
+ uri := req.RequestURI
+
+ // Requests using the CONNECT method over HTTP/2.0 must use
+ // the authority field (aka r.Host) to identify the target.
+ // Refer: https://httpwg.github.io/specs/rfc7540.html#CONNECT
+ if req.ProtoMajor == 2 && req.Method == "CONNECT" {
+ uri = req.Host
+ }
+ if uri == "" {
+ uri = url.RequestURI()
+ }
+
+ buf := make([]byte, 0, 3*(len(host)+len(username)+len(req.Method)+len(uri)+len(req.Proto)+50)/2)
+ buf = append(buf, host...)
+ buf = append(buf, " - "...)
+ buf = append(buf, username...)
+ buf = append(buf, " ["...)
+ buf = append(buf, ts.Format("02/Jan/2006:15:04:05 -0700")...)
+ buf = append(buf, `] "`...)
+ buf = append(buf, req.Method...)
+ buf = append(buf, " "...)
+ buf = appendQuoted(buf, uri)
+ buf = append(buf, " "...)
+ buf = append(buf, req.Proto...)
+ buf = append(buf, `" `...)
+ buf = append(buf, strconv.Itoa(status)...)
+ buf = append(buf, " "...)
+ buf = append(buf, strconv.Itoa(size)...)
+ return buf
+}
+
+// writeLog writes a log entry to writer in Apache Common Log Format, using the
+// request, URL, timestamp, status and size carried in params.
+func writeLog(writer io.Writer, params LogFormatterParams) {
+ buf := buildCommonLogLine(params.Request, params.URL, params.TimeStamp, params.StatusCode, params.Size)
+ buf = append(buf, '\n')
+ writer.Write(buf)
+}
+
+// writeCombinedLog writes a log entry to writer in Apache Combined Log Format,
+// using the request, URL, timestamp, status and size carried in params.
+func writeCombinedLog(writer io.Writer, params LogFormatterParams) {
+ buf := buildCommonLogLine(params.Request, params.URL, params.TimeStamp, params.StatusCode, params.Size)
+ buf = append(buf, ` "`...)
+ buf = appendQuoted(buf, params.Request.Referer())
+ buf = append(buf, `" "`...)
+ buf = appendQuoted(buf, params.Request.UserAgent())
+ buf = append(buf, '"', '\n')
+ writer.Write(buf)
+}
+
+// CombinedLoggingHandler returns an http.Handler that wraps h and logs requests to out in
+// Apache Combined Log Format.
+//
+// See http://httpd.apache.org/docs/2.2/logs.html#combined for a description of this format.
+//
+// CombinedLoggingHandler always sets the ident field of the log to -
+func CombinedLoggingHandler(out io.Writer, h http.Handler) http.Handler {
+ return loggingHandler{out, h, writeCombinedLog}
+}
+
+// LoggingHandler returns an http.Handler that wraps h and logs requests to out in
+// Apache Common Log Format (CLF).
+//
+// See http://httpd.apache.org/docs/2.2/logs.html#common for a description of this format.
+//
+// LoggingHandler always sets the ident field of the log to -
+//
+// Example:
+//
+// r := mux.NewRouter()
+// r.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
+// w.Write([]byte("This is a catch-all route"))
+// })
+// loggedRouter := handlers.LoggingHandler(os.Stdout, r)
+// http.ListenAndServe(":1123", loggedRouter)
+//
+func LoggingHandler(out io.Writer, h http.Handler) http.Handler {
+ return loggingHandler{out, h, writeLog}
+}
+
+// CustomLoggingHandler provides a way to supply a custom log formatter
+// while taking advantage of the mechanisms in this package
+func CustomLoggingHandler(out io.Writer, h http.Handler, f LogFormatter) http.Handler {
+ return loggingHandler{out, h, f}
+}
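A hedged sketch of `CustomLoggingHandler` with a hypothetical one-line formatter (`plainFormatter` is not part of the package):

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"

	"github.com/gorilla/handlers"
)

// plainFormatter logs method, path, status and size, one request per line.
func plainFormatter(w io.Writer, p handlers.LogFormatterParams) {
	fmt.Fprintf(w, "%s %s -> %d (%d bytes)\n",
		p.Request.Method, p.URL.Path, p.StatusCode, p.Size)
}

func main() {
	h := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		io.WriteString(w, "hello")
	})

	http.ListenAndServe(":8000", handlers.CustomLoggingHandler(os.Stdout, h, plainFormatter))
}
```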
diff --git a/vendor/github.com/gorilla/handlers/proxy_headers.go b/vendor/github.com/gorilla/handlers/proxy_headers.go
new file mode 100644
index 000000000..ed939dcef
--- /dev/null
+++ b/vendor/github.com/gorilla/handlers/proxy_headers.go
@@ -0,0 +1,120 @@
+package handlers
+
+import (
+ "net/http"
+ "regexp"
+ "strings"
+)
+
+var (
+ // De-facto standard header keys.
+ xForwardedFor = http.CanonicalHeaderKey("X-Forwarded-For")
+ xForwardedHost = http.CanonicalHeaderKey("X-Forwarded-Host")
+ xForwardedProto = http.CanonicalHeaderKey("X-Forwarded-Proto")
+ xForwardedScheme = http.CanonicalHeaderKey("X-Forwarded-Scheme")
+ xRealIP = http.CanonicalHeaderKey("X-Real-IP")
+)
+
+var (
+ // RFC7239 defines a new "Forwarded: " header designed to replace the
+ // existing use of X-Forwarded-* headers.
+ // e.g. Forwarded: for=192.0.2.60;proto=https;by=203.0.113.43
+ forwarded = http.CanonicalHeaderKey("Forwarded")
+ // Allows for a sub-match of the first value after 'for=' to the next
+ // comma, semi-colon or space. The match is case-insensitive.
+ forRegex = regexp.MustCompile(`(?i)(?:for=)([^(;|,| )]+)`)
+ // Allows for a sub-match for the first instance of scheme (http|https)
+ // prefixed by 'proto='. The match is case-insensitive.
+ protoRegex = regexp.MustCompile(`(?i)(?:proto=)(https|http)`)
+)
+
+// ProxyHeaders inspects common reverse proxy headers and sets the corresponding
+// fields in the HTTP request struct. These are X-Forwarded-For and X-Real-IP
+// for the remote (client) IP address, X-Forwarded-Proto or X-Forwarded-Scheme
+// for the scheme (http|https), X-Forwarded-Host for the host and the RFC7239
+// Forwarded header, which may include both client IPs and schemes.
+//
+// NOTE: This middleware should only be used when behind a reverse
+// proxy like nginx, HAProxy or Apache. Reverse proxies that don't (or are
+// configured not to) strip these headers from client requests, or where these
+// headers are accepted "as is" from a remote client (e.g. when Go is not behind
+// a proxy), can manifest as a vulnerability if your application uses these
+// headers for validating the 'trustworthiness' of a request.
+func ProxyHeaders(h http.Handler) http.Handler {
+ fn := func(w http.ResponseWriter, r *http.Request) {
+ // Set the remote IP with the value passed from the proxy.
+ if fwd := getIP(r); fwd != "" {
+ r.RemoteAddr = fwd
+ }
+
+ // Set the scheme (proto) with the value passed from the proxy.
+ if scheme := getScheme(r); scheme != "" {
+ r.URL.Scheme = scheme
+ }
+ // Set the host with the value passed by the proxy
+ if r.Header.Get(xForwardedHost) != "" {
+ r.Host = r.Header.Get(xForwardedHost)
+ }
+ // Call the next handler in the chain.
+ h.ServeHTTP(w, r)
+ }
+
+ return http.HandlerFunc(fn)
+}
+
+// getIP retrieves the IP from the X-Forwarded-For, X-Real-IP and RFC7239
+// Forwarded headers (in that order).
+func getIP(r *http.Request) string {
+ var addr string
+
+ if fwd := r.Header.Get(xForwardedFor); fwd != "" {
+ // Only grab the first (client) address. Note that '192.168.0.1,
+ // 10.1.1.1' is a valid value for X-Forwarded-For, where addresses after
+ // the first may represent forwarding proxies earlier in the chain.
+ s := strings.Index(fwd, ", ")
+ if s == -1 {
+ s = len(fwd)
+ }
+ addr = fwd[:s]
+ } else if fwd := r.Header.Get(xRealIP); fwd != "" {
+ // X-Real-IP should only contain one IP address (the client making the
+ // request).
+ addr = fwd
+ } else if fwd := r.Header.Get(forwarded); fwd != "" {
+ // match should contain at least two elements if the protocol was
+ // specified in the Forwarded header. The first element will always be
+ // the 'for=' capture, which we ignore. In the case of multiple IP
+ // addresses (for=8.8.8.8, 8.8.4.4,172.16.1.20 is valid) we only
+ // extract the first, which should be the client IP.
+ if match := forRegex.FindStringSubmatch(fwd); len(match) > 1 {
+ // IPv6 addresses in Forwarded headers are quoted-strings. We strip
+ // these quotes.
+ addr = strings.Trim(match[1], `"`)
+ }
+ }
+
+ return addr
+}
+
+// getScheme retrieves the scheme from the X-Forwarded-Proto and RFC7239
+// Forwarded headers (in that order).
+func getScheme(r *http.Request) string {
+ var scheme string
+
+ // Retrieve the scheme from X-Forwarded-Proto.
+ if proto := r.Header.Get(xForwardedProto); proto != "" {
+ scheme = strings.ToLower(proto)
+ } else if proto = r.Header.Get(xForwardedScheme); proto != "" {
+ scheme = strings.ToLower(proto)
+ } else if proto = r.Header.Get(forwarded); proto != "" {
+ // match should contain at least two elements if the protocol was
+ // specified in the Forwarded header. The first element will always be
+ // the 'proto=' capture, which we ignore. In the case of multiple proto
+ // parameters (invalid) we only extract the first.
+ if match := protoRegex.FindStringSubmatch(proto); len(match) > 1 {
+ scheme = strings.ToLower(match[1])
+ }
+ }
+
+ return scheme
+}
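A hedged sketch of the header rewriting using `httptest`; the addresses are documentation placeholders:

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"

	"github.com/gorilla/handlers"
)

func main() {
	h := handlers.ProxyHeaders(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// RemoteAddr and URL.Scheme now reflect what the (trusted)
		// proxy reported, not the TCP peer.
		fmt.Fprintf(w, "%s %s", r.RemoteAddr, r.URL.Scheme)
	}))

	req := httptest.NewRequest("GET", "http://example.com/", nil)
	req.Header.Set("X-Forwarded-For", "203.0.113.7, 10.0.0.1")
	req.Header.Set("X-Forwarded-Proto", "https")
	rec := httptest.NewRecorder()
	h.ServeHTTP(rec, req)

	fmt.Println(rec.Body.String()) // 203.0.113.7 https
}
```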
diff --git a/vendor/github.com/gorilla/handlers/recovery.go b/vendor/github.com/gorilla/handlers/recovery.go
new file mode 100644
index 000000000..4c4c1d9c6
--- /dev/null
+++ b/vendor/github.com/gorilla/handlers/recovery.go
@@ -0,0 +1,96 @@
+package handlers
+
+import (
+ "log"
+ "net/http"
+ "runtime/debug"
+)
+
+// RecoveryHandlerLogger is an interface used by the recovering handler to print logs.
+type RecoveryHandlerLogger interface {
+ Println(...interface{})
+}
+
+type recoveryHandler struct {
+ handler http.Handler
+ logger RecoveryHandlerLogger
+ printStack bool
+}
+
+// RecoveryOption provides a functional approach to defining configuration for
+// a handler, such as setting the logger or whether to print stack traces on
+// panic.
+type RecoveryOption func(http.Handler)
+
+func parseRecoveryOptions(h http.Handler, opts ...RecoveryOption) http.Handler {
+ for _, option := range opts {
+ option(h)
+ }
+
+ return h
+}
+
+// RecoveryHandler is HTTP middleware that recovers from a panic,
+// logs the panic, writes http.StatusInternalServerError, and
+// continues to the next handler.
+//
+// Example:
+//
+// r := mux.NewRouter()
+// r.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
+// panic("Unexpected error!")
+// })
+//
+// http.ListenAndServe(":1123", handlers.RecoveryHandler()(r))
+func RecoveryHandler(opts ...RecoveryOption) func(h http.Handler) http.Handler {
+ return func(h http.Handler) http.Handler {
+ r := &recoveryHandler{handler: h}
+ return parseRecoveryOptions(r, opts...)
+ }
+}
+
+// RecoveryLogger is a functional option to override
+// the default logger
+func RecoveryLogger(logger RecoveryHandlerLogger) RecoveryOption {
+ return func(h http.Handler) {
+ r := h.(*recoveryHandler)
+ r.logger = logger
+ }
+}
+
+// PrintRecoveryStack is a functional option to enable
+// or disable printing stack traces on panic.
+func PrintRecoveryStack(print bool) RecoveryOption {
+ return func(h http.Handler) {
+ r := h.(*recoveryHandler)
+ r.printStack = print
+ }
+}
+
+func (h recoveryHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
+ defer func() {
+ if err := recover(); err != nil {
+ w.WriteHeader(http.StatusInternalServerError)
+ h.log(err)
+ }
+ }()
+
+ h.handler.ServeHTTP(w, req)
+}
+
+func (h recoveryHandler) log(v ...interface{}) {
+ if h.logger != nil {
+ h.logger.Println(v...)
+ } else {
+ log.Println(v...)
+ }
+
+ if h.printStack {
+ stack := string(debug.Stack())
+ if h.logger != nil {
+ h.logger.Println(stack)
+ } else {
+ log.Println(stack)
+ }
+ }
+}
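A hedged sketch wiring both options together; the route, log prefix and port are placeholders:

```go
package main

import (
	"log"
	"net/http"
	"os"

	"github.com/gorilla/handlers"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/panic", func(w http.ResponseWriter, r *http.Request) {
		panic("something went sideways")
	})

	// *log.Logger satisfies RecoveryHandlerLogger via Println.
	logger := log.New(os.Stderr, "recovered: ", log.LstdFlags)

	// The client gets a 500; the panic value and stack trace go to logger.
	recovered := handlers.RecoveryHandler(
		handlers.RecoveryLogger(logger),
		handlers.PrintRecoveryStack(true),
	)(mux)

	http.ListenAndServe(":8000", recovered)
}
```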
diff --git a/vendor/github.com/huandu/xstrings/.gitignore b/vendor/github.com/huandu/xstrings/.gitignore
new file mode 100644
index 000000000..daf913b1b
--- /dev/null
+++ b/vendor/github.com/huandu/xstrings/.gitignore
@@ -0,0 +1,24 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
+*.prof
diff --git a/vendor/github.com/huandu/xstrings/CONTRIBUTING.md b/vendor/github.com/huandu/xstrings/CONTRIBUTING.md
new file mode 100644
index 000000000..d7b4b8d58
--- /dev/null
+++ b/vendor/github.com/huandu/xstrings/CONTRIBUTING.md
@@ -0,0 +1,23 @@
+# Contributing #
+
+Thanks in advance for your contribution. Whether it's a pull request, a bug report or a feature discussion, it's always highly appreciated.
+
+## New API or feature ##
+
+I want to say a bit more about how to add new functions to this package.
+
+Package `xstrings` is a collection of useful string functions that should be implemented in Go. It's a bit subjective to say which functions should be included and which should not, so I set up the following rules to make the decision as clear and objective as possible.
+
+* Rule 1: Only string algorithms, which take a string as input, can be included.
+* Rule 2: If a function has been implemented in package `strings`, it must not be included.
+* Rule 3: If a function is not language neutral, it must not be included.
+* Rule 4: If a function is a part of standard library in other languages, it can be included.
+* Rule 5: If a function is quite useful in some famous framework or library, it can be included.
+
+New functions must be discussed in the project's issues before any code is submitted. A pull request that adds new functions without a referenced issue will be rejected.
+
+## Pull request ##
+
+Pull requests are always welcome. Just make sure you have run `go fmt` and that all test cases pass before submitting.
+
+If the pull request adds a new API or feature, don't forget to update README.md and add the new API to the function list.
diff --git a/vendor/github.com/huandu/xstrings/LICENSE b/vendor/github.com/huandu/xstrings/LICENSE
new file mode 100644
index 000000000..270177259
--- /dev/null
+++ b/vendor/github.com/huandu/xstrings/LICENSE
@@ -0,0 +1,22 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Huan Du
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
diff --git a/vendor/github.com/huandu/xstrings/README.md b/vendor/github.com/huandu/xstrings/README.md
new file mode 100644
index 000000000..750c3c7eb
--- /dev/null
+++ b/vendor/github.com/huandu/xstrings/README.md
@@ -0,0 +1,117 @@
+# xstrings
+
+[![Build Status](https://github.com/huandu/xstrings/workflows/Go/badge.svg)](https://github.com/huandu/xstrings/actions)
+[![Go Doc](https://godoc.org/github.com/huandu/xstrings?status.svg)](https://pkg.go.dev/github.com/huandu/xstrings)
+[![Go Report](https://goreportcard.com/badge/github.com/huandu/xstrings)](https://goreportcard.com/report/github.com/huandu/xstrings)
+[![Coverage Status](https://coveralls.io/repos/github/huandu/xstrings/badge.svg?branch=master)](https://coveralls.io/github/huandu/xstrings?branch=master)
+
+Go package [xstrings](https://godoc.org/github.com/huandu/xstrings) is a collection of string functions, which are widely used in other languages but absent in Go package [strings](http://golang.org/pkg/strings).
+
+All functions are well tested and carefully tuned for performance.
+
+## Propose a new function
+
+Please review [contributing guideline](CONTRIBUTING.md) and [create new issue](https://github.com/huandu/xstrings/issues) to state why it should be included.
+
+## Install
+
+Use `go get` to install this library.
+
+ go get github.com/huandu/xstrings
+
+## API document
+
+See [GoDoc](https://godoc.org/github.com/huandu/xstrings) for full document.
+
+## Function list
+
+Go functions have a unique naming style. Someone who has experience in other languages but is new to Go may have difficulty finding the right string function to use.
+
+Here is a list of functions in [strings](http://golang.org/pkg/strings) and [xstrings](https://godoc.org/github.com/huandu/xstrings), with extra information about how these functions map to their friends in other languages. Hopefully this list is helpful for fresh gophers.
+
+### Package `xstrings` functions
+
+_Keep this table sorted by Function in ascending order._
+
+| Function | Friends | # |
+| --------------------------------------------------------------------------------- | ------------------------------------------------------------------------------- | --------------------------------------------------- |
+| [Center](https://godoc.org/github.com/huandu/xstrings#Center) | `str.center` in Python; `String#center` in Ruby | [#30](https://github.com/huandu/xstrings/issues/30) |
+| [Count](https://godoc.org/github.com/huandu/xstrings#Count) | `String#count` in Ruby | [#16](https://github.com/huandu/xstrings/issues/16) |
+| [Delete](https://godoc.org/github.com/huandu/xstrings#Delete) | `String#delete` in Ruby | [#17](https://github.com/huandu/xstrings/issues/17) |
+| [ExpandTabs](https://godoc.org/github.com/huandu/xstrings#ExpandTabs) | `str.expandtabs` in Python | [#27](https://github.com/huandu/xstrings/issues/27) |
+| [FirstRuneToLower](https://godoc.org/github.com/huandu/xstrings#FirstRuneToLower) | `lcfirst` in PHP or Perl | [#15](https://github.com/huandu/xstrings/issues/15) |
+| [FirstRuneToUpper](https://godoc.org/github.com/huandu/xstrings#FirstRuneToUpper) | `String#capitalize` in Ruby; `ucfirst` in PHP or Perl | [#15](https://github.com/huandu/xstrings/issues/15) |
+| [Insert](https://godoc.org/github.com/huandu/xstrings#Insert) | `String#insert` in Ruby | [#18](https://github.com/huandu/xstrings/issues/18) |
+| [LastPartition](https://godoc.org/github.com/huandu/xstrings#LastPartition) | `str.rpartition` in Python; `String#rpartition` in Ruby | [#19](https://github.com/huandu/xstrings/issues/19) |
+| [LeftJustify](https://godoc.org/github.com/huandu/xstrings#LeftJustify) | `str.ljust` in Python; `String#ljust` in Ruby | [#28](https://github.com/huandu/xstrings/issues/28) |
+| [Len](https://godoc.org/github.com/huandu/xstrings#Len) | `mb_strlen` in PHP | [#23](https://github.com/huandu/xstrings/issues/23) |
+| [Partition](https://godoc.org/github.com/huandu/xstrings#Partition) | `str.partition` in Python; `String#partition` in Ruby | [#10](https://github.com/huandu/xstrings/issues/10) |
+| [Reverse](https://godoc.org/github.com/huandu/xstrings#Reverse) | `String#reverse` in Ruby; `strrev` in PHP; `reverse` in Perl | [#7](https://github.com/huandu/xstrings/issues/7) |
+| [RightJustify](https://godoc.org/github.com/huandu/xstrings#RightJustify) | `str.rjust` in Python; `String#rjust` in Ruby | [#29](https://github.com/huandu/xstrings/issues/29) |
+| [RuneWidth](https://godoc.org/github.com/huandu/xstrings#RuneWidth) | - | [#27](https://github.com/huandu/xstrings/issues/27) |
+| [Scrub](https://godoc.org/github.com/huandu/xstrings#Scrub) | `String#scrub` in Ruby | [#20](https://github.com/huandu/xstrings/issues/20) |
+| [Shuffle](https://godoc.org/github.com/huandu/xstrings#Shuffle) | `str_shuffle` in PHP | [#13](https://github.com/huandu/xstrings/issues/13) |
+| [ShuffleSource](https://godoc.org/github.com/huandu/xstrings#ShuffleSource) | `str_shuffle` in PHP | [#13](https://github.com/huandu/xstrings/issues/13) |
+| [Slice](https://godoc.org/github.com/huandu/xstrings#Slice) | `mb_substr` in PHP | [#9](https://github.com/huandu/xstrings/issues/9) |
+| [Squeeze](https://godoc.org/github.com/huandu/xstrings#Squeeze) | `String#squeeze` in Ruby | [#11](https://github.com/huandu/xstrings/issues/11) |
+| [Successor](https://godoc.org/github.com/huandu/xstrings#Successor) | `String#succ` or `String#next` in Ruby | [#22](https://github.com/huandu/xstrings/issues/22) |
+| [SwapCase](https://godoc.org/github.com/huandu/xstrings#SwapCase) | `str.swapcase` in Python; `String#swapcase` in Ruby | [#12](https://github.com/huandu/xstrings/issues/12) |
+| [ToCamelCase](https://godoc.org/github.com/huandu/xstrings#ToCamelCase) | `String#camelize` in RoR | [#1](https://github.com/huandu/xstrings/issues/1) |
+| [ToKebab](https://godoc.org/github.com/huandu/xstrings#ToKebabCase) | - | [#41](https://github.com/huandu/xstrings/issues/41) |
+| [ToSnakeCase](https://godoc.org/github.com/huandu/xstrings#ToSnakeCase) | `String#underscore` in RoR | [#1](https://github.com/huandu/xstrings/issues/1) |
+| [Translate](https://godoc.org/github.com/huandu/xstrings#Translate) | `str.translate` in Python; `String#tr` in Ruby; `strtr` in PHP; `tr///` in Perl | [#21](https://github.com/huandu/xstrings/issues/21) |
+| [Width](https://godoc.org/github.com/huandu/xstrings#Width) | `mb_strwidth` in PHP | [#26](https://github.com/huandu/xstrings/issues/26) |
+| [WordCount](https://godoc.org/github.com/huandu/xstrings#WordCount) | `str_word_count` in PHP | [#14](https://github.com/huandu/xstrings/issues/14) |
+| [WordSplit](https://godoc.org/github.com/huandu/xstrings#WordSplit) | - | [#14](https://github.com/huandu/xstrings/issues/14) |
+
+### Package `strings` functions
+
+_Keep this table sorted by Function in ascending order._
+
+| Function | Friends |
+| --------------------------------------------------------------- | ----------------------------------------------------------------------------------- |
+| [Contains](http://golang.org/pkg/strings/#Contains) | `String#include?` in Ruby |
+| [ContainsAny](http://golang.org/pkg/strings/#ContainsAny) | - |
+| [ContainsRune](http://golang.org/pkg/strings/#ContainsRune) | - |
+| [Count](http://golang.org/pkg/strings/#Count) | `str.count` in Python; `substr_count` in PHP |
+| [EqualFold](http://golang.org/pkg/strings/#EqualFold) | `stricmp` in PHP; `String#casecmp` in Ruby |
+| [Fields](http://golang.org/pkg/strings/#Fields) | `str.split` in Python; `split` in Perl; `String#split` in Ruby |
+| [FieldsFunc](http://golang.org/pkg/strings/#FieldsFunc) | - |
+| [HasPrefix](http://golang.org/pkg/strings/#HasPrefix) | `str.startswith` in Python; `String#start_with?` in Ruby |
+| [HasSuffix](http://golang.org/pkg/strings/#HasSuffix) | `str.endswith` in Python; `String#end_with?` in Ruby |
+| [Index](http://golang.org/pkg/strings/#Index) | `str.index` in Python; `String#index` in Ruby; `strpos` in PHP; `index` in Perl |
+| [IndexAny](http://golang.org/pkg/strings/#IndexAny) | - |
+| [IndexByte](http://golang.org/pkg/strings/#IndexByte) | - |
+| [IndexFunc](http://golang.org/pkg/strings/#IndexFunc) | - |
+| [IndexRune](http://golang.org/pkg/strings/#IndexRune) | - |
+| [Join](http://golang.org/pkg/strings/#Join) | `str.join` in Python; `Array#join` in Ruby; `implode` in PHP; `join` in Perl |
+| [LastIndex](http://golang.org/pkg/strings/#LastIndex) | `str.rindex` in Python; `String#rindex`; `strrpos` in PHP; `rindex` in Perl |
+| [LastIndexAny](http://golang.org/pkg/strings/#LastIndexAny) | - |
+| [LastIndexFunc](http://golang.org/pkg/strings/#LastIndexFunc) | - |
+| [Map](http://golang.org/pkg/strings/#Map) | `String#each_codepoint` in Ruby |
+| [Repeat](http://golang.org/pkg/strings/#Repeat) | operator `*` in Python and Ruby; `str_repeat` in PHP |
+| [Replace](http://golang.org/pkg/strings/#Replace) | `str.replace` in Python; `String#sub` in Ruby; `str_replace` in PHP |
+| [Split](http://golang.org/pkg/strings/#Split) | `str.split` in Python; `String#split` in Ruby; `explode` in PHP; `split` in Perl |
+| [SplitAfter](http://golang.org/pkg/strings/#SplitAfter) | - |
+| [SplitAfterN](http://golang.org/pkg/strings/#SplitAfterN) | - |
+| [SplitN](http://golang.org/pkg/strings/#SplitN) | `str.split` in Python; `String#split` in Ruby; `explode` in PHP; `split` in Perl |
+| [Title](http://golang.org/pkg/strings/#Title) | `str.title` in Python |
+| [ToLower](http://golang.org/pkg/strings/#ToLower) | `str.lower` in Python; `String#downcase` in Ruby; `strtolower` in PHP; `lc` in Perl |
+| [ToLowerSpecial](http://golang.org/pkg/strings/#ToLowerSpecial) | - |
+| [ToTitle](http://golang.org/pkg/strings/#ToTitle) | - |
+| [ToTitleSpecial](http://golang.org/pkg/strings/#ToTitleSpecial) | - |
+| [ToUpper](http://golang.org/pkg/strings/#ToUpper) | `str.upper` in Python; `String#upcase` in Ruby; `strtoupper` in PHP; `uc` in Perl |
+| [ToUpperSpecial](http://golang.org/pkg/strings/#ToUpperSpecial) | - |
+| [Trim](http://golang.org/pkg/strings/#Trim) | `str.strip` in Python; `String#strip` in Ruby; `trim` in PHP |
+| [TrimFunc](http://golang.org/pkg/strings/#TrimFunc) | - |
+| [TrimLeft](http://golang.org/pkg/strings/#TrimLeft) | `str.lstrip` in Python; `String#lstrip` in Ruby; `ltrim` in PHP |
+| [TrimLeftFunc](http://golang.org/pkg/strings/#TrimLeftFunc) | - |
+| [TrimPrefix](http://golang.org/pkg/strings/#TrimPrefix) | - |
+| [TrimRight](http://golang.org/pkg/strings/#TrimRight) | `str.rstrip` in Python; `String#rstrip` in Ruby; `rtrim` in PHP |
+| [TrimRightFunc](http://golang.org/pkg/strings/#TrimRightFunc) | - |
+| [TrimSpace](http://golang.org/pkg/strings/#TrimSpace) | `str.strip` in Python; `String#strip` in Ruby; `trim` in PHP |
+| [TrimSuffix](http://golang.org/pkg/strings/#TrimSuffix) | `String#chomp` in Ruby; `chomp` in Perl |
+
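A short, hedged sketch using a few functions from the tables above; the expected outputs follow their documentation:

```go
package main

import (
	"fmt"

	"github.com/huandu/xstrings"
)

func main() {
	fmt.Println(xstrings.ToSnakeCase("HTTPServer")) // http_server
	fmt.Println(xstrings.ToKebabCase("FirstName")) // first-name
	fmt.Println(xstrings.Reverse("hello"))         // olleh
	fmt.Println(xstrings.Center("go", 8, "*"))     // ***go***
}
```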
+## License
+
+This library is licensed under the MIT license. See LICENSE for details.
diff --git a/vendor/github.com/huandu/xstrings/common.go b/vendor/github.com/huandu/xstrings/common.go
new file mode 100644
index 000000000..f427cc84e
--- /dev/null
+++ b/vendor/github.com/huandu/xstrings/common.go
@@ -0,0 +1,21 @@
+// Copyright 2015 Huan Du. All rights reserved.
+// Licensed under the MIT license that can be found in the LICENSE file.
+
+package xstrings
+
+const bufferMaxInitGrowSize = 2048
+
+// allocBuffer lazily initializes a buffer for building the output string.
+func allocBuffer(orig, cur string) *stringBuilder {
+ output := &stringBuilder{}
+ maxSize := len(orig) * 4
+
+ // Avoid reserving too much memory at once.
+ if maxSize > bufferMaxInitGrowSize {
+ maxSize = bufferMaxInitGrowSize
+ }
+
+ output.Grow(maxSize)
+ output.WriteString(orig[:len(orig)-len(cur)])
+ return output
+}
diff --git a/vendor/github.com/huandu/xstrings/convert.go b/vendor/github.com/huandu/xstrings/convert.go
new file mode 100644
index 000000000..151c3151d
--- /dev/null
+++ b/vendor/github.com/huandu/xstrings/convert.go
@@ -0,0 +1,590 @@
+// Copyright 2015 Huan Du. All rights reserved.
+// Licensed under the MIT license that can be found in the LICENSE file.
+
+package xstrings
+
+import (
+ "math/rand"
+ "unicode"
+ "unicode/utf8"
+)
+
+// ToCamelCase converts words separated by space, underscore or hyphen to camel case.
+//
+// Some samples.
+// "some_words" => "SomeWords"
+// "http_server" => "HttpServer"
+// "no_https" => "NoHttps"
+// "_complex__case_" => "_Complex_Case_"
+// "some words" => "SomeWords"
+func ToCamelCase(str string) string {
+ if len(str) == 0 {
+ return ""
+ }
+
+ buf := &stringBuilder{}
+ var r0, r1 rune
+ var size int
+
+	// Leading connectors appear in the output unchanged.
+ for len(str) > 0 {
+ r0, size = utf8.DecodeRuneInString(str)
+ str = str[size:]
+
+ if !isConnector(r0) {
+ r0 = unicode.ToUpper(r0)
+ break
+ }
+
+ buf.WriteRune(r0)
+ }
+
+ if len(str) == 0 {
+		// A special case for a string that contains only one rune.
+ if size != 0 {
+ buf.WriteRune(r0)
+ }
+
+ return buf.String()
+ }
+
+ for len(str) > 0 {
+ r1 = r0
+ r0, size = utf8.DecodeRuneInString(str)
+ str = str[size:]
+
+ if isConnector(r0) && isConnector(r1) {
+ buf.WriteRune(r1)
+ continue
+ }
+
+ if isConnector(r1) {
+ r0 = unicode.ToUpper(r0)
+ } else {
+ r0 = unicode.ToLower(r0)
+ buf.WriteRune(r1)
+ }
+ }
+
+ buf.WriteRune(r0)
+ return buf.String()
+}
+
+// ToSnakeCase can convert all upper case characters in a string to
+// snake case format.
+//
+// Some samples.
+// "FirstName" => "first_name"
+// "HTTPServer" => "http_server"
+// "NoHTTPS" => "no_https"
+// "GO_PATH" => "go_path"
+// "GO PATH" => "go_path" // space is converted to underscore.
+// "GO-PATH" => "go_path" // hyphen is converted to underscore.
+//     "http2xx" => "http_2xx"       // insert an underscore before a number and after a letter.
+// "HTTP20xOK" => "http_20x_ok"
+// "Duration2m3s" => "duration_2m3s"
+// "Bld4Floor3rd" => "bld4_floor_3rd"
+func ToSnakeCase(str string) string {
+ return camelCaseToLowerCase(str, '_')
+}
+
+// ToKebabCase can convert all upper case characters in a string to
+// kebab case format.
+//
+// Some samples.
+// "FirstName" => "first-name"
+// "HTTPServer" => "http-server"
+// "NoHTTPS" => "no-https"
+// "GO_PATH" => "go-path"
+// "GO PATH" => "go-path" // space is converted to '-'.
+// "GO-PATH" => "go-path" // hyphen is converted to '-'.
+//     "http2xx" => "http-2xx"       // insert a hyphen before a number and after a letter.
+// "HTTP20xOK" => "http-20x-ok"
+// "Duration2m3s" => "duration-2m3s"
+// "Bld4Floor3rd" => "bld4-floor-3rd"
+func ToKebabCase(str string) string {
+ return camelCaseToLowerCase(str, '-')
+}
+
+func camelCaseToLowerCase(str string, connector rune) string {
+ if len(str) == 0 {
+ return ""
+ }
+
+ buf := &stringBuilder{}
+ wt, word, remaining := nextWord(str)
+
+ for len(remaining) > 0 {
+ if wt != connectorWord {
+ toLower(buf, wt, word, connector)
+ }
+
+ prev := wt
+ last := word
+ wt, word, remaining = nextWord(remaining)
+
+ switch prev {
+ case numberWord:
+ for wt == alphabetWord || wt == numberWord {
+ toLower(buf, wt, word, connector)
+ wt, word, remaining = nextWord(remaining)
+ }
+
+ if wt != invalidWord && wt != punctWord && wt != connectorWord {
+ buf.WriteRune(connector)
+ }
+
+ case connectorWord:
+ toLower(buf, prev, last, connector)
+
+ case punctWord:
+ // nothing.
+
+ default:
+ if wt != numberWord {
+ if wt != connectorWord && wt != punctWord {
+ buf.WriteRune(connector)
+ }
+
+ break
+ }
+
+ if len(remaining) == 0 {
+ break
+ }
+
+ last := word
+ wt, word, remaining = nextWord(remaining)
+
+ // consider number as a part of previous word.
+ // e.g. "Bld4Floor" => "bld4_floor"
+ if wt != alphabetWord {
+ toLower(buf, numberWord, last, connector)
+
+ if wt != connectorWord && wt != punctWord {
+ buf.WriteRune(connector)
+ }
+
+ break
+ }
+
+ // if there are some lower case letters following a number,
+ // add connector before the number.
+ // e.g. "HTTP2xx" => "http_2xx"
+ buf.WriteRune(connector)
+ toLower(buf, numberWord, last, connector)
+
+ for wt == alphabetWord || wt == numberWord {
+ toLower(buf, wt, word, connector)
+ wt, word, remaining = nextWord(remaining)
+ }
+
+ if wt != invalidWord && wt != connectorWord && wt != punctWord {
+ buf.WriteRune(connector)
+ }
+ }
+ }
+
+ toLower(buf, wt, word, connector)
+ return buf.String()
+}
+
+func isConnector(r rune) bool {
+ return r == '-' || r == '_' || unicode.IsSpace(r)
+}
+
+type wordType int
+
+const (
+ invalidWord wordType = iota
+ numberWord
+ upperCaseWord
+ alphabetWord
+ connectorWord
+ punctWord
+ otherWord
+)
+
+func nextWord(str string) (wt wordType, word, remaining string) {
+ if len(str) == 0 {
+ return
+ }
+
+ var offset int
+ remaining = str
+ r, size := nextValidRune(remaining, utf8.RuneError)
+ offset += size
+
+ if r == utf8.RuneError {
+ wt = invalidWord
+ word = str[:offset]
+ remaining = str[offset:]
+ return
+ }
+
+ switch {
+ case isConnector(r):
+ wt = connectorWord
+ remaining = remaining[size:]
+
+ for len(remaining) > 0 {
+ r, size = nextValidRune(remaining, r)
+
+ if !isConnector(r) {
+ break
+ }
+
+ offset += size
+ remaining = remaining[size:]
+ }
+
+ case unicode.IsPunct(r):
+ wt = punctWord
+ remaining = remaining[size:]
+
+ for len(remaining) > 0 {
+ r, size = nextValidRune(remaining, r)
+
+ if !unicode.IsPunct(r) {
+ break
+ }
+
+ offset += size
+ remaining = remaining[size:]
+ }
+
+ case unicode.IsUpper(r):
+ wt = upperCaseWord
+ remaining = remaining[size:]
+
+ if len(remaining) == 0 {
+ break
+ }
+
+ r, size = nextValidRune(remaining, r)
+
+ switch {
+ case unicode.IsUpper(r):
+ prevSize := size
+ offset += size
+ remaining = remaining[size:]
+
+ for len(remaining) > 0 {
+ r, size = nextValidRune(remaining, r)
+
+ if !unicode.IsUpper(r) {
+ break
+ }
+
+ prevSize = size
+ offset += size
+ remaining = remaining[size:]
+ }
+
+			// It's a bit complex when dealing with a case like "HTTPStatus":
+			// it's expected to be split into "HTTP" and "Status", so the "S"
+			// should be in remaining instead of word.
+ if len(remaining) > 0 && isAlphabet(r) {
+ offset -= prevSize
+ remaining = str[offset:]
+ }
+
+ case isAlphabet(r):
+ offset += size
+ remaining = remaining[size:]
+
+ for len(remaining) > 0 {
+ r, size = nextValidRune(remaining, r)
+
+ if !isAlphabet(r) || unicode.IsUpper(r) {
+ break
+ }
+
+ offset += size
+ remaining = remaining[size:]
+ }
+ }
+
+ case isAlphabet(r):
+ wt = alphabetWord
+ remaining = remaining[size:]
+
+ for len(remaining) > 0 {
+ r, size = nextValidRune(remaining, r)
+
+ if !isAlphabet(r) || unicode.IsUpper(r) {
+ break
+ }
+
+ offset += size
+ remaining = remaining[size:]
+ }
+
+ case unicode.IsNumber(r):
+ wt = numberWord
+ remaining = remaining[size:]
+
+ for len(remaining) > 0 {
+ r, size = nextValidRune(remaining, r)
+
+ if !unicode.IsNumber(r) {
+ break
+ }
+
+ offset += size
+ remaining = remaining[size:]
+ }
+
+ default:
+ wt = otherWord
+ remaining = remaining[size:]
+
+ for len(remaining) > 0 {
+ r, size = nextValidRune(remaining, r)
+
+ if size == 0 || isConnector(r) || isAlphabet(r) || unicode.IsNumber(r) || unicode.IsPunct(r) {
+ break
+ }
+
+ offset += size
+ remaining = remaining[size:]
+ }
+ }
+
+ word = str[:offset]
+ return
+}
+
+func nextValidRune(str string, prev rune) (r rune, size int) {
+ var sz int
+
+ for len(str) > 0 {
+ r, sz = utf8.DecodeRuneInString(str)
+ size += sz
+
+ if r != utf8.RuneError {
+ return
+ }
+
+ str = str[sz:]
+ }
+
+ r = prev
+ return
+}
+
+func toLower(buf *stringBuilder, wt wordType, str string, connector rune) {
+ buf.Grow(buf.Len() + len(str))
+
+ if wt != upperCaseWord && wt != connectorWord {
+ buf.WriteString(str)
+ return
+ }
+
+ for len(str) > 0 {
+ r, size := utf8.DecodeRuneInString(str)
+ str = str[size:]
+
+ if isConnector(r) {
+ buf.WriteRune(connector)
+ } else if unicode.IsUpper(r) {
+ buf.WriteRune(unicode.ToLower(r))
+ } else {
+ buf.WriteRune(r)
+ }
+ }
+}
+
+// SwapCase swaps the case of each character, converting upper case to lower case and vice versa.
+func SwapCase(str string) string {
+ var r rune
+ var size int
+
+ buf := &stringBuilder{}
+
+ for len(str) > 0 {
+ r, size = utf8.DecodeRuneInString(str)
+
+ switch {
+ case unicode.IsUpper(r):
+ buf.WriteRune(unicode.ToLower(r))
+
+ case unicode.IsLower(r):
+ buf.WriteRune(unicode.ToUpper(r))
+
+ default:
+ buf.WriteRune(r)
+ }
+
+ str = str[size:]
+ }
+
+ return buf.String()
+}
+
+// FirstRuneToUpper converts first rune to upper case if necessary.
+func FirstRuneToUpper(str string) string {
+ if str == "" {
+ return str
+ }
+
+ r, size := utf8.DecodeRuneInString(str)
+
+ if !unicode.IsLower(r) {
+ return str
+ }
+
+ buf := &stringBuilder{}
+ buf.WriteRune(unicode.ToUpper(r))
+ buf.WriteString(str[size:])
+ return buf.String()
+}
+
+// FirstRuneToLower converts first rune to lower case if necessary.
+func FirstRuneToLower(str string) string {
+ if str == "" {
+ return str
+ }
+
+ r, size := utf8.DecodeRuneInString(str)
+
+ if !unicode.IsUpper(r) {
+ return str
+ }
+
+ buf := &stringBuilder{}
+ buf.WriteRune(unicode.ToLower(r))
+ buf.WriteString(str[size:])
+ return buf.String()
+}
+
+// Shuffle randomizes the order of runes in a string and returns the result.
+// It uses the default random source in `math/rand`.
+func Shuffle(str string) string {
+ if str == "" {
+ return str
+ }
+
+ runes := []rune(str)
+ index := 0
+
+ for i := len(runes) - 1; i > 0; i-- {
+ index = rand.Intn(i + 1)
+
+ if i != index {
+ runes[i], runes[index] = runes[index], runes[i]
+ }
+ }
+
+ return string(runes)
+}
+
+// ShuffleSource randomizes runes in a string with the given random source.
+func ShuffleSource(str string, src rand.Source) string {
+ if str == "" {
+ return str
+ }
+
+ runes := []rune(str)
+ index := 0
+ r := rand.New(src)
+
+ for i := len(runes) - 1; i > 0; i-- {
+ index = r.Intn(i + 1)
+
+ if i != index {
+ runes[i], runes[index] = runes[index], runes[i]
+ }
+ }
+
+ return string(runes)
+}
+
+// Successor returns the successor of a string.
+//
+// If an alphanumeric rune is found in the string, it is increased by 1.
+// If the increment generates a "carry", the rune to the left of it is incremented.
+// This process repeats until there is no carry, adding an additional rune if necessary.
+//
+// If there is no alphanumeric rune, the rightmost rune is increased by 1,
+// regardless of whether the result is a valid rune or not.
+//
+// Only the following characters are considered alphanumeric.
+// * a - z
+// * A - Z
+// * 0 - 9
+//
+// Samples (borrowed from ruby's String#succ document):
+// "abcd" => "abce"
+// "THX1138" => "THX1139"
+// "<<koala>>" => "<<koalb>>"
+// "1999zzz" => "2000aaa"
+// "ZZZ9999" => "AAAA0000"
+// "***" => "**+"
+func Successor(str string) string {
+ if str == "" {
+ return str
+ }
+
+ var r rune
+ var i int
+ carry := ' '
+ runes := []rune(str)
+ l := len(runes)
+ lastAlphanumeric := l
+
+ for i = l - 1; i >= 0; i-- {
+ r = runes[i]
+
+ if ('a' <= r && r <= 'y') ||
+ ('A' <= r && r <= 'Y') ||
+ ('0' <= r && r <= '8') {
+ runes[i]++
+ carry = ' '
+ lastAlphanumeric = i
+ break
+ }
+
+ switch r {
+ case 'z':
+ runes[i] = 'a'
+ carry = 'a'
+ lastAlphanumeric = i
+
+ case 'Z':
+ runes[i] = 'A'
+ carry = 'A'
+ lastAlphanumeric = i
+
+ case '9':
+ runes[i] = '0'
+ carry = '0'
+ lastAlphanumeric = i
+ }
+ }
+
+	// One more character needs to be added for the carry.
+ if i < 0 && carry != ' ' {
+ buf := &stringBuilder{}
+ buf.Grow(l + 4) // Reserve enough space for write.
+
+ if lastAlphanumeric != 0 {
+ buf.WriteString(str[:lastAlphanumeric])
+ }
+
+ buf.WriteRune(carry)
+
+ for _, r = range runes[lastAlphanumeric:] {
+ buf.WriteRune(r)
+ }
+
+ return buf.String()
+ }
+
+ // No alphanumeric character. Simply increase last rune's value.
+ if lastAlphanumeric == l {
+ runes[l-1]++
+ }
+
+ return string(runes)
+}
diff --git a/vendor/github.com/huandu/xstrings/count.go b/vendor/github.com/huandu/xstrings/count.go
new file mode 100644
index 000000000..f96e38703
--- /dev/null
+++ b/vendor/github.com/huandu/xstrings/count.go
@@ -0,0 +1,120 @@
+// Copyright 2015 Huan Du. All rights reserved.
+// Licensed under the MIT license that can be found in the LICENSE file.
+
+package xstrings
+
+import (
+ "unicode"
+ "unicode/utf8"
+)
+
+// Len returns str's utf8 rune length.
+func Len(str string) int {
+ return utf8.RuneCountInString(str)
+}
+
+// WordCount returns the number of words in a string.
+//
+// A word is defined as a locale-dependent string of alphabetic characters,
+// which may also contain, but not start with, `'` and `-` characters.
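+//
+// Sample (a rough illustration of the rule above):
+//     WordCount("hello, world!") => 2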
+func WordCount(str string) int {
+ var r rune
+ var size, n int
+
+ inWord := false
+
+ for len(str) > 0 {
+ r, size = utf8.DecodeRuneInString(str)
+
+ switch {
+ case isAlphabet(r):
+ if !inWord {
+ inWord = true
+ n++
+ }
+
+ case inWord && (r == '\'' || r == '-'):
+ // Still in word.
+
+ default:
+ inWord = false
+ }
+
+ str = str[size:]
+ }
+
+ return n
+}
+
+const minCJKCharacter = '\u3400'
+
+// isAlphabet checks whether r is a letter but not a CJK character.
+func isAlphabet(r rune) bool {
+ if !unicode.IsLetter(r) {
+ return false
+ }
+
+ switch {
+ // Quick check for non-CJK character.
+ case r < minCJKCharacter:
+ return true
+
+ // Common CJK characters.
+ case r >= '\u4E00' && r <= '\u9FCC':
+ return false
+
+ // Rare CJK characters.
+ case r >= '\u3400' && r <= '\u4D85':
+ return false
+
+ // Rare and historic CJK characters.
+ case r >= '\U00020000' && r <= '\U0002B81D':
+ return false
+ }
+
+ return true
+}
+
+// Width returns the string's display width in a monospaced font.
+// Multi-byte characters are usually twice as wide as single-byte characters.
+//
+// Algorithm comes from `mb_strwidth` in PHP.
+// http://php.net/manual/en/function.mb-strwidth.php
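+//
+// Samples (following the rune ranges used below):
+//     Width("abc") => 3
+//     Width("中文") => 4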
+func Width(str string) int {
+ var r rune
+ var size, n int
+
+ for len(str) > 0 {
+ r, size = utf8.DecodeRuneInString(str)
+ n += RuneWidth(r)
+ str = str[size:]
+ }
+
+ return n
+}
+
+// RuneWidth returns the character's display width in a monospaced font.
+// Multi-byte characters are usually twice as wide as single-byte characters.
+//
+// Algorithm comes from `mb_strwidth` in PHP.
+// http://php.net/manual/en/function.mb-strwidth.php
+func RuneWidth(r rune) int {
+ switch {
+ case r == utf8.RuneError || r < '\x20':
+ return 0
+
+ case '\x20' <= r && r < '\u2000':
+ return 1
+
+ case '\u2000' <= r && r < '\uFF61':
+ return 2
+
+ case '\uFF61' <= r && r < '\uFFA0':
+ return 1
+
+ case '\uFFA0' <= r:
+ return 2
+ }
+
+ return 0
+}
diff --git a/vendor/github.com/huandu/xstrings/doc.go b/vendor/github.com/huandu/xstrings/doc.go
new file mode 100644
index 000000000..1a6ef069f
--- /dev/null
+++ b/vendor/github.com/huandu/xstrings/doc.go
@@ -0,0 +1,8 @@
+// Copyright 2015 Huan Du. All rights reserved.
+// Licensed under the MIT license that can be found in the LICENSE file.
+
+// Package xstrings provides useful string algorithms that are not included in the standard `strings` package.
+// See project home page for details. https://github.com/huandu/xstrings
+//
+// Package xstrings assumes all strings are encoded in utf8.
+package xstrings
diff --git a/vendor/github.com/huandu/xstrings/format.go b/vendor/github.com/huandu/xstrings/format.go
new file mode 100644
index 000000000..8cd76c525
--- /dev/null
+++ b/vendor/github.com/huandu/xstrings/format.go
@@ -0,0 +1,169 @@
+// Copyright 2015 Huan Du. All rights reserved.
+// Licensed under the MIT license that can be found in the LICENSE file.
+
+package xstrings
+
+import (
+ "unicode/utf8"
+)
+
+// ExpandTabs expands tab ('\t') runes in str to one or more spaces, depending on
+// the current column and tabSize.
+// The column number is reset to zero after each newline ('\n') occurring in str.
+//
+// ExpandTabs uses RuneWidth to decide rune's width.
+// For example, CJK characters will be treated as two characters.
+//
+// If tabSize <= 0, ExpandTabs panics with an error.
+//
+// Samples:
+// ExpandTabs("a\tbc\tdef\tghij\tk", 4) => "a bc def ghij k"
+// ExpandTabs("abcdefg\thij\nk\tl", 4) => "abcdefg hij\nk l"
+// ExpandTabs("z中\t文\tw", 4) => "z中 文 w"
+func ExpandTabs(str string, tabSize int) string {
+ if tabSize <= 0 {
+ panic("tab size must be positive")
+ }
+
+ var r rune
+ var i, size, column, expand int
+ var output *stringBuilder
+
+ orig := str
+
+ for len(str) > 0 {
+ r, size = utf8.DecodeRuneInString(str)
+
+ if r == '\t' {
+ expand = tabSize - column%tabSize
+
+ if output == nil {
+ output = allocBuffer(orig, str)
+ }
+
+ for i = 0; i < expand; i++ {
+ output.WriteRune(' ')
+ }
+
+ column += expand
+ } else {
+ if r == '\n' {
+ column = 0
+ } else {
+ column += RuneWidth(r)
+ }
+
+ if output != nil {
+ output.WriteRune(r)
+ }
+ }
+
+ str = str[size:]
+ }
+
+ if output == nil {
+ return orig
+ }
+
+ return output.String()
+}
+
+// LeftJustify returns a string padded on the right side with pad if str's rune length is smaller than length.
+// If str's rune length is larger than or equal to length, str itself is returned.
+//
+// If pad is an empty string, str will be returned.
+//
+// Samples:
+// LeftJustify("hello", 4, " ") => "hello"
+// LeftJustify("hello", 10, " ") => "hello "
+// LeftJustify("hello", 10, "123") => "hello12312"
+func LeftJustify(str string, length int, pad string) string {
+ l := Len(str)
+
+ if l >= length || pad == "" {
+ return str
+ }
+
+ remains := length - l
+ padLen := Len(pad)
+
+ output := &stringBuilder{}
+ output.Grow(len(str) + (remains/padLen+1)*len(pad))
+ output.WriteString(str)
+ writePadString(output, pad, padLen, remains)
+ return output.String()
+}
+
+// RightJustify returns a string padded on the left side with pad if str's rune length is smaller than length.
+// If str's rune length is larger than or equal to length, str itself is returned.
+//
+// If pad is an empty string, str will be returned.
+//
+// Samples:
+// RightJustify("hello", 4, " ") => "hello"
+// RightJustify("hello", 10, " ") => " hello"
+// RightJustify("hello", 10, "123") => "12312hello"
+func RightJustify(str string, length int, pad string) string {
+ l := Len(str)
+
+ if l >= length || pad == "" {
+ return str
+ }
+
+ remains := length - l
+ padLen := Len(pad)
+
+ output := &stringBuilder{}
+ output.Grow(len(str) + (remains/padLen+1)*len(pad))
+ writePadString(output, pad, padLen, remains)
+ output.WriteString(str)
+ return output.String()
+}
+
+// Center returns a string padded on both sides with pad if str's rune length is smaller than length.
+// If str's rune length is larger than or equal to length, str itself is returned.
+//
+// If pad is an empty string, str will be returned.
+//
+// Samples:
+// Center("hello", 4, " ") => "hello"
+// Center("hello", 10, " ") => " hello "
+// Center("hello", 10, "123") => "12hello123"
+func Center(str string, length int, pad string) string {
+ l := Len(str)
+
+ if l >= length || pad == "" {
+ return str
+ }
+
+ remains := length - l
+ padLen := Len(pad)
+
+ output := &stringBuilder{}
+ output.Grow(len(str) + (remains/padLen+1)*len(pad))
+ writePadString(output, pad, padLen, remains/2)
+ output.WriteString(str)
+ writePadString(output, pad, padLen, (remains+1)/2)
+ return output.String()
+}
+
+func writePadString(output *stringBuilder, pad string, padLen, remains int) {
+ var r rune
+ var size int
+
+ repeats := remains / padLen
+
+ for i := 0; i < repeats; i++ {
+ output.WriteString(pad)
+ }
+
+ remains = remains % padLen
+
+ if remains != 0 {
+ for i := 0; i < remains; i++ {
+ r, size = utf8.DecodeRuneInString(pad)
+ output.WriteRune(r)
+ pad = pad[size:]
+ }
+ }
+}
diff --git a/vendor/github.com/huandu/xstrings/manipulate.go b/vendor/github.com/huandu/xstrings/manipulate.go
new file mode 100644
index 000000000..64075f9bb
--- /dev/null
+++ b/vendor/github.com/huandu/xstrings/manipulate.go
@@ -0,0 +1,216 @@
+// Copyright 2015 Huan Du. All rights reserved.
+// Licensed under the MIT license that can be found in the LICENSE file.
+
+package xstrings
+
+import (
+ "strings"
+ "unicode/utf8"
+)
+
+// Reverse reverses a utf8 encoded string.
+func Reverse(str string) string {
+ var size int
+
+ tail := len(str)
+ buf := make([]byte, tail)
+ s := buf
+
+ for len(str) > 0 {
+ _, size = utf8.DecodeRuneInString(str)
+ tail -= size
+ s = append(s[:tail], []byte(str[:size])...)
+ str = str[size:]
+ }
+
+ return string(buf)
+}
+
+// Slice a string by rune.
+//
+// Start must satisfy 0 <= start <= rune length.
+//
+// End can be positive, zero or negative.
+// If end >= 0, start and end must satisfy start <= end <= rune length.
+// If end < 0, it means slice to the end of string.
+//
+// Otherwise, Slice will panic as out of range.
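+//
+// Samples (derived from the rules above):
+//     Slice("日本語abc", 1, 4)  => "本語a"
+//     Slice("日本語abc", 2, -1) => "語abc"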
+func Slice(str string, start, end int) string {
+ var size, startPos, endPos int
+
+ origin := str
+
+ if start < 0 || end > len(str) || (end >= 0 && start > end) {
+ panic("out of range")
+ }
+
+ if end >= 0 {
+ end -= start
+ }
+
+ for start > 0 && len(str) > 0 {
+ _, size = utf8.DecodeRuneInString(str)
+ start--
+ startPos += size
+ str = str[size:]
+ }
+
+ if end < 0 {
+ return origin[startPos:]
+ }
+
+ endPos = startPos
+
+ for end > 0 && len(str) > 0 {
+ _, size = utf8.DecodeRuneInString(str)
+ end--
+ endPos += size
+ str = str[size:]
+ }
+
+ if len(str) == 0 && (start > 0 || end > 0) {
+ panic("out of range")
+ }
+
+ return origin[startPos:endPos]
+}
+
+// Partition splits a string by sep into three parts.
+// The return values are the head, match and tail strings.
+//
+// If str contains sep, for example "hello" and "l", Partition returns
+// "he", "l", "lo"
+//
+// If str doesn't contain sep, for example "hello" and "x", Partition returns
+// "hello", "", ""
+func Partition(str, sep string) (head, match, tail string) {
+ index := strings.Index(str, sep)
+
+ if index == -1 {
+ head = str
+ return
+ }
+
+ head = str[:index]
+ match = str[index : index+len(sep)]
+ tail = str[index+len(sep):]
+ return
+}
+
+// LastPartition splits a string by last instance of sep into three parts.
+// The return values are the head, match and tail strings.
+//
+// If str contains sep, for example "hello" and "l", LastPartition returns
+// "hel", "l", "o"
+//
+// If str doesn't contain sep, for example "hello" and "x", LastPartition returns
+// "", "", "hello"
+func LastPartition(str, sep string) (head, match, tail string) {
+ index := strings.LastIndex(str, sep)
+
+ if index == -1 {
+ tail = str
+ return
+ }
+
+ head = str[:index]
+ match = str[index : index+len(sep)]
+ tail = str[index+len(sep):]
+ return
+}
+
+// Insert inserts src into dst at the given rune index.
+// The index is counted by runes instead of bytes.
+//
+// If index is out of range of dst, Insert panics with out of range.
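+//
+// Sample:
+//     Insert("hello", "world", 2) => "heworldllo"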
+func Insert(dst, src string, index int) string {
+ return Slice(dst, 0, index) + src + Slice(dst, index, -1)
+}
+
+// Scrub replaces invalid utf8 byte sequences in str with the repl string.
+// Adjacent invalid bytes are replaced only once.
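+//
+// Sample (where "\xff" is an invalid utf8 byte):
+//     Scrub("a\xff\xffb", "?") => "a?b"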
+func Scrub(str, repl string) string {
+ var buf *stringBuilder
+ var r rune
+ var size, pos int
+ var hasError bool
+
+ origin := str
+
+ for len(str) > 0 {
+ r, size = utf8.DecodeRuneInString(str)
+
+ if r == utf8.RuneError {
+ if !hasError {
+ if buf == nil {
+ buf = &stringBuilder{}
+ }
+
+ buf.WriteString(origin[:pos])
+ hasError = true
+ }
+ } else if hasError {
+ hasError = false
+ buf.WriteString(repl)
+
+ origin = origin[pos:]
+ pos = 0
+ }
+
+ pos += size
+ str = str[size:]
+ }
+
+ if buf != nil {
+ buf.WriteString(origin)
+ return buf.String()
+ }
+
+ // No invalid byte.
+ return origin
+}
+
+// WordSplit splits a string into words and returns them as a slice.
+// If there is no word in the string, it returns nil.
+//
+// A word is defined as a locale-dependent string of alphabetic characters,
+// which may also contain, but not start with, `'` and `-` characters.
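+//
+// Sample:
+//     WordSplit("hello, world!") => []string{"hello", "world"}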
+func WordSplit(str string) []string {
+ var word string
+ var words []string
+ var r rune
+ var size, pos int
+
+ inWord := false
+
+ for len(str) > 0 {
+ r, size = utf8.DecodeRuneInString(str)
+
+ switch {
+ case isAlphabet(r):
+ if !inWord {
+ inWord = true
+ word = str
+ pos = 0
+ }
+
+ case inWord && (r == '\'' || r == '-'):
+ // Still in word.
+
+ default:
+ if inWord {
+ inWord = false
+ words = append(words, word[:pos])
+ }
+ }
+
+ pos += size
+ str = str[size:]
+ }
+
+ if inWord {
+ words = append(words, word[:pos])
+ }
+
+ return words
+}
diff --git a/vendor/github.com/huandu/xstrings/stringbuilder.go b/vendor/github.com/huandu/xstrings/stringbuilder.go
new file mode 100644
index 000000000..bb0919d32
--- /dev/null
+++ b/vendor/github.com/huandu/xstrings/stringbuilder.go
@@ -0,0 +1,7 @@
+//+build go1.10
+
+package xstrings
+
+import "strings"
+
+type stringBuilder = strings.Builder
diff --git a/vendor/github.com/huandu/xstrings/stringbuilder_go110.go b/vendor/github.com/huandu/xstrings/stringbuilder_go110.go
new file mode 100644
index 000000000..dac389d13
--- /dev/null
+++ b/vendor/github.com/huandu/xstrings/stringbuilder_go110.go
@@ -0,0 +1,9 @@
+//+build !go1.10
+
+package xstrings
+
+import "bytes"
+
+type stringBuilder struct {
+ bytes.Buffer
+}
diff --git a/vendor/github.com/huandu/xstrings/translate.go b/vendor/github.com/huandu/xstrings/translate.go
new file mode 100644
index 000000000..42e694fb1
--- /dev/null
+++ b/vendor/github.com/huandu/xstrings/translate.go
@@ -0,0 +1,546 @@
+// Copyright 2015 Huan Du. All rights reserved.
+// Licensed under the MIT license that can be found in the LICENSE file.
+
+package xstrings
+
+import (
+ "unicode"
+ "unicode/utf8"
+)
+
+type runeRangeMap struct {
+ FromLo rune // Lower bound of range map.
+ FromHi rune // An inclusive higher bound of range map.
+ ToLo rune
+ ToHi rune
+}
+
+type runeDict struct {
+ Dict [unicode.MaxASCII + 1]rune
+}
+
+type runeMap map[rune]rune
+
+// Translator can translate string with pre-compiled from and to patterns.
+// If a from/to pattern pair needs to be used more than once, it's recommended
+// to create a Translator and reuse it.
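+//
+// Sample (the expected results follow the Translate samples below):
+//     tr := NewTranslator("a-z", "A-Z")
+//     tr.Translate("hello") => "HELLO"
+//     tr.Translate("world") => "WORLD"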
+type Translator struct {
+ quickDict *runeDict // A quick dictionary to look up rune by index. Only available for latin runes.
+ runeMap runeMap // Rune map for translation.
+ ranges []*runeRangeMap // Ranges of runes.
+ mappedRune rune // If mappedRune >= 0, all matched runes are translated to the mappedRune.
+	reverted   bool            // True if the from pattern starts with '^'; the matching set is then inverted.
+ hasPattern bool
+}
+
+// NewTranslator creates new Translator through a from/to pattern pair.
+func NewTranslator(from, to string) *Translator {
+ tr := &Translator{}
+
+ if from == "" {
+ return tr
+ }
+
+ reverted := from[0] == '^'
+ deletion := len(to) == 0
+
+ if reverted {
+ from = from[1:]
+ }
+
+ var fromStart, fromEnd, fromRangeStep rune
+ var toStart, toEnd, toRangeStep rune
+ var fromRangeSize, toRangeSize rune
+ var singleRunes []rune
+
+ // Update the to rune range.
+ updateRange := func() {
+ // No more rune to read in the to rune pattern.
+ if toEnd == utf8.RuneError {
+ return
+ }
+
+ if toRangeStep == 0 {
+ to, toStart, toEnd, toRangeStep = nextRuneRange(to, toEnd)
+ return
+ }
+
+ // Current range is not empty. Consume 1 rune from start.
+ if toStart != toEnd {
+ toStart += toRangeStep
+ return
+ }
+
+ // No more rune. Repeat the last rune.
+ if to == "" {
+ toEnd = utf8.RuneError
+ return
+ }
+
+ // Both start and end are used. Read two more runes from the to pattern.
+ to, toStart, toEnd, toRangeStep = nextRuneRange(to, utf8.RuneError)
+ }
+
+ if deletion {
+ toStart = utf8.RuneError
+ toEnd = utf8.RuneError
+ } else {
+ // If from pattern is reverted, only the last rune in the to pattern will be used.
+ if reverted {
+ var size int
+
+ for len(to) > 0 {
+ toStart, size = utf8.DecodeRuneInString(to)
+ to = to[size:]
+ }
+
+ toEnd = utf8.RuneError
+ } else {
+ to, toStart, toEnd, toRangeStep = nextRuneRange(to, utf8.RuneError)
+ }
+ }
+
+ fromEnd = utf8.RuneError
+
+ for len(from) > 0 {
+ from, fromStart, fromEnd, fromRangeStep = nextRuneRange(from, fromEnd)
+
+ // fromStart is a single character. Just map it with a rune in the to pattern.
+ if fromRangeStep == 0 {
+ singleRunes = tr.addRune(fromStart, toStart, singleRunes)
+ updateRange()
+ continue
+ }
+
+ for toEnd != utf8.RuneError && fromStart != fromEnd {
+ // If mapped rune is a single character instead of a range, simply shift first
+ // rune in the range.
+ if toRangeStep == 0 {
+ singleRunes = tr.addRune(fromStart, toStart, singleRunes)
+ updateRange()
+ fromStart += fromRangeStep
+ continue
+ }
+
+ fromRangeSize = (fromEnd - fromStart) * fromRangeStep
+ toRangeSize = (toEnd - toStart) * toRangeStep
+
+ // Not enough runes in the to pattern. Need to read more.
+ if fromRangeSize > toRangeSize {
+ fromStart, toStart = tr.addRuneRange(fromStart, fromStart+toRangeSize*fromRangeStep, toStart, toEnd, singleRunes)
+ fromStart += fromRangeStep
+ updateRange()
+
+			// Edge case: if fromRangeSize == toRangeSize + 1, the last fromStart value needs
+			// to be considered as a single rune.
+ if fromStart == fromEnd {
+ singleRunes = tr.addRune(fromStart, toStart, singleRunes)
+ updateRange()
+ }
+
+ continue
+ }
+
+ fromStart, toStart = tr.addRuneRange(fromStart, fromEnd, toStart, toStart+fromRangeSize*toRangeStep, singleRunes)
+ updateRange()
+ break
+ }
+
+ if fromStart == fromEnd {
+ fromEnd = utf8.RuneError
+ continue
+ }
+
+ _, toStart = tr.addRuneRange(fromStart, fromEnd, toStart, toStart, singleRunes)
+ fromEnd = utf8.RuneError
+ }
+
+ if fromEnd != utf8.RuneError {
+ tr.addRune(fromEnd, toStart, singleRunes)
+ }
+
+ tr.reverted = reverted
+ tr.mappedRune = -1
+ tr.hasPattern = true
+
+ // Translate RuneError only if in deletion or reverted mode.
+ if deletion || reverted {
+ tr.mappedRune = toStart
+ }
+
+ return tr
+}
+
+func (tr *Translator) addRune(from, to rune, singleRunes []rune) []rune {
+ if from <= unicode.MaxASCII {
+ if tr.quickDict == nil {
+ tr.quickDict = &runeDict{}
+ }
+
+ tr.quickDict.Dict[from] = to
+ } else {
+ if tr.runeMap == nil {
+ tr.runeMap = make(runeMap)
+ }
+
+ tr.runeMap[from] = to
+ }
+
+ singleRunes = append(singleRunes, from)
+ return singleRunes
+}
+
+func (tr *Translator) addRuneRange(fromLo, fromHi, toLo, toHi rune, singleRunes []rune) (rune, rune) {
+ var r rune
+ var rrm *runeRangeMap
+
+ if fromLo < fromHi {
+ rrm = &runeRangeMap{
+ FromLo: fromLo,
+ FromHi: fromHi,
+ ToLo: toLo,
+ ToHi: toHi,
+ }
+ } else {
+ rrm = &runeRangeMap{
+ FromLo: fromHi,
+ FromHi: fromLo,
+ ToLo: toHi,
+ ToHi: toLo,
+ }
+ }
+
+	// If any single rune conflicts with this rune range, clear the single rune record.
+ for _, r = range singleRunes {
+ if rrm.FromLo <= r && r <= rrm.FromHi {
+ if r <= unicode.MaxASCII {
+ tr.quickDict.Dict[r] = 0
+ } else {
+ delete(tr.runeMap, r)
+ }
+ }
+ }
+
+ tr.ranges = append(tr.ranges, rrm)
+ return fromHi, toHi
+}
+
+func nextRuneRange(str string, last rune) (remaining string, start, end rune, rangeStep rune) {
+ var r rune
+ var size int
+
+ remaining = str
+ escaping := false
+ isRange := false
+
+ for len(remaining) > 0 {
+ r, size = utf8.DecodeRuneInString(remaining)
+ remaining = remaining[size:]
+
+ // Parse special characters.
+ if !escaping {
+ if r == '\\' {
+ escaping = true
+ continue
+ }
+
+ if r == '-' {
+				// Ignore a dash at the beginning of the string.
+ if last == utf8.RuneError {
+ continue
+ }
+
+ start = last
+ isRange = true
+ continue
+ }
+ }
+
+ escaping = false
+
+ if last != utf8.RuneError {
+			// This is a range whose start and end are the same.
+			// Consider it as a normal character.
+ if isRange && last == r {
+ isRange = false
+ continue
+ }
+
+ start = last
+ end = r
+
+ if isRange {
+ if start < end {
+ rangeStep = 1
+ } else {
+ rangeStep = -1
+ }
+ }
+
+ return
+ }
+
+ last = r
+ }
+
+ start = last
+ end = utf8.RuneError
+ return
+}
+
+// Translate str with a from/to pattern pair.
+//
+// See comment in Translate function for usage and samples.
+func (tr *Translator) Translate(str string) string {
+ if !tr.hasPattern || str == "" {
+ return str
+ }
+
+ var r rune
+ var size int
+ var needTr bool
+
+ orig := str
+
+ var output *stringBuilder
+
+ for len(str) > 0 {
+ r, size = utf8.DecodeRuneInString(str)
+ r, needTr = tr.TranslateRune(r)
+
+ if needTr && output == nil {
+ output = allocBuffer(orig, str)
+ }
+
+ if r != utf8.RuneError && output != nil {
+ output.WriteRune(r)
+ }
+
+ str = str[size:]
+ }
+
+ // No character is translated.
+ if output == nil {
+ return orig
+ }
+
+ return output.String()
+}
+
+// TranslateRune returns the translated rune and true if r matches the from pattern.
+// If r doesn't match the pattern, the original r is returned and translated is false.
+func (tr *Translator) TranslateRune(r rune) (result rune, translated bool) {
+ switch {
+ case tr.quickDict != nil:
+ if r <= unicode.MaxASCII {
+ result = tr.quickDict.Dict[r]
+
+ if result != 0 {
+ translated = true
+
+ if tr.mappedRune >= 0 {
+ result = tr.mappedRune
+ }
+
+ break
+ }
+ }
+
+ fallthrough
+
+ case tr.runeMap != nil:
+ var ok bool
+
+ if result, ok = tr.runeMap[r]; ok {
+ translated = true
+
+ if tr.mappedRune >= 0 {
+ result = tr.mappedRune
+ }
+
+ break
+ }
+
+ fallthrough
+
+ default:
+ var rrm *runeRangeMap
+ ranges := tr.ranges
+
+ for i := len(ranges) - 1; i >= 0; i-- {
+ rrm = ranges[i]
+
+ if rrm.FromLo <= r && r <= rrm.FromHi {
+ translated = true
+
+ if tr.mappedRune >= 0 {
+ result = tr.mappedRune
+ break
+ }
+
+ if rrm.ToLo < rrm.ToHi {
+ result = rrm.ToLo + r - rrm.FromLo
+ } else if rrm.ToLo > rrm.ToHi {
+ // ToHi can be smaller than ToLo if range is from higher to lower.
+ result = rrm.ToLo - r + rrm.FromLo
+ } else {
+ result = rrm.ToLo
+ }
+
+ break
+ }
+ }
+ }
+
+ if tr.reverted {
+ if !translated {
+ result = tr.mappedRune
+ }
+
+ translated = !translated
+ }
+
+ if !translated {
+ result = r
+ }
+
+ return
+}
+
+// HasPattern returns true if the Translator has at least one pattern.
+func (tr *Translator) HasPattern() bool {
+ return tr.hasPattern
+}
+
+// Translate returns a copy of str with the characters defined in from replaced by the characters defined in to.
+//
+// From and to are patterns representing sets of characters. A pattern is defined as follows.
+//
+// * Special characters
+// * '-' means a range of runes, e.g.
+// * "a-z" means all characters from 'a' to 'z' inclusive;
+// * "z-a" means all characters from 'z' to 'a' inclusive.
+// * '^' as first character means a set of all runes excepted listed, e.g.
+// * "^a-z" means all characters except 'a' to 'z' inclusive.
+// * '\' escapes special characters.
+// * Normal character represents itself, e.g. "abc" is a set including 'a', 'b' and 'c'.
+//
+// Translate will try to find a 1:1 mapping from the from pattern to the to pattern.
+// If to is shorter than from, the last rune in to will be used to map "out of range" characters in from.
+//
+// Note that '^' only works in the from pattern. It will be considered as a normal character in the to pattern.
+//
+// If the to pattern is an empty string, Translate works exactly the same as Delete.
+//
+// Samples:
+// Translate("hello", "aeiou", "12345") => "h2ll4"
+// Translate("hello", "a-z", "A-Z") => "HELLO"
+// Translate("hello", "z-a", "a-z") => "svool"
+// Translate("hello", "aeiou", "*") => "h*ll*"
+// Translate("hello", "^l", "*") => "**ll*"
+// Translate("hello ^ world", `\^lo`, "*") => "he*** * w*r*d"
+func Translate(str, from, to string) string {
+ tr := NewTranslator(from, to)
+ return tr.Translate(str)
+}
+
+// Delete runes in str matching the pattern.
+// Pattern is defined in Translate function.
+//
+// Samples:
+// Delete("hello", "aeiou") => "hll"
+// Delete("hello", "a-k") => "llo"
+// Delete("hello", "^a-k") => "he"
+func Delete(str, pattern string) string {
+ tr := NewTranslator(pattern, "")
+ return tr.Translate(str)
+}
+
+// Count how many runes in str match the pattern.
+// Pattern is defined in Translate function.
+//
+// Samples:
+// Count("hello", "aeiou") => 3
+// Count("hello", "a-k") => 3
+// Count("hello", "^a-k") => 2
+func Count(str, pattern string) int {
+ if pattern == "" || str == "" {
+ return 0
+ }
+
+ var r rune
+ var size int
+ var matched bool
+
+ tr := NewTranslator(pattern, "")
+ cnt := 0
+
+ for len(str) > 0 {
+ r, size = utf8.DecodeRuneInString(str)
+ str = str[size:]
+
+ if _, matched = tr.TranslateRune(r); matched {
+ cnt++
+ }
+ }
+
+ return cnt
+}
+
+// Squeeze deletes adjacent repeated runes in str.
+// If pattern is not empty, only runes matching the pattern will be squeezed.
+//
+// Samples:
+// Squeeze("hello", "") => "helo"
+// Squeeze("hello", "m-z") => "hello"
+// Squeeze("hello world", " ") => "hello world"
+func Squeeze(str, pattern string) string {
+ var last, r rune
+ var size int
+ var skipSqueeze, matched bool
+ var tr *Translator
+ var output *stringBuilder
+
+ orig := str
+ last = -1
+
+ if len(pattern) > 0 {
+ tr = NewTranslator(pattern, "")
+ }
+
+ for len(str) > 0 {
+ r, size = utf8.DecodeRuneInString(str)
+
+ // Need to squeeze the str.
+ if last == r && !skipSqueeze {
+ if tr != nil {
+ if _, matched = tr.TranslateRune(r); !matched {
+ skipSqueeze = true
+ }
+ }
+
+ if output == nil {
+ output = allocBuffer(orig, str)
+ }
+
+ if skipSqueeze {
+ output.WriteRune(r)
+ }
+ } else {
+ if output != nil {
+ output.WriteRune(r)
+ }
+
+ last = r
+ skipSqueeze = false
+ }
+
+ str = str[size:]
+ }
+
+ if output == nil {
+ return orig
+ }
+
+ return output.String()
+}
diff --git a/vendor/github.com/imdario/mergo/.deepsource.toml b/vendor/github.com/imdario/mergo/.deepsource.toml
new file mode 100644
index 000000000..8a0681af8
--- /dev/null
+++ b/vendor/github.com/imdario/mergo/.deepsource.toml
@@ -0,0 +1,12 @@
+version = 1
+
+test_patterns = [
+ "*_test.go"
+]
+
+[[analyzers]]
+name = "go"
+enabled = true
+
+ [analyzers.meta]
+ import_path = "github.com/imdario/mergo" \ No newline at end of file
diff --git a/vendor/github.com/imdario/mergo/.gitignore b/vendor/github.com/imdario/mergo/.gitignore
new file mode 100644
index 000000000..529c3412b
--- /dev/null
+++ b/vendor/github.com/imdario/mergo/.gitignore
@@ -0,0 +1,33 @@
+#### joe made this: http://goel.io/joe
+
+#### go ####
+# Binaries for programs and plugins
+*.exe
+*.dll
+*.so
+*.dylib
+
+# Test binary, build with `go test -c`
+*.test
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
+# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736
+.glide/
+
+#### vim ####
+# Swap
+[._]*.s[a-v][a-z]
+[._]*.sw[a-p]
+[._]s[a-v][a-z]
+[._]sw[a-p]
+
+# Session
+Session.vim
+
+# Temporary
+.netrwhist
+*~
+# Auto-generated tag files
+tags
diff --git a/vendor/github.com/imdario/mergo/.travis.yml b/vendor/github.com/imdario/mergo/.travis.yml
new file mode 100644
index 000000000..d324c43ba
--- /dev/null
+++ b/vendor/github.com/imdario/mergo/.travis.yml
@@ -0,0 +1,12 @@
+language: go
+arch:
+ - amd64
+ - ppc64le
+install:
+ - go get -t
+ - go get golang.org/x/tools/cmd/cover
+ - go get github.com/mattn/goveralls
+script:
+ - go test -race -v ./...
+after_script:
+ - $HOME/gopath/bin/goveralls -service=travis-ci -repotoken $COVERALLS_TOKEN
diff --git a/vendor/github.com/imdario/mergo/CODE_OF_CONDUCT.md b/vendor/github.com/imdario/mergo/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..469b44907
--- /dev/null
+++ b/vendor/github.com/imdario/mergo/CODE_OF_CONDUCT.md
@@ -0,0 +1,46 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at i@dario.im. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/imdario/mergo/LICENSE b/vendor/github.com/imdario/mergo/LICENSE
new file mode 100644
index 000000000..686680298
--- /dev/null
+++ b/vendor/github.com/imdario/mergo/LICENSE
@@ -0,0 +1,28 @@
+Copyright (c) 2013 Dario Castañé. All rights reserved.
+Copyright (c) 2012 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/imdario/mergo/README.md b/vendor/github.com/imdario/mergo/README.md
new file mode 100644
index 000000000..aa8cbd7ce
--- /dev/null
+++ b/vendor/github.com/imdario/mergo/README.md
@@ -0,0 +1,247 @@
+# Mergo
+
+
+[![GoDoc][3]][4]
+[![GitHub release][5]][6]
+[![GoCard][7]][8]
+[![Build Status][1]][2]
+[![Coverage Status][9]][10]
+[![Sourcegraph][11]][12]
+[![FOSSA Status][13]][14]
+
+[![GoCenter Kudos][15]][16]
+
+[1]: https://travis-ci.org/imdario/mergo.png
+[2]: https://travis-ci.org/imdario/mergo
+[3]: https://godoc.org/github.com/imdario/mergo?status.svg
+[4]: https://godoc.org/github.com/imdario/mergo
+[5]: https://img.shields.io/github/release/imdario/mergo.svg
+[6]: https://github.com/imdario/mergo/releases
+[7]: https://goreportcard.com/badge/imdario/mergo
+[8]: https://goreportcard.com/report/github.com/imdario/mergo
+[9]: https://coveralls.io/repos/github/imdario/mergo/badge.svg?branch=master
+[10]: https://coveralls.io/github/imdario/mergo?branch=master
+[11]: https://sourcegraph.com/github.com/imdario/mergo/-/badge.svg
+[12]: https://sourcegraph.com/github.com/imdario/mergo?badge
+[13]: https://app.fossa.io/api/projects/git%2Bgithub.com%2Fimdario%2Fmergo.svg?type=shield
+[14]: https://app.fossa.io/projects/git%2Bgithub.com%2Fimdario%2Fmergo?ref=badge_shield
+[15]: https://search.gocenter.io/api/ui/badge/github.com%2Fimdario%2Fmergo
+[16]: https://search.gocenter.io/github.com/imdario/mergo
+
+A helper to merge structs and maps in Golang. Useful for configuration default values, avoiding messy if-statements.
+
+Mergo merges same-type structs and maps by setting default values in zero-value fields. Mergo won't merge unexported (private) fields; it merges exported fields recursively. It also won't merge structs inside maps (because they are not addressable using Go reflection).
+
+Also a lovely [comune](http://en.wikipedia.org/wiki/Mergo) (municipality) in the Province of Ancona in the Italian region of Marche.
+
+## Status
+
+It is ready for production use. [It is used in several projects by Docker, Google, The Linux Foundation, VMWare, Shopify, etc](https://github.com/imdario/mergo#mergo-in-the-wild).
+
+### Important note
+
+Please keep in mind that a problematic PR broke [0.3.9](//github.com/imdario/mergo/releases/tag/0.3.9). I reverted it in [0.3.10](//github.com/imdario/mergo/releases/tag/0.3.10), and I consider it stable but not bug-free. Also, this version adds support for go modules.
+
+Keep in mind that in [0.3.2](//github.com/imdario/mergo/releases/tag/0.3.2), Mergo changed `Merge()`and `Map()` signatures to support [transformers](#transformers). I added an optional/variadic argument so that it won't break the existing code.
+
+If you were using Mergo before April 6th, 2015, please check that your project works as intended after updating your local copy with ```go get -u github.com/imdario/mergo```. I apologize for any issue caused by its previous behavior and any future bug that Mergo could cause in existing projects after the change (release 0.2.0).
+
+### Donations
+
+If Mergo is useful to you, consider buying me a coffee, a beer, or making a monthly donation to allow me to keep building great free software. :heart_eyes:
+
+<a href='https://ko-fi.com/B0B58839' target='_blank'><img height='36' style='border:0px;height:36px;' src='https://az743702.vo.msecnd.net/cdn/kofi1.png?v=0' border='0' alt='Buy Me a Coffee at ko-fi.com' /></a>
+[![Beerpay](https://beerpay.io/imdario/mergo/badge.svg)](https://beerpay.io/imdario/mergo)
+[![Beerpay](https://beerpay.io/imdario/mergo/make-wish.svg)](https://beerpay.io/imdario/mergo)
+<a href="https://liberapay.com/dario/donate"><img alt="Donate using Liberapay" src="https://liberapay.com/assets/widgets/donate.svg"></a>
+
+### Mergo in the wild
+
+- [moby/moby](https://github.com/moby/moby)
+- [kubernetes/kubernetes](https://github.com/kubernetes/kubernetes)
+- [vmware/dispatch](https://github.com/vmware/dispatch)
+- [Shopify/themekit](https://github.com/Shopify/themekit)
+- [imdario/zas](https://github.com/imdario/zas)
+- [matcornic/hermes](https://github.com/matcornic/hermes)
+- [OpenBazaar/openbazaar-go](https://github.com/OpenBazaar/openbazaar-go)
+- [kataras/iris](https://github.com/kataras/iris)
+- [michaelsauter/crane](https://github.com/michaelsauter/crane)
+- [go-task/task](https://github.com/go-task/task)
+- [sensu/uchiwa](https://github.com/sensu/uchiwa)
+- [ory/hydra](https://github.com/ory/hydra)
+- [sisatech/vcli](https://github.com/sisatech/vcli)
+- [dairycart/dairycart](https://github.com/dairycart/dairycart)
+- [projectcalico/felix](https://github.com/projectcalico/felix)
+- [resin-os/balena](https://github.com/resin-os/balena)
+- [go-kivik/kivik](https://github.com/go-kivik/kivik)
+- [Telefonica/govice](https://github.com/Telefonica/govice)
+- [supergiant/supergiant](https://github.com/supergiant/supergiant)
+- [SergeyTsalkov/brooce](https://github.com/SergeyTsalkov/brooce)
+- [soniah/dnsmadeeasy](https://github.com/soniah/dnsmadeeasy)
+- [ohsu-comp-bio/funnel](https://github.com/ohsu-comp-bio/funnel)
+- [EagerIO/Stout](https://github.com/EagerIO/Stout)
+- [lynndylanhurley/defsynth-api](https://github.com/lynndylanhurley/defsynth-api)
+- [russross/canvasassignments](https://github.com/russross/canvasassignments)
+- [rdegges/cryptly-api](https://github.com/rdegges/cryptly-api)
+- [casualjim/exeggutor](https://github.com/casualjim/exeggutor)
+- [divshot/gitling](https://github.com/divshot/gitling)
+- [RWJMurphy/gorl](https://github.com/RWJMurphy/gorl)
+- [andrerocker/deploy42](https://github.com/andrerocker/deploy42)
+- [elwinar/rambler](https://github.com/elwinar/rambler)
+- [tmaiaroto/gopartman](https://github.com/tmaiaroto/gopartman)
+- [jfbus/impressionist](https://github.com/jfbus/impressionist)
+- [Jmeyering/zealot](https://github.com/Jmeyering/zealot)
+- [godep-migrator/rigger-host](https://github.com/godep-migrator/rigger-host)
+- [Dronevery/MultiwaySwitch-Go](https://github.com/Dronevery/MultiwaySwitch-Go)
+- [thoas/picfit](https://github.com/thoas/picfit)
+- [mantasmatelis/whooplist-server](https://github.com/mantasmatelis/whooplist-server)
+- [jnuthong/item_search](https://github.com/jnuthong/item_search)
+- [bukalapak/snowboard](https://github.com/bukalapak/snowboard)
+- [containerssh/containerssh](https://github.com/containerssh/containerssh)
+
+## Install
+
+ go get github.com/imdario/mergo
+
+ // use in your .go code
+ import (
+ "github.com/imdario/mergo"
+ )
+
+## Usage
+
+You can only merge same-type structs whose exported fields are initialized to the zero value of their type, and same-type maps. Mergo won't merge unexported (private) fields, but it merges exported fields recursively. It won't merge empty struct values, as [they are zero values](https://golang.org/ref/spec#The_zero_value) too. Also, maps will be merged recursively, except for structs inside maps (because they are not addressable using Go reflection).
+
+```go
+if err := mergo.Merge(&dst, src); err != nil {
+ // ...
+}
+```
+
+Also, you can merge overwriting values using the option `WithOverride`.
+
+```go
+if err := mergo.Merge(&dst, src, mergo.WithOverride); err != nil {
+ // ...
+}
+```
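+
+For instance, here is a minimal sketch of the difference (the `Config` type and its values are illustrative, not part of Mergo itself):
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/imdario/mergo"
+)
+
+type Config struct {
+	Host string
+	Port int
+}
+
+func main() {
+	dst := Config{Host: "localhost"}
+	src := Config{Host: "example.com", Port: 8080}
+
+	// With WithOverride, non-empty src fields replace non-empty dst fields;
+	// without it, only zero-value dst fields would be filled in.
+	if err := mergo.Merge(&dst, src, mergo.WithOverride); err != nil {
+		panic(err)
+	}
+
+	fmt.Println(dst) // {example.com 8080}
+}
+```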
+
+Additionally, you can map a `map[string]interface{}` to a struct (and vice versa, from struct to map), following the same restrictions as in `Merge()`. Keys are capitalized to find each corresponding exported field.
+
+```go
+if err := mergo.Map(&dst, srcMap); err != nil {
+ // ...
+}
+```
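+
+As a small sketch (the map keys and struct fields here are illustrative, not part of Mergo's API):
+
+```go
+srcMap := map[string]interface{}{"Name": "Alice", "Age": 30}
+
+var dst struct {
+	Name string
+	Age  int
+}
+
+// Each key is matched to the exported struct field of the same (capitalized) name.
+if err := mergo.Map(&dst, srcMap); err != nil {
+	// ...
+}
+// dst is now {Alice 30}
+```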
+
+Warning: if you map a struct to a map, it won't be done recursively. Don't expect Mergo to map struct members of your struct as `map[string]interface{}`. They will just be assigned as values.
+
+Here is a nice example:
+
+```go
+package main
+
+import (
+ "fmt"
+ "github.com/imdario/mergo"
+)
+
+type Foo struct {
+ A string
+ B int64
+}
+
+func main() {
+ src := Foo{
+ A: "one",
+ B: 2,
+ }
+ dest := Foo{
+ A: "two",
+ }
+ mergo.Merge(&dest, src)
+ fmt.Println(dest)
+ // Will print
+ // {two 2}
+}
+```
+
+Note: if tests are failing due to a missing package, please execute:
+
+ go get gopkg.in/yaml.v2
+
+### Transformers
+
+Transformers allow merging specific types differently from the default behavior. In other words, you can customize how some types are merged. For example, `time.Time` is a struct; it has no simple zero value, but its `IsZero` method can return true because all its fields hold zero values. How can we merge a non-zero `time.Time`?
+
+```go
+package main
+
+import (
+ "fmt"
+ "github.com/imdario/mergo"
+ "reflect"
+ "time"
+)
+
+type timeTransformer struct {
+}
+
+func (t timeTransformer) Transformer(typ reflect.Type) func(dst, src reflect.Value) error {
+ if typ == reflect.TypeOf(time.Time{}) {
+ return func(dst, src reflect.Value) error {
+ if dst.CanSet() {
+ isZero := dst.MethodByName("IsZero")
+ result := isZero.Call([]reflect.Value{})
+ if result[0].Bool() {
+ dst.Set(src)
+ }
+ }
+ return nil
+ }
+ }
+ return nil
+}
+
+type Snapshot struct {
+ Time time.Time
+ // ...
+}
+
+func main() {
+ src := Snapshot{time.Now()}
+ dest := Snapshot{}
+ mergo.Merge(&dest, src, mergo.WithTransformers(timeTransformer{}))
+ fmt.Println(dest)
+ // Will print
+ // { 2018-01-12 01:15:00 +0000 UTC m=+0.000000001 }
+}
+```
+
+
+## Contact me
+
+If I can help you, if you have an idea, or if you are using Mergo in your projects, don't hesitate to drop me a line (or a pull request): [@im_dario](https://twitter.com/im_dario)
+
+## About
+
+Written by [Dario Castañé](http://dario.im).
+
+## Top Contributors
+
+[![0](https://sourcerer.io/fame/imdario/imdario/mergo/images/0)](https://sourcerer.io/fame/imdario/imdario/mergo/links/0)
+[![1](https://sourcerer.io/fame/imdario/imdario/mergo/images/1)](https://sourcerer.io/fame/imdario/imdario/mergo/links/1)
+[![2](https://sourcerer.io/fame/imdario/imdario/mergo/images/2)](https://sourcerer.io/fame/imdario/imdario/mergo/links/2)
+[![3](https://sourcerer.io/fame/imdario/imdario/mergo/images/3)](https://sourcerer.io/fame/imdario/imdario/mergo/links/3)
+[![4](https://sourcerer.io/fame/imdario/imdario/mergo/images/4)](https://sourcerer.io/fame/imdario/imdario/mergo/links/4)
+[![5](https://sourcerer.io/fame/imdario/imdario/mergo/images/5)](https://sourcerer.io/fame/imdario/imdario/mergo/links/5)
+[![6](https://sourcerer.io/fame/imdario/imdario/mergo/images/6)](https://sourcerer.io/fame/imdario/imdario/mergo/links/6)
+[![7](https://sourcerer.io/fame/imdario/imdario/mergo/images/7)](https://sourcerer.io/fame/imdario/imdario/mergo/links/7)
+
+
+## License
+
+[BSD 3-Clause](http://opensource.org/licenses/BSD-3-Clause) license, the same as the [Go language](http://golang.org/LICENSE).
+
+
+[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fimdario%2Fmergo.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fimdario%2Fmergo?ref=badge_large)
diff --git a/vendor/github.com/imdario/mergo/doc.go b/vendor/github.com/imdario/mergo/doc.go
new file mode 100644
index 000000000..fcd985f99
--- /dev/null
+++ b/vendor/github.com/imdario/mergo/doc.go
@@ -0,0 +1,143 @@
+// Copyright 2013 Dario Castañé. All rights reserved.
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+A helper to merge structs and maps in Golang. Useful for configuration default values, avoiding messy if-statements.
+
+Mergo merges same-type structs and maps by setting default values in zero-value fields. Mergo won't merge unexported (private) fields; it merges exported fields recursively. It also won't merge structs inside maps (because they are not addressable using Go reflection).
+
+Status
+
+It is ready for production use. It is used in several projects by Docker, Google, The Linux Foundation, VMWare, Shopify, etc.
+
+Important note
+
+Please keep in mind that a problematic PR broke 0.3.9. We reverted it in 0.3.10. We consider 0.3.10 stable but not bug-free. Also, this version adds support for go modules.
+
+Keep in mind that in 0.3.2, Mergo changed Merge() and Map() signatures to support transformers. We added an optional/variadic argument so that it won't break the existing code.
+
+If you were using Mergo before April 6th, 2015, please check that your project works as intended after updating your local copy with go get -u github.com/imdario/mergo. I apologize for any issue caused by its previous behavior and any future bug that Mergo could cause in existing projects after the change (release 0.2.0).
+
+Install
+
+Do your usual installation procedure:
+
+ go get github.com/imdario/mergo
+
+ // use in your .go code
+ import (
+ "github.com/imdario/mergo"
+ )
+
+Usage
+
+You can only merge same-type structs whose exported fields are initialized to the zero value of their type, and same-type maps. Mergo won't merge unexported (private) fields, but it merges exported fields recursively. It won't merge empty struct values as they are zero values too. Also, maps will be merged recursively, except for structs inside maps (because they are not addressable using Go reflection).
+
+ if err := mergo.Merge(&dst, src); err != nil {
+ // ...
+ }
+
+Also, you can merge overwriting values using the option WithOverride.
+
+ if err := mergo.Merge(&dst, src, mergo.WithOverride); err != nil {
+ // ...
+ }
+
+Additionally, you can map a map[string]interface{} to a struct (and vice versa, from struct to map), following the same restrictions as in Merge(). Keys are capitalized to find each corresponding exported field.
+
+ if err := mergo.Map(&dst, srcMap); err != nil {
+ // ...
+ }
+
+Warning: if you map a struct to a map, it won't be done recursively. Don't expect Mergo to map struct members of your struct as map[string]interface{}. They will just be assigned as values.
+
+Here is a nice example:
+
+ package main
+
+ import (
+ "fmt"
+ "github.com/imdario/mergo"
+ )
+
+ type Foo struct {
+ A string
+ B int64
+ }
+
+ func main() {
+ src := Foo{
+ A: "one",
+ B: 2,
+ }
+ dest := Foo{
+ A: "two",
+ }
+ mergo.Merge(&dest, src)
+ fmt.Println(dest)
+ // Will print
+ // {two 2}
+ }
+
+Transformers
+
+Transformers allow merging specific types differently from the default behavior. In other words, you can customize how some types are merged. For example, time.Time is a struct; it doesn't have a zero value as a whole, but IsZero can return true because its fields hold zero values. How can we merge a non-zero time.Time?
+
+ package main
+
+ import (
+ "fmt"
+ "github.com/imdario/mergo"
+ "reflect"
+ "time"
+ )
+
+ type timeTransformer struct {
+ }
+
+ func (t timeTransformer) Transformer(typ reflect.Type) func(dst, src reflect.Value) error {
+ if typ == reflect.TypeOf(time.Time{}) {
+ return func(dst, src reflect.Value) error {
+ if dst.CanSet() {
+ isZero := dst.MethodByName("IsZero")
+ result := isZero.Call([]reflect.Value{})
+ if result[0].Bool() {
+ dst.Set(src)
+ }
+ }
+ return nil
+ }
+ }
+ return nil
+ }
+
+ type Snapshot struct {
+ Time time.Time
+ // ...
+ }
+
+ func main() {
+ src := Snapshot{time.Now()}
+ dest := Snapshot{}
+ mergo.Merge(&dest, src, mergo.WithTransformers(timeTransformer{}))
+ fmt.Println(dest)
+ // Will print
+ // { 2018-01-12 01:15:00 +0000 UTC m=+0.000000001 }
+ }
+
+Contact me
+
+If I can help you, if you have an idea, or if you are using Mergo in your projects, don't hesitate to drop me a line (or a pull request): https://twitter.com/im_dario
+
+About
+
+Written by Dario Castañé: https://da.rio.hn
+
+License
+
+BSD 3-Clause license, the same as the Go language.
+
+*/
+package mergo
diff --git a/vendor/github.com/imdario/mergo/map.go b/vendor/github.com/imdario/mergo/map.go
new file mode 100644
index 000000000..a13a7ee46
--- /dev/null
+++ b/vendor/github.com/imdario/mergo/map.go
@@ -0,0 +1,193 @@
+// Copyright 2014 Dario Castañé. All rights reserved.
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Based on src/pkg/reflect/deepequal.go from official
+// golang's stdlib.
+
+package mergo
+
+import (
+ "fmt"
+ "reflect"
+ "unicode"
+ "unicode/utf8"
+)
+
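+// changeInitialCase returns s with its first rune transformed by mapper
+// (e.g. unicode.ToLower), leaving the rest of the string untouched.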
+func changeInitialCase(s string, mapper func(rune) rune) string {
+ if s == "" {
+ return s
+ }
+ r, n := utf8.DecodeRuneInString(s)
+ return string(mapper(r)) + s[n:]
+}
+
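+// isExported reports whether the struct field is exported, i.e. its name
+// starts with an ASCII upper-case letter.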
+func isExported(field reflect.StructField) bool {
+ r, _ := utf8.DecodeRuneInString(field.Name)
+ return r >= 'A' && r <= 'Z'
+}
+
+// deepMap recursively traverses both values, assigning src's field values to dst.
+// The map argument tracks comparisons that have already been seen, which allows
+// short circuiting on recursive types.
+func deepMap(dst, src reflect.Value, visited map[uintptr]*visit, depth int, config *Config) (err error) {
+ overwrite := config.Overwrite
+ if dst.CanAddr() {
+ addr := dst.UnsafeAddr()
+ h := 17 * addr
+ seen := visited[h]
+ typ := dst.Type()
+ for p := seen; p != nil; p = p.next {
+ if p.ptr == addr && p.typ == typ {
+ return nil
+ }
+ }
+ // Remember, remember...
+ visited[h] = &visit{addr, typ, seen}
+ }
+ zeroValue := reflect.Value{}
+ switch dst.Kind() {
+ case reflect.Map:
+ dstMap := dst.Interface().(map[string]interface{})
+ for i, n := 0, src.NumField(); i < n; i++ {
+ srcType := src.Type()
+ field := srcType.Field(i)
+ if !isExported(field) {
+ continue
+ }
+ fieldName := field.Name
+ fieldName = changeInitialCase(fieldName, unicode.ToLower)
+ if v, ok := dstMap[fieldName]; !ok || (isEmptyValue(reflect.ValueOf(v)) || overwrite) {
+ dstMap[fieldName] = src.Field(i).Interface()
+ }
+ }
+ case reflect.Ptr:
+ if dst.IsNil() {
+ v := reflect.New(dst.Type().Elem())
+ dst.Set(v)
+ }
+ dst = dst.Elem()
+ fallthrough
+ case reflect.Struct:
+ srcMap := src.Interface().(map[string]interface{})
+ for key := range srcMap {
+ config.overwriteWithEmptyValue = true
+ srcValue := srcMap[key]
+ fieldName := changeInitialCase(key, unicode.ToUpper)
+ dstElement := dst.FieldByName(fieldName)
+ if dstElement == zeroValue {
+ // We discard it because the field doesn't exist.
+ continue
+ }
+ srcElement := reflect.ValueOf(srcValue)
+ dstKind := dstElement.Kind()
+ srcKind := srcElement.Kind()
+ if srcKind == reflect.Ptr && dstKind != reflect.Ptr {
+ srcElement = srcElement.Elem()
+ srcKind = reflect.TypeOf(srcElement.Interface()).Kind()
+ } else if dstKind == reflect.Ptr {
+ // Can this work? I guess it can't.
+ if srcKind != reflect.Ptr && srcElement.CanAddr() {
+ srcPtr := srcElement.Addr()
+ srcElement = reflect.ValueOf(srcPtr)
+ srcKind = reflect.Ptr
+ }
+ }
+
+ if !srcElement.IsValid() {
+ continue
+ }
+ if srcKind == dstKind {
+ if err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil {
+ return
+ }
+ } else if dstKind == reflect.Interface && dstElement.Kind() == reflect.Interface {
+ if err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil {
+ return
+ }
+ } else if srcKind == reflect.Map {
+ if err = deepMap(dstElement, srcElement, visited, depth+1, config); err != nil {
+ return
+ }
+ } else {
+ return fmt.Errorf("type mismatch on %s field: found %v, expected %v", fieldName, srcKind, dstKind)
+ }
+ }
+ }
+ return
+}
+
+// Map sets fields' values in dst from src.
+// src can be a map with string keys or a struct. dst must be the opposite:
+// if src is a map, dst must be a valid pointer to a struct. If src is a struct,
+// dst must be a map[string]interface{}.
+// It won't merge unexported (private) fields and will merge exported
+// fields recursively.
+// If dst is a map, its keys will be the src fields' names in lower camel case.
+// A key in src that doesn't match a field in dst will be skipped. This
+// doesn't apply if dst is a map.
+// This is a separate method from Merge because it is cleaner and it keeps sane
+// semantics: merging equal types, mapping different (restricted) types.
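+//
+// A minimal illustrative sketch (the person type and its map here are
+// hypothetical, not part of Mergo):
+//
+//	type person struct{ Name string }
+//	var p person
+//	// the "name" key is capitalized to match the exported Name field
+//	if err := Map(&p, map[string]interface{}{"name": "gopher"}); err != nil {
+//		// handle error
+//	}
+//	// p.Name == "gopher"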
+func Map(dst, src interface{}, opts ...func(*Config)) error {
+ return _map(dst, src, opts...)
+}
+
+// MapWithOverwrite will do the same as Map except that non-empty dst attributes will be overridden by
+// non-empty src attribute values.
+// Deprecated: Use Map(…) with WithOverride
+func MapWithOverwrite(dst, src interface{}, opts ...func(*Config)) error {
+ return _map(dst, src, append(opts, WithOverride)...)
+}
+
+func _map(dst, src interface{}, opts ...func(*Config)) error {
+ if dst != nil && reflect.ValueOf(dst).Kind() != reflect.Ptr {
+ return ErrNonPointerAgument
+ }
+ var (
+ vDst, vSrc reflect.Value
+ err error
+ )
+ config := &Config{}
+
+ for _, opt := range opts {
+ opt(config)
+ }
+
+ if vDst, vSrc, err = resolveValues(dst, src); err != nil {
+ return err
+ }
+	// To be frictionless, we redirect equal-type arguments
+	// to deepMerge, since the arguments can be anything.
+ if vSrc.Kind() == vDst.Kind() {
+ return deepMerge(vDst, vSrc, make(map[uintptr]*visit), 0, config)
+ }
+ switch vSrc.Kind() {
+ case reflect.Struct:
+ if vDst.Kind() != reflect.Map {
+ return ErrExpectedMapAsDestination
+ }
+ case reflect.Map:
+ if vDst.Kind() != reflect.Struct {
+ return ErrExpectedStructAsDestination
+ }
+ default:
+ return ErrNotSupported
+ }
+ return deepMap(vDst, vSrc, make(map[uintptr]*visit), 0, config)
+}
diff --git a/vendor/github.com/imdario/mergo/merge.go b/vendor/github.com/imdario/mergo/merge.go
new file mode 100644
index 000000000..8c2a8fcd9
--- /dev/null
+++ b/vendor/github.com/imdario/mergo/merge.go
@@ -0,0 +1,390 @@
+// Copyright 2013 Dario Castañé. All rights reserved.
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Based on src/pkg/reflect/deepequal.go from official
+// golang's stdlib.
+
+package mergo
+
+import (
+ "fmt"
+ "reflect"
+)
+
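+// hasMergeableFields reports whether dst has at least one exported field,
+// either directly or inside an embedded struct, that merging could touch.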
+func hasMergeableFields(dst reflect.Value) (exported bool) {
+ for i, n := 0, dst.NumField(); i < n; i++ {
+ field := dst.Type().Field(i)
+ if field.Anonymous && dst.Field(i).Kind() == reflect.Struct {
+ exported = exported || hasMergeableFields(dst.Field(i))
+ } else if isExportedComponent(&field) {
+ exported = exported || len(field.PkgPath) == 0
+ }
+ }
+ return
+}
+
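+// isExportedComponent reports whether the struct field is exported: its
+// PkgPath is empty and its name doesn't start with a lower-case letter or '_'.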
+func isExportedComponent(field *reflect.StructField) bool {
+ pkgPath := field.PkgPath
+ if len(pkgPath) > 0 {
+ return false
+ }
+ c := field.Name[0]
+ if 'a' <= c && c <= 'z' || c == '_' {
+ return false
+ }
+ return true
+}
+
+type Config struct {
+ Overwrite bool
+ AppendSlice bool
+ TypeCheck bool
+ Transformers Transformers
+ overwriteWithEmptyValue bool
+ overwriteSliceWithEmptyValue bool
+ sliceDeepCopy bool
+ debug bool
+}
+
+type Transformers interface {
+ Transformer(reflect.Type) func(dst, src reflect.Value) error
+}
+
+// deepMerge recursively traverses both values, assigning src's field values to dst.
+// The map argument tracks comparisons that have already been seen, which allows
+// short circuiting on recursive types.
+func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, config *Config) (err error) {
+ overwrite := config.Overwrite
+ typeCheck := config.TypeCheck
+ overwriteWithEmptySrc := config.overwriteWithEmptyValue
+ overwriteSliceWithEmptySrc := config.overwriteSliceWithEmptyValue
+ sliceDeepCopy := config.sliceDeepCopy
+
+ if !src.IsValid() {
+ return
+ }
+ if dst.CanAddr() {
+ addr := dst.UnsafeAddr()
+ h := 17 * addr
+ seen := visited[h]
+ typ := dst.Type()
+ for p := seen; p != nil; p = p.next {
+ if p.ptr == addr && p.typ == typ {
+ return nil
+ }
+ }
+ // Remember, remember...
+ visited[h] = &visit{addr, typ, seen}
+ }
+
+ if config.Transformers != nil && !isEmptyValue(dst) {
+ if fn := config.Transformers.Transformer(dst.Type()); fn != nil {
+ err = fn(dst, src)
+ return
+ }
+ }
+
+ switch dst.Kind() {
+ case reflect.Struct:
+ if hasMergeableFields(dst) {
+ for i, n := 0, dst.NumField(); i < n; i++ {
+ if err = deepMerge(dst.Field(i), src.Field(i), visited, depth+1, config); err != nil {
+ return
+ }
+ }
+ } else {
+ if dst.CanSet() && (isReflectNil(dst) || overwrite) && (!isEmptyValue(src) || overwriteWithEmptySrc) {
+ dst.Set(src)
+ }
+ }
+ case reflect.Map:
+ if dst.IsNil() && !src.IsNil() {
+ if dst.CanSet() {
+ dst.Set(reflect.MakeMap(dst.Type()))
+ } else {
+ dst = src
+ return
+ }
+ }
+
+ if src.Kind() != reflect.Map {
+ if overwrite {
+ dst.Set(src)
+ }
+ return
+ }
+
+ for _, key := range src.MapKeys() {
+ srcElement := src.MapIndex(key)
+ if !srcElement.IsValid() {
+ continue
+ }
+ dstElement := dst.MapIndex(key)
+ switch srcElement.Kind() {
+ case reflect.Chan, reflect.Func, reflect.Map, reflect.Interface, reflect.Slice:
+ if srcElement.IsNil() {
+ if overwrite {
+ dst.SetMapIndex(key, srcElement)
+ }
+ continue
+ }
+ fallthrough
+ default:
+ if !srcElement.CanInterface() {
+ continue
+ }
+ switch reflect.TypeOf(srcElement.Interface()).Kind() {
+ case reflect.Struct:
+ fallthrough
+ case reflect.Ptr:
+ fallthrough
+ case reflect.Map:
+ srcMapElm := srcElement
+ dstMapElm := dstElement
+ if srcMapElm.CanInterface() {
+ srcMapElm = reflect.ValueOf(srcMapElm.Interface())
+ if dstMapElm.IsValid() {
+ dstMapElm = reflect.ValueOf(dstMapElm.Interface())
+ }
+ }
+ if err = deepMerge(dstMapElm, srcMapElm, visited, depth+1, config); err != nil {
+ return
+ }
+ case reflect.Slice:
+ srcSlice := reflect.ValueOf(srcElement.Interface())
+
+ var dstSlice reflect.Value
+ if !dstElement.IsValid() || dstElement.IsNil() {
+ dstSlice = reflect.MakeSlice(srcSlice.Type(), 0, srcSlice.Len())
+ } else {
+ dstSlice = reflect.ValueOf(dstElement.Interface())
+ }
+
+ if (!isEmptyValue(src) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst)) && !config.AppendSlice && !sliceDeepCopy {
+ if typeCheck && srcSlice.Type() != dstSlice.Type() {
+ return fmt.Errorf("cannot override two slices with different type (%s, %s)", srcSlice.Type(), dstSlice.Type())
+ }
+ dstSlice = srcSlice
+ } else if config.AppendSlice {
+ if srcSlice.Type() != dstSlice.Type() {
+ return fmt.Errorf("cannot append two slices with different type (%s, %s)", srcSlice.Type(), dstSlice.Type())
+ }
+ dstSlice = reflect.AppendSlice(dstSlice, srcSlice)
+ } else if sliceDeepCopy {
+ i := 0
+ for ; i < srcSlice.Len() && i < dstSlice.Len(); i++ {
+ srcElement := srcSlice.Index(i)
+ dstElement := dstSlice.Index(i)
+
+ if srcElement.CanInterface() {
+ srcElement = reflect.ValueOf(srcElement.Interface())
+ }
+ if dstElement.CanInterface() {
+ dstElement = reflect.ValueOf(dstElement.Interface())
+ }
+
+ if err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil {
+ return
+ }
+ }
+
+ }
+ dst.SetMapIndex(key, dstSlice)
+ }
+ }
+ if dstElement.IsValid() && !isEmptyValue(dstElement) && (reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Map || reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Slice) {
+ continue
+ }
+
+ if srcElement.IsValid() && ((srcElement.Kind() != reflect.Ptr && overwrite) || !dstElement.IsValid() || isEmptyValue(dstElement)) {
+ if dst.IsNil() {
+ dst.Set(reflect.MakeMap(dst.Type()))
+ }
+ dst.SetMapIndex(key, srcElement)
+ }
+ }
+ case reflect.Slice:
+ if !dst.CanSet() {
+ break
+ }
+ if (!isEmptyValue(src) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst)) && !config.AppendSlice && !sliceDeepCopy {
+ dst.Set(src)
+ } else if config.AppendSlice {
+ if src.Type() != dst.Type() {
+ return fmt.Errorf("cannot append two slice with different type (%s, %s)", src.Type(), dst.Type())
+ }
+ dst.Set(reflect.AppendSlice(dst, src))
+ } else if sliceDeepCopy {
+ for i := 0; i < src.Len() && i < dst.Len(); i++ {
+ srcElement := src.Index(i)
+ dstElement := dst.Index(i)
+ if srcElement.CanInterface() {
+ srcElement = reflect.ValueOf(srcElement.Interface())
+ }
+ if dstElement.CanInterface() {
+ dstElement = reflect.ValueOf(dstElement.Interface())
+ }
+
+ if err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil {
+ return
+ }
+ }
+ }
+ case reflect.Ptr:
+ fallthrough
+ case reflect.Interface:
+ if isReflectNil(src) {
+ if overwriteWithEmptySrc && dst.CanSet() && src.Type().AssignableTo(dst.Type()) {
+ dst.Set(src)
+ }
+ break
+ }
+
+ if src.Kind() != reflect.Interface {
+ if dst.IsNil() || (src.Kind() != reflect.Ptr && overwrite) {
+ if dst.CanSet() && (overwrite || isEmptyValue(dst)) {
+ dst.Set(src)
+ }
+ } else if src.Kind() == reflect.Ptr {
+ if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, config); err != nil {
+ return
+ }
+ } else if dst.Elem().Type() == src.Type() {
+ if err = deepMerge(dst.Elem(), src, visited, depth+1, config); err != nil {
+ return
+ }
+ } else {
+ return ErrDifferentArgumentsTypes
+ }
+ break
+ }
+
+ if dst.IsNil() || overwrite {
+ if dst.CanSet() && (overwrite || isEmptyValue(dst)) {
+ dst.Set(src)
+ }
+ break
+ }
+
+ if dst.Elem().Kind() == src.Elem().Kind() {
+ if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, config); err != nil {
+ return
+ }
+ break
+ }
+ default:
+ mustSet := (isEmptyValue(dst) || overwrite) && (!isEmptyValue(src) || overwriteWithEmptySrc)
+ if mustSet {
+ if dst.CanSet() {
+ dst.Set(src)
+ } else {
+ dst = src
+ }
+ }
+ }
+
+ return
+}
+
+// Merge will fill any empty value-type attributes on the dst struct using the
+// corresponding src attributes, if they themselves are not empty. dst and src must
+// be valid structs of the same type, and dst must be a pointer to a struct.
+// It won't merge unexported (private) fields and will merge exported fields recursively.
+func Merge(dst, src interface{}, opts ...func(*Config)) error {
+ return merge(dst, src, opts...)
+}
+
+// MergeWithOverwrite will do the same as Merge except that non-empty dst attributes will be overridden by
+// non-empty src attribute values.
+// Deprecated: use Merge(…) with WithOverride
+func MergeWithOverwrite(dst, src interface{}, opts ...func(*Config)) error {
+ return merge(dst, src, append(opts, WithOverride)...)
+}
+
+// WithTransformers adds transformers to merge, allowing you to customize how some types are merged.
+func WithTransformers(transformers Transformers) func(*Config) {
+ return func(config *Config) {
+ config.Transformers = transformers
+ }
+}
+
+// WithOverride will make merge override non-empty dst attributes with non-empty src attribute values.
+func WithOverride(config *Config) {
+ config.Overwrite = true
+}
+
+// WithOverwriteWithEmptyValue will make merge override non-empty dst attributes with empty src attribute values.
+func WithOverwriteWithEmptyValue(config *Config) {
+ config.Overwrite = true
+ config.overwriteWithEmptyValue = true
+}
+
+// WithOverrideEmptySlice will make merge override an empty dst slice with an empty src slice.
+func WithOverrideEmptySlice(config *Config) {
+ config.overwriteSliceWithEmptyValue = true
+}
+
+// WithAppendSlice will make merge append slices instead of overwriting them.
+func WithAppendSlice(config *Config) {
+ config.AppendSlice = true
+}
+
+// WithTypeCheck will make merge check types while overwriting (it must be used with WithOverride).
+func WithTypeCheck(config *Config) {
+ config.TypeCheck = true
+}
+
+// WithSliceDeepCopy will merge slice elements one by one, setting the Overwrite flag.
+func WithSliceDeepCopy(config *Config) {
+ config.sliceDeepCopy = true
+ config.Overwrite = true
+}
+
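+// As an illustrative sketch (dst and src stand for any same-type structs),
+// the functional options above can be combined, e.g. to overwrite non-empty
+// destination fields while appending slice contents:
+//
+//	err := Merge(&dst, src, WithOverride, WithAppendSlice)
+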
+func merge(dst, src interface{}, opts ...func(*Config)) error {
+ if dst != nil && reflect.ValueOf(dst).Kind() != reflect.Ptr {
+ return ErrNonPointerAgument
+ }
+ var (
+ vDst, vSrc reflect.Value
+ err error
+ )
+
+ config := &Config{}
+
+ for _, opt := range opts {
+ opt(config)
+ }
+
+ if vDst, vSrc, err = resolveValues(dst, src); err != nil {
+ return err
+ }
+ if vDst.Type() != vSrc.Type() {
+ return ErrDifferentArgumentsTypes
+ }
+ return deepMerge(vDst, vSrc, make(map[uintptr]*visit), 0, config)
+}
+
+// isReflectNil reports whether the provided reflect value is nil.
+func isReflectNil(v reflect.Value) bool {
+ k := v.Kind()
+ switch k {
+ case reflect.Interface, reflect.Slice, reflect.Chan, reflect.Func, reflect.Map, reflect.Ptr:
+ // Both interface and slice are nil if first word is 0.
+ // Both are always bigger than a word; assume flagIndir.
+ return v.IsNil()
+ default:
+ return false
+ }
+}
diff --git a/vendor/github.com/imdario/mergo/mergo.go b/vendor/github.com/imdario/mergo/mergo.go
new file mode 100644
index 000000000..3cc926c7f
--- /dev/null
+++ b/vendor/github.com/imdario/mergo/mergo.go
@@ -0,0 +1,78 @@
+// Copyright 2013 Dario Castañé. All rights reserved.
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Based on src/pkg/reflect/deepequal.go from official
+// golang's stdlib.
+
+package mergo
+
+import (
+ "errors"
+ "reflect"
+)
+
+// Errors reported by Mergo when it finds invalid arguments.
+var (
+ ErrNilArguments = errors.New("src and dst must not be nil")
+ ErrDifferentArgumentsTypes = errors.New("src and dst must be of same type")
+ ErrNotSupported = errors.New("only structs and maps are supported")
+ ErrExpectedMapAsDestination = errors.New("dst was expected to be a map")
+ ErrExpectedStructAsDestination = errors.New("dst was expected to be a struct")
+ ErrNonPointerAgument = errors.New("dst must be a pointer")
+)
+
+// During deepMerge, we must keep track of checks that are
+// in progress. The comparison algorithm assumes that all
+// checks in progress are true when it re-encounters them.
+// Visits are stored in a map indexed by 17 * addr.
+type visit struct {
+ ptr uintptr
+ typ reflect.Type
+ next *visit
+}
+
+// From src/pkg/encoding/json/encode.go.
+func isEmptyValue(v reflect.Value) bool {
+ switch v.Kind() {
+ case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
+ return v.Len() == 0
+ case reflect.Bool:
+ return !v.Bool()
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return v.Int() == 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return v.Uint() == 0
+ case reflect.Float32, reflect.Float64:
+ return v.Float() == 0
+ case reflect.Interface, reflect.Ptr:
+ if v.IsNil() {
+ return true
+ }
+ return isEmptyValue(v.Elem())
+ case reflect.Func:
+ return v.IsNil()
+ case reflect.Invalid:
+ return true
+ }
+ return false
+}
+
+func resolveValues(dst, src interface{}) (vDst, vSrc reflect.Value, err error) {
+ if dst == nil || src == nil {
+ err = ErrNilArguments
+ return
+ }
+ vDst = reflect.ValueOf(dst).Elem()
+ if vDst.Kind() != reflect.Struct && vDst.Kind() != reflect.Map {
+ err = ErrNotSupported
+ return
+ }
+ vSrc = reflect.ValueOf(src)
+	// If vSrc is a pointer, dereference it.
+ if vSrc.Kind() == reflect.Ptr {
+ vSrc = vSrc.Elem()
+ }
+ return
+}
diff --git a/vendor/github.com/jessevdk/go-flags/.travis.yml b/vendor/github.com/jessevdk/go-flags/.travis.yml
new file mode 100644
index 000000000..2fc5e5f5b
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/.travis.yml
@@ -0,0 +1,39 @@
+language: go
+
+os:
+ - linux
+ - osx
+
+go:
+ - 1.16.x
+
+install:
+ # go-flags
+ - go build -v ./...
+
+ # linting
+ - go get -v golang.org/x/lint/golint
+
+ # code coverage
+ - go get golang.org/x/tools/cmd/cover
+ - go get github.com/onsi/ginkgo/ginkgo
+ - go get github.com/modocache/gover
+ - if [ "$TRAVIS_SECURE_ENV_VARS" = "true" ]; then go get github.com/mattn/goveralls; fi
+
+script:
+ # go-flags
+ - $(exit $(gofmt -l . | wc -l))
+ - go test -v ./...
+
+ # linting
+ - go tool vet -all=true -v=true . || true
+ - $(go env GOPATH | awk 'BEGIN{FS=":"} {print $1}')/bin/golint ./...
+
+ # code coverage
+ - $(go env GOPATH | awk 'BEGIN{FS=":"} {print $1}')/bin/ginkgo -r -cover
+ - $(go env GOPATH | awk 'BEGIN{FS=":"} {print $1}')/bin/gover
+ - if [ "$TRAVIS_SECURE_ENV_VARS" = "true" ]; then $(go env GOPATH | awk 'BEGIN{FS=":"} {print $1}')/bin/goveralls -coverprofile=gover.coverprofile -service=travis-ci -repotoken $COVERALLS_TOKEN; fi
+
+env:
+ # coveralls.io
+ secure: "RCYbiB4P0RjQRIoUx/vG/AjP3mmYCbzOmr86DCww1Z88yNcy3hYr3Cq8rpPtYU5v0g7wTpu4adaKIcqRE9xknYGbqj3YWZiCoBP1/n4Z+9sHW3Dsd9D/GRGeHUus0laJUGARjWoCTvoEtOgTdGQDoX7mH+pUUY0FBltNYUdOiiU="
diff --git a/vendor/github.com/jessevdk/go-flags/LICENSE b/vendor/github.com/jessevdk/go-flags/LICENSE
new file mode 100644
index 000000000..bcca0d521
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/LICENSE
@@ -0,0 +1,26 @@
+Copyright (c) 2012 Jesse van den Kieboom. All rights reserved.
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following disclaimer
+ in the documentation and/or other materials provided with the
+ distribution.
+ * Neither the name of Google Inc. nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/jessevdk/go-flags/README.md b/vendor/github.com/jessevdk/go-flags/README.md
new file mode 100644
index 000000000..f22650b20
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/README.md
@@ -0,0 +1,140 @@
+go-flags: a go library for parsing command line arguments
+=========================================================
+
+[![GoDoc](https://godoc.org/github.com/jessevdk/go-flags?status.png)](https://godoc.org/github.com/jessevdk/go-flags) [![Build Status](https://travis-ci.org/jessevdk/go-flags.svg?branch=master)](https://travis-ci.org/jessevdk/go-flags) [![Coverage Status](https://img.shields.io/coveralls/jessevdk/go-flags.svg)](https://coveralls.io/r/jessevdk/go-flags?branch=master)
+
+This library provides similar functionality to the builtin flag package of
+Go, but provides much more functionality and nicer formatting. From the
+documentation:
+
+Package flags provides an extensive command line option parser.
+The flags package is similar in functionality to the go builtin flag package
+but provides more options and uses reflection to provide a convenient and
+succinct way of specifying command line options.
+
+Supported features:
+* Options with short names (-v)
+* Options with long names (--verbose)
+* Options with and without arguments (bool vs. other types)
+* Options with optional arguments and default values
+* Multiple option groups each containing a set of options
+* Generate and print well-formatted help message
+* Passing remaining command line arguments after -- (optional)
+* Ignoring unknown command line options (optional)
+* Supports -I/usr/include -I=/usr/include -I /usr/include option argument specification
+* Supports multiple short options -aux
+* Supports all primitive go types (string, int{8..64}, uint{8..64}, float)
+* Supports the same option multiple times (values can be stored in a slice, or the last occurrence counts)
+* Supports maps
+* Supports function callbacks
+* Supports namespaces for (nested) option groups
+
+The flags package uses structs, reflection and struct field tags
+to allow users to specify command line options. This results in very simple
+and concise specification of your application options. For example:
+
+```go
+type Options struct {
+ Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information"`
+}
+```
+
+This specifies one option with a short name -v and a long name --verbose.
+When either -v or --verbose is found on the command line, a 'true' value
+will be appended to the Verbose field. For example, when specifying -vvv, the
+resulting value of Verbose will be {[true, true, true]}.
+
+Example:
+--------
+```go
+var opts struct {
+ // Slice of bool will append 'true' each time the option
+ // is encountered (can be set multiple times, like -vvv)
+ Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information"`
+
+ // Example of automatic marshalling to desired type (uint)
+ Offset uint `long:"offset" description:"Offset"`
+
+ // Example of a callback, called each time the option is found.
+ Call func(string) `short:"c" description:"Call phone number"`
+
+ // Example of a required flag
+ Name string `short:"n" long:"name" description:"A name" required:"true"`
+
+ // Example of a flag restricted to a pre-defined set of strings
+ Animal string `long:"animal" choice:"cat" choice:"dog"`
+
+ // Example of a value name
+ File string `short:"f" long:"file" description:"A file" value-name:"FILE"`
+
+ // Example of a pointer
+ Ptr *int `short:"p" description:"A pointer to an integer"`
+
+ // Example of a slice of strings
+ StringSlice []string `short:"s" description:"A slice of strings"`
+
+ // Example of a slice of pointers
+ PtrSlice []*string `long:"ptrslice" description:"A slice of pointers to string"`
+
+ // Example of a map
+ IntMap map[string]int `long:"intmap" description:"A map from string to int"`
+}
+
+// Callback which will invoke callto:<argument> to call a number.
+// Note that this works only on OS X (and probably only with
+// Skype) but it shows the idea.
+opts.Call = func(num string) {
+ cmd := exec.Command("open", "callto:"+num)
+ cmd.Start()
+ cmd.Process.Release()
+}
+
+// Make some fake arguments to parse.
+args := []string{
+ "-vv",
+ "--offset=5",
+ "-n", "Me",
+ "--animal", "dog", // anything other than "cat" or "dog" will raise an error
+ "-p", "3",
+ "-s", "hello",
+ "-s", "world",
+ "--ptrslice", "hello",
+ "--ptrslice", "world",
+ "--intmap", "a:1",
+ "--intmap", "b:5",
+ "arg1",
+ "arg2",
+ "arg3",
+}
+
+// Parse flags from `args'. Note that here we use flags.ParseArgs for
+// the sake of making a working example. Normally, you would simply use
+// flags.Parse(&opts) which uses os.Args
+args, err := flags.ParseArgs(&opts, args)
+
+if err != nil {
+ panic(err)
+}
+
+fmt.Printf("Verbosity: %v\n", opts.Verbose)
+fmt.Printf("Offset: %d\n", opts.Offset)
+fmt.Printf("Name: %s\n", opts.Name)
+fmt.Printf("Animal: %s\n", opts.Animal)
+fmt.Printf("Ptr: %d\n", *opts.Ptr)
+fmt.Printf("StringSlice: %v\n", opts.StringSlice)
+fmt.Printf("PtrSlice: [%v %v]\n", *opts.PtrSlice[0], *opts.PtrSlice[1])
+fmt.Printf("IntMap: [a:%v b:%v]\n", opts.IntMap["a"], opts.IntMap["b"])
+fmt.Printf("Remaining args: %s\n", strings.Join(args, " "))
+
+// Output: Verbosity: [true true]
+// Offset: 5
+// Name: Me
+// Animal: dog
+// Ptr: 3
+// StringSlice: [hello world]
+// PtrSlice: [hello world]
+// IntMap: [a:1 b:5]
+// Remaining args: arg1 arg2 arg3
+```
+
+More information can be found in the godocs: <http://godoc.org/github.com/jessevdk/go-flags>
diff --git a/vendor/github.com/jessevdk/go-flags/arg.go b/vendor/github.com/jessevdk/go-flags/arg.go
new file mode 100644
index 000000000..8ec62048f
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/arg.go
@@ -0,0 +1,27 @@
+package flags
+
+import (
+ "reflect"
+)
+
+// Arg represents a positional argument on the command line.
+type Arg struct {
+ // The name of the positional argument (used in the help)
+ Name string
+
+ // A description of the positional argument (used in the help)
+ Description string
+
+ // The minimal number of required positional arguments
+ Required int
+
+ // The maximum number of required positional arguments
+ RequiredMaximum int
+
+ value reflect.Value
+ tag multiTag
+}
+
+func (a *Arg) isRemaining() bool {
+ return a.value.Type().Kind() == reflect.Slice
+}
diff --git a/vendor/github.com/jessevdk/go-flags/check_crosscompile.sh b/vendor/github.com/jessevdk/go-flags/check_crosscompile.sh
new file mode 100644
index 000000000..5edc430ed
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/check_crosscompile.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+set -e
+
+echo '# linux arm7'
+GOARM=7 GOARCH=arm GOOS=linux go build
+echo '# linux arm5'
+GOARM=5 GOARCH=arm GOOS=linux go build
+echo '# windows 386'
+GOARCH=386 GOOS=windows go build
+echo '# windows amd64'
+GOARCH=amd64 GOOS=windows go build
+echo '# darwin'
+GOARCH=amd64 GOOS=darwin go build
+echo '# freebsd'
+GOARCH=amd64 GOOS=freebsd go build
+echo '# aix ppc64'
+GOARCH=ppc64 GOOS=aix go build
+echo '# solaris amd64'
+GOARCH=amd64 GOOS=solaris go build
diff --git a/vendor/github.com/jessevdk/go-flags/closest.go b/vendor/github.com/jessevdk/go-flags/closest.go
new file mode 100644
index 000000000..3b518757c
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/closest.go
@@ -0,0 +1,63 @@
+package flags
+
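+// levenshtein computes the Levenshtein (edit) distance between s and t
+// using a full dynamic-programming table.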
+func levenshtein(s string, t string) int {
+ if len(s) == 0 {
+ return len(t)
+ }
+
+ if len(t) == 0 {
+ return len(s)
+ }
+
+ dists := make([][]int, len(s)+1)
+ for i := range dists {
+ dists[i] = make([]int, len(t)+1)
+ dists[i][0] = i
+ }
+
+ for j := range t {
+ dists[0][j] = j
+ }
+
+ for i, sc := range s {
+ for j, tc := range t {
+ if sc == tc {
+ dists[i+1][j+1] = dists[i][j]
+ } else {
+ dists[i+1][j+1] = dists[i][j] + 1
+ if dists[i+1][j] < dists[i+1][j+1] {
+ dists[i+1][j+1] = dists[i+1][j] + 1
+ }
+ if dists[i][j+1] < dists[i+1][j+1] {
+ dists[i+1][j+1] = dists[i][j+1] + 1
+ }
+ }
+ }
+ }
+
+ return dists[len(s)][len(t)]
+}
+
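+// closestChoice returns the element of choices with the smallest
+// Levenshtein distance to cmd, together with that distance.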
+func closestChoice(cmd string, choices []string) (string, int) {
+ if len(choices) == 0 {
+ return "", 0
+ }
+
+ mincmd := -1
+ mindist := -1
+
+ for i, c := range choices {
+ l := levenshtein(cmd, c)
+
+ if mincmd < 0 || l < mindist {
+ mindist = l
+ mincmd = i
+ }
+ }
+
+ return choices[mincmd], mindist
+}
diff --git a/vendor/github.com/jessevdk/go-flags/command.go b/vendor/github.com/jessevdk/go-flags/command.go
new file mode 100644
index 000000000..879465d7a
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/command.go
@@ -0,0 +1,464 @@
+package flags
+
+import (
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+)
+
+// Command represents an application command. Commands can be added to the
+// parser (which itself is a command) and are selected/executed when their name
+// is specified on the command line. The Command type embeds a Group and
+// therefore also carries a set of command specific options.
+type Command struct {
+ // Embedded, see Group for more information
+ *Group
+
+ // The name by which the command can be invoked
+ Name string
+
+ // The active sub command (set by parsing) or nil
+ Active *Command
+
+ // Whether subcommands are optional
+ SubcommandsOptional bool
+
+ // Aliases for the command
+ Aliases []string
+
+ // Whether positional arguments are required
+ ArgsRequired bool
+
+ commands []*Command
+ hasBuiltinHelpGroup bool
+ args []*Arg
+}
+
+// Commander is an interface which can be implemented by any command added in
+// the options. When implemented, the Execute method will be called for the last
+// specified (sub)command providing the remaining command line arguments.
+type Commander interface {
+ // Execute will be called for the last active (sub)command. The
+ // args argument contains the remaining command line arguments. The
+ // error that Execute returns will be eventually passed out of the
+ // Parse method of the Parser.
+ Execute(args []string) error
+}
+
+// Usage is an interface which can be implemented to show a custom usage string
+// in the help message shown for a command.
+type Usage interface {
+ // Usage is called for commands to allow customized printing of command
+ // usage in the generated help message.
+ Usage() string
+}
+
+type lookup struct {
+ shortNames map[string]*Option
+ longNames map[string]*Option
+
+ commands map[string]*Command
+}
+
+// AddCommand adds a new command to the parser with the given name and data. The
+// data needs to be a pointer to a struct from which the fields indicate which
+// options are in the command. The provided data can implement the Commander and
+// Usage interfaces.
+func (c *Command) AddCommand(command string, shortDescription string, longDescription string, data interface{}) (*Command, error) {
+ cmd := newCommand(command, shortDescription, longDescription, data)
+
+ cmd.parent = c
+
+ if err := cmd.scan(); err != nil {
+ return nil, err
+ }
+
+ c.commands = append(c.commands, cmd)
+ return cmd, nil
+}
+
+// AddGroup adds a new group to the command with the given name and data. The
+// data needs to be a pointer to a struct from which the fields indicate which
+// options are in the group.
+func (c *Command) AddGroup(shortDescription string, longDescription string, data interface{}) (*Group, error) {
+ group := newGroup(shortDescription, longDescription, data)
+
+ group.parent = c
+
+ if err := group.scanType(c.scanSubcommandHandler(group)); err != nil {
+ return nil, err
+ }
+
+ c.groups = append(c.groups, group)
+ return group, nil
+}
+
+// Commands returns a list of subcommands of this command.
+func (c *Command) Commands() []*Command {
+ return c.commands
+}
+
+// Find locates the subcommand with the given name and returns it. If no such
+// command can be found Find will return nil.
+func (c *Command) Find(name string) *Command {
+ for _, cc := range c.commands {
+ if cc.match(name) {
+ return cc
+ }
+ }
+
+ return nil
+}
+
+// FindOptionByLongName finds an option that is part of the command, or any of
+// its parent commands, by matching its long name (including the option
+// namespace).
+func (c *Command) FindOptionByLongName(longName string) (option *Option) {
+ for option == nil && c != nil {
+ option = c.Group.FindOptionByLongName(longName)
+
+ c, _ = c.parent.(*Command)
+ }
+
+ return option
+}
+
+// FindOptionByShortName finds an option that is part of the command, or any of
+// its parent commands, by matching its short name.
+func (c *Command) FindOptionByShortName(shortName rune) (option *Option) {
+ for option == nil && c != nil {
+ option = c.Group.FindOptionByShortName(shortName)
+
+ c, _ = c.parent.(*Command)
+ }
+
+ return option
+}
+
+// Args returns a list of positional arguments associated with this command.
+func (c *Command) Args() []*Arg {
+ ret := make([]*Arg, len(c.args))
+ copy(ret, c.args)
+
+ return ret
+}
+
+func newCommand(name string, shortDescription string, longDescription string, data interface{}) *Command {
+ return &Command{
+ Group: newGroup(shortDescription, longDescription, data),
+ Name: name,
+ }
+}
+
+func (c *Command) scanSubcommandHandler(parentg *Group) scanHandler {
+ f := func(realval reflect.Value, sfield *reflect.StructField) (bool, error) {
+ mtag := newMultiTag(string(sfield.Tag))
+
+ if err := mtag.Parse(); err != nil {
+ return true, err
+ }
+
+ positional := mtag.Get("positional-args")
+
+ if len(positional) != 0 {
+ stype := realval.Type()
+
+ for i := 0; i < stype.NumField(); i++ {
+ field := stype.Field(i)
+
+ m := newMultiTag((string(field.Tag)))
+
+ if err := m.Parse(); err != nil {
+ return true, err
+ }
+
+ name := m.Get("positional-arg-name")
+
+ if len(name) == 0 {
+ name = field.Name
+ }
+
+ required := -1
+ requiredMaximum := -1
+
+ sreq := m.Get("required")
+
+ if sreq != "" {
+ required = 1
+
+ rng := strings.SplitN(sreq, "-", 2)
+
+ if len(rng) > 1 {
+ if preq, err := strconv.ParseInt(rng[0], 10, 32); err == nil {
+ required = int(preq)
+ }
+
+ if preq, err := strconv.ParseInt(rng[1], 10, 32); err == nil {
+ requiredMaximum = int(preq)
+ }
+ } else {
+ if preq, err := strconv.ParseInt(sreq, 10, 32); err == nil {
+ required = int(preq)
+ }
+ }
+ }
+
+ arg := &Arg{
+ Name: name,
+ Description: m.Get("description"),
+ Required: required,
+ RequiredMaximum: requiredMaximum,
+
+ value: realval.Field(i),
+ tag: m,
+ }
+
+ c.args = append(c.args, arg)
+
+ if len(mtag.Get("required")) != 0 {
+ c.ArgsRequired = true
+ }
+ }
+
+ return true, nil
+ }
+
+ subcommand := mtag.Get("command")
+
+ if len(subcommand) != 0 {
+ var ptrval reflect.Value
+
+ if realval.Kind() == reflect.Ptr {
+ ptrval = realval
+
+ if ptrval.IsNil() {
+ ptrval.Set(reflect.New(ptrval.Type().Elem()))
+ }
+ } else {
+ ptrval = realval.Addr()
+ }
+
+ shortDescription := mtag.Get("description")
+ longDescription := mtag.Get("long-description")
+ subcommandsOptional := mtag.Get("subcommands-optional")
+ aliases := mtag.GetMany("alias")
+
+ subc, err := c.AddCommand(subcommand, shortDescription, longDescription, ptrval.Interface())
+
+ if err != nil {
+ return true, err
+ }
+
+ subc.Hidden = mtag.Get("hidden") != ""
+
+ if len(subcommandsOptional) > 0 {
+ subc.SubcommandsOptional = true
+ }
+
+ if len(aliases) > 0 {
+ subc.Aliases = aliases
+ }
+
+ return true, nil
+ }
+
+ return parentg.scanSubGroupHandler(realval, sfield)
+ }
+
+ return f
+}
+
+func (c *Command) scan() error {
+ return c.scanType(c.scanSubcommandHandler(c.Group))
+}
+
+func (c *Command) eachOption(f func(*Command, *Group, *Option)) {
+ c.eachCommand(func(c *Command) {
+ c.eachGroup(func(g *Group) {
+ for _, option := range g.options {
+ f(c, g, option)
+ }
+ })
+ }, true)
+}
+
+func (c *Command) eachCommand(f func(*Command), recurse bool) {
+ f(c)
+
+ for _, cc := range c.commands {
+ if recurse {
+ cc.eachCommand(f, true)
+ } else {
+ f(cc)
+ }
+ }
+}
+
+func (c *Command) eachActiveGroup(f func(cc *Command, g *Group)) {
+ c.eachGroup(func(g *Group) {
+ f(c, g)
+ })
+
+ if c.Active != nil {
+ c.Active.eachActiveGroup(f)
+ }
+}
+
+func (c *Command) addHelpGroups(showHelp func() error) {
+ if !c.hasBuiltinHelpGroup {
+ c.addHelpGroup(showHelp)
+ c.hasBuiltinHelpGroup = true
+ }
+
+ for _, cc := range c.commands {
+ cc.addHelpGroups(showHelp)
+ }
+}
+
+func (c *Command) makeLookup() lookup {
+ ret := lookup{
+ shortNames: make(map[string]*Option),
+ longNames: make(map[string]*Option),
+ commands: make(map[string]*Command),
+ }
+
+ parent := c.parent
+
+ var parents []*Command
+
+ for parent != nil {
+ if cmd, ok := parent.(*Command); ok {
+ parents = append(parents, cmd)
+ parent = cmd.parent
+ } else {
+ parent = nil
+ }
+ }
+
+ for i := len(parents) - 1; i >= 0; i-- {
+ parents[i].fillLookup(&ret, true)
+ }
+
+ c.fillLookup(&ret, false)
+ return ret
+}
+
+func (c *Command) fillLookup(ret *lookup, onlyOptions bool) {
+ c.eachGroup(func(g *Group) {
+ for _, option := range g.options {
+ if option.ShortName != 0 {
+ ret.shortNames[string(option.ShortName)] = option
+ }
+
+ if len(option.LongName) > 0 {
+ ret.longNames[option.LongNameWithNamespace()] = option
+ }
+ }
+ })
+
+ if onlyOptions {
+ return
+ }
+
+ for _, subcommand := range c.commands {
+ ret.commands[subcommand.Name] = subcommand
+
+ for _, a := range subcommand.Aliases {
+ ret.commands[a] = subcommand
+ }
+ }
+}
+
+func (c *Command) groupByName(name string) *Group {
+ if grp := c.Group.groupByName(name); grp != nil {
+ return grp
+ }
+
+ for _, subc := range c.commands {
+ prefix := subc.Name + "."
+
+ if strings.HasPrefix(name, prefix) {
+ if grp := subc.groupByName(name[len(prefix):]); grp != nil {
+ return grp
+ }
+ } else if name == subc.Name {
+ return subc.Group
+ }
+ }
+
+ return nil
+}
+
+type commandList []*Command
+
+func (c commandList) Less(i, j int) bool {
+ return c[i].Name < c[j].Name
+}
+
+func (c commandList) Len() int {
+ return len(c)
+}
+
+func (c commandList) Swap(i, j int) {
+ c[i], c[j] = c[j], c[i]
+}
+
+func (c *Command) sortedVisibleCommands() []*Command {
+ ret := commandList(c.visibleCommands())
+ sort.Sort(ret)
+
+ return []*Command(ret)
+}
+
+func (c *Command) visibleCommands() []*Command {
+ ret := make([]*Command, 0, len(c.commands))
+
+ for _, cmd := range c.commands {
+ if !cmd.Hidden {
+ ret = append(ret, cmd)
+ }
+ }
+
+ return ret
+}
+
+func (c *Command) match(name string) bool {
+ if c.Name == name {
+ return true
+ }
+
+ for _, v := range c.Aliases {
+ if v == name {
+ return true
+ }
+ }
+
+ return false
+}
+
+func (c *Command) hasHelpOptions() bool {
+ ret := false
+
+ c.eachGroup(func(g *Group) {
+ if g.isBuiltinHelp {
+ return
+ }
+
+ for _, opt := range g.options {
+ if opt.showInHelp() {
+ ret = true
+ }
+ }
+ })
+
+ return ret
+}
+
+func (c *Command) fillParseState(s *parseState) {
+ s.positional = make([]*Arg, len(c.args))
+ copy(s.positional, c.args)
+
+ s.lookup = c.makeLookup()
+ s.command = c
+}
diff --git a/vendor/github.com/jessevdk/go-flags/completion.go b/vendor/github.com/jessevdk/go-flags/completion.go
new file mode 100644
index 000000000..8ed61f1db
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/completion.go
@@ -0,0 +1,315 @@
+package flags
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "reflect"
+ "sort"
+ "strings"
+ "unicode/utf8"
+)
+
+// Completion is a type containing information about a completion.
+type Completion struct {
+ // The completed item
+ Item string
+
+ // A description of the completed item (optional)
+ Description string
+}
+
+type completions []Completion
+
+func (c completions) Len() int {
+ return len(c)
+}
+
+func (c completions) Less(i, j int) bool {
+ return c[i].Item < c[j].Item
+}
+
+func (c completions) Swap(i, j int) {
+ c[i], c[j] = c[j], c[i]
+}
+
+// Completer is an interface which can be implemented by types
+// to provide custom command line argument completion.
+type Completer interface {
+ // Complete receives a prefix representing a (partial) value
+ // for its type and should provide a list of possible valid
+ // completions.
+ Complete(match string) []Completion
+}
+
+type completion struct {
+ parser *Parser
+}
+
+// Filename is a string alias which provides filename completion.
+type Filename string
+
+func completionsWithoutDescriptions(items []string) []Completion {
+ ret := make([]Completion, len(items))
+
+ for i, v := range items {
+ ret[i].Item = v
+ }
+
+ return ret
+}
+
+// Complete returns a list of existing files with the given
+// prefix.
+func (f *Filename) Complete(match string) []Completion {
+ ret, _ := filepath.Glob(match + "*")
+ if len(ret) == 1 {
+ if info, err := os.Stat(ret[0]); err == nil && info.IsDir() {
+ ret[0] = ret[0] + "/"
+ }
+ }
+ return completionsWithoutDescriptions(ret)
+}
+
+func (c *completion) skipPositional(s *parseState, n int) {
+ if n >= len(s.positional) {
+ s.positional = nil
+ } else {
+ s.positional = s.positional[n:]
+ }
+}
+
+func (c *completion) completeOptionNames(s *parseState, prefix string, match string, short bool) []Completion {
+ if short && len(match) != 0 {
+ return []Completion{
+ {
+ Item: prefix + match,
+ },
+ }
+ }
+
+ var results []Completion
+ repeats := map[string]bool{}
+
+ for name, opt := range s.lookup.longNames {
+ if strings.HasPrefix(name, match) && !opt.Hidden {
+ results = append(results, Completion{
+ Item: defaultLongOptDelimiter + name,
+ Description: opt.Description,
+ })
+
+ if short {
+ repeats[string(opt.ShortName)] = true
+ }
+ }
+ }
+
+ if short {
+ for name, opt := range s.lookup.shortNames {
+ if _, exist := repeats[name]; !exist && strings.HasPrefix(name, match) && !opt.Hidden {
+ results = append(results, Completion{
+ Item: string(defaultShortOptDelimiter) + name,
+ Description: opt.Description,
+ })
+ }
+ }
+ }
+
+ return results
+}
+
+func (c *completion) completeNamesForLongPrefix(s *parseState, prefix string, match string) []Completion {
+ return c.completeOptionNames(s, prefix, match, false)
+}
+
+func (c *completion) completeNamesForShortPrefix(s *parseState, prefix string, match string) []Completion {
+ return c.completeOptionNames(s, prefix, match, true)
+}
+
+func (c *completion) completeCommands(s *parseState, match string) []Completion {
+ n := make([]Completion, 0, len(s.command.commands))
+
+ for _, cmd := range s.command.commands {
+ if cmd.data != c && !cmd.Hidden && strings.HasPrefix(cmd.Name, match) {
+ n = append(n, Completion{
+ Item: cmd.Name,
+ Description: cmd.ShortDescription,
+ })
+ }
+ }
+
+ return n
+}
+
+func (c *completion) completeValue(value reflect.Value, prefix string, match string) []Completion {
+ if value.Kind() == reflect.Slice {
+ value = reflect.New(value.Type().Elem())
+ }
+ i := value.Interface()
+
+ var ret []Completion
+
+ if cmp, ok := i.(Completer); ok {
+ ret = cmp.Complete(match)
+ } else if value.CanAddr() {
+ if cmp, ok = value.Addr().Interface().(Completer); ok {
+ ret = cmp.Complete(match)
+ }
+ }
+
+ for i, v := range ret {
+ ret[i].Item = prefix + v.Item
+ }
+
+ return ret
+}
+
+func (c *completion) complete(args []string) []Completion {
+ if len(args) == 0 {
+ args = []string{""}
+ }
+
+ s := &parseState{
+ args: args,
+ }
+
+ c.parser.fillParseState(s)
+
+ var opt *Option
+
+ for len(s.args) > 1 {
+ arg := s.pop()
+
+ if (c.parser.Options&PassDoubleDash) != None && arg == "--" {
+ opt = nil
+ c.skipPositional(s, len(s.args)-1)
+
+ break
+ }
+
+ if argumentIsOption(arg) {
+ prefix, optname, islong := stripOptionPrefix(arg)
+ optname, _, argument := splitOption(prefix, optname, islong)
+
+ if argument == nil {
+ var o *Option
+ canarg := true
+
+ if islong {
+ o = s.lookup.longNames[optname]
+ } else {
+ for i, r := range optname {
+ sname := string(r)
+ o = s.lookup.shortNames[sname]
+
+ if o == nil {
+ break
+ }
+
+ if i == 0 && o.canArgument() && len(optname) != len(sname) {
+ canarg = false
+ break
+ }
+ }
+ }
+
+ if o == nil && (c.parser.Options&PassAfterNonOption) != None {
+ opt = nil
+ c.skipPositional(s, len(s.args)-1)
+
+ break
+ } else if o != nil && o.canArgument() && !o.OptionalArgument && canarg {
+ if len(s.args) > 1 {
+ s.pop()
+ } else {
+ opt = o
+ }
+ }
+ }
+ } else {
+ if len(s.positional) > 0 {
+ if !s.positional[0].isRemaining() {
+ // Don't advance beyond a remaining positional arg (because
+ // it consumes all subsequent args).
+ s.positional = s.positional[1:]
+ }
+ } else if cmd, ok := s.lookup.commands[arg]; ok {
+ cmd.fillParseState(s)
+ }
+
+ opt = nil
+ }
+ }
+
+ lastarg := s.args[len(s.args)-1]
+ var ret []Completion
+
+ if opt != nil {
+ // Completion for the argument of 'opt'
+ ret = c.completeValue(opt.value, "", lastarg)
+ } else if argumentStartsOption(lastarg) {
+ // Complete the option
+ prefix, optname, islong := stripOptionPrefix(lastarg)
+ optname, split, argument := splitOption(prefix, optname, islong)
+
+ if argument == nil && !islong {
+ rname, n := utf8.DecodeRuneInString(optname)
+ sname := string(rname)
+
+ if opt := s.lookup.shortNames[sname]; opt != nil && opt.canArgument() {
+ ret = c.completeValue(opt.value, prefix+sname, optname[n:])
+ } else {
+ ret = c.completeNamesForShortPrefix(s, prefix, optname)
+ }
+ } else if argument != nil {
+ if islong {
+ opt = s.lookup.longNames[optname]
+ } else {
+ opt = s.lookup.shortNames[optname]
+ }
+
+ if opt != nil {
+ ret = c.completeValue(opt.value, prefix+optname+split, *argument)
+ }
+ } else if islong {
+ ret = c.completeNamesForLongPrefix(s, prefix, optname)
+ } else {
+ ret = c.completeNamesForShortPrefix(s, prefix, optname)
+ }
+ } else if len(s.positional) > 0 {
+ // Complete for positional argument
+ ret = c.completeValue(s.positional[0].value, "", lastarg)
+ } else if len(s.command.commands) > 0 {
+ // Complete for command
+ ret = c.completeCommands(s, lastarg)
+ }
+
+ sort.Sort(completions(ret))
+ return ret
+}
+
+func (c *completion) print(items []Completion, showDescriptions bool) {
+ if showDescriptions && len(items) > 1 {
+ maxl := 0
+
+ for _, v := range items {
+ if len(v.Item) > maxl {
+ maxl = len(v.Item)
+ }
+ }
+
+ for _, v := range items {
+ fmt.Printf("%s", v.Item)
+
+ if len(v.Description) > 0 {
+ fmt.Printf("%s # %s", strings.Repeat(" ", maxl-len(v.Item)), v.Description)
+ }
+
+ fmt.Printf("\n")
+ }
+ } else {
+ for _, v := range items {
+ fmt.Println(v.Item)
+ }
+ }
+}
diff --git a/vendor/github.com/jessevdk/go-flags/convert.go b/vendor/github.com/jessevdk/go-flags/convert.go
new file mode 100644
index 000000000..cda29b2f0
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/convert.go
@@ -0,0 +1,359 @@
+// Copyright 2012 Jesse van den Kieboom. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package flags
+
+import (
+ "fmt"
+ "reflect"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// Marshaler is the interface implemented by types that can marshal themselves
+// to a string representation of the flag.
+type Marshaler interface {
+ // MarshalFlag marshals a flag value to its string representation.
+ MarshalFlag() (string, error)
+}
+
+// Unmarshaler is the interface implemented by types that can unmarshal a flag
+// argument to themselves. The provided value is directly passed from the
+// command line.
+type Unmarshaler interface {
+ // UnmarshalFlag unmarshals a string value representation to the flag
+ // value (which therefore needs to be a pointer receiver).
+ UnmarshalFlag(value string) error
+}
+
+// ValueValidator is the interface implemented by types that can validate a
+// flag argument themselves. The provided value is directly passed from the
+// command line.
+type ValueValidator interface {
+	// IsValidValue returns an error if the provided string value is not
+	// valid for the flag.
+ IsValidValue(value string) error
+}
+
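+// getBase reads an optional "base" tag from options and returns it as the
+// integer base for conversions, falling back to the provided default base.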
+func getBase(options multiTag, base int) (int, error) {
+ sbase := options.Get("base")
+
+ var err error
+ var ivbase int64
+
+ if sbase != "" {
+ ivbase, err = strconv.ParseInt(sbase, 10, 32)
+ base = int(ivbase)
+ }
+
+ return base, err
+}
+
+func convertMarshal(val reflect.Value) (bool, string, error) {
+ // Check first for the Marshaler interface
+ if val.Type().NumMethod() > 0 && val.CanInterface() {
+ if marshaler, ok := val.Interface().(Marshaler); ok {
+ ret, err := marshaler.MarshalFlag()
+ return true, ret, err
+ }
+ }
+
+ return false, "", nil
+}
+
+func convertToString(val reflect.Value, options multiTag) (string, error) {
+ if ok, ret, err := convertMarshal(val); ok {
+ return ret, err
+ }
+
+ tp := val.Type()
+
+ // Support for time.Duration
+ if tp == reflect.TypeOf((*time.Duration)(nil)).Elem() {
+ stringer := val.Interface().(fmt.Stringer)
+ return stringer.String(), nil
+ }
+
+ switch tp.Kind() {
+ case reflect.String:
+ return val.String(), nil
+ case reflect.Bool:
+ if val.Bool() {
+ return "true", nil
+ }
+
+ return "false", nil
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ base, err := getBase(options, 10)
+
+ if err != nil {
+ return "", err
+ }
+
+ return strconv.FormatInt(val.Int(), base), nil
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ base, err := getBase(options, 10)
+
+ if err != nil {
+ return "", err
+ }
+
+ return strconv.FormatUint(val.Uint(), base), nil
+ case reflect.Float32, reflect.Float64:
+ return strconv.FormatFloat(val.Float(), 'g', -1, tp.Bits()), nil
+ case reflect.Slice:
+ if val.Len() == 0 {
+ return "", nil
+ }
+
+ ret := "["
+
+ for i := 0; i < val.Len(); i++ {
+ if i != 0 {
+ ret += ", "
+ }
+
+ item, err := convertToString(val.Index(i), options)
+
+ if err != nil {
+ return "", err
+ }
+
+ ret += item
+ }
+
+ return ret + "]", nil
+ case reflect.Map:
+ ret := "{"
+
+ for i, key := range val.MapKeys() {
+ if i != 0 {
+ ret += ", "
+ }
+
+ keyitem, err := convertToString(key, options)
+
+ if err != nil {
+ return "", err
+ }
+
+ item, err := convertToString(val.MapIndex(key), options)
+
+ if err != nil {
+ return "", err
+ }
+
+ ret += keyitem + ":" + item
+ }
+
+ return ret + "}", nil
+ case reflect.Ptr:
+ return convertToString(reflect.Indirect(val), options)
+ case reflect.Interface:
+ if !val.IsNil() {
+ return convertToString(val.Elem(), options)
+ }
+ }
+
+ return "", nil
+}
+
+func convertUnmarshal(val string, retval reflect.Value) (bool, error) {
+ if retval.Type().NumMethod() > 0 && retval.CanInterface() {
+ if unmarshaler, ok := retval.Interface().(Unmarshaler); ok {
+ if retval.IsNil() {
+ retval.Set(reflect.New(retval.Type().Elem()))
+
+ // Re-assign from the new value
+ unmarshaler = retval.Interface().(Unmarshaler)
+ }
+
+ return true, unmarshaler.UnmarshalFlag(val)
+ }
+ }
+
+ if retval.Type().Kind() != reflect.Ptr && retval.CanAddr() {
+ return convertUnmarshal(val, retval.Addr())
+ }
+
+ if retval.Type().Kind() == reflect.Interface && !retval.IsNil() {
+ return convertUnmarshal(val, retval.Elem())
+ }
+
+ return false, nil
+}
+
+func convert(val string, retval reflect.Value, options multiTag) error {
+ if ok, err := convertUnmarshal(val, retval); ok {
+ return err
+ }
+
+ tp := retval.Type()
+
+ // Support for time.Duration
+ if tp == reflect.TypeOf((*time.Duration)(nil)).Elem() {
+ parsed, err := time.ParseDuration(val)
+
+ if err != nil {
+ return err
+ }
+
+ retval.SetInt(int64(parsed))
+ return nil
+ }
+
+ switch tp.Kind() {
+ case reflect.String:
+ retval.SetString(val)
+ case reflect.Bool:
+ if val == "" {
+ retval.SetBool(true)
+ } else {
+ b, err := strconv.ParseBool(val)
+
+ if err != nil {
+ return err
+ }
+
+ retval.SetBool(b)
+ }
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ base, err := getBase(options, 10)
+
+ if err != nil {
+ return err
+ }
+
+ parsed, err := strconv.ParseInt(val, base, tp.Bits())
+
+ if err != nil {
+ return err
+ }
+
+ retval.SetInt(parsed)
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+ base, err := getBase(options, 10)
+
+ if err != nil {
+ return err
+ }
+
+ parsed, err := strconv.ParseUint(val, base, tp.Bits())
+
+ if err != nil {
+ return err
+ }
+
+ retval.SetUint(parsed)
+ case reflect.Float32, reflect.Float64:
+ parsed, err := strconv.ParseFloat(val, tp.Bits())
+
+ if err != nil {
+ return err
+ }
+
+ retval.SetFloat(parsed)
+ case reflect.Slice:
+ elemtp := tp.Elem()
+
+ elemvalptr := reflect.New(elemtp)
+ elemval := reflect.Indirect(elemvalptr)
+
+ if err := convert(val, elemval, options); err != nil {
+ return err
+ }
+
+ retval.Set(reflect.Append(retval, elemval))
+ case reflect.Map:
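+		// Map values are passed as `key:value'; a missing value part
+		// yields the empty string.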
+ parts := strings.SplitN(val, ":", 2)
+
+ key := parts[0]
+ var value string
+
+ if len(parts) == 2 {
+ value = parts[1]
+ }
+
+ keytp := tp.Key()
+ keyval := reflect.New(keytp)
+
+ if err := convert(key, keyval, options); err != nil {
+ return err
+ }
+
+ valuetp := tp.Elem()
+ valueval := reflect.New(valuetp)
+
+ if err := convert(value, valueval, options); err != nil {
+ return err
+ }
+
+ if retval.IsNil() {
+ retval.Set(reflect.MakeMap(tp))
+ }
+
+ retval.SetMapIndex(reflect.Indirect(keyval), reflect.Indirect(valueval))
+ case reflect.Ptr:
+ if retval.IsNil() {
+ retval.Set(reflect.New(retval.Type().Elem()))
+ }
+
+ return convert(val, reflect.Indirect(retval), options)
+ case reflect.Interface:
+ if !retval.IsNil() {
+ return convert(val, retval.Elem(), options)
+ }
+ }
+
+ return nil
+}
+
+func isPrint(s string) bool {
+ for _, c := range s {
+ if !strconv.IsPrint(c) {
+ return false
+ }
+ }
+
+ return true
+}
+
+func quoteIfNeeded(s string) string {
+ if !isPrint(s) {
+ return strconv.Quote(s)
+ }
+
+ return s
+}
+
+func quoteIfNeededV(s []string) []string {
+ ret := make([]string, len(s))
+
+ for i, v := range s {
+ ret[i] = quoteIfNeeded(v)
+ }
+
+ return ret
+}
+
+func quoteV(s []string) []string {
+ ret := make([]string, len(s))
+
+ for i, v := range s {
+ ret[i] = strconv.Quote(v)
+ }
+
+ return ret
+}
+
+func unquoteIfPossible(s string) (string, error) {
+ if len(s) == 0 || s[0] != '"' {
+ return s, nil
+ }
+
+ return strconv.Unquote(s)
+}
diff --git a/vendor/github.com/jessevdk/go-flags/error.go b/vendor/github.com/jessevdk/go-flags/error.go
new file mode 100644
index 000000000..73e07cfc2
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/error.go
@@ -0,0 +1,138 @@
+package flags
+
+import (
+ "fmt"
+)
+
+// ErrorType represents the type of error.
+type ErrorType uint
+
+const (
+ // ErrUnknown indicates a generic error.
+ ErrUnknown ErrorType = iota
+
+ // ErrExpectedArgument indicates that an argument was expected.
+ ErrExpectedArgument
+
+ // ErrUnknownFlag indicates an unknown flag.
+ ErrUnknownFlag
+
+ // ErrUnknownGroup indicates an unknown group.
+ ErrUnknownGroup
+
+ // ErrMarshal indicates a marshalling error while converting values.
+ ErrMarshal
+
+ // ErrHelp indicates that the built-in help was shown (the error
+ // contains the help message).
+ ErrHelp
+
+ // ErrNoArgumentForBool indicates that an argument was given for a
+	// boolean flag (which do not take any arguments).
+ ErrNoArgumentForBool
+
+ // ErrRequired indicates that a required flag was not provided.
+ ErrRequired
+
+ // ErrShortNameTooLong indicates that a short flag name was specified,
+ // longer than one character.
+ ErrShortNameTooLong
+
+ // ErrDuplicatedFlag indicates that a short or long flag has been
+	// defined more than once.
+ ErrDuplicatedFlag
+
+ // ErrTag indicates an error while parsing flag tags.
+ ErrTag
+
+ // ErrCommandRequired indicates that a command was required but not
+ // specified
+ ErrCommandRequired
+
+ // ErrUnknownCommand indicates that an unknown command was specified.
+ ErrUnknownCommand
+
+ // ErrInvalidChoice indicates an invalid option value which only allows
+ // a certain number of choices.
+ ErrInvalidChoice
+
+ // ErrInvalidTag indicates an invalid tag or invalid use of an existing tag
+ ErrInvalidTag
+)
+
+func (e ErrorType) String() string {
+ switch e {
+ case ErrUnknown:
+ return "unknown"
+ case ErrExpectedArgument:
+ return "expected argument"
+ case ErrUnknownFlag:
+ return "unknown flag"
+ case ErrUnknownGroup:
+ return "unknown group"
+ case ErrMarshal:
+ return "marshal"
+ case ErrHelp:
+ return "help"
+ case ErrNoArgumentForBool:
+ return "no argument for bool"
+ case ErrRequired:
+ return "required"
+ case ErrShortNameTooLong:
+ return "short name too long"
+ case ErrDuplicatedFlag:
+ return "duplicated flag"
+ case ErrTag:
+ return "tag"
+ case ErrCommandRequired:
+ return "command required"
+ case ErrUnknownCommand:
+ return "unknown command"
+ case ErrInvalidChoice:
+ return "invalid choice"
+ case ErrInvalidTag:
+ return "invalid tag"
+ }
+
+ return "unrecognized error type"
+}
+
+func (e ErrorType) Error() string {
+ return e.String()
+}
+
+// Error represents a parser error. The error returned from Parse is of this
+// type. The error contains both a Type and Message.
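+//
+// A typical caller-side check might look like this (a sketch; the parser
+// setup and error source are omitted):
+//
+//	if ferr, ok := err.(*flags.Error); ok && ferr.Type == flags.ErrHelp {
+//		// The built-in help was printed; not a real failure.
+//	}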
+type Error struct {
+ // The type of error
+ Type ErrorType
+
+ // The error message
+ Message string
+}
+
+// Error returns the error's message
+func (e *Error) Error() string {
+ return e.Message
+}
+
+func newError(tp ErrorType, message string) *Error {
+ return &Error{
+ Type: tp,
+ Message: message,
+ }
+}
+
+func newErrorf(tp ErrorType, format string, args ...interface{}) *Error {
+ return newError(tp, fmt.Sprintf(format, args...))
+}
+
+func wrapError(err error) *Error {
+ ret, ok := err.(*Error)
+
+ if !ok {
+ return newError(ErrUnknown, err.Error())
+ }
+
+ return ret
+}
diff --git a/vendor/github.com/jessevdk/go-flags/flags.go b/vendor/github.com/jessevdk/go-flags/flags.go
new file mode 100644
index 000000000..ac2157dd6
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/flags.go
@@ -0,0 +1,263 @@
+// Copyright 2012 Jesse van den Kieboom. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package flags provides an extensive command line option parser.
+The flags package is similar in functionality to the Go built-in flag package
+but provides more options and uses reflection to provide a convenient and
+succinct way of specifying command line options.
+
+
+Supported features
+
+The following features are supported in go-flags:
+
+ Options with short names (-v)
+ Options with long names (--verbose)
+    Options with and without arguments (bool vs. other types)
+ Options with optional arguments and default values
+ Option default values from ENVIRONMENT_VARIABLES, including slice and map values
+ Multiple option groups each containing a set of options
+ Generate and print well-formatted help message
+ Passing remaining command line arguments after -- (optional)
+ Ignoring unknown command line options (optional)
+ Supports -I/usr/include -I=/usr/include -I /usr/include option argument specification
+ Supports multiple short options -aux
+ Supports all primitive go types (string, int{8..64}, uint{8..64}, float)
+ Supports same option multiple times (can store in slice or last option counts)
+ Supports maps
+ Supports function callbacks
+ Supports namespaces for (nested) option groups
+
+Additional features specific to Windows:
+ Options with short names (/v)
+ Options with long names (/verbose)
+ Windows-style options with arguments use a colon as the delimiter
+ Modify generated help message with Windows-style / options
+ Windows style options can be disabled at build time using the "forceposix"
+ build tag
+
+
+Basic usage
+
+The flags package uses structs, reflection and struct field tags
+to allow users to specify command line options. This results in very simple
+and concise specification of your application options. For example:
+
+ type Options struct {
+ Verbose []bool `short:"v" long:"verbose" description:"Show verbose debug information"`
+ }
+
+This specifies one option with a short name -v and a long name --verbose.
+When either -v or --verbose is found on the command line, a 'true' value
+will be appended to the Verbose field. For example, when specifying -vvv, the
+resulting value of Verbose will be {[true, true, true]}.
+
+Slice options work exactly the same as primitive type options, except that
+whenever the option is encountered, a value is appended to the slice.
+
+Map options from string to primitive type are also supported. On the command
+line, you specify the value for such an option as key:value. For example
+
+ type Options struct {
+        AuthorInfo map[string]string `short:"a"`
+ }
+
+Then, the AuthorInfo map can be filled with something like
+-a name:Jesse -a "surname:van den Kieboom".
+
+Finally, for full control over the conversion between command line argument
+values and options, user defined types can choose to implement the Marshaler
+and Unmarshaler interfaces.
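+
+For example, a minimal sketch of a type implementing Unmarshaler (the type
+and its accepted values are purely illustrative):
+
+    type Level int
+
+    func (l *Level) UnmarshalFlag(value string) error {
+        switch value {
+        case "debug":
+            *l = 0
+        case "info":
+            *l = 1
+        default:
+            return fmt.Errorf("unknown level %q", value)
+        }
+
+        return nil
+    }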
+
+
+Available field tags
+
+The following is a list of tags for struct fields supported by go-flags:
+
+ short: the short name of the option (single character)
+ long: the long name of the option
+ required: if non empty, makes the option required to appear on the command
+ line. If a required option is not present, the parser will
+ return ErrRequired (optional)
+ description: the description of the option (optional)
+ long-description: the long description of the option. Currently only
+ displayed in generated man pages (optional)
+ no-flag: if non-empty, this field is ignored as an option (optional)
+
+ optional: if non-empty, makes the argument of the option optional. When an
+ argument is optional it can only be specified using
+ --option=argument (optional)
+ optional-value: the value of an optional option when the option occurs
+ without an argument. This tag can be specified multiple
+ times in the case of maps or slices (optional)
+ default: the default value of an option. This tag can be specified
+ multiple times in the case of slices or maps (optional)
+ default-mask: when specified, this value will be displayed in the help
+ instead of the actual default value. This is useful
+ mostly for hiding otherwise sensitive information from
+ showing up in the help. If default-mask takes the special
+ value "-", then no default value will be shown at all
+ (optional)
+ env: the default value of the option is overridden from the
+ specified environment variable, if one has been defined.
+ (optional)
+ env-delim: the 'env' default value from environment is split into
+ multiple values with the given delimiter string, use with
+ slices and maps (optional)
+ value-name: the name of the argument value (to be shown in the help)
+ (optional)
+ choice: limits the values for an option to a set of values.
+ Repeat this tag once for each allowable value.
+ e.g. `long:"animal" choice:"cat" choice:"dog"`
+ hidden: if non-empty, the option is not visible in the help or man page.
+
+ base: a base (radix) used to convert strings to integer values, the
+ default base is 10 (i.e. decimal) (optional)
+
+ ini-name: the explicit ini option name (optional)
+ no-ini: if non-empty this field is ignored as an ini option
+ (optional)
+
+ group: when specified on a struct field, makes the struct
+ field a separate group with the given name (optional)
+ namespace: when specified on a group struct field, the namespace
+ gets prepended to every option's long name and
+ subgroup's namespace of this group, separated by
+ the parser's namespace delimiter (optional)
+ env-namespace: when specified on a group struct field, the env-namespace
+ gets prepended to every option's env key and
+ subgroup's env-namespace of this group, separated by
+ the parser's env-namespace delimiter (optional)
+ command: when specified on a struct field, makes the struct
+ field a (sub)command with the given name (optional)
+ subcommands-optional: when specified on a command struct field, makes
+ any subcommands of that command optional (optional)
+ alias: when specified on a command struct field, adds the
+ specified name as an alias for the command. Can be
+ be specified multiple times to add more than one
+ alias (optional)
+ positional-args: when specified on a field with a struct type,
+ uses the fields of that struct to parse remaining
+ positional command line arguments into (in order
+ of the fields). If a field has a slice type,
+ then all remaining arguments will be added to it.
+ Positional arguments are optional by default,
+ unless the "required" tag is specified together
+ with the "positional-args" tag. The "required" tag
+ can also be set on the individual rest argument
+ fields, to require only the first N positional
+ arguments. If the "required" tag is set on the
+ rest arguments slice, then its value determines
+                         the minimum number of rest arguments that need to
+ be provided (e.g. `required:"2"`) (optional)
+ positional-arg-name: used on a field in a positional argument struct; name
+ of the positional argument placeholder to be shown in
+ the help (optional)
+
+Either the `short:` or the `long:` tag must be specified to make the field eligible as an
+option.
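+
+For illustration, a sketch combining several of the tags listed above (all
+option names and defaults are only examples):
+
+    type Options struct {
+        Host    string   `short:"H" long:"host" description:"Host to connect to" default:"localhost" env:"APP_HOST"`
+        Port    int      `short:"p" long:"port" description:"Port to connect to" default:"8080"`
+        Format  string   `long:"format" description:"Output format" choice:"text" choice:"json" default:"text"`
+        Verbose []bool   `short:"v" long:"verbose" description:"Show verbose output"`
+    }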
+
+
+Option groups
+
+Option groups are a simple way to semantically separate your options. All
+options in a particular group are shown together in the help under the name
+of the group. Namespaces can be used to specify option long names more
+precisely and emphasize the options affiliation to their group.
+
+There are currently three ways to specify option groups.
+
+ 1. Use NewNamedParser specifying the various option groups.
+ 2. Use AddGroup to add a group to an existing parser.
+ 3. Add a struct field to the top-level options annotated with the
+ group:"group-name" tag.
+
+
+
+Commands
+
+The flags package also has basic support for commands. Commands are often
+used in monolithic applications that support various commands or actions.
+Take git for example, all of the add, commit, checkout, etc. are called
+commands. Using commands you can easily separate multiple functions of your
+application.
+
+There are currently two ways to specify a command.
+
+ 1. Use AddCommand on an existing parser.
+ 2. Add a struct field to your options struct annotated with the
+ command:"command-name" tag.
+
+The most common, idiomatic way to implement commands is to define a global
+parser instance and implement each command in a separate file. These
+command files should define a Go init function which calls AddCommand on
+the global parser.
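+
+A sketch of that pattern (assuming the usual AddCommand signature taking a
+name, short description, long description and data):
+
+    var options Options
+
+    var parser = flags.NewParser(&options, flags.Default)
+
+    func init() {
+        parser.AddCommand("add",
+            "Add a file",
+            "The add command adds a file to the repository.",
+            &AddCommand{})
+    }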
+
+When parsing ends and there is an active command and that command implements
+the Commander interface, then its Execute method will be run with the
+remaining command line arguments.
+
+Command structs can have options which become valid to parse after the
+command has been specified on the command line, in addition to the options
+of all the parent commands. I.e. considering a -v flag on the parser and an
+add command, the following are equivalent:
+
+ ./app -v add
+ ./app add -v
+
+However, if the -v flag is defined on the add command, then the first of
+the two examples above would fail since the -v flag is not defined before
+the add command.
+
+
+Completion
+
+go-flags has builtin support to provide bash completion of flags, commands
+and argument values. To use completion, the binary which uses go-flags
+can be invoked in a special environment to list completion of the current
+command line argument. It should be noted that this `executes` your application,
+and it is up to the user to make sure there are no negative side effects (for
+example from init functions).
+
+Setting the environment variable `GO_FLAGS_COMPLETION=1` enables completion
+by replacing the argument parsing routine with the completion routine which
+outputs completions for the passed arguments. The basic invocation to
+complete a set of arguments is therefore:
+
+ GO_FLAGS_COMPLETION=1 ./completion-example arg1 arg2 arg3
+
+where `completion-example` is the binary, `arg1` and `arg2` are
+the current arguments, and `arg3` (the last argument) is the argument
+to be completed. If GO_FLAGS_COMPLETION is set to "verbose", then
+descriptions of possible completion items will also be shown, provided
+there is more than one completion item.
+
+To use this with bash completion, a simple file can be written which
+calls the binary which supports go-flags completion:
+
+ _completion_example() {
+ # All arguments except the first one
+ args=("${COMP_WORDS[@]:1:$COMP_CWORD}")
+
+ # Only split on newlines
+ local IFS=$'\n'
+
+ # Call completion (note that the first element of COMP_WORDS is
+ # the executable itself)
+ COMPREPLY=($(GO_FLAGS_COMPLETION=1 ${COMP_WORDS[0]} "${args[@]}"))
+ return 0
+ }
+
+ complete -F _completion_example completion-example
+
+Completion requires the parser option PassDoubleDash and is therefore
+enforced if the environment variable GO_FLAGS_COMPLETION is set.
+
+Customized completion for argument values is supported by implementing
+the flags.Completer interface for the argument value type. An example
+of a type which does so is the flags.Filename type, an alias of string
+allowing simple filename completion. A slice or array argument value
+whose element type implements flags.Completer will also be completed.
+*/
+package flags
diff --git a/vendor/github.com/jessevdk/go-flags/group.go b/vendor/github.com/jessevdk/go-flags/group.go
new file mode 100644
index 000000000..181caabb2
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/group.go
@@ -0,0 +1,429 @@
+// Copyright 2012 Jesse van den Kieboom. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package flags
+
+import (
+ "errors"
+ "reflect"
+ "strings"
+ "unicode/utf8"
+)
+
+// ErrNotPointerToStruct indicates that a provided data container is not
+// a pointer to a struct. Only pointers to structs are valid data containers
+// for options.
+var ErrNotPointerToStruct = errors.New("provided data is not a pointer to struct")
+
+// Group represents an option group. Option groups can be used to logically
+// group options together under a description. Groups are only used to provide
+// more structure to options both for the user (as displayed in the help message)
+// and for you, since groups can be nested.
+type Group struct {
+ // A short description of the group. The
+ // short description is primarily used in the built-in generated help
+ // message
+ ShortDescription string
+
+ // A long description of the group. The long
+ // description is primarily used to present information on commands
+ // (Command embeds Group) in the built-in generated help and man pages.
+ LongDescription string
+
+ // The namespace of the group
+ Namespace string
+
+ // The environment namespace of the group
+ EnvNamespace string
+
+ // If true, the group is not displayed in the help or man page
+ Hidden bool
+
+ // The parent of the group or nil if it has no parent
+ parent interface{}
+
+ // All the options in the group
+ options []*Option
+
+ // All the subgroups
+ groups []*Group
+
+ // Whether the group represents the built-in help group
+ isBuiltinHelp bool
+
+ data interface{}
+}
+
+type scanHandler func(reflect.Value, *reflect.StructField) (bool, error)
+
+// AddGroup adds a new group to the command with the given name and data. The
+// data needs to be a pointer to a struct from which the fields indicate which
+// options are in the group.
+func (g *Group) AddGroup(shortDescription string, longDescription string, data interface{}) (*Group, error) {
+ group := newGroup(shortDescription, longDescription, data)
+
+ group.parent = g
+
+ if err := group.scan(); err != nil {
+ return nil, err
+ }
+
+ g.groups = append(g.groups, group)
+ return group, nil
+}
+
+// AddOption adds a new option to this group.
+func (g *Group) AddOption(option *Option, data interface{}) {
+ option.value = reflect.ValueOf(data)
+ option.group = g
+ g.options = append(g.options, option)
+}
+
+// Groups returns the list of groups embedded in this group.
+func (g *Group) Groups() []*Group {
+ return g.groups
+}
+
+// Options returns the list of options in this group.
+func (g *Group) Options() []*Option {
+ return g.options
+}
+
+// Find locates the subgroup with the given short description and returns it.
+// If no such group can be found, Find will return nil. Note that the description
+// is matched case insensitively.
+func (g *Group) Find(shortDescription string) *Group {
+ lshortDescription := strings.ToLower(shortDescription)
+
+ var ret *Group
+
+ g.eachGroup(func(gg *Group) {
+ if gg != g && strings.ToLower(gg.ShortDescription) == lshortDescription {
+ ret = gg
+ }
+ })
+
+ return ret
+}
+
+func (g *Group) findOption(matcher func(*Option) bool) (option *Option) {
+ g.eachGroup(func(g *Group) {
+ for _, opt := range g.options {
+ if option == nil && matcher(opt) {
+ option = opt
+ }
+ }
+ })
+
+ return option
+}
+
+// FindOptionByLongName finds an option that is part of the group, or any of its
+// subgroups, by matching its long name (including the option namespace).
+func (g *Group) FindOptionByLongName(longName string) *Option {
+ return g.findOption(func(option *Option) bool {
+ return option.LongNameWithNamespace() == longName
+ })
+}
+
+// FindOptionByShortName finds an option that is part of the group, or any of
+// its subgroups, by matching its short name.
+func (g *Group) FindOptionByShortName(shortName rune) *Option {
+ return g.findOption(func(option *Option) bool {
+ return option.ShortName == shortName
+ })
+}
+
+func newGroup(shortDescription string, longDescription string, data interface{}) *Group {
+ return &Group{
+ ShortDescription: shortDescription,
+ LongDescription: longDescription,
+
+ data: data,
+ }
+}
+
+func (g *Group) optionByName(name string, namematch func(*Option, string) bool) *Option {
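+	// Walk all groups, keeping the best match seen so far. Higher
+	// priorities win: a custom matcher hit (4), the struct field name (3),
+	// the long name (2) and finally the short name (1).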
+ prio := 0
+ var retopt *Option
+
+ g.eachGroup(func(g *Group) {
+ for _, opt := range g.options {
+ if namematch != nil && namematch(opt, name) && prio < 4 {
+ retopt = opt
+ prio = 4
+ }
+
+ if name == opt.field.Name && prio < 3 {
+ retopt = opt
+ prio = 3
+ }
+
+ if name == opt.LongNameWithNamespace() && prio < 2 {
+ retopt = opt
+ prio = 2
+ }
+
+ if opt.ShortName != 0 && name == string(opt.ShortName) && prio < 1 {
+ retopt = opt
+ prio = 1
+ }
+ }
+ })
+
+ return retopt
+}
+
+func (g *Group) showInHelp() bool {
+ if g.Hidden {
+ return false
+ }
+ for _, opt := range g.options {
+ if opt.showInHelp() {
+ return true
+ }
+ }
+ return false
+}
+
+func (g *Group) eachGroup(f func(*Group)) {
+ f(g)
+
+ for _, gg := range g.groups {
+ gg.eachGroup(f)
+ }
+}
+
+func isStringFalsy(s string) bool {
+ return s == "" || s == "false" || s == "no" || s == "0"
+}
+
+func (g *Group) scanStruct(realval reflect.Value, sfield *reflect.StructField, handler scanHandler) error {
+ stype := realval.Type()
+
+ if sfield != nil {
+ if ok, err := handler(realval, sfield); err != nil {
+ return err
+ } else if ok {
+ return nil
+ }
+ }
+
+ for i := 0; i < stype.NumField(); i++ {
+ field := stype.Field(i)
+
+		// PkgPath is set only for non-exported fields, which we ignore.
+ if field.PkgPath != "" && !field.Anonymous {
+ continue
+ }
+
+ mtag := newMultiTag(string(field.Tag))
+
+ if err := mtag.Parse(); err != nil {
+ return err
+ }
+
+ // Skip fields with the no-flag tag
+ if mtag.Get("no-flag") != "" {
+ continue
+ }
+
+ // Dive deep into structs or pointers to structs
+ kind := field.Type.Kind()
+ fld := realval.Field(i)
+
+ if kind == reflect.Struct {
+ if err := g.scanStruct(fld, &field, handler); err != nil {
+ return err
+ }
+ } else if kind == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct {
+ flagCountBefore := len(g.options) + len(g.groups)
+
+ if fld.IsNil() {
+ fld = reflect.New(fld.Type().Elem())
+ }
+
+ if err := g.scanStruct(reflect.Indirect(fld), &field, handler); err != nil {
+ return err
+ }
+
+ if len(g.options)+len(g.groups) != flagCountBefore {
+ realval.Field(i).Set(fld)
+ }
+ }
+
+ longname := mtag.Get("long")
+ shortname := mtag.Get("short")
+
+ // Need at least either a short or long name
+ if longname == "" && shortname == "" && mtag.Get("ini-name") == "" {
+ continue
+ }
+
+ short := rune(0)
+ rc := utf8.RuneCountInString(shortname)
+
+ if rc > 1 {
+ return newErrorf(ErrShortNameTooLong,
+ "short names can only be 1 character long, not `%s'",
+ shortname)
+
+ } else if rc == 1 {
+ short, _ = utf8.DecodeRuneInString(shortname)
+ }
+
+ description := mtag.Get("description")
+ def := mtag.GetMany("default")
+
+ optionalValue := mtag.GetMany("optional-value")
+ valueName := mtag.Get("value-name")
+ defaultMask := mtag.Get("default-mask")
+
+ optional := !isStringFalsy(mtag.Get("optional"))
+ required := !isStringFalsy(mtag.Get("required"))
+ choices := mtag.GetMany("choice")
+ hidden := !isStringFalsy(mtag.Get("hidden"))
+
+ option := &Option{
+ Description: description,
+ ShortName: short,
+ LongName: longname,
+ Default: def,
+ EnvDefaultKey: mtag.Get("env"),
+ EnvDefaultDelim: mtag.Get("env-delim"),
+ OptionalArgument: optional,
+ OptionalValue: optionalValue,
+ Required: required,
+ ValueName: valueName,
+ DefaultMask: defaultMask,
+ Choices: choices,
+ Hidden: hidden,
+
+ group: g,
+
+ field: field,
+ value: realval.Field(i),
+ tag: mtag,
+ }
+
+ if option.isBool() && option.Default != nil {
+ return newErrorf(ErrInvalidTag,
+ "boolean flag `%s' may not have default values, they always default to `false' and can only be turned on",
+ option.shortAndLongName())
+ }
+
+ g.options = append(g.options, option)
+ }
+
+ return nil
+}
+
+func (g *Group) checkForDuplicateFlags() *Error {
+ shortNames := make(map[rune]*Option)
+ longNames := make(map[string]*Option)
+
+ var duplicateError *Error
+
+ g.eachGroup(func(g *Group) {
+ for _, option := range g.options {
+ if option.LongName != "" {
+ longName := option.LongNameWithNamespace()
+
+ if otherOption, ok := longNames[longName]; ok {
+ duplicateError = newErrorf(ErrDuplicatedFlag, "option `%s' uses the same long name as option `%s'", option, otherOption)
+ return
+ }
+ longNames[longName] = option
+ }
+ if option.ShortName != 0 {
+ if otherOption, ok := shortNames[option.ShortName]; ok {
+ duplicateError = newErrorf(ErrDuplicatedFlag, "option `%s' uses the same short name as option `%s'", option, otherOption)
+ return
+ }
+ shortNames[option.ShortName] = option
+ }
+ }
+ })
+
+ return duplicateError
+}
+
+func (g *Group) scanSubGroupHandler(realval reflect.Value, sfield *reflect.StructField) (bool, error) {
+ mtag := newMultiTag(string(sfield.Tag))
+
+ if err := mtag.Parse(); err != nil {
+ return true, err
+ }
+
+ subgroup := mtag.Get("group")
+
+ if len(subgroup) != 0 {
+ var ptrval reflect.Value
+
+ if realval.Kind() == reflect.Ptr {
+ ptrval = realval
+
+ if ptrval.IsNil() {
+ ptrval.Set(reflect.New(ptrval.Type()))
+ }
+ } else {
+ ptrval = realval.Addr()
+ }
+
+ description := mtag.Get("description")
+
+ group, err := g.AddGroup(subgroup, description, ptrval.Interface())
+
+ if err != nil {
+ return true, err
+ }
+
+ group.Namespace = mtag.Get("namespace")
+ group.EnvNamespace = mtag.Get("env-namespace")
+ group.Hidden = mtag.Get("hidden") != ""
+
+ return true, nil
+ }
+
+ return false, nil
+}
+
+func (g *Group) scanType(handler scanHandler) error {
+ // Get all the public fields in the data struct
+ ptrval := reflect.ValueOf(g.data)
+
+ if ptrval.Type().Kind() != reflect.Ptr {
+ panic(ErrNotPointerToStruct)
+ }
+
+ stype := ptrval.Type().Elem()
+
+ if stype.Kind() != reflect.Struct {
+ panic(ErrNotPointerToStruct)
+ }
+
+ realval := reflect.Indirect(ptrval)
+
+ if err := g.scanStruct(realval, nil, handler); err != nil {
+ return err
+ }
+
+ if err := g.checkForDuplicateFlags(); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (g *Group) scan() error {
+ return g.scanType(g.scanSubGroupHandler)
+}
+
+func (g *Group) groupByName(name string) *Group {
+ if len(name) == 0 {
+ return g
+ }
+
+ return g.Find(name)
+}
diff --git a/vendor/github.com/jessevdk/go-flags/help.go b/vendor/github.com/jessevdk/go-flags/help.go
new file mode 100644
index 000000000..068fce152
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/help.go
@@ -0,0 +1,514 @@
+// Copyright 2012 Jesse van den Kieboom. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package flags
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "io"
+ "runtime"
+ "strings"
+ "unicode/utf8"
+)
+
+type alignmentInfo struct {
+ maxLongLen int
+ hasShort bool
+ hasValueName bool
+ terminalColumns int
+ indent bool
+}
+
+const (
+ paddingBeforeOption = 2
+ distanceBetweenOptionAndDescription = 2
+)
+
+func (a *alignmentInfo) descriptionStart() int {
+ ret := a.maxLongLen + distanceBetweenOptionAndDescription
+
+ if a.hasShort {
+ ret += 2
+ }
+
+ if a.maxLongLen > 0 {
+ ret += 4
+ }
+
+ if a.hasValueName {
+ ret += 3
+ }
+
+ return ret
+}
+
+func (a *alignmentInfo) updateLen(name string, indent bool) {
+ l := utf8.RuneCountInString(name)
+
+ if indent {
+ l = l + 4
+ }
+
+ if l > a.maxLongLen {
+ a.maxLongLen = l
+ }
+}
+
+func (p *Parser) getAlignmentInfo() alignmentInfo {
+ ret := alignmentInfo{
+ maxLongLen: 0,
+ hasShort: false,
+ hasValueName: false,
+ terminalColumns: getTerminalColumns(),
+ }
+
+ if ret.terminalColumns <= 0 {
+ ret.terminalColumns = 80
+ }
+
+ var prevcmd *Command
+
+ p.eachActiveGroup(func(c *Command, grp *Group) {
+ if !grp.showInHelp() {
+ return
+ }
+ if c != prevcmd {
+ for _, arg := range c.args {
+ ret.updateLen(arg.Name, c != p.Command)
+ }
+ }
+
+ for _, info := range grp.options {
+ if !info.showInHelp() {
+ continue
+ }
+
+ if info.ShortName != 0 {
+ ret.hasShort = true
+ }
+
+ if len(info.ValueName) > 0 {
+ ret.hasValueName = true
+ }
+
+ l := info.LongNameWithNamespace() + info.ValueName
+
+ if len(info.Choices) != 0 {
+ l += "[" + strings.Join(info.Choices, "|") + "]"
+ }
+
+ ret.updateLen(l, c != p.Command)
+ }
+ })
+
+ return ret
+}
+
+func wrapText(s string, l int, prefix string) string {
+ var ret string
+
+ if l < 10 {
+ l = 10
+ }
+
+ // Basic text wrapping of s at spaces to fit in l
+ lines := strings.Split(s, "\n")
+
+ for _, line := range lines {
+ var retline string
+
+ line = strings.TrimSpace(line)
+
+ for len(line) > l {
+ // Try to split on space
+ suffix := ""
+
+ pos := strings.LastIndex(line[:l], " ")
+
+ if pos < 0 {
+ pos = l - 1
+ suffix = "-\n"
+ }
+
+ if len(retline) != 0 {
+ retline += "\n" + prefix
+ }
+
+ retline += strings.TrimSpace(line[:pos]) + suffix
+ line = strings.TrimSpace(line[pos:])
+ }
+
+ if len(line) > 0 {
+ if len(retline) != 0 {
+ retline += "\n" + prefix
+ }
+
+ retline += line
+ }
+
+ if len(ret) > 0 {
+ ret += "\n"
+
+ if len(retline) > 0 {
+ ret += prefix
+ }
+ }
+
+ ret += retline
+ }
+
+ return ret
+}
+
+func (p *Parser) writeHelpOption(writer *bufio.Writer, option *Option, info alignmentInfo) {
+ line := &bytes.Buffer{}
+
+ prefix := paddingBeforeOption
+
+ if info.indent {
+ prefix += 4
+ }
+
+ if option.Hidden {
+ return
+ }
+
+ line.WriteString(strings.Repeat(" ", prefix))
+
+ if option.ShortName != 0 {
+ line.WriteRune(defaultShortOptDelimiter)
+ line.WriteRune(option.ShortName)
+ } else if info.hasShort {
+ line.WriteString(" ")
+ }
+
+ descstart := info.descriptionStart() + paddingBeforeOption
+
+ if len(option.LongName) > 0 {
+ if option.ShortName != 0 {
+ line.WriteString(", ")
+ } else if info.hasShort {
+ line.WriteString(" ")
+ }
+
+ line.WriteString(defaultLongOptDelimiter)
+ line.WriteString(option.LongNameWithNamespace())
+ }
+
+ if option.canArgument() {
+ line.WriteRune(defaultNameArgDelimiter)
+
+ if len(option.ValueName) > 0 {
+ line.WriteString(option.ValueName)
+ }
+
+ if len(option.Choices) > 0 {
+ line.WriteString("[" + strings.Join(option.Choices, "|") + "]")
+ }
+ }
+
+ written := line.Len()
+ line.WriteTo(writer)
+
+ if option.Description != "" {
+ dw := descstart - written
+ writer.WriteString(strings.Repeat(" ", dw))
+
+ var def string
+
+ if len(option.DefaultMask) != 0 {
+ if option.DefaultMask != "-" {
+ def = option.DefaultMask
+ }
+ } else {
+ def = option.defaultLiteral
+ }
+
+ var envDef string
+ if option.EnvKeyWithNamespace() != "" {
+ var envPrintable string
+ if runtime.GOOS == "windows" {
+ envPrintable = "%" + option.EnvKeyWithNamespace() + "%"
+ } else {
+ envPrintable = "$" + option.EnvKeyWithNamespace()
+ }
+ envDef = fmt.Sprintf(" [%s]", envPrintable)
+ }
+
+ var desc string
+
+ if def != "" {
+ desc = fmt.Sprintf("%s (default: %v)%s", option.Description, def, envDef)
+ } else {
+ desc = option.Description + envDef
+ }
+
+ writer.WriteString(wrapText(desc,
+ info.terminalColumns-descstart,
+ strings.Repeat(" ", descstart)))
+ }
+
+ writer.WriteString("\n")
+}
+
+func maxCommandLength(s []*Command) int {
+ if len(s) == 0 {
+ return 0
+ }
+
+ ret := len(s[0].Name)
+
+ for _, v := range s[1:] {
+ l := len(v.Name)
+
+ if l > ret {
+ ret = l
+ }
+ }
+
+ return ret
+}
+
+// WriteHelp writes a help message containing all the possible options and
+// their descriptions to the provided writer. Note that the HelpFlag parser
+// option provides a convenient way to add a -h/--help option group to the
+// command line parser which will automatically show the help messages using
+// this method.
+func (p *Parser) WriteHelp(writer io.Writer) {
+ if writer == nil {
+ return
+ }
+
+ wr := bufio.NewWriter(writer)
+ aligninfo := p.getAlignmentInfo()
+
+ cmd := p.Command
+
+ for cmd.Active != nil {
+ cmd = cmd.Active
+ }
+
+ if p.Name != "" {
+ wr.WriteString("Usage:\n")
+ wr.WriteString(" ")
+
+ allcmd := p.Command
+
+ for allcmd != nil {
+ var usage string
+
+ if allcmd == p.Command {
+ if len(p.Usage) != 0 {
+ usage = p.Usage
+ } else if p.Options&HelpFlag != 0 {
+ usage = "[OPTIONS]"
+ }
+ } else if us, ok := allcmd.data.(Usage); ok {
+ usage = us.Usage()
+ } else if allcmd.hasHelpOptions() {
+ usage = fmt.Sprintf("[%s-OPTIONS]", allcmd.Name)
+ }
+
+ if len(usage) != 0 {
+ fmt.Fprintf(wr, " %s %s", allcmd.Name, usage)
+ } else {
+ fmt.Fprintf(wr, " %s", allcmd.Name)
+ }
+
+ if len(allcmd.args) > 0 {
+ fmt.Fprintf(wr, " ")
+ }
+
+ for i, arg := range allcmd.args {
+ if i != 0 {
+ fmt.Fprintf(wr, " ")
+ }
+
+ name := arg.Name
+
+ if arg.isRemaining() {
+ name = name + "..."
+ }
+
+ if !allcmd.ArgsRequired {
+ fmt.Fprintf(wr, "[%s]", name)
+ } else {
+ fmt.Fprintf(wr, "%s", name)
+ }
+ }
+
+ if allcmd.Active == nil && len(allcmd.commands) > 0 {
+ var co, cc string
+
+ if allcmd.SubcommandsOptional {
+ co, cc = "[", "]"
+ } else {
+ co, cc = "<", ">"
+ }
+
+ visibleCommands := allcmd.visibleCommands()
+
+ if len(visibleCommands) > 3 {
+ fmt.Fprintf(wr, " %scommand%s", co, cc)
+ } else {
+ subcommands := allcmd.sortedVisibleCommands()
+ names := make([]string, len(subcommands))
+
+ for i, subc := range subcommands {
+ names[i] = subc.Name
+ }
+
+ fmt.Fprintf(wr, " %s%s%s", co, strings.Join(names, " | "), cc)
+ }
+ }
+
+ allcmd = allcmd.Active
+ }
+
+ fmt.Fprintln(wr)
+
+ if len(cmd.LongDescription) != 0 {
+ fmt.Fprintln(wr)
+
+ t := wrapText(cmd.LongDescription,
+ aligninfo.terminalColumns,
+ "")
+
+ fmt.Fprintln(wr, t)
+ }
+ }
+
+ c := p.Command
+
+ for c != nil {
+ printcmd := c != p.Command
+
+ c.eachGroup(func(grp *Group) {
+ first := true
+
+ // Skip built-in help group for all commands except the top-level
+ // parser
+ if grp.Hidden || (grp.isBuiltinHelp && c != p.Command) {
+ return
+ }
+
+ for _, info := range grp.options {
+ if !info.showInHelp() {
+ continue
+ }
+
+ if printcmd {
+ fmt.Fprintf(wr, "\n[%s command options]\n", c.Name)
+ aligninfo.indent = true
+ printcmd = false
+ }
+
+ if first && cmd.Group != grp {
+ fmt.Fprintln(wr)
+
+ if aligninfo.indent {
+ wr.WriteString(" ")
+ }
+
+ fmt.Fprintf(wr, "%s:\n", grp.ShortDescription)
+ first = false
+ }
+
+ p.writeHelpOption(wr, info, aligninfo)
+ }
+ })
+
+ var args []*Arg
+ for _, arg := range c.args {
+ if arg.Description != "" {
+ args = append(args, arg)
+ }
+ }
+
+ if len(args) > 0 {
+ if c == p.Command {
+ fmt.Fprintf(wr, "\nArguments:\n")
+ } else {
+ fmt.Fprintf(wr, "\n[%s command arguments]\n", c.Name)
+ }
+
+ descStart := aligninfo.descriptionStart() + paddingBeforeOption
+
+ for _, arg := range args {
+ argPrefix := strings.Repeat(" ", paddingBeforeOption)
+ argPrefix += arg.Name
+
+ if len(arg.Description) > 0 {
+ argPrefix += ":"
+ wr.WriteString(argPrefix)
+
+ // Space between "arg:" and the description start
+ descPadding := strings.Repeat(" ", descStart-len(argPrefix))
+ // How much space the description gets before wrapping
+ descWidth := aligninfo.terminalColumns - 1 - descStart
+ // Whitespace to which we can indent new description lines
+ descPrefix := strings.Repeat(" ", descStart)
+
+ wr.WriteString(descPadding)
+ wr.WriteString(wrapText(arg.Description, descWidth, descPrefix))
+ } else {
+ wr.WriteString(argPrefix)
+ }
+
+ fmt.Fprintln(wr)
+ }
+ }
+
+ c = c.Active
+ }
+
+ scommands := cmd.sortedVisibleCommands()
+
+ if len(scommands) > 0 {
+ maxnamelen := maxCommandLength(scommands)
+
+ fmt.Fprintln(wr)
+ fmt.Fprintln(wr, "Available commands:")
+
+ for _, c := range scommands {
+ fmt.Fprintf(wr, " %s", c.Name)
+
+ if len(c.ShortDescription) > 0 {
+ pad := strings.Repeat(" ", maxnamelen-len(c.Name))
+ fmt.Fprintf(wr, "%s %s", pad, c.ShortDescription)
+
+ if len(c.Aliases) > 0 {
+ fmt.Fprintf(wr, " (aliases: %s)", strings.Join(c.Aliases, ", "))
+ }
+
+ }
+
+ fmt.Fprintln(wr)
+ }
+ }
+
+ wr.Flush()
+}
+
+// WroteHelp is a helper to test the error from ParseArgs() to
+// determine if the help message was written. It is safe to
+// call without first checking whether the error is nil.
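+//
+// A sketch of typical use (assuming a parser value created elsewhere):
+//
+//	if _, err := parser.ParseArgs(os.Args[1:]); err != nil {
+//		if flags.WroteHelp(err) {
+//			os.Exit(0) // help was requested and printed
+//		}
+//		log.Fatal(err)
+//	}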
+func WroteHelp(err error) bool {
+ if err == nil { // No error
+ return false
+ }
+
+ flagError, ok := err.(*Error)
+ if !ok { // Not a go-flag error
+ return false
+ }
+
+ if flagError.Type != ErrHelp { // Did not print the help message
+ return false
+ }
+
+ return true
+}
diff --git a/vendor/github.com/jessevdk/go-flags/ini.go b/vendor/github.com/jessevdk/go-flags/ini.go
new file mode 100644
index 000000000..60b36c79c
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/ini.go
@@ -0,0 +1,615 @@
+package flags
+
+import (
+ "bufio"
+ "fmt"
+ "io"
+ "os"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+)
+
+// IniError contains location information on where an error occurred.
+type IniError struct {
+ // The error message.
+ Message string
+
+ // The filename of the file in which the error occurred.
+ File string
+
+ // The line number at which the error occurred.
+ LineNumber uint
+}
+
+// Error provides a "file:line: message" formatted message of the ini error.
+func (x *IniError) Error() string {
+ return fmt.Sprintf(
+ "%s:%d: %s",
+ x.File,
+ x.LineNumber,
+ x.Message,
+ )
+}
+
+// IniOptions for writing
+type IniOptions uint
+
+const (
+ // IniNone indicates no options.
+ IniNone IniOptions = 0
+
+ // IniIncludeDefaults indicates that default values should be written.
+ IniIncludeDefaults = 1 << iota
+
+ // IniCommentDefaults indicates that if IniIncludeDefaults is used
+ // options with default values are written but commented out.
+ IniCommentDefaults
+
+ // IniIncludeComments indicates that comments containing the description
+ // of an option should be written.
+ IniIncludeComments
+
+ // IniDefault provides a default set of options.
+ IniDefault = IniIncludeComments
+)
+
+// IniParser is a utility to read and write flags options from and to ini
+// formatted strings.
+type IniParser struct {
+ ParseAsDefaults bool // override default flags
+
+ parser *Parser
+}
+
+type iniValue struct {
+ Name string
+ Value string
+ Quoted bool
+ LineNumber uint
+}
+
+type iniSection []iniValue
+
+type ini struct {
+ File string
+ Sections map[string]iniSection
+}
+
+// NewIniParser creates a new ini parser for a given Parser.
+func NewIniParser(p *Parser) *IniParser {
+ return &IniParser{
+ parser: p,
+ }
+}
+
+// IniParse is a convenience function to parse command line options with default
+// settings from an ini formatted file. The provided data is a pointer to a struct
+// representing the default option group (named "Application Options"). For
+// more control, use flags.NewParser.
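+//
+// A minimal sketch of its use (the filename and struct are illustrative):
+//
+//	var opts struct {
+//		Verbose []bool `long:"verbose"`
+//	}
+//
+//	if err := flags.IniParse("config.ini", &opts); err != nil {
+//		log.Fatal(err)
+//	}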
+func IniParse(filename string, data interface{}) error {
+ p := NewParser(data, Default)
+
+ return NewIniParser(p).ParseFile(filename)
+}
+
+// ParseFile parses flags from an ini formatted file. See Parse for more
+// information on the ini file format. The returned errors can be of the type
+// flags.Error or flags.IniError.
+func (i *IniParser) ParseFile(filename string) error {
+ ini, err := readIniFromFile(filename)
+
+ if err != nil {
+ return err
+ }
+
+ return i.parse(ini)
+}
+
+// Parse parses flags from an ini format. You can use ParseFile as a
+// convenience function to parse from a filename instead of a general
+// io.Reader.
+//
+// The format of the ini file is as follows:
+//
+// [Option group name]
+// option = value
+//
+// Each section in the ini file represents an option group or command in the
+// flags parser. The default flags parser option group (i.e. when using
+// flags.Parse) is named 'Application Options'. The ini option name is matched
+// in the following order:
+//
+// 1. Compared to the ini-name tag on the option struct field (if present)
+// 2. Compared to the struct field name
+// 3. Compared to the option long name (if present)
+// 4. Compared to the option short name (if present)
+//
+// Sections for nested groups and commands can be addressed using a dot `.'
+// namespacing notation (i.e. [subcommand.Options]). Group section names are
+// matched case insensitively.
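+//
+// For example, options of a command named 'server' in a group named
+// 'Network Options' could be set with (names illustrative):
+//
+//	[server.Network Options]
+//	port = 8080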
+//
+// The returned errors can be of the type flags.Error or flags.IniError.
+func (i *IniParser) Parse(reader io.Reader) error {
+ ini, err := readIni(reader, "")
+
+ if err != nil {
+ return err
+ }
+
+ return i.parse(ini)
+}
+
+// WriteFile writes the flags as ini format into a file. See Write
+// for more information. The returned error occurs when the specified file
+// could not be opened for writing.
+func (i *IniParser) WriteFile(filename string, options IniOptions) error {
+ return writeIniToFile(i, filename, options)
+}
+
+// Write writes the current values of all the flags to an ini format.
+// See Parse for more information on the ini file format. You typically
+// call this only after settings have been parsed since the default values of each
+// option are stored just before parsing the flags (this is only relevant when
+// IniIncludeDefaults is _not_ set in options).
+func (i *IniParser) Write(writer io.Writer, options IniOptions) {
+ writeIni(i, writer, options)
+}
+
+func readFullLine(reader *bufio.Reader) (string, error) {
+ var line []byte
+
+ for {
+ l, more, err := reader.ReadLine()
+
+ if err != nil {
+ return "", err
+ }
+
+ if line == nil && !more {
+ return string(l), nil
+ }
+
+ line = append(line, l...)
+
+ if !more {
+ break
+ }
+ }
+
+ return string(line), nil
+}
+
+func optionIniName(option *Option) string {
+ name := option.tag.Get("_read-ini-name")
+
+ if len(name) != 0 {
+ return name
+ }
+
+ name = option.tag.Get("ini-name")
+
+ if len(name) != 0 {
+ return name
+ }
+
+ return option.field.Name
+}
+
+func writeGroupIni(cmd *Command, group *Group, namespace string, writer io.Writer, options IniOptions) {
+ var sname string
+
+ if len(namespace) != 0 {
+ sname = namespace
+ }
+
+ if cmd.Group != group && len(group.ShortDescription) != 0 {
+ if len(sname) != 0 {
+ sname += "."
+ }
+
+ sname += group.ShortDescription
+ }
+
+ sectionwritten := false
+ comments := (options & IniIncludeComments) != IniNone
+
+ for _, option := range group.options {
+ if option.isFunc() || option.Hidden {
+ continue
+ }
+
+ if len(option.tag.Get("no-ini")) != 0 {
+ continue
+ }
+
+ val := option.value
+
+ if (options&IniIncludeDefaults) == IniNone && option.valueIsDefault() {
+ continue
+ }
+
+ if !sectionwritten {
+ fmt.Fprintf(writer, "[%s]\n", sname)
+ sectionwritten = true
+ }
+
+ if comments && len(option.Description) != 0 {
+ fmt.Fprintf(writer, "; %s\n", option.Description)
+ }
+
+ oname := optionIniName(option)
+
+ commentOption := (options&(IniIncludeDefaults|IniCommentDefaults)) == IniIncludeDefaults|IniCommentDefaults && option.valueIsDefault()
+
+ kind := val.Type().Kind()
+ switch kind {
+ case reflect.Slice:
+ kind = val.Type().Elem().Kind()
+
+ if val.Len() == 0 {
+ writeOption(writer, oname, kind, "", "", true, option.iniQuote)
+ } else {
+ for idx := 0; idx < val.Len(); idx++ {
+ v, _ := convertToString(val.Index(idx), option.tag)
+
+ writeOption(writer, oname, kind, "", v, commentOption, option.iniQuote)
+ }
+ }
+ case reflect.Map:
+ kind = val.Type().Elem().Kind()
+
+ if val.Len() == 0 {
+ writeOption(writer, oname, kind, "", "", true, option.iniQuote)
+ } else {
+ mkeys := val.MapKeys()
+ keys := make([]string, len(val.MapKeys()))
+ kkmap := make(map[string]reflect.Value)
+
+ for i, k := range mkeys {
+ keys[i], _ = convertToString(k, option.tag)
+ kkmap[keys[i]] = k
+ }
+
+ sort.Strings(keys)
+
+ for _, k := range keys {
+ v, _ := convertToString(val.MapIndex(kkmap[k]), option.tag)
+
+ writeOption(writer, oname, kind, k, v, commentOption, option.iniQuote)
+ }
+ }
+ default:
+ v, _ := convertToString(val, option.tag)
+
+ writeOption(writer, oname, kind, "", v, commentOption, option.iniQuote)
+ }
+
+ if comments {
+ fmt.Fprintln(writer)
+ }
+ }
+
+ if sectionwritten && !comments {
+ fmt.Fprintln(writer)
+ }
+}
+
+func writeOption(writer io.Writer, optionName string, optionType reflect.Kind, optionKey string, optionValue string, commentOption bool, forceQuote bool) {
+ if forceQuote || (optionType == reflect.String && !isPrint(optionValue)) {
+ optionValue = strconv.Quote(optionValue)
+ }
+
+ comment := ""
+ if commentOption {
+ comment = "; "
+ }
+
+ fmt.Fprintf(writer, "%s%s =", comment, optionName)
+
+ if optionKey != "" {
+ fmt.Fprintf(writer, " %s:%s", optionKey, optionValue)
+ } else if optionValue != "" {
+ fmt.Fprintf(writer, " %s", optionValue)
+ }
+
+ fmt.Fprintln(writer)
+}
+
+func writeCommandIni(command *Command, namespace string, writer io.Writer, options IniOptions) {
+ command.eachGroup(func(group *Group) {
+ if !group.Hidden {
+ writeGroupIni(command, group, namespace, writer, options)
+ }
+ })
+
+ for _, c := range command.commands {
+ var fqn string
+
+ if c.Hidden {
+ continue
+ }
+
+ if len(namespace) != 0 {
+ fqn = namespace + "." + c.Name
+ } else {
+ fqn = c.Name
+ }
+
+ writeCommandIni(c, fqn, writer, options)
+ }
+}
+
+func writeIni(parser *IniParser, writer io.Writer, options IniOptions) {
+ writeCommandIni(parser.parser.Command, "", writer, options)
+}
+
+func writeIniToFile(parser *IniParser, filename string, options IniOptions) error {
+ file, err := os.Create(filename)
+
+ if err != nil {
+ return err
+ }
+
+ defer file.Close()
+
+ writeIni(parser, file, options)
+
+ return nil
+}
+
+func readIniFromFile(filename string) (*ini, error) {
+ file, err := os.Open(filename)
+
+ if err != nil {
+ return nil, err
+ }
+
+ defer file.Close()
+
+ return readIni(file, filename)
+}
+
+func readIni(contents io.Reader, filename string) (*ini, error) {
+ ret := &ini{
+ File: filename,
+ Sections: make(map[string]iniSection),
+ }
+
+ reader := bufio.NewReader(contents)
+
+ // Empty global section
+ section := make(iniSection, 0, 10)
+ sectionname := ""
+
+ ret.Sections[sectionname] = section
+
+ var lineno uint
+
+ for {
+ line, err := readFullLine(reader)
+
+ if err == io.EOF {
+ break
+ } else if err != nil {
+ return nil, err
+ }
+
+ lineno++
+ line = strings.TrimSpace(line)
+
+		// Skip empty lines and comment lines starting with ; or #
+ if len(line) == 0 || line[0] == ';' || line[0] == '#' {
+ continue
+ }
+
+ if line[0] == '[' {
+			if line[len(line)-1] != ']' {
+ return nil, &IniError{
+ Message: "malformed section header",
+ File: filename,
+ LineNumber: lineno,
+ }
+ }
+
+ name := strings.TrimSpace(line[1 : len(line)-1])
+
+ if len(name) == 0 {
+ return nil, &IniError{
+ Message: "empty section name",
+ File: filename,
+ LineNumber: lineno,
+ }
+ }
+
+ sectionname = name
+ section = ret.Sections[name]
+
+ if section == nil {
+ section = make(iniSection, 0, 10)
+ ret.Sections[name] = section
+ }
+
+ continue
+ }
+
+ // Parse option here
+ keyval := strings.SplitN(line, "=", 2)
+
+ if len(keyval) != 2 {
+ return nil, &IniError{
+ Message: fmt.Sprintf("malformed key=value (%s)", line),
+ File: filename,
+ LineNumber: lineno,
+ }
+ }
+
+ name := strings.TrimSpace(keyval[0])
+ value := strings.TrimSpace(keyval[1])
+ quoted := false
+
+ if len(value) != 0 && value[0] == '"' {
+ if v, err := strconv.Unquote(value); err == nil {
+ value = v
+
+ quoted = true
+ } else {
+ return nil, &IniError{
+ Message: err.Error(),
+ File: filename,
+ LineNumber: lineno,
+ }
+ }
+ }
+
+ section = append(section, iniValue{
+ Name: name,
+ Value: value,
+ Quoted: quoted,
+ LineNumber: lineno,
+ })
+
+ ret.Sections[sectionname] = section
+ }
+
+ return ret, nil
+}
+
+func (i *IniParser) matchingGroups(name string) []*Group {
+ if len(name) == 0 {
+ var ret []*Group
+
+ i.parser.eachGroup(func(g *Group) {
+ ret = append(ret, g)
+ })
+
+ return ret
+ }
+
+ g := i.parser.groupByName(name)
+
+ if g != nil {
+ return []*Group{g}
+ }
+
+ return nil
+}
+
+func (i *IniParser) parse(ini *ini) error {
+ p := i.parser
+
+ p.eachOption(func(cmd *Command, group *Group, option *Option) {
+ option.clearReferenceBeforeSet = true
+ })
+
+ var quotesLookup = make(map[*Option]bool)
+
+ for name, section := range ini.Sections {
+ groups := i.matchingGroups(name)
+
+ if len(groups) == 0 {
+ if (p.Options & IgnoreUnknown) == None {
+ return newErrorf(ErrUnknownGroup, "could not find option group `%s'", name)
+ }
+
+ continue
+ }
+
+ for _, inival := range section {
+ var opt *Option
+
+ for _, group := range groups {
+ opt = group.optionByName(inival.Name, func(o *Option, n string) bool {
+ return strings.ToLower(o.tag.Get("ini-name")) == strings.ToLower(n)
+ })
+
+ if opt != nil && len(opt.tag.Get("no-ini")) != 0 {
+ opt = nil
+ }
+
+ if opt != nil {
+ break
+ }
+ }
+
+ if opt == nil {
+ if (p.Options & IgnoreUnknown) == None {
+ return &IniError{
+ Message: fmt.Sprintf("unknown option: %s", inival.Name),
+ File: ini.File,
+ LineNumber: inival.LineNumber,
+ }
+ }
+
+ continue
+ }
+
+ // ini value is ignored if parsed as default but defaults are prevented
+ if i.ParseAsDefaults && opt.preventDefault {
+ continue
+ }
+
+ pval := &inival.Value
+
+ if !opt.canArgument() && len(inival.Value) == 0 {
+ pval = nil
+ } else {
+ if opt.value.Type().Kind() == reflect.Map {
+ parts := strings.SplitN(inival.Value, ":", 2)
+
+ // only handle unquoting
+				if len(parts) == 2 && len(parts[1]) > 0 && parts[1][0] == '"' {
+ if v, err := strconv.Unquote(parts[1]); err == nil {
+ parts[1] = v
+
+ inival.Quoted = true
+ } else {
+ return &IniError{
+ Message: err.Error(),
+ File: ini.File,
+ LineNumber: inival.LineNumber,
+ }
+ }
+
+ s := parts[0] + ":" + parts[1]
+
+ pval = &s
+ }
+ }
+ }
+
+ var err error
+
+ if i.ParseAsDefaults {
+ err = opt.setDefault(pval)
+ } else {
+ err = opt.set(pval)
+ }
+
+ if err != nil {
+ return &IniError{
+ Message: err.Error(),
+ File: ini.File,
+ LineNumber: inival.LineNumber,
+ }
+ }
+
+			// Defaults from ini files take precedence over defaults from the parser
+ opt.preventDefault = true
+
+			// either all INI values are quoted or only values that need quoting
+ if _, ok := quotesLookup[opt]; !inival.Quoted || !ok {
+ quotesLookup[opt] = inival.Quoted
+ }
+
+ opt.tag.Set("_read-ini-name", inival.Name)
+ }
+ }
+
+ for opt, quoted := range quotesLookup {
+ opt.iniQuote = quoted
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/jessevdk/go-flags/man.go b/vendor/github.com/jessevdk/go-flags/man.go
new file mode 100644
index 000000000..82572f9a7
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/man.go
@@ -0,0 +1,223 @@
+package flags
+
+import (
+ "fmt"
+ "io"
+ "os"
+ "runtime"
+ "strconv"
+ "strings"
+ "time"
+)
+
+func manQuoteLines(s string) string {
+ lines := strings.Split(s, "\n")
+ parts := []string{}
+
+ for _, line := range lines {
+ parts = append(parts, manQuote(line))
+ }
+
+ return strings.Join(parts, "\n")
+}
+
+func manQuote(s string) string {
+ return strings.Replace(s, "\\", "\\\\", -1)
+}
+
+func formatForMan(wr io.Writer, s string, quoter func(s string) string) {
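+	// Spans quoted as `text' (backtick to apostrophe) are emitted in bold
+	// (\fB...\fP); all other text is passed through the supplied quoter
+	// unchanged.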
+ for {
+ idx := strings.IndexRune(s, '`')
+
+ if idx < 0 {
+ fmt.Fprintf(wr, "%s", quoter(s))
+ break
+ }
+
+ fmt.Fprintf(wr, "%s", quoter(s[:idx]))
+
+ s = s[idx+1:]
+ idx = strings.IndexRune(s, '\'')
+
+ if idx < 0 {
+ fmt.Fprintf(wr, "%s", quoter(s))
+ break
+ }
+
+ fmt.Fprintf(wr, "\\fB%s\\fP", quoter(s[:idx]))
+ s = s[idx+1:]
+ }
+}
+
+func writeManPageOptions(wr io.Writer, grp *Group) {
+ grp.eachGroup(func(group *Group) {
+ if !group.showInHelp() {
+ return
+ }
+
+ // If the parent (grp) has any subgroups, display their descriptions as
+ // subsection headers similar to the output of --help.
+ if group.ShortDescription != "" && len(grp.groups) > 0 {
+ fmt.Fprintf(wr, ".SS %s\n", group.ShortDescription)
+
+ if group.LongDescription != "" {
+ formatForMan(wr, group.LongDescription, manQuoteLines)
+ fmt.Fprintln(wr, "")
+ }
+ }
+
+ for _, opt := range group.options {
+ if !opt.showInHelp() {
+ continue
+ }
+
+ fmt.Fprintln(wr, ".TP")
+ fmt.Fprintf(wr, "\\fB")
+
+ if opt.ShortName != 0 {
+ fmt.Fprintf(wr, "\\fB\\-%c\\fR", opt.ShortName)
+ }
+
+ if len(opt.LongName) != 0 {
+ if opt.ShortName != 0 {
+ fmt.Fprintf(wr, ", ")
+ }
+
+ fmt.Fprintf(wr, "\\fB\\-\\-%s\\fR", manQuote(opt.LongNameWithNamespace()))
+ }
+
+ if len(opt.ValueName) != 0 || opt.OptionalArgument {
+ if opt.OptionalArgument {
+ fmt.Fprintf(wr, " [\\fI%s=%s\\fR]", manQuote(opt.ValueName), manQuote(strings.Join(quoteV(opt.OptionalValue), ", ")))
+ } else {
+ fmt.Fprintf(wr, " \\fI%s\\fR", manQuote(opt.ValueName))
+ }
+ }
+
+ if len(opt.Default) != 0 {
+ fmt.Fprintf(wr, " <default: \\fI%s\\fR>", manQuote(strings.Join(quoteV(opt.Default), ", ")))
+ } else if len(opt.EnvKeyWithNamespace()) != 0 {
+ if runtime.GOOS == "windows" {
+ fmt.Fprintf(wr, " <default: \\fI%%%s%%\\fR>", manQuote(opt.EnvKeyWithNamespace()))
+ } else {
+ fmt.Fprintf(wr, " <default: \\fI$%s\\fR>", manQuote(opt.EnvKeyWithNamespace()))
+ }
+ }
+
+ if opt.Required {
+ fmt.Fprintf(wr, " (\\fIrequired\\fR)")
+ }
+
+ fmt.Fprintln(wr, "\\fP")
+
+ if len(opt.Description) != 0 {
+ formatForMan(wr, opt.Description, manQuoteLines)
+ fmt.Fprintln(wr, "")
+ }
+ }
+ })
+}
+
+func writeManPageSubcommands(wr io.Writer, name string, usagePrefix string, root *Command) {
+ commands := root.sortedVisibleCommands()
+
+ for _, c := range commands {
+ var nn string
+
+ if c.Hidden {
+ continue
+ }
+
+ if len(name) != 0 {
+ nn = name + " " + c.Name
+ } else {
+ nn = c.Name
+ }
+
+ writeManPageCommand(wr, nn, usagePrefix, c)
+ }
+}
+
+func writeManPageCommand(wr io.Writer, name string, usagePrefix string, command *Command) {
+ fmt.Fprintf(wr, ".SS %s\n", name)
+ fmt.Fprintln(wr, command.ShortDescription)
+
+ if len(command.LongDescription) > 0 {
+ fmt.Fprintln(wr, "")
+
+ cmdstart := fmt.Sprintf("The %s command", manQuote(command.Name))
+
+ if strings.HasPrefix(command.LongDescription, cmdstart) {
+ fmt.Fprintf(wr, "The \\fI%s\\fP command", manQuote(command.Name))
+
+ formatForMan(wr, command.LongDescription[len(cmdstart):], manQuoteLines)
+ fmt.Fprintln(wr, "")
+ } else {
+ formatForMan(wr, command.LongDescription, manQuoteLines)
+ fmt.Fprintln(wr, "")
+ }
+ }
+
+ var pre = usagePrefix + " " + command.Name
+
+ var usage string
+ if us, ok := command.data.(Usage); ok {
+ usage = us.Usage()
+ } else if command.hasHelpOptions() {
+ usage = fmt.Sprintf("[%s-OPTIONS]", command.Name)
+ }
+
+ var nextPrefix = pre
+ if len(usage) > 0 {
+ fmt.Fprintf(wr, "\n\\fBUsage\\fP: %s %s\n.TP\n", manQuote(pre), manQuote(usage))
+ nextPrefix = pre + " " + usage
+ }
+
+ if len(command.Aliases) > 0 {
+ fmt.Fprintf(wr, "\n\\fBAliases\\fP: %s\n\n", manQuote(strings.Join(command.Aliases, ", ")))
+ }
+
+ writeManPageOptions(wr, command.Group)
+ writeManPageSubcommands(wr, name, nextPrefix, command)
+}
+
+// WriteManPage writes a basic man page in groff format to the specified
+// writer.
+func (p *Parser) WriteManPage(wr io.Writer) {
+ t := time.Now()
+	sourceDateEpoch := os.Getenv("SOURCE_DATE_EPOCH")
+	if sourceDateEpoch != "" {
+		sde, err := strconv.ParseInt(sourceDateEpoch, 10, 64)
+ if err != nil {
+ panic(fmt.Sprintf("Invalid SOURCE_DATE_EPOCH: %s", err))
+ }
+ t = time.Unix(sde, 0)
+ }
+
+ fmt.Fprintf(wr, ".TH %s 1 \"%s\"\n", manQuote(p.Name), t.Format("2 January 2006"))
+ fmt.Fprintln(wr, ".SH NAME")
+ fmt.Fprintf(wr, "%s \\- %s\n", manQuote(p.Name), manQuoteLines(p.ShortDescription))
+ fmt.Fprintln(wr, ".SH SYNOPSIS")
+
+ usage := p.Usage
+
+ if len(usage) == 0 {
+ usage = "[OPTIONS]"
+ }
+
+ fmt.Fprintf(wr, "\\fB%s\\fP %s\n", manQuote(p.Name), manQuote(usage))
+ fmt.Fprintln(wr, ".SH DESCRIPTION")
+
+ formatForMan(wr, p.LongDescription, manQuoteLines)
+ fmt.Fprintln(wr, "")
+
+ fmt.Fprintln(wr, ".SH OPTIONS")
+
+ writeManPageOptions(wr, p.Command.Group)
+
+ if len(p.visibleCommands()) > 0 {
+ fmt.Fprintln(wr, ".SH COMMANDS")
+
+ writeManPageSubcommands(wr, "", p.Name+" "+usage, p.Command)
+ }
+}
diff --git a/vendor/github.com/jessevdk/go-flags/multitag.go b/vendor/github.com/jessevdk/go-flags/multitag.go
new file mode 100644
index 000000000..96bb1a31d
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/multitag.go
@@ -0,0 +1,140 @@
+package flags
+
+import (
+ "strconv"
+)
+
+type multiTag struct {
+ value string
+ cache map[string][]string
+}
+
+func newMultiTag(v string) multiTag {
+ return multiTag{
+ value: v,
+ }
+}
+
+func (x *multiTag) scan() (map[string][]string, error) {
+ v := x.value
+
+ ret := make(map[string][]string)
+
+ // This is mostly copied from reflect.StructTag.Get
+ for v != "" {
+ i := 0
+
+ // Skip whitespace
+ for i < len(v) && v[i] == ' ' {
+ i++
+ }
+
+ v = v[i:]
+
+ if v == "" {
+ break
+ }
+
+ // Scan to colon to find key
+ i = 0
+
+ for i < len(v) && v[i] != ' ' && v[i] != ':' && v[i] != '"' {
+ i++
+ }
+
+ if i >= len(v) {
+ return nil, newErrorf(ErrTag, "expected `:' after key name, but got end of tag (in `%v`)", x.value)
+ }
+
+ if v[i] != ':' {
+ return nil, newErrorf(ErrTag, "expected `:' after key name, but got `%v' (in `%v`)", v[i], x.value)
+ }
+
+ if i+1 >= len(v) {
+ return nil, newErrorf(ErrTag, "expected `\"' to start tag value at end of tag (in `%v`)", x.value)
+ }
+
+ if v[i+1] != '"' {
+ return nil, newErrorf(ErrTag, "expected `\"' to start tag value, but got `%v' (in `%v`)", v[i+1], x.value)
+ }
+
+ name := v[:i]
+ v = v[i+1:]
+
+ // Scan quoted string to find value
+ i = 1
+
+ for i < len(v) && v[i] != '"' {
+ if v[i] == '\n' {
+ return nil, newErrorf(ErrTag, "unexpected newline in tag value `%v' (in `%v`)", name, x.value)
+ }
+
+ if v[i] == '\\' {
+ i++
+ }
+ i++
+ }
+
+ if i >= len(v) {
+ return nil, newErrorf(ErrTag, "expected end of tag value `\"' at end of tag (in `%v`)", x.value)
+ }
+
+ val, err := strconv.Unquote(v[:i+1])
+
+ if err != nil {
+ return nil, newErrorf(ErrTag, "Malformed value of tag `%v:%v` => %v (in `%v`)", name, v[:i+1], err, x.value)
+ }
+
+ v = v[i+1:]
+
+ ret[name] = append(ret[name], val)
+ }
+
+ return ret, nil
+}
+
+func (x *multiTag) Parse() error {
+ vals, err := x.scan()
+ x.cache = vals
+
+ return err
+}
+
+func (x *multiTag) cached() map[string][]string {
+ if x.cache == nil {
+ cache, _ := x.scan()
+
+ if cache == nil {
+ cache = make(map[string][]string)
+ }
+
+ x.cache = cache
+ }
+
+ return x.cache
+}
+
+func (x *multiTag) Get(key string) string {
+ c := x.cached()
+
+ if v, ok := c[key]; ok {
+ return v[len(v)-1]
+ }
+
+ return ""
+}
+
+func (x *multiTag) GetMany(key string) []string {
+ c := x.cached()
+ return c[key]
+}
+
+func (x *multiTag) Set(key string, value string) {
+ c := x.cached()
+ c[key] = []string{value}
+}
+
+func (x *multiTag) SetMany(key string, value []string) {
+ c := x.cached()
+ c[key] = value
+}
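
The scanner above mirrors reflect.StructTag.Get but, unlike the standard library, accumulates repeated keys, which is how go-flags expresses list-valued tags. A hedged in-package sketch (sketchMultiTag is a hypothetical helper, usable only inside package flags since multiTag is unexported):

package flags

func sketchMultiTag() ([]string, error) {
	t := newMultiTag(`long:"level" choice:"debug" choice:"info"`)
	if err := t.Parse(); err != nil {
		return nil, err // malformed tag: missing ':' or an unterminated quote
	}

	_ = t.Get("long") // "level"; for repeated keys, Get returns the last value

	return t.GetMany("choice"), nil // []string{"debug", "info"}
}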
diff --git a/vendor/github.com/jessevdk/go-flags/option.go b/vendor/github.com/jessevdk/go-flags/option.go
new file mode 100644
index 000000000..f6d694181
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/option.go
@@ -0,0 +1,569 @@
+package flags
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "reflect"
+ "strings"
+ "unicode/utf8"
+)
+
+// Option flag information. Contains a description of the option, short and
+// long name as well as a default value and whether an argument for this
+// flag is optional.
+type Option struct {
+ // The description of the option flag. This description is shown
+ // automatically in the built-in help.
+ Description string
+
+ // The short name of the option (a single character). If not 0, the
+ // option flag can be 'activated' using -<ShortName>. Either ShortName
+ // or LongName needs to be non-empty.
+ ShortName rune
+
+ // The long name of the option. If not "", the option flag can be
+ // activated using --<LongName>. Either ShortName or LongName needs
+ // to be non-empty.
+ LongName string
+
+ // The default value of the option.
+ Default []string
+
+ // The optional environment default value key name.
+ EnvDefaultKey string
+
+ // The optional delimiter string for EnvDefaultKey values.
+ EnvDefaultDelim string
+
+ // If true, specifies that the argument to an option flag is optional.
+ // When no argument to the flag is specified on the command line, the
+ // value of OptionalValue will be set in the field this option represents.
+ // This is only valid for non-boolean options.
+ OptionalArgument bool
+
+ // The optional value of the option. The optional value is used when
+ // the option flag is marked as having an OptionalArgument. This means
+ // that when the flag is specified, but no option argument is given,
+ // the value of the field this option represents will be set to
+ // OptionalValue. This is only valid for non-boolean options.
+ OptionalValue []string
+
+ // If true, the option _must_ be specified on the command line. If the
+ // option is not specified, the parser will generate an ErrRequired type
+ // error.
+ Required bool
+
+ // A name for the value of an option shown in the Help as --flag [ValueName]
+ ValueName string
+
+ // A mask value to show in the help instead of the default value. This
+ // is useful for hiding sensitive information in the help, such as
+ // passwords.
+ DefaultMask string
+
+ // If non-empty, only a certain set of values is allowed for an option.
+ Choices []string
+
+ // If true, the option is not displayed in the help or man page
+ Hidden bool
+
+ // The group which the option belongs to
+ group *Group
+
+ // The struct field which the option represents.
+ field reflect.StructField
+
+ // The struct field value which the option represents.
+ value reflect.Value
+
+ // Determines if the option will be always quoted in the INI output
+ iniQuote bool
+
+ tag multiTag
+ isSet bool
+ isSetDefault bool
+ preventDefault bool
+ clearReferenceBeforeSet bool
+
+ defaultLiteral string
+}
+
+// LongNameWithNamespace returns the option's long name with the group namespaces
+// prepended by walking up the option's group tree. Namespaces and the long name
+// itself are separated by the parser's namespace delimiter. If the long name is
+// empty an empty string is returned.
+func (option *Option) LongNameWithNamespace() string {
+ if len(option.LongName) == 0 {
+ return ""
+ }
+
+ // fetch the namespace delimiter from the parser which is always at the
+ // end of the group hierarchy
+ namespaceDelimiter := ""
+ g := option.group
+
+ for {
+ if p, ok := g.parent.(*Parser); ok {
+ namespaceDelimiter = p.NamespaceDelimiter
+
+ break
+ }
+
+ switch i := g.parent.(type) {
+ case *Command:
+ g = i.Group
+ case *Group:
+ g = i
+ }
+ }
+
+ // concatenate long name with namespace
+ longName := option.LongName
+ g = option.group
+
+ for g != nil {
+ if g.Namespace != "" {
+ longName = g.Namespace + namespaceDelimiter + longName
+ }
+
+ switch i := g.parent.(type) {
+ case *Command:
+ g = i.Group
+ case *Group:
+ g = i
+ case *Parser:
+ g = nil
+ }
+ }
+
+ return longName
+}
+
+// EnvKeyWithNamespace returns the option's env key with the group namespaces
+// prepended by walking up the option's group tree. Namespaces and the env key
+// itself are separated by the parser's namespace delimiter. If the env key is
+// empty an empty string is returned.
+func (option *Option) EnvKeyWithNamespace() string {
+ if len(option.EnvDefaultKey) == 0 {
+ return ""
+ }
+
+ // fetch the namespace delimiter from the parser which is always at the
+ // end of the group hierarchy
+ namespaceDelimiter := ""
+ g := option.group
+
+ for {
+ if p, ok := g.parent.(*Parser); ok {
+ namespaceDelimiter = p.EnvNamespaceDelimiter
+
+ break
+ }
+
+ switch i := g.parent.(type) {
+ case *Command:
+ g = i.Group
+ case *Group:
+ g = i
+ }
+ }
+
+ // concatenate env key with namespace
+ key := option.EnvDefaultKey
+ g = option.group
+
+ for g != nil {
+ if g.EnvNamespace != "" {
+ key = g.EnvNamespace + namespaceDelimiter + key
+ }
+
+ switch i := g.parent.(type) {
+ case *Command:
+ g = i.Group
+ case *Group:
+ g = i
+ case *Parser:
+ g = nil
+ }
+ }
+
+ return key
+}
+
+// String converts an option to a human-readable string describing the
+// option.
+func (option *Option) String() string {
+ var s string
+ var short string
+
+ if option.ShortName != 0 {
+ data := make([]byte, utf8.RuneLen(option.ShortName))
+ utf8.EncodeRune(data, option.ShortName)
+ short = string(data)
+
+ if len(option.LongName) != 0 {
+ s = fmt.Sprintf("%s%s, %s%s",
+ string(defaultShortOptDelimiter), short,
+ defaultLongOptDelimiter, option.LongNameWithNamespace())
+ } else {
+ s = fmt.Sprintf("%s%s", string(defaultShortOptDelimiter), short)
+ }
+ } else if len(option.LongName) != 0 {
+ s = fmt.Sprintf("%s%s", defaultLongOptDelimiter, option.LongNameWithNamespace())
+ }
+
+ return s
+}
+
+// Value returns the option value as an interface{}.
+func (option *Option) Value() interface{} {
+ return option.value.Interface()
+}
+
+// Field returns the reflect struct field of the option.
+func (option *Option) Field() reflect.StructField {
+ return option.field
+}
+
+// IsSet returns true if the option has been set.
+func (option *Option) IsSet() bool {
+ return option.isSet
+}
+
+// IsSetDefault returns true if the option has been set via the default option tag.
+func (option *Option) IsSetDefault() bool {
+ return option.isSetDefault
+}
+
+// Set the value of an option to the specified value. An error will be returned
+// if the specified value could not be converted to the corresponding option
+// value type.
+func (option *Option) set(value *string) error {
+ kind := option.value.Type().Kind()
+
+ if (kind == reflect.Map || kind == reflect.Slice) && option.clearReferenceBeforeSet {
+ option.empty()
+ }
+
+ option.isSet = true
+ option.preventDefault = true
+ option.clearReferenceBeforeSet = false
+
+ if len(option.Choices) != 0 {
+ found := false
+
+ for _, choice := range option.Choices {
+ if choice == *value {
+ found = true
+ break
+ }
+ }
+
+ if !found {
+ allowed := strings.Join(option.Choices[0:len(option.Choices)-1], ", ")
+
+ if len(option.Choices) > 1 {
+ allowed += " or " + option.Choices[len(option.Choices)-1]
+ }
+
+ return newErrorf(ErrInvalidChoice,
+ "Invalid value `%s' for option `%s'. Allowed values are: %s",
+ *value, option, allowed)
+ }
+ }
+
+ if option.isFunc() {
+ return option.call(value)
+ } else if value != nil {
+ return convert(*value, option.value, option.tag)
+ }
+
+ return convert("", option.value, option.tag)
+}
+
+func (option *Option) setDefault(value *string) error {
+ if option.preventDefault {
+ return nil
+ }
+
+ if err := option.set(value); err != nil {
+ return err
+ }
+
+ option.isSetDefault = true
+ option.preventDefault = false
+
+ return nil
+}
+
+func (option *Option) showInHelp() bool {
+ return !option.Hidden && (option.ShortName != 0 || len(option.LongName) != 0)
+}
+
+func (option *Option) canArgument() bool {
+ if u := option.isUnmarshaler(); u != nil {
+ return true
+ }
+
+ return !option.isBool()
+}
+
+func (option *Option) emptyValue() reflect.Value {
+ tp := option.value.Type()
+
+ if tp.Kind() == reflect.Map {
+ return reflect.MakeMap(tp)
+ }
+
+ return reflect.Zero(tp)
+}
+
+func (option *Option) empty() {
+ if !option.isFunc() {
+ option.value.Set(option.emptyValue())
+ }
+}
+
+func (option *Option) clearDefault() error {
+ if option.preventDefault {
+ return nil
+ }
+
+ usedDefault := option.Default
+
+ if envKey := option.EnvKeyWithNamespace(); envKey != "" {
+ if value, ok := os.LookupEnv(envKey); ok {
+ if option.EnvDefaultDelim != "" {
+ usedDefault = strings.Split(value, option.EnvDefaultDelim)
+ } else {
+ usedDefault = []string{value}
+ }
+ }
+ }
+
+ option.isSetDefault = true
+
+ if len(usedDefault) > 0 {
+ option.empty()
+
+ for _, d := range usedDefault {
+ err := option.setDefault(&d)
+
+ if err != nil {
+ return err
+ }
+ }
+ } else {
+ tp := option.value.Type()
+
+ switch tp.Kind() {
+ case reflect.Map:
+ if option.value.IsNil() {
+ option.empty()
+ }
+ case reflect.Slice:
+ if option.value.IsNil() {
+ option.empty()
+ }
+ }
+ }
+
+ return nil
+}
+
+func (option *Option) valueIsDefault() bool {
+ // Check if the value of the option corresponds to its
+ // default value
+ emptyval := option.emptyValue()
+
+ checkvalptr := reflect.New(emptyval.Type())
+ checkval := reflect.Indirect(checkvalptr)
+
+ checkval.Set(emptyval)
+
+ if len(option.Default) != 0 {
+ for _, v := range option.Default {
+ convert(v, checkval, option.tag)
+ }
+ }
+
+ return reflect.DeepEqual(option.value.Interface(), checkval.Interface())
+}
+
+func (option *Option) isUnmarshaler() Unmarshaler {
+ v := option.value
+
+ for {
+ if !v.CanInterface() {
+ break
+ }
+
+ i := v.Interface()
+
+ if u, ok := i.(Unmarshaler); ok {
+ return u
+ }
+
+ if !v.CanAddr() {
+ break
+ }
+
+ v = v.Addr()
+ }
+
+ return nil
+}
+
+func (option *Option) isValueValidator() ValueValidator {
+ v := option.value
+
+ for {
+ if !v.CanInterface() {
+ break
+ }
+
+ i := v.Interface()
+
+ if u, ok := i.(ValueValidator); ok {
+ return u
+ }
+
+ if !v.CanAddr() {
+ break
+ }
+
+ v = v.Addr()
+ }
+
+ return nil
+}
+
+func (option *Option) isBool() bool {
+ tp := option.value.Type()
+
+ for {
+ switch tp.Kind() {
+ case reflect.Slice, reflect.Ptr:
+ tp = tp.Elem()
+ case reflect.Bool:
+ return true
+ case reflect.Func:
+ return tp.NumIn() == 0
+ default:
+ return false
+ }
+ }
+}
+
+func (option *Option) isSignedNumber() bool {
+ tp := option.value.Type()
+
+ for {
+ switch tp.Kind() {
+ case reflect.Slice, reflect.Ptr:
+ tp = tp.Elem()
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Float32, reflect.Float64:
+ return true
+ default:
+ return false
+ }
+ }
+}
+
+func (option *Option) isFunc() bool {
+ return option.value.Type().Kind() == reflect.Func
+}
+
+func (option *Option) call(value *string) error {
+ var retval []reflect.Value
+
+ if value == nil {
+ retval = option.value.Call(nil)
+ } else {
+ tp := option.value.Type().In(0)
+
+ val := reflect.New(tp)
+ val = reflect.Indirect(val)
+
+ if err := convert(*value, val, option.tag); err != nil {
+ return err
+ }
+
+ retval = option.value.Call([]reflect.Value{val})
+ }
+
+ if len(retval) == 1 && retval[0].Type() == reflect.TypeOf((*error)(nil)).Elem() {
+ if retval[0].Interface() == nil {
+ return nil
+ }
+
+ return retval[0].Interface().(error)
+ }
+
+ return nil
+}
+
+func (option *Option) updateDefaultLiteral() {
+ defs := option.Default
+ def := ""
+
+ if len(defs) == 0 && option.canArgument() {
+ var showdef bool
+
+ switch option.field.Type.Kind() {
+ case reflect.Func, reflect.Ptr:
+ showdef = !option.value.IsNil()
+ case reflect.Slice, reflect.String, reflect.Array:
+ showdef = option.value.Len() > 0
+ case reflect.Map:
+ showdef = !option.value.IsNil() && option.value.Len() > 0
+ default:
+ zeroval := reflect.Zero(option.field.Type)
+ showdef = !reflect.DeepEqual(zeroval.Interface(), option.value.Interface())
+ }
+
+ if showdef {
+ def, _ = convertToString(option.value, option.tag)
+ }
+ } else if len(defs) != 0 {
+ l := len(defs) - 1
+
+ for i := 0; i < l; i++ {
+ def += quoteIfNeeded(defs[i]) + ", "
+ }
+
+ def += quoteIfNeeded(defs[l])
+ }
+
+ option.defaultLiteral = def
+}
+
+func (option *Option) shortAndLongName() string {
+ ret := &bytes.Buffer{}
+
+ if option.ShortName != 0 {
+ ret.WriteRune(defaultShortOptDelimiter)
+ ret.WriteRune(option.ShortName)
+ }
+
+ if len(option.LongName) != 0 {
+ if option.ShortName != 0 {
+ ret.WriteRune('/')
+ }
+
+ ret.WriteString(option.LongName)
+ }
+
+ return ret.String()
+}
+
+func (option *Option) isValidValue(arg string) error {
+ if validator := option.isValueValidator(); validator != nil {
+ return validator.IsValidValue(arg)
+ }
+ if argumentIsOption(arg) && !(option.isSignedNumber() && len(arg) > 1 && arg[0] == '-' && arg[1] >= '0' && arg[1] <= '9') {
+ return fmt.Errorf("expected argument for flag `%s', but got option `%s'", option, arg)
+ }
+ return nil
+}
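
Most of the Option fields above are populated from struct tags rather than set directly. A hedged sketch of that mapping (the field names are illustrative; long/env/default/choice/required are the standard go-flags tag names):

package main

import (
	"fmt"
	"os"

	flags "github.com/jessevdk/go-flags"
)

func main() {
	var cfg struct {
		Host  string `long:"host" env:"APP_HOST" default:"localhost" description:"Server host"`
		Level string `long:"level" choice:"debug" choice:"info" choice:"warn" default:"info" description:"Log level"`
		Token string `long:"token" required:"true" description:"API token"`
	}

	if _, err := flags.Parse(&cfg); err != nil {
		// Default options already printed the message (PrintErrors);
		// a bad --level is ErrInvalidChoice, a missing --token is
		// ErrRequired (see Option.set above and checkRequired below).
		os.Exit(1)
	}
	fmt.Printf("%+v\n", cfg)
}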
diff --git a/vendor/github.com/jessevdk/go-flags/optstyle_other.go b/vendor/github.com/jessevdk/go-flags/optstyle_other.go
new file mode 100644
index 000000000..56dfdae12
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/optstyle_other.go
@@ -0,0 +1,67 @@
+// +build !windows forceposix
+
+package flags
+
+import (
+ "strings"
+)
+
+const (
+ defaultShortOptDelimiter = '-'
+ defaultLongOptDelimiter = "--"
+ defaultNameArgDelimiter = '='
+)
+
+func argumentStartsOption(arg string) bool {
+ return len(arg) > 0 && arg[0] == '-'
+}
+
+func argumentIsOption(arg string) bool {
+ if len(arg) > 1 && arg[0] == '-' && arg[1] != '-' {
+ return true
+ }
+
+ if len(arg) > 2 && arg[0] == '-' && arg[1] == '-' && arg[2] != '-' {
+ return true
+ }
+
+ return false
+}
+
+// stripOptionPrefix returns the option without the prefix and whether
+// the option is a long option.
+func stripOptionPrefix(optname string) (prefix string, name string, islong bool) {
+ if strings.HasPrefix(optname, "--") {
+ return "--", optname[2:], true
+ } else if strings.HasPrefix(optname, "-") {
+ return "-", optname[1:], false
+ }
+
+ return "", optname, false
+}
+
+// splitOption attempts to split the passed option into a name and an argument.
+// When there is no argument specified, nil will be returned for it.
+func splitOption(prefix string, option string, islong bool) (string, string, *string) {
+ pos := strings.Index(option, "=")
+
+ if (islong && pos >= 0) || (!islong && pos == 1) {
+ rest := option[pos+1:]
+ return option[:pos], "=", &rest
+ }
+
+ return option, "", nil
+}
+
+// addHelpGroup adds a new group that contains default help parameters.
+func (c *Command) addHelpGroup(showHelp func() error) *Group {
+ var help struct {
+ ShowHelp func() error `short:"h" long:"help" description:"Show this help message"`
+ }
+
+ help.ShowHelp = showHelp
+ ret, _ := c.AddGroup("Help Options", "", &help)
+ ret.isBuiltinHelp = true
+
+ return ret
+}
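
A hedged in-package sketch of the POSIX splitting above (sketchPosixSplit is a hypothetical helper; the functions are unexported, so this only compiles inside package flags):

package flags

func sketchPosixSplit() {
	prefix, name, islong := stripOptionPrefix("--file=out.txt")
	// prefix == "--", name == "file=out.txt", islong == true

	opt, sep, arg := splitOption(prefix, name, islong)
	// opt == "file", sep == "=", *arg == "out.txt"
	_, _, _ = opt, sep, arg
}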
diff --git a/vendor/github.com/jessevdk/go-flags/optstyle_windows.go b/vendor/github.com/jessevdk/go-flags/optstyle_windows.go
new file mode 100644
index 000000000..f3f28aeef
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/optstyle_windows.go
@@ -0,0 +1,108 @@
+// +build !forceposix
+
+package flags
+
+import (
+ "strings"
+)
+
+// Windows uses a front slash for both short and long options. It also uses
+// a colon as the name/argument delimiter.
+const (
+ defaultShortOptDelimiter = '/'
+ defaultLongOptDelimiter = "/"
+ defaultNameArgDelimiter = ':'
+)
+
+func argumentStartsOption(arg string) bool {
+ return len(arg) > 0 && (arg[0] == '-' || arg[0] == '/')
+}
+
+func argumentIsOption(arg string) bool {
+ // Windows-style options allow front slash for the option
+ // delimiter.
+ if len(arg) > 1 && arg[0] == '/' {
+ return true
+ }
+
+ if len(arg) > 1 && arg[0] == '-' && arg[1] != '-' {
+ return true
+ }
+
+ if len(arg) > 2 && arg[0] == '-' && arg[1] == '-' && arg[2] != '-' {
+ return true
+ }
+
+ return false
+}
+
+// stripOptionPrefix returns the option without the prefix and whether
+// the option is a long option.
+func stripOptionPrefix(optname string) (prefix string, name string, islong bool) {
+ // Determine if the argument is a long option or not. Windows
+ // typically supports both long and short options with a single
+ // front slash as the option delimiter, so handle this situation
+ // nicely.
+ possplit := 0
+
+ if strings.HasPrefix(optname, "--") {
+ possplit = 2
+ islong = true
+ } else if strings.HasPrefix(optname, "-") {
+ possplit = 1
+ islong = false
+ } else if strings.HasPrefix(optname, "/") {
+ possplit = 1
+ islong = len(optname) > 2
+ }
+
+ return optname[:possplit], optname[possplit:], islong
+}
+
+// splitOption attempts to split the passed option into a name and an argument.
+// When there is no argument specified, nil will be returned for it.
+func splitOption(prefix string, option string, islong bool) (string, string, *string) {
+ if len(option) == 0 {
+ return option, "", nil
+ }
+
+ // Windows typically uses a colon for the option name and argument
+ // delimiter while POSIX typically uses an equals. Support both styles,
+ // but don't allow the two to be mixed. That is to say /foo:bar and
+ // --foo=bar are acceptable, but /foo=bar and --foo:bar are not.
+ var pos int
+ var sp string
+
+ if prefix == "/" {
+ sp = ":"
+ pos = strings.Index(option, sp)
+ } else if len(prefix) > 0 {
+ sp = "="
+ pos = strings.Index(option, sp)
+ }
+
+ if (islong && pos >= 0) || (!islong && pos == 1) {
+ rest := option[pos+1:]
+ return option[:pos], sp, &rest
+ }
+
+ return option, "", nil
+}
+
+// addHelpGroup adds a new group that contains default help parameters.
+func (c *Command) addHelpGroup(showHelp func() error) *Group {
+ // Windows CLI applications typically use /? for help, so make that
+ // available as well as the POSIX-style -h and --help.
+ var help struct {
+ ShowHelpWindows func() error `short:"?" description:"Show this help message"`
+ ShowHelpPosix func() error `short:"h" long:"help" description:"Show this help message"`
+ }
+
+ help.ShowHelpWindows = showHelp
+ help.ShowHelpPosix = showHelp
+
+ ret, _ := c.AddGroup("Help Options", "", &help)
+ ret.isBuiltinHelp = true
+
+ return ret
+}
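
On Windows builds the two styles coexist but may not be mixed, as the comment in splitOption notes. A minimal sketch (the struct is illustrative; ParseArgs is defined in parser.go below):

package main

import flags "github.com/jessevdk/go-flags"

func main() {
	var opts struct {
		File string `short:"f" long:"file" description:"Output file"`
	}

	// On a Windows build all three forms set opts.File to "out.txt";
	// a mixed form such as /file=out.txt would not be split.
	for _, args := range [][]string{
		{"/file:out.txt"},
		{"--file=out.txt"},
		{"-f", "out.txt"},
	} {
		if _, err := flags.ParseArgs(&opts, args); err != nil {
			panic(err)
		}
	}
}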
diff --git a/vendor/github.com/jessevdk/go-flags/parser.go b/vendor/github.com/jessevdk/go-flags/parser.go
new file mode 100644
index 000000000..3fc3f7ba1
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/parser.go
@@ -0,0 +1,714 @@
+// Copyright 2012 Jesse van den Kieboom. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package flags
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "path"
+ "reflect"
+ "sort"
+ "strings"
+ "unicode/utf8"
+)
+
+// A Parser provides command line option parsing. It can contain several
+// option groups each with their own set of options.
+type Parser struct {
+ // Embedded, see Command for more information
+ *Command
+
+ // A usage string to be displayed in the help message.
+ Usage string
+
+ // Option flags changing the behavior of the parser.
+ Options Options
+
+ // NamespaceDelimiter separates group namespaces and option long names
+ NamespaceDelimiter string
+
+ // EnvNamespaceDelimiter separates group env namespaces and env keys
+ EnvNamespaceDelimiter string
+
+ // UnknownOptionHandler is a function which gets called when the parser
+ // encounters an unknown option. The function receives the unknown option
+ // name, a SplitArgument which specifies its value if set with an argument
+ // separator, and the remaining command line arguments.
+ // It should return a new list of remaining arguments to continue parsing,
+ // or an error to indicate a parse failure.
+ UnknownOptionHandler func(option string, arg SplitArgument, args []string) ([]string, error)
+
+ // CompletionHandler is a function that gets called to handle the completion of
+ // items. By default, the items are printed and the application is exited.
+ // You can override this default behavior by specifying a custom CompletionHandler.
+ CompletionHandler func(items []Completion)
+
+ // CommandHandler is a function that gets called to handle execution of a
+ // command. By default, the command will simply be executed. This can be
+ // overridden to perform certain actions (such as applying global flags)
+ // just before the command is executed. Note that if you override the
+ // handler it is your responsibility to call the command.Execute function.
+ //
+ // The command passed into CommandHandler may be nil in case there is no
+ // command to be executed when parsing has finished.
+ CommandHandler func(command Commander, args []string) error
+
+ internalError error
+}
+
+// SplitArgument represents the argument value of an option that was passed using
+// an argument separator.
+type SplitArgument interface {
+ // String returns the option's value as a string, and a boolean indicating
+ // if the option was present.
+ Value() (string, bool)
+}
+
+type strArgument struct {
+ value *string
+}
+
+func (s strArgument) Value() (string, bool) {
+ if s.value == nil {
+ return "", false
+ }
+
+ return *s.value, true
+}
+
+// Options provides parser options that change the behavior of the option
+// parser.
+type Options uint
+
+const (
+ // None indicates no options.
+ None Options = 0
+
+ // HelpFlag adds a default Help Options group to the parser containing
+ // -h and --help options. When either -h or --help is specified on the
+ // command line, the parser will return the special error of type
+ // ErrHelp. When PrintErrors is also specified, then the help message
+ // will also be automatically printed to os.Stdout.
+ HelpFlag = 1 << iota
+
+ // PassDoubleDash passes all arguments after a double dash, --, as
+ // remaining command line arguments (i.e. they will not be parsed for
+ // flags).
+ PassDoubleDash
+
+ // IgnoreUnknown ignores any unknown options and passes them as
+ // remaining command line arguments instead of generating an error.
+ IgnoreUnknown
+
+ // PrintErrors prints any errors which occurred during parsing to
+ // os.Stderr. In the special case of ErrHelp, the message will be printed
+ // to os.Stdout.
+ PrintErrors
+
+ // PassAfterNonOption passes all arguments after the first non option
+ // as remaining command line arguments. This is equivalent to strict
+ // POSIX processing.
+ PassAfterNonOption
+
+ // Default is a convenient default set of options which should cover
+ // most of the uses of the flags package.
+ Default = HelpFlag | PrintErrors | PassDoubleDash
+)
+
+type parseState struct {
+ arg string
+ args []string
+ retargs []string
+ positional []*Arg
+ err error
+
+ command *Command
+ lookup lookup
+}
+
+// Parse is a convenience function to parse command line options with default
+// settings. The provided data is a pointer to a struct representing the
+// default option group (named "Application Options"). For more control, use
+// flags.NewParser.
+func Parse(data interface{}) ([]string, error) {
+ return NewParser(data, Default).Parse()
+}
+
+// ParseArgs is a convenience function to parse command line options with default
+// settings. The provided data is a pointer to a struct representing the
+// default option group (named "Application Options"). The args argument is
+// the list of command line arguments to parse. If you just want to parse the
+// default program command line arguments (i.e. os.Args), then use flags.Parse
+// instead. For more control, use flags.NewParser.
+func ParseArgs(data interface{}, args []string) ([]string, error) {
+ return NewParser(data, Default).ParseArgs(args)
+}
+
+// NewParser creates a new parser. It uses os.Args[0] as the application
+// name and then calls NewNamedParser (see NewNamedParser for
+// more details). The provided data is a pointer to a struct representing the
+// default option group (named "Application Options"), or nil if the default
+// group should not be added. The options parameter specifies a set of options
+// for the parser.
+func NewParser(data interface{}, options Options) *Parser {
+ p := NewNamedParser(path.Base(os.Args[0]), options)
+
+ if data != nil {
+ g, err := p.AddGroup("Application Options", "", data)
+
+ if err == nil {
+ g.parent = p
+ }
+
+ p.internalError = err
+ }
+
+ return p
+}
+
+// NewNamedParser creates a new parser. The appname is used to display the
+// executable name in the built-in help message. Option groups and commands can
+// be added to this parser by using AddGroup and AddCommand.
+func NewNamedParser(appname string, options Options) *Parser {
+ p := &Parser{
+ Command: newCommand(appname, "", "", nil),
+ Options: options,
+ NamespaceDelimiter: ".",
+ EnvNamespaceDelimiter: "_",
+ }
+
+ p.Command.parent = p
+
+ return p
+}
+
+// Parse parses the command line arguments from os.Args using Parser.ParseArgs.
+// For more detailed information see ParseArgs.
+func (p *Parser) Parse() ([]string, error) {
+ return p.ParseArgs(os.Args[1:])
+}
+
+// ParseArgs parses the command line arguments according to the option groups that
+// were added to the parser. On successful parsing of the arguments, the
+// remaining, non-option, arguments (if any) are returned. The returned error
+// indicates a parsing error and can be used with PrintError to display
+// contextual information on where the error occurred exactly.
+//
+// When the common help group has been added (AddHelp) and either -h or --help
+// was specified in the command line arguments, a help message will be
+// automatically printed if the PrintErrors option is enabled.
+// Furthermore, the special error type ErrHelp is returned.
+// It is up to the caller to exit the program if so desired.
+func (p *Parser) ParseArgs(args []string) ([]string, error) {
+ if p.internalError != nil {
+ return nil, p.internalError
+ }
+
+ p.eachOption(func(c *Command, g *Group, option *Option) {
+ option.clearReferenceBeforeSet = true
+ option.updateDefaultLiteral()
+ })
+
+ // Add built-in help group to all commands if necessary
+ if (p.Options & HelpFlag) != None {
+ p.addHelpGroups(p.showBuiltinHelp)
+ }
+
+ compval := os.Getenv("GO_FLAGS_COMPLETION")
+
+ if len(compval) != 0 {
+ comp := &completion{parser: p}
+ items := comp.complete(args)
+
+ if p.CompletionHandler != nil {
+ p.CompletionHandler(items)
+ } else {
+ comp.print(items, compval == "verbose")
+ os.Exit(0)
+ }
+
+ return nil, nil
+ }
+
+ s := &parseState{
+ args: args,
+ retargs: make([]string, 0, len(args)),
+ }
+
+ p.fillParseState(s)
+
+ for !s.eof() {
+ var err error
+ arg := s.pop()
+
+ // When PassDoubleDash is set and we encounter a --, then
+ // simply append all the rest as arguments and break out
+ if (p.Options&PassDoubleDash) != None && arg == "--" {
+ s.addArgs(s.args...)
+ break
+ }
+
+ if !argumentIsOption(arg) {
+ if (p.Options&PassAfterNonOption) != None && s.lookup.commands[arg] == nil {
+ // If PassAfterNonOption is set then all remaining arguments
+ // are considered positional
+ if err = s.addArgs(s.arg); err != nil {
+ break
+ }
+
+ if err = s.addArgs(s.args...); err != nil {
+ break
+ }
+
+ break
+ }
+
+ // Note: this also sets s.err, so we can just check for
+ // nil here and use s.err later
+ if p.parseNonOption(s) != nil {
+ break
+ }
+
+ continue
+ }
+
+ prefix, optname, islong := stripOptionPrefix(arg)
+ optname, _, argument := splitOption(prefix, optname, islong)
+
+ if islong {
+ err = p.parseLong(s, optname, argument)
+ } else {
+ err = p.parseShort(s, optname, argument)
+ }
+
+ if err != nil {
+ ignoreUnknown := (p.Options & IgnoreUnknown) != None
+ parseErr := wrapError(err)
+
+ if parseErr.Type != ErrUnknownFlag || (!ignoreUnknown && p.UnknownOptionHandler == nil) {
+ s.err = parseErr
+ break
+ }
+
+ if ignoreUnknown {
+ s.addArgs(arg)
+ } else if p.UnknownOptionHandler != nil {
+ modifiedArgs, err := p.UnknownOptionHandler(optname, strArgument{argument}, s.args)
+
+ if err != nil {
+ s.err = err
+ break
+ }
+
+ s.args = modifiedArgs
+ }
+ }
+ }
+
+ if s.err == nil {
+ p.eachOption(func(c *Command, g *Group, option *Option) {
+ err := option.clearDefault()
+ if err != nil {
+ if _, ok := err.(*Error); !ok {
+ err = p.marshalError(option, err)
+ }
+ s.err = err
+ }
+ })
+
+ s.checkRequired(p)
+ }
+
+ var reterr error
+
+ if s.err != nil {
+ reterr = s.err
+ } else if len(s.command.commands) != 0 && !s.command.SubcommandsOptional {
+ reterr = s.estimateCommand()
+ } else if cmd, ok := s.command.data.(Commander); ok {
+ if p.CommandHandler != nil {
+ reterr = p.CommandHandler(cmd, s.retargs)
+ } else {
+ reterr = cmd.Execute(s.retargs)
+ }
+ } else if p.CommandHandler != nil {
+ reterr = p.CommandHandler(nil, s.retargs)
+ }
+
+ if reterr != nil {
+ var retargs []string
+
+ if ourErr, ok := reterr.(*Error); !ok || ourErr.Type != ErrHelp {
+ retargs = append([]string{s.arg}, s.args...)
+ } else {
+ retargs = s.args
+ }
+
+ return retargs, p.printError(reterr)
+ }
+
+ return s.retargs, nil
+}
+
+func (p *parseState) eof() bool {
+ return len(p.args) == 0
+}
+
+func (p *parseState) pop() string {
+ if p.eof() {
+ return ""
+ }
+
+ p.arg = p.args[0]
+ p.args = p.args[1:]
+
+ return p.arg
+}
+
+func (p *parseState) peek() string {
+ if p.eof() {
+ return ""
+ }
+
+ return p.args[0]
+}
+
+func (p *parseState) checkRequired(parser *Parser) error {
+ c := parser.Command
+
+ var required []*Option
+
+ for c != nil {
+ c.eachGroup(func(g *Group) {
+ for _, option := range g.options {
+ if !option.isSet && option.Required {
+ required = append(required, option)
+ }
+ }
+ })
+
+ c = c.Active
+ }
+
+ if len(required) == 0 {
+ if len(p.positional) > 0 {
+ var reqnames []string
+
+ for _, arg := range p.positional {
+ argRequired := (!arg.isRemaining() && p.command.ArgsRequired) || arg.Required != -1 || arg.RequiredMaximum != -1
+
+ if !argRequired {
+ continue
+ }
+
+ if arg.isRemaining() {
+ if arg.value.Len() < arg.Required {
+ var arguments string
+
+ if arg.Required > 1 {
+ arguments = "arguments, but got only " + fmt.Sprintf("%d", arg.value.Len())
+ } else {
+ arguments = "argument"
+ }
+
+ reqnames = append(reqnames, "`"+arg.Name+" (at least "+fmt.Sprintf("%d", arg.Required)+" "+arguments+")`")
+ } else if arg.RequiredMaximum != -1 && arg.value.Len() > arg.RequiredMaximum {
+ if arg.RequiredMaximum == 0 {
+ reqnames = append(reqnames, "`"+arg.Name+" (zero arguments)`")
+ } else {
+ var arguments string
+
+ if arg.RequiredMaximum > 1 {
+ arguments = "arguments, but got " + fmt.Sprintf("%d", arg.value.Len())
+ } else {
+ arguments = "argument"
+ }
+
+ reqnames = append(reqnames, "`"+arg.Name+" (at most "+fmt.Sprintf("%d", arg.RequiredMaximum)+" "+arguments+")`")
+ }
+ }
+ } else {
+ reqnames = append(reqnames, "`"+arg.Name+"`")
+ }
+ }
+
+ if len(reqnames) == 0 {
+ return nil
+ }
+
+ var msg string
+
+ if len(reqnames) == 1 {
+ msg = fmt.Sprintf("the required argument %s was not provided", reqnames[0])
+ } else {
+ msg = fmt.Sprintf("the required arguments %s and %s were not provided",
+ strings.Join(reqnames[:len(reqnames)-1], ", "), reqnames[len(reqnames)-1])
+ }
+
+ p.err = newError(ErrRequired, msg)
+ return p.err
+ }
+
+ return nil
+ }
+
+ names := make([]string, 0, len(required))
+
+ for _, k := range required {
+ names = append(names, "`"+k.String()+"'")
+ }
+
+ sort.Strings(names)
+
+ var msg string
+
+ if len(names) == 1 {
+ msg = fmt.Sprintf("the required flag %s was not specified", names[0])
+ } else {
+ msg = fmt.Sprintf("the required flags %s and %s were not specified",
+ strings.Join(names[:len(names)-1], ", "), names[len(names)-1])
+ }
+
+ p.err = newError(ErrRequired, msg)
+ return p.err
+}
+
+func (p *parseState) estimateCommand() error {
+ commands := p.command.sortedVisibleCommands()
+ cmdnames := make([]string, len(commands))
+
+ for i, v := range commands {
+ cmdnames[i] = v.Name
+ }
+
+ var msg string
+ var errtype ErrorType
+
+ if len(p.retargs) != 0 {
+ c, l := closestChoice(p.retargs[0], cmdnames)
+ msg = fmt.Sprintf("Unknown command `%s'", p.retargs[0])
+ errtype = ErrUnknownCommand
+
+ if float32(l)/float32(len(c)) < 0.5 {
+ msg = fmt.Sprintf("%s, did you mean `%s'?", msg, c)
+ } else if len(cmdnames) == 1 {
+ msg = fmt.Sprintf("%s. You should use the %s command",
+ msg,
+ cmdnames[0])
+ } else if len(cmdnames) > 1 {
+ msg = fmt.Sprintf("%s. Please specify one command of: %s or %s",
+ msg,
+ strings.Join(cmdnames[:len(cmdnames)-1], ", "),
+ cmdnames[len(cmdnames)-1])
+ }
+ } else {
+ errtype = ErrCommandRequired
+
+ if len(cmdnames) == 1 {
+ msg = fmt.Sprintf("Please specify the %s command", cmdnames[0])
+ } else if len(cmdnames) > 1 {
+ msg = fmt.Sprintf("Please specify one command of: %s or %s",
+ strings.Join(cmdnames[:len(cmdnames)-1], ", "),
+ cmdnames[len(cmdnames)-1])
+ }
+ }
+
+ return newError(errtype, msg)
+}
+
+func (p *Parser) parseOption(s *parseState, name string, option *Option, canarg bool, argument *string) (err error) {
+ if !option.canArgument() {
+ if argument != nil {
+ return newErrorf(ErrNoArgumentForBool, "bool flag `%s' cannot have an argument", option)
+ }
+
+ err = option.set(nil)
+ } else if argument != nil || (canarg && !s.eof()) {
+ var arg string
+
+ if argument != nil {
+ arg = *argument
+ } else {
+ arg = s.pop()
+
+ if validationErr := option.isValidValue(arg); validationErr != nil {
+ return newErrorf(ErrExpectedArgument, validationErr.Error())
+ } else if p.Options&PassDoubleDash != 0 && arg == "--" {
+ return newErrorf(ErrExpectedArgument, "expected argument for flag `%s', but got double dash `--'", option)
+ }
+ }
+
+ if option.tag.Get("unquote") != "false" {
+ arg, err = unquoteIfPossible(arg)
+ }
+
+ if err == nil {
+ err = option.set(&arg)
+ }
+ } else if option.OptionalArgument {
+ option.empty()
+
+ for _, v := range option.OptionalValue {
+ err = option.set(&v)
+
+ if err != nil {
+ break
+ }
+ }
+ } else {
+ err = newErrorf(ErrExpectedArgument, "expected argument for flag `%s'", option)
+ }
+
+ if err != nil {
+ if _, ok := err.(*Error); !ok {
+ err = p.marshalError(option, err)
+ }
+ }
+
+ return err
+}
+
+func (p *Parser) marshalError(option *Option, err error) *Error {
+ s := "invalid argument for flag `%s'"
+
+ expected := p.expectedType(option)
+
+ if expected != "" {
+ s = s + " (expected " + expected + ")"
+ }
+
+ return newErrorf(ErrMarshal, s+": %s",
+ option,
+ err.Error())
+}
+
+func (p *Parser) expectedType(option *Option) string {
+ valueType := option.value.Type()
+
+ if valueType.Kind() == reflect.Func {
+ return ""
+ }
+
+ return valueType.String()
+}
+
+func (p *Parser) parseLong(s *parseState, name string, argument *string) error {
+ if option := s.lookup.longNames[name]; option != nil {
+ // Only long options that are required can consume an argument
+ // from the argument list
+ canarg := !option.OptionalArgument
+
+ return p.parseOption(s, name, option, canarg, argument)
+ }
+
+ return newErrorf(ErrUnknownFlag, "unknown flag `%s'", name)
+}
+
+func (p *Parser) splitShortConcatArg(s *parseState, optname string) (string, *string) {
+ c, n := utf8.DecodeRuneInString(optname)
+
+ if n == len(optname) {
+ return optname, nil
+ }
+
+ first := string(c)
+
+ if option := s.lookup.shortNames[first]; option != nil && option.canArgument() {
+ arg := optname[n:]
+ return first, &arg
+ }
+
+ return optname, nil
+}
+
+func (p *Parser) parseShort(s *parseState, optname string, argument *string) error {
+ if argument == nil {
+ optname, argument = p.splitShortConcatArg(s, optname)
+ }
+
+ for i, c := range optname {
+ shortname := string(c)
+
+ if option := s.lookup.shortNames[shortname]; option != nil {
+ // Only the last short argument can consume an argument from
+ // the arguments list, and only if it's non optional
+ canarg := (i+utf8.RuneLen(c) == len(optname)) && !option.OptionalArgument
+
+ if err := p.parseOption(s, shortname, option, canarg, argument); err != nil {
+ return err
+ }
+ } else {
+ return newErrorf(ErrUnknownFlag, "unknown flag `%s'", shortname)
+ }
+
+ // Only the first option can have a concatenated argument, so just
+ // clear argument here
+ argument = nil
+ }
+
+ return nil
+}
+
+func (p *parseState) addArgs(args ...string) error {
+ for len(p.positional) > 0 && len(args) > 0 {
+ arg := p.positional[0]
+
+ if err := convert(args[0], arg.value, arg.tag); err != nil {
+ p.err = err
+ return err
+ }
+
+ if !arg.isRemaining() {
+ p.positional = p.positional[1:]
+ }
+
+ args = args[1:]
+ }
+
+ p.retargs = append(p.retargs, args...)
+ return nil
+}
+
+func (p *Parser) parseNonOption(s *parseState) error {
+ if len(s.positional) > 0 {
+ return s.addArgs(s.arg)
+ }
+
+ if len(s.command.commands) > 0 && len(s.retargs) == 0 {
+ if cmd := s.lookup.commands[s.arg]; cmd != nil {
+ s.command.Active = cmd
+ cmd.fillParseState(s)
+
+ return nil
+ } else if !s.command.SubcommandsOptional {
+ s.addArgs(s.arg)
+ return newErrorf(ErrUnknownCommand, "Unknown command `%s'", s.arg)
+ }
+ }
+
+ return s.addArgs(s.arg)
+}
+
+func (p *Parser) showBuiltinHelp() error {
+ var b bytes.Buffer
+
+ p.WriteHelp(&b)
+ return newError(ErrHelp, b.String())
+}
+
+func (p *Parser) printError(err error) error {
+ if err != nil && (p.Options&PrintErrors) != None {
+ flagsErr, ok := err.(*Error)
+
+ if ok && flagsErr.Type == ErrHelp {
+ fmt.Fprintln(os.Stdout, err)
+ } else {
+ fmt.Fprintln(os.Stderr, err)
+ }
+ }
+
+ return err
+}
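
Tying the parser pieces together: Default enables HelpFlag, PrintErrors and PassDoubleDash, -h/--help surfaces as a *flags.Error with Type == ErrHelp, and UnknownOptionHandler can swallow unknown flags instead of failing the parse. A hedged usage sketch (the struct and handler are illustrative):

package main

import (
	"fmt"
	"os"

	flags "github.com/jessevdk/go-flags"
)

func main() {
	var opts struct {
		Verbose []bool `short:"v" long:"verbose" description:"Verbose output"`
	}

	p := flags.NewParser(&opts, flags.Default)

	// Collect unknown flags instead of aborting the parse.
	var unknown []string
	p.UnknownOptionHandler = func(option string, arg flags.SplitArgument, args []string) ([]string, error) {
		unknown = append(unknown, option)
		return args, nil
	}

	rest, err := p.Parse()
	if err != nil {
		if fe, ok := err.(*flags.Error); ok && fe.Type == flags.ErrHelp {
			os.Exit(0) // help text was already printed via PrintErrors
		}
		os.Exit(1) // parse error was already printed via PrintErrors
	}
	fmt.Println("positional:", rest, "unknown:", unknown)
}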
diff --git a/vendor/github.com/jessevdk/go-flags/termsize.go b/vendor/github.com/jessevdk/go-flags/termsize.go
new file mode 100644
index 000000000..829e477ad
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/termsize.go
@@ -0,0 +1,15 @@
+// +build !windows,!plan9,!appengine,!wasm
+
+package flags
+
+import (
+ "golang.org/x/sys/unix"
+)
+
+func getTerminalColumns() int {
+ ws, err := unix.IoctlGetWinsize(0, unix.TIOCGWINSZ)
+ if err != nil {
+ return 80
+ }
+ return int(ws.Col)
+}
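
The ioctl above queries the window size of fd 0 and falls back to 80 columns when stdin is not a terminal. A standalone sketch of the same probe (Unix builds only; assumes golang.org/x/sys/unix, the dependency already used above):

package main

import (
	"fmt"
	"os"

	"golang.org/x/sys/unix"
)

func main() {
	ws, err := unix.IoctlGetWinsize(int(os.Stdin.Fd()), unix.TIOCGWINSZ)
	if err != nil {
		fmt.Println("not a terminal; falling back to 80 columns")
		return
	}
	fmt.Printf("%d columns x %d rows\n", ws.Col, ws.Row)
}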
diff --git a/vendor/github.com/jessevdk/go-flags/termsize_nosysioctl.go b/vendor/github.com/jessevdk/go-flags/termsize_nosysioctl.go
new file mode 100644
index 000000000..c1ff18673
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/termsize_nosysioctl.go
@@ -0,0 +1,7 @@
+// +build plan9 appengine wasm
+
+package flags
+
+func getTerminalColumns() int {
+ return 80
+}
diff --git a/vendor/github.com/jessevdk/go-flags/termsize_windows.go b/vendor/github.com/jessevdk/go-flags/termsize_windows.go
new file mode 100644
index 000000000..5c0fa6ba2
--- /dev/null
+++ b/vendor/github.com/jessevdk/go-flags/termsize_windows.go
@@ -0,0 +1,85 @@
+// +build windows
+
+package flags
+
+import (
+ "syscall"
+ "unsafe"
+)
+
+type (
+ SHORT int16
+ WORD uint16
+
+ SMALL_RECT struct {
+ Left SHORT
+ Top SHORT
+ Right SHORT
+ Bottom SHORT
+ }
+
+ COORD struct {
+ X SHORT
+ Y SHORT
+ }
+
+ CONSOLE_SCREEN_BUFFER_INFO struct {
+ Size COORD
+ CursorPosition COORD
+ Attributes WORD
+ Window SMALL_RECT
+ MaximumWindowSize COORD
+ }
+)
+
+var kernel32DLL = syscall.NewLazyDLL("kernel32.dll")
+var getConsoleScreenBufferInfoProc = kernel32DLL.NewProc("GetConsoleScreenBufferInfo")
+
+func getError(r1, r2 uintptr, lastErr error) error {
+ // If the function fails, the return value is zero.
+ if r1 == 0 {
+ if lastErr != nil {
+ return lastErr
+ }
+ return syscall.EINVAL
+ }
+ return nil
+}
+
+func getStdHandle(stdhandle int) (uintptr, error) {
+ handle, err := syscall.GetStdHandle(stdhandle)
+ if err != nil {
+ return 0, err
+ }
+ return uintptr(handle), nil
+}
+
+// GetConsoleScreenBufferInfo retrieves information about the specified console screen buffer.
+// http://msdn.microsoft.com/en-us/library/windows/desktop/ms683171(v=vs.85).aspx
+func GetConsoleScreenBufferInfo(handle uintptr) (*CONSOLE_SCREEN_BUFFER_INFO, error) {
+ var info CONSOLE_SCREEN_BUFFER_INFO
+ if err := getError(getConsoleScreenBufferInfoProc.Call(handle, uintptr(unsafe.Pointer(&info)), 0)); err != nil {
+ return nil, err
+ }
+ return &info, nil
+}
+
+func getTerminalColumns() int {
+ defaultWidth := 80
+
+ stdoutHandle, err := getStdHandle(syscall.STD_OUTPUT_HANDLE)
+ if err != nil {
+ return defaultWidth
+ }
+
+ info, err := GetConsoleScreenBufferInfo(stdoutHandle)
+ if err != nil {
+ return defaultWidth
+ }
+
+ if info.MaximumWindowSize.X > 0 {
+ return int(info.MaximumWindowSize.X)
+ }
+
+ return defaultWidth
+}
diff --git a/vendor/github.com/josharian/intern/README.md b/vendor/github.com/josharian/intern/README.md
new file mode 100644
index 000000000..ffc44b219
--- /dev/null
+++ b/vendor/github.com/josharian/intern/README.md
@@ -0,0 +1,5 @@
+Docs: https://godoc.org/github.com/josharian/intern
+
+See also [Go issue 5160](https://golang.org/issue/5160).
+
+License: MIT
diff --git a/vendor/github.com/josharian/intern/intern.go b/vendor/github.com/josharian/intern/intern.go
new file mode 100644
index 000000000..7acb1fe90
--- /dev/null
+++ b/vendor/github.com/josharian/intern/intern.go
@@ -0,0 +1,44 @@
+// Package intern interns strings.
+// Interning is best effort only.
+// Interned strings may be removed automatically
+// at any time without notification.
+// All functions may be called concurrently
+// with themselves and each other.
+package intern
+
+import "sync"
+
+var (
+ pool sync.Pool = sync.Pool{
+ New: func() interface{} {
+ return make(map[string]string)
+ },
+ }
+)
+
+// String returns s, interned.
+func String(s string) string {
+ m := pool.Get().(map[string]string)
+ c, ok := m[s]
+ if ok {
+ pool.Put(m)
+ return c
+ }
+ m[s] = s
+ pool.Put(m)
+ return s
+}
+
+// Bytes returns b converted to a string, interned.
+func Bytes(b []byte) string {
+ m := pool.Get().(map[string]string)
+ c, ok := m[string(b)]
+ if ok {
+ pool.Put(m)
+ return c
+ }
+ s := string(b)
+ m[s] = s
+ pool.Put(m)
+ return s
+}
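
Because the pool is a sync.Pool, interning is best effort: the GC may clear it at any time, which is exactly the trade-off the package comment describes. A short usage sketch; Bytes is the common case, deduplicating the string allocation when the same []byte key recurs:

package main

import (
	"fmt"

	"github.com/josharian/intern"
)

func main() {
	keys := [][]byte{[]byte("id"), []byte("name"), []byte("id")}

	seen := make(map[string]int)
	for _, k := range keys {
		// Without interning, each string(k) conversion allocates;
		// intern.Bytes returns the pooled copy when one survives.
		seen[intern.Bytes(k)]++
	}
	fmt.Println(seen) // map[id:2 name:1]
}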
diff --git a/vendor/github.com/josharian/intern/license.md b/vendor/github.com/josharian/intern/license.md
new file mode 100644
index 000000000..353d3055f
--- /dev/null
+++ b/vendor/github.com/josharian/intern/license.md
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 Josh Bleecher Snyder
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/kr/pretty/.gitignore b/vendor/github.com/kr/pretty/.gitignore
new file mode 100644
index 000000000..b2d47811f
--- /dev/null
+++ b/vendor/github.com/kr/pretty/.gitignore
@@ -0,0 +1,5 @@
+[568].out
+_go*
+_test*
+_obj
+/.idea
diff --git a/vendor/github.com/kr/pretty/License b/vendor/github.com/kr/pretty/License
new file mode 100644
index 000000000..480a32805
--- /dev/null
+++ b/vendor/github.com/kr/pretty/License
@@ -0,0 +1,19 @@
+Copyright 2012 Keith Rarick
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/kr/pretty/Readme b/vendor/github.com/kr/pretty/Readme
new file mode 100644
index 000000000..c589fc622
--- /dev/null
+++ b/vendor/github.com/kr/pretty/Readme
@@ -0,0 +1,9 @@
+package pretty
+
+ import "github.com/kr/pretty"
+
+ Package pretty provides pretty-printing for Go values.
+
+Documentation
+
+ http://godoc.org/github.com/kr/pretty
diff --git a/vendor/github.com/kr/pretty/diff.go b/vendor/github.com/kr/pretty/diff.go
new file mode 100644
index 000000000..40a09dc64
--- /dev/null
+++ b/vendor/github.com/kr/pretty/diff.go
@@ -0,0 +1,295 @@
+package pretty
+
+import (
+ "fmt"
+ "io"
+ "reflect"
+)
+
+type sbuf []string
+
+func (p *sbuf) Printf(format string, a ...interface{}) {
+ s := fmt.Sprintf(format, a...)
+ *p = append(*p, s)
+}
+
+// Diff returns a slice where each element describes
+// a difference between a and b.
+func Diff(a, b interface{}) (desc []string) {
+ Pdiff((*sbuf)(&desc), a, b)
+ return desc
+}
+
+// wprintfer calls Fprintf on w for each Printf call
+// with a trailing newline.
+type wprintfer struct{ w io.Writer }
+
+func (p *wprintfer) Printf(format string, a ...interface{}) {
+ fmt.Fprintf(p.w, format+"\n", a...)
+}
+
+// Fdiff writes to w a description of the differences between a and b.
+func Fdiff(w io.Writer, a, b interface{}) {
+ Pdiff(&wprintfer{w}, a, b)
+}
+
+type Printfer interface {
+ Printf(format string, a ...interface{})
+}
+
+// Pdiff prints to p a description of the differences between a and b.
+// It calls Printf once for each difference, with no trailing newline.
+// The standard library log.Logger is a Printfer.
+func Pdiff(p Printfer, a, b interface{}) {
+ d := diffPrinter{
+ w: p,
+ aVisited: make(map[visit]visit),
+ bVisited: make(map[visit]visit),
+ }
+ d.diff(reflect.ValueOf(a), reflect.ValueOf(b))
+}
+
+type Logfer interface {
+ Logf(format string, a ...interface{})
+}
+
+// logprintfer calls Logf on its Logfer for each Printf call.
+type logprintfer struct{ l Logfer }
+
+func (p *logprintfer) Printf(format string, a ...interface{}) {
+ p.l.Logf(format, a...)
+}
+
+// Ldiff prints to l a description of the differences between a and b.
+// It calls Logf once for each difference, with no trailing newline.
+// The standard library testing.T and testing.B are Logfers.
+func Ldiff(l Logfer, a, b interface{}) {
+ Pdiff(&logprintfer{l}, a, b)
+}
+
+type diffPrinter struct {
+ w Printfer
+ l string // label
+
+ aVisited map[visit]visit
+ bVisited map[visit]visit
+}
+
+func (w diffPrinter) printf(f string, a ...interface{}) {
+ var l string
+ if w.l != "" {
+ l = w.l + ": "
+ }
+ w.w.Printf(l+f, a...)
+}
+
+func (w diffPrinter) diff(av, bv reflect.Value) {
+ if !av.IsValid() && bv.IsValid() {
+ w.printf("nil != %# v", formatter{v: bv, quote: true})
+ return
+ }
+ if av.IsValid() && !bv.IsValid() {
+ w.printf("%# v != nil", formatter{v: av, quote: true})
+ return
+ }
+ if !av.IsValid() && !bv.IsValid() {
+ return
+ }
+
+ at := av.Type()
+ bt := bv.Type()
+ if at != bt {
+ w.printf("%v != %v", at, bt)
+ return
+ }
+
+ if av.CanAddr() && bv.CanAddr() {
+ avis := visit{av.UnsafeAddr(), at}
+ bvis := visit{bv.UnsafeAddr(), bt}
+ var cycle bool
+
+ // Have we seen this value before?
+ if vis, ok := w.aVisited[avis]; ok {
+ cycle = true
+ if vis != bvis {
+ w.printf("%# v (previously visited) != %# v", formatter{v: av, quote: true}, formatter{v: bv, quote: true})
+ }
+ } else if _, ok := w.bVisited[bvis]; ok {
+ cycle = true
+ w.printf("%# v != %# v (previously visited)", formatter{v: av, quote: true}, formatter{v: bv, quote: true})
+ }
+ w.aVisited[avis] = bvis
+ w.bVisited[bvis] = avis
+ if cycle {
+ return
+ }
+ }
+
+ switch kind := at.Kind(); kind {
+ case reflect.Bool:
+ if a, b := av.Bool(), bv.Bool(); a != b {
+ w.printf("%v != %v", a, b)
+ }
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ if a, b := av.Int(), bv.Int(); a != b {
+ w.printf("%d != %d", a, b)
+ }
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ if a, b := av.Uint(), bv.Uint(); a != b {
+ w.printf("%d != %d", a, b)
+ }
+ case reflect.Float32, reflect.Float64:
+ if a, b := av.Float(), bv.Float(); a != b {
+ w.printf("%v != %v", a, b)
+ }
+ case reflect.Complex64, reflect.Complex128:
+ if a, b := av.Complex(), bv.Complex(); a != b {
+ w.printf("%v != %v", a, b)
+ }
+ case reflect.Array:
+ n := av.Len()
+ for i := 0; i < n; i++ {
+ w.relabel(fmt.Sprintf("[%d]", i)).diff(av.Index(i), bv.Index(i))
+ }
+ case reflect.Chan, reflect.Func, reflect.UnsafePointer:
+ if a, b := av.Pointer(), bv.Pointer(); a != b {
+ w.printf("%#x != %#x", a, b)
+ }
+ case reflect.Interface:
+ w.diff(av.Elem(), bv.Elem())
+ case reflect.Map:
+ ak, both, bk := keyDiff(av.MapKeys(), bv.MapKeys())
+ for _, k := range ak {
+ w := w.relabel(fmt.Sprintf("[%#v]", k))
+ w.printf("%q != (missing)", av.MapIndex(k))
+ }
+ for _, k := range both {
+ w := w.relabel(fmt.Sprintf("[%#v]", k))
+ w.diff(av.MapIndex(k), bv.MapIndex(k))
+ }
+ for _, k := range bk {
+ w := w.relabel(fmt.Sprintf("[%#v]", k))
+ w.printf("(missing) != %q", bv.MapIndex(k))
+ }
+ case reflect.Ptr:
+ switch {
+ case av.IsNil() && !bv.IsNil():
+ w.printf("nil != %# v", formatter{v: bv, quote: true})
+ case !av.IsNil() && bv.IsNil():
+ w.printf("%# v != nil", formatter{v: av, quote: true})
+ case !av.IsNil() && !bv.IsNil():
+ w.diff(av.Elem(), bv.Elem())
+ }
+ case reflect.Slice:
+ lenA := av.Len()
+ lenB := bv.Len()
+ if lenA != lenB {
+ w.printf("%s[%d] != %s[%d]", av.Type(), lenA, bv.Type(), lenB)
+ break
+ }
+ for i := 0; i < lenA; i++ {
+ w.relabel(fmt.Sprintf("[%d]", i)).diff(av.Index(i), bv.Index(i))
+ }
+ case reflect.String:
+ if a, b := av.String(), bv.String(); a != b {
+ w.printf("%q != %q", a, b)
+ }
+ case reflect.Struct:
+ for i := 0; i < av.NumField(); i++ {
+ w.relabel(at.Field(i).Name).diff(av.Field(i), bv.Field(i))
+ }
+ default:
+ panic("unknown reflect Kind: " + kind.String())
+ }
+}
+
+func (d diffPrinter) relabel(name string) (d1 diffPrinter) {
+ d1 = d
+ if d.l != "" && name[0] != '[' {
+ d1.l += "."
+ }
+ d1.l += name
+ return d1
+}
+
+// keyEqual compares a and b for equality.
+// Both a and b must be valid map keys.
+func keyEqual(av, bv reflect.Value) bool {
+ if !av.IsValid() && !bv.IsValid() {
+ return true
+ }
+ if !av.IsValid() || !bv.IsValid() || av.Type() != bv.Type() {
+ return false
+ }
+ switch kind := av.Kind(); kind {
+ case reflect.Bool:
+ a, b := av.Bool(), bv.Bool()
+ return a == b
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ a, b := av.Int(), bv.Int()
+ return a == b
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ a, b := av.Uint(), bv.Uint()
+ return a == b
+ case reflect.Float32, reflect.Float64:
+ a, b := av.Float(), bv.Float()
+ return a == b
+ case reflect.Complex64, reflect.Complex128:
+ a, b := av.Complex(), bv.Complex()
+ return a == b
+ case reflect.Array:
+ for i := 0; i < av.Len(); i++ {
+ if !keyEqual(av.Index(i), bv.Index(i)) {
+ return false
+ }
+ }
+ return true
+ case reflect.Chan, reflect.UnsafePointer, reflect.Ptr:
+ a, b := av.Pointer(), bv.Pointer()
+ return a == b
+ case reflect.Interface:
+ return keyEqual(av.Elem(), bv.Elem())
+ case reflect.String:
+ a, b := av.String(), bv.String()
+ return a == b
+ case reflect.Struct:
+ for i := 0; i < av.NumField(); i++ {
+ if !keyEqual(av.Field(i), bv.Field(i)) {
+ return false
+ }
+ }
+ return true
+ default:
+ panic("invalid map key type " + av.Type().String())
+ }
+}
+
+func keyDiff(a, b []reflect.Value) (ak, both, bk []reflect.Value) {
+ for _, av := range a {
+ inBoth := false
+ for _, bv := range b {
+ if keyEqual(av, bv) {
+ inBoth = true
+ both = append(both, av)
+ break
+ }
+ }
+ if !inBoth {
+ ak = append(ak, av)
+ }
+ }
+ for _, bv := range b {
+ inBoth := false
+ for _, av := range a {
+ if keyEqual(av, bv) {
+ inBoth = true
+ break
+ }
+ }
+ if !inBoth {
+ bk = append(bk, bv)
+ }
+ }
+ return
+}
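
Diff returns one label-prefixed string per differing leaf, and Ldiff adapts the same walk to testing.T or testing.B via the Logfer interface. A short sketch:

package main

import (
	"fmt"

	"github.com/kr/pretty"
)

type user struct {
	Name string
	Age  int
}

func main() {
	a := user{Name: "ann", Age: 30}
	b := user{Name: "ann", Age: 31}

	// One entry per differing field, labeled with its path.
	for _, d := range pretty.Diff(a, b) {
		fmt.Println(d) // "Age: 30 != 31"
	}
	// In a test: pretty.Ldiff(t, got, want) logs each difference.
}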
diff --git a/vendor/github.com/kr/pretty/formatter.go b/vendor/github.com/kr/pretty/formatter.go
new file mode 100644
index 000000000..8e6969c59
--- /dev/null
+++ b/vendor/github.com/kr/pretty/formatter.go
@@ -0,0 +1,355 @@
+package pretty
+
+import (
+ "fmt"
+ "io"
+ "reflect"
+ "strconv"
+ "text/tabwriter"
+
+ "github.com/kr/text"
+ "github.com/rogpeppe/go-internal/fmtsort"
+)
+
+type formatter struct {
+ v reflect.Value
+ force bool
+ quote bool
+}
+
+// Formatter makes a wrapper, f, that will format x as go source with line
+// breaks and tabs. Object f responds to the "%v" formatting verb when both the
+// "#" and " " (space) flags are set, for example:
+//
+// fmt.Sprintf("%# v", Formatter(x))
+//
+// If one of these two flags is not set, or any other verb is used, f will
+// format x according to the usual rules of package fmt.
+// In particular, if x satisfies fmt.Formatter, then x.Format will be called.
+func Formatter(x interface{}) (f fmt.Formatter) {
+ return formatter{v: reflect.ValueOf(x), quote: true}
+}
+
+func (fo formatter) String() string {
+ return fmt.Sprint(fo.v.Interface()) // unwrap it
+}
+
+func (fo formatter) passThrough(f fmt.State, c rune) {
+ s := "%"
+ for i := 0; i < 128; i++ {
+ if f.Flag(i) {
+ s += string(rune(i))
+ }
+ }
+ if w, ok := f.Width(); ok {
+ s += fmt.Sprintf("%d", w)
+ }
+ if p, ok := f.Precision(); ok {
+ s += fmt.Sprintf(".%d", p)
+ }
+ s += string(c)
+ fmt.Fprintf(f, s, fo.v.Interface())
+}
+
+func (fo formatter) Format(f fmt.State, c rune) {
+ if fo.force || c == 'v' && f.Flag('#') && f.Flag(' ') {
+ w := tabwriter.NewWriter(f, 4, 4, 1, ' ', 0)
+ p := &printer{tw: w, Writer: w, visited: make(map[visit]int)}
+ p.printValue(fo.v, true, fo.quote)
+ w.Flush()
+ return
+ }
+ fo.passThrough(f, c)
+}
+
+type printer struct {
+ io.Writer
+ tw *tabwriter.Writer
+ visited map[visit]int
+ depth int
+}
+
+func (p *printer) indent() *printer {
+ q := *p
+ q.tw = tabwriter.NewWriter(p.Writer, 4, 4, 1, ' ', 0)
+ q.Writer = text.NewIndentWriter(q.tw, []byte{'\t'})
+ return &q
+}
+
+func (p *printer) printInline(v reflect.Value, x interface{}, showType bool) {
+ if showType {
+ io.WriteString(p, v.Type().String())
+ fmt.Fprintf(p, "(%#v)", x)
+ } else {
+ fmt.Fprintf(p, "%#v", x)
+ }
+}
+
+// printValue must keep track of already-printed pointer values to avoid
+// infinite recursion.
+type visit struct {
+ v uintptr
+ typ reflect.Type
+}
+
+func (p *printer) catchPanic(v reflect.Value, method string) {
+ if r := recover(); r != nil {
+ if v.Kind() == reflect.Ptr && v.IsNil() {
+ writeByte(p, '(')
+ io.WriteString(p, v.Type().String())
+ io.WriteString(p, ")(nil)")
+ return
+ }
+ writeByte(p, '(')
+ io.WriteString(p, v.Type().String())
+ io.WriteString(p, ")(PANIC=calling method ")
+ io.WriteString(p, strconv.Quote(method))
+ io.WriteString(p, ": ")
+ fmt.Fprint(p, r)
+ writeByte(p, ')')
+ }
+}
+
+func (p *printer) printValue(v reflect.Value, showType, quote bool) {
+ if p.depth > 10 {
+ io.WriteString(p, "!%v(DEPTH EXCEEDED)")
+ return
+ }
+
+ if v.IsValid() && v.CanInterface() {
+ i := v.Interface()
+ if goStringer, ok := i.(fmt.GoStringer); ok {
+ defer p.catchPanic(v, "GoString")
+ io.WriteString(p, goStringer.GoString())
+ return
+ }
+ }
+
+ switch v.Kind() {
+ case reflect.Bool:
+ p.printInline(v, v.Bool(), showType)
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ p.printInline(v, v.Int(), showType)
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ p.printInline(v, v.Uint(), showType)
+ case reflect.Float32, reflect.Float64:
+ p.printInline(v, v.Float(), showType)
+ case reflect.Complex64, reflect.Complex128:
+ fmt.Fprintf(p, "%#v", v.Complex())
+ case reflect.String:
+ p.fmtString(v.String(), quote)
+ case reflect.Map:
+ t := v.Type()
+ if showType {
+ io.WriteString(p, t.String())
+ }
+ writeByte(p, '{')
+ if nonzero(v) {
+ expand := !canInline(v.Type())
+ pp := p
+ if expand {
+ writeByte(p, '\n')
+ pp = p.indent()
+ }
+ sm := fmtsort.Sort(v)
+ for i := 0; i < v.Len(); i++ {
+ k := sm.Key[i]
+ mv := sm.Value[i]
+ pp.printValue(k, false, true)
+ writeByte(pp, ':')
+ if expand {
+ writeByte(pp, '\t')
+ }
+ showTypeInStruct := t.Elem().Kind() == reflect.Interface
+ pp.printValue(mv, showTypeInStruct, true)
+ if expand {
+ io.WriteString(pp, ",\n")
+ } else if i < v.Len()-1 {
+ io.WriteString(pp, ", ")
+ }
+ }
+ if expand {
+ pp.tw.Flush()
+ }
+ }
+ writeByte(p, '}')
+ case reflect.Struct:
+ t := v.Type()
+ if v.CanAddr() {
+ addr := v.UnsafeAddr()
+ vis := visit{addr, t}
+ if vd, ok := p.visited[vis]; ok && vd < p.depth {
+ p.fmtString(t.String()+"{(CYCLIC REFERENCE)}", false)
+ break // don't print v again
+ }
+ p.visited[vis] = p.depth
+ }
+
+ if showType {
+ io.WriteString(p, t.String())
+ }
+ writeByte(p, '{')
+ if nonzero(v) {
+ expand := !canInline(v.Type())
+ pp := p
+ if expand {
+ writeByte(p, '\n')
+ pp = p.indent()
+ }
+ for i := 0; i < v.NumField(); i++ {
+ showTypeInStruct := true
+ if f := t.Field(i); f.Name != "" {
+ io.WriteString(pp, f.Name)
+ writeByte(pp, ':')
+ if expand {
+ writeByte(pp, '\t')
+ }
+ showTypeInStruct = labelType(f.Type)
+ }
+ pp.printValue(getField(v, i), showTypeInStruct, true)
+ if expand {
+ io.WriteString(pp, ",\n")
+ } else if i < v.NumField()-1 {
+ io.WriteString(pp, ", ")
+ }
+ }
+ if expand {
+ pp.tw.Flush()
+ }
+ }
+ writeByte(p, '}')
+ case reflect.Interface:
+ switch e := v.Elem(); {
+ case e.Kind() == reflect.Invalid:
+ io.WriteString(p, "nil")
+ case e.IsValid():
+ pp := *p
+ pp.depth++
+ pp.printValue(e, showType, true)
+ default:
+ io.WriteString(p, v.Type().String())
+ io.WriteString(p, "(nil)")
+ }
+ case reflect.Array, reflect.Slice:
+ t := v.Type()
+ if showType {
+ io.WriteString(p, t.String())
+ }
+ if v.Kind() == reflect.Slice && v.IsNil() && showType {
+ io.WriteString(p, "(nil)")
+ break
+ }
+ if v.Kind() == reflect.Slice && v.IsNil() {
+ io.WriteString(p, "nil")
+ break
+ }
+ writeByte(p, '{')
+ expand := !canInline(v.Type())
+ pp := p
+ if expand {
+ writeByte(p, '\n')
+ pp = p.indent()
+ }
+ for i := 0; i < v.Len(); i++ {
+ showTypeInSlice := t.Elem().Kind() == reflect.Interface
+ pp.printValue(v.Index(i), showTypeInSlice, true)
+ if expand {
+ io.WriteString(pp, ",\n")
+ } else if i < v.Len()-1 {
+ io.WriteString(pp, ", ")
+ }
+ }
+ if expand {
+ pp.tw.Flush()
+ }
+ writeByte(p, '}')
+ case reflect.Ptr:
+ e := v.Elem()
+ if !e.IsValid() {
+ writeByte(p, '(')
+ io.WriteString(p, v.Type().String())
+ io.WriteString(p, ")(nil)")
+ } else {
+ pp := *p
+ pp.depth++
+ writeByte(pp, '&')
+ pp.printValue(e, true, true)
+ }
+ case reflect.Chan:
+ x := v.Pointer()
+ if showType {
+ writeByte(p, '(')
+ io.WriteString(p, v.Type().String())
+ fmt.Fprintf(p, ")(%#v)", x)
+ } else {
+ fmt.Fprintf(p, "%#v", x)
+ }
+ case reflect.Func:
+ io.WriteString(p, v.Type().String())
+ io.WriteString(p, " {...}")
+ case reflect.UnsafePointer:
+ p.printInline(v, v.Pointer(), showType)
+ case reflect.Invalid:
+ io.WriteString(p, "nil")
+ }
+}
+
+func canInline(t reflect.Type) bool {
+ switch t.Kind() {
+ case reflect.Map:
+ return !canExpand(t.Elem())
+ case reflect.Struct:
+ for i := 0; i < t.NumField(); i++ {
+ if canExpand(t.Field(i).Type) {
+ return false
+ }
+ }
+ return true
+ case reflect.Interface:
+ return false
+ case reflect.Array, reflect.Slice:
+ return !canExpand(t.Elem())
+ case reflect.Ptr:
+ return false
+ case reflect.Chan, reflect.Func, reflect.UnsafePointer:
+ return false
+ }
+ return true
+}
+
+func canExpand(t reflect.Type) bool {
+ switch t.Kind() {
+ case reflect.Map, reflect.Struct,
+ reflect.Interface, reflect.Array, reflect.Slice,
+ reflect.Ptr:
+ return true
+ }
+ return false
+}
+
+func labelType(t reflect.Type) bool {
+ switch t.Kind() {
+ case reflect.Interface, reflect.Struct:
+ return true
+ }
+ return false
+}
+
+func (p *printer) fmtString(s string, quote bool) {
+ if quote {
+ s = strconv.Quote(s)
+ }
+ io.WriteString(p, s)
+}
+
+func writeByte(w io.Writer, b byte) {
+ w.Write([]byte{b})
+}
+
+func getField(v reflect.Value, i int) reflect.Value {
+ val := v.Field(i)
+ if val.Kind() == reflect.Interface && !val.IsNil() {
+ val = val.Elem()
+ }
+ return val
+}
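
As the Formatter doc comment above says, the wrapper only pretty-prints under the combined "%# v" verb and otherwise defers to package fmt. A minimal usage sketch (the Config type is illustrative):

    package main

    import (
        "fmt"

        "github.com/kr/pretty"
    )

    type Config struct {
        Name  string
        Ports []int
    }

    func main() {
        c := Config{Name: "api", Ports: []int{80, 443}}

        // '#' and ' ' together trigger the pretty printer: multi-line,
        // tab-aligned, Go-source-style output.
        fmt.Printf("%# v\n", pretty.Formatter(c))

        // Any other verb/flag combination passes through to fmt.
        fmt.Printf("%v\n", pretty.Formatter(c))
    }
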
diff --git a/vendor/github.com/kr/pretty/pretty.go b/vendor/github.com/kr/pretty/pretty.go
new file mode 100644
index 000000000..b4ca583c0
--- /dev/null
+++ b/vendor/github.com/kr/pretty/pretty.go
@@ -0,0 +1,108 @@
+// Package pretty provides pretty-printing for Go values. This is
+// useful during debugging, to avoid wrapping long output lines in
+// the terminal.
+//
+// It provides a function, Formatter, that can be used with any
+// function that accepts a format string. It also provides
+// convenience wrappers for functions in packages fmt and log.
+package pretty
+
+import (
+ "fmt"
+ "io"
+ "log"
+ "reflect"
+)
+
+// Errorf is a convenience wrapper for fmt.Errorf.
+//
+// Calling Errorf(f, x, y) is equivalent to
+// fmt.Errorf(f, Formatter(x), Formatter(y)).
+func Errorf(format string, a ...interface{}) error {
+ return fmt.Errorf(format, wrap(a, false)...)
+}
+
+// Fprintf is a convenience wrapper for fmt.Fprintf.
+//
+// Calling Fprintf(w, f, x, y) is equivalent to
+// fmt.Fprintf(w, f, Formatter(x), Formatter(y)).
+func Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) {
+ return fmt.Fprintf(w, format, wrap(a, false)...)
+}
+
+// Log is a convenience wrapper for log.Print.
+//
+// Calling Log(x, y) is equivalent to
+// log.Print(Formatter(x), Formatter(y)), but each operand is
+// formatted with "%# v".
+func Log(a ...interface{}) {
+ log.Print(wrap(a, true)...)
+}
+
+// Logf is a convenience wrapper for log.Printf.
+//
+// Calling Logf(f, x, y) is equivalent to
+// log.Printf(f, Formatter(x), Formatter(y)).
+func Logf(format string, a ...interface{}) {
+ log.Printf(format, wrap(a, false)...)
+}
+
+// Logln is a convenience wrapper for log.Println.
+//
+// Calling Logln(x, y) is equivalent to
+// log.Println(Formatter(x), Formatter(y)), but each operand is
+// formatted with "%# v".
+func Logln(a ...interface{}) {
+ log.Println(wrap(a, true)...)
+}
+
+// Print pretty-prints its operands and writes to standard output.
+//
+// Calling Print(x, y) is equivalent to
+// fmt.Print(Formatter(x), Formatter(y)), but each operand is
+// formatted with "%# v".
+func Print(a ...interface{}) (n int, err error) {
+ return fmt.Print(wrap(a, true)...)
+}
+
+// Printf is a convenience wrapper for fmt.Printf.
+//
+// Calling Printf(f, x, y) is equivalent to
+// fmt.Printf(f, Formatter(x), Formatter(y)).
+func Printf(format string, a ...interface{}) (n int, err error) {
+ return fmt.Printf(format, wrap(a, false)...)
+}
+
+// Println pretty-prints its operands and writes to standard output.
+//
+// Calling Println(x, y) is equivalent to
+// fmt.Println(Formatter(x), Formatter(y)), but each operand is
+// formatted with "%# v".
+func Println(a ...interface{}) (n int, err error) {
+ return fmt.Println(wrap(a, true)...)
+}
+
+// Sprint is a convenience wrapper for fmt.Sprint.
+//
+// Calling Sprint(x, y) is equivalent to
+// fmt.Sprint(Formatter(x), Formatter(y)), but each operand is
+// formatted with "%# v".
+func Sprint(a ...interface{}) string {
+ return fmt.Sprint(wrap(a, true)...)
+}
+
+// Sprintf is a convenience wrapper for fmt.Sprintf.
+//
+// Calling Sprintf(f, x, y) is equivalent to
+// fmt.Sprintf(f, Formatter(x), Formatter(y)).
+func Sprintf(format string, a ...interface{}) string {
+ return fmt.Sprintf(format, wrap(a, false)...)
+}
+
+func wrap(a []interface{}, force bool) []interface{} {
+ w := make([]interface{}, len(a))
+ for i, x := range a {
+ w[i] = formatter{v: reflect.ValueOf(x), force: force}
+ }
+ return w
+}
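
Note the force flag threaded through wrap: the argument-style wrappers (Print, Println, Sprint, Log, Logln) set it, so their operands are pretty-printed no matter which verb package fmt picks, while the format-string wrappers (Printf, Sprintf, Fprintf, Logf, Errorf) leave the verb choice to the caller. For example:

    package main

    import (
        "fmt"

        "github.com/kr/pretty"
    )

    func main() {
        v := map[string][]int{"a": {1, 2}, "b": {3}}

        // Pretty-printed unconditionally (force flag set).
        fmt.Println(pretty.Sprint(v))

        // Pretty-printed only where the caller writes "%# v".
        fmt.Println(pretty.Sprintf("value: %# v", v))
    }
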
diff --git a/vendor/github.com/kr/pretty/zero.go b/vendor/github.com/kr/pretty/zero.go
new file mode 100644
index 000000000..abb5b6fc1
--- /dev/null
+++ b/vendor/github.com/kr/pretty/zero.go
@@ -0,0 +1,41 @@
+package pretty
+
+import (
+ "reflect"
+)
+
+func nonzero(v reflect.Value) bool {
+ switch v.Kind() {
+ case reflect.Bool:
+ return v.Bool()
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return v.Int() != 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return v.Uint() != 0
+ case reflect.Float32, reflect.Float64:
+ return v.Float() != 0
+ case reflect.Complex64, reflect.Complex128:
+ return v.Complex() != complex(0, 0)
+ case reflect.String:
+ return v.String() != ""
+ case reflect.Struct:
+ for i := 0; i < v.NumField(); i++ {
+ if nonzero(getField(v, i)) {
+ return true
+ }
+ }
+ return false
+ case reflect.Array:
+ for i := 0; i < v.Len(); i++ {
+ if nonzero(v.Index(i)) {
+ return true
+ }
+ }
+ return false
+ case reflect.Map, reflect.Interface, reflect.Slice, reflect.Ptr, reflect.Chan, reflect.Func:
+ return !v.IsNil()
+ case reflect.UnsafePointer:
+ return v.Pointer() != 0
+ }
+ return true
+}
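
nonzero is what lets the printer collapse zero-valued composites: a struct or map whose contents are all zero values prints with an empty body. A small illustration (the output comment follows from the rules above; the type T is illustrative):

    package main

    import (
        "fmt"

        "github.com/kr/pretty"
    )

    type T struct {
        A int
        B []string
    }

    func main() {
        fmt.Println(pretty.Sprint(T{}))     // main.T{}
        fmt.Println(pretty.Sprint(T{A: 1})) // fields are printed once any is nonzero
    }
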
diff --git a/vendor/github.com/kr/text/License b/vendor/github.com/kr/text/License
new file mode 100644
index 000000000..480a32805
--- /dev/null
+++ b/vendor/github.com/kr/text/License
@@ -0,0 +1,19 @@
+Copyright 2012 Keith Rarick
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/kr/text/Readme b/vendor/github.com/kr/text/Readme
new file mode 100644
index 000000000..7e6e7c068
--- /dev/null
+++ b/vendor/github.com/kr/text/Readme
@@ -0,0 +1,3 @@
+This is a Go package for manipulating paragraphs of text.
+
+See http://go.pkgdoc.org/github.com/kr/text for full documentation.
diff --git a/vendor/github.com/kr/text/doc.go b/vendor/github.com/kr/text/doc.go
new file mode 100644
index 000000000..cf4c198f9
--- /dev/null
+++ b/vendor/github.com/kr/text/doc.go
@@ -0,0 +1,3 @@
+// Package text provides rudimentary functions for manipulating text in
+// paragraphs.
+package text
diff --git a/vendor/github.com/kr/text/indent.go b/vendor/github.com/kr/text/indent.go
new file mode 100644
index 000000000..4ebac45c0
--- /dev/null
+++ b/vendor/github.com/kr/text/indent.go
@@ -0,0 +1,74 @@
+package text
+
+import (
+ "io"
+)
+
+// Indent inserts prefix at the beginning of each non-empty line of s. The
+// end-of-line marker is NL.
+func Indent(s, prefix string) string {
+ return string(IndentBytes([]byte(s), []byte(prefix)))
+}
+
+// IndentBytes inserts prefix at the beginning of each non-empty line of b.
+// The end-of-line marker is NL.
+func IndentBytes(b, prefix []byte) []byte {
+ var res []byte
+ bol := true
+ for _, c := range b {
+ if bol && c != '\n' {
+ res = append(res, prefix...)
+ }
+ res = append(res, c)
+ bol = c == '\n'
+ }
+ return res
+}
+
+// indentWriter indents each line of its input.
+type indentWriter struct {
+ w io.Writer
+ bol bool
+ pre [][]byte
+ sel int
+ off int
+}
+
+// NewIndentWriter makes a new write filter that indents the input
+// lines. Each line is prefixed in order with the corresponding
+// element of pre. If there are more lines than elements, the last
+// element of pre is repeated for each subsequent line.
+func NewIndentWriter(w io.Writer, pre ...[]byte) io.Writer {
+ return &indentWriter{
+ w: w,
+ pre: pre,
+ bol: true,
+ }
+}
+
+// The only errors returned are from the underlying writer.
+func (w *indentWriter) Write(p []byte) (n int, err error) {
+ for _, c := range p {
+ if w.bol {
+ var i int
+ i, err = w.w.Write(w.pre[w.sel][w.off:])
+ w.off += i
+ if err != nil {
+ return n, err
+ }
+ }
+ _, err = w.w.Write([]byte{c})
+ if err != nil {
+ return n, err
+ }
+ n++
+ w.bol = c == '\n'
+ if w.bol {
+ w.off = 0
+ if w.sel < len(w.pre)-1 {
+ w.sel++
+ }
+ }
+ }
+ return n, nil
+}
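
Indent handles the common case of a fixed prefix, while NewIndentWriter can vary the prefix per line (the last element of pre repeats for all remaining lines); pretty's formatter above uses it with a single tab prefix. A usage sketch:

    package main

    import (
        "fmt"
        "os"

        "github.com/kr/text"
    )

    func main() {
        fmt.Print(text.Indent("one\ntwo\n", "> "))
        // > one
        // > two

        // First line gets "* ", every following line gets "  ".
        w := text.NewIndentWriter(os.Stdout, []byte("* "), []byte("  "))
        fmt.Fprint(w, "head\nbody\nbody\n")
        // * head
        //   body
        //   body
    }
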
diff --git a/vendor/github.com/kr/text/wrap.go b/vendor/github.com/kr/text/wrap.go
new file mode 100644
index 000000000..b09bb0373
--- /dev/null
+++ b/vendor/github.com/kr/text/wrap.go
@@ -0,0 +1,86 @@
+package text
+
+import (
+ "bytes"
+ "math"
+)
+
+var (
+ nl = []byte{'\n'}
+ sp = []byte{' '}
+)
+
+const defaultPenalty = 1e5
+
+// Wrap wraps s into a paragraph of lines of length lim, with minimal
+// raggedness.
+func Wrap(s string, lim int) string {
+ return string(WrapBytes([]byte(s), lim))
+}
+
+// WrapBytes wraps b into a paragraph of lines of length lim, with minimal
+// raggedness.
+func WrapBytes(b []byte, lim int) []byte {
+ words := bytes.Split(bytes.Replace(bytes.TrimSpace(b), nl, sp, -1), sp)
+ var lines [][]byte
+ for _, line := range WrapWords(words, 1, lim, defaultPenalty) {
+ lines = append(lines, bytes.Join(line, sp))
+ }
+ return bytes.Join(lines, nl)
+}
+
+// WrapWords is the low-level line-breaking algorithm, useful if you need more
+// control over the details of the text wrapping process. For most uses, either
+// Wrap or WrapBytes will be sufficient and more convenient.
+//
+// WrapWords splits a list of words into lines with minimal "raggedness",
+// treating each byte as one unit, accounting for spc units between adjacent
+// words on each line, and attempting to limit lines to lim units. Raggedness
+// is the total error over all lines, where error is the square of the
+// difference of the length of the line and lim. Too-long lines (which only
+// happen when a single word is longer than lim units) have pen penalty units
+// added to the error.
+func WrapWords(words [][]byte, spc, lim, pen int) [][][]byte {
+ n := len(words)
+
+ length := make([][]int, n)
+ for i := 0; i < n; i++ {
+ length[i] = make([]int, n)
+ length[i][i] = len(words[i])
+ for j := i + 1; j < n; j++ {
+ length[i][j] = length[i][j-1] + spc + len(words[j])
+ }
+ }
+
+ nbrk := make([]int, n)
+ cost := make([]int, n)
+ for i := range cost {
+ cost[i] = math.MaxInt32
+ }
+ for i := n - 1; i >= 0; i-- {
+ if length[i][n-1] <= lim || i == n-1 {
+ cost[i] = 0
+ nbrk[i] = n
+ } else {
+ for j := i + 1; j < n; j++ {
+ d := lim - length[i][j-1]
+ c := d*d + cost[j]
+ if length[i][j-1] > lim {
+ c += pen // too-long lines get a worse penalty
+ }
+ if c < cost[i] {
+ cost[i] = c
+ nbrk[i] = j
+ }
+ }
+ }
+ }
+
+ var lines [][][]byte
+ i := 0
+ for i < n {
+ lines = append(lines, words[i:nbrk[i]])
+ i = nbrk[i]
+ }
+ return lines
+}
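
WrapWords finds break points by dynamic programming: cost[i] is the minimal total squared slack for wrapping words[i:], nbrk[i] records the chosen break, and the final pass just walks nbrk. For most uses only Wrap is needed:

    package main

    import (
        "fmt"

        "github.com/kr/text"
    )

    func main() {
        s := "The quick brown fox jumps over the lazy dog."
        // Breaks minimize total squared slack against the limit, so they
        // can differ from what a greedy wrapper would pick.
        fmt.Println(text.Wrap(s, 16))
    }
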
diff --git a/vendor/github.com/mailru/easyjson/LICENSE b/vendor/github.com/mailru/easyjson/LICENSE
new file mode 100644
index 000000000..fbff658f7
--- /dev/null
+++ b/vendor/github.com/mailru/easyjson/LICENSE
@@ -0,0 +1,7 @@
+Copyright (c) 2016 Mail.Ru Group
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/mailru/easyjson/buffer/pool.go b/vendor/github.com/mailru/easyjson/buffer/pool.go
new file mode 100644
index 000000000..598a54af9
--- /dev/null
+++ b/vendor/github.com/mailru/easyjson/buffer/pool.go
@@ -0,0 +1,278 @@
+// Package buffer implements a buffer for serialization, consisting of a chain of []byte-s to
+// reduce copying and to allow reuse of individual chunks.
+package buffer
+
+import (
+ "io"
+ "net"
+ "sync"
+)
+
+// PoolConfig contains configuration for the allocation and reuse strategy.
+type PoolConfig struct {
+ StartSize int // Minimum chunk size that is allocated.
+ PooledSize int // Minimum chunk size that is reused; reusing chunks that are too small results in overhead.
+ MaxSize int // Maximum chunk size that will be allocated.
+}
+
+var config = PoolConfig{
+ StartSize: 128,
+ PooledSize: 512,
+ MaxSize: 32768,
+}
+
+// Reuse pool: chunk size -> pool.
+var buffers = map[int]*sync.Pool{}
+
+func initBuffers() {
+ for l := config.PooledSize; l <= config.MaxSize; l *= 2 {
+ buffers[l] = new(sync.Pool)
+ }
+}
+
+func init() {
+ initBuffers()
+}
+
+// Init sets up a non-default pooling and allocation strategy. Should be run before serialization is done.
+func Init(cfg PoolConfig) {
+ config = cfg
+ initBuffers()
+}
+
+// putBuf puts a chunk to reuse pool if it can be reused.
+func putBuf(buf []byte) {
+ size := cap(buf)
+ if size < config.PooledSize {
+ return
+ }
+ if c := buffers[size]; c != nil {
+ c.Put(buf[:0])
+ }
+}
+
+// getBuf gets a chunk from reuse pool or creates a new one if reuse failed.
+func getBuf(size int) []byte {
+ if size >= config.PooledSize {
+ if c := buffers[size]; c != nil {
+ v := c.Get()
+ if v != nil {
+ return v.([]byte)
+ }
+ }
+ }
+ return make([]byte, 0, size)
+}
+
+// Buffer is a buffer optimized for serialization without extra copying.
+type Buffer struct {
+
+ // Buf is the current chunk that can be used for serialization.
+ Buf []byte
+
+ toPool []byte
+ bufs [][]byte
+}
+
+// EnsureSpace makes sure that the current chunk contains at least s free bytes,
+// possibly creating a new chunk.
+func (b *Buffer) EnsureSpace(s int) {
+ if cap(b.Buf)-len(b.Buf) < s {
+ b.ensureSpaceSlow(s)
+ }
+}
+
+func (b *Buffer) ensureSpaceSlow(s int) {
+ l := len(b.Buf)
+ if l > 0 {
+ if cap(b.toPool) != cap(b.Buf) {
+ // Chunk was reallocated, toPool can be pooled.
+ putBuf(b.toPool)
+ }
+ if cap(b.bufs) == 0 {
+ b.bufs = make([][]byte, 0, 8)
+ }
+ b.bufs = append(b.bufs, b.Buf)
+ l = cap(b.toPool) * 2
+ } else {
+ l = config.StartSize
+ }
+
+ if l > config.MaxSize {
+ l = config.MaxSize
+ }
+ b.Buf = getBuf(l)
+ b.toPool = b.Buf
+}
+
+// AppendByte appends a single byte to buffer.
+func (b *Buffer) AppendByte(data byte) {
+ b.EnsureSpace(1)
+ b.Buf = append(b.Buf, data)
+}
+
+// AppendBytes appends a byte slice to buffer.
+func (b *Buffer) AppendBytes(data []byte) {
+ if len(data) <= cap(b.Buf)-len(b.Buf) {
+ b.Buf = append(b.Buf, data...) // fast path
+ } else {
+ b.appendBytesSlow(data)
+ }
+}
+
+func (b *Buffer) appendBytesSlow(data []byte) {
+ for len(data) > 0 {
+ b.EnsureSpace(1)
+
+ sz := cap(b.Buf) - len(b.Buf)
+ if sz > len(data) {
+ sz = len(data)
+ }
+
+ b.Buf = append(b.Buf, data[:sz]...)
+ data = data[sz:]
+ }
+}
+
+// AppendString appends a string to buffer.
+func (b *Buffer) AppendString(data string) {
+ if len(data) <= cap(b.Buf)-len(b.Buf) {
+ b.Buf = append(b.Buf, data...) // fast path
+ } else {
+ b.appendStringSlow(data)
+ }
+}
+
+func (b *Buffer) appendStringSlow(data string) {
+ for len(data) > 0 {
+ b.EnsureSpace(1)
+
+ sz := cap(b.Buf) - len(b.Buf)
+ if sz > len(data) {
+ sz = len(data)
+ }
+
+ b.Buf = append(b.Buf, data[:sz]...)
+ data = data[sz:]
+ }
+}
+
+// Size computes the size of a buffer by adding sizes of every chunk.
+func (b *Buffer) Size() int {
+ size := len(b.Buf)
+ for _, buf := range b.bufs {
+ size += len(buf)
+ }
+ return size
+}
+
+// DumpTo outputs the contents of a buffer to a writer and resets the buffer.
+func (b *Buffer) DumpTo(w io.Writer) (written int, err error) {
+ bufs := net.Buffers(b.bufs)
+ if len(b.Buf) > 0 {
+ bufs = append(bufs, b.Buf)
+ }
+ n, err := bufs.WriteTo(w)
+
+ for _, buf := range b.bufs {
+ putBuf(buf)
+ }
+ putBuf(b.toPool)
+
+ b.bufs = nil
+ b.Buf = nil
+ b.toPool = nil
+
+ return int(n), err
+}
+
+// BuildBytes creates a single byte slice with all the contents of the buffer. Data is
+// copied if it does not fit in a single chunk. You can optionally provide one byte
+// slice as an argument for the buffer to try to reuse.
+func (b *Buffer) BuildBytes(reuse ...[]byte) []byte {
+ if len(b.bufs) == 0 {
+ ret := b.Buf
+ b.toPool = nil
+ b.Buf = nil
+ return ret
+ }
+
+ var ret []byte
+ size := b.Size()
+
+ // If we got a buffer as argument and it is big enough, reuse it.
+ if len(reuse) == 1 && cap(reuse[0]) >= size {
+ ret = reuse[0][:0]
+ } else {
+ ret = make([]byte, 0, size)
+ }
+ for _, buf := range b.bufs {
+ ret = append(ret, buf...)
+ putBuf(buf)
+ }
+
+ ret = append(ret, b.Buf...)
+ putBuf(b.toPool)
+
+ b.bufs = nil
+ b.toPool = nil
+ b.Buf = nil
+
+ return ret
+}
+
+type readCloser struct {
+ offset int
+ bufs [][]byte
+}
+
+func (r *readCloser) Read(p []byte) (n int, err error) {
+ for _, buf := range r.bufs {
+ // Copy as much as we can.
+ x := copy(p[n:], buf[r.offset:])
+ n += x // Increment how much we filled.
+
+ // Did we empty the whole buffer?
+ if r.offset+x == len(buf) {
+ // On to the next buffer.
+ r.offset = 0
+ r.bufs = r.bufs[1:]
+
+ // We can release this buffer.
+ putBuf(buf)
+ } else {
+ r.offset += x
+ }
+
+ if n == len(p) {
+ break
+ }
+ }
+ // No buffers left or nothing read?
+ if len(r.bufs) == 0 {
+ err = io.EOF
+ }
+ return
+}
+
+func (r *readCloser) Close() error {
+ // Release all remaining buffers.
+ for _, buf := range r.bufs {
+ putBuf(buf)
+ }
+ // In case Close gets called multiple times.
+ r.bufs = nil
+
+ return nil
+}
+
+// ReadCloser creates an io.ReadCloser with all the contents of the buffer.
+func (b *Buffer) ReadCloser() io.ReadCloser {
+ ret := &readCloser{0, append(b.bufs, b.Buf)}
+
+ b.bufs = nil
+ b.toPool = nil
+ b.Buf = nil
+
+ return ret
+}
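
A Buffer's zero value is ready to use: the first append allocates StartSize bytes, later chunks double (capped at MaxSize) and are recycled through the per-size sync.Pools. BuildBytes flattens the chain, copying only when more than one chunk exists, while DumpTo streams the chunks via net.Buffers and resets the buffer. A usage sketch:

    package main

    import (
        "fmt"
        "os"

        "github.com/mailru/easyjson/buffer"
    )

    func main() {
        var b buffer.Buffer
        b.AppendString(`{"greeting":`)
        b.AppendString(`"hello"}`)

        // Flatten into a single slice...
        out := b.BuildBytes()
        fmt.Println(string(out))

        // ...or stream the chunks without flattening (resets the buffer).
        var c buffer.Buffer
        c.AppendBytes(out)
        if _, err := c.DumpTo(os.Stdout); err != nil {
            panic(err)
        }
        fmt.Println()
    }
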
diff --git a/vendor/github.com/mailru/easyjson/jlexer/bytestostr.go b/vendor/github.com/mailru/easyjson/jlexer/bytestostr.go
new file mode 100644
index 000000000..ff7b27c5b
--- /dev/null
+++ b/vendor/github.com/mailru/easyjson/jlexer/bytestostr.go
@@ -0,0 +1,24 @@
+// This file will only be included in the build if neither the
+// easyjson_nounsafe nor the appengine build tag is set. See the README
+// notes for more details.
+
+//+build !easyjson_nounsafe
+//+build !appengine
+
+package jlexer
+
+import (
+ "reflect"
+ "unsafe"
+)
+
+// bytesToStr creates a string pointing at the slice to avoid copying.
+//
+// Warning: the returned string must be used with care, since it aliases the input: a
+// single string can keep the whole input chunk from being freed by the GC, and
+// conversely the backing data may be collected while the string is still in use.
+func bytesToStr(data []byte) string {
+ h := (*reflect.SliceHeader)(unsafe.Pointer(&data))
+ shdr := reflect.StringHeader{Data: h.Data, Len: h.Len}
+ return *(*string)(unsafe.Pointer(&shdr))
+}
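
bytesToStr is unexported, but the hazard its warning describes is easy to reproduce with the same header-aliasing trick. In this sketch (aliasStr is an illustrative stand-in, not library API) the resulting "string" observes a later write to the slice, which is why the lexer only hands such strings out through its Unsafe* methods:

    package main

    import (
        "fmt"
        "unsafe"
    )

    // aliasStr shares the slice's backing array instead of copying it.
    func aliasStr(b []byte) string {
        return *(*string)(unsafe.Pointer(&b))
    }

    func main() {
        b := []byte("immutable?")
        s := aliasStr(b)
        b[0] = 'I'     // the mutation is visible through the aliased string
        fmt.Println(s) // Immutable?
    }
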
diff --git a/vendor/github.com/mailru/easyjson/jlexer/bytestostr_nounsafe.go b/vendor/github.com/mailru/easyjson/jlexer/bytestostr_nounsafe.go
new file mode 100644
index 000000000..864d1be67
--- /dev/null
+++ b/vendor/github.com/mailru/easyjson/jlexer/bytestostr_nounsafe.go
@@ -0,0 +1,13 @@
+// This file is included in the build if any of the build tags below
+// is defined. Refer to the README notes for more details.
+
+//+build easyjson_nounsafe appengine
+
+package jlexer
+
+// bytesToStr creates a string from []byte with an ordinary copy.
+//
+// Note that this method is roughly 1.5x slower than using the 'unsafe' method.
+func bytesToStr(data []byte) string {
+ return string(data)
+}
diff --git a/vendor/github.com/mailru/easyjson/jlexer/error.go b/vendor/github.com/mailru/easyjson/jlexer/error.go
new file mode 100644
index 000000000..e90ec40d0
--- /dev/null
+++ b/vendor/github.com/mailru/easyjson/jlexer/error.go
@@ -0,0 +1,15 @@
+package jlexer
+
+import "fmt"
+
+// LexerError implements the error interface and represents all possible errors
+// that can be generated while parsing JSON data.
+type LexerError struct {
+ Reason string
+ Offset int
+ Data string
+}
+
+func (l *LexerError) Error() string {
+ return fmt.Sprintf("parse error: %s near offset %d of '%s'", l.Reason, l.Offset, l.Data)
+}
diff --git a/vendor/github.com/mailru/easyjson/jlexer/lexer.go b/vendor/github.com/mailru/easyjson/jlexer/lexer.go
new file mode 100644
index 000000000..b5f5e2613
--- /dev/null
+++ b/vendor/github.com/mailru/easyjson/jlexer/lexer.go
@@ -0,0 +1,1244 @@
+// Package jlexer contains a JSON lexer implementation.
+//
+// It is expected that it is mostly used with generated parser code, so the interface is tuned
+// for a parser that knows what kind of data is expected.
+package jlexer
+
+import (
+ "bytes"
+ "encoding/base64"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "strconv"
+ "unicode"
+ "unicode/utf16"
+ "unicode/utf8"
+
+ "github.com/josharian/intern"
+)
+
+// tokenKind determines type of a token.
+type tokenKind byte
+
+const (
+ tokenUndef tokenKind = iota // No token.
+ tokenDelim // Delimiter: one of '{', '}', '[' or ']'.
+ tokenString // A string literal, e.g. "abc\u1234"
+ tokenNumber // Number literal, e.g. 1.5e5
+ tokenBool // Boolean literal: true or false.
+ tokenNull // null keyword.
+)
+
+// token describes a single token: type, position in the input and value.
+type token struct {
+ kind tokenKind // Type of a token.
+
+ boolValue bool // Value if a boolean literal token.
+ byteValueCloned bool // true if byteValue was allocated and does not refer to the original JSON body
+ byteValue []byte // Raw value of a token.
+ delimValue byte
+}
+
+// Lexer is a JSON lexer: it iterates over JSON tokens in a byte slice.
+type Lexer struct {
+ Data []byte // Input data given to the lexer.
+
+ start int // Start of the current token.
+ pos int // Current unscanned position in the input stream.
+ token token // Last scanned token, if token.kind != tokenUndef.
+
+ firstElement bool // Whether the current element is the first in an array or object.
+ wantSep byte // A comma or a colon character, which need to occur before a token.
+
+ UseMultipleErrors bool // If we want to use multiple errors.
+ fatalError error // Fatal error that occurred during lexing; it is usually a syntax error.
+ multipleErrors []*LexerError // Semantic errors that occurred during lexing. Parsing continues after these errors are found.
+}
+
+// FetchToken scans the input for the next token.
+func (r *Lexer) FetchToken() {
+ r.token.kind = tokenUndef
+ r.start = r.pos
+
+ // Check that r.pos is still within r.Data;
+ // if it isn't, the input data is corrupted.
+ if len(r.Data) < r.pos {
+ r.errParse("Unexpected end of data")
+ return
+ }
+ // Determine the type of a token by skipping whitespace and reading the
+ // first character.
+ for _, c := range r.Data[r.pos:] {
+ switch c {
+ case ':', ',':
+ if r.wantSep == c {
+ r.pos++
+ r.start++
+ r.wantSep = 0
+ } else {
+ r.errSyntax()
+ }
+
+ case ' ', '\t', '\r', '\n':
+ r.pos++
+ r.start++
+
+ case '"':
+ if r.wantSep != 0 {
+ r.errSyntax()
+ }
+
+ r.token.kind = tokenString
+ r.fetchString()
+ return
+
+ case '{', '[':
+ if r.wantSep != 0 {
+ r.errSyntax()
+ }
+ r.firstElement = true
+ r.token.kind = tokenDelim
+ r.token.delimValue = r.Data[r.pos]
+ r.pos++
+ return
+
+ case '}', ']':
+ if !r.firstElement && (r.wantSep != ',') {
+ r.errSyntax()
+ }
+ r.wantSep = 0
+ r.token.kind = tokenDelim
+ r.token.delimValue = r.Data[r.pos]
+ r.pos++
+ return
+
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '-':
+ if r.wantSep != 0 {
+ r.errSyntax()
+ }
+ r.token.kind = tokenNumber
+ r.fetchNumber()
+ return
+
+ case 'n':
+ if r.wantSep != 0 {
+ r.errSyntax()
+ }
+
+ r.token.kind = tokenNull
+ r.fetchNull()
+ return
+
+ case 't':
+ if r.wantSep != 0 {
+ r.errSyntax()
+ }
+
+ r.token.kind = tokenBool
+ r.token.boolValue = true
+ r.fetchTrue()
+ return
+
+ case 'f':
+ if r.wantSep != 0 {
+ r.errSyntax()
+ }
+
+ r.token.kind = tokenBool
+ r.token.boolValue = false
+ r.fetchFalse()
+ return
+
+ default:
+ r.errSyntax()
+ return
+ }
+ }
+ r.fatalError = io.EOF
+ return
+}
+
+// isTokenEnd returns true if the char can follow a non-delimiter token
+func isTokenEnd(c byte) bool {
+ return c == ' ' || c == '\t' || c == '\r' || c == '\n' || c == '[' || c == ']' || c == '{' || c == '}' || c == ',' || c == ':'
+}
+
+// fetchNull fetches and checks remaining bytes of null keyword.
+func (r *Lexer) fetchNull() {
+ r.pos += 4
+ if r.pos > len(r.Data) ||
+ r.Data[r.pos-3] != 'u' ||
+ r.Data[r.pos-2] != 'l' ||
+ r.Data[r.pos-1] != 'l' ||
+ (r.pos != len(r.Data) && !isTokenEnd(r.Data[r.pos])) {
+
+ r.pos -= 4
+ r.errSyntax()
+ }
+}
+
+// fetchTrue fetches and checks remaining bytes of true keyword.
+func (r *Lexer) fetchTrue() {
+ r.pos += 4
+ if r.pos > len(r.Data) ||
+ r.Data[r.pos-3] != 'r' ||
+ r.Data[r.pos-2] != 'u' ||
+ r.Data[r.pos-1] != 'e' ||
+ (r.pos != len(r.Data) && !isTokenEnd(r.Data[r.pos])) {
+
+ r.pos -= 4
+ r.errSyntax()
+ }
+}
+
+// fetchFalse fetches and checks remaining bytes of false keyword.
+func (r *Lexer) fetchFalse() {
+ r.pos += 5
+ if r.pos > len(r.Data) ||
+ r.Data[r.pos-4] != 'a' ||
+ r.Data[r.pos-3] != 'l' ||
+ r.Data[r.pos-2] != 's' ||
+ r.Data[r.pos-1] != 'e' ||
+ (r.pos != len(r.Data) && !isTokenEnd(r.Data[r.pos])) {
+
+ r.pos -= 5
+ r.errSyntax()
+ }
+}
+
+// fetchNumber scans a number literal token.
+func (r *Lexer) fetchNumber() {
+ hasE := false
+ afterE := false
+ hasDot := false
+
+ r.pos++
+ for i, c := range r.Data[r.pos:] {
+ switch {
+ case c >= '0' && c <= '9':
+ afterE = false
+ case c == '.' && !hasDot:
+ hasDot = true
+ case (c == 'e' || c == 'E') && !hasE:
+ hasE = true
+ hasDot = true
+ afterE = true
+ case (c == '+' || c == '-') && afterE:
+ afterE = false
+ default:
+ r.pos += i
+ if !isTokenEnd(c) {
+ r.errSyntax()
+ } else {
+ r.token.byteValue = r.Data[r.start:r.pos]
+ }
+ return
+ }
+ }
+
+ r.pos = len(r.Data)
+ r.token.byteValue = r.Data[r.start:]
+}
+
+// findStringLen scans the string literal for the ending quote char to determine the required size.
+// The size is exact if no escapes are present and may be inexact if there are escaped chars.
+func findStringLen(data []byte) (isValid bool, length int) {
+ for {
+ idx := bytes.IndexByte(data, '"')
+ if idx == -1 {
+ return false, len(data)
+ }
+ if idx == 0 || (idx > 0 && data[idx-1] != '\\') {
+ return true, length + idx
+ }
+
+ // Count the preceding backslashes: an even number means the quote is not really escaped.
+ cnt := 1
+ for idx-cnt-1 >= 0 && data[idx-cnt-1] == '\\' {
+ cnt++
+ }
+ if cnt%2 == 0 {
+ return true, length + idx
+ }
+
+ length += idx + 1
+ data = data[idx+1:]
+ }
+}
+
+// unescapeStringToken performs unescaping of a string token.
+// If no escaping is needed the original bytes are returned; otherwise a new slice is allocated.
+func (r *Lexer) unescapeStringToken() (err error) {
+ data := r.token.byteValue
+ var unescapedData []byte
+
+ for {
+ i := bytes.IndexByte(data, '\\')
+ if i == -1 {
+ break
+ }
+
+ escapedRune, escapedBytes, err := decodeEscape(data[i:])
+ if err != nil {
+ r.errParse(err.Error())
+ return err
+ }
+
+ if unescapedData == nil {
+ unescapedData = make([]byte, 0, len(r.token.byteValue))
+ }
+
+ var d [4]byte
+ s := utf8.EncodeRune(d[:], escapedRune)
+ unescapedData = append(unescapedData, data[:i]...)
+ unescapedData = append(unescapedData, d[:s]...)
+
+ data = data[i+escapedBytes:]
+ }
+
+ if unescapedData != nil {
+ r.token.byteValue = append(unescapedData, data...)
+ r.token.byteValueCloned = true
+ }
+ return
+}
+
+// getu4 decodes \uXXXX from the beginning of s, returning the hex value,
+// or -1 on failure.
+func getu4(s []byte) rune {
+ if len(s) < 6 || s[0] != '\\' || s[1] != 'u' {
+ return -1
+ }
+ var val rune
+ for i := 2; i < len(s) && i < 6; i++ {
+ var v byte
+ c := s[i]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ v = c - '0'
+ case 'a', 'b', 'c', 'd', 'e', 'f':
+ v = c - 'a' + 10
+ case 'A', 'B', 'C', 'D', 'E', 'F':
+ v = c - 'A' + 10
+ default:
+ return -1
+ }
+
+ val <<= 4
+ val |= rune(v)
+ }
+ return val
+}
+
+// decodeEscape processes a single escape sequence and returns the number of bytes consumed.
+func decodeEscape(data []byte) (decoded rune, bytesProcessed int, err error) {
+ if len(data) < 2 {
+ return 0, 0, errors.New("incorrect escape symbol \\ at the end of token")
+ }
+
+ c := data[1]
+ switch c {
+ case '"', '/', '\\':
+ return rune(c), 2, nil
+ case 'b':
+ return '\b', 2, nil
+ case 'f':
+ return '\f', 2, nil
+ case 'n':
+ return '\n', 2, nil
+ case 'r':
+ return '\r', 2, nil
+ case 't':
+ return '\t', 2, nil
+ case 'u':
+ rr := getu4(data)
+ if rr < 0 {
+ return 0, 0, errors.New("incorrectly escaped \\uXXXX sequence")
+ }
+
+ read := 6
+ if utf16.IsSurrogate(rr) {
+ rr1 := getu4(data[read:])
+ if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar {
+ read += 6
+ rr = dec
+ } else {
+ rr = unicode.ReplacementChar
+ }
+ }
+ return rr, read, nil
+ }
+
+ return 0, 0, errors.New("incorrectly escaped bytes")
+}
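
The surrogate branch above mirrors encoding/json: a \uXXXX escape in the UTF-16 surrogate range must be combined with the escape that follows it, and an unpaired surrogate degrades to U+FFFD. The standard-library calls it relies on can be checked in isolation:

    package main

    import (
        "fmt"
        "unicode/utf16"
    )

    func main() {
        // "\ud83d\ude00" in JSON is a surrogate pair for U+1F600.
        hi, lo := rune(0xd83d), rune(0xde00)
        fmt.Println(utf16.IsSurrogate(hi))           // true
        fmt.Printf("%c\n", utf16.DecodeRune(hi, lo)) // 😀
    }
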
+
+// fetchString scans a string literal token.
+func (r *Lexer) fetchString() {
+ r.pos++
+ data := r.Data[r.pos:]
+
+ isValid, length := findStringLen(data)
+ if !isValid {
+ r.pos += length
+ r.errParse("unterminated string literal")
+ return
+ }
+ r.token.byteValue = data[:length]
+ r.pos += length + 1 // skip closing '"' as well
+}
+
+// scanToken scans the next token if no token is currently available in the lexer.
+func (r *Lexer) scanToken() {
+ if r.token.kind != tokenUndef || r.fatalError != nil {
+ return
+ }
+
+ r.FetchToken()
+}
+
+// consume resets the current token to allow scanning the next one.
+func (r *Lexer) consume() {
+ r.token.kind = tokenUndef
+ r.token.byteValueCloned = false
+ r.token.delimValue = 0
+}
+
+// Ok returns true if no error (including io.EOF) was encountered during scanning.
+func (r *Lexer) Ok() bool {
+ return r.fatalError == nil
+}
+
+const maxErrorContextLen = 13
+
+func (r *Lexer) errParse(what string) {
+ if r.fatalError == nil {
+ var str string
+ if len(r.Data)-r.pos <= maxErrorContextLen {
+ str = string(r.Data)
+ } else {
+ str = string(r.Data[r.pos:r.pos+maxErrorContextLen-3]) + "..."
+ }
+ r.fatalError = &LexerError{
+ Reason: what,
+ Offset: r.pos,
+ Data: str,
+ }
+ }
+}
+
+func (r *Lexer) errSyntax() {
+ r.errParse("syntax error")
+}
+
+func (r *Lexer) errInvalidToken(expected string) {
+ if r.fatalError != nil {
+ return
+ }
+ if r.UseMultipleErrors {
+ r.pos = r.start
+ r.consume()
+ r.SkipRecursive()
+ switch expected {
+ case "[":
+ r.token.delimValue = ']'
+ r.token.kind = tokenDelim
+ case "{":
+ r.token.delimValue = '}'
+ r.token.kind = tokenDelim
+ }
+ r.addNonfatalError(&LexerError{
+ Reason: fmt.Sprintf("expected %s", expected),
+ Offset: r.start,
+ Data: string(r.Data[r.start:r.pos]),
+ })
+ return
+ }
+
+ var str string
+ if len(r.token.byteValue) <= maxErrorContextLen {
+ str = string(r.token.byteValue)
+ } else {
+ str = string(r.token.byteValue[:maxErrorContextLen-3]) + "..."
+ }
+ r.fatalError = &LexerError{
+ Reason: fmt.Sprintf("expected %s", expected),
+ Offset: r.pos,
+ Data: str,
+ }
+}
+
+func (r *Lexer) GetPos() int {
+ return r.pos
+}
+
+// Delim consumes a token and verifies that it is the given delimiter.
+func (r *Lexer) Delim(c byte) {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+
+ if !r.Ok() || r.token.delimValue != c {
+ r.consume() // errInvalidToken can change token if UseMultipleErrors is enabled.
+ r.errInvalidToken(string([]byte{c}))
+ } else {
+ r.consume()
+ }
+}
+
+// IsDelim returns true if there was no scanning error and next token is the given delimiter.
+func (r *Lexer) IsDelim(c byte) bool {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ return !r.Ok() || r.token.delimValue == c
+}
+
+// Null verifies that the next token is null and consumes it.
+func (r *Lexer) Null() {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ if !r.Ok() || r.token.kind != tokenNull {
+ r.errInvalidToken("null")
+ }
+ r.consume()
+}
+
+// IsNull returns true if the next token is a null keyword.
+func (r *Lexer) IsNull() bool {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ return r.Ok() && r.token.kind == tokenNull
+}
+
+// Skip skips a single token.
+func (r *Lexer) Skip() {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ r.consume()
+}
+
+// SkipRecursive skips the next array or object completely, or just skips a
+// single token if it is not an array/object.
+//
+// Note: no syntax validation is performed on the skipped data.
+func (r *Lexer) SkipRecursive() {
+ r.scanToken()
+ var start, end byte
+ startPos := r.start
+
+ switch r.token.delimValue {
+ case '{':
+ start, end = '{', '}'
+ case '[':
+ start, end = '[', ']'
+ default:
+ r.consume()
+ return
+ }
+
+ r.consume()
+
+ level := 1
+ inQuotes := false
+ wasEscape := false
+
+ for i, c := range r.Data[r.pos:] {
+ switch {
+ case c == start && !inQuotes:
+ level++
+ case c == end && !inQuotes:
+ level--
+ if level == 0 {
+ r.pos += i + 1
+ if !json.Valid(r.Data[startPos:r.pos]) {
+ r.pos = len(r.Data)
+ r.fatalError = &LexerError{
+ Reason: "skipped array/object json value is invalid",
+ Offset: r.pos,
+ Data: string(r.Data[r.pos:]),
+ }
+ }
+ return
+ }
+ case c == '\\' && inQuotes:
+ wasEscape = !wasEscape
+ continue
+ case c == '"' && inQuotes:
+ inQuotes = wasEscape
+ case c == '"':
+ inQuotes = true
+ }
+ wasEscape = false
+ }
+ r.pos = len(r.Data)
+ r.fatalError = &LexerError{
+ Reason: "EOF reached while skipping array/object or token",
+ Offset: r.pos,
+ Data: string(r.Data[r.pos:]),
+ }
+}
+
+// Raw fetches the next item recursively as a raw data slice.
+func (r *Lexer) Raw() []byte {
+ r.SkipRecursive()
+ if !r.Ok() {
+ return nil
+ }
+ return r.Data[r.start:r.pos]
+}
+
+// IsStart returns whether the lexer is positioned at the start
+// of an input string.
+func (r *Lexer) IsStart() bool {
+ return r.pos == 0
+}
+
+// Consumed reads all remaining bytes from the input, publishing an error if
+// there is anything but whitespace remaining.
+func (r *Lexer) Consumed() {
+ if r.pos > len(r.Data) || !r.Ok() {
+ return
+ }
+
+ for _, c := range r.Data[r.pos:] {
+ if c != ' ' && c != '\t' && c != '\r' && c != '\n' {
+ r.AddError(&LexerError{
+ Reason: "invalid character '" + string(c) + "' after top-level value",
+ Offset: r.pos,
+ Data: string(r.Data[r.pos:]),
+ })
+ return
+ }
+
+ r.pos++
+ r.start++
+ }
+}
+
+func (r *Lexer) unsafeString(skipUnescape bool) (string, []byte) {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ if !r.Ok() || r.token.kind != tokenString {
+ r.errInvalidToken("string")
+ return "", nil
+ }
+ if !skipUnescape {
+ if err := r.unescapeStringToken(); err != nil {
+ r.errInvalidToken("string")
+ return "", nil
+ }
+ }
+
+ bytes := r.token.byteValue
+ ret := bytesToStr(r.token.byteValue)
+ r.consume()
+ return ret, bytes
+}
+
+// UnsafeString returns the string value if the token is a string literal.
+//
+// Warning: returned string may point to the input buffer, so the string should not outlive
+// the input buffer. Intended pattern of usage is as an argument to a switch statement.
+func (r *Lexer) UnsafeString() string {
+ ret, _ := r.unsafeString(false)
+ return ret
+}
+
+// UnsafeBytes returns the byte slice if the token is a string literal.
+func (r *Lexer) UnsafeBytes() []byte {
+ _, ret := r.unsafeString(false)
+ return ret
+}
+
+// UnsafeFieldName returns the current member name string token.
+func (r *Lexer) UnsafeFieldName(skipUnescape bool) string {
+ ret, _ := r.unsafeString(skipUnescape)
+ return ret
+}
+
+// String reads a string literal.
+func (r *Lexer) String() string {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ if !r.Ok() || r.token.kind != tokenString {
+ r.errInvalidToken("string")
+ return ""
+ }
+ if err := r.unescapeStringToken(); err != nil {
+ r.errInvalidToken("string")
+ return ""
+ }
+ var ret string
+ if r.token.byteValueCloned {
+ ret = bytesToStr(r.token.byteValue)
+ } else {
+ ret = string(r.token.byteValue)
+ }
+ r.consume()
+ return ret
+}
+
+// StringIntern reads a string literal, and performs string interning on it.
+func (r *Lexer) StringIntern() string {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ if !r.Ok() || r.token.kind != tokenString {
+ r.errInvalidToken("string")
+ return ""
+ }
+ if err := r.unescapeStringToken(); err != nil {
+ r.errInvalidToken("string")
+ return ""
+ }
+ ret := intern.Bytes(r.token.byteValue)
+ r.consume()
+ return ret
+}
+
+// Bytes reads a string literal and base64 decodes it into a byte slice.
+func (r *Lexer) Bytes() []byte {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ if !r.Ok() || r.token.kind != tokenString {
+ r.errInvalidToken("string")
+ return nil
+ }
+ if err := r.unescapeStringToken(); err != nil {
+ r.errInvalidToken("string")
+ return nil
+ }
+ ret := make([]byte, base64.StdEncoding.DecodedLen(len(r.token.byteValue)))
+ n, err := base64.StdEncoding.Decode(ret, r.token.byteValue)
+ if err != nil {
+ r.fatalError = &LexerError{
+ Reason: err.Error(),
+ }
+ return nil
+ }
+
+ r.consume()
+ return ret[:n]
+}
+
+// Bool reads a true or false boolean keyword.
+func (r *Lexer) Bool() bool {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ if !r.Ok() || r.token.kind != tokenBool {
+ r.errInvalidToken("bool")
+ return false
+ }
+ ret := r.token.boolValue
+ r.consume()
+ return ret
+}
+
+func (r *Lexer) number() string {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ if !r.Ok() || r.token.kind != tokenNumber {
+ r.errInvalidToken("number")
+ return ""
+ }
+ ret := bytesToStr(r.token.byteValue)
+ r.consume()
+ return ret
+}
+
+func (r *Lexer) Uint8() uint8 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseUint(s, 10, 8)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return uint8(n)
+}
+
+func (r *Lexer) Uint16() uint16 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseUint(s, 10, 16)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return uint16(n)
+}
+
+func (r *Lexer) Uint32() uint32 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseUint(s, 10, 32)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return uint32(n)
+}
+
+func (r *Lexer) Uint64() uint64 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseUint(s, 10, 64)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return n
+}
+
+func (r *Lexer) Uint() uint {
+ return uint(r.Uint64())
+}
+
+func (r *Lexer) Int8() int8 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseInt(s, 10, 8)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return int8(n)
+}
+
+func (r *Lexer) Int16() int16 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseInt(s, 10, 16)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return int16(n)
+}
+
+func (r *Lexer) Int32() int32 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseInt(s, 10, 32)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return int32(n)
+}
+
+func (r *Lexer) Int64() int64 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseInt(s, 10, 64)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return n
+}
+
+func (r *Lexer) Int() int {
+ return int(r.Int64())
+}
+
+func (r *Lexer) Uint8Str() uint8 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseUint(s, 10, 8)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return uint8(n)
+}
+
+func (r *Lexer) Uint16Str() uint16 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseUint(s, 10, 16)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return uint16(n)
+}
+
+func (r *Lexer) Uint32Str() uint32 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseUint(s, 10, 32)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return uint32(n)
+}
+
+func (r *Lexer) Uint64Str() uint64 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseUint(s, 10, 64)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return n
+}
+
+func (r *Lexer) UintStr() uint {
+ return uint(r.Uint64Str())
+}
+
+func (r *Lexer) UintptrStr() uintptr {
+ return uintptr(r.Uint64Str())
+}
+
+func (r *Lexer) Int8Str() int8 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseInt(s, 10, 8)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return int8(n)
+}
+
+func (r *Lexer) Int16Str() int16 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseInt(s, 10, 16)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return int16(n)
+}
+
+func (r *Lexer) Int32Str() int32 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseInt(s, 10, 32)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return int32(n)
+}
+
+func (r *Lexer) Int64Str() int64 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseInt(s, 10, 64)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return n
+}
+
+func (r *Lexer) IntStr() int {
+ return int(r.Int64Str())
+}
+
+func (r *Lexer) Float32() float32 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseFloat(s, 32)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return float32(n)
+}
+
+func (r *Lexer) Float32Str() float32 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+ n, err := strconv.ParseFloat(s, 32)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return float32(n)
+}
+
+func (r *Lexer) Float64() float64 {
+ s := r.number()
+ if !r.Ok() {
+ return 0
+ }
+
+ n, err := strconv.ParseFloat(s, 64)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: s,
+ })
+ }
+ return n
+}
+
+func (r *Lexer) Float64Str() float64 {
+ s, b := r.unsafeString(false)
+ if !r.Ok() {
+ return 0
+ }
+ n, err := strconv.ParseFloat(s, 64)
+ if err != nil {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Reason: err.Error(),
+ Data: string(b),
+ })
+ }
+ return n
+}
+
+func (r *Lexer) Error() error {
+ return r.fatalError
+}
+
+func (r *Lexer) AddError(e error) {
+ if r.fatalError == nil {
+ r.fatalError = e
+ }
+}
+
+func (r *Lexer) AddNonFatalError(e error) {
+ r.addNonfatalError(&LexerError{
+ Offset: r.start,
+ Data: string(r.Data[r.start:r.pos]),
+ Reason: e.Error(),
+ })
+}
+
+func (r *Lexer) addNonfatalError(err *LexerError) {
+ if r.UseMultipleErrors {
+ // We don't want to add errors with the same offset.
+ if len(r.multipleErrors) != 0 && r.multipleErrors[len(r.multipleErrors)-1].Offset == err.Offset {
+ return
+ }
+ r.multipleErrors = append(r.multipleErrors, err)
+ return
+ }
+ r.fatalError = err
+}
+
+func (r *Lexer) GetNonFatalErrors() []*LexerError {
+ return r.multipleErrors
+}
+
+// JsonNumber fetches a json.Number from the 'encoding/json' package.
+// Integer, float, and string tokens are all valid values for it.
+func (r *Lexer) JsonNumber() json.Number {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+ if !r.Ok() {
+ r.errInvalidToken("json.Number")
+ return json.Number("")
+ }
+
+ switch r.token.kind {
+ case tokenString:
+ return json.Number(r.String())
+ case tokenNumber:
+ return json.Number(r.Raw())
+ case tokenNull:
+ r.Null()
+ return json.Number("")
+ default:
+ r.errSyntax()
+ return json.Number("")
+ }
+}
+
+// Interface fetches an interface{} value, analogous to the 'encoding/json' package.
+func (r *Lexer) Interface() interface{} {
+ if r.token.kind == tokenUndef && r.Ok() {
+ r.FetchToken()
+ }
+
+ if !r.Ok() {
+ return nil
+ }
+ switch r.token.kind {
+ case tokenString:
+ return r.String()
+ case tokenNumber:
+ return r.Float64()
+ case tokenBool:
+ return r.Bool()
+ case tokenNull:
+ r.Null()
+ return nil
+ }
+
+ if r.token.delimValue == '{' {
+ r.consume()
+
+ ret := map[string]interface{}{}
+ for !r.IsDelim('}') {
+ key := r.String()
+ r.WantColon()
+ ret[key] = r.Interface()
+ r.WantComma()
+ }
+ r.Delim('}')
+
+ if r.Ok() {
+ return ret
+ } else {
+ return nil
+ }
+ } else if r.token.delimValue == '[' {
+ r.consume()
+
+ ret := []interface{}{}
+ for !r.IsDelim(']') {
+ ret = append(ret, r.Interface())
+ r.WantComma()
+ }
+ r.Delim(']')
+
+ if r.Ok() {
+ return ret
+ } else {
+ return nil
+ }
+ }
+ r.errSyntax()
+ return nil
+}
+
+// WantComma requires a comma to be present before fetching next token.
+func (r *Lexer) WantComma() {
+ r.wantSep = ','
+ r.firstElement = false
+}
+
+// WantColon requires a colon to be present before fetching next token.
+func (r *Lexer) WantColon() {
+ r.wantSep = ':'
+ r.firstElement = false
+}
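
Taken together these primitives support the decode loop that easyjson's generated code emits: consume '{', then alternate field name, colon, and value, demand commas between members, and finally check that nothing but whitespace remains. A hand-written sketch over the public API (the field names are illustrative):

    package main

    import (
        "fmt"

        "github.com/mailru/easyjson/jlexer"
    )

    func main() {
        in := &jlexer.Lexer{Data: []byte(`{"name":"gts","port":8080}`)}

        var name string
        var port int

        in.Delim('{')
        for !in.IsDelim('}') {
            key := in.UnsafeFieldName(false)
            in.WantColon()
            switch key {
            case "name":
                name = in.String()
            case "port":
                port = in.Int()
            default:
                in.SkipRecursive() // ignore unknown fields
            }
            in.WantComma()
        }
        in.Delim('}')
        in.Consumed()

        if err := in.Error(); err != nil {
            panic(err)
        }
        fmt.Println(name, port) // gts 8080
    }
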
diff --git a/vendor/github.com/mailru/easyjson/jwriter/writer.go b/vendor/github.com/mailru/easyjson/jwriter/writer.go
new file mode 100644
index 000000000..2c5b20105
--- /dev/null
+++ b/vendor/github.com/mailru/easyjson/jwriter/writer.go
@@ -0,0 +1,405 @@
+// Package jwriter contains a JSON writer.
+package jwriter
+
+import (
+ "io"
+ "strconv"
+ "unicode/utf8"
+
+ "github.com/mailru/easyjson/buffer"
+)
+
+// Flags describe various encoding options. The behavior may actually be implemented
+// in the encoder, but the Flags field in Writer is used to set and pass them around.
+type Flags int
+
+const (
+ NilMapAsEmpty Flags = 1 << iota // Encode nil map as '{}' rather than 'null'.
+ NilSliceAsEmpty // Encode nil slice as '[]' rather than 'null'.
+)
+
+// Writer is a JSON writer.
+type Writer struct {
+ Flags Flags
+
+ Error error
+ Buffer buffer.Buffer
+ NoEscapeHTML bool
+}
+
+// Size returns the size of the data that was written out.
+func (w *Writer) Size() int {
+ return w.Buffer.Size()
+}
+
+// DumpTo outputs the data to given io.Writer, resetting the buffer.
+func (w *Writer) DumpTo(out io.Writer) (written int, err error) {
+ return w.Buffer.DumpTo(out)
+}
+
+// BuildBytes returns writer data as a single byte slice. You can optionally provide one byte slice
+// as an argument for the writer to try to reuse.
+func (w *Writer) BuildBytes(reuse ...[]byte) ([]byte, error) {
+ if w.Error != nil {
+ return nil, w.Error
+ }
+
+ return w.Buffer.BuildBytes(reuse...), nil
+}
+
+// ReadCloser returns an io.ReadCloser that can be used to read the data.
+// ReadCloser also resets the buffer.
+func (w *Writer) ReadCloser() (io.ReadCloser, error) {
+ if w.Error != nil {
+ return nil, w.Error
+ }
+
+ return w.Buffer.ReadCloser(), nil
+}
+
+// RawByte appends raw binary data to the buffer.
+func (w *Writer) RawByte(c byte) {
+ w.Buffer.AppendByte(c)
+}
+
+// RawString appends a raw string to the buffer.
+func (w *Writer) RawString(s string) {
+ w.Buffer.AppendString(s)
+}
+
+// Raw appends raw binary data to the buffer or sets the error if one is given. Useful for
+// calling with results of MarshalJSON-like functions.
+func (w *Writer) Raw(data []byte, err error) {
+ switch {
+ case w.Error != nil:
+ return
+ case err != nil:
+ w.Error = err
+ case len(data) > 0:
+ w.Buffer.AppendBytes(data)
+ default:
+ w.RawString("null")
+ }
+}
+
+// RawText encloses raw binary data in quotes and appends it to the buffer.
+// Useful for calling with results of MarshalText-like functions.
+func (w *Writer) RawText(data []byte, err error) {
+ switch {
+ case w.Error != nil:
+ return
+ case err != nil:
+ w.Error = err
+ case len(data) > 0:
+ w.String(string(data))
+ default:
+ w.RawString("null")
+ }
+}
+
+// Base64Bytes appends data to the buffer after base64-encoding it.
+func (w *Writer) Base64Bytes(data []byte) {
+ if data == nil {
+ w.Buffer.AppendString("null")
+ return
+ }
+ w.Buffer.AppendByte('"')
+ w.base64(data)
+ w.Buffer.AppendByte('"')
+}
+
+func (w *Writer) Uint8(n uint8) {
+ w.Buffer.EnsureSpace(3)
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+}
+
+func (w *Writer) Uint16(n uint16) {
+ w.Buffer.EnsureSpace(5)
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+}
+
+func (w *Writer) Uint32(n uint32) {
+ w.Buffer.EnsureSpace(10)
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+}
+
+func (w *Writer) Uint(n uint) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+}
+
+func (w *Writer) Uint64(n uint64) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, n, 10)
+}
+
+func (w *Writer) Int8(n int8) {
+ w.Buffer.EnsureSpace(4)
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
+}
+
+func (w *Writer) Int16(n int16) {
+ w.Buffer.EnsureSpace(6)
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
+}
+
+func (w *Writer) Int32(n int32) {
+ w.Buffer.EnsureSpace(11)
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
+}
+
+func (w *Writer) Int(n int) {
+ w.Buffer.EnsureSpace(21)
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
+}
+
+func (w *Writer) Int64(n int64) {
+ w.Buffer.EnsureSpace(21)
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, n, 10)
+}
+
+func (w *Writer) Uint8Str(n uint8) {
+ w.Buffer.EnsureSpace(3)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Uint16Str(n uint16) {
+ w.Buffer.EnsureSpace(5)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Uint32Str(n uint32) {
+ w.Buffer.EnsureSpace(10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) UintStr(n uint) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Uint64Str(n uint64) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, n, 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) UintptrStr(n uintptr) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendUint(w.Buffer.Buf, uint64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Int8Str(n int8) {
+ w.Buffer.EnsureSpace(4)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Int16Str(n int16) {
+ w.Buffer.EnsureSpace(6)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Int32Str(n int32) {
+ w.Buffer.EnsureSpace(11)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) IntStr(n int) {
+ w.Buffer.EnsureSpace(21)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, int64(n), 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Int64Str(n int64) {
+ w.Buffer.EnsureSpace(21)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendInt(w.Buffer.Buf, n, 10)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Float32(n float32) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = strconv.AppendFloat(w.Buffer.Buf, float64(n), 'g', -1, 32)
+}
+
+func (w *Writer) Float32Str(n float32) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendFloat(w.Buffer.Buf, float64(n), 'g', -1, 32)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Float64(n float64) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = strconv.AppendFloat(w.Buffer.Buf, n, 'g', -1, 64)
+}
+
+func (w *Writer) Float64Str(n float64) {
+ w.Buffer.EnsureSpace(20)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+ w.Buffer.Buf = strconv.AppendFloat(w.Buffer.Buf, float64(n), 'g', -1, 64)
+ w.Buffer.Buf = append(w.Buffer.Buf, '"')
+}
+
+func (w *Writer) Bool(v bool) {
+ w.Buffer.EnsureSpace(5)
+ if v {
+ w.Buffer.Buf = append(w.Buffer.Buf, "true"...)
+ } else {
+ w.Buffer.Buf = append(w.Buffer.Buf, "false"...)
+ }
+}
+
+const chars = "0123456789abcdef"
+
+func getTable(falseValues ...int) [128]bool {
+ table := [128]bool{}
+
+ for i := 0; i < 128; i++ {
+ table[i] = true
+ }
+
+ for _, v := range falseValues {
+ table[v] = false
+ }
+
+ return table
+}
+
+var (
+ htmlEscapeTable = getTable(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, '"', '&', '<', '>', '\\')
+ htmlNoEscapeTable = getTable(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, '"', '\\')
+)
+
+func (w *Writer) String(s string) {
+ w.Buffer.AppendByte('"')
+
+ // Portions of the string that contain no escapes are appended as
+ // byte slices.
+
+ p := 0 // last non-escape symbol
+
+ escapeTable := &htmlEscapeTable
+ if w.NoEscapeHTML {
+ escapeTable = &htmlNoEscapeTable
+ }
+
+ for i := 0; i < len(s); {
+ c := s[i]
+
+ if c < utf8.RuneSelf {
+ if escapeTable[c] {
+ // single-width character, no escaping is required
+ i++
+ continue
+ }
+
+ w.Buffer.AppendString(s[p:i])
+ switch c {
+ case '\t':
+ w.Buffer.AppendString(`\t`)
+ case '\r':
+ w.Buffer.AppendString(`\r`)
+ case '\n':
+ w.Buffer.AppendString(`\n`)
+ case '\\':
+ w.Buffer.AppendString(`\\`)
+ case '"':
+ w.Buffer.AppendString(`\"`)
+ default:
+ w.Buffer.AppendString(`\u00`)
+ w.Buffer.AppendByte(chars[c>>4])
+ w.Buffer.AppendByte(chars[c&0xf])
+ }
+
+ i++
+ p = i
+ continue
+ }
+
+ // broken UTF-8: emit the replacement character
+ runeValue, runeWidth := utf8.DecodeRuneInString(s[i:])
+ if runeValue == utf8.RuneError && runeWidth == 1 {
+ w.Buffer.AppendString(s[p:i])
+ w.Buffer.AppendString(`\ufffd`)
+ i++
+ p = i
+ continue
+ }
+
+ // U+2028 (line separator) and U+2029 (paragraph separator) are valid
+ // JSON but not valid JavaScript, so escape them for JSONP safety
+ if runeValue == '\u2028' || runeValue == '\u2029' {
+ w.Buffer.AppendString(s[p:i])
+ w.Buffer.AppendString(`\u202`)
+ w.Buffer.AppendByte(chars[runeValue&0xf])
+ i += runeWidth
+ p = i
+ continue
+ }
+ i += runeWidth
+ }
+ w.Buffer.AppendString(s[p:])
+ w.Buffer.AppendByte('"')
+}
+
+const encode = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
+const padChar = '='
+
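+// base64 appends the standard (RFC 4648) base64 encoding of in to the
+// buffer: three input bytes become four output characters, padded with
+// '='. For example, []byte("Ma") encodes as "TWE=".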
+func (w *Writer) base64(in []byte) {
+
+ if len(in) == 0 {
+ return
+ }
+
+ w.Buffer.EnsureSpace(((len(in)-1)/3 + 1) * 4)
+
+ si := 0
+ n := (len(in) / 3) * 3
+
+ for si < n {
+ // Convert 3x 8bit source bytes into 4 bytes
+ val := uint(in[si+0])<<16 | uint(in[si+1])<<8 | uint(in[si+2])
+
+ w.Buffer.Buf = append(w.Buffer.Buf, encode[val>>18&0x3F], encode[val>>12&0x3F], encode[val>>6&0x3F], encode[val&0x3F])
+
+ si += 3
+ }
+
+ remain := len(in) - si
+ if remain == 0 {
+ return
+ }
+
+ // Add the remaining small block
+ val := uint(in[si+0]) << 16
+ if remain == 2 {
+ val |= uint(in[si+1]) << 8
+ }
+
+ w.Buffer.Buf = append(w.Buffer.Buf, encode[val>>18&0x3F], encode[val>>12&0x3F])
+
+ switch remain {
+ case 2:
+ w.Buffer.Buf = append(w.Buffer.Buf, encode[val>>6&0x3F], byte(padChar))
+ case 1:
+ w.Buffer.Buf = append(w.Buffer.Buf, byte(padChar), byte(padChar))
+ }
+}
diff --git a/vendor/github.com/mitchellh/copystructure/LICENSE b/vendor/github.com/mitchellh/copystructure/LICENSE
new file mode 100644
index 000000000..229851590
--- /dev/null
+++ b/vendor/github.com/mitchellh/copystructure/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Mitchell Hashimoto
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/mitchellh/copystructure/README.md b/vendor/github.com/mitchellh/copystructure/README.md
new file mode 100644
index 000000000..f0fbd2e5c
--- /dev/null
+++ b/vendor/github.com/mitchellh/copystructure/README.md
@@ -0,0 +1,21 @@
+# copystructure
+
+copystructure is a Go library for deep copying values in Go.
+
+This allows you to copy Go values that may contain reference values
+such as maps, slices, or pointers, and copy their data as well instead
+of just their references.
+
+## Installation
+
+Standard `go get`:
+
+```
+$ go get github.com/mitchellh/copystructure
+```
+
+## Usage & Example
+
+For usage and examples see the [Godoc](http://godoc.org/github.com/mitchellh/copystructure).
+
+The `Copy` function has examples associated with it there.
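+
+As a quick sketch (error handling kept minimal):
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/mitchellh/copystructure"
+)
+
+func main() {
+	input := map[string][]string{"a": {"b", "c"}}
+
+	dup, err := copystructure.Copy(input)
+	if err != nil {
+		panic(err)
+	}
+
+	// The copy is deep: mutating it leaves the original untouched.
+	dup.(map[string][]string)["a"][0] = "z"
+	fmt.Println(input["a"][0]) // still prints "b"
+}
+```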
diff --git a/vendor/github.com/mitchellh/copystructure/copier_time.go b/vendor/github.com/mitchellh/copystructure/copier_time.go
new file mode 100644
index 000000000..db6a6aa1a
--- /dev/null
+++ b/vendor/github.com/mitchellh/copystructure/copier_time.go
@@ -0,0 +1,15 @@
+package copystructure
+
+import (
+ "reflect"
+ "time"
+)
+
+func init() {
+ Copiers[reflect.TypeOf(time.Time{})] = timeCopier
+}
+
+func timeCopier(v interface{}) (interface{}, error) {
+ // Just... copy it.
+ return v.(time.Time), nil
+}
diff --git a/vendor/github.com/mitchellh/copystructure/copystructure.go b/vendor/github.com/mitchellh/copystructure/copystructure.go
new file mode 100644
index 000000000..8089e6670
--- /dev/null
+++ b/vendor/github.com/mitchellh/copystructure/copystructure.go
@@ -0,0 +1,631 @@
+package copystructure
+
+import (
+ "errors"
+ "reflect"
+ "sync"
+
+ "github.com/mitchellh/reflectwalk"
+)
+
+const tagKey = "copy"
+
+// Copy returns a deep copy of v.
+//
+// Copy is unable to copy unexported fields in a struct (lowercase field names).
+// Unexported fields can't be reflected by the Go runtime and therefore
+// copystructure can't perform any data copies.
+//
+// For structs, copy behavior can be controlled with struct tags. For example:
+//
+// struct {
+// Name string
+// Data *bytes.Buffer `copy:"shallow"`
+// }
+//
+// The available tag values are:
+//
+// * "ignore" - The field will be ignored, effectively resulting in it being
+// assigned the zero value in the copy.
+//
+// * "shallow" - The field will be be shallow copied. This means that references
+// values such as pointers, maps, slices, etc. will be directly assigned
+// versus deep copied.
+//
+func Copy(v interface{}) (interface{}, error) {
+ return Config{}.Copy(v)
+}
+
+// CopierFunc is a function that knows how to deep copy a specific type.
+// Register these globally with the Copiers variable.
+type CopierFunc func(interface{}) (interface{}, error)
+
+// Copiers is a map of types that behave specially when they are copied.
+// If a type is found in this map while deep copying, the associated function
+// will be called to copy it instead of attempting to copy all fields.
+//
+// The key should be the type, obtained using: reflect.TypeOf(value with type).
+//
+// It is unsafe to write to this map after Copies have started. If you
+// are writing to this map while also copying, wrap all modifications to
+// this map as well as to Copy in a mutex.
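+//
+// For example, a custom copier might be registered like so (MyType and
+// its Clone method are illustrative only; see copier_time.go for a real
+// registration):
+//
+//	Copiers[reflect.TypeOf(MyType{})] = func(v interface{}) (interface{}, error) {
+//		return v.(MyType).Clone(), nil
+//	}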
+var Copiers map[reflect.Type]CopierFunc = make(map[reflect.Type]CopierFunc)
+
+// ShallowCopiers is a map of pointer types that behave specially
+// when they are copied. If a type is found in this map while deep
+// copying, the pointer value will be shallow copied and not walked
+// into.
+//
+// The key should be the type, obtained using: reflect.TypeOf(value
+// with type).
+//
+// It is unsafe to write to this map after Copies have started. If you
+// are writing to this map while also copying, wrap all modifications to
+// this map as well as to Copy in a mutex.
+var ShallowCopiers map[reflect.Type]struct{} = make(map[reflect.Type]struct{})
+
+// Must is a helper that wraps a call to a function returning
+// (interface{}, error) and panics if the error is non-nil. It is intended
+// for use in variable initializations and should only be used when a copy
+// error should be a crashing case.
+func Must(v interface{}, err error) interface{} {
+ if err != nil {
+ panic("copy error: " + err.Error())
+ }
+
+ return v
+}
+
+var errPointerRequired = errors.New("Copy argument must be a pointer when Lock is true")
+
+type Config struct {
+ // Lock any types that are a sync.Locker and are not a mutex while copying.
+ // If there is an RLocker method, use that to get the sync.Locker.
+ Lock bool
+
+ // Copiers is a map of types associated with a CopierFunc. Use the global
+ // Copiers map if this is nil.
+ Copiers map[reflect.Type]CopierFunc
+
+// ShallowCopiers is a map of pointer types that are shallow
+// copied no matter where they are encountered. Use the
+// global ShallowCopiers if this is nil.
+ ShallowCopiers map[reflect.Type]struct{}
+}
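+
+// For example, to deep copy a value while taking any sync.Locker locks it
+// contains (a sketch; the argument must be a pointer when Lock is true):
+//
+//	dup, err := Config{Lock: true}.Copy(&v)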
+
+func (c Config) Copy(v interface{}) (interface{}, error) {
+ if c.Lock && reflect.ValueOf(v).Kind() != reflect.Ptr {
+ return nil, errPointerRequired
+ }
+
+ w := new(walker)
+ if c.Lock {
+ w.useLocks = true
+ }
+
+ if c.Copiers == nil {
+ c.Copiers = Copiers
+ }
+ w.copiers = c.Copiers
+
+ if c.ShallowCopiers == nil {
+ c.ShallowCopiers = ShallowCopiers
+ }
+ w.shallowCopiers = c.ShallowCopiers
+
+ err := reflectwalk.Walk(v, w)
+ if err != nil {
+ return nil, err
+ }
+
+ // Get the result. If the result is nil, then we want to turn it
+ // into a typed nil if we can.
+ result := w.Result
+ if result == nil {
+ val := reflect.ValueOf(v)
+ result = reflect.Indirect(reflect.New(val.Type())).Interface()
+ }
+
+ return result, nil
+}
+
+// Return the key used to index interface types we've seen. Store the number
+// of pointers in the upper 32 bits, and the depth in the lower 32 bits. This is
+// easy to calculate, easy to match a key with our current depth, and we don't
+// need to deal with initializing and cleaning up nested maps or slices.
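+// For example, ifaceKey(2, 1) == 0x0000000200000001.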
+func ifaceKey(pointers, depth int) uint64 {
+ return uint64(pointers)<<32 | uint64(depth)
+}
+
+type walker struct {
+ Result interface{}
+
+ copiers map[reflect.Type]CopierFunc
+ shallowCopiers map[reflect.Type]struct{}
+ depth int
+ ignoreDepth int
+ vals []reflect.Value
+ cs []reflect.Value
+
+ // This stores the number of pointers we've walked over, indexed by depth.
+ ps []int
+
+ // If an interface is indirected by a pointer, we need to know the type of
+ // interface to create when creating the new value. Store the interface
+ // types here, indexed by both the walk depth and the number of pointers
+ // already seen at that depth. Use ifaceKey to calculate the proper uint64
+ // value.
+ ifaceTypes map[uint64]reflect.Type
+
+ // any locks we've taken, indexed by depth
+ locks []sync.Locker
+ // take locks while walking the structure
+ useLocks bool
+}
+
+func (w *walker) Enter(l reflectwalk.Location) error {
+ w.depth++
+
+ // ensure we have enough elements to index via w.depth
+ for w.depth >= len(w.locks) {
+ w.locks = append(w.locks, nil)
+ }
+
+ for len(w.ps) < w.depth+1 {
+ w.ps = append(w.ps, 0)
+ }
+
+ return nil
+}
+
+func (w *walker) Exit(l reflectwalk.Location) error {
+ locker := w.locks[w.depth]
+ w.locks[w.depth] = nil
+ if locker != nil {
+ defer locker.Unlock()
+ }
+
+ // clear out pointers and interfaces as we exit the stack
+ w.ps[w.depth] = 0
+
+ for k := range w.ifaceTypes {
+ mask := uint64(^uint32(0))
+ if k&mask == uint64(w.depth) {
+ delete(w.ifaceTypes, k)
+ }
+ }
+
+ w.depth--
+ if w.ignoreDepth > w.depth {
+ w.ignoreDepth = 0
+ }
+
+ if w.ignoring() {
+ return nil
+ }
+
+ switch l {
+ case reflectwalk.Array:
+ fallthrough
+ case reflectwalk.Map:
+ fallthrough
+ case reflectwalk.Slice:
+ w.replacePointerMaybe()
+
+ // Pop map off our container
+ w.cs = w.cs[:len(w.cs)-1]
+ case reflectwalk.MapValue:
+ // Pop off the key and value
+ mv := w.valPop()
+ mk := w.valPop()
+ m := w.cs[len(w.cs)-1]
+
+ // If mv is the zero value, SetMapIndex deletes the key from the map,
+ // or in this case never adds it. We need to create a properly typed
+ // zero value so that this key can be set.
+ if !mv.IsValid() {
+ mv = reflect.Zero(m.Elem().Type().Elem())
+ }
+ m.Elem().SetMapIndex(mk, mv)
+ case reflectwalk.ArrayElem:
+ // Pop off the value and the index and set it on the array
+ v := w.valPop()
+ i := w.valPop().Interface().(int)
+ if v.IsValid() {
+ a := w.cs[len(w.cs)-1]
+ ae := a.Elem().Index(i) // storing array as pointer on stack - so need Elem() call
+ if ae.CanSet() {
+ ae.Set(v)
+ }
+ }
+ case reflectwalk.SliceElem:
+ // Pop off the value and the index and set it on the slice
+ v := w.valPop()
+ i := w.valPop().Interface().(int)
+ if v.IsValid() {
+ s := w.cs[len(w.cs)-1]
+ se := s.Elem().Index(i)
+ if se.CanSet() {
+ se.Set(v)
+ }
+ }
+ case reflectwalk.Struct:
+ w.replacePointerMaybe()
+
+ // Remove the struct from the container stack
+ w.cs = w.cs[:len(w.cs)-1]
+ case reflectwalk.StructField:
+ // Pop off the value and the field
+ v := w.valPop()
+ f := w.valPop().Interface().(reflect.StructField)
+ if v.IsValid() {
+ s := w.cs[len(w.cs)-1]
+ sf := reflect.Indirect(s).FieldByName(f.Name)
+
+ if sf.CanSet() {
+ sf.Set(v)
+ }
+ }
+ case reflectwalk.WalkLoc:
+ // Clear out the slices for GC
+ w.cs = nil
+ w.vals = nil
+ }
+
+ return nil
+}
+
+func (w *walker) Map(m reflect.Value) error {
+ if w.ignoring() {
+ return nil
+ }
+ w.lock(m)
+
+ // Create the map. If the map itself is nil, then just make a nil map
+ var newMap reflect.Value
+ if m.IsNil() {
+ newMap = reflect.New(m.Type())
+ } else {
+ newMap = wrapPtr(reflect.MakeMap(m.Type()))
+ }
+
+ w.cs = append(w.cs, newMap)
+ w.valPush(newMap)
+ return nil
+}
+
+func (w *walker) MapElem(m, k, v reflect.Value) error {
+ return nil
+}
+
+func (w *walker) PointerEnter(v bool) error {
+ if v {
+ w.ps[w.depth]++
+ }
+ return nil
+}
+
+func (w *walker) PointerExit(v bool) error {
+ if v {
+ w.ps[w.depth]--
+ }
+ return nil
+}
+
+func (w *walker) Pointer(v reflect.Value) error {
+ if _, ok := w.shallowCopiers[v.Type()]; ok {
+ // Shallow copy this value. Use the same logic as primitive, then
+ // return skip.
+ if err := w.Primitive(v); err != nil {
+ return err
+ }
+
+ return reflectwalk.SkipEntry
+ }
+
+ return nil
+}
+
+func (w *walker) Interface(v reflect.Value) error {
+ if !v.IsValid() {
+ return nil
+ }
+ if w.ifaceTypes == nil {
+ w.ifaceTypes = make(map[uint64]reflect.Type)
+ }
+
+ w.ifaceTypes[ifaceKey(w.ps[w.depth], w.depth)] = v.Type()
+ return nil
+}
+
+func (w *walker) Primitive(v reflect.Value) error {
+ if w.ignoring() {
+ return nil
+ }
+ w.lock(v)
+
+ // IsValid verifies that v is non-zero and CanInterface verifies
+ // that we're allowed to read this value (unexported fields).
+ var newV reflect.Value
+ if v.IsValid() && v.CanInterface() {
+ newV = reflect.New(v.Type())
+ newV.Elem().Set(v)
+ }
+
+ w.valPush(newV)
+ w.replacePointerMaybe()
+ return nil
+}
+
+func (w *walker) Slice(s reflect.Value) error {
+ if w.ignoring() {
+ return nil
+ }
+ w.lock(s)
+
+ var newS reflect.Value
+ if s.IsNil() {
+ newS = reflect.New(s.Type())
+ } else {
+ newS = wrapPtr(reflect.MakeSlice(s.Type(), s.Len(), s.Cap()))
+ }
+
+ w.cs = append(w.cs, newS)
+ w.valPush(newS)
+ return nil
+}
+
+func (w *walker) SliceElem(i int, elem reflect.Value) error {
+ if w.ignoring() {
+ return nil
+ }
+
+ // We don't write the slice here because elem might still be
+ // arbitrarily complex. Just record the index and continue on.
+ w.valPush(reflect.ValueOf(i))
+
+ return nil
+}
+
+func (w *walker) Array(a reflect.Value) error {
+ if w.ignoring() {
+ return nil
+ }
+ w.lock(a)
+
+ newA := reflect.New(a.Type())
+
+ w.cs = append(w.cs, newA)
+ w.valPush(newA)
+ return nil
+}
+
+func (w *walker) ArrayElem(i int, elem reflect.Value) error {
+ if w.ignoring() {
+ return nil
+ }
+
+ // We don't write the array here because elem might still be
+ // arbitrarily complex. Just record the index and continue on.
+ w.valPush(reflect.ValueOf(i))
+
+ return nil
+}
+
+func (w *walker) Struct(s reflect.Value) error {
+ if w.ignoring() {
+ return nil
+ }
+ w.lock(s)
+
+ var v reflect.Value
+ if c, ok := w.copiers[s.Type()]; ok {
+ // We have a Copier for this struct, so we use that copier to
+ // get the copy, and we ignore anything deeper than this.
+ w.ignoreDepth = w.depth
+
+ dup, err := c(s.Interface())
+ if err != nil {
+ return err
+ }
+
+ // We need to put a pointer to the value on the value stack,
+ // so allocate a new pointer and set it.
+ v = reflect.New(s.Type())
+ reflect.Indirect(v).Set(reflect.ValueOf(dup))
+ } else {
+ // No copier, we copy ourselves and allow reflectwalk to guide
+ // us deeper into the structure for copying.
+ v = reflect.New(s.Type())
+ }
+
+ // Push the value onto the value stack for setting the struct field,
+ // and add the struct itself to the containers stack in case we walk
+ // deeper so that its own fields can be modified.
+ w.valPush(v)
+ w.cs = append(w.cs, v)
+
+ return nil
+}
+
+func (w *walker) StructField(f reflect.StructField, v reflect.Value) error {
+ if w.ignoring() {
+ return nil
+ }
+
+ // If PkgPath is non-empty, this is a private (unexported) field.
+ // We cannot set unexported fields through reflection, so skip them.
+ if f.PkgPath != "" {
+ return reflectwalk.SkipEntry
+ }
+
+ switch f.Tag.Get(tagKey) {
+ case "shallow":
+ // If we're shallow copying then assign the value directly to the
+ // struct and skip the entry.
+ if v.IsValid() {
+ s := w.cs[len(w.cs)-1]
+ sf := reflect.Indirect(s).FieldByName(f.Name)
+ if sf.CanSet() {
+ sf.Set(v)
+ }
+ }
+
+ return reflectwalk.SkipEntry
+
+ case "ignore":
+ // Do nothing
+ return reflectwalk.SkipEntry
+ }
+
+ // Push the field onto the stack, we'll handle it when we exit
+ // the struct field in Exit...
+ w.valPush(reflect.ValueOf(f))
+
+ return nil
+}
+
+// ignore causes the walker to ignore any more values until we exit this one
+func (w *walker) ignore() {
+ w.ignoreDepth = w.depth
+}
+
+func (w *walker) ignoring() bool {
+ return w.ignoreDepth > 0 && w.depth >= w.ignoreDepth
+}
+
+func (w *walker) pointerPeek() bool {
+ return w.ps[w.depth] > 0
+}
+
+func (w *walker) valPop() reflect.Value {
+ result := w.vals[len(w.vals)-1]
+ w.vals = w.vals[:len(w.vals)-1]
+
+ // If we're out of values, that means we popped everything off. In
+ // this case, we reset the result so the next pushed value becomes
+ // the result.
+ if len(w.vals) == 0 {
+ w.Result = nil
+ }
+
+ return result
+}
+
+func (w *walker) valPush(v reflect.Value) {
+ w.vals = append(w.vals, v)
+
+ // If we haven't set the result yet, then this is the result since
+ // it is the first (outermost) value we're seeing.
+ if w.Result == nil && v.IsValid() {
+ w.Result = v.Interface()
+ }
+}
+
+func (w *walker) replacePointerMaybe() {
+ // Determine the last pointer value. If it is NOT a pointer, then
+ // we need to push that onto the stack.
+ if !w.pointerPeek() {
+ w.valPush(reflect.Indirect(w.valPop()))
+ return
+ }
+
+ v := w.valPop()
+
+ // If the expected type is a pointer to an interface of any depth,
+ // such as *interface{}, **interface{}, etc., then we need to convert
+ // the value "v" from *CONCRETE to *interface{} so types match for
+ // Set.
+ //
+ // Example if v is type *Foo where Foo is a struct, v would become
+ // *interface{} instead. This only happens if we have an interface expectation
+ // at this depth.
+ //
+ // For more info, see GH-16
+ if iType, ok := w.ifaceTypes[ifaceKey(w.ps[w.depth], w.depth)]; ok && iType.Kind() == reflect.Interface {
+ y := reflect.New(iType) // Create *interface{}
+ y.Elem().Set(reflect.Indirect(v)) // Assign "Foo" to interface{} (dereferenced)
+ v = y // v is now typed *interface{} (where *v = Foo)
+ }
+
+ for i := 1; i < w.ps[w.depth]; i++ {
+ if iType, ok := w.ifaceTypes[ifaceKey(w.ps[w.depth]-i, w.depth)]; ok {
+ iface := reflect.New(iType).Elem()
+ iface.Set(v)
+ v = iface
+ }
+
+ p := reflect.New(v.Type())
+ p.Elem().Set(v)
+ v = p
+ }
+
+ w.valPush(v)
+}
+
+// if this value is a Locker, lock it and add it to the locks slice
+func (w *walker) lock(v reflect.Value) {
+ if !w.useLocks {
+ return
+ }
+
+ if !v.IsValid() || !v.CanInterface() {
+ return
+ }
+
+ type rlocker interface {
+ RLocker() sync.Locker
+ }
+
+ var locker sync.Locker
+
+ // We can't call Interface() on a value directly, since that requires
+ // a copy. This is OK, since the pointer to a value which is a sync.Locker
+ // is also a sync.Locker.
+ if v.Kind() == reflect.Ptr {
+ switch l := v.Interface().(type) {
+ case rlocker:
+ // don't lock a mutex directly
+ if _, ok := l.(*sync.RWMutex); !ok {
+ locker = l.RLocker()
+ }
+ case sync.Locker:
+ locker = l
+ }
+ } else if v.CanAddr() {
+ switch l := v.Addr().Interface().(type) {
+ case rlocker:
+ // don't lock a mutex directly
+ if _, ok := l.(*sync.RWMutex); !ok {
+ locker = l.RLocker()
+ }
+ case sync.Locker:
+ locker = l
+ }
+ }
+
+ // still no callable locker
+ if locker == nil {
+ return
+ }
+
+ // don't lock a mutex directly
+ switch locker.(type) {
+ case *sync.Mutex, *sync.RWMutex:
+ return
+ }
+
+ locker.Lock()
+ w.locks[w.depth] = locker
+}
+
+// wrapPtr is a helper that takes v and always makes it *v. copystructure
+// stores things internally as pointers until the last moment before unwrapping.
+func wrapPtr(v reflect.Value) reflect.Value {
+ if !v.IsValid() {
+ return v
+ }
+ vPtr := reflect.New(v.Type())
+ vPtr.Elem().Set(v)
+ return vPtr
+}
diff --git a/vendor/github.com/mitchellh/reflectwalk/.travis.yml b/vendor/github.com/mitchellh/reflectwalk/.travis.yml
new file mode 100644
index 000000000..4f2ee4d97
--- /dev/null
+++ b/vendor/github.com/mitchellh/reflectwalk/.travis.yml
@@ -0,0 +1 @@
+language: go
diff --git a/vendor/github.com/mitchellh/reflectwalk/LICENSE b/vendor/github.com/mitchellh/reflectwalk/LICENSE
new file mode 100644
index 000000000..f9c841a51
--- /dev/null
+++ b/vendor/github.com/mitchellh/reflectwalk/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2013 Mitchell Hashimoto
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/mitchellh/reflectwalk/README.md b/vendor/github.com/mitchellh/reflectwalk/README.md
new file mode 100644
index 000000000..ac82cd2e1
--- /dev/null
+++ b/vendor/github.com/mitchellh/reflectwalk/README.md
@@ -0,0 +1,6 @@
+# reflectwalk
+
+reflectwalk is a Go library for "walking" a value in Go using reflection,
+in the same way a directory tree can be "walked" on the filesystem. Walking
+a complex structure can allow you to do manipulations on unknown structures
+such as those decoded from JSON.
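+
+For example, a walker that counts primitive values is just a type
+implementing `PrimitiveWalker` (a minimal sketch):
+
+```go
+package main
+
+import (
+	"fmt"
+	"reflect"
+
+	"github.com/mitchellh/reflectwalk"
+)
+
+// countWalker counts every primitive value encountered during the walk.
+type countWalker struct{ n int }
+
+func (c *countWalker) Primitive(reflect.Value) error {
+	c.n++
+	return nil
+}
+
+func main() {
+	w := &countWalker{}
+	if err := reflectwalk.Walk(map[string]int{"a": 1, "b": 2}, w); err != nil {
+		panic(err)
+	}
+	fmt.Println(w.n) // 4: two keys and two values
+}
+```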
diff --git a/vendor/github.com/mitchellh/reflectwalk/location.go b/vendor/github.com/mitchellh/reflectwalk/location.go
new file mode 100644
index 000000000..6a7f17611
--- /dev/null
+++ b/vendor/github.com/mitchellh/reflectwalk/location.go
@@ -0,0 +1,19 @@
+package reflectwalk
+
+//go:generate stringer -type=Location location.go
+
+type Location uint
+
+const (
+ None Location = iota
+ Map
+ MapKey
+ MapValue
+ Slice
+ SliceElem
+ Array
+ ArrayElem
+ Struct
+ StructField
+ WalkLoc
+)
diff --git a/vendor/github.com/mitchellh/reflectwalk/location_string.go b/vendor/github.com/mitchellh/reflectwalk/location_string.go
new file mode 100644
index 000000000..70760cf4c
--- /dev/null
+++ b/vendor/github.com/mitchellh/reflectwalk/location_string.go
@@ -0,0 +1,16 @@
+// Code generated by "stringer -type=Location location.go"; DO NOT EDIT.
+
+package reflectwalk
+
+import "fmt"
+
+const _Location_name = "NoneMapMapKeyMapValueSliceSliceElemArrayArrayElemStructStructFieldWalkLoc"
+
+var _Location_index = [...]uint8{0, 4, 7, 13, 21, 26, 35, 40, 49, 55, 66, 73}
+
+func (i Location) String() string {
+ if i >= Location(len(_Location_index)-1) {
+ return fmt.Sprintf("Location(%d)", i)
+ }
+ return _Location_name[_Location_index[i]:_Location_index[i+1]]
+}
diff --git a/vendor/github.com/mitchellh/reflectwalk/reflectwalk.go b/vendor/github.com/mitchellh/reflectwalk/reflectwalk.go
new file mode 100644
index 000000000..7fee7b050
--- /dev/null
+++ b/vendor/github.com/mitchellh/reflectwalk/reflectwalk.go
@@ -0,0 +1,420 @@
+// Package reflectwalk allows you to "walk" complex structures
+// similar to how you may "walk" a filesystem: visiting every element one
+// by one and calling callback functions allowing you to handle and manipulate
+// those elements.
+package reflectwalk
+
+import (
+ "errors"
+ "reflect"
+)
+
+// PrimitiveWalker implementations are able to handle primitive values
+// within complex structures. Primitive values are numbers, strings,
+// booleans, funcs, chans.
+//
+// These primitive values are often members of more complex
+// structures (slices, maps, etc.) that are walkable by other interfaces.
+type PrimitiveWalker interface {
+ Primitive(reflect.Value) error
+}
+
+// InterfaceWalker implementations are able to handle interface values as they
+// are encountered during the walk.
+type InterfaceWalker interface {
+ Interface(reflect.Value) error
+}
+
+// MapWalker implementations are able to handle individual elements
+// found within a map structure.
+type MapWalker interface {
+ Map(m reflect.Value) error
+ MapElem(m, k, v reflect.Value) error
+}
+
+// SliceWalker implementations are able to handle slice elements found
+// within complex structures.
+type SliceWalker interface {
+ Slice(reflect.Value) error
+ SliceElem(int, reflect.Value) error
+}
+
+// ArrayWalker implementations are able to handle array elements found
+// within complex structures.
+type ArrayWalker interface {
+ Array(reflect.Value) error
+ ArrayElem(int, reflect.Value) error
+}
+
+// StructWalker is an interface that has methods that are called for
+// structs when a Walk is done.
+type StructWalker interface {
+ Struct(reflect.Value) error
+ StructField(reflect.StructField, reflect.Value) error
+}
+
+// EnterExitWalker implementations are notified before and after
+// they walk deeper into complex structures (into struct fields,
+// into slice elements, etc.)
+type EnterExitWalker interface {
+ Enter(Location) error
+ Exit(Location) error
+}
+
+// PointerWalker implementations are notified when the value they're
+// walking is a pointer or not. PointerEnter is called for _every_ value,
+// whether it is a pointer or not.
+type PointerWalker interface {
+ PointerEnter(bool) error
+ PointerExit(bool) error
+}
+
+// PointerValueWalker implementations are notified with the value of
+// a particular pointer when a pointer is walked. Pointer is called
+// right before PointerEnter.
+type PointerValueWalker interface {
+ Pointer(reflect.Value) error
+}
+
+// SkipEntry can be returned from walk functions to skip walking
+// the value of this field. This is only valid in the following functions:
+//
+// - Struct: skips all fields from being walked
+// - StructField: skips walking the struct value
+//
+var SkipEntry = errors.New("skip this entry")
+
+// Walk takes an arbitrary value and an interface and traverses the
+// value, calling callbacks on the interface if they are supported.
+// The interface should implement one or more of the walker interfaces
+// in this package, such as PrimitiveWalker, StructWalker, etc.
+func Walk(data, walker interface{}) (err error) {
+ v := reflect.ValueOf(data)
+ ew, ok := walker.(EnterExitWalker)
+ if ok {
+ err = ew.Enter(WalkLoc)
+ }
+
+ if err == nil {
+ err = walk(v, walker)
+ }
+
+ if ok && err == nil {
+ err = ew.Exit(WalkLoc)
+ }
+
+ return
+}
+
+func walk(v reflect.Value, w interface{}) (err error) {
+ // Determine if we're receiving a pointer and if so notify the walker.
+ // The logic here is convoluted but very important (tests will fail if
+ // almost any part is changed). I will try to explain here.
+ //
+ // First, we check if the value is an interface, if so, we really need
+ // to check the interface's VALUE to see whether it is a pointer.
+ //
+ // Check whether the value is then a pointer. If so, then set pointer
+ // to true to notify the user.
+ //
+ // If we still have a pointer or an interface after the indirections, then
+ // we unwrap another level.
+ //
+ // At this time, we also set "v" to be the dereferenced value. This is
+ // because once we've unwrapped the pointer we want to use that value.
+ pointer := false
+ pointerV := v
+
+ for {
+ if pointerV.Kind() == reflect.Interface {
+ if iw, ok := w.(InterfaceWalker); ok {
+ if err = iw.Interface(pointerV); err != nil {
+ return
+ }
+ }
+
+ pointerV = pointerV.Elem()
+ }
+
+ if pointerV.Kind() == reflect.Ptr {
+ if pw, ok := w.(PointerValueWalker); ok {
+ if err = pw.Pointer(pointerV); err != nil {
+ if err == SkipEntry {
+ // Skip the rest of this entry but clear the error
+ return nil
+ }
+
+ return
+ }
+ }
+
+ pointer = true
+ v = reflect.Indirect(pointerV)
+ }
+ if pw, ok := w.(PointerWalker); ok {
+ if err = pw.PointerEnter(pointer); err != nil {
+ return
+ }
+
+ defer func(pointer bool) {
+ if err != nil {
+ return
+ }
+
+ err = pw.PointerExit(pointer)
+ }(pointer)
+ }
+
+ if pointer {
+ pointerV = v
+ }
+ pointer = false
+
+ // If we still have a pointer or interface we have to indirect another level.
+ switch pointerV.Kind() {
+ case reflect.Ptr, reflect.Interface:
+ continue
+ }
+ break
+ }
+
+ // We preserve the original value here because if it is an interface
+ // type, we want to pass that directly into the walkPrimitive, so that
+ // we can set it.
+ originalV := v
+ if v.Kind() == reflect.Interface {
+ v = v.Elem()
+ }
+
+ k := v.Kind()
+ if k >= reflect.Int && k <= reflect.Complex128 {
+ k = reflect.Int
+ }
+
+ switch k {
+ // Primitives
+ case reflect.Bool, reflect.Chan, reflect.Func, reflect.Int, reflect.String, reflect.Invalid:
+ err = walkPrimitive(originalV, w)
+ return
+ case reflect.Map:
+ err = walkMap(v, w)
+ return
+ case reflect.Slice:
+ err = walkSlice(v, w)
+ return
+ case reflect.Struct:
+ err = walkStruct(v, w)
+ return
+ case reflect.Array:
+ err = walkArray(v, w)
+ return
+ default:
+ panic("unsupported type: " + k.String())
+ }
+}
+
+func walkMap(v reflect.Value, w interface{}) error {
+ ew, ewok := w.(EnterExitWalker)
+ if ewok {
+ ew.Enter(Map)
+ }
+
+ if mw, ok := w.(MapWalker); ok {
+ if err := mw.Map(v); err != nil {
+ return err
+ }
+ }
+
+ for _, k := range v.MapKeys() {
+ kv := v.MapIndex(k)
+
+ if mw, ok := w.(MapWalker); ok {
+ if err := mw.MapElem(v, k, kv); err != nil {
+ return err
+ }
+ }
+
+ ew, ok := w.(EnterExitWalker)
+ if ok {
+ ew.Enter(MapKey)
+ }
+
+ if err := walk(k, w); err != nil {
+ return err
+ }
+
+ if ok {
+ ew.Exit(MapKey)
+ ew.Enter(MapValue)
+ }
+
+ // get the map value again as it may have changed in the MapElem call
+ if err := walk(v.MapIndex(k), w); err != nil {
+ return err
+ }
+
+ if ok {
+ ew.Exit(MapValue)
+ }
+ }
+
+ if ewok {
+ ew.Exit(Map)
+ }
+
+ return nil
+}
+
+func walkPrimitive(v reflect.Value, w interface{}) error {
+ if pw, ok := w.(PrimitiveWalker); ok {
+ return pw.Primitive(v)
+ }
+
+ return nil
+}
+
+func walkSlice(v reflect.Value, w interface{}) (err error) {
+ ew, ok := w.(EnterExitWalker)
+ if ok {
+ ew.Enter(Slice)
+ }
+
+ if sw, ok := w.(SliceWalker); ok {
+ if err := sw.Slice(v); err != nil {
+ return err
+ }
+ }
+
+ for i := 0; i < v.Len(); i++ {
+ elem := v.Index(i)
+
+ if sw, ok := w.(SliceWalker); ok {
+ if err := sw.SliceElem(i, elem); err != nil {
+ return err
+ }
+ }
+
+ ew, ok := w.(EnterExitWalker)
+ if ok {
+ ew.Enter(SliceElem)
+ }
+
+ if err := walk(elem, w); err != nil {
+ return err
+ }
+
+ if ok {
+ ew.Exit(SliceElem)
+ }
+ }
+
+ ew, ok = w.(EnterExitWalker)
+ if ok {
+ ew.Exit(Slice)
+ }
+
+ return nil
+}
+
+func walkArray(v reflect.Value, w interface{}) (err error) {
+ ew, ok := w.(EnterExitWalker)
+ if ok {
+ ew.Enter(Array)
+ }
+
+ if aw, ok := w.(ArrayWalker); ok {
+ if err := aw.Array(v); err != nil {
+ return err
+ }
+ }
+
+ for i := 0; i < v.Len(); i++ {
+ elem := v.Index(i)
+
+ if aw, ok := w.(ArrayWalker); ok {
+ if err := aw.ArrayElem(i, elem); err != nil {
+ return err
+ }
+ }
+
+ ew, ok := w.(EnterExitWalker)
+ if ok {
+ ew.Enter(ArrayElem)
+ }
+
+ if err := walk(elem, w); err != nil {
+ return err
+ }
+
+ if ok {
+ ew.Exit(ArrayElem)
+ }
+ }
+
+ ew, ok = w.(EnterExitWalker)
+ if ok {
+ ew.Exit(Array)
+ }
+
+ return nil
+}
+
+func walkStruct(v reflect.Value, w interface{}) (err error) {
+ ew, ewok := w.(EnterExitWalker)
+ if ewok {
+ ew.Enter(Struct)
+ }
+
+ skip := false
+ if sw, ok := w.(StructWalker); ok {
+ err = sw.Struct(v)
+ if err == SkipEntry {
+ skip = true
+ err = nil
+ }
+ if err != nil {
+ return
+ }
+ }
+
+ if !skip {
+ vt := v.Type()
+ for i := 0; i < vt.NumField(); i++ {
+ sf := vt.Field(i)
+ f := v.FieldByIndex([]int{i})
+
+ if sw, ok := w.(StructWalker); ok {
+ err = sw.StructField(sf, f)
+
+ // SkipEntry just pretends this field doesn't even exist
+ if err == SkipEntry {
+ continue
+ }
+
+ if err != nil {
+ return
+ }
+ }
+
+ ew, ok := w.(EnterExitWalker)
+ if ok {
+ ew.Enter(StructField)
+ }
+
+ err = walk(f, w)
+ if err != nil {
+ return
+ }
+
+ if ok {
+ ew.Exit(StructField)
+ }
+ }
+ }
+
+ if ewok {
+ ew.Exit(Struct)
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/rogpeppe/go-internal/LICENSE b/vendor/github.com/rogpeppe/go-internal/LICENSE
new file mode 100644
index 000000000..49ea0f928
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2018 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/rogpeppe/go-internal/fmtsort/mapelem.go b/vendor/github.com/rogpeppe/go-internal/fmtsort/mapelem.go
new file mode 100644
index 000000000..98e4e38f4
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/fmtsort/mapelem.go
@@ -0,0 +1,20 @@
+package fmtsort
+
+import "reflect"
+
+const brokenNaNs = false
+
+func mapElems(mapValue reflect.Value) ([]reflect.Value, []reflect.Value) {
+ // Note: this code is arranged to not panic even in the presence
+ // of a concurrent map update. The runtime is responsible for
+ // yelling loudly if that happens. See issue 33275.
+ n := mapValue.Len()
+ key := make([]reflect.Value, 0, n)
+ value := make([]reflect.Value, 0, n)
+ iter := mapValue.MapRange()
+ for iter.Next() {
+ key = append(key, iter.Key())
+ value = append(value, iter.Value())
+ }
+ return key, value
+}
diff --git a/vendor/github.com/rogpeppe/go-internal/fmtsort/sort.go b/vendor/github.com/rogpeppe/go-internal/fmtsort/sort.go
new file mode 100644
index 000000000..7f5185417
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/fmtsort/sort.go
@@ -0,0 +1,209 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package fmtsort provides a general stable ordering mechanism
+// for maps, on behalf of the fmt and text/template packages.
+// It is not guaranteed to be efficient and works only for types
+// that are valid map keys.
+package fmtsort
+
+import (
+ "reflect"
+ "sort"
+)
+
+// Note: Throughout this package we avoid calling reflect.Value.Interface as
+// it is not always legal to do so and it's easier to avoid the issue than to face it.
+
+// SortedMap represents a map's keys and values. The keys and values are
+// aligned in index order: Value[i] is the value in the map corresponding to Key[i].
+type SortedMap struct {
+ Key []reflect.Value
+ Value []reflect.Value
+}
+
+func (o *SortedMap) Len() int { return len(o.Key) }
+func (o *SortedMap) Less(i, j int) bool { return compare(o.Key[i], o.Key[j]) < 0 }
+func (o *SortedMap) Swap(i, j int) {
+ o.Key[i], o.Key[j] = o.Key[j], o.Key[i]
+ o.Value[i], o.Value[j] = o.Value[j], o.Value[i]
+}
+
+// Sort accepts a map and returns a SortedMap that has the same keys and
+// values but in a stable sorted order according to the keys, modulo issues
+// raised by unorderable key values such as NaNs.
+//
+// The ordering rules are more general than with Go's < operator:
+//
+// - when applicable, nil compares low
+// - ints, floats, and strings order by <
+// - NaN compares less than non-NaN floats
+// - bool compares false before true
+// - complex compares real, then imag
+// - pointers compare by machine address
+// - channel values compare by machine address
+// - structs compare each field in turn
+// - arrays compare each element in turn.
+// Otherwise identical arrays compare by length.
+// - interface values compare first by reflect.Type describing the concrete type
+// and then by concrete value as described in the previous rules.
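+//
+// For example, Sort(reflect.ValueOf(map[int]string{3: "c", 1: "a", 2: "b"}))
+// returns the keys in the order 1, 2, 3.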
+func Sort(mapValue reflect.Value) *SortedMap {
+ if mapValue.Type().Kind() != reflect.Map {
+ return nil
+ }
+ key, value := mapElems(mapValue)
+ sorted := &SortedMap{
+ Key: key,
+ Value: value,
+ }
+ sort.Stable(sorted)
+ return sorted
+}
+
+// compare compares two values of the same type. It returns -1 if a < b,
+// 0 if a == b, and 1 if a > b.
+// If the types differ, it returns -1.
+// See the comment on Sort for the comparison rules.
+func compare(aVal, bVal reflect.Value) int {
+ aType, bType := aVal.Type(), bVal.Type()
+ if aType != bType {
+ return -1 // No good answer possible, but don't return 0: they're not equal.
+ }
+ switch aVal.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ a, b := aVal.Int(), bVal.Int()
+ switch {
+ case a < b:
+ return -1
+ case a > b:
+ return 1
+ default:
+ return 0
+ }
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ a, b := aVal.Uint(), bVal.Uint()
+ switch {
+ case a < b:
+ return -1
+ case a > b:
+ return 1
+ default:
+ return 0
+ }
+ case reflect.String:
+ a, b := aVal.String(), bVal.String()
+ switch {
+ case a < b:
+ return -1
+ case a > b:
+ return 1
+ default:
+ return 0
+ }
+ case reflect.Float32, reflect.Float64:
+ return floatCompare(aVal.Float(), bVal.Float())
+ case reflect.Complex64, reflect.Complex128:
+ a, b := aVal.Complex(), bVal.Complex()
+ if c := floatCompare(real(a), real(b)); c != 0 {
+ return c
+ }
+ return floatCompare(imag(a), imag(b))
+ case reflect.Bool:
+ a, b := aVal.Bool(), bVal.Bool()
+ switch {
+ case a == b:
+ return 0
+ case a:
+ return 1
+ default:
+ return -1
+ }
+ case reflect.Ptr:
+ a, b := aVal.Pointer(), bVal.Pointer()
+ switch {
+ case a < b:
+ return -1
+ case a > b:
+ return 1
+ default:
+ return 0
+ }
+ case reflect.Chan:
+ if c, ok := nilCompare(aVal, bVal); ok {
+ return c
+ }
+ ap, bp := aVal.Pointer(), bVal.Pointer()
+ switch {
+ case ap < bp:
+ return -1
+ case ap > bp:
+ return 1
+ default:
+ return 0
+ }
+ case reflect.Struct:
+ for i := 0; i < aVal.NumField(); i++ {
+ if c := compare(aVal.Field(i), bVal.Field(i)); c != 0 {
+ return c
+ }
+ }
+ return 0
+ case reflect.Array:
+ for i := 0; i < aVal.Len(); i++ {
+ if c := compare(aVal.Index(i), bVal.Index(i)); c != 0 {
+ return c
+ }
+ }
+ return 0
+ case reflect.Interface:
+ if c, ok := nilCompare(aVal, bVal); ok {
+ return c
+ }
+ c := compare(reflect.ValueOf(aVal.Elem().Type()), reflect.ValueOf(bVal.Elem().Type()))
+ if c != 0 {
+ return c
+ }
+ return compare(aVal.Elem(), bVal.Elem())
+ default:
+ // Certain types cannot appear as keys (maps, funcs, slices), but be explicit.
+ panic("bad type in compare: " + aType.String())
+ }
+}
+
+// nilCompare checks whether either value is nil. If not, the boolean is false.
+// If either value is nil, the boolean is true and the integer is the comparison
+// value. The comparison is defined to be 0 if both are nil, otherwise the one
+// nil value compares low. Both arguments must represent a chan, func,
+// interface, map, pointer, or slice.
+func nilCompare(aVal, bVal reflect.Value) (int, bool) {
+ if aVal.IsNil() {
+ if bVal.IsNil() {
+ return 0, true
+ }
+ return -1, true
+ }
+ if bVal.IsNil() {
+ return 1, true
+ }
+ return 0, false
+}
+
+// floatCompare compares two floating-point values. NaNs compare low.
+func floatCompare(a, b float64) int {
+ switch {
+ case isNaN(a):
+ return -1 // No good answer if b is a NaN so don't bother checking.
+ case isNaN(b):
+ return 1
+ case a < b:
+ return -1
+ case a > b:
+ return 1
+ }
+ return 0
+}
+
+func isNaN(a float64) bool {
+ return a != a
+}
diff --git a/vendor/github.com/shopspring/decimal/.gitignore b/vendor/github.com/shopspring/decimal/.gitignore
new file mode 100644
index 000000000..8a43ce9d7
--- /dev/null
+++ b/vendor/github.com/shopspring/decimal/.gitignore
@@ -0,0 +1,6 @@
+.git
+*.swp
+
+# IntelliJ
+.idea/
+*.iml
diff --git a/vendor/github.com/shopspring/decimal/.travis.yml b/vendor/github.com/shopspring/decimal/.travis.yml
new file mode 100644
index 000000000..55d42b289
--- /dev/null
+++ b/vendor/github.com/shopspring/decimal/.travis.yml
@@ -0,0 +1,13 @@
+language: go
+
+go:
+ - 1.7.x
+ - 1.12.x
+ - 1.13.x
+ - tip
+
+install:
+ - go build .
+
+script:
+ - go test -v
diff --git a/vendor/github.com/shopspring/decimal/CHANGELOG.md b/vendor/github.com/shopspring/decimal/CHANGELOG.md
new file mode 100644
index 000000000..01ba02feb
--- /dev/null
+++ b/vendor/github.com/shopspring/decimal/CHANGELOG.md
@@ -0,0 +1,19 @@
+## Decimal v1.2.0
+
+#### BREAKING
+- Drop support for Go version older than 1.7 [#172](https://github.com/shopspring/decimal/pull/172)
+
+#### FEATURES
+- Add NewFromInt and NewFromInt32 initializers [#72](https://github.com/shopspring/decimal/pull/72)
+- Add support for Go modules [#157](https://github.com/shopspring/decimal/pull/157)
+- Add BigInt, BigFloat helper methods [#171](https://github.com/shopspring/decimal/pull/171)
+
+#### ENHANCEMENTS
+- Memory usage optimization [#160](https://github.com/shopspring/decimal/pull/160)
+- Updated travis CI golang versions [#156](https://github.com/shopspring/decimal/pull/156)
+- Update documentation [#173](https://github.com/shopspring/decimal/pull/173)
+- Improve code quality [#174](https://github.com/shopspring/decimal/pull/174)
+
+#### BUGFIXES
+- Revert remove insignificant digits [#159](https://github.com/shopspring/decimal/pull/159)
+- Remove 15 interval for RoundCash [#166](https://github.com/shopspring/decimal/pull/166)
diff --git a/vendor/github.com/shopspring/decimal/LICENSE b/vendor/github.com/shopspring/decimal/LICENSE
new file mode 100644
index 000000000..ad2148aaf
--- /dev/null
+++ b/vendor/github.com/shopspring/decimal/LICENSE
@@ -0,0 +1,45 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Spring, Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+- Based on https://github.com/oguzbilgic/fpd, which has the following license:
+"""
+The MIT License (MIT)
+
+Copyright (c) 2013 Oguz Bilgic
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""
diff --git a/vendor/github.com/shopspring/decimal/README.md b/vendor/github.com/shopspring/decimal/README.md
new file mode 100644
index 000000000..b70f90159
--- /dev/null
+++ b/vendor/github.com/shopspring/decimal/README.md
@@ -0,0 +1,130 @@
+# decimal
+
+[![Build Status](https://travis-ci.org/shopspring/decimal.png?branch=master)](https://travis-ci.org/shopspring/decimal) [![GoDoc](https://godoc.org/github.com/shopspring/decimal?status.svg)](https://godoc.org/github.com/shopspring/decimal) [![Go Report Card](https://goreportcard.com/badge/github.com/shopspring/decimal)](https://goreportcard.com/report/github.com/shopspring/decimal)
+
+Arbitrary-precision fixed-point decimal numbers in Go.
+
+_Note:_ The Decimal library can "only" represent numbers with a maximum of 2^31 digits after the decimal point.
+
+## Features
+
+ * The zero-value is 0, and is safe to use without initialization
+ * Addition, subtraction, multiplication with no loss of precision
+ * Division with specified precision
+ * Database/sql serialization/deserialization
+ * JSON and XML serialization/deserialization
+
+## Install
+
+Run `go get github.com/shopspring/decimal`
+
+## Requirements
+
+The Decimal library requires Go version `>=1.7`.
+
+## Usage
+
+```go
+package main
+
+import (
+ "fmt"
+ "github.com/shopspring/decimal"
+)
+
+func main() {
+ price, err := decimal.NewFromString("136.02")
+ if err != nil {
+ panic(err)
+ }
+
+ quantity := decimal.NewFromInt(3)
+
+ fee, _ := decimal.NewFromString(".035")
+ taxRate, _ := decimal.NewFromString(".08875")
+
+ subtotal := price.Mul(quantity)
+
+ preTax := subtotal.Mul(fee.Add(decimal.NewFromFloat(1)))
+
+ total := preTax.Mul(taxRate.Add(decimal.NewFromFloat(1)))
+
+ fmt.Println("Subtotal:", subtotal) // Subtotal: 408.06
+ fmt.Println("Pre-tax:", preTax) // Pre-tax: 422.3421
+ fmt.Println("Taxes:", total.Sub(preTax)) // Taxes: 37.482861375
+ fmt.Println("Total:", total) // Total: 459.824961375
+ fmt.Println("Tax rate:", total.Sub(preTax).Div(preTax)) // Tax rate: 0.08875
+}
+```
+
+## Documentation
+
+http://godoc.org/github.com/shopspring/decimal
+
+## Production Usage
+
+* [Spring](https://shopspring.com/), since August 14, 2014.
+* If you are using this in production, please let us know!
+
+## FAQ
+
+#### Why don't you just use float64?
+
+Because float64 (or any binary floating point type, actually) can't represent
+numbers such as `0.1` exactly.
+
+Consider this code: http://play.golang.org/p/TQBd4yJe6B You might expect that
+it prints out `10`, but it actually prints `9.999999999999831`. Over time,
+these small errors can really add up!
+
+#### Why don't you just use big.Rat?
+
+big.Rat is fine for representing rational numbers, but Decimal is better for
+representing money. Why? Here's a (contrived) example:
+
+Let's say you use big.Rat, and you have two numbers, x and y, both
+representing 1/3, and you have `z = 1 - x - y = 1/3`. If you print each one
+out, the string output has to stop somewhere (let's say it stops at 3 decimal
+digits, for simplicity), so you'll get 0.333, 0.333, and 0.333. But where did
+the other 0.001 go?
+
+Here's the above example as code: http://play.golang.org/p/lCZZs0w9KE
+
+With Decimal, the strings being printed out represent the number exactly. So,
+if you have `x = y = 1/3` (with precision 3), they will actually be equal to
+0.333, and when you do `z = 1 - x - y`, `z` will be equal to .334. No money is
+unaccounted for!
+
+You still have to be careful. If you want to split a number `N` 3 ways, you
+can't just send `N/3` to three different people. You have to pick one to send
+`N - (2/3*N)` to. That person will receive the fraction of a penny remainder.
+
+But, it is much easier to be careful with Decimal than with big.Rat.
+
+#### Why isn't the API similar to big.Int's?
+
+big.Int's API is built to reduce the number of memory allocations for maximal
+performance. This makes sense for its use-case, but the trade-off is that the
+API is awkward and easy to misuse.
+
+For example, to add two big.Ints, you do: `z := new(big.Int).Add(x, y)`. A
+developer unfamiliar with this API might try to do `z := a.Add(a, b)`. This
+modifies `a` and sets `z` as an alias for `a`, which they might not expect. It
+also modifies any other aliases to `a`.
+
+Here's an example of the subtle bugs you can introduce with big.Int's API:
+https://play.golang.org/p/x2R_78pa8r
+
+In contrast, it's difficult to make such mistakes with Decimal. Decimals
+behave like other Go number types: even though `a = b` will not deep copy
+`b` into `a`, it is impossible to modify a Decimal, since all Decimal methods
+return new Decimals and do not modify the originals. The downside is that
+this causes extra allocations, so Decimal is less performant. My assumption
+is that if you're using Decimals, you probably care more about correctness
+than performance.
+
+## License
+
+The MIT License (MIT)
+
+This is a heavily modified fork of [fpd.Decimal](https://github.com/oguzbilgic/fpd), which was also released under the MIT License.
diff --git a/vendor/github.com/shopspring/decimal/decimal-go.go b/vendor/github.com/shopspring/decimal/decimal-go.go
new file mode 100644
index 000000000..9958d6902
--- /dev/null
+++ b/vendor/github.com/shopspring/decimal/decimal-go.go
@@ -0,0 +1,415 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Multiprecision decimal numbers.
+// For floating-point formatting only; not general purpose.
+// Only operations are assign and (binary) left/right shift.
+// Can do binary floating point in multiprecision decimal precisely
+// because 2 divides 10; cannot do decimal floating point
+// in multiprecision binary precisely.
+
+package decimal
+
+type decimal struct {
+ d [800]byte // digits, big-endian representation
+ nd int // number of digits used
+ dp int // decimal point
+ neg bool // negative flag
+ trunc bool // discarded nonzero digits beyond d[:nd]
+}
+
+func (a *decimal) String() string {
+ n := 10 + a.nd
+ if a.dp > 0 {
+ n += a.dp
+ }
+ if a.dp < 0 {
+ n += -a.dp
+ }
+
+ buf := make([]byte, n)
+ w := 0
+ switch {
+ case a.nd == 0:
+ return "0"
+
+ case a.dp <= 0:
+ // zeros fill space between decimal point and digits
+ buf[w] = '0'
+ w++
+ buf[w] = '.'
+ w++
+ w += digitZero(buf[w : w+-a.dp])
+ w += copy(buf[w:], a.d[0:a.nd])
+
+ case a.dp < a.nd:
+ // decimal point in middle of digits
+ w += copy(buf[w:], a.d[0:a.dp])
+ buf[w] = '.'
+ w++
+ w += copy(buf[w:], a.d[a.dp:a.nd])
+
+ default:
+ // zeros fill space between digits and decimal point
+ w += copy(buf[w:], a.d[0:a.nd])
+ w += digitZero(buf[w : w+a.dp-a.nd])
+ }
+ return string(buf[0:w])
+}
+
+func digitZero(dst []byte) int {
+ for i := range dst {
+ dst[i] = '0'
+ }
+ return len(dst)
+}
+
+// trim trailing zeros from number.
+// (They are meaningless; the decimal point is tracked
+// independent of the number of digits.)
+func trim(a *decimal) {
+ for a.nd > 0 && a.d[a.nd-1] == '0' {
+ a.nd--
+ }
+ if a.nd == 0 {
+ a.dp = 0
+ }
+}
+
+// Assign v to a.
+func (a *decimal) Assign(v uint64) {
+ var buf [24]byte
+
+ // Write reversed decimal in buf.
+ n := 0
+ for v > 0 {
+ v1 := v / 10
+ v -= 10 * v1
+ buf[n] = byte(v + '0')
+ n++
+ v = v1
+ }
+
+ // Reverse again to produce forward decimal in a.d.
+ a.nd = 0
+ for n--; n >= 0; n-- {
+ a.d[a.nd] = buf[n]
+ a.nd++
+ }
+ a.dp = a.nd
+ trim(a)
+}
+
+// Maximum shift that we can do in one pass without overflow.
+// A uint has 32 or 64 bits, and we have to be able to accommodate 9<<k.
+const uintSize = 32 << (^uint(0) >> 63)
+const maxShift = uintSize - 4
+
+// Binary shift right (/ 2) by k bits. k <= maxShift to avoid overflow.
+func rightShift(a *decimal, k uint) {
+ r := 0 // read pointer
+ w := 0 // write pointer
+
+ // Pick up enough leading digits to cover first shift.
+ var n uint
+ for ; n>>k == 0; r++ {
+ if r >= a.nd {
+ if n == 0 {
+ // a == 0; shouldn't get here, but handle anyway.
+ a.nd = 0
+ return
+ }
+ for n>>k == 0 {
+ n = n * 10
+ r++
+ }
+ break
+ }
+ c := uint(a.d[r])
+ n = n*10 + c - '0'
+ }
+ a.dp -= r - 1
+
+ var mask uint = (1 << k) - 1
+
+ // Pick up a digit, put down a digit.
+ for ; r < a.nd; r++ {
+ c := uint(a.d[r])
+ dig := n >> k
+ n &= mask
+ a.d[w] = byte(dig + '0')
+ w++
+ n = n*10 + c - '0'
+ }
+
+ // Put down extra digits.
+ for n > 0 {
+ dig := n >> k
+ n &= mask
+ if w < len(a.d) {
+ a.d[w] = byte(dig + '0')
+ w++
+ } else if dig > 0 {
+ a.trunc = true
+ }
+ n = n * 10
+ }
+
+ a.nd = w
+ trim(a)
+}
+
+// Cheat sheet for left shift: table indexed by shift count giving
+// number of new digits that will be introduced by that shift.
+//
+// For example, leftcheats[4] = {2, "625"}. That means that
+// if we are shifting by 4 (multiplying by 16), it will add 2 digits
+// when the string prefix is "625" through "999", and one fewer digit
+// if the string prefix is "000" through "624".
+//
+// Credit for this trick goes to Ken.
+
+type leftCheat struct {
+ delta int // number of new digits
+ cutoff string // minus one digit if original < cutoff.
+}
+
+var leftcheats = []leftCheat{
+ // Leading digits of 1/2^i = 5^i.
+ // 5^23 is not an exact 64-bit floating point number,
+ // so have to use bc for the math.
+ // Go up to 60 to be large enough for 32bit and 64bit platforms.
+ /*
+ seq 60 | sed 's/^/5^/' | bc |
+ awk 'BEGIN{ print "\t{ 0, \"\" }," }
+ {
+ log2 = log(2)/log(10)
+ printf("\t{ %d, \"%s\" },\t// * %d\n",
+ int(log2*NR+1), $0, 2**NR)
+ }'
+ */
+ {0, ""},
+ {1, "5"}, // * 2
+ {1, "25"}, // * 4
+ {1, "125"}, // * 8
+ {2, "625"}, // * 16
+ {2, "3125"}, // * 32
+ {2, "15625"}, // * 64
+ {3, "78125"}, // * 128
+ {3, "390625"}, // * 256
+ {3, "1953125"}, // * 512
+ {4, "9765625"}, // * 1024
+ {4, "48828125"}, // * 2048
+ {4, "244140625"}, // * 4096
+ {4, "1220703125"}, // * 8192
+ {5, "6103515625"}, // * 16384
+ {5, "30517578125"}, // * 32768
+ {5, "152587890625"}, // * 65536
+ {6, "762939453125"}, // * 131072
+ {6, "3814697265625"}, // * 262144
+ {6, "19073486328125"}, // * 524288
+ {7, "95367431640625"}, // * 1048576
+ {7, "476837158203125"}, // * 2097152
+ {7, "2384185791015625"}, // * 4194304
+ {7, "11920928955078125"}, // * 8388608
+ {8, "59604644775390625"}, // * 16777216
+ {8, "298023223876953125"}, // * 33554432
+ {8, "1490116119384765625"}, // * 67108864
+ {9, "7450580596923828125"}, // * 134217728
+ {9, "37252902984619140625"}, // * 268435456
+ {9, "186264514923095703125"}, // * 536870912
+ {10, "931322574615478515625"}, // * 1073741824
+ {10, "4656612873077392578125"}, // * 2147483648
+ {10, "23283064365386962890625"}, // * 4294967296
+ {10, "116415321826934814453125"}, // * 8589934592
+ {11, "582076609134674072265625"}, // * 17179869184
+ {11, "2910383045673370361328125"}, // * 34359738368
+ {11, "14551915228366851806640625"}, // * 68719476736
+ {12, "72759576141834259033203125"}, // * 137438953472
+ {12, "363797880709171295166015625"}, // * 274877906944
+ {12, "1818989403545856475830078125"}, // * 549755813888
+ {13, "9094947017729282379150390625"}, // * 1099511627776
+ {13, "45474735088646411895751953125"}, // * 2199023255552
+ {13, "227373675443232059478759765625"}, // * 4398046511104
+ {13, "1136868377216160297393798828125"}, // * 8796093022208
+ {14, "5684341886080801486968994140625"}, // * 17592186044416
+ {14, "28421709430404007434844970703125"}, // * 35184372088832
+ {14, "142108547152020037174224853515625"}, // * 70368744177664
+ {15, "710542735760100185871124267578125"}, // * 140737488355328
+ {15, "3552713678800500929355621337890625"}, // * 281474976710656
+ {15, "17763568394002504646778106689453125"}, // * 562949953421312
+ {16, "88817841970012523233890533447265625"}, // * 1125899906842624
+ {16, "444089209850062616169452667236328125"}, // * 2251799813685248
+ {16, "2220446049250313080847263336181640625"}, // * 4503599627370496
+ {16, "11102230246251565404236316680908203125"}, // * 9007199254740992
+ {17, "55511151231257827021181583404541015625"}, // * 18014398509481984
+ {17, "277555756156289135105907917022705078125"}, // * 36028797018963968
+ {17, "1387778780781445675529539585113525390625"}, // * 72057594037927936
+ {18, "6938893903907228377647697925567626953125"}, // * 144115188075855872
+ {18, "34694469519536141888238489627838134765625"}, // * 288230376151711744
+ {18, "173472347597680709441192448139190673828125"}, // * 576460752303423488
+ {19, "867361737988403547205962240695953369140625"}, // * 1152921504606846976
+}
+
+// Is the leading prefix of b lexicographically less than s?
+func prefixIsLessThan(b []byte, s string) bool {
+ for i := 0; i < len(s); i++ {
+ if i >= len(b) {
+ return true
+ }
+ if b[i] != s[i] {
+ return b[i] < s[i]
+ }
+ }
+ return false
+}
+
+// Binary shift left (* 2) by k bits. k <= maxShift to avoid overflow.
+func leftShift(a *decimal, k uint) {
+ delta := leftcheats[k].delta
+ if prefixIsLessThan(a.d[0:a.nd], leftcheats[k].cutoff) {
+ delta--
+ }
+
+ r := a.nd // read index
+ w := a.nd + delta // write index
+
+ // Pick up a digit, put down a digit.
+ var n uint
+ for r--; r >= 0; r-- {
+ n += (uint(a.d[r]) - '0') << k
+ quo := n / 10
+ rem := n - 10*quo
+ w--
+ if w < len(a.d) {
+ a.d[w] = byte(rem + '0')
+ } else if rem != 0 {
+ a.trunc = true
+ }
+ n = quo
+ }
+
+ // Put down extra digits.
+ for n > 0 {
+ quo := n / 10
+ rem := n - 10*quo
+ w--
+ if w < len(a.d) {
+ a.d[w] = byte(rem + '0')
+ } else if rem != 0 {
+ a.trunc = true
+ }
+ n = quo
+ }
+
+ a.nd += delta
+ if a.nd >= len(a.d) {
+ a.nd = len(a.d)
+ }
+ a.dp += delta
+ trim(a)
+}
+
+// Binary shift left (k > 0) or right (k < 0).
+func (a *decimal) Shift(k int) {
+ switch {
+ case a.nd == 0:
+ // nothing to do: a == 0
+ case k > 0:
+ for k > maxShift {
+ leftShift(a, maxShift)
+ k -= maxShift
+ }
+ leftShift(a, uint(k))
+ case k < 0:
+ for k < -maxShift {
+ rightShift(a, maxShift)
+ k += maxShift
+ }
+ rightShift(a, uint(-k))
+ }
+}
+
+// If we chop a at nd digits, should we round up?
+func shouldRoundUp(a *decimal, nd int) bool {
+ if nd < 0 || nd >= a.nd {
+ return false
+ }
+ if a.d[nd] == '5' && nd+1 == a.nd { // exactly halfway - round to even
+ // if we truncated, a little higher than what's recorded - always round up
+ if a.trunc {
+ return true
+ }
+ return nd > 0 && (a.d[nd-1]-'0')%2 != 0
+ }
+ // not halfway - digit tells all
+ return a.d[nd] >= '5'
+}
+
+// Round a to nd digits (or fewer).
+// If nd is zero, it means we're rounding
+// just to the left of the digits, as in
+// 0.09 -> 0.1.
+func (a *decimal) Round(nd int) {
+ if nd < 0 || nd >= a.nd {
+ return
+ }
+ if shouldRoundUp(a, nd) {
+ a.RoundUp(nd)
+ } else {
+ a.RoundDown(nd)
+ }
+}
+
+// Round a down to nd digits (or fewer).
+func (a *decimal) RoundDown(nd int) {
+ if nd < 0 || nd >= a.nd {
+ return
+ }
+ a.nd = nd
+ trim(a)
+}
+
+// Round a up to nd digits (or fewer).
+func (a *decimal) RoundUp(nd int) {
+ if nd < 0 || nd >= a.nd {
+ return
+ }
+
+ // round up
+ for i := nd - 1; i >= 0; i-- {
+ c := a.d[i]
+ if c < '9' { // can stop after this digit
+ a.d[i]++
+ a.nd = i + 1
+ return
+ }
+ }
+
+ // Number is all 9s.
+ // Change to single 1 with adjusted decimal point.
+ a.d[0] = '1'
+ a.nd = 1
+ a.dp++
+}
+
+// Extract integer part, rounded appropriately.
+// No guarantees about overflow.
+func (a *decimal) RoundedInteger() uint64 {
+ if a.dp > 20 {
+ return 0xFFFFFFFFFFFFFFFF
+ }
+ var i int
+ n := uint64(0)
+ for i = 0; i < a.dp && i < a.nd; i++ {
+ n = n*10 + uint64(a.d[i]-'0')
+ }
+ for ; i < a.dp; i++ {
+ n *= 10
+ }
+ if shouldRoundUp(a, a.dp) {
+ n++
+ }
+ return n
+}
diff --git a/vendor/github.com/shopspring/decimal/decimal.go b/vendor/github.com/shopspring/decimal/decimal.go
new file mode 100644
index 000000000..801c1a045
--- /dev/null
+++ b/vendor/github.com/shopspring/decimal/decimal.go
@@ -0,0 +1,1477 @@
+// Package decimal implements an arbitrary precision fixed-point decimal.
+//
+// The zero-value of a Decimal is 0, as you would expect.
+//
+// The best way to create a new Decimal is to use decimal.NewFromString, ex:
+//
+// n, err := decimal.NewFromString("-123.4567")
+// n.String() // output: "-123.4567"
+//
+// To use Decimal as part of a struct:
+//
+// type Struct struct {
+// Number Decimal
+// }
+//
+// Note: This can "only" represent numbers with a maximum of 2^31 digits after the decimal point.
+package decimal
+
+import (
+ "database/sql/driver"
+ "encoding/binary"
+ "fmt"
+ "math"
+ "math/big"
+ "strconv"
+ "strings"
+)
+
+// DivisionPrecision is the number of decimal places in the result when it
+// doesn't divide exactly.
+//
+// Example:
+//
+// d1 := decimal.NewFromFloat(2).Div(decimal.NewFromFloat(3))
+// d1.String() // output: "0.6666666666666667"
+// d2 := decimal.NewFromFloat(2).Div(decimal.NewFromFloat(30000))
+// d2.String() // output: "0.0000666666666667"
+// d3 := decimal.NewFromFloat(20000).Div(decimal.NewFromFloat(3))
+// d3.String() // output: "6666.6666666666666667"
+// decimal.DivisionPrecision = 3
+// d4 := decimal.NewFromFloat(2).Div(decimal.NewFromFloat(3))
+// d4.String() // output: "0.667"
+//
+var DivisionPrecision = 16
+
+// MarshalJSONWithoutQuotes should be set to true if you want the decimal to
+// be JSON marshaled as a number, instead of as a string.
+// WARNING: this is dangerous for decimals with many digits, since many JSON
+// unmarshallers (ex: JavaScript's) will unmarshal JSON numbers to IEEE 754
+// double-precision floating point numbers, which means you can potentially
+// silently lose precision.
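+//
+// Illustrative sketch of the difference:
+//
+// d := New(12345, -2) // 123.45
+// b1, _ := d.MarshalJSON() // "123.45" (quoted string, the default)
+// MarshalJSONWithoutQuotes = true
+// b2, _ := d.MarshalJSON() // 123.45 (bare JSON number)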
+var MarshalJSONWithoutQuotes = false
+
+// Zero constant, to make computations faster.
+// Zero should never be compared with == or != directly, please use decimal.Equal or decimal.Cmp instead.
+var Zero = New(0, 1)
+
+var zeroInt = big.NewInt(0)
+var oneInt = big.NewInt(1)
+var twoInt = big.NewInt(2)
+var fourInt = big.NewInt(4)
+var fiveInt = big.NewInt(5)
+var tenInt = big.NewInt(10)
+var twentyInt = big.NewInt(20)
+
+// Decimal represents a fixed-point decimal. It is immutable.
+// number = value * 10 ^ exp
+type Decimal struct {
+ value *big.Int
+
+ // NOTE(vadim): this must be an int32, because we cast it to float64 during
+ // calculations. If exp is 64 bit, we might lose precision.
+ // If we cared about being able to represent every possible decimal, we
+ // could make exp a *big.Int but it would hurt performance and numbers
+ // like that are unrealistic.
+ exp int32
+}
+
+// New returns a new fixed-point decimal, value * 10 ^ exp.
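+//
+// Example (illustrative):
+//
+// New(1234, -2).String() // output: "12.34"
+// New(5, 2).String() // output: "500"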
+func New(value int64, exp int32) Decimal {
+ return Decimal{
+ value: big.NewInt(value),
+ exp: exp,
+ }
+}
+
+// NewFromInt converts a int64 to Decimal.
+//
+// Example:
+//
+// NewFromInt(123).String() // output: "123"
+// NewFromInt(-10).String() // output: "-10"
+func NewFromInt(value int64) Decimal {
+ return Decimal{
+ value: big.NewInt(value),
+ exp: 0,
+ }
+}
+
+// NewFromInt32 converts a int32 to Decimal.
+//
+// Example:
+//
+// NewFromInt(123).String() // output: "123"
+// NewFromInt(-10).String() // output: "-10"
+func NewFromInt32(value int32) Decimal {
+ return Decimal{
+ value: big.NewInt(int64(value)),
+ exp: 0,
+ }
+}
+
+// NewFromBigInt returns a new Decimal from a big.Int, value * 10 ^ exp
+func NewFromBigInt(value *big.Int, exp int32) Decimal {
+ return Decimal{
+ value: big.NewInt(0).Set(value),
+ exp: exp,
+ }
+}
+
+// NewFromString returns a new Decimal from a string representation.
+// Trailing zeroes are not trimmed.
+//
+// Example:
+//
+// d, err := NewFromString("-123.45")
+// d2, err := NewFromString(".0001")
+// d3, err := NewFromString("1.47000")
+//
+func NewFromString(value string) (Decimal, error) {
+ originalInput := value
+ var intString string
+ var exp int64
+
+ // Check if number is using scientific notation
+ eIndex := strings.IndexAny(value, "Ee")
+ if eIndex != -1 {
+ expInt, err := strconv.ParseInt(value[eIndex+1:], 10, 32)
+ if err != nil {
+ if e, ok := err.(*strconv.NumError); ok && e.Err == strconv.ErrRange {
+ return Decimal{}, fmt.Errorf("can't convert %s to decimal: fractional part too long", value)
+ }
+ return Decimal{}, fmt.Errorf("can't convert %s to decimal: exponent is not numeric", value)
+ }
+ value = value[:eIndex]
+ exp = expInt
+ }
+
+ parts := strings.Split(value, ".")
+ if len(parts) == 1 {
+ // There is no decimal point, we can just parse the original string as
+ // an int
+ intString = value
+ } else if len(parts) == 2 {
+ intString = parts[0] + parts[1]
+ expInt := -len(parts[1])
+ exp += int64(expInt)
+ } else {
+ return Decimal{}, fmt.Errorf("can't convert %s to decimal: too many .s", value)
+ }
+
+ dValue := new(big.Int)
+ _, ok := dValue.SetString(intString, 10)
+ if !ok {
+ return Decimal{}, fmt.Errorf("can't convert %s to decimal", value)
+ }
+
+ if exp < math.MinInt32 || exp > math.MaxInt32 {
+ // NOTE(vadim): I doubt a string could realistically be this long
+ return Decimal{}, fmt.Errorf("can't convert %s to decimal: fractional part too long", originalInput)
+ }
+
+ return Decimal{
+ value: dValue,
+ exp: int32(exp),
+ }, nil
+}
+
+// RequireFromString returns a new Decimal from a string representation
+// or panics if NewFromString would have returned an error.
+//
+// Example:
+//
+// d := RequireFromString("-123.45")
+// d2 := RequireFromString(".0001")
+//
+func RequireFromString(value string) Decimal {
+ dec, err := NewFromString(value)
+ if err != nil {
+ panic(err)
+ }
+ return dec
+}
+
+// NewFromFloat converts a float64 to Decimal.
+//
+// The converted number will contain the number of significant digits that can be
+// represented in a float with reliable roundtrip.
+// This is typically 15 digits, but may be more in some cases.
+// See https://www.exploringbinary.com/decimal-precision-of-binary-floating-point-numbers/ for more information.
+//
+// For slightly faster conversion, use NewFromFloatWithExponent where you can specify the precision in absolute terms.
+//
+// NOTE: this will panic on NaN, +/-inf
+func NewFromFloat(value float64) Decimal {
+ if value == 0 {
+ return New(0, 0)
+ }
+ return newFromFloat(value, math.Float64bits(value), &float64info)
+}
+
+// NewFromFloat32 converts a float32 to Decimal.
+//
+// The converted number will contain the number of significant digits that can be
+// represented in a float with reliable roundtrip.
+// This is typically 6-8 digits depending on the input.
+// See https://www.exploringbinary.com/decimal-precision-of-binary-floating-point-numbers/ for more information.
+//
+// For slightly faster conversion, use NewFromFloatWithExponent where you can specify the precision in absolute terms.
+//
+// NOTE: this will panic on NaN, +/-inf
+func NewFromFloat32(value float32) Decimal {
+ if value == 0 {
+ return New(0, 0)
+ }
+ // XOR is workaround for https://github.com/golang/go/issues/26285
+ a := math.Float32bits(value) ^ 0x80808080
+ return newFromFloat(float64(value), uint64(a)^0x80808080, &float32info)
+}
+
+func newFromFloat(val float64, bits uint64, flt *floatInfo) Decimal {
+ if math.IsNaN(val) || math.IsInf(val, 0) {
+ panic(fmt.Sprintf("Cannot create a Decimal from %v", val))
+ }
+ exp := int(bits>>flt.mantbits) & (1<<flt.expbits - 1)
+ mant := bits & (uint64(1)<<flt.mantbits - 1)
+
+ switch exp {
+ case 0:
+ // denormalized
+ exp++
+
+ default:
+ // add implicit top bit
+ mant |= uint64(1) << flt.mantbits
+ }
+ exp += flt.bias
+
+ var d decimal
+ d.Assign(mant)
+ d.Shift(exp - int(flt.mantbits))
+ d.neg = bits>>(flt.expbits+flt.mantbits) != 0
+
+ roundShortest(&d, mant, exp, flt)
+ // If less than 19 digits, we can do calculation in an int64.
+ if d.nd < 19 {
+ tmp := int64(0)
+ m := int64(1)
+ for i := d.nd - 1; i >= 0; i-- {
+ tmp += m * int64(d.d[i]-'0')
+ m *= 10
+ }
+ if d.neg {
+ tmp *= -1
+ }
+ return Decimal{value: big.NewInt(tmp), exp: int32(d.dp) - int32(d.nd)}
+ }
+ dValue := new(big.Int)
+ dValue, ok := dValue.SetString(string(d.d[:d.nd]), 10)
+ if ok {
+ return Decimal{value: dValue, exp: int32(d.dp) - int32(d.nd)}
+ }
+
+ return NewFromFloatWithExponent(val, int32(d.dp)-int32(d.nd))
+}
+
+// NewFromFloatWithExponent converts a float64 to Decimal, with an arbitrary
+// number of fractional digits.
+//
+// Example:
+//
+// NewFromFloatWithExponent(123.456, -2).String() // output: "123.46"
+//
+func NewFromFloatWithExponent(value float64, exp int32) Decimal {
+ if math.IsNaN(value) || math.IsInf(value, 0) {
+ panic(fmt.Sprintf("Cannot create a Decimal from %v", value))
+ }
+
+ bits := math.Float64bits(value)
+ mant := bits & (1<<52 - 1)
+ exp2 := int32((bits >> 52) & (1<<11 - 1))
+ sign := bits >> 63
+
+ if exp2 == 0 {
+ // specials
+ if mant == 0 {
+ return Decimal{}
+ }
+ // subnormal
+ exp2++
+ } else {
+ // normal
+ mant |= 1 << 52
+ }
+
+ exp2 -= 1023 + 52
+
+ // normalizing base-2 values
+ for mant&1 == 0 {
+ mant = mant >> 1
+ exp2++
+ }
+
+ // maximum number of fractional base-10 digits to represent 2^N exactly cannot be more than -N if N<0
+ if exp < 0 && exp < exp2 {
+ if exp2 < 0 {
+ exp = exp2
+ } else {
+ exp = 0
+ }
+ }
+
+ // representing 10^M * 2^N as 5^M * 2^(M+N)
+ exp2 -= exp
+
+ temp := big.NewInt(1)
+ dMant := big.NewInt(int64(mant))
+
+ // applying 5^M
+ if exp > 0 {
+ temp = temp.SetInt64(int64(exp))
+ temp = temp.Exp(fiveInt, temp, nil)
+ } else if exp < 0 {
+ temp = temp.SetInt64(-int64(exp))
+ temp = temp.Exp(fiveInt, temp, nil)
+ dMant = dMant.Mul(dMant, temp)
+ temp = temp.SetUint64(1)
+ }
+
+ // applying 2^(M+N)
+ if exp2 > 0 {
+ dMant = dMant.Lsh(dMant, uint(exp2))
+ } else if exp2 < 0 {
+ temp = temp.Lsh(temp, uint(-exp2))
+ }
+
+ // rounding and downscaling
+ if exp > 0 || exp2 < 0 {
+ halfDown := new(big.Int).Rsh(temp, 1)
+ dMant = dMant.Add(dMant, halfDown)
+ dMant = dMant.Quo(dMant, temp)
+ }
+
+ if sign == 1 {
+ dMant = dMant.Neg(dMant)
+ }
+
+ return Decimal{
+ value: dMant,
+ exp: exp,
+ }
+}
+
+// rescale returns a rescaled version of the decimal. Returned
+// decimal may be less precise if the given exponent is bigger
+// than the initial exponent of the Decimal.
+// NOTE: this will truncate, NOT round
+//
+// Example:
+//
+// d := New(12345, -4)
+// d2 := d.rescale(-1)
+// d3 := d2.rescale(-4)
+// println(d)
+// println(d2)
+// println(d3)
+//
+// Output:
+//
+// 1.2345
+// 1.2
+// 1.2000
+//
+func (d Decimal) rescale(exp int32) Decimal {
+ d.ensureInitialized()
+
+ if d.exp == exp {
+ return Decimal{
+ new(big.Int).Set(d.value),
+ d.exp,
+ }
+ }
+
+ // NOTE(vadim): must convert exps to float64 before - to prevent overflow
+ diff := math.Abs(float64(exp) - float64(d.exp))
+ value := new(big.Int).Set(d.value)
+
+ expScale := new(big.Int).Exp(tenInt, big.NewInt(int64(diff)), nil)
+ if exp > d.exp {
+ value = value.Quo(value, expScale)
+ } else if exp < d.exp {
+ value = value.Mul(value, expScale)
+ }
+
+ return Decimal{
+ value: value,
+ exp: exp,
+ }
+}
+
+// Abs returns the absolute value of the decimal.
+func (d Decimal) Abs() Decimal {
+ d.ensureInitialized()
+ d2Value := new(big.Int).Abs(d.value)
+ return Decimal{
+ value: d2Value,
+ exp: d.exp,
+ }
+}
+
+// Add returns d + d2.
+func (d Decimal) Add(d2 Decimal) Decimal {
+ rd, rd2 := RescalePair(d, d2)
+
+ d3Value := new(big.Int).Add(rd.value, rd2.value)
+ return Decimal{
+ value: d3Value,
+ exp: rd.exp,
+ }
+}
+
+// Sub returns d - d2.
+func (d Decimal) Sub(d2 Decimal) Decimal {
+ rd, rd2 := RescalePair(d, d2)
+
+ d3Value := new(big.Int).Sub(rd.value, rd2.value)
+ return Decimal{
+ value: d3Value,
+ exp: rd.exp,
+ }
+}
+
+// Neg returns -d.
+func (d Decimal) Neg() Decimal {
+ d.ensureInitialized()
+ val := new(big.Int).Neg(d.value)
+ return Decimal{
+ value: val,
+ exp: d.exp,
+ }
+}
+
+// Mul returns d * d2.
+func (d Decimal) Mul(d2 Decimal) Decimal {
+ d.ensureInitialized()
+ d2.ensureInitialized()
+
+ expInt64 := int64(d.exp) + int64(d2.exp)
+ if expInt64 > math.MaxInt32 || expInt64 < math.MinInt32 {
+ // NOTE(vadim): better to panic than give incorrect results, as
+ // Decimals are usually used for money
+ panic(fmt.Sprintf("exponent %v overflows an int32!", expInt64))
+ }
+
+ d3Value := new(big.Int).Mul(d.value, d2.value)
+ return Decimal{
+ value: d3Value,
+ exp: int32(expInt64),
+ }
+}
+
+// Shift shifts the decimal in base 10.
+// It shifts left when shift is positive and right if shift is negative.
+// In simpler terms, the given value for shift is added to the exponent
+// of the decimal.
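+//
+// Example (illustrative):
+//
+// New(5, 0).Shift(2).String() // output: "500"
+// New(5, 0).Shift(-2).String() // output: "0.05"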
+func (d Decimal) Shift(shift int32) Decimal {
+ d.ensureInitialized()
+ return Decimal{
+ value: new(big.Int).Set(d.value),
+ exp: d.exp + shift,
+ }
+}
+
+// Div returns d / d2. If it doesn't divide exactly, the result will have
+// DivisionPrecision digits after the decimal point.
+func (d Decimal) Div(d2 Decimal) Decimal {
+ return d.DivRound(d2, int32(DivisionPrecision))
+}
+
+// QuoRem does division with remainder
+// d.QuoRem(d2,precision) returns quotient q and remainder r such that
+// d = d2 * q + r, q an integer multiple of 10^(-precision)
+// 0 <= r < abs(d2) * 10 ^(-precision) if d>=0
+// 0 >= r > -abs(d2) * 10 ^(-precision) if d<0
+// Note that precision<0 is allowed as input.
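+//
+// Example (illustrative):
+//
+// q, r := New(10, 0).QuoRem(New(3, 0), 1)
+// q.String(), r.String() // "3.3" and "0.1", since 10 = 3*3.3 + 0.1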
+func (d Decimal) QuoRem(d2 Decimal, precision int32) (Decimal, Decimal) {
+ d.ensureInitialized()
+ d2.ensureInitialized()
+ if d2.value.Sign() == 0 {
+ panic("decimal division by 0")
+ }
+ scale := -precision
+ e := int64(d.exp - d2.exp - scale)
+ if e > math.MaxInt32 || e < math.MinInt32 {
+ panic("overflow in decimal QuoRem")
+ }
+ var aa, bb, expo big.Int
+ var scalerest int32
+ // d = a 10^ea
+ // d2 = b 10^eb
+ if e < 0 {
+ aa = *d.value
+ expo.SetInt64(-e)
+ bb.Exp(tenInt, &expo, nil)
+ bb.Mul(d2.value, &bb)
+ scalerest = d.exp
+ // now aa = a
+ // bb = b 10^(scale + eb - ea)
+ } else {
+ expo.SetInt64(e)
+ aa.Exp(tenInt, &expo, nil)
+ aa.Mul(d.value, &aa)
+ bb = *d2.value
+ scalerest = scale + d2.exp
+ // now aa = a 10^(ea - eb - scale)
+ // bb = b
+ }
+ var q, r big.Int
+ q.QuoRem(&aa, &bb, &r)
+ dq := Decimal{value: &q, exp: scale}
+ dr := Decimal{value: &r, exp: scalerest}
+ return dq, dr
+}
+
+// DivRound divides and rounds to a given precision
+// i.e. to an integer multiple of 10^(-precision)
+// for a positive quotient digit 5 is rounded up, away from 0
+// if the quotient is negative then digit 5 is rounded down, away from 0
+// Note that precision<0 is allowed as input.
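+//
+// Example (illustrative):
+//
+// New(125, -1).DivRound(New(2, 0), 1).String() // output: "6.3" (6.25 rounds away from 0)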
+func (d Decimal) DivRound(d2 Decimal, precision int32) Decimal {
+ // QuoRem already checks initialization
+ q, r := d.QuoRem(d2, precision)
+ // the actual rounding decision is based on comparing r*10^precision and d2/2
+ // instead compare 2 r 10 ^precision and d2
+ var rv2 big.Int
+ rv2.Abs(r.value)
+ rv2.Lsh(&rv2, 1)
+ // now rv2 = abs(r.value) * 2
+ r2 := Decimal{value: &rv2, exp: r.exp + precision}
+ // r2 is now 2 * r * 10 ^ precision
+ var c = r2.Cmp(d2.Abs())
+
+ if c < 0 {
+ return q
+ }
+
+ if d.value.Sign()*d2.value.Sign() < 0 {
+ return q.Sub(New(1, -precision))
+ }
+
+ return q.Add(New(1, -precision))
+}
+
+// Mod returns d % d2.
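+//
+// Example (illustrative):
+//
+// New(10, 0).Mod(New(3, 0)).String() // output: "1"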
+func (d Decimal) Mod(d2 Decimal) Decimal {
+ quo := d.Div(d2).Truncate(0)
+ return d.Sub(d2.Mul(quo))
+}
+
+// Pow returns d to the power d2
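+//
+// Example (illustrative; exact for integer exponents):
+//
+// NewFromInt(2).Pow(NewFromInt(10)).String() // output: "1024"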
+func (d Decimal) Pow(d2 Decimal) Decimal {
+ var temp Decimal
+ if d2.IntPart() == 0 {
+ return NewFromFloat(1)
+ }
+ temp = d.Pow(d2.Div(NewFromFloat(2)))
+ if d2.IntPart()%2 == 0 {
+ return temp.Mul(temp)
+ }
+ if d2.IntPart() > 0 {
+ return temp.Mul(temp).Mul(d)
+ }
+ return temp.Mul(temp).Div(d)
+}
+
+// Cmp compares the numbers represented by d and d2 and returns:
+//
+// -1 if d < d2
+// 0 if d == d2
+// +1 if d > d2
+//
+func (d Decimal) Cmp(d2 Decimal) int {
+ d.ensureInitialized()
+ d2.ensureInitialized()
+
+ if d.exp == d2.exp {
+ return d.value.Cmp(d2.value)
+ }
+
+ rd, rd2 := RescalePair(d, d2)
+
+ return rd.value.Cmp(rd2.value)
+}
+
+// Equal returns whether the numbers represented by d and d2 are equal.
+func (d Decimal) Equal(d2 Decimal) bool {
+ return d.Cmp(d2) == 0
+}
+
+// Equals is deprecated, please use Equal method instead
+func (d Decimal) Equals(d2 Decimal) bool {
+ return d.Equal(d2)
+}
+
+// GreaterThan (GT) returns true when d is greater than d2.
+func (d Decimal) GreaterThan(d2 Decimal) bool {
+ return d.Cmp(d2) == 1
+}
+
+// GreaterThanOrEqual (GTE) returns true when d is greater than or equal to d2.
+func (d Decimal) GreaterThanOrEqual(d2 Decimal) bool {
+ cmp := d.Cmp(d2)
+ return cmp == 1 || cmp == 0
+}
+
+// LessThan (LT) returns true when d is less than d2.
+func (d Decimal) LessThan(d2 Decimal) bool {
+ return d.Cmp(d2) == -1
+}
+
+// LessThanOrEqual (LTE) returns true when d is less than or equal to d2.
+func (d Decimal) LessThanOrEqual(d2 Decimal) bool {
+ cmp := d.Cmp(d2)
+ return cmp == -1 || cmp == 0
+}
+
+// Sign returns:
+//
+// -1 if d < 0
+// 0 if d == 0
+// +1 if d > 0
+//
+func (d Decimal) Sign() int {
+ if d.value == nil {
+ return 0
+ }
+ return d.value.Sign()
+}
+
+// IsPositive returns
+//
+// true if d > 0
+// false if d == 0
+// false if d < 0
+func (d Decimal) IsPositive() bool {
+ return d.Sign() == 1
+}
+
+// IsNegative returns
+//
+// true if d < 0
+// false if d == 0
+// false if d > 0
+func (d Decimal) IsNegative() bool {
+ return d.Sign() == -1
+}
+
+// IsZero returns
+//
+// true if d == 0
+// false if d > 0
+// false if d < 0
+func (d Decimal) IsZero() bool {
+ return d.Sign() == 0
+}
+
+// Exponent returns the exponent, or scale component of the decimal.
+func (d Decimal) Exponent() int32 {
+ return d.exp
+}
+
+// Coefficient returns the coefficient of the decimal. It is scaled by 10^Exponent()
+func (d Decimal) Coefficient() *big.Int {
+ d.ensureInitialized()
+ // we copy the coefficient so that mutating the result does not mutate the
+ // Decimal.
+ return big.NewInt(0).Set(d.value)
+}
+
+// IntPart returns the integer component of the decimal.
+func (d Decimal) IntPart() int64 {
+ scaledD := d.rescale(0)
+ return scaledD.value.Int64()
+}
+
+// BigInt returns integer component of the decimal as a BigInt.
+func (d Decimal) BigInt() *big.Int {
+ scaledD := d.rescale(0)
+ i := &big.Int{}
+ i.SetString(scaledD.String(), 10)
+ return i
+}
+
+// BigFloat returns decimal as BigFloat.
+// Be aware that casting decimal to BigFloat might cause a loss of precision.
+func (d Decimal) BigFloat() *big.Float {
+ f := &big.Float{}
+ f.SetString(d.String())
+ return f
+}
+
+// Rat returns a rational number representation of the decimal.
+func (d Decimal) Rat() *big.Rat {
+ d.ensureInitialized()
+ if d.exp <= 0 {
+ // NOTE(vadim): must negate after casting to prevent int32 overflow
+ denom := new(big.Int).Exp(tenInt, big.NewInt(-int64(d.exp)), nil)
+ return new(big.Rat).SetFrac(d.value, denom)
+ }
+
+ mul := new(big.Int).Exp(tenInt, big.NewInt(int64(d.exp)), nil)
+ num := new(big.Int).Mul(d.value, mul)
+ return new(big.Rat).SetFrac(num, oneInt)
+}
+
+// Float64 returns the nearest float64 value for d and a bool indicating
+// whether f represents d exactly.
+// For more details, see the documentation for big.Rat.Float64
+func (d Decimal) Float64() (f float64, exact bool) {
+ return d.Rat().Float64()
+}
+
+// String returns the string representation of the decimal
+// with the fixed point.
+//
+// Example:
+//
+// d := New(-12345, -3)
+// println(d.String())
+//
+// Output:
+//
+// -12.345
+//
+func (d Decimal) String() string {
+ return d.string(true)
+}
+
+// StringFixed returns a rounded fixed-point string with places digits after
+// the decimal point.
+//
+// Example:
+//
+// NewFromFloat(0).StringFixed(2) // output: "0.00"
+// NewFromFloat(0).StringFixed(0) // output: "0"
+// NewFromFloat(5.45).StringFixed(0) // output: "5"
+// NewFromFloat(5.45).StringFixed(1) // output: "5.5"
+// NewFromFloat(5.45).StringFixed(2) // output: "5.45"
+// NewFromFloat(5.45).StringFixed(3) // output: "5.450"
+// NewFromFloat(545).StringFixed(-1) // output: "550"
+//
+func (d Decimal) StringFixed(places int32) string {
+ rounded := d.Round(places)
+ return rounded.string(false)
+}
+
+// StringFixedBank returns a banker rounded fixed-point string with places digits
+// after the decimal point.
+//
+// Example:
+//
+// NewFromFloat(0).StringFixedBank(2) // output: "0.00"
+// NewFromFloat(0).StringFixedBank(0) // output: "0"
+// NewFromFloat(5.45).StringFixedBank(0) // output: "5"
+// NewFromFloat(5.45).StringFixedBank(1) // output: "5.4"
+// NewFromFloat(5.45).StringFixedBank(2) // output: "5.45"
+// NewFromFloat(5.45).StringFixedBank(3) // output: "5.450"
+// NewFromFloat(545).StringFixedBank(-1) // output: "540"
+//
+func (d Decimal) StringFixedBank(places int32) string {
+ rounded := d.RoundBank(places)
+ return rounded.string(false)
+}
+
+// StringFixedCash returns a Swedish/Cash rounded fixed-point string. For
+// more details see the documentation at function RoundCash.
+func (d Decimal) StringFixedCash(interval uint8) string {
+ rounded := d.RoundCash(interval)
+ return rounded.string(false)
+}
+
+// Round rounds the decimal to places decimal places.
+// If places < 0, it will round the integer part to the nearest 10^(-places).
+//
+// Example:
+//
+// NewFromFloat(5.45).Round(1).String() // output: "5.5"
+// NewFromFloat(545).Round(-1).String() // output: "550"
+//
+func (d Decimal) Round(places int32) Decimal {
+ // truncate to places + 1
+ ret := d.rescale(-places - 1)
+
+ // add sign(d) * 0.5
+ if ret.value.Sign() < 0 {
+ ret.value.Sub(ret.value, fiveInt)
+ } else {
+ ret.value.Add(ret.value, fiveInt)
+ }
+
+ // floor for positive numbers, ceil for negative numbers
+ _, m := ret.value.DivMod(ret.value, tenInt, new(big.Int))
+ ret.exp++
+ if ret.value.Sign() < 0 && m.Cmp(zeroInt) != 0 {
+ ret.value.Add(ret.value, oneInt)
+ }
+
+ return ret
+}
+
+// RoundBank rounds the decimal to places decimal places.
+// If the final digit to round is equidistant from the nearest two integers,
+// the rounded value is taken as the even number.
+//
+// If places < 0, it will round the integer part to the nearest 10^(-places).
+//
+// Examples:
+//
+// NewFromFloat(5.45).RoundBank(1).String() // output: "5.4"
+// NewFromFloat(545).RoundBank(-1).String() // output: "540"
+// NewFromFloat(5.46).RoundBank(1).String() // output: "5.5"
+// NewFromFloat(546).RoundBank(-1).String() // output: "550"
+// NewFromFloat(5.55).RoundBank(1).String() // output: "5.6"
+// NewFromFloat(555).RoundBank(-1).String() // output: "560"
+//
+func (d Decimal) RoundBank(places int32) Decimal {
+
+ round := d.Round(places)
+ remainder := d.Sub(round).Abs()
+
+ half := New(5, -places-1)
+ if remainder.Cmp(half) == 0 && round.value.Bit(0) != 0 {
+ if round.value.Sign() < 0 {
+ round.value.Add(round.value, oneInt)
+ } else {
+ round.value.Sub(round.value, oneInt)
+ }
+ }
+
+ return round
+}
+
+// RoundCash aka Cash/Penny/öre rounding rounds decimal to a specific
+// interval. The amount payable for a cash transaction is rounded to the nearest
+// multiple of the minimum currency unit available. The following intervals are
+// available: 5, 10, 25, 50 and 100; any other interval panics.
+// 5: 5 cent rounding 3.43 => 3.45
+// 10: 10 cent rounding 3.45 => 3.50 (5 gets rounded up)
+// 25: 25 cent rounding 3.41 => 3.50
+// 50: 50 cent rounding 3.75 => 4.00
+// 100: 100 cent rounding 3.50 => 4.00
+// For more details: https://en.wikipedia.org/wiki/Cash_rounding
+func (d Decimal) RoundCash(interval uint8) Decimal {
+ var iVal *big.Int
+ switch interval {
+ case 5:
+ iVal = twentyInt
+ case 10:
+ iVal = tenInt
+ case 25:
+ iVal = fourInt
+ case 50:
+ iVal = twoInt
+ case 100:
+ iVal = oneInt
+ default:
+ panic(fmt.Sprintf("Decimal does not support this Cash rounding interval `%d`. Supported: 5, 10, 25, 50, 100", interval))
+ }
+ dVal := Decimal{
+ value: iVal,
+ }
+
+ // TODO: optimize those calculations to reduce the high allocations (~29 allocs).
+ return d.Mul(dVal).Round(0).Div(dVal).Truncate(2)
+}
+
+// Floor returns the nearest integer value less than or equal to d.
+func (d Decimal) Floor() Decimal {
+ d.ensureInitialized()
+
+ if d.exp >= 0 {
+ return d
+ }
+
+ exp := big.NewInt(10)
+
+ // NOTE(vadim): must negate after casting to prevent int32 overflow
+ exp.Exp(exp, big.NewInt(-int64(d.exp)), nil)
+
+ z := new(big.Int).Div(d.value, exp)
+ return Decimal{value: z, exp: 0}
+}
+
+// Ceil returns the nearest integer value greater than or equal to d.
+func (d Decimal) Ceil() Decimal {
+ d.ensureInitialized()
+
+ if d.exp >= 0 {
+ return d
+ }
+
+ exp := big.NewInt(10)
+
+ // NOTE(vadim): must negate after casting to prevent int32 overflow
+ exp.Exp(exp, big.NewInt(-int64(d.exp)), nil)
+
+ z, m := new(big.Int).DivMod(d.value, exp, new(big.Int))
+ if m.Cmp(zeroInt) != 0 {
+ z.Add(z, oneInt)
+ }
+ return Decimal{value: z, exp: 0}
+}
+
+// Truncate truncates off digits from the number, without rounding.
+//
+// NOTE: precision is the last digit that will not be truncated (must be >= 0).
+//
+// Example:
+//
+// decimal.NewFromString("123.456").Truncate(2).String() // "123.45"
+//
+func (d Decimal) Truncate(precision int32) Decimal {
+ d.ensureInitialized()
+ if precision >= 0 && -precision > d.exp {
+ return d.rescale(-precision)
+ }
+ return d
+}
+
+// UnmarshalJSON implements the json.Unmarshaler interface.
+func (d *Decimal) UnmarshalJSON(decimalBytes []byte) error {
+ if string(decimalBytes) == "null" {
+ return nil
+ }
+
+ str, err := unquoteIfQuoted(decimalBytes)
+ if err != nil {
+ return fmt.Errorf("error decoding string '%s': %s", decimalBytes, err)
+ }
+
+ decimal, err := NewFromString(str)
+ *d = decimal
+ if err != nil {
+ return fmt.Errorf("error decoding string '%s': %s", str, err)
+ }
+ return nil
+}
+
+// MarshalJSON implements the json.Marshaler interface.
+func (d Decimal) MarshalJSON() ([]byte, error) {
+ var str string
+ if MarshalJSONWithoutQuotes {
+ str = d.String()
+ } else {
+ str = "\"" + d.String() + "\""
+ }
+ return []byte(str), nil
+}
+
+// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface. It
+// expects the compact form written by MarshalBinary: a 4-byte big-endian
+// exponent followed by the gob-encoded value.
+func (d *Decimal) UnmarshalBinary(data []byte) error {
+ // Extract the exponent
+ d.exp = int32(binary.BigEndian.Uint32(data[:4]))
+
+ // Extract the value
+ d.value = new(big.Int)
+ return d.value.GobDecode(data[4:])
+}
+
+// MarshalBinary implements the encoding.BinaryMarshaler interface.
+func (d Decimal) MarshalBinary() (data []byte, err error) {
+ // Write the exponent first since it's a fixed size
+ v1 := make([]byte, 4)
+ binary.BigEndian.PutUint32(v1, uint32(d.exp))
+
+ // Add the value
+ var v2 []byte
+ if v2, err = d.value.GobEncode(); err != nil {
+ return
+ }
+
+ // Return the byte array
+ data = append(v1, v2...)
+ return
+}
+
+// Scan implements the sql.Scanner interface for database deserialization.
+func (d *Decimal) Scan(value interface{}) error {
+ // first try to see if the data is stored in database as a Numeric datatype
+ switch v := value.(type) {
+
+ case float32:
+ *d = NewFromFloat(float64(v))
+ return nil
+
+ case float64:
+ // numeric in sqlite3 sends us float64
+ *d = NewFromFloat(v)
+ return nil
+
+ case int64:
+ // at least in sqlite3 when the value is 0 in db, the data is sent
+ // to us as an int64 instead of a float64 ...
+ *d = New(v, 0)
+ return nil
+
+ default:
+ // default is trying to interpret value stored as string
+ str, err := unquoteIfQuoted(v)
+ if err != nil {
+ return err
+ }
+ *d, err = NewFromString(str)
+ return err
+ }
+}
+
+// Value implements the driver.Valuer interface for database serialization.
+func (d Decimal) Value() (driver.Value, error) {
+ return d.String(), nil
+}
+
+// UnmarshalText implements the encoding.TextUnmarshaler interface for XML
+// deserialization.
+func (d *Decimal) UnmarshalText(text []byte) error {
+ str := string(text)
+
+ dec, err := NewFromString(str)
+ *d = dec
+ if err != nil {
+ return fmt.Errorf("error decoding string '%s': %s", str, err)
+ }
+
+ return nil
+}
+
+// MarshalText implements the encoding.TextMarshaler interface for XML
+// serialization.
+func (d Decimal) MarshalText() (text []byte, err error) {
+ return []byte(d.String()), nil
+}
+
+// GobEncode implements the gob.GobEncoder interface for gob serialization.
+func (d Decimal) GobEncode() ([]byte, error) {
+ return d.MarshalBinary()
+}
+
+// GobDecode implements the gob.GobDecoder interface for gob serialization.
+func (d *Decimal) GobDecode(data []byte) error {
+ return d.UnmarshalBinary(data)
+}
+
+// StringScaled first scales the decimal then calls .String() on it.
+// NOTE: buggy, unintuitive, and DEPRECATED! Use StringFixed instead.
+func (d Decimal) StringScaled(exp int32) string {
+ return d.rescale(exp).String()
+}
+
+func (d Decimal) string(trimTrailingZeros bool) string {
+ if d.exp >= 0 {
+ return d.rescale(0).value.String()
+ }
+
+ abs := new(big.Int).Abs(d.value)
+ str := abs.String()
+
+ var intPart, fractionalPart string
+
+ // NOTE(vadim): this cast to int will cause bugs if d.exp == INT_MIN
+ // and you are on a 32-bit machine. Won't fix this super-edge case.
+ dExpInt := int(d.exp)
+ if len(str) > -dExpInt {
+ intPart = str[:len(str)+dExpInt]
+ fractionalPart = str[len(str)+dExpInt:]
+ } else {
+ intPart = "0"
+
+ num0s := -dExpInt - len(str)
+ fractionalPart = strings.Repeat("0", num0s) + str
+ }
+
+ if trimTrailingZeros {
+ i := len(fractionalPart) - 1
+ for ; i >= 0; i-- {
+ if fractionalPart[i] != '0' {
+ break
+ }
+ }
+ fractionalPart = fractionalPart[:i+1]
+ }
+
+ number := intPart
+ if len(fractionalPart) > 0 {
+ number += "." + fractionalPart
+ }
+
+ if d.value.Sign() < 0 {
+ return "-" + number
+ }
+
+ return number
+}
+
+func (d *Decimal) ensureInitialized() {
+ if d.value == nil {
+ d.value = new(big.Int)
+ }
+}
+
+// Min returns the smallest Decimal that was passed in the arguments.
+//
+// To call this function with an array, you must do:
+//
+// Min(arr[0], arr[1:]...)
+//
+// This makes it harder to accidentally call Min with 0 arguments.
+func Min(first Decimal, rest ...Decimal) Decimal {
+ ans := first
+ for _, item := range rest {
+ if item.Cmp(ans) < 0 {
+ ans = item
+ }
+ }
+ return ans
+}
+
+// Max returns the largest Decimal that was passed in the arguments.
+//
+// To call this function with an array, you must do:
+//
+// Max(arr[0], arr[1:]...)
+//
+// This makes it harder to accidentally call Max with 0 arguments.
+func Max(first Decimal, rest ...Decimal) Decimal {
+ ans := first
+ for _, item := range rest {
+ if item.Cmp(ans) > 0 {
+ ans = item
+ }
+ }
+ return ans
+}
+
+// Sum returns the combined total of the provided first and rest Decimals
+func Sum(first Decimal, rest ...Decimal) Decimal {
+ total := first
+ for _, item := range rest {
+ total = total.Add(item)
+ }
+
+ return total
+}
+
+// Avg returns the average value of the provided first and rest Decimals
+func Avg(first Decimal, rest ...Decimal) Decimal {
+ count := New(int64(len(rest)+1), 0)
+ sum := Sum(first, rest...)
+ return sum.Div(count)
+}
+
+// RescalePair rescales two decimals to a common exponent (the smaller exp of the two decimals)
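+//
+// Example (illustrative):
+//
+// r1, r2 := RescalePair(New(5, -1), New(25, -3)) // 0.5 and 0.025
+// r1.Exponent() == r2.Exponent() // true; both are now -3, and r1's value is 500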
+func RescalePair(d1 Decimal, d2 Decimal) (Decimal, Decimal) {
+ d1.ensureInitialized()
+ d2.ensureInitialized()
+
+ if d1.exp == d2.exp {
+ return d1, d2
+ }
+
+ baseScale := min(d1.exp, d2.exp)
+ if baseScale != d1.exp {
+ return d1.rescale(baseScale), d2
+ }
+ return d1, d2.rescale(baseScale)
+}
+
+func min(x, y int32) int32 {
+ if x >= y {
+ return y
+ }
+ return x
+}
+
+func unquoteIfQuoted(value interface{}) (string, error) {
+ var bytes []byte
+
+ switch v := value.(type) {
+ case string:
+ bytes = []byte(v)
+ case []byte:
+ bytes = v
+ default:
+ return "", fmt.Errorf("could not convert value '%+v' to byte array of type '%T'",
+ value, value)
+ }
+
+ // If the amount is quoted, strip the quotes
+ if len(bytes) > 2 && bytes[0] == '"' && bytes[len(bytes)-1] == '"' {
+ bytes = bytes[1 : len(bytes)-1]
+ }
+ return string(bytes), nil
+}
+
+// NullDecimal represents a nullable decimal with compatibility for
+// scanning null values from the database.
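+//
+// Illustrative sketch (db is a placeholder for your own *sql.DB):
+//
+// var price NullDecimal
+// err := db.QueryRow("SELECT price FROM items").Scan(&price)
+// if err == nil && price.Valid {
+// fmt.Println(price.Decimal)
+// }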
+type NullDecimal struct {
+ Decimal Decimal
+ Valid bool
+}
+
+// Scan implements the sql.Scanner interface for database deserialization.
+func (d *NullDecimal) Scan(value interface{}) error {
+ if value == nil {
+ d.Valid = false
+ return nil
+ }
+ d.Valid = true
+ return d.Decimal.Scan(value)
+}
+
+// Value implements the driver.Valuer interface for database serialization.
+func (d NullDecimal) Value() (driver.Value, error) {
+ if !d.Valid {
+ return nil, nil
+ }
+ return d.Decimal.Value()
+}
+
+// UnmarshalJSON implements the json.Unmarshaler interface.
+func (d *NullDecimal) UnmarshalJSON(decimalBytes []byte) error {
+ if string(decimalBytes) == "null" {
+ d.Valid = false
+ return nil
+ }
+ d.Valid = true
+ return d.Decimal.UnmarshalJSON(decimalBytes)
+}
+
+// MarshalJSON implements the json.Marshaler interface.
+func (d NullDecimal) MarshalJSON() ([]byte, error) {
+ if !d.Valid {
+ return []byte("null"), nil
+ }
+ return d.Decimal.MarshalJSON()
+}
+
+// Trig functions
+
+// Atan returns the arctangent, in radians, of x.
+func (d Decimal) Atan() Decimal {
+ if d.Equal(NewFromFloat(0.0)) {
+ return d
+ }
+ if d.GreaterThan(NewFromFloat(0.0)) {
+ return d.satan()
+ }
+ return d.Neg().satan().Neg()
+}
+
+func (d Decimal) xatan() Decimal {
+ P0 := NewFromFloat(-8.750608600031904122785e-01)
+ P1 := NewFromFloat(-1.615753718733365076637e+01)
+ P2 := NewFromFloat(-7.500855792314704667340e+01)
+ P3 := NewFromFloat(-1.228866684490136173410e+02)
+ P4 := NewFromFloat(-6.485021904942025371773e+01)
+ Q0 := NewFromFloat(2.485846490142306297962e+01)
+ Q1 := NewFromFloat(1.650270098316988542046e+02)
+ Q2 := NewFromFloat(4.328810604912902668951e+02)
+ Q3 := NewFromFloat(4.853903996359136964868e+02)
+ Q4 := NewFromFloat(1.945506571482613964425e+02)
+ z := d.Mul(d)
+ b1 := P0.Mul(z).Add(P1).Mul(z).Add(P2).Mul(z).Add(P3).Mul(z).Add(P4).Mul(z)
+ b2 := z.Add(Q0).Mul(z).Add(Q1).Mul(z).Add(Q2).Mul(z).Add(Q3).Mul(z).Add(Q4)
+ z = b1.Div(b2)
+ z = d.Mul(z).Add(d)
+ return z
+}
+
+// satan reduces its argument (known to be positive)
+// to the range [0, 0.66] and calls xatan.
+func (d Decimal) satan() Decimal {
+ Morebits := NewFromFloat(6.123233995736765886130e-17) // pi/2 = PIO2 + Morebits
+ Tan3pio8 := NewFromFloat(2.41421356237309504880) // tan(3*pi/8)
+ pi := NewFromFloat(3.14159265358979323846264338327950288419716939937510582097494459)
+
+ if d.LessThanOrEqual(NewFromFloat(0.66)) {
+ return d.xatan()
+ }
+ if d.GreaterThan(Tan3pio8) {
+ return pi.Div(NewFromFloat(2.0)).Sub(NewFromFloat(1.0).Div(d).xatan()).Add(Morebits)
+ }
+ return pi.Div(NewFromFloat(4.0)).Add((d.Sub(NewFromFloat(1.0)).Div(d.Add(NewFromFloat(1.0)))).xatan()).Add(NewFromFloat(0.5).Mul(Morebits))
+}
+
+// sin coefficients
+var _sin = [...]Decimal{
+ NewFromFloat(1.58962301576546568060e-10), // 0x3de5d8fd1fd19ccd
+ NewFromFloat(-2.50507477628578072866e-8), // 0xbe5ae5e5a9291f5d
+ NewFromFloat(2.75573136213857245213e-6), // 0x3ec71de3567d48a1
+ NewFromFloat(-1.98412698295895385996e-4), // 0xbf2a01a019bfdf03
+ NewFromFloat(8.33333333332211858878e-3), // 0x3f8111111110f7d0
+ NewFromFloat(-1.66666666666666307295e-1), // 0xbfc5555555555548
+}
+
+// Sin returns the sine of the radian argument x.
+func (d Decimal) Sin() Decimal {
+ PI4A := NewFromFloat(7.85398125648498535156e-1) // 0x3fe921fb40000000, Pi/4 split into three parts
+ PI4B := NewFromFloat(3.77489470793079817668e-8) // 0x3e64442d00000000,
+ PI4C := NewFromFloat(2.69515142907905952645e-15) // 0x3ce8469898cc5170,
+ M4PI := NewFromFloat(1.273239544735162542821171882678754627704620361328125) // 4/pi
+
+ if d.Equal(NewFromFloat(0.0)) {
+ return d
+ }
+ // make argument positive but save the sign
+ sign := false
+ if d.LessThan(NewFromFloat(0.0)) {
+ d = d.Neg()
+ sign = true
+ }
+
+ j := d.Mul(M4PI).IntPart() // integer part of x/(Pi/4), as integer for tests on the phase angle
+ y := NewFromFloat(float64(j)) // integer part of x/(Pi/4), as float
+
+ // map zeros to origin
+ if j&1 == 1 {
+ j++
+ y = y.Add(NewFromFloat(1.0))
+ }
+ j &= 7 // octant modulo 2Pi radians (360 degrees)
+ // reflect in x axis
+ if j > 3 {
+ sign = !sign
+ j -= 4
+ }
+ z := d.Sub(y.Mul(PI4A)).Sub(y.Mul(PI4B)).Sub(y.Mul(PI4C)) // Extended precision modular arithmetic
+ zz := z.Mul(z)
+
+ if j == 1 || j == 2 {
+ w := zz.Mul(zz).Mul(_cos[0].Mul(zz).Add(_cos[1]).Mul(zz).Add(_cos[2]).Mul(zz).Add(_cos[3]).Mul(zz).Add(_cos[4]).Mul(zz).Add(_cos[5]))
+ y = NewFromFloat(1.0).Sub(NewFromFloat(0.5).Mul(zz)).Add(w)
+ } else {
+ y = z.Add(z.Mul(zz).Mul(_sin[0].Mul(zz).Add(_sin[1]).Mul(zz).Add(_sin[2]).Mul(zz).Add(_sin[3]).Mul(zz).Add(_sin[4]).Mul(zz).Add(_sin[5])))
+ }
+ if sign {
+ y = y.Neg()
+ }
+ return y
+}
+
+// cos coefficients
+var _cos = [...]Decimal{
+ NewFromFloat(-1.13585365213876817300e-11), // 0xbda8fa49a0861a9b
+ NewFromFloat(2.08757008419747316778e-9), // 0x3e21ee9d7b4e3f05
+ NewFromFloat(-2.75573141792967388112e-7), // 0xbe927e4f7eac4bc6
+ NewFromFloat(2.48015872888517045348e-5), // 0x3efa01a019c844f5
+ NewFromFloat(-1.38888888888730564116e-3), // 0xbf56c16c16c14f91
+ NewFromFloat(4.16666666666665929218e-2), // 0x3fa555555555554b
+}
+
+// Cos returns the cosine of the radian argument x.
+func (d Decimal) Cos() Decimal {
+
+ PI4A := NewFromFloat(7.85398125648498535156e-1) // 0x3fe921fb40000000, Pi/4 split into three parts
+ PI4B := NewFromFloat(3.77489470793079817668e-8) // 0x3e64442d00000000,
+ PI4C := NewFromFloat(2.69515142907905952645e-15) // 0x3ce8469898cc5170,
+ M4PI := NewFromFloat(1.273239544735162542821171882678754627704620361328125) // 4/pi
+
+ // make argument positive
+ sign := false
+ if d.LessThan(NewFromFloat(0.0)) {
+ d = d.Neg()
+ }
+
+ j := d.Mul(M4PI).IntPart() // integer part of x/(Pi/4), as integer for tests on the phase angle
+ y := NewFromFloat(float64(j)) // integer part of x/(Pi/4), as float
+
+ // map zeros to origin
+ if j&1 == 1 {
+ j++
+ y = y.Add(NewFromFloat(1.0))
+ }
+ j &= 7 // octant modulo 2Pi radians (360 degrees)
+ // reflect in x axis
+ if j > 3 {
+ sign = !sign
+ j -= 4
+ }
+ if j > 1 {
+ sign = !sign
+ }
+
+ z := d.Sub(y.Mul(PI4A)).Sub(y.Mul(PI4B)).Sub(y.Mul(PI4C)) // Extended precision modular arithmetic
+ zz := z.Mul(z)
+
+ if j == 1 || j == 2 {
+ y = z.Add(z.Mul(zz).Mul(_sin[0].Mul(zz).Add(_sin[1]).Mul(zz).Add(_sin[2]).Mul(zz).Add(_sin[3]).Mul(zz).Add(_sin[4]).Mul(zz).Add(_sin[5])))
+ } else {
+ w := zz.Mul(zz).Mul(_cos[0].Mul(zz).Add(_cos[1]).Mul(zz).Add(_cos[2]).Mul(zz).Add(_cos[3]).Mul(zz).Add(_cos[4]).Mul(zz).Add(_cos[5]))
+ y = NewFromFloat(1.0).Sub(NewFromFloat(0.5).Mul(zz)).Add(w)
+ }
+ if sign {
+ y = y.Neg()
+ }
+ return y
+}
+
+var _tanP = [...]Decimal{
+ NewFromFloat(-1.30936939181383777646e+4), // 0xc0c992d8d24f3f38
+ NewFromFloat(1.15351664838587416140e+6), // 0x413199eca5fc9ddd
+ NewFromFloat(-1.79565251976484877988e+7), // 0xc1711fead3299176
+}
+var _tanQ = [...]Decimal{
+ NewFromFloat(1.00000000000000000000e+0),
+ NewFromFloat(1.36812963470692954678e+4), //0x40cab8a5eeb36572
+ NewFromFloat(-1.32089234440210967447e+6), //0xc13427bc582abc96
+ NewFromFloat(2.50083801823357915839e+7), //0x4177d98fc2ead8ef
+ NewFromFloat(-5.38695755929454629881e+7), //0xc189afe03cbe5a31
+}
+
+// Tan returns the tangent of the radian argument x.
+func (d Decimal) Tan() Decimal {
+
+ PI4A := NewFromFloat(7.85398125648498535156e-1) // 0x3fe921fb40000000, Pi/4 split into three parts
+ PI4B := NewFromFloat(3.77489470793079817668e-8) // 0x3e64442d00000000,
+ PI4C := NewFromFloat(2.69515142907905952645e-15) // 0x3ce8469898cc5170,
+ M4PI := NewFromFloat(1.273239544735162542821171882678754627704620361328125) // 4/pi
+
+ if d.Equal(NewFromFloat(0.0)) {
+ return d
+ }
+
+ // make argument positive but save the sign
+ sign := false
+ if d.LessThan(NewFromFloat(0.0)) {
+ d = d.Neg()
+ sign = true
+ }
+
+ j := d.Mul(M4PI).IntPart() // integer part of x/(Pi/4), as integer for tests on the phase angle
+ y := NewFromFloat(float64(j)) // integer part of x/(Pi/4), as float
+
+ // map zeros to origin
+ if j&1 == 1 {
+ j++
+ y = y.Add(NewFromFloat(1.0))
+ }
+
+ z := d.Sub(y.Mul(PI4A)).Sub(y.Mul(PI4B)).Sub(y.Mul(PI4C)) // Extended precision modular arithmetic
+ zz := z.Mul(z)
+
+ if zz.GreaterThan(NewFromFloat(1e-14)) {
+ w := zz.Mul(_tanP[0].Mul(zz).Add(_tanP[1]).Mul(zz).Add(_tanP[2]))
+ x := zz.Add(_tanQ[1]).Mul(zz).Add(_tanQ[2]).Mul(zz).Add(_tanQ[3]).Mul(zz).Add(_tanQ[4])
+ y = z.Add(z.Mul(w.Div(x)))
+ } else {
+ y = z
+ }
+ if j&2 == 2 {
+ y = NewFromFloat(-1.0).Div(y)
+ }
+ if sign {
+ y = y.Neg()
+ }
+ return y
+}
diff --git a/vendor/github.com/shopspring/decimal/rounding.go b/vendor/github.com/shopspring/decimal/rounding.go
new file mode 100644
index 000000000..8008f55cb
--- /dev/null
+++ b/vendor/github.com/shopspring/decimal/rounding.go
@@ -0,0 +1,119 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Multiprecision decimal numbers.
+// For floating-point formatting only; not general purpose.
+// Only operations are assign and (binary) left/right shift.
+// Can do binary floating point in multiprecision decimal precisely
+// because 2 divides 10; cannot do decimal floating point
+// in multiprecision binary precisely.
+
+package decimal
+
+type floatInfo struct {
+ mantbits uint
+ expbits uint
+ bias int
+}
+
+var float32info = floatInfo{23, 8, -127}
+var float64info = floatInfo{52, 11, -1023}
+
+// roundShortest rounds d (= mant * 2^exp) to the shortest number of digits
+// that will let the original floating point value be precisely reconstructed.
+func roundShortest(d *decimal, mant uint64, exp int, flt *floatInfo) {
+ // If mantissa is zero, the number is zero; stop now.
+ if mant == 0 {
+ d.nd = 0
+ return
+ }
+
+ // Compute upper and lower such that any decimal number
+ // between upper and lower (possibly inclusive)
+ // will round to the original floating point number.
+
+ // We may see at once that the number is already shortest.
+ //
+ // Suppose d is not denormal, so that 2^exp <= d < 10^dp.
+ // The closest shorter number is at least 10^(dp-nd) away.
+ // The lower/upper bounds computed below are at distance
+ // at most 2^(exp-mantbits).
+ //
+ // So the number is already shortest if 10^(dp-nd) > 2^(exp-mantbits),
+ // or equivalently log2(10)*(dp-nd) > exp-mantbits.
+ // It is true if 332/100*(dp-nd) >= exp-mantbits (log2(10) > 3.32).
+ minexp := flt.bias + 1 // minimum possible exponent
+ if exp > minexp && 332*(d.dp-d.nd) >= 100*(exp-int(flt.mantbits)) {
+ // The number is already shortest.
+ return
+ }
+
+ // d = mant << (exp - mantbits)
+ // Next highest floating point number is mant+1 << exp-mantbits.
+ // Our upper bound is halfway between, mant*2+1 << exp-mantbits-1.
+ upper := new(decimal)
+ upper.Assign(mant*2 + 1)
+ upper.Shift(exp - int(flt.mantbits) - 1)
+
+ // d = mant << (exp - mantbits)
+ // Next lowest floating point number is mant-1 << exp-mantbits,
+ // unless mant-1 drops the significant bit and exp is not the minimum exp,
+ // in which case the next lowest is mant*2-1 << exp-mantbits-1.
+ // Either way, call it mantlo << explo-mantbits.
+ // Our lower bound is halfway between, mantlo*2+1 << explo-mantbits-1.
+ var mantlo uint64
+ var explo int
+ if mant > 1<<flt.mantbits || exp == minexp {
+ mantlo = mant - 1
+ explo = exp
+ } else {
+ mantlo = mant*2 - 1
+ explo = exp - 1
+ }
+ lower := new(decimal)
+ lower.Assign(mantlo*2 + 1)
+ lower.Shift(explo - int(flt.mantbits) - 1)
+
+ // The upper and lower bounds are possible outputs only if
+ // the original mantissa is even, so that IEEE round-to-even
+ // would round to the original mantissa and not the neighbors.
+ inclusive := mant%2 == 0
+
+ // Now we can figure out the minimum number of digits required.
+ // Walk along until d has distinguished itself from upper and lower.
+ for i := 0; i < d.nd; i++ {
+ l := byte('0') // lower digit
+ if i < lower.nd {
+ l = lower.d[i]
+ }
+ m := d.d[i] // middle digit
+ u := byte('0') // upper digit
+ if i < upper.nd {
+ u = upper.d[i]
+ }
+
+ // Okay to round down (truncate) if lower has a different digit
+ // or if lower is inclusive and is exactly the result of rounding
+ // down (i.e., and we have reached the final digit of lower).
+ okdown := l != m || inclusive && i+1 == lower.nd
+
+ // Okay to round up if upper has a different digit and either upper
+ // is inclusive or upper is bigger than the result of rounding up.
+ okup := m != u && (inclusive || m+1 < u || i+1 < upper.nd)
+
+ // If it's okay to do either, then round to the nearest one.
+ // If it's okay to do only one, do it.
+ switch {
+ case okdown && okup:
+ d.Round(i + 1)
+ return
+ case okdown:
+ d.RoundDown(i + 1)
+ return
+ case okup:
+ d.RoundUp(i + 1)
+ return
+ }
+ }
+}
diff --git a/vendor/github.com/toqueteos/webbrowser/.travis.yml b/vendor/github.com/toqueteos/webbrowser/.travis.yml
new file mode 100644
index 000000000..b55b114ab
--- /dev/null
+++ b/vendor/github.com/toqueteos/webbrowser/.travis.yml
@@ -0,0 +1,9 @@
+language: go
+
+go:
+ - 1.2
+ - 1.12
+ - tip
+
+script:
+ - go build ./...
diff --git a/vendor/github.com/toqueteos/webbrowser/CONTRIBUTING.md b/vendor/github.com/toqueteos/webbrowser/CONTRIBUTING.md
new file mode 100644
index 000000000..b9f7bf82a
--- /dev/null
+++ b/vendor/github.com/toqueteos/webbrowser/CONTRIBUTING.md
@@ -0,0 +1,11 @@
+# webbrowser contributing guide
+
+Any changes are welcome!
+
+1. Be nice.
+2. Don't be afraid to ask, but please try searching first.
+
+## Looking for contact info?
+
+- Twitter: [@toqueteos](https://twitter.com/toqueteos)
+- Mail: `toqueteos AT gmail DOT com`
diff --git a/vendor/github.com/toqueteos/webbrowser/LICENSE.md b/vendor/github.com/toqueteos/webbrowser/LICENSE.md
new file mode 100644
index 000000000..0d67949ef
--- /dev/null
+++ b/vendor/github.com/toqueteos/webbrowser/LICENSE.md
@@ -0,0 +1,19 @@
+The MIT License (MIT)
+Copyright (c) 2013-19 by Carlos Cobo and contributors.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/toqueteos/webbrowser/README.md b/vendor/github.com/toqueteos/webbrowser/README.md
new file mode 100644
index 000000000..2ce091afb
--- /dev/null
+++ b/vendor/github.com/toqueteos/webbrowser/README.md
@@ -0,0 +1,56 @@
+# webbrowser [![Build Status](https://travis-ci.org/toqueteos/webbrowser.png?branch=master)](https://travis-ci.org/toqueteos/webbrowser) [![GoDoc](http://godoc.org/github.com/toqueteos/webbrowser?status.png)](http://godoc.org/github.com/toqueteos/webbrowser) [![Sourcegraph](https://sourcegraph.com/github.com/toqueteos/webbrowser/-/badge.svg)](https://sourcegraph.com/github.com/toqueteos/webbrowser?badge)
+
+webbrowser provides a simple API for opening web pages in your default browser.
+
+It's inspired by [Python's webbrowser](http://docs.python.org/3/library/webbrowser.html) package but lacks some of its features (e.g. opening a new window).
+
+It just opens a webpage; most browsers will open it in a new tab.
+
+## Installation
+
+As simple as:
+
+```bash
+go get -u github.com/toqueteos/webbrowser
+```
+
+## Usage
+
+```go
+package main
+
+import "github.com/toqueteos/webbrowser"
+
+func main() {
+ webbrowser.Open("http://golang.org")
+}
+```
+
+That's it!
+
+## Cross-platform support
+
+The package works on:
+
+- [x] `android` (verified by 3rd party)
+- [x] `darwin`
+- [x] `freebsd` (verified by 3rd party)
+- [x] `linux`
+- [x] `netbsd` (verified by 3rd party)
+- [x] `openbsd` (verified by 3rd party)
+- [x] `windows`
+
+## License
+
+It is licensed under the MIT open source license; please see the [LICENSE.md](https://github.com/toqueteos/webbrowser/blob/master/LICENSE.md) file for more information.
+
+## Thanks...
+
+Miki Tebeka wrote a nicer version that wasn't on godoc.org when I did this; [check it out!](https://bitbucket.org/tebeka/go-wise/src/d8db9bf5c4d1/desktop.go?at=default).
+
+## Already disliking it?
+
+No problem! There are alternative libraries that may better suit your needs:
+
+- https://github.com/pkg/browser, it does what webbrowser does and more!
+- https://github.com/skratchdot/open-golang, it even provides an `xdg-open` implementation in case you don't have it!
diff --git a/vendor/github.com/toqueteos/webbrowser/webbrowser.go b/vendor/github.com/toqueteos/webbrowser/webbrowser.go
new file mode 100644
index 000000000..f4f19b6b3
--- /dev/null
+++ b/vendor/github.com/toqueteos/webbrowser/webbrowser.go
@@ -0,0 +1,137 @@
+// Package webbrowser provides a simple API for opening web pages in your
+// default browser.
+package webbrowser
+
+import (
+ "errors"
+ "fmt"
+ "net/url"
+ "os"
+ "os/exec"
+ "runtime"
+ "strings"
+)
+
+var (
+ ErrCantOpenBrowser = errors.New("webbrowser: can't open browser")
+ ErrNoCandidates = errors.New("webbrowser: no browser candidate found for your OS")
+)
+
+// Candidates contains a list of registered `Browser`s that will be tried with Open.
+var Candidates []Browser
+
+type Browser interface {
+ // Command returns a ready-to-use Cmd that will open a URL.
+ Command(string) (*exec.Cmd, error)
+ // Open tries to open a URL in your default browser. NOTE: On some OSes
+ // this may cause your program to hang until the browser process is
+ // closed; see https://github.com/toqueteos/webbrowser/issues/4.
+ Open(string) error
+}
+
+// Open tries to open a URL in your default browser, first checking that a
+// display is available and that you are not inside an SSH session. NOTE: On
+// some OSes this may cause your program to hang until the browser process is
+// closed; see https://github.com/toqueteos/webbrowser/issues/4.
+func Open(s string) (err error) {
+ if len(Candidates) == 0 {
+ return ErrNoCandidates
+ }
+
+ // Try to determine whether a display is available (linux only) and
+ // whether we are inside an SSH session (linux and darwin).
+ switch runtime.GOOS {
+ case "linux":
+ // No display, no need to open a browser. Lynx users **MAY** have
+ // something to say about this.
+ if os.Getenv("DISPLAY") == "" {
+ return fmt.Errorf("webbrowser: tried to open %q, no screen found", s)
+ }
+ fallthrough
+ case "darwin":
+ // Check SSH env vars.
+ if os.Getenv("SSH_CLIENT") != "" || os.Getenv("SSH_TTY") != "" {
+ return fmt.Errorf("webbrowser: tried to open %q, but you are running a shell session", s)
+ }
+ }
+
+ // Try all candidates
+ for _, candidate := range Candidates {
+ err := candidate.Open(s)
+ if err == nil {
+ return nil
+ }
+ }
+
+ return ErrCantOpenBrowser
+}
+
+func init() {
+ // Register the default Browser for current OS, if it exists.
+ if os, ok := osCommand[runtime.GOOS]; ok {
+ Candidates = append(Candidates, browserCommand{os.cmd, os.args})
+ }
+}
+
+var (
+ osCommand = map[string]*browserCommand{
+ "android": &browserCommand{"xdg-open", nil},
+ "darwin": &browserCommand{"open", nil},
+ "freebsd": &browserCommand{"xdg-open", nil},
+ "linux": &browserCommand{"xdg-open", nil},
+ "netbsd": &browserCommand{"xdg-open", nil},
+ "openbsd": &browserCommand{"xdg-open", nil}, // It may be open instead
+ "windows": &browserCommand{"cmd", []string{"/c", "start"}},
+ }
+ winSchemes = [3]string{"https", "http", "file"}
+)
+
+type browserCommand struct {
+ cmd string
+ args []string
+}
+
+func (b browserCommand) Command(s string) (*exec.Cmd, error) {
+ u, err := url.Parse(s)
+ if err != nil {
+ return nil, err
+ }
+
+ validUrl := ensureValidURL(u)
+
+ b.args = append(b.args, validUrl)
+
+ return exec.Command(b.cmd, b.args...), nil
+}
+
+func (b browserCommand) Open(s string) error {
+ cmd, err := b.Command(s)
+ if err != nil {
+ return err
+ }
+
+ return cmd.Run()
+}
+
+func ensureScheme(u *url.URL) {
+ for _, s := range winSchemes {
+ if u.Scheme == s {
+ return
+ }
+ }
+ u.Scheme = "http"
+}
+
+func ensureValidURL(u *url.URL) string {
+ // Enforce a scheme (windows requires scheme to be set to work properly).
+ ensureScheme(u)
+ s := u.String()
+
+ // Escape characters not allowed by cmd/bash
+ switch runtime.GOOS {
+ case "windows":
+ s = strings.Replace(s, "&", `^&`, -1)
+ }
+
+ return s
+}
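
Aside: a minimal, self-contained sketch of calling the package added above (not part of this diff; the URL is illustrative). `Open` walks the registered `Candidates` in order and applies the display/SSH guards shown in the hunk.

```go
package main

import (
	"log"

	"github.com/toqueteos/webbrowser"
)

func main() {
	// Open tries each registered candidate in turn; on linux it fails
	// early without DISPLAY, and on linux/darwin it refuses to run when
	// SSH_CLIENT or SSH_TTY is set.
	if err := webbrowser.Open("https://golang.org"); err != nil {
		log.Fatal(err)
	}
}
```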
diff --git a/vendor/go.mongodb.org/mongo-driver/LICENSE b/vendor/go.mongodb.org/mongo-driver/LICENSE
new file mode 100644
index 000000000..261eeb9e9
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bson.go b/vendor/go.mongodb.org/mongo-driver/bson/bson.go
new file mode 100644
index 000000000..a0d818582
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bson.go
@@ -0,0 +1,50 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+//
+// Based on gopkg.in/mgo.v2/bson by Gustavo Niemeyer
+// See THIRD-PARTY-NOTICES for original license terms.
+
+package bson // import "go.mongodb.org/mongo-driver/bson"
+
+import (
+ "go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+// Zeroer allows custom struct types to report their zero state. All struct
+// types that don't implement Zeroer or where IsZero returns false are
+// considered to be not zero.
+type Zeroer interface {
+ IsZero() bool
+}
+
+// D is an ordered representation of a BSON document. This type should be used when the order of the elements matters,
+// such as MongoDB command documents. If the order of the elements does not matter, an M should be used instead.
+//
+// A D should not be constructed with duplicate key names, as that can cause undefined server behavior.
+//
+// Example usage:
+//
+// bson.D{{"foo", "bar"}, {"hello", "world"}, {"pi", 3.14159}}
+type D = primitive.D
+
+// E represents a BSON element for a D. It is usually used inside a D.
+type E = primitive.E
+
+// M is an unordered representation of a BSON document. This type should be used when the order of the elements does not
+// matter. This type is handled as a regular map[string]interface{} when encoding and decoding. Elements will be
+// serialized in an undefined, random order. If the order of the elements matters, a D should be used instead.
+//
+// Example usage:
+//
+// bson.M{"foo": "bar", "hello": "world", "pi": 3.14159}
+type M = primitive.M
+
+// An A is an ordered representation of a BSON array.
+//
+// Example usage:
+//
+// bson.A{"bar", "world", 3.14159, bson.D{{"qux", 12345}}}
+type A = primitive.A
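
Aside: a short sketch of these aliases in use (not part of this diff), assuming the driver's top-level `bson.Marshal` helper. `D` preserves element order, which MongoDB command documents require; `M` is an ordinary map with undefined element order.

```go
package main

import (
	"fmt"

	"go.mongodb.org/mongo-driver/bson"
)

func main() {
	// D keeps "ping" as the first element, as command documents require.
	cmd := bson.D{{Key: "ping", Value: 1}}

	// M is an ordinary map; element order is undefined when serialized.
	doc := bson.M{"hello": "world", "pi": 3.14159}

	raw, err := bson.Marshal(cmd)
	if err != nil {
		panic(err)
	}
	fmt.Printf("marshaled %d bytes; doc has %d keys\n", len(raw), len(doc))
}
```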
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/array_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/array_codec.go
new file mode 100644
index 000000000..4e24f9eed
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/array_codec.go
@@ -0,0 +1,50 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import (
+ "reflect"
+
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+ "go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
+)
+
+// ArrayCodec is the Codec used for bsoncore.Array values.
+type ArrayCodec struct{}
+
+var defaultArrayCodec = NewArrayCodec()
+
+// NewArrayCodec returns an ArrayCodec.
+func NewArrayCodec() *ArrayCodec {
+ return &ArrayCodec{}
+}
+
+// EncodeValue is the ValueEncoder for bsoncore.Array values.
+func (ac *ArrayCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tCoreArray {
+ return ValueEncoderError{Name: "CoreArrayEncodeValue", Types: []reflect.Type{tCoreArray}, Received: val}
+ }
+
+ arr := val.Interface().(bsoncore.Array)
+ return bsonrw.Copier{}.CopyArrayFromBytes(vw, arr)
+}
+
+// DecodeValue is the ValueDecoder for bsoncore.Array values.
+func (ac *ArrayCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tCoreArray {
+ return ValueDecoderError{Name: "CoreArrayDecodeValue", Types: []reflect.Type{tCoreArray}, Received: val}
+ }
+
+ if val.IsNil() {
+ val.Set(reflect.MakeSlice(val.Type(), 0, 0))
+ }
+
+ val.SetLen(0)
+ arr, err := bsonrw.Copier{}.AppendArrayBytes(val.Interface().(bsoncore.Array), vr)
+ val.Set(reflect.ValueOf(arr))
+ return err
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/bsoncodec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/bsoncodec.go
new file mode 100644
index 000000000..098ed69f9
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/bsoncodec.go
@@ -0,0 +1,238 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec // import "go.mongodb.org/mongo-driver/bson/bsoncodec"
+
+import (
+ "fmt"
+ "reflect"
+ "strings"
+
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+var (
+ emptyValue = reflect.Value{}
+)
+
+// Marshaler is an interface implemented by types that can marshal themselves
+// into a BSON document represented as bytes. The bytes returned must be a valid
+// BSON document if the error is nil.
+type Marshaler interface {
+ MarshalBSON() ([]byte, error)
+}
+
+// ValueMarshaler is an interface implemented by types that can marshal
+// themselves into a BSON value as bytes. The type must be the valid type for
+// the bytes returned. The bytes and byte type together must be valid if the
+// error is nil.
+type ValueMarshaler interface {
+ MarshalBSONValue() (bsontype.Type, []byte, error)
+}
+
+// Unmarshaler is an interface implemented by types that can unmarshal a BSON
+// document representation of themselves. The BSON bytes can be assumed to be
+// valid. UnmarshalBSON must copy the BSON bytes if it wishes to retain the data
+// after returning.
+type Unmarshaler interface {
+ UnmarshalBSON([]byte) error
+}
+
+// ValueUnmarshaler is an interface implemented by types that can unmarshal a
+// BSON value representation of themselves. The BSON bytes and type can be
+// assumed to be valid. UnmarshalBSONValue must copy the BSON value bytes if it
+// wishes to retain the data after returning.
+type ValueUnmarshaler interface {
+ UnmarshalBSONValue(bsontype.Type, []byte) error
+}
+
+// ValueEncoderError is an error returned from a ValueEncoder when the provided value can't be
+// encoded by the ValueEncoder.
+type ValueEncoderError struct {
+ Name string
+ Types []reflect.Type
+ Kinds []reflect.Kind
+ Received reflect.Value
+}
+
+func (vee ValueEncoderError) Error() string {
+ typeKinds := make([]string, 0, len(vee.Types)+len(vee.Kinds))
+ for _, t := range vee.Types {
+ typeKinds = append(typeKinds, t.String())
+ }
+ for _, k := range vee.Kinds {
+ if k == reflect.Map {
+ typeKinds = append(typeKinds, "map[string]*")
+ continue
+ }
+ typeKinds = append(typeKinds, k.String())
+ }
+ received := vee.Received.Kind().String()
+ if vee.Received.IsValid() {
+ received = vee.Received.Type().String()
+ }
+ return fmt.Sprintf("%s can only encode valid %s, but got %s", vee.Name, strings.Join(typeKinds, ", "), received)
+}
+
+// ValueDecoderError is an error returned from a ValueDecoder when the provided value can't be
+// decoded by the ValueDecoder.
+type ValueDecoderError struct {
+ Name string
+ Types []reflect.Type
+ Kinds []reflect.Kind
+ Received reflect.Value
+}
+
+func (vde ValueDecoderError) Error() string {
+ typeKinds := make([]string, 0, len(vde.Types)+len(vde.Kinds))
+ for _, t := range vde.Types {
+ typeKinds = append(typeKinds, t.String())
+ }
+ for _, k := range vde.Kinds {
+ if k == reflect.Map {
+ typeKinds = append(typeKinds, "map[string]*")
+ continue
+ }
+ typeKinds = append(typeKinds, k.String())
+ }
+ received := vde.Received.Kind().String()
+ if vde.Received.IsValid() {
+ received = vde.Received.Type().String()
+ }
+ return fmt.Sprintf("%s can only decode valid and settable %s, but got %s", vde.Name, strings.Join(typeKinds, ", "), received)
+}
+
+// EncodeContext is the contextual information required for a Codec to encode a
+// value.
+type EncodeContext struct {
+ *Registry
+ MinSize bool
+}
+
+// DecodeContext is the contextual information required for a Codec to decode a
+// value.
+type DecodeContext struct {
+ *Registry
+ Truncate bool
+
+ // Ancestor is the type of a containing document. This is mainly used to determine what type
+ // should be used when decoding an embedded document into an empty interface. For example, if
+ // Ancestor is a bson.M, BSON embedded document values being decoded into an empty interface
+ // will be decoded into a bson.M.
+ //
+ // Deprecated: Use DefaultDocumentM or DefaultDocumentD instead.
+ Ancestor reflect.Type
+
+ // defaultDocumentType specifies the Go type to decode top-level and nested BSON documents into. In particular, the
+ // usage for this field is restricted to data typed as "interface{}" or "map[string]interface{}". If defaultDocumentType
+ // is set to a type that a BSON document cannot be unmarshaled into (e.g. "string"), unmarshaling will result in an
+ // error. defaultDocumentType overrides the Ancestor field.
+ defaultDocumentType reflect.Type
+}
+
+// DefaultDocumentM will decode BSON documents into the primitive.M type. This behavior is restricted to data typed as
+// "interface{}" or "map[string]interface{}".
+func (dc *DecodeContext) DefaultDocumentM() {
+ dc.defaultDocumentType = reflect.TypeOf(primitive.M{})
+}
+
+// DefaultDocumentD will decode BSON documents into the primitive.D type. This behavior is restricted to data typed as
+// "interface{}" or "map[string]interface{}".
+func (dc *DecodeContext) DefaultDocumentD() {
+ dc.defaultDocumentType = reflect.TypeOf(primitive.D{})
+}
+
+// ValueCodec is the interface that groups the methods to encode and decode
+// values.
+type ValueCodec interface {
+ ValueEncoder
+ ValueDecoder
+}
+
+// ValueEncoder is the interface implemented by types that can handle the encoding of a value.
+type ValueEncoder interface {
+ EncodeValue(EncodeContext, bsonrw.ValueWriter, reflect.Value) error
+}
+
+// ValueEncoderFunc is an adapter function that allows a function with the correct signature to be
+// used as a ValueEncoder.
+type ValueEncoderFunc func(EncodeContext, bsonrw.ValueWriter, reflect.Value) error
+
+// EncodeValue implements the ValueEncoder interface.
+func (fn ValueEncoderFunc) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ return fn(ec, vw, val)
+}
+
+// ValueDecoder is the interface implemented by types that can handle the decoding of a value.
+type ValueDecoder interface {
+ DecodeValue(DecodeContext, bsonrw.ValueReader, reflect.Value) error
+}
+
+// ValueDecoderFunc is an adapter function that allows a function with the correct signature to be
+// used as a ValueDecoder.
+type ValueDecoderFunc func(DecodeContext, bsonrw.ValueReader, reflect.Value) error
+
+// DecodeValue implements the ValueDecoder interface.
+func (fn ValueDecoderFunc) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ return fn(dc, vr, val)
+}
+
+// typeDecoder is the interface implemented by types that can handle the decoding of a value given its type.
+type typeDecoder interface {
+ decodeType(DecodeContext, bsonrw.ValueReader, reflect.Type) (reflect.Value, error)
+}
+
+// typeDecoderFunc is an adapter function that allows a function with the correct signature to be used as a typeDecoder.
+type typeDecoderFunc func(DecodeContext, bsonrw.ValueReader, reflect.Type) (reflect.Value, error)
+
+func (fn typeDecoderFunc) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ return fn(dc, vr, t)
+}
+
+// decodeAdapter allows two functions with the correct signatures to be used as both a ValueDecoder and typeDecoder.
+type decodeAdapter struct {
+ ValueDecoderFunc
+ typeDecoderFunc
+}
+
+var _ ValueDecoder = decodeAdapter{}
+var _ typeDecoder = decodeAdapter{}
+
+// decodeTypeOrValue calls decoder.decodeType if decoder is a typeDecoder. Otherwise, it allocates a new element of type
+// t and calls decoder.DecodeValue on it.
+func decodeTypeOrValue(decoder ValueDecoder, dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ td, _ := decoder.(typeDecoder)
+ return decodeTypeOrValueWithInfo(decoder, td, dc, vr, t, true)
+}
+
+func decodeTypeOrValueWithInfo(vd ValueDecoder, td typeDecoder, dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type, convert bool) (reflect.Value, error) {
+ if td != nil {
+ val, err := td.decodeType(dc, vr, t)
+ if err == nil && convert && val.Type() != t {
+ // This conversion step is necessary for slices and maps. If a user declares variables like:
+ //
+ // type myBool bool
+ // var m map[string]myBool
+ //
+ // and tries to decode BSON bytes into the map, the decoding will fail if this conversion is not present
+ // because we'll try to assign a value of type bool to one of type myBool.
+ val = val.Convert(t)
+ }
+ return val, err
+ }
+
+ val := reflect.New(t).Elem()
+ err := vd.DecodeValue(dc, vr, val)
+ return val, err
+}
+
+// CodecZeroer is the interface implemented by Codecs that can also determine if
+// a value of the type that would be encoded is zero.
+type CodecZeroer interface {
+ IsTypeZero(interface{}) bool
+}
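
Aside: a hedged sketch (not part of this diff) of implementing the `ValueMarshaler` hook defined above. The value bytes are built with `bsoncore.AppendString` from the `x/bsonx/bsoncore` package these codecs already import; the `celsius` type is purely illustrative.

```go
package main

import (
	"fmt"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsontype"
	"go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
)

// celsius marshals itself as a BSON string such as "21.5C".
type celsius float64

func (c celsius) MarshalBSONValue() (bsontype.Type, []byte, error) {
	s := fmt.Sprintf("%.1fC", float64(c))
	// The returned bytes must be a valid BSON value of the returned type.
	return bsontype.String, bsoncore.AppendString(nil, s), nil
}

func main() {
	raw, err := bson.Marshal(bson.D{{Key: "temp", Value: celsius(21.5)}})
	if err != nil {
		panic(err)
	}
	fmt.Println(len(raw), "bytes")
}
```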
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/byte_slice_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/byte_slice_codec.go
new file mode 100644
index 000000000..5a916cc15
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/byte_slice_codec.go
@@ -0,0 +1,111 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import (
+ "fmt"
+ "reflect"
+
+ "go.mongodb.org/mongo-driver/bson/bsonoptions"
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+)
+
+// ByteSliceCodec is the Codec used for []byte values.
+type ByteSliceCodec struct {
+ EncodeNilAsEmpty bool
+}
+
+var (
+ defaultByteSliceCodec = NewByteSliceCodec()
+
+ _ ValueCodec = defaultByteSliceCodec
+ _ typeDecoder = defaultByteSliceCodec
+)
+
+// NewByteSliceCodec returns a ByteSliceCodec with options opts.
+func NewByteSliceCodec(opts ...*bsonoptions.ByteSliceCodecOptions) *ByteSliceCodec {
+ byteSliceOpt := bsonoptions.MergeByteSliceCodecOptions(opts...)
+ codec := ByteSliceCodec{}
+ if byteSliceOpt.EncodeNilAsEmpty != nil {
+ codec.EncodeNilAsEmpty = *byteSliceOpt.EncodeNilAsEmpty
+ }
+ return &codec
+}
+
+// EncodeValue is the ValueEncoder for []byte.
+func (bsc *ByteSliceCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tByteSlice {
+ return ValueEncoderError{Name: "ByteSliceEncodeValue", Types: []reflect.Type{tByteSlice}, Received: val}
+ }
+ if val.IsNil() && !bsc.EncodeNilAsEmpty {
+ return vw.WriteNull()
+ }
+ return vw.WriteBinary(val.Interface().([]byte))
+}
+
+func (bsc *ByteSliceCodec) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tByteSlice {
+ return emptyValue, ValueDecoderError{
+ Name: "ByteSliceDecodeValue",
+ Types: []reflect.Type{tByteSlice},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var data []byte
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.String:
+ str, err := vr.ReadString()
+ if err != nil {
+ return emptyValue, err
+ }
+ data = []byte(str)
+ case bsontype.Symbol:
+ sym, err := vr.ReadSymbol()
+ if err != nil {
+ return emptyValue, err
+ }
+ data = []byte(sym)
+ case bsontype.Binary:
+ var subtype byte
+ data, subtype, err = vr.ReadBinary()
+ if err != nil {
+ return emptyValue, err
+ }
+ if subtype != bsontype.BinaryGeneric && subtype != bsontype.BinaryBinaryOld {
+ return emptyValue, decodeBinaryError{subtype: subtype, typeName: "[]byte"}
+ }
+ case bsontype.Null:
+ err = vr.ReadNull()
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a []byte", vrType)
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(data), nil
+}
+
+// DecodeValue is the ValueDecoder for []byte.
+func (bsc *ByteSliceCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tByteSlice {
+ return ValueDecoderError{Name: "ByteSliceDecodeValue", Types: []reflect.Type{tByteSlice}, Received: val}
+ }
+
+ elem, err := bsc.decodeType(dc, vr, tByteSlice)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
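
Aside: a sketch (not part of this diff) of tuning this codec through the driver's `bsonoptions` and registry-builder APIs. With `EncodeNilAsEmpty` set, a nil `[]byte` takes the `WriteBinary` branch in `EncodeValue` above and encodes as an empty binary value instead of BSON null.

```go
package main

import (
	"reflect"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/bson/bsoncodec"
	"go.mongodb.org/mongo-driver/bson/bsonoptions"
)

func main() {
	codec := bsoncodec.NewByteSliceCodec(
		bsonoptions.ByteSliceCodecOptions().SetEncodeNilAsEmpty(true),
	)

	// Register the tuned codec for []byte on a registry of our own.
	rb := bson.NewRegistryBuilder()
	rb.RegisterTypeEncoder(reflect.TypeOf([]byte(nil)), codec)
	rb.RegisterTypeDecoder(reflect.TypeOf([]byte(nil)), codec)
	reg := rb.Build()

	raw, err := bson.MarshalWithRegistry(reg, bson.M{"data": []byte(nil)})
	if err != nil {
		panic(err)
	}
	_ = raw // "data" is an empty binary value here, not null.
}
```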
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/cond_addr_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/cond_addr_codec.go
new file mode 100644
index 000000000..cb8180f25
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/cond_addr_codec.go
@@ -0,0 +1,63 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import (
+ "reflect"
+
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+)
+
+// condAddrEncoder is the encoder used when a pointer to the encoding value has an encoder.
+type condAddrEncoder struct {
+ canAddrEnc ValueEncoder
+ elseEnc ValueEncoder
+}
+
+var _ ValueEncoder = (*condAddrEncoder)(nil)
+
+// newCondAddrEncoder returns a condAddrEncoder.
+func newCondAddrEncoder(canAddrEnc, elseEnc ValueEncoder) *condAddrEncoder {
+ encoder := condAddrEncoder{canAddrEnc: canAddrEnc, elseEnc: elseEnc}
+ return &encoder
+}
+
+// EncodeValue is the ValueEncoderFunc for a value that may be addressable.
+func (cae *condAddrEncoder) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if val.CanAddr() {
+ return cae.canAddrEnc.EncodeValue(ec, vw, val)
+ }
+ if cae.elseEnc != nil {
+ return cae.elseEnc.EncodeValue(ec, vw, val)
+ }
+ return ErrNoEncoder{Type: val.Type()}
+}
+
+// condAddrDecoder is the decoder used when a pointer to the value has a decoder.
+type condAddrDecoder struct {
+ canAddrDec ValueDecoder
+ elseDec ValueDecoder
+}
+
+var _ ValueDecoder = (*condAddrDecoder)(nil)
+
+// newCondAddrDecoder returns a condAddrDecoder.
+func newCondAddrDecoder(canAddrDec, elseDec ValueDecoder) *condAddrDecoder {
+ decoder := condAddrDecoder{canAddrDec: canAddrDec, elseDec: elseDec}
+ return &decoder
+}
+
+// DecodeValue is the ValueDecoderFunc for a value that may be addressable.
+func (cad *condAddrDecoder) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if val.CanAddr() {
+ return cad.canAddrDec.DecodeValue(dc, vr, val)
+ }
+ if cad.elseDec != nil {
+ return cad.elseDec.DecodeValue(dc, vr, val)
+ }
+ return ErrNoDecoder{Type: val.Type()}
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_decoders.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_decoders.go
new file mode 100644
index 000000000..e95cab585
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_decoders.go
@@ -0,0 +1,1729 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "math"
+ "net/url"
+ "reflect"
+ "strconv"
+ "time"
+
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+ "go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
+)
+
+var (
+ defaultValueDecoders DefaultValueDecoders
+ errCannotTruncate = errors.New("float64 can only be truncated to an integer type when truncation is enabled")
+)
+
+type decodeBinaryError struct {
+ subtype byte
+ typeName string
+}
+
+func (d decodeBinaryError) Error() string {
+ return fmt.Sprintf("only binary values with subtype 0x00 or 0x02 can be decoded into %s, but got subtype %v", d.typeName, d.subtype)
+}
+
+func newDefaultStructCodec() *StructCodec {
+ codec, err := NewStructCodec(DefaultStructTagParser)
+ if err != nil {
+ // This function is called from the codec registration path, so errors can't be propagated. If there's an error
+ // constructing the StructCodec, we panic to avoid losing it.
+ panic(fmt.Errorf("error creating default StructCodec: %v", err))
+ }
+ return codec
+}
+
+// DefaultValueDecoders is a namespace type for the default ValueDecoders used
+// when creating a registry.
+type DefaultValueDecoders struct{}
+
+// RegisterDefaultDecoders will register the decoder methods attached to DefaultValueDecoders with
+// the provided RegistryBuilder.
+//
+// There is no support for decoding map[string]interface{} because there is no decoder for
+// interface{}, so users must either register this decoder themselves or use the
+// EmptyInterfaceDecoder available in the bson package.
+func (dvd DefaultValueDecoders) RegisterDefaultDecoders(rb *RegistryBuilder) {
+ if rb == nil {
+ panic(errors.New("argument to RegisterDefaultDecoders must not be nil"))
+ }
+
+ intDecoder := decodeAdapter{dvd.IntDecodeValue, dvd.intDecodeType}
+ floatDecoder := decodeAdapter{dvd.FloatDecodeValue, dvd.floatDecodeType}
+
+ rb.
+ RegisterTypeDecoder(tD, ValueDecoderFunc(dvd.DDecodeValue)).
+ RegisterTypeDecoder(tBinary, decodeAdapter{dvd.BinaryDecodeValue, dvd.binaryDecodeType}).
+ RegisterTypeDecoder(tUndefined, decodeAdapter{dvd.UndefinedDecodeValue, dvd.undefinedDecodeType}).
+ RegisterTypeDecoder(tDateTime, decodeAdapter{dvd.DateTimeDecodeValue, dvd.dateTimeDecodeType}).
+ RegisterTypeDecoder(tNull, decodeAdapter{dvd.NullDecodeValue, dvd.nullDecodeType}).
+ RegisterTypeDecoder(tRegex, decodeAdapter{dvd.RegexDecodeValue, dvd.regexDecodeType}).
+ RegisterTypeDecoder(tDBPointer, decodeAdapter{dvd.DBPointerDecodeValue, dvd.dBPointerDecodeType}).
+ RegisterTypeDecoder(tTimestamp, decodeAdapter{dvd.TimestampDecodeValue, dvd.timestampDecodeType}).
+ RegisterTypeDecoder(tMinKey, decodeAdapter{dvd.MinKeyDecodeValue, dvd.minKeyDecodeType}).
+ RegisterTypeDecoder(tMaxKey, decodeAdapter{dvd.MaxKeyDecodeValue, dvd.maxKeyDecodeType}).
+ RegisterTypeDecoder(tJavaScript, decodeAdapter{dvd.JavaScriptDecodeValue, dvd.javaScriptDecodeType}).
+ RegisterTypeDecoder(tSymbol, decodeAdapter{dvd.SymbolDecodeValue, dvd.symbolDecodeType}).
+ RegisterTypeDecoder(tByteSlice, defaultByteSliceCodec).
+ RegisterTypeDecoder(tTime, defaultTimeCodec).
+ RegisterTypeDecoder(tEmpty, defaultEmptyInterfaceCodec).
+ RegisterTypeDecoder(tCoreArray, defaultArrayCodec).
+ RegisterTypeDecoder(tOID, decodeAdapter{dvd.ObjectIDDecodeValue, dvd.objectIDDecodeType}).
+ RegisterTypeDecoder(tDecimal, decodeAdapter{dvd.Decimal128DecodeValue, dvd.decimal128DecodeType}).
+ RegisterTypeDecoder(tJSONNumber, decodeAdapter{dvd.JSONNumberDecodeValue, dvd.jsonNumberDecodeType}).
+ RegisterTypeDecoder(tURL, decodeAdapter{dvd.URLDecodeValue, dvd.urlDecodeType}).
+ RegisterTypeDecoder(tCoreDocument, ValueDecoderFunc(dvd.CoreDocumentDecodeValue)).
+ RegisterTypeDecoder(tCodeWithScope, decodeAdapter{dvd.CodeWithScopeDecodeValue, dvd.codeWithScopeDecodeType}).
+ RegisterDefaultDecoder(reflect.Bool, decodeAdapter{dvd.BooleanDecodeValue, dvd.booleanDecodeType}).
+ RegisterDefaultDecoder(reflect.Int, intDecoder).
+ RegisterDefaultDecoder(reflect.Int8, intDecoder).
+ RegisterDefaultDecoder(reflect.Int16, intDecoder).
+ RegisterDefaultDecoder(reflect.Int32, intDecoder).
+ RegisterDefaultDecoder(reflect.Int64, intDecoder).
+ RegisterDefaultDecoder(reflect.Uint, defaultUIntCodec).
+ RegisterDefaultDecoder(reflect.Uint8, defaultUIntCodec).
+ RegisterDefaultDecoder(reflect.Uint16, defaultUIntCodec).
+ RegisterDefaultDecoder(reflect.Uint32, defaultUIntCodec).
+ RegisterDefaultDecoder(reflect.Uint64, defaultUIntCodec).
+ RegisterDefaultDecoder(reflect.Float32, floatDecoder).
+ RegisterDefaultDecoder(reflect.Float64, floatDecoder).
+ RegisterDefaultDecoder(reflect.Array, ValueDecoderFunc(dvd.ArrayDecodeValue)).
+ RegisterDefaultDecoder(reflect.Map, defaultMapCodec).
+ RegisterDefaultDecoder(reflect.Slice, defaultSliceCodec).
+ RegisterDefaultDecoder(reflect.String, defaultStringCodec).
+ RegisterDefaultDecoder(reflect.Struct, newDefaultStructCodec()).
+ RegisterDefaultDecoder(reflect.Ptr, NewPointerCodec()).
+ RegisterTypeMapEntry(bsontype.Double, tFloat64).
+ RegisterTypeMapEntry(bsontype.String, tString).
+ RegisterTypeMapEntry(bsontype.Array, tA).
+ RegisterTypeMapEntry(bsontype.Binary, tBinary).
+ RegisterTypeMapEntry(bsontype.Undefined, tUndefined).
+ RegisterTypeMapEntry(bsontype.ObjectID, tOID).
+ RegisterTypeMapEntry(bsontype.Boolean, tBool).
+ RegisterTypeMapEntry(bsontype.DateTime, tDateTime).
+ RegisterTypeMapEntry(bsontype.Regex, tRegex).
+ RegisterTypeMapEntry(bsontype.DBPointer, tDBPointer).
+ RegisterTypeMapEntry(bsontype.JavaScript, tJavaScript).
+ RegisterTypeMapEntry(bsontype.Symbol, tSymbol).
+ RegisterTypeMapEntry(bsontype.CodeWithScope, tCodeWithScope).
+ RegisterTypeMapEntry(bsontype.Int32, tInt32).
+ RegisterTypeMapEntry(bsontype.Int64, tInt64).
+ RegisterTypeMapEntry(bsontype.Timestamp, tTimestamp).
+ RegisterTypeMapEntry(bsontype.Decimal128, tDecimal).
+ RegisterTypeMapEntry(bsontype.MinKey, tMinKey).
+ RegisterTypeMapEntry(bsontype.MaxKey, tMaxKey).
+ RegisterTypeMapEntry(bsontype.Type(0), tD).
+ RegisterTypeMapEntry(bsontype.EmbeddedDocument, tD).
+ RegisterHookDecoder(tValueUnmarshaler, ValueDecoderFunc(dvd.ValueUnmarshalerDecodeValue)).
+ RegisterHookDecoder(tUnmarshaler, ValueDecoderFunc(dvd.UnmarshalerDecodeValue))
+}
+
+// DDecodeValue is the ValueDecoderFunc for primitive.D instances.
+func (dvd DefaultValueDecoders) DDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.IsValid() || !val.CanSet() || val.Type() != tD {
+ return ValueDecoderError{Name: "DDecodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: val}
+ }
+
+ switch vrType := vr.Type(); vrType {
+ case bsontype.Type(0), bsontype.EmbeddedDocument:
+ dc.Ancestor = tD
+ case bsontype.Null:
+ val.Set(reflect.Zero(val.Type()))
+ return vr.ReadNull()
+ default:
+ return fmt.Errorf("cannot decode %v into a primitive.D", vrType)
+ }
+
+ dr, err := vr.ReadDocument()
+ if err != nil {
+ return err
+ }
+
+ decoder, err := dc.LookupDecoder(tEmpty)
+ if err != nil {
+ return err
+ }
+ tEmptyTypeDecoder, _ := decoder.(typeDecoder)
+
+ // Use the elements in the provided value if it's non-nil. Otherwise, allocate a new D instance.
+ var elems primitive.D
+ if !val.IsNil() {
+ val.SetLen(0)
+ elems = val.Interface().(primitive.D)
+ } else {
+ elems = make(primitive.D, 0)
+ }
+
+ for {
+ key, elemVr, err := dr.ReadElement()
+ if err == bsonrw.ErrEOD {
+ break
+ } else if err != nil {
+ return err
+ }
+
+ // Pass false for convert because we don't need to call reflect.Value.Convert for tEmpty.
+ elem, err := decodeTypeOrValueWithInfo(decoder, tEmptyTypeDecoder, dc, elemVr, tEmpty, false)
+ if err != nil {
+ return err
+ }
+
+ elems = append(elems, primitive.E{Key: key, Value: elem.Interface()})
+ }
+
+ val.Set(reflect.ValueOf(elems))
+ return nil
+}
+
+func (dvd DefaultValueDecoders) booleanDecodeType(dctx DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t.Kind() != reflect.Bool {
+ return emptyValue, ValueDecoderError{
+ Name: "BooleanDecodeValue",
+ Kinds: []reflect.Kind{reflect.Bool},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var b bool
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.Int32:
+ i32, err := vr.ReadInt32()
+ if err != nil {
+ return emptyValue, err
+ }
+ b = (i32 != 0)
+ case bsontype.Int64:
+ i64, err := vr.ReadInt64()
+ if err != nil {
+ return emptyValue, err
+ }
+ b = (i64 != 0)
+ case bsontype.Double:
+ f64, err := vr.ReadDouble()
+ if err != nil {
+ return emptyValue, err
+ }
+ b = (f64 != 0)
+ case bsontype.Boolean:
+ b, err = vr.ReadBoolean()
+ case bsontype.Null:
+ err = vr.ReadNull()
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a boolean", vrType)
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(b), nil
+}
+
+// BooleanDecodeValue is the ValueDecoderFunc for bool types.
+func (dvd DefaultValueDecoders) BooleanDecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.IsValid() || !val.CanSet() || val.Kind() != reflect.Bool {
+ return ValueDecoderError{Name: "BooleanDecodeValue", Kinds: []reflect.Kind{reflect.Bool}, Received: val}
+ }
+
+ elem, err := dvd.booleanDecodeType(dctx, vr, val.Type())
+ if err != nil {
+ return err
+ }
+
+ val.SetBool(elem.Bool())
+ return nil
+}
+
+func (DefaultValueDecoders) intDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ var i64 int64
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.Int32:
+ i32, err := vr.ReadInt32()
+ if err != nil {
+ return emptyValue, err
+ }
+ i64 = int64(i32)
+ case bsontype.Int64:
+ i64, err = vr.ReadInt64()
+ if err != nil {
+ return emptyValue, err
+ }
+ case bsontype.Double:
+ f64, err := vr.ReadDouble()
+ if err != nil {
+ return emptyValue, err
+ }
+ if !dc.Truncate && math.Floor(f64) != f64 {
+ return emptyValue, errCannotTruncate
+ }
+ if f64 > float64(math.MaxInt64) {
+ return emptyValue, fmt.Errorf("%g overflows int64", f64)
+ }
+ i64 = int64(f64)
+ case bsontype.Boolean:
+ b, err := vr.ReadBoolean()
+ if err != nil {
+ return emptyValue, err
+ }
+ if b {
+ i64 = 1
+ }
+ case bsontype.Null:
+ if err = vr.ReadNull(); err != nil {
+ return emptyValue, err
+ }
+ case bsontype.Undefined:
+ if err = vr.ReadUndefined(); err != nil {
+ return emptyValue, err
+ }
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into an integer type", vrType)
+ }
+
+ switch t.Kind() {
+ case reflect.Int8:
+ if i64 < math.MinInt8 || i64 > math.MaxInt8 {
+ return emptyValue, fmt.Errorf("%d overflows int8", i64)
+ }
+
+ return reflect.ValueOf(int8(i64)), nil
+ case reflect.Int16:
+ if i64 < math.MinInt16 || i64 > math.MaxInt16 {
+ return emptyValue, fmt.Errorf("%d overflows int16", i64)
+ }
+
+ return reflect.ValueOf(int16(i64)), nil
+ case reflect.Int32:
+ if i64 < math.MinInt32 || i64 > math.MaxInt32 {
+ return emptyValue, fmt.Errorf("%d overflows int32", i64)
+ }
+
+ return reflect.ValueOf(int32(i64)), nil
+ case reflect.Int64:
+ return reflect.ValueOf(i64), nil
+ case reflect.Int:
+ if int64(int(i64)) != i64 { // Can we fit this inside of an int
+ return emptyValue, fmt.Errorf("%d overflows int", i64)
+ }
+
+ return reflect.ValueOf(int(i64)), nil
+ default:
+ return emptyValue, ValueDecoderError{
+ Name: "IntDecodeValue",
+ Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int},
+ Received: reflect.Zero(t),
+ }
+ }
+}
+
+// IntDecodeValue is the ValueDecoderFunc for int types.
+func (dvd DefaultValueDecoders) IntDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() {
+ return ValueDecoderError{
+ Name: "IntDecodeValue",
+ Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int},
+ Received: val,
+ }
+ }
+
+ elem, err := dvd.intDecodeType(dc, vr, val.Type())
+ if err != nil {
+ return err
+ }
+
+ val.SetInt(elem.Int())
+ return nil
+}
+
+// UintDecodeValue is the ValueDecoderFunc for uint types.
+//
+// Deprecated: UintDecodeValue is not registered by default. Use UintCodec.DecodeValue instead.
+func (dvd DefaultValueDecoders) UintDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ var i64 int64
+ var err error
+ switch vr.Type() {
+ case bsontype.Int32:
+ i32, err := vr.ReadInt32()
+ if err != nil {
+ return err
+ }
+ i64 = int64(i32)
+ case bsontype.Int64:
+ i64, err = vr.ReadInt64()
+ if err != nil {
+ return err
+ }
+ case bsontype.Double:
+ f64, err := vr.ReadDouble()
+ if err != nil {
+ return err
+ }
+ if !dc.Truncate && math.Floor(f64) != f64 {
+ return errors.New("UintDecodeValue can only truncate float64 to an integer type when truncation is enabled")
+ }
+ if f64 > float64(math.MaxInt64) {
+ return fmt.Errorf("%g overflows int64", f64)
+ }
+ i64 = int64(f64)
+ case bsontype.Boolean:
+ b, err := vr.ReadBoolean()
+ if err != nil {
+ return err
+ }
+ if b {
+ i64 = 1
+ }
+ default:
+ return fmt.Errorf("cannot decode %v into an integer type", vr.Type())
+ }
+
+ if !val.CanSet() {
+ return ValueDecoderError{
+ Name: "UintDecodeValue",
+ Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint},
+ Received: val,
+ }
+ }
+
+ switch val.Kind() {
+ case reflect.Uint8:
+ if i64 < 0 || i64 > math.MaxUint8 {
+ return fmt.Errorf("%d overflows uint8", i64)
+ }
+ case reflect.Uint16:
+ if i64 < 0 || i64 > math.MaxUint16 {
+ return fmt.Errorf("%d overflows uint16", i64)
+ }
+ case reflect.Uint32:
+ if i64 < 0 || i64 > math.MaxUint32 {
+ return fmt.Errorf("%d overflows uint32", i64)
+ }
+ case reflect.Uint64:
+ if i64 < 0 {
+ return fmt.Errorf("%d overflows uint64", i64)
+ }
+ case reflect.Uint:
+ if i64 < 0 || int64(uint(i64)) != i64 { // Can we fit this inside of an uint
+ return fmt.Errorf("%d overflows uint", i64)
+ }
+ default:
+ return ValueDecoderError{
+ Name: "UintDecodeValue",
+ Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint},
+ Received: val,
+ }
+ }
+
+ val.SetUint(uint64(i64))
+ return nil
+}
+
+func (dvd DefaultValueDecoders) floatDecodeType(ec DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ var f float64
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.Int32:
+ i32, err := vr.ReadInt32()
+ if err != nil {
+ return emptyValue, err
+ }
+ f = float64(i32)
+ case bsontype.Int64:
+ i64, err := vr.ReadInt64()
+ if err != nil {
+ return emptyValue, err
+ }
+ f = float64(i64)
+ case bsontype.Double:
+ f, err = vr.ReadDouble()
+ if err != nil {
+ return emptyValue, err
+ }
+ case bsontype.Boolean:
+ b, err := vr.ReadBoolean()
+ if err != nil {
+ return emptyValue, err
+ }
+ if b {
+ f = 1
+ }
+ case bsontype.Null:
+ if err = vr.ReadNull(); err != nil {
+ return emptyValue, err
+ }
+ case bsontype.Undefined:
+ if err = vr.ReadUndefined(); err != nil {
+ return emptyValue, err
+ }
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a float32 or float64 type", vrType)
+ }
+
+ switch t.Kind() {
+ case reflect.Float32:
+ if !ec.Truncate && float64(float32(f)) != f {
+ return emptyValue, errCannotTruncate
+ }
+
+ return reflect.ValueOf(float32(f)), nil
+ case reflect.Float64:
+ return reflect.ValueOf(f), nil
+ default:
+ return emptyValue, ValueDecoderError{
+ Name: "FloatDecodeValue",
+ Kinds: []reflect.Kind{reflect.Float32, reflect.Float64},
+ Received: reflect.Zero(t),
+ }
+ }
+}
+
+// FloatDecodeValue is the ValueDecoderFunc for float types.
+func (dvd DefaultValueDecoders) FloatDecodeValue(ec DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() {
+ return ValueDecoderError{
+ Name: "FloatDecodeValue",
+ Kinds: []reflect.Kind{reflect.Float32, reflect.Float64},
+ Received: val,
+ }
+ }
+
+ elem, err := dvd.floatDecodeType(ec, vr, val.Type())
+ if err != nil {
+ return err
+ }
+
+ val.SetFloat(elem.Float())
+ return nil
+}
+
+// StringDecodeValue is the ValueDecoderFunc for string types.
+//
+// Deprecated: StringDecodeValue is not registered by default. Use StringCodec.DecodeValue instead.
+func (dvd DefaultValueDecoders) StringDecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ var str string
+ var err error
+ switch vr.Type() {
+ // TODO(GODRIVER-577): Handle JavaScript and Symbol BSON types when allowed.
+ case bsontype.String:
+ str, err = vr.ReadString()
+ if err != nil {
+ return err
+ }
+ default:
+ return fmt.Errorf("cannot decode %v into a string type", vr.Type())
+ }
+ if !val.CanSet() || val.Kind() != reflect.String {
+ return ValueDecoderError{Name: "StringDecodeValue", Kinds: []reflect.Kind{reflect.String}, Received: val}
+ }
+
+ val.SetString(str)
+ return nil
+}
+
+func (DefaultValueDecoders) javaScriptDecodeType(dctx DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tJavaScript {
+ return emptyValue, ValueDecoderError{
+ Name: "JavaScriptDecodeValue",
+ Types: []reflect.Type{tJavaScript},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var js string
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.JavaScript:
+ js, err = vr.ReadJavascript()
+ case bsontype.Null:
+ err = vr.ReadNull()
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a primitive.JavaScript", vrType)
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(primitive.JavaScript(js)), nil
+}
+
+// JavaScriptDecodeValue is the ValueDecoderFunc for the primitive.JavaScript type.
+func (dvd DefaultValueDecoders) JavaScriptDecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tJavaScript {
+ return ValueDecoderError{Name: "JavaScriptDecodeValue", Types: []reflect.Type{tJavaScript}, Received: val}
+ }
+
+ elem, err := dvd.javaScriptDecodeType(dctx, vr, tJavaScript)
+ if err != nil {
+ return err
+ }
+
+ val.SetString(elem.String())
+ return nil
+}
+
+func (DefaultValueDecoders) symbolDecodeType(dctx DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tSymbol {
+ return emptyValue, ValueDecoderError{
+ Name: "SymbolDecodeValue",
+ Types: []reflect.Type{tSymbol},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var symbol string
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.String:
+ symbol, err = vr.ReadString()
+ case bsontype.Symbol:
+ symbol, err = vr.ReadSymbol()
+ case bsontype.Binary:
+ data, subtype, err := vr.ReadBinary()
+ if err != nil {
+ return emptyValue, err
+ }
+
+ if subtype != bsontype.BinaryGeneric && subtype != bsontype.BinaryBinaryOld {
+ return emptyValue, decodeBinaryError{subtype: subtype, typeName: "primitive.Symbol"}
+ }
+ symbol = string(data)
+ case bsontype.Null:
+ err = vr.ReadNull()
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a primitive.Symbol", vrType)
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(primitive.Symbol(symbol)), nil
+}
+
+// SymbolDecodeValue is the ValueDecoderFunc for the primitive.Symbol type.
+func (dvd DefaultValueDecoders) SymbolDecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tSymbol {
+ return ValueDecoderError{Name: "SymbolDecodeValue", Types: []reflect.Type{tSymbol}, Received: val}
+ }
+
+ elem, err := dvd.symbolDecodeType(dctx, vr, tSymbol)
+ if err != nil {
+ return err
+ }
+
+ val.SetString(elem.String())
+ return nil
+}
+
+func (DefaultValueDecoders) binaryDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tBinary {
+ return emptyValue, ValueDecoderError{
+ Name: "BinaryDecodeValue",
+ Types: []reflect.Type{tBinary},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var data []byte
+ var subtype byte
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.Binary:
+ data, subtype, err = vr.ReadBinary()
+ case bsontype.Null:
+ err = vr.ReadNull()
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a Binary", vrType)
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(primitive.Binary{Subtype: subtype, Data: data}), nil
+}
+
+// BinaryDecodeValue is the ValueDecoderFunc for Binary.
+func (dvd DefaultValueDecoders) BinaryDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tBinary {
+ return ValueDecoderError{Name: "BinaryDecodeValue", Types: []reflect.Type{tBinary}, Received: val}
+ }
+
+ elem, err := dvd.binaryDecodeType(dc, vr, tBinary)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
+
+func (DefaultValueDecoders) undefinedDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tUndefined {
+ return emptyValue, ValueDecoderError{
+ Name: "UndefinedDecodeValue",
+ Types: []reflect.Type{tUndefined},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ case bsontype.Null:
+ err = vr.ReadNull()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into an Undefined", vr.Type())
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(primitive.Undefined{}), nil
+}
+
+// UndefinedDecodeValue is the ValueDecoderFunc for Undefined.
+func (dvd DefaultValueDecoders) UndefinedDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tUndefined {
+ return ValueDecoderError{Name: "UndefinedDecodeValue", Types: []reflect.Type{tUndefined}, Received: val}
+ }
+
+ elem, err := dvd.undefinedDecodeType(dc, vr, tUndefined)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
+
+// objectIDDecodeType accepts both the raw 12-byte string and the pretty-printed 24-byte hex string formats.
+func (dvd DefaultValueDecoders) objectIDDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tOID {
+ return emptyValue, ValueDecoderError{
+ Name: "ObjectIDDecodeValue",
+ Types: []reflect.Type{tOID},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var oid primitive.ObjectID
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.ObjectID:
+ oid, err = vr.ReadObjectID()
+ if err != nil {
+ return emptyValue, err
+ }
+ case bsontype.String:
+ str, err := vr.ReadString()
+ if err != nil {
+ return emptyValue, err
+ }
+ if oid, err = primitive.ObjectIDFromHex(str); err == nil {
+ break
+ }
+ if len(str) != 12 {
+ return emptyValue, fmt.Errorf("an ObjectID string must be exactly 12 bytes long (got %v)", len(str))
+ }
+ byteArr := []byte(str)
+ copy(oid[:], byteArr)
+ case bsontype.Null:
+ if err = vr.ReadNull(); err != nil {
+ return emptyValue, err
+ }
+ case bsontype.Undefined:
+ if err = vr.ReadUndefined(); err != nil {
+ return emptyValue, err
+ }
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into an ObjectID", vrType)
+ }
+
+ return reflect.ValueOf(oid), nil
+}
+
+// ObjectIDDecodeValue is the ValueDecoderFunc for primitive.ObjectID.
+func (dvd DefaultValueDecoders) ObjectIDDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tOID {
+ return ValueDecoderError{Name: "ObjectIDDecodeValue", Types: []reflect.Type{tOID}, Received: val}
+ }
+
+ elem, err := dvd.objectIDDecodeType(dc, vr, tOID)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
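+
+// An illustrative sketch of the two string forms accepted above (not part of
+// the driver API):
+//
+//	oid, err := primitive.ObjectIDFromHex("5f2a6c3e9d1b4a0012345678") // 24-byte hex form
+//	if err != nil {
+//		// a raw 12-byte string would instead be copied byte-for-byte into oid
+//	}
+//	_ = oid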
+
+func (DefaultValueDecoders) dateTimeDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tDateTime {
+ return emptyValue, ValueDecoderError{
+ Name: "DateTimeDecodeValue",
+ Types: []reflect.Type{tDateTime},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var dt int64
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.DateTime:
+ dt, err = vr.ReadDateTime()
+ case bsontype.Null:
+ err = vr.ReadNull()
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a DateTime", vrType)
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(primitive.DateTime(dt)), nil
+}
+
+// DateTimeDecodeValue is the ValueDecoderFunc for DateTime.
+func (dvd DefaultValueDecoders) DateTimeDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tDateTime {
+ return ValueDecoderError{Name: "DateTimeDecodeValue", Types: []reflect.Type{tDateTime}, Received: val}
+ }
+
+ elem, err := dvd.dateTimeDecodeType(dc, vr, tDateTime)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
+
+func (DefaultValueDecoders) nullDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tNull {
+ return emptyValue, ValueDecoderError{
+ Name: "NullDecodeValue",
+ Types: []reflect.Type{tNull},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ case bsontype.Null:
+ err = vr.ReadNull()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a Null", vr.Type())
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(primitive.Null{}), nil
+}
+
+// NullDecodeValue is the ValueDecoderFunc for Null.
+func (dvd DefaultValueDecoders) NullDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tNull {
+ return ValueDecoderError{Name: "NullDecodeValue", Types: []reflect.Type{tNull}, Received: val}
+ }
+
+ elem, err := dvd.nullDecodeType(dc, vr, tNull)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
+
+func (DefaultValueDecoders) regexDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tRegex {
+ return emptyValue, ValueDecoderError{
+ Name: "RegexDecodeValue",
+ Types: []reflect.Type{tRegex},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var pattern, options string
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.Regex:
+ pattern, options, err = vr.ReadRegex()
+ case bsontype.Null:
+ err = vr.ReadNull()
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a Regex", vrType)
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(primitive.Regex{Pattern: pattern, Options: options}), nil
+}
+
+// RegexDecodeValue is the ValueDecoderFunc for Regex.
+func (dvd DefaultValueDecoders) RegexDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tRegex {
+ return ValueDecoderError{Name: "RegexDecodeValue", Types: []reflect.Type{tRegex}, Received: val}
+ }
+
+ elem, err := dvd.regexDecodeType(dc, vr, tRegex)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
+
+func (DefaultValueDecoders) dBPointerDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tDBPointer {
+ return emptyValue, ValueDecoderError{
+ Name: "DBPointerDecodeValue",
+ Types: []reflect.Type{tDBPointer},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var ns string
+ var pointer primitive.ObjectID
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.DBPointer:
+ ns, pointer, err = vr.ReadDBPointer()
+ case bsontype.Null:
+ err = vr.ReadNull()
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a DBPointer", vrType)
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(primitive.DBPointer{DB: ns, Pointer: pointer}), nil
+}
+
+// DBPointerDecodeValue is the ValueDecoderFunc for DBPointer.
+func (dvd DefaultValueDecoders) DBPointerDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tDBPointer {
+ return ValueDecoderError{Name: "DBPointerDecodeValue", Types: []reflect.Type{tDBPointer}, Received: val}
+ }
+
+ elem, err := dvd.dBPointerDecodeType(dc, vr, tDBPointer)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
+
+func (DefaultValueDecoders) timestampDecodeType(dc DecodeContext, vr bsonrw.ValueReader, reflectType reflect.Type) (reflect.Value, error) {
+ if reflectType != tTimestamp {
+ return emptyValue, ValueDecoderError{
+ Name: "TimestampDecodeValue",
+ Types: []reflect.Type{tTimestamp},
+ Received: reflect.Zero(reflectType),
+ }
+ }
+
+ var t, incr uint32
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.Timestamp:
+ t, incr, err = vr.ReadTimestamp()
+ case bsontype.Null:
+ err = vr.ReadNull()
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a Timestamp", vrType)
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(primitive.Timestamp{T: t, I: incr}), nil
+}
+
+// TimestampDecodeValue is the ValueDecoderFunc for Timestamp.
+func (dvd DefaultValueDecoders) TimestampDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tTimestamp {
+ return ValueDecoderError{Name: "TimestampDecodeValue", Types: []reflect.Type{tTimestamp}, Received: val}
+ }
+
+ elem, err := dvd.timestampDecodeType(dc, vr, tTimestamp)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
+
+func (DefaultValueDecoders) minKeyDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tMinKey {
+ return emptyValue, ValueDecoderError{
+ Name: "MinKeyDecodeValue",
+ Types: []reflect.Type{tMinKey},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.MinKey:
+ err = vr.ReadMinKey()
+ case bsontype.Null:
+ err = vr.ReadNull()
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a MinKey", vr.Type())
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(primitive.MinKey{}), nil
+}
+
+// MinKeyDecodeValue is the ValueDecoderFunc for MinKey.
+func (dvd DefaultValueDecoders) MinKeyDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tMinKey {
+ return ValueDecoderError{Name: "MinKeyDecodeValue", Types: []reflect.Type{tMinKey}, Received: val}
+ }
+
+ elem, err := dvd.minKeyDecodeType(dc, vr, tMinKey)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
+
+func (DefaultValueDecoders) maxKeyDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tMaxKey {
+ return emptyValue, ValueDecoderError{
+ Name: "MaxKeyDecodeValue",
+ Types: []reflect.Type{tMaxKey},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.MaxKey:
+ err = vr.ReadMaxKey()
+ case bsontype.Null:
+ err = vr.ReadNull()
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a MaxKey", vr.Type())
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(primitive.MaxKey{}), nil
+}
+
+// MaxKeyDecodeValue is the ValueDecoderFunc for MaxKey.
+func (dvd DefaultValueDecoders) MaxKeyDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tMaxKey {
+ return ValueDecoderError{Name: "MaxKeyDecodeValue", Types: []reflect.Type{tMaxKey}, Received: val}
+ }
+
+ elem, err := dvd.maxKeyDecodeType(dc, vr, tMaxKey)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
+
+func (dvd DefaultValueDecoders) decimal128DecodeType(dctx DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tDecimal {
+ return emptyValue, ValueDecoderError{
+ Name: "Decimal128DecodeValue",
+ Types: []reflect.Type{tDecimal},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var d128 primitive.Decimal128
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.Decimal128:
+ d128, err = vr.ReadDecimal128()
+ case bsontype.Null:
+ err = vr.ReadNull()
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a primitive.Decimal128", vr.Type())
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(d128), nil
+}
+
+// Decimal128DecodeValue is the ValueDecoderFunc for primitive.Decimal128.
+func (dvd DefaultValueDecoders) Decimal128DecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tDecimal {
+ return ValueDecoderError{Name: "Decimal128DecodeValue", Types: []reflect.Type{tDecimal}, Received: val}
+ }
+
+ elem, err := dvd.decimal128DecodeType(dctx, vr, tDecimal)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
+
+func (dvd DefaultValueDecoders) jsonNumberDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tJSONNumber {
+ return emptyValue, ValueDecoderError{
+ Name: "JSONNumberDecodeValue",
+ Types: []reflect.Type{tJSONNumber},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var jsonNum json.Number
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.Double:
+ f64, err := vr.ReadDouble()
+ if err != nil {
+ return emptyValue, err
+ }
+ jsonNum = json.Number(strconv.FormatFloat(f64, 'f', -1, 64))
+ case bsontype.Int32:
+ i32, err := vr.ReadInt32()
+ if err != nil {
+ return emptyValue, err
+ }
+ jsonNum = json.Number(strconv.FormatInt(int64(i32), 10))
+ case bsontype.Int64:
+ i64, err := vr.ReadInt64()
+ if err != nil {
+ return emptyValue, err
+ }
+ jsonNum = json.Number(strconv.FormatInt(i64, 10))
+ case bsontype.Null:
+ err = vr.ReadNull()
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a json.Number", vrType)
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(jsonNum), nil
+}
+
+// JSONNumberDecodeValue is the ValueDecoderFunc for json.Number.
+func (dvd DefaultValueDecoders) JSONNumberDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tJSONNumber {
+ return ValueDecoderError{Name: "JSONNumberDecodeValue", Types: []reflect.Type{tJSONNumber}, Received: val}
+ }
+
+ elem, err := dvd.jsonNumberDecodeType(dc, vr, tJSONNumber)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
+
+func (dvd DefaultValueDecoders) urlDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tURL {
+ return emptyValue, ValueDecoderError{
+ Name: "URLDecodeValue",
+ Types: []reflect.Type{tURL},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ urlPtr := &url.URL{}
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.String:
+ var str string // Declare str here to avoid shadowing err during the ReadString call.
+ str, err = vr.ReadString()
+ if err != nil {
+ return emptyValue, err
+ }
+
+ urlPtr, err = url.Parse(str)
+ case bsontype.Null:
+ err = vr.ReadNull()
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a *url.URL", vrType)
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(urlPtr).Elem(), nil
+}
+
+// URLDecodeValue is the ValueDecoderFunc for url.URL.
+func (dvd DefaultValueDecoders) URLDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tURL {
+ return ValueDecoderError{Name: "URLDecodeValue", Types: []reflect.Type{tURL}, Received: val}
+ }
+
+ elem, err := dvd.urlDecodeType(dc, vr, tURL)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
+
+// TimeDecodeValue is the ValueDecoderFunc for time.Time.
+//
+// Deprecated: TimeDecodeValue is not registered by default. Use TimeCodec.DecodeValue instead.
+func (dvd DefaultValueDecoders) TimeDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if vr.Type() != bsontype.DateTime {
+ return fmt.Errorf("cannot decode %v into a time.Time", vr.Type())
+ }
+
+ dt, err := vr.ReadDateTime()
+ if err != nil {
+ return err
+ }
+
+ if !val.CanSet() || val.Type() != tTime {
+ return ValueDecoderError{Name: "TimeDecodeValue", Types: []reflect.Type{tTime}, Received: val}
+ }
+
+ val.Set(reflect.ValueOf(time.Unix(dt/1000, dt%1000*1000000).UTC()))
+ return nil
+}
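+
+// The BSON DateTime read above is milliseconds since the Unix epoch, hence
+// the split into seconds and nanoseconds; a worked sketch:
+//
+//	dt := int64(1577836800001) // 2020-01-01T00:00:00.001Z
+//	t := time.Unix(dt/1000, dt%1000*1000000).UTC()
+//	_ = t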
+
+// ByteSliceDecodeValue is the ValueDecoderFunc for []byte.
+//
+// Deprecated: ByteSliceDecodeValue is not registered by default. Use ByteSliceCodec.DecodeValue instead.
+func (dvd DefaultValueDecoders) ByteSliceDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if vr.Type() != bsontype.Binary && vr.Type() != bsontype.Null {
+ return fmt.Errorf("cannot decode %v into a []byte", vr.Type())
+ }
+
+ if !val.CanSet() || val.Type() != tByteSlice {
+ return ValueDecoderError{Name: "ByteSliceDecodeValue", Types: []reflect.Type{tByteSlice}, Received: val}
+ }
+
+ if vr.Type() == bsontype.Null {
+ val.Set(reflect.Zero(val.Type()))
+ return vr.ReadNull()
+ }
+
+ data, subtype, err := vr.ReadBinary()
+ if err != nil {
+ return err
+ }
+ if subtype != 0x00 {
+ return fmt.Errorf("ByteSliceDecodeValue can only be used to decode subtype 0x00 for %s, got %v", bsontype.Binary, subtype)
+ }
+
+ val.Set(reflect.ValueOf(data))
+ return nil
+}
+
+// MapDecodeValue is the ValueDecoderFunc for map[string]* types.
+//
+// Deprecated: MapDecodeValue is not registered by default. Use MapCodec.DecodeValue instead.
+func (dvd DefaultValueDecoders) MapDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Kind() != reflect.Map || val.Type().Key().Kind() != reflect.String {
+ return ValueDecoderError{Name: "MapDecodeValue", Kinds: []reflect.Kind{reflect.Map}, Received: val}
+ }
+
+ switch vr.Type() {
+ case bsontype.Type(0), bsontype.EmbeddedDocument:
+ case bsontype.Null:
+ val.Set(reflect.Zero(val.Type()))
+ return vr.ReadNull()
+ default:
+ return fmt.Errorf("cannot decode %v into a %s", vr.Type(), val.Type())
+ }
+
+ dr, err := vr.ReadDocument()
+ if err != nil {
+ return err
+ }
+
+ if val.IsNil() {
+ val.Set(reflect.MakeMap(val.Type()))
+ }
+
+ eType := val.Type().Elem()
+ decoder, err := dc.LookupDecoder(eType)
+ if err != nil {
+ return err
+ }
+
+ if eType == tEmpty {
+ dc.Ancestor = val.Type()
+ }
+
+ keyType := val.Type().Key()
+ for {
+ key, vr, err := dr.ReadElement()
+ if err == bsonrw.ErrEOD {
+ break
+ }
+ if err != nil {
+ return err
+ }
+
+ elem := reflect.New(eType).Elem()
+
+ err = decoder.DecodeValue(dc, vr, elem)
+ if err != nil {
+ return err
+ }
+
+ val.SetMapIndex(reflect.ValueOf(key).Convert(keyType), elem)
+ }
+ return nil
+}
+
+// ArrayDecodeValue is the ValueDecoderFunc for array types.
+func (dvd DefaultValueDecoders) ArrayDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.IsValid() || val.Kind() != reflect.Array {
+ return ValueDecoderError{Name: "ArrayDecodeValue", Kinds: []reflect.Kind{reflect.Array}, Received: val}
+ }
+
+ switch vrType := vr.Type(); vrType {
+ case bsontype.Array:
+ case bsontype.Type(0), bsontype.EmbeddedDocument:
+ if val.Type().Elem() != tE {
+ return fmt.Errorf("cannot decode document into %s", val.Type())
+ }
+ case bsontype.Binary:
+ if val.Type().Elem() != tByte {
+ return fmt.Errorf("ArrayDecodeValue can only be used to decode binary into a byte array, got %v", vrType)
+ }
+ data, subtype, err := vr.ReadBinary()
+ if err != nil {
+ return err
+ }
+ if subtype != bsontype.BinaryGeneric && subtype != bsontype.BinaryBinaryOld {
+ return fmt.Errorf("ArrayDecodeValue can only be used to decode subtype 0x00 or 0x02 for %s, got %v", bsontype.Binary, subtype)
+ }
+
+ if len(data) > val.Len() {
+ return fmt.Errorf("more elements returned in array than can fit inside %s", val.Type())
+ }
+
+ for idx, elem := range data {
+ val.Index(idx).Set(reflect.ValueOf(elem))
+ }
+ return nil
+ case bsontype.Null:
+ val.Set(reflect.Zero(val.Type()))
+ return vr.ReadNull()
+ case bsontype.Undefined:
+ val.Set(reflect.Zero(val.Type()))
+ return vr.ReadUndefined()
+ default:
+ return fmt.Errorf("cannot decode %v into an array", vrType)
+ }
+
+ var elemsFunc func(DecodeContext, bsonrw.ValueReader, reflect.Value) ([]reflect.Value, error)
+ switch val.Type().Elem() {
+ case tE:
+ elemsFunc = dvd.decodeD
+ default:
+ elemsFunc = dvd.decodeDefault
+ }
+
+ elems, err := elemsFunc(dc, vr, val)
+ if err != nil {
+ return err
+ }
+
+ if len(elems) > val.Len() {
+ return fmt.Errorf("more elements returned in array than can fit inside %s, got %v elements", val.Type(), len(elems))
+ }
+
+ for idx, elem := range elems {
+ val.Index(idx).Set(elem)
+ }
+
+ return nil
+}
+
+// SliceDecodeValue is the ValueDecoderFunc for slice types.
+//
+// Deprecated: SliceDecodeValue is not registered by default. Use SliceCodec.DecodeValue instead.
+func (dvd DefaultValueDecoders) SliceDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Kind() != reflect.Slice {
+ return ValueDecoderError{Name: "SliceDecodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: val}
+ }
+
+ switch vr.Type() {
+ case bsontype.Array:
+ case bsontype.Null:
+ val.Set(reflect.Zero(val.Type()))
+ return vr.ReadNull()
+ case bsontype.Type(0), bsontype.EmbeddedDocument:
+ if val.Type().Elem() != tE {
+ return fmt.Errorf("cannot decode document into %s", val.Type())
+ }
+ default:
+ return fmt.Errorf("cannot decode %v into a slice", vr.Type())
+ }
+
+ var elemsFunc func(DecodeContext, bsonrw.ValueReader, reflect.Value) ([]reflect.Value, error)
+ switch val.Type().Elem() {
+ case tE:
+ dc.Ancestor = val.Type()
+ elemsFunc = dvd.decodeD
+ default:
+ elemsFunc = dvd.decodeDefault
+ }
+
+ elems, err := elemsFunc(dc, vr, val)
+ if err != nil {
+ return err
+ }
+
+ if val.IsNil() {
+ val.Set(reflect.MakeSlice(val.Type(), 0, len(elems)))
+ }
+
+ val.SetLen(0)
+ val.Set(reflect.Append(val, elems...))
+
+ return nil
+}
+
+// ValueUnmarshalerDecodeValue is the ValueDecoderFunc for ValueUnmarshaler implementations.
+func (dvd DefaultValueDecoders) ValueUnmarshalerDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.IsValid() || (!val.Type().Implements(tValueUnmarshaler) && !reflect.PtrTo(val.Type()).Implements(tValueUnmarshaler)) {
+ return ValueDecoderError{Name: "ValueUnmarshalerDecodeValue", Types: []reflect.Type{tValueUnmarshaler}, Received: val}
+ }
+
+ if val.Kind() == reflect.Ptr && val.IsNil() {
+ if !val.CanSet() {
+ return ValueDecoderError{Name: "ValueUnmarshalerDecodeValue", Types: []reflect.Type{tValueUnmarshaler}, Received: val}
+ }
+ val.Set(reflect.New(val.Type().Elem()))
+ }
+
+ if !val.Type().Implements(tValueUnmarshaler) {
+ if !val.CanAddr() {
+ return ValueDecoderError{Name: "ValueUnmarshalerDecodeValue", Types: []reflect.Type{tValueUnmarshaler}, Received: val}
+ }
+ val = val.Addr() // If the type doesn't implement the interface, a pointer to it must.
+ }
+
+ t, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
+ if err != nil {
+ return err
+ }
+
+ fn := val.Convert(tValueUnmarshaler).MethodByName("UnmarshalBSONValue")
+ errVal := fn.Call([]reflect.Value{reflect.ValueOf(t), reflect.ValueOf(src)})[0]
+ if !errVal.IsNil() {
+ return errVal.Interface().(error)
+ }
+ return nil
+}
+
+// UnmarshalerDecodeValue is the ValueDecoderFunc for Unmarshaler implementations.
+func (dvd DefaultValueDecoders) UnmarshalerDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.IsValid() || (!val.Type().Implements(tUnmarshaler) && !reflect.PtrTo(val.Type()).Implements(tUnmarshaler)) {
+ return ValueDecoderError{Name: "UnmarshalerDecodeValue", Types: []reflect.Type{tUnmarshaler}, Received: val}
+ }
+
+ if val.Kind() == reflect.Ptr && val.IsNil() {
+ if !val.CanSet() {
+ return ValueDecoderError{Name: "UnmarshalerDecodeValue", Types: []reflect.Type{tUnmarshaler}, Received: val}
+ }
+ val.Set(reflect.New(val.Type().Elem()))
+ }
+
+ _, src, err := bsonrw.Copier{}.CopyValueToBytes(vr)
+ if err != nil {
+ return err
+ }
+
+ // If the target Go value is a pointer and the BSON field value is empty, set the value to the
+ // zero value of the pointer (nil) and don't call UnmarshalBSON. UnmarshalBSON has no way to
+ // change the pointer value from within the function (only the value at the pointer address),
+ // so it can't set the pointer to "nil" itself. Since the most common Go value for an empty BSON
+ // field value is "nil", we set "nil" here and don't call UnmarshalBSON. This behavior matches
+ // the behavior of the Go "encoding/json" unmarshaler when the target Go value is a pointer and
+ // the JSON field value is "null".
+ if val.Kind() == reflect.Ptr && len(src) == 0 {
+ val.Set(reflect.Zero(val.Type()))
+ return nil
+ }
+
+ if !val.Type().Implements(tUnmarshaler) {
+ if !val.CanAddr() {
+ return ValueDecoderError{Name: "UnmarshalerDecodeValue", Types: []reflect.Type{tUnmarshaler}, Received: val}
+ }
+ val = val.Addr() // If the type doesn't implement the interface, a pointer to it must.
+ }
+
+ fn := val.Convert(tUnmarshaler).MethodByName("UnmarshalBSON")
+ errVal := fn.Call([]reflect.Value{reflect.ValueOf(src)})[0]
+ if !errVal.IsNil() {
+ return errVal.Interface().(error)
+ }
+ return nil
+}
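+
+// A type opts into this decode path by implementing bson.Unmarshaler; a
+// minimal sketch (rawDoc is a hypothetical example type):
+//
+//	type rawDoc struct{ data []byte }
+//
+//	func (d *rawDoc) UnmarshalBSON(b []byte) error {
+//		d.data = append(d.data[:0], b...)
+//		return nil
+//	}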
+
+// EmptyInterfaceDecodeValue is the ValueDecoderFunc for interface{}.
+//
+// Deprecated: EmptyInterfaceDecodeValue is not registered by default. Use EmptyInterfaceCodec.DecodeValue instead.
+func (dvd DefaultValueDecoders) EmptyInterfaceDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tEmpty {
+ return ValueDecoderError{Name: "EmptyInterfaceDecodeValue", Types: []reflect.Type{tEmpty}, Received: val}
+ }
+
+ rtype, err := dc.LookupTypeMapEntry(vr.Type())
+ if err != nil {
+ switch vr.Type() {
+ case bsontype.EmbeddedDocument:
+ if dc.Ancestor != nil {
+ rtype = dc.Ancestor
+ break
+ }
+ rtype = tD
+ case bsontype.Null:
+ val.Set(reflect.Zero(val.Type()))
+ return vr.ReadNull()
+ default:
+ return err
+ }
+ }
+
+ decoder, err := dc.LookupDecoder(rtype)
+ if err != nil {
+ return err
+ }
+
+ elem := reflect.New(rtype).Elem()
+ err = decoder.DecodeValue(dc, vr, elem)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
+
+// CoreDocumentDecodeValue is the ValueDecoderFunc for bsoncore.Document.
+func (DefaultValueDecoders) CoreDocumentDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tCoreDocument {
+ return ValueDecoderError{Name: "CoreDocumentDecodeValue", Types: []reflect.Type{tCoreDocument}, Received: val}
+ }
+
+ if val.IsNil() {
+ val.Set(reflect.MakeSlice(val.Type(), 0, 0))
+ }
+
+ val.SetLen(0)
+
+ cdoc, err := bsonrw.Copier{}.AppendDocumentBytes(val.Interface().(bsoncore.Document), vr)
+ val.Set(reflect.ValueOf(cdoc))
+ return err
+}
+
+func (dvd DefaultValueDecoders) decodeDefault(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) ([]reflect.Value, error) {
+ elems := make([]reflect.Value, 0)
+
+ ar, err := vr.ReadArray()
+ if err != nil {
+ return nil, err
+ }
+
+ eType := val.Type().Elem()
+
+ decoder, err := dc.LookupDecoder(eType)
+ if err != nil {
+ return nil, err
+ }
+ eTypeDecoder, _ := decoder.(typeDecoder)
+
+ idx := 0
+ for {
+ vr, err := ar.ReadValue()
+ if err == bsonrw.ErrEOA {
+ break
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ elem, err := decodeTypeOrValueWithInfo(decoder, eTypeDecoder, dc, vr, eType, true)
+ if err != nil {
+ return nil, newDecodeError(strconv.Itoa(idx), err)
+ }
+ elems = append(elems, elem)
+ idx++
+ }
+
+ return elems, nil
+}
+
+func (dvd DefaultValueDecoders) readCodeWithScope(dc DecodeContext, vr bsonrw.ValueReader) (primitive.CodeWithScope, error) {
+ var cws primitive.CodeWithScope
+
+ code, dr, err := vr.ReadCodeWithScope()
+ if err != nil {
+ return cws, err
+ }
+
+ scope := reflect.New(tD).Elem()
+ elems, err := dvd.decodeElemsFromDocumentReader(dc, dr)
+ if err != nil {
+ return cws, err
+ }
+
+ scope.Set(reflect.MakeSlice(tD, 0, len(elems)))
+ scope.Set(reflect.Append(scope, elems...))
+
+ cws = primitive.CodeWithScope{
+ Code: primitive.JavaScript(code),
+ Scope: scope.Interface().(primitive.D),
+ }
+ return cws, nil
+}
+
+func (dvd DefaultValueDecoders) codeWithScopeDecodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tCodeWithScope {
+ return emptyValue, ValueDecoderError{
+ Name: "CodeWithScopeDecodeValue",
+ Types: []reflect.Type{tCodeWithScope},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var cws primitive.CodeWithScope
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.CodeWithScope:
+ cws, err = dvd.readCodeWithScope(dc, vr)
+ case bsontype.Null:
+ err = vr.ReadNull()
+ case bsontype.Undefined:
+ err = vr.ReadUndefined()
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a primitive.CodeWithScope", vrType)
+ }
+ if err != nil {
+ return emptyValue, err
+ }
+
+ return reflect.ValueOf(cws), nil
+}
+
+// CodeWithScopeDecodeValue is the ValueDecoderFunc for CodeWithScope.
+func (dvd DefaultValueDecoders) CodeWithScopeDecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tCodeWithScope {
+ return ValueDecoderError{Name: "CodeWithScopeDecodeValue", Types: []reflect.Type{tCodeWithScope}, Received: val}
+ }
+
+ elem, err := dvd.codeWithScopeDecodeType(dc, vr, tCodeWithScope)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
+
+func (dvd DefaultValueDecoders) decodeD(dc DecodeContext, vr bsonrw.ValueReader, _ reflect.Value) ([]reflect.Value, error) {
+ switch vr.Type() {
+ case bsontype.Type(0), bsontype.EmbeddedDocument:
+ default:
+ return nil, fmt.Errorf("cannot decode %v into a D", vr.Type())
+ }
+
+ dr, err := vr.ReadDocument()
+ if err != nil {
+ return nil, err
+ }
+
+ return dvd.decodeElemsFromDocumentReader(dc, dr)
+}
+
+func (DefaultValueDecoders) decodeElemsFromDocumentReader(dc DecodeContext, dr bsonrw.DocumentReader) ([]reflect.Value, error) {
+ decoder, err := dc.LookupDecoder(tEmpty)
+ if err != nil {
+ return nil, err
+ }
+
+ elems := make([]reflect.Value, 0)
+ for {
+ key, vr, err := dr.ReadElement()
+ if err == bsonrw.ErrEOD {
+ break
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ val := reflect.New(tEmpty).Elem()
+ err = decoder.DecodeValue(dc, vr, val)
+ if err != nil {
+ return nil, newDecodeError(key, err)
+ }
+
+ elems = append(elems, reflect.ValueOf(primitive.E{Key: key, Value: val.Interface()}))
+ }
+
+ return elems, nil
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_encoders.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_encoders.go
new file mode 100644
index 000000000..6bdb43cb4
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/default_value_encoders.go
@@ -0,0 +1,766 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "math"
+ "net/url"
+ "reflect"
+ "sync"
+ "time"
+
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+ "go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
+)
+
+var defaultValueEncoders DefaultValueEncoders
+
+var bvwPool = bsonrw.NewBSONValueWriterPool()
+
+var errInvalidValue = errors.New("cannot encode invalid element")
+
+var sliceWriterPool = sync.Pool{
+ New: func() interface{} {
+ sw := make(bsonrw.SliceWriter, 0)
+ return &sw
+ },
+}
+
+func encodeElement(ec EncodeContext, dw bsonrw.DocumentWriter, e primitive.E) error {
+ vw, err := dw.WriteDocumentElement(e.Key)
+ if err != nil {
+ return err
+ }
+
+ if e.Value == nil {
+ return vw.WriteNull()
+ }
+ encoder, err := ec.LookupEncoder(reflect.TypeOf(e.Value))
+ if err != nil {
+ return err
+ }
+
+ err = encoder.EncodeValue(ec, vw, reflect.ValueOf(e.Value))
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+// DefaultValueEncoders is a namespace type for the default ValueEncoders used
+// when creating a registry.
+type DefaultValueEncoders struct{}
+
+// RegisterDefaultEncoders will register the encoder methods attached to DefaultValueEncoders with
+// the provided RegistryBuilder.
+func (dve DefaultValueEncoders) RegisterDefaultEncoders(rb *RegistryBuilder) {
+ if rb == nil {
+ panic(errors.New("argument to RegisterDefaultEncoders must not be nil"))
+ }
+ rb.
+ RegisterTypeEncoder(tByteSlice, defaultByteSliceCodec).
+ RegisterTypeEncoder(tTime, defaultTimeCodec).
+ RegisterTypeEncoder(tEmpty, defaultEmptyInterfaceCodec).
+ RegisterTypeEncoder(tCoreArray, defaultArrayCodec).
+ RegisterTypeEncoder(tOID, ValueEncoderFunc(dve.ObjectIDEncodeValue)).
+ RegisterTypeEncoder(tDecimal, ValueEncoderFunc(dve.Decimal128EncodeValue)).
+ RegisterTypeEncoder(tJSONNumber, ValueEncoderFunc(dve.JSONNumberEncodeValue)).
+ RegisterTypeEncoder(tURL, ValueEncoderFunc(dve.URLEncodeValue)).
+ RegisterTypeEncoder(tJavaScript, ValueEncoderFunc(dve.JavaScriptEncodeValue)).
+ RegisterTypeEncoder(tSymbol, ValueEncoderFunc(dve.SymbolEncodeValue)).
+ RegisterTypeEncoder(tBinary, ValueEncoderFunc(dve.BinaryEncodeValue)).
+ RegisterTypeEncoder(tUndefined, ValueEncoderFunc(dve.UndefinedEncodeValue)).
+ RegisterTypeEncoder(tDateTime, ValueEncoderFunc(dve.DateTimeEncodeValue)).
+ RegisterTypeEncoder(tNull, ValueEncoderFunc(dve.NullEncodeValue)).
+ RegisterTypeEncoder(tRegex, ValueEncoderFunc(dve.RegexEncodeValue)).
+ RegisterTypeEncoder(tDBPointer, ValueEncoderFunc(dve.DBPointerEncodeValue)).
+ RegisterTypeEncoder(tTimestamp, ValueEncoderFunc(dve.TimestampEncodeValue)).
+ RegisterTypeEncoder(tMinKey, ValueEncoderFunc(dve.MinKeyEncodeValue)).
+ RegisterTypeEncoder(tMaxKey, ValueEncoderFunc(dve.MaxKeyEncodeValue)).
+ RegisterTypeEncoder(tCoreDocument, ValueEncoderFunc(dve.CoreDocumentEncodeValue)).
+ RegisterTypeEncoder(tCodeWithScope, ValueEncoderFunc(dve.CodeWithScopeEncodeValue)).
+ RegisterDefaultEncoder(reflect.Bool, ValueEncoderFunc(dve.BooleanEncodeValue)).
+ RegisterDefaultEncoder(reflect.Int, ValueEncoderFunc(dve.IntEncodeValue)).
+ RegisterDefaultEncoder(reflect.Int8, ValueEncoderFunc(dve.IntEncodeValue)).
+ RegisterDefaultEncoder(reflect.Int16, ValueEncoderFunc(dve.IntEncodeValue)).
+ RegisterDefaultEncoder(reflect.Int32, ValueEncoderFunc(dve.IntEncodeValue)).
+ RegisterDefaultEncoder(reflect.Int64, ValueEncoderFunc(dve.IntEncodeValue)).
+ RegisterDefaultEncoder(reflect.Uint, defaultUIntCodec).
+ RegisterDefaultEncoder(reflect.Uint8, defaultUIntCodec).
+ RegisterDefaultEncoder(reflect.Uint16, defaultUIntCodec).
+ RegisterDefaultEncoder(reflect.Uint32, defaultUIntCodec).
+ RegisterDefaultEncoder(reflect.Uint64, defaultUIntCodec).
+ RegisterDefaultEncoder(reflect.Float32, ValueEncoderFunc(dve.FloatEncodeValue)).
+ RegisterDefaultEncoder(reflect.Float64, ValueEncoderFunc(dve.FloatEncodeValue)).
+ RegisterDefaultEncoder(reflect.Array, ValueEncoderFunc(dve.ArrayEncodeValue)).
+ RegisterDefaultEncoder(reflect.Map, defaultMapCodec).
+ RegisterDefaultEncoder(reflect.Slice, defaultSliceCodec).
+ RegisterDefaultEncoder(reflect.String, defaultStringCodec).
+ RegisterDefaultEncoder(reflect.Struct, newDefaultStructCodec()).
+ RegisterDefaultEncoder(reflect.Ptr, NewPointerCodec()).
+ RegisterHookEncoder(tValueMarshaler, ValueEncoderFunc(dve.ValueMarshalerEncodeValue)).
+ RegisterHookEncoder(tMarshaler, ValueEncoderFunc(dve.MarshalerEncodeValue)).
+ RegisterHookEncoder(tProxy, ValueEncoderFunc(dve.ProxyEncodeValue))
+}
+
+// BooleanEncodeValue is the ValueEncoderFunc for bool types.
+func (dve DefaultValueEncoders) BooleanEncodeValue(ectx EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Kind() != reflect.Bool {
+ return ValueEncoderError{Name: "BooleanEncodeValue", Kinds: []reflect.Kind{reflect.Bool}, Received: val}
+ }
+ return vw.WriteBoolean(val.Bool())
+}
+
+func fitsIn32Bits(i int64) bool {
+ return math.MinInt32 <= i && i <= math.MaxInt32
+}
+
+// IntEncodeValue is the ValueEncoderFunc for int types.
+func (dve DefaultValueEncoders) IntEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ switch val.Kind() {
+ case reflect.Int8, reflect.Int16, reflect.Int32:
+ return vw.WriteInt32(int32(val.Int()))
+ case reflect.Int:
+ i64 := val.Int()
+ if fitsIn32Bits(i64) {
+ return vw.WriteInt32(int32(i64))
+ }
+ return vw.WriteInt64(i64)
+ case reflect.Int64:
+ i64 := val.Int()
+ if ec.MinSize && fitsIn32Bits(i64) {
+ return vw.WriteInt32(int32(i64))
+ }
+ return vw.WriteInt64(i64)
+ }
+
+ return ValueEncoderError{
+ Name: "IntEncodeValue",
+ Kinds: []reflect.Kind{reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int},
+ Received: val,
+ }
+}
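+
+// When EncodeContext.MinSize is set, int64 values that fit in 32 bits are
+// downsized; an illustrative sketch:
+//
+//	ec := EncodeContext{MinSize: true}
+//	// encoding int64(7) under ec yields a BSON int32 element,
+//	// while int64(1 << 40) still yields a BSON int64.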
+
+// UintEncodeValue is the ValueEncoderFunc for uint types.
+//
+// Deprecated: UintEncodeValue is not registered by default. Use UintCodec.EncodeValue instead.
+func (dve DefaultValueEncoders) UintEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ switch val.Kind() {
+ case reflect.Uint8, reflect.Uint16:
+ return vw.WriteInt32(int32(val.Uint()))
+ case reflect.Uint, reflect.Uint32, reflect.Uint64:
+ u64 := val.Uint()
+ if ec.MinSize && u64 <= math.MaxInt32 {
+ return vw.WriteInt32(int32(u64))
+ }
+ if u64 > math.MaxInt64 {
+ return fmt.Errorf("%d overflows int64", u64)
+ }
+ return vw.WriteInt64(int64(u64))
+ }
+
+ return ValueEncoderError{
+ Name: "UintEncodeValue",
+ Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint},
+ Received: val,
+ }
+}
+
+// FloatEncodeValue is the ValueEncoderFunc for float types.
+func (dve DefaultValueEncoders) FloatEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ switch val.Kind() {
+ case reflect.Float32, reflect.Float64:
+ return vw.WriteDouble(val.Float())
+ }
+
+ return ValueEncoderError{Name: "FloatEncodeValue", Kinds: []reflect.Kind{reflect.Float32, reflect.Float64}, Received: val}
+}
+
+// StringEncodeValue is the ValueEncoderFunc for string types.
+//
+// Deprecated: StringEncodeValue is not registered by default. Use StringCodec.EncodeValue instead.
+func (dve DefaultValueEncoders) StringEncodeValue(ectx EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if val.Kind() != reflect.String {
+ return ValueEncoderError{
+ Name: "StringEncodeValue",
+ Kinds: []reflect.Kind{reflect.String},
+ Received: val,
+ }
+ }
+
+ return vw.WriteString(val.String())
+}
+
+// ObjectIDEncodeValue is the ValueEncoderFunc for primitive.ObjectID.
+func (dve DefaultValueEncoders) ObjectIDEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tOID {
+ return ValueEncoderError{Name: "ObjectIDEncodeValue", Types: []reflect.Type{tOID}, Received: val}
+ }
+ return vw.WriteObjectID(val.Interface().(primitive.ObjectID))
+}
+
+// Decimal128EncodeValue is the ValueEncoderFunc for primitive.Decimal128.
+func (dve DefaultValueEncoders) Decimal128EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tDecimal {
+ return ValueEncoderError{Name: "Decimal128EncodeValue", Types: []reflect.Type{tDecimal}, Received: val}
+ }
+ return vw.WriteDecimal128(val.Interface().(primitive.Decimal128))
+}
+
+// JSONNumberEncodeValue is the ValueEncoderFunc for json.Number.
+func (dve DefaultValueEncoders) JSONNumberEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tJSONNumber {
+ return ValueEncoderError{Name: "JSONNumberEncodeValue", Types: []reflect.Type{tJSONNumber}, Received: val}
+ }
+ jsnum := val.Interface().(json.Number)
+
+ // Attempt int first, then float64
+ if i64, err := jsnum.Int64(); err == nil {
+ return dve.IntEncodeValue(ec, vw, reflect.ValueOf(i64))
+ }
+
+ f64, err := jsnum.Float64()
+ if err != nil {
+ return err
+ }
+
+ return dve.FloatEncodeValue(ec, vw, reflect.ValueOf(f64))
+}
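+
+// Sketch of the dispatch above: json.Number("42") satisfies Int64() and is
+// written via IntEncodeValue, while json.Number("3.14") fails Int64() but
+// succeeds Float64() and is written as a BSON double:
+//
+//	n := json.Number("3.14")
+//	f, _ := n.Float64() // f == 3.14, encoded with WriteDouble
+//	_ = f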
+
+// URLEncodeValue is the ValueEncoderFunc for url.URL.
+func (dve DefaultValueEncoders) URLEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tURL {
+ return ValueEncoderError{Name: "URLEncodeValue", Types: []reflect.Type{tURL}, Received: val}
+ }
+ u := val.Interface().(url.URL)
+ return vw.WriteString(u.String())
+}
+
+// TimeEncodeValue is the ValueEncoderFunc for time.Time.
+//
+// Deprecated: TimeEncodeValue is not registered by default. Use TimeCodec.EncodeValue instead.
+func (dve DefaultValueEncoders) TimeEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tTime {
+ return ValueEncoderError{Name: "TimeEncodeValue", Types: []reflect.Type{tTime}, Received: val}
+ }
+ tt := val.Interface().(time.Time)
+ dt := primitive.NewDateTimeFromTime(tt)
+ return vw.WriteDateTime(int64(dt))
+}
+
+// ByteSliceEncodeValue is the ValueEncoderFunc for []byte.
+//
+// Deprecated: ByteSliceEncodeValue is not registered by default. Use ByteSliceCodec.EncodeValue instead.
+func (dve DefaultValueEncoders) ByteSliceEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tByteSlice {
+ return ValueEncoderError{Name: "ByteSliceEncodeValue", Types: []reflect.Type{tByteSlice}, Received: val}
+ }
+ if val.IsNil() {
+ return vw.WriteNull()
+ }
+ return vw.WriteBinary(val.Interface().([]byte))
+}
+
+// MapEncodeValue is the ValueEncoderFunc for map[string]* types.
+//
+// Deprecated: MapEncodeValue is not registered by default. Use MapCodec.EncodeValue instead.
+func (dve DefaultValueEncoders) MapEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Kind() != reflect.Map || val.Type().Key().Kind() != reflect.String {
+ return ValueEncoderError{Name: "MapEncodeValue", Kinds: []reflect.Kind{reflect.Map}, Received: val}
+ }
+
+ if val.IsNil() {
+ // If we have a nil map but can't WriteNull, we're probably trying to encode
+ // it as a top-level document. We can't currently tell whether that is what happened, but if
+ // there's a deeper underlying problem, the same error will also be returned from WriteDocument,
+ // so just continue. The operations on a map reflection value are valid, so we can call
+ // MapKeys within mapEncodeValue without a problem.
+ err := vw.WriteNull()
+ if err == nil {
+ return nil
+ }
+ }
+
+ dw, err := vw.WriteDocument()
+ if err != nil {
+ return err
+ }
+
+ return dve.mapEncodeValue(ec, dw, val, nil)
+}
+
+// mapEncodeValue handles encoding of the values of a map. The collisionFn returns
+// true if the provided key exists; it is mainly used for inline maps in the
+// struct codec.
+func (dve DefaultValueEncoders) mapEncodeValue(ec EncodeContext, dw bsonrw.DocumentWriter, val reflect.Value, collisionFn func(string) bool) error {
+
+ elemType := val.Type().Elem()
+ encoder, err := ec.LookupEncoder(elemType)
+ if err != nil && elemType.Kind() != reflect.Interface {
+ return err
+ }
+
+ keys := val.MapKeys()
+ for _, key := range keys {
+ if collisionFn != nil && collisionFn(key.String()) {
+ return fmt.Errorf("Key %s of inlined map conflicts with a struct field name", key)
+ }
+
+ currEncoder, currVal, lookupErr := dve.lookupElementEncoder(ec, encoder, val.MapIndex(key))
+ if lookupErr != nil && lookupErr != errInvalidValue {
+ return lookupErr
+ }
+
+ vw, err := dw.WriteDocumentElement(key.String())
+ if err != nil {
+ return err
+ }
+
+ if lookupErr == errInvalidValue {
+ err = vw.WriteNull()
+ if err != nil {
+ return err
+ }
+ continue
+ }
+
+ err = currEncoder.EncodeValue(ec, vw, currVal)
+ if err != nil {
+ return err
+ }
+ }
+
+ return dw.WriteDocumentEnd()
+}
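+
+// Sketch of a collisionFn as the struct codec might supply one (fieldNames is
+// a hypothetical set of known struct field names):
+//
+//	collision := func(key string) bool {
+//		_, ok := fieldNames[key]
+//		return ok
+//	}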
+
+// ArrayEncodeValue is the ValueEncoderFunc for array types.
+func (dve DefaultValueEncoders) ArrayEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Kind() != reflect.Array {
+ return ValueEncoderError{Name: "ArrayEncodeValue", Kinds: []reflect.Kind{reflect.Array}, Received: val}
+ }
+
+ // If we have a []primitive.E we want to treat it as a document instead of as an array.
+ if val.Type().Elem() == tE {
+ dw, err := vw.WriteDocument()
+ if err != nil {
+ return err
+ }
+
+ for idx := 0; idx < val.Len(); idx++ {
+ e := val.Index(idx).Interface().(primitive.E)
+ err = encodeElement(ec, dw, e)
+ if err != nil {
+ return err
+ }
+ }
+
+ return dw.WriteDocumentEnd()
+ }
+
+ // If we have a []byte we want to treat it as a binary instead of as an array.
+ if val.Type().Elem() == tByte {
+ var byteSlice []byte
+ for idx := 0; idx < val.Len(); idx++ {
+ byteSlice = append(byteSlice, val.Index(idx).Interface().(byte))
+ }
+ return vw.WriteBinary(byteSlice)
+ }
+
+ aw, err := vw.WriteArray()
+ if err != nil {
+ return err
+ }
+
+ elemType := val.Type().Elem()
+ encoder, err := ec.LookupEncoder(elemType)
+ if err != nil && elemType.Kind() != reflect.Interface {
+ return err
+ }
+
+ for idx := 0; idx < val.Len(); idx++ {
+ currEncoder, currVal, lookupErr := dve.lookupElementEncoder(ec, encoder, val.Index(idx))
+ if lookupErr != nil && lookupErr != errInvalidValue {
+ return lookupErr
+ }
+
+ vw, err := aw.WriteArrayElement()
+ if err != nil {
+ return err
+ }
+
+ if lookupErr == errInvalidValue {
+ err = vw.WriteNull()
+ if err != nil {
+ return err
+ }
+ continue
+ }
+
+ err = currEncoder.EncodeValue(ec, vw, currVal)
+ if err != nil {
+ return err
+ }
+ }
+ return aw.WriteArrayEnd()
+}
+
+// SliceEncodeValue is the ValueEncoderFunc for slice types.
+//
+// Deprecated: SliceEncodeValue is not registered by default. Use SliceCodec.EncodeValue instead.
+func (dve DefaultValueEncoders) SliceEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Kind() != reflect.Slice {
+ return ValueEncoderError{Name: "SliceEncodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: val}
+ }
+
+ if val.IsNil() {
+ return vw.WriteNull()
+ }
+
+ // If we have a primitive.D (or a slice type convertible to it) we want to treat it as a document instead of as an array.
+ if val.Type().ConvertibleTo(tD) {
+ d := val.Convert(tD).Interface().(primitive.D)
+
+ dw, err := vw.WriteDocument()
+ if err != nil {
+ return err
+ }
+
+ for _, e := range d {
+ err = encodeElement(ec, dw, e)
+ if err != nil {
+ return err
+ }
+ }
+
+ return dw.WriteDocumentEnd()
+ }
+
+ aw, err := vw.WriteArray()
+ if err != nil {
+ return err
+ }
+
+ elemType := val.Type().Elem()
+ encoder, err := ec.LookupEncoder(elemType)
+ if err != nil && elemType.Kind() != reflect.Interface {
+ return err
+ }
+
+ for idx := 0; idx < val.Len(); idx++ {
+ currEncoder, currVal, lookupErr := dve.lookupElementEncoder(ec, encoder, val.Index(idx))
+ if lookupErr != nil && lookupErr != errInvalidValue {
+ return lookupErr
+ }
+
+ vw, err := aw.WriteArrayElement()
+ if err != nil {
+ return err
+ }
+
+ if lookupErr == errInvalidValue {
+ err = vw.WriteNull()
+ if err != nil {
+ return err
+ }
+ continue
+ }
+
+ err = currEncoder.EncodeValue(ec, vw, currVal)
+ if err != nil {
+ return err
+ }
+ }
+ return aw.WriteArrayEnd()
+}
+
+func (dve DefaultValueEncoders) lookupElementEncoder(ec EncodeContext, origEncoder ValueEncoder, currVal reflect.Value) (ValueEncoder, reflect.Value, error) {
+ if origEncoder != nil || (currVal.Kind() != reflect.Interface) {
+ return origEncoder, currVal, nil
+ }
+ currVal = currVal.Elem()
+ if !currVal.IsValid() {
+ return nil, currVal, errInvalidValue
+ }
+ currEncoder, err := ec.LookupEncoder(currVal.Type())
+
+ return currEncoder, currVal, err
+}
+
+// EmptyInterfaceEncodeValue is the ValueEncoderFunc for interface{}.
+//
+// Deprecated: EmptyInterfaceEncodeValue is not registered by default. Use EmptyInterfaceCodec.EncodeValue instead.
+func (dve DefaultValueEncoders) EmptyInterfaceEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tEmpty {
+ return ValueEncoderError{Name: "EmptyInterfaceEncodeValue", Types: []reflect.Type{tEmpty}, Received: val}
+ }
+
+ if val.IsNil() {
+ return vw.WriteNull()
+ }
+ encoder, err := ec.LookupEncoder(val.Elem().Type())
+ if err != nil {
+ return err
+ }
+
+ return encoder.EncodeValue(ec, vw, val.Elem())
+}
+
+// ValueMarshalerEncodeValue is the ValueEncoderFunc for ValueMarshaler implementations.
+func (dve DefaultValueEncoders) ValueMarshalerEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ // Either val or a pointer to val must implement ValueMarshaler
+ switch {
+ case !val.IsValid():
+ return ValueEncoderError{Name: "ValueMarshalerEncodeValue", Types: []reflect.Type{tValueMarshaler}, Received: val}
+ case val.Type().Implements(tValueMarshaler):
+ // If ValueMarshaler is implemented on a concrete type, make sure that val isn't a nil pointer
+ if isImplementationNil(val, tValueMarshaler) {
+ return vw.WriteNull()
+ }
+ case reflect.PtrTo(val.Type()).Implements(tValueMarshaler) && val.CanAddr():
+ val = val.Addr()
+ default:
+ return ValueEncoderError{Name: "ValueMarshalerEncodeValue", Types: []reflect.Type{tValueMarshaler}, Received: val}
+ }
+
+ fn := val.Convert(tValueMarshaler).MethodByName("MarshalBSONValue")
+ returns := fn.Call(nil)
+ if !returns[2].IsNil() {
+ return returns[2].Interface().(error)
+ }
+ t, data := returns[0].Interface().(bsontype.Type), returns[1].Interface().([]byte)
+ return bsonrw.Copier{}.CopyValueFromBytes(vw, t, data)
+}
+
+// MarshalerEncodeValue is the ValueEncoderFunc for Marshaler implementations.
+func (dve DefaultValueEncoders) MarshalerEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ // Either val or a pointer to val must implement Marshaler
+ switch {
+ case !val.IsValid():
+ return ValueEncoderError{Name: "MarshalerEncodeValue", Types: []reflect.Type{tMarshaler}, Received: val}
+ case val.Type().Implements(tMarshaler):
+ // If Marshaler is implemented on a concrete type, make sure that val isn't a nil pointer
+ if isImplementationNil(val, tMarshaler) {
+ return vw.WriteNull()
+ }
+ case reflect.PtrTo(val.Type()).Implements(tMarshaler) && val.CanAddr():
+ val = val.Addr()
+ default:
+ return ValueEncoderError{Name: "MarshalerEncodeValue", Types: []reflect.Type{tMarshaler}, Received: val}
+ }
+
+ fn := val.Convert(tMarshaler).MethodByName("MarshalBSON")
+ returns := fn.Call(nil)
+ if !returns[1].IsNil() {
+ return returns[1].Interface().(error)
+ }
+ data := returns[0].Interface().([]byte)
+ return bsonrw.Copier{}.CopyValueFromBytes(vw, bsontype.EmbeddedDocument, data)
+}
+
+// ProxyEncodeValue is the ValueEncoderFunc for Proxy implementations.
+func (dve DefaultValueEncoders) ProxyEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ // Either val or a pointer to val must implement Proxy
+ switch {
+ case !val.IsValid():
+ return ValueEncoderError{Name: "ProxyEncodeValue", Types: []reflect.Type{tProxy}, Received: val}
+ case val.Type().Implements(tProxy):
+ // If Proxy is implemented on a concrete type, make sure that val isn't a nil pointer
+ if isImplementationNil(val, tProxy) {
+ return vw.WriteNull()
+ }
+ case reflect.PtrTo(val.Type()).Implements(tProxy) && val.CanAddr():
+ val = val.Addr()
+ default:
+ return ValueEncoderError{Name: "ProxyEncodeValue", Types: []reflect.Type{tProxy}, Received: val}
+ }
+
+ fn := val.Convert(tProxy).MethodByName("ProxyBSON")
+ returns := fn.Call(nil)
+ if !returns[1].IsNil() {
+ return returns[1].Interface().(error)
+ }
+ data := returns[0]
+ var encoder ValueEncoder
+ var err error
+ if data.Elem().IsValid() {
+ encoder, err = ec.LookupEncoder(data.Elem().Type())
+ } else {
+ encoder, err = ec.LookupEncoder(nil)
+ }
+ if err != nil {
+ return err
+ }
+ return encoder.EncodeValue(ec, vw, data.Elem())
+}
+
+// JavaScriptEncodeValue is the ValueEncoderFunc for the primitive.JavaScript type.
+func (DefaultValueEncoders) JavaScriptEncodeValue(ectx EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tJavaScript {
+ return ValueEncoderError{Name: "JavaScriptEncodeValue", Types: []reflect.Type{tJavaScript}, Received: val}
+ }
+
+ return vw.WriteJavascript(val.String())
+}
+
+// SymbolEncodeValue is the ValueEncoderFunc for the primitive.Symbol type.
+func (DefaultValueEncoders) SymbolEncodeValue(ectx EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tSymbol {
+ return ValueEncoderError{Name: "SymbolEncodeValue", Types: []reflect.Type{tSymbol}, Received: val}
+ }
+
+ return vw.WriteSymbol(val.String())
+}
+
+// BinaryEncodeValue is the ValueEncoderFunc for Binary.
+func (DefaultValueEncoders) BinaryEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tBinary {
+ return ValueEncoderError{Name: "BinaryEncodeValue", Types: []reflect.Type{tBinary}, Received: val}
+ }
+ b := val.Interface().(primitive.Binary)
+
+ return vw.WriteBinaryWithSubtype(b.Data, b.Subtype)
+}
+
+// UndefinedEncodeValue is the ValueEncoderFunc for Undefined.
+func (DefaultValueEncoders) UndefinedEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tUndefined {
+ return ValueEncoderError{Name: "UndefinedEncodeValue", Types: []reflect.Type{tUndefined}, Received: val}
+ }
+
+ return vw.WriteUndefined()
+}
+
+// DateTimeEncodeValue is the ValueEncoderFunc for DateTime.
+func (DefaultValueEncoders) DateTimeEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tDateTime {
+ return ValueEncoderError{Name: "DateTimeEncodeValue", Types: []reflect.Type{tDateTime}, Received: val}
+ }
+
+ return vw.WriteDateTime(val.Int())
+}
+
+// NullEncodeValue is the ValueEncoderFunc for Null.
+func (DefaultValueEncoders) NullEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tNull {
+ return ValueEncoderError{Name: "NullEncodeValue", Types: []reflect.Type{tNull}, Received: val}
+ }
+
+ return vw.WriteNull()
+}
+
+// RegexEncodeValue is the ValueEncoderFunc for Regex.
+func (DefaultValueEncoders) RegexEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tRegex {
+ return ValueEncoderError{Name: "RegexEncodeValue", Types: []reflect.Type{tRegex}, Received: val}
+ }
+
+ regex := val.Interface().(primitive.Regex)
+
+ return vw.WriteRegex(regex.Pattern, regex.Options)
+}
+
+// DBPointerEncodeValue is the ValueEncoderFunc for DBPointer.
+func (DefaultValueEncoders) DBPointerEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tDBPointer {
+ return ValueEncoderError{Name: "DBPointerEncodeValue", Types: []reflect.Type{tDBPointer}, Received: val}
+ }
+
+ dbp := val.Interface().(primitive.DBPointer)
+
+ return vw.WriteDBPointer(dbp.DB, dbp.Pointer)
+}
+
+// TimestampEncodeValue is the ValueEncoderFunc for Timestamp.
+func (DefaultValueEncoders) TimestampEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tTimestamp {
+ return ValueEncoderError{Name: "TimestampEncodeValue", Types: []reflect.Type{tTimestamp}, Received: val}
+ }
+
+ ts := val.Interface().(primitive.Timestamp)
+
+ return vw.WriteTimestamp(ts.T, ts.I)
+}
+
+// MinKeyEncodeValue is the ValueEncoderFunc for MinKey.
+func (DefaultValueEncoders) MinKeyEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tMinKey {
+ return ValueEncoderError{Name: "MinKeyEncodeValue", Types: []reflect.Type{tMinKey}, Received: val}
+ }
+
+ return vw.WriteMinKey()
+}
+
+// MaxKeyEncodeValue is the ValueEncoderFunc for MaxKey.
+func (DefaultValueEncoders) MaxKeyEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tMaxKey {
+ return ValueEncoderError{Name: "MaxKeyEncodeValue", Types: []reflect.Type{tMaxKey}, Received: val}
+ }
+
+ return vw.WriteMaxKey()
+}
+
+// CoreDocumentEncodeValue is the ValueEncoderFunc for bsoncore.Document.
+func (DefaultValueEncoders) CoreDocumentEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tCoreDocument {
+ return ValueEncoderError{Name: "CoreDocumentEncodeValue", Types: []reflect.Type{tCoreDocument}, Received: val}
+ }
+
+ cdoc := val.Interface().(bsoncore.Document)
+
+ return bsonrw.Copier{}.CopyDocumentFromBytes(vw, cdoc)
+}
+
+// CodeWithScopeEncodeValue is the ValueEncoderFunc for CodeWithScope.
+func (dve DefaultValueEncoders) CodeWithScopeEncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tCodeWithScope {
+ return ValueEncoderError{Name: "CodeWithScopeEncodeValue", Types: []reflect.Type{tCodeWithScope}, Received: val}
+ }
+
+ cws := val.Interface().(primitive.CodeWithScope)
+
+ dw, err := vw.WriteCodeWithScope(string(cws.Code))
+ if err != nil {
+ return err
+ }
+
+ sw := sliceWriterPool.Get().(*bsonrw.SliceWriter)
+ defer sliceWriterPool.Put(sw)
+ *sw = (*sw)[:0]
+
+ scopeVW := bvwPool.Get(sw)
+ defer bvwPool.Put(scopeVW)
+
+ encoder, err := ec.LookupEncoder(reflect.TypeOf(cws.Scope))
+ if err != nil {
+ return err
+ }
+
+ err = encoder.EncodeValue(ec, scopeVW, reflect.ValueOf(cws.Scope))
+ if err != nil {
+ return err
+ }
+
+ err = bsonrw.Copier{}.CopyBytesToDocumentWriter(dw, *sw)
+ if err != nil {
+ return err
+ }
+ return dw.WriteDocumentEnd()
+}
+
+// isImplementationNil returns true if val is a nil pointer and inter is implemented on a concrete type.
+func isImplementationNil(val reflect.Value, inter reflect.Type) bool {
+ vt := val.Type()
+ for vt.Kind() == reflect.Ptr {
+ vt = vt.Elem()
+ }
+ return vt.Implements(inter) && val.Kind() == reflect.Ptr && val.IsNil()
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/doc.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/doc.go
new file mode 100644
index 000000000..5f903ebea
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/doc.go
@@ -0,0 +1,90 @@
+// Copyright (C) MongoDB, Inc. 2022-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+// Package bsoncodec provides a system for encoding values to BSON representations and decoding
+// values from BSON representations. This package considers both binary BSON and ExtendedJSON as
+// BSON representations. The types in this package enable a flexible system for handling this
+// encoding and decoding.
+//
+// The codec system is composed of two parts:
+//
+// 1) ValueEncoders and ValueDecoders that handle encoding and decoding Go values to and from BSON
+// representations.
+//
+// 2) A Registry that holds these ValueEncoders and ValueDecoders and provides methods for
+// retrieving them.
+//
+// # ValueEncoders and ValueDecoders
+//
+// The ValueEncoder interface is implemented by types that can encode a provided Go type to BSON.
+// The value to encode is provided as a reflect.Value and a bsonrw.ValueWriter is used within the
+// EncodeValue method to actually create the BSON representation. For convenience, ValueEncoderFunc
+// is provided to allow use of a function with the correct signature as a ValueEncoder. An
+// EncodeContext instance is provided to allow implementations to lookup further ValueEncoders and
+// to provide configuration information.
+//
+// The ValueDecoder interface is the inverse of the ValueEncoder. Implementations should ensure that
+// the value they receive is settable. Similar to ValueEncoderFunc, ValueDecoderFunc is provided to
+// allow the use of a function with the correct signature as a ValueDecoder. A DecodeContext
+// instance is provided and serves similar functionality to the EncodeContext.
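+//
+// For illustration, a function with the matching signature can be used
+// directly as an encoder (a sketch; myType and its write logic are
+// hypothetical stand-ins):
+//
+//	enc := ValueEncoderFunc(func(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+//		return vw.WriteString(val.String())
+//	})
+//	registryBuilder.RegisterTypeEncoder(reflect.TypeOf(myType("")), enc)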
+//
+// # Registry and RegistryBuilder
+//
+// A Registry is an immutable store for ValueEncoders, ValueDecoders, and a type map. See the Registry type
+// documentation for examples of registering various custom encoders and decoders. A Registry can be constructed using a
+// RegistryBuilder, which handles four main types of codecs:
+//
+// 1. Type encoders/decoders - These can be registered using the RegisterTypeEncoder and RegisterTypeDecoder methods.
+// The registered codec will be invoked when encoding/decoding a value whose type matches the registered type exactly.
+// If the registered type is an interface, the codec will be invoked when encoding or decoding values whose type is the
+// interface, but not for values with concrete types that implement the interface.
+//
+// 2. Hook encoders/decoders - These can be registered using the RegisterHookEncoder and RegisterHookDecoder methods.
+// These methods only accept interface types and the registered codecs will be invoked when encoding or decoding values
+// whose types implement the interface. An example of a hook defined by the driver is bson.Marshaler. The driver will
+// call the MarshalBSON method for any value whose type implements bson.Marshaler, regardless of the value's concrete
+// type.
+//
+// 3. Type map entries - This can be used to associate a BSON type with a Go type. These type associations are used when
+// decoding into a bson.D/bson.M or a struct field of type interface{}. For example, by default, BSON int32 and int64
+// values decode as Go int32 and int64 instances, respectively, when decoding into a bson.D. The following code would
+// change the behavior so these values decode as Go int instances instead:
+//
+// intType := reflect.TypeOf(int(0))
+// registryBuilder.RegisterTypeMapEntry(bsontype.Int32, intType).RegisterTypeMapEntry(bsontype.Int64, intType)
+//
+// 4. Kind encoder/decoders - These can be registered using the RegisterDefaultEncoder and RegisterDefaultDecoder
+// methods. The registered codec will be invoked when encoding or decoding values whose reflect.Kind matches the
+// registered reflect.Kind as long as the value's type doesn't match a registered type or hook encoder/decoder first.
+// These methods should be used to change the behavior for all values for a specific kind.
+//
+// # Registry Lookup Procedure
+//
+// When looking up an encoder in a Registry, the precedence rules are as follows:
+//
+// 1. A type encoder registered for the exact type of the value.
+//
+// 2. A hook encoder registered for an interface that is implemented by the value or by a pointer to the value. If the
+// value matches multiple hooks (e.g. the type implements bsoncodec.Marshaler and bsoncodec.ValueMarshaler), the first
+// one registered will be selected. Note that registries constructed using bson.NewRegistryBuilder have driver-defined
+// hooks registered for the bsoncodec.Marshaler, bsoncodec.ValueMarshaler, and bsoncodec.Proxy interfaces, so those
+// will take precedence over any new hooks.
+//
+// 3. A kind encoder registered for the value's kind.
+//
+// If all of these lookups fail to find an encoder, an error of type ErrNoEncoder is returned. The same precedence
+// rules apply for decoders, with the exception that an error of type ErrNoDecoder will be returned if no decoder is
+// found.
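+//
+// For illustration, a lookup against a built Registry r might look like
+// this (a sketch, not a prescribed pattern):
+//
+//	enc, err := r.LookupEncoder(reflect.TypeOf(int64(0)))
+//	if err != nil {
+//		// err is of type ErrNoEncoder when no codec matched.
+//	}
+//	_ = enc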
+//
+// # DefaultValueEncoders and DefaultValueDecoders
+//
+// The DefaultValueEncoders and DefaultValueDecoders types provide a full set of ValueEncoders and
+// ValueDecoders for handling a wide range of Go types, including all of the types within the
+// primitive package. To make registering these codecs easier, a helper method on each type is
+// provided. For the DefaultValueEncoders type the method is called RegisterDefaultEncoders and for
+// the DefaultValueDecoders type the method is called RegisterDefaultDecoders; the latter also
+// handles registering type map entries for each BSON type.
+package bsoncodec
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/empty_interface_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/empty_interface_codec.go
new file mode 100644
index 000000000..eda417cff
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/empty_interface_codec.go
@@ -0,0 +1,147 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import (
+ "reflect"
+
+ "go.mongodb.org/mongo-driver/bson/bsonoptions"
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+// EmptyInterfaceCodec is the Codec used for interface{} values.
+type EmptyInterfaceCodec struct {
+ DecodeBinaryAsSlice bool
+}
+
+var (
+ defaultEmptyInterfaceCodec = NewEmptyInterfaceCodec()
+
+ _ ValueCodec = defaultEmptyInterfaceCodec
+ _ typeDecoder = defaultEmptyInterfaceCodec
+)
+
+// NewEmptyInterfaceCodec returns an EmptyInterfaceCodec with options opts.
+func NewEmptyInterfaceCodec(opts ...*bsonoptions.EmptyInterfaceCodecOptions) *EmptyInterfaceCodec {
+ interfaceOpt := bsonoptions.MergeEmptyInterfaceCodecOptions(opts...)
+
+ codec := EmptyInterfaceCodec{}
+ if interfaceOpt.DecodeBinaryAsSlice != nil {
+ codec.DecodeBinaryAsSlice = *interfaceOpt.DecodeBinaryAsSlice
+ }
+ return &codec
+}
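+
+// For illustration, a codec that decodes generic BSON binary values as
+// []byte can be built via bsonoptions (a sketch):
+//
+//	opt := bsonoptions.EmptyInterfaceCodec().SetDecodeBinaryAsSlice(true)
+//	codec := NewEmptyInterfaceCodec(opt)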
+
+// EncodeValue is the ValueEncoderFunc for interface{}.
+func (eic EmptyInterfaceCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tEmpty {
+ return ValueEncoderError{Name: "EmptyInterfaceEncodeValue", Types: []reflect.Type{tEmpty}, Received: val}
+ }
+
+ if val.IsNil() {
+ return vw.WriteNull()
+ }
+ encoder, err := ec.LookupEncoder(val.Elem().Type())
+ if err != nil {
+ return err
+ }
+
+ return encoder.EncodeValue(ec, vw, val.Elem())
+}
+
+func (eic EmptyInterfaceCodec) getEmptyInterfaceDecodeType(dc DecodeContext, valueType bsontype.Type) (reflect.Type, error) {
+ isDocument := valueType == bsontype.Type(0) || valueType == bsontype.EmbeddedDocument
+ if isDocument {
+ if dc.defaultDocumentType != nil {
+ // If the bsontype is an embedded document and the DocumentType is set on the DecodeContext, then return
+ // that type.
+ return dc.defaultDocumentType, nil
+ }
+ if dc.Ancestor != nil {
+ // Using ancestor information rather than looking up the type map entry forces consistent decoding.
+ // If we're decoding into a bson.D, subdocuments should also be decoded as bson.D, even if a type map entry
+ // has been registered.
+ return dc.Ancestor, nil
+ }
+ }
+
+ rtype, err := dc.LookupTypeMapEntry(valueType)
+ if err == nil {
+ return rtype, nil
+ }
+
+ if isDocument {
+ // For documents, fall back to looking up a type map entry for bsontype.Type(0) or bsontype.EmbeddedDocument,
+ // depending on the original valueType.
+ var lookupType bsontype.Type
+ switch valueType {
+ case bsontype.Type(0):
+ lookupType = bsontype.EmbeddedDocument
+ case bsontype.EmbeddedDocument:
+ lookupType = bsontype.Type(0)
+ }
+
+ rtype, err = dc.LookupTypeMapEntry(lookupType)
+ if err == nil {
+ return rtype, nil
+ }
+ }
+
+ return nil, err
+}
+
+func (eic EmptyInterfaceCodec) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tEmpty {
+ return emptyValue, ValueDecoderError{Name: "EmptyInterfaceDecodeValue", Types: []reflect.Type{tEmpty}, Received: reflect.Zero(t)}
+ }
+
+ rtype, err := eic.getEmptyInterfaceDecodeType(dc, vr.Type())
+ if err != nil {
+ switch vr.Type() {
+ case bsontype.Null:
+ return reflect.Zero(t), vr.ReadNull()
+ default:
+ return emptyValue, err
+ }
+ }
+
+ decoder, err := dc.LookupDecoder(rtype)
+ if err != nil {
+ return emptyValue, err
+ }
+
+ elem, err := decodeTypeOrValue(decoder, dc, vr, rtype)
+ if err != nil {
+ return emptyValue, err
+ }
+
+ if eic.DecodeBinaryAsSlice && rtype == tBinary {
+ binElem := elem.Interface().(primitive.Binary)
+ if binElem.Subtype == bsontype.BinaryGeneric || binElem.Subtype == bsontype.BinaryBinaryOld {
+ elem = reflect.ValueOf(binElem.Data)
+ }
+ }
+
+ return elem, nil
+}
+
+// DecodeValue is the ValueDecoderFunc for interface{}.
+func (eic EmptyInterfaceCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tEmpty {
+ return ValueDecoderError{Name: "EmptyInterfaceDecodeValue", Types: []reflect.Type{tEmpty}, Received: val}
+ }
+
+ elem, err := eic.decodeType(dc, vr, val.Type())
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/map_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/map_codec.go
new file mode 100644
index 000000000..e1fbef9c6
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/map_codec.go
@@ -0,0 +1,309 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import (
+ "encoding"
+ "fmt"
+ "reflect"
+ "strconv"
+
+ "go.mongodb.org/mongo-driver/bson/bsonoptions"
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+)
+
+var defaultMapCodec = NewMapCodec()
+
+// MapCodec is the Codec used for map values.
+type MapCodec struct {
+ DecodeZerosMap bool
+ EncodeNilAsEmpty bool
+ EncodeKeysWithStringer bool
+}
+
+var _ ValueCodec = &MapCodec{}
+
+// KeyMarshaler is the interface implemented by an object that can marshal itself into a string key.
+// This applies to types used as map keys and is similar to encoding.TextMarshaler.
+type KeyMarshaler interface {
+ MarshalKey() (key string, err error)
+}
+
+// KeyUnmarshaler is the interface implemented by an object that can unmarshal a string representation
+// of itself. This applies to types used as map keys and is similar to encoding.TextUnmarshaler.
+//
+// UnmarshalKey must be able to decode the form generated by MarshalKey.
+// UnmarshalKey must copy the text if it wishes to retain the text
+// after returning.
+type KeyUnmarshaler interface {
+ UnmarshalKey(key string) error
+}
+
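+// For illustration, a map key type might satisfy both interfaces like this
+// (a sketch; userID is a hypothetical type):
+//
+//	type userID int
+//
+//	func (u userID) MarshalKey() (string, error) {
+//		return strconv.Itoa(int(u)), nil
+//	}
+//
+//	func (u *userID) UnmarshalKey(key string) error {
+//		n, err := strconv.Atoi(key)
+//		*u = userID(n)
+//		return err
+//	}
+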
+// NewMapCodec returns a MapCodec with options opts.
+func NewMapCodec(opts ...*bsonoptions.MapCodecOptions) *MapCodec {
+ mapOpt := bsonoptions.MergeMapCodecOptions(opts...)
+
+ codec := MapCodec{}
+ if mapOpt.DecodeZerosMap != nil {
+ codec.DecodeZerosMap = *mapOpt.DecodeZerosMap
+ }
+ if mapOpt.EncodeNilAsEmpty != nil {
+ codec.EncodeNilAsEmpty = *mapOpt.EncodeNilAsEmpty
+ }
+ if mapOpt.EncodeKeysWithStringer != nil {
+ codec.EncodeKeysWithStringer = *mapOpt.EncodeKeysWithStringer
+ }
+ return &codec
+}
+
+// EncodeValue is the ValueEncoder for map[*]* types.
+func (mc *MapCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Kind() != reflect.Map {
+ return ValueEncoderError{Name: "MapEncodeValue", Kinds: []reflect.Kind{reflect.Map}, Received: val}
+ }
+
+ if val.IsNil() && !mc.EncodeNilAsEmpty {
+ // If we have a nil map but we can't WriteNull, that means we're probably trying to encode
+ // to a TopLevel document. We can't currently tell if this is what actually happened, but if
+ // there's a deeper underlying problem, the error will also be returned from WriteDocument,
+ // so just continue. The operations on a map reflection value are valid, so we can call
+ // MapKeys within mapEncodeValue without a problem.
+ err := vw.WriteNull()
+ if err == nil {
+ return nil
+ }
+ }
+
+ dw, err := vw.WriteDocument()
+ if err != nil {
+ return err
+ }
+
+ return mc.mapEncodeValue(ec, dw, val, nil)
+}
+
+// mapEncodeValue handles encoding of the values of a map. The collisionFn returns
+// true if the provided key exists, this is mainly used for inline maps in the
+// struct codec.
+func (mc *MapCodec) mapEncodeValue(ec EncodeContext, dw bsonrw.DocumentWriter, val reflect.Value, collisionFn func(string) bool) error {
+
+ elemType := val.Type().Elem()
+ encoder, err := ec.LookupEncoder(elemType)
+ if err != nil && elemType.Kind() != reflect.Interface {
+ return err
+ }
+
+ keys := val.MapKeys()
+ for _, key := range keys {
+ keyStr, err := mc.encodeKey(key)
+ if err != nil {
+ return err
+ }
+
+ if collisionFn != nil && collisionFn(keyStr) {
+ return fmt.Errorf("Key %s of inlined map conflicts with a struct field name", key)
+ }
+
+ currEncoder, currVal, lookupErr := defaultValueEncoders.lookupElementEncoder(ec, encoder, val.MapIndex(key))
+ if lookupErr != nil && lookupErr != errInvalidValue {
+ return lookupErr
+ }
+
+ vw, err := dw.WriteDocumentElement(keyStr)
+ if err != nil {
+ return err
+ }
+
+ if lookupErr == errInvalidValue {
+ err = vw.WriteNull()
+ if err != nil {
+ return err
+ }
+ continue
+ }
+
+ err = currEncoder.EncodeValue(ec, vw, currVal)
+ if err != nil {
+ return err
+ }
+ }
+
+ return dw.WriteDocumentEnd()
+}
+
+// DecodeValue is the ValueDecoder for map types with string or numeric keys.
+func (mc *MapCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if val.Kind() != reflect.Map || (!val.CanSet() && val.IsNil()) {
+ return ValueDecoderError{Name: "MapDecodeValue", Kinds: []reflect.Kind{reflect.Map}, Received: val}
+ }
+
+ switch vrType := vr.Type(); vrType {
+ case bsontype.Type(0), bsontype.EmbeddedDocument:
+ case bsontype.Null:
+ val.Set(reflect.Zero(val.Type()))
+ return vr.ReadNull()
+ case bsontype.Undefined:
+ val.Set(reflect.Zero(val.Type()))
+ return vr.ReadUndefined()
+ default:
+ return fmt.Errorf("cannot decode %v into a %s", vrType, val.Type())
+ }
+
+ dr, err := vr.ReadDocument()
+ if err != nil {
+ return err
+ }
+
+ if val.IsNil() {
+ val.Set(reflect.MakeMap(val.Type()))
+ }
+
+ if val.Len() > 0 && mc.DecodeZerosMap {
+ clearMap(val)
+ }
+
+ eType := val.Type().Elem()
+ decoder, err := dc.LookupDecoder(eType)
+ if err != nil {
+ return err
+ }
+ eTypeDecoder, _ := decoder.(typeDecoder)
+
+ if eType == tEmpty {
+ dc.Ancestor = val.Type()
+ }
+
+ keyType := val.Type().Key()
+
+ for {
+ key, vr, err := dr.ReadElement()
+ if err == bsonrw.ErrEOD {
+ break
+ }
+ if err != nil {
+ return err
+ }
+
+ k, err := mc.decodeKey(key, keyType)
+ if err != nil {
+ return err
+ }
+
+ elem, err := decodeTypeOrValueWithInfo(decoder, eTypeDecoder, dc, vr, eType, true)
+ if err != nil {
+ return newDecodeError(key, err)
+ }
+
+ val.SetMapIndex(k, elem)
+ }
+ return nil
+}
+
+func clearMap(m reflect.Value) {
+ var none reflect.Value
+ for _, k := range m.MapKeys() {
+ m.SetMapIndex(k, none)
+ }
+}
+
+func (mc *MapCodec) encodeKey(val reflect.Value) (string, error) {
+ if mc.EncodeKeysWithStringer {
+ return fmt.Sprint(val), nil
+ }
+
+ // keys of any string type are used directly
+ if val.Kind() == reflect.String {
+ return val.String(), nil
+ }
+ // KeyMarshalers are marshaled
+ if km, ok := val.Interface().(KeyMarshaler); ok {
+ if val.Kind() == reflect.Ptr && val.IsNil() {
+ return "", nil
+ }
+ buf, err := km.MarshalKey()
+ if err == nil {
+ return buf, nil
+ }
+ return "", err
+ }
+ // keys implementing encoding.TextMarshaler are marshaled.
+ if km, ok := val.Interface().(encoding.TextMarshaler); ok {
+ if val.Kind() == reflect.Ptr && val.IsNil() {
+ return "", nil
+ }
+
+ buf, err := km.MarshalText()
+ if err != nil {
+ return "", err
+ }
+
+ return string(buf), nil
+ }
+
+ switch val.Kind() {
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return strconv.FormatInt(val.Int(), 10), nil
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return strconv.FormatUint(val.Uint(), 10), nil
+ }
+ return "", fmt.Errorf("unsupported key type: %v", val.Type())
+}
+
+var keyUnmarshalerType = reflect.TypeOf((*KeyUnmarshaler)(nil)).Elem()
+var textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
+
+func (mc *MapCodec) decodeKey(key string, keyType reflect.Type) (reflect.Value, error) {
+ keyVal := reflect.ValueOf(key)
+ var err error
+ switch {
+ // First, if EncodeKeysWithStringer is not enabled, try to decode with KeyUnmarshaler.
+ case !mc.EncodeKeysWithStringer && reflect.PtrTo(keyType).Implements(keyUnmarshalerType):
+ keyVal = reflect.New(keyType)
+ v := keyVal.Interface().(KeyUnmarshaler)
+ err = v.UnmarshalKey(key)
+ keyVal = keyVal.Elem()
+ // Try to decode encoding.TextUnmarshalers.
+ case reflect.PtrTo(keyType).Implements(textUnmarshalerType):
+ keyVal = reflect.New(keyType)
+ v := keyVal.Interface().(encoding.TextUnmarshaler)
+ err = v.UnmarshalText([]byte(key))
+ keyVal = keyVal.Elem()
+ // Otherwise, go to type specific behavior
+ default:
+ switch keyType.Kind() {
+ case reflect.String:
+ keyVal = reflect.ValueOf(key).Convert(keyType)
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ n, parseErr := strconv.ParseInt(key, 10, 64)
+ if parseErr != nil || reflect.Zero(keyType).OverflowInt(n) {
+ err = fmt.Errorf("failed to unmarshal number key %v", key)
+ }
+ keyVal = reflect.ValueOf(n).Convert(keyType)
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ n, parseErr := strconv.ParseUint(key, 10, 64)
+ if parseErr != nil || reflect.Zero(keyType).OverflowUint(n) {
+ err = fmt.Errorf("failed to unmarshal number key %v", key)
+ break
+ }
+ keyVal = reflect.ValueOf(n).Convert(keyType)
+ case reflect.Float32, reflect.Float64:
+ if mc.EncodeKeysWithStringer {
+ parsed, err := strconv.ParseFloat(key, 64)
+ if err != nil {
+ return keyVal, fmt.Errorf("Map key is defined to be a decimal type (%v) but got error %v", keyType.Kind(), err)
+ }
+ keyVal = reflect.ValueOf(parsed)
+ break
+ }
+ fallthrough
+ default:
+ return keyVal, fmt.Errorf("unsupported key type: %v", keyType)
+ }
+ }
+ return keyVal, err
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/mode.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/mode.go
new file mode 100644
index 000000000..fbd9f0a9e
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/mode.go
@@ -0,0 +1,65 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import "fmt"
+
+type mode int
+
+const (
+ _ mode = iota
+ mTopLevel
+ mDocument
+ mArray
+ mValue
+ mElement
+ mCodeWithScope
+ mSpacer
+)
+
+func (m mode) String() string {
+ var str string
+
+ switch m {
+ case mTopLevel:
+ str = "TopLevel"
+ case mDocument:
+ str = "DocumentMode"
+ case mArray:
+ str = "ArrayMode"
+ case mValue:
+ str = "ValueMode"
+ case mElement:
+ str = "ElementMode"
+ case mCodeWithScope:
+ str = "CodeWithScopeMode"
+ case mSpacer:
+ str = "CodeWithScopeSpacerFrame"
+ default:
+ str = "UnknownMode"
+ }
+
+ return str
+}
+
+// TransitionError is an error returned when an invalid transition of a
+// ValueReader or ValueWriter state machine occurs.
+type TransitionError struct {
+ parent mode
+ current mode
+ destination mode
+}
+
+func (te TransitionError) Error() string {
+ if te.destination == mode(0) {
+ return fmt.Sprintf("invalid state transition: cannot read/write value while in %s", te.current)
+ }
+ if te.parent == mode(0) {
+ return fmt.Sprintf("invalid state transition: %s -> %s", te.current, te.destination)
+ }
+ return fmt.Sprintf("invalid state transition: %s -> %s; parent %s", te.current, te.destination, te.parent)
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/pointer_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/pointer_codec.go
new file mode 100644
index 000000000..616a3e701
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/pointer_codec.go
@@ -0,0 +1,109 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import (
+ "reflect"
+ "sync"
+
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+)
+
+var _ ValueEncoder = &PointerCodec{}
+var _ ValueDecoder = &PointerCodec{}
+
+// PointerCodec is the Codec used for pointers.
+type PointerCodec struct {
+ ecache map[reflect.Type]ValueEncoder
+ dcache map[reflect.Type]ValueDecoder
+ l sync.RWMutex
+}
+
+// NewPointerCodec returns a PointerCodec that has been initialized.
+func NewPointerCodec() *PointerCodec {
+ return &PointerCodec{
+ ecache: make(map[reflect.Type]ValueEncoder),
+ dcache: make(map[reflect.Type]ValueDecoder),
+ }
+}
+
+// EncodeValue handles encoding a pointer by either encoding it to BSON Null if the pointer is nil
+// or looking up an encoder for the type of value the pointer points to.
+func (pc *PointerCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if val.Kind() != reflect.Ptr {
+ if !val.IsValid() {
+ return vw.WriteNull()
+ }
+ return ValueEncoderError{Name: "PointerCodec.EncodeValue", Kinds: []reflect.Kind{reflect.Ptr}, Received: val}
+ }
+
+ if val.IsNil() {
+ return vw.WriteNull()
+ }
+
+ pc.l.RLock()
+ enc, ok := pc.ecache[val.Type()]
+ pc.l.RUnlock()
+ if ok {
+ if enc == nil {
+ return ErrNoEncoder{Type: val.Type()}
+ }
+ return enc.EncodeValue(ec, vw, val.Elem())
+ }
+
+ enc, err := ec.LookupEncoder(val.Type().Elem())
+ pc.l.Lock()
+ pc.ecache[val.Type()] = enc
+ pc.l.Unlock()
+ if err != nil {
+ return err
+ }
+
+ return enc.EncodeValue(ec, vw, val.Elem())
+}
+
+// DecodeValue handles decoding a pointer by looking up a decoder for the type it points to and
+// using that to decode. If the BSON value is Null, this method will set the pointer to nil.
+func (pc *PointerCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Kind() != reflect.Ptr {
+ return ValueDecoderError{Name: "PointerCodec.DecodeValue", Kinds: []reflect.Kind{reflect.Ptr}, Received: val}
+ }
+
+ if vr.Type() == bsontype.Null {
+ val.Set(reflect.Zero(val.Type()))
+ return vr.ReadNull()
+ }
+ if vr.Type() == bsontype.Undefined {
+ val.Set(reflect.Zero(val.Type()))
+ return vr.ReadUndefined()
+ }
+
+ if val.IsNil() {
+ val.Set(reflect.New(val.Type().Elem()))
+ }
+
+ pc.l.RLock()
+ dec, ok := pc.dcache[val.Type()]
+ pc.l.RUnlock()
+ if ok {
+ if dec == nil {
+ return ErrNoDecoder{Type: val.Type()}
+ }
+ return dec.DecodeValue(dc, vr, val.Elem())
+ }
+
+ dec, err := dc.LookupDecoder(val.Type().Elem())
+ pc.l.Lock()
+ pc.dcache[val.Type()] = dec
+ pc.l.Unlock()
+ if err != nil {
+ return err
+ }
+
+ return dec.DecodeValue(dc, vr, val.Elem())
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/proxy.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/proxy.go
new file mode 100644
index 000000000..4cf2b01ab
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/proxy.go
@@ -0,0 +1,14 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+// Proxy is an interface implemented by types that cannot themselves be directly encoded. Types
+// that implement this interface will have ProxyBSON called during the encoding process, and the
+// returned value will be encoded in place of the implementer.
+type Proxy interface {
+ ProxyBSON() (interface{}, error)
+}
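+
+// For illustration, a wrapper that delegates its encoding to another value
+// might implement Proxy like this (a sketch; redacted is hypothetical):
+//
+//	type redacted struct{ secret string }
+//
+//	func (redacted) ProxyBSON() (interface{}, error) {
+//		return "[redacted]", nil
+//	}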
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/registry.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/registry.go
new file mode 100644
index 000000000..80644023c
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/registry.go
@@ -0,0 +1,469 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "sync"
+
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+)
+
+// ErrNilType is returned when nil is passed to either LookupEncoder or LookupDecoder.
+var ErrNilType = errors.New("cannot perform a decoder lookup on <nil>")
+
+// ErrNotPointer is returned when a non-pointer type is provided to LookupDecoder.
+var ErrNotPointer = errors.New("non-pointer provided to LookupDecoder")
+
+// ErrNoEncoder is returned when there wasn't an encoder available for a type.
+type ErrNoEncoder struct {
+ Type reflect.Type
+}
+
+func (ene ErrNoEncoder) Error() string {
+ if ene.Type == nil {
+ return "no encoder found for <nil>"
+ }
+ return "no encoder found for " + ene.Type.String()
+}
+
+// ErrNoDecoder is returned when there wasn't a decoder available for a type.
+type ErrNoDecoder struct {
+ Type reflect.Type
+}
+
+func (end ErrNoDecoder) Error() string {
+ return "no decoder found for " + end.Type.String()
+}
+
+// ErrNoTypeMapEntry is returned when there wasn't a type available for the provided BSON type.
+type ErrNoTypeMapEntry struct {
+ Type bsontype.Type
+}
+
+func (entme ErrNoTypeMapEntry) Error() string {
+ return "no type map entry found for " + entme.Type.String()
+}
+
+// ErrNotInterface is returned when the provided type is not an interface.
+var ErrNotInterface = errors.New("The provided type is not an interface")
+
+// A RegistryBuilder is used to build a Registry. This type is not goroutine
+// safe.
+type RegistryBuilder struct {
+ typeEncoders map[reflect.Type]ValueEncoder
+ interfaceEncoders []interfaceValueEncoder
+ kindEncoders map[reflect.Kind]ValueEncoder
+
+ typeDecoders map[reflect.Type]ValueDecoder
+ interfaceDecoders []interfaceValueDecoder
+ kindDecoders map[reflect.Kind]ValueDecoder
+
+ typeMap map[bsontype.Type]reflect.Type
+}
+
+// A Registry is used to store and retrieve codecs for types and interfaces. This type is the main
+// type passed around, and Encoders and Decoders are constructed from it.
+type Registry struct {
+ typeEncoders map[reflect.Type]ValueEncoder
+ typeDecoders map[reflect.Type]ValueDecoder
+
+ interfaceEncoders []interfaceValueEncoder
+ interfaceDecoders []interfaceValueDecoder
+
+ kindEncoders map[reflect.Kind]ValueEncoder
+ kindDecoders map[reflect.Kind]ValueDecoder
+
+ typeMap map[bsontype.Type]reflect.Type
+
+ mu sync.RWMutex
+}
+
+// NewRegistryBuilder creates a new empty RegistryBuilder.
+func NewRegistryBuilder() *RegistryBuilder {
+ return &RegistryBuilder{
+ typeEncoders: make(map[reflect.Type]ValueEncoder),
+ typeDecoders: make(map[reflect.Type]ValueDecoder),
+
+ interfaceEncoders: make([]interfaceValueEncoder, 0),
+ interfaceDecoders: make([]interfaceValueDecoder, 0),
+
+ kindEncoders: make(map[reflect.Kind]ValueEncoder),
+ kindDecoders: make(map[reflect.Kind]ValueDecoder),
+
+ typeMap: make(map[bsontype.Type]reflect.Type),
+ }
+}
+
+func buildDefaultRegistry() *Registry {
+ rb := NewRegistryBuilder()
+ defaultValueEncoders.RegisterDefaultEncoders(rb)
+ defaultValueDecoders.RegisterDefaultDecoders(rb)
+ return rb.Build()
+}
+
+// RegisterCodec will register the provided ValueCodec for the provided type.
+func (rb *RegistryBuilder) RegisterCodec(t reflect.Type, codec ValueCodec) *RegistryBuilder {
+ rb.RegisterTypeEncoder(t, codec)
+ rb.RegisterTypeDecoder(t, codec)
+ return rb
+}
+
+// RegisterTypeEncoder will register the provided ValueEncoder for the provided type.
+//
+// The type will be used directly, so an encoder can be registered for a type and a different encoder can be registered
+// for a pointer to that type.
+//
+// If the given type is an interface, the encoder will be called when marshalling a type that is that interface. It
+// will not be called when marshalling a non-interface type that implements the interface.
+func (rb *RegistryBuilder) RegisterTypeEncoder(t reflect.Type, enc ValueEncoder) *RegistryBuilder {
+ rb.typeEncoders[t] = enc
+ return rb
+}
+
+// RegisterHookEncoder will register an encoder for the provided interface type t. This encoder will be called when
+// marshalling a type if the type implements t or a pointer to the type implements t. If the provided type is not
+// an interface (i.e. t.Kind() != reflect.Interface), this method will panic.
+func (rb *RegistryBuilder) RegisterHookEncoder(t reflect.Type, enc ValueEncoder) *RegistryBuilder {
+ if t.Kind() != reflect.Interface {
+ panicStr := fmt.Sprintf("RegisterHookEncoder expects a type with kind reflect.Interface, "+
+ "got type %s with kind %s", t, t.Kind())
+ panic(panicStr)
+ }
+
+ for idx, encoder := range rb.interfaceEncoders {
+ if encoder.i == t {
+ rb.interfaceEncoders[idx].ve = enc
+ return rb
+ }
+ }
+
+ rb.interfaceEncoders = append(rb.interfaceEncoders, interfaceValueEncoder{i: t, ve: enc})
+ return rb
+}
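+
+// For illustration, registering a hook encoder for fmt.Stringer would look
+// like this (a sketch; stringerEncodeValue is a hypothetical ValueEncoderFunc):
+//
+//	tStringer := reflect.TypeOf((*fmt.Stringer)(nil)).Elem()
+//	rb.RegisterHookEncoder(tStringer, ValueEncoderFunc(stringerEncodeValue))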
+
+// RegisterTypeDecoder will register the provided ValueDecoder for the provided type.
+//
+// The type will be used directly, so a decoder can be registered for a type and a different decoder can be registered
+// for a pointer to that type.
+//
+// If the given type is an interface, the decoder will be called when unmarshalling into a type that is that interface.
+// It will not be called when unmarshalling into a non-interface type that implements the interface.
+func (rb *RegistryBuilder) RegisterTypeDecoder(t reflect.Type, dec ValueDecoder) *RegistryBuilder {
+ rb.typeDecoders[t] = dec
+ return rb
+}
+
+// RegisterHookDecoder will register a decoder for the provided interface type t. This decoder will be called when
+// unmarshalling into a type if the type implements t or a pointer to the type implements t. If the provided type is not
+// an interface (i.e. t.Kind() != reflect.Interface), this method will panic.
+func (rb *RegistryBuilder) RegisterHookDecoder(t reflect.Type, dec ValueDecoder) *RegistryBuilder {
+ if t.Kind() != reflect.Interface {
+ panicStr := fmt.Sprintf("RegisterHookDecoder expects a type with kind reflect.Interface, "+
+ "got type %s with kind %s", t, t.Kind())
+ panic(panicStr)
+ }
+
+ for idx, decoder := range rb.interfaceDecoders {
+ if decoder.i == t {
+ rb.interfaceDecoders[idx].vd = dec
+ return rb
+ }
+ }
+
+ rb.interfaceDecoders = append(rb.interfaceDecoders, interfaceValueDecoder{i: t, vd: dec})
+ return rb
+}
+
+// RegisterEncoder registers the provided type and encoder pair.
+//
+// Deprecated: Use RegisterTypeEncoder or RegisterHookEncoder instead.
+func (rb *RegistryBuilder) RegisterEncoder(t reflect.Type, enc ValueEncoder) *RegistryBuilder {
+ if t == tEmpty {
+ rb.typeEncoders[t] = enc
+ return rb
+ }
+ switch t.Kind() {
+ case reflect.Interface:
+ for idx, ir := range rb.interfaceEncoders {
+ if ir.i == t {
+ rb.interfaceEncoders[idx].ve = enc
+ return rb
+ }
+ }
+
+ rb.interfaceEncoders = append(rb.interfaceEncoders, interfaceValueEncoder{i: t, ve: enc})
+ default:
+ rb.typeEncoders[t] = enc
+ }
+ return rb
+}
+
+// RegisterDecoder registers the provided type and decoder pair.
+//
+// Deprecated: Use RegisterTypeDecoder or RegisterHookDecoder instead.
+func (rb *RegistryBuilder) RegisterDecoder(t reflect.Type, dec ValueDecoder) *RegistryBuilder {
+ if t == nil {
+ rb.typeDecoders[nil] = dec
+ return rb
+ }
+ if t == tEmpty {
+ rb.typeDecoders[t] = dec
+ return rb
+ }
+ switch t.Kind() {
+ case reflect.Interface:
+ for idx, ir := range rb.interfaceDecoders {
+ if ir.i == t {
+ rb.interfaceDecoders[idx].vd = dec
+ return rb
+ }
+ }
+
+ rb.interfaceDecoders = append(rb.interfaceDecoders, interfaceValueDecoder{i: t, vd: dec})
+ default:
+ rb.typeDecoders[t] = dec
+ }
+ return rb
+}
+
+// RegisterDefaultEncoder will register the provided ValueEncoder to the provided
+// kind.
+func (rb *RegistryBuilder) RegisterDefaultEncoder(kind reflect.Kind, enc ValueEncoder) *RegistryBuilder {
+ rb.kindEncoders[kind] = enc
+ return rb
+}
+
+// RegisterDefaultDecoder will register the provided ValueDecoder to the
+// provided kind.
+func (rb *RegistryBuilder) RegisterDefaultDecoder(kind reflect.Kind, dec ValueDecoder) *RegistryBuilder {
+ rb.kindDecoders[kind] = dec
+ return rb
+}
+
+// RegisterTypeMapEntry will register the provided type to the BSON type. The primary usage for this
+// mapping is decoding situations where an empty interface is used and a default type needs to be
+// created and decoded into.
+//
+// By default, BSON documents will decode into interface{} values as bson.D. To change the default type for BSON
+// documents, a type map entry for bsontype.EmbeddedDocument should be registered. For example, to force BSON documents
+// to decode to bson.Raw, use the following code:
+//
+// rb.RegisterTypeMapEntry(bsontype.EmbeddedDocument, reflect.TypeOf(bson.Raw{}))
+func (rb *RegistryBuilder) RegisterTypeMapEntry(bt bsontype.Type, rt reflect.Type) *RegistryBuilder {
+ rb.typeMap[bt] = rt
+ return rb
+}
+
+// Build creates a Registry from the current state of this RegistryBuilder.
+func (rb *RegistryBuilder) Build() *Registry {
+ registry := new(Registry)
+
+ registry.typeEncoders = make(map[reflect.Type]ValueEncoder)
+ for t, enc := range rb.typeEncoders {
+ registry.typeEncoders[t] = enc
+ }
+
+ registry.typeDecoders = make(map[reflect.Type]ValueDecoder)
+ for t, dec := range rb.typeDecoders {
+ registry.typeDecoders[t] = dec
+ }
+
+ registry.interfaceEncoders = make([]interfaceValueEncoder, len(rb.interfaceEncoders))
+ copy(registry.interfaceEncoders, rb.interfaceEncoders)
+
+ registry.interfaceDecoders = make([]interfaceValueDecoder, len(rb.interfaceDecoders))
+ copy(registry.interfaceDecoders, rb.interfaceDecoders)
+
+ registry.kindEncoders = make(map[reflect.Kind]ValueEncoder)
+ for kind, enc := range rb.kindEncoders {
+ registry.kindEncoders[kind] = enc
+ }
+
+ registry.kindDecoders = make(map[reflect.Kind]ValueDecoder)
+ for kind, dec := range rb.kindDecoders {
+ registry.kindDecoders[kind] = dec
+ }
+
+ registry.typeMap = make(map[bsontype.Type]reflect.Type)
+ for bt, rt := range rb.typeMap {
+ registry.typeMap[bt] = rt
+ }
+
+ return registry
+}
+
+// LookupEncoder inspects the registry for an encoder for the given type. The lookup precedence works as follows:
+//
+// 1. An encoder registered for the exact type. If the given type represents an interface, an encoder registered using
+// RegisterTypeEncoder for the interface will be selected.
+//
+// 2. An encoder registered using RegisterHookEncoder for an interface implemented by the type or by a pointer to the
+// type.
+//
+// 3. An encoder registered for the reflect.Kind of the value.
+//
+// If no encoder is found, an error of type ErrNoEncoder is returned.
+func (r *Registry) LookupEncoder(t reflect.Type) (ValueEncoder, error) {
+ encodererr := ErrNoEncoder{Type: t}
+ r.mu.RLock()
+ enc, found := r.lookupTypeEncoder(t)
+ r.mu.RUnlock()
+ if found {
+ if enc == nil {
+ return nil, ErrNoEncoder{Type: t}
+ }
+ return enc, nil
+ }
+
+ enc, found = r.lookupInterfaceEncoder(t, true)
+ if found {
+ r.mu.Lock()
+ r.typeEncoders[t] = enc
+ r.mu.Unlock()
+ return enc, nil
+ }
+
+ if t == nil {
+ r.mu.Lock()
+ r.typeEncoders[t] = nil
+ r.mu.Unlock()
+ return nil, encodererr
+ }
+
+ enc, found = r.kindEncoders[t.Kind()]
+ if !found {
+ r.mu.Lock()
+ r.typeEncoders[t] = nil
+ r.mu.Unlock()
+ return nil, encodererr
+ }
+
+ r.mu.Lock()
+ r.typeEncoders[t] = enc
+ r.mu.Unlock()
+ return enc, nil
+}
+
+func (r *Registry) lookupTypeEncoder(t reflect.Type) (ValueEncoder, bool) {
+ enc, found := r.typeEncoders[t]
+ return enc, found
+}
+
+func (r *Registry) lookupInterfaceEncoder(t reflect.Type, allowAddr bool) (ValueEncoder, bool) {
+ if t == nil {
+ return nil, false
+ }
+ for _, ienc := range r.interfaceEncoders {
+ if t.Implements(ienc.i) {
+ return ienc.ve, true
+ }
+ if allowAddr && t.Kind() != reflect.Ptr && reflect.PtrTo(t).Implements(ienc.i) {
+ // If *t implements ienc.i but t itself does not, fall back (for
+ // non-addressable values) to an interface encoder for t registered
+ // further ahead in interfaceEncoders, or to the kind encoder.
+ defaultEnc, found := r.lookupInterfaceEncoder(t, false)
+ if !found {
+ defaultEnc = r.kindEncoders[t.Kind()]
+ }
+ return newCondAddrEncoder(ienc.ve, defaultEnc), true
+ }
+ }
+ return nil, false
+}
+
+// LookupDecoder inspects the registry for a decoder for the given type. The lookup precedence works as follows:
+//
+// 1. A decoder registered for the exact type. If the given type represents an interface, a decoder registered using
+// RegisterTypeDecoder for the interface will be selected.
+//
+// 2. A decoder registered using RegisterHookDecoder for an interface implemented by the type or by a pointer to the
+// type.
+//
+// 3. A decoder registered for the reflect.Kind of the value.
+//
+// If no decoder is found, an error of type ErrNoDecoder is returned.
+func (r *Registry) LookupDecoder(t reflect.Type) (ValueDecoder, error) {
+ if t == nil {
+ return nil, ErrNilType
+ }
+ decodererr := ErrNoDecoder{Type: t}
+ r.mu.RLock()
+ dec, found := r.lookupTypeDecoder(t)
+ r.mu.RUnlock()
+ if found {
+ if dec == nil {
+ return nil, ErrNoDecoder{Type: t}
+ }
+ return dec, nil
+ }
+
+ dec, found = r.lookupInterfaceDecoder(t, true)
+ if found {
+ r.mu.Lock()
+ r.typeDecoders[t] = dec
+ r.mu.Unlock()
+ return dec, nil
+ }
+
+ dec, found = r.kindDecoders[t.Kind()]
+ if !found {
+ r.mu.Lock()
+ r.typeDecoders[t] = nil
+ r.mu.Unlock()
+ return nil, decodererr
+ }
+
+ r.mu.Lock()
+ r.typeDecoders[t] = dec
+ r.mu.Unlock()
+ return dec, nil
+}
+
+func (r *Registry) lookupTypeDecoder(t reflect.Type) (ValueDecoder, bool) {
+ dec, found := r.typeDecoders[t]
+ return dec, found
+}
+
+func (r *Registry) lookupInterfaceDecoder(t reflect.Type, allowAddr bool) (ValueDecoder, bool) {
+ for _, idec := range r.interfaceDecoders {
+ if t.Implements(idec.i) {
+ return idec.vd, true
+ }
+ if allowAddr && t.Kind() != reflect.Ptr && reflect.PtrTo(t).Implements(idec.i) {
+ // If *t implements idec.i but t itself does not, fall back (for
+ // non-addressable values) to an interface decoder for t registered
+ // further ahead in interfaceDecoders, or to the kind decoder.
+ defaultDec, found := r.lookupInterfaceDecoder(t, false)
+ if !found {
+ defaultDec = r.kindDecoders[t.Kind()]
+ }
+ return newCondAddrDecoder(idec.vd, defaultDec), true
+ }
+ }
+ return nil, false
+}
+
+// LookupTypeMapEntry inspects the registry's type map for a Go type for the corresponding BSON
+// type. If no type is found, ErrNoTypeMapEntry is returned.
+func (r *Registry) LookupTypeMapEntry(bt bsontype.Type) (reflect.Type, error) {
+ t, ok := r.typeMap[bt]
+ if !ok || t == nil {
+ return nil, ErrNoTypeMapEntry{Type: bt}
+ }
+ return t, nil
+}
+
+type interfaceValueEncoder struct {
+ i reflect.Type
+ ve ValueEncoder
+}
+
+type interfaceValueDecoder struct {
+ i reflect.Type
+ vd ValueDecoder
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/slice_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/slice_codec.go
new file mode 100644
index 000000000..3c1b6b860
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/slice_codec.go
@@ -0,0 +1,199 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import (
+ "fmt"
+ "reflect"
+
+ "go.mongodb.org/mongo-driver/bson/bsonoptions"
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+var defaultSliceCodec = NewSliceCodec()
+
+// SliceCodec is the Codec used for slice values.
+type SliceCodec struct {
+ EncodeNilAsEmpty bool
+}
+
+var _ ValueCodec = &SliceCodec{}
+
+// NewSliceCodec returns a SliceCodec with options opts.
+func NewSliceCodec(opts ...*bsonoptions.SliceCodecOptions) *SliceCodec {
+ sliceOpt := bsonoptions.MergeSliceCodecOptions(opts...)
+
+ codec := SliceCodec{}
+ if sliceOpt.EncodeNilAsEmpty != nil {
+ codec.EncodeNilAsEmpty = *sliceOpt.EncodeNilAsEmpty
+ }
+ return &codec
+}
+
+// EncodeValue is the ValueEncoder for slice types.
+func (sc SliceCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Kind() != reflect.Slice {
+ return ValueEncoderError{Name: "SliceEncodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: val}
+ }
+
+ if val.IsNil() && !sc.EncodeNilAsEmpty {
+ return vw.WriteNull()
+ }
+
+ // If we have a []byte we want to treat it as a binary instead of as an array.
+ if val.Type().Elem() == tByte {
+ var byteSlice []byte
+ for idx := 0; idx < val.Len(); idx++ {
+ byteSlice = append(byteSlice, val.Index(idx).Interface().(byte))
+ }
+ return vw.WriteBinary(byteSlice)
+ }
+
+ // If we have a []primitive.E we want to treat it as a document instead of as an array.
+ if val.Type().ConvertibleTo(tD) {
+ d := val.Convert(tD).Interface().(primitive.D)
+
+ dw, err := vw.WriteDocument()
+ if err != nil {
+ return err
+ }
+
+ for _, e := range d {
+ err = encodeElement(ec, dw, e)
+ if err != nil {
+ return err
+ }
+ }
+
+ return dw.WriteDocumentEnd()
+ }
+
+ aw, err := vw.WriteArray()
+ if err != nil {
+ return err
+ }
+
+ elemType := val.Type().Elem()
+ encoder, err := ec.LookupEncoder(elemType)
+ if err != nil && elemType.Kind() != reflect.Interface {
+ return err
+ }
+
+ for idx := 0; idx < val.Len(); idx++ {
+ currEncoder, currVal, lookupErr := defaultValueEncoders.lookupElementEncoder(ec, encoder, val.Index(idx))
+ if lookupErr != nil && lookupErr != errInvalidValue {
+ return lookupErr
+ }
+
+ vw, err := aw.WriteArrayElement()
+ if err != nil {
+ return err
+ }
+
+ if lookupErr == errInvalidValue {
+ err = vw.WriteNull()
+ if err != nil {
+ return err
+ }
+ continue
+ }
+
+ err = currEncoder.EncodeValue(ec, vw, currVal)
+ if err != nil {
+ return err
+ }
+ }
+ return aw.WriteArrayEnd()
+}
+
+// DecodeValue is the ValueDecoder for slice types.
+func (sc *SliceCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Kind() != reflect.Slice {
+ return ValueDecoderError{Name: "SliceDecodeValue", Kinds: []reflect.Kind{reflect.Slice}, Received: val}
+ }
+
+ switch vrType := vr.Type(); vrType {
+ case bsontype.Array:
+ case bsontype.Null:
+ val.Set(reflect.Zero(val.Type()))
+ return vr.ReadNull()
+ case bsontype.Undefined:
+ val.Set(reflect.Zero(val.Type()))
+ return vr.ReadUndefined()
+ case bsontype.Type(0), bsontype.EmbeddedDocument:
+ if val.Type().Elem() != tE {
+ return fmt.Errorf("cannot decode document into %s", val.Type())
+ }
+ case bsontype.Binary:
+ if val.Type().Elem() != tByte {
+ return fmt.Errorf("SliceDecodeValue can only decode a binary into a byte array, got %v", vrType)
+ }
+ data, subtype, err := vr.ReadBinary()
+ if err != nil {
+ return err
+ }
+ if subtype != bsontype.BinaryGeneric && subtype != bsontype.BinaryBinaryOld {
+ return fmt.Errorf("SliceDecodeValue can only be used to decode subtype 0x00 or 0x02 for %s, got %v", bsontype.Binary, subtype)
+ }
+
+ if val.IsNil() {
+ val.Set(reflect.MakeSlice(val.Type(), 0, len(data)))
+ }
+
+ val.SetLen(0)
+ for _, elem := range data {
+ val.Set(reflect.Append(val, reflect.ValueOf(elem)))
+ }
+ return nil
+ case bsontype.String:
+ if sliceType := val.Type().Elem(); sliceType != tByte {
+ return fmt.Errorf("SliceDecodeValue can only decode a string into a byte array, got %v", sliceType)
+ }
+ str, err := vr.ReadString()
+ if err != nil {
+ return err
+ }
+ byteStr := []byte(str)
+
+ if val.IsNil() {
+ val.Set(reflect.MakeSlice(val.Type(), 0, len(byteStr)))
+ }
+
+ val.SetLen(0)
+ for _, elem := range byteStr {
+ val.Set(reflect.Append(val, reflect.ValueOf(elem)))
+ }
+ return nil
+ default:
+ return fmt.Errorf("cannot decode %v into a slice", vrType)
+ }
+
+ var elemsFunc func(DecodeContext, bsonrw.ValueReader, reflect.Value) ([]reflect.Value, error)
+ switch val.Type().Elem() {
+ case tE:
+ dc.Ancestor = val.Type()
+ elemsFunc = defaultValueDecoders.decodeD
+ default:
+ elemsFunc = defaultValueDecoders.decodeDefault
+ }
+
+ elems, err := elemsFunc(dc, vr, val)
+ if err != nil {
+ return err
+ }
+
+ if val.IsNil() {
+ val.Set(reflect.MakeSlice(val.Type(), 0, len(elems)))
+ }
+
+ val.SetLen(0)
+ val.Set(reflect.Append(val, elems...))
+
+ return nil
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/string_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/string_codec.go
new file mode 100644
index 000000000..5332b7c3b
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/string_codec.go
@@ -0,0 +1,119 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import (
+ "fmt"
+ "reflect"
+
+ "go.mongodb.org/mongo-driver/bson/bsonoptions"
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+)
+
+// StringCodec is the Codec used for string values.
+type StringCodec struct {
+ DecodeObjectIDAsHex bool
+}
+
+var (
+ defaultStringCodec = NewStringCodec()
+
+ _ ValueCodec = defaultStringCodec
+ _ typeDecoder = defaultStringCodec
+)
+
+// NewStringCodec returns a StringCodec with options opts.
+func NewStringCodec(opts ...*bsonoptions.StringCodecOptions) *StringCodec {
+ stringOpt := bsonoptions.MergeStringCodecOptions(opts...)
+ return &StringCodec{*stringOpt.DecodeObjectIDAsHex}
+}
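+
+// For illustration, a codec that decodes BSON ObjectIDs to their raw 12-byte
+// string form instead of hex can be built like this (a sketch):
+//
+//	opt := bsonoptions.StringCodec().SetDecodeObjectIDAsHex(false)
+//	codec := NewStringCodec(opt)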
+
+// EncodeValue is the ValueEncoder for string types.
+func (sc *StringCodec) EncodeValue(ectx EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if val.Kind() != reflect.String {
+ return ValueEncoderError{
+ Name: "StringEncodeValue",
+ Kinds: []reflect.Kind{reflect.String},
+ Received: val,
+ }
+ }
+
+ return vw.WriteString(val.String())
+}
+
+func (sc *StringCodec) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t.Kind() != reflect.String {
+ return emptyValue, ValueDecoderError{
+ Name: "StringDecodeValue",
+ Kinds: []reflect.Kind{reflect.String},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var str string
+ var err error
+ switch vr.Type() {
+ case bsontype.String:
+ str, err = vr.ReadString()
+ if err != nil {
+ return emptyValue, err
+ }
+ case bsontype.ObjectID:
+ oid, err := vr.ReadObjectID()
+ if err != nil {
+ return emptyValue, err
+ }
+ if sc.DecodeObjectIDAsHex {
+ str = oid.Hex()
+ } else {
+ byteArray := [12]byte(oid)
+ str = string(byteArray[:])
+ }
+ case bsontype.Symbol:
+ str, err = vr.ReadSymbol()
+ if err != nil {
+ return emptyValue, err
+ }
+ case bsontype.Binary:
+ data, subtype, err := vr.ReadBinary()
+ if err != nil {
+ return emptyValue, err
+ }
+ if subtype != bsontype.BinaryGeneric && subtype != bsontype.BinaryBinaryOld {
+ return emptyValue, decodeBinaryError{subtype: subtype, typeName: "string"}
+ }
+ str = string(data)
+ case bsontype.Null:
+ if err = vr.ReadNull(); err != nil {
+ return emptyValue, err
+ }
+ case bsontype.Undefined:
+ if err = vr.ReadUndefined(); err != nil {
+ return emptyValue, err
+ }
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a string type", vr.Type())
+ }
+
+ return reflect.ValueOf(str), nil
+}
+
+// DecodeValue is the ValueDecoder for string types.
+func (sc *StringCodec) DecodeValue(dctx DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Kind() != reflect.String {
+ return ValueDecoderError{Name: "StringDecodeValue", Kinds: []reflect.Kind{reflect.String}, Received: val}
+ }
+
+ elem, err := sc.decodeType(dctx, vr, val.Type())
+ if err != nil {
+ return err
+ }
+
+ val.SetString(elem.String())
+ return nil
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_codec.go
new file mode 100644
index 000000000..be3f2081e
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_codec.go
@@ -0,0 +1,664 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "sort"
+ "strings"
+ "sync"
+ "time"
+
+ "go.mongodb.org/mongo-driver/bson/bsonoptions"
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+)
+
+// DecodeError represents an error that occurs when unmarshalling BSON bytes into a native Go type.
+type DecodeError struct {
+ keys []string
+ wrapped error
+}
+
+// Unwrap returns the underlying error
+func (de *DecodeError) Unwrap() error {
+ return de.wrapped
+}
+
+// Error implements the error interface.
+func (de *DecodeError) Error() string {
+ // The keys are stored in reverse order because the de.keys slice is built up while propagating the error up the
+ // stack of BSON keys, so we call de.Keys(), which reverses them.
+ keyPath := strings.Join(de.Keys(), ".")
+ return fmt.Sprintf("error decoding key %s: %v", keyPath, de.wrapped)
+}
+
+// Keys returns the BSON key path that caused an error as a slice of strings. The keys in the slice are in top-down
+// order. For example, if the document being unmarshalled was {a: {b: {c: 1}}} and the value for c was supposed to be
+// a string, the keys slice will be ["a", "b", "c"].
+func (de *DecodeError) Keys() []string {
+ reversedKeys := make([]string, 0, len(de.keys))
+ for idx := len(de.keys) - 1; idx >= 0; idx-- {
+ reversedKeys = append(reversedKeys, de.keys[idx])
+ }
+
+ return reversedKeys
+}
+
+// Zeroer allows custom struct types to implement a report of zero
+// state. All struct types that don't implement Zeroer or where IsZero
+// returns false are considered to be not zero.
+type Zeroer interface {
+ IsZero() bool
+}
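+
+// For illustration, a struct type can report its own zero state like this
+// (a sketch; span is a hypothetical type):
+//
+//	type span struct{ start, end time.Time }
+//
+//	func (s span) IsZero() bool {
+//		return s.start.IsZero() && s.end.IsZero()
+//	}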
+
+// StructCodec is the Codec used for struct values.
+type StructCodec struct {
+ cache map[reflect.Type]*structDescription
+ l sync.RWMutex
+ parser StructTagParser
+ DecodeZeroStruct bool
+ DecodeDeepZeroInline bool
+ EncodeOmitDefaultStruct bool
+ AllowUnexportedFields bool
+ OverwriteDuplicatedInlinedFields bool
+}
+
+var _ ValueEncoder = &StructCodec{}
+var _ ValueDecoder = &StructCodec{}
+
+// NewStructCodec returns a StructCodec that uses p for struct tag parsing.
+func NewStructCodec(p StructTagParser, opts ...*bsonoptions.StructCodecOptions) (*StructCodec, error) {
+ if p == nil {
+ return nil, errors.New("a StructTagParser must be provided to NewStructCodec")
+ }
+
+ structOpt := bsonoptions.MergeStructCodecOptions(opts...)
+
+ codec := &StructCodec{
+ cache: make(map[reflect.Type]*structDescription),
+ parser: p,
+ }
+
+ if structOpt.DecodeZeroStruct != nil {
+ codec.DecodeZeroStruct = *structOpt.DecodeZeroStruct
+ }
+ if structOpt.DecodeDeepZeroInline != nil {
+ codec.DecodeDeepZeroInline = *structOpt.DecodeDeepZeroInline
+ }
+ if structOpt.EncodeOmitDefaultStruct != nil {
+ codec.EncodeOmitDefaultStruct = *structOpt.EncodeOmitDefaultStruct
+ }
+ if structOpt.OverwriteDuplicatedInlinedFields != nil {
+ codec.OverwriteDuplicatedInlinedFields = *structOpt.OverwriteDuplicatedInlinedFields
+ }
+ if structOpt.AllowUnexportedFields != nil {
+ codec.AllowUnexportedFields = *structOpt.AllowUnexportedFields
+ }
+
+ return codec, nil
+}
+
+// EncodeValue handles encoding generic struct types.
+func (sc *StructCodec) EncodeValue(r EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Kind() != reflect.Struct {
+ return ValueEncoderError{Name: "StructCodec.EncodeValue", Kinds: []reflect.Kind{reflect.Struct}, Received: val}
+ }
+
+ sd, err := sc.describeStruct(r.Registry, val.Type())
+ if err != nil {
+ return err
+ }
+
+ dw, err := vw.WriteDocument()
+ if err != nil {
+ return err
+ }
+ var rv reflect.Value
+ for _, desc := range sd.fl {
+ if desc.inline == nil {
+ rv = val.Field(desc.idx)
+ } else {
+ rv, err = fieldByIndexErr(val, desc.inline)
+ if err != nil {
+ continue
+ }
+ }
+
+ desc.encoder, rv, err = defaultValueEncoders.lookupElementEncoder(r, desc.encoder, rv)
+
+ if err != nil && err != errInvalidValue {
+ return err
+ }
+
+ if err == errInvalidValue {
+ if desc.omitEmpty {
+ continue
+ }
+ vw2, err := dw.WriteDocumentElement(desc.name)
+ if err != nil {
+ return err
+ }
+ err = vw2.WriteNull()
+ if err != nil {
+ return err
+ }
+ continue
+ }
+
+ if desc.encoder == nil {
+ return ErrNoEncoder{Type: rv.Type()}
+ }
+
+ encoder := desc.encoder
+
+ var isZero bool
+ rvInterface := rv.Interface()
+ if cz, ok := encoder.(CodecZeroer); ok {
+ isZero = cz.IsTypeZero(rvInterface)
+ } else if rv.Kind() == reflect.Interface {
+ // sc.isZero will not treat an interface rv as an interface, so we need to check for the zero interface separately.
+ isZero = rv.IsNil()
+ } else {
+ isZero = sc.isZero(rvInterface)
+ }
+ if desc.omitEmpty && isZero {
+ continue
+ }
+
+ vw2, err := dw.WriteDocumentElement(desc.name)
+ if err != nil {
+ return err
+ }
+
+ ectx := EncodeContext{Registry: r.Registry, MinSize: desc.minSize}
+ err = encoder.EncodeValue(ectx, vw2, rv)
+ if err != nil {
+ return err
+ }
+ }
+
+ if sd.inlineMap >= 0 {
+ rv := val.Field(sd.inlineMap)
+ collisionFn := func(key string) bool {
+ _, exists := sd.fm[key]
+ return exists
+ }
+
+ return defaultMapCodec.mapEncodeValue(r, dw, rv, collisionFn)
+ }
+
+ return dw.WriteDocumentEnd()
+}
+
+func newDecodeError(key string, original error) error {
+ de, ok := original.(*DecodeError)
+ if !ok {
+ return &DecodeError{
+ keys: []string{key},
+ wrapped: original,
+ }
+ }
+
+ de.keys = append(de.keys, key)
+ return de
+}
+
+// DecodeValue implements the Codec interface.
+// By default, map types in val will not be cleared. If a map has existing key/value pairs, it will be extended with the new ones from vr.
+// For slices, the decoder will set the length of the slice to zero and append all elements. The underlying array will not be cleared.
+func (sc *StructCodec) DecodeValue(r DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Kind() != reflect.Struct {
+ return ValueDecoderError{Name: "StructCodec.DecodeValue", Kinds: []reflect.Kind{reflect.Struct}, Received: val}
+ }
+
+ switch vrType := vr.Type(); vrType {
+ case bsontype.Type(0), bsontype.EmbeddedDocument:
+ case bsontype.Null:
+ if err := vr.ReadNull(); err != nil {
+ return err
+ }
+
+ val.Set(reflect.Zero(val.Type()))
+ return nil
+ case bsontype.Undefined:
+ if err := vr.ReadUndefined(); err != nil {
+ return err
+ }
+
+ val.Set(reflect.Zero(val.Type()))
+ return nil
+ default:
+ return fmt.Errorf("cannot decode %v into a %s", vrType, val.Type())
+ }
+
+ sd, err := sc.describeStruct(r.Registry, val.Type())
+ if err != nil {
+ return err
+ }
+
+ if sc.DecodeZeroStruct {
+ val.Set(reflect.Zero(val.Type()))
+ }
+ if sc.DecodeDeepZeroInline && sd.inline {
+ val.Set(deepZero(val.Type()))
+ }
+
+ var decoder ValueDecoder
+ var inlineMap reflect.Value
+ if sd.inlineMap >= 0 {
+ inlineMap = val.Field(sd.inlineMap)
+ decoder, err = r.LookupDecoder(inlineMap.Type().Elem())
+ if err != nil {
+ return err
+ }
+ }
+
+ dr, err := vr.ReadDocument()
+ if err != nil {
+ return err
+ }
+
+ for {
+ name, vr, err := dr.ReadElement()
+ if err == bsonrw.ErrEOD {
+ break
+ }
+ if err != nil {
+ return err
+ }
+
+ fd, exists := sd.fm[name]
+ if !exists {
+ // If the original name isn't found in the struct description, try
+ // again with the lowercased name. This can match when no BSON tag is
+ // specified, because describeStruct lowercases all field names by
+ // default.
+ fd, exists = sd.fm[strings.ToLower(name)]
+ }
+
+ if !exists {
+ if sd.inlineMap < 0 {
+ // The encoding/json package only errors on unknown fields when a
+ // flag is set; matching that default behavior seems appropriate
+ // for the struct codec, so skip this element.
+ err = vr.Skip()
+ if err != nil {
+ return err
+ }
+ continue
+ }
+
+ if inlineMap.IsNil() {
+ inlineMap.Set(reflect.MakeMap(inlineMap.Type()))
+ }
+
+ elem := reflect.New(inlineMap.Type().Elem()).Elem()
+ r.Ancestor = inlineMap.Type()
+ err = decoder.DecodeValue(r, vr, elem)
+ if err != nil {
+ return err
+ }
+ inlineMap.SetMapIndex(reflect.ValueOf(name), elem)
+ continue
+ }
+
+ var field reflect.Value
+ if fd.inline == nil {
+ field = val.Field(fd.idx)
+ } else {
+ field, err = getInlineField(val, fd.inline)
+ if err != nil {
+ return err
+ }
+ }
+
+ if !field.CanSet() { // Being settable is a superset of being addressable.
+ innerErr := fmt.Errorf("field %v is not settable", field)
+ return newDecodeError(fd.name, innerErr)
+ }
+ if field.Kind() == reflect.Ptr && field.IsNil() {
+ field.Set(reflect.New(field.Type().Elem()))
+ }
+ field = field.Addr()
+
+ dctx := DecodeContext{Registry: r.Registry, Truncate: fd.truncate || r.Truncate}
+ if fd.decoder == nil {
+ return newDecodeError(fd.name, ErrNoDecoder{Type: field.Elem().Type()})
+ }
+
+ err = fd.decoder.DecodeValue(dctx, vr, field.Elem())
+ if err != nil {
+ return newDecodeError(fd.name, err)
+ }
+ }
+
+ return nil
+}
+
+func (sc *StructCodec) isZero(i interface{}) bool {
+ v := reflect.ValueOf(i)
+
+ // check the value validity
+ if !v.IsValid() {
+ return true
+ }
+
+ if z, ok := v.Interface().(Zeroer); ok && (v.Kind() != reflect.Ptr || !v.IsNil()) {
+ return z.IsZero()
+ }
+
+ switch v.Kind() {
+ case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
+ return v.Len() == 0
+ case reflect.Bool:
+ return !v.Bool()
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return v.Int() == 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return v.Uint() == 0
+ case reflect.Float32, reflect.Float64:
+ return v.Float() == 0
+ case reflect.Interface, reflect.Ptr:
+ return v.IsNil()
+ case reflect.Struct:
+ if sc.EncodeOmitDefaultStruct {
+ vt := v.Type()
+ if vt == tTime {
+ return v.Interface().(time.Time).IsZero()
+ }
+ for i := 0; i < v.NumField(); i++ {
+ if vt.Field(i).PkgPath != "" && !vt.Field(i).Anonymous {
+ continue // Private field
+ }
+ fld := v.Field(i)
+ if !sc.isZero(fld.Interface()) {
+ return false
+ }
+ }
+ return true
+ }
+ }
+
+ return false
+}
+
+type structDescription struct {
+ fm map[string]fieldDescription
+ fl []fieldDescription
+ inlineMap int
+ inline bool
+}
+
+type fieldDescription struct {
+ name string // BSON key name
+ fieldName string // struct field name
+ idx int
+ omitEmpty bool
+ minSize bool
+ truncate bool
+ inline []int
+ encoder ValueEncoder
+ decoder ValueDecoder
+}
+
+type byIndex []fieldDescription
+
+func (bi byIndex) Len() int { return len(bi) }
+
+func (bi byIndex) Swap(i, j int) { bi[i], bi[j] = bi[j], bi[i] }
+
+func (bi byIndex) Less(i, j int) bool {
+ // If a field is inlined, its index in the top level struct is stored at inline[0]
+ iIdx, jIdx := bi[i].idx, bi[j].idx
+ if len(bi[i].inline) > 0 {
+ iIdx = bi[i].inline[0]
+ }
+ if len(bi[j].inline) > 0 {
+ jIdx = bi[j].inline[0]
+ }
+ if iIdx != jIdx {
+ return iIdx < jIdx
+ }
+ for k, biik := range bi[i].inline {
+ if k >= len(bi[j].inline) {
+ return false
+ }
+ if biik != bi[j].inline[k] {
+ return biik < bi[j].inline[k]
+ }
+ }
+ return len(bi[i].inline) < len(bi[j].inline)
+}
+
+func (sc *StructCodec) describeStruct(r *Registry, t reflect.Type) (*structDescription, error) {
+ // We need to analyze the struct, including getting the tags, collecting
+ // information about inlining, and creating a map from field name to field
+ // description.
+ sc.l.RLock()
+ ds, exists := sc.cache[t]
+ sc.l.RUnlock()
+ if exists {
+ return ds, nil
+ }
+
+ numFields := t.NumField()
+ sd := &structDescription{
+ fm: make(map[string]fieldDescription, numFields),
+ fl: make([]fieldDescription, 0, numFields),
+ inlineMap: -1,
+ }
+
+ var fields []fieldDescription
+ for i := 0; i < numFields; i++ {
+ sf := t.Field(i)
+ if sf.PkgPath != "" && (!sc.AllowUnexportedFields || !sf.Anonymous) {
+ // The field is unexported; ignore it unless it is anonymous and unexported fields are allowed.
+ continue
+ }
+
+ sfType := sf.Type
+ encoder, err := r.LookupEncoder(sfType)
+ if err != nil {
+ encoder = nil
+ }
+ decoder, err := r.LookupDecoder(sfType)
+ if err != nil {
+ decoder = nil
+ }
+
+ description := fieldDescription{
+ fieldName: sf.Name,
+ idx: i,
+ encoder: encoder,
+ decoder: decoder,
+ }
+
+ stags, err := sc.parser.ParseStructTags(sf)
+ if err != nil {
+ return nil, err
+ }
+ if stags.Skip {
+ continue
+ }
+ description.name = stags.Name
+ description.omitEmpty = stags.OmitEmpty
+ description.minSize = stags.MinSize
+ description.truncate = stags.Truncate
+
+ if stags.Inline {
+ sd.inline = true
+ switch sfType.Kind() {
+ case reflect.Map:
+ if sd.inlineMap >= 0 {
+ return nil, errors.New("(struct " + t.String() + ") multiple inline maps")
+ }
+ if sfType.Key() != tString {
+ return nil, errors.New("(struct " + t.String() + ") inline map must have a string keys")
+ }
+ sd.inlineMap = description.idx
+ case reflect.Ptr:
+ sfType = sfType.Elem()
+ if sfType.Kind() != reflect.Struct {
+ return nil, fmt.Errorf("(struct %s) inline fields must be a struct, a struct pointer, or a map", t.String())
+ }
+ fallthrough
+ case reflect.Struct:
+ inlinesf, err := sc.describeStruct(r, sfType)
+ if err != nil {
+ return nil, err
+ }
+ for _, fd := range inlinesf.fl {
+ if fd.inline == nil {
+ fd.inline = []int{i, fd.idx}
+ } else {
+ fd.inline = append([]int{i}, fd.inline...)
+ }
+ fields = append(fields, fd)
+
+ }
+ default:
+ return nil, fmt.Errorf("(struct %s) inline fields must be a struct, a struct pointer, or a map", t.String())
+ }
+ continue
+ }
+ fields = append(fields, description)
+ }
+
+ // Sort fieldDescriptions by name and use dominance rules to determine which should be added for each name
+ sort.Slice(fields, func(i, j int) bool {
+ x := fields
+ // Sort fields by name, breaking ties with depth, then
+ // breaking ties with index sequence.
+ if x[i].name != x[j].name {
+ return x[i].name < x[j].name
+ }
+ if len(x[i].inline) != len(x[j].inline) {
+ return len(x[i].inline) < len(x[j].inline)
+ }
+ return byIndex(x).Less(i, j)
+ })
+
+ for advance, i := 0, 0; i < len(fields); i += advance {
+ // One iteration per name.
+ // Find the sequence of fields with the name of this first field.
+ fi := fields[i]
+ name := fi.name
+ for advance = 1; i+advance < len(fields); advance++ {
+ fj := fields[i+advance]
+ if fj.name != name {
+ break
+ }
+ }
+ if advance == 1 { // Only one field with this name
+ sd.fl = append(sd.fl, fi)
+ sd.fm[name] = fi
+ continue
+ }
+ dominant, ok := dominantField(fields[i : i+advance])
+ if !ok || !sc.OverwriteDuplicatedInlinedFields {
+ return nil, fmt.Errorf("struct %s has duplicated key %s", t.String(), name)
+ }
+ sd.fl = append(sd.fl, dominant)
+ sd.fm[name] = dominant
+ }
+
+ sort.Sort(byIndex(sd.fl))
+
+ sc.l.Lock()
+ sc.cache[t] = sd
+ sc.l.Unlock()
+
+ return sd, nil
+}
+
+// dominantField looks through the fields, all of which are known to
+// have the same name, to find the single field that dominates the
+// others using Go's inlining rules. If there are multiple top-level
+// fields, the boolean will be false: this condition is an error in Go,
+// so we skip all the fields.
+func dominantField(fields []fieldDescription) (fieldDescription, bool) {
+ // The fields are sorted in increasing index-length order, so the first
+ // field is the dominant one. We need only check for the error case:
+ // two fields at the same top level.
+ if len(fields) > 1 &&
+ len(fields[0].inline) == len(fields[1].inline) {
+ return fieldDescription{}, false
+ }
+ return fields[0], true
+}
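+
+// Illustrative example (not part of the upstream source): given
+//
+//    type Inner struct{ Name string }
+//    type Outer struct {
+//        Inner `bson:",inline"`
+//        Name  string
+//    }
+//
+// both fields map to the key "name". Outer.Name has the shorter index and
+// dominates, but only when OverwriteDuplicatedInlinedFields is true;
+// otherwise describeStruct returns a duplicated-key error.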
+
+func fieldByIndexErr(v reflect.Value, index []int) (result reflect.Value, err error) {
+ defer func() {
+ if recovered := recover(); recovered != nil {
+ switch r := recovered.(type) {
+ case string:
+ err = fmt.Errorf("%s", r)
+ case error:
+ err = r
+ }
+ }
+ }()
+
+ result = v.FieldByIndex(index)
+ return
+}
+
+func getInlineField(val reflect.Value, index []int) (reflect.Value, error) {
+ field, err := fieldByIndexErr(val, index)
+ if err == nil {
+ return field, nil
+ }
+
+ // If the parent of this element doesn't exist, allocate it, fixing its own parents first if needed.
+ inlineParent := index[:len(index)-1]
+ var fParent reflect.Value
+ if fParent, err = fieldByIndexErr(val, inlineParent); err != nil {
+ fParent, err = getInlineField(val, inlineParent)
+ if err != nil {
+ return fParent, err
+ }
+ }
+ fParent.Set(reflect.New(fParent.Type().Elem()))
+
+ return fieldByIndexErr(val, index)
+}
+
+// deepZero returns a zero value of type st, with struct pointer fields recursively set to pointers to zeroed values rather than nil.
+func deepZero(st reflect.Type) (result reflect.Value) {
+ result = reflect.Indirect(reflect.New(st))
+
+ if result.Kind() == reflect.Struct {
+ for i := 0; i < result.NumField(); i++ {
+ if f := result.Field(i); f.Kind() == reflect.Ptr {
+ if f.CanInterface() {
+ if ft := reflect.TypeOf(f.Interface()); ft.Elem().Kind() == reflect.Struct {
+ result.Field(i).Set(recursivePointerTo(deepZero(ft.Elem())))
+ }
+ }
+ }
+ }
+ }
+
+ return
+}
+
+// recursivePointerTo calls reflect.New(v.Type()), recursively doing the same for any struct pointer fields inside v.
+func recursivePointerTo(v reflect.Value) reflect.Value {
+ v = reflect.Indirect(v)
+ result := reflect.New(v.Type())
+ if v.Kind() == reflect.Struct {
+ for i := 0; i < v.NumField(); i++ {
+ if f := v.Field(i); f.Kind() == reflect.Ptr {
+ if f.Elem().Kind() == reflect.Struct {
+ result.Elem().Field(i).Set(recursivePointerTo(f))
+ }
+ }
+ }
+ }
+
+ return result
+}
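+
+// Illustrative example (not part of the upstream source): for
+//
+//    type B struct{ N int }
+//    type A struct{ B *B }
+//
+// deepZero(reflect.TypeOf(A{})) yields an A value whose B field points at
+// a zeroed B instead of being nil.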
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_tag_parser.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_tag_parser.go
new file mode 100644
index 000000000..62708c5c7
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/struct_tag_parser.go
@@ -0,0 +1,139 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import (
+ "reflect"
+ "strings"
+)
+
+// StructTagParser returns the struct tags for a given struct field.
+type StructTagParser interface {
+ ParseStructTags(reflect.StructField) (StructTags, error)
+}
+
+// StructTagParserFunc is an adapter that allows a generic function to be used
+// as a StructTagParser.
+type StructTagParserFunc func(reflect.StructField) (StructTags, error)
+
+// ParseStructTags implements the StructTagParser interface.
+func (stpf StructTagParserFunc) ParseStructTags(sf reflect.StructField) (StructTags, error) {
+ return stpf(sf)
+}
+
+// StructTags represents the struct tag fields that the StructCodec uses during
+// the encoding and decoding process.
+//
+// In the case of a struct, the lowercased field name is used as the key for each exported
+// field, but this behavior may be changed using a struct tag. The tag may also contain flags
+// to adjust the marshaling behavior for the field.
+//
+// The properties are defined below:
+//
+// OmitEmpty Only include the field if it's not set to the zero value for the type or to
+// empty slices or maps.
+//
+// MinSize Marshal an integer of a type larger than 32 bits as an int32, if that's
+// feasible while preserving the numeric value.
+//
+// Truncate When unmarshaling a BSON double, it is permitted to lose precision to fit within
+// a float32.
+//
+// Inline Inline the field, which must be a struct or a map, causing all of its fields
+// or keys to be processed as if they were part of the outer struct. For maps,
+// keys must not conflict with the bson keys of other struct fields.
+//
+// Skip This struct field should be skipped. This is usually denoted by parsing a "-"
+// for the name.
+//
+// TODO(skriptble): Add tags for undefined as nil and for null as nil.
+type StructTags struct {
+ Name string
+ OmitEmpty bool
+ MinSize bool
+ Truncate bool
+ Inline bool
+ Skip bool
+}
+
+// DefaultStructTagParser is the StructTagParser used by the StructCodec by default.
+// It will handle the bson struct tag. See the documentation for StructTags to see
+// what each of the returned fields means.
+//
+// If there is no name in the struct tag fields, the struct field name is lowercased.
+// The tag formats accepted are:
+//
+// "[<key>][,<flag1>[,<flag2>]]"
+//
+// `(...) bson:"[<key>][,<flag1>[,<flag2>]]" (...)`
+//
+// An example:
+//
+// type T struct {
+// A bool
+// B int "myb"
+// C string "myc,omitempty"
+// D string `bson:",omitempty" json:"jsonkey"`
+// E int64 ",minsize"
+// F int64 "myf,omitempty,minsize"
+// }
+//
+// A struct tag either consisting entirely of '-' or with a bson key with a
+// value consisting entirely of '-' will return a StructTags with Skip true and
+// the remaining fields will be their default values.
+var DefaultStructTagParser StructTagParserFunc = func(sf reflect.StructField) (StructTags, error) {
+ key := strings.ToLower(sf.Name)
+ tag, ok := sf.Tag.Lookup("bson")
+ if !ok && !strings.Contains(string(sf.Tag), ":") && len(sf.Tag) > 0 {
+ tag = string(sf.Tag)
+ }
+ return parseTags(key, tag)
+}
+
+func parseTags(key string, tag string) (StructTags, error) {
+ var st StructTags
+ if tag == "-" {
+ st.Skip = true
+ return st, nil
+ }
+
+ for idx, str := range strings.Split(tag, ",") {
+ if idx == 0 && str != "" {
+ key = str
+ }
+ switch str {
+ case "omitempty":
+ st.OmitEmpty = true
+ case "minsize":
+ st.MinSize = true
+ case "truncate":
+ st.Truncate = true
+ case "inline":
+ st.Inline = true
+ }
+ }
+
+ st.Name = key
+
+ return st, nil
+}
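+
+// Illustrative examples (not part of the upstream source) of how
+// parseTags interprets tag strings:
+//
+//    parseTags("myfield", "")                  // StructTags{Name: "myfield"}
+//    parseTags("myfield", "key,omitempty")     // StructTags{Name: "key", OmitEmpty: true}
+//    parseTags("myfield", ",minsize,truncate") // StructTags{Name: "myfield", MinSize: true, Truncate: true}
+//    parseTags("myfield", "-")                 // StructTags{Skip: true}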
+
+// JSONFallbackStructTagParser has the same behavior as DefaultStructTagParser
+// but will also fall back to parsing the json tag on a field where the
+// bson tag isn't available.
+var JSONFallbackStructTagParser StructTagParserFunc = func(sf reflect.StructField) (StructTags, error) {
+ key := strings.ToLower(sf.Name)
+ tag, ok := sf.Tag.Lookup("bson")
+ if !ok {
+ tag, ok = sf.Tag.Lookup("json")
+ }
+ if !ok && !strings.Contains(string(sf.Tag), ":") && len(sf.Tag) > 0 {
+ tag = string(sf.Tag)
+ }
+
+ return parseTags(key, tag)
+}
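+
+// Illustrative example (not part of the upstream source): for
+//
+//    type T struct {
+//        A string `json:"aa,omitempty"`
+//    }
+//
+// field A has no bson tag, so JSONFallbackStructTagParser falls back to the
+// json tag and yields StructTags{Name: "aa", OmitEmpty: true}.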
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/time_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/time_codec.go
new file mode 100644
index 000000000..ec7e30f72
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/time_codec.go
@@ -0,0 +1,127 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import (
+ "fmt"
+ "reflect"
+ "time"
+
+ "go.mongodb.org/mongo-driver/bson/bsonoptions"
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+const (
+ timeFormatString = "2006-01-02T15:04:05.999Z07:00"
+)
+
+// TimeCodec is the Codec used for time.Time values.
+type TimeCodec struct {
+ UseLocalTimeZone bool
+}
+
+var (
+ defaultTimeCodec = NewTimeCodec()
+
+ _ ValueCodec = defaultTimeCodec
+ _ typeDecoder = defaultTimeCodec
+)
+
+// NewTimeCodec returns a TimeCodec with options opts.
+func NewTimeCodec(opts ...*bsonoptions.TimeCodecOptions) *TimeCodec {
+ timeOpt := bsonoptions.MergeTimeCodecOptions(opts...)
+
+ codec := TimeCodec{}
+ if timeOpt.UseLocalTimeZone != nil {
+ codec.UseLocalTimeZone = *timeOpt.UseLocalTimeZone
+ }
+ return &codec
+}
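+
+// Illustrative sketch (not part of the upstream source): a TimeCodec that
+// keeps decoded times in their local time zone instead of converting to UTC:
+//
+//    tc := NewTimeCodec(bsonoptions.TimeCodec().SetUseLocalTimeZone(true))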
+
+func (tc *TimeCodec) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ if t != tTime {
+ return emptyValue, ValueDecoderError{
+ Name: "TimeDecodeValue",
+ Types: []reflect.Type{tTime},
+ Received: reflect.Zero(t),
+ }
+ }
+
+ var timeVal time.Time
+ switch vrType := vr.Type(); vrType {
+ case bsontype.DateTime:
+ dt, err := vr.ReadDateTime()
+ if err != nil {
+ return emptyValue, err
+ }
+ timeVal = time.Unix(dt/1000, dt%1000*1000000)
+ case bsontype.String:
+ // assume strings are in the timeFormatString layout
+ timeStr, err := vr.ReadString()
+ if err != nil {
+ return emptyValue, err
+ }
+ timeVal, err = time.Parse(timeFormatString, timeStr)
+ if err != nil {
+ return emptyValue, err
+ }
+ case bsontype.Int64:
+ i64, err := vr.ReadInt64()
+ if err != nil {
+ return emptyValue, err
+ }
+ timeVal = time.Unix(i64/1000, i64%1000*1000000)
+ case bsontype.Timestamp:
+ t, _, err := vr.ReadTimestamp()
+ if err != nil {
+ return emptyValue, err
+ }
+ timeVal = time.Unix(int64(t), 0)
+ case bsontype.Null:
+ if err := vr.ReadNull(); err != nil {
+ return emptyValue, err
+ }
+ case bsontype.Undefined:
+ if err := vr.ReadUndefined(); err != nil {
+ return emptyValue, err
+ }
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into a time.Time", vrType)
+ }
+
+ if !tc.UseLocalTimeZone {
+ timeVal = timeVal.UTC()
+ }
+ return reflect.ValueOf(timeVal), nil
+}
+
+// DecodeValue is the ValueDecoderFunc for time.Time.
+func (tc *TimeCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tTime {
+ return ValueDecoderError{Name: "TimeDecodeValue", Types: []reflect.Type{tTime}, Received: val}
+ }
+
+ elem, err := tc.decodeType(dc, vr, tTime)
+ if err != nil {
+ return err
+ }
+
+ val.Set(elem)
+ return nil
+}
+
+// EncodeValue is the ValueEncoderFunc for time.Time.
+func (tc *TimeCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tTime {
+ return ValueEncoderError{Name: "TimeEncodeValue", Types: []reflect.Type{tTime}, Received: val}
+ }
+ tt := val.Interface().(time.Time)
+ dt := primitive.NewDateTimeFromTime(tt)
+ return vw.WriteDateTime(int64(dt))
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/types.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/types.go
new file mode 100644
index 000000000..07f4b70e6
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/types.go
@@ -0,0 +1,57 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import (
+ "encoding/json"
+ "net/url"
+ "reflect"
+ "time"
+
+ "go.mongodb.org/mongo-driver/bson/primitive"
+ "go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
+)
+
+var tBool = reflect.TypeOf(false)
+var tFloat64 = reflect.TypeOf(float64(0))
+var tInt32 = reflect.TypeOf(int32(0))
+var tInt64 = reflect.TypeOf(int64(0))
+var tString = reflect.TypeOf("")
+var tTime = reflect.TypeOf(time.Time{})
+
+var tEmpty = reflect.TypeOf((*interface{})(nil)).Elem()
+var tByteSlice = reflect.TypeOf([]byte(nil))
+var tByte = reflect.TypeOf(byte(0x00))
+var tURL = reflect.TypeOf(url.URL{})
+var tJSONNumber = reflect.TypeOf(json.Number(""))
+
+var tValueMarshaler = reflect.TypeOf((*ValueMarshaler)(nil)).Elem()
+var tValueUnmarshaler = reflect.TypeOf((*ValueUnmarshaler)(nil)).Elem()
+var tMarshaler = reflect.TypeOf((*Marshaler)(nil)).Elem()
+var tUnmarshaler = reflect.TypeOf((*Unmarshaler)(nil)).Elem()
+var tProxy = reflect.TypeOf((*Proxy)(nil)).Elem()
+
+var tBinary = reflect.TypeOf(primitive.Binary{})
+var tUndefined = reflect.TypeOf(primitive.Undefined{})
+var tOID = reflect.TypeOf(primitive.ObjectID{})
+var tDateTime = reflect.TypeOf(primitive.DateTime(0))
+var tNull = reflect.TypeOf(primitive.Null{})
+var tRegex = reflect.TypeOf(primitive.Regex{})
+var tCodeWithScope = reflect.TypeOf(primitive.CodeWithScope{})
+var tDBPointer = reflect.TypeOf(primitive.DBPointer{})
+var tJavaScript = reflect.TypeOf(primitive.JavaScript(""))
+var tSymbol = reflect.TypeOf(primitive.Symbol(""))
+var tTimestamp = reflect.TypeOf(primitive.Timestamp{})
+var tDecimal = reflect.TypeOf(primitive.Decimal128{})
+var tMinKey = reflect.TypeOf(primitive.MinKey{})
+var tMaxKey = reflect.TypeOf(primitive.MaxKey{})
+var tD = reflect.TypeOf(primitive.D{})
+var tA = reflect.TypeOf(primitive.A{})
+var tE = reflect.TypeOf(primitive.E{})
+
+var tCoreDocument = reflect.TypeOf(bsoncore.Document{})
+var tCoreArray = reflect.TypeOf(bsoncore.Array{})
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/uint_codec.go b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/uint_codec.go
new file mode 100644
index 000000000..0b21ce999
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsoncodec/uint_codec.go
@@ -0,0 +1,173 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncodec
+
+import (
+ "fmt"
+ "math"
+ "reflect"
+
+ "go.mongodb.org/mongo-driver/bson/bsonoptions"
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+)
+
+// UIntCodec is the Codec used for uint values.
+type UIntCodec struct {
+ EncodeToMinSize bool
+}
+
+var (
+ defaultUIntCodec = NewUIntCodec()
+
+ _ ValueCodec = defaultUIntCodec
+ _ typeDecoder = defaultUIntCodec
+)
+
+// NewUIntCodec returns a UIntCodec with options opts.
+func NewUIntCodec(opts ...*bsonoptions.UIntCodecOptions) *UIntCodec {
+ uintOpt := bsonoptions.MergeUIntCodecOptions(opts...)
+
+ codec := UIntCodec{}
+ if uintOpt.EncodeToMinSize != nil {
+ codec.EncodeToMinSize = *uintOpt.EncodeToMinSize
+ }
+ return &codec
+}
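+
+// Illustrative sketch (not part of the upstream source): a UIntCodec that
+// writes uint, uint32, and smaller values as int32 whenever they fit:
+//
+//    uic := NewUIntCodec(bsonoptions.UIntCodec().SetEncodeToMinSize(true))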
+
+// EncodeValue is the ValueEncoder for uint types.
+func (uic *UIntCodec) EncodeValue(ec EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ switch val.Kind() {
+ case reflect.Uint8, reflect.Uint16:
+ return vw.WriteInt32(int32(val.Uint()))
+ case reflect.Uint, reflect.Uint32, reflect.Uint64:
+ u64 := val.Uint()
+
+ // If ec.MinSize or EncodeToMinSize is true for a non-uint64 value, we should write val as an int32
+ useMinSize := ec.MinSize || (uic.EncodeToMinSize && val.Kind() != reflect.Uint64)
+
+ if u64 <= math.MaxInt32 && useMinSize {
+ return vw.WriteInt32(int32(u64))
+ }
+ if u64 > math.MaxInt64 {
+ return fmt.Errorf("%d overflows int64", u64)
+ }
+ return vw.WriteInt64(int64(u64))
+ }
+
+ return ValueEncoderError{
+ Name: "UintEncodeValue",
+ Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint},
+ Received: val,
+ }
+}
+
+func (uic *UIntCodec) decodeType(dc DecodeContext, vr bsonrw.ValueReader, t reflect.Type) (reflect.Value, error) {
+ var i64 int64
+ var err error
+ switch vrType := vr.Type(); vrType {
+ case bsontype.Int32:
+ i32, err := vr.ReadInt32()
+ if err != nil {
+ return emptyValue, err
+ }
+ i64 = int64(i32)
+ case bsontype.Int64:
+ i64, err = vr.ReadInt64()
+ if err != nil {
+ return emptyValue, err
+ }
+ case bsontype.Double:
+ f64, err := vr.ReadDouble()
+ if err != nil {
+ return emptyValue, err
+ }
+ if !dc.Truncate && math.Floor(f64) != f64 {
+ return emptyValue, errCannotTruncate
+ }
+ if f64 > float64(math.MaxInt64) {
+ return emptyValue, fmt.Errorf("%g overflows int64", f64)
+ }
+ i64 = int64(f64)
+ case bsontype.Boolean:
+ b, err := vr.ReadBoolean()
+ if err != nil {
+ return emptyValue, err
+ }
+ if b {
+ i64 = 1
+ }
+ case bsontype.Null:
+ if err = vr.ReadNull(); err != nil {
+ return emptyValue, err
+ }
+ case bsontype.Undefined:
+ if err = vr.ReadUndefined(); err != nil {
+ return emptyValue, err
+ }
+ default:
+ return emptyValue, fmt.Errorf("cannot decode %v into an integer type", vrType)
+ }
+
+ switch t.Kind() {
+ case reflect.Uint8:
+ if i64 < 0 || i64 > math.MaxUint8 {
+ return emptyValue, fmt.Errorf("%d overflows uint8", i64)
+ }
+
+ return reflect.ValueOf(uint8(i64)), nil
+ case reflect.Uint16:
+ if i64 < 0 || i64 > math.MaxUint16 {
+ return emptyValue, fmt.Errorf("%d overflows uint16", i64)
+ }
+
+ return reflect.ValueOf(uint16(i64)), nil
+ case reflect.Uint32:
+ if i64 < 0 || i64 > math.MaxUint32 {
+ return emptyValue, fmt.Errorf("%d overflows uint32", i64)
+ }
+
+ return reflect.ValueOf(uint32(i64)), nil
+ case reflect.Uint64:
+ if i64 < 0 {
+ return emptyValue, fmt.Errorf("%d overflows uint64", i64)
+ }
+
+ return reflect.ValueOf(uint64(i64)), nil
+ case reflect.Uint:
+ if i64 < 0 || int64(uint(i64)) != i64 { // Can we fit this inside of an uint
+ return emptyValue, fmt.Errorf("%d overflows uint", i64)
+ }
+
+ return reflect.ValueOf(uint(i64)), nil
+ default:
+ return emptyValue, ValueDecoderError{
+ Name: "UintDecodeValue",
+ Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint},
+ Received: reflect.Zero(t),
+ }
+ }
+}
+
+// DecodeValue is the ValueDecoder for uint types.
+func (uic *UIntCodec) DecodeValue(dc DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() {
+ return ValueDecoderError{
+ Name: "UintDecodeValue",
+ Kinds: []reflect.Kind{reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint},
+ Received: val,
+ }
+ }
+
+ elem, err := uic.decodeType(dc, vr, val.Type())
+ if err != nil {
+ return err
+ }
+
+ val.SetUint(elem.Uint())
+ return nil
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/byte_slice_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/byte_slice_codec_options.go
new file mode 100644
index 000000000..b1256a4dc
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/byte_slice_codec_options.go
@@ -0,0 +1,38 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonoptions
+
+// ByteSliceCodecOptions represents all possible options for byte slice encoding and decoding.
+type ByteSliceCodecOptions struct {
+ EncodeNilAsEmpty *bool // Specifies if a nil byte slice should encode as an empty binary instead of null. Defaults to false.
+}
+
+// ByteSliceCodec creates a new *ByteSliceCodecOptions
+func ByteSliceCodec() *ByteSliceCodecOptions {
+ return &ByteSliceCodecOptions{}
+}
+
+// SetEncodeNilAsEmpty specifies if a nil byte slice should encode as an empty binary instead of null. Defaults to false.
+func (bs *ByteSliceCodecOptions) SetEncodeNilAsEmpty(b bool) *ByteSliceCodecOptions {
+ bs.EncodeNilAsEmpty = &b
+ return bs
+}
+
+// MergeByteSliceCodecOptions combines the given *ByteSliceCodecOptions into a single *ByteSliceCodecOptions in a last one wins fashion.
+func MergeByteSliceCodecOptions(opts ...*ByteSliceCodecOptions) *ByteSliceCodecOptions {
+ bs := ByteSliceCodec()
+ for _, opt := range opts {
+ if opt == nil {
+ continue
+ }
+ if opt.EncodeNilAsEmpty != nil {
+ bs.EncodeNilAsEmpty = opt.EncodeNilAsEmpty
+ }
+ }
+
+ return bs
+}
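+
+// Illustrative example (not part of the upstream source) of the
+// last-one-wins semantics shared by the Merge* helpers in this package:
+//
+//    a := ByteSliceCodec().SetEncodeNilAsEmpty(true)
+//    b := ByteSliceCodec().SetEncodeNilAsEmpty(false)
+//    merged := MergeByteSliceCodecOptions(a, b)
+//    // *merged.EncodeNilAsEmpty == false, because b was passed last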
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/doc.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/doc.go
new file mode 100644
index 000000000..c40973c8d
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/doc.go
@@ -0,0 +1,8 @@
+// Copyright (C) MongoDB, Inc. 2022-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+// Package bsonoptions defines the optional configurations for the BSON codecs.
+package bsonoptions
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/empty_interface_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/empty_interface_codec_options.go
new file mode 100644
index 000000000..6caaa000e
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/empty_interface_codec_options.go
@@ -0,0 +1,38 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonoptions
+
+// EmptyInterfaceCodecOptions represents all possible options for interface{} encoding and decoding.
+type EmptyInterfaceCodecOptions struct {
+ DecodeBinaryAsSlice *bool // Specifies if binary values with the Old or Generic subtype should decode to a []byte instead of primitive.Binary. Defaults to false.
+}
+
+// EmptyInterfaceCodec creates a new *EmptyInterfaceCodecOptions
+func EmptyInterfaceCodec() *EmptyInterfaceCodecOptions {
+ return &EmptyInterfaceCodecOptions{}
+}
+
+// SetDecodeBinaryAsSlice specifies if binary values with the Old or Generic subtype should decode to a []byte instead of primitive.Binary. Defaults to false.
+func (e *EmptyInterfaceCodecOptions) SetDecodeBinaryAsSlice(b bool) *EmptyInterfaceCodecOptions {
+ e.DecodeBinaryAsSlice = &b
+ return e
+}
+
+// MergeEmptyInterfaceCodecOptions combines the given *EmptyInterfaceCodecOptions into a single *EmptyInterfaceCodecOptions in a last one wins fashion.
+func MergeEmptyInterfaceCodecOptions(opts ...*EmptyInterfaceCodecOptions) *EmptyInterfaceCodecOptions {
+ e := EmptyInterfaceCodec()
+ for _, opt := range opts {
+ if opt == nil {
+ continue
+ }
+ if opt.DecodeBinaryAsSlice != nil {
+ e.DecodeBinaryAsSlice = opt.DecodeBinaryAsSlice
+ }
+ }
+
+ return e
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/map_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/map_codec_options.go
new file mode 100644
index 000000000..7a6a880b8
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/map_codec_options.go
@@ -0,0 +1,67 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonoptions
+
+// MapCodecOptions represents all possible options for map encoding and decoding.
+type MapCodecOptions struct {
+ DecodeZerosMap *bool // Specifies if the map should be zeroed before decoding into it. Defaults to false.
+ EncodeNilAsEmpty *bool // Specifies if a nil map should encode as an empty document instead of null. Defaults to false.
+ // Specifies how keys should be handled. If false, the behavior matches encoding/json, where the encoding key type must
+ // either be a string, an integer type, or implement bsoncodec.KeyMarshaler and the decoding key type must either be a
+ // string, an integer type, or implement bsoncodec.KeyUnmarshaler. If true, keys are encoded with fmt.Sprint() and the
+ // encoding key type must be a string, an integer type, or a float. If true, the use of Stringer will override
+ // TextMarshaler/TextUnmarshaler. Defaults to false.
+ EncodeKeysWithStringer *bool
+}
+
+// MapCodec creates a new *MapCodecOptions
+func MapCodec() *MapCodecOptions {
+ return &MapCodecOptions{}
+}
+
+// SetDecodeZerosMap specifies if the map should be zeroed before decoding into it. Defaults to false.
+func (t *MapCodecOptions) SetDecodeZerosMap(b bool) *MapCodecOptions {
+ t.DecodeZerosMap = &b
+ return t
+}
+
+// SetEncodeNilAsEmpty specifies if a nil map should encode as an empty document instead of null. Defaults to false.
+func (t *MapCodecOptions) SetEncodeNilAsEmpty(b bool) *MapCodecOptions {
+ t.EncodeNilAsEmpty = &b
+ return t
+}
+
+// SetEncodeKeysWithStringer specifies how keys should be handled. If false, the behavior matches encoding/json, where the
+// encoding key type must either be a string, an integer type, or implement bsoncodec.KeyMarshaler and the decoding key
+// type must either be a string, an integer type, or implement bsoncodec.KeyUnmarshaler. If true, keys are encoded with
+// fmt.Sprint() and the encoding key type must be a string, an integer type, or a float. If true, the use of Stringer
+// will override TextMarshaler/TextUnmarshaler. Defaults to false.
+func (t *MapCodecOptions) SetEncodeKeysWithStringer(b bool) *MapCodecOptions {
+ t.EncodeKeysWithStringer = &b
+ return t
+}
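+
+// Illustrative sketch (not part of the upstream source): with a key type
+// that implements fmt.Stringer,
+//
+//    type Level int
+//
+//    func (l Level) String() string { return fmt.Sprintf("level-%d", l) }
+//
+// setting SetEncodeKeysWithStringer(true) makes map[Level]bool{2: true}
+// encode with the document key "level-2" rather than "2".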
+
+// MergeMapCodecOptions combines the given *MapCodecOptions into a single *MapCodecOptions in a last one wins fashion.
+func MergeMapCodecOptions(opts ...*MapCodecOptions) *MapCodecOptions {
+ s := MapCodec()
+ for _, opt := range opts {
+ if opt == nil {
+ continue
+ }
+ if opt.DecodeZerosMap != nil {
+ s.DecodeZerosMap = opt.DecodeZerosMap
+ }
+ if opt.EncodeNilAsEmpty != nil {
+ s.EncodeNilAsEmpty = opt.EncodeNilAsEmpty
+ }
+ if opt.EncodeKeysWithStringer != nil {
+ s.EncodeKeysWithStringer = opt.EncodeKeysWithStringer
+ }
+ }
+
+ return s
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/slice_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/slice_codec_options.go
new file mode 100644
index 000000000..ef965e4b4
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/slice_codec_options.go
@@ -0,0 +1,38 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonoptions
+
+// SliceCodecOptions represents all possible options for slice encoding and decoding.
+type SliceCodecOptions struct {
+ EncodeNilAsEmpty *bool // Specifies if a nil slice should encode as an empty array instead of null. Defaults to false.
+}
+
+// SliceCodec creates a new *SliceCodecOptions
+func SliceCodec() *SliceCodecOptions {
+ return &SliceCodecOptions{}
+}
+
+// SetEncodeNilAsEmpty specifies if a nil slice should encode as an empty array instead of null. Defaults to false.
+func (s *SliceCodecOptions) SetEncodeNilAsEmpty(b bool) *SliceCodecOptions {
+ s.EncodeNilAsEmpty = &b
+ return s
+}
+
+// MergeSliceCodecOptions combines the given *SliceCodecOptions into a single *SliceCodecOptions in a last one wins fashion.
+func MergeSliceCodecOptions(opts ...*SliceCodecOptions) *SliceCodecOptions {
+ s := SliceCodec()
+ for _, opt := range opts {
+ if opt == nil {
+ continue
+ }
+ if opt.EncodeNilAsEmpty != nil {
+ s.EncodeNilAsEmpty = opt.EncodeNilAsEmpty
+ }
+ }
+
+ return s
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/string_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/string_codec_options.go
new file mode 100644
index 000000000..65964f420
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/string_codec_options.go
@@ -0,0 +1,41 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonoptions
+
+var defaultDecodeOIDAsHex = true
+
+// StringCodecOptions represents all possible options for string encoding and decoding.
+type StringCodecOptions struct {
+ DecodeObjectIDAsHex *bool // Specifies if we should decode ObjectID as the hex value. Defaults to true.
+}
+
+// StringCodec creates a new *StringCodecOptions
+func StringCodec() *StringCodecOptions {
+ return &StringCodecOptions{}
+}
+
+// SetDecodeObjectIDAsHex specifies if object IDs should be decoded as their hex representation. If false, a string made
+// from the raw object ID bytes will be used. Defaults to true.
+func (t *StringCodecOptions) SetDecodeObjectIDAsHex(b bool) *StringCodecOptions {
+ t.DecodeObjectIDAsHex = &b
+ return t
+}
+
+// MergeStringCodecOptions combines the given *StringCodecOptions into a single *StringCodecOptions in a last one wins fashion.
+func MergeStringCodecOptions(opts ...*StringCodecOptions) *StringCodecOptions {
+ s := &StringCodecOptions{&defaultDecodeOIDAsHex}
+ for _, opt := range opts {
+ if opt == nil {
+ continue
+ }
+ if opt.DecodeObjectIDAsHex != nil {
+ s.DecodeObjectIDAsHex = opt.DecodeObjectIDAsHex
+ }
+ }
+
+ return s
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/struct_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/struct_codec_options.go
new file mode 100644
index 000000000..78d1dd866
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/struct_codec_options.go
@@ -0,0 +1,87 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonoptions
+
+var defaultOverwriteDuplicatedInlinedFields = true
+
+// StructCodecOptions represents all possible options for struct encoding and decoding.
+type StructCodecOptions struct {
+ DecodeZeroStruct *bool // Specifies if structs should be zeroed before decoding into them. Defaults to false.
+ DecodeDeepZeroInline *bool // Specifies if structs should be recursively zeroed when an inline value is decoded. Defaults to false.
+ EncodeOmitDefaultStruct *bool // Specifies if default structs should be considered empty by omitempty. Defaults to false.
+ AllowUnexportedFields *bool // Specifies if unexported fields should be marshaled/unmarshaled. Defaults to false.
+ OverwriteDuplicatedInlinedFields *bool // Specifies if fields in inlined structs can be overwritten by higher level struct fields with the same key. Defaults to true.
+}
+
+// StructCodec creates a new *StructCodecOptions
+func StructCodec() *StructCodecOptions {
+ return &StructCodecOptions{}
+}
+
+// SetDecodeZeroStruct specifies if structs should be zeroed before decoding into them. Defaults to false.
+func (t *StructCodecOptions) SetDecodeZeroStruct(b bool) *StructCodecOptions {
+ t.DecodeZeroStruct = &b
+ return t
+}
+
+// SetDecodeDeepZeroInline specifies if structs should be recursively zeroed when an inline value is decoded. Defaults to false.
+func (t *StructCodecOptions) SetDecodeDeepZeroInline(b bool) *StructCodecOptions {
+ t.DecodeDeepZeroInline = &b
+ return t
+}
+
+// SetEncodeOmitDefaultStruct specifies if default structs should be considered empty by omitempty. A default struct has all
+// its values set to their default value. Defaults to false.
+func (t *StructCodecOptions) SetEncodeOmitDefaultStruct(b bool) *StructCodecOptions {
+ t.EncodeOmitDefaultStruct = &b
+ return t
+}
+
+// SetOverwriteDuplicatedInlinedFields specifies if inlined struct fields can be overwritten by higher level struct fields with the
+// same bson key. When true and decoding, values will be written to the outermost struct with a matching key, and when
+// encoding, keys will have the value of the top-most matching field. When false, decoding and encoding will error if
+// there are duplicate keys after the struct is inlined. Defaults to true.
+func (t *StructCodecOptions) SetOverwriteDuplicatedInlinedFields(b bool) *StructCodecOptions {
+ t.OverwriteDuplicatedInlinedFields = &b
+ return t
+}
+
+// SetAllowUnexportedFields specifies if unexported fields should be marshaled/unmarshaled. Defaults to false.
+func (t *StructCodecOptions) SetAllowUnexportedFields(b bool) *StructCodecOptions {
+ t.AllowUnexportedFields = &b
+ return t
+}
+
+// MergeStructCodecOptions combines the given *StructCodecOptions into a single *StructCodecOptions in a last one wins fashion.
+func MergeStructCodecOptions(opts ...*StructCodecOptions) *StructCodecOptions {
+ s := &StructCodecOptions{
+ OverwriteDuplicatedInlinedFields: &defaultOverwriteDuplicatedInlinedFields,
+ }
+ for _, opt := range opts {
+ if opt == nil {
+ continue
+ }
+
+ if opt.DecodeZeroStruct != nil {
+ s.DecodeZeroStruct = opt.DecodeZeroStruct
+ }
+ if opt.DecodeDeepZeroInline != nil {
+ s.DecodeDeepZeroInline = opt.DecodeDeepZeroInline
+ }
+ if opt.EncodeOmitDefaultStruct != nil {
+ s.EncodeOmitDefaultStruct = opt.EncodeOmitDefaultStruct
+ }
+ if opt.OverwriteDuplicatedInlinedFields != nil {
+ s.OverwriteDuplicatedInlinedFields = opt.OverwriteDuplicatedInlinedFields
+ }
+ if opt.AllowUnexportedFields != nil {
+ s.AllowUnexportedFields = opt.AllowUnexportedFields
+ }
+ }
+
+ return s
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/time_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/time_codec_options.go
new file mode 100644
index 000000000..13496d121
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/time_codec_options.go
@@ -0,0 +1,38 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonoptions
+
+// TimeCodecOptions represents all possible options for time.Time encoding and decoding.
+type TimeCodecOptions struct {
+ UseLocalTimeZone *bool // Specifies if we should decode into the local time zone. Defaults to false.
+}
+
+// TimeCodec creates a new *TimeCodecOptions
+func TimeCodec() *TimeCodecOptions {
+ return &TimeCodecOptions{}
+}
+
+// SetUseLocalTimeZone specifies if we should decode into the local time zone. Defaults to false.
+func (t *TimeCodecOptions) SetUseLocalTimeZone(b bool) *TimeCodecOptions {
+ t.UseLocalTimeZone = &b
+ return t
+}
+
+// MergeTimeCodecOptions combines the given *TimeCodecOptions into a single *TimeCodecOptions in a last one wins fashion.
+func MergeTimeCodecOptions(opts ...*TimeCodecOptions) *TimeCodecOptions {
+ t := TimeCodec()
+ for _, opt := range opts {
+ if opt == nil {
+ continue
+ }
+ if opt.UseLocalTimeZone != nil {
+ t.UseLocalTimeZone = opt.UseLocalTimeZone
+ }
+ }
+
+ return t
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/uint_codec_options.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/uint_codec_options.go
new file mode 100644
index 000000000..e08b7f192
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonoptions/uint_codec_options.go
@@ -0,0 +1,38 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonoptions
+
+// UIntCodecOptions represents all possible options for uint encoding and decoding.
+type UIntCodecOptions struct {
+ EncodeToMinSize *bool // Specifies if all uints except uint64 should be encoded to the minimum size bsontype. Defaults to false.
+}
+
+// UIntCodec creates a new *UIntCodecOptions
+func UIntCodec() *UIntCodecOptions {
+ return &UIntCodecOptions{}
+}
+
+// SetEncodeToMinSize specifies if all uints except uint64 should be encoded to the minimum size bsontype. Defaults to false.
+func (u *UIntCodecOptions) SetEncodeToMinSize(b bool) *UIntCodecOptions {
+ u.EncodeToMinSize = &b
+ return u
+}
+
+// MergeUIntCodecOptions combines the given *UIntCodecOptions into a single *UIntCodecOptions in a last one wins fashion.
+func MergeUIntCodecOptions(opts ...*UIntCodecOptions) *UIntCodecOptions {
+ u := UIntCodec()
+ for _, opt := range opts {
+ if opt == nil {
+ continue
+ }
+ if opt.EncodeToMinSize != nil {
+ u.EncodeToMinSize = opt.EncodeToMinSize
+ }
+ }
+
+ return u
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/copier.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/copier.go
new file mode 100644
index 000000000..5cdf6460b
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/copier.go
@@ -0,0 +1,445 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonrw
+
+import (
+ "fmt"
+ "io"
+
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+ "go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
+)
+
+// Copier is a type that allows copying between ValueReaders, ValueWriters, and
+// []byte values.
+type Copier struct{}
+
+// NewCopier creates a new Copier.
+func NewCopier() Copier {
+ return Copier{}
+}
+
+// CopyDocument handles copying a document from src to dst.
+func CopyDocument(dst ValueWriter, src ValueReader) error {
+ return Copier{}.CopyDocument(dst, src)
+}
+
+// CopyDocument handles copying one document from src to dst.
+func (c Copier) CopyDocument(dst ValueWriter, src ValueReader) error {
+ dr, err := src.ReadDocument()
+ if err != nil {
+ return err
+ }
+
+ dw, err := dst.WriteDocument()
+ if err != nil {
+ return err
+ }
+
+ return c.copyDocumentCore(dw, dr)
+}
+
+// CopyArrayFromBytes copies the values from a BSON array represented as a
+// []byte to a ValueWriter.
+func (c Copier) CopyArrayFromBytes(dst ValueWriter, src []byte) error {
+ aw, err := dst.WriteArray()
+ if err != nil {
+ return err
+ }
+
+ err = c.CopyBytesToArrayWriter(aw, src)
+ if err != nil {
+ return err
+ }
+
+ return aw.WriteArrayEnd()
+}
+
+// CopyDocumentFromBytes copies the values from a BSON document represented as a
+// []byte to a ValueWriter.
+func (c Copier) CopyDocumentFromBytes(dst ValueWriter, src []byte) error {
+ dw, err := dst.WriteDocument()
+ if err != nil {
+ return err
+ }
+
+ err = c.CopyBytesToDocumentWriter(dw, src)
+ if err != nil {
+ return err
+ }
+
+ return dw.WriteDocumentEnd()
+}
+
+type writeElementFn func(key string) (ValueWriter, error)
+
+// CopyBytesToArrayWriter copies the values from a BSON Array represented as a []byte to an
+// ArrayWriter.
+func (c Copier) CopyBytesToArrayWriter(dst ArrayWriter, src []byte) error {
+ wef := func(_ string) (ValueWriter, error) {
+ return dst.WriteArrayElement()
+ }
+
+ return c.copyBytesToValueWriter(src, wef)
+}
+
+// CopyBytesToDocumentWriter copies the values from a BSON document represented as a []byte to a
+// DocumentWriter.
+func (c Copier) CopyBytesToDocumentWriter(dst DocumentWriter, src []byte) error {
+ wef := func(key string) (ValueWriter, error) {
+ return dst.WriteDocumentElement(key)
+ }
+
+ return c.copyBytesToValueWriter(src, wef)
+}
+
+func (c Copier) copyBytesToValueWriter(src []byte, wef writeElementFn) error {
+ // TODO(skriptble): Create error types here. Anything that's a tag should be a property.
+ length, rem, ok := bsoncore.ReadLength(src)
+ if !ok {
+ return fmt.Errorf("couldn't read length from src, not enough bytes. length=%d", len(src))
+ }
+ if len(src) < int(length) {
+ return fmt.Errorf("length read exceeds number of bytes available. length=%d bytes=%d", len(src), length)
+ }
+ rem = rem[:length-4]
+
+ var t bsontype.Type
+ var key string
+ var val bsoncore.Value
+ for {
+ t, rem, ok = bsoncore.ReadType(rem)
+ if !ok {
+ return io.EOF
+ }
+ if t == bsontype.Type(0) {
+ if len(rem) != 0 {
+ return fmt.Errorf("document end byte found before end of document. remaining bytes=%v", rem)
+ }
+ break
+ }
+
+ key, rem, ok = bsoncore.ReadKey(rem)
+ if !ok {
+ return fmt.Errorf("invalid key found. remaining bytes=%v", rem)
+ }
+
+ // write as either array element or document element using writeElementFn
+ vw, err := wef(key)
+ if err != nil {
+ return err
+ }
+
+ val, rem, ok = bsoncore.ReadValue(rem, t)
+ if !ok {
+ return fmt.Errorf("not enough bytes available to read type. bytes=%d type=%s", len(rem), t)
+ }
+ err = c.CopyValueFromBytes(vw, t, val.Data)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// CopyDocumentToBytes copies an entire document from the ValueReader and
+// returns it as bytes.
+func (c Copier) CopyDocumentToBytes(src ValueReader) ([]byte, error) {
+ return c.AppendDocumentBytes(nil, src)
+}
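+
+// Illustrative sketch (not part of the upstream source), assuming raw
+// holds a valid BSON document and using this package's
+// NewBSONDocumentReader:
+//
+//    r := NewBSONDocumentReader(raw)
+//    out, err := Copier{}.CopyDocumentToBytes(r)
+//    // out is an independent copy of the document bytes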
+
+// AppendDocumentBytes functions the same as CopyDocumentToBytes, but will
+// append the result to dst.
+func (c Copier) AppendDocumentBytes(dst []byte, src ValueReader) ([]byte, error) {
+ if br, ok := src.(BytesReader); ok {
+ _, dst, err := br.ReadValueBytes(dst)
+ return dst, err
+ }
+
+ vw := vwPool.Get().(*valueWriter)
+ defer vwPool.Put(vw)
+
+ vw.reset(dst)
+
+ err := c.CopyDocument(vw, src)
+ dst = vw.buf
+ return dst, err
+}
+
+// AppendArrayBytes copies an array from the ValueReader to dst.
+func (c Copier) AppendArrayBytes(dst []byte, src ValueReader) ([]byte, error) {
+ if br, ok := src.(BytesReader); ok {
+ _, dst, err := br.ReadValueBytes(dst)
+ return dst, err
+ }
+
+ vw := vwPool.Get().(*valueWriter)
+ defer vwPool.Put(vw)
+
+ vw.reset(dst)
+
+ err := c.copyArray(vw, src)
+ dst = vw.buf
+ return dst, err
+}
+
+// CopyValueFromBytes will write the value represented by t and src to dst.
+func (c Copier) CopyValueFromBytes(dst ValueWriter, t bsontype.Type, src []byte) error {
+ if wvb, ok := dst.(BytesWriter); ok {
+ return wvb.WriteValueBytes(t, src)
+ }
+
+ vr := vrPool.Get().(*valueReader)
+ defer vrPool.Put(vr)
+
+ vr.reset(src)
+ vr.pushElement(t)
+
+ return c.CopyValue(dst, vr)
+}
+
+// CopyValueToBytes copies a value from src and returns it as a bsontype.Type and a
+// []byte.
+func (c Copier) CopyValueToBytes(src ValueReader) (bsontype.Type, []byte, error) {
+ return c.AppendValueBytes(nil, src)
+}
+
+// AppendValueBytes functions the same as CopyValueToBytes, but will append the
+// result to dst.
+func (c Copier) AppendValueBytes(dst []byte, src ValueReader) (bsontype.Type, []byte, error) {
+ if br, ok := src.(BytesReader); ok {
+ return br.ReadValueBytes(dst)
+ }
+
+ vw := vwPool.Get().(*valueWriter)
+ defer vwPool.Put(vw)
+
+ start := len(dst)
+
+ vw.reset(dst)
+ vw.push(mElement)
+
+ err := c.CopyValue(vw, src)
+ if err != nil {
+ return 0, dst, err
+ }
+
+ return bsontype.Type(vw.buf[start]), vw.buf[start+2:], nil
+}
+
+// CopyValue will copy a single value from src to dst.
+func (c Copier) CopyValue(dst ValueWriter, src ValueReader) error {
+ var err error
+ switch src.Type() {
+ case bsontype.Double:
+ var f64 float64
+ f64, err = src.ReadDouble()
+ if err != nil {
+ break
+ }
+ err = dst.WriteDouble(f64)
+ case bsontype.String:
+ var str string
+ str, err = src.ReadString()
+ if err != nil {
+ return err
+ }
+ err = dst.WriteString(str)
+ case bsontype.EmbeddedDocument:
+ err = c.CopyDocument(dst, src)
+ case bsontype.Array:
+ err = c.copyArray(dst, src)
+ case bsontype.Binary:
+ var data []byte
+ var subtype byte
+ data, subtype, err = src.ReadBinary()
+ if err != nil {
+ break
+ }
+ err = dst.WriteBinaryWithSubtype(data, subtype)
+ case bsontype.Undefined:
+ err = src.ReadUndefined()
+ if err != nil {
+ break
+ }
+ err = dst.WriteUndefined()
+ case bsontype.ObjectID:
+ var oid primitive.ObjectID
+ oid, err = src.ReadObjectID()
+ if err != nil {
+ break
+ }
+ err = dst.WriteObjectID(oid)
+ case bsontype.Boolean:
+ var b bool
+ b, err = src.ReadBoolean()
+ if err != nil {
+ break
+ }
+ err = dst.WriteBoolean(b)
+ case bsontype.DateTime:
+ var dt int64
+ dt, err = src.ReadDateTime()
+ if err != nil {
+ break
+ }
+ err = dst.WriteDateTime(dt)
+ case bsontype.Null:
+ err = src.ReadNull()
+ if err != nil {
+ break
+ }
+ err = dst.WriteNull()
+ case bsontype.Regex:
+ var pattern, options string
+ pattern, options, err = src.ReadRegex()
+ if err != nil {
+ break
+ }
+ err = dst.WriteRegex(pattern, options)
+ case bsontype.DBPointer:
+ var ns string
+ var pointer primitive.ObjectID
+ ns, pointer, err = src.ReadDBPointer()
+ if err != nil {
+ break
+ }
+ err = dst.WriteDBPointer(ns, pointer)
+ case bsontype.JavaScript:
+ var js string
+ js, err = src.ReadJavascript()
+ if err != nil {
+ break
+ }
+ err = dst.WriteJavascript(js)
+ case bsontype.Symbol:
+ var symbol string
+ symbol, err = src.ReadSymbol()
+ if err != nil {
+ break
+ }
+ err = dst.WriteSymbol(symbol)
+ case bsontype.CodeWithScope:
+ var code string
+ var srcScope DocumentReader
+ code, srcScope, err = src.ReadCodeWithScope()
+ if err != nil {
+ break
+ }
+
+ var dstScope DocumentWriter
+ dstScope, err = dst.WriteCodeWithScope(code)
+ if err != nil {
+ break
+ }
+ err = c.copyDocumentCore(dstScope, srcScope)
+ case bsontype.Int32:
+ var i32 int32
+ i32, err = src.ReadInt32()
+ if err != nil {
+ break
+ }
+ err = dst.WriteInt32(i32)
+ case bsontype.Timestamp:
+ var t, i uint32
+ t, i, err = src.ReadTimestamp()
+ if err != nil {
+ break
+ }
+ err = dst.WriteTimestamp(t, i)
+ case bsontype.Int64:
+ var i64 int64
+ i64, err = src.ReadInt64()
+ if err != nil {
+ break
+ }
+ err = dst.WriteInt64(i64)
+ case bsontype.Decimal128:
+ var d128 primitive.Decimal128
+ d128, err = src.ReadDecimal128()
+ if err != nil {
+ break
+ }
+ err = dst.WriteDecimal128(d128)
+ case bsontype.MinKey:
+ err = src.ReadMinKey()
+ if err != nil {
+ break
+ }
+ err = dst.WriteMinKey()
+ case bsontype.MaxKey:
+ err = src.ReadMaxKey()
+ if err != nil {
+ break
+ }
+ err = dst.WriteMaxKey()
+ default:
+ err = fmt.Errorf("Cannot copy unknown BSON type %s", src.Type())
+ }
+
+ return err
+}
+
+func (c Copier) copyArray(dst ValueWriter, src ValueReader) error {
+ ar, err := src.ReadArray()
+ if err != nil {
+ return err
+ }
+
+ aw, err := dst.WriteArray()
+ if err != nil {
+ return err
+ }
+
+ for {
+ vr, err := ar.ReadValue()
+ if err == ErrEOA {
+ break
+ }
+ if err != nil {
+ return err
+ }
+
+ vw, err := aw.WriteArrayElement()
+ if err != nil {
+ return err
+ }
+
+ err = c.CopyValue(vw, vr)
+ if err != nil {
+ return err
+ }
+ }
+
+ return aw.WriteArrayEnd()
+}
+
+func (c Copier) copyDocumentCore(dw DocumentWriter, dr DocumentReader) error {
+ for {
+ key, vr, err := dr.ReadElement()
+ if err == ErrEOD {
+ break
+ }
+ if err != nil {
+ return err
+ }
+
+ vw, err := dw.WriteDocumentElement(key)
+ if err != nil {
+ return err
+ }
+
+ err = c.CopyValue(vw, vr)
+ if err != nil {
+ return err
+ }
+ }
+
+ return dw.WriteDocumentEnd()
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/doc.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/doc.go
new file mode 100644
index 000000000..750b0d2af
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/doc.go
@@ -0,0 +1,9 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+// Package bsonrw contains abstractions for reading and writing
+// BSON and BSON-like types from sources.
+package bsonrw // import "go.mongodb.org/mongo-driver/bson/bsonrw"
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_parser.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_parser.go
new file mode 100644
index 000000000..54c76bf74
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_parser.go
@@ -0,0 +1,806 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonrw
+
+import (
+ "encoding/base64"
+ "encoding/hex"
+ "errors"
+ "fmt"
+ "io"
+ "strings"
+
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+)
+
+const maxNestingDepth = 200
+
+// ErrInvalidJSON indicates the JSON input is invalid
+var ErrInvalidJSON = errors.New("invalid JSON input")
+
+type jsonParseState byte
+
+const (
+ jpsStartState jsonParseState = iota
+ jpsSawBeginObject
+ jpsSawEndObject
+ jpsSawBeginArray
+ jpsSawEndArray
+ jpsSawColon
+ jpsSawComma
+ jpsSawKey
+ jpsSawValue
+ jpsDoneState
+ jpsInvalidState
+)
+
+type jsonParseMode byte
+
+const (
+ jpmInvalidMode jsonParseMode = iota
+ jpmObjectMode
+ jpmArrayMode
+)
+
+type extJSONValue struct {
+ t bsontype.Type
+ v interface{}
+}
+
+type extJSONObject struct {
+ keys []string
+ values []*extJSONValue
+}
+
+type extJSONParser struct {
+ js *jsonScanner
+ s jsonParseState
+ m []jsonParseMode
+ k string
+ v *extJSONValue
+
+ err error
+ canonical bool
+ depth int
+ maxDepth int
+
+ emptyObject bool
+ relaxedUUID bool
+}
+
+// newExtJSONParser returns a new extended JSON parser, ready to begin
+// parsing from the first character of the given JSON input. It will not
+// perform any read-ahead and will therefore not report any errors about
+// malformed JSON at this point.
+func newExtJSONParser(r io.Reader, canonical bool) *extJSONParser {
+ return &extJSONParser{
+ js: &jsonScanner{r: r},
+ s: jpsStartState,
+ m: []jsonParseMode{},
+ canonical: canonical,
+ maxDepth: maxNestingDepth,
+ }
+}
+
+// peekType examines the next value and returns its BSON Type
+func (ejp *extJSONParser) peekType() (bsontype.Type, error) {
+ var t bsontype.Type
+ var err error
+ initialState := ejp.s
+
+ ejp.advanceState()
+ switch ejp.s {
+ case jpsSawValue:
+ t = ejp.v.t
+ case jpsSawBeginArray:
+ t = bsontype.Array
+ case jpsInvalidState:
+ err = ejp.err
+ case jpsSawComma:
+ // in array mode, seeing a comma means we need to progress again to actually observe a type
+ if ejp.peekMode() == jpmArrayMode {
+ return ejp.peekType()
+ }
+ case jpsSawEndArray:
+ // this would only be a valid state if we were in array mode, so return end-of-array error
+ err = ErrEOA
+ case jpsSawBeginObject:
+ // peek key to determine type
+ ejp.advanceState()
+ switch ejp.s {
+ case jpsSawEndObject: // empty embedded document
+ t = bsontype.EmbeddedDocument
+ ejp.emptyObject = true
+ case jpsInvalidState:
+ err = ejp.err
+ case jpsSawKey:
+ if initialState == jpsStartState {
+ return bsontype.EmbeddedDocument, nil
+ }
+ t = wrapperKeyBSONType(ejp.k)
+
+ // if $uuid is encountered, parse as binary subtype 4
+ if ejp.k == "$uuid" {
+ ejp.relaxedUUID = true
+ t = bsontype.Binary
+ }
+
+ switch t {
+ case bsontype.JavaScript:
+ // just saw $code, need to check for $scope at same level
+ _, err = ejp.readValue(bsontype.JavaScript)
+ if err != nil {
+ break
+ }
+
+ switch ejp.s {
+ case jpsSawEndObject: // type is TypeJavaScript
+ case jpsSawComma:
+ ejp.advanceState()
+
+ if ejp.s == jpsSawKey && ejp.k == "$scope" {
+ t = bsontype.CodeWithScope
+ } else {
+ err = fmt.Errorf("invalid extended JSON: unexpected key %s in CodeWithScope object", ejp.k)
+ }
+ case jpsInvalidState:
+ err = ejp.err
+ default:
+ err = ErrInvalidJSON
+ }
+ case bsontype.CodeWithScope:
+ err = errors.New("invalid extended JSON: code with $scope must contain $code before $scope")
+ }
+ }
+ }
+
+ return t, err
+}
+
+// readKey parses the next key and its type and returns them
+func (ejp *extJSONParser) readKey() (string, bsontype.Type, error) {
+ if ejp.emptyObject {
+ ejp.emptyObject = false
+ return "", 0, ErrEOD
+ }
+
+ // advance to key (or return with error)
+ switch ejp.s {
+ case jpsStartState:
+ ejp.advanceState()
+ if ejp.s == jpsSawBeginObject {
+ ejp.advanceState()
+ }
+ case jpsSawBeginObject:
+ ejp.advanceState()
+ case jpsSawValue, jpsSawEndObject, jpsSawEndArray:
+ ejp.advanceState()
+ switch ejp.s {
+ case jpsSawBeginObject, jpsSawComma:
+ ejp.advanceState()
+ case jpsSawEndObject:
+ return "", 0, ErrEOD
+ case jpsDoneState:
+ return "", 0, io.EOF
+ case jpsInvalidState:
+ return "", 0, ejp.err
+ default:
+ return "", 0, ErrInvalidJSON
+ }
+ case jpsSawKey: // do nothing (key was peeked before)
+ default:
+ return "", 0, invalidRequestError("key")
+ }
+
+ // read key
+ var key string
+
+ switch ejp.s {
+ case jpsSawKey:
+ key = ejp.k
+ case jpsSawEndObject:
+ return "", 0, ErrEOD
+ case jpsInvalidState:
+ return "", 0, ejp.err
+ default:
+ return "", 0, invalidRequestError("key")
+ }
+
+ // check for colon
+ ejp.advanceState()
+ if err := ensureColon(ejp.s, key); err != nil {
+ return "", 0, err
+ }
+
+ // peek at the value to determine type
+ t, err := ejp.peekType()
+ if err != nil {
+ return "", 0, err
+ }
+
+ return key, t, nil
+}
+
+// readValue returns the value corresponding to the Type returned by peekType
+func (ejp *extJSONParser) readValue(t bsontype.Type) (*extJSONValue, error) {
+ if ejp.s == jpsInvalidState {
+ return nil, ejp.err
+ }
+
+ var v *extJSONValue
+
+ switch t {
+ case bsontype.Null, bsontype.Boolean, bsontype.String:
+ if ejp.s != jpsSawValue {
+ return nil, invalidRequestError(t.String())
+ }
+ v = ejp.v
+ case bsontype.Int32, bsontype.Int64, bsontype.Double:
+ // relaxed version allows these to be literal number values
+ if ejp.s == jpsSawValue {
+ v = ejp.v
+ break
+ }
+ fallthrough
+ case bsontype.Decimal128, bsontype.Symbol, bsontype.ObjectID, bsontype.MinKey, bsontype.MaxKey, bsontype.Undefined:
+ switch ejp.s {
+ case jpsSawKey:
+ // read colon
+ ejp.advanceState()
+ if err := ensureColon(ejp.s, ejp.k); err != nil {
+ return nil, err
+ }
+
+ // read value
+ ejp.advanceState()
+ if ejp.s != jpsSawValue || !ejp.ensureExtValueType(t) {
+ return nil, invalidJSONErrorForType("value", t)
+ }
+
+ v = ejp.v
+
+ // read end object
+ ejp.advanceState()
+ if ejp.s != jpsSawEndObject {
+ return nil, invalidJSONErrorForType("} after value", t)
+ }
+ default:
+ return nil, invalidRequestError(t.String())
+ }
+ case bsontype.Binary, bsontype.Regex, bsontype.Timestamp, bsontype.DBPointer:
+ if ejp.s != jpsSawKey {
+ return nil, invalidRequestError(t.String())
+ }
+ // read colon
+ ejp.advanceState()
+ if err := ensureColon(ejp.s, ejp.k); err != nil {
+ return nil, err
+ }
+
+ ejp.advanceState()
+ if t == bsontype.Binary && ejp.s == jpsSawValue {
+ // convert relaxed $uuid format
+ if ejp.relaxedUUID {
+ defer func() { ejp.relaxedUUID = false }()
+ uuid, err := ejp.v.parseSymbol()
+ if err != nil {
+ return nil, err
+ }
+
+ // RFC 4122 defines the length of a UUID as 36, with hyphens appearing at
+ // (0-indexed) byte positions 8, 13, 18, and 23.
+ //
+ // See https://tools.ietf.org/html/rfc4122#section-3
+ valid := len(uuid) == 36 &&
+ string(uuid[8]) == "-" &&
+ string(uuid[13]) == "-" &&
+ string(uuid[18]) == "-" &&
+ string(uuid[23]) == "-"
+ if !valid {
+ return nil, fmt.Errorf("$uuid value does not follow RFC 4122 format regarding length and hyphens")
+ }
+
+ // remove hyphens
+ uuidNoHyphens := strings.Replace(uuid, "-", "", -1)
+ if len(uuidNoHyphens) != 32 {
+ return nil, fmt.Errorf("$uuid value does not follow RFC 4122 format regarding length and hyphens")
+ }
+
+ // convert hex to bytes
+ bytes, err := hex.DecodeString(uuidNoHyphens)
+ if err != nil {
+ return nil, fmt.Errorf("$uuid value does not follow RFC 4122 format regarding hex bytes: %v", err)
+ }
+
+ ejp.advanceState()
+ if ejp.s != jpsSawEndObject {
+ return nil, invalidJSONErrorForType("$uuid and value and then }", bsontype.Binary)
+ }
+
+ base64 := &extJSONValue{
+ t: bsontype.String,
+ v: base64.StdEncoding.EncodeToString(bytes),
+ }
+ subType := &extJSONValue{
+ t: bsontype.String,
+ v: "04",
+ }
+
+ v = &extJSONValue{
+ t: bsontype.EmbeddedDocument,
+ v: &extJSONObject{
+ keys: []string{"base64", "subType"},
+ values: []*extJSONValue{base64, subType},
+ },
+ }
+
+ break
+ }
+
+ // convert legacy $binary format
+ base64 := ejp.v
+
+ ejp.advanceState()
+ if ejp.s != jpsSawComma {
+ return nil, invalidJSONErrorForType(",", bsontype.Binary)
+ }
+
+ ejp.advanceState()
+ key, t, err := ejp.readKey()
+ if err != nil {
+ return nil, err
+ }
+ if key != "$type" {
+ return nil, invalidJSONErrorForType("$type", bsontype.Binary)
+ }
+
+ subType, err := ejp.readValue(t)
+ if err != nil {
+ return nil, err
+ }
+
+ ejp.advanceState()
+ if ejp.s != jpsSawEndObject {
+ return nil, invalidJSONErrorForType("2 key-value pairs and then }", bsontype.Binary)
+ }
+
+ v = &extJSONValue{
+ t: bsontype.EmbeddedDocument,
+ v: &extJSONObject{
+ keys: []string{"base64", "subType"},
+ values: []*extJSONValue{base64, subType},
+ },
+ }
+ break
+ }
+
+ // read KV pairs
+ if ejp.s != jpsSawBeginObject {
+ return nil, invalidJSONErrorForType("{", t)
+ }
+
+ keys, vals, err := ejp.readObject(2, true)
+ if err != nil {
+ return nil, err
+ }
+
+ ejp.advanceState()
+ if ejp.s != jpsSawEndObject {
+ return nil, invalidJSONErrorForType("2 key-value pairs and then }", t)
+ }
+
+ v = &extJSONValue{t: bsontype.EmbeddedDocument, v: &extJSONObject{keys: keys, values: vals}}
+
+ case bsontype.DateTime:
+ switch ejp.s {
+ case jpsSawValue:
+ v = ejp.v
+ case jpsSawKey:
+ // read colon
+ ejp.advanceState()
+ if err := ensureColon(ejp.s, ejp.k); err != nil {
+ return nil, err
+ }
+
+ ejp.advanceState()
+ switch ejp.s {
+ case jpsSawBeginObject:
+ keys, vals, err := ejp.readObject(1, true)
+ if err != nil {
+ return nil, err
+ }
+ v = &extJSONValue{t: bsontype.EmbeddedDocument, v: &extJSONObject{keys: keys, values: vals}}
+ case jpsSawValue:
+ if ejp.canonical {
+ return nil, invalidJSONError("{")
+ }
+ v = ejp.v
+ default:
+ if ejp.canonical {
+ return nil, invalidJSONErrorForType("object", t)
+ }
+ return nil, invalidJSONErrorForType("ISO-8601 Internet Date/Time Format as described in RFC-3339", t)
+ }
+
+ ejp.advanceState()
+ if ejp.s != jpsSawEndObject {
+ return nil, invalidJSONErrorForType("value and then }", t)
+ }
+ default:
+ return nil, invalidRequestError(t.String())
+ }
+ case bsontype.JavaScript:
+ switch ejp.s {
+ case jpsSawKey:
+ // read colon
+ ejp.advanceState()
+ if err := ensureColon(ejp.s, ejp.k); err != nil {
+ return nil, err
+ }
+
+ // read value
+ ejp.advanceState()
+ if ejp.s != jpsSawValue {
+ return nil, invalidJSONErrorForType("value", t)
+ }
+ v = ejp.v
+
+ // read end object or comma and just return
+ ejp.advanceState()
+ case jpsSawEndObject:
+ v = ejp.v
+ default:
+ return nil, invalidRequestError(t.String())
+ }
+ case bsontype.CodeWithScope:
+ if ejp.s == jpsSawKey && ejp.k == "$scope" {
+ v = ejp.v // this is the $code string from earlier
+
+ // read colon
+ ejp.advanceState()
+ if err := ensureColon(ejp.s, ejp.k); err != nil {
+ return nil, err
+ }
+
+ // read {
+ ejp.advanceState()
+ if ejp.s != jpsSawBeginObject {
+ return nil, invalidJSONError("$scope to be embedded document")
+ }
+ } else {
+ return nil, invalidRequestError(t.String())
+ }
+ case bsontype.EmbeddedDocument, bsontype.Array:
+ return nil, invalidRequestError(t.String())
+ }
+
+ return v, nil
+}
+
+// readObject is a utility method for reading full objects of known (or expected) size.
+// It is useful for extended JSON types such as binary, datetime, regex, and timestamp.
+func (ejp *extJSONParser) readObject(numKeys int, started bool) ([]string, []*extJSONValue, error) {
+ keys := make([]string, numKeys)
+ vals := make([]*extJSONValue, numKeys)
+
+ if !started {
+ ejp.advanceState()
+ if ejp.s != jpsSawBeginObject {
+ return nil, nil, invalidJSONError("{")
+ }
+ }
+
+ for i := 0; i < numKeys; i++ {
+ key, t, err := ejp.readKey()
+ if err != nil {
+ return nil, nil, err
+ }
+
+ switch ejp.s {
+ case jpsSawKey:
+ v, err := ejp.readValue(t)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ keys[i] = key
+ vals[i] = v
+ case jpsSawValue:
+ keys[i] = key
+ vals[i] = ejp.v
+ default:
+ return nil, nil, invalidJSONError("value")
+ }
+ }
+
+ ejp.advanceState()
+ if ejp.s != jpsSawEndObject {
+ return nil, nil, invalidJSONError("}")
+ }
+
+ return keys, vals, nil
+}
+
+// advanceState reads the next JSON token from the scanner and transitions
+// from the current state based on that token's type
+func (ejp *extJSONParser) advanceState() {
+ if ejp.s == jpsDoneState || ejp.s == jpsInvalidState {
+ return
+ }
+
+ jt, err := ejp.js.nextToken()
+
+ if err != nil {
+ ejp.err = err
+ ejp.s = jpsInvalidState
+ return
+ }
+
+ valid := ejp.validateToken(jt.t)
+ if !valid {
+ ejp.err = unexpectedTokenError(jt)
+ ejp.s = jpsInvalidState
+ return
+ }
+
+ switch jt.t {
+ case jttBeginObject:
+ ejp.s = jpsSawBeginObject
+ ejp.pushMode(jpmObjectMode)
+ ejp.depth++
+
+ if ejp.depth > ejp.maxDepth {
+ ejp.err = nestingDepthError(jt.p, ejp.depth)
+ ejp.s = jpsInvalidState
+ }
+ case jttEndObject:
+ ejp.s = jpsSawEndObject
+ ejp.depth--
+
+ if ejp.popMode() != jpmObjectMode {
+ ejp.err = unexpectedTokenError(jt)
+ ejp.s = jpsInvalidState
+ }
+ case jttBeginArray:
+ ejp.s = jpsSawBeginArray
+ ejp.pushMode(jpmArrayMode)
+ case jttEndArray:
+ ejp.s = jpsSawEndArray
+
+ if ejp.popMode() != jpmArrayMode {
+ ejp.err = unexpectedTokenError(jt)
+ ejp.s = jpsInvalidState
+ }
+ case jttColon:
+ ejp.s = jpsSawColon
+ case jttComma:
+ ejp.s = jpsSawComma
+ case jttEOF:
+ ejp.s = jpsDoneState
+ if len(ejp.m) != 0 {
+ ejp.err = unexpectedTokenError(jt)
+ ejp.s = jpsInvalidState
+ }
+ case jttString:
+ switch ejp.s {
+ case jpsSawComma:
+ if ejp.peekMode() == jpmArrayMode {
+ ejp.s = jpsSawValue
+ ejp.v = extendJSONToken(jt)
+ return
+ }
+ fallthrough
+ case jpsSawBeginObject:
+ ejp.s = jpsSawKey
+ ejp.k = jt.v.(string)
+ return
+ }
+ fallthrough
+ default:
+ ejp.s = jpsSawValue
+ ejp.v = extendJSONToken(jt)
+ }
+}
+
+var jpsValidTransitionTokens = map[jsonParseState]map[jsonTokenType]bool{
+ jpsStartState: {
+ jttBeginObject: true,
+ jttBeginArray: true,
+ jttInt32: true,
+ jttInt64: true,
+ jttDouble: true,
+ jttString: true,
+ jttBool: true,
+ jttNull: true,
+ jttEOF: true,
+ },
+ jpsSawBeginObject: {
+ jttEndObject: true,
+ jttString: true,
+ },
+ jpsSawEndObject: {
+ jttEndObject: true,
+ jttEndArray: true,
+ jttComma: true,
+ jttEOF: true,
+ },
+ jpsSawBeginArray: {
+ jttBeginObject: true,
+ jttBeginArray: true,
+ jttEndArray: true,
+ jttInt32: true,
+ jttInt64: true,
+ jttDouble: true,
+ jttString: true,
+ jttBool: true,
+ jttNull: true,
+ },
+ jpsSawEndArray: {
+ jttEndObject: true,
+ jttEndArray: true,
+ jttComma: true,
+ jttEOF: true,
+ },
+ jpsSawColon: {
+ jttBeginObject: true,
+ jttBeginArray: true,
+ jttInt32: true,
+ jttInt64: true,
+ jttDouble: true,
+ jttString: true,
+ jttBool: true,
+ jttNull: true,
+ },
+ jpsSawComma: {
+ jttBeginObject: true,
+ jttBeginArray: true,
+ jttInt32: true,
+ jttInt64: true,
+ jttDouble: true,
+ jttString: true,
+ jttBool: true,
+ jttNull: true,
+ },
+ jpsSawKey: {
+ jttColon: true,
+ },
+ jpsSawValue: {
+ jttEndObject: true,
+ jttEndArray: true,
+ jttComma: true,
+ jttEOF: true,
+ },
+ jpsDoneState: {},
+ jpsInvalidState: {},
+}
+
+func (ejp *extJSONParser) validateToken(jtt jsonTokenType) bool {
+ switch ejp.s {
+ case jpsSawEndObject:
+ // if we are at depth zero and the next token is a '{',
+ // we can consider it valid only if we are not in array mode.
+ if jtt == jttBeginObject && ejp.depth == 0 {
+ return ejp.peekMode() != jpmArrayMode
+ }
+ case jpsSawComma:
+ switch ejp.peekMode() {
+ // the only valid next token after a comma inside a document is a string (a key)
+ case jpmObjectMode:
+ return jtt == jttString
+ case jpmInvalidMode:
+ return false
+ }
+ }
+
+ _, ok := jpsValidTransitionTokens[ejp.s][jtt]
+ return ok
+}
+
+// ensureExtValueType returns true if the current value has the expected
+// value type for single-key extended JSON types. For example, in
+// {"$numberInt": v}, v must be of type string.
+func (ejp *extJSONParser) ensureExtValueType(t bsontype.Type) bool {
+ switch t {
+ case bsontype.MinKey, bsontype.MaxKey:
+ return ejp.v.t == bsontype.Int32
+ case bsontype.Undefined:
+ return ejp.v.t == bsontype.Boolean
+ case bsontype.Int32, bsontype.Int64, bsontype.Double, bsontype.Decimal128, bsontype.Symbol, bsontype.ObjectID:
+ return ejp.v.t == bsontype.String
+ default:
+ return false
+ }
+}
+
+func (ejp *extJSONParser) pushMode(m jsonParseMode) {
+ ejp.m = append(ejp.m, m)
+}
+
+func (ejp *extJSONParser) popMode() jsonParseMode {
+ l := len(ejp.m)
+ if l == 0 {
+ return jpmInvalidMode
+ }
+
+ m := ejp.m[l-1]
+ ejp.m = ejp.m[:l-1]
+
+ return m
+}
+
+func (ejp *extJSONParser) peekMode() jsonParseMode {
+ l := len(ejp.m)
+ if l == 0 {
+ return jpmInvalidMode
+ }
+
+ return ejp.m[l-1]
+}
+
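+// extendJSONToken converts a scanner token into an extJSONValue, returning
+// nil for structural tokens (braces, brackets, colon, comma, EOF) that carry
+// no value.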
+func extendJSONToken(jt *jsonToken) *extJSONValue {
+ var t bsontype.Type
+
+ switch jt.t {
+ case jttInt32:
+ t = bsontype.Int32
+ case jttInt64:
+ t = bsontype.Int64
+ case jttDouble:
+ t = bsontype.Double
+ case jttString:
+ t = bsontype.String
+ case jttBool:
+ t = bsontype.Boolean
+ case jttNull:
+ t = bsontype.Null
+ default:
+ return nil
+ }
+
+ return &extJSONValue{t: t, v: jt.v}
+}
+
+func ensureColon(s jsonParseState, key string) error {
+ if s != jpsSawColon {
+ return fmt.Errorf("invalid JSON input: missing colon after key \"%s\"", key)
+ }
+
+ return nil
+}
+
+func invalidRequestError(s string) error {
+ return fmt.Errorf("invalid request to read %s", s)
+}
+
+func invalidJSONError(expected string) error {
+ return fmt.Errorf("invalid JSON input; expected %s", expected)
+}
+
+func invalidJSONErrorForType(expected string, t bsontype.Type) error {
+ return fmt.Errorf("invalid JSON input; expected %s for %s", expected, t)
+}
+
+func unexpectedTokenError(jt *jsonToken) error {
+ switch jt.t {
+ case jttInt32, jttInt64, jttDouble:
+ return fmt.Errorf("invalid JSON input; unexpected number (%v) at position %d", jt.v, jt.p)
+ case jttString:
+ return fmt.Errorf("invalid JSON input; unexpected string (\"%v\") at position %d", jt.v, jt.p)
+ case jttBool:
+ return fmt.Errorf("invalid JSON input; unexpected boolean literal (%v) at position %d", jt.v, jt.p)
+ case jttNull:
+ return fmt.Errorf("invalid JSON input; unexpected null literal at position %d", jt.p)
+ case jttEOF:
+ return fmt.Errorf("invalid JSON input; unexpected end of input at position %d", jt.p)
+ default:
+ return fmt.Errorf("invalid JSON input; unexpected %c at position %d", jt.v.(byte), jt.p)
+ }
+}
+
+func nestingDepthError(p, depth int) error {
+ return fmt.Errorf("invalid JSON input; nesting too deep (%d levels) at position %d", depth, p)
+}
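
The Binary branch of readValue above accepts three spellings of binary data: the current {"$binary": {"base64", "subType"}} document, the legacy {"$binary": ..., "$type": ...} pair, and the relaxed {"$uuid": ...} form, which is normalized to subtype 4. A sketch exercising all three, assuming the public bson.UnmarshalExtJSON entry point in the parent bson package (which drives this parser):

package main

import (
    "fmt"

    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/bson/primitive"
)

func main() {
    inputs := []string{
        `{"b": {"$binary": {"base64": "AQID", "subType": "00"}}}`,  // current form
        `{"b": {"$binary": "AQID", "$type": "00"}}`,                // legacy form
        `{"b": {"$uuid": "00112233-4455-6677-8899-aabbccddeeff"}}`, // relaxed $uuid, stored as subtype 4
    }
    for _, in := range inputs {
        var doc struct {
            B primitive.Binary `bson:"b"`
        }
        if err := bson.UnmarshalExtJSON([]byte(in), false, &doc); err != nil {
            fmt.Println("err:", err)
            continue
        }
        fmt.Printf("subtype=%d len=%d\n", doc.B.Subtype, len(doc.B.Data))
    }
}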
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_reader.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_reader.go
new file mode 100644
index 000000000..35832d73a
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_reader.go
@@ -0,0 +1,644 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonrw
+
+import (
+ "fmt"
+ "io"
+ "sync"
+
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+// ExtJSONValueReaderPool is a pool for ValueReaders that read ExtJSON.
+type ExtJSONValueReaderPool struct {
+ pool sync.Pool
+}
+
+// NewExtJSONValueReaderPool instantiates a new ExtJSONValueReaderPool.
+func NewExtJSONValueReaderPool() *ExtJSONValueReaderPool {
+ return &ExtJSONValueReaderPool{
+ pool: sync.Pool{
+ New: func() interface{} {
+ return new(extJSONValueReader)
+ },
+ },
+ }
+}
+
+// Get retrieves a ValueReader from the pool and uses r as the underlying ExtJSON source.
+func (bvrp *ExtJSONValueReaderPool) Get(r io.Reader, canonical bool) (ValueReader, error) {
+ vr := bvrp.pool.Get().(*extJSONValueReader)
+ return vr.reset(r, canonical)
+}
+
+// Put inserts a ValueReader into the pool. If the ValueReader is not an ExtJSON ValueReader, nothing
+// is inserted into the pool and ok will be false.
+func (bvrp *ExtJSONValueReaderPool) Put(vr ValueReader) (ok bool) {
+ bvr, ok := vr.(*extJSONValueReader)
+ if !ok {
+ return false
+ }
+
+ bvr, _ = bvr.reset(nil, false)
+ bvrp.pool.Put(bvr)
+ return true
+}
+
+type ejvrState struct {
+ mode mode
+ vType bsontype.Type
+ depth int
+}
+
+// extJSONValueReader is for reading extended JSON.
+type extJSONValueReader struct {
+ p *extJSONParser
+
+ stack []ejvrState
+ frame int
+}
+
+// NewExtJSONValueReader creates a new ValueReader from the given io.Reader.
+// It will interpret the JSON of r as canonical or relaxed extended JSON
+// according to the given canonical flag.
+func NewExtJSONValueReader(r io.Reader, canonical bool) (ValueReader, error) {
+ return newExtJSONValueReader(r, canonical)
+}
+
+func newExtJSONValueReader(r io.Reader, canonical bool) (*extJSONValueReader, error) {
+ ejvr := new(extJSONValueReader)
+ return ejvr.reset(r, canonical)
+}
+
+func (ejvr *extJSONValueReader) reset(r io.Reader, canonical bool) (*extJSONValueReader, error) {
+ p := newExtJSONParser(r, canonical)
+ typ, err := p.peekType()
+
+ if err != nil {
+ return nil, ErrInvalidJSON
+ }
+
+ var m mode
+ switch typ {
+ case bsontype.EmbeddedDocument:
+ m = mTopLevel
+ case bsontype.Array:
+ m = mArray
+ default:
+ m = mValue
+ }
+
+ stack := make([]ejvrState, 1, 5)
+ stack[0] = ejvrState{
+ mode: m,
+ vType: typ,
+ }
+ return &extJSONValueReader{
+ p: p,
+ stack: stack,
+ }, nil
+}
+
+func (ejvr *extJSONValueReader) advanceFrame() {
+ if ejvr.frame+1 >= len(ejvr.stack) { // We need to grow the stack
+ length := len(ejvr.stack)
+ if length+1 >= cap(ejvr.stack) {
+ // double it
+ buf := make([]ejvrState, 2*cap(ejvr.stack)+1)
+ copy(buf, ejvr.stack)
+ ejvr.stack = buf
+ }
+ ejvr.stack = ejvr.stack[:length+1]
+ }
+ ejvr.frame++
+
+ // Clean the stack
+ ejvr.stack[ejvr.frame].mode = 0
+ ejvr.stack[ejvr.frame].vType = 0
+ ejvr.stack[ejvr.frame].depth = 0
+}
+
+func (ejvr *extJSONValueReader) pushDocument() {
+ ejvr.advanceFrame()
+
+ ejvr.stack[ejvr.frame].mode = mDocument
+ ejvr.stack[ejvr.frame].depth = ejvr.p.depth
+}
+
+func (ejvr *extJSONValueReader) pushCodeWithScope() {
+ ejvr.advanceFrame()
+
+ ejvr.stack[ejvr.frame].mode = mCodeWithScope
+}
+
+func (ejvr *extJSONValueReader) pushArray() {
+ ejvr.advanceFrame()
+
+ ejvr.stack[ejvr.frame].mode = mArray
+}
+
+func (ejvr *extJSONValueReader) push(m mode, t bsontype.Type) {
+ ejvr.advanceFrame()
+
+ ejvr.stack[ejvr.frame].mode = m
+ ejvr.stack[ejvr.frame].vType = t
+}
+
+func (ejvr *extJSONValueReader) pop() {
+ switch ejvr.stack[ejvr.frame].mode {
+ case mElement, mValue:
+ ejvr.frame--
+ case mDocument, mArray, mCodeWithScope:
+ ejvr.frame -= 2 // we pop twice to jump over the mElement: mDocument -> mElement -> mDocument/mTopLevel/etc...
+ }
+}
+
+func (ejvr *extJSONValueReader) skipObject() {
+ // read entire object until depth returns to 0 (last ending } or ] seen)
+ depth := 1
+ for depth > 0 {
+ ejvr.p.advanceState()
+
+ // If object is empty, raise depth and continue. When emptyObject is true, the
+ // parser has already read both the opening and closing brackets of an empty
+ // object ("{}"), so the next valid token will be part of the parent document,
+ // not part of the nested document.
+ //
+ // If there is a comma, there are remaining fields, emptyObject must be set back
+ // to false, and comma must be skipped with advanceState().
+ if ejvr.p.emptyObject {
+ if ejvr.p.s == jpsSawComma {
+ ejvr.p.emptyObject = false
+ ejvr.p.advanceState()
+ }
+ depth--
+ continue
+ }
+
+ switch ejvr.p.s {
+ case jpsSawBeginObject, jpsSawBeginArray:
+ depth++
+ case jpsSawEndObject, jpsSawEndArray:
+ depth--
+ }
+ }
+}
+
+func (ejvr *extJSONValueReader) invalidTransitionErr(destination mode, name string, modes []mode) error {
+ te := TransitionError{
+ name: name,
+ current: ejvr.stack[ejvr.frame].mode,
+ destination: destination,
+ modes: modes,
+ action: "read",
+ }
+ if ejvr.frame != 0 {
+ te.parent = ejvr.stack[ejvr.frame-1].mode
+ }
+ return te
+}
+
+func (ejvr *extJSONValueReader) typeError(t bsontype.Type) error {
+ return fmt.Errorf("positioned on %s, but attempted to read %s", ejvr.stack[ejvr.frame].vType, t)
+}
+
+func (ejvr *extJSONValueReader) ensureElementValue(t bsontype.Type, destination mode, callerName string, addModes ...mode) error {
+ switch ejvr.stack[ejvr.frame].mode {
+ case mElement, mValue:
+ if ejvr.stack[ejvr.frame].vType != t {
+ return ejvr.typeError(t)
+ }
+ default:
+ modes := []mode{mElement, mValue}
+ if addModes != nil {
+ modes = append(modes, addModes...)
+ }
+ return ejvr.invalidTransitionErr(destination, callerName, modes)
+ }
+
+ return nil
+}
+
+func (ejvr *extJSONValueReader) Type() bsontype.Type {
+ return ejvr.stack[ejvr.frame].vType
+}
+
+func (ejvr *extJSONValueReader) Skip() error {
+ switch ejvr.stack[ejvr.frame].mode {
+ case mElement, mValue:
+ default:
+ return ejvr.invalidTransitionErr(0, "Skip", []mode{mElement, mValue})
+ }
+
+ defer ejvr.pop()
+
+ t := ejvr.stack[ejvr.frame].vType
+ switch t {
+ case bsontype.Array, bsontype.EmbeddedDocument, bsontype.CodeWithScope:
+ // read entire array, doc or CodeWithScope
+ ejvr.skipObject()
+ default:
+ _, err := ejvr.p.readValue(t)
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func (ejvr *extJSONValueReader) ReadArray() (ArrayReader, error) {
+ switch ejvr.stack[ejvr.frame].mode {
+ case mTopLevel: // allow reading array from top level
+ case mArray:
+ return ejvr, nil
+ default:
+ if err := ejvr.ensureElementValue(bsontype.Array, mArray, "ReadArray", mTopLevel, mArray); err != nil {
+ return nil, err
+ }
+ }
+
+ ejvr.pushArray()
+
+ return ejvr, nil
+}
+
+func (ejvr *extJSONValueReader) ReadBinary() (b []byte, btype byte, err error) {
+ if err := ejvr.ensureElementValue(bsontype.Binary, 0, "ReadBinary"); err != nil {
+ return nil, 0, err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.Binary)
+ if err != nil {
+ return nil, 0, err
+ }
+
+ b, btype, err = v.parseBinary()
+
+ ejvr.pop()
+ return b, btype, err
+}
+
+func (ejvr *extJSONValueReader) ReadBoolean() (bool, error) {
+ if err := ejvr.ensureElementValue(bsontype.Boolean, 0, "ReadBoolean"); err != nil {
+ return false, err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.Boolean)
+ if err != nil {
+ return false, err
+ }
+
+ if v.t != bsontype.Boolean {
+ return false, fmt.Errorf("expected type bool, but got type %s", v.t)
+ }
+
+ ejvr.pop()
+ return v.v.(bool), nil
+}
+
+func (ejvr *extJSONValueReader) ReadDocument() (DocumentReader, error) {
+ switch ejvr.stack[ejvr.frame].mode {
+ case mTopLevel:
+ return ejvr, nil
+ case mElement, mValue:
+ if ejvr.stack[ejvr.frame].vType != bsontype.EmbeddedDocument {
+ return nil, ejvr.typeError(bsontype.EmbeddedDocument)
+ }
+
+ ejvr.pushDocument()
+ return ejvr, nil
+ default:
+ return nil, ejvr.invalidTransitionErr(mDocument, "ReadDocument", []mode{mTopLevel, mElement, mValue})
+ }
+}
+
+func (ejvr *extJSONValueReader) ReadCodeWithScope() (code string, dr DocumentReader, err error) {
+ if err = ejvr.ensureElementValue(bsontype.CodeWithScope, 0, "ReadCodeWithScope"); err != nil {
+ return "", nil, err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.CodeWithScope)
+ if err != nil {
+ return "", nil, err
+ }
+
+ code, err = v.parseJavascript()
+
+ ejvr.pushCodeWithScope()
+ return code, ejvr, err
+}
+
+func (ejvr *extJSONValueReader) ReadDBPointer() (ns string, oid primitive.ObjectID, err error) {
+ if err = ejvr.ensureElementValue(bsontype.DBPointer, 0, "ReadDBPointer"); err != nil {
+ return "", primitive.NilObjectID, err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.DBPointer)
+ if err != nil {
+ return "", primitive.NilObjectID, err
+ }
+
+ ns, oid, err = v.parseDBPointer()
+
+ ejvr.pop()
+ return ns, oid, err
+}
+
+func (ejvr *extJSONValueReader) ReadDateTime() (int64, error) {
+ if err := ejvr.ensureElementValue(bsontype.DateTime, 0, "ReadDateTime"); err != nil {
+ return 0, err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.DateTime)
+ if err != nil {
+ return 0, err
+ }
+
+ d, err := v.parseDateTime()
+
+ ejvr.pop()
+ return d, err
+}
+
+func (ejvr *extJSONValueReader) ReadDecimal128() (primitive.Decimal128, error) {
+ if err := ejvr.ensureElementValue(bsontype.Decimal128, 0, "ReadDecimal128"); err != nil {
+ return primitive.Decimal128{}, err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.Decimal128)
+ if err != nil {
+ return primitive.Decimal128{}, err
+ }
+
+ d, err := v.parseDecimal128()
+
+ ejvr.pop()
+ return d, err
+}
+
+func (ejvr *extJSONValueReader) ReadDouble() (float64, error) {
+ if err := ejvr.ensureElementValue(bsontype.Double, 0, "ReadDouble"); err != nil {
+ return 0, err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.Double)
+ if err != nil {
+ return 0, err
+ }
+
+ d, err := v.parseDouble()
+
+ ejvr.pop()
+ return d, err
+}
+
+func (ejvr *extJSONValueReader) ReadInt32() (int32, error) {
+ if err := ejvr.ensureElementValue(bsontype.Int32, 0, "ReadInt32"); err != nil {
+ return 0, err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.Int32)
+ if err != nil {
+ return 0, err
+ }
+
+ i, err := v.parseInt32()
+
+ ejvr.pop()
+ return i, err
+}
+
+func (ejvr *extJSONValueReader) ReadInt64() (int64, error) {
+ if err := ejvr.ensureElementValue(bsontype.Int64, 0, "ReadInt64"); err != nil {
+ return 0, err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.Int64)
+ if err != nil {
+ return 0, err
+ }
+
+ i, err := v.parseInt64()
+
+ ejvr.pop()
+ return i, err
+}
+
+func (ejvr *extJSONValueReader) ReadJavascript() (code string, err error) {
+ if err = ejvr.ensureElementValue(bsontype.JavaScript, 0, "ReadJavascript"); err != nil {
+ return "", err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.JavaScript)
+ if err != nil {
+ return "", err
+ }
+
+ code, err = v.parseJavascript()
+
+ ejvr.pop()
+ return code, err
+}
+
+func (ejvr *extJSONValueReader) ReadMaxKey() error {
+ if err := ejvr.ensureElementValue(bsontype.MaxKey, 0, "ReadMaxKey"); err != nil {
+ return err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.MaxKey)
+ if err != nil {
+ return err
+ }
+
+ err = v.parseMinMaxKey("max")
+
+ ejvr.pop()
+ return err
+}
+
+func (ejvr *extJSONValueReader) ReadMinKey() error {
+ if err := ejvr.ensureElementValue(bsontype.MinKey, 0, "ReadMinKey"); err != nil {
+ return err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.MinKey)
+ if err != nil {
+ return err
+ }
+
+ err = v.parseMinMaxKey("min")
+
+ ejvr.pop()
+ return err
+}
+
+func (ejvr *extJSONValueReader) ReadNull() error {
+ if err := ejvr.ensureElementValue(bsontype.Null, 0, "ReadNull"); err != nil {
+ return err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.Null)
+ if err != nil {
+ return err
+ }
+
+ if v.t != bsontype.Null {
+ return fmt.Errorf("expected type null but got type %s", v.t)
+ }
+
+ ejvr.pop()
+ return nil
+}
+
+func (ejvr *extJSONValueReader) ReadObjectID() (primitive.ObjectID, error) {
+ if err := ejvr.ensureElementValue(bsontype.ObjectID, 0, "ReadObjectID"); err != nil {
+ return primitive.ObjectID{}, err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.ObjectID)
+ if err != nil {
+ return primitive.ObjectID{}, err
+ }
+
+ oid, err := v.parseObjectID()
+
+ ejvr.pop()
+ return oid, err
+}
+
+func (ejvr *extJSONValueReader) ReadRegex() (pattern string, options string, err error) {
+ if err = ejvr.ensureElementValue(bsontype.Regex, 0, "ReadRegex"); err != nil {
+ return "", "", err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.Regex)
+ if err != nil {
+ return "", "", err
+ }
+
+ pattern, options, err = v.parseRegex()
+
+ ejvr.pop()
+ return pattern, options, err
+}
+
+func (ejvr *extJSONValueReader) ReadString() (string, error) {
+ if err := ejvr.ensureElementValue(bsontype.String, 0, "ReadString"); err != nil {
+ return "", err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.String)
+ if err != nil {
+ return "", err
+ }
+
+ if v.t != bsontype.String {
+ return "", fmt.Errorf("expected type string but got type %s", v.t)
+ }
+
+ ejvr.pop()
+ return v.v.(string), nil
+}
+
+func (ejvr *extJSONValueReader) ReadSymbol() (symbol string, err error) {
+ if err = ejvr.ensureElementValue(bsontype.Symbol, 0, "ReadSymbol"); err != nil {
+ return "", err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.Symbol)
+ if err != nil {
+ return "", err
+ }
+
+ symbol, err = v.parseSymbol()
+
+ ejvr.pop()
+ return symbol, err
+}
+
+func (ejvr *extJSONValueReader) ReadTimestamp() (t uint32, i uint32, err error) {
+ if err = ejvr.ensureElementValue(bsontype.Timestamp, 0, "ReadTimestamp"); err != nil {
+ return 0, 0, err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.Timestamp)
+ if err != nil {
+ return 0, 0, err
+ }
+
+ t, i, err = v.parseTimestamp()
+
+ ejvr.pop()
+ return t, i, err
+}
+
+func (ejvr *extJSONValueReader) ReadUndefined() error {
+ if err := ejvr.ensureElementValue(bsontype.Undefined, 0, "ReadUndefined"); err != nil {
+ return err
+ }
+
+ v, err := ejvr.p.readValue(bsontype.Undefined)
+ if err != nil {
+ return err
+ }
+
+ err = v.parseUndefined()
+
+ ejvr.pop()
+ return err
+}
+
+func (ejvr *extJSONValueReader) ReadElement() (string, ValueReader, error) {
+ switch ejvr.stack[ejvr.frame].mode {
+ case mTopLevel, mDocument, mCodeWithScope:
+ default:
+ return "", nil, ejvr.invalidTransitionErr(mElement, "ReadElement", []mode{mTopLevel, mDocument, mCodeWithScope})
+ }
+
+ name, t, err := ejvr.p.readKey()
+
+ if err != nil {
+ if err == ErrEOD {
+ if ejvr.stack[ejvr.frame].mode == mCodeWithScope {
+ _, err := ejvr.p.peekType()
+ if err != nil {
+ return "", nil, err
+ }
+ }
+
+ ejvr.pop()
+ }
+
+ return "", nil, err
+ }
+
+ ejvr.push(mElement, t)
+ return name, ejvr, nil
+}
+
+func (ejvr *extJSONValueReader) ReadValue() (ValueReader, error) {
+ switch ejvr.stack[ejvr.frame].mode {
+ case mArray:
+ default:
+ return nil, ejvr.invalidTransitionErr(mValue, "ReadValue", []mode{mArray})
+ }
+
+ t, err := ejvr.p.peekType()
+ if err != nil {
+ if err == ErrEOA {
+ ejvr.pop()
+ }
+
+ return nil, err
+ }
+
+ ejvr.push(mValue, t)
+ return ejvr, nil
+}
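
A short usage sketch of the reader added here, using only the exported surface (NewExtJSONValueReader, ReadDocument, ReadElement, Type, Skip); ErrEOD is the package's end-of-document sentinel returned by ReadElement above:

package main

import (
    "fmt"
    "strings"

    "go.mongodb.org/mongo-driver/bson/bsonrw"
)

func main() {
    vr, err := bsonrw.NewExtJSONValueReader(strings.NewReader(`{"a": "x", "b": true}`), false)
    if err != nil {
        panic(err)
    }

    dr, err := vr.ReadDocument()
    if err != nil {
        panic(err)
    }

    for {
        name, evr, err := dr.ReadElement()
        if err == bsonrw.ErrEOD {
            break // end of the top-level document
        }
        if err != nil {
            panic(err)
        }
        fmt.Println(name, evr.Type()) // prints "a string", then "b boolean"
        if err := evr.Skip(); err != nil {
            panic(err)
        }
    }
}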
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_tables.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_tables.go
new file mode 100644
index 000000000..ba39c9601
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_tables.go
@@ -0,0 +1,223 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+//
+// Based on github.com/golang/go by The Go Authors
+// See THIRD-PARTY-NOTICES for original license terms.
+
+package bsonrw
+
+import "unicode/utf8"
+
+// safeSet holds the value true if the ASCII character with the given array
+// position can be represented inside a JSON string without any further
+// escaping.
+//
+// All values are true except for the ASCII control characters (0-31), the
+// double quote ("), and the backslash character ("\").
+var safeSet = [utf8.RuneSelf]bool{
+ ' ': true,
+ '!': true,
+ '"': false,
+ '#': true,
+ '$': true,
+ '%': true,
+ '&': true,
+ '\'': true,
+ '(': true,
+ ')': true,
+ '*': true,
+ '+': true,
+ ',': true,
+ '-': true,
+ '.': true,
+ '/': true,
+ '0': true,
+ '1': true,
+ '2': true,
+ '3': true,
+ '4': true,
+ '5': true,
+ '6': true,
+ '7': true,
+ '8': true,
+ '9': true,
+ ':': true,
+ ';': true,
+ '<': true,
+ '=': true,
+ '>': true,
+ '?': true,
+ '@': true,
+ 'A': true,
+ 'B': true,
+ 'C': true,
+ 'D': true,
+ 'E': true,
+ 'F': true,
+ 'G': true,
+ 'H': true,
+ 'I': true,
+ 'J': true,
+ 'K': true,
+ 'L': true,
+ 'M': true,
+ 'N': true,
+ 'O': true,
+ 'P': true,
+ 'Q': true,
+ 'R': true,
+ 'S': true,
+ 'T': true,
+ 'U': true,
+ 'V': true,
+ 'W': true,
+ 'X': true,
+ 'Y': true,
+ 'Z': true,
+ '[': true,
+ '\\': false,
+ ']': true,
+ '^': true,
+ '_': true,
+ '`': true,
+ 'a': true,
+ 'b': true,
+ 'c': true,
+ 'd': true,
+ 'e': true,
+ 'f': true,
+ 'g': true,
+ 'h': true,
+ 'i': true,
+ 'j': true,
+ 'k': true,
+ 'l': true,
+ 'm': true,
+ 'n': true,
+ 'o': true,
+ 'p': true,
+ 'q': true,
+ 'r': true,
+ 's': true,
+ 't': true,
+ 'u': true,
+ 'v': true,
+ 'w': true,
+ 'x': true,
+ 'y': true,
+ 'z': true,
+ '{': true,
+ '|': true,
+ '}': true,
+ '~': true,
+ '\u007f': true,
+}
+
+// htmlSafeSet holds the value true if the ASCII character with the given
+// array position can be safely represented inside a JSON string, embedded
+// inside of HTML <script> tags, without any additional escaping.
+//
+// All values are true except for the ASCII control characters (0-31), the
+// double quote ("), the backslash character ("\"), HTML opening and closing
+// tags ("<" and ">"), and the ampersand ("&").
+var htmlSafeSet = [utf8.RuneSelf]bool{
+ ' ': true,
+ '!': true,
+ '"': false,
+ '#': true,
+ '$': true,
+ '%': true,
+ '&': false,
+ '\'': true,
+ '(': true,
+ ')': true,
+ '*': true,
+ '+': true,
+ ',': true,
+ '-': true,
+ '.': true,
+ '/': true,
+ '0': true,
+ '1': true,
+ '2': true,
+ '3': true,
+ '4': true,
+ '5': true,
+ '6': true,
+ '7': true,
+ '8': true,
+ '9': true,
+ ':': true,
+ ';': true,
+ '<': false,
+ '=': true,
+ '>': false,
+ '?': true,
+ '@': true,
+ 'A': true,
+ 'B': true,
+ 'C': true,
+ 'D': true,
+ 'E': true,
+ 'F': true,
+ 'G': true,
+ 'H': true,
+ 'I': true,
+ 'J': true,
+ 'K': true,
+ 'L': true,
+ 'M': true,
+ 'N': true,
+ 'O': true,
+ 'P': true,
+ 'Q': true,
+ 'R': true,
+ 'S': true,
+ 'T': true,
+ 'U': true,
+ 'V': true,
+ 'W': true,
+ 'X': true,
+ 'Y': true,
+ 'Z': true,
+ '[': true,
+ '\\': false,
+ ']': true,
+ '^': true,
+ '_': true,
+ '`': true,
+ 'a': true,
+ 'b': true,
+ 'c': true,
+ 'd': true,
+ 'e': true,
+ 'f': true,
+ 'g': true,
+ 'h': true,
+ 'i': true,
+ 'j': true,
+ 'k': true,
+ 'l': true,
+ 'm': true,
+ 'n': true,
+ 'o': true,
+ 'p': true,
+ 'q': true,
+ 'r': true,
+ 's': true,
+ 't': true,
+ 'u': true,
+ 'v': true,
+ 'w': true,
+ 'x': true,
+ 'y': true,
+ 'z': true,
+ '{': true,
+ '|': true,
+ '}': true,
+ '~': true,
+ '\u007f': true,
+}
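
An in-package sketch of the lookup pattern these tables support; needsEscape is hypothetical (the real consumer is writeStringWithEscapes in extjson_writer.go), but the indexing is the same: ASCII bytes index directly into the table, and anything at or above utf8.RuneSelf belongs to a multi-byte rune handled by the UTF-8 decoding path.

// needsEscape reports whether any ASCII byte of s requires escaping under
// the selected table. Control characters 0-31 are zero-valued (false) in
// both tables, so they always report true. (Hypothetical helper, placed in
// package bsonrw, which already imports unicode/utf8.)
func needsEscape(s string, escapeHTML bool) bool {
    for i := 0; i < len(s); i++ {
        b := s[i]
        if b >= utf8.RuneSelf {
            continue
        }
        if escapeHTML && !htmlSafeSet[b] {
            return true
        }
        if !escapeHTML && !safeSet[b] {
            return true
        }
    }
    return false
}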
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_wrappers.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_wrappers.go
new file mode 100644
index 000000000..969570424
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_wrappers.go
@@ -0,0 +1,492 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonrw
+
+import (
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "math"
+ "strconv"
+ "time"
+
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+)
+
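+// wrapperKeyBSONType maps an extended JSON wrapper key (e.g. "$oid") to the
+// BSON type it encodes; unrecognized keys fall back to EmbeddedDocument.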
+func wrapperKeyBSONType(key string) bsontype.Type {
+ switch key {
+ case "$numberInt":
+ return bsontype.Int32
+ case "$numberLong":
+ return bsontype.Int64
+ case "$oid":
+ return bsontype.ObjectID
+ case "$symbol":
+ return bsontype.Symbol
+ case "$numberDouble":
+ return bsontype.Double
+ case "$numberDecimal":
+ return bsontype.Decimal128
+ case "$binary":
+ return bsontype.Binary
+ case "$code":
+ return bsontype.JavaScript
+ case "$scope":
+ return bsontype.CodeWithScope
+ case "$timestamp":
+ return bsontype.Timestamp
+ case "$regularExpression":
+ return bsontype.Regex
+ case "$dbPointer":
+ return bsontype.DBPointer
+ case "$date":
+ return bsontype.DateTime
+ case "$minKey":
+ return bsontype.MinKey
+ case "$maxKey":
+ return bsontype.MaxKey
+ case "$undefined":
+ return bsontype.Undefined
+ }
+
+ return bsontype.EmbeddedDocument
+}
+
+func (ejv *extJSONValue) parseBinary() (b []byte, subType byte, err error) {
+ if ejv.t != bsontype.EmbeddedDocument {
+ return nil, 0, fmt.Errorf("$binary value should be object, but instead is %s", ejv.t)
+ }
+
+ binObj := ejv.v.(*extJSONObject)
+ bFound := false
+ stFound := false
+
+ for i, key := range binObj.keys {
+ val := binObj.values[i]
+
+ switch key {
+ case "base64":
+ if bFound {
+ return nil, 0, errors.New("duplicate base64 key in $binary")
+ }
+
+ if val.t != bsontype.String {
+ return nil, 0, fmt.Errorf("$binary base64 value should be string, but instead is %s", val.t)
+ }
+
+ base64Bytes, err := base64.StdEncoding.DecodeString(val.v.(string))
+ if err != nil {
+ return nil, 0, fmt.Errorf("invalid $binary base64 string: %s", val.v.(string))
+ }
+
+ b = base64Bytes
+ bFound = true
+ case "subType":
+ if stFound {
+ return nil, 0, errors.New("duplicate subType key in $binary")
+ }
+
+ if val.t != bsontype.String {
+ return nil, 0, fmt.Errorf("$binary subType value should be string, but instead is %s", val.t)
+ }
+
+ i, err := strconv.ParseInt(val.v.(string), 16, 64)
+ if err != nil {
+ return nil, 0, fmt.Errorf("invalid $binary subType string: %s", val.v.(string))
+ }
+
+ subType = byte(i)
+ stFound = true
+ default:
+ return nil, 0, fmt.Errorf("invalid key in $binary object: %s", key)
+ }
+ }
+
+ if !bFound {
+ return nil, 0, errors.New("missing base64 field in $binary object")
+ }
+
+ if !stFound {
+ return nil, 0, errors.New("missing subType field in $binary object")
+ }
+
+ return b, subType, nil
+}
+
+func (ejv *extJSONValue) parseDBPointer() (ns string, oid primitive.ObjectID, err error) {
+ if ejv.t != bsontype.EmbeddedDocument {
+ return "", primitive.NilObjectID, fmt.Errorf("$dbPointer value should be object, but instead is %s", ejv.t)
+ }
+
+ dbpObj := ejv.v.(*extJSONObject)
+ oidFound := false
+ nsFound := false
+
+ for i, key := range dbpObj.keys {
+ val := dbpObj.values[i]
+
+ switch key {
+ case "$ref":
+ if nsFound {
+ return "", primitive.NilObjectID, errors.New("duplicate $ref key in $dbPointer")
+ }
+
+ if val.t != bsontype.String {
+ return "", primitive.NilObjectID, fmt.Errorf("$dbPointer $ref value should be string, but instead is %s", val.t)
+ }
+
+ ns = val.v.(string)
+ nsFound = true
+ case "$id":
+ if oidFound {
+ return "", primitive.NilObjectID, errors.New("duplicate $id key in $dbPointer")
+ }
+
+ if val.t != bsontype.String {
+ return "", primitive.NilObjectID, fmt.Errorf("$dbPointer $id value should be string, but instead is %s", val.t)
+ }
+
+ oid, err = primitive.ObjectIDFromHex(val.v.(string))
+ if err != nil {
+ return "", primitive.NilObjectID, err
+ }
+
+ oidFound = true
+ default:
+ return "", primitive.NilObjectID, fmt.Errorf("invalid key in $dbPointer object: %s", key)
+ }
+ }
+
+ if !nsFound {
+ return "", oid, errors.New("missing $ref field in $dbPointer object")
+ }
+
+ if !oidFound {
+ return "", oid, errors.New("missing $id field in $dbPointer object")
+ }
+
+ return ns, oid, nil
+}
+
+const (
+ rfc3339Milli = "2006-01-02T15:04:05.999Z07:00"
+)
+
+var (
+ timeFormats = []string{rfc3339Milli, "2006-01-02T15:04:05.999Z0700"}
+)
+
+func (ejv *extJSONValue) parseDateTime() (int64, error) {
+ switch ejv.t {
+ case bsontype.Int32:
+ return int64(ejv.v.(int32)), nil
+ case bsontype.Int64:
+ return ejv.v.(int64), nil
+ case bsontype.String:
+ return parseDatetimeString(ejv.v.(string))
+ case bsontype.EmbeddedDocument:
+ return parseDatetimeObject(ejv.v.(*extJSONObject))
+ default:
+ return 0, fmt.Errorf("$date value should be string or object, but instead is %s", ejv.t)
+ }
+}
+
+func parseDatetimeString(data string) (int64, error) {
+ var t time.Time
+ var err error
+ // try acceptable time formats until one matches
+ for _, format := range timeFormats {
+ t, err = time.Parse(format, data)
+ if err == nil {
+ break
+ }
+ }
+ if err != nil {
+ return 0, fmt.Errorf("invalid $date value string: %s", data)
+ }
+
+ return int64(primitive.NewDateTimeFromTime(t)), nil
+}
+
+func parseDatetimeObject(data *extJSONObject) (d int64, err error) {
+ dFound := false
+
+ for i, key := range data.keys {
+ val := data.values[i]
+
+ switch key {
+ case "$numberLong":
+ if dFound {
+ return 0, errors.New("duplicate $numberLong key in $date")
+ }
+
+ if val.t != bsontype.String {
+ return 0, fmt.Errorf("$date $numberLong field should be string, but instead is %s", val.t)
+ }
+
+ d, err = val.parseInt64()
+ if err != nil {
+ return 0, err
+ }
+ dFound = true
+ default:
+ return 0, fmt.Errorf("invalid key in $date object: %s", key)
+ }
+ }
+
+ if !dFound {
+ return 0, errors.New("missing $numberLong field in $date object")
+ }
+
+ return d, nil
+}
+
+func (ejv *extJSONValue) parseDecimal128() (primitive.Decimal128, error) {
+ if ejv.t != bsontype.String {
+ return primitive.Decimal128{}, fmt.Errorf("$numberDecimal value should be string, but instead is %s", ejv.t)
+ }
+
+ d, err := primitive.ParseDecimal128(ejv.v.(string))
+ if err != nil {
+ return primitive.Decimal128{}, fmt.Errorf("$invalid $numberDecimal string: %s", ejv.v.(string))
+ }
+
+ return d, nil
+}
+
+func (ejv *extJSONValue) parseDouble() (float64, error) {
+ if ejv.t == bsontype.Double {
+ return ejv.v.(float64), nil
+ }
+
+ if ejv.t != bsontype.String {
+ return 0, fmt.Errorf("$numberDouble value should be string, but instead is %s", ejv.t)
+ }
+
+ switch ejv.v.(string) {
+ case "Infinity":
+ return math.Inf(1), nil
+ case "-Infinity":
+ return math.Inf(-1), nil
+ case "NaN":
+ return math.NaN(), nil
+ }
+
+ f, err := strconv.ParseFloat(ejv.v.(string), 64)
+ if err != nil {
+ return 0, err
+ }
+
+ return f, nil
+}
+
+func (ejv *extJSONValue) parseInt32() (int32, error) {
+ if ejv.t == bsontype.Int32 {
+ return ejv.v.(int32), nil
+ }
+
+ if ejv.t != bsontype.String {
+ return 0, fmt.Errorf("$numberInt value should be string, but instead is %s", ejv.t)
+ }
+
+ i, err := strconv.ParseInt(ejv.v.(string), 10, 64)
+ if err != nil {
+ return 0, err
+ }
+
+ if i < math.MinInt32 || i > math.MaxInt32 {
+ return 0, fmt.Errorf("$numberInt value should be int32 but instead is int64: %d", i)
+ }
+
+ return int32(i), nil
+}
+
+func (ejv *extJSONValue) parseInt64() (int64, error) {
+ if ejv.t == bsontype.Int64 {
+ return ejv.v.(int64), nil
+ }
+
+ if ejv.t != bsontype.String {
+ return 0, fmt.Errorf("$numberLong value should be string, but instead is %s", ejv.t)
+ }
+
+ i, err := strconv.ParseInt(ejv.v.(string), 10, 64)
+ if err != nil {
+ return 0, err
+ }
+
+ return i, nil
+}
+
+func (ejv *extJSONValue) parseJavascript() (code string, err error) {
+ if ejv.t != bsontype.String {
+ return "", fmt.Errorf("$code value should be string, but instead is %s", ejv.t)
+ }
+
+ return ejv.v.(string), nil
+}
+
+func (ejv *extJSONValue) parseMinMaxKey(minmax string) error {
+ if ejv.t != bsontype.Int32 {
+ return fmt.Errorf("$%sKey value should be int32, but instead is %s", minmax, ejv.t)
+ }
+
+ if ejv.v.(int32) != 1 {
+ return fmt.Errorf("$%sKey value must be 1, but instead is %d", minmax, ejv.v.(int32))
+ }
+
+ return nil
+}
+
+func (ejv *extJSONValue) parseObjectID() (primitive.ObjectID, error) {
+ if ejv.t != bsontype.String {
+ return primitive.NilObjectID, fmt.Errorf("$oid value should be string, but instead is %s", ejv.t)
+ }
+
+ return primitive.ObjectIDFromHex(ejv.v.(string))
+}
+
+func (ejv *extJSONValue) parseRegex() (pattern, options string, err error) {
+ if ejv.t != bsontype.EmbeddedDocument {
+ return "", "", fmt.Errorf("$regularExpression value should be object, but instead is %s", ejv.t)
+ }
+
+ regexObj := ejv.v.(*extJSONObject)
+ patFound := false
+ optFound := false
+
+ for i, key := range regexObj.keys {
+ val := regexObj.values[i]
+
+ switch key {
+ case "pattern":
+ if patFound {
+ return "", "", errors.New("duplicate pattern key in $regularExpression")
+ }
+
+ if val.t != bsontype.String {
+ return "", "", fmt.Errorf("$regularExpression pattern value should be string, but instead is %s", val.t)
+ }
+
+ pattern = val.v.(string)
+ patFound = true
+ case "options":
+ if optFound {
+ return "", "", errors.New("duplicate options key in $regularExpression")
+ }
+
+ if val.t != bsontype.String {
+ return "", "", fmt.Errorf("$regularExpression options value should be string, but instead is %s", val.t)
+ }
+
+ options = val.v.(string)
+ optFound = true
+ default:
+ return "", "", fmt.Errorf("invalid key in $regularExpression object: %s", key)
+ }
+ }
+
+ if !patFound {
+ return "", "", errors.New("missing pattern field in $regularExpression object")
+ }
+
+ if !optFound {
+ return "", "", errors.New("missing options field in $regularExpression object")
+ }
+
+ return pattern, options, nil
+}
+
+func (ejv *extJSONValue) parseSymbol() (string, error) {
+ if ejv.t != bsontype.String {
+ return "", fmt.Errorf("$symbol value should be string, but instead is %s", ejv.t)
+ }
+
+ return ejv.v.(string), nil
+}
+
+func (ejv *extJSONValue) parseTimestamp() (t, i uint32, err error) {
+ if ejv.t != bsontype.EmbeddedDocument {
+ return 0, 0, fmt.Errorf("$timestamp value should be object, but instead is %s", ejv.t)
+ }
+
+ handleKey := func(key string, val *extJSONValue, flag bool) (uint32, error) {
+ if flag {
+ return 0, fmt.Errorf("duplicate %s key in $timestamp", key)
+ }
+
+ switch val.t {
+ case bsontype.Int32:
+ value := val.v.(int32)
+
+ if value < 0 {
+ return 0, fmt.Errorf("$timestamp %s number should be uint32: %d", key, value)
+ }
+
+ return uint32(value), nil
+ case bsontype.Int64:
+ value := val.v.(int64)
+ if value < 0 || value > int64(math.MaxUint32) {
+ return 0, fmt.Errorf("$timestamp %s number should be uint32: %d", key, value)
+ }
+
+ return uint32(value), nil
+ default:
+ return 0, fmt.Errorf("$timestamp %s value should be uint32, but instead is %s", key, val.t)
+ }
+ }
+
+ tsObj := ejv.v.(*extJSONObject)
+ tFound := false
+ iFound := false
+
+ for j, key := range tsObj.keys {
+ val := tsObj.values[j]
+
+ switch key {
+ case "t":
+ if t, err = handleKey(key, val, tFound); err != nil {
+ return 0, 0, err
+ }
+
+ tFound = true
+ case "i":
+ if i, err = handleKey(key, val, iFound); err != nil {
+ return 0, 0, err
+ }
+
+ iFound = true
+ default:
+ return 0, 0, fmt.Errorf("invalid key in $timestamp object: %s", key)
+ }
+ }
+
+ if !tFound {
+ return 0, 0, errors.New("missing t field in $timestamp object")
+ }
+
+ if !iFound {
+ return 0, 0, errors.New("missing i field in $timestamp object")
+ }
+
+ return t, i, nil
+}
+
+func (ejv *extJSONValue) parseUndefined() error {
+ if ejv.t != bsontype.Boolean {
+ return fmt.Errorf("undefined value should be boolean, but instead is %s", ejv.t)
+ }
+
+ if !ejv.v.(bool) {
+ return fmt.Errorf("$undefined balue boolean should be true, but instead is %v", ejv.v.(bool))
+ }
+
+ return nil
+}
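
parseDatetimeString above tries two layouts: rfc3339Milli and a variant whose numeric zone offset has no colon. A standalone sketch (not driver code) exercising both with the standard library; time.Time.UnixMilli requires Go 1.17 or newer:

package main

import (
    "fmt"
    "time"
)

func main() {
    const rfc3339Milli = "2006-01-02T15:04:05.999Z07:00"
    formats := []string{rfc3339Milli, "2006-01-02T15:04:05.999Z0700"}
    inputs := []string{
        "2021-01-02T03:04:05.006Z",     // matches the first layout
        "2021-01-02T03:04:05.006+0000", // offset without colon, matches the second
    }
    for _, in := range inputs {
        for _, f := range formats {
            if t, err := time.Parse(f, in); err == nil {
                fmt.Println(in, "->", t.UnixMilli())
                break
            }
        }
    }
}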
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_writer.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_writer.go
new file mode 100644
index 000000000..99ed524b7
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/extjson_writer.go
@@ -0,0 +1,732 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonrw
+
+import (
+ "bytes"
+ "encoding/base64"
+ "fmt"
+ "io"
+ "math"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+ "unicode/utf8"
+
+ "go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+// ExtJSONValueWriterPool is a pool for ExtJSON ValueWriters.
+type ExtJSONValueWriterPool struct {
+ pool sync.Pool
+}
+
+// NewExtJSONValueWriterPool creates a new pool for ValueWriter instances that write to ExtJSON.
+func NewExtJSONValueWriterPool() *ExtJSONValueWriterPool {
+ return &ExtJSONValueWriterPool{
+ pool: sync.Pool{
+ New: func() interface{} {
+ return new(extJSONValueWriter)
+ },
+ },
+ }
+}
+
+// Get retrieves an ExtJSON ValueWriter from the pool and resets it to use w as the destination.
+func (bvwp *ExtJSONValueWriterPool) Get(w io.Writer, canonical, escapeHTML bool) ValueWriter {
+ vw := bvwp.pool.Get().(*extJSONValueWriter)
+ if writer, ok := w.(*SliceWriter); ok {
+ vw.reset(*writer, canonical, escapeHTML)
+ vw.w = writer
+ return vw
+ }
+ vw.buf = vw.buf[:0]
+ vw.w = w
+ return vw
+}
+
+// Put inserts a ValueWriter into the pool. If the ValueWriter is not an ExtJSON ValueWriter, nothing
+// happens and ok will be false.
+func (bvwp *ExtJSONValueWriterPool) Put(vw ValueWriter) (ok bool) {
+ bvw, ok := vw.(*extJSONValueWriter)
+ if !ok {
+ return false
+ }
+
+ if _, ok := bvw.w.(*SliceWriter); ok {
+ bvw.buf = nil
+ }
+ bvw.w = nil
+
+ bvwp.pool.Put(bvw)
+ return true
+}
+
+type ejvwState struct {
+ mode mode
+}
+
+type extJSONValueWriter struct {
+ w io.Writer
+ buf []byte
+
+ stack []ejvwState
+ frame int64
+ canonical bool
+ escapeHTML bool
+}
+
+// NewExtJSONValueWriter creates a ValueWriter that writes Extended JSON to w.
+func NewExtJSONValueWriter(w io.Writer, canonical, escapeHTML bool) (ValueWriter, error) {
+ if w == nil {
+ return nil, errNilWriter
+ }
+
+ return newExtJSONWriter(w, canonical, escapeHTML), nil
+}
+
+func newExtJSONWriter(w io.Writer, canonical, escapeHTML bool) *extJSONValueWriter {
+ stack := make([]ejvwState, 1, 5)
+ stack[0] = ejvwState{mode: mTopLevel}
+
+ return &extJSONValueWriter{
+ w: w,
+ buf: []byte{},
+ stack: stack,
+ canonical: canonical,
+ escapeHTML: escapeHTML,
+ }
+}
+
+func newExtJSONWriterFromSlice(buf []byte, canonical, escapeHTML bool) *extJSONValueWriter {
+ stack := make([]ejvwState, 1, 5)
+ stack[0] = ejvwState{mode: mTopLevel}
+
+ return &extJSONValueWriter{
+ buf: buf,
+ stack: stack,
+ canonical: canonical,
+ escapeHTML: escapeHTML,
+ }
+}
+
+func (ejvw *extJSONValueWriter) reset(buf []byte, canonical, escapeHTML bool) {
+ if ejvw.stack == nil {
+ ejvw.stack = make([]ejvwState, 1, 5)
+ }
+
+ ejvw.stack = ejvw.stack[:1]
+ ejvw.stack[0] = ejvwState{mode: mTopLevel}
+ ejvw.canonical = canonical
+ ejvw.escapeHTML = escapeHTML
+ ejvw.frame = 0
+ ejvw.buf = buf
+ ejvw.w = nil
+}
+
+func (ejvw *extJSONValueWriter) advanceFrame() {
+ if ejvw.frame+1 >= int64(len(ejvw.stack)) { // We need to grow the stack
+ length := len(ejvw.stack)
+ if length+1 >= cap(ejvw.stack) {
+ // double it
+ buf := make([]ejvwState, 2*cap(ejvw.stack)+1)
+ copy(buf, ejvw.stack)
+ ejvw.stack = buf
+ }
+ ejvw.stack = ejvw.stack[:length+1]
+ }
+ ejvw.frame++
+}
+
+func (ejvw *extJSONValueWriter) push(m mode) {
+ ejvw.advanceFrame()
+
+ ejvw.stack[ejvw.frame].mode = m
+}
+
+func (ejvw *extJSONValueWriter) pop() {
+ switch ejvw.stack[ejvw.frame].mode {
+ case mElement, mValue:
+ ejvw.frame--
+ case mDocument, mArray, mCodeWithScope:
+ ejvw.frame -= 2 // we pop twice to jump over the mElement: mDocument -> mElement -> mDocument/mTopLevel/etc...
+ }
+}
+
+func (ejvw *extJSONValueWriter) invalidTransitionErr(destination mode, name string, modes []mode) error {
+ te := TransitionError{
+ name: name,
+ current: ejvw.stack[ejvw.frame].mode,
+ destination: destination,
+ modes: modes,
+ action: "write",
+ }
+ if ejvw.frame != 0 {
+ te.parent = ejvw.stack[ejvw.frame-1].mode
+ }
+ return te
+}
+
+func (ejvw *extJSONValueWriter) ensureElementValue(destination mode, callerName string, addmodes ...mode) error {
+ switch ejvw.stack[ejvw.frame].mode {
+ case mElement, mValue:
+ default:
+ modes := []mode{mElement, mValue}
+ if addmodes != nil {
+ modes = append(modes, addmodes...)
+ }
+ return ejvw.invalidTransitionErr(destination, callerName, modes)
+ }
+
+ return nil
+}
+
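+// writeExtendedSingleValue appends a one-key wrapper document to the buffer:
+// ("numberInt", "5", true) appends {"$numberInt":"5"}, while
+// ("date", `{"$numberLong":"0"}`, false) appends {"$date":{"$numberLong":"0"}}.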
+func (ejvw *extJSONValueWriter) writeExtendedSingleValue(key string, value string, quotes bool) {
+ var s string
+ if quotes {
+ s = fmt.Sprintf(`{"$%s":"%s"}`, key, value)
+ } else {
+ s = fmt.Sprintf(`{"$%s":%s}`, key, value)
+ }
+
+ ejvw.buf = append(ejvw.buf, []byte(s)...)
+}
+
+func (ejvw *extJSONValueWriter) WriteArray() (ArrayWriter, error) {
+ if err := ejvw.ensureElementValue(mArray, "WriteArray"); err != nil {
+ return nil, err
+ }
+
+ ejvw.buf = append(ejvw.buf, '[')
+
+ ejvw.push(mArray)
+ return ejvw, nil
+}
+
+func (ejvw *extJSONValueWriter) WriteBinary(b []byte) error {
+ return ejvw.WriteBinaryWithSubtype(b, 0x00)
+}
+
+func (ejvw *extJSONValueWriter) WriteBinaryWithSubtype(b []byte, btype byte) error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteBinaryWithSubtype"); err != nil {
+ return err
+ }
+
+ var buf bytes.Buffer
+ buf.WriteString(`{"$binary":{"base64":"`)
+ buf.WriteString(base64.StdEncoding.EncodeToString(b))
+ buf.WriteString(fmt.Sprintf(`","subType":"%02x"}},`, btype))
+
+ ejvw.buf = append(ejvw.buf, buf.Bytes()...)
+
+ ejvw.pop()
+ return nil
+}
+
+func (ejvw *extJSONValueWriter) WriteBoolean(b bool) error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteBoolean"); err != nil {
+ return err
+ }
+
+ ejvw.buf = append(ejvw.buf, []byte(strconv.FormatBool(b))...)
+ ejvw.buf = append(ejvw.buf, ',')
+
+ ejvw.pop()
+ return nil
+}
+
+func (ejvw *extJSONValueWriter) WriteCodeWithScope(code string) (DocumentWriter, error) {
+ if err := ejvw.ensureElementValue(mCodeWithScope, "WriteCodeWithScope"); err != nil {
+ return nil, err
+ }
+
+ var buf bytes.Buffer
+ buf.WriteString(`{"$code":`)
+ writeStringWithEscapes(code, &buf, ejvw.escapeHTML)
+ buf.WriteString(`,"$scope":{`)
+
+ ejvw.buf = append(ejvw.buf, buf.Bytes()...)
+
+ ejvw.push(mCodeWithScope)
+ return ejvw, nil
+}
+
+func (ejvw *extJSONValueWriter) WriteDBPointer(ns string, oid primitive.ObjectID) error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteDBPointer"); err != nil {
+ return err
+ }
+
+ var buf bytes.Buffer
+ buf.WriteString(`{"$dbPointer":{"$ref":"`)
+ buf.WriteString(ns)
+ buf.WriteString(`","$id":{"$oid":"`)
+ buf.WriteString(oid.Hex())
+ buf.WriteString(`"}}},`)
+
+ ejvw.buf = append(ejvw.buf, buf.Bytes()...)
+
+ ejvw.pop()
+ return nil
+}
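+
+// For example (illustrative): a DBPointer with namespace "db.coll" renders as
+// {"$dbPointer":{"$ref":"db.coll","$id":{"$oid":"<24 hex digits>"}}}.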
+
+func (ejvw *extJSONValueWriter) WriteDateTime(dt int64) error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteDateTime"); err != nil {
+ return err
+ }
+
+ t := time.Unix(dt/1e3, dt%1e3*1e6).UTC()
+
+ if ejvw.canonical || t.Year() < 1970 || t.Year() > 9999 {
+ s := fmt.Sprintf(`{"$numberLong":"%d"}`, dt)
+ ejvw.writeExtendedSingleValue("date", s, false)
+ } else {
+ ejvw.writeExtendedSingleValue("date", t.Format(rfc3339Milli), true)
+ }
+
+ ejvw.buf = append(ejvw.buf, ',')
+
+ ejvw.pop()
+ return nil
+}
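+
+// For example (illustrative, assuming rfc3339Milli is the usual
+// millisecond-precision RFC 3339 layout): WriteDateTime(1000) emits
+// {"$date":"1970-01-01T00:00:01Z"} in relaxed mode, and
+// {"$date":{"$numberLong":"1000"}} in canonical mode or for dates outside
+// the years 1970-9999.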
+
+func (ejvw *extJSONValueWriter) WriteDecimal128(d primitive.Decimal128) error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteDecimal128"); err != nil {
+ return err
+ }
+
+ ejvw.writeExtendedSingleValue("numberDecimal", d.String(), true)
+ ejvw.buf = append(ejvw.buf, ',')
+
+ ejvw.pop()
+ return nil
+}
+
+func (ejvw *extJSONValueWriter) WriteDocument() (DocumentWriter, error) {
+ if ejvw.stack[ejvw.frame].mode == mTopLevel {
+ ejvw.buf = append(ejvw.buf, '{')
+ return ejvw, nil
+ }
+
+ if err := ejvw.ensureElementValue(mDocument, "WriteDocument", mTopLevel); err != nil {
+ return nil, err
+ }
+
+ ejvw.buf = append(ejvw.buf, '{')
+ ejvw.push(mDocument)
+ return ejvw, nil
+}
+
+func (ejvw *extJSONValueWriter) WriteDouble(f float64) error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteDouble"); err != nil {
+ return err
+ }
+
+ s := formatDouble(f)
+
+ if ejvw.canonical {
+ ejvw.writeExtendedSingleValue("numberDouble", s, true)
+ } else {
+ switch s {
+ case "Infinity":
+ fallthrough
+ case "-Infinity":
+ fallthrough
+ case "NaN":
+ s = fmt.Sprintf(`{"$numberDouble":"%s"}`, s)
+ }
+ ejvw.buf = append(ejvw.buf, []byte(s)...)
+ }
+
+ ejvw.buf = append(ejvw.buf, ',')
+
+ ejvw.pop()
+ return nil
+}
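+
+// For example (illustrative): WriteDouble(3.5) emits 3.5 in relaxed mode and
+// {"$numberDouble":"3.5"} in canonical mode; non-finite values are always
+// wrapped, e.g. NaN becomes {"$numberDouble":"NaN"}.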
+
+func (ejvw *extJSONValueWriter) WriteInt32(i int32) error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteInt32"); err != nil {
+ return err
+ }
+
+ s := strconv.FormatInt(int64(i), 10)
+
+ if ejvw.canonical {
+ ejvw.writeExtendedSingleValue("numberInt", s, true)
+ } else {
+ ejvw.buf = append(ejvw.buf, []byte(s)...)
+ }
+
+ ejvw.buf = append(ejvw.buf, ',')
+
+ ejvw.pop()
+ return nil
+}
+
+func (ejvw *extJSONValueWriter) WriteInt64(i int64) error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteInt64"); err != nil {
+ return err
+ }
+
+ s := strconv.FormatInt(i, 10)
+
+ if ejvw.canonical {
+ ejvw.writeExtendedSingleValue("numberLong", s, true)
+ } else {
+ ejvw.buf = append(ejvw.buf, []byte(s)...)
+ }
+
+ ejvw.buf = append(ejvw.buf, ',')
+
+ ejvw.pop()
+ return nil
+}
+
+func (ejvw *extJSONValueWriter) WriteJavascript(code string) error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteJavascript"); err != nil {
+ return err
+ }
+
+ var buf bytes.Buffer
+ writeStringWithEscapes(code, &buf, ejvw.escapeHTML)
+
+ ejvw.writeExtendedSingleValue("code", buf.String(), false)
+ ejvw.buf = append(ejvw.buf, ',')
+
+ ejvw.pop()
+ return nil
+}
+
+func (ejvw *extJSONValueWriter) WriteMaxKey() error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteMaxKey"); err != nil {
+ return err
+ }
+
+ ejvw.writeExtendedSingleValue("maxKey", "1", false)
+ ejvw.buf = append(ejvw.buf, ',')
+
+ ejvw.pop()
+ return nil
+}
+
+func (ejvw *extJSONValueWriter) WriteMinKey() error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteMinKey"); err != nil {
+ return err
+ }
+
+ ejvw.writeExtendedSingleValue("minKey", "1", false)
+ ejvw.buf = append(ejvw.buf, ',')
+
+ ejvw.pop()
+ return nil
+}
+
+func (ejvw *extJSONValueWriter) WriteNull() error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteNull"); err != nil {
+ return err
+ }
+
+ ejvw.buf = append(ejvw.buf, []byte("null")...)
+ ejvw.buf = append(ejvw.buf, ',')
+
+ ejvw.pop()
+ return nil
+}
+
+func (ejvw *extJSONValueWriter) WriteObjectID(oid primitive.ObjectID) error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteObjectID"); err != nil {
+ return err
+ }
+
+ ejvw.writeExtendedSingleValue("oid", oid.Hex(), true)
+ ejvw.buf = append(ejvw.buf, ',')
+
+ ejvw.pop()
+ return nil
+}
+
+func (ejvw *extJSONValueWriter) WriteRegex(pattern string, options string) error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteRegex"); err != nil {
+ return err
+ }
+
+ var buf bytes.Buffer
+ buf.WriteString(`{"$regularExpression":{"pattern":`)
+ writeStringWithEscapes(pattern, &buf, ejvw.escapeHTML)
+ buf.WriteString(`,"options":"`)
+ buf.WriteString(sortStringAlphebeticAscending(options))
+ buf.WriteString(`"}},`)
+
+ ejvw.buf = append(ejvw.buf, buf.Bytes()...)
+
+ ejvw.pop()
+ return nil
+}
+
+func (ejvw *extJSONValueWriter) WriteString(s string) error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteString"); err != nil {
+ return err
+ }
+
+ var buf bytes.Buffer
+ writeStringWithEscapes(s, &buf, ejvw.escapeHTML)
+
+ ejvw.buf = append(ejvw.buf, buf.Bytes()...)
+ ejvw.buf = append(ejvw.buf, ',')
+
+ ejvw.pop()
+ return nil
+}
+
+func (ejvw *extJSONValueWriter) WriteSymbol(symbol string) error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteSymbol"); err != nil {
+ return err
+ }
+
+ var buf bytes.Buffer
+ writeStringWithEscapes(symbol, &buf, ejvw.escapeHTML)
+
+ ejvw.writeExtendedSingleValue("symbol", buf.String(), false)
+ ejvw.buf = append(ejvw.buf, ',')
+
+ ejvw.pop()
+ return nil
+}
+
+func (ejvw *extJSONValueWriter) WriteTimestamp(t uint32, i uint32) error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteTimestamp"); err != nil {
+ return err
+ }
+
+ var buf bytes.Buffer
+ buf.WriteString(`{"$timestamp":{"t":`)
+ buf.WriteString(strconv.FormatUint(uint64(t), 10))
+ buf.WriteString(`,"i":`)
+ buf.WriteString(strconv.FormatUint(uint64(i), 10))
+ buf.WriteString(`}},`)
+
+ ejvw.buf = append(ejvw.buf, buf.Bytes()...)
+
+ ejvw.pop()
+ return nil
+}
+
+func (ejvw *extJSONValueWriter) WriteUndefined() error {
+ if err := ejvw.ensureElementValue(mode(0), "WriteUndefined"); err != nil {
+ return err
+ }
+
+ ejvw.writeExtendedSingleValue("undefined", "true", false)
+ ejvw.buf = append(ejvw.buf, ',')
+
+ ejvw.pop()
+ return nil
+}
+
+func (ejvw *extJSONValueWriter) WriteDocumentElement(key string) (ValueWriter, error) {
+ switch ejvw.stack[ejvw.frame].mode {
+ case mDocument, mTopLevel, mCodeWithScope:
+ var buf bytes.Buffer
+ writeStringWithEscapes(key, &buf, ejvw.escapeHTML)
+
+ ejvw.buf = append(ejvw.buf, []byte(fmt.Sprintf(`%s:`, buf.String()))...)
+ ejvw.push(mElement)
+ default:
+ return nil, ejvw.invalidTransitionErr(mElement, "WriteDocumentElement", []mode{mDocument, mTopLevel, mCodeWithScope})
+ }
+
+ return ejvw, nil
+}
+
+func (ejvw *extJSONValueWriter) WriteDocumentEnd() error {
+ switch ejvw.stack[ejvw.frame].mode {
+ case mDocument, mTopLevel, mCodeWithScope:
+ default:
+ return fmt.Errorf("incorrect mode to end document: %s", ejvw.stack[ejvw.frame].mode)
+ }
+
+ // close the document
+ if ejvw.buf[len(ejvw.buf)-1] == ',' {
+ ejvw.buf[len(ejvw.buf)-1] = '}'
+ } else {
+ ejvw.buf = append(ejvw.buf, '}')
+ }
+
+ switch ejvw.stack[ejvw.frame].mode {
+ case mCodeWithScope:
+ ejvw.buf = append(ejvw.buf, '}')
+ fallthrough
+ case mDocument:
+ ejvw.buf = append(ejvw.buf, ',')
+ case mTopLevel:
+ if ejvw.w != nil {
+ if _, err := ejvw.w.Write(ejvw.buf); err != nil {
+ return err
+ }
+ ejvw.buf = ejvw.buf[:0]
+ }
+ }
+
+ ejvw.pop()
+ return nil
+}
+
+func (ejvw *extJSONValueWriter) WriteArrayElement() (ValueWriter, error) {
+ switch ejvw.stack[ejvw.frame].mode {
+ case mArray:
+ ejvw.push(mValue)
+ default:
+ return nil, ejvw.invalidTransitionErr(mValue, "WriteArrayElement", []mode{mArray})
+ }
+
+ return ejvw, nil
+}
+
+func (ejvw *extJSONValueWriter) WriteArrayEnd() error {
+ switch ejvw.stack[ejvw.frame].mode {
+ case mArray:
+ // close the array
+ if ejvw.buf[len(ejvw.buf)-1] == ',' {
+ ejvw.buf[len(ejvw.buf)-1] = ']'
+ } else {
+ ejvw.buf = append(ejvw.buf, ']')
+ }
+
+ ejvw.buf = append(ejvw.buf, ',')
+
+ ejvw.pop()
+ default:
+ return fmt.Errorf("incorrect mode to end array: %s", ejvw.stack[ejvw.frame].mode)
+ }
+
+ return nil
+}
+
+func formatDouble(f float64) string {
+ var s string
+ if math.IsInf(f, 1) {
+ s = "Infinity"
+ } else if math.IsInf(f, -1) {
+ s = "-Infinity"
+ } else if math.IsNaN(f) {
+ s = "NaN"
+ } else {
+ // Print exactly one decimal place for integers; otherwise, print as many as
+ // are necessary to represent the value exactly.
+ s = strconv.FormatFloat(f, 'G', -1, 64)
+ if !strings.ContainsRune(s, 'E') && !strings.ContainsRune(s, '.') {
+ s += ".0"
+ }
+ }
+
+ return s
+}
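+
+// Sample outputs (illustrative): formatDouble(3) == "3.0",
+// formatDouble(0.5) == "0.5", formatDouble(1e21) == "1E+21", and
+// formatDouble(math.Inf(-1)) == "-Infinity".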
+
+var hexChars = "0123456789abcdef"
+
+func writeStringWithEscapes(s string, buf *bytes.Buffer, escapeHTML bool) {
+ buf.WriteByte('"')
+ start := 0
+ for i := 0; i < len(s); {
+ if b := s[i]; b < utf8.RuneSelf {
+ if htmlSafeSet[b] || (!escapeHTML && safeSet[b]) {
+ i++
+ continue
+ }
+ if start < i {
+ buf.WriteString(s[start:i])
+ }
+ switch b {
+ case '\\', '"':
+ buf.WriteByte('\\')
+ buf.WriteByte(b)
+ case '\n':
+ buf.WriteByte('\\')
+ buf.WriteByte('n')
+ case '\r':
+ buf.WriteByte('\\')
+ buf.WriteByte('r')
+ case '\t':
+ buf.WriteByte('\\')
+ buf.WriteByte('t')
+ case '\b':
+ buf.WriteByte('\\')
+ buf.WriteByte('b')
+ case '\f':
+ buf.WriteByte('\\')
+ buf.WriteByte('f')
+ default:
+ // This encodes bytes < 0x20 except for \t, \n and \r.
+ // If escapeHTML is set, it also escapes <, >, and &
+ // because they can lead to security holes when
+ // user-controlled strings are rendered into JSON
+ // and served to some browsers.
+ buf.WriteString(`\u00`)
+ buf.WriteByte(hexChars[b>>4])
+ buf.WriteByte(hexChars[b&0xF])
+ }
+ i++
+ start = i
+ continue
+ }
+ c, size := utf8.DecodeRuneInString(s[i:])
+ if c == utf8.RuneError && size == 1 {
+ if start < i {
+ buf.WriteString(s[start:i])
+ }
+ buf.WriteString(`\ufffd`)
+ i += size
+ start = i
+ continue
+ }
+ // U+2028 is LINE SEPARATOR.
+ // U+2029 is PARAGRAPH SEPARATOR.
+ // They are both technically valid characters in JSON strings,
+ // but don't work in JSONP, which has to be evaluated as JavaScript,
+ // and can lead to security holes there. It is valid JSON to
+ // escape them, so we do so unconditionally.
+ // See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion.
+ if c == '\u2028' || c == '\u2029' {
+ if start < i {
+ buf.WriteString(s[start:i])
+ }
+ buf.WriteString(`\u202`)
+ buf.WriteByte(hexChars[c&0xF])
+ i += size
+ start = i
+ continue
+ }
+ i += size
+ }
+ if start < len(s) {
+ buf.WriteString(s[start:])
+ }
+ buf.WriteByte('"')
+}
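+
+// For example (illustrative): writeStringWithEscapes(`a<b`, buf, true)
+// appends "a\u003cb", while with escapeHTML false it appends "a<b"; a
+// literal newline is always escaped as \n.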
+
+type sortableString []rune
+
+func (ss sortableString) Len() int {
+ return len(ss)
+}
+
+func (ss sortableString) Less(i, j int) bool {
+ return ss[i] < ss[j]
+}
+
+func (ss sortableString) Swap(i, j int) {
+ oldI := ss[i]
+ ss[i] = ss[j]
+ ss[j] = oldI
+}
+
+func sortStringAlphebeticAscending(s string) string {
+ ss := sortableString([]rune(s))
+ sort.Sort(ss)
+ return string([]rune(ss))
+}
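+
+// For example (illustrative): sortStringAlphebeticAscending("msi") returns
+// "ims", matching the alphabetical ordering Extended JSON expects for regex
+// options.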
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/json_scanner.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/json_scanner.go
new file mode 100644
index 000000000..cd4843a3a
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/json_scanner.go
@@ -0,0 +1,528 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonrw
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "math"
+ "strconv"
+ "unicode"
+ "unicode/utf16"
+)
+
+type jsonTokenType byte
+
+const (
+ jttBeginObject jsonTokenType = iota
+ jttEndObject
+ jttBeginArray
+ jttEndArray
+ jttColon
+ jttComma
+ jttInt32
+ jttInt64
+ jttDouble
+ jttString
+ jttBool
+ jttNull
+ jttEOF
+)
+
+type jsonToken struct {
+ t jsonTokenType
+ v interface{}
+ p int
+}
+
+type jsonScanner struct {
+ r io.Reader
+ buf []byte
+ pos int
+ lastReadErr error
+}
+
+// nextToken returns the next JSON token if one exists. A token is a character
+// of the JSON grammar, a number, a string, or a literal.
+func (js *jsonScanner) nextToken() (*jsonToken, error) {
+ c, err := js.readNextByte()
+
+ // keep reading until a non-space is encountered (break on read error or EOF)
+ for isWhiteSpace(c) && err == nil {
+ c, err = js.readNextByte()
+ }
+
+ if err == io.EOF {
+ return &jsonToken{t: jttEOF}, nil
+ } else if err != nil {
+ return nil, err
+ }
+
+ // switch on the character
+ switch c {
+ case '{':
+ return &jsonToken{t: jttBeginObject, v: byte('{'), p: js.pos - 1}, nil
+ case '}':
+ return &jsonToken{t: jttEndObject, v: byte('}'), p: js.pos - 1}, nil
+ case '[':
+ return &jsonToken{t: jttBeginArray, v: byte('['), p: js.pos - 1}, nil
+ case ']':
+ return &jsonToken{t: jttEndArray, v: byte(']'), p: js.pos - 1}, nil
+ case ':':
+ return &jsonToken{t: jttColon, v: byte(':'), p: js.pos - 1}, nil
+ case ',':
+ return &jsonToken{t: jttComma, v: byte(','), p: js.pos - 1}, nil
+ case '"': // RFC-8259 only allows for double quotes (") not single (')
+ return js.scanString()
+ default:
+ // check if it's a number
+ if c == '-' || isDigit(c) {
+ return js.scanNumber(c)
+ } else if c == 't' || c == 'f' || c == 'n' {
+ // maybe a literal
+ return js.scanLiteral(c)
+ } else {
+ return nil, fmt.Errorf("invalid JSON input. Position: %d. Character: %c", js.pos-1, c)
+ }
+ }
+}
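+
+// As an illustration, scanning the input {"a":1} yields the token sequence
+// jttBeginObject, jttString("a"), jttColon, jttInt32(1), jttEndObject, and
+// finally jttEOF.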
+
+// readNextByte attempts to read the next byte from the buffer. If the buffer
+// has been exhausted, this function calls readIntoBuf, thus refilling the
+// buffer and resetting the read position to 0.
+func (js *jsonScanner) readNextByte() (byte, error) {
+ if js.pos >= len(js.buf) {
+ err := js.readIntoBuf()
+
+ if err != nil {
+ return 0, err
+ }
+ }
+
+ b := js.buf[js.pos]
+ js.pos++
+
+ return b, nil
+}
+
+// readNNextBytes reads n bytes into dst, starting at offset
+func (js *jsonScanner) readNNextBytes(dst []byte, n, offset int) error {
+ var err error
+
+ for i := 0; i < n; i++ {
+ dst[i+offset], err = js.readNextByte()
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// readIntoBuf reads up to 512 bytes from the scanner's io.Reader into the buffer
+func (js *jsonScanner) readIntoBuf() error {
+ if js.lastReadErr != nil {
+ js.buf = js.buf[:0]
+ js.pos = 0
+ return js.lastReadErr
+ }
+
+ if cap(js.buf) == 0 {
+ js.buf = make([]byte, 0, 512)
+ }
+
+ n, err := js.r.Read(js.buf[:cap(js.buf)])
+ if err != nil {
+ js.lastReadErr = err
+ if n > 0 {
+ err = nil
+ }
+ }
+ js.buf = js.buf[:n]
+ js.pos = 0
+
+ return err
+}
+
+func isWhiteSpace(c byte) bool {
+ return c == ' ' || c == '\t' || c == '\r' || c == '\n'
+}
+
+func isDigit(c byte) bool {
+ return unicode.IsDigit(rune(c))
+}
+
+func isValueTerminator(c byte) bool {
+ return c == ',' || c == '}' || c == ']' || isWhiteSpace(c)
+}
+
+// getu4 decodes the 4-byte hex sequence from the beginning of s, returning the hex value as a
+// rune, or -1 if the sequence is not valid hexadecimal. Note that the leading "\u" of the
+// unicode escape sequence must not be present.
+// It is copied and lightly modified from the Go JSON decode function at
+// https://github.com/golang/go/blob/1b0a0316802b8048d69da49dc23c5a5ab08e8ae8/src/encoding/json/decode.go#L1169-L1188
+func getu4(s []byte) rune {
+ if len(s) < 4 {
+ return -1
+ }
+ var r rune
+ for _, c := range s[:4] {
+ switch {
+ case '0' <= c && c <= '9':
+ c = c - '0'
+ case 'a' <= c && c <= 'f':
+ c = c - 'a' + 10
+ case 'A' <= c && c <= 'F':
+ c = c - 'A' + 10
+ default:
+ return -1
+ }
+ r = r*16 + rune(c)
+ }
+ return r
+}
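+
+// For example (illustrative): getu4([]byte("0041")) returns 'A' (U+0041),
+// and getu4([]byte("00zz")) returns -1.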
+
+// scanString reads from an opening '"' to a closing '"' and handles escaped characters
+func (js *jsonScanner) scanString() (*jsonToken, error) {
+ var b bytes.Buffer
+ var c byte
+ var err error
+
+ p := js.pos - 1
+
+ for {
+ c, err = js.readNextByte()
+ if err != nil {
+ if err == io.EOF {
+ return nil, errors.New("end of input in JSON string")
+ }
+ return nil, err
+ }
+
+ evalNextChar:
+ switch c {
+ case '\\':
+ c, err = js.readNextByte()
+ if err != nil {
+ if err == io.EOF {
+ return nil, errors.New("end of input in JSON string")
+ }
+ return nil, err
+ }
+
+ evalNextEscapeChar:
+ switch c {
+ case '"', '\\', '/':
+ b.WriteByte(c)
+ case 'b':
+ b.WriteByte('\b')
+ case 'f':
+ b.WriteByte('\f')
+ case 'n':
+ b.WriteByte('\n')
+ case 'r':
+ b.WriteByte('\r')
+ case 't':
+ b.WriteByte('\t')
+ case 'u':
+ us := make([]byte, 4)
+ err = js.readNNextBytes(us, 4, 0)
+ if err != nil {
+ return nil, fmt.Errorf("invalid unicode sequence in JSON string: %s", us)
+ }
+
+ rn := getu4(us)
+
+ // If the rune we just decoded is the high or low value of a possible surrogate pair,
+ // try to decode the next sequence as the low value of a surrogate pair. We're
+ // expecting the next sequence to be another Unicode escape sequence (e.g. "\uDD1E"),
+ // but need to handle cases where the input is not a valid surrogate pair.
+ // For more context on unicode surrogate pairs, see:
+ // https://www.christianfscott.com/rust-chars-vs-go-runes/
+ // https://www.unicode.org/glossary/#high_surrogate_code_point
+ if utf16.IsSurrogate(rn) {
+ c, err = js.readNextByte()
+ if err != nil {
+ if err == io.EOF {
+ return nil, errors.New("end of input in JSON string")
+ }
+ return nil, err
+ }
+
+ // If the next value isn't the beginning of a backslash escape sequence, write
+ // the Unicode replacement character for the surrogate value and goto the
+ // beginning of the next char eval block.
+ if c != '\\' {
+ b.WriteRune(unicode.ReplacementChar)
+ goto evalNextChar
+ }
+
+ c, err = js.readNextByte()
+ if err != nil {
+ if err == io.EOF {
+ return nil, errors.New("end of input in JSON string")
+ }
+ return nil, err
+ }
+
+ // If the next value isn't the beginning of a unicode escape sequence, write the
+ // Unicode replacement character for the surrogate value and goto the beginning
+ // of the next escape char eval block.
+ if c != 'u' {
+ b.WriteRune(unicode.ReplacementChar)
+ goto evalNextEscapeChar
+ }
+
+ err = js.readNNextBytes(us, 4, 0)
+ if err != nil {
+ return nil, fmt.Errorf("invalid unicode sequence in JSON string: %s", us)
+ }
+
+ rn2 := getu4(us)
+
+ // Try to decode the pair of runes as a utf16 surrogate pair. If that fails, write
+ // the Unicode replacement character for the surrogate value and the 2nd decoded rune.
+ if rnPair := utf16.DecodeRune(rn, rn2); rnPair != unicode.ReplacementChar {
+ b.WriteRune(rnPair)
+ } else {
+ b.WriteRune(unicode.ReplacementChar)
+ b.WriteRune(rn2)
+ }
+
+ break
+ }
+
+ b.WriteRune(rn)
+ default:
+ return nil, fmt.Errorf("invalid escape sequence in JSON string '\\%c'", c)
+ }
+ case '"':
+ return &jsonToken{t: jttString, v: b.String(), p: p}, nil
+ default:
+ b.WriteByte(c)
+ }
+ }
+}
+
+// scanLiteral reads an unquoted sequence of characters and determines if it is one of
+// three valid JSON literals (true, false, null); if so, it returns the appropriate
+// jsonToken; otherwise, it returns an error
+func (js *jsonScanner) scanLiteral(first byte) (*jsonToken, error) {
+ p := js.pos - 1
+
+ lit := make([]byte, 4)
+ lit[0] = first
+
+ err := js.readNNextBytes(lit, 3, 1)
+ if err != nil {
+ return nil, err
+ }
+
+ c5, err := js.readNextByte()
+
+ if bytes.Equal([]byte("true"), lit) && (isValueTerminator(c5) || err == io.EOF) {
+ js.pos = int(math.Max(0, float64(js.pos-1)))
+ return &jsonToken{t: jttBool, v: true, p: p}, nil
+ } else if bytes.Equal([]byte("null"), lit) && (isValueTerminator(c5) || err == io.EOF) {
+ js.pos = int(math.Max(0, float64(js.pos-1)))
+ return &jsonToken{t: jttNull, v: nil, p: p}, nil
+ } else if bytes.Equal([]byte("fals"), lit) {
+ if c5 == 'e' {
+ c5, err = js.readNextByte()
+
+ if isValueTerminator(c5) || err == io.EOF {
+ js.pos = int(math.Max(0, float64(js.pos-1)))
+ return &jsonToken{t: jttBool, v: false, p: p}, nil
+ }
+ }
+ }
+
+ return nil, fmt.Errorf("invalid JSON literal. Position: %d, literal: %s", p, lit)
+}
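+
+// For example (illustrative): given the input "null," the scanner returns a
+// jttNull token and rewinds so the trailing ',' is produced by the next
+// call, while "nulx" yields an invalid-literal error.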
+
+type numberScanState byte
+
+const (
+ nssSawLeadingMinus numberScanState = iota
+ nssSawLeadingZero
+ nssSawIntegerDigits
+ nssSawDecimalPoint
+ nssSawFractionDigits
+ nssSawExponentLetter
+ nssSawExponentSign
+ nssSawExponentDigits
+ nssDone
+ nssInvalid
+)
+
+// scanNumber reads a JSON number (according to RFC-8259)
+func (js *jsonScanner) scanNumber(first byte) (*jsonToken, error) {
+ var b bytes.Buffer
+ var s numberScanState
+ var c byte
+ var err error
+
+ t := jttInt64 // assume it's an int64 until the type can be determined
+ start := js.pos - 1
+
+ b.WriteByte(first)
+
+ switch first {
+ case '-':
+ s = nssSawLeadingMinus
+ case '0':
+ s = nssSawLeadingZero
+ default:
+ s = nssSawIntegerDigits
+ }
+
+ for {
+ c, err = js.readNextByte()
+
+ if err != nil && err != io.EOF {
+ return nil, err
+ }
+
+ switch s {
+ case nssSawLeadingMinus:
+ switch c {
+ case '0':
+ s = nssSawLeadingZero
+ b.WriteByte(c)
+ default:
+ if isDigit(c) {
+ s = nssSawIntegerDigits
+ b.WriteByte(c)
+ } else {
+ s = nssInvalid
+ }
+ }
+ case nssSawLeadingZero:
+ switch c {
+ case '.':
+ s = nssSawDecimalPoint
+ b.WriteByte(c)
+ case 'e', 'E':
+ s = nssSawExponentLetter
+ b.WriteByte(c)
+ case '}', ']', ',':
+ s = nssDone
+ default:
+ if isWhiteSpace(c) || err == io.EOF {
+ s = nssDone
+ } else {
+ s = nssInvalid
+ }
+ }
+ case nssSawIntegerDigits:
+ switch c {
+ case '.':
+ s = nssSawDecimalPoint
+ b.WriteByte(c)
+ case 'e', 'E':
+ s = nssSawExponentLetter
+ b.WriteByte(c)
+ case '}', ']', ',':
+ s = nssDone
+ default:
+ if isWhiteSpace(c) || err == io.EOF {
+ s = nssDone
+ } else if isDigit(c) {
+ s = nssSawIntegerDigits
+ b.WriteByte(c)
+ } else {
+ s = nssInvalid
+ }
+ }
+ case nssSawDecimalPoint:
+ t = jttDouble
+ if isDigit(c) {
+ s = nssSawFractionDigits
+ b.WriteByte(c)
+ } else {
+ s = nssInvalid
+ }
+ case nssSawFractionDigits:
+ switch c {
+ case 'e', 'E':
+ s = nssSawExponentLetter
+ b.WriteByte(c)
+ case '}', ']', ',':
+ s = nssDone
+ default:
+ if isWhiteSpace(c) || err == io.EOF {
+ s = nssDone
+ } else if isDigit(c) {
+ s = nssSawFractionDigits
+ b.WriteByte(c)
+ } else {
+ s = nssInvalid
+ }
+ }
+ case nssSawExponentLetter:
+ t = jttDouble
+ switch c {
+ case '+', '-':
+ s = nssSawExponentSign
+ b.WriteByte(c)
+ default:
+ if isDigit(c) {
+ s = nssSawExponentDigits
+ b.WriteByte(c)
+ } else {
+ s = nssInvalid
+ }
+ }
+ case nssSawExponentSign:
+ if isDigit(c) {
+ s = nssSawExponentDigits
+ b.WriteByte(c)
+ } else {
+ s = nssInvalid
+ }
+ case nssSawExponentDigits:
+ switch c {
+ case '}', ']', ',':
+ s = nssDone
+ default:
+ if isWhiteSpace(c) || err == io.EOF {
+ s = nssDone
+ } else if isDigit(c) {
+ s = nssSawExponentDigits
+ b.WriteByte(c)
+ } else {
+ s = nssInvalid
+ }
+ }
+ }
+
+ switch s {
+ case nssInvalid:
+ return nil, fmt.Errorf("invalid JSON number. Position: %d", start)
+ case nssDone:
+ js.pos = int(math.Max(0, float64(js.pos-1)))
+ if t != jttDouble {
+ v, err := strconv.ParseInt(b.String(), 10, 64)
+ if err == nil {
+ if v < math.MinInt32 || v > math.MaxInt32 {
+ return &jsonToken{t: jttInt64, v: v, p: start}, nil
+ }
+
+ return &jsonToken{t: jttInt32, v: int32(v), p: start}, nil
+ }
+ }
+
+ v, err := strconv.ParseFloat(b.String(), 64)
+ if err != nil {
+ return nil, err
+ }
+
+ return &jsonToken{t: jttDouble, v: v, p: start}, nil
+ }
+ }
+}
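+
+// For example (illustrative): scanning "42" yields jttInt32(42),
+// "2147483648" yields a jttInt64 because the value overflows int32, and
+// "4.2" or "1e3" yield jttDouble values.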
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/mode.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/mode.go
new file mode 100644
index 000000000..617b5e221
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/mode.go
@@ -0,0 +1,108 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonrw
+
+import (
+ "fmt"
+)
+
+type mode int
+
+const (
+ _ mode = iota
+ mTopLevel
+ mDocument
+ mArray
+ mValue
+ mElement
+ mCodeWithScope
+ mSpacer
+)
+
+func (m mode) String() string {
+ var str string
+
+ switch m {
+ case mTopLevel:
+ str = "TopLevel"
+ case mDocument:
+ str = "DocumentMode"
+ case mArray:
+ str = "ArrayMode"
+ case mValue:
+ str = "ValueMode"
+ case mElement:
+ str = "ElementMode"
+ case mCodeWithScope:
+ str = "CodeWithScopeMode"
+ case mSpacer:
+ str = "CodeWithScopeSpacerFrame"
+ default:
+ str = "UnknownMode"
+ }
+
+ return str
+}
+
+func (m mode) TypeString() string {
+ var str string
+
+ switch m {
+ case mTopLevel:
+ str = "TopLevel"
+ case mDocument:
+ str = "Document"
+ case mArray:
+ str = "Array"
+ case mValue:
+ str = "Value"
+ case mElement:
+ str = "Element"
+ case mCodeWithScope:
+ str = "CodeWithScope"
+ case mSpacer:
+ str = "CodeWithScopeSpacer"
+ default:
+ str = "Unknown"
+ }
+
+ return str
+}
+
+// TransitionError is an error returned when an invalid transition of a
+// ValueReader or ValueWriter state machine is attempted. The action field
+// records whether the failed operation was a read or a write.
+type TransitionError struct {
+ name string
+ parent mode
+ current mode
+ destination mode
+ modes []mode
+ action string
+}
+
+func (te TransitionError) Error() string {
+ errString := fmt.Sprintf("%s can only %s", te.name, te.action)
+ if te.destination != mode(0) {
+ errString = fmt.Sprintf("%s a %s", errString, te.destination.TypeString())
+ }
+ errString = fmt.Sprintf("%s while positioned on a", errString)
+ for ind, m := range te.modes {
+ if ind != 0 && len(te.modes) > 2 {
+ errString = fmt.Sprintf("%s,", errString)
+ }
+ if ind == len(te.modes)-1 && len(te.modes) > 1 {
+ errString = fmt.Sprintf("%s or", errString)
+ }
+ errString = fmt.Sprintf("%s %s", errString, m.TypeString())
+ }
+ errString = fmt.Sprintf("%s but is positioned on a %s", errString, te.current.TypeString())
+ if te.parent != mode(0) {
+ errString = fmt.Sprintf("%s with parent %s", errString, te.parent.TypeString())
+ }
+ return errString
+}
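+
+// For example (illustrative): a TransitionError with name "WriteBoolean",
+// action "write", modes [mElement, mValue], and current mode mTopLevel
+// renders as "WriteBoolean can only write while positioned on a Element or
+// Value but is positioned on a TopLevel".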
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/reader.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/reader.go
new file mode 100644
index 000000000..0b8fa28d5
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/reader.go
@@ -0,0 +1,63 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonrw
+
+import (
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+// ArrayReader is implemented by types that allow reading values from a BSON
+// array.
+type ArrayReader interface {
+ ReadValue() (ValueReader, error)
+}
+
+// DocumentReader is implemented by types that allow reading elements from a
+// BSON document.
+type DocumentReader interface {
+ ReadElement() (string, ValueReader, error)
+}
+
+// ValueReader is a generic interface used to read values from BSON. This type
+// is implemented by several types with different underlying representations of
+// BSON, such as a bson.Document, raw BSON bytes, or extended JSON.
+type ValueReader interface {
+ Type() bsontype.Type
+ Skip() error
+
+ ReadArray() (ArrayReader, error)
+ ReadBinary() (b []byte, btype byte, err error)
+ ReadBoolean() (bool, error)
+ ReadDocument() (DocumentReader, error)
+ ReadCodeWithScope() (code string, dr DocumentReader, err error)
+ ReadDBPointer() (ns string, oid primitive.ObjectID, err error)
+ ReadDateTime() (int64, error)
+ ReadDecimal128() (primitive.Decimal128, error)
+ ReadDouble() (float64, error)
+ ReadInt32() (int32, error)
+ ReadInt64() (int64, error)
+ ReadJavascript() (code string, err error)
+ ReadMaxKey() error
+ ReadMinKey() error
+ ReadNull() error
+ ReadObjectID() (primitive.ObjectID, error)
+ ReadRegex() (pattern, options string, err error)
+ ReadString() (string, error)
+ ReadSymbol() (symbol string, err error)
+ ReadTimestamp() (t, i uint32, err error)
+ ReadUndefined() error
+}
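+
+// A typical consumption loop (illustrative sketch, assuming dr is a
+// DocumentReader obtained from ReadDocument):
+//
+//	for {
+//		name, vr, err := dr.ReadElement()
+//		if err == ErrEOD {
+//			break
+//		}
+//		if err != nil {
+//			return err
+//		}
+//		_ = name
+//		if err := vr.Skip(); err != nil { // or read the typed value
+//			return err
+//		}
+//	}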
+
+// BytesReader is a generic interface used to read BSON bytes from a
+// ValueReader. This interface is meant to be a superset of ValueReader, so that
+// types that implement ValueReader may also implement this interface.
+//
+// The bytes of the value will be appended to dst.
+type BytesReader interface {
+ ReadValueBytes(dst []byte) (bsontype.Type, []byte, error)
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/value_reader.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/value_reader.go
new file mode 100644
index 000000000..ef5d837c2
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/value_reader.go
@@ -0,0 +1,874 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonrw
+
+import (
+ "bytes"
+ "encoding/binary"
+ "errors"
+ "fmt"
+ "io"
+ "math"
+ "sync"
+
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+var _ ValueReader = (*valueReader)(nil)
+
+var vrPool = sync.Pool{
+ New: func() interface{} {
+ return new(valueReader)
+ },
+}
+
+// BSONValueReaderPool is a pool for ValueReaders that read BSON.
+type BSONValueReaderPool struct {
+ pool sync.Pool
+}
+
+// NewBSONValueReaderPool instantiates a new BSONValueReaderPool.
+func NewBSONValueReaderPool() *BSONValueReaderPool {
+ return &BSONValueReaderPool{
+ pool: sync.Pool{
+ New: func() interface{} {
+ return new(valueReader)
+ },
+ },
+ }
+}
+
+// Get retrieves a ValueReader from the pool and uses src as the underlying BSON.
+func (bvrp *BSONValueReaderPool) Get(src []byte) ValueReader {
+ vr := bvrp.pool.Get().(*valueReader)
+ vr.reset(src)
+ return vr
+}
+
+// Put inserts a ValueReader into the pool. If the ValueReader is not a BSON ValueReader, nothing
+// is inserted into the pool and ok will be false.
+func (bvrp *BSONValueReaderPool) Put(vr ValueReader) (ok bool) {
+ bvr, ok := vr.(*valueReader)
+ if !ok {
+ return false
+ }
+
+ bvr.reset(nil)
+ bvrp.pool.Put(bvr)
+ return true
+}
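+
+// A pooled round trip (illustrative sketch, assuming doc holds a complete
+// raw BSON document):
+//
+//	pool := NewBSONValueReaderPool()
+//	vr := pool.Get(doc)
+//	// ... decode values from vr ...
+//	_ = pool.Put(vr)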
+
+// ErrEOA is the error returned when the end of a BSON array has been reached.
+var ErrEOA = errors.New("end of array")
+
+// ErrEOD is the error returned when the end of a BSON document has been reached.
+var ErrEOD = errors.New("end of document")
+
+type vrState struct {
+ mode mode
+ vType bsontype.Type
+ end int64
+}
+
+// valueReader is for reading BSON values.
+type valueReader struct {
+ offset int64
+ d []byte
+
+ stack []vrState
+ frame int64
+}
+
+// NewBSONDocumentReader returns a ValueReader using b for the underlying BSON
+// representation. Parameter b must be a BSON Document.
+func NewBSONDocumentReader(b []byte) ValueReader {
+ // TODO(skriptble): There's a lack of symmetry between the reader and writer, since the reader takes a []byte while the
+ // writer takes an io.Writer. We should have two versions of each, one that takes a []byte and one that takes an
+ // io.Reader or io.Writer. The []byte version will need to return a thing that can return the finished []byte since
+ // it might be reallocated when appended to.
+ return newValueReader(b)
+}
+
+// NewBSONValueReader returns a ValueReader that starts in the Value mode instead of in top
+// level document mode. This enables the creation of a ValueReader for a single BSON value.
+func NewBSONValueReader(t bsontype.Type, val []byte) ValueReader {
+ stack := make([]vrState, 1, 5)
+ stack[0] = vrState{
+ mode: mValue,
+ vType: t,
+ }
+ return &valueReader{
+ d: val,
+ stack: stack,
+ }
+}
+
+func newValueReader(b []byte) *valueReader {
+ stack := make([]vrState, 1, 5)
+ stack[0] = vrState{
+ mode: mTopLevel,
+ }
+ return &valueReader{
+ d: b,
+ stack: stack,
+ }
+}
+
+func (vr *valueReader) reset(b []byte) {
+ if vr.stack == nil {
+ vr.stack = make([]vrState, 1, 5)
+ }
+ vr.stack = vr.stack[:1]
+ vr.stack[0] = vrState{mode: mTopLevel}
+ vr.d = b
+ vr.offset = 0
+ vr.frame = 0
+}
+
+func (vr *valueReader) advanceFrame() {
+ if vr.frame+1 >= int64(len(vr.stack)) { // We need to grow the stack
+ length := len(vr.stack)
+ if length+1 >= cap(vr.stack) {
+ // double it
+ buf := make([]vrState, 2*cap(vr.stack)+1)
+ copy(buf, vr.stack)
+ vr.stack = buf
+ }
+ vr.stack = vr.stack[:length+1]
+ }
+ vr.frame++
+
+ // Clean the stack
+ vr.stack[vr.frame].mode = 0
+ vr.stack[vr.frame].vType = 0
+ vr.stack[vr.frame].end = 0
+}
+
+func (vr *valueReader) pushDocument() error {
+ vr.advanceFrame()
+
+ vr.stack[vr.frame].mode = mDocument
+
+ size, err := vr.readLength()
+ if err != nil {
+ return err
+ }
+ vr.stack[vr.frame].end = int64(size) + vr.offset - 4
+
+ return nil
+}
+
+func (vr *valueReader) pushArray() error {
+ vr.advanceFrame()
+
+ vr.stack[vr.frame].mode = mArray
+
+ size, err := vr.readLength()
+ if err != nil {
+ return err
+ }
+ vr.stack[vr.frame].end = int64(size) + vr.offset - 4
+
+ return nil
+}
+
+func (vr *valueReader) pushElement(t bsontype.Type) {
+ vr.advanceFrame()
+
+ vr.stack[vr.frame].mode = mElement
+ vr.stack[vr.frame].vType = t
+}
+
+func (vr *valueReader) pushValue(t bsontype.Type) {
+ vr.advanceFrame()
+
+ vr.stack[vr.frame].mode = mValue
+ vr.stack[vr.frame].vType = t
+}
+
+func (vr *valueReader) pushCodeWithScope() (int64, error) {
+ vr.advanceFrame()
+
+ vr.stack[vr.frame].mode = mCodeWithScope
+
+ size, err := vr.readLength()
+ if err != nil {
+ return 0, err
+ }
+ vr.stack[vr.frame].end = int64(size) + vr.offset - 4
+
+ return int64(size), nil
+}
+
+func (vr *valueReader) pop() {
+ switch vr.stack[vr.frame].mode {
+ case mElement, mValue:
+ vr.frame--
+ case mDocument, mArray, mCodeWithScope:
+ vr.frame -= 2 // we pop twice to jump over the vrElement: vrDocument -> vrElement -> vrDocument/TopLevel/etc...
+ }
+}
+
+func (vr *valueReader) invalidTransitionErr(destination mode, name string, modes []mode) error {
+ te := TransitionError{
+ name: name,
+ current: vr.stack[vr.frame].mode,
+ destination: destination,
+ modes: modes,
+ action: "read",
+ }
+ if vr.frame != 0 {
+ te.parent = vr.stack[vr.frame-1].mode
+ }
+ return te
+}
+
+func (vr *valueReader) typeError(t bsontype.Type) error {
+ return fmt.Errorf("positioned on %s, but attempted to read %s", vr.stack[vr.frame].vType, t)
+}
+
+func (vr *valueReader) invalidDocumentLengthError() error {
+ return fmt.Errorf("document is invalid, end byte is at %d, but null byte found at %d", vr.stack[vr.frame].end, vr.offset)
+}
+
+func (vr *valueReader) ensureElementValue(t bsontype.Type, destination mode, callerName string) error {
+ switch vr.stack[vr.frame].mode {
+ case mElement, mValue:
+ if vr.stack[vr.frame].vType != t {
+ return vr.typeError(t)
+ }
+ default:
+ return vr.invalidTransitionErr(destination, callerName, []mode{mElement, mValue})
+ }
+
+ return nil
+}
+
+func (vr *valueReader) Type() bsontype.Type {
+ return vr.stack[vr.frame].vType
+}
+
+func (vr *valueReader) nextElementLength() (int32, error) {
+ var length int32
+ var err error
+ switch vr.stack[vr.frame].vType {
+ case bsontype.Array, bsontype.EmbeddedDocument, bsontype.CodeWithScope:
+ length, err = vr.peekLength()
+ case bsontype.Binary:
+ length, err = vr.peekLength()
+ length += 4 + 1 // binary length + subtype byte
+ case bsontype.Boolean:
+ length = 1
+ case bsontype.DBPointer:
+ length, err = vr.peekLength()
+ length += 4 + 12 // string length + ObjectID length
+ case bsontype.DateTime, bsontype.Double, bsontype.Int64, bsontype.Timestamp:
+ length = 8
+ case bsontype.Decimal128:
+ length = 16
+ case bsontype.Int32:
+ length = 4
+ case bsontype.JavaScript, bsontype.String, bsontype.Symbol:
+ length, err = vr.peekLength()
+ length += 4
+ case bsontype.MaxKey, bsontype.MinKey, bsontype.Null, bsontype.Undefined:
+ length = 0
+ case bsontype.ObjectID:
+ length = 12
+ case bsontype.Regex:
+ regex := bytes.IndexByte(vr.d[vr.offset:], 0x00)
+ if regex < 0 {
+ err = io.EOF
+ break
+ }
+ pattern := bytes.IndexByte(vr.d[vr.offset+int64(regex)+1:], 0x00)
+ if pattern < 0 {
+ err = io.EOF
+ break
+ }
+ length = int32(int64(regex) + 1 + int64(pattern) + 1)
+ default:
+ return 0, fmt.Errorf("attempted to read bytes of unknown BSON type %v", vr.stack[vr.frame].vType)
+ }
+
+ return length, err
+}
+
+func (vr *valueReader) ReadValueBytes(dst []byte) (bsontype.Type, []byte, error) {
+ switch vr.stack[vr.frame].mode {
+ case mTopLevel:
+ length, err := vr.peekLength()
+ if err != nil {
+ return bsontype.Type(0), nil, err
+ }
+ dst, err = vr.appendBytes(dst, length)
+ if err != nil {
+ return bsontype.Type(0), nil, err
+ }
+ return bsontype.Type(0), dst, nil
+ case mElement, mValue:
+ length, err := vr.nextElementLength()
+ if err != nil {
+ return bsontype.Type(0), dst, err
+ }
+
+ dst, err = vr.appendBytes(dst, length)
+ t := vr.stack[vr.frame].vType
+ vr.pop()
+ return t, dst, err
+ default:
+ return bsontype.Type(0), nil, vr.invalidTransitionErr(0, "ReadValueBytes", []mode{mElement, mValue})
+ }
+}
+
+func (vr *valueReader) Skip() error {
+ switch vr.stack[vr.frame].mode {
+ case mElement, mValue:
+ default:
+ return vr.invalidTransitionErr(0, "Skip", []mode{mElement, mValue})
+ }
+
+ length, err := vr.nextElementLength()
+ if err != nil {
+ return err
+ }
+
+ err = vr.skipBytes(length)
+ vr.pop()
+ return err
+}
+
+func (vr *valueReader) ReadArray() (ArrayReader, error) {
+ if err := vr.ensureElementValue(bsontype.Array, mArray, "ReadArray"); err != nil {
+ return nil, err
+ }
+
+ err := vr.pushArray()
+ if err != nil {
+ return nil, err
+ }
+
+ return vr, nil
+}
+
+func (vr *valueReader) ReadBinary() (b []byte, btype byte, err error) {
+ if err := vr.ensureElementValue(bsontype.Binary, 0, "ReadBinary"); err != nil {
+ return nil, 0, err
+ }
+
+ length, err := vr.readLength()
+ if err != nil {
+ return nil, 0, err
+ }
+
+ btype, err = vr.readByte()
+ if err != nil {
+ return nil, 0, err
+ }
+
+ // Subtype 0x02 is the deprecated "old binary" format, which wraps the payload
+ // in an extra length prefix; if one is present (outer length > 4), read past it.
+ if btype == 0x02 && length > 4 {
+ length, err = vr.readLength()
+ if err != nil {
+ return nil, 0, err
+ }
+ }
+
+ b, err = vr.readBytes(length)
+ if err != nil {
+ return nil, 0, err
+ }
+ // Make a copy of the returned byte slice because it's just a subslice from the valueReader's
+ // buffer and is not safe to return in the unmarshaled value.
+ cp := make([]byte, len(b))
+ copy(cp, b)
+
+ vr.pop()
+ return cp, btype, nil
+}
+
+func (vr *valueReader) ReadBoolean() (bool, error) {
+ if err := vr.ensureElementValue(bsontype.Boolean, 0, "ReadBoolean"); err != nil {
+ return false, err
+ }
+
+ b, err := vr.readByte()
+ if err != nil {
+ return false, err
+ }
+
+ if b > 1 {
+ return false, fmt.Errorf("invalid byte for boolean, %b", b)
+ }
+
+ vr.pop()
+ return b == 1, nil
+}
+
+func (vr *valueReader) ReadDocument() (DocumentReader, error) {
+ switch vr.stack[vr.frame].mode {
+ case mTopLevel:
+ // read size
+ size, err := vr.readLength()
+ if err != nil {
+ return nil, err
+ }
+ if int(size) != len(vr.d) {
+ return nil, fmt.Errorf("invalid document length")
+ }
+ vr.stack[vr.frame].end = int64(size) + vr.offset - 4
+ return vr, nil
+ case mElement, mValue:
+ if vr.stack[vr.frame].vType != bsontype.EmbeddedDocument {
+ return nil, vr.typeError(bsontype.EmbeddedDocument)
+ }
+ default:
+ return nil, vr.invalidTransitionErr(mDocument, "ReadDocument", []mode{mTopLevel, mElement, mValue})
+ }
+
+ err := vr.pushDocument()
+ if err != nil {
+ return nil, err
+ }
+
+ return vr, nil
+}
+
+func (vr *valueReader) ReadCodeWithScope() (code string, dr DocumentReader, err error) {
+ if err := vr.ensureElementValue(bsontype.CodeWithScope, 0, "ReadCodeWithScope"); err != nil {
+ return "", nil, err
+ }
+
+ totalLength, err := vr.readLength()
+ if err != nil {
+ return "", nil, err
+ }
+ strLength, err := vr.readLength()
+ if err != nil {
+ return "", nil, err
+ }
+ if strLength <= 0 {
+ return "", nil, fmt.Errorf("invalid string length: %d", strLength)
+ }
+ strBytes, err := vr.readBytes(strLength)
+ if err != nil {
+ return "", nil, err
+ }
+ code = string(strBytes[:len(strBytes)-1])
+
+ size, err := vr.pushCodeWithScope()
+ if err != nil {
+ return "", nil, err
+ }
+
+ // The total length should equal:
+ // 4 (total length) + strLength + 4 (the length of str itself) + (document length)
+ componentsLength := int64(4+strLength+4) + size
+ if int64(totalLength) != componentsLength {
+ return "", nil, fmt.Errorf(
+ "length of CodeWithScope does not match lengths of components; total: %d; components: %d",
+ totalLength, componentsLength,
+ )
+ }
+ return code, vr, nil
+}
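+
+// For example (illustrative): for code "x" (strLength 2, including the
+// trailing null byte) and an empty scope document (size 5), the outer
+// length must be 4 + 2 + 4 + 5 = 15.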
+
+func (vr *valueReader) ReadDBPointer() (ns string, oid primitive.ObjectID, err error) {
+ if err := vr.ensureElementValue(bsontype.DBPointer, 0, "ReadDBPointer"); err != nil {
+ return "", oid, err
+ }
+
+ ns, err = vr.readString()
+ if err != nil {
+ return "", oid, err
+ }
+
+ oidbytes, err := vr.readBytes(12)
+ if err != nil {
+ return "", oid, err
+ }
+
+ copy(oid[:], oidbytes)
+
+ vr.pop()
+ return ns, oid, nil
+}
+
+func (vr *valueReader) ReadDateTime() (int64, error) {
+ if err := vr.ensureElementValue(bsontype.DateTime, 0, "ReadDateTime"); err != nil {
+ return 0, err
+ }
+
+ i, err := vr.readi64()
+ if err != nil {
+ return 0, err
+ }
+
+ vr.pop()
+ return i, nil
+}
+
+func (vr *valueReader) ReadDecimal128() (primitive.Decimal128, error) {
+ if err := vr.ensureElementValue(bsontype.Decimal128, 0, "ReadDecimal128"); err != nil {
+ return primitive.Decimal128{}, err
+ }
+
+ b, err := vr.readBytes(16)
+ if err != nil {
+ return primitive.Decimal128{}, err
+ }
+
+ l := binary.LittleEndian.Uint64(b[0:8])
+ h := binary.LittleEndian.Uint64(b[8:16])
+
+ vr.pop()
+ return primitive.NewDecimal128(h, l), nil
+}
+
+func (vr *valueReader) ReadDouble() (float64, error) {
+ if err := vr.ensureElementValue(bsontype.Double, 0, "ReadDouble"); err != nil {
+ return 0, err
+ }
+
+ u, err := vr.readu64()
+ if err != nil {
+ return 0, err
+ }
+
+ vr.pop()
+ return math.Float64frombits(u), nil
+}
+
+func (vr *valueReader) ReadInt32() (int32, error) {
+ if err := vr.ensureElementValue(bsontype.Int32, 0, "ReadInt32"); err != nil {
+ return 0, err
+ }
+
+ vr.pop()
+ return vr.readi32()
+}
+
+func (vr *valueReader) ReadInt64() (int64, error) {
+ if err := vr.ensureElementValue(bsontype.Int64, 0, "ReadInt64"); err != nil {
+ return 0, err
+ }
+
+ vr.pop()
+ return vr.readi64()
+}
+
+func (vr *valueReader) ReadJavascript() (code string, err error) {
+ if err := vr.ensureElementValue(bsontype.JavaScript, 0, "ReadJavascript"); err != nil {
+ return "", err
+ }
+
+ vr.pop()
+ return vr.readString()
+}
+
+func (vr *valueReader) ReadMaxKey() error {
+ if err := vr.ensureElementValue(bsontype.MaxKey, 0, "ReadMaxKey"); err != nil {
+ return err
+ }
+
+ vr.pop()
+ return nil
+}
+
+func (vr *valueReader) ReadMinKey() error {
+ if err := vr.ensureElementValue(bsontype.MinKey, 0, "ReadMinKey"); err != nil {
+ return err
+ }
+
+ vr.pop()
+ return nil
+}
+
+func (vr *valueReader) ReadNull() error {
+ if err := vr.ensureElementValue(bsontype.Null, 0, "ReadNull"); err != nil {
+ return err
+ }
+
+ vr.pop()
+ return nil
+}
+
+func (vr *valueReader) ReadObjectID() (primitive.ObjectID, error) {
+ if err := vr.ensureElementValue(bsontype.ObjectID, 0, "ReadObjectID"); err != nil {
+ return primitive.ObjectID{}, err
+ }
+
+ oidbytes, err := vr.readBytes(12)
+ if err != nil {
+ return primitive.ObjectID{}, err
+ }
+
+ var oid primitive.ObjectID
+ copy(oid[:], oidbytes)
+
+ vr.pop()
+ return oid, nil
+}
+
+func (vr *valueReader) ReadRegex() (string, string, error) {
+ if err := vr.ensureElementValue(bsontype.Regex, 0, "ReadRegex"); err != nil {
+ return "", "", err
+ }
+
+ pattern, err := vr.readCString()
+ if err != nil {
+ return "", "", err
+ }
+
+ options, err := vr.readCString()
+ if err != nil {
+ return "", "", err
+ }
+
+ vr.pop()
+ return pattern, options, nil
+}
+
+func (vr *valueReader) ReadString() (string, error) {
+ if err := vr.ensureElementValue(bsontype.String, 0, "ReadString"); err != nil {
+ return "", err
+ }
+
+ vr.pop()
+ return vr.readString()
+}
+
+func (vr *valueReader) ReadSymbol() (symbol string, err error) {
+ if err := vr.ensureElementValue(bsontype.Symbol, 0, "ReadSymbol"); err != nil {
+ return "", err
+ }
+
+ vr.pop()
+ return vr.readString()
+}
+
+func (vr *valueReader) ReadTimestamp() (t uint32, i uint32, err error) {
+ if err := vr.ensureElementValue(bsontype.Timestamp, 0, "ReadTimestamp"); err != nil {
+ return 0, 0, err
+ }
+
+ i, err = vr.readu32()
+ if err != nil {
+ return 0, 0, err
+ }
+
+ t, err = vr.readu32()
+ if err != nil {
+ return 0, 0, err
+ }
+
+ vr.pop()
+ return t, i, nil
+}
+
+func (vr *valueReader) ReadUndefined() error {
+ if err := vr.ensureElementValue(bsontype.Undefined, 0, "ReadUndefined"); err != nil {
+ return err
+ }
+
+ vr.pop()
+ return nil
+}
+
+func (vr *valueReader) ReadElement() (string, ValueReader, error) {
+ switch vr.stack[vr.frame].mode {
+ case mTopLevel, mDocument, mCodeWithScope:
+ default:
+ return "", nil, vr.invalidTransitionErr(mElement, "ReadElement", []mode{mTopLevel, mDocument, mCodeWithScope})
+ }
+
+ t, err := vr.readByte()
+ if err != nil {
+ return "", nil, err
+ }
+
+ if t == 0 {
+ if vr.offset != vr.stack[vr.frame].end {
+ return "", nil, vr.invalidDocumentLengthError()
+ }
+
+ vr.pop()
+ return "", nil, ErrEOD
+ }
+
+ name, err := vr.readCString()
+ if err != nil {
+ return "", nil, err
+ }
+
+ vr.pushElement(bsontype.Type(t))
+ return name, vr, nil
+}
+
+func (vr *valueReader) ReadValue() (ValueReader, error) {
+ switch vr.stack[vr.frame].mode {
+ case mArray:
+ default:
+ return nil, vr.invalidTransitionErr(mValue, "ReadValue", []mode{mArray})
+ }
+
+ t, err := vr.readByte()
+ if err != nil {
+ return nil, err
+ }
+
+ if t == 0 {
+ if vr.offset != vr.stack[vr.frame].end {
+ return nil, vr.invalidDocumentLengthError()
+ }
+
+ vr.pop()
+ return nil, ErrEOA
+ }
+
+ _, err = vr.readCString()
+ if err != nil {
+ return nil, err
+ }
+
+ vr.pushValue(bsontype.Type(t))
+ return vr, nil
+}
+
+// readBytes reads length bytes from the valueReader starting at the current offset. Note that the
+// returned byte slice is a subslice from the valueReader buffer and must be converted or copied
+// before returning in an unmarshaled value.
+func (vr *valueReader) readBytes(length int32) ([]byte, error) {
+ if length < 0 {
+ return nil, fmt.Errorf("invalid length: %d", length)
+ }
+
+ if vr.offset+int64(length) > int64(len(vr.d)) {
+ return nil, io.EOF
+ }
+
+ start := vr.offset
+ vr.offset += int64(length)
+
+ return vr.d[start : start+int64(length)], nil
+}
+
+func (vr *valueReader) appendBytes(dst []byte, length int32) ([]byte, error) {
+ if vr.offset+int64(length) > int64(len(vr.d)) {
+ return nil, io.EOF
+ }
+
+ start := vr.offset
+ vr.offset += int64(length)
+ return append(dst, vr.d[start:start+int64(length)]...), nil
+}
+
+func (vr *valueReader) skipBytes(length int32) error {
+ if vr.offset+int64(length) > int64(len(vr.d)) {
+ return io.EOF
+ }
+
+ vr.offset += int64(length)
+ return nil
+}
+
+func (vr *valueReader) readByte() (byte, error) {
+ if vr.offset+1 > int64(len(vr.d)) {
+ return 0x0, io.EOF
+ }
+
+ vr.offset++
+ return vr.d[vr.offset-1], nil
+}
+
+func (vr *valueReader) readCString() (string, error) {
+ idx := bytes.IndexByte(vr.d[vr.offset:], 0x00)
+ if idx < 0 {
+ return "", io.EOF
+ }
+ start := vr.offset
+ // idx does not include the null byte
+ vr.offset += int64(idx) + 1
+ return string(vr.d[start : start+int64(idx)]), nil
+}
+
+func (vr *valueReader) readString() (string, error) {
+ length, err := vr.readLength()
+ if err != nil {
+ return "", err
+ }
+
+ if int64(length)+vr.offset > int64(len(vr.d)) {
+ return "", io.EOF
+ }
+
+ if length <= 0 {
+ return "", fmt.Errorf("invalid string length: %d", length)
+ }
+
+ if vr.d[vr.offset+int64(length)-1] != 0x00 {
+ return "", fmt.Errorf("string does not end with null byte, but with %v", vr.d[vr.offset+int64(length)-1])
+ }
+
+ start := vr.offset
+ vr.offset += int64(length)
+ return string(vr.d[start : start+int64(length)-1]), nil
+}
+
+func (vr *valueReader) peekLength() (int32, error) {
+ if vr.offset+4 > int64(len(vr.d)) {
+ return 0, io.EOF
+ }
+
+ idx := vr.offset
+ return (int32(vr.d[idx]) | int32(vr.d[idx+1])<<8 | int32(vr.d[idx+2])<<16 | int32(vr.d[idx+3])<<24), nil
+}
+
+func (vr *valueReader) readLength() (int32, error) { return vr.readi32() }
+
+func (vr *valueReader) readi32() (int32, error) {
+ if vr.offset+4 > int64(len(vr.d)) {
+ return 0, io.EOF
+ }
+
+ idx := vr.offset
+ vr.offset += 4
+ return (int32(vr.d[idx]) | int32(vr.d[idx+1])<<8 | int32(vr.d[idx+2])<<16 | int32(vr.d[idx+3])<<24), nil
+}
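+
+// For example (illustrative): the little-endian bytes 0x2A 0x00 0x00 0x00
+// decode to int32(42).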
+
+func (vr *valueReader) readu32() (uint32, error) {
+ if vr.offset+4 > int64(len(vr.d)) {
+ return 0, io.EOF
+ }
+
+ idx := vr.offset
+ vr.offset += 4
+ return (uint32(vr.d[idx]) | uint32(vr.d[idx+1])<<8 | uint32(vr.d[idx+2])<<16 | uint32(vr.d[idx+3])<<24), nil
+}
+
+func (vr *valueReader) readi64() (int64, error) {
+ if vr.offset+8 > int64(len(vr.d)) {
+ return 0, io.EOF
+ }
+
+ idx := vr.offset
+ vr.offset += 8
+ return int64(vr.d[idx]) | int64(vr.d[idx+1])<<8 | int64(vr.d[idx+2])<<16 | int64(vr.d[idx+3])<<24 |
+ int64(vr.d[idx+4])<<32 | int64(vr.d[idx+5])<<40 | int64(vr.d[idx+6])<<48 | int64(vr.d[idx+7])<<56, nil
+}
+
+func (vr *valueReader) readu64() (uint64, error) {
+ if vr.offset+8 > int64(len(vr.d)) {
+ return 0, io.EOF
+ }
+
+ idx := vr.offset
+ vr.offset += 8
+ return uint64(vr.d[idx]) | uint64(vr.d[idx+1])<<8 | uint64(vr.d[idx+2])<<16 | uint64(vr.d[idx+3])<<24 |
+ uint64(vr.d[idx+4])<<32 | uint64(vr.d[idx+5])<<40 | uint64(vr.d[idx+6])<<48 | uint64(vr.d[idx+7])<<56, nil
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/value_writer.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/value_writer.go
new file mode 100644
index 000000000..f95a08afd
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/value_writer.go
@@ -0,0 +1,606 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonrw
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "math"
+ "strconv"
+ "strings"
+ "sync"
+
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+ "go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
+)
+
+var _ ValueWriter = (*valueWriter)(nil)
+
+var vwPool = sync.Pool{
+ New: func() interface{} {
+ return new(valueWriter)
+ },
+}
+
+// BSONValueWriterPool is a pool for BSON ValueWriters.
+type BSONValueWriterPool struct {
+ pool sync.Pool
+}
+
+// NewBSONValueWriterPool creates a new pool for ValueWriter instances that write to BSON.
+func NewBSONValueWriterPool() *BSONValueWriterPool {
+ return &BSONValueWriterPool{
+ pool: sync.Pool{
+ New: func() interface{} {
+ return new(valueWriter)
+ },
+ },
+ }
+}
+
+// Get retrieves a BSON ValueWriter from the pool and resets it to use w as the destination.
+func (bvwp *BSONValueWriterPool) Get(w io.Writer) ValueWriter {
+ vw := bvwp.pool.Get().(*valueWriter)
+
+ // TODO: Having to call reset here with the same buffer doesn't really make sense.
+ vw.reset(vw.buf)
+ vw.buf = vw.buf[:0]
+ vw.w = w
+ return vw
+}
+
+// GetAtModeElement retrieves a ValueWriterFlusher from the pool and resets it to use w as the destination.
+func (bvwp *BSONValueWriterPool) GetAtModeElement(w io.Writer) ValueWriterFlusher {
+ vw := bvwp.Get(w).(*valueWriter)
+ vw.push(mElement)
+ return vw
+}
+
+// Put inserts a ValueWriter into the pool. If the ValueWriter is not a BSON ValueWriter, nothing
+// happens and ok will be false.
+func (bvwp *BSONValueWriterPool) Put(vw ValueWriter) (ok bool) {
+ bvw, ok := vw.(*valueWriter)
+ if !ok {
+ return false
+ }
+
+ bvwp.pool.Put(bvw)
+ return true
+}
+
+// This is here so that during testing we can change it and not require
+// allocating a 4GB slice.
+var maxSize = math.MaxInt32
+
+var errNilWriter = errors.New("cannot create a ValueWriter from a nil io.Writer")
+
+type errMaxDocumentSizeExceeded struct {
+ size int64
+}
+
+func (mdse errMaxDocumentSizeExceeded) Error() string {
+ return fmt.Sprintf("document size (%d) is larger than the max int32", mdse.size)
+}
+
+type vwMode int
+
+const (
+ _ vwMode = iota
+ vwTopLevel
+ vwDocument
+ vwArray
+ vwValue
+ vwElement
+ vwCodeWithScope
+)
+
+func (vm vwMode) String() string {
+ var str string
+
+ switch vm {
+ case vwTopLevel:
+ str = "TopLevel"
+ case vwDocument:
+ str = "DocumentMode"
+ case vwArray:
+ str = "ArrayMode"
+ case vwValue:
+ str = "ValueMode"
+ case vwElement:
+ str = "ElementMode"
+ case vwCodeWithScope:
+ str = "CodeWithScopeMode"
+ default:
+ str = "UnknownMode"
+ }
+
+ return str
+}
+
+type vwState struct {
+ mode mode
+ key string
+ arrkey int
+ start int32
+}
+
+type valueWriter struct {
+ w io.Writer
+ buf []byte
+
+ stack []vwState
+ frame int64
+}
+
+func (vw *valueWriter) advanceFrame() {
+ if vw.frame+1 >= int64(len(vw.stack)) { // We need to grow the stack
+ length := len(vw.stack)
+ if length+1 >= cap(vw.stack) {
+ // double it
+ buf := make([]vwState, 2*cap(vw.stack)+1)
+ copy(buf, vw.stack)
+ vw.stack = buf
+ }
+ vw.stack = vw.stack[:length+1]
+ }
+ vw.frame++
+}
+
+func (vw *valueWriter) push(m mode) {
+ vw.advanceFrame()
+
+ // Clean the stack frame and set its mode.
+ vw.stack[vw.frame].mode = m
+ vw.stack[vw.frame].key = ""
+ vw.stack[vw.frame].arrkey = 0
+ vw.stack[vw.frame].start = 0
+
+ switch m {
+ case mDocument, mArray, mCodeWithScope:
+ vw.reserveLength()
+ }
+}
+
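+// reserveLength records where the current document, array, or code-with-scope
+// starts and appends four placeholder bytes; the real little-endian length is
+// written back into this slot when the corresponding frame is closed.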
+func (vw *valueWriter) reserveLength() {
+ vw.stack[vw.frame].start = int32(len(vw.buf))
+ vw.buf = append(vw.buf, 0x00, 0x00, 0x00, 0x00)
+}
+
+func (vw *valueWriter) pop() {
+ switch vw.stack[vw.frame].mode {
+ case mElement, mValue:
+ vw.frame--
+ case mDocument, mArray, mCodeWithScope:
+ vw.frame -= 2 // we pop twice to jump over the mElement: mDocument -> mElement -> mDocument/mTopLevel/etc...
+ }
+}
+
+// NewBSONValueWriter creates a ValueWriter that writes BSON to w.
+//
+// This ValueWriter will only write entire documents to the io.Writer and it
+// will buffer the document as it is built.
+func NewBSONValueWriter(w io.Writer) (ValueWriter, error) {
+ if w == nil {
+ return nil, errNilWriter
+ }
+ return newValueWriter(w), nil
+}
+
+func newValueWriter(w io.Writer) *valueWriter {
+ vw := new(valueWriter)
+ stack := make([]vwState, 1, 5)
+ stack[0] = vwState{mode: mTopLevel}
+ vw.w = w
+ vw.stack = stack
+
+ return vw
+}
+
+func newValueWriterFromSlice(buf []byte) *valueWriter {
+ vw := new(valueWriter)
+ stack := make([]vwState, 1, 5)
+ stack[0] = vwState{mode: mTopLevel}
+ vw.stack = stack
+ vw.buf = buf
+
+ return vw
+}
+
+func (vw *valueWriter) reset(buf []byte) {
+ if vw.stack == nil {
+ vw.stack = make([]vwState, 1, 5)
+ }
+ vw.stack = vw.stack[:1]
+ vw.stack[0] = vwState{mode: mTopLevel}
+ vw.buf = buf
+ vw.frame = 0
+ vw.w = nil
+}
+
+func (vw *valueWriter) invalidTransitionError(destination mode, name string, modes []mode) error {
+ te := TransitionError{
+ name: name,
+ current: vw.stack[vw.frame].mode,
+ destination: destination,
+ modes: modes,
+ action: "write",
+ }
+ if vw.frame != 0 {
+ te.parent = vw.stack[vw.frame-1].mode
+ }
+ return te
+}
+
+func (vw *valueWriter) writeElementHeader(t bsontype.Type, destination mode, callerName string, addmodes ...mode) error {
+ switch vw.stack[vw.frame].mode {
+ case mElement:
+ key := vw.stack[vw.frame].key
+ if !isValidCString(key) {
+ return errors.New("BSON element key cannot contain null bytes")
+ }
+
+ vw.buf = bsoncore.AppendHeader(vw.buf, t, key)
+ case mValue:
+ // TODO: Do this with a cache of the first 1000 or so array keys.
+ vw.buf = bsoncore.AppendHeader(vw.buf, t, strconv.Itoa(vw.stack[vw.frame].arrkey))
+ default:
+ modes := []mode{mElement, mValue}
+ if addmodes != nil {
+ modes = append(modes, addmodes...)
+ }
+ return vw.invalidTransitionError(destination, callerName, modes)
+ }
+
+ return nil
+}
+
+func (vw *valueWriter) WriteValueBytes(t bsontype.Type, b []byte) error {
+ if err := vw.writeElementHeader(t, mode(0), "WriteValueBytes"); err != nil {
+ return err
+ }
+ vw.buf = append(vw.buf, b...)
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteArray() (ArrayWriter, error) {
+ if err := vw.writeElementHeader(bsontype.Array, mArray, "WriteArray"); err != nil {
+ return nil, err
+ }
+
+ vw.push(mArray)
+
+ return vw, nil
+}
+
+func (vw *valueWriter) WriteBinary(b []byte) error {
+ return vw.WriteBinaryWithSubtype(b, 0x00)
+}
+
+func (vw *valueWriter) WriteBinaryWithSubtype(b []byte, btype byte) error {
+ if err := vw.writeElementHeader(bsontype.Binary, mode(0), "WriteBinaryWithSubtype"); err != nil {
+ return err
+ }
+
+ vw.buf = bsoncore.AppendBinary(vw.buf, btype, b)
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteBoolean(b bool) error {
+ if err := vw.writeElementHeader(bsontype.Boolean, mode(0), "WriteBoolean"); err != nil {
+ return err
+ }
+
+ vw.buf = bsoncore.AppendBoolean(vw.buf, b)
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteCodeWithScope(code string) (DocumentWriter, error) {
+ if err := vw.writeElementHeader(bsontype.CodeWithScope, mCodeWithScope, "WriteCodeWithScope"); err != nil {
+ return nil, err
+ }
+
+ // CodeWithScope is different from other types because we need an extra
+ // frame on the stack. In the EndDocument code, we write the document
+ // length, pop, write the code with scope length, and pop. To simplify the
+ // pop code, we push a spacer frame that we'll always jump over.
+ vw.push(mCodeWithScope)
+ vw.buf = bsoncore.AppendString(vw.buf, code)
+ vw.push(mSpacer)
+ vw.push(mDocument)
+
+ return vw, nil
+}
+
+func (vw *valueWriter) WriteDBPointer(ns string, oid primitive.ObjectID) error {
+ if err := vw.writeElementHeader(bsontype.DBPointer, mode(0), "WriteDBPointer"); err != nil {
+ return err
+ }
+
+ vw.buf = bsoncore.AppendDBPointer(vw.buf, ns, oid)
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteDateTime(dt int64) error {
+ if err := vw.writeElementHeader(bsontype.DateTime, mode(0), "WriteDateTime"); err != nil {
+ return err
+ }
+
+ vw.buf = bsoncore.AppendDateTime(vw.buf, dt)
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteDecimal128(d128 primitive.Decimal128) error {
+ if err := vw.writeElementHeader(bsontype.Decimal128, mode(0), "WriteDecimal128"); err != nil {
+ return err
+ }
+
+ vw.buf = bsoncore.AppendDecimal128(vw.buf, d128)
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteDouble(f float64) error {
+ if err := vw.writeElementHeader(bsontype.Double, mode(0), "WriteDouble"); err != nil {
+ return err
+ }
+
+ vw.buf = bsoncore.AppendDouble(vw.buf, f)
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteInt32(i32 int32) error {
+ if err := vw.writeElementHeader(bsontype.Int32, mode(0), "WriteInt32"); err != nil {
+ return err
+ }
+
+ vw.buf = bsoncore.AppendInt32(vw.buf, i32)
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteInt64(i64 int64) error {
+ if err := vw.writeElementHeader(bsontype.Int64, mode(0), "WriteInt64"); err != nil {
+ return err
+ }
+
+ vw.buf = bsoncore.AppendInt64(vw.buf, i64)
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteJavascript(code string) error {
+ if err := vw.writeElementHeader(bsontype.JavaScript, mode(0), "WriteJavascript"); err != nil {
+ return err
+ }
+
+ vw.buf = bsoncore.AppendJavaScript(vw.buf, code)
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteMaxKey() error {
+ if err := vw.writeElementHeader(bsontype.MaxKey, mode(0), "WriteMaxKey"); err != nil {
+ return err
+ }
+
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteMinKey() error {
+ if err := vw.writeElementHeader(bsontype.MinKey, mode(0), "WriteMinKey"); err != nil {
+ return err
+ }
+
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteNull() error {
+ if err := vw.writeElementHeader(bsontype.Null, mode(0), "WriteNull"); err != nil {
+ return err
+ }
+
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteObjectID(oid primitive.ObjectID) error {
+ if err := vw.writeElementHeader(bsontype.ObjectID, mode(0), "WriteObjectID"); err != nil {
+ return err
+ }
+
+ vw.buf = bsoncore.AppendObjectID(vw.buf, oid)
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteRegex(pattern string, options string) error {
+ if !isValidCString(pattern) || !isValidCString(options) {
+ return errors.New("BSON regex values cannot contain null bytes")
+ }
+ if err := vw.writeElementHeader(bsontype.Regex, mode(0), "WriteRegex"); err != nil {
+ return err
+ }
+
+ vw.buf = bsoncore.AppendRegex(vw.buf, pattern, sortStringAlphebeticAscending(options))
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteString(s string) error {
+ if err := vw.writeElementHeader(bsontype.String, mode(0), "WriteString"); err != nil {
+ return err
+ }
+
+ vw.buf = bsoncore.AppendString(vw.buf, s)
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteDocument() (DocumentWriter, error) {
+ if vw.stack[vw.frame].mode == mTopLevel {
+ vw.reserveLength()
+ return vw, nil
+ }
+ if err := vw.writeElementHeader(bsontype.EmbeddedDocument, mDocument, "WriteDocument", mTopLevel); err != nil {
+ return nil, err
+ }
+
+ vw.push(mDocument)
+ return vw, nil
+}
+
+func (vw *valueWriter) WriteSymbol(symbol string) error {
+ if err := vw.writeElementHeader(bsontype.Symbol, mode(0), "WriteSymbol"); err != nil {
+ return err
+ }
+
+ vw.buf = bsoncore.AppendSymbol(vw.buf, symbol)
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteTimestamp(t uint32, i uint32) error {
+ if err := vw.writeElementHeader(bsontype.Timestamp, mode(0), "WriteTimestamp"); err != nil {
+ return err
+ }
+
+ vw.buf = bsoncore.AppendTimestamp(vw.buf, t, i)
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteUndefined() error {
+ if err := vw.writeElementHeader(bsontype.Undefined, mode(0), "WriteUndefined"); err != nil {
+ return err
+ }
+
+ vw.pop()
+ return nil
+}
+
+func (vw *valueWriter) WriteDocumentElement(key string) (ValueWriter, error) {
+ switch vw.stack[vw.frame].mode {
+ case mTopLevel, mDocument:
+ default:
+ return nil, vw.invalidTransitionError(mElement, "WriteDocumentElement", []mode{mTopLevel, mDocument})
+ }
+
+ vw.push(mElement)
+ vw.stack[vw.frame].key = key
+
+ return vw, nil
+}
+
+func (vw *valueWriter) WriteDocumentEnd() error {
+ switch vw.stack[vw.frame].mode {
+ case mTopLevel, mDocument:
+ default:
+ return fmt.Errorf("incorrect mode to end document: %s", vw.stack[vw.frame].mode)
+ }
+
+ vw.buf = append(vw.buf, 0x00)
+
+ err := vw.writeLength()
+ if err != nil {
+ return err
+ }
+
+ if vw.stack[vw.frame].mode == mTopLevel {
+ if err = vw.Flush(); err != nil {
+ return err
+ }
+ }
+
+ vw.pop()
+
+ if vw.stack[vw.frame].mode == mCodeWithScope {
+ // We ignore the error here because of the guarantee of writeLength.
+ // See the docs for writeLength for more info.
+ _ = vw.writeLength()
+ vw.pop()
+ }
+ return nil
+}
+
+func (vw *valueWriter) Flush() error {
+ if vw.w == nil {
+ return nil
+ }
+
+ if _, err := vw.w.Write(vw.buf); err != nil {
+ return err
+ }
+ // reset buffer
+ vw.buf = vw.buf[:0]
+ return nil
+}
+
+func (vw *valueWriter) WriteArrayElement() (ValueWriter, error) {
+ if vw.stack[vw.frame].mode != mArray {
+ return nil, vw.invalidTransitionError(mValue, "WriteArrayElement", []mode{mArray})
+ }
+
+ arrkey := vw.stack[vw.frame].arrkey
+ vw.stack[vw.frame].arrkey++
+
+ vw.push(mValue)
+ vw.stack[vw.frame].arrkey = arrkey
+
+ return vw, nil
+}
+
+func (vw *valueWriter) WriteArrayEnd() error {
+ if vw.stack[vw.frame].mode != mArray {
+ return fmt.Errorf("incorrect mode to end array: %s", vw.stack[vw.frame].mode)
+ }
+
+ vw.buf = append(vw.buf, 0x00)
+
+ err := vw.writeLength()
+ if err != nil {
+ return err
+ }
+
+ vw.pop()
+ return nil
+}
+
+// NOTE: We assume that if writeLength is called more than once within the
+// same function without altering vw.buf, this method will not return an
+// error. If this changes, ensure that the following methods are updated:
+//
+// - WriteDocumentEnd
+func (vw *valueWriter) writeLength() error {
+ length := len(vw.buf)
+ if length > maxSize {
+ return errMaxDocumentSizeExceeded{size: int64(len(vw.buf))}
+ }
+ length = length - int(vw.stack[vw.frame].start)
+ start := vw.stack[vw.frame].start
+
+ vw.buf[start+0] = byte(length)
+ vw.buf[start+1] = byte(length >> 8)
+ vw.buf[start+2] = byte(length >> 16)
+ vw.buf[start+3] = byte(length >> 24)
+ return nil
+}
+
+func isValidCString(cs string) bool {
+ return !strings.ContainsRune(cs, '\x00')
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/writer.go b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/writer.go
new file mode 100644
index 000000000..dff65f87f
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsonrw/writer.go
@@ -0,0 +1,78 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsonrw
+
+import (
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+// ArrayWriter is the interface used to create a BSON or BSON adjacent array.
+// Callers must ensure they call WriteArrayEnd when they have finished creating
+// the array.
+type ArrayWriter interface {
+ WriteArrayElement() (ValueWriter, error)
+ WriteArrayEnd() error
+}
+
+// DocumentWriter is the interface used to create a BSON or BSON adjacent
+// document. Callers must ensure they call WriteDocumentEnd when they have
+// finished creating the document.
+type DocumentWriter interface {
+ WriteDocumentElement(string) (ValueWriter, error)
+ WriteDocumentEnd() error
+}
+
+// ValueWriter is the interface used to write BSON values. Implementations of
+// this interface handle creating BSON or BSON adjacent representations of the
+// values.
+type ValueWriter interface {
+ WriteArray() (ArrayWriter, error)
+ WriteBinary(b []byte) error
+ WriteBinaryWithSubtype(b []byte, btype byte) error
+ WriteBoolean(bool) error
+ WriteCodeWithScope(code string) (DocumentWriter, error)
+ WriteDBPointer(ns string, oid primitive.ObjectID) error
+ WriteDateTime(dt int64) error
+ WriteDecimal128(primitive.Decimal128) error
+ WriteDouble(float64) error
+ WriteInt32(int32) error
+ WriteInt64(int64) error
+ WriteJavascript(code string) error
+ WriteMaxKey() error
+ WriteMinKey() error
+ WriteNull() error
+ WriteObjectID(primitive.ObjectID) error
+ WriteRegex(pattern, options string) error
+ WriteString(string) error
+ WriteDocument() (DocumentWriter, error)
+ WriteSymbol(symbol string) error
+ WriteTimestamp(t, i uint32) error
+ WriteUndefined() error
+}
+
+// ValueWriterFlusher is a superset of ValueWriter that exposes functionality to flush to the underlying buffer.
+type ValueWriterFlusher interface {
+ ValueWriter
+ Flush() error
+}
+
+// BytesWriter is the interface used to write BSON bytes to a ValueWriter.
+// This interface is meant to be a superset of ValueWriter, so that types that
+// implement ValueWriter may also implement this interface.
+type BytesWriter interface {
+ WriteValueBytes(t bsontype.Type, b []byte) error
+}
+
+// SliceWriter allows a pointer to a slice of bytes to be used as an io.Writer.
+type SliceWriter []byte
+
+func (sw *SliceWriter) Write(p []byte) (int, error) {
+ written := len(p)
+ *sw = append(*sw, p...)
+ return written, nil
+}
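+
+// A quick sketch of SliceWriter as an io.Writer (illustrative only):
+//
+//	sw := new(SliceWriter)
+//	_, _ = sw.Write([]byte{0x0A, 0x0B}) // appends to the underlying slice
+//	// []byte(*sw) is now [0x0A, 0x0B]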
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/bsontype/bsontype.go b/vendor/go.mongodb.org/mongo-driver/bson/bsontype/bsontype.go
new file mode 100644
index 000000000..7c91ae518
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/bsontype/bsontype.go
@@ -0,0 +1,97 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+// Package bsontype is a utility package that contains types for each BSON type and
+// a stringifier for the Type to enable easier debugging when working with BSON.
+package bsontype // import "go.mongodb.org/mongo-driver/bson/bsontype"
+
+// These constants uniquely refer to each BSON type.
+const (
+ Double Type = 0x01
+ String Type = 0x02
+ EmbeddedDocument Type = 0x03
+ Array Type = 0x04
+ Binary Type = 0x05
+ Undefined Type = 0x06
+ ObjectID Type = 0x07
+ Boolean Type = 0x08
+ DateTime Type = 0x09
+ Null Type = 0x0A
+ Regex Type = 0x0B
+ DBPointer Type = 0x0C
+ JavaScript Type = 0x0D
+ Symbol Type = 0x0E
+ CodeWithScope Type = 0x0F
+ Int32 Type = 0x10
+ Timestamp Type = 0x11
+ Int64 Type = 0x12
+ Decimal128 Type = 0x13
+ MinKey Type = 0xFF
+ MaxKey Type = 0x7F
+
+ BinaryGeneric byte = 0x00
+ BinaryFunction byte = 0x01
+ BinaryBinaryOld byte = 0x02
+ BinaryUUIDOld byte = 0x03
+ BinaryUUID byte = 0x04
+ BinaryMD5 byte = 0x05
+ BinaryEncrypted byte = 0x06
+ BinaryColumn byte = 0x07
+ BinaryUserDefined byte = 0x80
+)
+
+// Type represents a BSON type.
+type Type byte
+
+// String returns the string representation of the BSON type's name.
+func (bt Type) String() string {
+ switch bt {
+ case '\x01':
+ return "double"
+ case '\x02':
+ return "string"
+ case '\x03':
+ return "embedded document"
+ case '\x04':
+ return "array"
+ case '\x05':
+ return "binary"
+ case '\x06':
+ return "undefined"
+ case '\x07':
+ return "objectID"
+ case '\x08':
+ return "boolean"
+ case '\x09':
+ return "UTC datetime"
+ case '\x0A':
+ return "null"
+ case '\x0B':
+ return "regex"
+ case '\x0C':
+ return "dbPointer"
+ case '\x0D':
+ return "javascript"
+ case '\x0E':
+ return "symbol"
+ case '\x0F':
+ return "code with scope"
+ case '\x10':
+ return "32-bit integer"
+ case '\x11':
+ return "timestamp"
+ case '\x12':
+ return "64-bit integer"
+ case '\x13':
+ return "128-bit decimal"
+ case '\xFF':
+ return "min key"
+ case '\x7F':
+ return "max key"
+ default:
+ return "invalid"
+ }
+}
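+
+// For example (illustrative):
+//
+//	Array.String()      // "array"
+//	Type(0x42).String() // "invalid" (0x42 is not a defined BSON type)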
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/decoder.go b/vendor/go.mongodb.org/mongo-driver/bson/decoder.go
new file mode 100644
index 000000000..6e189fa58
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/decoder.go
@@ -0,0 +1,141 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bson
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "sync"
+
+ "go.mongodb.org/mongo-driver/bson/bsoncodec"
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+)
+
+// ErrDecodeToNil is the error returned when trying to decode to a nil value
+var ErrDecodeToNil = errors.New("cannot Decode to nil value")
+
+// This pool is used to keep the allocations of Decoders down. This is only used for the Marshal*
+// methods and is not consumable from outside of this package. The Decoders retrieved from this pool
+// must have both Reset and SetRegistry called on them.
+var decPool = sync.Pool{
+ New: func() interface{} {
+ return new(Decoder)
+ },
+}
+
+// A Decoder reads and decodes BSON documents from a stream. It reads from a bsonrw.ValueReader as
+// the source of BSON data.
+type Decoder struct {
+ dc bsoncodec.DecodeContext
+ vr bsonrw.ValueReader
+
+ // We persist defaultDocumentM and defaultDocumentD on the Decoder so that
+ // they are not overwritten by (*Decoder).SetContext.
+ defaultDocumentM bool
+ defaultDocumentD bool
+}
+
+// NewDecoder returns a new decoder that uses the DefaultRegistry to read from vr.
+func NewDecoder(vr bsonrw.ValueReader) (*Decoder, error) {
+ if vr == nil {
+ return nil, errors.New("cannot create a new Decoder with a nil ValueReader")
+ }
+
+ return &Decoder{
+ dc: bsoncodec.DecodeContext{Registry: DefaultRegistry},
+ vr: vr,
+ }, nil
+}
+
+// NewDecoderWithContext returns a new decoder that uses DecodeContext dc to read from vr.
+func NewDecoderWithContext(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader) (*Decoder, error) {
+ if dc.Registry == nil {
+ dc.Registry = DefaultRegistry
+ }
+ if vr == nil {
+ return nil, errors.New("cannot create a new Decoder with a nil ValueReader")
+ }
+
+ return &Decoder{
+ dc: dc,
+ vr: vr,
+ }, nil
+}
+
+// Decode reads the next BSON document from the stream and decodes it into the
+// value pointed to by val.
+//
+// The documentation for Unmarshal contains details about the conversion of
+// BSON into a Go value.
+func (d *Decoder) Decode(val interface{}) error {
+ if unmarshaler, ok := val.(Unmarshaler); ok {
+ // TODO(skriptble): Reuse a []byte here and use the AppendDocumentBytes method.
+ buf, err := bsonrw.Copier{}.CopyDocumentToBytes(d.vr)
+ if err != nil {
+ return err
+ }
+ return unmarshaler.UnmarshalBSON(buf)
+ }
+
+ rval := reflect.ValueOf(val)
+ switch rval.Kind() {
+ case reflect.Ptr:
+ if rval.IsNil() {
+ return ErrDecodeToNil
+ }
+ rval = rval.Elem()
+ case reflect.Map:
+ if rval.IsNil() {
+ return ErrDecodeToNil
+ }
+ default:
+ return fmt.Errorf("argument to Decode must be a pointer or a map, but got %v", rval)
+ }
+ decoder, err := d.dc.LookupDecoder(rval.Type())
+ if err != nil {
+ return err
+ }
+ if d.defaultDocumentM {
+ d.dc.DefaultDocumentM()
+ }
+ if d.defaultDocumentD {
+ d.dc.DefaultDocumentD()
+ }
+ return decoder.DecodeValue(d.dc, d.vr, rval)
+}
+
+// Reset will reset the state of the decoder, using the same DecodeContext used in
+// the original construction but reading from vr.
+func (d *Decoder) Reset(vr bsonrw.ValueReader) error {
+ d.vr = vr
+ return nil
+}
+
+// SetRegistry replaces the current registry of the decoder with r.
+func (d *Decoder) SetRegistry(r *bsoncodec.Registry) error {
+ d.dc.Registry = r
+ return nil
+}
+
+// SetContext replaces the current DecodeContext of the decoder with dc.
+func (d *Decoder) SetContext(dc bsoncodec.DecodeContext) error {
+ d.dc = dc
+ return nil
+}
+
+// DefaultDocumentM will decode empty documents using the primitive.M type. This behavior is restricted to data typed as
+// "interface{}" or "map[string]interface{}".
+func (d *Decoder) DefaultDocumentM() {
+ d.defaultDocumentM = true
+}
+
+// DefaultDocumentD will decode empty documents using the primitive.D type. This behavior is restricted to data typed as
+// "interface{}" or "map[string]interface{}".
+func (d *Decoder) DefaultDocumentD() {
+ d.defaultDocumentD = true
+}
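+
+// A decoding sketch (illustrative; doc is assumed to hold one valid BSON document):
+//
+//	vr := bsonrw.NewBSONDocumentReader(doc)
+//	dec, _ := NewDecoder(vr)
+//	var out M
+//	_ = dec.Decode(&out)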
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/doc.go b/vendor/go.mongodb.org/mongo-driver/bson/doc.go
new file mode 100644
index 000000000..0134006d8
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/doc.go
@@ -0,0 +1,141 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+// Package bson is a library for reading, writing, and manipulating BSON. BSON is a binary serialization format used to
+// store documents and make remote procedure calls in MongoDB. The BSON specification is located at https://bsonspec.org.
+// The BSON library handles marshalling and unmarshalling of values through a configurable codec system. For a description
+// of the codec system and examples of registering custom codecs, see the bsoncodec package.
+//
+// # Raw BSON
+//
+// The Raw family of types is used to validate and retrieve elements from a slice of bytes. This
+// type is most useful when you want to do lookups on BSON bytes without unmarshaling it into another
+// type.
+//
+// Example:
+//
+// var raw bson.Raw = ... // bytes from somewhere
+// err := raw.Validate()
+// if err != nil { return err }
+// val := raw.Lookup("foo")
+// i32, ok := val.Int32OK()
+// // do something with i32...
+//
+// # Native Go Types
+//
+// The D and M types defined in this package can be used to build representations of BSON using native Go types. D is a
+// slice and M is a map. For more information about the use cases for these types, see the documentation on the type
+// definitions.
+//
+// Note that a D should not be constructed with duplicate key names, as that can cause undefined server behavior.
+//
+// Example:
+//
+// bson.D{{"foo", "bar"}, {"hello", "world"}, {"pi", 3.14159}}
+// bson.M{"foo": "bar", "hello": "world", "pi": 3.14159}
+//
+// When decoding BSON to a D or M, the following type mappings apply when unmarshalling:
+//
+// 1. BSON int32 unmarshals to an int32.
+// 2. BSON int64 unmarshals to an int64.
+// 3. BSON double unmarshals to a float64.
+// 4. BSON string unmarshals to a string.
+// 5. BSON boolean unmarshals to a bool.
+// 6. BSON embedded document unmarshals to the parent type (i.e. D for a D, M for an M).
+// 7. BSON array unmarshals to a bson.A.
+// 8. BSON ObjectId unmarshals to a primitive.ObjectID.
+// 9. BSON datetime unmarshals to a primitive.DateTime.
+// 10. BSON binary unmarshals to a primitive.Binary.
+// 11. BSON regular expression unmarshals to a primitive.Regex.
+// 12. BSON JavaScript unmarshals to a primitive.JavaScript.
+// 13. BSON code with scope unmarshals to a primitive.CodeWithScope.
+// 14. BSON timestamp unmarshals to a primitive.Timestamp.
+// 15. BSON 128-bit decimal unmarshals to a primitive.Decimal128.
+// 16. BSON min key unmarshals to a primitive.MinKey.
+// 17. BSON max key unmarshals to a primitive.MaxKey.
+// 18. BSON undefined unmarshals to a primitive.Undefined.
+// 19. BSON null unmarshals to nil.
+// 20. BSON DBPointer unmarshals to a primitive.DBPointer.
+// 21. BSON symbol unmarshals to a primitive.Symbol.
+//
+// The above mappings also apply when marshalling a D or M to BSON. Some other useful marshalling mappings are:
+//
+// 1. time.Time marshals to a BSON datetime.
+// 2. int8, int16, and int32 marshal to a BSON int32.
+// 3. int marshals to a BSON int32 if the value is between math.MinInt32 and math.MaxInt32, inclusive, and a BSON int64
+// otherwise.
+// 4. int64 marshals to BSON int64.
+// 5. uint8 and uint16 marshal to a BSON int32.
+// 6. uint, uint32, and uint64 marshal to a BSON int32 if the value is between math.MinInt32 and math.MaxInt32,
+// inclusive, and BSON int64 otherwise.
+// 7. BSON null and undefined values will unmarshal into the zero value of a field (e.g. unmarshalling a BSON null or
+// undefined value into a string will yield the empty string).
+//
+// # Structs
+//
+// Structs can be marshalled/unmarshalled to/from BSON or Extended JSON. When transforming structs to/from BSON or Extended
+// JSON, the following rules apply:
+//
+// 1. Only exported fields in structs will be marshalled or unmarshalled.
+//
+// 2. When marshalling a struct, each field will be lowercased to generate the key for the corresponding BSON element.
+// For example, a struct field named "Foo" will generate key "foo". This can be overridden via a struct tag (e.g.
+// `bson:"fooField"` to generate key "fooField" instead).
+//
+// 3. An embedded struct field is marshalled as a subdocument. The key will be the lowercased name of the field's type.
+//
+// 4. A pointer field is marshalled as the underlying type if the pointer is non-nil. If the pointer is nil, it is
+// marshalled as a BSON null value.
+//
+// 5. When unmarshalling, a field of type interface{} will follow the D/M type mappings listed above. BSON documents
+// unmarshalled into an interface{} field will be unmarshalled as a D.
+//
+// The encoding of each struct field can be customized by the "bson" struct tag.
+//
+// This tag behavior is configurable, and different struct tag behavior can be configured by initializing a new
+// bsoncodec.StructCodec with the desired tag parser and registering that StructCodec onto the Registry. By default, JSON tags
+// are not honored, but that can be enabled by creating a StructCodec with JSONFallbackStructTagParser, like below:
+//
+// Example:
+//
+// structcodec, _ := bsoncodec.NewStructCodec(bsoncodec.JSONFallbackStructTagParser)
+//
+// The bson tag gives the name of the field, possibly followed by a comma-separated list of options.
+// The name may be empty in order to specify options without overriding the default field name. The following options can be used
+// to configure behavior:
+//
+// 1. omitempty: If the omitempty struct tag is specified on a field, the field will not be marshalled if it is set to
+// the zero value. Fields with language primitive types such as integers, booleans, and strings are considered empty if
+// their value is equal to the zero value for the type (i.e. 0 for integers, false for booleans, and "" for strings).
+// Slices, maps, and arrays are considered empty if they are of length zero. Interfaces and pointers are considered
+// empty if their value is nil. By default, structs are only considered empty if the struct type implements the
+// bsoncodec.Zeroer interface and the IsZero method returns true. Struct fields whose types do not implement Zeroer are
+// never considered empty and will be marshalled as embedded documents.
+// NOTE: It is recommended that this tag be used for all slice and map fields.
+//
+// 2. minsize: If the minsize struct tag is specified on a field of type int64, uint, uint32, or uint64 and the value of
+// the field can fit in a signed int32, the field will be serialized as a BSON int32 rather than a BSON int64. For other
+// types, this tag is ignored.
+//
+// 3. truncate: If the truncate struct tag is specified on a field with a non-float numeric type, BSON doubles unmarshalled
+// into that field will be truncated at the decimal point. For example, if 3.14 is unmarshalled into a field of type int,
+// it will be unmarshalled as 3. If this tag is not specified, the decoder will throw an error if the value cannot be
+// decoded without losing precision. For float64 or non-numeric types, this tag is ignored.
+//
+// 4. inline: If the inline struct tag is specified for a struct or map field, the field will be "flattened" when
+// marshalling and "un-flattened" when unmarshalling. This means that all of the fields in that struct/map will be
+// pulled up one level and will become top-level fields rather than being fields in a nested document. For example, if a
+// map field named "Map" with value map[string]interface{}{"foo": "bar"} is inlined, the resulting document will be
+// {"foo": "bar"} instead of {"map": {"foo": "bar"}}. There can only be one inlined map field in a struct. If there are
+// duplicated fields in the resulting document when an inlined struct is marshalled, the inlined field will be overwritten.
+// If there are duplicated fields in the resulting document when an inlined map is marshalled, an error will be returned.
+// This tag can be used with fields that are pointers to structs. If an inlined pointer field is nil, it will not be
+// marshalled. For fields that are not maps or structs, this tag is ignored.
+//
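+// For illustration (a sketch, not part of the upstream docs), a struct using
+// several of these options might look like:
+//
+// type User struct {
+//     ID    primitive.ObjectID `bson:"_id,omitempty"`
+//     Name  string             `bson:"name"`
+//     Age   int64              `bson:"age,minsize"`
+//     Extra map[string]string  `bson:",inline"`
+// }
+//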
+// # Marshalling and Unmarshalling
+//
+// Manually marshalling and unmarshalling can be done with the Marshal and Unmarshal family of functions.
+package bson
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/encoder.go b/vendor/go.mongodb.org/mongo-driver/bson/encoder.go
new file mode 100644
index 000000000..fe5125d08
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/encoder.go
@@ -0,0 +1,99 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bson
+
+import (
+ "errors"
+ "reflect"
+ "sync"
+
+ "go.mongodb.org/mongo-driver/bson/bsoncodec"
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+)
+
+// This pool is used to keep the allocations of Encoders down. This is only used for the Marshal*
+// methods and is not consumable from outside of this package. The Encoders retrieved from this pool
+// must have both Reset and SetRegistry called on them.
+var encPool = sync.Pool{
+ New: func() interface{} {
+ return new(Encoder)
+ },
+}
+
+// An Encoder writes a serialization format to an output stream. It writes to a bsonrw.ValueWriter
+// as the destination of BSON data.
+type Encoder struct {
+ ec bsoncodec.EncodeContext
+ vw bsonrw.ValueWriter
+}
+
+// NewEncoder returns a new encoder that uses the DefaultRegistry to write to vw.
+func NewEncoder(vw bsonrw.ValueWriter) (*Encoder, error) {
+ if vw == nil {
+ return nil, errors.New("cannot create a new Encoder with a nil ValueWriter")
+ }
+
+ return &Encoder{
+ ec: bsoncodec.EncodeContext{Registry: DefaultRegistry},
+ vw: vw,
+ }, nil
+}
+
+// NewEncoderWithContext returns a new encoder that uses EncodeContext ec to write to vw.
+func NewEncoderWithContext(ec bsoncodec.EncodeContext, vw bsonrw.ValueWriter) (*Encoder, error) {
+ if ec.Registry == nil {
+ ec = bsoncodec.EncodeContext{Registry: DefaultRegistry}
+ }
+ if vw == nil {
+ return nil, errors.New("cannot create a new Encoder with a nil ValueWriter")
+ }
+
+ return &Encoder{
+ ec: ec,
+ vw: vw,
+ }, nil
+}
+
+// Encode writes the BSON encoding of val to the stream.
+//
+// The documentation for Marshal contains details about the conversion of Go
+// values to BSON.
+func (e *Encoder) Encode(val interface{}) error {
+ if marshaler, ok := val.(Marshaler); ok {
+ // TODO(skriptble): Should we have a MarshalAppender interface so that we can have []byte reuse?
+ buf, err := marshaler.MarshalBSON()
+ if err != nil {
+ return err
+ }
+ return bsonrw.Copier{}.CopyDocumentFromBytes(e.vw, buf)
+ }
+
+ encoder, err := e.ec.LookupEncoder(reflect.TypeOf(val))
+ if err != nil {
+ return err
+ }
+ return encoder.EncodeValue(e.ec, e.vw, reflect.ValueOf(val))
+}
+
+// Reset will reset the state of the encoder, using the same EncodeContext used in
+// the original construction but writing to vw.
+func (e *Encoder) Reset(vw bsonrw.ValueWriter) error {
+ e.vw = vw
+ return nil
+}
+
+// SetRegistry replaces the current registry of the encoder with r.
+func (e *Encoder) SetRegistry(r *bsoncodec.Registry) error {
+ e.ec.Registry = r
+ return nil
+}
+
+// SetContext replaces the current EncodeContext of the encoder with ec.
+func (e *Encoder) SetContext(ec bsoncodec.EncodeContext) error {
+ e.ec = ec
+ return nil
+}
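+
+// An encoding sketch (illustrative; error handling elided):
+//
+//	sw := new(bsonrw.SliceWriter)
+//	vw, _ := bsonrw.NewBSONValueWriter(sw)
+//	enc, _ := NewEncoder(vw)
+//	_ = enc.Encode(M{"hello": "world"})
+//	// *sw now holds the marshalled document bytes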
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/marshal.go b/vendor/go.mongodb.org/mongo-driver/bson/marshal.go
new file mode 100644
index 000000000..db8d8ee92
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/marshal.go
@@ -0,0 +1,248 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bson
+
+import (
+ "bytes"
+ "encoding/json"
+
+ "go.mongodb.org/mongo-driver/bson/bsoncodec"
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+)
+
+const defaultDstCap = 256
+
+var bvwPool = bsonrw.NewBSONValueWriterPool()
+var extjPool = bsonrw.NewExtJSONValueWriterPool()
+
+// Marshaler is an interface implemented by types that can marshal themselves
+// into a BSON document represented as bytes. The bytes returned must be a valid
+// BSON document if the error is nil.
+type Marshaler interface {
+ MarshalBSON() ([]byte, error)
+}
+
+// ValueMarshaler is an interface implemented by types that can marshal
+// themselves into a BSON value as bytes. The type must be the valid type for
+// the bytes returned. The bytes and byte type together must be valid if the
+// error is nil.
+type ValueMarshaler interface {
+ MarshalBSONValue() (bsontype.Type, []byte, error)
+}
+
+// Marshal returns the BSON encoding of val as a BSON document. If val is not a type that can be transformed into a
+// document, MarshalValue should be used instead.
+//
+// Marshal will use the default registry created by NewRegistry to recursively
+// marshal val into a []byte. Marshal will inspect struct tags and alter the
+// marshaling process accordingly.
+func Marshal(val interface{}) ([]byte, error) {
+ return MarshalWithRegistry(DefaultRegistry, val)
+}
+
+// MarshalAppend will encode val as a BSON document and append the bytes to dst. If dst is not large enough to hold the
+// bytes, it will be grown. If val is not a type that can be transformed into a document, MarshalValueAppend should be
+// used instead.
+func MarshalAppend(dst []byte, val interface{}) ([]byte, error) {
+ return MarshalAppendWithRegistry(DefaultRegistry, dst, val)
+}
+
+// MarshalWithRegistry returns the BSON encoding of val as a BSON document. If val is not a type that can be transformed
+// into a document, MarshalValueWithRegistry should be used instead.
+func MarshalWithRegistry(r *bsoncodec.Registry, val interface{}) ([]byte, error) {
+ dst := make([]byte, 0)
+ return MarshalAppendWithRegistry(r, dst, val)
+}
+
+// MarshalWithContext returns the BSON encoding of val as a BSON document using EncodeContext ec. If val is not a type
+// that can be transformed into a document, MarshalValueWithContext should be used instead.
+func MarshalWithContext(ec bsoncodec.EncodeContext, val interface{}) ([]byte, error) {
+ dst := make([]byte, 0)
+ return MarshalAppendWithContext(ec, dst, val)
+}
+
+// MarshalAppendWithRegistry will encode val as a BSON document using Registry r and append the bytes to dst. If dst is
+// not large enough to hold the bytes, it will be grown. If val is not a type that can be transformed into a document,
+// MarshalValueAppendWithRegistry should be used instead.
+func MarshalAppendWithRegistry(r *bsoncodec.Registry, dst []byte, val interface{}) ([]byte, error) {
+ return MarshalAppendWithContext(bsoncodec.EncodeContext{Registry: r}, dst, val)
+}
+
+// MarshalAppendWithContext will encode val as a BSON document using Registry r and EncodeContext ec and append the
+// bytes to dst. If dst is not large enough to hold the bytes, it will be grown. If val is not a type that can be
+// transformed into a document, MarshalValueAppendWithContext should be used instead.
+func MarshalAppendWithContext(ec bsoncodec.EncodeContext, dst []byte, val interface{}) ([]byte, error) {
+ sw := new(bsonrw.SliceWriter)
+ *sw = dst
+ vw := bvwPool.Get(sw)
+ defer bvwPool.Put(vw)
+
+ enc := encPool.Get().(*Encoder)
+ defer encPool.Put(enc)
+
+ err := enc.Reset(vw)
+ if err != nil {
+ return nil, err
+ }
+ err = enc.SetContext(ec)
+ if err != nil {
+ return nil, err
+ }
+
+ err = enc.Encode(val)
+ if err != nil {
+ return nil, err
+ }
+
+ return *sw, nil
+}
+
+// MarshalValue returns the BSON encoding of val.
+//
+// MarshalValue will use bson.DefaultRegistry to transform val into a BSON value. If val is a struct, this function will
+// inspect struct tags and alter the marshalling process accordingly.
+func MarshalValue(val interface{}) (bsontype.Type, []byte, error) {
+ return MarshalValueWithRegistry(DefaultRegistry, val)
+}
+
+// MarshalValueAppend will append the BSON encoding of val to dst. If dst is not large enough to hold the BSON encoding
+// of val, dst will be grown.
+func MarshalValueAppend(dst []byte, val interface{}) (bsontype.Type, []byte, error) {
+ return MarshalValueAppendWithRegistry(DefaultRegistry, dst, val)
+}
+
+// MarshalValueWithRegistry returns the BSON encoding of val using Registry r.
+func MarshalValueWithRegistry(r *bsoncodec.Registry, val interface{}) (bsontype.Type, []byte, error) {
+ dst := make([]byte, 0)
+ return MarshalValueAppendWithRegistry(r, dst, val)
+}
+
+// MarshalValueWithContext returns the BSON encoding of val using EncodeContext ec.
+func MarshalValueWithContext(ec bsoncodec.EncodeContext, val interface{}) (bsontype.Type, []byte, error) {
+ dst := make([]byte, 0)
+ return MarshalValueAppendWithContext(ec, dst, val)
+}
+
+// MarshalValueAppendWithRegistry will append the BSON encoding of val to dst using Registry r. If dst is not large
+// enough to hold the BSON encoding of val, dst will be grown.
+func MarshalValueAppendWithRegistry(r *bsoncodec.Registry, dst []byte, val interface{}) (bsontype.Type, []byte, error) {
+ return MarshalValueAppendWithContext(bsoncodec.EncodeContext{Registry: r}, dst, val)
+}
+
+// MarshalValueAppendWithContext will append the BSON encoding of val to dst using EncodeContext ec. If dst is not large
+// enough to hold the BSON encoding of val, dst will be grown.
+func MarshalValueAppendWithContext(ec bsoncodec.EncodeContext, dst []byte, val interface{}) (bsontype.Type, []byte, error) {
+ // get a ValueWriter configured to write to dst
+ sw := new(bsonrw.SliceWriter)
+ *sw = dst
+ vwFlusher := bvwPool.GetAtModeElement(sw)
+
+ // get an Encoder and encode the value
+ enc := encPool.Get().(*Encoder)
+ defer encPool.Put(enc)
+ if err := enc.Reset(vwFlusher); err != nil {
+ return 0, nil, err
+ }
+ if err := enc.SetContext(ec); err != nil {
+ return 0, nil, err
+ }
+ if err := enc.Encode(val); err != nil {
+ return 0, nil, err
+ }
+
+ // Flush the bytes written because we cannot guarantee that a full document has been written.
+ // After the flush, *sw will be in the format:
+ // [value type, 0 (null byte to indicate end of empty element name), value bytes...]
+ if err := vwFlusher.Flush(); err != nil {
+ return 0, nil, err
+ }
+ buffer := *sw
+ return bsontype.Type(buffer[0]), buffer[2:], nil
+}
+
+// MarshalExtJSON returns the extended JSON encoding of val.
+func MarshalExtJSON(val interface{}, canonical, escapeHTML bool) ([]byte, error) {
+ return MarshalExtJSONWithRegistry(DefaultRegistry, val, canonical, escapeHTML)
+}
+
+// MarshalExtJSONAppend will append the extended JSON encoding of val to dst.
+// If dst is not large enough to hold the extended JSON encoding of val, dst
+// will be grown.
+func MarshalExtJSONAppend(dst []byte, val interface{}, canonical, escapeHTML bool) ([]byte, error) {
+ return MarshalExtJSONAppendWithRegistry(DefaultRegistry, dst, val, canonical, escapeHTML)
+}
+
+// MarshalExtJSONWithRegistry returns the extended JSON encoding of val using Registry r.
+func MarshalExtJSONWithRegistry(r *bsoncodec.Registry, val interface{}, canonical, escapeHTML bool) ([]byte, error) {
+ dst := make([]byte, 0, defaultDstCap)
+ return MarshalExtJSONAppendWithContext(bsoncodec.EncodeContext{Registry: r}, dst, val, canonical, escapeHTML)
+}
+
+// MarshalExtJSONWithContext returns the extended JSON encoding of val using EncodeContext ec.
+func MarshalExtJSONWithContext(ec bsoncodec.EncodeContext, val interface{}, canonical, escapeHTML bool) ([]byte, error) {
+ dst := make([]byte, 0, defaultDstCap)
+ return MarshalExtJSONAppendWithContext(ec, dst, val, canonical, escapeHTML)
+}
+
+// MarshalExtJSONAppendWithRegistry will append the extended JSON encoding of
+// val to dst using Registry r. If dst is not large enough to hold the extended
+// JSON encoding of val, dst will be grown.
+func MarshalExtJSONAppendWithRegistry(r *bsoncodec.Registry, dst []byte, val interface{}, canonical, escapeHTML bool) ([]byte, error) {
+ return MarshalExtJSONAppendWithContext(bsoncodec.EncodeContext{Registry: r}, dst, val, canonical, escapeHTML)
+}
+
+// MarshalExtJSONAppendWithContext will append the extended JSON encoding of
+// val to dst using EncodeContext ec. If dst is not large enough to hold the
+// extended JSON encoding of val, dst will be grown.
+func MarshalExtJSONAppendWithContext(ec bsoncodec.EncodeContext, dst []byte, val interface{}, canonical, escapeHTML bool) ([]byte, error) {
+ sw := new(bsonrw.SliceWriter)
+ *sw = dst
+ ejvw := extjPool.Get(sw, canonical, escapeHTML)
+ defer extjPool.Put(ejvw)
+
+ enc := encPool.Get().(*Encoder)
+ defer encPool.Put(enc)
+
+ err := enc.Reset(ejvw)
+ if err != nil {
+ return nil, err
+ }
+ err = enc.SetContext(ec)
+ if err != nil {
+ return nil, err
+ }
+
+ err = enc.Encode(val)
+ if err != nil {
+ return nil, err
+ }
+
+ return *sw, nil
+}
+
+// IndentExtJSON will prefix and indent the provided extended JSON src and append it to dst.
+func IndentExtJSON(dst *bytes.Buffer, src []byte, prefix, indent string) error {
+ return json.Indent(dst, src, prefix, indent)
+}
+
+// MarshalExtJSONIndent returns the extended JSON encoding of val, with each
+// line prefixed and indented.
+func MarshalExtJSONIndent(val interface{}, canonical, escapeHTML bool, prefix, indent string) ([]byte, error) {
+ marshaled, err := MarshalExtJSON(val, canonical, escapeHTML)
+ if err != nil {
+ return nil, err
+ }
+
+ var buf bytes.Buffer
+ err = IndentExtJSON(&buf, marshaled, prefix, indent)
+ if err != nil {
+ return nil, err
+ }
+
+ return buf.Bytes(), nil
+}
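+
+// Typical top-level usage (illustrative only):
+//
+//	doc, _ := Marshal(M{"pi": 3.14159})                     // BSON document bytes
+//	ext, _ := MarshalExtJSON(M{"pi": 3.14159}, true, false) // canonical extended JSON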
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/primitive/decimal.go b/vendor/go.mongodb.org/mongo-driver/bson/primitive/decimal.go
new file mode 100644
index 000000000..ba7c9112e
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/primitive/decimal.go
@@ -0,0 +1,423 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+//
+// Based on gopkg.in/mgo.v2/bson by Gustavo Niemeyer
+// See THIRD-PARTY-NOTICES for original license terms.
+
+package primitive
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "math/big"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+// These constants are the maximum and minimum values for the exponent field in a decimal128 value.
+const (
+ MaxDecimal128Exp = 6111
+ MinDecimal128Exp = -6176
+)
+
+// These errors are returned when an invalid value is parsed as a big.Int.
+var (
+ ErrParseNaN = errors.New("cannot parse NaN as a *big.Int")
+ ErrParseInf = errors.New("cannot parse Infinity as a *big.Int")
+ ErrParseNegInf = errors.New("cannot parse -Infinity as a *big.Int")
+)
+
+// Decimal128 holds decimal128 BSON values.
+type Decimal128 struct {
+ h, l uint64
+}
+
+// NewDecimal128 creates a Decimal128 using the provided high and low uint64s.
+func NewDecimal128(h, l uint64) Decimal128 {
+ return Decimal128{h: h, l: l}
+}
+
+// GetBytes returns the underlying bytes of the BSON decimal value as two uint64 values. The first
+// contains the first 8 bytes of the value and the second contains the latter 8 bytes.
+func (d Decimal128) GetBytes() (uint64, uint64) {
+ return d.h, d.l
+}
+
+// String returns a string representation of the decimal value.
+func (d Decimal128) String() string {
+ var posSign int // positive sign
+ var exp int // exponent
+ var high, low uint64 // significand high/low
+
+ if d.h>>63&1 == 0 {
+ posSign = 1
+ }
+
+ switch d.h >> 58 & (1<<5 - 1) {
+ case 0x1F:
+ return "NaN"
+ case 0x1E:
+ return "-Infinity"[posSign:]
+ }
+
+ low = d.l
+ if d.h>>61&3 == 3 {
+ // Bits: 1*sign 2*ignored 14*exponent 111*significand.
+ // Implicit 0b100 prefix in significand.
+ exp = int(d.h >> 47 & (1<<14 - 1))
+ //high = 4<<47 | d.h&(1<<47-1)
+ // Spec says all of these values are out of range.
+ high, low = 0, 0
+ } else {
+ // Bits: 1*sign 14*exponent 113*significand
+ exp = int(d.h >> 49 & (1<<14 - 1))
+ high = d.h & (1<<49 - 1)
+ }
+ exp += MinDecimal128Exp
+
+ // The loop below would handle this, but the case is trivial and common, so shortcut it.
+ if high == 0 && low == 0 && exp == 0 {
+ return "-0"[posSign:]
+ }
+
+ var repr [48]byte // Loop 5 times over 9 digits plus dot, negative sign, and leading zero.
+ var last = len(repr)
+ var i = len(repr)
+ var dot = len(repr) + exp
+ var rem uint32
+Loop:
+ for d9 := 0; d9 < 5; d9++ {
+ high, low, rem = divmod(high, low, 1e9)
+ for d1 := 0; d1 < 9; d1++ {
+ // Handle "-0.0", "0.00123400", "-1.00E-6", "1.050E+3", etc.
+ if i < len(repr) && (dot == i || low == 0 && high == 0 && rem > 0 && rem < 10 && (dot < i-6 || exp > 0)) {
+ exp += len(repr) - i
+ i--
+ repr[i] = '.'
+ last = i - 1
+ dot = len(repr) // Unmark.
+ }
+ c := '0' + byte(rem%10)
+ rem /= 10
+ i--
+ repr[i] = c
+ // Handle "0E+3", "1E+3", etc.
+ if low == 0 && high == 0 && rem == 0 && i == len(repr)-1 && (dot < i-5 || exp > 0) {
+ last = i
+ break Loop
+ }
+ if c != '0' {
+ last = i
+ }
+ // Break early: the loop would finish correctly without this, but once the
+ // value is exhausted there is nothing useful left to emit.
+ if dot > i && low == 0 && high == 0 && rem == 0 {
+ break Loop
+ }
+ }
+ }
+ repr[last-1] = '-'
+ last--
+
+ if exp > 0 {
+ return string(repr[last+posSign:]) + "E+" + strconv.Itoa(exp)
+ }
+ if exp < 0 {
+ return string(repr[last+posSign:]) + "E" + strconv.Itoa(exp)
+ }
+ return string(repr[last+posSign:])
+}
+
+// BigInt returns significand as big.Int and exponent, bi * 10 ^ exp.
+func (d Decimal128) BigInt() (*big.Int, int, error) {
+ high, low := d.GetBytes()
+ posSign := high>>63&1 == 0 // positive sign
+
+ switch high >> 58 & (1<<5 - 1) {
+ case 0x1F:
+ return nil, 0, ErrParseNaN
+ case 0x1E:
+ if posSign {
+ return nil, 0, ErrParseInf
+ }
+ return nil, 0, ErrParseNegInf
+ }
+
+ var exp int
+ if high>>61&3 == 3 {
+ // Bits: 1*sign 2*ignored 14*exponent 111*significand.
+ // Implicit 0b100 prefix in significand.
+ exp = int(high >> 47 & (1<<14 - 1))
+ //high = 4<<47 | d.h&(1<<47-1)
+ // Spec says all of these values are out of range.
+ high, low = 0, 0
+ } else {
+ // Bits: 1*sign 14*exponent 113*significand
+ exp = int(high >> 49 & (1<<14 - 1))
+ high = high & (1<<49 - 1)
+ }
+ exp += MinDecimal128Exp
+
+ // The loop below would handle this, but the case is trivial and common, so shortcut it.
+ if high == 0 && low == 0 && exp == 0 {
+ return new(big.Int), 0, nil
+ }
+
+ bi := big.NewInt(0)
+ const host32bit = ^uint(0)>>32 == 0
+ if host32bit {
+ bi.SetBits([]big.Word{big.Word(low), big.Word(low >> 32), big.Word(high), big.Word(high >> 32)})
+ } else {
+ bi.SetBits([]big.Word{big.Word(low), big.Word(high)})
+ }
+
+ if !posSign {
+ return bi.Neg(bi), exp, nil
+ }
+ return bi, exp, nil
+}
+
+// IsNaN returns whether d is NaN.
+func (d Decimal128) IsNaN() bool {
+ return d.h>>58&(1<<5-1) == 0x1F
+}
+
+// IsInf returns:
+//
+// +1 d == Infinity
+// 0 other case
+// -1 d == -Infinity
+func (d Decimal128) IsInf() int {
+ if d.h>>58&(1<<5-1) != 0x1E {
+ return 0
+ }
+
+ if d.h>>63&1 == 0 {
+ return 1
+ }
+ return -1
+}
+
+// IsZero returns true if d is the empty Decimal128.
+func (d Decimal128) IsZero() bool {
+ return d.h == 0 && d.l == 0
+}
+
+// MarshalJSON returns Decimal128 as a string.
+func (d Decimal128) MarshalJSON() ([]byte, error) {
+ return json.Marshal(d.String())
+}
+
+// UnmarshalJSON creates a primitive.Decimal128 from a JSON string, an extended JSON $numberDecimal value, or the string
+// "null". If b is a JSON string or extended JSON value, d will have the value of that string, and if b is "null", d will
+// be unchanged.
+func (d *Decimal128) UnmarshalJSON(b []byte) error {
+ // Ignore "null" to keep parity with the standard library. Decoding a JSON null into a non-pointer Decimal128 field
+ // will leave the field unchanged. For pointer values, encoding/json will set the pointer to nil and will not
+ // enter the UnmarshalJSON hook.
+ if string(b) == "null" {
+ return nil
+ }
+
+ var res interface{}
+ err := json.Unmarshal(b, &res)
+ if err != nil {
+ return err
+ }
+ str, ok := res.(string)
+
+ // Extended JSON
+ if !ok {
+ m, ok := res.(map[string]interface{})
+ if !ok {
+ return errors.New("not an extended JSON Decimal128: expected document")
+ }
+ d128, ok := m["$numberDecimal"]
+ if !ok {
+ return errors.New("not an extended JSON Decimal128: expected key $numberDecimal")
+ }
+ str, ok = d128.(string)
+ if !ok {
+ return errors.New("not an extended JSON Decimal128: expected decimal to be string")
+ }
+ }
+
+ *d, err = ParseDecimal128(str)
+ return err
+}
+
+func divmod(h, l uint64, div uint32) (qh, ql uint64, rem uint32) {
+ div64 := uint64(div)
+ a := h >> 32
+ aq := a / div64
+ ar := a % div64
+ b := ar<<32 + h&(1<<32-1)
+ bq := b / div64
+ br := b % div64
+ c := br<<32 + l>>32
+ cq := c / div64
+ cr := c % div64
+ d := cr<<32 + l&(1<<32-1)
+ dq := d / div64
+ dr := d % div64
+ return (aq<<32 | bq), (cq<<32 | dq), uint32(dr)
+}
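+
+// For example, divmod(0, 100, 7) returns (0, 14, 2), since 100 = 7*14 + 2.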
+
+var dNaN = Decimal128{0x1F << 58, 0}
+var dPosInf = Decimal128{0x1E << 58, 0}
+var dNegInf = Decimal128{0x3E << 58, 0}
+
+func dErr(s string) (Decimal128, error) {
+ return dNaN, fmt.Errorf("cannot parse %q as a decimal128", s)
+}
+
+// normalNumber matches a number in scientific notation, e.g. -10.15e-18.
+var normalNumber = regexp.MustCompile(`^(?P<int>[-+]?\d*)?(?:\.(?P<dec>\d*))?(?:[Ee](?P<exp>[-+]?\d+))?$`)
+
+// ParseDecimal128 takes the given string and attempts to parse it into a valid
+// Decimal128 value.
+func ParseDecimal128(s string) (Decimal128, error) {
+ if s == "" {
+ return dErr(s)
+ }
+
+ matches := normalNumber.FindStringSubmatch(s)
+ if len(matches) == 0 {
+ orig := s
+ neg := s[0] == '-'
+ if neg || s[0] == '+' {
+ s = s[1:]
+ }
+
+ if s == "NaN" || s == "nan" || strings.EqualFold(s, "nan") {
+ return dNaN, nil
+ }
+ if s == "Inf" || s == "inf" || strings.EqualFold(s, "inf") || strings.EqualFold(s, "infinity") {
+ if neg {
+ return dNegInf, nil
+ }
+ return dPosInf, nil
+ }
+ return dErr(orig)
+ }
+
+ intPart := matches[1]
+ decPart := matches[2]
+ expPart := matches[3]
+
+ var err error
+ exp := 0
+ if expPart != "" {
+ exp, err = strconv.Atoi(expPart)
+ if err != nil {
+ return dErr(s)
+ }
+ }
+ if decPart != "" {
+ exp -= len(decPart)
+ }
+
+ if len(strings.Trim(intPart+decPart, "-0")) > 35 {
+ return dErr(s)
+ }
+
+ bi, ok := new(big.Int).SetString(intPart+decPart, 10)
+ if !ok {
+ return dErr(s)
+ }
+
+ d, ok := ParseDecimal128FromBigInt(bi, exp)
+ if !ok {
+ return dErr(s)
+ }
+
+ if bi.Sign() == 0 && s[0] == '-' {
+ d.h |= 1 << 63
+ }
+
+ return d, nil
+}
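+
+// Illustrative calls (a sketch of expected behavior):
+//
+//	ParseDecimal128("3.14159")   // finite value: significand 314159, exponent -5
+//	ParseDecimal128("-Infinity") // dNegInf, nil error
+//	ParseDecimal128("bogus")     // dNaN plus a parse error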
+
+var (
+ ten = big.NewInt(10)
+ zero = new(big.Int)
+
+ maxS, _ = new(big.Int).SetString("9999999999999999999999999999999999", 10)
+)
+
+// ParseDecimal128FromBigInt attempts to parse the given significand and exponent into a valid Decimal128 value.
+func ParseDecimal128FromBigInt(bi *big.Int, exp int) (Decimal128, bool) {
+ // Copy bi so the caller's argument is not mutated.
+ bi = new(big.Int).Set(bi)
+
+ q := new(big.Int)
+ r := new(big.Int)
+
+ for bigIntCmpAbs(bi, maxS) == 1 {
+ bi, _ = q.QuoRem(bi, ten, r)
+ if r.Cmp(zero) != 0 {
+ return Decimal128{}, false
+ }
+ exp++
+ if exp > MaxDecimal128Exp {
+ return Decimal128{}, false
+ }
+ }
+
+ for exp < MinDecimal128Exp {
+ // Subnormal.
+ bi, _ = q.QuoRem(bi, ten, r)
+ if r.Cmp(zero) != 0 {
+ return Decimal128{}, false
+ }
+ exp++
+ }
+ for exp > MaxDecimal128Exp {
+ // Clamped.
+ bi.Mul(bi, ten)
+ if bigIntCmpAbs(bi, maxS) == 1 {
+ return Decimal128{}, false
+ }
+ exp--
+ }
+
+ b := bi.Bytes()
+ var h, l uint64
+ for i := 0; i < len(b); i++ {
+ if i < len(b)-8 {
+ h = h<<8 | uint64(b[i])
+ continue
+ }
+ l = l<<8 | uint64(b[i])
+ }
+
+ h |= uint64(exp-MinDecimal128Exp) & uint64(1<<14-1) << 49
+ if bi.Sign() == -1 {
+ h |= 1 << 63
+ }
+
+ return Decimal128{h: h, l: l}, true
+}
+
+// bigIntCmpAbs computes big.Int.Cmp(absoluteValue(x), absoluteValue(y)).
+func bigIntCmpAbs(x, y *big.Int) int {
+ xAbs := bigIntAbsValue(x)
+ yAbs := bigIntAbsValue(y)
+ return xAbs.Cmp(yAbs)
+}
+
+// bigIntAbsValue returns a big.Int containing the absolute value of b.
+// If b is already a non-negative number, it is returned without any changes or copies.
+func bigIntAbsValue(b *big.Int) *big.Int {
+ if b.Sign() >= 0 {
+ return b // already positive
+ }
+ return new(big.Int).Abs(b)
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/primitive/objectid.go b/vendor/go.mongodb.org/mongo-driver/bson/primitive/objectid.go
new file mode 100644
index 000000000..ded367316
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/primitive/objectid.go
@@ -0,0 +1,206 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+//
+// Based on gopkg.in/mgo.v2/bson by Gustavo Niemeyer
+// See THIRD-PARTY-NOTICES for original license terms.
+
+package primitive
+
+import (
+ "crypto/rand"
+ "encoding"
+ "encoding/binary"
+ "encoding/hex"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "sync/atomic"
+ "time"
+)
+
+// ErrInvalidHex indicates that a hex string cannot be converted to an ObjectID.
+var ErrInvalidHex = errors.New("the provided hex string is not a valid ObjectID")
+
+// ObjectID is the BSON ObjectID type.
+type ObjectID [12]byte
+
+// NilObjectID is the zero value for ObjectID.
+var NilObjectID ObjectID
+
+var objectIDCounter = readRandomUint32()
+var processUnique = processUniqueBytes()
+
+var _ encoding.TextMarshaler = ObjectID{}
+var _ encoding.TextUnmarshaler = &ObjectID{}
+
+// NewObjectID generates a new ObjectID.
+func NewObjectID() ObjectID {
+ return NewObjectIDFromTimestamp(time.Now())
+}
+
+// NewObjectIDFromTimestamp generates a new ObjectID based on the given time.
+func NewObjectIDFromTimestamp(timestamp time.Time) ObjectID {
+ var b [12]byte
+
+ binary.BigEndian.PutUint32(b[0:4], uint32(timestamp.Unix()))
+ copy(b[4:9], processUnique[:])
+ putUint24(b[9:12], atomic.AddUint32(&objectIDCounter, 1))
+
+ return b
+}
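+
+// Byte layout of the generated ObjectID (as implemented above):
+//
+//	b[0:4]  big-endian seconds since the Unix epoch
+//	b[4:9]  per-process random bytes (processUnique)
+//	b[9:12] big-endian low 24 bits of an incrementing counter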
+
+// Timestamp extracts the time part of the ObjectID.
+func (id ObjectID) Timestamp() time.Time {
+ unixSecs := binary.BigEndian.Uint32(id[0:4])
+ return time.Unix(int64(unixSecs), 0).UTC()
+}
+
+// Hex returns the hex encoding of the ObjectID as a string.
+func (id ObjectID) Hex() string {
+ var buf [24]byte
+ hex.Encode(buf[:], id[:])
+ return string(buf[:])
+}
+
+func (id ObjectID) String() string {
+ return fmt.Sprintf("ObjectID(%q)", id.Hex())
+}
+
+// IsZero returns true if id is the empty ObjectID.
+func (id ObjectID) IsZero() bool {
+ return id == NilObjectID
+}
+
+// ObjectIDFromHex creates a new ObjectID from a hex string. It returns an error if the hex string is not a
+// valid ObjectID.
+func ObjectIDFromHex(s string) (ObjectID, error) {
+ if len(s) != 24 {
+ return NilObjectID, ErrInvalidHex
+ }
+
+ b, err := hex.DecodeString(s)
+ if err != nil {
+ return NilObjectID, err
+ }
+
+ var oid [12]byte
+ copy(oid[:], b)
+
+ return oid, nil
+}
+
+// IsValidObjectID returns true if the provided hex string represents a valid ObjectID and false if not.
+func IsValidObjectID(s string) bool {
+ _, err := ObjectIDFromHex(s)
+ return err == nil
+}
+
+// MarshalText returns the ObjectID as UTF-8-encoded text. Implementing this allows us to use ObjectID
+// as a map key when marshalling JSON. See https://pkg.go.dev/encoding#TextMarshaler
+func (id ObjectID) MarshalText() ([]byte, error) {
+ return []byte(id.Hex()), nil
+}
+
+// UnmarshalText populates the byte slice with the ObjectID. Implementing this allows us to use ObjectID
+// as a map key when unmarshalling JSON. See https://pkg.go.dev/encoding#TextUnmarshaler
+func (id *ObjectID) UnmarshalText(b []byte) error {
+ oid, err := ObjectIDFromHex(string(b))
+ if err != nil {
+ return err
+ }
+ *id = oid
+ return nil
+}
+
+// MarshalJSON returns the ObjectID as a string
+func (id ObjectID) MarshalJSON() ([]byte, error) {
+ return json.Marshal(id.Hex())
+}
+
+// UnmarshalJSON populates the ObjectID from JSON input. If the input is twelve bytes
+// long, it is treated as the raw BSON representation of the ObjectID; otherwise it must
+// be a 24-character hex string or an extended JSON $oid document. This method also accepts
+// empty strings and decodes them as NilObjectID. For any other inputs, an error will be returned.
+func (id *ObjectID) UnmarshalJSON(b []byte) error {
+ // Ignore "null" to keep parity with the standard library. Decoding a JSON null into a non-pointer ObjectID field
+ // will leave the field unchanged. For pointer values, encoding/json will set the pointer to nil and will not
+ // enter the UnmarshalJSON hook.
+ if string(b) == "null" {
+ return nil
+ }
+
+ var err error
+ switch len(b) {
+ case 12:
+ copy(id[:], b)
+ default:
+ // Extended JSON
+ var res interface{}
+ err := json.Unmarshal(b, &res)
+ if err != nil {
+ return err
+ }
+ str, ok := res.(string)
+ if !ok {
+ m, ok := res.(map[string]interface{})
+ if !ok {
+ return errors.New("not an extended JSON ObjectID")
+ }
+ oid, ok := m["$oid"]
+ if !ok {
+ return errors.New("not an extended JSON ObjectID")
+ }
+ str, ok = oid.(string)
+ if !ok {
+ return errors.New("not an extended JSON ObjectID")
+ }
+ }
+
+ // An empty string is not a valid ObjectID, but we treat it as a special value that decodes as NilObjectID.
+ if len(str) == 0 {
+ copy(id[:], NilObjectID[:])
+ return nil
+ }
+
+ if len(str) != 24 {
+ return fmt.Errorf("cannot unmarshal into an ObjectID, the length must be 24 but it is %d", len(str))
+ }
+
+ _, err = hex.Decode(id[:], []byte(str))
+ if err != nil {
+ return err
+ }
+ }
+
+ return err
+}
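+
+// NOTE(editor): an illustrative sketch of the JSON inputs accepted by
+// UnmarshalJSON above; not part of the upstream source.
+//
+//	var id primitive.ObjectID
+//	_ = json.Unmarshal([]byte(`"5f4d7b9e2c1a4b3d5e6f7a8b"`), &id)          // hex string
+//	_ = json.Unmarshal([]byte(`{"$oid":"5f4d7b9e2c1a4b3d5e6f7a8b"}`), &id) // extended JSON
+//	_ = json.Unmarshal([]byte(`""`), &id)                                  // decodes as NilObjectID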
+
+func processUniqueBytes() [5]byte {
+ var b [5]byte
+ _, err := io.ReadFull(rand.Reader, b[:])
+ if err != nil {
+ panic(fmt.Errorf("cannot initialize objectid package with crypto.rand.Reader: %v", err))
+ }
+
+ return b
+}
+
+func readRandomUint32() uint32 {
+ var b [4]byte
+ _, err := io.ReadFull(rand.Reader, b[:])
+ if err != nil {
+ panic(fmt.Errorf("cannot initialize objectid package with crypto.rand.Reader: %v", err))
+ }
+
+ return (uint32(b[0]) << 0) | (uint32(b[1]) << 8) | (uint32(b[2]) << 16) | (uint32(b[3]) << 24)
+}
+
+func putUint24(b []byte, v uint32) {
+ b[0] = byte(v >> 16)
+ b[1] = byte(v >> 8)
+ b[2] = byte(v)
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/primitive/primitive.go b/vendor/go.mongodb.org/mongo-driver/bson/primitive/primitive.go
new file mode 100644
index 000000000..c72ccc1c4
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/primitive/primitive.go
@@ -0,0 +1,217 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+// Package primitive contains types similar to Go primitives for BSON types that do not have direct
+// Go primitive representations.
+package primitive // import "go.mongodb.org/mongo-driver/bson/primitive"
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "time"
+)
+
+// Binary represents a BSON binary value.
+type Binary struct {
+ Subtype byte
+ Data []byte
+}
+
+// Equal compares bp to bp2 and returns true if they are equal.
+func (bp Binary) Equal(bp2 Binary) bool {
+ if bp.Subtype != bp2.Subtype {
+ return false
+ }
+ return bytes.Equal(bp.Data, bp2.Data)
+}
+
+// IsZero returns true if bp is the empty Binary.
+func (bp Binary) IsZero() bool {
+ return bp.Subtype == 0 && len(bp.Data) == 0
+}
+
+// Undefined represents the BSON undefined value type.
+type Undefined struct{}
+
+// DateTime represents the BSON datetime value.
+type DateTime int64
+
+var _ json.Marshaler = DateTime(0)
+var _ json.Unmarshaler = (*DateTime)(nil)
+
+// MarshalJSON marshals the DateTime as a JSON time value.
+func (d DateTime) MarshalJSON() ([]byte, error) {
+ return json.Marshal(d.Time())
+}
+
+// UnmarshalJSON creates a primitive.DateTime from a JSON string.
+func (d *DateTime) UnmarshalJSON(data []byte) error {
+ // Ignore "null" to keep parity with the time.Time type and the standard library. Decoding "null" into a non-pointer
+ // DateTime field will leave the field unchanged. For pointer values, the encoding/json will set the pointer to nil
+ // and will not defer to the UnmarshalJSON hook.
+ if string(data) == "null" {
+ return nil
+ }
+
+ var tempTime time.Time
+ if err := json.Unmarshal(data, &tempTime); err != nil {
+ return err
+ }
+
+ *d = NewDateTimeFromTime(tempTime)
+ return nil
+}
+
+// Time returns the date as a time.Time.
+func (d DateTime) Time() time.Time {
+ return time.Unix(int64(d)/1000, int64(d)%1000*1000000)
+}
+
+// NewDateTimeFromTime creates a new DateTime from a Time.
+func NewDateTimeFromTime(t time.Time) DateTime {
+ return DateTime(t.Unix()*1e3 + int64(t.Nanosecond())/1e6)
+}
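+
+// NOTE(editor): a small sketch of the millisecond conversion performed by
+// NewDateTimeFromTime and Time; illustrative only.
+//
+//	now := time.Now()
+//	dt := primitive.NewDateTimeFromTime(now) // milliseconds since the Unix epoch
+//	back := dt.Time()                        // sub-millisecond precision is dropped
+//	_ = back.Equal(now.Truncate(time.Millisecond)) // true for times after the epoch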
+
+// Null represents the BSON null value.
+type Null struct{}
+
+// Regex represents a BSON regex value.
+type Regex struct {
+ Pattern string
+ Options string
+}
+
+func (rp Regex) String() string {
+ return fmt.Sprintf(`{"pattern": "%s", "options": "%s"}`, rp.Pattern, rp.Options)
+}
+
+// Equal compares rp to rp2 and returns true if they are equal.
+func (rp Regex) Equal(rp2 Regex) bool {
+ return rp.Pattern == rp2.Pattern && rp.Options == rp2.Options
+}
+
+// IsZero returns true if rp is the empty Regex.
+func (rp Regex) IsZero() bool {
+ return rp.Pattern == "" && rp.Options == ""
+}
+
+// DBPointer represents a BSON dbpointer value.
+type DBPointer struct {
+ DB string
+ Pointer ObjectID
+}
+
+func (d DBPointer) String() string {
+ return fmt.Sprintf(`{"db": "%s", "pointer": "%s"}`, d.DB, d.Pointer)
+}
+
+// Equal compares d to d2 and returns true if they are equal.
+func (d DBPointer) Equal(d2 DBPointer) bool {
+ return d == d2
+}
+
+// IsZero returns true if d is the empty DBPointer.
+func (d DBPointer) IsZero() bool {
+ return d.DB == "" && d.Pointer.IsZero()
+}
+
+// JavaScript represents a BSON JavaScript code value.
+type JavaScript string
+
+// Symbol represents a BSON symbol value.
+type Symbol string
+
+// CodeWithScope represents a BSON JavaScript code with scope value.
+type CodeWithScope struct {
+ Code JavaScript
+ Scope interface{}
+}
+
+func (cws CodeWithScope) String() string {
+ return fmt.Sprintf(`{"code": "%s", "scope": %v}`, cws.Code, cws.Scope)
+}
+
+// Timestamp represents a BSON timestamp value.
+type Timestamp struct {
+ T uint32
+ I uint32
+}
+
+// Equal compares tp to tp2 and returns true if they are equal.
+func (tp Timestamp) Equal(tp2 Timestamp) bool {
+ return tp.T == tp2.T && tp.I == tp2.I
+}
+
+// IsZero returns true if tp is the zero Timestamp.
+func (tp Timestamp) IsZero() bool {
+ return tp.T == 0 && tp.I == 0
+}
+
+// CompareTimestamp returns an integer comparing two Timestamps, where T is compared first, followed by I.
+// Returns 0 if tp = tp2, 1 if tp > tp2, -1 if tp < tp2.
+func CompareTimestamp(tp, tp2 Timestamp) int {
+ if tp.Equal(tp2) {
+ return 0
+ }
+
+ if tp.T > tp2.T {
+ return 1
+ }
+ if tp.T < tp2.T {
+ return -1
+ }
+ // Compare I values because T values are equal
+ if tp.I > tp2.I {
+ return 1
+ }
+ return -1
+}
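+
+// NOTE(editor): an illustrative comparison, not upstream code. T (seconds)
+// dominates and I (the ordinal within that second) breaks ties:
+//
+//	a := primitive.Timestamp{T: 100, I: 1}
+//	b := primitive.Timestamp{T: 100, I: 2}
+//	_ = primitive.CompareTimestamp(a, b) // -1: equal T, so the smaller I sorts first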
+
+// MinKey represents the BSON minkey value.
+type MinKey struct{}
+
+// MaxKey represents the BSON maxkey value.
+type MaxKey struct{}
+
+// D is an ordered representation of a BSON document. This type should be used when the order of the elements matters,
+// such as MongoDB command documents. If the order of the elements does not matter, an M should be used instead.
+//
+// Example usage:
+//
+// bson.D{{"foo", "bar"}, {"hello", "world"}, {"pi", 3.14159}}
+type D []E
+
+// Map creates a map from the elements of the D.
+func (d D) Map() M {
+ m := make(M, len(d))
+ for _, e := range d {
+ m[e.Key] = e.Value
+ }
+ return m
+}
+
+// E represents a BSON element for a D. It is usually used inside a D.
+type E struct {
+ Key string
+ Value interface{}
+}
+
+// M is an unordered representation of a BSON document. This type should be used when the order of the elements does not
+// matter. This type is handled as a regular map[string]interface{} when encoding and decoding. Elements will be
+// serialized in an undefined, random order. If the order of the elements matters, a D should be used instead.
+//
+// Example usage:
+//
+// bson.M{"foo": "bar", "hello": "world", "pi": 3.14159}
+type M map[string]interface{}
+
+// An A is an ordered representation of a BSON array.
+//
+// Example usage:
+//
+// bson.A{"bar", "world", 3.14159, bson.D{{"qux", 12345}}}
+type A []interface{}
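+
+// NOTE(editor): a short sketch contrasting D and M; illustrative only. D
+// preserves element order, which commands require; M does not:
+//
+//	ordered := primitive.D{{Key: "find", Value: "users"}, {Key: "limit", Value: int32(1)}}
+//	unordered := ordered.Map() // primitive.M{"find": "users", "limit": int32(1)}
+//	_ = unordered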
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/primitive_codecs.go b/vendor/go.mongodb.org/mongo-driver/bson/primitive_codecs.go
new file mode 100644
index 000000000..1cbe3884d
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/primitive_codecs.go
@@ -0,0 +1,92 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bson
+
+import (
+ "errors"
+ "reflect"
+
+ "go.mongodb.org/mongo-driver/bson/bsoncodec"
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+)
+
+var tRawValue = reflect.TypeOf(RawValue{})
+var tRaw = reflect.TypeOf(Raw(nil))
+
+var primitiveCodecs PrimitiveCodecs
+
+// PrimitiveCodecs is a namespace for all of the default bsoncodec.Codecs for the primitive types
+// defined in this package.
+type PrimitiveCodecs struct{}
+
+// RegisterPrimitiveCodecs will register the encode and decode methods attached to PrimitiveCodecs
+// with the provided RegistryBuilder. If rb is nil, this method panics.
+func (pc PrimitiveCodecs) RegisterPrimitiveCodecs(rb *bsoncodec.RegistryBuilder) {
+ if rb == nil {
+ panic(errors.New("argument to RegisterPrimitiveCodecs must not be nil"))
+ }
+
+ rb.
+ RegisterTypeEncoder(tRawValue, bsoncodec.ValueEncoderFunc(pc.RawValueEncodeValue)).
+ RegisterTypeEncoder(tRaw, bsoncodec.ValueEncoderFunc(pc.RawEncodeValue)).
+ RegisterTypeDecoder(tRawValue, bsoncodec.ValueDecoderFunc(pc.RawValueDecodeValue)).
+ RegisterTypeDecoder(tRaw, bsoncodec.ValueDecoderFunc(pc.RawDecodeValue))
+}
+
+// RawValueEncodeValue is the ValueEncoderFunc for RawValue.
+func (PrimitiveCodecs) RawValueEncodeValue(ec bsoncodec.EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tRawValue {
+ return bsoncodec.ValueEncoderError{Name: "RawValueEncodeValue", Types: []reflect.Type{tRawValue}, Received: val}
+ }
+
+ rawvalue := val.Interface().(RawValue)
+
+ return bsonrw.Copier{}.CopyValueFromBytes(vw, rawvalue.Type, rawvalue.Value)
+}
+
+// RawValueDecodeValue is the ValueDecoderFunc for RawValue.
+func (PrimitiveCodecs) RawValueDecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tRawValue {
+ return bsoncodec.ValueDecoderError{Name: "RawValueDecodeValue", Types: []reflect.Type{tRawValue}, Received: val}
+ }
+
+ t, value, err := bsonrw.Copier{}.CopyValueToBytes(vr)
+ if err != nil {
+ return err
+ }
+
+ val.Set(reflect.ValueOf(RawValue{Type: t, Value: value}))
+ return nil
+}
+
+// RawEncodeValue is the ValueEncoderFunc for Raw.
+func (PrimitiveCodecs) RawEncodeValue(ec bsoncodec.EncodeContext, vw bsonrw.ValueWriter, val reflect.Value) error {
+ if !val.IsValid() || val.Type() != tRaw {
+ return bsoncodec.ValueEncoderError{Name: "RawEncodeValue", Types: []reflect.Type{tRaw}, Received: val}
+ }
+
+ rdr := val.Interface().(Raw)
+
+ return bsonrw.Copier{}.CopyDocumentFromBytes(vw, rdr)
+}
+
+// RawDecodeValue is the ValueDecoderFunc for Raw.
+func (PrimitiveCodecs) RawDecodeValue(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val reflect.Value) error {
+ if !val.CanSet() || val.Type() != tRaw {
+ return bsoncodec.ValueDecoderError{Name: "RawDecodeValue", Types: []reflect.Type{tRaw}, Received: val}
+ }
+
+ if val.IsNil() {
+ val.Set(reflect.MakeSlice(val.Type(), 0, 0))
+ }
+
+ val.SetLen(0)
+
+ rdr, err := bsonrw.Copier{}.AppendDocumentBytes(val.Interface().(Raw), vr)
+ val.Set(reflect.ValueOf(rdr))
+ return err
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/raw.go b/vendor/go.mongodb.org/mongo-driver/bson/raw.go
new file mode 100644
index 000000000..efd705daa
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/raw.go
@@ -0,0 +1,85 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bson
+
+import (
+ "errors"
+ "io"
+
+ "go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
+)
+
+// ErrNilReader indicates that an operation was attempted on a nil bson.Reader.
+var ErrNilReader = errors.New("nil reader")
+
+// Raw is a wrapper around a byte slice. It will interpret the slice as a
+// BSON document. This type is a wrapper around a bsoncore.Document. Errors returned from the
+// methods on this type and associated types come from the bsoncore package.
+type Raw []byte
+
+// NewFromIOReader reads in a document from the given io.Reader and constructs a Raw from
+// it.
+func NewFromIOReader(r io.Reader) (Raw, error) {
+ doc, err := bsoncore.NewDocumentFromReader(r)
+ return Raw(doc), err
+}
+
+// Validate validates the document. This method only validates the first document in
+// the slice; to validate other documents, the slice must be resliced.
+func (r Raw) Validate() (err error) { return bsoncore.Document(r).Validate() }
+
+// Lookup searches the document, potentially recursively, for the given key. If
+// multiple keys are provided, this method will recurse down, as long as the top
+// and intermediate nodes are either documents or arrays. If an error occurs or
+// if the value doesn't exist, an empty RawValue is returned.
+func (r Raw) Lookup(key ...string) RawValue {
+ return convertFromCoreValue(bsoncore.Document(r).Lookup(key...))
+}
+
+// LookupErr searches the document and potentially subdocuments or arrays for the
+// provided key. Each key provided to this method represents a layer of depth.
+func (r Raw) LookupErr(key ...string) (RawValue, error) {
+ val, err := bsoncore.Document(r).LookupErr(key...)
+ return convertFromCoreValue(val), err
+}
+
+// Elements returns this document as a slice of elements. The returned slice will contain valid
+// elements. If the document is not valid, the elements up to the invalid point will be returned
+// along with an error.
+func (r Raw) Elements() ([]RawElement, error) {
+ elems, err := bsoncore.Document(r).Elements()
+ relems := make([]RawElement, 0, len(elems))
+ for _, elem := range elems {
+ relems = append(relems, RawElement(elem))
+ }
+ return relems, err
+}
+
+// Values returns this document as a slice of values. The returned slice will contain valid values.
+// If the document is not valid, the values up to the invalid point will be returned along with an
+// error.
+func (r Raw) Values() ([]RawValue, error) {
+ vals, err := bsoncore.Document(r).Values()
+ rvals := make([]RawValue, 0, len(vals))
+ for _, val := range vals {
+ rvals = append(rvals, convertFromCoreValue(val))
+ }
+ return rvals, err
+}
+
+// Index searches for and retrieves the element at the given index. This method will panic if
+// the document is invalid or if the index is out of bounds.
+func (r Raw) Index(index uint) RawElement { return RawElement(bsoncore.Document(r).Index(index)) }
+
+// IndexErr searches for and retrieves the element at the given index.
+func (r Raw) IndexErr(index uint) (RawElement, error) {
+ elem, err := bsoncore.Document(r).IndexErr(index)
+ return RawElement(elem), err
+}
+
+// String implements the fmt.Stringer interface.
+func (r Raw) String() string { return bsoncore.Document(r).String() }
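+
+// NOTE(editor): an illustrative sketch of inspecting a Raw document without
+// fully decoding it; Marshal and D are defined elsewhere in this package.
+//
+//	b, _ := Marshal(D{{Key: "user", Value: D{{Key: "name", Value: "ana"}}}})
+//	name, ok := Raw(b).Lookup("user", "name").StringValueOK() // "ana", true
+//	_, _ = name, ok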
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/raw_element.go b/vendor/go.mongodb.org/mongo-driver/bson/raw_element.go
new file mode 100644
index 000000000..006f503a3
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/raw_element.go
@@ -0,0 +1,51 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bson
+
+import (
+ "go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
+)
+
+// RawElement represents a BSON element in byte form. This type provides a simple way to
+// transform a slice of bytes into a BSON element and extract information from it.
+//
+// RawElement is a thin wrapper around a bsoncore.Element.
+type RawElement []byte
+
+// Key returns the key for this element. If the element is not valid, this method returns an empty
+// string. If you need to know whether the element is valid, use KeyErr.
+func (re RawElement) Key() string { return bsoncore.Element(re).Key() }
+
+// KeyErr returns the key for this element, returning an error if the element is not valid.
+func (re RawElement) KeyErr() (string, error) { return bsoncore.Element(re).KeyErr() }
+
+// Value returns the value of this element. If the element is not valid, this method returns an
+// empty Value. If you need to know whether the element is valid, use ValueErr.
+func (re RawElement) Value() RawValue { return convertFromCoreValue(bsoncore.Element(re).Value()) }
+
+// ValueErr returns the value for this element, returning an error if the element is not valid.
+func (re RawElement) ValueErr() (RawValue, error) {
+ val, err := bsoncore.Element(re).ValueErr()
+ return convertFromCoreValue(val), err
+}
+
+// Validate ensures re is a valid BSON element.
+func (re RawElement) Validate() error { return bsoncore.Element(re).Validate() }
+
+// String implements the fmt.Stringer interface. The output will be in extended JSON format.
+func (re RawElement) String() string {
+ doc := bsoncore.BuildDocument(nil, re)
+ j, err := MarshalExtJSON(Raw(doc), true, false)
+ if err != nil {
+ return "<malformed>"
+ }
+ return string(j)
+}
+
+// DebugString outputs a human readable version of RawElement. It will attempt to stringify the
+// valid components of the element even if the entire element is not valid.
+func (re RawElement) DebugString() string { return bsoncore.Element(re).DebugString() }
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/raw_value.go b/vendor/go.mongodb.org/mongo-driver/bson/raw_value.go
new file mode 100644
index 000000000..75297f30f
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/raw_value.go
@@ -0,0 +1,309 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bson
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "reflect"
+ "time"
+
+ "go.mongodb.org/mongo-driver/bson/bsoncodec"
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+ "go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
+)
+
+// ErrNilContext is returned when the provided DecodeContext is nil.
+var ErrNilContext = errors.New("DecodeContext cannot be nil")
+
+// ErrNilRegistry is returned when the provided registry is nil.
+var ErrNilRegistry = errors.New("Registry cannot be nil")
+
+// RawValue represents a BSON value in byte form. It can be used to hold unprocessed BSON or to
+// defer processing of BSON. Type is the BSON type of the value and Value are the raw bytes that
+// represent the element.
+//
+// This type wraps bsoncore.Value for most of its functionality.
+type RawValue struct {
+ Type bsontype.Type
+ Value []byte
+
+ r *bsoncodec.Registry
+}
+
+// Unmarshal deserializes BSON into the provided val. If RawValue cannot be unmarshaled into val, an
+// error is returned. If the RawValue was created from partial BSON processing, this method will use
+// the registry that created it; otherwise it will use the default registry. Users wishing to specify
+// the registry to use should use UnmarshalWithRegistry.
+func (rv RawValue) Unmarshal(val interface{}) error {
+ reg := rv.r
+ if reg == nil {
+ reg = DefaultRegistry
+ }
+ return rv.UnmarshalWithRegistry(reg, val)
+}
+
+// Equal compares rv and rv2 and returns true if they are equal.
+func (rv RawValue) Equal(rv2 RawValue) bool {
+ if rv.Type != rv2.Type {
+ return false
+ }
+
+ if !bytes.Equal(rv.Value, rv2.Value) {
+ return false
+ }
+
+ return true
+}
+
+// UnmarshalWithRegistry performs the same unmarshalling as Unmarshal but uses the provided registry
+// instead of the one attached or the default registry.
+func (rv RawValue) UnmarshalWithRegistry(r *bsoncodec.Registry, val interface{}) error {
+ if r == nil {
+ return ErrNilRegistry
+ }
+
+ vr := bsonrw.NewBSONValueReader(rv.Type, rv.Value)
+ rval := reflect.ValueOf(val)
+ if rval.Kind() != reflect.Ptr {
+ return fmt.Errorf("argument to Unmarshal* must be a pointer to a type, but got %v", rval)
+ }
+ rval = rval.Elem()
+ dec, err := r.LookupDecoder(rval.Type())
+ if err != nil {
+ return err
+ }
+ return dec.DecodeValue(bsoncodec.DecodeContext{Registry: r}, vr, rval)
+}
+
+// UnmarshalWithContext performs the same unmarshalling as Unmarshal but uses the provided DecodeContext
+// instead of the one attached or the default registry.
+func (rv RawValue) UnmarshalWithContext(dc *bsoncodec.DecodeContext, val interface{}) error {
+ if dc == nil {
+ return ErrNilContext
+ }
+
+ vr := bsonrw.NewBSONValueReader(rv.Type, rv.Value)
+ rval := reflect.ValueOf(val)
+ if rval.Kind() != reflect.Ptr {
+ return fmt.Errorf("argument to Unmarshal* must be a pointer to a type, but got %v", rval)
+ }
+ rval = rval.Elem()
+ dec, err := dc.LookupDecoder(rval.Type())
+ if err != nil {
+ return err
+ }
+ return dec.DecodeValue(*dc, vr, rval)
+}
+
+func convertFromCoreValue(v bsoncore.Value) RawValue { return RawValue{Type: v.Type, Value: v.Data} }
+func convertToCoreValue(v RawValue) bsoncore.Value {
+ return bsoncore.Value{Type: v.Type, Data: v.Value}
+}
+
+// Validate ensures the value is a valid BSON value.
+func (rv RawValue) Validate() error { return convertToCoreValue(rv).Validate() }
+
+// IsNumber returns true if the type of v is a numeric BSON type.
+func (rv RawValue) IsNumber() bool { return convertToCoreValue(rv).IsNumber() }
+
+// String implements the fmt.Stringer interface. This method will return values in extended JSON
+// format. If the value is not valid, this method returns an empty string.
+func (rv RawValue) String() string { return convertToCoreValue(rv).String() }
+
+// DebugString outputs a human readable version of the RawValue. It will attempt to stringify the
+// valid components of the value even if the entire value is not valid.
+func (rv RawValue) DebugString() string { return convertToCoreValue(rv).DebugString() }
+
+// Double returns the float64 value for this element.
+// It panics if e's BSON type is not bsontype.Double.
+func (rv RawValue) Double() float64 { return convertToCoreValue(rv).Double() }
+
+// DoubleOK is the same as Double, but returns a boolean instead of panicking.
+func (rv RawValue) DoubleOK() (float64, bool) { return convertToCoreValue(rv).DoubleOK() }
+
+// StringValue returns the string value for this element.
+// It panics if e's BSON type is not bsontype.String.
+//
+// NOTE: This method is called StringValue to avoid a collision with the String method which
+// implements the fmt.Stringer interface.
+func (rv RawValue) StringValue() string { return convertToCoreValue(rv).StringValue() }
+
+// StringValueOK is the same as StringValue, but returns a boolean instead of
+// panicking.
+func (rv RawValue) StringValueOK() (string, bool) { return convertToCoreValue(rv).StringValueOK() }
+
+// Document returns the BSON document the Value represents as a Document. It panics if the
+// value is a BSON type other than document.
+func (rv RawValue) Document() Raw { return Raw(convertToCoreValue(rv).Document()) }
+
+// DocumentOK is the same as Document, except it returns a boolean
+// instead of panicking.
+func (rv RawValue) DocumentOK() (Raw, bool) {
+ doc, ok := convertToCoreValue(rv).DocumentOK()
+ return Raw(doc), ok
+}
+
+// Array returns the BSON array the Value represents as an Array. It panics if the
+// value is a BSON type other than array.
+func (rv RawValue) Array() Raw { return Raw(convertToCoreValue(rv).Array()) }
+
+// ArrayOK is the same as Array, except it returns a boolean instead
+// of panicking.
+func (rv RawValue) ArrayOK() (Raw, bool) {
+ doc, ok := convertToCoreValue(rv).ArrayOK()
+ return Raw(doc), ok
+}
+
+// Binary returns the BSON binary value the Value represents. It panics if the value is a BSON type
+// other than binary.
+func (rv RawValue) Binary() (subtype byte, data []byte) { return convertToCoreValue(rv).Binary() }
+
+// BinaryOK is the same as Binary, except it returns a boolean instead of
+// panicking.
+func (rv RawValue) BinaryOK() (subtype byte, data []byte, ok bool) {
+ return convertToCoreValue(rv).BinaryOK()
+}
+
+// ObjectID returns the BSON objectid value the Value represents. It panics if the value is a BSON
+// type other than objectid.
+func (rv RawValue) ObjectID() primitive.ObjectID { return convertToCoreValue(rv).ObjectID() }
+
+// ObjectIDOK is the same as ObjectID, except it returns a boolean instead of
+// panicking.
+func (rv RawValue) ObjectIDOK() (primitive.ObjectID, bool) {
+ return convertToCoreValue(rv).ObjectIDOK()
+}
+
+// Boolean returns the boolean value the Value represents. It panics if the
+// value is a BSON type other than boolean.
+func (rv RawValue) Boolean() bool { return convertToCoreValue(rv).Boolean() }
+
+// BooleanOK is the same as Boolean, except it returns a boolean instead of
+// panicking.
+func (rv RawValue) BooleanOK() (bool, bool) { return convertToCoreValue(rv).BooleanOK() }
+
+// DateTime returns the BSON datetime value the Value represents as a
+// unix timestamp. It panics if the value is a BSON type other than datetime.
+func (rv RawValue) DateTime() int64 { return convertToCoreValue(rv).DateTime() }
+
+// DateTimeOK is the same as DateTime, except it returns a boolean instead of
+// panicking.
+func (rv RawValue) DateTimeOK() (int64, bool) { return convertToCoreValue(rv).DateTimeOK() }
+
+// Time returns the BSON datetime value the Value represents. It panics if the value is a BSON
+// type other than datetime.
+func (rv RawValue) Time() time.Time { return convertToCoreValue(rv).Time() }
+
+// TimeOK is the same as Time, except it returns a boolean instead of
+// panicking.
+func (rv RawValue) TimeOK() (time.Time, bool) { return convertToCoreValue(rv).TimeOK() }
+
+// Regex returns the BSON regex value the Value represents. It panics if the value is a BSON
+// type other than regex.
+func (rv RawValue) Regex() (pattern, options string) { return convertToCoreValue(rv).Regex() }
+
+// RegexOK is the same as Regex, except it returns a boolean instead of
+// panicking.
+func (rv RawValue) RegexOK() (pattern, options string, ok bool) {
+ return convertToCoreValue(rv).RegexOK()
+}
+
+// DBPointer returns the BSON dbpointer value the Value represents. It panics if the value is a BSON
+// type other than DBPointer.
+func (rv RawValue) DBPointer() (string, primitive.ObjectID) {
+ return convertToCoreValue(rv).DBPointer()
+}
+
+// DBPointerOK is the same as DBPointer, except that it returns a boolean
+// instead of panicking.
+func (rv RawValue) DBPointerOK() (string, primitive.ObjectID, bool) {
+ return convertToCoreValue(rv).DBPointerOK()
+}
+
+// JavaScript returns the BSON JavaScript code value the Value represents. It panics if the value is
+// a BSON type other than JavaScript code.
+func (rv RawValue) JavaScript() string { return convertToCoreValue(rv).JavaScript() }
+
+// JavaScriptOK is the same as JavaScript, except that it returns a boolean
+// instead of panicking.
+func (rv RawValue) JavaScriptOK() (string, bool) { return convertToCoreValue(rv).JavaScriptOK() }
+
+// Symbol returns the BSON symbol value the Value represents. It panics if the value is a BSON
+// type other than symbol.
+func (rv RawValue) Symbol() string { return convertToCoreValue(rv).Symbol() }
+
+// SymbolOK is the same as Symbol, except that it returns a boolean
+// instead of panicking.
+func (rv RawValue) SymbolOK() (string, bool) { return convertToCoreValue(rv).SymbolOK() }
+
+// CodeWithScope returns the BSON JavaScript code with scope the Value represents.
+// It panics if the value is a BSON type other than JavaScript code with scope.
+func (rv RawValue) CodeWithScope() (string, Raw) {
+ code, scope := convertToCoreValue(rv).CodeWithScope()
+ return code, Raw(scope)
+}
+
+// CodeWithScopeOK is the same as CodeWithScope, except that it returns a boolean instead of
+// panicking.
+func (rv RawValue) CodeWithScopeOK() (string, Raw, bool) {
+ code, scope, ok := convertToCoreValue(rv).CodeWithScopeOK()
+ return code, Raw(scope), ok
+}
+
+// Int32 returns the int32 the Value represents. It panics if the value is a BSON type other than
+// int32.
+func (rv RawValue) Int32() int32 { return convertToCoreValue(rv).Int32() }
+
+// Int32OK is the same as Int32, except that it returns a boolean instead of
+// panicking.
+func (rv RawValue) Int32OK() (int32, bool) { return convertToCoreValue(rv).Int32OK() }
+
+// AsInt32 returns a BSON number as an int32. If the BSON type is not a numeric one, this method
+// will panic.
+func (rv RawValue) AsInt32() int32 { return convertToCoreValue(rv).AsInt32() }
+
+// AsInt32OK is the same as AsInt32, except that it returns a boolean instead of
+// panicking.
+func (rv RawValue) AsInt32OK() (int32, bool) { return convertToCoreValue(rv).AsInt32OK() }
+
+// Timestamp returns the BSON timestamp value the Value represents. It panics if the value is a
+// BSON type other than timestamp.
+func (rv RawValue) Timestamp() (t, i uint32) { return convertToCoreValue(rv).Timestamp() }
+
+// TimestampOK is the same as Timestamp, except that it returns a boolean
+// instead of panicking.
+func (rv RawValue) TimestampOK() (t, i uint32, ok bool) { return convertToCoreValue(rv).TimestampOK() }
+
+// Int64 returns the int64 the Value represents. It panics if the value is a BSON type other than
+// int64.
+func (rv RawValue) Int64() int64 { return convertToCoreValue(rv).Int64() }
+
+// Int64OK is the same as Int64, except that it returns a boolean instead of
+// panicking.
+func (rv RawValue) Int64OK() (int64, bool) { return convertToCoreValue(rv).Int64OK() }
+
+// AsInt64 returns a BSON number as an int64. If the BSON type is not a numeric one, this method
+// will panic.
+func (rv RawValue) AsInt64() int64 { return convertToCoreValue(rv).AsInt64() }
+
+// AsInt64OK is the same as AsInt64, except that it returns a boolean instead of
+// panicking.
+func (rv RawValue) AsInt64OK() (int64, bool) { return convertToCoreValue(rv).AsInt64OK() }
+
+// Decimal128 returns the decimal the Value represents. It panics if the value is a BSON type other than
+// decimal.
+func (rv RawValue) Decimal128() primitive.Decimal128 { return convertToCoreValue(rv).Decimal128() }
+
+// Decimal128OK is the same as Decimal128, except that it returns a boolean
+// instead of panicking.
+func (rv RawValue) Decimal128OK() (primitive.Decimal128, bool) {
+ return convertToCoreValue(rv).Decimal128OK()
+}
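+
+// NOTE(editor): an illustrative sketch of the accessor pairs above; the bare
+// accessors panic on a type mismatch, while the *OK variants report it. The
+// rv value is assumed to come from a Lookup on some marshalled document.
+//
+//	if n, ok := rv.Int32OK(); ok {
+//		_ = n // rv held an int32
+//	} else if n64, ok := rv.AsInt64OK(); ok {
+//		_ = n64 // any numeric BSON type, widened to int64
+//	}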
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/registry.go b/vendor/go.mongodb.org/mongo-driver/bson/registry.go
new file mode 100644
index 000000000..16d7573e7
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/registry.go
@@ -0,0 +1,24 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bson
+
+import "go.mongodb.org/mongo-driver/bson/bsoncodec"
+
+// DefaultRegistry is the default bsoncodec.Registry. It contains the default codecs and the
+// primitive codecs.
+var DefaultRegistry = NewRegistryBuilder().Build()
+
+// NewRegistryBuilder creates a new RegistryBuilder configured with the default encoders and
+// decoders from the bsoncodec.DefaultValueEncoders and bsoncodec.DefaultValueDecoders types and the
+// PrimitiveCodecs type in this package.
+func NewRegistryBuilder() *bsoncodec.RegistryBuilder {
+ rb := bsoncodec.NewRegistryBuilder()
+ bsoncodec.DefaultValueEncoders{}.RegisterDefaultEncoders(rb)
+ bsoncodec.DefaultValueDecoders{}.RegisterDefaultDecoders(rb)
+ primitiveCodecs.RegisterPrimitiveCodecs(rb)
+ return rb
+}
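+
+// NOTE(editor): an illustrative sketch of building a registry; the custom
+// codec registration line is a hypothetical placeholder.
+//
+//	rb := NewRegistryBuilder() // default codecs plus the primitive codecs
+//	// rb.RegisterTypeEncoder(...) // optionally register custom codecs here
+//	reg := rb.Build()
+//	err := UnmarshalWithRegistry(reg, docBytes, &out) // docBytes, out: caller-provided
+//	_ = err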
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/types.go b/vendor/go.mongodb.org/mongo-driver/bson/types.go
new file mode 100644
index 000000000..13a1c35cf
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/types.go
@@ -0,0 +1,36 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bson
+
+import (
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+)
+
+// These constants uniquely refer to each BSON type.
+const (
+ TypeDouble = bsontype.Double
+ TypeString = bsontype.String
+ TypeEmbeddedDocument = bsontype.EmbeddedDocument
+ TypeArray = bsontype.Array
+ TypeBinary = bsontype.Binary
+ TypeUndefined = bsontype.Undefined
+ TypeObjectID = bsontype.ObjectID
+ TypeBoolean = bsontype.Boolean
+ TypeDateTime = bsontype.DateTime
+ TypeNull = bsontype.Null
+ TypeRegex = bsontype.Regex
+ TypeDBPointer = bsontype.DBPointer
+ TypeJavaScript = bsontype.JavaScript
+ TypeSymbol = bsontype.Symbol
+ TypeCodeWithScope = bsontype.CodeWithScope
+ TypeInt32 = bsontype.Int32
+ TypeTimestamp = bsontype.Timestamp
+ TypeInt64 = bsontype.Int64
+ TypeDecimal128 = bsontype.Decimal128
+ TypeMinKey = bsontype.MinKey
+ TypeMaxKey = bsontype.MaxKey
+)
diff --git a/vendor/go.mongodb.org/mongo-driver/bson/unmarshal.go b/vendor/go.mongodb.org/mongo-driver/bson/unmarshal.go
new file mode 100644
index 000000000..f936ba183
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/bson/unmarshal.go
@@ -0,0 +1,101 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bson
+
+import (
+ "bytes"
+
+ "go.mongodb.org/mongo-driver/bson/bsoncodec"
+ "go.mongodb.org/mongo-driver/bson/bsonrw"
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+)
+
+// Unmarshaler is an interface implemented by types that can unmarshal a BSON
+// document representation of themselves. The BSON bytes can be assumed to be
+// valid. UnmarshalBSON must copy the BSON bytes if it wishes to retain the data
+// after returning.
+type Unmarshaler interface {
+ UnmarshalBSON([]byte) error
+}
+
+// ValueUnmarshaler is an interface implemented by types that can unmarshal a
+// BSON value representation of themselves. The BSON bytes and type can be
+// assumed to be valid. UnmarshalBSONValue must copy the BSON value bytes if it
+// wishes to retain the data after returning.
+type ValueUnmarshaler interface {
+ UnmarshalBSONValue(bsontype.Type, []byte) error
+}
+
+// Unmarshal parses the BSON-encoded data and stores the result in the value
+// pointed to by val. If val is nil or not a pointer, Unmarshal returns
+// InvalidUnmarshalError.
+func Unmarshal(data []byte, val interface{}) error {
+ return UnmarshalWithRegistry(DefaultRegistry, data, val)
+}
+
+// UnmarshalWithRegistry parses the BSON-encoded data using Registry r and
+// stores the result in the value pointed to by val. If val is nil or not
+// a pointer, UnmarshalWithRegistry returns InvalidUnmarshalError.
+func UnmarshalWithRegistry(r *bsoncodec.Registry, data []byte, val interface{}) error {
+ vr := bsonrw.NewBSONDocumentReader(data)
+ return unmarshalFromReader(bsoncodec.DecodeContext{Registry: r}, vr, val)
+}
+
+// UnmarshalWithContext parses the BSON-encoded data using DecodeContext dc and
+// stores the result in the value pointed to by val. If val is nil or not
+// a pointer, UnmarshalWithContext returns InvalidUnmarshalError.
+func UnmarshalWithContext(dc bsoncodec.DecodeContext, data []byte, val interface{}) error {
+ vr := bsonrw.NewBSONDocumentReader(data)
+ return unmarshalFromReader(dc, vr, val)
+}
+
+// UnmarshalExtJSON parses the extended JSON-encoded data and stores the result
+// in the value pointed to by val. If val is nil or not a pointer, Unmarshal
+// returns InvalidUnmarshalError.
+func UnmarshalExtJSON(data []byte, canonical bool, val interface{}) error {
+ return UnmarshalExtJSONWithRegistry(DefaultRegistry, data, canonical, val)
+}
+
+// UnmarshalExtJSONWithRegistry parses the extended JSON-encoded data using
+// Registry r and stores the result in the value pointed to by val. If val is
+// nil or not a pointer, UnmarshalExtJSONWithRegistry returns InvalidUnmarshalError.
+func UnmarshalExtJSONWithRegistry(r *bsoncodec.Registry, data []byte, canonical bool, val interface{}) error {
+ ejvr, err := bsonrw.NewExtJSONValueReader(bytes.NewReader(data), canonical)
+ if err != nil {
+ return err
+ }
+
+ return unmarshalFromReader(bsoncodec.DecodeContext{Registry: r}, ejvr, val)
+}
+
+// UnmarshalExtJSONWithContext parses the extended JSON-encoded data using
+// DecodeContext dc and stores the result in the value pointed to by val. If val is
+// nil or not a pointer, UnmarshalExtJSONWithContext returns InvalidUnmarshalError.
+func UnmarshalExtJSONWithContext(dc bsoncodec.DecodeContext, data []byte, canonical bool, val interface{}) error {
+ ejvr, err := bsonrw.NewExtJSONValueReader(bytes.NewReader(data), canonical)
+ if err != nil {
+ return err
+ }
+
+ return unmarshalFromReader(dc, ejvr, val)
+}
+
+func unmarshalFromReader(dc bsoncodec.DecodeContext, vr bsonrw.ValueReader, val interface{}) error {
+ dec := decPool.Get().(*Decoder)
+ defer decPool.Put(dec)
+
+ err := dec.Reset(vr)
+ if err != nil {
+ return err
+ }
+ err = dec.SetContext(dc)
+ if err != nil {
+ return err
+ }
+
+ return dec.Decode(val)
+}
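+
+// NOTE(editor): a minimal decoding sketch; illustrative only. Struct fields
+// map to BSON keys via `bson` tags handled by the default registry.
+//
+//	type user struct {
+//		Name string `bson:"name"`
+//		Age  int32  `bson:"age"`
+//	}
+//	var u user
+//	if err := Unmarshal(docBytes, &u); err != nil { // docBytes: a marshalled document
+//		// a nil or non-pointer target would instead yield InvalidUnmarshalError
+//	}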
diff --git a/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/array.go b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/array.go
new file mode 100644
index 000000000..8ea60ba3c
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/array.go
@@ -0,0 +1,164 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncore
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "strconv"
+)
+
+// NewArrayLengthError creates and returns an error for when the length of an array exceeds the
+// bytes available.
+func NewArrayLengthError(length, rem int) error {
+ return lengthError("array", length, rem)
+}
+
+// Array is a raw bytes representation of a BSON array.
+type Array []byte
+
+// NewArrayFromReader reads an array from r. This function will only validate that the length is
+// correct and that the array ends with a null byte.
+func NewArrayFromReader(r io.Reader) (Array, error) {
+ return newBufferFromReader(r)
+}
+
+// Index searches for and retrieves the value at the given index. This method will panic if
+// the array is invalid or if the index is out of bounds.
+func (a Array) Index(index uint) Value {
+ value, err := a.IndexErr(index)
+ if err != nil {
+ panic(err)
+ }
+ return value
+}
+
+// IndexErr searches for and retrieves the value at the given index.
+func (a Array) IndexErr(index uint) (Value, error) {
+ elem, err := indexErr(a, index)
+ if err != nil {
+ return Value{}, err
+ }
+ return elem.Value(), err
+}
+
+// DebugString outputs a human readable version of Array. It will attempt to stringify the
+// valid components of the array even if the entire array is not valid.
+func (a Array) DebugString() string {
+ if len(a) < 5 {
+ return "<malformed>"
+ }
+ var buf bytes.Buffer
+ buf.WriteString("Array")
+ length, rem, _ := ReadLength(a) // We know we have enough bytes to read the length
+ buf.WriteByte('(')
+ buf.WriteString(strconv.Itoa(int(length)))
+ length -= 4
+ buf.WriteString(")[")
+ var elem Element
+ var ok bool
+ for length > 1 {
+ elem, rem, ok = ReadElement(rem)
+ length -= int32(len(elem))
+ if !ok {
+ buf.WriteString(fmt.Sprintf("<malformed (%d)>", length))
+ break
+ }
+ fmt.Fprintf(&buf, "%s", elem.Value().DebugString())
+ if length != 1 {
+ buf.WriteByte(',')
+ }
+ }
+ buf.WriteByte(']')
+
+ return buf.String()
+}
+
+// String outputs an ExtendedJSON version of Array. If the Array is not valid, this method
+// returns an empty string.
+func (a Array) String() string {
+ if len(a) < 5 {
+ return ""
+ }
+ var buf bytes.Buffer
+ buf.WriteByte('[')
+
+ length, rem, _ := ReadLength(a) // We know we have enough bytes to read the length
+
+ length -= 4
+
+ var elem Element
+ var ok bool
+ for length > 1 {
+ elem, rem, ok = ReadElement(rem)
+ length -= int32(len(elem))
+ if !ok {
+ return ""
+ }
+ fmt.Fprintf(&buf, "%s", elem.Value().String())
+ if length > 1 {
+ buf.WriteByte(',')
+ }
+ }
+ if length != 1 { // Missing final null byte or inaccurate length
+ return ""
+ }
+
+ buf.WriteByte(']')
+ return buf.String()
+}
+
+// Values returns this array as a slice of values. The returned slice will contain valid values.
+// If the array is not valid, the values up to the invalid point will be returned along with an
+// error.
+func (a Array) Values() ([]Value, error) {
+ return values(a)
+}
+
+// Validate validates the array and ensures the elements contained within are valid.
+func (a Array) Validate() error {
+ length, rem, ok := ReadLength(a)
+ if !ok {
+ return NewInsufficientBytesError(a, rem)
+ }
+ if int(length) > len(a) {
+ return NewArrayLengthError(int(length), len(a))
+ }
+ if a[length-1] != 0x00 {
+ return ErrMissingNull
+ }
+
+ length -= 4
+ var elem Element
+
+ var keyNum int64
+ for length > 1 {
+ elem, rem, ok = ReadElement(rem)
+ length -= int32(len(elem))
+ if !ok {
+ return NewInsufficientBytesError(a, rem)
+ }
+
+ // validate element
+ err := elem.Validate()
+ if err != nil {
+ return err
+ }
+
+ // validate keys increase numerically
+ if fmt.Sprint(keyNum) != elem.Key() {
+ return fmt.Errorf("array key %q is out of order or invalid", elem.Key())
+ }
+ keyNum++
+ }
+
+ if len(rem) < 1 || rem[0] != 0x00 {
+ return ErrMissingNull
+ }
+ return nil
+}
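+
+// NOTE(editor): an illustrative sketch of the key rule Validate enforces: a
+// BSON array is stored as a document whose keys are "0", "1", ... in order.
+// ArrayBuilder (defined in this package) maintains that invariant:
+//
+//	arr := NewArrayBuilder().AppendString("x").AppendInt32(7).Build()
+//	err := arr.Validate() // nil: keys "0" and "1" increase numerically
+//	_ = err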
diff --git a/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/bson_arraybuilder.go b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/bson_arraybuilder.go
new file mode 100644
index 000000000..7e6937d89
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/bson_arraybuilder.go
@@ -0,0 +1,201 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncore
+
+import (
+ "strconv"
+
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+// ArrayBuilder builds a bson array
+type ArrayBuilder struct {
+ arr []byte
+ indexes []int32
+ keys []int
+}
+
+// NewArrayBuilder creates a new ArrayBuilder
+func NewArrayBuilder() *ArrayBuilder {
+ return (&ArrayBuilder{}).startArray()
+}
+
+// startArray reserves the array's length and sets the index to where the length begins
+func (a *ArrayBuilder) startArray() *ArrayBuilder {
+ var index int32
+ index, a.arr = AppendArrayStart(a.arr)
+ a.indexes = append(a.indexes, index)
+ a.keys = append(a.keys, 0)
+ return a
+}
+
+// Build updates the length of the array, pops the index that marks the beginning of the array's
+// length bytes, and returns the array (BSON bytes).
+func (a *ArrayBuilder) Build() Array {
+ lastIndex := len(a.indexes) - 1
+ lastKey := len(a.keys) - 1
+ a.arr, _ = AppendArrayEnd(a.arr, a.indexes[lastIndex])
+ a.indexes = a.indexes[:lastIndex]
+ a.keys = a.keys[:lastKey]
+ return a.arr
+}
+
+// incrementKey increments the current array key and returns the key to be used by the a.Append* functions.
+func (a *ArrayBuilder) incrementKey() string {
+ idx := len(a.keys) - 1
+ key := strconv.Itoa(a.keys[idx])
+ a.keys[idx]++
+ return key
+}
+
+// AppendInt32 will append i32 to ArrayBuilder.arr
+func (a *ArrayBuilder) AppendInt32(i32 int32) *ArrayBuilder {
+ a.arr = AppendInt32Element(a.arr, a.incrementKey(), i32)
+ return a
+}
+
+// AppendDocument will append doc to ArrayBuilder.arr
+func (a *ArrayBuilder) AppendDocument(doc []byte) *ArrayBuilder {
+ a.arr = AppendDocumentElement(a.arr, a.incrementKey(), doc)
+ return a
+}
+
+// AppendArray will append arr to ArrayBuilder.arr
+func (a *ArrayBuilder) AppendArray(arr []byte) *ArrayBuilder {
+ a.arr = AppendArrayElement(a.arr, a.incrementKey(), arr)
+ return a
+}
+
+// AppendDouble will append f to ArrayBuilder.arr
+func (a *ArrayBuilder) AppendDouble(f float64) *ArrayBuilder {
+ a.arr = AppendDoubleElement(a.arr, a.incrementKey(), f)
+ return a
+}
+
+// AppendString will append str to ArrayBuilder.arr
+func (a *ArrayBuilder) AppendString(str string) *ArrayBuilder {
+ a.arr = AppendStringElement(a.arr, a.incrementKey(), str)
+ return a
+}
+
+// AppendObjectID will append oid to ArrayBuilder.arr
+func (a *ArrayBuilder) AppendObjectID(oid primitive.ObjectID) *ArrayBuilder {
+ a.arr = AppendObjectIDElement(a.arr, a.incrementKey(), oid)
+ return a
+}
+
+// AppendBinary will append a BSON binary element using subtype and
+// b to a.arr
+func (a *ArrayBuilder) AppendBinary(subtype byte, b []byte) *ArrayBuilder {
+ a.arr = AppendBinaryElement(a.arr, a.incrementKey(), subtype, b)
+ return a
+}
+
+// AppendUndefined will append a BSON undefined element to a.arr
+func (a *ArrayBuilder) AppendUndefined() *ArrayBuilder {
+ a.arr = AppendUndefinedElement(a.arr, a.incrementKey())
+ return a
+}
+
+// AppendBoolean will append a boolean element using b to a.arr
+func (a *ArrayBuilder) AppendBoolean(b bool) *ArrayBuilder {
+ a.arr = AppendBooleanElement(a.arr, a.incrementKey(), b)
+ return a
+}
+
+// AppendDateTime will append datetime element dt to a.arr
+func (a *ArrayBuilder) AppendDateTime(dt int64) *ArrayBuilder {
+ a.arr = AppendDateTimeElement(a.arr, a.incrementKey(), dt)
+ return a
+}
+
+// AppendNull will append a null element to a.arr
+func (a *ArrayBuilder) AppendNull() *ArrayBuilder {
+ a.arr = AppendNullElement(a.arr, a.incrementKey())
+ return a
+}
+
+// AppendRegex will append pattern and options to a.arr
+func (a *ArrayBuilder) AppendRegex(pattern, options string) *ArrayBuilder {
+ a.arr = AppendRegexElement(a.arr, a.incrementKey(), pattern, options)
+ return a
+}
+
+// AppendDBPointer will append ns and oid to a.arr
+func (a *ArrayBuilder) AppendDBPointer(ns string, oid primitive.ObjectID) *ArrayBuilder {
+ a.arr = AppendDBPointerElement(a.arr, a.incrementKey(), ns, oid)
+ return a
+}
+
+// AppendJavaScript will append js to a.arr
+func (a *ArrayBuilder) AppendJavaScript(js string) *ArrayBuilder {
+ a.arr = AppendJavaScriptElement(a.arr, a.incrementKey(), js)
+ return a
+}
+
+// AppendSymbol will append symbol to a.arr
+func (a *ArrayBuilder) AppendSymbol(symbol string) *ArrayBuilder {
+ a.arr = AppendSymbolElement(a.arr, a.incrementKey(), symbol)
+ return a
+}
+
+// AppendCodeWithScope will append code and scope to a.arr
+func (a *ArrayBuilder) AppendCodeWithScope(code string, scope Document) *ArrayBuilder {
+ a.arr = AppendCodeWithScopeElement(a.arr, a.incrementKey(), code, scope)
+ return a
+}
+
+// AppendTimestamp will append t and i to a.arr
+func (a *ArrayBuilder) AppendTimestamp(t, i uint32) *ArrayBuilder {
+ a.arr = AppendTimestampElement(a.arr, a.incrementKey(), t, i)
+ return a
+}
+
+// AppendInt64 will append i64 to a.arr
+func (a *ArrayBuilder) AppendInt64(i64 int64) *ArrayBuilder {
+ a.arr = AppendInt64Element(a.arr, a.incrementKey(), i64)
+ return a
+}
+
+// AppendDecimal128 will append d128 to a.arr
+func (a *ArrayBuilder) AppendDecimal128(d128 primitive.Decimal128) *ArrayBuilder {
+ a.arr = AppendDecimal128Element(a.arr, a.incrementKey(), d128)
+ return a
+}
+
+// AppendMaxKey will append a max key element to a.arr
+func (a *ArrayBuilder) AppendMaxKey() *ArrayBuilder {
+ a.arr = AppendMaxKeyElement(a.arr, a.incrementKey())
+ return a
+}
+
+// AppendMinKey will append a min key element to a.arr
+func (a *ArrayBuilder) AppendMinKey() *ArrayBuilder {
+ a.arr = AppendMinKeyElement(a.arr, a.incrementKey())
+ return a
+}
+
+// AppendValue appends a BSON value to the array.
+func (a *ArrayBuilder) AppendValue(val Value) *ArrayBuilder {
+ a.arr = AppendValueElement(a.arr, a.incrementKey(), val)
+ return a
+}
+
+// StartArray starts building an inline Array. After this array is completed,
+// the user must call a.FinishArray.
+func (a *ArrayBuilder) StartArray() *ArrayBuilder {
+ a.arr = AppendHeader(a.arr, bsontype.Array, a.incrementKey())
+ a.startArray()
+ return a
+}
+
+// FinishArray builds the most recent array created
+func (a *ArrayBuilder) FinishArray() *ArrayBuilder {
+ a.arr = a.Build()
+ return a
+}
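+
+// NOTE(editor): an illustrative sketch of nesting with StartArray/FinishArray;
+// not part of the upstream source.
+//
+//	arr := NewArrayBuilder().
+//		AppendString("outer"). // element "0"
+//		StartArray().          // element "1" is an inner array
+//		AppendInt32(1).
+//		AppendInt32(2).
+//		FinishArray(). // closes the inner array
+//		Build()        // ["outer", [1, 2]]
+//	_ = arr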
diff --git a/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/bson_documentbuilder.go b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/bson_documentbuilder.go
new file mode 100644
index 000000000..52162f8aa
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/bson_documentbuilder.go
@@ -0,0 +1,189 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncore
+
+import (
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+// DocumentBuilder builds a bson document
+type DocumentBuilder struct {
+ doc []byte
+ indexes []int32
+}
+
+// startDocument reserves the document's length and sets the index to where the length begins.
+func (db *DocumentBuilder) startDocument() *DocumentBuilder {
+ var index int32
+ index, db.doc = AppendDocumentStart(db.doc)
+ db.indexes = append(db.indexes, index)
+ return db
+}
+
+// NewDocumentBuilder creates a new DocumentBuilder
+func NewDocumentBuilder() *DocumentBuilder {
+ return (&DocumentBuilder{}).startDocument()
+}
+
+// Build updates the length of the document, pops the index that marks the beginning of the
+// document's length bytes, and returns the document (BSON bytes).
+func (db *DocumentBuilder) Build() Document {
+ last := len(db.indexes) - 1
+ db.doc, _ = AppendDocumentEnd(db.doc, db.indexes[last])
+ db.indexes = db.indexes[:last]
+ return db.doc
+}
+
+// AppendInt32 will append an int32 element using key and i32 to DocumentBuilder.doc
+func (db *DocumentBuilder) AppendInt32(key string, i32 int32) *DocumentBuilder {
+ db.doc = AppendInt32Element(db.doc, key, i32)
+ return db
+}
+
+// AppendDocument will append a bson embedded document element using key
+// and doc to DocumentBuilder.doc
+func (db *DocumentBuilder) AppendDocument(key string, doc []byte) *DocumentBuilder {
+ db.doc = AppendDocumentElement(db.doc, key, doc)
+ return db
+}
+
+// AppendArray will append a bson array using key and arr to DocumentBuilder.doc
+func (db *DocumentBuilder) AppendArray(key string, arr []byte) *DocumentBuilder {
+ db.doc = AppendHeader(db.doc, bsontype.Array, key)
+ db.doc = AppendArray(db.doc, arr)
+ return db
+}
+
+// AppendDouble will append a double element using key and f to DocumentBuilder.doc
+func (db *DocumentBuilder) AppendDouble(key string, f float64) *DocumentBuilder {
+ db.doc = AppendDoubleElement(db.doc, key, f)
+ return db
+}
+
+// AppendString will append str to DocumentBuilder.doc with the given key
+func (db *DocumentBuilder) AppendString(key string, str string) *DocumentBuilder {
+ db.doc = AppendStringElement(db.doc, key, str)
+ return db
+}
+
+// AppendObjectID will append oid to DocumentBuilder.doc with the given key
+func (db *DocumentBuilder) AppendObjectID(key string, oid primitive.ObjectID) *DocumentBuilder {
+ db.doc = AppendObjectIDElement(db.doc, key, oid)
+ return db
+}
+
+// AppendBinary will append a BSON binary element using key, subtype, and
+// b to db.doc
+func (db *DocumentBuilder) AppendBinary(key string, subtype byte, b []byte) *DocumentBuilder {
+ db.doc = AppendBinaryElement(db.doc, key, subtype, b)
+ return db
+}
+
+// AppendUndefined will append a BSON undefined element using key to db.doc
+func (db *DocumentBuilder) AppendUndefined(key string) *DocumentBuilder {
+ db.doc = AppendUndefinedElement(db.doc, key)
+ return db
+}
+
+// AppendBoolean will append a boolean element using key and b to db.doc
+func (db *DocumentBuilder) AppendBoolean(key string, b bool) *DocumentBuilder {
+ db.doc = AppendBooleanElement(db.doc, key, b)
+ return db
+}
+
+// AppendDateTime will append a datetime element using key and dt to db.doc
+func (db *DocumentBuilder) AppendDateTime(key string, dt int64) *DocumentBuilder {
+ db.doc = AppendDateTimeElement(db.doc, key, dt)
+ return db
+}
+
+// AppendNull will append a null element using key to db.doc
+func (db *DocumentBuilder) AppendNull(key string) *DocumentBuilder {
+ db.doc = AppendNullElement(db.doc, key)
+ return db
+}
+
+// AppendRegex will append pattern and options using key to db.doc
+func (db *DocumentBuilder) AppendRegex(key, pattern, options string) *DocumentBuilder {
+ db.doc = AppendRegexElement(db.doc, key, pattern, options)
+ return db
+}
+
+// AppendDBPointer will append ns and oid using key to db.doc
+func (db *DocumentBuilder) AppendDBPointer(key string, ns string, oid primitive.ObjectID) *DocumentBuilder {
+ db.doc = AppendDBPointerElement(db.doc, key, ns, oid)
+ return db
+}
+
+// AppendJavaScript will append js using the provided key to db.doc
+func (db *DocumentBuilder) AppendJavaScript(key, js string) *DocumentBuilder {
+ db.doc = AppendJavaScriptElement(db.doc, key, js)
+ return db
+}
+
+// AppendSymbol will append a BSON symbol element using key and symbol to db.doc
+func (db *DocumentBuilder) AppendSymbol(key, symbol string) *DocumentBuilder {
+ db.doc = AppendSymbolElement(db.doc, key, symbol)
+ return db
+}
+
+// AppendCodeWithScope will append code and scope using key to db.doc
+func (db *DocumentBuilder) AppendCodeWithScope(key string, code string, scope Document) *DocumentBuilder {
+ db.doc = AppendCodeWithScopeElement(db.doc, key, code, scope)
+ return db
+}
+
+// AppendTimestamp will append t and i to db.doc using provided key
+func (db *DocumentBuilder) AppendTimestamp(key string, t, i uint32) *DocumentBuilder {
+ db.doc = AppendTimestampElement(db.doc, key, t, i)
+ return db
+}
+
+// AppendInt64 will append i64 using key to db.doc
+func (db *DocumentBuilder) AppendInt64(key string, i64 int64) *DocumentBuilder {
+ db.doc = AppendInt64Element(db.doc, key, i64)
+ return db
+}
+
+// AppendDecimal128 will append d128 to db.doc using provided key
+func (db *DocumentBuilder) AppendDecimal128(key string, d128 primitive.Decimal128) *DocumentBuilder {
+ db.doc = AppendDecimal128Element(db.doc, key, d128)
+ return db
+}
+
+// AppendMaxKey will append a max key element using key to db.doc
+func (db *DocumentBuilder) AppendMaxKey(key string) *DocumentBuilder {
+ db.doc = AppendMaxKeyElement(db.doc, key)
+ return db
+}
+
+// AppendMinKey will append a min key element using key to db.doc
+func (db *DocumentBuilder) AppendMinKey(key string) *DocumentBuilder {
+ db.doc = AppendMinKeyElement(db.doc, key)
+ return db
+}
+
+// AppendValue will append a BSON element with the provided key and value to the document.
+func (db *DocumentBuilder) AppendValue(key string, val Value) *DocumentBuilder {
+ db.doc = AppendValueElement(db.doc, key, val)
+ return db
+}
+
+// StartDocument starts building an inline document element with the provided key.
+// After this document is completed, the user must call FinishDocument.
+func (db *DocumentBuilder) StartDocument(key string) *DocumentBuilder {
+ db.doc = AppendHeader(db.doc, bsontype.EmbeddedDocument, key)
+ db = db.startDocument()
+ return db
+}
+
+// FinishDocument builds the most recent document created
+func (db *DocumentBuilder) FinishDocument() *DocumentBuilder {
+ db.doc = db.Build()
+ return db
+}
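+
+// NOTE(editor): an illustrative sketch of building a document with an embedded
+// document; not part of the upstream source.
+//
+//	doc := NewDocumentBuilder().
+//		AppendString("name", "ana").
+//		StartDocument("meta"). // begins the embedded "meta" document
+//		AppendBoolean("active", true).
+//		FinishDocument(). // closes "meta"
+//		Build()           // {"name": "ana", "meta": {"active": true}}
+//	_ = doc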
diff --git a/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/bsoncore.go b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/bsoncore.go
new file mode 100644
index 000000000..17aad6d71
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/bsoncore.go
@@ -0,0 +1,862 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+// Package bsoncore contains functions that can be used to encode and decode BSON
+// elements and values to or from a slice of bytes. These functions are aimed at
+// allowing low level manipulation of BSON and can be used to build a higher
+// level BSON library.
+//
+// The Read* functions within this package return the values of the element and
+// a boolean indicating if the values are valid. A boolean was used instead of
+// an error because any error that would be returned would be the same: not
+// enough bytes. This library attempts to do no validation; it will only return
+// false if there are not enough bytes for an item to be read. For example, the
+// ReadDocument function checks the length: if that length is larger than the
+// number of bytes available, it will return false; if there are enough bytes, it
+// will return those bytes and true. It is the consumer's responsibility to
+// validate those bytes.
+//
+// The Append* functions within this package will append the type value to the
+// given dst slice. If the slice has enough capacity, it will not grow the
+// slice. The Append*Element functions within this package operate in the same
+// way, but additionally append the BSON type and the key before the value.
+package bsoncore // import "go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
+
+import (
+ "bytes"
+ "fmt"
+ "math"
+ "strconv"
+ "strings"
+ "time"
+
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+const (
+ // EmptyDocumentLength is the length of a document that has been started/ended but has no elements.
+ EmptyDocumentLength = 5
+ // nullTerminator is a string version of the 0 byte that is appended at the end of cstrings.
+ nullTerminator = string(byte(0))
+ invalidKeyPanicMsg = "BSON element keys cannot contain null bytes"
+ invalidRegexPanicMsg = "BSON regex values cannot contain null bytes"
+)
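
[Editor's note: a short sketch, not part of the diff, illustrating the Append*/Read* conventions described in the package comment: failures surface as ok booleans, never errors.]

    func exampleRoundTrip() (float64, bool) {
        dst := AppendDoubleElement(nil, "pi", 3.14159) // type byte, "pi\x00", then 8 value bytes
        _, _, rem, ok := ReadHeader(dst)               // yields bsontype.Double, "pi", and the value bytes
        if !ok {
            return 0, false // not enough bytes: the only failure mode, per the package comment
        }
        f, _, ok := ReadDouble(rem)
        return f, ok // 3.14159, true
    }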
+
+// AppendType will append t to dst and return the extended buffer.
+func AppendType(dst []byte, t bsontype.Type) []byte { return append(dst, byte(t)) }
+
+// AppendKey will append key to dst and return the extended buffer.
+func AppendKey(dst []byte, key string) []byte { return append(dst, key+nullTerminator...) }
+
+// AppendHeader will append Type t and key to dst and return the extended
+// buffer.
+func AppendHeader(dst []byte, t bsontype.Type, key string) []byte {
+ if !isValidCString(key) {
+ panic(invalidKeyPanicMsg)
+ }
+
+ dst = AppendType(dst, t)
+ dst = append(dst, key...)
+ return append(dst, 0x00)
+ // return append(AppendType(dst, t), key+string(0x00)...)
+}
+
+// TODO(skriptble): All of the Read* functions should return src resliced to start just after what was read.
+
+// ReadType will return the first byte of the provided []byte as a type. If
+// there is no available byte, false is returned.
+func ReadType(src []byte) (bsontype.Type, []byte, bool) {
+ if len(src) < 1 {
+ return 0, src, false
+ }
+ return bsontype.Type(src[0]), src[1:], true
+}
+
+// ReadKey will read a key from src. The 0x00 byte will not be present
+// in the returned string. If there are not enough bytes available, false is
+// returned.
+func ReadKey(src []byte) (string, []byte, bool) { return readcstring(src) }
+
+// ReadKeyBytes will read a key from src as bytes. The 0x00 byte will
+// not be present in the returned string. If there are not enough bytes
+// available, false is returned.
+func ReadKeyBytes(src []byte) ([]byte, []byte, bool) { return readcstringbytes(src) }
+
+// ReadHeader will read a type byte and a key from src. If both of these
+// values cannot be read, false is returned.
+func ReadHeader(src []byte) (t bsontype.Type, key string, rem []byte, ok bool) {
+ t, rem, ok = ReadType(src)
+ if !ok {
+ return 0, "", src, false
+ }
+ key, rem, ok = ReadKey(rem)
+ if !ok {
+ return 0, "", src, false
+ }
+
+ return t, key, rem, true
+}
+
+// ReadHeaderBytes will read a type byte and a key from src, returning the header
+// bytes and the remainder of src as rem. If either the type or the key cannot be
+// read, ok will be false.
+func ReadHeaderBytes(src []byte) (header []byte, rem []byte, ok bool) {
+ if len(src) < 1 {
+ return nil, src, false
+ }
+	idx := bytes.IndexByte(src[1:], 0x00)
+	if idx == -1 {
+		return nil, src, false
+	}
+	// idx is relative to src[1:], so the key's null terminator sits at src[idx+1]
+	// and the full header (type byte, key, terminator) is src[:idx+2].
+	return src[:idx+2], src[idx+2:], true
+}
+
+// ReadElement reads the next full element from src. It returns the element, the remaining bytes in
+// the slice, and a boolean indicating if the read was successful.
+func ReadElement(src []byte) (Element, []byte, bool) {
+ if len(src) < 1 {
+ return nil, src, false
+ }
+ t := bsontype.Type(src[0])
+ idx := bytes.IndexByte(src[1:], 0x00)
+ if idx == -1 {
+ return nil, src, false
+ }
+ length, ok := valueLength(src[idx+2:], t) // We add 2 here because we called IndexByte with src[1:]
+ if !ok {
+ return nil, src, false
+ }
+ elemLength := 1 + idx + 1 + int(length)
+ if elemLength > len(src) {
+ return nil, src, false
+ }
+ if elemLength < 0 {
+ return nil, src, false
+ }
+ return src[:elemLength], src[elemLength:], true
+}
+
+// AppendValueElement appends value to dst as an element using key as the element's key.
+func AppendValueElement(dst []byte, key string, value Value) []byte {
+ dst = AppendHeader(dst, value.Type, key)
+ dst = append(dst, value.Data...)
+ return dst
+}
+
+// ReadValue reads the next value as the provided type and returns a Value, the remaining bytes,
+// and a boolean indicating if the read was successful.
+func ReadValue(src []byte, t bsontype.Type) (Value, []byte, bool) {
+ data, rem, ok := readValue(src, t)
+ if !ok {
+ return Value{}, src, false
+ }
+ return Value{Type: t, Data: data}, rem, true
+}
+
+// AppendDouble will append f to dst and return the extended buffer.
+func AppendDouble(dst []byte, f float64) []byte {
+ return appendu64(dst, math.Float64bits(f))
+}
+
+// AppendDoubleElement will append a BSON double element using key and f to dst
+// and return the extended buffer.
+func AppendDoubleElement(dst []byte, key string, f float64) []byte {
+ return AppendDouble(AppendHeader(dst, bsontype.Double, key), f)
+}
+
+// ReadDouble will read a float64 from src. If there are not enough bytes it
+// will return false.
+func ReadDouble(src []byte) (float64, []byte, bool) {
+ bits, src, ok := readu64(src)
+ if !ok {
+ return 0, src, false
+ }
+ return math.Float64frombits(bits), src, true
+}
+
+// AppendString will append s to dst and return the extended buffer.
+func AppendString(dst []byte, s string) []byte {
+ return appendstring(dst, s)
+}
+
+// AppendStringElement will append a BSON string element using key and val to dst
+// and return the extended buffer.
+func AppendStringElement(dst []byte, key, val string) []byte {
+ return AppendString(AppendHeader(dst, bsontype.String, key), val)
+}
+
+// ReadString will read a string from src. If there are not enough bytes it
+// will return false.
+func ReadString(src []byte) (string, []byte, bool) {
+ return readstring(src)
+}
+
+// AppendDocumentStart reserves a document's length and returns the index where the length begins.
+// This index can later be used to write the length of the document.
+func AppendDocumentStart(dst []byte) (index int32, b []byte) {
+ // TODO(skriptble): We really need AppendDocumentStart and AppendDocumentEnd. AppendDocumentStart would handle calling
+ // TODO ReserveLength and providing the index of the start of the document. AppendDocumentEnd would handle taking that
+ // TODO start index, adding the null byte, calculating the length, and filling in the length at the start of the
+ // TODO document.
+ return ReserveLength(dst)
+}
+
+// AppendDocumentStartInline functions the same as AppendDocumentStart but takes a pointer to the
+// index int32 which allows this function to be used inline.
+func AppendDocumentStartInline(dst []byte, index *int32) []byte {
+ idx, doc := AppendDocumentStart(dst)
+ *index = idx
+ return doc
+}
+
+// AppendDocumentElementStart writes a document element header and then reserves the length bytes.
+func AppendDocumentElementStart(dst []byte, key string) (index int32, b []byte) {
+ return AppendDocumentStart(AppendHeader(dst, bsontype.EmbeddedDocument, key))
+}
+
+// AppendDocumentEnd writes the null byte for a document and updates the length of the document.
+// The index should be the beginning of the document's length bytes.
+func AppendDocumentEnd(dst []byte, index int32) ([]byte, error) {
+ if int(index) > len(dst)-4 {
+ return dst, fmt.Errorf("not enough bytes available after index to write length")
+ }
+ dst = append(dst, 0x00)
+ dst = UpdateLength(dst, index, int32(len(dst[index:])))
+ return dst, nil
+}
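
[Editor's note: an illustrative sketch, not part of the diff, of the start/end pairing: reserve the length bytes, append elements, then terminate and back-fill the length.]

    func exampleStartEnd() ([]byte, error) {
        idx, dst := AppendDocumentStart(nil)     // reserves 4 length bytes; idx == 0 here
        dst = AppendStringElement(dst, "k", "v") // append one element
        return AppendDocumentEnd(dst, idx)       // appends the 0x00 terminator, back-fills length
    }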
+
+// AppendDocument will append doc to dst and return the extended buffer.
+func AppendDocument(dst []byte, doc []byte) []byte { return append(dst, doc...) }
+
+// AppendDocumentElement will append a BSON embedded document element using key
+// and doc to dst and return the extended buffer.
+func AppendDocumentElement(dst []byte, key string, doc []byte) []byte {
+ return AppendDocument(AppendHeader(dst, bsontype.EmbeddedDocument, key), doc)
+}
+
+// BuildDocument will create a document with the given slice of elements and will append
+// it to dst and return the extended buffer.
+func BuildDocument(dst []byte, elems ...[]byte) []byte {
+ idx, dst := ReserveLength(dst)
+ for _, elem := range elems {
+ dst = append(dst, elem...)
+ }
+ dst = append(dst, 0x00)
+ dst = UpdateLength(dst, idx, int32(len(dst[idx:])))
+ return dst
+}
+
+// BuildDocumentValue creates an Embedded Document value from the given elements.
+func BuildDocumentValue(elems ...[]byte) Value {
+ return Value{Type: bsontype.EmbeddedDocument, Data: BuildDocument(nil, elems...)}
+}
+
+// BuildDocumentElement will append a BSON embedded document element using key and the provided
+// elements and return the extended buffer.
+func BuildDocumentElement(dst []byte, key string, elems ...[]byte) []byte {
+ return BuildDocument(AppendHeader(dst, bsontype.EmbeddedDocument, key), elems...)
+}
+
+// BuildDocumentFromElements is an alias for the BuildDocument function.
+var BuildDocumentFromElements = BuildDocument
+
+// ReadDocument will read a document from src. If there are not enough bytes it
+// will return false.
+func ReadDocument(src []byte) (doc Document, rem []byte, ok bool) { return readLengthBytes(src) }
+
+// AppendArrayStart appends the length bytes to an array and then returns the index of the start
+// of those length bytes.
+func AppendArrayStart(dst []byte) (index int32, b []byte) { return ReserveLength(dst) }
+
+// AppendArrayElementStart appends an array element header and then the length bytes for an array,
+// returning the index where the length starts.
+func AppendArrayElementStart(dst []byte, key string) (index int32, b []byte) {
+ return AppendArrayStart(AppendHeader(dst, bsontype.Array, key))
+}
+
+// AppendArrayEnd appends the null byte to an array and calculates the length, inserting that
+// calculated length starting at index.
+func AppendArrayEnd(dst []byte, index int32) ([]byte, error) { return AppendDocumentEnd(dst, index) }
+
+// AppendArray will append arr to dst and return the extended buffer.
+func AppendArray(dst []byte, arr []byte) []byte { return append(dst, arr...) }
+
+// AppendArrayElement will append a BSON array element using key and arr to dst
+// and return the extended buffer.
+func AppendArrayElement(dst []byte, key string, arr []byte) []byte {
+ return AppendArray(AppendHeader(dst, bsontype.Array, key), arr)
+}
+
+// BuildArray will append a BSON array to dst built from values.
+func BuildArray(dst []byte, values ...Value) []byte {
+ idx, dst := ReserveLength(dst)
+ for pos, val := range values {
+ dst = AppendValueElement(dst, strconv.Itoa(pos), val)
+ }
+ dst = append(dst, 0x00)
+ dst = UpdateLength(dst, idx, int32(len(dst[idx:])))
+ return dst
+}
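
[Editor's note: for illustration only. BuildArray assigns the decimal string keys ("0", "1", ...) itself via strconv.Itoa, so callers pass only the values.]

    func exampleArray() []byte {
        return BuildArray(nil,
            Value{Type: bsontype.Int32, Data: AppendInt32(nil, 1)},
            Value{Type: bsontype.String, Data: AppendString(nil, "two")},
        ) // BSON array equivalent to [1, "two"]
    }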
+
+// BuildArrayElement will create an array element using the provided values.
+func BuildArrayElement(dst []byte, key string, values ...Value) []byte {
+ return BuildArray(AppendHeader(dst, bsontype.Array, key), values...)
+}
+
+// ReadArray will read an array from src. If there are not enough bytes it
+// will return false.
+func ReadArray(src []byte) (arr Array, rem []byte, ok bool) { return readLengthBytes(src) }
+
+// AppendBinary will append subtype and b to dst and return the extended buffer.
+func AppendBinary(dst []byte, subtype byte, b []byte) []byte {
+ if subtype == 0x02 {
+ return appendBinarySubtype2(dst, subtype, b)
+ }
+ dst = append(appendLength(dst, int32(len(b))), subtype)
+ return append(dst, b...)
+}
+
+// AppendBinaryElement will append a BSON binary element using key, subtype, and
+// b to dst and return the extended buffer.
+func AppendBinaryElement(dst []byte, key string, subtype byte, b []byte) []byte {
+ return AppendBinary(AppendHeader(dst, bsontype.Binary, key), subtype, b)
+}
+
+// ReadBinary will read a subtype and bin from src. If there are not enough bytes it
+// will return false.
+func ReadBinary(src []byte) (subtype byte, bin []byte, rem []byte, ok bool) {
+ length, rem, ok := ReadLength(src)
+ if !ok {
+ return 0x00, nil, src, false
+ }
+ if len(rem) < 1 { // subtype
+ return 0x00, nil, src, false
+ }
+ subtype, rem = rem[0], rem[1:]
+
+ if len(rem) < int(length) {
+ return 0x00, nil, src, false
+ }
+
+ if subtype == 0x02 {
+ length, rem, ok = ReadLength(rem)
+ if !ok || len(rem) < int(length) {
+ return 0x00, nil, src, false
+ }
+ }
+
+ return subtype, rem[:length], rem[length:], true
+}
+
+// AppendUndefinedElement will append a BSON undefined element using key to dst
+// and return the extended buffer.
+func AppendUndefinedElement(dst []byte, key string) []byte {
+ return AppendHeader(dst, bsontype.Undefined, key)
+}
+
+// AppendObjectID will append oid to dst and return the extended buffer.
+func AppendObjectID(dst []byte, oid primitive.ObjectID) []byte { return append(dst, oid[:]...) }
+
+// AppendObjectIDElement will append a BSON ObjectID element using key and oid to dst
+// and return the extended buffer.
+func AppendObjectIDElement(dst []byte, key string, oid primitive.ObjectID) []byte {
+ return AppendObjectID(AppendHeader(dst, bsontype.ObjectID, key), oid)
+}
+
+// ReadObjectID will read an ObjectID from src. If there are not enough bytes it
+// will return false.
+func ReadObjectID(src []byte) (primitive.ObjectID, []byte, bool) {
+ if len(src) < 12 {
+ return primitive.ObjectID{}, src, false
+ }
+ var oid primitive.ObjectID
+ copy(oid[:], src[0:12])
+ return oid, src[12:], true
+}
+
+// AppendBoolean will append b to dst and return the extended buffer.
+func AppendBoolean(dst []byte, b bool) []byte {
+ if b {
+ return append(dst, 0x01)
+ }
+ return append(dst, 0x00)
+}
+
+// AppendBooleanElement will append a BSON boolean element using key and b to dst
+// and return the extended buffer.
+func AppendBooleanElement(dst []byte, key string, b bool) []byte {
+ return AppendBoolean(AppendHeader(dst, bsontype.Boolean, key), b)
+}
+
+// ReadBoolean will read a bool from src. If there are not enough bytes it
+// will return false.
+func ReadBoolean(src []byte) (bool, []byte, bool) {
+ if len(src) < 1 {
+ return false, src, false
+ }
+
+ return src[0] == 0x01, src[1:], true
+}
+
+// AppendDateTime will append dt to dst and return the extended buffer.
+func AppendDateTime(dst []byte, dt int64) []byte { return appendi64(dst, dt) }
+
+// AppendDateTimeElement will append a BSON datetime element using key and dt to dst
+// and return the extended buffer.
+func AppendDateTimeElement(dst []byte, key string, dt int64) []byte {
+ return AppendDateTime(AppendHeader(dst, bsontype.DateTime, key), dt)
+}
+
+// ReadDateTime will read an int64 datetime from src. If there are not enough bytes it
+// will return false.
+func ReadDateTime(src []byte) (int64, []byte, bool) { return readi64(src) }
+
+// AppendTime will append time as a BSON DateTime to dst and return the extended buffer.
+func AppendTime(dst []byte, t time.Time) []byte {
+ return AppendDateTime(dst, t.Unix()*1000+int64(t.Nanosecond()/1e6))
+}
+
+// AppendTimeElement will append a BSON datetime element using key and dt to dst
+// and return the extended buffer.
+func AppendTimeElement(dst []byte, key string, t time.Time) []byte {
+ return AppendTime(AppendHeader(dst, bsontype.DateTime, key), t)
+}
+
+// ReadTime will read a time.Time datetime from src. If there are not enough bytes it
+// will return false.
+func ReadTime(src []byte) (time.Time, []byte, bool) {
+ dt, rem, ok := readi64(src)
+ return time.Unix(dt/1e3, dt%1e3*1e6), rem, ok
+}
+
+// AppendNullElement will append a BSON null element using key to dst
+// and return the extended buffer.
+func AppendNullElement(dst []byte, key string) []byte { return AppendHeader(dst, bsontype.Null, key) }
+
+// AppendRegex will append pattern and options to dst and return the extended buffer.
+func AppendRegex(dst []byte, pattern, options string) []byte {
+ if !isValidCString(pattern) || !isValidCString(options) {
+ panic(invalidRegexPanicMsg)
+ }
+
+ return append(dst, pattern+nullTerminator+options+nullTerminator...)
+}
+
+// AppendRegexElement will append a BSON regex element using key, pattern, and
+// options to dst and return the extended buffer.
+func AppendRegexElement(dst []byte, key, pattern, options string) []byte {
+ return AppendRegex(AppendHeader(dst, bsontype.Regex, key), pattern, options)
+}
+
+// ReadRegex will read a pattern and options from src. If there are not enough bytes it
+// will return false.
+func ReadRegex(src []byte) (pattern, options string, rem []byte, ok bool) {
+ pattern, rem, ok = readcstring(src)
+ if !ok {
+ return "", "", src, false
+ }
+ options, rem, ok = readcstring(rem)
+ if !ok {
+ return "", "", src, false
+ }
+ return pattern, options, rem, true
+}
+
+// AppendDBPointer will append ns and oid to dst and return the extended buffer.
+func AppendDBPointer(dst []byte, ns string, oid primitive.ObjectID) []byte {
+ return append(appendstring(dst, ns), oid[:]...)
+}
+
+// AppendDBPointerElement will append a BSON DBPointer element using key, ns,
+// and oid to dst and return the extended buffer.
+func AppendDBPointerElement(dst []byte, key, ns string, oid primitive.ObjectID) []byte {
+ return AppendDBPointer(AppendHeader(dst, bsontype.DBPointer, key), ns, oid)
+}
+
+// ReadDBPointer will read a ns and oid from src. If there are not enough bytes it
+// will return false.
+func ReadDBPointer(src []byte) (ns string, oid primitive.ObjectID, rem []byte, ok bool) {
+ ns, rem, ok = readstring(src)
+ if !ok {
+ return "", primitive.ObjectID{}, src, false
+ }
+ oid, rem, ok = ReadObjectID(rem)
+ if !ok {
+ return "", primitive.ObjectID{}, src, false
+ }
+ return ns, oid, rem, true
+}
+
+// AppendJavaScript will append js to dst and return the extended buffer.
+func AppendJavaScript(dst []byte, js string) []byte { return appendstring(dst, js) }
+
+// AppendJavaScriptElement will append a BSON JavaScript element using key and
+// js to dst and return the extended buffer.
+func AppendJavaScriptElement(dst []byte, key, js string) []byte {
+ return AppendJavaScript(AppendHeader(dst, bsontype.JavaScript, key), js)
+}
+
+// ReadJavaScript will read a js string from src. If there are not enough bytes it
+// will return false.
+func ReadJavaScript(src []byte) (js string, rem []byte, ok bool) { return readstring(src) }
+
+// AppendSymbol will append symbol to dst and return the extended buffer.
+func AppendSymbol(dst []byte, symbol string) []byte { return appendstring(dst, symbol) }
+
+// AppendSymbolElement will append a BSON symbol element using key and symbol to dst
+// and return the extended buffer.
+func AppendSymbolElement(dst []byte, key, symbol string) []byte {
+ return AppendSymbol(AppendHeader(dst, bsontype.Symbol, key), symbol)
+}
+
+// ReadSymbol will read a symbol string from src. If there are not enough bytes it
+// will return false.
+func ReadSymbol(src []byte) (symbol string, rem []byte, ok bool) { return readstring(src) }
+
+// AppendCodeWithScope will append code and scope to dst and return the extended buffer.
+func AppendCodeWithScope(dst []byte, code string, scope []byte) []byte {
+ length := int32(4 + 4 + len(code) + 1 + len(scope)) // length of cws, length of code, code, 0x00, scope
+ dst = appendLength(dst, length)
+
+ return append(appendstring(dst, code), scope...)
+}
+
+// AppendCodeWithScopeElement will append a BSON code with scope element using
+// key, code, and scope to dst
+// and return the extended buffer.
+func AppendCodeWithScopeElement(dst []byte, key, code string, scope []byte) []byte {
+ return AppendCodeWithScope(AppendHeader(dst, bsontype.CodeWithScope, key), code, scope)
+}
+
+// ReadCodeWithScope will read code and scope from src. If there are not enough bytes it
+// will return false.
+func ReadCodeWithScope(src []byte) (code string, scope []byte, rem []byte, ok bool) {
+ length, rem, ok := ReadLength(src)
+ if !ok || len(src) < int(length) {
+ return "", nil, src, false
+ }
+
+ code, rem, ok = readstring(rem)
+ if !ok {
+ return "", nil, src, false
+ }
+
+ scope, rem, ok = ReadDocument(rem)
+ if !ok {
+ return "", nil, src, false
+ }
+ return code, scope, rem, true
+}
+
+// AppendInt32 will append i32 to dst and return the extended buffer.
+func AppendInt32(dst []byte, i32 int32) []byte { return appendi32(dst, i32) }
+
+// AppendInt32Element will append a BSON int32 element using key and i32 to dst
+// and return the extended buffer.
+func AppendInt32Element(dst []byte, key string, i32 int32) []byte {
+ return AppendInt32(AppendHeader(dst, bsontype.Int32, key), i32)
+}
+
+// ReadInt32 will read an int32 from src. If there are not enough bytes it
+// will return false.
+func ReadInt32(src []byte) (int32, []byte, bool) { return readi32(src) }
+
+// AppendTimestamp will append t and i to dst and return the extended buffer.
+func AppendTimestamp(dst []byte, t, i uint32) []byte {
+ return appendu32(appendu32(dst, i), t) // i is the lower 4 bytes, t is the higher 4 bytes
+}
+
+// AppendTimestampElement will append a BSON timestamp element using key, t, and
+// i to dst and return the extended buffer.
+func AppendTimestampElement(dst []byte, key string, t, i uint32) []byte {
+ return AppendTimestamp(AppendHeader(dst, bsontype.Timestamp, key), t, i)
+}
+
+// ReadTimestamp will read t and i from src. If there are not enough bytes it
+// will return false.
+func ReadTimestamp(src []byte) (t, i uint32, rem []byte, ok bool) {
+ i, rem, ok = readu32(src)
+ if !ok {
+ return 0, 0, src, false
+ }
+ t, rem, ok = readu32(rem)
+ if !ok {
+ return 0, 0, src, false
+ }
+ return t, i, rem, true
+}
+
+// AppendInt64 will append i64 to dst and return the extended buffer.
+func AppendInt64(dst []byte, i64 int64) []byte { return appendi64(dst, i64) }
+
+// AppendInt64Element will append a BSON int64 element using key and i64 to dst
+// and return the extended buffer.
+func AppendInt64Element(dst []byte, key string, i64 int64) []byte {
+ return AppendInt64(AppendHeader(dst, bsontype.Int64, key), i64)
+}
+
+// ReadInt64 will read an int64 from src. If there are not enough bytes it
+// will return false.
+func ReadInt64(src []byte) (int64, []byte, bool) { return readi64(src) }
+
+// AppendDecimal128 will append d128 to dst and return the extended buffer.
+func AppendDecimal128(dst []byte, d128 primitive.Decimal128) []byte {
+ high, low := d128.GetBytes()
+ return appendu64(appendu64(dst, low), high)
+}
+
+// AppendDecimal128Element will append a BSON Decimal128 element using key and
+// d128 to dst and return the extended buffer.
+func AppendDecimal128Element(dst []byte, key string, d128 primitive.Decimal128) []byte {
+ return AppendDecimal128(AppendHeader(dst, bsontype.Decimal128, key), d128)
+}
+
+// ReadDecimal128 will read a primitive.Decimal128 from src. If there are not enough bytes it
+// will return false.
+func ReadDecimal128(src []byte) (primitive.Decimal128, []byte, bool) {
+ l, rem, ok := readu64(src)
+ if !ok {
+ return primitive.Decimal128{}, src, false
+ }
+
+ h, rem, ok := readu64(rem)
+ if !ok {
+ return primitive.Decimal128{}, src, false
+ }
+
+ return primitive.NewDecimal128(h, l), rem, true
+}
+
+// AppendMaxKeyElement will append a BSON max key element using key to dst
+// and return the extended buffer.
+func AppendMaxKeyElement(dst []byte, key string) []byte {
+ return AppendHeader(dst, bsontype.MaxKey, key)
+}
+
+// AppendMinKeyElement will append a BSON min key element using key to dst
+// and return the extended buffer.
+func AppendMinKeyElement(dst []byte, key string) []byte {
+ return AppendHeader(dst, bsontype.MinKey, key)
+}
+
+// EqualValue will return true if the two values are equal.
+func EqualValue(t1, t2 bsontype.Type, v1, v2 []byte) bool {
+ if t1 != t2 {
+ return false
+ }
+ v1, _, ok := readValue(v1, t1)
+ if !ok {
+ return false
+ }
+ v2, _, ok = readValue(v2, t2)
+ if !ok {
+ return false
+ }
+ return bytes.Equal(v1, v2)
+}
+
+// valueLength will determine the length of the next value contained in src as if it
+// is type t. The returned bool will be false if there are not enough bytes in src for
+// a value of type t.
+func valueLength(src []byte, t bsontype.Type) (int32, bool) {
+ var length int32
+ ok := true
+ switch t {
+ case bsontype.Array, bsontype.EmbeddedDocument, bsontype.CodeWithScope:
+ length, _, ok = ReadLength(src)
+ case bsontype.Binary:
+ length, _, ok = ReadLength(src)
+ length += 4 + 1 // binary length + subtype byte
+ case bsontype.Boolean:
+ length = 1
+ case bsontype.DBPointer:
+ length, _, ok = ReadLength(src)
+ length += 4 + 12 // string length + ObjectID length
+ case bsontype.DateTime, bsontype.Double, bsontype.Int64, bsontype.Timestamp:
+ length = 8
+ case bsontype.Decimal128:
+ length = 16
+ case bsontype.Int32:
+ length = 4
+ case bsontype.JavaScript, bsontype.String, bsontype.Symbol:
+ length, _, ok = ReadLength(src)
+ length += 4
+ case bsontype.MaxKey, bsontype.MinKey, bsontype.Null, bsontype.Undefined:
+ length = 0
+ case bsontype.ObjectID:
+ length = 12
+ case bsontype.Regex:
+ regex := bytes.IndexByte(src, 0x00)
+ if regex < 0 {
+ ok = false
+ break
+ }
+ pattern := bytes.IndexByte(src[regex+1:], 0x00)
+ if pattern < 0 {
+ ok = false
+ break
+ }
+ length = int32(int64(regex) + 1 + int64(pattern) + 1)
+ default:
+ ok = false
+ }
+
+ return length, ok
+}
+
+func readValue(src []byte, t bsontype.Type) ([]byte, []byte, bool) {
+ length, ok := valueLength(src, t)
+ if !ok || int(length) > len(src) {
+ return nil, src, false
+ }
+
+ return src[:length], src[length:], true
+}
+
+// ReserveLength reserves the space required for length and returns the index where to write the length
+// and the []byte with reserved space.
+func ReserveLength(dst []byte) (int32, []byte) {
+ index := len(dst)
+ return int32(index), append(dst, 0x00, 0x00, 0x00, 0x00)
+}
+
+// UpdateLength updates the length at index with length and returns the []byte.
+func UpdateLength(dst []byte, index, length int32) []byte {
+ dst[index] = byte(length)
+ dst[index+1] = byte(length >> 8)
+ dst[index+2] = byte(length >> 16)
+ dst[index+3] = byte(length >> 24)
+ return dst
+}
+
+func appendLength(dst []byte, l int32) []byte { return appendi32(dst, l) }
+
+func appendi32(dst []byte, i32 int32) []byte {
+ return append(dst, byte(i32), byte(i32>>8), byte(i32>>16), byte(i32>>24))
+}
+
+// ReadLength reads an int32 length from src and returns the length and the remaining bytes. If
+// there aren't enough bytes to read a valid length, src is returned unmodified and the returned
+// bool will be false.
+func ReadLength(src []byte) (int32, []byte, bool) {
+ ln, src, ok := readi32(src)
+ if ln < 0 {
+ return ln, src, false
+ }
+ return ln, src, ok
+}
+
+func readi32(src []byte) (int32, []byte, bool) {
+ if len(src) < 4 {
+ return 0, src, false
+ }
+ return (int32(src[0]) | int32(src[1])<<8 | int32(src[2])<<16 | int32(src[3])<<24), src[4:], true
+}
+
+func appendi64(dst []byte, i64 int64) []byte {
+ return append(dst,
+ byte(i64), byte(i64>>8), byte(i64>>16), byte(i64>>24),
+ byte(i64>>32), byte(i64>>40), byte(i64>>48), byte(i64>>56),
+ )
+}
+
+func readi64(src []byte) (int64, []byte, bool) {
+ if len(src) < 8 {
+ return 0, src, false
+ }
+ i64 := (int64(src[0]) | int64(src[1])<<8 | int64(src[2])<<16 | int64(src[3])<<24 |
+ int64(src[4])<<32 | int64(src[5])<<40 | int64(src[6])<<48 | int64(src[7])<<56)
+ return i64, src[8:], true
+}
+
+func appendu32(dst []byte, u32 uint32) []byte {
+ return append(dst, byte(u32), byte(u32>>8), byte(u32>>16), byte(u32>>24))
+}
+
+func readu32(src []byte) (uint32, []byte, bool) {
+ if len(src) < 4 {
+ return 0, src, false
+ }
+
+ return (uint32(src[0]) | uint32(src[1])<<8 | uint32(src[2])<<16 | uint32(src[3])<<24), src[4:], true
+}
+
+func appendu64(dst []byte, u64 uint64) []byte {
+ return append(dst,
+ byte(u64), byte(u64>>8), byte(u64>>16), byte(u64>>24),
+ byte(u64>>32), byte(u64>>40), byte(u64>>48), byte(u64>>56),
+ )
+}
+
+func readu64(src []byte) (uint64, []byte, bool) {
+ if len(src) < 8 {
+ return 0, src, false
+ }
+ u64 := (uint64(src[0]) | uint64(src[1])<<8 | uint64(src[2])<<16 | uint64(src[3])<<24 |
+ uint64(src[4])<<32 | uint64(src[5])<<40 | uint64(src[6])<<48 | uint64(src[7])<<56)
+ return u64, src[8:], true
+}
+
+// keep in sync with readcstringbytes
+func readcstring(src []byte) (string, []byte, bool) {
+ idx := bytes.IndexByte(src, 0x00)
+ if idx < 0 {
+ return "", src, false
+ }
+ return string(src[:idx]), src[idx+1:], true
+}
+
+// keep in sync with readcstring
+func readcstringbytes(src []byte) ([]byte, []byte, bool) {
+ idx := bytes.IndexByte(src, 0x00)
+ if idx < 0 {
+ return nil, src, false
+ }
+ return src[:idx], src[idx+1:], true
+}
+
+func appendstring(dst []byte, s string) []byte {
+ l := int32(len(s) + 1)
+ dst = appendLength(dst, l)
+ dst = append(dst, s...)
+ return append(dst, 0x00)
+}
+
+func readstring(src []byte) (string, []byte, bool) {
+ l, rem, ok := ReadLength(src)
+ if !ok {
+ return "", src, false
+ }
+ if len(src[4:]) < int(l) || l == 0 {
+ return "", src, false
+ }
+
+ return string(rem[:l-1]), rem[l:], true
+}
+
+// readLengthBytes attempts to read a length and that number of bytes. This
+// function requires that the length include the four bytes for itself.
+func readLengthBytes(src []byte) ([]byte, []byte, bool) {
+ l, _, ok := ReadLength(src)
+ if !ok {
+ return nil, src, false
+ }
+ if len(src) < int(l) {
+ return nil, src, false
+ }
+ return src[:l], src[l:], true
+}
+
+func appendBinarySubtype2(dst []byte, subtype byte, b []byte) []byte {
+ dst = appendLength(dst, int32(len(b)+4)) // The bytes we'll encode need to be 4 larger for the length bytes
+ dst = append(dst, subtype)
+ dst = appendLength(dst, int32(len(b)))
+ return append(dst, b...)
+}
+
+func isValidCString(cs string) bool {
+ return !strings.ContainsRune(cs, '\x00')
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/document.go b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/document.go
new file mode 100644
index 000000000..d6e4bb069
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/document.go
@@ -0,0 +1,386 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncore
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "strconv"
+
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+)
+
+// ValidationError is an error type returned when attempting to validate a document or array.
+type ValidationError string
+
+func (ve ValidationError) Error() string { return string(ve) }
+
+// NewDocumentLengthError creates and returns an error for when the length of a document exceeds the
+// bytes available.
+func NewDocumentLengthError(length, rem int) error {
+ return lengthError("document", length, rem)
+}
+
+func lengthError(bufferType string, length, rem int) error {
+ return ValidationError(fmt.Sprintf("%v length exceeds available bytes. length=%d remainingBytes=%d",
+ bufferType, length, rem))
+}
+
+// InsufficientBytesError indicates that there were not enough bytes to read the next component.
+type InsufficientBytesError struct {
+ Source []byte
+ Remaining []byte
+}
+
+// NewInsufficientBytesError creates a new InsufficientBytesError with the given Document and
+// remaining bytes.
+func NewInsufficientBytesError(src, rem []byte) InsufficientBytesError {
+ return InsufficientBytesError{Source: src, Remaining: rem}
+}
+
+// Error implements the error interface.
+func (ibe InsufficientBytesError) Error() string {
+ return "too few bytes to read next component"
+}
+
+// Equal checks that err2 is also an InsufficientBytesError.
+func (ibe InsufficientBytesError) Equal(err2 error) bool {
+ switch err2.(type) {
+ case InsufficientBytesError:
+ return true
+ default:
+ return false
+ }
+}
+
+// InvalidDepthTraversalError is returned when attempting a recursive Lookup when one component of
+// the path is neither an embedded document nor an array.
+type InvalidDepthTraversalError struct {
+ Key string
+ Type bsontype.Type
+}
+
+func (idte InvalidDepthTraversalError) Error() string {
+ return fmt.Sprintf(
+		"attempt to traverse into %s, but its type is %s, not %s nor %s",
+ idte.Key, idte.Type, bsontype.EmbeddedDocument, bsontype.Array,
+ )
+}
+
+// ErrMissingNull is returned when a document or array's last byte is not null.
+const ErrMissingNull ValidationError = "document or array end is missing null byte"
+
+// ErrInvalidLength indicates that a length in a binary representation of a BSON document or array
+// is invalid.
+const ErrInvalidLength ValidationError = "document or array length is invalid"
+
+// ErrNilReader indicates that an operation was attempted on a nil io.Reader.
+var ErrNilReader = errors.New("nil reader")
+
+// ErrEmptyKey indicates that no key was provided to a Lookup method.
+var ErrEmptyKey = errors.New("empty key provided")
+
+// ErrElementNotFound indicates that an Element matching a certain condition does not exist.
+var ErrElementNotFound = errors.New("element not found")
+
+// ErrOutOfBounds indicates that an index provided to access something was invalid.
+var ErrOutOfBounds = errors.New("out of bounds")
+
+// Document is a raw bytes representation of a BSON document.
+type Document []byte
+
+// NewDocumentFromReader reads a document from r. This function will only validate that the
+// length is correct and that the document ends with a null byte.
+func NewDocumentFromReader(r io.Reader) (Document, error) {
+ return newBufferFromReader(r)
+}
+
+func newBufferFromReader(r io.Reader) ([]byte, error) {
+ if r == nil {
+ return nil, ErrNilReader
+ }
+
+ var lengthBytes [4]byte
+
+ // ReadFull guarantees that we will have read at least len(lengthBytes) if err == nil
+ _, err := io.ReadFull(r, lengthBytes[:])
+ if err != nil {
+ return nil, err
+ }
+
+ length, _, _ := readi32(lengthBytes[:]) // ignore ok since we always have enough bytes to read a length
+ if length < 0 {
+ return nil, ErrInvalidLength
+ }
+ buffer := make([]byte, length)
+
+ copy(buffer, lengthBytes[:])
+
+ _, err = io.ReadFull(r, buffer[4:])
+ if err != nil {
+ return nil, err
+ }
+
+ if buffer[length-1] != 0x00 {
+ return nil, ErrMissingNull
+ }
+
+ return buffer, nil
+}
+
+// Lookup searches the document, potentially recursively, for the given key. If there are multiple
+// keys provided, this method will recurse down, as long as the top and intermediate nodes are
+// either documents or arrays. If an error occurs or if the value doesn't exist, an empty Value is
+// returned.
+func (d Document) Lookup(key ...string) Value {
+ val, _ := d.LookupErr(key...)
+ return val
+}
+
+// LookupErr is the same as Lookup, except it returns an error in addition to an empty Value.
+func (d Document) LookupErr(key ...string) (Value, error) {
+ if len(key) < 1 {
+ return Value{}, ErrEmptyKey
+ }
+ length, rem, ok := ReadLength(d)
+ if !ok {
+ return Value{}, NewInsufficientBytesError(d, rem)
+ }
+
+ length -= 4
+
+ var elem Element
+ for length > 1 {
+ elem, rem, ok = ReadElement(rem)
+ length -= int32(len(elem))
+ if !ok {
+ return Value{}, NewInsufficientBytesError(d, rem)
+ }
+ // We use `KeyBytes` rather than `Key` to avoid a needless string alloc.
+ if string(elem.KeyBytes()) != key[0] {
+ continue
+ }
+ if len(key) > 1 {
+ tt := bsontype.Type(elem[0])
+ switch tt {
+ case bsontype.EmbeddedDocument:
+ val, err := elem.Value().Document().LookupErr(key[1:]...)
+ if err != nil {
+ return Value{}, err
+ }
+ return val, nil
+ case bsontype.Array:
+ // Convert to Document to continue Lookup recursion.
+ val, err := Document(elem.Value().Array()).LookupErr(key[1:]...)
+ if err != nil {
+ return Value{}, err
+ }
+ return val, nil
+ default:
+ return Value{}, InvalidDepthTraversalError{Key: elem.Key(), Type: tt}
+ }
+ }
+ return elem.ValueErr()
+ }
+ return Value{}, ErrElementNotFound
+}
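
[Editor's note: a brief sketch, not part of the diff, of a recursive lookup through an embedded document, using the Build* helpers above; AsInt32OK is defined later in this diff, in value.go.]

    func exampleLookup() (int32, bool) {
        doc := Document(BuildDocument(nil,
            BuildDocumentElement(nil, "outer",
                AppendInt32Element(nil, "inner", 7),
            ),
        ))
        return doc.Lookup("outer", "inner").AsInt32OK() // 7, true
    }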
+
+// Index searches for and retrieves the element at the given index. This method will panic if
+// the document is invalid or if the index is out of bounds.
+func (d Document) Index(index uint) Element {
+ elem, err := d.IndexErr(index)
+ if err != nil {
+ panic(err)
+ }
+ return elem
+}
+
+// IndexErr searches for and retrieves the element at the given index.
+func (d Document) IndexErr(index uint) (Element, error) {
+ return indexErr(d, index)
+}
+
+func indexErr(b []byte, index uint) (Element, error) {
+ length, rem, ok := ReadLength(b)
+ if !ok {
+ return nil, NewInsufficientBytesError(b, rem)
+ }
+
+ length -= 4
+
+ var current uint
+ var elem Element
+ for length > 1 {
+ elem, rem, ok = ReadElement(rem)
+ length -= int32(len(elem))
+ if !ok {
+ return nil, NewInsufficientBytesError(b, rem)
+ }
+ if current != index {
+ current++
+ continue
+ }
+ return elem, nil
+ }
+ return nil, ErrOutOfBounds
+}
+
+// DebugString outputs a human-readable version of Document. It will attempt to stringify the
+// valid components of the document even if the entire document is not valid.
+func (d Document) DebugString() string {
+ if len(d) < 5 {
+ return "<malformed>"
+ }
+ var buf bytes.Buffer
+ buf.WriteString("Document")
+ length, rem, _ := ReadLength(d) // We know we have enough bytes to read the length
+ buf.WriteByte('(')
+ buf.WriteString(strconv.Itoa(int(length)))
+ length -= 4
+ buf.WriteString("){")
+ var elem Element
+ var ok bool
+ for length > 1 {
+ elem, rem, ok = ReadElement(rem)
+ length -= int32(len(elem))
+ if !ok {
+ buf.WriteString(fmt.Sprintf("<malformed (%d)>", length))
+ break
+ }
+ fmt.Fprintf(&buf, "%s ", elem.DebugString())
+ }
+ buf.WriteByte('}')
+
+ return buf.String()
+}
+
+// String outputs an ExtendedJSON version of Document. If the document is not valid, this method
+// returns an empty string.
+func (d Document) String() string {
+ if len(d) < 5 {
+ return ""
+ }
+ var buf bytes.Buffer
+ buf.WriteByte('{')
+
+ length, rem, _ := ReadLength(d) // We know we have enough bytes to read the length
+
+ length -= 4
+
+ var elem Element
+ var ok bool
+ first := true
+ for length > 1 {
+ if !first {
+ buf.WriteByte(',')
+ }
+ elem, rem, ok = ReadElement(rem)
+ length -= int32(len(elem))
+ if !ok {
+ return ""
+ }
+ fmt.Fprintf(&buf, "%s", elem.String())
+ first = false
+ }
+ buf.WriteByte('}')
+
+ return buf.String()
+}
+
+// Elements returns this document as a slice of elements. The returned slice will contain valid
+// elements. If the document is not valid, the elements up to the invalid point will be returned
+// along with an error.
+func (d Document) Elements() ([]Element, error) {
+ length, rem, ok := ReadLength(d)
+ if !ok {
+ return nil, NewInsufficientBytesError(d, rem)
+ }
+
+ length -= 4
+
+ var elem Element
+ var elems []Element
+ for length > 1 {
+ elem, rem, ok = ReadElement(rem)
+ length -= int32(len(elem))
+ if !ok {
+ return elems, NewInsufficientBytesError(d, rem)
+ }
+ if err := elem.Validate(); err != nil {
+ return elems, err
+ }
+ elems = append(elems, elem)
+ }
+ return elems, nil
+}
+
+// Values returns this document as a slice of values. The returned slice will contain valid values.
+// If the document is not valid, the values up to the invalid point will be returned along with an
+// error.
+func (d Document) Values() ([]Value, error) {
+ return values(d)
+}
+
+func values(b []byte) ([]Value, error) {
+ length, rem, ok := ReadLength(b)
+ if !ok {
+ return nil, NewInsufficientBytesError(b, rem)
+ }
+
+ length -= 4
+
+ var elem Element
+ var vals []Value
+ for length > 1 {
+ elem, rem, ok = ReadElement(rem)
+ length -= int32(len(elem))
+ if !ok {
+ return vals, NewInsufficientBytesError(b, rem)
+ }
+ if err := elem.Value().Validate(); err != nil {
+ return vals, err
+ }
+ vals = append(vals, elem.Value())
+ }
+ return vals, nil
+}
+
+// Validate validates the document and ensures the elements contained within are valid.
+func (d Document) Validate() error {
+ length, rem, ok := ReadLength(d)
+ if !ok {
+ return NewInsufficientBytesError(d, rem)
+ }
+ if int(length) > len(d) {
+ return NewDocumentLengthError(int(length), len(d))
+ }
+ if d[length-1] != 0x00 {
+ return ErrMissingNull
+ }
+
+ length -= 4
+ var elem Element
+
+ for length > 1 {
+ elem, rem, ok = ReadElement(rem)
+ length -= int32(len(elem))
+ if !ok {
+ return NewInsufficientBytesError(d, rem)
+ }
+ err := elem.Validate()
+ if err != nil {
+ return err
+ }
+ }
+
+ if len(rem) < 1 || rem[0] != 0x00 {
+ return ErrMissingNull
+ }
+ return nil
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/document_sequence.go b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/document_sequence.go
new file mode 100644
index 000000000..e35bd0cd9
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/document_sequence.go
@@ -0,0 +1,189 @@
+// Copyright (C) MongoDB, Inc. 2022-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncore
+
+import (
+ "errors"
+ "io"
+
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+)
+
+// DocumentSequenceStyle is used to represent how a document sequence is laid out in a slice of
+// bytes.
+type DocumentSequenceStyle uint32
+
+// These constants are the valid styles for a DocumentSequence.
+const (
+ _ DocumentSequenceStyle = iota
+ SequenceStyle
+ ArrayStyle
+)
+
+// DocumentSequence represents a sequence of documents. The Style field indicates how the documents
+// are laid out inside of the Data field.
+type DocumentSequence struct {
+ Style DocumentSequenceStyle
+ Data []byte
+ Pos int
+}
+
+// ErrCorruptedDocument is returned when a full document couldn't be read from the sequence.
+var ErrCorruptedDocument = errors.New("invalid DocumentSequence: corrupted document")
+
+// ErrNonDocument is returned when a DocumentSequence contains a non-document BSON value.
+var ErrNonDocument = errors.New("invalid DocumentSequence: a non-document value was found in sequence")
+
+// ErrInvalidDocumentSequenceStyle is returned when an unknown DocumentSequenceStyle is set on a
+// DocumentSequence.
+var ErrInvalidDocumentSequenceStyle = errors.New("invalid DocumentSequenceStyle")
+
+// DocumentCount returns the number of documents in the sequence.
+func (ds *DocumentSequence) DocumentCount() int {
+ if ds == nil {
+ return 0
+ }
+ switch ds.Style {
+ case SequenceStyle:
+ var count int
+ var ok bool
+ rem := ds.Data
+ for len(rem) > 0 {
+ _, rem, ok = ReadDocument(rem)
+ if !ok {
+ return 0
+ }
+ count++
+ }
+ return count
+ case ArrayStyle:
+ _, rem, ok := ReadLength(ds.Data)
+ if !ok {
+ return 0
+ }
+
+ var count int
+ for len(rem) > 1 {
+ _, rem, ok = ReadElement(rem)
+ if !ok {
+ return 0
+ }
+ count++
+ }
+ return count
+ default:
+ return 0
+ }
+}
+
+// Empty returns true if the sequence is empty. It always returns true for unknown sequence styles.
+func (ds *DocumentSequence) Empty() bool {
+ if ds == nil {
+ return true
+ }
+
+ switch ds.Style {
+ case SequenceStyle:
+ return len(ds.Data) == 0
+ case ArrayStyle:
+ return len(ds.Data) <= 5
+ default:
+ return true
+ }
+}
+
+// ResetIterator resets the iteration point for the Next method to the beginning of the document
+// sequence.
+func (ds *DocumentSequence) ResetIterator() {
+ if ds == nil {
+ return
+ }
+ ds.Pos = 0
+}
+
+// Documents returns a slice of the documents. If the returned slice is nil, either the Data
+// field is also nil or the documents could not be properly read.
+func (ds *DocumentSequence) Documents() ([]Document, error) {
+ if ds == nil {
+ return nil, nil
+ }
+ switch ds.Style {
+ case SequenceStyle:
+ rem := ds.Data
+ var docs []Document
+ var doc Document
+ var ok bool
+ for {
+ doc, rem, ok = ReadDocument(rem)
+ if !ok {
+ if len(rem) == 0 {
+ break
+ }
+ return nil, ErrCorruptedDocument
+ }
+ docs = append(docs, doc)
+ }
+ return docs, nil
+ case ArrayStyle:
+ if len(ds.Data) == 0 {
+ return nil, nil
+ }
+ vals, err := Document(ds.Data).Values()
+ if err != nil {
+ return nil, ErrCorruptedDocument
+ }
+ docs := make([]Document, 0, len(vals))
+ for _, v := range vals {
+ if v.Type != bsontype.EmbeddedDocument {
+ return nil, ErrNonDocument
+ }
+ docs = append(docs, v.Data)
+ }
+ return docs, nil
+ default:
+ return nil, ErrInvalidDocumentSequenceStyle
+ }
+}
+
+// Next retrieves the next document from this sequence and returns it. This method will return
+// io.EOF when it has reached the end of the sequence.
+func (ds *DocumentSequence) Next() (Document, error) {
+ if ds == nil || ds.Pos >= len(ds.Data) {
+ return nil, io.EOF
+ }
+ switch ds.Style {
+ case SequenceStyle:
+ doc, _, ok := ReadDocument(ds.Data[ds.Pos:])
+ if !ok {
+ return nil, ErrCorruptedDocument
+ }
+ ds.Pos += len(doc)
+ return doc, nil
+ case ArrayStyle:
+ if ds.Pos < 4 {
+ if len(ds.Data) < 4 {
+ return nil, ErrCorruptedDocument
+ }
+ ds.Pos = 4 // Skip the length of the document
+ }
+ if len(ds.Data[ds.Pos:]) == 1 && ds.Data[ds.Pos] == 0x00 {
+ return nil, io.EOF // At the end of the document
+ }
+ elem, _, ok := ReadElement(ds.Data[ds.Pos:])
+ if !ok {
+ return nil, ErrCorruptedDocument
+ }
+ ds.Pos += len(elem)
+ val := elem.Value()
+ if val.Type != bsontype.EmbeddedDocument {
+ return nil, ErrNonDocument
+ }
+ return val.Data, nil
+ default:
+ return nil, ErrInvalidDocumentSequenceStyle
+ }
+}
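
[Editor's note: a short sketch, not part of the diff, of draining a SequenceStyle sequence with Next, which reports end-of-sequence as io.EOF.]

    func exampleDrain(data []byte) ([]Document, error) {
        seq := &DocumentSequence{Style: SequenceStyle, Data: data}
        var docs []Document
        for {
            doc, err := seq.Next()
            if err == io.EOF {
                return docs, nil // reached the end of the sequence
            }
            if err != nil {
                return nil, err // e.g. ErrCorruptedDocument
            }
            docs = append(docs, doc)
        }
    }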
diff --git a/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/element.go b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/element.go
new file mode 100644
index 000000000..3acb4222b
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/element.go
@@ -0,0 +1,152 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncore
+
+import (
+ "bytes"
+ "fmt"
+
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+)
+
+// MalformedElementError represents a class of errors that RawElement methods return.
+type MalformedElementError string
+
+func (mee MalformedElementError) Error() string { return string(mee) }
+
+// ErrElementMissingKey is returned when a RawElement is missing a key.
+const ErrElementMissingKey MalformedElementError = "element is missing key"
+
+// ErrElementMissingType is returned when a RawElement is missing a type.
+const ErrElementMissingType MalformedElementError = "element is missing type"
+
+// Element is a raw bytes representation of a BSON element.
+type Element []byte
+
+// Key returns the key for this element. If the element is not valid, this method returns an empty
+// string. If it is important to know whether the element is valid, use KeyErr.
+func (e Element) Key() string {
+ key, _ := e.KeyErr()
+ return key
+}
+
+// KeyBytes returns the key for this element as a []byte. If the element is not valid, this method
+// returns nil. If it is important to know whether the element is valid, use KeyBytesErr. This
+// method will not include the null byte at the end of the key in the returned slice.
+func (e Element) KeyBytes() []byte {
+ key, _ := e.KeyBytesErr()
+ return key
+}
+
+// KeyErr returns the key for this element, returning an error if the element is not valid.
+func (e Element) KeyErr() (string, error) {
+ key, err := e.KeyBytesErr()
+ return string(key), err
+}
+
+// KeyBytesErr returns the key for this element as a []byte, returning an error if the element is
+// not valid.
+func (e Element) KeyBytesErr() ([]byte, error) {
+ if len(e) <= 0 {
+ return nil, ErrElementMissingType
+ }
+ idx := bytes.IndexByte(e[1:], 0x00)
+ if idx == -1 {
+ return nil, ErrElementMissingKey
+ }
+ return e[1 : idx+1], nil
+}
+
+// Validate ensures the element is a valid BSON element.
+func (e Element) Validate() error {
+ if len(e) < 1 {
+ return ErrElementMissingType
+ }
+ idx := bytes.IndexByte(e[1:], 0x00)
+ if idx == -1 {
+ return ErrElementMissingKey
+ }
+ return Value{Type: bsontype.Type(e[0]), Data: e[idx+2:]}.Validate()
+}
+
+// CompareKey will compare this element's key to key. This method makes it easy to compare keys
+// without needing to allocate a string. The key may be null terminated. If a valid key cannot be
+// read this method will return false.
+func (e Element) CompareKey(key []byte) bool {
+ if len(e) < 2 {
+ return false
+ }
+ idx := bytes.IndexByte(e[1:], 0x00)
+ if idx == -1 {
+ return false
+ }
+ if index := bytes.IndexByte(key, 0x00); index > -1 {
+ key = key[:index]
+ }
+ return bytes.Equal(e[1:idx+1], key)
+}
+
+// Value returns the value of this element. If the element is not valid, this method returns an
+// empty Value. If it is important to know whether the element is valid, use ValueErr.
+func (e Element) Value() Value {
+ val, _ := e.ValueErr()
+ return val
+}
+
+// ValueErr returns the value for this element, returning an error if the element is not valid.
+func (e Element) ValueErr() (Value, error) {
+ if len(e) <= 0 {
+ return Value{}, ErrElementMissingType
+ }
+ idx := bytes.IndexByte(e[1:], 0x00)
+ if idx == -1 {
+ return Value{}, ErrElementMissingKey
+ }
+
+ val, rem, exists := ReadValue(e[idx+2:], bsontype.Type(e[0]))
+ if !exists {
+ return Value{}, NewInsufficientBytesError(e, rem)
+ }
+ return val, nil
+}
+
+// String implements the fmt.Stringer interface. The output will be in extended JSON format.
+func (e Element) String() string {
+ if len(e) <= 0 {
+ return ""
+ }
+ t := bsontype.Type(e[0])
+ idx := bytes.IndexByte(e[1:], 0x00)
+ if idx == -1 {
+ return ""
+ }
+ key, valBytes := []byte(e[1:idx+1]), []byte(e[idx+2:])
+ val, _, valid := ReadValue(valBytes, t)
+ if !valid {
+ return ""
+ }
+ return fmt.Sprintf(`"%s": %v`, key, val)
+}
+
+// DebugString outputs a human-readable version of RawElement. It will attempt to stringify the
+// valid components of the element even if the entire element is not valid.
+func (e Element) DebugString() string {
+ if len(e) <= 0 {
+ return "<malformed>"
+ }
+ t := bsontype.Type(e[0])
+ idx := bytes.IndexByte(e[1:], 0x00)
+ if idx == -1 {
+ return fmt.Sprintf(`bson.Element{[%s]<malformed>}`, t)
+ }
+ key, valBytes := []byte(e[1:idx+1]), []byte(e[idx+2:])
+ val, _, valid := ReadValue(valBytes, t)
+ if !valid {
+ return fmt.Sprintf(`bson.Element{[%s]"%s": <malformed>}`, t, key)
+ }
+ return fmt.Sprintf(`bson.Element{[%s]"%s": %v}`, t, key, val)
+}
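
[Editor's note: an illustrative sketch, not part of the diff. Any Append*Element output is itself a valid Element, so the accessors above can be used on it directly.]

    func exampleElement() (string, bool) {
        elem := Element(AppendInt32Element(nil, "count", 5))
        ok := elem.CompareKey([]byte("count")) // true, without allocating a string for the key
        return elem.Key(), ok                  // "count", true
    }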
diff --git a/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/tables.go b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/tables.go
new file mode 100644
index 000000000..9fd903fd2
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/tables.go
@@ -0,0 +1,223 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+//
+// Based on github.com/golang/go by The Go Authors
+// See THIRD-PARTY-NOTICES for original license terms.
+
+package bsoncore
+
+import "unicode/utf8"
+
+// safeSet holds the value true if the ASCII character with the given array
+// position can be represented inside a JSON string without any further
+// escaping.
+//
+// All values are true except for the ASCII control characters (0-31), the
+// double quote ("), and the backslash character ("\").
+var safeSet = [utf8.RuneSelf]bool{
+ ' ': true,
+ '!': true,
+ '"': false,
+ '#': true,
+ '$': true,
+ '%': true,
+ '&': true,
+ '\'': true,
+ '(': true,
+ ')': true,
+ '*': true,
+ '+': true,
+ ',': true,
+ '-': true,
+ '.': true,
+ '/': true,
+ '0': true,
+ '1': true,
+ '2': true,
+ '3': true,
+ '4': true,
+ '5': true,
+ '6': true,
+ '7': true,
+ '8': true,
+ '9': true,
+ ':': true,
+ ';': true,
+ '<': true,
+ '=': true,
+ '>': true,
+ '?': true,
+ '@': true,
+ 'A': true,
+ 'B': true,
+ 'C': true,
+ 'D': true,
+ 'E': true,
+ 'F': true,
+ 'G': true,
+ 'H': true,
+ 'I': true,
+ 'J': true,
+ 'K': true,
+ 'L': true,
+ 'M': true,
+ 'N': true,
+ 'O': true,
+ 'P': true,
+ 'Q': true,
+ 'R': true,
+ 'S': true,
+ 'T': true,
+ 'U': true,
+ 'V': true,
+ 'W': true,
+ 'X': true,
+ 'Y': true,
+ 'Z': true,
+ '[': true,
+ '\\': false,
+ ']': true,
+ '^': true,
+ '_': true,
+ '`': true,
+ 'a': true,
+ 'b': true,
+ 'c': true,
+ 'd': true,
+ 'e': true,
+ 'f': true,
+ 'g': true,
+ 'h': true,
+ 'i': true,
+ 'j': true,
+ 'k': true,
+ 'l': true,
+ 'm': true,
+ 'n': true,
+ 'o': true,
+ 'p': true,
+ 'q': true,
+ 'r': true,
+ 's': true,
+ 't': true,
+ 'u': true,
+ 'v': true,
+ 'w': true,
+ 'x': true,
+ 'y': true,
+ 'z': true,
+ '{': true,
+ '|': true,
+ '}': true,
+ '~': true,
+ '\u007f': true,
+}
+
+// htmlSafeSet holds the value true if the ASCII character with the given
+// array position can be safely represented inside a JSON string, embedded
+// inside of HTML <script> tags, without any additional escaping.
+//
+// All values are true except for the ASCII control characters (0-31), the
+// double quote ("), the backslash character ("\"), HTML opening and closing
+// tags ("<" and ">"), and the ampersand ("&").
+var htmlSafeSet = [utf8.RuneSelf]bool{
+ ' ': true,
+ '!': true,
+ '"': false,
+ '#': true,
+ '$': true,
+ '%': true,
+ '&': false,
+ '\'': true,
+ '(': true,
+ ')': true,
+ '*': true,
+ '+': true,
+ ',': true,
+ '-': true,
+ '.': true,
+ '/': true,
+ '0': true,
+ '1': true,
+ '2': true,
+ '3': true,
+ '4': true,
+ '5': true,
+ '6': true,
+ '7': true,
+ '8': true,
+ '9': true,
+ ':': true,
+ ';': true,
+ '<': false,
+ '=': true,
+ '>': false,
+ '?': true,
+ '@': true,
+ 'A': true,
+ 'B': true,
+ 'C': true,
+ 'D': true,
+ 'E': true,
+ 'F': true,
+ 'G': true,
+ 'H': true,
+ 'I': true,
+ 'J': true,
+ 'K': true,
+ 'L': true,
+ 'M': true,
+ 'N': true,
+ 'O': true,
+ 'P': true,
+ 'Q': true,
+ 'R': true,
+ 'S': true,
+ 'T': true,
+ 'U': true,
+ 'V': true,
+ 'W': true,
+ 'X': true,
+ 'Y': true,
+ 'Z': true,
+ '[': true,
+ '\\': false,
+ ']': true,
+ '^': true,
+ '_': true,
+ '`': true,
+ 'a': true,
+ 'b': true,
+ 'c': true,
+ 'd': true,
+ 'e': true,
+ 'f': true,
+ 'g': true,
+ 'h': true,
+ 'i': true,
+ 'j': true,
+ 'k': true,
+ 'l': true,
+ 'm': true,
+ 'n': true,
+ 'o': true,
+ 'p': true,
+ 'q': true,
+ 'r': true,
+ 's': true,
+ 't': true,
+ 'u': true,
+ 'v': true,
+ 'w': true,
+ 'x': true,
+ 'y': true,
+ 'z': true,
+ '{': true,
+ '|': true,
+ '}': true,
+ '~': true,
+ '\u007f': true,
+}
diff --git a/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/value.go b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/value.go
new file mode 100644
index 000000000..789d2b982
--- /dev/null
+++ b/vendor/go.mongodb.org/mongo-driver/x/bsonx/bsoncore/value.go
@@ -0,0 +1,980 @@
+// Copyright (C) MongoDB, Inc. 2017-present.
+//
+// Licensed under the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License. You may obtain
+// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+
+package bsoncore
+
+import (
+ "bytes"
+ "encoding/base64"
+ "fmt"
+ "math"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+ "unicode/utf8"
+
+ "go.mongodb.org/mongo-driver/bson/bsontype"
+ "go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+// ElementTypeError indicates that a method to obtain a BSON value of one type was called on a
+// Value of a different type.
+type ElementTypeError struct {
+ Method string
+ Type bsontype.Type
+}
+
+// Error implements the error interface.
+func (ete ElementTypeError) Error() string {
+ return "Call of " + ete.Method + " on " + ete.Type.String() + " type"
+}
+
+// Value represents a BSON value with a type and raw bytes.
+type Value struct {
+ Type bsontype.Type
+ Data []byte
+}
+
+// Validate ensures the value is a valid BSON value.
+func (v Value) Validate() error {
+ _, _, valid := readValue(v.Data, v.Type)
+ if !valid {
+ return NewInsufficientBytesError(v.Data, v.Data)
+ }
+ return nil
+}
+
+// IsNumber returns true if the type of v is a numeric BSON type.
+func (v Value) IsNumber() bool {
+ switch v.Type {
+ case bsontype.Double, bsontype.Int32, bsontype.Int64, bsontype.Decimal128:
+ return true
+ default:
+ return false
+ }
+}
+
+// AsInt32 returns a BSON number as an int32. If the BSON type is not a numeric one, this method
+// will panic.
+//
+// TODO(skriptble): Add support for Decimal128.
+func (v Value) AsInt32() int32 {
+ if !v.IsNumber() {
+ panic(ElementTypeError{"bsoncore.Value.AsInt32", v.Type})
+ }
+ var i32 int32
+ switch v.Type {
+ case bsontype.Double:
+ f64, _, ok := ReadDouble(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ i32 = int32(f64)
+ case bsontype.Int32:
+ var ok bool
+ i32, _, ok = ReadInt32(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ case bsontype.Int64:
+ i64, _, ok := ReadInt64(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ i32 = int32(i64)
+ case bsontype.Decimal128:
+ panic(ElementTypeError{"bsoncore.Value.AsInt32", v.Type})
+ }
+ return i32
+}
+
+// AsInt32OK functions the same as AsInt32 but returns a boolean instead of panicking. False
+// indicates an error.
+//
+// TODO(skriptble): Add support for Decimal128.
+func (v Value) AsInt32OK() (int32, bool) {
+ if !v.IsNumber() {
+ return 0, false
+ }
+ var i32 int32
+ switch v.Type {
+ case bsontype.Double:
+ f64, _, ok := ReadDouble(v.Data)
+ if !ok {
+ return 0, false
+ }
+ i32 = int32(f64)
+ case bsontype.Int32:
+ var ok bool
+ i32, _, ok = ReadInt32(v.Data)
+ if !ok {
+ return 0, false
+ }
+ case bsontype.Int64:
+ i64, _, ok := ReadInt64(v.Data)
+ if !ok {
+ return 0, false
+ }
+ i32 = int32(i64)
+ case bsontype.Decimal128:
+ return 0, false
+ }
+ return i32, true
+}
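The numeric As* accessors above convert between BSON numeric types on demand. A minimal usage sketch, assuming bsoncore's exported AppendInt32 helper to build the raw bytes (illustrative only, not part of this file):

```go
package main

import (
	"fmt"

	"go.mongodb.org/mongo-driver/bson/bsontype"
	"go.mongodb.org/mongo-driver/x/bsonx/bsoncore"
)

func main() {
	// Wrap a raw little-endian int32 payload in a Value.
	v := bsoncore.Value{
		Type: bsontype.Int32,
		Data: bsoncore.AppendInt32(nil, 42),
	}

	// The OK variant converts without panicking; ok is false
	// for non-numeric types or truncated data.
	if n, ok := v.AsInt32OK(); ok {
		fmt.Println(n) // 42
	}
}
```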
+
+// AsInt64 returns a BSON number as an int64. If the BSON type is not a numeric one, this method
+// will panic.
+//
+// TODO(skriptble): Add support for Decimal128.
+func (v Value) AsInt64() int64 {
+ if !v.IsNumber() {
+ panic(ElementTypeError{"bsoncore.Value.AsInt64", v.Type})
+ }
+ var i64 int64
+ switch v.Type {
+ case bsontype.Double:
+ f64, _, ok := ReadDouble(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ i64 = int64(f64)
+ case bsontype.Int32:
+ var ok bool
+ i32, _, ok := ReadInt32(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ i64 = int64(i32)
+ case bsontype.Int64:
+ var ok bool
+ i64, _, ok = ReadInt64(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ case bsontype.Decimal128:
+ panic(ElementTypeError{"bsoncore.Value.AsInt64", v.Type})
+ }
+ return i64
+}
+
+// AsInt64OK functions the same as AsInt64 but returns a boolean instead of panicking. False
+// indicates an error.
+//
+// TODO(skriptble): Add support for Decimal128.
+func (v Value) AsInt64OK() (int64, bool) {
+ if !v.IsNumber() {
+ return 0, false
+ }
+ var i64 int64
+ switch v.Type {
+ case bsontype.Double:
+ f64, _, ok := ReadDouble(v.Data)
+ if !ok {
+ return 0, false
+ }
+ i64 = int64(f64)
+ case bsontype.Int32:
+ var ok bool
+ i32, _, ok := ReadInt32(v.Data)
+ if !ok {
+ return 0, false
+ }
+ i64 = int64(i32)
+ case bsontype.Int64:
+ var ok bool
+ i64, _, ok = ReadInt64(v.Data)
+ if !ok {
+ return 0, false
+ }
+ case bsontype.Decimal128:
+ return 0, false
+ }
+ return i64, true
+}
+
+// AsFloat64 returns a BSON number as a float64. If the BSON type is not a numeric one, this method
+// will panic.
+//
+// NOTE: This is currently an unimplemented stub that always returns 0.
+//
+// TODO(skriptble): Add support for Decimal128.
+func (v Value) AsFloat64() float64 { return 0 }
+
+// AsFloat64OK functions the same as AsFloat64 but returns a boolean instead of panicking. False
+// indicates an error.
+//
+// NOTE: This is currently an unimplemented stub that always returns (0, false).
+//
+// TODO(skriptble): Add support for Decimal128.
+func (v Value) AsFloat64OK() (float64, bool) { return 0, false }
+
+// Add will add this value to another. This is currently only implemented for strings and numbers.
+// If either value is a string, the other type is coerced into a string and added to the other.
+//
+// This method will alter v and will attempt to reuse the []byte of v. If the []byte is too small,
+// it will be expanded.
+//
+// NOTE: This is currently an unimplemented stub that always returns nil.
+func (v *Value) Add(v2 Value) error { return nil }
+
+// Equal compares v to v2 and returns true if they are equal.
+func (v Value) Equal(v2 Value) bool {
+ if v.Type != v2.Type {
+ return false
+ }
+
+ return bytes.Equal(v.Data, v2.Data)
+}
+
+// String implements the fmt.Stringer interface. This method will return values in extended JSON
+// format. If the value is not valid, this returns an empty string.
+func (v Value) String() string {
+ switch v.Type {
+ case bsontype.Double:
+ f64, ok := v.DoubleOK()
+ if !ok {
+ return ""
+ }
+ return fmt.Sprintf(`{"$numberDouble":"%s"}`, formatDouble(f64))
+ case bsontype.String:
+ str, ok := v.StringValueOK()
+ if !ok {
+ return ""
+ }
+ return escapeString(str)
+ case bsontype.EmbeddedDocument:
+ doc, ok := v.DocumentOK()
+ if !ok {
+ return ""
+ }
+ return doc.String()
+ case bsontype.Array:
+ arr, ok := v.ArrayOK()
+ if !ok {
+ return ""
+ }
+ return arr.String()
+ case bsontype.Binary:
+ subtype, data, ok := v.BinaryOK()
+ if !ok {
+ return ""
+ }
+ return fmt.Sprintf(`{"$binary":{"base64":"%s","subType":"%02x"}}`, base64.StdEncoding.EncodeToString(data), subtype)
+ case bsontype.Undefined:
+ return `{"$undefined":true}`
+ case bsontype.ObjectID:
+ oid, ok := v.ObjectIDOK()
+ if !ok {
+ return ""
+ }
+ return fmt.Sprintf(`{"$oid":"%s"}`, oid.Hex())
+ case bsontype.Boolean:
+ b, ok := v.BooleanOK()
+ if !ok {
+ return ""
+ }
+ return strconv.FormatBool(b)
+ case bsontype.DateTime:
+ dt, ok := v.DateTimeOK()
+ if !ok {
+ return ""
+ }
+ return fmt.Sprintf(`{"$date":{"$numberLong":"%d"}}`, dt)
+ case bsontype.Null:
+ return "null"
+ case bsontype.Regex:
+ pattern, options, ok := v.RegexOK()
+ if !ok {
+ return ""
+ }
+ return fmt.Sprintf(
+ `{"$regularExpression":{"pattern":%s,"options":"%s"}}`,
+ escapeString(pattern), sortStringAlphebeticAscending(options),
+ )
+ case bsontype.DBPointer:
+ ns, pointer, ok := v.DBPointerOK()
+ if !ok {
+ return ""
+ }
+ return fmt.Sprintf(`{"$dbPointer":{"$ref":%s,"$id":{"$oid":"%s"}}}`, escapeString(ns), pointer.Hex())
+ case bsontype.JavaScript:
+ js, ok := v.JavaScriptOK()
+ if !ok {
+ return ""
+ }
+ return fmt.Sprintf(`{"$code":%s}`, escapeString(js))
+ case bsontype.Symbol:
+ symbol, ok := v.SymbolOK()
+ if !ok {
+ return ""
+ }
+ return fmt.Sprintf(`{"$symbol":%s}`, escapeString(symbol))
+ case bsontype.CodeWithScope:
+ code, scope, ok := v.CodeWithScopeOK()
+ if !ok {
+ return ""
+ }
+ return fmt.Sprintf(`{"$code":%s,"$scope":%s}`, code, scope)
+ case bsontype.Int32:
+ i32, ok := v.Int32OK()
+ if !ok {
+ return ""
+ }
+ return fmt.Sprintf(`{"$numberInt":"%d"}`, i32)
+ case bsontype.Timestamp:
+ t, i, ok := v.TimestampOK()
+ if !ok {
+ return ""
+ }
+ return fmt.Sprintf(`{"$timestamp":{"t":%v,"i":%v}}`, t, i)
+ case bsontype.Int64:
+ i64, ok := v.Int64OK()
+ if !ok {
+ return ""
+ }
+ return fmt.Sprintf(`{"$numberLong":"%d"}`, i64)
+ case bsontype.Decimal128:
+ d128, ok := v.Decimal128OK()
+ if !ok {
+ return ""
+ }
+ return fmt.Sprintf(`{"$numberDecimal":"%s"}`, d128.String())
+ case bsontype.MinKey:
+ return `{"$minKey":1}`
+ case bsontype.MaxKey:
+ return `{"$maxKey":1}`
+ default:
+ return ""
+ }
+}
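String emits canonical extended JSON per type, as the cases above show. A brief sketch of the expected output, again using the package's append helpers to build values (illustrative only):

```go
v := bsoncore.Value{Type: bsontype.Int64, Data: bsoncore.AppendInt64(nil, 7)}
fmt.Println(v.String()) // {"$numberLong":"7"}

b := bsoncore.Value{Type: bsontype.Boolean, Data: bsoncore.AppendBoolean(nil, true)}
fmt.Println(b.String()) // true
```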
+
+// DebugString outputs a human-readable version of Document. It will attempt to stringify the
+// valid components of the document even if the entire document is not valid.
+func (v Value) DebugString() string {
+ switch v.Type {
+ case bsontype.String:
+ str, ok := v.StringValueOK()
+ if !ok {
+ return "<malformed>"
+ }
+ return escapeString(str)
+ case bsontype.EmbeddedDocument:
+ doc, ok := v.DocumentOK()
+ if !ok {
+ return "<malformed>"
+ }
+ return doc.DebugString()
+ case bsontype.Array:
+ arr, ok := v.ArrayOK()
+ if !ok {
+ return "<malformed>"
+ }
+ return arr.DebugString()
+ case bsontype.CodeWithScope:
+ code, scope, ok := v.CodeWithScopeOK()
+ if !ok {
+ return ""
+ }
+ return fmt.Sprintf(`{"$code":%s,"$scope":%s}`, code, scope.DebugString())
+ default:
+ str := v.String()
+ if str == "" {
+ return "<malformed>"
+ }
+ return str
+ }
+}
+
+// Double returns the float64 value for this element.
+// It panics if e's BSON type is not bsontype.Double.
+func (v Value) Double() float64 {
+ if v.Type != bsontype.Double {
+ panic(ElementTypeError{"bsoncore.Value.Double", v.Type})
+ }
+ f64, _, ok := ReadDouble(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return f64
+}
+
+// DoubleOK is the same as Double, but returns a boolean instead of panicking.
+func (v Value) DoubleOK() (float64, bool) {
+ if v.Type != bsontype.Double {
+ return 0, false
+ }
+ f64, _, ok := ReadDouble(v.Data)
+ if !ok {
+ return 0, false
+ }
+ return f64, true
+}
+
+// StringValue returns the string value for this element.
+// It panics if e's BSON type is not bsontype.String.
+//
+// NOTE: This method is called StringValue to avoid a collision with the String method which
+// implements the fmt.Stringer interface.
+func (v Value) StringValue() string {
+ if v.Type != bsontype.String {
+ panic(ElementTypeError{"bsoncore.Value.StringValue", v.Type})
+ }
+ str, _, ok := ReadString(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return str
+}
+
+// StringValueOK is the same as StringValue, but returns a boolean instead of
+// panicking.
+func (v Value) StringValueOK() (string, bool) {
+ if v.Type != bsontype.String {
+ return "", false
+ }
+ str, _, ok := ReadString(v.Data)
+ if !ok {
+ return "", false
+ }
+ return str, true
+}
+
+// Document returns the BSON document the Value represents as a Document. It panics if the
+// value is a BSON type other than document.
+func (v Value) Document() Document {
+ if v.Type != bsontype.EmbeddedDocument {
+ panic(ElementTypeError{"bsoncore.Value.Document", v.Type})
+ }
+ doc, _, ok := ReadDocument(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return doc
+}
+
+// DocumentOK is the same as Document, except it returns a boolean
+// instead of panicking.
+func (v Value) DocumentOK() (Document, bool) {
+ if v.Type != bsontype.EmbeddedDocument {
+ return nil, false
+ }
+ doc, _, ok := ReadDocument(v.Data)
+ if !ok {
+ return nil, false
+ }
+ return doc, true
+}
+
+// Array returns the BSON array the Value represents as an Array. It panics if the
+// value is a BSON type other than array.
+func (v Value) Array() Array {
+ if v.Type != bsontype.Array {
+ panic(ElementTypeError{"bsoncore.Value.Array", v.Type})
+ }
+ arr, _, ok := ReadArray(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return arr
+}
+
+// ArrayOK is the same as Array, except it returns a boolean instead
+// of panicking.
+func (v Value) ArrayOK() (Array, bool) {
+ if v.Type != bsontype.Array {
+ return nil, false
+ }
+ arr, _, ok := ReadArray(v.Data)
+ if !ok {
+ return nil, false
+ }
+ return arr, true
+}
+
+// Binary returns the BSON binary value the Value represents. It panics if the value is a BSON type
+// other than binary.
+func (v Value) Binary() (subtype byte, data []byte) {
+ if v.Type != bsontype.Binary {
+ panic(ElementTypeError{"bsoncore.Value.Binary", v.Type})
+ }
+ subtype, data, _, ok := ReadBinary(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return subtype, data
+}
+
+// BinaryOK is the same as Binary, except it returns a boolean instead of
+// panicking.
+func (v Value) BinaryOK() (subtype byte, data []byte, ok bool) {
+ if v.Type != bsontype.Binary {
+ return 0x00, nil, false
+ }
+ subtype, data, _, ok = ReadBinary(v.Data)
+ if !ok {
+ return 0x00, nil, false
+ }
+ return subtype, data, true
+}
+
+// ObjectID returns the BSON objectid value the Value represents. It panics if the value is a BSON
+// type other than objectid.
+func (v Value) ObjectID() primitive.ObjectID {
+ if v.Type != bsontype.ObjectID {
+ panic(ElementTypeError{"bsoncore.Value.ObjectID", v.Type})
+ }
+ oid, _, ok := ReadObjectID(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return oid
+}
+
+// ObjectIDOK is the same as ObjectID, except it returns a boolean instead of
+// panicking.
+func (v Value) ObjectIDOK() (primitive.ObjectID, bool) {
+ if v.Type != bsontype.ObjectID {
+ return primitive.ObjectID{}, false
+ }
+ oid, _, ok := ReadObjectID(v.Data)
+ if !ok {
+ return primitive.ObjectID{}, false
+ }
+ return oid, true
+}
+
+// Boolean returns the boolean value the Value represents. It panics if the
+// value is a BSON type other than boolean.
+func (v Value) Boolean() bool {
+ if v.Type != bsontype.Boolean {
+ panic(ElementTypeError{"bsoncore.Value.Boolean", v.Type})
+ }
+ b, _, ok := ReadBoolean(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return b
+}
+
+// BooleanOK is the same as Boolean, except it returns a boolean instead of
+// panicking.
+func (v Value) BooleanOK() (bool, bool) {
+ if v.Type != bsontype.Boolean {
+ return false, false
+ }
+ b, _, ok := ReadBoolean(v.Data)
+ if !ok {
+ return false, false
+ }
+ return b, true
+}
+
+// DateTime returns the BSON datetime value the Value represents as a
+// unix timestamp. It panics if the value is a BSON type other than datetime.
+func (v Value) DateTime() int64 {
+ if v.Type != bsontype.DateTime {
+ panic(ElementTypeError{"bsoncore.Value.DateTime", v.Type})
+ }
+ dt, _, ok := ReadDateTime(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return dt
+}
+
+// DateTimeOK is the same as DateTime, except it returns a boolean instead of
+// panicking.
+func (v Value) DateTimeOK() (int64, bool) {
+ if v.Type != bsontype.DateTime {
+ return 0, false
+ }
+ dt, _, ok := ReadDateTime(v.Data)
+ if !ok {
+ return 0, false
+ }
+ return dt, true
+}
+
+// Time returns the BSON datetime value the Value represents. It panics if the value is a BSON
+// type other than datetime.
+func (v Value) Time() time.Time {
+ if v.Type != bsontype.DateTime {
+ panic(ElementTypeError{"bsoncore.Value.Time", v.Type})
+ }
+ dt, _, ok := ReadDateTime(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return time.Unix(dt/1000, dt%1000*1000000)
+}
+
+// TimeOK is the same as Time, except it returns a boolean instead of
+// panicking.
+func (v Value) TimeOK() (time.Time, bool) {
+ if v.Type != bsontype.DateTime {
+ return time.Time{}, false
+ }
+ dt, _, ok := ReadDateTime(v.Data)
+ if !ok {
+ return time.Time{}, false
+ }
+ return time.Unix(dt/1000, dt%1000*1000000), true
+}
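BSON datetimes store milliseconds since the Unix epoch; Time splits them into whole seconds plus the nanosecond remainder, as above. A quick check of that arithmetic:

```go
dt := int64(1577836800123) // 2020-01-01T00:00:00.123Z in milliseconds
t := time.Unix(dt/1000, dt%1000*1000000)
fmt.Println(t.UTC()) // 2020-01-01 00:00:00.123 +0000 UTC
```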
+
+// Regex returns the BSON regex value the Value represents. It panics if the value is a BSON
+// type other than regex.
+func (v Value) Regex() (pattern, options string) {
+ if v.Type != bsontype.Regex {
+ panic(ElementTypeError{"bsoncore.Value.Regex", v.Type})
+ }
+ pattern, options, _, ok := ReadRegex(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return pattern, options
+}
+
+// RegexOK is the same as Regex, except it returns a boolean instead of
+// panicking.
+func (v Value) RegexOK() (pattern, options string, ok bool) {
+ if v.Type != bsontype.Regex {
+ return "", "", false
+ }
+ pattern, options, _, ok = ReadRegex(v.Data)
+ if !ok {
+ return "", "", false
+ }
+ return pattern, options, true
+}
+
+// DBPointer returns the BSON dbpointer value the Value represents. It panics if the value is a BSON
+// type other than DBPointer.
+func (v Value) DBPointer() (string, primitive.ObjectID) {
+ if v.Type != bsontype.DBPointer {
+ panic(ElementTypeError{"bsoncore.Value.DBPointer", v.Type})
+ }
+ ns, pointer, _, ok := ReadDBPointer(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return ns, pointer
+}
+
+// DBPointerOK is the same as DBPointer, except that it returns a boolean
+// instead of panicking.
+func (v Value) DBPointerOK() (string, primitive.ObjectID, bool) {
+ if v.Type != bsontype.DBPointer {
+ return "", primitive.ObjectID{}, false
+ }
+ ns, pointer, _, ok := ReadDBPointer(v.Data)
+ if !ok {
+ return "", primitive.ObjectID{}, false
+ }
+ return ns, pointer, true
+}
+
+// JavaScript returns the BSON JavaScript code value the Value represents. It panics if the value is
+// a BSON type other than JavaScript code.
+func (v Value) JavaScript() string {
+ if v.Type != bsontype.JavaScript {
+ panic(ElementTypeError{"bsoncore.Value.JavaScript", v.Type})
+ }
+ js, _, ok := ReadJavaScript(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return js
+}
+
+// JavaScriptOK is the same as JavaScript, except that it returns a boolean
+// instead of panicking.
+func (v Value) JavaScriptOK() (string, bool) {
+ if v.Type != bsontype.JavaScript {
+ return "", false
+ }
+ js, _, ok := ReadJavaScript(v.Data)
+ if !ok {
+ return "", false
+ }
+ return js, true
+}
+
+// Symbol returns the BSON symbol value the Value represents. It panics if the value is a BSON
+// type other than symbol.
+func (v Value) Symbol() string {
+ if v.Type != bsontype.Symbol {
+ panic(ElementTypeError{"bsoncore.Value.Symbol", v.Type})
+ }
+ symbol, _, ok := ReadSymbol(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return symbol
+}
+
+// SymbolOK is the same as Symbol, except that it returns a boolean
+// instead of panicking.
+func (v Value) SymbolOK() (string, bool) {
+ if v.Type != bsontype.Symbol {
+ return "", false
+ }
+ symbol, _, ok := ReadSymbol(v.Data)
+ if !ok {
+ return "", false
+ }
+ return symbol, true
+}
+
+// CodeWithScope returns the BSON JavaScript code with scope the Value represents.
+// It panics if the value is a BSON type other than JavaScript code with scope.
+func (v Value) CodeWithScope() (string, Document) {
+ if v.Type != bsontype.CodeWithScope {
+ panic(ElementTypeError{"bsoncore.Value.CodeWithScope", v.Type})
+ }
+ code, scope, _, ok := ReadCodeWithScope(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return code, scope
+}
+
+// CodeWithScopeOK is the same as CodeWithScope, except that it returns a boolean instead of
+// panicking.
+func (v Value) CodeWithScopeOK() (string, Document, bool) {
+ if v.Type != bsontype.CodeWithScope {
+ return "", nil, false
+ }
+ code, scope, _, ok := ReadCodeWithScope(v.Data)
+ if !ok {
+ return "", nil, false
+ }
+ return code, scope, true
+}
+
+// Int32 returns the int32 the Value represents. It panics if the value is a BSON type other than
+// int32.
+func (v Value) Int32() int32 {
+ if v.Type != bsontype.Int32 {
+ panic(ElementTypeError{"bsoncore.Value.Int32", v.Type})
+ }
+ i32, _, ok := ReadInt32(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return i32
+}
+
+// Int32OK is the same as Int32, except that it returns a boolean instead of
+// panicking.
+func (v Value) Int32OK() (int32, bool) {
+ if v.Type != bsontype.Int32 {
+ return 0, false
+ }
+ i32, _, ok := ReadInt32(v.Data)
+ if !ok {
+ return 0, false
+ }
+ return i32, true
+}
+
+// Timestamp returns the BSON timestamp value the Value represents. It panics if the value is a
+// BSON type other than timestamp.
+func (v Value) Timestamp() (t, i uint32) {
+ if v.Type != bsontype.Timestamp {
+ panic(ElementTypeError{"bsoncore.Value.Timestamp", v.Type})
+ }
+ t, i, _, ok := ReadTimestamp(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return t, i
+}
+
+// TimestampOK is the same as Timestamp, except that it returns a boolean
+// instead of panicking.
+func (v Value) TimestampOK() (t, i uint32, ok bool) {
+ if v.Type != bsontype.Timestamp {
+ return 0, 0, false
+ }
+ t, i, _, ok = ReadTimestamp(v.Data)
+ if !ok {
+ return 0, 0, false
+ }
+ return t, i, true
+}
+
+// Int64 returns the int64 the Value represents. It panics if the value is a BSON type other than
+// int64.
+func (v Value) Int64() int64 {
+ if v.Type != bsontype.Int64 {
+ panic(ElementTypeError{"bsoncore.Value.Int64", v.Type})
+ }
+ i64, _, ok := ReadInt64(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return i64
+}
+
+// Int64OK is the same as Int64, except that it returns a boolean instead of
+// panicking.
+func (v Value) Int64OK() (int64, bool) {
+ if v.Type != bsontype.Int64 {
+ return 0, false
+ }
+ i64, _, ok := ReadInt64(v.Data)
+ if !ok {
+ return 0, false
+ }
+ return i64, true
+}
+
+// Decimal128 returns the decimal the Value represents. It panics if the value is a BSON type other than
+// decimal.
+func (v Value) Decimal128() primitive.Decimal128 {
+ if v.Type != bsontype.Decimal128 {
+ panic(ElementTypeError{"bsoncore.Value.Decimal128", v.Type})
+ }
+ d128, _, ok := ReadDecimal128(v.Data)
+ if !ok {
+ panic(NewInsufficientBytesError(v.Data, v.Data))
+ }
+ return d128
+}
+
+// Decimal128OK is the same as Decimal128, except that it returns a boolean
+// instead of panicking.
+func (v Value) Decimal128OK() (primitive.Decimal128, bool) {
+ if v.Type != bsontype.Decimal128 {
+ return primitive.Decimal128{}, false
+ }
+ d128, _, ok := ReadDecimal128(v.Data)
+ if !ok {
+ return primitive.Decimal128{}, false
+ }
+ return d128, true
+}
+
+var hexChars = "0123456789abcdef"
+
+func escapeString(s string) string {
+ escapeHTML := true
+ var buf bytes.Buffer
+ buf.WriteByte('"')
+ start := 0
+ for i := 0; i < len(s); {
+ if b := s[i]; b < utf8.RuneSelf {
+ if htmlSafeSet[b] || (!escapeHTML && safeSet[b]) {
+ i++
+ continue
+ }
+ if start < i {
+ buf.WriteString(s[start:i])
+ }
+ switch b {
+ case '\\', '"':
+ buf.WriteByte('\\')
+ buf.WriteByte(b)
+ case '\n':
+ buf.WriteByte('\\')
+ buf.WriteByte('n')
+ case '\r':
+ buf.WriteByte('\\')
+ buf.WriteByte('r')
+ case '\t':
+ buf.WriteByte('\\')
+ buf.WriteByte('t')
+ case '\b':
+ buf.WriteByte('\\')
+ buf.WriteByte('b')
+ case '\f':
+ buf.WriteByte('\\')
+ buf.WriteByte('f')
+ default:
+ // This encodes bytes < 0x20 except for \t, \n and \r.
+ // If escapeHTML is set, it also escapes <, >, and &
+ // because they can lead to security holes when
+ // user-controlled strings are rendered into JSON
+ // and served to some browsers.
+ buf.WriteString(`\u00`)
+ buf.WriteByte(hexChars[b>>4])
+ buf.WriteByte(hexChars[b&0xF])
+ }
+ i++
+ start = i
+ continue
+ }
+ c, size := utf8.DecodeRuneInString(s[i:])
+ if c == utf8.RuneError && size == 1 {
+ if start < i {
+ buf.WriteString(s[start:i])
+ }
+ buf.WriteString(`\ufffd`)
+ i += size
+ start = i
+ continue
+ }
+ // U+2028 is LINE SEPARATOR.
+ // U+2029 is PARAGRAPH SEPARATOR.
+ // They are both technically valid characters in JSON strings,
+ // but don't work in JSONP, which has to be evaluated as JavaScript,
+ // and can lead to security holes there. It is valid JSON to
+ // escape them, so we do so unconditionally.
+ // See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion.
+ if c == '\u2028' || c == '\u2029' {
+ if start < i {
+ buf.WriteString(s[start:i])
+ }
+ buf.WriteString(`\u202`)
+ buf.WriteByte(hexChars[c&0xF])
+ i += size
+ start = i
+ continue
+ }
+ i += size
+ }
+ if start < len(s) {
+ buf.WriteString(s[start:])
+ }
+ buf.WriteByte('"')
+ return buf.String()
+}
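Since escapeHTML is hard-wired to true, `<`, `>`, and `&` always take the `\u00XX` branch even though they are legal in JSON strings. A sketch of the resulting behavior (escapeString is unexported, so this would only run inside the package, e.g. in a test):

```go
fmt.Println(escapeString(`<a href="x">`))
// Output: "\u003ca href=\"x\"\u003e"
```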
+
+func formatDouble(f float64) string {
+ var s string
+ if math.IsInf(f, 1) {
+ s = "Infinity"
+ } else if math.IsInf(f, -1) {
+ s = "-Infinity"
+ } else if math.IsNaN(f) {
+ s = "NaN"
+ } else {
+ // Print exactly one decimal place for integers; otherwise, print as many as are
+ // necessary to perfectly represent it.
+ s = strconv.FormatFloat(f, 'G', -1, 64)
+ if !strings.ContainsRune(s, '.') {
+ s += ".0"
+ }
+ }
+
+ return s
+}
+
+type sortableString []rune
+
+func (ss sortableString) Len() int {
+ return len(ss)
+}
+
+func (ss sortableString) Less(i, j int) bool {
+ return ss[i] < ss[j]
+}
+
+func (ss sortableString) Swap(i, j int) {
+ oldI := ss[i]
+ ss[i] = ss[j]
+ ss[j] = oldI
+}
+
+func sortStringAlphebeticAscending(s string) string {
+ ss := sortableString([]rune(s))
+ sort.Sort(ss)
+ return string([]rune(ss))
+}
diff --git a/vendor/golang.org/x/crypto/scrypt/scrypt.go b/vendor/golang.org/x/crypto/scrypt/scrypt.go
new file mode 100644
index 000000000..c971a99fa
--- /dev/null
+++ b/vendor/golang.org/x/crypto/scrypt/scrypt.go
@@ -0,0 +1,212 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package scrypt implements the scrypt key derivation function as defined in
+// Colin Percival's paper "Stronger Key Derivation via Sequential Memory-Hard
+// Functions" (https://www.tarsnap.com/scrypt/scrypt.pdf).
+package scrypt // import "golang.org/x/crypto/scrypt"
+
+import (
+ "crypto/sha256"
+ "encoding/binary"
+ "errors"
+ "math/bits"
+
+ "golang.org/x/crypto/pbkdf2"
+)
+
+const maxInt = int(^uint(0) >> 1)
+
+// blockCopy copies n numbers from src into dst.
+func blockCopy(dst, src []uint32, n int) {
+ copy(dst, src[:n])
+}
+
+// blockXOR XORs numbers from dst with n numbers from src.
+func blockXOR(dst, src []uint32, n int) {
+ for i, v := range src[:n] {
+ dst[i] ^= v
+ }
+}
+
+// salsaXOR applies Salsa20/8 to the XOR of 16 numbers from tmp and in,
+// and puts the result into both tmp and out.
+func salsaXOR(tmp *[16]uint32, in, out []uint32) {
+ w0 := tmp[0] ^ in[0]
+ w1 := tmp[1] ^ in[1]
+ w2 := tmp[2] ^ in[2]
+ w3 := tmp[3] ^ in[3]
+ w4 := tmp[4] ^ in[4]
+ w5 := tmp[5] ^ in[5]
+ w6 := tmp[6] ^ in[6]
+ w7 := tmp[7] ^ in[7]
+ w8 := tmp[8] ^ in[8]
+ w9 := tmp[9] ^ in[9]
+ w10 := tmp[10] ^ in[10]
+ w11 := tmp[11] ^ in[11]
+ w12 := tmp[12] ^ in[12]
+ w13 := tmp[13] ^ in[13]
+ w14 := tmp[14] ^ in[14]
+ w15 := tmp[15] ^ in[15]
+
+ x0, x1, x2, x3, x4, x5, x6, x7, x8 := w0, w1, w2, w3, w4, w5, w6, w7, w8
+ x9, x10, x11, x12, x13, x14, x15 := w9, w10, w11, w12, w13, w14, w15
+
+ for i := 0; i < 8; i += 2 {
+ x4 ^= bits.RotateLeft32(x0+x12, 7)
+ x8 ^= bits.RotateLeft32(x4+x0, 9)
+ x12 ^= bits.RotateLeft32(x8+x4, 13)
+ x0 ^= bits.RotateLeft32(x12+x8, 18)
+
+ x9 ^= bits.RotateLeft32(x5+x1, 7)
+ x13 ^= bits.RotateLeft32(x9+x5, 9)
+ x1 ^= bits.RotateLeft32(x13+x9, 13)
+ x5 ^= bits.RotateLeft32(x1+x13, 18)
+
+ x14 ^= bits.RotateLeft32(x10+x6, 7)
+ x2 ^= bits.RotateLeft32(x14+x10, 9)
+ x6 ^= bits.RotateLeft32(x2+x14, 13)
+ x10 ^= bits.RotateLeft32(x6+x2, 18)
+
+ x3 ^= bits.RotateLeft32(x15+x11, 7)
+ x7 ^= bits.RotateLeft32(x3+x15, 9)
+ x11 ^= bits.RotateLeft32(x7+x3, 13)
+ x15 ^= bits.RotateLeft32(x11+x7, 18)
+
+ x1 ^= bits.RotateLeft32(x0+x3, 7)
+ x2 ^= bits.RotateLeft32(x1+x0, 9)
+ x3 ^= bits.RotateLeft32(x2+x1, 13)
+ x0 ^= bits.RotateLeft32(x3+x2, 18)
+
+ x6 ^= bits.RotateLeft32(x5+x4, 7)
+ x7 ^= bits.RotateLeft32(x6+x5, 9)
+ x4 ^= bits.RotateLeft32(x7+x6, 13)
+ x5 ^= bits.RotateLeft32(x4+x7, 18)
+
+ x11 ^= bits.RotateLeft32(x10+x9, 7)
+ x8 ^= bits.RotateLeft32(x11+x10, 9)
+ x9 ^= bits.RotateLeft32(x8+x11, 13)
+ x10 ^= bits.RotateLeft32(x9+x8, 18)
+
+ x12 ^= bits.RotateLeft32(x15+x14, 7)
+ x13 ^= bits.RotateLeft32(x12+x15, 9)
+ x14 ^= bits.RotateLeft32(x13+x12, 13)
+ x15 ^= bits.RotateLeft32(x14+x13, 18)
+ }
+ x0 += w0
+ x1 += w1
+ x2 += w2
+ x3 += w3
+ x4 += w4
+ x5 += w5
+ x6 += w6
+ x7 += w7
+ x8 += w8
+ x9 += w9
+ x10 += w10
+ x11 += w11
+ x12 += w12
+ x13 += w13
+ x14 += w14
+ x15 += w15
+
+ out[0], tmp[0] = x0, x0
+ out[1], tmp[1] = x1, x1
+ out[2], tmp[2] = x2, x2
+ out[3], tmp[3] = x3, x3
+ out[4], tmp[4] = x4, x4
+ out[5], tmp[5] = x5, x5
+ out[6], tmp[6] = x6, x6
+ out[7], tmp[7] = x7, x7
+ out[8], tmp[8] = x8, x8
+ out[9], tmp[9] = x9, x9
+ out[10], tmp[10] = x10, x10
+ out[11], tmp[11] = x11, x11
+ out[12], tmp[12] = x12, x12
+ out[13], tmp[13] = x13, x13
+ out[14], tmp[14] = x14, x14
+ out[15], tmp[15] = x15, x15
+}
+
+func blockMix(tmp *[16]uint32, in, out []uint32, r int) {
+ blockCopy(tmp[:], in[(2*r-1)*16:], 16)
+ for i := 0; i < 2*r; i += 2 {
+ salsaXOR(tmp, in[i*16:], out[i*8:])
+ salsaXOR(tmp, in[i*16+16:], out[i*8+r*16:])
+ }
+}
+
+func integer(b []uint32, r int) uint64 {
+ j := (2*r - 1) * 16
+ return uint64(b[j]) | uint64(b[j+1])<<32
+}
+
+func smix(b []byte, r, N int, v, xy []uint32) {
+ var tmp [16]uint32
+ R := 32 * r
+ x := xy
+ y := xy[R:]
+
+ j := 0
+ for i := 0; i < R; i++ {
+ x[i] = binary.LittleEndian.Uint32(b[j:])
+ j += 4
+ }
+ for i := 0; i < N; i += 2 {
+ blockCopy(v[i*R:], x, R)
+ blockMix(&tmp, x, y, r)
+
+ blockCopy(v[(i+1)*R:], y, R)
+ blockMix(&tmp, y, x, r)
+ }
+ for i := 0; i < N; i += 2 {
+ j := int(integer(x, r) & uint64(N-1))
+ blockXOR(x, v[j*R:], R)
+ blockMix(&tmp, x, y, r)
+
+ j = int(integer(y, r) & uint64(N-1))
+ blockXOR(y, v[j*R:], R)
+ blockMix(&tmp, y, x, r)
+ }
+ j = 0
+ for _, v := range x[:R] {
+ binary.LittleEndian.PutUint32(b[j:], v)
+ j += 4
+ }
+}
+
+// Key derives a key from the password, salt, and cost parameters, returning
+// a byte slice of length keyLen that can be used as a cryptographic key.
+//
+// N is a CPU/memory cost parameter, which must be a power of two greater than 1.
+// r and p must satisfy r * p < 2³⁰. If the parameters do not satisfy the
+// limits, the function returns a nil byte slice and an error.
+//
+// For example, you can get a derived key for AES-256 (which needs a
+// 32-byte key) by doing:
+//
+// dk, err := scrypt.Key([]byte("some password"), salt, 32768, 8, 1, 32)
+//
+// The recommended parameters for interactive logins as of 2017 are N=32768, r=8
+// and p=1. The parameters N, r, and p should be increased as memory latency and
+// CPU parallelism increase; consider setting N to the highest power of 2 you
+// can derive within 100 milliseconds. Remember to get a good random salt.
+func Key(password, salt []byte, N, r, p, keyLen int) ([]byte, error) {
+ if N <= 1 || N&(N-1) != 0 {
+ return nil, errors.New("scrypt: N must be > 1 and a power of 2")
+ }
+ if uint64(r)*uint64(p) >= 1<<30 || r > maxInt/128/p || r > maxInt/256 || N > maxInt/128/r {
+ return nil, errors.New("scrypt: parameters are too large")
+ }
+
+ xy := make([]uint32, 64*r)
+ v := make([]uint32, 32*N*r)
+ b := pbkdf2.Key(password, salt, 1, p*128*r, sha256.New)
+
+ for i := 0; i < p; i++ {
+ smix(b[i*128*r:], r, N, v, xy)
+ }
+
+ return pbkdf2.Key(password, b, 1, keyLen, sha256.New), nil
+}
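A usage sketch with a freshly generated random salt, using the interactive-login parameters recommended in the comment above; the password literal is illustrative, and the salt must be persisted alongside the derived key so later verifications can reproduce it:

```go
package main

import (
	"crypto/rand"
	"fmt"
	"log"

	"golang.org/x/crypto/scrypt"
)

func main() {
	// Generate a random 16-byte salt.
	salt := make([]byte, 16)
	if _, err := rand.Read(salt); err != nil {
		log.Fatal(err)
	}
	// N=32768, r=8, p=1 per the 2017 recommendation; keyLen=32 suits AES-256.
	dk, err := scrypt.Key([]byte("some password"), salt, 32768, 8, 1, 32)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%x\n", dk)
}
```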
diff --git a/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go b/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go
new file mode 100644
index 000000000..150f887e7
--- /dev/null
+++ b/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go
@@ -0,0 +1,78 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package lazyregexp is a thin wrapper over regexp, allowing the use of global
+// regexp variables without forcing them to be compiled at init.
+package lazyregexp
+
+import (
+ "os"
+ "regexp"
+ "strings"
+ "sync"
+)
+
+// Regexp is a wrapper around [regexp.Regexp], where the underlying regexp will be
+// compiled the first time it is needed.
+type Regexp struct {
+ str string
+ once sync.Once
+ rx *regexp.Regexp
+}
+
+func (r *Regexp) re() *regexp.Regexp {
+ r.once.Do(r.build)
+ return r.rx
+}
+
+func (r *Regexp) build() {
+ r.rx = regexp.MustCompile(r.str)
+ r.str = ""
+}
+
+func (r *Regexp) FindSubmatch(s []byte) [][]byte {
+ return r.re().FindSubmatch(s)
+}
+
+func (r *Regexp) FindStringSubmatch(s string) []string {
+ return r.re().FindStringSubmatch(s)
+}
+
+func (r *Regexp) FindStringSubmatchIndex(s string) []int {
+ return r.re().FindStringSubmatchIndex(s)
+}
+
+func (r *Regexp) ReplaceAllString(src, repl string) string {
+ return r.re().ReplaceAllString(src, repl)
+}
+
+func (r *Regexp) FindString(s string) string {
+ return r.re().FindString(s)
+}
+
+func (r *Regexp) FindAllString(s string, n int) []string {
+ return r.re().FindAllString(s, n)
+}
+
+func (r *Regexp) MatchString(s string) bool {
+ return r.re().MatchString(s)
+}
+
+func (r *Regexp) SubexpNames() []string {
+ return r.re().SubexpNames()
+}
+
+var inTest = len(os.Args) > 0 && strings.HasSuffix(strings.TrimSuffix(os.Args[0], ".exe"), ".test")
+
+// New creates a new lazy regexp, delaying the compiling work until it is first
+// needed. If the code is being run as part of tests, the regexp compiling will
+// happen immediately.
+func New(str string) *Regexp {
+ lr := &Regexp{str: str}
+ if inTest {
+ // In tests, always compile the regexps early.
+ lr.re()
+ }
+ return lr
+}
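A typical use: declare the pattern at package level and pay the compile cost only if it is ever matched (note that lazyregexp is internal to golang.org/x/mod, so this sketch only compiles from within that module):

```go
package mypkg

import "golang.org/x/mod/internal/lazyregexp"

// Compiled lazily on first use rather than at package init.
var semverRE = lazyregexp.New(`^v[0-9]+\.[0-9]+\.[0-9]+$`)

func looksLikeSemver(s string) bool {
	return semverRE.MatchString(s)
}
```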
diff --git a/vendor/golang.org/x/mod/module/module.go b/vendor/golang.org/x/mod/module/module.go
new file mode 100644
index 000000000..2a364b229
--- /dev/null
+++ b/vendor/golang.org/x/mod/module/module.go
@@ -0,0 +1,841 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package module defines the module.Version type along with support code.
+//
+// The [module.Version] type is a simple Path, Version pair:
+//
+// type Version struct {
+// Path string
+// Version string
+// }
+//
+// There are no restrictions imposed directly by use of this structure,
+// but additional checking functions, most notably [Check], verify that
+// a particular path, version pair is valid.
+//
+// # Escaped Paths
+//
+// Module paths appear as substrings of file system paths
+// (in the download cache) and of web server URLs in the proxy protocol.
+// In general we cannot rely on file systems to be case-sensitive,
+// nor can we rely on web servers, since they read from file systems.
+// That is, we cannot rely on the file system to keep rsc.io/QUOTE
+// and rsc.io/quote separate. Windows and macOS don't.
+// Instead, we must never require two different casings of a file path.
+// Because we want the download cache to match the proxy protocol,
+// and because we want the proxy protocol to be possible to serve
+// from a tree of static files (which might be stored on a case-insensitive
+// file system), the proxy protocol must never require two different casings
+// of a URL path either.
+//
+// One possibility would be to make the escaped form be the lowercase
+// hexadecimal encoding of the actual path bytes. This would avoid ever
+// needing different casings of a file path, but it would be fairly illegible
+// to most programmers when those paths appeared in the file system
+// (including in file paths in compiler errors and stack traces)
+// in web server logs, and so on. Instead, we want a safe escaped form that
+// leaves most paths unaltered.
+//
+// The safe escaped form is to replace every uppercase letter
+// with an exclamation mark followed by the letter's lowercase equivalent.
+//
+// For example,
+//
+// github.com/Azure/azure-sdk-for-go -> github.com/!azure/azure-sdk-for-go.
+// github.com/GoogleCloudPlatform/cloudsql-proxy -> github.com/!google!cloud!platform/cloudsql-proxy
+// github.com/Sirupsen/logrus -> github.com/!sirupsen/logrus.
+//
+// Import paths that avoid upper-case letters are left unchanged.
+// Note that because import paths are ASCII-only and avoid various
+// problematic punctuation (like : < and >), the escaped form is also ASCII-only
+// and avoids the same problematic punctuation.
+//
+// Import paths have never allowed exclamation marks, so there is no
+// need to define how to escape a literal !.
+//
+// # Unicode Restrictions
+//
+// Today, paths are disallowed from using Unicode.
+//
+// Although paths are currently disallowed from using Unicode,
+// we would like at some point to allow Unicode letters as well, to assume that
+// file systems and URLs are Unicode-safe (storing UTF-8), and apply
+// the !-for-uppercase convention for escaping them in the file system.
+// But there are at least two subtle considerations.
+//
+// First, note that not all case-fold equivalent distinct runes
+// form an upper/lower pair.
+// For example, U+004B ('K'), U+006B ('k'), and U+212A ('K' for Kelvin)
+// are three distinct runes that case-fold to each other.
+// When we do add Unicode letters, we must not assume that upper/lower
+// are the only case-equivalent pairs.
+// Perhaps the Kelvin symbol would be disallowed entirely, for example.
+// Or perhaps it would escape as "!!k", or perhaps as "(212A)".
+//
+// Second, it would be nice to allow Unicode marks as well as letters,
+// but marks include combining marks, and then we must deal not
+// only with case folding but also normalization: both U+00E9 ('é')
+// and U+0065 U+0301 ('e' followed by combining acute accent)
+// look the same on the page and are treated by some file systems
+// as the same path. If we do allow Unicode marks in paths, there
+// must be some kind of normalization to allow only one canonical
+// encoding of any character used in an import path.
+package module
+
+// IMPORTANT NOTE
+//
+// This file essentially defines the set of valid import paths for the go command.
+// There are many subtle considerations, including Unicode ambiguity,
+// security, network, and file system representations.
+//
+// This file also defines the set of valid module path and version combinations,
+// another topic with many subtle considerations.
+//
+// Changes to the semantics in this file require approval from rsc.
+
+import (
+ "errors"
+ "fmt"
+ "path"
+ "sort"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+
+ "golang.org/x/mod/semver"
+)
+
+// A Version (for clients, a module.Version) is defined by a module path and version pair.
+// These are stored in their plain (unescaped) form.
+type Version struct {
+ // Path is a module path, like "golang.org/x/text" or "rsc.io/quote/v2".
+ Path string
+
+ // Version is usually a semantic version in canonical form.
+ // There are three exceptions to this general rule.
+ // First, the top-level target of a build has no specific version
+ // and uses Version = "".
+ // Second, during MVS calculations the version "none" is used
+ // to represent the decision to take no version of a given module.
+ // Third, filesystem paths found in "replace" directives are
+ // represented by a path with an empty version.
+ Version string `json:",omitempty"`
+}
+
+// String returns a representation of the Version suitable for logging
+// (Path@Version, or just Path if Version is empty).
+func (m Version) String() string {
+ if m.Version == "" {
+ return m.Path
+ }
+ return m.Path + "@" + m.Version
+}
+
+// A ModuleError indicates an error specific to a module.
+type ModuleError struct {
+ Path string
+ Version string
+ Err error
+}
+
+// VersionError returns a [ModuleError] derived from a [Version] and error,
+// or err itself if it is already such an error.
+func VersionError(v Version, err error) error {
+ var mErr *ModuleError
+ if errors.As(err, &mErr) && mErr.Path == v.Path && mErr.Version == v.Version {
+ return err
+ }
+ return &ModuleError{
+ Path: v.Path,
+ Version: v.Version,
+ Err: err,
+ }
+}
+
+func (e *ModuleError) Error() string {
+ if v, ok := e.Err.(*InvalidVersionError); ok {
+ return fmt.Sprintf("%s@%s: invalid %s: %v", e.Path, v.Version, v.noun(), v.Err)
+ }
+ if e.Version != "" {
+ return fmt.Sprintf("%s@%s: %v", e.Path, e.Version, e.Err)
+ }
+ return fmt.Sprintf("module %s: %v", e.Path, e.Err)
+}
+
+func (e *ModuleError) Unwrap() error { return e.Err }
+
+// An InvalidVersionError indicates an error specific to a version, with the
+// module path unknown or specified externally.
+//
+// A [ModuleError] may wrap an InvalidVersionError, but an InvalidVersionError
+// must not wrap a ModuleError.
+type InvalidVersionError struct {
+ Version string
+ Pseudo bool
+ Err error
+}
+
+// noun returns either "version" or "pseudo-version", depending on whether
+// e.Version is a pseudo-version.
+func (e *InvalidVersionError) noun() string {
+ if e.Pseudo {
+ return "pseudo-version"
+ }
+ return "version"
+}
+
+func (e *InvalidVersionError) Error() string {
+ return fmt.Sprintf("%s %q invalid: %s", e.noun(), e.Version, e.Err)
+}
+
+func (e *InvalidVersionError) Unwrap() error { return e.Err }
+
+// An InvalidPathError indicates a module, import, or file path doesn't
+// satisfy all naming constraints. See [CheckPath], [CheckImportPath],
+// and [CheckFilePath] for specific restrictions.
+type InvalidPathError struct {
+ Kind string // "module", "import", or "file"
+ Path string
+ Err error
+}
+
+func (e *InvalidPathError) Error() string {
+ return fmt.Sprintf("malformed %s path %q: %v", e.Kind, e.Path, e.Err)
+}
+
+func (e *InvalidPathError) Unwrap() error { return e.Err }
+
+// Check checks that a given module path, version pair is valid.
+// In addition to the path being a valid module path
+// and the version being a valid semantic version,
+// the two must correspond.
+// For example, the path "yaml/v2" only corresponds to
+// semantic versions beginning with "v2.".
+func Check(path, version string) error {
+ if err := CheckPath(path); err != nil {
+ return err
+ }
+ if !semver.IsValid(version) {
+ return &ModuleError{
+ Path: path,
+ Err: &InvalidVersionError{Version: version, Err: errors.New("not a semantic version")},
+ }
+ }
+ _, pathMajor, _ := SplitPathVersion(path)
+ if err := CheckPathMajor(version, pathMajor); err != nil {
+ return &ModuleError{Path: path, Err: err}
+ }
+ return nil
+}
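Check ties the path's /vN suffix (or its absence) to the version's major component. Two quick illustrations of the correspondence rule:

```go
fmt.Println(module.Check("rsc.io/quote/v3", "v3.1.0")) // <nil>
fmt.Println(module.Check("rsc.io/quote", "v3.1.0"))
// rsc.io/quote@v3.1.0: invalid version: should be v0 or v1, not v3
```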
+
+// firstPathOK reports whether r can appear in the first element of a module path.
+// The first element of the path must be an LDH domain name, at least for now.
+// To avoid case ambiguity, the domain name must be entirely lower case.
+func firstPathOK(r rune) bool {
+ return r == '-' || r == '.' ||
+ '0' <= r && r <= '9' ||
+ 'a' <= r && r <= 'z'
+}
+
+// modPathOK reports whether r can appear in a module path element.
+// Paths can be ASCII letters, ASCII digits, and limited ASCII punctuation: - . _ and ~.
+//
+// This matches what "go get" has historically recognized in import paths,
+// and avoids confusing sequences like '%20' or '+' that would change meaning
+// if used in a URL.
+//
+// TODO(rsc): We would like to allow Unicode letters, but that requires additional
+// care in the safe encoding (see "escaped paths" above).
+func modPathOK(r rune) bool {
+ if r < utf8.RuneSelf {
+ return r == '-' || r == '.' || r == '_' || r == '~' ||
+ '0' <= r && r <= '9' ||
+ 'A' <= r && r <= 'Z' ||
+ 'a' <= r && r <= 'z'
+ }
+ return false
+}
+
+// importPathOK reports whether r can appear in a package import path element.
+//
+// Import paths are intermediate between module paths and file paths: we
+// disallow characters that would be confusing or ambiguous as arguments to
+// 'go get' (such as '@' and ' ' ), but allow certain characters that are
+// otherwise-unambiguous on the command line and historically used for some
+// binary names (such as '++' as a suffix for compiler binaries and wrappers).
+func importPathOK(r rune) bool {
+ return modPathOK(r) || r == '+'
+}
+
+// fileNameOK reports whether r can appear in a file name.
+// For now we allow all Unicode letters but otherwise limit to pathOK plus a few more punctuation characters.
+// If we expand the set of allowed characters here, we have to
+// work harder at detecting potential case-folding and normalization collisions.
+// See note about "escaped paths" above.
+func fileNameOK(r rune) bool {
+ if r < utf8.RuneSelf {
+ // Entire set of ASCII punctuation, from which we remove characters:
+ // ! " # $ % & ' ( ) * + , - . / : ; < = > ? @ [ \ ] ^ _ ` { | } ~
+ // We disallow some shell special characters: " ' * < > ? ` |
+ // (Note that some of those are disallowed by the Windows file system as well.)
+ // We also disallow path separators / : and \ (fileNameOK is only called on path element characters).
+ // We allow spaces (U+0020) in file names.
+ const allowed = "!#$%&()+,-.=@[]^_{}~ "
+ if '0' <= r && r <= '9' || 'A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' {
+ return true
+ }
+ return strings.ContainsRune(allowed, r)
+ }
+ // It may be OK to add more ASCII punctuation here, but only carefully.
+ // For example Windows disallows < > \, and macOS disallows :, so we must not allow those.
+ return unicode.IsLetter(r)
+}
+
+// CheckPath checks that a module path is valid.
+// A valid module path is a valid import path, as checked by [CheckImportPath],
+// with three additional constraints.
+// First, the leading path element (up to the first slash, if any),
+// by convention a domain name, must contain only lower-case ASCII letters,
+// ASCII digits, dots (U+002E), and dashes (U+002D);
+// it must contain at least one dot and cannot start with a dash.
+// Second, for a final path element of the form /vN, where N looks numeric
+// (ASCII digits and dots), N must not begin with a leading zero, must not be /v1,
+// and must not contain any dots. For paths beginning with "gopkg.in/",
+// this second requirement is replaced by a requirement that the path
+// follow the gopkg.in server's conventions.
+// Third, no path element may begin with a dot.
+func CheckPath(path string) (err error) {
+ defer func() {
+ if err != nil {
+ err = &InvalidPathError{Kind: "module", Path: path, Err: err}
+ }
+ }()
+
+ if err := checkPath(path, modulePath); err != nil {
+ return err
+ }
+ i := strings.Index(path, "/")
+ if i < 0 {
+ i = len(path)
+ }
+ if i == 0 {
+ return fmt.Errorf("leading slash")
+ }
+ if !strings.Contains(path[:i], ".") {
+ return fmt.Errorf("missing dot in first path element")
+ }
+ if path[0] == '-' {
+ return fmt.Errorf("leading dash in first path element")
+ }
+ for _, r := range path[:i] {
+ if !firstPathOK(r) {
+ return fmt.Errorf("invalid char %q in first path element", r)
+ }
+ }
+ if _, _, ok := SplitPathVersion(path); !ok {
+ return fmt.Errorf("invalid version")
+ }
+ return nil
+}
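For example, the first-element rules reject a leading path element without a dot, while uppercase letters remain acceptable in later elements:

```go
fmt.Println(module.CheckPath("github.com/Azure/azure-sdk-for-go")) // <nil>
fmt.Println(module.CheckPath("example/repo"))
// malformed module path "example/repo": missing dot in first path element
```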
+
+// CheckImportPath checks that an import path is valid.
+//
+// A valid import path consists of one or more valid path elements
+// separated by slashes (U+002F). (It must not begin with nor end in a slash.)
+//
+// A valid path element is a non-empty string made up of
+// ASCII letters, ASCII digits, and limited ASCII punctuation: - . _ and ~.
+// It must not end with a dot (U+002E), nor contain two dots in a row.
+//
+// The element prefix up to the first dot must not be a reserved file name
+// on Windows, regardless of case (CON, com1, NuL, and so on). The element
+// must not have a suffix of a tilde followed by one or more ASCII digits
+// (to exclude path elements that look like Windows short-names).
+//
+// CheckImportPath may be less restrictive in the future, but see the
+// top-level package documentation for additional information about
+// subtleties of Unicode.
+func CheckImportPath(path string) error {
+ if err := checkPath(path, importPath); err != nil {
+ return &InvalidPathError{Kind: "import", Path: path, Err: err}
+ }
+ return nil
+}
+
+// pathKind indicates what kind of path we're checking. Module paths,
+// import paths, and file paths have different restrictions.
+type pathKind int
+
+const (
+ modulePath pathKind = iota
+ importPath
+ filePath
+)
+
+// checkPath checks that a general path is valid. kind indicates what
+// specific constraints should be applied.
+//
+// checkPath returns an error describing why the path is not valid.
+// Because these checks apply to module, import, and file paths,
+// and because other checks may be applied, the caller is expected to wrap
+// this error with [InvalidPathError].
+func checkPath(path string, kind pathKind) error {
+ if !utf8.ValidString(path) {
+ return fmt.Errorf("invalid UTF-8")
+ }
+ if path == "" {
+ return fmt.Errorf("empty string")
+ }
+ if path[0] == '-' && kind != filePath {
+ return fmt.Errorf("leading dash")
+ }
+ if strings.Contains(path, "//") {
+ return fmt.Errorf("double slash")
+ }
+ if path[len(path)-1] == '/' {
+ return fmt.Errorf("trailing slash")
+ }
+ elemStart := 0
+ for i, r := range path {
+ if r == '/' {
+ if err := checkElem(path[elemStart:i], kind); err != nil {
+ return err
+ }
+ elemStart = i + 1
+ }
+ }
+ if err := checkElem(path[elemStart:], kind); err != nil {
+ return err
+ }
+ return nil
+}
+
+// checkElem checks whether an individual path element is valid.
+func checkElem(elem string, kind pathKind) error {
+ if elem == "" {
+ return fmt.Errorf("empty path element")
+ }
+ if strings.Count(elem, ".") == len(elem) {
+ return fmt.Errorf("invalid path element %q", elem)
+ }
+ if elem[0] == '.' && kind == modulePath {
+ return fmt.Errorf("leading dot in path element")
+ }
+ if elem[len(elem)-1] == '.' {
+ return fmt.Errorf("trailing dot in path element")
+ }
+ for _, r := range elem {
+ ok := false
+ switch kind {
+ case modulePath:
+ ok = modPathOK(r)
+ case importPath:
+ ok = importPathOK(r)
+ case filePath:
+ ok = fileNameOK(r)
+ default:
+ panic(fmt.Sprintf("internal error: invalid kind %v", kind))
+ }
+ if !ok {
+ return fmt.Errorf("invalid char %q", r)
+ }
+ }
+
+ // Windows disallows a bunch of path elements, sadly.
+ // See https://docs.microsoft.com/en-us/windows/desktop/fileio/naming-a-file
+ short := elem
+ if i := strings.Index(short, "."); i >= 0 {
+ short = short[:i]
+ }
+ for _, bad := range badWindowsNames {
+ if strings.EqualFold(bad, short) {
+ return fmt.Errorf("%q disallowed as path element component on Windows", short)
+ }
+ }
+
+ if kind == filePath {
+ // don't check for Windows short-names in file names. They're
+ // only an issue for import paths.
+ return nil
+ }
+
+ // Reject path components that look like Windows short-names.
+ // Those usually end in a tilde followed by one or more ASCII digits.
+ if tilde := strings.LastIndexByte(short, '~'); tilde >= 0 && tilde < len(short)-1 {
+ suffix := short[tilde+1:]
+ suffixIsDigits := true
+ for _, r := range suffix {
+ if r < '0' || r > '9' {
+ suffixIsDigits = false
+ break
+ }
+ }
+ if suffixIsDigits {
+ return fmt.Errorf("trailing tilde and digits in path element")
+ }
+ }
+
+ return nil
+}
+
+// CheckFilePath checks that a slash-separated file path is valid.
+// The definition of a valid file path is the same as the definition
+// of a valid import path except that the set of allowed characters is larger:
+// all Unicode letters, ASCII digits, the ASCII space character (U+0020),
+// and the ASCII punctuation characters
+// “!#$%&()+,-.=@[]^_{}~”.
+// (The excluded punctuation characters, " * < > ? ` ' | / \ and :,
+// have special meanings in certain shells or operating systems.)
+//
+// CheckFilePath may be less restrictive in the future, but see the
+// top-level package documentation for additional information about
+// subtleties of Unicode.
+func CheckFilePath(path string) error {
+ if err := checkPath(path, filePath); err != nil {
+ return &InvalidPathError{Kind: "file", Path: path, Err: err}
+ }
+ return nil
+}
+
+// badWindowsNames are the reserved file path elements on Windows.
+// See https://docs.microsoft.com/en-us/windows/desktop/fileio/naming-a-file
+var badWindowsNames = []string{
+ "CON",
+ "PRN",
+ "AUX",
+ "NUL",
+ "COM1",
+ "COM2",
+ "COM3",
+ "COM4",
+ "COM5",
+ "COM6",
+ "COM7",
+ "COM8",
+ "COM9",
+ "LPT1",
+ "LPT2",
+ "LPT3",
+ "LPT4",
+ "LPT5",
+ "LPT6",
+ "LPT7",
+ "LPT8",
+ "LPT9",
+}
+
+// SplitPathVersion returns prefix and major version such that prefix+pathMajor == path
+// and pathMajor is either empty or "/vN" for N >= 2.
+// As a special case, gopkg.in paths are recognized directly;
+// they require ".vN" instead of "/vN", and for all N, not just N >= 2.
+// SplitPathVersion returns with ok = false when presented with
+// a path whose last path element does not satisfy the constraints
+// applied by [CheckPath], such as "example.com/pkg/v1" or "example.com/pkg/v1.2".
+func SplitPathVersion(path string) (prefix, pathMajor string, ok bool) {
+ if strings.HasPrefix(path, "gopkg.in/") {
+ return splitGopkgIn(path)
+ }
+
+ i := len(path)
+ dot := false
+ for i > 0 && ('0' <= path[i-1] && path[i-1] <= '9' || path[i-1] == '.') {
+ if path[i-1] == '.' {
+ dot = true
+ }
+ i--
+ }
+ if i <= 1 || i == len(path) || path[i-1] != 'v' || path[i-2] != '/' {
+ return path, "", true
+ }
+ prefix, pathMajor = path[:i-2], path[i-2:]
+ if dot || len(pathMajor) <= 2 || pathMajor[2] == '0' || pathMajor == "/v1" {
+ return path, "", false
+ }
+ return prefix, pathMajor, true
+}
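The backwards scan collects trailing digits (and dots, which disqualify the suffix); a few representative results under the rules documented above:

```go
fmt.Println(module.SplitPathVersion("rsc.io/quote/v3"))  // rsc.io/quote /v3 true
fmt.Println(module.SplitPathVersion("rsc.io/quote"))     // rsc.io/quote  true
fmt.Println(module.SplitPathVersion("rsc.io/quote/v1"))  // rsc.io/quote/v1  false
fmt.Println(module.SplitPathVersion("gopkg.in/yaml.v2")) // gopkg.in/yaml .v2 true
```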
+
+// splitGopkgIn is like SplitPathVersion but only for gopkg.in paths.
+func splitGopkgIn(path string) (prefix, pathMajor string, ok bool) {
+ if !strings.HasPrefix(path, "gopkg.in/") {
+ return path, "", false
+ }
+ i := len(path)
+ if strings.HasSuffix(path, "-unstable") {
+ i -= len("-unstable")
+ }
+ for i > 0 && ('0' <= path[i-1] && path[i-1] <= '9') {
+ i--
+ }
+ if i <= 1 || path[i-1] != 'v' || path[i-2] != '.' {
+ // All gopkg.in paths must end in vN for some N.
+ return path, "", false
+ }
+ prefix, pathMajor = path[:i-2], path[i-2:]
+ if len(pathMajor) <= 2 || pathMajor[2] == '0' && pathMajor != ".v0" {
+ return path, "", false
+ }
+ return prefix, pathMajor, true
+}
+
+// MatchPathMajor reports whether the semantic version v
+// matches the path major version pathMajor.
+//
+// MatchPathMajor returns true if and only if [CheckPathMajor] returns nil.
+func MatchPathMajor(v, pathMajor string) bool {
+ return CheckPathMajor(v, pathMajor) == nil
+}
+
+// CheckPathMajor returns a non-nil error if the semantic version v
+// does not match the path major version pathMajor.
+func CheckPathMajor(v, pathMajor string) error {
+ // TODO(jayconrod): return errors or panic for invalid inputs. This function
+ // (and others) was covered by integration tests for cmd/go, and surrounding
+ // code protected against invalid inputs like non-canonical versions.
+ if strings.HasPrefix(pathMajor, ".v") && strings.HasSuffix(pathMajor, "-unstable") {
+ pathMajor = strings.TrimSuffix(pathMajor, "-unstable")
+ }
+ if strings.HasPrefix(v, "v0.0.0-") && pathMajor == ".v1" {
+ // Allow an old bug in pseudo-versions that generated v0.0.0- pseudo-versions for gopkg.in .v1 paths.
+ // For example, gopkg.in/yaml.v2@v2.2.1's go.mod requires gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405.
+ return nil
+ }
+ m := semver.Major(v)
+ if pathMajor == "" {
+ if m == "v0" || m == "v1" || semver.Build(v) == "+incompatible" {
+ return nil
+ }
+ pathMajor = "v0 or v1"
+ } else if pathMajor[0] == '/' || pathMajor[0] == '.' {
+ if m == pathMajor[1:] {
+ return nil
+ }
+ pathMajor = pathMajor[1:]
+ }
+ return &InvalidVersionError{
+ Version: v,
+ Err: fmt.Errorf("should be %s, not %s", pathMajor, semver.Major(v)),
+ }
+}
+
+// PathMajorPrefix returns the major-version tag prefix implied by pathMajor.
+// An empty PathMajorPrefix allows either v0 or v1.
+//
+// Note that [MatchPathMajor] may accept some versions that do not actually begin
+// with this prefix: namely, it accepts a 'v0.0.0-' prefix for a '.v1'
+// pathMajor, even though that pathMajor implies 'v1' tagging.
+func PathMajorPrefix(pathMajor string) string {
+ if pathMajor == "" {
+ return ""
+ }
+ if pathMajor[0] != '/' && pathMajor[0] != '.' {
+ panic("pathMajor suffix " + pathMajor + " passed to PathMajorPrefix lacks separator")
+ }
+ if strings.HasPrefix(pathMajor, ".v") && strings.HasSuffix(pathMajor, "-unstable") {
+ pathMajor = strings.TrimSuffix(pathMajor, "-unstable")
+ }
+ m := pathMajor[1:]
+ if m != semver.Major(m) {
+ panic("pathMajor suffix " + pathMajor + "passed to PathMajorPrefix is not a valid major version")
+ }
+ return m
+}
+
+// CanonicalVersion returns the canonical form of the version string v.
+// It is the same as [semver.Canonical] except that it preserves the special build suffix "+incompatible".
+func CanonicalVersion(v string) string {
+ cv := semver.Canonical(v)
+ if semver.Build(v) == "+incompatible" {
+ cv += "+incompatible"
+ }
+ return cv
+}
+
+// Sort sorts the list by Path, breaking ties by comparing [Version] fields.
+// The Version fields are interpreted as semantic versions (using [semver.Compare])
+// optionally followed by a tie-breaking suffix introduced by a slash character,
+// like in "v0.0.1/go.mod".
+func Sort(list []Version) {
+ sort.Slice(list, func(i, j int) bool {
+ mi := list[i]
+ mj := list[j]
+ if mi.Path != mj.Path {
+ return mi.Path < mj.Path
+ }
+ // To help go.sum formatting, allow version/file.
+ // Compare semver prefix by semver rules,
+ // file by string order.
+ vi := mi.Version
+ vj := mj.Version
+ var fi, fj string
+ if k := strings.Index(vi, "/"); k >= 0 {
+ vi, fi = vi[:k], vi[k:]
+ }
+ if k := strings.Index(vj, "/"); k >= 0 {
+ vj, fj = vj[:k], vj[k:]
+ }
+ if vi != vj {
+ return semver.Compare(vi, vj) < 0
+ }
+ return fi < fj
+ })
+}
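The tie-breaking suffix exists so that go.sum-style entries such as "v1.5.2/go.mod" sort immediately after their bare version. A small sketch:

```go
list := []module.Version{
	{Path: "rsc.io/quote", Version: "v1.5.2/go.mod"},
	{Path: "rsc.io/quote", Version: "v1.5.2"},
	{Path: "golang.org/x/text", Version: "v0.3.0"},
}
module.Sort(list)
// golang.org/x/text@v0.3.0, rsc.io/quote@v1.5.2, rsc.io/quote@v1.5.2/go.mod
```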
+
+// EscapePath returns the escaped form of the given module path.
+// It fails if the module path is invalid.
+func EscapePath(path string) (escaped string, err error) {
+ if err := CheckPath(path); err != nil {
+ return "", err
+ }
+
+ return escapeString(path)
+}
+
+// EscapeVersion returns the escaped form of the given module version.
+// Versions are allowed to be in non-semver form but must be valid file names
+// and not contain exclamation marks.
+func EscapeVersion(v string) (escaped string, err error) {
+ if err := checkElem(v, filePath); err != nil || strings.Contains(v, "!") {
+ return "", &InvalidVersionError{
+ Version: v,
+ Err: fmt.Errorf("disallowed version string"),
+ }
+ }
+ return escapeString(v)
+}
+
+func escapeString(s string) (escaped string, err error) {
+ haveUpper := false
+ for _, r := range s {
+ if r == '!' || r >= utf8.RuneSelf {
+ // This should be disallowed by CheckPath, but diagnose anyway.
+ // The correctness of the escaping loop below depends on it.
+ return "", fmt.Errorf("internal error: inconsistency in EscapePath")
+ }
+ if 'A' <= r && r <= 'Z' {
+ haveUpper = true
+ }
+ }
+
+ if !haveUpper {
+ return s, nil
+ }
+
+ var buf []byte
+ for _, r := range s {
+ if 'A' <= r && r <= 'Z' {
+ buf = append(buf, '!', byte(r+'a'-'A'))
+ } else {
+ buf = append(buf, byte(r))
+ }
+ }
+ return string(buf), nil
+}
+
+// UnescapePath returns the module path for the given escaped path.
+// It fails if the escaped path is invalid or describes an invalid path.
+func UnescapePath(escaped string) (path string, err error) {
+ path, ok := unescapeString(escaped)
+ if !ok {
+ return "", fmt.Errorf("invalid escaped module path %q", escaped)
+ }
+ if err := CheckPath(path); err != nil {
+ return "", fmt.Errorf("invalid escaped module path %q: %v", escaped, err)
+ }
+ return path, nil
+}
+
+// UnescapeVersion returns the version string for the given escaped version.
+// It fails if the escaped form is invalid or describes an invalid version.
+// Versions are allowed to be in non-semver form but must be valid file names
+// and not contain exclamation marks.
+func UnescapeVersion(escaped string) (v string, err error) {
+ v, ok := unescapeString(escaped)
+ if !ok {
+ return "", fmt.Errorf("invalid escaped version %q", escaped)
+ }
+ if err := checkElem(v, filePath); err != nil {
+ return "", fmt.Errorf("invalid escaped version %q: %v", v, err)
+ }
+ return v, nil
+}
+
+func unescapeString(escaped string) (string, bool) {
+ var buf []byte
+
+ bang := false
+ for _, r := range escaped {
+ if r >= utf8.RuneSelf {
+ return "", false
+ }
+ if bang {
+ bang = false
+ if r < 'a' || 'z' < r {
+ return "", false
+ }
+ buf = append(buf, byte(r+'A'-'a'))
+ continue
+ }
+ if r == '!' {
+ bang = true
+ continue
+ }
+ if 'A' <= r && r <= 'Z' {
+ return "", false
+ }
+ buf = append(buf, byte(r))
+ }
+ if bang {
+ return "", false
+ }
+ return string(buf), true
+}
+
+// MatchPrefixPatterns reports whether any path prefix of target matches one of
+// the glob patterns (as defined by [path.Match]) in the comma-separated globs
+// list. This implements the algorithm used when matching a module path to the
+// GOPRIVATE environment variable, as described by 'go help module-private'.
+//
+// It ignores any empty or malformed patterns in the list.
+// Trailing slashes on patterns are ignored.
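+//
+// For example (illustrative):
+//
+//	MatchPrefixPatterns("*.corp.example.com,rsc.io/private", "rsc.io/private/quux") // true
+//	MatchPrefixPatterns("*.corp.example.com", "rsc.io/secret")                      // false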
+func MatchPrefixPatterns(globs, target string) bool {
+ for globs != "" {
+ // Extract next non-empty glob in comma-separated list.
+ var glob string
+ if i := strings.Index(globs, ","); i >= 0 {
+ glob, globs = globs[:i], globs[i+1:]
+ } else {
+ glob, globs = globs, ""
+ }
+ glob = strings.TrimSuffix(glob, "/")
+ if glob == "" {
+ continue
+ }
+
+ // A glob with N+1 path elements (N slashes) needs to be matched
+ // against the first N+1 path elements of target,
+ // which end just before the N+1'th slash.
+ n := strings.Count(glob, "/")
+ prefix := target
+ // Walk target, counting slashes, truncating at the N+1'th slash.
+ for i := 0; i < len(target); i++ {
+ if target[i] == '/' {
+ if n == 0 {
+ prefix = target[:i]
+ break
+ }
+ n--
+ }
+ }
+ if n > 0 {
+ // Not enough prefix elements.
+ continue
+ }
+ matched, _ := path.Match(glob, prefix)
+ if matched {
+ return true
+ }
+ }
+ return false
+}
diff --git a/vendor/golang.org/x/mod/module/pseudo.go b/vendor/golang.org/x/mod/module/pseudo.go
new file mode 100644
index 000000000..9cf19d325
--- /dev/null
+++ b/vendor/golang.org/x/mod/module/pseudo.go
@@ -0,0 +1,250 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Pseudo-versions
+//
+// Code authors are expected to tag the revisions they want users to use,
+// including prereleases. However, not all authors tag versions at all,
+// and not all commits a user might want to try will have tags.
+// A pseudo-version is a version with a special form that allows us to
+// address an untagged commit and order that version with respect to
+// other versions we might encounter.
+//
+// A pseudo-version takes one of the general forms:
+//
+// (1) vX.0.0-yyyymmddhhmmss-abcdef123456
+// (2) vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdef123456
+// (3) vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdef123456+incompatible
+// (4) vX.Y.Z-pre.0.yyyymmddhhmmss-abcdef123456
+// (5) vX.Y.Z-pre.0.yyyymmddhhmmss-abcdef123456+incompatible
+//
+// If there is no recently tagged version with the right major version vX,
+// then form (1) is used, creating a space of pseudo-versions at the bottom
+// of the vX version range, less than any tagged version, including the unlikely v0.0.0.
+//
+// If the most recent tagged version before the target commit is vX.Y.Z or vX.Y.Z+incompatible,
+// then the pseudo-version uses form (2) or (3), making it a prerelease for the next
+// possible semantic version after vX.Y.Z. The leading 0 segment in the prerelease string
+// ensures that the pseudo-version compares less than possible future explicit prereleases
+// like vX.Y.(Z+1)-rc1 or vX.Y.(Z+1)-1.
+//
+// If the most recent tagged version before the target commit is vX.Y.Z-pre or vX.Y.Z-pre+incompatible,
+// then the pseudo-version uses form (4) or (5), making it a slightly later prerelease.
+
+package module
+
+import (
+ "errors"
+ "fmt"
+ "strings"
+ "time"
+
+ "golang.org/x/mod/internal/lazyregexp"
+ "golang.org/x/mod/semver"
+)
+
+var pseudoVersionRE = lazyregexp.New(`^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)\d{14}-[A-Za-z0-9]+(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$`)
+
+// PseudoVersionTimestampFormat is the time layout (in time.Format reference
+// form) of the timestamp embedded in a pseudo-version.
+const PseudoVersionTimestampFormat = "20060102150405"
+
+// PseudoVersion returns a pseudo-version for the given major version ("v1"),
+// preexisting older tagged version ("" or "v1.2.3" or "v1.2.3-pre"), revision time,
+// and revision identifier (usually a 12-byte commit hash prefix).
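+//
+// For example (illustrative), with the timestamp written as yyyymmddhhmmss:
+//
+//	PseudoVersion("v1", "", t, "abcdef123456")       // "v1.0.0-yyyymmddhhmmss-abcdef123456"
+//	PseudoVersion("v1", "v1.2.3", t, "abcdef123456") // "v1.2.4-0.yyyymmddhhmmss-abcdef123456"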
+func PseudoVersion(major, older string, t time.Time, rev string) string {
+ if major == "" {
+ major = "v0"
+ }
+ segment := fmt.Sprintf("%s-%s", t.UTC().Format(PseudoVersionTimestampFormat), rev)
+ build := semver.Build(older)
+ older = semver.Canonical(older)
+ if older == "" {
+ return major + ".0.0-" + segment // form (1)
+ }
+ if semver.Prerelease(older) != "" {
+ return older + ".0." + segment + build // form (4), (5)
+ }
+
+ // Form (2), (3).
+ // Extract patch from vMAJOR.MINOR.PATCH
+ i := strings.LastIndex(older, ".") + 1
+ v, patch := older[:i], older[i:]
+
+ // Reassemble.
+ return v + incDecimal(patch) + "-0." + segment + build
+}
+
+// ZeroPseudoVersion returns a pseudo-version with a zero timestamp and
+// revision, which may be used as a placeholder.
+func ZeroPseudoVersion(major string) string {
+ return PseudoVersion(major, "", time.Time{}, "000000000000")
+}
+
+// incDecimal returns the decimal string incremented by 1.
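+// For example (illustrative), "199" becomes "200" and "999" becomes "1000".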
+func incDecimal(decimal string) string {
+ // Scan right to left turning 9s to 0s until you find a digit to increment.
+ digits := []byte(decimal)
+ i := len(digits) - 1
+ for ; i >= 0 && digits[i] == '9'; i-- {
+ digits[i] = '0'
+ }
+ if i >= 0 {
+ digits[i]++
+ } else {
+ // digits is all zeros
+ digits[0] = '1'
+ digits = append(digits, '0')
+ }
+ return string(digits)
+}
+
+// decDecimal returns the decimal string decremented by 1, or the empty string
+// if the decimal is all zeroes.
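+// For example (illustrative), "200" becomes "199" and "100" becomes "99".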
+func decDecimal(decimal string) string {
+ // Scan right to left turning 0s to 9s until you find a digit to decrement.
+ digits := []byte(decimal)
+ i := len(digits) - 1
+ for ; i >= 0 && digits[i] == '0'; i-- {
+ digits[i] = '9'
+ }
+ if i < 0 {
+ // decimal is all zeros
+ return ""
+ }
+ if i == 0 && digits[i] == '1' && len(digits) > 1 {
+ digits = digits[1:]
+ } else {
+ digits[i]--
+ }
+ return string(digits)
+}
+
+// IsPseudoVersion reports whether v is a pseudo-version.
+func IsPseudoVersion(v string) bool {
+ return strings.Count(v, "-") >= 2 && semver.IsValid(v) && pseudoVersionRE.MatchString(v)
+}
+
+// IsZeroPseudoVersion reports whether v is a pseudo-version with a zero base,
+// timestamp, and revision, as returned by [ZeroPseudoVersion].
+func IsZeroPseudoVersion(v string) bool {
+ return v == ZeroPseudoVersion(semver.Major(v))
+}
+
+// PseudoVersionTime returns the time stamp of the pseudo-version v.
+// It returns an error if v is not a pseudo-version or if the time stamp
+// embedded in the pseudo-version is not a valid time.
+func PseudoVersionTime(v string) (time.Time, error) {
+ _, timestamp, _, _, err := parsePseudoVersion(v)
+ if err != nil {
+ return time.Time{}, err
+ }
+ t, err := time.Parse("20060102150405", timestamp)
+ if err != nil {
+ return time.Time{}, &InvalidVersionError{
+ Version: v,
+ Pseudo: true,
+ Err: fmt.Errorf("malformed time %q", timestamp),
+ }
+ }
+ return t, nil
+}
+
+// PseudoVersionRev returns the revision identifier of the pseudo-version v.
+// It returns an error if v is not a pseudo-version.
+func PseudoVersionRev(v string) (rev string, err error) {
+ _, _, rev, _, err = parsePseudoVersion(v)
+ return
+}
+
+// PseudoVersionBase returns the canonical parent version, if any, upon which
+// the pseudo-version v is based.
+//
+// If v has no parent version (that is, if it is "vX.0.0-[…]"),
+// PseudoVersionBase returns the empty string and a nil error.
+func PseudoVersionBase(v string) (string, error) {
+ base, _, _, build, err := parsePseudoVersion(v)
+ if err != nil {
+ return "", err
+ }
+
+ switch pre := semver.Prerelease(base); pre {
+ case "":
+ // vX.0.0-yyyymmddhhmmss-abcdef123456 → ""
+ if build != "" {
+ // Pseudo-versions of the form vX.0.0-yyyymmddhhmmss-abcdef123456+incompatible
+ // are nonsensical: the "vX.0.0-" prefix implies that there is no parent tag,
+ // but the "+incompatible" suffix implies that the major version of
+ // the parent tag is not compatible with the module's import path.
+ //
+ // There are a few such entries in the index generated by proxy.golang.org,
+ // but we believe those entries were generated by the proxy itself.
+ return "", &InvalidVersionError{
+ Version: v,
+ Pseudo: true,
+ Err: fmt.Errorf("lacks base version, but has build metadata %q", build),
+ }
+ }
+ return "", nil
+
+ case "-0":
+ // vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdef123456 → vX.Y.Z
+ // vX.Y.(Z+1)-0.yyyymmddhhmmss-abcdef123456+incompatible → vX.Y.Z+incompatible
+ base = strings.TrimSuffix(base, pre)
+ i := strings.LastIndexByte(base, '.')
+ if i < 0 {
+ panic("base from parsePseudoVersion missing patch number: " + base)
+ }
+ patch := decDecimal(base[i+1:])
+ if patch == "" {
+ // vX.0.0-0 is invalid, but has been observed in the wild in the index
+ // generated by requests to proxy.golang.org.
+ //
+ // NOTE(bcmills): I cannot find a historical bug that accounts for
+ // pseudo-versions of this form, nor have I seen such versions in any
+ // actual go.mod files. If we find actual examples of this form and a
+ // reasonable theory of how they came into existence, it seems fine to
+ // treat them as equivalent to vX.0.0 (especially since the invalid
+ // pseudo-versions have lower precedence than the real ones). For now, we
+ // reject them.
+ return "", &InvalidVersionError{
+ Version: v,
+ Pseudo: true,
+ Err: fmt.Errorf("version before %s would have negative patch number", base),
+ }
+ }
+ return base[:i+1] + patch + build, nil
+
+ default:
+ // vX.Y.Z-pre.0.yyyymmddhhmmss-abcdef123456 → vX.Y.Z-pre
+ // vX.Y.Z-pre.0.yyyymmddhhmmss-abcdef123456+incompatible → vX.Y.Z-pre+incompatible
+ if !strings.HasSuffix(base, ".0") {
+ panic(`base from parsePseudoVersion missing ".0" before date: ` + base)
+ }
+ return strings.TrimSuffix(base, ".0") + build, nil
+ }
+}
+
+var errPseudoSyntax = errors.New("syntax error")
+
+func parsePseudoVersion(v string) (base, timestamp, rev, build string, err error) {
+ if !IsPseudoVersion(v) {
+ return "", "", "", "", &InvalidVersionError{
+ Version: v,
+ Pseudo: true,
+ Err: errPseudoSyntax,
+ }
+ }
+ build = semver.Build(v)
+ v = strings.TrimSuffix(v, build)
+ j := strings.LastIndex(v, "-")
+ v, rev = v[:j], v[j+1:]
+ i := strings.LastIndex(v, "-")
+ if j := strings.LastIndex(v, "."); j > i {
+ base = v[:j] // "vX.Y.Z-pre.0" or "vX.Y.(Z+1)-0"
+ timestamp = v[j+1:]
+ } else {
+ base = v[:i] // "vX.0.0"
+ timestamp = v[i+1:]
+ }
+ return base, timestamp, rev, build, nil
+}
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
new file mode 100644
index 000000000..2c4c4e232
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
@@ -0,0 +1,634 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+// This file defines utilities for working with source positions.
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "sort"
+)
+
+// PathEnclosingInterval returns the node that encloses the source
+// interval [start, end), and all its ancestors up to the AST root.
+//
+// The definition of "enclosing" used by this function considers
+// additional whitespace abutting a node to be enclosed by it.
+// In this example:
+//
+// z := x + y // add them
+// <-A->
+// <----B----->
+//
+// the ast.BinaryExpr(+) node is considered to enclose interval B
+// even though its [Pos()..End()) is actually only interval A.
+// This behaviour makes user interfaces more tolerant of imperfect
+// input.
+//
+// This function treats tokens as nodes, though they are not included
+// in the result. e.g. PathEnclosingInterval("+") returns the
+// enclosing ast.BinaryExpr("x + y").
+//
+// If start==end, the 1-char interval following start is used instead.
+//
+// The 'exact' result is true if the interval contains only path[0]
+// and perhaps some adjacent whitespace. It is false if the interval
+// overlaps multiple children of path[0], or if it contains only
+// interior whitespace of path[0].
+// In this example:
+//
+// z := x + y // add them
+// <--C--> <---E-->
+// ^
+// D
+//
+// intervals C, D and E are inexact. C is contained by the
+// z-assignment statement, because it spans three of its children (:=,
+// x, +). So too is the 1-char interval D, because it contains only
+// interior whitespace of the assignment. E is considered interior
+// whitespace of the BlockStmt containing the assignment.
+//
+// The resulting path is never empty; it always contains at least the
+// 'root' *ast.File. Ideally PathEnclosingInterval would reject
+// intervals that lie wholly or partially outside the range of the
+// file, but unfortunately ast.File records only the token.Pos of
+// the 'package' keyword, not the position of the start of the file itself.
+func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
+ // fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
+
+ // Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
+ var visit func(node ast.Node) bool
+ visit = func(node ast.Node) bool {
+ path = append(path, node)
+
+ nodePos := node.Pos()
+ nodeEnd := node.End()
+
+ // fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging
+
+ // Intersect [start, end) with interval of node.
+ if start < nodePos {
+ start = nodePos
+ }
+ if end > nodeEnd {
+ end = nodeEnd
+ }
+
+ // Find sole child that contains [start, end).
+ children := childrenOf(node)
+ l := len(children)
+ for i, child := range children {
+ // [childPos, childEnd) is unaugmented interval of child.
+ childPos := child.Pos()
+ childEnd := child.End()
+
+ // [augPos, augEnd) is whitespace-augmented interval of child.
+ augPos := childPos
+ augEnd := childEnd
+ if i > 0 {
+ augPos = children[i-1].End() // start of preceding whitespace
+ }
+ if i < l-1 {
+ nextChildPos := children[i+1].Pos()
+ // Does [start, end) lie between child and next child?
+ if start >= augEnd && end <= nextChildPos {
+ return false // inexact match
+ }
+ augEnd = nextChildPos // end of following whitespace
+ }
+
+ // fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
+ // i, augPos, augEnd, start, end) // debugging
+
+ // Does augmented child strictly contain [start, end)?
+ if augPos <= start && end <= augEnd {
+ _, isToken := child.(tokenNode)
+ return isToken || visit(child)
+ }
+
+ // Does [start, end) overlap multiple children?
+ // i.e. left-augmented child contains start
+ // but LR-augmented child does not contain end.
+ if start < childEnd && end > augEnd {
+ break
+ }
+ }
+
+ // No single child contained [start, end),
+ // so node is the result. Is it exact?
+
+ // (It's tempting to put this condition before the
+ // child loop, but it gives the wrong result in the
+ // case where a node (e.g. ExprStmt) and its sole
+ // child have equal intervals.)
+ if start == nodePos && end == nodeEnd {
+ return true // exact match
+ }
+
+ return false // inexact: overlaps multiple children
+ }
+
+ // Ensure [start,end) is nondecreasing.
+ if start > end {
+ start, end = end, start
+ }
+
+ if start < root.End() && end > root.Pos() {
+ if start == end {
+ end = start + 1 // empty interval => interval of size 1
+ }
+ exact = visit(root)
+
+ // Reverse the path:
+ for i, l := 0, len(path); i < l/2; i++ {
+ path[i], path[l-1-i] = path[l-1-i], path[i]
+ }
+ } else {
+ // Selection lies within whitespace preceding the
+ // first (or following the last) declaration in the file.
+ // The result nonetheless always includes the ast.File.
+ path = append(path, root)
+ }
+
+ return
+}
+
+// tokenNode is a dummy implementation of ast.Node for a single token.
+// Such nodes are used transiently by PathEnclosingInterval and never escape
+// this package.
+type tokenNode struct {
+ pos token.Pos
+ end token.Pos
+}
+
+func (n tokenNode) Pos() token.Pos {
+ return n.pos
+}
+
+func (n tokenNode) End() token.Pos {
+ return n.end
+}
+
+func tok(pos token.Pos, len int) ast.Node {
+ return tokenNode{pos, pos + token.Pos(len)}
+}
+
+// childrenOf returns the direct non-nil children of ast.Node n.
+// It may include fake ast.Node implementations for bare tokens.
+// It is not safe to call (e.g.) ast.Walk on such nodes.
+func childrenOf(n ast.Node) []ast.Node {
+ var children []ast.Node
+
+ // First add nodes for all true subtrees.
+ ast.Inspect(n, func(node ast.Node) bool {
+ if node == n { // push n
+ return true // recur
+ }
+ if node != nil { // push child
+ children = append(children, node)
+ }
+ return false // no recursion
+ })
+
+ // Then add fake Nodes for bare tokens.
+ switch n := n.(type) {
+ case *ast.ArrayType:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Elt.End(), len("]")))
+
+ case *ast.AssignStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.BasicLit:
+ children = append(children,
+ tok(n.ValuePos, len(n.Value)))
+
+ case *ast.BinaryExpr:
+ children = append(children, tok(n.OpPos, len(n.Op.String())))
+
+ case *ast.BlockStmt:
+ children = append(children,
+ tok(n.Lbrace, len("{")),
+ tok(n.Rbrace, len("}")))
+
+ case *ast.BranchStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.CallExpr:
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+ if n.Ellipsis != 0 {
+ children = append(children, tok(n.Ellipsis, len("...")))
+ }
+
+ case *ast.CaseClause:
+ if n.List == nil {
+ children = append(children,
+ tok(n.Case, len("default")))
+ } else {
+ children = append(children,
+ tok(n.Case, len("case")))
+ }
+ children = append(children, tok(n.Colon, len(":")))
+
+ case *ast.ChanType:
+ switch n.Dir {
+ case ast.RECV:
+ children = append(children, tok(n.Begin, len("<-chan")))
+ case ast.SEND:
+ children = append(children, tok(n.Begin, len("chan<-")))
+ case ast.RECV | ast.SEND:
+ children = append(children, tok(n.Begin, len("chan")))
+ }
+
+ case *ast.CommClause:
+ if n.Comm == nil {
+ children = append(children,
+ tok(n.Case, len("default")))
+ } else {
+ children = append(children,
+ tok(n.Case, len("case")))
+ }
+ children = append(children, tok(n.Colon, len(":")))
+
+ case *ast.Comment:
+ // nop
+
+ case *ast.CommentGroup:
+ // nop
+
+ case *ast.CompositeLit:
+ children = append(children,
+ tok(n.Lbrace, len("{")),
+			tok(n.Rbrace, len("}")))
+
+ case *ast.DeclStmt:
+ // nop
+
+ case *ast.DeferStmt:
+ children = append(children,
+ tok(n.Defer, len("defer")))
+
+ case *ast.Ellipsis:
+ children = append(children,
+ tok(n.Ellipsis, len("...")))
+
+ case *ast.EmptyStmt:
+ // nop
+
+ case *ast.ExprStmt:
+ // nop
+
+ case *ast.Field:
+ // TODO(adonovan): Field.{Doc,Comment,Tag}?
+
+ case *ast.FieldList:
+ children = append(children,
+ tok(n.Opening, len("(")), // or len("[")
+ tok(n.Closing, len(")"))) // or len("]")
+
+ case *ast.File:
+ // TODO test: Doc
+ children = append(children,
+ tok(n.Package, len("package")))
+
+ case *ast.ForStmt:
+ children = append(children,
+ tok(n.For, len("for")))
+
+ case *ast.FuncDecl:
+ // TODO(adonovan): FuncDecl.Comment?
+
+ // Uniquely, FuncDecl breaks the invariant that
+ // preorder traversal yields tokens in lexical order:
+ // in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
+ //
+ // As a workaround, we inline the case for FuncType
+ // here and order things correctly.
+ //
+ children = nil // discard ast.Walk(FuncDecl) info subtrees
+ children = append(children, tok(n.Type.Func, len("func")))
+ if n.Recv != nil {
+ children = append(children, n.Recv)
+ }
+ children = append(children, n.Name)
+ if tparams := n.Type.TypeParams; tparams != nil {
+ children = append(children, tparams)
+ }
+ if n.Type.Params != nil {
+ children = append(children, n.Type.Params)
+ }
+ if n.Type.Results != nil {
+ children = append(children, n.Type.Results)
+ }
+ if n.Body != nil {
+ children = append(children, n.Body)
+ }
+
+ case *ast.FuncLit:
+ // nop
+
+ case *ast.FuncType:
+ if n.Func != 0 {
+ children = append(children,
+ tok(n.Func, len("func")))
+ }
+
+ case *ast.GenDecl:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+ if n.Lparen != 0 {
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+ }
+
+ case *ast.GoStmt:
+ children = append(children,
+ tok(n.Go, len("go")))
+
+ case *ast.Ident:
+ children = append(children,
+ tok(n.NamePos, len(n.Name)))
+
+ case *ast.IfStmt:
+ children = append(children,
+ tok(n.If, len("if")))
+
+ case *ast.ImportSpec:
+ // TODO(adonovan): ImportSpec.{Doc,EndPos}?
+
+ case *ast.IncDecStmt:
+ children = append(children,
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.IndexExpr:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Rbrack, len("]")))
+
+ case *ast.IndexListExpr:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Rbrack, len("]")))
+
+ case *ast.InterfaceType:
+ children = append(children,
+ tok(n.Interface, len("interface")))
+
+ case *ast.KeyValueExpr:
+ children = append(children,
+ tok(n.Colon, len(":")))
+
+ case *ast.LabeledStmt:
+ children = append(children,
+ tok(n.Colon, len(":")))
+
+ case *ast.MapType:
+ children = append(children,
+ tok(n.Map, len("map")))
+
+ case *ast.ParenExpr:
+ children = append(children,
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+
+ case *ast.RangeStmt:
+ children = append(children,
+ tok(n.For, len("for")),
+ tok(n.TokPos, len(n.Tok.String())))
+
+ case *ast.ReturnStmt:
+ children = append(children,
+ tok(n.Return, len("return")))
+
+ case *ast.SelectStmt:
+ children = append(children,
+ tok(n.Select, len("select")))
+
+ case *ast.SelectorExpr:
+ // nop
+
+ case *ast.SendStmt:
+ children = append(children,
+ tok(n.Arrow, len("<-")))
+
+ case *ast.SliceExpr:
+ children = append(children,
+ tok(n.Lbrack, len("[")),
+ tok(n.Rbrack, len("]")))
+
+ case *ast.StarExpr:
+ children = append(children, tok(n.Star, len("*")))
+
+ case *ast.StructType:
+ children = append(children, tok(n.Struct, len("struct")))
+
+ case *ast.SwitchStmt:
+ children = append(children, tok(n.Switch, len("switch")))
+
+ case *ast.TypeAssertExpr:
+ children = append(children,
+ tok(n.Lparen-1, len(".")),
+ tok(n.Lparen, len("(")),
+ tok(n.Rparen, len(")")))
+
+ case *ast.TypeSpec:
+ // TODO(adonovan): TypeSpec.{Doc,Comment}?
+
+ case *ast.TypeSwitchStmt:
+ children = append(children, tok(n.Switch, len("switch")))
+
+ case *ast.UnaryExpr:
+ children = append(children, tok(n.OpPos, len(n.Op.String())))
+
+ case *ast.ValueSpec:
+ // TODO(adonovan): ValueSpec.{Doc,Comment}?
+
+ case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
+ // nop
+ }
+
+ // TODO(adonovan): opt: merge the logic of ast.Inspect() into
+ // the switch above so we can make interleaved callbacks for
+ // both Nodes and Tokens in the right order and avoid the need
+ // to sort.
+ sort.Sort(byPos(children))
+
+ return children
+}
+
+type byPos []ast.Node
+
+func (sl byPos) Len() int {
+ return len(sl)
+}
+func (sl byPos) Less(i, j int) bool {
+ return sl[i].Pos() < sl[j].Pos()
+}
+func (sl byPos) Swap(i, j int) {
+ sl[i], sl[j] = sl[j], sl[i]
+}
+
+// NodeDescription returns a description of the concrete type of n suitable
+// for a user interface.
+//
+// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
+// StarExpr) we could be much more specific given the path to the AST
+// root. Perhaps we should do that.
+func NodeDescription(n ast.Node) string {
+ switch n := n.(type) {
+ case *ast.ArrayType:
+ return "array type"
+ case *ast.AssignStmt:
+ return "assignment"
+ case *ast.BadDecl:
+ return "bad declaration"
+ case *ast.BadExpr:
+ return "bad expression"
+ case *ast.BadStmt:
+ return "bad statement"
+ case *ast.BasicLit:
+ return "basic literal"
+ case *ast.BinaryExpr:
+ return fmt.Sprintf("binary %s operation", n.Op)
+ case *ast.BlockStmt:
+ return "block"
+ case *ast.BranchStmt:
+ switch n.Tok {
+ case token.BREAK:
+ return "break statement"
+ case token.CONTINUE:
+ return "continue statement"
+ case token.GOTO:
+ return "goto statement"
+ case token.FALLTHROUGH:
+ return "fall-through statement"
+ }
+ case *ast.CallExpr:
+ if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
+ return "function call (or conversion)"
+ }
+ return "function call"
+ case *ast.CaseClause:
+ return "case clause"
+ case *ast.ChanType:
+ return "channel type"
+ case *ast.CommClause:
+ return "communication clause"
+ case *ast.Comment:
+ return "comment"
+ case *ast.CommentGroup:
+ return "comment group"
+ case *ast.CompositeLit:
+ return "composite literal"
+ case *ast.DeclStmt:
+ return NodeDescription(n.Decl) + " statement"
+ case *ast.DeferStmt:
+ return "defer statement"
+ case *ast.Ellipsis:
+ return "ellipsis"
+ case *ast.EmptyStmt:
+ return "empty statement"
+ case *ast.ExprStmt:
+ return "expression statement"
+ case *ast.Field:
+ // Can be any of these:
+ // struct {x, y int} -- struct field(s)
+ // struct {T} -- anon struct field
+ // interface {I} -- interface embedding
+ // interface {f()} -- interface method
+ // func (A) func(B) C -- receiver, param(s), result(s)
+ return "field/method/parameter"
+ case *ast.FieldList:
+ return "field/method/parameter list"
+ case *ast.File:
+ return "source file"
+ case *ast.ForStmt:
+ return "for loop"
+ case *ast.FuncDecl:
+ return "function declaration"
+ case *ast.FuncLit:
+ return "function literal"
+ case *ast.FuncType:
+ return "function type"
+ case *ast.GenDecl:
+ switch n.Tok {
+ case token.IMPORT:
+ return "import declaration"
+ case token.CONST:
+ return "constant declaration"
+ case token.TYPE:
+ return "type declaration"
+ case token.VAR:
+ return "variable declaration"
+ }
+ case *ast.GoStmt:
+ return "go statement"
+ case *ast.Ident:
+ return "identifier"
+ case *ast.IfStmt:
+ return "if statement"
+ case *ast.ImportSpec:
+ return "import specification"
+ case *ast.IncDecStmt:
+ if n.Tok == token.INC {
+ return "increment statement"
+ }
+ return "decrement statement"
+ case *ast.IndexExpr:
+ return "index expression"
+ case *ast.IndexListExpr:
+ return "index list expression"
+ case *ast.InterfaceType:
+ return "interface type"
+ case *ast.KeyValueExpr:
+ return "key/value association"
+ case *ast.LabeledStmt:
+ return "statement label"
+ case *ast.MapType:
+ return "map type"
+ case *ast.Package:
+ return "package"
+ case *ast.ParenExpr:
+ return "parenthesized " + NodeDescription(n.X)
+ case *ast.RangeStmt:
+ return "range loop"
+ case *ast.ReturnStmt:
+ return "return statement"
+ case *ast.SelectStmt:
+ return "select statement"
+ case *ast.SelectorExpr:
+ return "selector"
+ case *ast.SendStmt:
+ return "channel send"
+ case *ast.SliceExpr:
+ return "slice expression"
+ case *ast.StarExpr:
+ return "*-operation" // load/store expr or pointer type
+ case *ast.StructType:
+ return "struct type"
+ case *ast.SwitchStmt:
+ return "switch statement"
+ case *ast.TypeAssertExpr:
+ return "type assertion"
+ case *ast.TypeSpec:
+ return "type specification"
+ case *ast.TypeSwitchStmt:
+ return "type switch"
+ case *ast.UnaryExpr:
+ return fmt.Sprintf("unary %s operation", n.Op)
+ case *ast.ValueSpec:
+ return "value specification"
+
+ }
+ panic(fmt.Sprintf("unexpected node type: %T", n))
+}
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
new file mode 100644
index 000000000..18d1adb05
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
@@ -0,0 +1,485 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package astutil contains common utilities for working with the Go AST.
+package astutil // import "golang.org/x/tools/go/ast/astutil"
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "strconv"
+ "strings"
+)
+
+// AddImport adds the import path to the file f, if absent.
+func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) {
+ return AddNamedImport(fset, f, "", path)
+}
+
+// AddNamedImport adds the import with the given name and path to the file f, if absent.
+// If name is not empty, it is used to rename the import.
+//
+// For example, calling
+//
+// AddNamedImport(fset, f, "pathpkg", "path")
+//
+// adds
+//
+// import pathpkg "path"
+func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) {
+ if imports(f, name, path) {
+ return false
+ }
+
+ newImport := &ast.ImportSpec{
+ Path: &ast.BasicLit{
+ Kind: token.STRING,
+ Value: strconv.Quote(path),
+ },
+ }
+ if name != "" {
+ newImport.Name = &ast.Ident{Name: name}
+ }
+
+ // Find an import decl to add to.
+ // The goal is to find an existing import
+ // whose import path has the longest shared
+ // prefix with path.
+ var (
+ bestMatch = -1 // length of longest shared prefix
+ lastImport = -1 // index in f.Decls of the file's final import decl
+ impDecl *ast.GenDecl // import decl containing the best match
+ impIndex = -1 // spec index in impDecl containing the best match
+
+ isThirdPartyPath = isThirdParty(path)
+ )
+ for i, decl := range f.Decls {
+ gen, ok := decl.(*ast.GenDecl)
+ if ok && gen.Tok == token.IMPORT {
+ lastImport = i
+ // Do not add to import "C", to avoid disrupting the
+ // association with its doc comment, breaking cgo.
+ if declImports(gen, "C") {
+ continue
+ }
+
+ // Match an empty import decl if that's all that is available.
+ if len(gen.Specs) == 0 && bestMatch == -1 {
+ impDecl = gen
+ }
+
+ // Compute longest shared prefix with imports in this group and find best
+ // matched import spec.
+ // 1. Always prefer import spec with longest shared prefix.
+ // 2. While match length is 0,
+ // - for stdlib package: prefer first import spec.
+ // - for third party package: prefer first third party import spec.
+			// We cannot use the last import spec as the best match for a third
+			// party package, because grouped imports are usually placed last by
+			// the goimports -local flag.
+ // See issue #19190.
+ seenAnyThirdParty := false
+ for j, spec := range gen.Specs {
+ impspec := spec.(*ast.ImportSpec)
+ p := importPath(impspec)
+ n := matchLen(p, path)
+ if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) {
+ bestMatch = n
+ impDecl = gen
+ impIndex = j
+ }
+ seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p)
+ }
+ }
+ }
+
+ // If no import decl found, add one after the last import.
+ if impDecl == nil {
+ impDecl = &ast.GenDecl{
+ Tok: token.IMPORT,
+ }
+ if lastImport >= 0 {
+ impDecl.TokPos = f.Decls[lastImport].End()
+ } else {
+ // There are no existing imports.
+ // Our new import, preceded by a blank line, goes after the package declaration
+ // and after the comment, if any, that starts on the same line as the
+ // package declaration.
+ impDecl.TokPos = f.Package
+
+ file := fset.File(f.Package)
+ pkgLine := file.Line(f.Package)
+ for _, c := range f.Comments {
+ if file.Line(c.Pos()) > pkgLine {
+ break
+ }
+ // +2 for a blank line
+ impDecl.TokPos = c.End() + 2
+ }
+ }
+ f.Decls = append(f.Decls, nil)
+ copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
+ f.Decls[lastImport+1] = impDecl
+ }
+
+ // Insert new import at insertAt.
+ insertAt := 0
+ if impIndex >= 0 {
+ // insert after the found import
+ insertAt = impIndex + 1
+ }
+ impDecl.Specs = append(impDecl.Specs, nil)
+ copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
+ impDecl.Specs[insertAt] = newImport
+ pos := impDecl.Pos()
+ if insertAt > 0 {
+ // If there is a comment after an existing import, preserve the comment
+ // position by adding the new import after the comment.
+ if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil {
+ pos = spec.Comment.End()
+ } else {
+ // Assign same position as the previous import,
+ // so that the sorter sees it as being in the same block.
+ pos = impDecl.Specs[insertAt-1].Pos()
+ }
+ }
+ if newImport.Name != nil {
+ newImport.Name.NamePos = pos
+ }
+ newImport.Path.ValuePos = pos
+ newImport.EndPos = pos
+
+ // Clean up parens. impDecl contains at least one spec.
+ if len(impDecl.Specs) == 1 {
+ // Remove unneeded parens.
+ impDecl.Lparen = token.NoPos
+ } else if !impDecl.Lparen.IsValid() {
+ // impDecl needs parens added.
+ impDecl.Lparen = impDecl.Specs[0].Pos()
+ }
+
+ f.Imports = append(f.Imports, newImport)
+
+ if len(f.Decls) <= 1 {
+ return true
+ }
+
+ // Merge all the import declarations into the first one.
+ var first *ast.GenDecl
+ for i := 0; i < len(f.Decls); i++ {
+ decl := f.Decls[i]
+ gen, ok := decl.(*ast.GenDecl)
+ if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
+ continue
+ }
+ if first == nil {
+ first = gen
+ continue // Don't touch the first one.
+ }
+ // We now know there is more than one package in this import
+ // declaration. Ensure that it ends up parenthesized.
+ first.Lparen = first.Pos()
+ // Move the imports of the other import declaration to the first one.
+ for _, spec := range gen.Specs {
+ spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
+ first.Specs = append(first.Specs, spec)
+ }
+ f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
+ i--
+ }
+
+ return true
+}
+
+func isThirdParty(importPath string) bool {
+	// Third-party package import paths usually contain "." (".com", ".org", ...).
+	// This logic is taken from the golang.org/x/tools/imports package.
+ return strings.Contains(importPath, ".")
+}
+
+// DeleteImport deletes the import path from the file f, if present.
+// If there are duplicate import declarations, all matching ones are deleted.
+func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
+ return DeleteNamedImport(fset, f, "", path)
+}
+
+// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
+// If there are duplicate import declarations, all matching ones are deleted.
+func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
+ var delspecs []*ast.ImportSpec
+ var delcomments []*ast.CommentGroup
+
+ // Find the import nodes that import path, if any.
+ for i := 0; i < len(f.Decls); i++ {
+ decl := f.Decls[i]
+ gen, ok := decl.(*ast.GenDecl)
+ if !ok || gen.Tok != token.IMPORT {
+ continue
+ }
+ for j := 0; j < len(gen.Specs); j++ {
+ spec := gen.Specs[j]
+ impspec := spec.(*ast.ImportSpec)
+ if importName(impspec) != name || importPath(impspec) != path {
+ continue
+ }
+
+ // We found an import spec that imports path.
+ // Delete it.
+ delspecs = append(delspecs, impspec)
+ deleted = true
+ copy(gen.Specs[j:], gen.Specs[j+1:])
+ gen.Specs = gen.Specs[:len(gen.Specs)-1]
+
+ // If this was the last import spec in this decl,
+ // delete the decl, too.
+ if len(gen.Specs) == 0 {
+ copy(f.Decls[i:], f.Decls[i+1:])
+ f.Decls = f.Decls[:len(f.Decls)-1]
+ i--
+ break
+ } else if len(gen.Specs) == 1 {
+ if impspec.Doc != nil {
+ delcomments = append(delcomments, impspec.Doc)
+ }
+ if impspec.Comment != nil {
+ delcomments = append(delcomments, impspec.Comment)
+ }
+ for _, cg := range f.Comments {
+ // Found comment on the same line as the import spec.
+ if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line {
+ delcomments = append(delcomments, cg)
+ break
+ }
+ }
+
+ spec := gen.Specs[0].(*ast.ImportSpec)
+
+ // Move the documentation right after the import decl.
+ if spec.Doc != nil {
+ for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line {
+ fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
+ }
+ }
+ for _, cg := range f.Comments {
+ if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line {
+ for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line {
+ fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
+ }
+ break
+ }
+ }
+ }
+ if j > 0 {
+ lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
+ lastLine := fset.PositionFor(lastImpspec.Path.ValuePos, false).Line
+ line := fset.PositionFor(impspec.Path.ValuePos, false).Line
+
+ // We deleted an entry but now there may be
+ // a blank line-sized hole where the import was.
+ if line-lastLine > 1 || !gen.Rparen.IsValid() {
+ // There was a blank line immediately preceding the deleted import,
+ // so there's no need to close the hole. The right parenthesis is
+					// invalid after AddImport to an import statement without parentheses.
+ // Do nothing.
+ } else if line != fset.File(gen.Rparen).LineCount() {
+ // There was no blank line. Close the hole.
+ fset.File(gen.Rparen).MergeLine(line)
+ }
+ }
+ j--
+ }
+ }
+
+ // Delete imports from f.Imports.
+ for i := 0; i < len(f.Imports); i++ {
+ imp := f.Imports[i]
+ for j, del := range delspecs {
+ if imp == del {
+ copy(f.Imports[i:], f.Imports[i+1:])
+ f.Imports = f.Imports[:len(f.Imports)-1]
+ copy(delspecs[j:], delspecs[j+1:])
+ delspecs = delspecs[:len(delspecs)-1]
+ i--
+ break
+ }
+ }
+ }
+
+ // Delete comments from f.Comments.
+ for i := 0; i < len(f.Comments); i++ {
+ cg := f.Comments[i]
+ for j, del := range delcomments {
+ if cg == del {
+ copy(f.Comments[i:], f.Comments[i+1:])
+ f.Comments = f.Comments[:len(f.Comments)-1]
+ copy(delcomments[j:], delcomments[j+1:])
+ delcomments = delcomments[:len(delcomments)-1]
+ i--
+ break
+ }
+ }
+ }
+
+ if len(delspecs) > 0 {
+ panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
+ }
+
+ return
+}
+
+// RewriteImport rewrites any import of path oldPath to path newPath.
+func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) {
+ for _, imp := range f.Imports {
+ if importPath(imp) == oldPath {
+ rewrote = true
+ // record old End, because the default is to compute
+ // it using the length of imp.Path.Value.
+ imp.EndPos = imp.End()
+ imp.Path.Value = strconv.Quote(newPath)
+ }
+ }
+ return
+}
+
+// UsesImport reports whether a given import is used.
+func UsesImport(f *ast.File, path string) (used bool) {
+ spec := importSpec(f, path)
+ if spec == nil {
+ return
+ }
+
+ name := spec.Name.String()
+ switch name {
+ case "<nil>":
+ // If the package name is not explicitly specified,
+ // make an educated guess. This is not guaranteed to be correct.
+ lastSlash := strings.LastIndex(path, "/")
+ if lastSlash == -1 {
+ name = path
+ } else {
+ name = path[lastSlash+1:]
+ }
+ case "_", ".":
+ // Not sure if this import is used - err on the side of caution.
+ return true
+ }
+
+ ast.Walk(visitFn(func(n ast.Node) {
+ sel, ok := n.(*ast.SelectorExpr)
+ if ok && isTopName(sel.X, name) {
+ used = true
+ }
+ }), f)
+
+ return
+}
+
+type visitFn func(node ast.Node)
+
+func (fn visitFn) Visit(node ast.Node) ast.Visitor {
+ fn(node)
+ return fn
+}
+
+// imports reports whether f has an import with the specified name and path.
+func imports(f *ast.File, name, path string) bool {
+ for _, s := range f.Imports {
+ if importName(s) == name && importPath(s) == path {
+ return true
+ }
+ }
+ return false
+}
+
+// importSpec returns the import spec if f imports path,
+// or nil otherwise.
+func importSpec(f *ast.File, path string) *ast.ImportSpec {
+ for _, s := range f.Imports {
+ if importPath(s) == path {
+ return s
+ }
+ }
+ return nil
+}
+
+// importName returns the name of s,
+// or "" if the import is not named.
+func importName(s *ast.ImportSpec) string {
+ if s.Name == nil {
+ return ""
+ }
+ return s.Name.Name
+}
+
+// importPath returns the unquoted import path of s,
+// or "" if the path is not properly quoted.
+func importPath(s *ast.ImportSpec) string {
+ t, err := strconv.Unquote(s.Path.Value)
+ if err != nil {
+ return ""
+ }
+ return t
+}
+
+// declImports reports whether gen contains an import of path.
+func declImports(gen *ast.GenDecl, path string) bool {
+ if gen.Tok != token.IMPORT {
+ return false
+ }
+ for _, spec := range gen.Specs {
+ impspec := spec.(*ast.ImportSpec)
+ if importPath(impspec) == path {
+ return true
+ }
+ }
+ return false
+}
+
+// matchLen returns the length of the longest path segment prefix shared by x and y.
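+// It counts path separators in the shared byte prefix; for example
+// (illustrative), matchLen("golang.org/x/tools", "golang.org/x/mod") == 2.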
+func matchLen(x, y string) int {
+ n := 0
+ for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ {
+ if x[i] == '/' {
+ n++
+ }
+ }
+ return n
+}
+
+// isTopName returns true if n is a top-level unresolved identifier with the given name.
+func isTopName(n ast.Expr, name string) bool {
+ id, ok := n.(*ast.Ident)
+ return ok && id.Name == name && id.Obj == nil
+}
+
+// Imports returns the file imports grouped by paragraph.
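+// A blank line between import specs starts a new group; for example
+// (illustrative), this declaration yields two groups of one spec each:
+//
+//	import (
+//		"fmt"
+//
+//		"golang.org/x/tools/go/ast/astutil"
+//	)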
+func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec {
+ var groups [][]*ast.ImportSpec
+
+ for _, decl := range f.Decls {
+ genDecl, ok := decl.(*ast.GenDecl)
+ if !ok || genDecl.Tok != token.IMPORT {
+ break
+ }
+
+ group := []*ast.ImportSpec{}
+
+ var lastLine int
+ for _, spec := range genDecl.Specs {
+ importSpec := spec.(*ast.ImportSpec)
+ pos := importSpec.Path.ValuePos
+ line := fset.Position(pos).Line
+ if lastLine > 0 && pos > 0 && line-lastLine > 1 {
+ groups = append(groups, group)
+ group = []*ast.ImportSpec{}
+ }
+ group = append(group, importSpec)
+ lastLine = line
+ }
+ groups = append(groups, group)
+ }
+
+ return groups
+}
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
new file mode 100644
index 000000000..58934f766
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
@@ -0,0 +1,486 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+import (
+ "fmt"
+ "go/ast"
+ "reflect"
+ "sort"
+)
+
+// An ApplyFunc is invoked by Apply for each node n, even if n is nil,
+// before and/or after the node's children, using a Cursor describing
+// the current node and providing operations on it.
+//
+// The return value of ApplyFunc controls the syntax tree traversal.
+// See Apply for details.
+type ApplyFunc func(*Cursor) bool
+
+// Apply traverses a syntax tree recursively, starting with root,
+// and calling pre and post for each node as described below.
+// Apply returns the syntax tree, possibly modified.
+//
+// If pre is not nil, it is called for each node before the node's
+// children are traversed (pre-order). If pre returns false, no
+// children are traversed, and post is not called for that node.
+//
+// If post is not nil, and a prior call of pre didn't return false,
+// post is called for each node after its children are traversed
+// (post-order). If post returns false, traversal is terminated and
+// Apply returns immediately.
+//
+// Only fields that refer to AST nodes are considered children;
+// i.e., token.Pos, Scopes, Objects, and fields of basic types
+// (strings, etc.) are ignored.
+//
+// Children are traversed in the order in which they appear in the
+// respective node's struct definition. A package's files are
+// traversed in the filenames' alphabetical order.
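+//
+// A minimal usage sketch (illustrative, assuming the relevant nodes sit in
+// slices, as in a parsed file):
+//
+//	root = Apply(root, nil, func(c *Cursor) bool {
+//		if _, ok := c.Node().(*ast.EmptyStmt); ok && c.Index() >= 0 {
+//			c.Delete() // the node is in a slice, so Delete will not panic
+//		}
+//		return true
+//	})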
+func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) {
+ parent := &struct{ ast.Node }{root}
+ defer func() {
+ if r := recover(); r != nil && r != abort {
+ panic(r)
+ }
+ result = parent.Node
+ }()
+ a := &application{pre: pre, post: post}
+ a.apply(parent, "Node", nil, root)
+ return
+}
+
+var abort = new(int) // singleton, to signal termination of Apply
+
+// A Cursor describes a node encountered during Apply.
+// Information about the node and its parent is available
+// from the Node, Parent, Name, and Index methods.
+//
+// If p is a variable whose type and value are those of the current parent
+// node c.Parent(), and f is the field identifier with name c.Name(),
+// the following invariants hold:
+//
+// p.f == c.Node() if c.Index() < 0
+// p.f[c.Index()] == c.Node() if c.Index() >= 0
+//
+// The methods Replace, Delete, InsertBefore, and InsertAfter
+// can be used to change the AST without disrupting Apply.
+type Cursor struct {
+ parent ast.Node
+ name string
+ iter *iterator // valid if non-nil
+ node ast.Node
+}
+
+// Node returns the current Node.
+func (c *Cursor) Node() ast.Node { return c.node }
+
+// Parent returns the parent of the current Node.
+func (c *Cursor) Parent() ast.Node { return c.parent }
+
+// Name returns the name of the parent Node field that contains the current Node.
+// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns
+// the filename for the current Node.
+func (c *Cursor) Name() string { return c.name }
+
+// Index reports the index >= 0 of the current Node in the slice of Nodes that
+// contains it, or a value < 0 if the current Node is not part of a slice.
+// The index of the current node changes if InsertBefore is called while
+// processing the current node.
+func (c *Cursor) Index() int {
+ if c.iter != nil {
+ return c.iter.index
+ }
+ return -1
+}
+
+// field returns the current node's parent field value.
+func (c *Cursor) field() reflect.Value {
+ return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name)
+}
+
+// Replace replaces the current Node with n.
+// The replacement node is not walked by Apply.
+func (c *Cursor) Replace(n ast.Node) {
+ if _, ok := c.node.(*ast.File); ok {
+ file, ok := n.(*ast.File)
+ if !ok {
+ panic("attempt to replace *ast.File with non-*ast.File")
+ }
+ c.parent.(*ast.Package).Files[c.name] = file
+ return
+ }
+
+ v := c.field()
+ if i := c.Index(); i >= 0 {
+ v = v.Index(i)
+ }
+ v.Set(reflect.ValueOf(n))
+}
+
+// Delete deletes the current Node from its containing slice.
+// If the current Node is not part of a slice, Delete panics.
+// As a special case, if the current node is a package file,
+// Delete removes it from the package's Files map.
+func (c *Cursor) Delete() {
+ if _, ok := c.node.(*ast.File); ok {
+ delete(c.parent.(*ast.Package).Files, c.name)
+ return
+ }
+
+ i := c.Index()
+ if i < 0 {
+ panic("Delete node not contained in slice")
+ }
+ v := c.field()
+ l := v.Len()
+ reflect.Copy(v.Slice(i, l), v.Slice(i+1, l))
+ v.Index(l - 1).Set(reflect.Zero(v.Type().Elem()))
+ v.SetLen(l - 1)
+ c.iter.step--
+}
+
+// InsertAfter inserts n after the current Node in its containing slice.
+// If the current Node is not part of a slice, InsertAfter panics.
+// Apply does not walk n.
+func (c *Cursor) InsertAfter(n ast.Node) {
+ i := c.Index()
+ if i < 0 {
+ panic("InsertAfter node not contained in slice")
+ }
+ v := c.field()
+ v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
+ l := v.Len()
+ reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l))
+ v.Index(i + 1).Set(reflect.ValueOf(n))
+ c.iter.step++
+}
+
+// InsertBefore inserts n before the current Node in its containing slice.
+// If the current Node is not part of a slice, InsertBefore panics.
+// Apply will not walk n.
+func (c *Cursor) InsertBefore(n ast.Node) {
+ i := c.Index()
+ if i < 0 {
+ panic("InsertBefore node not contained in slice")
+ }
+ v := c.field()
+ v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
+ l := v.Len()
+ reflect.Copy(v.Slice(i+1, l), v.Slice(i, l))
+ v.Index(i).Set(reflect.ValueOf(n))
+ c.iter.index++
+}
+
+// application carries all the shared data so we can pass it around cheaply.
+type application struct {
+ pre, post ApplyFunc
+ cursor Cursor
+ iter iterator
+}
+
+func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) {
+ // convert typed nil into untyped nil
+ if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() {
+ n = nil
+ }
+
+ // avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead
+ saved := a.cursor
+ a.cursor.parent = parent
+ a.cursor.name = name
+ a.cursor.iter = iter
+ a.cursor.node = n
+
+ if a.pre != nil && !a.pre(&a.cursor) {
+ a.cursor = saved
+ return
+ }
+
+ // walk children
+ // (the order of the cases matches the order of the corresponding node types in go/ast)
+ switch n := n.(type) {
+ case nil:
+ // nothing to do
+
+ // Comments and fields
+ case *ast.Comment:
+ // nothing to do
+
+ case *ast.CommentGroup:
+ if n != nil {
+ a.applyList(n, "List")
+ }
+
+ case *ast.Field:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Names")
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Tag", nil, n.Tag)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.FieldList:
+ a.applyList(n, "List")
+
+ // Expressions
+ case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
+ // nothing to do
+
+ case *ast.Ellipsis:
+ a.apply(n, "Elt", nil, n.Elt)
+
+ case *ast.FuncLit:
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.CompositeLit:
+ a.apply(n, "Type", nil, n.Type)
+ a.applyList(n, "Elts")
+
+ case *ast.ParenExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.SelectorExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Sel", nil, n.Sel)
+
+ case *ast.IndexExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Index", nil, n.Index)
+
+ case *ast.IndexListExpr:
+ a.apply(n, "X", nil, n.X)
+ a.applyList(n, "Indices")
+
+ case *ast.SliceExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Low", nil, n.Low)
+ a.apply(n, "High", nil, n.High)
+ a.apply(n, "Max", nil, n.Max)
+
+ case *ast.TypeAssertExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Type", nil, n.Type)
+
+ case *ast.CallExpr:
+ a.apply(n, "Fun", nil, n.Fun)
+ a.applyList(n, "Args")
+
+ case *ast.StarExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.UnaryExpr:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.BinaryExpr:
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Y", nil, n.Y)
+
+ case *ast.KeyValueExpr:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+
+ // Types
+ case *ast.ArrayType:
+ a.apply(n, "Len", nil, n.Len)
+ a.apply(n, "Elt", nil, n.Elt)
+
+ case *ast.StructType:
+ a.apply(n, "Fields", nil, n.Fields)
+
+ case *ast.FuncType:
+ if tparams := n.TypeParams; tparams != nil {
+ a.apply(n, "TypeParams", nil, tparams)
+ }
+ a.apply(n, "Params", nil, n.Params)
+ a.apply(n, "Results", nil, n.Results)
+
+ case *ast.InterfaceType:
+ a.apply(n, "Methods", nil, n.Methods)
+
+ case *ast.MapType:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+
+ case *ast.ChanType:
+ a.apply(n, "Value", nil, n.Value)
+
+ // Statements
+ case *ast.BadStmt:
+ // nothing to do
+
+ case *ast.DeclStmt:
+ a.apply(n, "Decl", nil, n.Decl)
+
+ case *ast.EmptyStmt:
+ // nothing to do
+
+ case *ast.LabeledStmt:
+ a.apply(n, "Label", nil, n.Label)
+ a.apply(n, "Stmt", nil, n.Stmt)
+
+ case *ast.ExprStmt:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.SendStmt:
+ a.apply(n, "Chan", nil, n.Chan)
+ a.apply(n, "Value", nil, n.Value)
+
+ case *ast.IncDecStmt:
+ a.apply(n, "X", nil, n.X)
+
+ case *ast.AssignStmt:
+ a.applyList(n, "Lhs")
+ a.applyList(n, "Rhs")
+
+ case *ast.GoStmt:
+ a.apply(n, "Call", nil, n.Call)
+
+ case *ast.DeferStmt:
+ a.apply(n, "Call", nil, n.Call)
+
+ case *ast.ReturnStmt:
+ a.applyList(n, "Results")
+
+ case *ast.BranchStmt:
+ a.apply(n, "Label", nil, n.Label)
+
+ case *ast.BlockStmt:
+ a.applyList(n, "List")
+
+ case *ast.IfStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Cond", nil, n.Cond)
+ a.apply(n, "Body", nil, n.Body)
+ a.apply(n, "Else", nil, n.Else)
+
+ case *ast.CaseClause:
+ a.applyList(n, "List")
+ a.applyList(n, "Body")
+
+ case *ast.SwitchStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Tag", nil, n.Tag)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.TypeSwitchStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Assign", nil, n.Assign)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.CommClause:
+ a.apply(n, "Comm", nil, n.Comm)
+ a.applyList(n, "Body")
+
+ case *ast.SelectStmt:
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.ForStmt:
+ a.apply(n, "Init", nil, n.Init)
+ a.apply(n, "Cond", nil, n.Cond)
+ a.apply(n, "Post", nil, n.Post)
+ a.apply(n, "Body", nil, n.Body)
+
+ case *ast.RangeStmt:
+ a.apply(n, "Key", nil, n.Key)
+ a.apply(n, "Value", nil, n.Value)
+ a.apply(n, "X", nil, n.X)
+ a.apply(n, "Body", nil, n.Body)
+
+ // Declarations
+ case *ast.ImportSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ a.apply(n, "Path", nil, n.Path)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.ValueSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Names")
+ a.apply(n, "Type", nil, n.Type)
+ a.applyList(n, "Values")
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.TypeSpec:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ if tparams := n.TypeParams; tparams != nil {
+ a.apply(n, "TypeParams", nil, tparams)
+ }
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Comment", nil, n.Comment)
+
+ case *ast.BadDecl:
+ // nothing to do
+
+ case *ast.GenDecl:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.applyList(n, "Specs")
+
+ case *ast.FuncDecl:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Recv", nil, n.Recv)
+ a.apply(n, "Name", nil, n.Name)
+ a.apply(n, "Type", nil, n.Type)
+ a.apply(n, "Body", nil, n.Body)
+
+ // Files and packages
+ case *ast.File:
+ a.apply(n, "Doc", nil, n.Doc)
+ a.apply(n, "Name", nil, n.Name)
+ a.applyList(n, "Decls")
+ // Don't walk n.Comments; they have either been walked already if
+ // they are Doc comments, or they can be easily walked explicitly.
+
+ case *ast.Package:
+ // collect and sort names for reproducible behavior
+ var names []string
+ for name := range n.Files {
+ names = append(names, name)
+ }
+ sort.Strings(names)
+ for _, name := range names {
+ a.apply(n, name, nil, n.Files[name])
+ }
+
+ default:
+ panic(fmt.Sprintf("Apply: unexpected node type %T", n))
+ }
+
+ if a.post != nil && !a.post(&a.cursor) {
+ panic(abort)
+ }
+
+ a.cursor = saved
+}
+
+// An iterator controls iteration over a slice of nodes.
+type iterator struct {
+ index, step int
+}
+
+func (a *application) applyList(parent ast.Node, name string) {
+ // avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead
+ saved := a.iter
+ a.iter.index = 0
+ for {
+ // must reload parent.name each time, since cursor modifications might change it
+ v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name)
+ if a.iter.index >= v.Len() {
+ break
+ }
+
+ // element x may be nil in a bad AST - be cautious
+ var x ast.Node
+ if e := v.Index(a.iter.index); e.IsValid() {
+ x = e.Interface().(ast.Node)
+ }
+
+ a.iter.step = 1
+ a.apply(parent, name, &a.iter, x)
+ a.iter.index += a.iter.step
+ }
+ a.iter = saved
+}
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/util.go b/vendor/golang.org/x/tools/go/ast/astutil/util.go
new file mode 100644
index 000000000..919d5305a
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/ast/astutil/util.go
@@ -0,0 +1,18 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package astutil
+
+import "go/ast"
+
+// Unparen returns e with any enclosing parentheses stripped.
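+// For example (illustrative), given the expression ((x + y)), Unparen returns x + y.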
+func Unparen(e ast.Expr) ast.Expr {
+ for {
+ p, ok := e.(*ast.ParenExpr)
+ if !ok {
+ return e
+ }
+ e = p.X
+ }
+}
diff --git a/vendor/golang.org/x/tools/go/buildutil/allpackages.go b/vendor/golang.org/x/tools/go/buildutil/allpackages.go
new file mode 100644
index 000000000..dfb8cd6c7
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/allpackages.go
@@ -0,0 +1,195 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package buildutil provides utilities related to the go/build
+// package in the standard library.
+//
+// All I/O is done via the build.Context file system interface, which must
+// be concurrency-safe.
+package buildutil // import "golang.org/x/tools/go/buildutil"
+
+import (
+ "go/build"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+ "sync"
+)
+
+// AllPackages returns the package path of each Go package in any source
+// directory of the specified build context (e.g. $GOROOT or an element
+// of $GOPATH). Errors are ignored. The results are sorted.
+// All package paths are canonical, and thus may contain "/vendor/".
+//
+// The result may include import paths for directories that contain no
+// *.go files, such as "archive" (in $GOROOT/src).
+//
+// All I/O is done via the build.Context file system interface,
+// which must be concurrency-safe.
+func AllPackages(ctxt *build.Context) []string {
+ var list []string
+ ForEachPackage(ctxt, func(pkg string, _ error) {
+ list = append(list, pkg)
+ })
+ sort.Strings(list)
+ return list
+}
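+
+// Usage sketch (assumes a populated build context; the output shown
+// is illustrative):
+//
+//	for _, path := range AllPackages(&build.Default) {
+//		println(path) // e.g. "archive/tar", "fmt", ... (sorted)
+//	}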
+
+// ForEachPackage calls the found function with the package path of
+// each Go package it finds in any source directory of the specified
+// build context (e.g. $GOROOT or an element of $GOPATH).
+// All package paths are canonical, and thus may contain "/vendor/".
+//
+// If the package directory exists but could not be read, the second
+// argument to the found function provides the error.
+//
+// All I/O is done via the build.Context file system interface,
+// which must be concurrency-safe.
+func ForEachPackage(ctxt *build.Context, found func(importPath string, err error)) {
+ ch := make(chan item)
+
+ var wg sync.WaitGroup
+ for _, root := range ctxt.SrcDirs() {
+ root := root
+ wg.Add(1)
+ go func() {
+ allPackages(ctxt, root, ch)
+ wg.Done()
+ }()
+ }
+ go func() {
+ wg.Wait()
+ close(ch)
+ }()
+
+ // All calls to found occur in the caller's goroutine.
+ for i := range ch {
+ found(i.importPath, i.err)
+ }
+}
+
+type item struct {
+ importPath string
+ err error // (optional)
+}
+
+// We use a process-wide counting semaphore to limit
+// the number of parallel calls to ReadDir.
+var ioLimit = make(chan bool, 20)
+
+func allPackages(ctxt *build.Context, root string, ch chan<- item) {
+ root = filepath.Clean(root) + string(os.PathSeparator)
+
+ var wg sync.WaitGroup
+
+ var walkDir func(dir string)
+ walkDir = func(dir string) {
+ // Avoid .foo, _foo, and testdata directory trees.
+ base := filepath.Base(dir)
+ if base == "" || base[0] == '.' || base[0] == '_' || base == "testdata" {
+ return
+ }
+
+ pkg := filepath.ToSlash(strings.TrimPrefix(dir, root))
+
+ // Prune search if we encounter any of these import paths.
+ switch pkg {
+ case "builtin":
+ return
+ }
+
+ ioLimit <- true
+ files, err := ReadDir(ctxt, dir)
+ <-ioLimit
+ if pkg != "" || err != nil {
+ ch <- item{pkg, err}
+ }
+ for _, fi := range files {
+ fi := fi
+ if fi.IsDir() {
+ wg.Add(1)
+ go func() {
+ walkDir(filepath.Join(dir, fi.Name()))
+ wg.Done()
+ }()
+ }
+ }
+ }
+
+ walkDir(root)
+ wg.Wait()
+}
+
+// ExpandPatterns returns the set of packages matched by patterns,
+// which may have the following forms:
+//
+// golang.org/x/tools/cmd/guru # a single package
+// golang.org/x/tools/... # all packages beneath dir
+// ... # the entire workspace.
+//
+// Order is significant: a pattern preceded by '-' removes matching
+// packages from the set. For example, these patterns match all encoding
+// packages except encoding/xml:
+//
+// encoding/... -encoding/xml
+//
+// A trailing slash in a pattern is ignored. (Path components of Go
+// package names are separated by slash, not the platform's path separator.)
+func ExpandPatterns(ctxt *build.Context, patterns []string) map[string]bool {
+ // TODO(adonovan): support other features of 'go list':
+ // - "std"/"cmd"/"all" meta-packages
+ // - "..." not at the end of a pattern
+ // - relative patterns using "./" or "../" prefix
+
+ pkgs := make(map[string]bool)
+ doPkg := func(pkg string, neg bool) {
+ if neg {
+ delete(pkgs, pkg)
+ } else {
+ pkgs[pkg] = true
+ }
+ }
+
+ // Scan entire workspace if wildcards are present.
+ // TODO(adonovan): opt: scan only the necessary subtrees of the workspace.
+ var all []string
+ for _, arg := range patterns {
+ if strings.HasSuffix(arg, "...") {
+ all = AllPackages(ctxt)
+ break
+ }
+ }
+
+ for _, arg := range patterns {
+ if arg == "" {
+ continue
+ }
+
+ neg := arg[0] == '-'
+ if neg {
+ arg = arg[1:]
+ }
+
+ if arg == "..." {
+ // ... matches all packages
+ for _, pkg := range all {
+ doPkg(pkg, neg)
+ }
+ } else if dir := strings.TrimSuffix(arg, "/..."); dir != arg {
+ // dir/... matches all packages beneath dir
+ for _, pkg := range all {
+ if strings.HasPrefix(pkg, dir) &&
+ (len(pkg) == len(dir) || pkg[len(dir)] == '/') {
+ doPkg(pkg, neg)
+ }
+ }
+ } else {
+ // single package
+ doPkg(strings.TrimSuffix(arg, "/"), neg)
+ }
+ }
+
+ return pkgs
+}
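+
+// A sketch of the pattern algebra described above, given some
+// ctxt *build.Context (package paths are illustrative):
+//
+//	pkgs := ExpandPatterns(ctxt, []string{"encoding/...", "-encoding/xml"})
+//	// pkgs["encoding/json"] == true
+//	// pkgs["encoding/xml"] == false (deleted by the negated pattern)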
diff --git a/vendor/golang.org/x/tools/go/buildutil/fakecontext.go b/vendor/golang.org/x/tools/go/buildutil/fakecontext.go
new file mode 100644
index 000000000..763d18809
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/fakecontext.go
@@ -0,0 +1,111 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package buildutil
+
+import (
+ "fmt"
+ "go/build"
+ "io"
+ "os"
+ "path"
+ "path/filepath"
+ "sort"
+ "strings"
+ "time"
+)
+
+// FakeContext returns a build.Context for the fake file tree specified
+// by pkgs, which maps package import paths to a mapping from file base
+// names to contents.
+//
+// The fake Context has a GOROOT of "/go" and no GOPATH, and overrides
+// the necessary file access methods to read from memory instead of the
+// real file system.
+//
+// Unlike a real file tree, the fake one has only two levels---packages
+// and files---so ReadDir("/go/src/") returns all packages under
+// /go/src/ including, for instance, "math" and "math/big".
+// ReadDir("/go/src/math/big") would return all the files in the
+// "math/big" package.
+func FakeContext(pkgs map[string]map[string]string) *build.Context {
+ clean := func(filename string) string {
+ f := path.Clean(filepath.ToSlash(filename))
+ // Removing "/go/src" while respecting segment
+ // boundaries has this unfortunate corner case:
+ if f == "/go/src" {
+ return ""
+ }
+ return strings.TrimPrefix(f, "/go/src/")
+ }
+
+ ctxt := build.Default // copy
+ ctxt.GOROOT = "/go"
+ ctxt.GOPATH = ""
+ ctxt.Compiler = "gc"
+ ctxt.IsDir = func(dir string) bool {
+ dir = clean(dir)
+ if dir == "" {
+ return true // needed by (*build.Context).SrcDirs
+ }
+ return pkgs[dir] != nil
+ }
+ ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) {
+ dir = clean(dir)
+ var fis []os.FileInfo
+ if dir == "" {
+ // enumerate packages
+ for importPath := range pkgs {
+ fis = append(fis, fakeDirInfo(importPath))
+ }
+ } else {
+ // enumerate files of package
+ for basename := range pkgs[dir] {
+ fis = append(fis, fakeFileInfo(basename))
+ }
+ }
+ sort.Sort(byName(fis))
+ return fis, nil
+ }
+ ctxt.OpenFile = func(filename string) (io.ReadCloser, error) {
+ filename = clean(filename)
+ dir, base := path.Split(filename)
+ content, ok := pkgs[path.Clean(dir)][base]
+ if !ok {
+ return nil, fmt.Errorf("file not found: %s", filename)
+ }
+ return io.NopCloser(strings.NewReader(content)), nil
+ }
+ ctxt.IsAbsPath = func(path string) bool {
+ path = filepath.ToSlash(path)
+ // Don't rely on the default (filepath.IsAbs) since on
+ // Windows, it reports virtual paths as non-absolute.
+ return strings.HasPrefix(path, "/")
+ }
+ return &ctxt
+}
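+
+// A minimal sketch of the two-level fake tree (file contents are
+// placeholders):
+//
+//	ctxt := FakeContext(map[string]map[string]string{
+//		"math/big": {"big.go": "package big"},
+//	})
+//	rc, _ := ctxt.OpenFile("/go/src/math/big/big.go") // served from memory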
+
+type byName []os.FileInfo
+
+func (s byName) Len() int { return len(s) }
+func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
+func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() }
+
+type fakeFileInfo string
+
+func (fi fakeFileInfo) Name() string { return string(fi) }
+func (fakeFileInfo) Sys() interface{} { return nil }
+func (fakeFileInfo) ModTime() time.Time { return time.Time{} }
+func (fakeFileInfo) IsDir() bool { return false }
+func (fakeFileInfo) Size() int64 { return 0 }
+func (fakeFileInfo) Mode() os.FileMode { return 0644 }
+
+type fakeDirInfo string
+
+func (fd fakeDirInfo) Name() string { return string(fd) }
+func (fakeDirInfo) Sys() interface{} { return nil }
+func (fakeDirInfo) ModTime() time.Time { return time.Time{} }
+func (fakeDirInfo) IsDir() bool { return true }
+func (fakeDirInfo) Size() int64 { return 0 }
+func (fakeDirInfo) Mode() os.FileMode { return 0755 }
diff --git a/vendor/golang.org/x/tools/go/buildutil/overlay.go b/vendor/golang.org/x/tools/go/buildutil/overlay.go
new file mode 100644
index 000000000..7e371658d
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/overlay.go
@@ -0,0 +1,101 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package buildutil
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/build"
+ "io"
+ "path/filepath"
+ "strconv"
+ "strings"
+)
+
+// OverlayContext overlays a build.Context with additional files from
+// a map. Files in the map take precedence over other files.
+//
+// In addition to plain string comparison, two file names are
+// considered equal if their base names match and their directory
+// components point at the same directory on the file system. That is,
+// symbolic links are followed for directories, but not files.
+//
+// A common use case for OverlayContext is to allow editors to pass in
+// a set of unsaved, modified files.
+//
+// Currently, only the Context.OpenFile function will respect the
+// overlay. This may change in the future.
+func OverlayContext(orig *build.Context, overlay map[string][]byte) *build.Context {
+ // TODO(dominikh): Implement IsDir, HasSubdir and ReadDir
+
+ rc := func(data []byte) (io.ReadCloser, error) {
+ return io.NopCloser(bytes.NewBuffer(data)), nil
+ }
+
+ copy := *orig // make a copy
+ ctxt := &copy
+ ctxt.OpenFile = func(path string) (io.ReadCloser, error) {
+ // Fast path: names match exactly.
+ if content, ok := overlay[path]; ok {
+ return rc(content)
+ }
+
+ // Slow path: check for same file under a different
+ // alias, perhaps due to a symbolic link.
+ for filename, content := range overlay {
+ if sameFile(path, filename) {
+ return rc(content)
+ }
+ }
+
+ return OpenFile(orig, path)
+ }
+ return ctxt
+}
+
+// ParseOverlayArchive parses an archive containing Go files and their
+// contents. The result is intended to be used with OverlayContext.
+//
+// # Archive format
+//
+// The archive consists of a series of files. Each file consists of a
+// name, a decimal file size and the file contents, separated by
+// newlines. No newline follows after the file contents.
+func ParseOverlayArchive(archive io.Reader) (map[string][]byte, error) {
+ overlay := make(map[string][]byte)
+ r := bufio.NewReader(archive)
+ for {
+ // Read file name.
+ filename, err := r.ReadString('\n')
+ if err != nil {
+ if err == io.EOF {
+ break // OK
+ }
+ return nil, fmt.Errorf("reading archive file name: %v", err)
+ }
+ filename = filepath.Clean(strings.TrimSpace(filename))
+
+ // Read file size.
+ sz, err := r.ReadString('\n')
+ if err != nil {
+ return nil, fmt.Errorf("reading size of archive file %s: %v", filename, err)
+ }
+ sz = strings.TrimSpace(sz)
+ size, err := strconv.ParseUint(sz, 10, 32)
+ if err != nil {
+ return nil, fmt.Errorf("parsing size of archive file %s: %v", filename, err)
+ }
+
+ // Read file content.
+ content := make([]byte, size)
+ if _, err := io.ReadFull(r, content); err != nil {
+ return nil, fmt.Errorf("reading archive file %s: %v", filename, err)
+ }
+ overlay[filename] = content
+ }
+
+ return overlay, nil
+}
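+
+// For instance, an archive holding a single 9-byte file would read as
+// follows (contents illustrative; note there is no trailing newline
+// after the contents):
+//
+//	a.go
+//	9
+//	package a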
diff --git a/vendor/golang.org/x/tools/go/buildutil/tags.go b/vendor/golang.org/x/tools/go/buildutil/tags.go
new file mode 100644
index 000000000..7cf523bca
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/tags.go
@@ -0,0 +1,80 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package buildutil
+
+// This logic was copied from stringsFlag from $GOROOT/src/cmd/go/build.go.
+
+import "fmt"
+
+const TagsFlagDoc = "a list of `build tags` to consider satisfied during the build. " +
+ "For more information about build tags, see the description of " +
+ "build constraints in the documentation for the go/build package"
+
+// TagsFlag is an implementation of the flag.Value and flag.Getter interfaces that parses
+// a flag value in the same manner as go build's -tags flag and
+// populates a []string slice.
+//
+// See $GOROOT/src/go/build/doc.go for description of build tags.
+// See $GOROOT/src/cmd/go/doc.go for description of 'go build -tags' flag.
+//
+// Example:
+//
+// flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc)
+type TagsFlag []string
+
+func (v *TagsFlag) Set(s string) error {
+ var err error
+ *v, err = splitQuotedFields(s)
+ if *v == nil {
+ *v = []string{}
+ }
+ return err
+}
+
+func (v *TagsFlag) Get() interface{} { return *v }
+
+func splitQuotedFields(s string) ([]string, error) {
+ // Split fields allowing '' or "" around elements.
+ // Quotes further inside the string do not count.
+ var f []string
+ for len(s) > 0 {
+ for len(s) > 0 && isSpaceByte(s[0]) {
+ s = s[1:]
+ }
+ if len(s) == 0 {
+ break
+ }
+ // Accept a quoted string. No unescaping inside.
+ if s[0] == '"' || s[0] == '\'' {
+ quote := s[0]
+ s = s[1:]
+ i := 0
+ for i < len(s) && s[i] != quote {
+ i++
+ }
+ if i >= len(s) {
+ return nil, fmt.Errorf("unterminated %c string", quote)
+ }
+ f = append(f, s[:i])
+ s = s[i+1:]
+ continue
+ }
+ i := 0
+ for i < len(s) && !isSpaceByte(s[i]) {
+ i++
+ }
+ f = append(f, s[:i])
+ s = s[i:]
+ }
+ return f, nil
+}
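+
+// Behaviour sketch (inputs are illustrative):
+//
+//	splitQuotedFields(`foo 'bar baz'`) // → ["foo", "bar baz"], nil
+//	splitQuotedFields(`dangling '`)    // → nil, "unterminated ' string"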
+
+func (v *TagsFlag) String() string {
+ return "<tagsFlag>"
+}
+
+func isSpaceByte(c byte) bool {
+ return c == ' ' || c == '\t' || c == '\n' || c == '\r'
+}
diff --git a/vendor/golang.org/x/tools/go/buildutil/util.go b/vendor/golang.org/x/tools/go/buildutil/util.go
new file mode 100644
index 000000000..bee6390de
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/buildutil/util.go
@@ -0,0 +1,209 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package buildutil
+
+import (
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "io"
+ "io/ioutil"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+)
+
+// ParseFile behaves like parser.ParseFile,
+// but uses the build context's file system interface, if any.
+//
+// If file is not absolute (as defined by IsAbsPath), the (dir, file)
+// components are joined using JoinPath; dir must be absolute.
+//
+// The displayPath function, if provided, is used to transform the
+// filename that will be attached to the ASTs.
+//
+// TODO(adonovan): call this from go/loader.parseFiles when the tree thaws.
+func ParseFile(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, file string, mode parser.Mode) (*ast.File, error) {
+ if !IsAbsPath(ctxt, file) {
+ file = JoinPath(ctxt, dir, file)
+ }
+ rd, err := OpenFile(ctxt, file)
+ if err != nil {
+ return nil, err
+ }
+ defer rd.Close() // ignore error
+ if displayPath != nil {
+ file = displayPath(file)
+ }
+ return parser.ParseFile(fset, file, rd, mode)
+}
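+
+// Usage sketch (paths are illustrative; a nil displayPath leaves file
+// names untransformed):
+//
+//	fset := token.NewFileSet()
+//	f, err := ParseFile(fset, &build.Default, nil, "/go/src/fmt", "print.go", 0)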
+
+// ContainingPackage returns the package containing filename.
+//
+// If filename is not absolute, it is interpreted relative to working directory dir.
+// All I/O is via the build context's file system interface, if any.
+//
+// The '...Files []string' fields of the resulting build.Package are not
+// populated (build.FindOnly mode).
+func ContainingPackage(ctxt *build.Context, dir, filename string) (*build.Package, error) {
+ if !IsAbsPath(ctxt, filename) {
+ filename = JoinPath(ctxt, dir, filename)
+ }
+
+ // We must not assume the file tree uses
+ // "/" always,
+ // `\` always,
+ // or os.PathSeparator (which varies by platform),
+ // but to make any progress, we are forced to assume that
+ // paths will not use `\` unless the PathSeparator
+ // is also `\`, thus we can rely on filepath.ToSlash for some sanity.
+
+ dirSlash := path.Dir(filepath.ToSlash(filename)) + "/"
+
+ // We assume that no source root (GOPATH[i] or GOROOT) contains any other.
+ for _, srcdir := range ctxt.SrcDirs() {
+ srcdirSlash := filepath.ToSlash(srcdir) + "/"
+ if importPath, ok := HasSubdir(ctxt, srcdirSlash, dirSlash); ok {
+ return ctxt.Import(importPath, dir, build.FindOnly)
+ }
+ }
+
+ return nil, fmt.Errorf("can't find package containing %s", filename)
+}
+
+// -- Effective methods of file system interface -------------------------
+
+// (go/build.Context defines these as methods, but does not export them.)
+
+// HasSubdir calls ctxt.HasSubdir (if not nil) or else uses
+// the local file system to answer the question.
+func HasSubdir(ctxt *build.Context, root, dir string) (rel string, ok bool) {
+ if f := ctxt.HasSubdir; f != nil {
+ return f(root, dir)
+ }
+
+ // Try using paths we received.
+ if rel, ok = hasSubdir(root, dir); ok {
+ return
+ }
+
+ // Try expanding symlinks and comparing
+ // expanded against unexpanded and
+ // expanded against expanded.
+ rootSym, _ := filepath.EvalSymlinks(root)
+ dirSym, _ := filepath.EvalSymlinks(dir)
+
+ if rel, ok = hasSubdir(rootSym, dir); ok {
+ return
+ }
+ if rel, ok = hasSubdir(root, dirSym); ok {
+ return
+ }
+ return hasSubdir(rootSym, dirSym)
+}
+
+func hasSubdir(root, dir string) (rel string, ok bool) {
+ const sep = string(filepath.Separator)
+ root = filepath.Clean(root)
+ if !strings.HasSuffix(root, sep) {
+ root += sep
+ }
+
+ dir = filepath.Clean(dir)
+ if !strings.HasPrefix(dir, root) {
+ return "", false
+ }
+
+ return filepath.ToSlash(dir[len(root):]), true
+}
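+
+// The expected semantics, sketched with illustrative paths:
+//
+//	rel, ok := HasSubdir(ctxt, "/go/src", "/go/src/net/http")
+//	// rel == "net/http", ok == true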
+
+// FileExists returns true if the specified file exists,
+// using the build context's file system interface.
+func FileExists(ctxt *build.Context, path string) bool {
+ if ctxt.OpenFile != nil {
+ r, err := ctxt.OpenFile(path)
+ if err != nil {
+ return false
+ }
+ r.Close() // ignore error
+ return true
+ }
+ _, err := os.Stat(path)
+ return err == nil
+}
+
+// OpenFile behaves like os.Open,
+// but uses the build context's file system interface, if any.
+func OpenFile(ctxt *build.Context, path string) (io.ReadCloser, error) {
+ if ctxt.OpenFile != nil {
+ return ctxt.OpenFile(path)
+ }
+ return os.Open(path)
+}
+
+// IsAbsPath behaves like filepath.IsAbs,
+// but uses the build context's file system interface, if any.
+func IsAbsPath(ctxt *build.Context, path string) bool {
+ if ctxt.IsAbsPath != nil {
+ return ctxt.IsAbsPath(path)
+ }
+ return filepath.IsAbs(path)
+}
+
+// JoinPath behaves like filepath.Join,
+// but uses the build context's file system interface, if any.
+func JoinPath(ctxt *build.Context, path ...string) string {
+ if ctxt.JoinPath != nil {
+ return ctxt.JoinPath(path...)
+ }
+ return filepath.Join(path...)
+}
+
+// IsDir behaves like os.Stat plus IsDir,
+// but uses the build context's file system interface, if any.
+func IsDir(ctxt *build.Context, path string) bool {
+ if ctxt.IsDir != nil {
+ return ctxt.IsDir(path)
+ }
+ fi, err := os.Stat(path)
+ return err == nil && fi.IsDir()
+}
+
+// ReadDir behaves like ioutil.ReadDir,
+// but uses the build context's file system interface, if any.
+func ReadDir(ctxt *build.Context, path string) ([]os.FileInfo, error) {
+ if ctxt.ReadDir != nil {
+ return ctxt.ReadDir(path)
+ }
+ return ioutil.ReadDir(path)
+}
+
+// SplitPathList behaves like filepath.SplitList,
+// but uses the build context's file system interface, if any.
+func SplitPathList(ctxt *build.Context, s string) []string {
+ if ctxt.SplitPathList != nil {
+ return ctxt.SplitPathList(s)
+ }
+ return filepath.SplitList(s)
+}
+
+// sameFile returns true if x and y have the same basename and denote
+// the same file.
+func sameFile(x, y string) bool {
+ if path.Clean(x) == path.Clean(y) {
+ return true
+ }
+ if filepath.Base(x) == filepath.Base(y) { // (optimisation)
+ if xi, err := os.Stat(x); err == nil {
+ if yi, err := os.Stat(y); err == nil {
+ return os.SameFile(xi, yi)
+ }
+ }
+ }
+ return false
+}
diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go
new file mode 100644
index 000000000..697974bb9
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go
@@ -0,0 +1,219 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package cgo handles cgo preprocessing of files containing `import "C"`.
+//
+// DESIGN
+//
+// The approach taken is to run the cgo processor on the package's
+// CgoFiles and parse the output, faking the filenames of the
+// resulting ASTs so that the synthetic file containing the C types is
+// called "C" (e.g. "~/go/src/net/C") and the preprocessed files
+// have their original names (e.g. "~/go/src/net/cgo_unix.go"),
+// not the names of the actual temporary files.
+//
+// The advantage of this approach is its fidelity to 'go build'. The
+// downside is that the token.Position.Offset for each AST node is
+// incorrect, being an offset within the temporary file. Line numbers
+// should still be correct because of the //line comments.
+//
+// The logic of this file is mostly plundered from the 'go build'
+// tool, which also invokes the cgo preprocessor.
+//
+//
+// REJECTED ALTERNATIVE
+//
+// An alternative approach that we explored is to extend go/types'
+// Importer mechanism to provide the identity of the importing package
+// so that each time `import "C"` appears it resolves to a different
+// synthetic package containing just the objects needed in that case.
+// The loader would invoke cgo but parse only the cgo_types.go file
+// defining the package-level objects, discarding the other files
+// resulting from preprocessing.
+//
+// The benefit of this approach would have been that source-level
+// syntax information would correspond exactly to the original cgo
+// file, with no preprocessing involved, making source tools like
+// godoc, guru, and eg happy. However, the approach was rejected
+// due to the additional complexity it would impose on go/types. (It
+// made for a beautiful demo, though.)
+//
+// cgo files, despite their *.go extension, are not legal Go source
+// files per the specification since they may refer to unexported
+// members of package "C" such as C.int. Also, a function such as
+// C.getpwent has in effect two types, one matching its C type and one
+// which additionally returns (errno C.int). The cgo preprocessor
+// uses name mangling to distinguish these two functions in the
+// processed code, but go/types would need to duplicate this logic in
+// its handling of function calls, analogous to the treatment of map
+// lookups in which y=m[k] and y,ok=m[k] are both legal.
+
+package cgo
+
+import (
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "log"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "regexp"
+ "strings"
+)
+
+// ProcessFiles invokes the cgo preprocessor on bp.CgoFiles, parses
+// the output and returns the resulting ASTs.
+func ProcessFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) {
+ tmpdir, err := os.MkdirTemp("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C")
+ if err != nil {
+ return nil, err
+ }
+ defer os.RemoveAll(tmpdir)
+
+ pkgdir := bp.Dir
+ if DisplayPath != nil {
+ pkgdir = DisplayPath(pkgdir)
+ }
+
+ cgoFiles, cgoDisplayFiles, err := Run(bp, pkgdir, tmpdir, false)
+ if err != nil {
+ return nil, err
+ }
+ var files []*ast.File
+ for i := range cgoFiles {
+ rd, err := os.Open(cgoFiles[i])
+ if err != nil {
+ return nil, err
+ }
+ display := filepath.Join(bp.Dir, cgoDisplayFiles[i])
+ f, err := parser.ParseFile(fset, display, rd, mode)
+ rd.Close()
+ if err != nil {
+ return nil, err
+ }
+ files = append(files, f)
+ }
+ return files, nil
+}
+
+var cgoRe = regexp.MustCompile(`[/\\:]`)
+
+// Run invokes the cgo preprocessor on bp.CgoFiles and returns two
+// lists of files: the resulting processed files (in temporary
+// directory tmpdir) and the corresponding names of the unprocessed files.
+//
+// Run is adapted from (*builder).cgo in
+// $GOROOT/src/cmd/go/build.go, but these features are unsupported:
+// Objective C, CGOPKGPATH, CGO_FLAGS.
+//
+// If useabs is set to true, absolute paths of the bp.CgoFiles will be passed in
+// to the cgo preprocessor. This in turn will set the // line comments
+// referring to those files to use absolute paths. This is needed for
+// go/packages using the legacy go list support so it is able to find
+// the original files.
+func Run(bp *build.Package, pkgdir, tmpdir string, useabs bool) (files, displayFiles []string, err error) {
+ cgoCPPFLAGS, _, _, _ := cflags(bp, true)
+ _, cgoexeCFLAGS, _, _ := cflags(bp, false)
+
+ if len(bp.CgoPkgConfig) > 0 {
+ pcCFLAGS, err := pkgConfigFlags(bp)
+ if err != nil {
+ return nil, nil, err
+ }
+ cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...)
+ }
+
+ // Allows including _cgo_export.h from .[ch] files in the package.
+ cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir)
+
+ // _cgo_gotypes.go (displayed "C") contains the type definitions.
+ files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go"))
+ displayFiles = append(displayFiles, "C")
+ for _, fn := range bp.CgoFiles {
+ // "foo.cgo1.go" (displayed "foo.go") is the processed Go source.
+ f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_")
+ files = append(files, filepath.Join(tmpdir, f+"cgo1.go"))
+ displayFiles = append(displayFiles, fn)
+ }
+
+ var cgoflags []string
+ if bp.Goroot && bp.ImportPath == "runtime/cgo" {
+ cgoflags = append(cgoflags, "-import_runtime_cgo=false")
+ }
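+ // Note that '&&' binds tighter than '||', so the Goroot check
+ // applies only to the "runtime/race" comparison below.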
+ if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" {
+ cgoflags = append(cgoflags, "-import_syscall=false")
+ }
+
+ var cgoFiles []string = bp.CgoFiles
+ if useabs {
+ cgoFiles = make([]string, len(bp.CgoFiles))
+ for i := range cgoFiles {
+ cgoFiles[i] = filepath.Join(pkgdir, bp.CgoFiles[i])
+ }
+ }
+
+ args := stringList(
+ "go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--",
+ cgoCPPFLAGS, cgoexeCFLAGS, cgoFiles,
+ )
+ if false {
+ log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir)
+ }
+ cmd := exec.Command(args[0], args[1:]...)
+ cmd.Dir = pkgdir
+ cmd.Env = append(os.Environ(), "PWD="+pkgdir)
+ cmd.Stdout = os.Stderr
+ cmd.Stderr = os.Stderr
+ if err := cmd.Run(); err != nil {
+ return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err)
+ }
+
+ return files, displayFiles, nil
+}
+
+// -- unmodified from 'go build' ---------------------------------------
+
+// Return the flags to use when invoking the C or C++ compilers, or cgo.
+func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) {
+ var defaults string
+ if def {
+ defaults = "-g -O2"
+ }
+
+ cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS)
+ cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS)
+ cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS)
+ ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS)
+ return
+}
+
+// envList returns the value of the given environment variable broken
+// into fields, using the default value when the variable is empty.
+func envList(key, def string) []string {
+ v := os.Getenv(key)
+ if v == "" {
+ v = def
+ }
+ return strings.Fields(v)
+}
+
+// stringList's arguments should be a sequence of string or []string values.
+// stringList flattens them into a single []string.
+func stringList(args ...interface{}) []string {
+ var x []string
+ for _, arg := range args {
+ switch arg := arg.(type) {
+ case []string:
+ x = append(x, arg...)
+ case string:
+ x = append(x, arg)
+ default:
+ panic("stringList: invalid argument")
+ }
+ }
+ return x
+}
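+
+// Flattening sketch (arguments are illustrative):
+//
+//	stringList("go", "tool", []string{"-a", "-b"})
+//	// → ["go", "tool", "-a", "-b"]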
diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go
new file mode 100644
index 000000000..b5bb95a63
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go
@@ -0,0 +1,39 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cgo
+
+import (
+ "errors"
+ "fmt"
+ "go/build"
+ "os/exec"
+ "strings"
+)
+
+// pkgConfig runs pkg-config with the specified arguments and returns the flags it prints.
+func pkgConfig(mode string, pkgs []string) (flags []string, err error) {
+ cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...)
+ out, err := cmd.CombinedOutput()
+ if err != nil {
+ s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err)
+ if len(out) > 0 {
+ s = fmt.Sprintf("%s: %s", s, out)
+ }
+ return nil, errors.New(s)
+ }
+ if len(out) > 0 {
+ flags = strings.Fields(string(out))
+ }
+ return
+}
+
+// pkgConfigFlags calls pkg-config if needed and returns the cflags
+// needed to build the package.
+func pkgConfigFlags(p *build.Package) (cflags []string, err error) {
+ if len(p.CgoPkgConfig) == 0 {
+ return nil, nil
+ }
+ return pkgConfig("--cflags", p.CgoPkgConfig)
+}
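+
+// For a package whose cgo preamble contains `#cgo pkg-config: cairo`,
+// this amounts to running the command line (illustrative):
+//
+//	pkg-config --cflags cairo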
diff --git a/vendor/golang.org/x/tools/go/loader/doc.go b/vendor/golang.org/x/tools/go/loader/doc.go
new file mode 100644
index 000000000..e35b1fd7d
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/doc.go
@@ -0,0 +1,202 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package loader loads a complete Go program from source code, parsing
+// and type-checking the initial packages plus their transitive closure
+// of dependencies. The ASTs and the derived facts are retained for
+// later use.
+//
+// Deprecated: This is an older API and does not have support
+// for modules. Use golang.org/x/tools/go/packages instead.
+//
+// The package defines two primary types: Config, which specifies a
+// set of initial packages to load and various other options; and
+// Program, which is the result of successfully loading the packages
+// specified by a configuration.
+//
+// The configuration can be set directly, but *Config provides various
+// convenience methods to simplify the common cases, each of which can
+// be called any number of times. Finally, these are followed by a
+// call to Load() to actually load and type-check the program.
+//
+// var conf loader.Config
+//
+// // Use the command-line arguments to specify
+// // a set of initial packages to load from source.
+// // See FromArgsUsage for help.
+// rest, err := conf.FromArgs(os.Args[1:], wantTests)
+//
+// // Parse the specified files and create an ad hoc package with path "foo".
+// // All files must have the same 'package' declaration.
+// conf.CreateFromFilenames("foo", "foo.go", "bar.go")
+//
+// // Create an ad hoc package with path "foo" from
+// // the specified already-parsed files.
+// // All ASTs must have the same 'package' declaration.
+// conf.CreateFromFiles("foo", parsedFiles)
+//
+// // Add "runtime" to the set of packages to be loaded.
+// conf.Import("runtime")
+//
+// // Adds "fmt" and "fmt_test" to the set of packages
+// // to be loaded. "fmt" will include *_test.go files.
+// conf.ImportWithTests("fmt")
+//
+// // Finally, load all the packages specified by the configuration.
+// prog, err := conf.Load()
+//
+// See examples_test.go for examples of API usage.
+//
+// # CONCEPTS AND TERMINOLOGY
+//
+// The WORKSPACE is the set of packages accessible to the loader. The
+// workspace is defined by Config.Build, a *build.Context. The
+// default context treats subdirectories of $GOROOT and $GOPATH as
+// packages, but this behavior may be overridden.
+//
+// An AD HOC package is one specified as a set of source files on the
+// command line. In the simplest case, it may consist of a single file
+// such as $GOROOT/src/net/http/triv.go.
+//
+// EXTERNAL TEST packages are those comprised of a set of *_test.go
+// files all with the same 'package foo_test' declaration, all in the
+// same directory. (go/build.Package calls these files XTestFiles.)
+//
+// An IMPORTABLE package is one that can be referred to by some import
+// spec. Every importable package is uniquely identified by its
+// PACKAGE PATH or just PATH, a string such as "fmt", "encoding/json",
+// or "cmd/vendor/golang.org/x/arch/x86/x86asm". A package path
+// typically denotes a subdirectory of the workspace.
+//
+// An import declaration uses an IMPORT PATH to refer to a package.
+// Most import declarations use the package path as the import path.
+//
+// Due to VENDORING (https://golang.org/s/go15vendor), the
+// interpretation of an import path may depend on the directory in which
+// it appears. To resolve an import path to a package path, go/build
+// must search the enclosing directories for a subdirectory named
+// "vendor".
+//
+// ad hoc packages and external test packages are NON-IMPORTABLE. The
+// path of an ad hoc package is inferred from the package
+// declarations of its files and is therefore not a unique package key.
+// For example, Config.CreatePkgs may specify two initial ad hoc
+// packages, both with path "main".
+//
+// An AUGMENTED package is an importable package P plus all the
+// *_test.go files with same 'package foo' declaration as P.
+// (go/build.Package calls these files TestFiles.)
+//
+// The INITIAL packages are those specified in the configuration. A
+// DEPENDENCY is a package loaded to satisfy an import in an initial
+// package or another dependency.
+package loader
+
+// IMPLEMENTATION NOTES
+//
+// 'go test', in-package test files, and import cycles
+// ---------------------------------------------------
+//
+// An external test package may depend upon members of the augmented
+// package that are not in the unaugmented package, such as functions
+// that expose internals. (See bufio/export_test.go for an example.)
+// So, the loader must ensure that for each external test package
+// it loads, it also augments the corresponding non-test package.
+//
+// The import graph over n unaugmented packages must be acyclic; the
+// import graph over n-1 unaugmented packages plus one augmented
+// package must also be acyclic. ('go test' relies on this.) But the
+// import graph over n augmented packages may contain cycles.
+//
+// First, all the (unaugmented) non-test packages and their
+// dependencies are imported in the usual way; the loader reports an
+// error if it detects an import cycle.
+//
+// Then, each package P for which testing is desired is augmented by
+// the list P' of its in-package test files, by calling
+// (*types.Checker).Files. This arrangement ensures that P' may
+// reference definitions within P, but P may not reference definitions
+// within P'. Furthermore, P' may import any other package, including
+// ones that depend upon P, without an import cycle error.
+//
+// Consider two packages A and B, both of which have lists of
+// in-package test files we'll call A' and B', and which have the
+// following import graph edges:
+// B imports A
+// B' imports A
+// A' imports B
+// This last edge would be expected to create an error were it not
+// for the special type-checking discipline above.
+// Cycles of size greater than two are possible. For example:
+// compress/bzip2/bzip2_test.go (package bzip2) imports "io/ioutil"
+// io/ioutil/tempfile_test.go (package ioutil) imports "regexp"
+// regexp/exec_test.go (package regexp) imports "compress/bzip2"
+//
+//
+// Concurrency
+// -----------
+//
+// Let us define the import dependency graph as follows. Each node is a
+// list of files passed to (Checker).Files at once. Many of these lists
+// are the production code of an importable Go package, so those nodes
+// are labelled by the package's path. The remaining nodes are
+// ad hoc packages and lists of in-package *_test.go files that augment
+// an importable package; those nodes have no label.
+//
+// The edges of the graph represent import statements appearing within a
+// file. An edge connects a node (a list of files) to the node it
+// imports, which is importable and thus always labelled.
+//
+// Loading is controlled by this dependency graph.
+//
+// To reduce I/O latency, we start loading a package's dependencies
+// asynchronously as soon as we've parsed its files and enumerated its
+// imports (scanImports). This performs a preorder traversal of the
+// import dependency graph.
+//
+// To exploit hardware parallelism, we type-check unrelated packages in
+// parallel, where "unrelated" means not ordered by the partial order of
+// the import dependency graph.
+//
+// We use a concurrency-safe non-blocking cache (importer.imported) to
+// record the results of type-checking, whether success or failure. An
+// entry is created in this cache by startLoad the first time the
+// package is imported. The first goroutine to request an entry becomes
+// responsible for completing the task and broadcasting completion to
+// subsequent requestors, which block until then.
+//
+// Type checking occurs in (parallel) postorder: we cannot type-check a
+// set of files until we have loaded and type-checked all of their
+// immediate dependencies (and thus all of their transitive
+// dependencies). If the input were guaranteed free of import cycles,
+// this would be trivial: we could simply wait for completion of the
+// dependencies and then invoke the typechecker.
+//
+// But as we saw in the 'go test' section above, some cycles in the
+// import graph over packages are actually legal, so long as the
+// cycle-forming edge originates in the in-package test files that
+// augment the package. This explains why the nodes of the import
+// dependency graph are not packages, but lists of files: the unlabelled
+// nodes avoid the cycles. Consider packages A and B where B imports A
+// and A's in-package tests AT import B. The naively constructed import
+// graph over packages would contain a cycle (A+AT) --> B --> (A+AT) but
+// the graph over lists of files is AT --> B --> A, where AT is an
+// unlabelled node.
+//
+// Awaiting completion of the dependencies in a cyclic graph would
+// deadlock, so we must materialize the import dependency graph (as
+// importer.graph) and check whether each import edge forms a cycle. If
+// x imports y, and the graph already contains a path from y to x, then
+// there is an import cycle, in which case the processing of x must not
+// wait for the completion of processing of y.
+//
+// When the type-checker makes a callback (doImport) to the loader for a
+// given import edge, there are two possible cases. In the normal case,
+// the dependency has already been completely type-checked; doImport
+// does a cache lookup and returns it. In the cyclic case, the entry in
+// the cache is still necessarily incomplete, indicating a cycle. We
+// perform the cycle check again to obtain the error message, and return
+// the error.
+//
+// The result of using concurrency is about a 2.5x speedup for stdlib_test.
diff --git a/vendor/golang.org/x/tools/go/loader/loader.go b/vendor/golang.org/x/tools/go/loader/loader.go
new file mode 100644
index 000000000..013c0f505
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/loader.go
@@ -0,0 +1,1066 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package loader
+
+// See doc.go for package documentation and implementation notes.
+
+import (
+ "errors"
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "go/types"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+ "sync"
+ "time"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/go/internal/cgo"
+ "golang.org/x/tools/internal/versions"
+)
+
+var ignoreVendor build.ImportMode
+
+const trace = false // show timing info for type-checking
+
+// Config specifies the configuration for loading a whole program from
+// Go source code.
+// The zero value for Config is a ready-to-use default configuration.
+type Config struct {
+ // Fset is the file set for the parser to use when loading the
+ // program. If nil, it may be lazily initialized by any
+ // method of Config.
+ Fset *token.FileSet
+
+ // ParserMode specifies the mode to be used by the parser when
+ // loading source packages.
+ ParserMode parser.Mode
+
+ // TypeChecker contains options relating to the type checker.
+ //
+ // The supplied IgnoreFuncBodies is not used; the effective
+ // value comes from the TypeCheckFuncBodies func below.
+ // The supplied Import function is not used either.
+ TypeChecker types.Config
+
+ // TypeCheckFuncBodies is a predicate over package paths.
+ // A package for which the predicate is false will
+ // have its package-level declarations type checked, but not
+ // its function bodies; this can be used to quickly load
+ // dependencies from source. If nil, all func bodies are type
+ // checked.
+ TypeCheckFuncBodies func(path string) bool
+
+ // If Build is non-nil, it is used to locate source packages.
+ // Otherwise &build.Default is used.
+ //
+ // By default, cgo is invoked to preprocess Go files that
+ // import the fake package "C". This behaviour can be
+ // disabled by setting CGO_ENABLED=0 in the environment prior
+ // to startup, or by setting Build.CgoEnabled=false.
+ Build *build.Context
+
+ // The current directory, used for resolving relative package
+ // references such as "./go/loader". If empty, os.Getwd will be
+ // used instead.
+ Cwd string
+
+ // If DisplayPath is non-nil, it is used to transform each
+ // file name obtained from Build.Import(). This can be used
+ // to prevent a virtualized build.Context's file names from
+ // leaking into the user interface.
+ DisplayPath func(path string) string
+
+ // If AllowErrors is true, Load will return a Program even
+ // if some of its packages contained I/O, parser or type
+ // errors; such errors are accessible via PackageInfo.Errors. If
+ // false, Load will fail if any package had an error.
+ AllowErrors bool
+
+ // CreatePkgs specifies a list of non-importable initial
+ // packages to create. The resulting packages will appear in
+ // the corresponding elements of the Program.Created slice.
+ CreatePkgs []PkgSpec
+
+ // ImportPkgs specifies a set of initial packages to load.
+ // The map keys are package paths.
+ //
+ // The map value indicates whether to load tests. If true, Load
+ // will add and type-check two lists of files to the package:
+ // non-test files followed by in-package *_test.go files. In
+ // addition, it will append the external test package (if any)
+ // to Program.Created.
+ ImportPkgs map[string]bool
+
+ // FindPackage is called during Load to create the build.Package
+ // for a given import path from a given directory.
+ // If FindPackage is nil, (*build.Context).Import is used.
+ // A client may use this hook to adapt to a proprietary build
+ // system that does not follow the "go build" layout
+ // conventions, for example.
+ //
+ // It must be safe to call concurrently from multiple goroutines.
+ FindPackage func(ctxt *build.Context, importPath, fromDir string, mode build.ImportMode) (*build.Package, error)
+
+ // AfterTypeCheck is called immediately after a list of files
+ // has been type-checked and appended to info.Files.
+ //
+ // This optional hook function is the earliest opportunity for
+ // the client to observe the output of the type checker,
+ // which may be useful to reduce analysis latency when loading
+ // a large program.
+ //
+ // The function is permitted to modify info.Info, for instance
+ // to clear data structures that are no longer needed, which can
+ // dramatically reduce peak memory consumption.
+ //
+ // The function may be called twice for the same PackageInfo:
+ // once for the files of the package and again for the
+ // in-package test files.
+ //
+ // It must be safe to call concurrently from multiple goroutines.
+ AfterTypeCheck func(info *PackageInfo, files []*ast.File)
+}
+
+// A PkgSpec specifies a non-importable package to be created by Load.
+// Files are processed first, but typically only one of Files and
+// Filenames is provided. The path needn't be globally unique.
+//
+// For vendoring purposes, the package's directory is the one that
+// contains the first file.
+type PkgSpec struct {
+ Path string // package path ("" => use package declaration)
+ Files []*ast.File // ASTs of already-parsed files
+ Filenames []string // names of files to be parsed
+}
+
+// A Program is a Go program loaded from source as specified by a Config.
+type Program struct {
+ Fset *token.FileSet // the file set for this program
+
+ // Created[i] contains the initial package whose ASTs or
+ // filenames were supplied by Config.CreatePkgs[i], followed by
+ // the external test package, if any, of each package in
+ // Config.ImportPkgs ordered by ImportPath.
+ //
+ // NOTE: these files must not import "C". Cgo preprocessing is
+ // only performed on imported packages, not ad hoc packages.
+ //
+ // TODO(adonovan): we need to copy and adapt the logic of
+ // goFilesPackage (from $GOROOT/src/cmd/go/build.go) and make
+ // Config.Import and Config.Create methods return the same kind
+ // of entity, essentially a build.Package.
+ // Perhaps we can even reuse that type directly.
+ Created []*PackageInfo
+
+ // Imported contains the initially imported packages,
+ // as specified by Config.ImportPkgs.
+ Imported map[string]*PackageInfo
+
+ // AllPackages contains the PackageInfo of every package
+ // encountered by Load: all initial packages and all
+ // dependencies, including incomplete ones.
+ AllPackages map[*types.Package]*PackageInfo
+
+ // importMap is the canonical mapping of package paths to
+ // packages. It contains all Imported initial packages, but not
+ // Created ones, and all imported dependencies.
+ importMap map[string]*types.Package
+}
+
+// PackageInfo holds the ASTs and facts derived by the type-checker
+// for a single package.
+//
+// Not mutated once exposed via the API.
+type PackageInfo struct {
+ Pkg *types.Package
+ Importable bool // true if 'import "Pkg.Path()"' would resolve to this
+ TransitivelyErrorFree bool // true if Pkg and all its dependencies are free of errors
+ Files []*ast.File // syntax trees for the package's files
+ Errors []error // non-nil if the package had errors
+ types.Info // type-checker deductions.
+ dir string // package directory
+
+ checker *types.Checker // transient type-checker state
+ errorFunc func(error)
+}
+
+func (info *PackageInfo) String() string { return info.Pkg.Path() }
+
+func (info *PackageInfo) appendError(err error) {
+ if info.errorFunc != nil {
+ info.errorFunc(err)
+ } else {
+ fmt.Fprintln(os.Stderr, err)
+ }
+ info.Errors = append(info.Errors, err)
+}
+
+func (conf *Config) fset() *token.FileSet {
+ if conf.Fset == nil {
+ conf.Fset = token.NewFileSet()
+ }
+ return conf.Fset
+}
+
+// ParseFile is a convenience function (intended for testing) that invokes
+// the parser using the Config's FileSet, which is initialized if nil.
+//
+// src specifies the parser input as a string, []byte, or io.Reader, and
+// filename is its apparent name. If src is nil, the contents of
+// filename are read from the file system.
+func (conf *Config) ParseFile(filename string, src interface{}) (*ast.File, error) {
+ // TODO(adonovan): use conf.build() etc like parseFiles does.
+ return parser.ParseFile(conf.fset(), filename, src, conf.ParserMode)
+}
+
+// FromArgsUsage is a partial usage message that applications calling
+// FromArgs may wish to include in their -help output.
+const FromArgsUsage = `
+<args> is a list of arguments denoting a set of initial packages.
+It may take one of two forms:
+
+1. A list of *.go source files.
+
+ All of the specified files are loaded, parsed and type-checked
+ as a single package. All the files must belong to the same directory.
+
+2. A list of import paths, each denoting a package.
+
+ The package's directory is found relative to the $GOROOT and
+ $GOPATH using similar logic to 'go build', and the *.go files in
+ that directory are loaded, parsed and type-checked as a single
+ package.
+
+ In addition, all *_test.go files in the directory are then loaded
+ and parsed. Those files whose package declaration equals that of
+ the non-*_test.go files are included in the primary package. Test
+ files whose package declaration ends with "_test" are type-checked
+ as another package, the 'external' test package, so that a single
+ import path may denote two packages. (Whether this behaviour is
+ enabled is tool-specific, and may depend on additional flags.)
+
+A '--' argument terminates the list of packages.
+`
+
+// FromArgs interprets args as a set of initial packages to load from
+// source and updates the configuration. It returns the list of
+// unconsumed arguments.
+//
+// It is intended for use in command-line interfaces that require a
+// set of initial packages to be specified; see FromArgsUsage message
+// for details.
+//
+// Only superficial errors are reported at this stage; errors dependent
+// on I/O are detected during Load.
+func (conf *Config) FromArgs(args []string, xtest bool) ([]string, error) {
+ var rest []string
+ for i, arg := range args {
+ if arg == "--" {
+ rest = args[i+1:]
+ args = args[:i]
+ break // consume "--" and return the remaining args
+ }
+ }
+
+ if len(args) > 0 && strings.HasSuffix(args[0], ".go") {
+ // Assume args is a list of *.go files
+ // denoting a single ad hoc package.
+ for _, arg := range args {
+ if !strings.HasSuffix(arg, ".go") {
+ return nil, fmt.Errorf("named files must be .go files: %s", arg)
+ }
+ }
+ conf.CreateFromFilenames("", args...)
+ } else {
+ // Assume args are directories each denoting a
+ // package and (perhaps) an external test, iff xtest.
+ for _, arg := range args {
+ if xtest {
+ conf.ImportWithTests(arg)
+ } else {
+ conf.Import(arg)
+ }
+ }
+ }
+
+ return rest, nil
+}
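+
+// Sketch of the import-path form (arguments are illustrative):
+//
+//	var conf Config
+//	rest, _ := conf.FromArgs([]string{"fmt", "net/http", "--", "-v"}, true)
+//	// imports fmt and net/http with their tests; rest == ["-v"]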
+
+// CreateFromFilenames is a convenience function that adds
+// a conf.CreatePkgs entry to create a package of the specified *.go
+// files.
+func (conf *Config) CreateFromFilenames(path string, filenames ...string) {
+ conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Filenames: filenames})
+}
+
+// CreateFromFiles is a convenience function that adds a conf.CreatePkgs
+// entry to create a package of the specified path and parsed files.
+func (conf *Config) CreateFromFiles(path string, files ...*ast.File) {
+ conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Files: files})
+}
+
+// ImportWithTests is a convenience function that adds path to
+// ImportPkgs, the set of initial source packages located relative to
+// $GOPATH. The package will be augmented by any *_test.go files in
+// its directory that contain a "package x" (not "package x_test")
+// declaration.
+//
+// In addition, if any *_test.go files contain a "package x_test"
+// declaration, an additional package comprising just those files will
+// be added to CreatePkgs.
+func (conf *Config) ImportWithTests(path string) { conf.addImport(path, true) }
+
+// Import is a convenience function that adds path to ImportPkgs, the
+// set of initial packages that will be imported from source.
+func (conf *Config) Import(path string) { conf.addImport(path, false) }
+
+func (conf *Config) addImport(path string, tests bool) {
+ if path == "C" {
+ return // ignore; not a real package
+ }
+ if conf.ImportPkgs == nil {
+ conf.ImportPkgs = make(map[string]bool)
+ }
+ conf.ImportPkgs[path] = conf.ImportPkgs[path] || tests
+}
+
+// PathEnclosingInterval returns the PackageInfo and ast.Node that
+// contain source interval [start, end), and all the node's ancestors
+// up to the AST root. It searches all ast.Files of all packages in prog.
+// exact is defined as for astutil.PathEnclosingInterval.
+//
+// The zero value is returned if not found.
+func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) {
+ for _, info := range prog.AllPackages {
+ for _, f := range info.Files {
+ if f.Pos() == token.NoPos {
+ // This can happen if the parser saw
+ // too many errors and bailed out.
+ // (Use parser.AllErrors to prevent that.)
+ continue
+ }
+ if !tokenFileContainsPos(prog.Fset.File(f.Pos()), start) {
+ continue
+ }
+ if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil {
+ return info, path, exact
+ }
+ }
+ }
+ return nil, nil, false
+}
+
+// InitialPackages returns a new slice containing the set of initial
+// packages (Created + Imported) in unspecified order.
+func (prog *Program) InitialPackages() []*PackageInfo {
+ infos := make([]*PackageInfo, 0, len(prog.Created)+len(prog.Imported))
+ infos = append(infos, prog.Created...)
+ for _, info := range prog.Imported {
+ infos = append(infos, info)
+ }
+ return infos
+}
+
+// Package returns the ASTs and results of type checking for the
+// specified package.
+func (prog *Program) Package(path string) *PackageInfo {
+ if info, ok := prog.AllPackages[prog.importMap[path]]; ok {
+ return info
+ }
+ for _, info := range prog.Created {
+ if path == info.Pkg.Path() {
+ return info
+ }
+ }
+ return nil
+}
+
+// ---------- Implementation ----------
+
+// importer holds the working state of the algorithm.
+type importer struct {
+ conf *Config // the client configuration
+ start time.Time // for logging
+
+ progMu sync.Mutex // guards prog
+ prog *Program // the resulting program
+
+ // findpkg is a memoization of FindPackage.
+ findpkgMu sync.Mutex // guards findpkg
+ findpkg map[findpkgKey]*findpkgValue
+
+ importedMu sync.Mutex // guards imported
+ imported map[string]*importInfo // all imported packages (incl. failures) by import path
+
+ // import dependency graph: graph[x][y] => x imports y
+ //
+ // Since non-importable packages cannot be cyclic, we ignore
+ // their imports, thus we only need the subgraph over importable
+ // packages. Nodes are identified by their import paths.
+ graphMu sync.Mutex
+ graph map[string]map[string]bool
+}
+
+type findpkgKey struct {
+ importPath string
+ fromDir string
+ mode build.ImportMode
+}
+
+type findpkgValue struct {
+ ready chan struct{} // closed to broadcast readiness
+ bp *build.Package
+ err error
+}
+
+// importInfo tracks the success or failure of a single import.
+//
+// Upon completion, the info field is non-nil (Complete enforces
+// this); a successfully created package may still contain type
+// errors, accessible via PackageInfo.Errors.
+type importInfo struct {
+ path string // import path
+ info *PackageInfo // results of typechecking (including errors)
+ complete chan struct{} // closed to broadcast that info is set.
+}
+
+// awaitCompletion blocks until ii is complete,
+// i.e. the info field is safe to inspect.
+func (ii *importInfo) awaitCompletion() {
+ <-ii.complete // wait for close
+}
+
+// Complete marks ii as complete.
+// Its info field will not be subsequently updated.
+func (ii *importInfo) Complete(info *PackageInfo) {
+ if info == nil {
+ panic("info == nil")
+ }
+ ii.info = info
+ close(ii.complete)
+}
+
+type importError struct {
+ path string // import path
+ err error // reason for failure to create a package
+}
+
+// Load creates the initial packages specified by conf.{Create,Import}Pkgs,
+// loading their dependency packages as needed.
+//
+// On success, Load returns a Program containing a PackageInfo for
+// each package. On failure, it returns an error.
+//
+// If AllowErrors is true, Load will return a Program even if some
+// packages contained I/O, parser or type errors, or if dependencies
+// were missing. (Such errors are accessible via PackageInfo.Errors.)
+// If false, Load will fail if any package had an error.
+//
+// It is an error if no packages were loaded.
+func (conf *Config) Load() (*Program, error) {
+ // Create a simple default error handler for parse/type errors.
+ if conf.TypeChecker.Error == nil {
+ conf.TypeChecker.Error = func(e error) { fmt.Fprintln(os.Stderr, e) }
+ }
+
+ // Set default working directory for relative package references.
+ if conf.Cwd == "" {
+ var err error
+ conf.Cwd, err = os.Getwd()
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ // Install default FindPackage hook using go/build logic.
+ if conf.FindPackage == nil {
+ conf.FindPackage = (*build.Context).Import
+ }
+
+ prog := &Program{
+ Fset: conf.fset(),
+ Imported: make(map[string]*PackageInfo),
+ importMap: make(map[string]*types.Package),
+ AllPackages: make(map[*types.Package]*PackageInfo),
+ }
+
+ imp := importer{
+ conf: conf,
+ prog: prog,
+ findpkg: make(map[findpkgKey]*findpkgValue),
+ imported: make(map[string]*importInfo),
+ start: time.Now(),
+ graph: make(map[string]map[string]bool),
+ }
+
+ // -- loading proper (concurrent phase) --------------------------------
+
+ var errpkgs []string // packages that contained errors
+
+ // Load the initially imported packages and their dependencies,
+ // in parallel.
+ // No vendor check on packages imported from the command line.
+ infos, importErrors := imp.importAll("", conf.Cwd, conf.ImportPkgs, ignoreVendor)
+ for _, ie := range importErrors {
+ conf.TypeChecker.Error(ie.err) // failed to create package
+ errpkgs = append(errpkgs, ie.path)
+ }
+ for _, info := range infos {
+ prog.Imported[info.Pkg.Path()] = info
+ }
+
+ // Augment the designated initial packages by their tests.
+ // Dependencies are loaded in parallel.
+ var xtestPkgs []*build.Package
+ for importPath, augment := range conf.ImportPkgs {
+ if !augment {
+ continue
+ }
+
+ // No vendor check on packages imported from command line.
+ bp, err := imp.findPackage(importPath, conf.Cwd, ignoreVendor)
+ if err != nil {
+ // Package not found, or can't even parse package declaration.
+ // Already reported by previous loop; ignore it.
+ continue
+ }
+
+ // Needs external test package?
+ if len(bp.XTestGoFiles) > 0 {
+ xtestPkgs = append(xtestPkgs, bp)
+ }
+
+ // Consult the cache using the canonical package path.
+ path := bp.ImportPath
+ imp.importedMu.Lock() // (unnecessary, we're sequential here)
+ ii, ok := imp.imported[path]
+ // Paranoid checks added due to issue #11012.
+ if !ok {
+ // Unreachable.
+ // The previous loop called importAll and thus
+ // startLoad for each path in ImportPkgs, which
+ // populates imp.imported[path] with a non-zero value.
+ panic(fmt.Sprintf("imported[%q] not found", path))
+ }
+ if ii == nil {
+ // Unreachable.
+			// The ii values in this loop are the same as in
+			// the previous loop, which never stores a nil
+			// *importInfo in the imported map.
+ panic(fmt.Sprintf("imported[%q] == nil", path))
+ }
+ if ii.info == nil {
+ // Unreachable.
+ // awaitCompletion has the postcondition
+ // ii.info != nil.
+ panic(fmt.Sprintf("imported[%q].info = nil", path))
+ }
+ info := ii.info
+ imp.importedMu.Unlock()
+
+ // Parse the in-package test files.
+ files, errs := imp.conf.parsePackageFiles(bp, 't')
+ for _, err := range errs {
+ info.appendError(err)
+ }
+
+ // The test files augmenting package P cannot be imported,
+ // but may import packages that import P,
+ // so we must disable the cycle check.
+ imp.addFiles(info, files, false)
+ }
+
+ createPkg := func(path, dir string, files []*ast.File, errs []error) {
+ info := imp.newPackageInfo(path, dir)
+ for _, err := range errs {
+ info.appendError(err)
+ }
+
+ // Ad hoc packages are non-importable,
+ // so no cycle check is needed.
+ // addFiles loads dependencies in parallel.
+ imp.addFiles(info, files, false)
+ prog.Created = append(prog.Created, info)
+ }
+
+ // Create packages specified by conf.CreatePkgs.
+ for _, cp := range conf.CreatePkgs {
+ files, errs := parseFiles(conf.fset(), conf.build(), nil, conf.Cwd, cp.Filenames, conf.ParserMode)
+ files = append(files, cp.Files...)
+
+ path := cp.Path
+ if path == "" {
+ if len(files) > 0 {
+ path = files[0].Name.Name
+ } else {
+ path = "(unnamed)"
+ }
+ }
+
+ dir := conf.Cwd
+ if len(files) > 0 && files[0].Pos().IsValid() {
+ dir = filepath.Dir(conf.fset().File(files[0].Pos()).Name())
+ }
+ createPkg(path, dir, files, errs)
+ }
+
+ // Create external test packages.
+ sort.Sort(byImportPath(xtestPkgs))
+ for _, bp := range xtestPkgs {
+ files, errs := imp.conf.parsePackageFiles(bp, 'x')
+ createPkg(bp.ImportPath+"_test", bp.Dir, files, errs)
+ }
+
+ // -- finishing up (sequential) ----------------------------------------
+
+ if len(prog.Imported)+len(prog.Created) == 0 {
+ return nil, errors.New("no initial packages were loaded")
+ }
+
+ // Create infos for indirectly imported packages.
+ // e.g. incomplete packages without syntax, loaded from export data.
+ for _, obj := range prog.importMap {
+ info := prog.AllPackages[obj]
+ if info == nil {
+ prog.AllPackages[obj] = &PackageInfo{Pkg: obj, Importable: true}
+ } else {
+ // finished
+ info.checker = nil
+ info.errorFunc = nil
+ }
+ }
+
+ if !conf.AllowErrors {
+ // Report errors in indirectly imported packages.
+ for _, info := range prog.AllPackages {
+ if len(info.Errors) > 0 {
+ errpkgs = append(errpkgs, info.Pkg.Path())
+ }
+ }
+ if errpkgs != nil {
+ var more string
+ if len(errpkgs) > 3 {
+ more = fmt.Sprintf(" and %d more", len(errpkgs)-3)
+ errpkgs = errpkgs[:3]
+ }
+ return nil, fmt.Errorf("couldn't load packages due to errors: %s%s",
+ strings.Join(errpkgs, ", "), more)
+ }
+ }
+
+ markErrorFreePackages(prog.AllPackages)
+
+ return prog, nil
+}
+
+type byImportPath []*build.Package
+
+func (b byImportPath) Len() int { return len(b) }
+func (b byImportPath) Less(i, j int) bool { return b[i].ImportPath < b[j].ImportPath }
+func (b byImportPath) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
+
+// markErrorFreePackages sets the TransitivelyErrorFree flag on all
+// applicable packages.
+func markErrorFreePackages(allPackages map[*types.Package]*PackageInfo) {
+ // Build the transpose of the import graph.
+ importedBy := make(map[*types.Package]map[*types.Package]bool)
+ for P := range allPackages {
+ for _, Q := range P.Imports() {
+ clients, ok := importedBy[Q]
+ if !ok {
+ clients = make(map[*types.Package]bool)
+ importedBy[Q] = clients
+ }
+ clients[P] = true
+ }
+ }
+
+ // Find all packages reachable from some error package.
+ reachable := make(map[*types.Package]bool)
+ var visit func(*types.Package)
+ visit = func(p *types.Package) {
+ if !reachable[p] {
+ reachable[p] = true
+ for q := range importedBy[p] {
+ visit(q)
+ }
+ }
+ }
+ for _, info := range allPackages {
+ if len(info.Errors) > 0 {
+ visit(info.Pkg)
+ }
+ }
+
+ // Mark the others as "transitively error-free".
+ for _, info := range allPackages {
+ if !reachable[info.Pkg] {
+ info.TransitivelyErrorFree = true
+ }
+ }
+}
+
+// build returns the effective build context.
+func (conf *Config) build() *build.Context {
+ if conf.Build != nil {
+ return conf.Build
+ }
+ return &build.Default
+}
+
+// parsePackageFiles enumerates the files belonging to package path,
+// then loads, parses and returns them, plus a list of I/O or parse
+// errors that were encountered.
+//
+// 'which' indicates which files to include:
+//
+// 'g': include non-test *.go source files (GoFiles + processed CgoFiles)
+// 't': include in-package *_test.go source files (TestGoFiles)
+//	'x': include external *_test.go source files (XTestGoFiles)
+func (conf *Config) parsePackageFiles(bp *build.Package, which rune) ([]*ast.File, []error) {
+ if bp.ImportPath == "unsafe" {
+ return nil, nil
+ }
+ var filenames []string
+ switch which {
+ case 'g':
+ filenames = bp.GoFiles
+ case 't':
+ filenames = bp.TestGoFiles
+ case 'x':
+ filenames = bp.XTestGoFiles
+ default:
+ panic(which)
+ }
+
+ files, errs := parseFiles(conf.fset(), conf.build(), conf.DisplayPath, bp.Dir, filenames, conf.ParserMode)
+
+ // Preprocess CgoFiles and parse the outputs (sequentially).
+ if which == 'g' && bp.CgoFiles != nil {
+ cgofiles, err := cgo.ProcessFiles(bp, conf.fset(), conf.DisplayPath, conf.ParserMode)
+ if err != nil {
+ errs = append(errs, err)
+ } else {
+ files = append(files, cgofiles...)
+ }
+ }
+
+ return files, errs
+}
+
+// doImport imports the package denoted by path.
+// It implements the types.Importer signature.
+//
+// It returns an error if a package could not be created
+// (e.g. go/build or parse error), but type errors are reported via
+// the types.Config.Error callback (the first of which is also saved
+// in the package's PackageInfo).
+//
+// Idempotent.
+func (imp *importer) doImport(from *PackageInfo, to string) (*types.Package, error) {
+ if to == "C" {
+ // This should be unreachable, but ad hoc packages are
+ // not currently subject to cgo preprocessing.
+ // See https://golang.org/issue/11627.
+ return nil, fmt.Errorf(`the loader doesn't cgo-process ad hoc packages like %q; see Go issue 11627`,
+ from.Pkg.Path())
+ }
+
+ bp, err := imp.findPackage(to, from.dir, 0)
+ if err != nil {
+ return nil, err
+ }
+
+ // The standard unsafe package is handled specially,
+ // and has no PackageInfo.
+ if bp.ImportPath == "unsafe" {
+ return types.Unsafe, nil
+ }
+
+ // Look for the package in the cache using its canonical path.
+ path := bp.ImportPath
+ imp.importedMu.Lock()
+ ii := imp.imported[path]
+ imp.importedMu.Unlock()
+ if ii == nil {
+ panic("internal error: unexpected import: " + path)
+ }
+ if ii.info != nil {
+ return ii.info.Pkg, nil
+ }
+
+ // Import of incomplete package: this indicates a cycle.
+ fromPath := from.Pkg.Path()
+ if cycle := imp.findPath(path, fromPath); cycle != nil {
+ // Normalize cycle: start from alphabetically largest node.
+ pos, start := -1, ""
+ for i, s := range cycle {
+ if pos < 0 || s > start {
+ pos, start = i, s
+ }
+ }
+ cycle = append(cycle, cycle[:pos]...)[pos:] // rotate cycle to start from largest
+		cycle = append(cycle, cycle[0]) // append the start node to show the cycle
+ return nil, fmt.Errorf("import cycle: %s", strings.Join(cycle, " -> "))
+ }
+
+ panic("internal error: import of incomplete (yet acyclic) package: " + fromPath)
+}
+
+// findPackage locates the package denoted by the importPath in the
+// specified directory.
+func (imp *importer) findPackage(importPath, fromDir string, mode build.ImportMode) (*build.Package, error) {
+ // We use a non-blocking duplicate-suppressing cache (gopl.io §9.7)
+ // to avoid holding the lock around FindPackage.
+ key := findpkgKey{importPath, fromDir, mode}
+ imp.findpkgMu.Lock()
+ v, ok := imp.findpkg[key]
+ if ok {
+ // cache hit
+ imp.findpkgMu.Unlock()
+
+ <-v.ready // wait for entry to become ready
+ } else {
+ // Cache miss: this goroutine becomes responsible for
+ // populating the map entry and broadcasting its readiness.
+ v = &findpkgValue{ready: make(chan struct{})}
+ imp.findpkg[key] = v
+ imp.findpkgMu.Unlock()
+
+ ioLimit <- true
+ v.bp, v.err = imp.conf.FindPackage(imp.conf.build(), importPath, fromDir, mode)
+ <-ioLimit
+
+ if _, ok := v.err.(*build.NoGoError); ok {
+ v.err = nil // empty directory is not an error
+ }
+
+ close(v.ready) // broadcast ready condition
+ }
+ return v.bp, v.err
+}
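
A condensed, self-contained sketch of that duplicate-suppressing cache (names hypothetical): the first goroutine to request a key computes the value while later requesters block on the ready channel, and the mutex is never held across the slow call.

package main

import (
	"fmt"
	"sync"
)

type entry struct {
	ready chan struct{} // closed when res has been set
	res   string
}

type memo struct {
	mu    sync.Mutex
	cache map[string]*entry
}

// get returns the cached result for key, computing it with f at most once.
func (m *memo) get(key string, f func(string) string) string {
	m.mu.Lock()
	e := m.cache[key]
	if e == nil {
		// Cache miss: this goroutine computes the value; others wait.
		e = &entry{ready: make(chan struct{})}
		m.cache[key] = e
		m.mu.Unlock()
		e.res = f(key)
		close(e.ready) // broadcast readiness
	} else {
		m.mu.Unlock()
		<-e.ready // cache hit: wait until the value is ready
	}
	return e.res
}

func main() {
	m := &memo{cache: make(map[string]*entry)}
	var wg sync.WaitGroup
	for i := 0; i < 3; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			fmt.Println(m.get("key", func(k string) string { return k + ": computed once" }))
		}()
	}
	wg.Wait()
}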
+
+// importAll loads, parses, and type-checks the specified packages in
+// parallel and returns their completed importInfos in unspecified order.
+//
+// fromPath is the package path of the importing package, if it is
+// importable, "" otherwise. It is used for cycle detection.
+//
+// fromDir is the directory containing the import declaration that
+// caused these imports.
+func (imp *importer) importAll(fromPath, fromDir string, imports map[string]bool, mode build.ImportMode) (infos []*PackageInfo, errors []importError) {
+ if fromPath != "" {
+ // We're loading a set of imports.
+ //
+ // We must record graph edges from the importing package
+ // to its dependencies, and check for cycles.
+ imp.graphMu.Lock()
+ deps, ok := imp.graph[fromPath]
+ if !ok {
+ deps = make(map[string]bool)
+ imp.graph[fromPath] = deps
+ }
+ for importPath := range imports {
+ deps[importPath] = true
+ }
+ imp.graphMu.Unlock()
+ }
+
+ var pending []*importInfo
+ for importPath := range imports {
+ if fromPath != "" {
+ if cycle := imp.findPath(importPath, fromPath); cycle != nil {
+			// Cycle-forming import: we must not await its
+			// completion, since doing so would deadlock.
+ if trace {
+ fmt.Fprintf(os.Stderr, "import cycle: %q\n", cycle)
+ }
+ continue
+ }
+ }
+ bp, err := imp.findPackage(importPath, fromDir, mode)
+ if err != nil {
+ errors = append(errors, importError{
+ path: importPath,
+ err: err,
+ })
+ continue
+ }
+ pending = append(pending, imp.startLoad(bp))
+ }
+
+ for _, ii := range pending {
+ ii.awaitCompletion()
+ infos = append(infos, ii.info)
+ }
+
+ return infos, errors
+}
+
+// findPath returns an arbitrary path from 'from' to 'to' in the import
+// graph, or nil if there was none.
+func (imp *importer) findPath(from, to string) []string {
+ imp.graphMu.Lock()
+ defer imp.graphMu.Unlock()
+
+ seen := make(map[string]bool)
+ var search func(stack []string, importPath string) []string
+ search = func(stack []string, importPath string) []string {
+ if !seen[importPath] {
+ seen[importPath] = true
+ stack = append(stack, importPath)
+ if importPath == to {
+ return stack
+ }
+ for x := range imp.graph[importPath] {
+ if p := search(stack, x); p != nil {
+ return p
+ }
+ }
+ }
+ return nil
+ }
+ return search(make([]string, 0, 20), from)
+}
+
+// startLoad initiates the loading, parsing and type-checking of the
+// specified package and its dependencies, if it has not already begun.
+//
+// It returns an importInfo, not necessarily in a completed state. The
+// caller must call awaitCompletion() before accessing its info field.
+//
+// startLoad is concurrency-safe and idempotent.
+func (imp *importer) startLoad(bp *build.Package) *importInfo {
+ path := bp.ImportPath
+ imp.importedMu.Lock()
+ ii, ok := imp.imported[path]
+ if !ok {
+ ii = &importInfo{path: path, complete: make(chan struct{})}
+ imp.imported[path] = ii
+ go func() {
+ info := imp.load(bp)
+ ii.Complete(info)
+ }()
+ }
+ imp.importedMu.Unlock()
+
+ return ii
+}
+
+// load implements package loading by parsing Go source files
+// located by go/build.
+func (imp *importer) load(bp *build.Package) *PackageInfo {
+ info := imp.newPackageInfo(bp.ImportPath, bp.Dir)
+ info.Importable = true
+ files, errs := imp.conf.parsePackageFiles(bp, 'g')
+ for _, err := range errs {
+ info.appendError(err)
+ }
+
+ imp.addFiles(info, files, true)
+
+ imp.progMu.Lock()
+ imp.prog.importMap[bp.ImportPath] = info.Pkg
+ imp.progMu.Unlock()
+
+ return info
+}
+
+// addFiles adds and type-checks the specified files to info, loading
+// their dependencies if needed. The order of files determines the
+// package initialization order. It may be called multiple times on the
+// same package. Errors are appended to the info.Errors field.
+//
+// cycleCheck determines whether the imports within files create
+// dependency edges that should be checked for potential cycles.
+func (imp *importer) addFiles(info *PackageInfo, files []*ast.File, cycleCheck bool) {
+ // Ensure the dependencies are loaded, in parallel.
+ var fromPath string
+ if cycleCheck {
+ fromPath = info.Pkg.Path()
+ }
+ // TODO(adonovan): opt: make the caller do scanImports.
+ // Callers with a build.Package can skip it.
+ imp.importAll(fromPath, info.dir, scanImports(files), 0)
+
+ if trace {
+ fmt.Fprintf(os.Stderr, "%s: start %q (%d)\n",
+ time.Since(imp.start), info.Pkg.Path(), len(files))
+ }
+
+ // Don't call checker.Files on Unsafe, even with zero files,
+ // because it would mutate the package, which is a global.
+ if info.Pkg == types.Unsafe {
+ if len(files) > 0 {
+ panic(`"unsafe" package contains unexpected files`)
+ }
+ } else {
+ // Ignore the returned (first) error since we
+ // already collect them all in the PackageInfo.
+ info.checker.Files(files)
+ info.Files = append(info.Files, files...)
+ }
+
+ if imp.conf.AfterTypeCheck != nil {
+ imp.conf.AfterTypeCheck(info, files)
+ }
+
+ if trace {
+ fmt.Fprintf(os.Stderr, "%s: stop %q\n",
+ time.Since(imp.start), info.Pkg.Path())
+ }
+}
+
+func (imp *importer) newPackageInfo(path, dir string) *PackageInfo {
+ var pkg *types.Package
+ if path == "unsafe" {
+ pkg = types.Unsafe
+ } else {
+ pkg = types.NewPackage(path, "")
+ }
+ info := &PackageInfo{
+ Pkg: pkg,
+ Info: types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Instances: make(map[*ast.Ident]types.Instance),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ },
+ errorFunc: imp.conf.TypeChecker.Error,
+ dir: dir,
+ }
+ versions.InitFileVersions(&info.Info)
+
+ // Copy the types.Config so we can vary it across PackageInfos.
+ tc := imp.conf.TypeChecker
+ tc.IgnoreFuncBodies = false
+ if f := imp.conf.TypeCheckFuncBodies; f != nil {
+ tc.IgnoreFuncBodies = !f(path)
+ }
+ tc.Importer = closure{imp, info}
+ tc.Error = info.appendError // appendError wraps the user's Error function
+
+ info.checker = types.NewChecker(&tc, imp.conf.fset(), pkg, &info.Info)
+ imp.progMu.Lock()
+ imp.prog.AllPackages[pkg] = info
+ imp.progMu.Unlock()
+ return info
+}
+
+type closure struct {
+ imp *importer
+ info *PackageInfo
+}
+
+func (c closure) Import(to string) (*types.Package, error) { return c.imp.doImport(c.info, to) }
diff --git a/vendor/golang.org/x/tools/go/loader/util.go b/vendor/golang.org/x/tools/go/loader/util.go
new file mode 100644
index 000000000..3a80acae6
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/loader/util.go
@@ -0,0 +1,123 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package loader
+
+import (
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "io"
+ "os"
+ "strconv"
+ "sync"
+
+ "golang.org/x/tools/go/buildutil"
+)
+
+// We use a counting semaphore to limit
+// the number of parallel I/O calls per process.
+var ioLimit = make(chan bool, 10)
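
A hedged sketch of the counting-semaphore idiom, not part of the diff: each send acquires one of the buffered slots and each receive releases it, so concurrency is capped at the channel's capacity.

package main

import (
	"fmt"
	"sync"
)

var sem = make(chan bool, 2) // at most 2 operations in flight

func main() {
	var wg sync.WaitGroup
	for i := 0; i < 5; i++ {
		wg.Add(1)
		go func(i int) {
			defer wg.Done()
			sem <- true              // acquire a slot (blocks when full)
			defer func() { <-sem }() // release the slot
			fmt.Println("worker", i, "running")
		}(i)
	}
	wg.Wait()
}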
+
+// parseFiles parses the Go source files within directory dir and
+// returns the ASTs of the ones that could be at least partially parsed,
+// along with a list of I/O and parse errors encountered.
+//
+// I/O is done via ctxt, which may specify a virtual file system.
+// displayPath is used to transform the filenames attached to the ASTs.
+func parseFiles(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, files []string, mode parser.Mode) ([]*ast.File, []error) {
+ if displayPath == nil {
+ displayPath = func(path string) string { return path }
+ }
+ var wg sync.WaitGroup
+ n := len(files)
+ parsed := make([]*ast.File, n)
+ errors := make([]error, n)
+ for i, file := range files {
+ if !buildutil.IsAbsPath(ctxt, file) {
+ file = buildutil.JoinPath(ctxt, dir, file)
+ }
+ wg.Add(1)
+ go func(i int, file string) {
+ ioLimit <- true // wait
+ defer func() {
+ wg.Done()
+ <-ioLimit // signal
+ }()
+ var rd io.ReadCloser
+ var err error
+ if ctxt.OpenFile != nil {
+ rd, err = ctxt.OpenFile(file)
+ } else {
+ rd, err = os.Open(file)
+ }
+ if err != nil {
+ errors[i] = err // open failed
+ return
+ }
+
+ // ParseFile may return both an AST and an error.
+ parsed[i], errors[i] = parser.ParseFile(fset, displayPath(file), rd, mode)
+ rd.Close()
+ }(i, file)
+ }
+ wg.Wait()
+
+ // Eliminate nils, preserving order.
+ var o int
+ for _, f := range parsed {
+ if f != nil {
+ parsed[o] = f
+ o++
+ }
+ }
+ parsed = parsed[:o]
+
+ o = 0
+ for _, err := range errors {
+ if err != nil {
+ errors[o] = err
+ o++
+ }
+ }
+ errors = errors[:o]
+
+ return parsed, errors
+}
+
+// scanImports returns the set of all import paths from all
+// import specs in the specified files.
+func scanImports(files []*ast.File) map[string]bool {
+ imports := make(map[string]bool)
+ for _, f := range files {
+ for _, decl := range f.Decls {
+ if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT {
+ for _, spec := range decl.Specs {
+ spec := spec.(*ast.ImportSpec)
+
+ // NB: do not assume the program is well-formed!
+ path, err := strconv.Unquote(spec.Path.Value)
+ if err != nil {
+ continue // quietly ignore the error
+ }
+ if path == "C" {
+ continue // skip pseudopackage
+ }
+ imports[path] = true
+ }
+ }
+ }
+ }
+ return imports
+}
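
For a single file, the same import set can be read back from the parsed file's Imports field; a small sketch (the source snippet is illustrative):

package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"strconv"
)

const src = `package demo

import (
	"fmt"
	_ "net/http/pprof"
)`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, parser.ImportsOnly)
	if err != nil {
		panic(err)
	}
	for _, spec := range f.Imports {
		path, _ := strconv.Unquote(spec.Path.Value) // mirror scanImports' unquoting
		fmt.Println(path)                           // fmt, net/http/pprof
	}
}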
+
+// ---------- Internal helpers ----------
+
+// TODO(adonovan): make this a method: func (*token.File) Contains(token.Pos)
+func tokenFileContainsPos(f *token.File, pos token.Pos) bool {
+ p := int(pos)
+ base := f.Base()
+ return base <= p && p < base+f.Size()
+}
diff --git a/vendor/golang.org/x/tools/imports/forward.go b/vendor/golang.org/x/tools/imports/forward.go
new file mode 100644
index 000000000..cb6db8893
--- /dev/null
+++ b/vendor/golang.org/x/tools/imports/forward.go
@@ -0,0 +1,77 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package imports implements a Go pretty-printer (like package "go/format")
+// that also adds or removes import statements as necessary.
+package imports // import "golang.org/x/tools/imports"
+
+import (
+ "log"
+ "os"
+
+ "golang.org/x/tools/internal/gocommand"
+ intimp "golang.org/x/tools/internal/imports"
+)
+
+// Options specifies options for processing files.
+type Options struct {
+ Fragment bool // Accept fragment of a source file (no package statement)
+ AllErrors bool // Report all errors (not just the first 10 on different lines)
+
+ Comments bool // Print comments (true if nil *Options provided)
+ TabIndent bool // Use tabs for indent (true if nil *Options provided)
+ TabWidth int // Tab width (8 if nil *Options provided)
+
+ FormatOnly bool // Disable the insertion and deletion of imports
+}
+
+// Debug controls verbose logging.
+var Debug = false
+
+// LocalPrefix is a comma-separated string of import path prefixes, which, if
+// set, instructs Process to sort the import paths with the given prefixes
+// into another group after 3rd-party packages.
+var LocalPrefix string
+
+// Process formats and adjusts imports for the provided file.
+// If opt is nil the defaults are used, and if src is nil the source
+// is read from the filesystem.
+//
+// Note that filename's directory influences which imports can be chosen,
+// so it is important that filename be accurate.
+// To process data “as if” it were in filename, pass the data as a non-nil src.
+func Process(filename string, src []byte, opt *Options) ([]byte, error) {
+ var err error
+ if src == nil {
+ src, err = os.ReadFile(filename)
+ if err != nil {
+ return nil, err
+ }
+ }
+ if opt == nil {
+ opt = &Options{Comments: true, TabIndent: true, TabWidth: 8}
+ }
+ intopt := &intimp.Options{
+ Env: &intimp.ProcessEnv{
+ GocmdRunner: &gocommand.Runner{},
+ },
+ LocalPrefix: LocalPrefix,
+ AllErrors: opt.AllErrors,
+ Comments: opt.Comments,
+ FormatOnly: opt.FormatOnly,
+ Fragment: opt.Fragment,
+ TabIndent: opt.TabIndent,
+ TabWidth: opt.TabWidth,
+ }
+ if Debug {
+ intopt.Env.Logf = log.Printf
+ }
+ return intimp.Process(filename, src, intopt)
+}
+
+// VendorlessPath returns the devendorized version of the import path ipath.
+// For example, VendorlessPath("foo/bar/vendor/a/b") returns "a/b".
+func VendorlessPath(ipath string) string {
+ return intimp.VendorlessPath(ipath)
+}
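
Process is the package's public entry point; a minimal usage sketch with an illustrative source snippet:

package main

import (
	"fmt"

	"golang.org/x/tools/imports"
)

func main() {
	src := []byte("package demo\n\nfunc hello() { fmt.Println(\"hi\") }\n")
	// A nil *Options selects the defaults: Comments, TabIndent, TabWidth=8.
	out, err := imports.Process("demo.go", src, nil)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%s", out) // the missing "fmt" import has been added
}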
diff --git a/vendor/golang.org/x/tools/internal/gopathwalk/walk.go b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go
new file mode 100644
index 000000000..52f74e643
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go
@@ -0,0 +1,331 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package gopathwalk is like filepath.Walk but specialized for finding Go
+// packages, particularly in $GOPATH and $GOROOT.
+package gopathwalk
+
+import (
+ "bufio"
+ "bytes"
+ "io/fs"
+ "log"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+)
+
+// Options controls the behavior of a Walk call.
+type Options struct {
+ // If Logf is non-nil, debug logging is enabled through this function.
+ Logf func(format string, args ...interface{})
+ // Search module caches. Also disables legacy goimports ignore rules.
+ ModulesEnabled bool
+}
+
+// RootType indicates the type of a Root.
+type RootType int
+
+const (
+ RootUnknown RootType = iota
+ RootGOROOT
+ RootGOPATH
+ RootCurrentModule
+ RootModuleCache
+ RootOther
+)
+
+// A Root is a starting point for a Walk.
+type Root struct {
+ Path string
+ Type RootType
+}
+
+// Walk walks Go source directories ($GOROOT, $GOPATH, etc) to find packages.
+// For each package found, add will be called with the absolute
+// paths of the containing source directory and the package directory.
+func Walk(roots []Root, add func(root Root, dir string), opts Options) {
+ WalkSkip(roots, add, func(Root, string) bool { return false }, opts)
+}
+
+// WalkSkip walks Go source directories ($GOROOT, $GOPATH, etc) to find packages.
+// For each package found, add will be called with the absolute
+// paths of the containing source directory and the package directory.
+// For each directory that will be scanned, skip will be called
+// with the absolute paths of the containing source directory and the directory.
+// If skip returns false on a directory it will be processed.
+func WalkSkip(roots []Root, add func(root Root, dir string), skip func(root Root, dir string) bool, opts Options) {
+ for _, root := range roots {
+ walkDir(root, add, skip, opts)
+ }
+}
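
A sketch of driving Walk over $GOROOT/src. Because gopathwalk is an internal package this only compiles inside golang.org/x/tools, so the import is illustrative:

package main

import (
	"fmt"
	"go/build"
	"log"
	"path/filepath"

	"golang.org/x/tools/internal/gopathwalk" // internal: illustrative only
)

func main() {
	roots := []gopathwalk.Root{{
		Path: filepath.Join(build.Default.GOROOT, "src"),
		Type: gopathwalk.RootGOROOT,
	}}
	add := func(root gopathwalk.Root, dir string) {
		fmt.Println(dir) // each directory that may hold a Go package
	}
	gopathwalk.Walk(roots, add, gopathwalk.Options{Logf: log.Printf})
}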
+
+// walkDir creates a walker and starts fastwalk with this walker.
+func walkDir(root Root, add func(Root, string), skip func(root Root, dir string) bool, opts Options) {
+ if _, err := os.Stat(root.Path); os.IsNotExist(err) {
+ if opts.Logf != nil {
+ opts.Logf("skipping nonexistent directory: %v", root.Path)
+ }
+ return
+ }
+ start := time.Now()
+ if opts.Logf != nil {
+ opts.Logf("scanning %s", root.Path)
+ }
+
+ w := &walker{
+ root: root,
+ add: add,
+ skip: skip,
+ opts: opts,
+ added: make(map[string]bool),
+ }
+ w.init()
+
+ // Add a trailing path separator to cause filepath.WalkDir to traverse symlinks.
+ path := root.Path
+ if len(path) == 0 {
+ path = "." + string(filepath.Separator)
+ } else if !os.IsPathSeparator(path[len(path)-1]) {
+ path = path + string(filepath.Separator)
+ }
+
+ if err := filepath.WalkDir(path, w.walk); err != nil {
+ logf := opts.Logf
+ if logf == nil {
+ logf = log.Printf
+ }
+ logf("scanning directory %v: %v", root.Path, err)
+ }
+
+ if opts.Logf != nil {
+ opts.Logf("scanned %s in %v", root.Path, time.Since(start))
+ }
+}
+
+// walker is the callback for fastwalk.Walk.
+type walker struct {
+ root Root // The source directory to scan.
+ add func(Root, string) // The callback that will be invoked for every possible Go package dir.
+ skip func(Root, string) bool // The callback that will be invoked for every dir. dir is skipped if it returns true.
+ opts Options // Options passed to Walk by the user.
+
+ pathSymlinks []os.FileInfo
+ ignoredDirs []string
+
+ added map[string]bool
+}
+
+// init initializes the walker based on its Options.
+func (w *walker) init() {
+ var ignoredPaths []string
+ if w.root.Type == RootModuleCache {
+ ignoredPaths = []string{"cache"}
+ }
+ if !w.opts.ModulesEnabled && w.root.Type == RootGOPATH {
+ ignoredPaths = w.getIgnoredDirs(w.root.Path)
+ ignoredPaths = append(ignoredPaths, "v", "mod")
+ }
+
+ for _, p := range ignoredPaths {
+ full := filepath.Join(w.root.Path, p)
+ w.ignoredDirs = append(w.ignoredDirs, full)
+ if w.opts.Logf != nil {
+ w.opts.Logf("Directory added to ignore list: %s", full)
+ }
+ }
+}
+
+// getIgnoredDirs reads an optional config file at <path>/.goimportsignore
+// of relative directories to ignore when scanning for go files.
+// The provided path is one of the $GOPATH entries with "src" appended.
+func (w *walker) getIgnoredDirs(path string) []string {
+ file := filepath.Join(path, ".goimportsignore")
+ slurp, err := os.ReadFile(file)
+ if w.opts.Logf != nil {
+ if err != nil {
+ w.opts.Logf("%v", err)
+ } else {
+ w.opts.Logf("Read %s", file)
+ }
+ }
+ if err != nil {
+ return nil
+ }
+
+ var ignoredDirs []string
+ bs := bufio.NewScanner(bytes.NewReader(slurp))
+ for bs.Scan() {
+ line := strings.TrimSpace(bs.Text())
+ if line == "" || strings.HasPrefix(line, "#") {
+ continue
+ }
+ ignoredDirs = append(ignoredDirs, line)
+ }
+ return ignoredDirs
+}
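
The accepted file format is line-oriented; a hypothetical $GOPATH/src/.goimportsignore might read:

# Directories to skip, relative to $GOPATH/src.
# Blank lines and lines starting with '#' are ignored.
github.com/example/enormous-monorepo
old/migration-snapshots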
+
+// shouldSkipDir reports whether the directory dir should be skipped.
+func (w *walker) shouldSkipDir(dir string) bool {
+ for _, ignoredDir := range w.ignoredDirs {
+ if dir == ignoredDir {
+ return true
+ }
+ }
+ if w.skip != nil {
+ // Check with the user specified callback.
+ return w.skip(w.root, dir)
+ }
+ return false
+}
+
+// walk walks through the given path.
+//
+// Errors are logged if w.opts.Logf is non-nil, but otherwise ignored:
+// walk returns only nil or fs.SkipDir.
+func (w *walker) walk(path string, d fs.DirEntry, err error) error {
+ if err != nil {
+ // We have no way to report errors back through Walk or WalkSkip,
+ // so just log and ignore them.
+ if w.opts.Logf != nil {
+ w.opts.Logf("%v", err)
+ }
+ if d == nil {
+ // Nothing more to do: the error prevents us from knowing
+ // what path even represents.
+ return nil
+ }
+ }
+
+ if d.Type().IsRegular() {
+ if !strings.HasSuffix(path, ".go") {
+ return nil
+ }
+
+ dir := filepath.Dir(path)
+ if dir == w.root.Path && (w.root.Type == RootGOROOT || w.root.Type == RootGOPATH) {
+ // Doesn't make sense to have regular files
+ // directly in your $GOPATH/src or $GOROOT/src.
+ return nil
+ }
+
+ if !w.added[dir] {
+ w.add(w.root, dir)
+ w.added[dir] = true
+ }
+ return nil
+ }
+
+ if d.IsDir() {
+ base := filepath.Base(path)
+ if base == "" || base[0] == '.' || base[0] == '_' ||
+ base == "testdata" ||
+ (w.root.Type == RootGOROOT && w.opts.ModulesEnabled && base == "vendor") ||
+ (!w.opts.ModulesEnabled && base == "node_modules") {
+ return fs.SkipDir
+ }
+ if w.shouldSkipDir(path) {
+ return fs.SkipDir
+ }
+ return nil
+ }
+
+ if d.Type()&os.ModeSymlink != 0 {
+ // TODO(bcmills): 'go list all' itself ignores symlinks within GOROOT/src
+ // and GOPATH/src. Do we really need to traverse them here? If so, why?
+
+ fi, err := os.Stat(path)
+ if err != nil || !fi.IsDir() {
+ // Not a directory. Just walk the file (or broken link) and be done.
+ return w.walk(path, fs.FileInfoToDirEntry(fi), err)
+ }
+
+ // Avoid walking symlink cycles: if we have already followed a symlink to
+ // this directory as a parent of itself, don't follow it again.
+ //
+ // This doesn't catch the first time through a cycle, but it also minimizes
+ // the number of extra stat calls we make if we *don't* encounter a cycle.
+ // Since we don't actually expect to encounter symlink cycles in practice,
+ // this seems like the right tradeoff.
+ for _, parent := range w.pathSymlinks {
+ if os.SameFile(fi, parent) {
+ return nil
+ }
+ }
+
+ w.pathSymlinks = append(w.pathSymlinks, fi)
+ defer func() {
+ w.pathSymlinks = w.pathSymlinks[:len(w.pathSymlinks)-1]
+ }()
+
+ // On some platforms the OS (or the Go os package) sometimes fails to
+ // resolve directory symlinks before a trailing slash
+ // (even though POSIX requires it to do so).
+ //
+ // On macOS that failure may be caused by a known libc/kernel bug;
+ // see https://go.dev/issue/59586.
+ //
+ // On Windows before Go 1.21, it may be caused by a bug in
+ // os.Lstat (fixed in https://go.dev/cl/463177).
+ //
+ // Since we need to handle this explicitly on broken platforms anyway,
+ // it is simplest to just always do that and not rely on POSIX pathname
+ // resolution to walk the directory (such as by calling WalkDir with
+ // a trailing slash appended to the path).
+ //
+	// Instead, we make a sequence of walk calls (directly and through
+	// recursive calls to filepath.WalkDir), simulating what WalkDir would
+	// do if the symlink were a regular directory.
+
+ // First we call walk on the path as a directory
+ // (instead of a symlink).
+ err = w.walk(path, fs.FileInfoToDirEntry(fi), nil)
+ if err == fs.SkipDir {
+ return nil
+ } else if err != nil {
+ // This should be impossible, but handle it anyway in case
+ // walk is changed to return other errors.
+ return err
+ }
+
+ // Now read the directory and walk its entries.
+ ents, err := os.ReadDir(path)
+ if err != nil {
+ // Report the ReadDir error, as filepath.WalkDir would do.
+ err = w.walk(path, fs.FileInfoToDirEntry(fi), err)
+ if err == fs.SkipDir {
+ return nil
+ } else if err != nil {
+ return err // Again, should be impossible.
+ }
+ // Fall through and iterate over whatever entries we did manage to get.
+ }
+
+ for _, d := range ents {
+ nextPath := filepath.Join(path, d.Name())
+ if d.IsDir() {
+ // We want to walk the whole directory tree rooted at nextPath,
+ // not just the single entry for the directory.
+ err := filepath.WalkDir(nextPath, w.walk)
+ if err != nil && w.opts.Logf != nil {
+ w.opts.Logf("%v", err)
+ }
+ } else {
+ err := w.walk(nextPath, d, nil)
+ if err == fs.SkipDir {
+ // Skip the rest of the entries in the parent directory of nextPath
+ // (that is, path itself).
+ break
+ } else if err != nil {
+ return err // Again, should be impossible.
+ }
+ }
+ }
+ return nil
+ }
+
+ // Not a file, regular directory, or symlink; skip.
+ return nil
+}
diff --git a/vendor/golang.org/x/tools/internal/imports/fix.go b/vendor/golang.org/x/tools/internal/imports/fix.go
new file mode 100644
index 000000000..dd369c072
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/imports/fix.go
@@ -0,0 +1,1769 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package imports
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/parser"
+ "go/token"
+ "io/fs"
+ "io/ioutil"
+ "os"
+ "path"
+ "path/filepath"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "unicode"
+ "unicode/utf8"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/internal/event"
+ "golang.org/x/tools/internal/gocommand"
+ "golang.org/x/tools/internal/gopathwalk"
+)
+
+// importToGroup is a list of functions which map from an import path to
+// a group number.
+var importToGroup = []func(localPrefix, importPath string) (num int, ok bool){
+ func(localPrefix, importPath string) (num int, ok bool) {
+ if localPrefix == "" {
+ return
+ }
+ for _, p := range strings.Split(localPrefix, ",") {
+ if strings.HasPrefix(importPath, p) || strings.TrimSuffix(p, "/") == importPath {
+ return 3, true
+ }
+ }
+ return
+ },
+ func(_, importPath string) (num int, ok bool) {
+ if strings.HasPrefix(importPath, "appengine") {
+ return 2, true
+ }
+ return
+ },
+ func(_, importPath string) (num int, ok bool) {
+ firstComponent := strings.Split(importPath, "/")[0]
+ if strings.Contains(firstComponent, ".") {
+ return 1, true
+ }
+ return
+ },
+}
+
+func importGroup(localPrefix, importPath string) int {
+ for _, fn := range importToGroup {
+ if n, ok := fn(localPrefix, importPath); ok {
+ return n
+ }
+ }
+ return 0
+}
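
An in-package test sketch of the resulting grouping; the local prefix and expected values are illustrative but follow directly from the rules above:

package imports

import "testing"

func TestImportGroupSketch(t *testing.T) {
	const local = "example.com/me"
	cases := map[string]int{
		"fmt":                     0, // default: no dot in the first path component
		"github.com/pkg/errors":   1, // third-party: first component contains a dot
		"appengine/datastore":     2, // appengine special case
		"example.com/me/internal": 3, // matches the local prefix
	}
	for path, want := range cases {
		if got := importGroup(local, path); got != want {
			t.Errorf("importGroup(%q, %q) = %d, want %d", local, path, got, want)
		}
	}
}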
+
+type ImportFixType int
+
+const (
+ AddImport ImportFixType = iota
+ DeleteImport
+ SetImportName
+)
+
+type ImportFix struct {
+ // StmtInfo represents the import statement this fix will add, remove, or change.
+ StmtInfo ImportInfo
+ // IdentName is the identifier that this fix will add or remove.
+ IdentName string
+ // FixType is the type of fix this is (AddImport, DeleteImport, SetImportName).
+ FixType ImportFixType
+ Relevance float64 // see pkg
+}
+
+// An ImportInfo represents a single import statement.
+type ImportInfo struct {
+ ImportPath string // import path, e.g. "crypto/rand".
+ Name string // import name, e.g. "crand", or "" if none.
+}
+
+// A packageInfo represents what's known about a package.
+type packageInfo struct {
+ name string // real package name, if known.
+ exports map[string]bool // known exports.
+}
+
+// parseOtherFiles parses all the Go files in srcDir except filename, including
+// test files if filename looks like a test.
+func parseOtherFiles(fset *token.FileSet, srcDir, filename string) []*ast.File {
+ // This could use go/packages but it doesn't buy much, and it fails
+ // with https://golang.org/issue/26296 in LoadFiles mode in some cases.
+ considerTests := strings.HasSuffix(filename, "_test.go")
+
+ fileBase := filepath.Base(filename)
+ packageFileInfos, err := os.ReadDir(srcDir)
+ if err != nil {
+ return nil
+ }
+
+ var files []*ast.File
+ for _, fi := range packageFileInfos {
+ if fi.Name() == fileBase || !strings.HasSuffix(fi.Name(), ".go") {
+ continue
+ }
+ if !considerTests && strings.HasSuffix(fi.Name(), "_test.go") {
+ continue
+ }
+
+ f, err := parser.ParseFile(fset, filepath.Join(srcDir, fi.Name()), nil, 0)
+ if err != nil {
+ continue
+ }
+
+ files = append(files, f)
+ }
+
+ return files
+}
+
+// addGlobals puts the names of package vars into the provided map.
+func addGlobals(f *ast.File, globals map[string]bool) {
+ for _, decl := range f.Decls {
+ genDecl, ok := decl.(*ast.GenDecl)
+ if !ok {
+ continue
+ }
+
+		for _, spec := range genDecl.Specs {
+			valueSpec, ok := spec.(*ast.ValueSpec)
+			if !ok {
+				continue
+			}
+			// Record every declared name, not just the first
+			// (a spec may read "var a, b int").
+			for _, name := range valueSpec.Names {
+				globals[name.Name] = true
+			}
+ }
+ }
+}
+
+// collectReferences builds a map of selector expressions, from
+// left hand side (X) to a set of right hand sides (Sel).
+func collectReferences(f *ast.File) references {
+ refs := references{}
+
+ var visitor visitFn
+ visitor = func(node ast.Node) ast.Visitor {
+ if node == nil {
+ return visitor
+ }
+ switch v := node.(type) {
+ case *ast.SelectorExpr:
+ xident, ok := v.X.(*ast.Ident)
+ if !ok {
+ break
+ }
+ if xident.Obj != nil {
+ // If the parser can resolve it, it's not a package ref.
+ break
+ }
+ if !ast.IsExported(v.Sel.Name) {
+ // Whatever this is, it's not exported from a package.
+ break
+ }
+ pkgName := xident.Name
+ r := refs[pkgName]
+ if r == nil {
+ r = make(map[string]bool)
+ refs[pkgName] = r
+ }
+ r[v.Sel.Name] = true
+ }
+ return visitor
+ }
+ ast.Walk(visitor, f)
+ return refs
+}
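
To make the shape of the returned map concrete, a hedged in-package sketch (the helper name is hypothetical): neither fmt nor strings is declared in the file, so both selector left-hand sides are collected.

package imports

import (
	"fmt"
	"go/parser"
	"go/token"
)

func exampleReferences() {
	const src = `package demo

func f() { fmt.Println(strings.ToUpper("x")) }`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		panic(err)
	}
	// Expected shape:
	// references{"fmt": {"Println": true}, "strings": {"ToUpper": true}}
	fmt.Println(collectReferences(f))
}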
+
+// collectImports returns all the imports in f.
+// Unnamed imports (., _) and "C" are ignored.
+func collectImports(f *ast.File) []*ImportInfo {
+ var imports []*ImportInfo
+ for _, imp := range f.Imports {
+ var name string
+ if imp.Name != nil {
+ name = imp.Name.Name
+ }
+ if imp.Path.Value == `"C"` || name == "_" || name == "." {
+ continue
+ }
+ path := strings.Trim(imp.Path.Value, `"`)
+ imports = append(imports, &ImportInfo{
+ Name: name,
+ ImportPath: path,
+ })
+ }
+ return imports
+}
+
+// findMissingImport searches pass's candidates for an import that provides
+// pkg, containing all of syms.
+func (p *pass) findMissingImport(pkg string, syms map[string]bool) *ImportInfo {
+ for _, candidate := range p.candidates {
+ pkgInfo, ok := p.knownPackages[candidate.ImportPath]
+ if !ok {
+ continue
+ }
+ if p.importIdentifier(candidate) != pkg {
+ continue
+ }
+
+ allFound := true
+ for right := range syms {
+ if !pkgInfo.exports[right] {
+ allFound = false
+ break
+ }
+ }
+
+ if allFound {
+ return candidate
+ }
+ }
+ return nil
+}
+
+// references is a set of references found in a Go file. The first map key is the
+// left hand side of a selector expression, the second key is the right hand
+// side, and the value should always be true.
+type references map[string]map[string]bool
+
+// A pass contains all the inputs and state necessary to fix a file's imports.
+// It can be modified in some ways during use; see comments below.
+type pass struct {
+ // Inputs. These must be set before a call to load, and not modified after.
+ fset *token.FileSet // fset used to parse f and its siblings.
+ f *ast.File // the file being fixed.
+ srcDir string // the directory containing f.
+ env *ProcessEnv // the environment to use for go commands, etc.
+ loadRealPackageNames bool // if true, load package names from disk rather than guessing them.
+ otherFiles []*ast.File // sibling files.
+
+ // Intermediate state, generated by load.
+ existingImports map[string][]*ImportInfo
+ allRefs references
+ missingRefs references
+
+ // Inputs to fix. These can be augmented between successive fix calls.
+ lastTry bool // indicates that this is the last call and fix should clean up as best it can.
+ candidates []*ImportInfo // candidate imports in priority order.
+ knownPackages map[string]*packageInfo // information about all known packages.
+}
+
+// loadPackageNames saves the package names for everything referenced by imports.
+func (p *pass) loadPackageNames(imports []*ImportInfo) error {
+ if p.env.Logf != nil {
+ p.env.Logf("loading package names for %v packages", len(imports))
+ defer func() {
+ p.env.Logf("done loading package names for %v packages", len(imports))
+ }()
+ }
+ var unknown []string
+ for _, imp := range imports {
+ if _, ok := p.knownPackages[imp.ImportPath]; ok {
+ continue
+ }
+ unknown = append(unknown, imp.ImportPath)
+ }
+
+ resolver, err := p.env.GetResolver()
+ if err != nil {
+ return err
+ }
+
+ names, err := resolver.loadPackageNames(unknown, p.srcDir)
+ if err != nil {
+ return err
+ }
+
+ for path, name := range names {
+ p.knownPackages[path] = &packageInfo{
+ name: name,
+ exports: map[string]bool{},
+ }
+ }
+ return nil
+}
+
+// importIdentifier returns the identifier that imp will introduce. It will
+// guess if the package name has not been loaded, e.g. because the source
+// is not available.
+func (p *pass) importIdentifier(imp *ImportInfo) string {
+ if imp.Name != "" {
+ return imp.Name
+ }
+ known := p.knownPackages[imp.ImportPath]
+ if known != nil && known.name != "" {
+ return known.name
+ }
+ return ImportPathToAssumedName(imp.ImportPath)
+}
+
+// load reads in everything necessary to run a pass, and reports whether the
+// file already has all the imports it needs. It fills in p.missingRefs with
+// the file's missing symbols, if any; if there are none, it calls fix, which
+// may still remove unused imports.
+func (p *pass) load() ([]*ImportFix, bool) {
+ p.knownPackages = map[string]*packageInfo{}
+ p.missingRefs = references{}
+ p.existingImports = map[string][]*ImportInfo{}
+
+ // Load basic information about the file in question.
+ p.allRefs = collectReferences(p.f)
+
+ // Load stuff from other files in the same package:
+ // global variables so we know they don't need resolving, and imports
+ // that we might want to mimic.
+ globals := map[string]bool{}
+ for _, otherFile := range p.otherFiles {
+ // Don't load globals from files that are in the same directory
+ // but a different package. Using them to suggest imports is OK.
+ if p.f.Name.Name == otherFile.Name.Name {
+ addGlobals(otherFile, globals)
+ }
+ p.candidates = append(p.candidates, collectImports(otherFile)...)
+ }
+
+ // Resolve all the import paths we've seen to package names, and store
+ // f's imports by the identifier they introduce.
+ imports := collectImports(p.f)
+ if p.loadRealPackageNames {
+ err := p.loadPackageNames(append(imports, p.candidates...))
+ if err != nil {
+ if p.env.Logf != nil {
+ p.env.Logf("loading package names: %v", err)
+ }
+ return nil, false
+ }
+ }
+ for _, imp := range imports {
+ p.existingImports[p.importIdentifier(imp)] = append(p.existingImports[p.importIdentifier(imp)], imp)
+ }
+
+ // Find missing references.
+ for left, rights := range p.allRefs {
+ if globals[left] {
+ continue
+ }
+ _, ok := p.existingImports[left]
+ if !ok {
+ p.missingRefs[left] = rights
+ continue
+ }
+ }
+ if len(p.missingRefs) != 0 {
+ return nil, false
+ }
+
+ return p.fix()
+}
+
+// fix attempts to satisfy missing imports using p.candidates. If it finds
+// everything, or if p.lastTry is true, it returns the fixes needed to add the
+// imports it found, delete anything unused, and update import names, along
+// with true.
+func (p *pass) fix() ([]*ImportFix, bool) {
+ // Find missing imports.
+ var selected []*ImportInfo
+ for left, rights := range p.missingRefs {
+ if imp := p.findMissingImport(left, rights); imp != nil {
+ selected = append(selected, imp)
+ }
+ }
+
+ if !p.lastTry && len(selected) != len(p.missingRefs) {
+ return nil, false
+ }
+
+ // Found everything, or giving up. Add the new imports and remove any unused.
+ var fixes []*ImportFix
+ for _, identifierImports := range p.existingImports {
+ for _, imp := range identifierImports {
+ // We deliberately ignore globals here, because we can't be sure
+ // they're in the same package. People do things like put multiple
+ // main packages in the same directory, and we don't want to
+ // remove imports if they happen to have the same name as a var in
+ // a different package.
+ if _, ok := p.allRefs[p.importIdentifier(imp)]; !ok {
+ fixes = append(fixes, &ImportFix{
+ StmtInfo: *imp,
+ IdentName: p.importIdentifier(imp),
+ FixType: DeleteImport,
+ })
+ continue
+ }
+
+ // An existing import may need to update its import name to be correct.
+ if name := p.importSpecName(imp); name != imp.Name {
+ fixes = append(fixes, &ImportFix{
+ StmtInfo: ImportInfo{
+ Name: name,
+ ImportPath: imp.ImportPath,
+ },
+ IdentName: p.importIdentifier(imp),
+ FixType: SetImportName,
+ })
+ }
+ }
+ }
+ // Collecting fixes involved map iteration, so sort for stability. See
+ // golang/go#59976.
+ sortFixes(fixes)
+
+	// Collect selected fixes in a separate slice so that they can be sorted
+	// separately. Note that these fixes must occur after fixes to existing
+	// imports. TODO(rfindley): figure out why.
+ var selectedFixes []*ImportFix
+ for _, imp := range selected {
+ selectedFixes = append(selectedFixes, &ImportFix{
+ StmtInfo: ImportInfo{
+ Name: p.importSpecName(imp),
+ ImportPath: imp.ImportPath,
+ },
+ IdentName: p.importIdentifier(imp),
+ FixType: AddImport,
+ })
+ }
+ sortFixes(selectedFixes)
+
+ return append(fixes, selectedFixes...), true
+}
+
+func sortFixes(fixes []*ImportFix) {
+ sort.Slice(fixes, func(i, j int) bool {
+ fi, fj := fixes[i], fixes[j]
+ if fi.StmtInfo.ImportPath != fj.StmtInfo.ImportPath {
+ return fi.StmtInfo.ImportPath < fj.StmtInfo.ImportPath
+ }
+ if fi.StmtInfo.Name != fj.StmtInfo.Name {
+ return fi.StmtInfo.Name < fj.StmtInfo.Name
+ }
+ if fi.IdentName != fj.IdentName {
+ return fi.IdentName < fj.IdentName
+ }
+ return fi.FixType < fj.FixType
+ })
+}
+
+// importSpecName gets the import name of imp in the import spec.
+//
+// When the import identifier matches the assumed import name, the import name does
+// not appear in the import spec.
+func (p *pass) importSpecName(imp *ImportInfo) string {
+ // If we did not load the real package names, or the name is already set,
+ // we just return the existing name.
+ if !p.loadRealPackageNames || imp.Name != "" {
+ return imp.Name
+ }
+
+ ident := p.importIdentifier(imp)
+ if ident == ImportPathToAssumedName(imp.ImportPath) {
+ return "" // ident not needed since the assumed and real names are the same.
+ }
+ return ident
+}
+
+// apply will perform the fixes on f in order.
+func apply(fset *token.FileSet, f *ast.File, fixes []*ImportFix) {
+ for _, fix := range fixes {
+ switch fix.FixType {
+ case DeleteImport:
+ astutil.DeleteNamedImport(fset, f, fix.StmtInfo.Name, fix.StmtInfo.ImportPath)
+ case AddImport:
+ astutil.AddNamedImport(fset, f, fix.StmtInfo.Name, fix.StmtInfo.ImportPath)
+ case SetImportName:
+ // Find the matching import path and change the name.
+ for _, spec := range f.Imports {
+ path := strings.Trim(spec.Path.Value, `"`)
+ if path == fix.StmtInfo.ImportPath {
+ spec.Name = &ast.Ident{
+ Name: fix.StmtInfo.Name,
+ NamePos: spec.Pos(),
+ }
+ }
+ }
+ }
+ }
+}
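
AddNamedImport and DeleteNamedImport come from golang.org/x/tools/go/ast/astutil and can be exercised standalone; a brief sketch:

package main

import (
	"go/parser"
	"go/printer"
	"go/token"
	"os"

	"golang.org/x/tools/go/ast/astutil"
)

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", "package demo\n", parser.ParseComments)
	if err != nil {
		panic(err)
	}
	// Equivalent to applying an AddImport fix whose StmtInfo carries a name:
	astutil.AddNamedImport(fset, f, "crand", "crypto/rand")
	printer.Fprint(os.Stdout, fset, f) // prints: import crand "crypto/rand"
}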
+
+// assumeSiblingImportsValid assumes that siblings' use of packages is valid,
+// adding the exports they use.
+func (p *pass) assumeSiblingImportsValid() {
+ for _, f := range p.otherFiles {
+ refs := collectReferences(f)
+ imports := collectImports(f)
+ importsByName := map[string]*ImportInfo{}
+ for _, imp := range imports {
+ importsByName[p.importIdentifier(imp)] = imp
+ }
+ for left, rights := range refs {
+ if imp, ok := importsByName[left]; ok {
+ if m, ok := stdlib[imp.ImportPath]; ok {
+ // We have the stdlib in memory; no need to guess.
+ rights = copyExports(m)
+ }
+ p.addCandidate(imp, &packageInfo{
+ // no name; we already know it.
+ exports: rights,
+ })
+ }
+ }
+ }
+}
+
+// addCandidate adds a candidate import to p, and merges in the information
+// in pkg.
+func (p *pass) addCandidate(imp *ImportInfo, pkg *packageInfo) {
+ p.candidates = append(p.candidates, imp)
+ if existing, ok := p.knownPackages[imp.ImportPath]; ok {
+ if existing.name == "" {
+ existing.name = pkg.name
+ }
+ for export := range pkg.exports {
+ existing.exports[export] = true
+ }
+ } else {
+ p.knownPackages[imp.ImportPath] = pkg
+ }
+}
+
+// fixImports adds and removes imports from f so that all its references are
+// satisfied and there are no unused imports.
+//
+// This is declared as a variable rather than a function so goimports can
+// easily be extended by adding a file with an init function.
+var fixImports = fixImportsDefault
+
+func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) error {
+ fixes, err := getFixes(context.Background(), fset, f, filename, env)
+ if err != nil {
+ return err
+ }
+ apply(fset, f, fixes)
+ return err
+}
+
+// getFixes gets the import fixes that need to be made to f in order to fix the imports.
+// It does not modify the ast.
+func getFixes(ctx context.Context, fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) ([]*ImportFix, error) {
+ abs, err := filepath.Abs(filename)
+ if err != nil {
+ return nil, err
+ }
+ srcDir := filepath.Dir(abs)
+ if env.Logf != nil {
+ env.Logf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir)
+ }
+
+ // First pass: looking only at f, and using the naive algorithm to
+ // derive package names from import paths, see if the file is already
+ // complete. We can't add any imports yet, because we don't know
+ // if missing references are actually package vars.
+ p := &pass{fset: fset, f: f, srcDir: srcDir, env: env}
+ if fixes, done := p.load(); done {
+ return fixes, nil
+ }
+
+ otherFiles := parseOtherFiles(fset, srcDir, filename)
+
+ // Second pass: add information from other files in the same package,
+ // like their package vars and imports.
+ p.otherFiles = otherFiles
+ if fixes, done := p.load(); done {
+ return fixes, nil
+ }
+
+ // Now we can try adding imports from the stdlib.
+ p.assumeSiblingImportsValid()
+ addStdlibCandidates(p, p.missingRefs)
+ if fixes, done := p.fix(); done {
+ return fixes, nil
+ }
+
+ // Third pass: get real package names where we had previously used
+ // the naive algorithm.
+ p = &pass{fset: fset, f: f, srcDir: srcDir, env: env}
+ p.loadRealPackageNames = true
+ p.otherFiles = otherFiles
+ if fixes, done := p.load(); done {
+ return fixes, nil
+ }
+
+ if err := addStdlibCandidates(p, p.missingRefs); err != nil {
+ return nil, err
+ }
+ p.assumeSiblingImportsValid()
+ if fixes, done := p.fix(); done {
+ return fixes, nil
+ }
+
+ // Go look for candidates in $GOPATH, etc. We don't necessarily load
+ // the real exports of sibling imports, so keep assuming their contents.
+ if err := addExternalCandidates(ctx, p, p.missingRefs, filename); err != nil {
+ return nil, err
+ }
+
+ p.lastTry = true
+ fixes, _ := p.fix()
+ return fixes, nil
+}
+
+// MaxRelevance is the highest relevance, used for the standard library.
+// Chosen arbitrarily to match pre-existing gopls code.
+const MaxRelevance = 7.0
+
+// getCandidatePkgs works with the passed callback to find all acceptable packages.
+// It deduplicates by import path, and uses a cached stdlib rather than reading
+// from disk.
+func getCandidatePkgs(ctx context.Context, wrappedCallback *scanCallback, filename, filePkg string, env *ProcessEnv) error {
+ notSelf := func(p *pkg) bool {
+ return p.packageName != filePkg || p.dir != filepath.Dir(filename)
+ }
+ goenv, err := env.goEnv()
+ if err != nil {
+ return err
+ }
+
+ var mu sync.Mutex // to guard asynchronous access to dupCheck
+ dupCheck := map[string]struct{}{}
+
+ // Start off with the standard library.
+ for importPath, exports := range stdlib {
+ p := &pkg{
+ dir: filepath.Join(goenv["GOROOT"], "src", importPath),
+ importPathShort: importPath,
+ packageName: path.Base(importPath),
+ relevance: MaxRelevance,
+ }
+ dupCheck[importPath] = struct{}{}
+ if notSelf(p) && wrappedCallback.dirFound(p) && wrappedCallback.packageNameLoaded(p) {
+ wrappedCallback.exportsLoaded(p, exports)
+ }
+ }
+
+ scanFilter := &scanCallback{
+ rootFound: func(root gopathwalk.Root) bool {
+ // Exclude goroot results -- getting them is relatively expensive, not cached,
+ // and generally redundant with the in-memory version.
+ return root.Type != gopathwalk.RootGOROOT && wrappedCallback.rootFound(root)
+ },
+ dirFound: wrappedCallback.dirFound,
+ packageNameLoaded: func(pkg *pkg) bool {
+ mu.Lock()
+ defer mu.Unlock()
+ if _, ok := dupCheck[pkg.importPathShort]; ok {
+ return false
+ }
+ dupCheck[pkg.importPathShort] = struct{}{}
+ return notSelf(pkg) && wrappedCallback.packageNameLoaded(pkg)
+ },
+ exportsLoaded: func(pkg *pkg, exports []string) {
+ // If we're an x_test, load the package under test's test variant.
+ if strings.HasSuffix(filePkg, "_test") && pkg.dir == filepath.Dir(filename) {
+ var err error
+ _, exports, err = loadExportsFromFiles(ctx, env, pkg.dir, true)
+ if err != nil {
+ return
+ }
+ }
+ wrappedCallback.exportsLoaded(pkg, exports)
+ },
+ }
+ resolver, err := env.GetResolver()
+ if err != nil {
+ return err
+ }
+ return resolver.scan(ctx, scanFilter)
+}
+
+func ScoreImportPaths(ctx context.Context, env *ProcessEnv, paths []string) (map[string]float64, error) {
+ result := make(map[string]float64)
+ resolver, err := env.GetResolver()
+ if err != nil {
+ return nil, err
+ }
+ for _, path := range paths {
+ result[path] = resolver.scoreImportPath(ctx, path)
+ }
+ return result, nil
+}
+
+func PrimeCache(ctx context.Context, env *ProcessEnv) error {
+ // Fully scan the disk for directories, but don't actually read any Go files.
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true
+ },
+ dirFound: func(pkg *pkg) bool {
+ return false
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ return false
+ },
+ }
+ return getCandidatePkgs(ctx, callback, "", "", env)
+}
+
+func candidateImportName(pkg *pkg) string {
+ if ImportPathToAssumedName(pkg.importPathShort) != pkg.packageName {
+ return pkg.packageName
+ }
+ return ""
+}
+
+// GetAllCandidates calls wrapped for each package whose name starts with
+// searchPrefix, and can be imported from filename with the package name filePkg.
+//
+// Beware that the wrapped function may be called multiple times concurrently.
+// TODO(adonovan): encapsulate the concurrency.
+func GetAllCandidates(ctx context.Context, wrapped func(ImportFix), searchPrefix, filename, filePkg string, env *ProcessEnv) error {
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true
+ },
+ dirFound: func(pkg *pkg) bool {
+ if !canUse(filename, pkg.dir) {
+ return false
+ }
+ // Try the assumed package name first, then a simpler path match
+ // in case of packages named vN, which are not uncommon.
+ return strings.HasPrefix(ImportPathToAssumedName(pkg.importPathShort), searchPrefix) ||
+ strings.HasPrefix(path.Base(pkg.importPathShort), searchPrefix)
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ if !strings.HasPrefix(pkg.packageName, searchPrefix) {
+ return false
+ }
+ wrapped(ImportFix{
+ StmtInfo: ImportInfo{
+ ImportPath: pkg.importPathShort,
+ Name: candidateImportName(pkg),
+ },
+ IdentName: pkg.packageName,
+ FixType: AddImport,
+ Relevance: pkg.relevance,
+ })
+ return false
+ },
+ }
+ return getCandidatePkgs(ctx, callback, filename, filePkg, env)
+}
+
+// GetImportPaths calls wrapped for each package whose import path starts with
+// searchPrefix, and can be imported from filename with the package name filePkg.
+func GetImportPaths(ctx context.Context, wrapped func(ImportFix), searchPrefix, filename, filePkg string, env *ProcessEnv) error {
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true
+ },
+ dirFound: func(pkg *pkg) bool {
+ if !canUse(filename, pkg.dir) {
+ return false
+ }
+ return strings.HasPrefix(pkg.importPathShort, searchPrefix)
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ wrapped(ImportFix{
+ StmtInfo: ImportInfo{
+ ImportPath: pkg.importPathShort,
+ Name: candidateImportName(pkg),
+ },
+ IdentName: pkg.packageName,
+ FixType: AddImport,
+ Relevance: pkg.relevance,
+ })
+ return false
+ },
+ }
+ return getCandidatePkgs(ctx, callback, filename, filePkg, env)
+}
+
+// A PackageExport is a package and its exports.
+type PackageExport struct {
+ Fix *ImportFix
+ Exports []string
+}
+
+// GetPackageExports returns all known packages with name pkg and their exports.
+func GetPackageExports(ctx context.Context, wrapped func(PackageExport), searchPkg, filename, filePkg string, env *ProcessEnv) error {
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true
+ },
+ dirFound: func(pkg *pkg) bool {
+ return pkgIsCandidate(filename, references{searchPkg: nil}, pkg)
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ return pkg.packageName == searchPkg
+ },
+ exportsLoaded: func(pkg *pkg, exports []string) {
+ sort.Strings(exports)
+ wrapped(PackageExport{
+ Fix: &ImportFix{
+ StmtInfo: ImportInfo{
+ ImportPath: pkg.importPathShort,
+ Name: candidateImportName(pkg),
+ },
+ IdentName: pkg.packageName,
+ FixType: AddImport,
+ Relevance: pkg.relevance,
+ },
+ Exports: exports,
+ })
+ },
+ }
+ return getCandidatePkgs(ctx, callback, filename, filePkg, env)
+}
+
+var requiredGoEnvVars = []string{"GO111MODULE", "GOFLAGS", "GOINSECURE", "GOMOD", "GOMODCACHE", "GONOPROXY", "GONOSUMDB", "GOPATH", "GOPROXY", "GOROOT", "GOSUMDB", "GOWORK"}
+
+// ProcessEnv contains environment variables and settings that affect the use of
+// the go command, the go/build package, etc.
+type ProcessEnv struct {
+ GocmdRunner *gocommand.Runner
+
+ BuildFlags []string
+ ModFlag string
+ ModFile string
+
+ // SkipPathInScan returns true if the path should be skipped from scans of
+ // the RootCurrentModule root type. The function argument is a clean,
+ // absolute path.
+ SkipPathInScan func(string) bool
+
+ // Env overrides the OS environment, and can be used to specify
+ // GOPROXY, GO111MODULE, etc. PATH cannot be set here, because
+ // exec.Command will not honor it.
+ // Specifying all of requiredGoEnvVars avoids a call to `go env`.
+ Env map[string]string
+
+ WorkingDir string
+
+ // If Logf is non-nil, debug logging is enabled through this function.
+ Logf func(format string, args ...interface{})
+
+ initialized bool
+
+ resolver Resolver
+}
+
+func (e *ProcessEnv) goEnv() (map[string]string, error) {
+ if err := e.init(); err != nil {
+ return nil, err
+ }
+ return e.Env, nil
+}
+
+func (e *ProcessEnv) matchFile(dir, name string) (bool, error) {
+ bctx, err := e.buildContext()
+ if err != nil {
+ return false, err
+ }
+ return bctx.MatchFile(dir, name)
+}
+
+// CopyConfig copies the env's configuration into a new env.
+func (e *ProcessEnv) CopyConfig() *ProcessEnv {
+ copy := &ProcessEnv{
+ GocmdRunner: e.GocmdRunner,
+ initialized: e.initialized,
+ BuildFlags: e.BuildFlags,
+ Logf: e.Logf,
+ WorkingDir: e.WorkingDir,
+ resolver: nil,
+ Env: map[string]string{},
+ }
+ for k, v := range e.Env {
+ copy.Env[k] = v
+ }
+ return copy
+}
+
+func (e *ProcessEnv) init() error {
+ if e.initialized {
+ return nil
+ }
+
+ foundAllRequired := true
+ for _, k := range requiredGoEnvVars {
+ if _, ok := e.Env[k]; !ok {
+ foundAllRequired = false
+ break
+ }
+ }
+ if foundAllRequired {
+ e.initialized = true
+ return nil
+ }
+
+ if e.Env == nil {
+ e.Env = map[string]string{}
+ }
+
+ goEnv := map[string]string{}
+ stdout, err := e.invokeGo(context.TODO(), "env", append([]string{"-json"}, requiredGoEnvVars...)...)
+ if err != nil {
+ return err
+ }
+ if err := json.Unmarshal(stdout.Bytes(), &goEnv); err != nil {
+ return err
+ }
+ for k, v := range goEnv {
+ e.Env[k] = v
+ }
+ e.initialized = true
+ return nil
+}
+
+func (e *ProcessEnv) env() []string {
+ var env []string // the gocommand package will prepend os.Environ.
+ for k, v := range e.Env {
+ env = append(env, k+"="+v)
+ }
+ return env
+}
+
+func (e *ProcessEnv) GetResolver() (Resolver, error) {
+ if e.resolver != nil {
+ return e.resolver, nil
+ }
+ if err := e.init(); err != nil {
+ return nil, err
+ }
+ if len(e.Env["GOMOD"]) == 0 && len(e.Env["GOWORK"]) == 0 {
+ e.resolver = newGopathResolver(e)
+ return e.resolver, nil
+ }
+ e.resolver = newModuleResolver(e)
+ return e.resolver, nil
+}
+
+func (e *ProcessEnv) buildContext() (*build.Context, error) {
+ ctx := build.Default
+ goenv, err := e.goEnv()
+ if err != nil {
+ return nil, err
+ }
+ ctx.GOROOT = goenv["GOROOT"]
+ ctx.GOPATH = goenv["GOPATH"]
+
+ // As of Go 1.14, build.Context has a Dir field
+ // (see golang.org/issue/34860).
+ // Populate it only if present.
+ rc := reflect.ValueOf(&ctx).Elem()
+ dir := rc.FieldByName("Dir")
+ if dir.IsValid() && dir.Kind() == reflect.String {
+ dir.SetString(e.WorkingDir)
+ }
+
+ // Since Go 1.11, go/build.Context.Import may invoke 'go list' depending on
+ // the value in GO111MODULE in the process's environment. We always want to
+ // run in GOPATH mode when calling Import, so we need to prevent this from
+ // happening. In Go 1.16, GO111MODULE defaults to "on", so this problem comes
+ // up more frequently.
+ //
+ // HACK: setting any of the Context I/O hooks prevents Import from invoking
+ // 'go list', regardless of GO111MODULE. This is undocumented, but it's
+ // unlikely to change before GOPATH support is removed.
+ ctx.ReadDir = ioutil.ReadDir
+
+ return &ctx, nil
+}
+
+func (e *ProcessEnv) invokeGo(ctx context.Context, verb string, args ...string) (*bytes.Buffer, error) {
+ inv := gocommand.Invocation{
+ Verb: verb,
+ Args: args,
+ BuildFlags: e.BuildFlags,
+ Env: e.env(),
+ Logf: e.Logf,
+ WorkingDir: e.WorkingDir,
+ }
+ return e.GocmdRunner.Run(ctx, inv)
+}
+
+func addStdlibCandidates(pass *pass, refs references) error {
+ goenv, err := pass.env.goEnv()
+ if err != nil {
+ return err
+ }
+ add := func(pkg string) {
+ // Prevent self-imports.
+ if path.Base(pkg) == pass.f.Name.Name && filepath.Join(goenv["GOROOT"], "src", pkg) == pass.srcDir {
+ return
+ }
+ exports := copyExports(stdlib[pkg])
+ pass.addCandidate(
+ &ImportInfo{ImportPath: pkg},
+ &packageInfo{name: path.Base(pkg), exports: exports})
+ }
+ for left := range refs {
+ if left == "rand" {
+ // Make sure we try crypto/rand before math/rand.
+ add("crypto/rand")
+ add("math/rand")
+ continue
+ }
+ for importPath := range stdlib {
+ if path.Base(importPath) == left {
+ add(importPath)
+ }
+ }
+ }
+ return nil
+}
+
+// A Resolver does the build-system-specific parts of goimports.
+type Resolver interface {
+ // loadPackageNames loads the package names in importPaths.
+ loadPackageNames(importPaths []string, srcDir string) (map[string]string, error)
+ // scan works with callback to search for packages. See scanCallback for details.
+ scan(ctx context.Context, callback *scanCallback) error
+ // loadExports returns the package name and the set of exported symbols
+ // in the package at dir.
+ // loadExports may be called concurrently.
+ loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error)
+ // scoreImportPath returns the relevance for an import path.
+ scoreImportPath(ctx context.Context, path string) float64
+
+ ClearForNewScan()
+}
+
+// A scanCallback controls a call to scan and receives its results.
+// In general, minor errors will be silently discarded; a user should not
+// expect to receive a full series of calls for everything.
+type scanCallback struct {
+ // rootFound is called before scanning a new root dir. If it returns true,
+ // the root will be scanned. Returning false will not necessarily prevent
+ // directories from that root making it to dirFound.
+ rootFound func(gopathwalk.Root) bool
+ // dirFound is called when a directory is found that is possibly a Go package.
+ // pkg will be populated with everything except packageName.
+ // If it returns true, the package's name will be loaded.
+ dirFound func(pkg *pkg) bool
+ // packageNameLoaded is called when a package is found and its name is loaded.
+ // If it returns true, the package's exports will be loaded.
+ packageNameLoaded func(pkg *pkg) bool
+ // exportsLoaded is called when a package's exports have been loaded.
+ exportsLoaded func(pkg *pkg, exports []string)
+}
+
+func addExternalCandidates(ctx context.Context, pass *pass, refs references, filename string) error {
+ ctx, done := event.Start(ctx, "imports.addExternalCandidates")
+ defer done()
+
+ var mu sync.Mutex
+ found := make(map[string][]pkgDistance)
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true // We want everything.
+ },
+ dirFound: func(pkg *pkg) bool {
+ return pkgIsCandidate(filename, refs, pkg)
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ if _, want := refs[pkg.packageName]; !want {
+ return false
+ }
+ if pkg.dir == pass.srcDir && pass.f.Name.Name == pkg.packageName {
+ // The candidate is in the same directory and has the
+ // same package name. Don't try to import ourselves.
+ return false
+ }
+ if !canUse(filename, pkg.dir) {
+ return false
+ }
+ mu.Lock()
+ defer mu.Unlock()
+ found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(pass.srcDir, pkg.dir)})
+ return false // We'll do our own loading after we sort.
+ },
+ }
+ resolver, err := pass.env.GetResolver()
+ if err != nil {
+ return err
+ }
+ if err = resolver.scan(context.Background(), callback); err != nil {
+ return err
+ }
+
+ // Search for imports matching potential package references.
+ type result struct {
+ imp *ImportInfo
+ pkg *packageInfo
+ }
+ results := make(chan result, len(refs))
+
+ ctx, cancel := context.WithCancel(context.TODO())
+ var wg sync.WaitGroup
+ defer func() {
+ cancel()
+ wg.Wait()
+ }()
+ var (
+ firstErr error
+ firstErrOnce sync.Once
+ )
+ for pkgName, symbols := range refs {
+ wg.Add(1)
+ go func(pkgName string, symbols map[string]bool) {
+ defer wg.Done()
+
+ found, err := findImport(ctx, pass, found[pkgName], pkgName, symbols, filename)
+
+ if err != nil {
+ firstErrOnce.Do(func() {
+ firstErr = err
+ cancel()
+ })
+ return
+ }
+
+ if found == nil {
+ return // No matching package.
+ }
+
+ imp := &ImportInfo{
+ ImportPath: found.importPathShort,
+ }
+
+ pkg := &packageInfo{
+ name: pkgName,
+ exports: symbols,
+ }
+ results <- result{imp, pkg}
+ }(pkgName, symbols)
+ }
+ go func() {
+ wg.Wait()
+ close(results)
+ }()
+
+ for result := range results {
+ pass.addCandidate(result.imp, result.pkg)
+ }
+ return firstErr
+}
+
+// notIdentifier reports whether ch is an invalid identifier character.
+func notIdentifier(ch rune) bool {
+ return !('a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' ||
+ '0' <= ch && ch <= '9' ||
+ ch == '_' ||
+ ch >= utf8.RuneSelf && (unicode.IsLetter(ch) || unicode.IsDigit(ch)))
+}
+
+// ImportPathToAssumedName returns the assumed package name of an import path.
+// It does this using only string parsing of the import path.
+// It picks the last element of the path that does not look like a major
+// version, and then picks the valid identifier off the start of that element.
+// It is used to determine if a local rename should be added to an import for
+// clarity.
+// This function could be moved to a standard package and exported for use
+// in other tools.
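+//
+// For example (an illustrative sketch):
+//
+//	ImportPathToAssumedName("net/http")            // "http"
+//	ImportPathToAssumedName("example.com/go-yaml") // "yaml"
+//	ImportPathToAssumedName("example.com/bar/v2")  // "bar"
+//	ImportPathToAssumedName("gopkg.in/yaml.v2")    // "yaml"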
+func ImportPathToAssumedName(importPath string) string {
+ base := path.Base(importPath)
+ if strings.HasPrefix(base, "v") {
+ if _, err := strconv.Atoi(base[1:]); err == nil {
+ dir := path.Dir(importPath)
+ if dir != "." {
+ base = path.Base(dir)
+ }
+ }
+ }
+ base = strings.TrimPrefix(base, "go-")
+ if i := strings.IndexFunc(base, notIdentifier); i >= 0 {
+ base = base[:i]
+ }
+ return base
+}
+
+// gopathResolver implements resolver for GOPATH workspaces.
+type gopathResolver struct {
+ env *ProcessEnv
+ walked bool
+ cache *dirInfoCache
+ scanSema chan struct{} // scanSema prevents concurrent scans.
+}
+
+func newGopathResolver(env *ProcessEnv) *gopathResolver {
+ r := &gopathResolver{
+ env: env,
+ cache: &dirInfoCache{
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
+ },
+ scanSema: make(chan struct{}, 1),
+ }
+ r.scanSema <- struct{}{}
+ return r
+}
+
+func (r *gopathResolver) ClearForNewScan() {
+ <-r.scanSema
+ r.cache = &dirInfoCache{
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
+ }
+ r.walked = false
+ r.scanSema <- struct{}{}
+}
+
+func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
+ names := map[string]string{}
+ bctx, err := r.env.buildContext()
+ if err != nil {
+ return nil, err
+ }
+ for _, path := range importPaths {
+ names[path] = importPathToName(bctx, path, srcDir)
+ }
+ return names, nil
+}
+
+// importPathToName finds out the actual package name, as declared in its .go files.
+func importPathToName(bctx *build.Context, importPath, srcDir string) string {
+ // Fast path for standard library without going to disk.
+ if _, ok := stdlib[importPath]; ok {
+ return path.Base(importPath) // stdlib packages always match their paths.
+ }
+
+ buildPkg, err := bctx.Import(importPath, srcDir, build.FindOnly)
+ if err != nil {
+ return ""
+ }
+ pkgName, err := packageDirToName(buildPkg.Dir)
+ if err != nil {
+ return ""
+ }
+ return pkgName
+}
+
+// packageDirToName is a faster version of build.Import if
+// the only thing desired is the package name. Given a directory,
+// it parses only the package clauses of the .go files there, trusting
+// that the files in the directory are consistent, and returns the
+// first usable package name it finds.
+func packageDirToName(dir string) (packageName string, err error) {
+ d, err := os.Open(dir)
+ if err != nil {
+ return "", err
+ }
+ names, err := d.Readdirnames(-1)
+ d.Close()
+ if err != nil {
+ return "", err
+ }
+ sort.Strings(names) // to have predictable behavior
+ var lastErr error
+ var nfile int
+ for _, name := range names {
+ if !strings.HasSuffix(name, ".go") {
+ continue
+ }
+ if strings.HasSuffix(name, "_test.go") {
+ continue
+ }
+ nfile++
+ fullFile := filepath.Join(dir, name)
+
+ fset := token.NewFileSet()
+ f, err := parser.ParseFile(fset, fullFile, nil, parser.PackageClauseOnly)
+ if err != nil {
+ lastErr = err
+ continue
+ }
+ pkgName := f.Name.Name
+ if pkgName == "documentation" {
+ // Special case from go/build.ImportDir, not
+ // handled by ctx.MatchFile.
+ continue
+ }
+ if pkgName == "main" {
+ // Also skip package main, assuming it's a +build ignore generator or example.
+ // Since you can't import a package main anyway, there's no harm here.
+ continue
+ }
+ return pkgName, nil
+ }
+ if lastErr != nil {
+ return "", lastErr
+ }
+ return "", fmt.Errorf("no importable package found in %d Go files", nfile)
+}
+
+type pkg struct {
+ dir string // absolute file path to pkg directory ("/usr/lib/go/src/net/http")
+ importPathShort string // vendorless import path ("net/http", "a/b")
+ packageName string // package name loaded from source if requested
+ relevance float64 // a weakly-defined score of how relevant a package is; higher is more relevant, with MaxRelevance the best score.
+}
+
+type pkgDistance struct {
+ pkg *pkg
+ distance int // relative distance to target
+}
+
+// byDistanceOrImportPathShortLength sorts by relative distance, breaking
+// ties on the short import path length and then on the import string itself.
+type byDistanceOrImportPathShortLength []pkgDistance
+
+func (s byDistanceOrImportPathShortLength) Len() int { return len(s) }
+func (s byDistanceOrImportPathShortLength) Less(i, j int) bool {
+ di, dj := s[i].distance, s[j].distance
+ if di == -1 {
+ return false
+ }
+ if dj == -1 {
+ return true
+ }
+ if di != dj {
+ return di < dj
+ }
+
+ vi, vj := s[i].pkg.importPathShort, s[j].pkg.importPathShort
+ if len(vi) != len(vj) {
+ return len(vi) < len(vj)
+ }
+ return vi < vj
+}
+func (s byDistanceOrImportPathShortLength) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
+
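+// distance returns a relative-distance score between basepath and
+// targetpath, or -1 if no relative path exists: 0 for the same
+// directory, otherwise one more than the number of separators in the
+// relative path. For example, distance("/a/b", "/a/b/c") is 1 and
+// distance("/a/b", "/a/c") is 2.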
+func distance(basepath, targetpath string) int {
+ p, err := filepath.Rel(basepath, targetpath)
+ if err != nil {
+ return -1
+ }
+ if p == "." {
+ return 0
+ }
+ return strings.Count(p, string(filepath.Separator)) + 1
+}
+
+func (r *gopathResolver) scan(ctx context.Context, callback *scanCallback) error {
+ add := func(root gopathwalk.Root, dir string) {
+ // We assume cached directories have not changed. We can skip them and their
+ // children.
+ if _, ok := r.cache.Load(dir); ok {
+ return
+ }
+
+ importpath := filepath.ToSlash(dir[len(root.Path)+len("/"):])
+ info := directoryPackageInfo{
+ status: directoryScanned,
+ dir: dir,
+ rootType: root.Type,
+ nonCanonicalImportPath: VendorlessPath(importpath),
+ }
+ r.cache.Store(dir, info)
+ }
+ processDir := func(info directoryPackageInfo) {
+ // Skip this directory if we were not able to get the package information successfully.
+ if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
+ return
+ }
+
+ p := &pkg{
+ importPathShort: info.nonCanonicalImportPath,
+ dir: info.dir,
+ relevance: MaxRelevance - 1,
+ }
+ if info.rootType == gopathwalk.RootGOROOT {
+ p.relevance = MaxRelevance
+ }
+
+ if !callback.dirFound(p) {
+ return
+ }
+ var err error
+ p.packageName, err = r.cache.CachePackageName(info)
+ if err != nil {
+ return
+ }
+
+ if !callback.packageNameLoaded(p) {
+ return
+ }
+ if _, exports, err := r.loadExports(ctx, p, false); err == nil {
+ callback.exportsLoaded(p, exports)
+ }
+ }
+ stop := r.cache.ScanAndListen(ctx, processDir)
+ defer stop()
+
+ goenv, err := r.env.goEnv()
+ if err != nil {
+ return err
+ }
+ var roots []gopathwalk.Root
+ roots = append(roots, gopathwalk.Root{Path: filepath.Join(goenv["GOROOT"], "src"), Type: gopathwalk.RootGOROOT})
+ for _, p := range filepath.SplitList(goenv["GOPATH"]) {
+ roots = append(roots, gopathwalk.Root{Path: filepath.Join(p, "src"), Type: gopathwalk.RootGOPATH})
+ }
+ // The callback is not necessarily safe to use in the goroutine below. Process roots eagerly.
+ roots = filterRoots(roots, callback.rootFound)
+ // We can't cancel walks, because we need them to finish to have a usable
+ // cache. Instead, run them in a separate goroutine and detach.
+ scanDone := make(chan struct{})
+ go func() {
+ select {
+ case <-ctx.Done():
+ return
+ case <-r.scanSema:
+ }
+ defer func() { r.scanSema <- struct{}{} }()
+ gopathwalk.Walk(roots, add, gopathwalk.Options{Logf: r.env.Logf, ModulesEnabled: false})
+ close(scanDone)
+ }()
+ select {
+ case <-ctx.Done():
+ case <-scanDone:
+ }
+ return nil
+}
+
+func (r *gopathResolver) scoreImportPath(ctx context.Context, path string) float64 {
+ if _, ok := stdlib[path]; ok {
+ return MaxRelevance
+ }
+ return MaxRelevance - 1
+}
+
+func filterRoots(roots []gopathwalk.Root, include func(gopathwalk.Root) bool) []gopathwalk.Root {
+ var result []gopathwalk.Root
+ for _, root := range roots {
+ if !include(root) {
+ continue
+ }
+ result = append(result, root)
+ }
+ return result
+}
+
+func (r *gopathResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) {
+ if info, ok := r.cache.Load(pkg.dir); ok && !includeTest {
+ return r.cache.CacheExports(ctx, r.env, info)
+ }
+ return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest)
+}
+
+// VendorlessPath returns the devendorized version of the import path ipath.
+// For example, VendorlessPath("foo/bar/vendor/a/b") returns "a/b".
+func VendorlessPath(ipath string) string {
+ // Devendorize for use in import statement.
+ if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 {
+ return ipath[i+len("/vendor/"):]
+ }
+ if strings.HasPrefix(ipath, "vendor/") {
+ return ipath[len("vendor/"):]
+ }
+ return ipath
+}
+
+func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, includeTest bool) (string, []string, error) {
+ // Look for non-test, buildable .go files which could provide exports.
+ all, err := os.ReadDir(dir)
+ if err != nil {
+ return "", nil, err
+ }
+ var files []fs.DirEntry
+ for _, fi := range all {
+ name := fi.Name()
+ if !strings.HasSuffix(name, ".go") || (!includeTest && strings.HasSuffix(name, "_test.go")) {
+ continue
+ }
+ match, err := env.matchFile(dir, fi.Name())
+ if err != nil || !match {
+ continue
+ }
+ files = append(files, fi)
+ }
+
+ if len(files) == 0 {
+ return "", nil, fmt.Errorf("dir %v contains no buildable, non-test .go files", dir)
+ }
+
+ var pkgName string
+ var exports []string
+ fset := token.NewFileSet()
+ for _, fi := range files {
+ select {
+ case <-ctx.Done():
+ return "", nil, ctx.Err()
+ default:
+ }
+
+ fullFile := filepath.Join(dir, fi.Name())
+ f, err := parser.ParseFile(fset, fullFile, nil, 0)
+ if err != nil {
+ if env.Logf != nil {
+ env.Logf("error parsing %v: %v", fullFile, err)
+ }
+ continue
+ }
+ if f.Name.Name == "documentation" {
+ // Special case from go/build.ImportDir, not
+ // handled by MatchFile above.
+ continue
+ }
+ if includeTest && strings.HasSuffix(f.Name.Name, "_test") {
+ // x_test package. We want internal test files only.
+ continue
+ }
+ pkgName = f.Name.Name
+ for name := range f.Scope.Objects {
+ if ast.IsExported(name) {
+ exports = append(exports, name)
+ }
+ }
+ }
+
+ if env.Logf != nil {
+ sortedExports := append([]string(nil), exports...)
+ sort.Strings(sortedExports)
+ env.Logf("loaded exports in dir %v (package %v): %v", dir, pkgName, strings.Join(sortedExports, ", "))
+ }
+ return pkgName, exports, nil
+}
+
+// findImport searches for a package with the given symbols.
+// If no package is found, findImport returns (nil, nil).
+func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgName string, symbols map[string]bool, filename string) (*pkg, error) {
+ // Sort the candidates by distance from the source file, then by
+ // import path length, assuming that shorter package paths are better
+ // than long ones. Note that this sorts by the de-vendored name, so
+ // there's no "penalty" for vendoring.
+ sort.Sort(byDistanceOrImportPathShortLength(candidates))
+ if pass.env.Logf != nil {
+ for i, c := range candidates {
+ pass.env.Logf("%s candidate %d/%d: %v in %v", pkgName, i+1, len(candidates), c.pkg.importPathShort, c.pkg.dir)
+ }
+ }
+ resolver, err := pass.env.GetResolver()
+ if err != nil {
+ return nil, err
+ }
+
+ // Collect exports for packages with matching names.
+ rescv := make([]chan *pkg, len(candidates))
+ for i := range candidates {
+ rescv[i] = make(chan *pkg, 1)
+ }
+ const maxConcurrentPackageImport = 4
+ loadExportsSem := make(chan struct{}, maxConcurrentPackageImport)
+
+ ctx, cancel := context.WithCancel(ctx)
+ var wg sync.WaitGroup
+ defer func() {
+ cancel()
+ wg.Wait()
+ }()
+
+ wg.Add(1)
+ go func() {
+ defer wg.Done()
+ for i, c := range candidates {
+ select {
+ case loadExportsSem <- struct{}{}:
+ case <-ctx.Done():
+ return
+ }
+
+ wg.Add(1)
+ go func(c pkgDistance, resc chan<- *pkg) {
+ defer func() {
+ <-loadExportsSem
+ wg.Done()
+ }()
+
+ if pass.env.Logf != nil {
+ pass.env.Logf("loading exports in dir %s (seeking package %s)", c.pkg.dir, pkgName)
+ }
+ // If we're an x_test, load the package under test's test variant.
+ includeTest := strings.HasSuffix(pass.f.Name.Name, "_test") && c.pkg.dir == pass.srcDir
+ _, exports, err := resolver.loadExports(ctx, c.pkg, includeTest)
+ if err != nil {
+ if pass.env.Logf != nil {
+ pass.env.Logf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err)
+ }
+ resc <- nil
+ return
+ }
+
+ exportsMap := make(map[string]bool, len(exports))
+ for _, sym := range exports {
+ exportsMap[sym] = true
+ }
+
+ // If it doesn't have the right
+ // symbols, send nil to mean no match.
+ for symbol := range symbols {
+ if !exportsMap[symbol] {
+ resc <- nil
+ return
+ }
+ }
+ resc <- c.pkg
+ }(c, rescv[i])
+ }
+ }()
+
+ for _, resc := range rescv {
+ pkg := <-resc
+ if pkg == nil {
+ continue
+ }
+ return pkg, nil
+ }
+ return nil, nil
+}
+
+// pkgIsCandidate reports whether pkg is a candidate for satisfying a
+// reference to one of the package identifiers in refs from the file
+// named by filename.
+//
+// This check is purely lexical and is meant to be as fast as possible
+// because it's run over all $GOPATH directories to filter out poor
+// candidates in order to limit the CPU and I/O later parsing the
+// exports in candidate packages.
+//
+// filename is the file being formatted.
+// refs holds the package identifiers being searched for, like "client"
+// (when searching for "client.New").
+func pkgIsCandidate(filename string, refs references, pkg *pkg) bool {
+ // Check "internal" and "vendor" visibility:
+ if !canUse(filename, pkg.dir) {
+ return false
+ }
+
+ // Speed optimization to minimize disk I/O:
+ // the last two components on disk must contain the
+ // package name somewhere.
+ //
+ // This permits mismatch naming like directory
+ // "go-foo" being package "foo", or "pkg.v3" being "pkg",
+ // or directory "google.golang.org/api/cloudbilling/v1"
+ // being package "cloudbilling", but doesn't
+ // permit a directory "foo" to be package
+ // "bar", which is strongly discouraged
+ // anyway. There's no reason goimports needs
+ // to be slow just to accommodate that.
+ for pkgIdent := range refs {
+ lastTwo := lastTwoComponents(pkg.importPathShort)
+ if strings.Contains(lastTwo, pkgIdent) {
+ return true
+ }
+ if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(pkgIdent) {
+ lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
+ if strings.Contains(lastTwo, pkgIdent) {
+ return true
+ }
+ }
+ }
+ return false
+}
+
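+// hasHyphenOrUpperASCII reports whether s contains an ASCII hyphen or
+// uppercase letter.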
+func hasHyphenOrUpperASCII(s string) bool {
+ for i := 0; i < len(s); i++ {
+ b := s[i]
+ if b == '-' || ('A' <= b && b <= 'Z') {
+ return true
+ }
+ }
+ return false
+}
+
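+// lowerASCIIAndRemoveHyphen returns s with ASCII uppercase letters
+// lowered and hyphens removed; for example, "Go-Cloud" becomes "gocloud".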
+func lowerASCIIAndRemoveHyphen(s string) (ret string) {
+ buf := make([]byte, 0, len(s))
+ for i := 0; i < len(s); i++ {
+ b := s[i]
+ switch {
+ case b == '-':
+ continue
+ case 'A' <= b && b <= 'Z':
+ buf = append(buf, b+('a'-'A'))
+ default:
+ buf = append(buf, b)
+ }
+ }
+ return string(buf)
+}
+
+// canUse reports whether the package in dir is usable from filename,
+// respecting the Go "internal" and "vendor" visibility rules.
+func canUse(filename, dir string) bool {
+ // Fast path check, before any allocations. If it doesn't contain vendor
+ // or internal, it's not tricky:
+ // Note that this can false-negative on directories like "notinternal",
+ // but we check it correctly below. This is just a fast path.
+ if !strings.Contains(dir, "vendor") && !strings.Contains(dir, "internal") {
+ return true
+ }
+
+ dirSlash := filepath.ToSlash(dir)
+ if !strings.Contains(dirSlash, "/vendor/") && !strings.Contains(dirSlash, "/internal/") && !strings.HasSuffix(dirSlash, "/internal") {
+ return true
+ }
+ // Vendor or internal directory only visible from children of parent.
+ // That means the path from the current directory to the target directory
+ // can contain ../vendor or ../internal but not ../foo/vendor or ../foo/internal
+ // or bar/vendor or bar/internal.
+ // After stripping all the leading ../, the only okay place to see vendor or internal
+ // is at the very beginning of the path.
+ absfile, err := filepath.Abs(filename)
+ if err != nil {
+ return false
+ }
+ absdir, err := filepath.Abs(dir)
+ if err != nil {
+ return false
+ }
+ rel, err := filepath.Rel(absfile, absdir)
+ if err != nil {
+ return false
+ }
+ relSlash := filepath.ToSlash(rel)
+ if i := strings.LastIndex(relSlash, "../"); i >= 0 {
+ relSlash = relSlash[i+len("../"):]
+ }
+ return !strings.Contains(relSlash, "/vendor/") && !strings.Contains(relSlash, "/internal/") && !strings.HasSuffix(relSlash, "/internal")
+}
+
+// lastTwoComponents returns at most the last two path components
+// of v, using either / or \ as the path separator.
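+// For example, lastTwoComponents("a/b/c/d") returns "/c/d".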
+func lastTwoComponents(v string) string {
+ nslash := 0
+ for i := len(v) - 1; i >= 0; i-- {
+ if v[i] == '/' || v[i] == '\\' {
+ nslash++
+ if nslash == 2 {
+ return v[i:]
+ }
+ }
+ }
+ return v
+}
+
+type visitFn func(node ast.Node) ast.Visitor
+
+func (fn visitFn) Visit(node ast.Node) ast.Visitor {
+ return fn(node)
+}
+
+func copyExports(pkg []string) map[string]bool {
+ m := make(map[string]bool, len(pkg))
+ for _, v := range pkg {
+ m[v] = true
+ }
+ return m
+}
diff --git a/vendor/golang.org/x/tools/internal/imports/imports.go b/vendor/golang.org/x/tools/internal/imports/imports.go
new file mode 100644
index 000000000..58e637b90
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/imports/imports.go
@@ -0,0 +1,356 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:generate go run mkstdlib.go
+
+// Package imports implements a Go pretty-printer (like package "go/format")
+// that also adds or removes import statements as necessary.
+package imports
+
+import (
+ "bufio"
+ "bytes"
+ "context"
+ "fmt"
+ "go/ast"
+ "go/format"
+ "go/parser"
+ "go/printer"
+ "go/token"
+ "io"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/go/ast/astutil"
+ "golang.org/x/tools/internal/event"
+)
+
+// Options is golang.org/x/tools/imports.Options with extra internal-only options.
+type Options struct {
+ Env *ProcessEnv // The environment to use. Note: this contains the cached module and filesystem state.
+
+ // LocalPrefix is a comma-separated string of import path prefixes, which, if
+ // set, instructs Process to sort the import paths with the given prefixes
+ // into another group after 3rd-party packages.
+ LocalPrefix string
+
+ Fragment bool // Accept fragment of a source file (no package statement)
+ AllErrors bool // Report all errors (not just the first 10 on different lines)
+
+ Comments bool // Print comments (true if nil *Options provided)
+ TabIndent bool // Use tabs for indent (true if nil *Options provided)
+ TabWidth int // Tab width (8 if nil *Options provided)
+
+ FormatOnly bool // Disable the insertion and deletion of imports
+}
+
+// Process implements golang.org/x/tools/imports.Process with explicit context in opt.Env.
+func Process(filename string, src []byte, opt *Options) (formatted []byte, err error) {
+ fileSet := token.NewFileSet()
+ file, adjust, err := parse(fileSet, filename, src, opt)
+ if err != nil {
+ return nil, err
+ }
+
+ if !opt.FormatOnly {
+ if err := fixImports(fileSet, file, filename, opt.Env); err != nil {
+ return nil, err
+ }
+ }
+ return formatFile(fileSet, file, src, adjust, opt)
+}
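+
+// A minimal usage sketch (the runner, path, and source below are
+// hypothetical, not part of this package):
+//
+//	env := &ProcessEnv{
+//		GocmdRunner: &gocommand.Runner{},
+//		WorkingDir:  "/path/to/project",
+//	}
+//	opt := &Options{Env: env, Comments: true, TabIndent: true, TabWidth: 8}
+//	formatted, err := Process("/path/to/project/main.go", src, opt)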
+
+// FixImports returns a list of fixes to the imports that, when applied,
+// will leave the imports in the same state as Process. src and opt must
+// be specified.
+//
+// Note that filename's directory influences which imports can be chosen,
+// so it is important that filename be accurate.
+func FixImports(ctx context.Context, filename string, src []byte, opt *Options) (fixes []*ImportFix, err error) {
+ ctx, done := event.Start(ctx, "imports.FixImports")
+ defer done()
+
+ fileSet := token.NewFileSet()
+ file, _, err := parse(fileSet, filename, src, opt)
+ if err != nil {
+ return nil, err
+ }
+
+ return getFixes(ctx, fileSet, file, filename, opt.Env)
+}
+
+// ApplyFixes applies all of the fixes to the file and formats it. extraMode
+// is added in when parsing the file. src and opt must be specified, but no
+// env is needed.
+func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, extraMode parser.Mode) (formatted []byte, err error) {
+ // Don't use parse() -- we don't care about fragments or statement lists
+ // here, and we need to work with unparseable files.
+ fileSet := token.NewFileSet()
+ parserMode := parser.Mode(0)
+ if opt.Comments {
+ parserMode |= parser.ParseComments
+ }
+ if opt.AllErrors {
+ parserMode |= parser.AllErrors
+ }
+ parserMode |= extraMode
+
+ file, err := parser.ParseFile(fileSet, filename, src, parserMode)
+ if file == nil {
+ return nil, err
+ }
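+ // Even if err is non-nil, file may be a usable partial parse; per the
+ // note above, we must work with unparseable files.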
+
+ // Apply the fixes to the file.
+ apply(fileSet, file, fixes)
+
+ return formatFile(fileSet, file, src, nil, opt)
+}
+
+// formatFile formats the file syntax tree.
+// It may mutate the token.FileSet.
+//
+// If an adjust function is provided, it is called after formatting
+// with the original source (formatFile's src parameter) and the
+// formatted file, and returns the postprocessed result.
+func formatFile(fset *token.FileSet, file *ast.File, src []byte, adjust func(orig []byte, src []byte) []byte, opt *Options) ([]byte, error) {
+ mergeImports(file)
+ sortImports(opt.LocalPrefix, fset.File(file.Pos()), file)
+ var spacesBefore []string // import paths we need spaces before
+ for _, impSection := range astutil.Imports(fset, file) {
+ // Within each block of contiguous imports, see if any
+ // import lines are in different group numbers. If so,
+ // we'll need to put a space between them so it's
+ // compatible with gofmt.
+ lastGroup := -1
+ for _, importSpec := range impSection {
+ importPath, _ := strconv.Unquote(importSpec.Path.Value)
+ groupNum := importGroup(opt.LocalPrefix, importPath)
+ if groupNum != lastGroup && lastGroup != -1 {
+ spacesBefore = append(spacesBefore, importPath)
+ }
+ lastGroup = groupNum
+ }
+
+ }
+
+ printerMode := printer.UseSpaces
+ if opt.TabIndent {
+ printerMode |= printer.TabIndent
+ }
+ printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth}
+
+ var buf bytes.Buffer
+ err := printConfig.Fprint(&buf, fset, file)
+ if err != nil {
+ return nil, err
+ }
+ out := buf.Bytes()
+ if adjust != nil {
+ out = adjust(src, out)
+ }
+ if len(spacesBefore) > 0 {
+ out, err = addImportSpaces(bytes.NewReader(out), spacesBefore)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ out, err = format.Source(out)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+// parse parses src, which was read from filename,
+// as a Go source file or statement list.
+func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast.File, func(orig, src []byte) []byte, error) {
+ parserMode := parser.Mode(0)
+ if opt.Comments {
+ parserMode |= parser.ParseComments
+ }
+ if opt.AllErrors {
+ parserMode |= parser.AllErrors
+ }
+
+ // Try as whole source file.
+ file, err := parser.ParseFile(fset, filename, src, parserMode)
+ if err == nil {
+ return file, nil, nil
+ }
+ // If the error is that the source file didn't begin with a
+ // package line and we accept fragmented input, fall through to
+ // try as a source fragment. Stop and return on any other error.
+ if !opt.Fragment || !strings.Contains(err.Error(), "expected 'package'") {
+ return nil, nil, err
+ }
+
+ // If this is a declaration list, make it a source file
+ // by inserting a package clause.
+ // Insert using a ;, not a newline, so that parse errors are on
+ // the correct line.
+ const prefix = "package main;"
+ psrc := append([]byte(prefix), src...)
+ file, err = parser.ParseFile(fset, filename, psrc, parserMode)
+ if err == nil {
+ // Gofmt will turn the ; into a \n.
+ // Do that ourselves now and update the file contents,
+ // so that positions and line numbers are correct going forward.
+ psrc[len(prefix)-1] = '\n'
+ fset.File(file.Package).SetLinesForContent(psrc)
+
+ // If a main function exists, we will assume this is a main
+ // package and leave the file.
+ if containsMainFunc(file) {
+ return file, nil, nil
+ }
+
+ adjust := func(orig, src []byte) []byte {
+ // Remove the package clause.
+ src = src[len(prefix):]
+ return matchSpace(orig, src)
+ }
+ return file, adjust, nil
+ }
+ // If the error is that the source file didn't begin with a
+ // declaration, fall through to try as a statement list.
+ // Stop and return on any other error.
+ if !strings.Contains(err.Error(), "expected declaration") {
+ return nil, nil, err
+ }
+
+ // If this is a statement list, make it a source file
+ // by inserting a package clause and turning the list
+ // into a function body. This handles expressions too.
+ // Insert using a ;, not a newline, so that the line numbers
+ // in fsrc match the ones in src.
+ fsrc := append(append([]byte("package p; func _() {"), src...), '}')
+ file, err = parser.ParseFile(fset, filename, fsrc, parserMode)
+ if err == nil {
+ adjust := func(orig, src []byte) []byte {
+ // Remove the wrapping.
+ // Gofmt has turned the ; into a \n\n.
+ src = src[len("package p\n\nfunc _() {"):]
+ src = src[:len(src)-len("}\n")]
+ // Gofmt has also indented the function body one level.
+ // Remove that indent.
+ src = bytes.Replace(src, []byte("\n\t"), []byte("\n"), -1)
+ return matchSpace(orig, src)
+ }
+ return file, adjust, nil
+ }
+
+ // Failed, and out of options.
+ return nil, nil, err
+}
+
+// containsMainFunc reports whether file contains a function declaration
+// with the signature 'func main()'.
+func containsMainFunc(file *ast.File) bool {
+ for _, decl := range file.Decls {
+ if f, ok := decl.(*ast.FuncDecl); ok {
+ if f.Name.Name != "main" {
+ continue
+ }
+
+ if len(f.Type.Params.List) != 0 {
+ continue
+ }
+
+ if f.Type.Results != nil && len(f.Type.Results.List) != 0 {
+ continue
+ }
+
+ return true
+ }
+ }
+
+ return false
+}
+
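+// cutSpace splits b into its leading whitespace, the middle content, and
+// its trailing whitespace, where whitespace means spaces, tabs, and
+// newlines. For example, cutSpace([]byte("  hi \n")) returns
+// ("  ", "hi", " \n").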
+func cutSpace(b []byte) (before, middle, after []byte) {
+ i := 0
+ for i < len(b) && (b[i] == ' ' || b[i] == '\t' || b[i] == '\n') {
+ i++
+ }
+ j := len(b)
+ for j > 0 && (b[j-1] == ' ' || b[j-1] == '\t' || b[j-1] == '\n') {
+ j--
+ }
+ if i <= j {
+ return b[:i], b[i:j], b[j:]
+ }
+ return nil, nil, b[j:]
+}
+
+// matchSpace reformats src to use the same space context as orig.
+// 1. If orig begins with blank lines, matchSpace inserts them at the beginning of src.
+// 2. matchSpace copies the indentation of the first non-blank line in orig
+// to every non-blank line in src.
+// 3. matchSpace copies the trailing space from orig and uses it in place
+// of src's trailing space.
+func matchSpace(orig []byte, src []byte) []byte {
+ before, _, after := cutSpace(orig)
+ i := bytes.LastIndex(before, []byte{'\n'})
+ before, indent := before[:i+1], before[i+1:]
+
+ _, src, _ = cutSpace(src)
+
+ var b bytes.Buffer
+ b.Write(before)
+ for len(src) > 0 {
+ line := src
+ if i := bytes.IndexByte(line, '\n'); i >= 0 {
+ line, src = line[:i+1], line[i+1:]
+ } else {
+ src = nil
+ }
+ if len(line) > 0 && line[0] != '\n' { // not blank
+ b.Write(indent)
+ }
+ b.Write(line)
+ }
+ b.Write(after)
+ return b.Bytes()
+}
+
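+// impLine matches an indented import spec line, capturing the quoted
+// import path: e.g. it matches `	foo "bar/baz"` and captures bar/baz.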
+var impLine = regexp.MustCompile(`^\s+(?:[\w\.]+\s+)?"(.+?)"`)
+
+func addImportSpaces(r io.Reader, breaks []string) ([]byte, error) {
+ var out bytes.Buffer
+ in := bufio.NewReader(r)
+ inImports := false
+ done := false
+ for {
+ s, err := in.ReadString('\n')
+ if err == io.EOF {
+ break
+ } else if err != nil {
+ return nil, err
+ }
+
+ if !inImports && !done && strings.HasPrefix(s, "import") {
+ inImports = true
+ }
+ if inImports && (strings.HasPrefix(s, "var") ||
+ strings.HasPrefix(s, "func") ||
+ strings.HasPrefix(s, "const") ||
+ strings.HasPrefix(s, "type")) {
+ done = true
+ inImports = false
+ }
+ if inImports && len(breaks) > 0 {
+ if m := impLine.FindStringSubmatch(s); m != nil {
+ if m[1] == breaks[0] {
+ out.WriteByte('\n')
+ breaks = breaks[1:]
+ }
+ }
+ }
+
+ fmt.Fprint(&out, s)
+ }
+ return out.Bytes(), nil
+}
diff --git a/vendor/golang.org/x/tools/internal/imports/mod.go b/vendor/golang.org/x/tools/internal/imports/mod.go
new file mode 100644
index 000000000..5f4d435d3
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/imports/mod.go
@@ -0,0 +1,723 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package imports
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "os"
+ "path"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+
+ "golang.org/x/mod/module"
+ "golang.org/x/tools/internal/event"
+ "golang.org/x/tools/internal/gocommand"
+ "golang.org/x/tools/internal/gopathwalk"
+)
+
+// ModuleResolver implements resolver for modules using the go command as little
+// as feasible.
+type ModuleResolver struct {
+ env *ProcessEnv
+ moduleCacheDir string
+ dummyVendorMod *gocommand.ModuleJSON // If vendoring is enabled, the pseudo-module that represents the /vendor directory.
+ roots []gopathwalk.Root
+ scanSema chan struct{} // scanSema prevents concurrent scans and guards scannedRoots.
+ scannedRoots map[gopathwalk.Root]bool
+
+ initialized bool
+ mains []*gocommand.ModuleJSON
+ mainByDir map[string]*gocommand.ModuleJSON
+ modsByModPath []*gocommand.ModuleJSON // All modules, ordered by # of path components in module Path...
+ modsByDir []*gocommand.ModuleJSON // ...or number of path components in their Dir.
+
+ // moduleCacheCache stores information about the module cache.
+ moduleCacheCache *dirInfoCache
+ otherCache *dirInfoCache
+}
+
+func newModuleResolver(e *ProcessEnv) *ModuleResolver {
+ r := &ModuleResolver{
+ env: e,
+ scanSema: make(chan struct{}, 1),
+ }
+ r.scanSema <- struct{}{}
+ return r
+}
+
+func (r *ModuleResolver) init() error {
+ if r.initialized {
+ return nil
+ }
+
+ goenv, err := r.env.goEnv()
+ if err != nil {
+ return err
+ }
+ inv := gocommand.Invocation{
+ BuildFlags: r.env.BuildFlags,
+ ModFlag: r.env.ModFlag,
+ ModFile: r.env.ModFile,
+ Env: r.env.env(),
+ Logf: r.env.Logf,
+ WorkingDir: r.env.WorkingDir,
+ }
+
+ vendorEnabled := false
+ var mainModVendor *gocommand.ModuleJSON
+
+ // Module vendor directories are ignored in workspace mode:
+ // https://go.googlesource.com/proposal/+/master/design/45713-workspace.md
+ if len(r.env.Env["GOWORK"]) == 0 {
+ vendorEnabled, mainModVendor, err = gocommand.VendorEnabled(context.TODO(), inv, r.env.GocmdRunner)
+ if err != nil {
+ return err
+ }
+ }
+
+ if mainModVendor != nil && vendorEnabled {
+ // Vendor mode is on, so all the non-Main modules are irrelevant,
+ // and we need to search /vendor for everything.
+ r.mains = []*gocommand.ModuleJSON{mainModVendor}
+ r.dummyVendorMod = &gocommand.ModuleJSON{
+ Path: "",
+ Dir: filepath.Join(mainModVendor.Dir, "vendor"),
+ }
+ r.modsByModPath = []*gocommand.ModuleJSON{mainModVendor, r.dummyVendorMod}
+ r.modsByDir = []*gocommand.ModuleJSON{mainModVendor, r.dummyVendorMod}
+ } else {
+ // Vendor mode is off, so run go list -m ... to find everything.
+ err := r.initAllMods()
+ // We expect an error when running outside of a module with
+ // GO111MODULE=on. Other errors are fatal.
+ if err != nil {
+ if errMsg := err.Error(); !strings.Contains(errMsg, "working directory is not part of a module") && !strings.Contains(errMsg, "go.mod file not found") {
+ return err
+ }
+ }
+ }
+
+ if gmc := r.env.Env["GOMODCACHE"]; gmc != "" {
+ r.moduleCacheDir = gmc
+ } else {
+ gopaths := filepath.SplitList(goenv["GOPATH"])
+ if len(gopaths) == 0 {
+ return fmt.Errorf("empty GOPATH")
+ }
+ r.moduleCacheDir = filepath.Join(gopaths[0], "/pkg/mod")
+ }
+
+ sort.Slice(r.modsByModPath, func(i, j int) bool {
+ count := func(x int) int {
+ return strings.Count(r.modsByModPath[x].Path, "/")
+ }
+ return count(j) < count(i) // descending order
+ })
+ sort.Slice(r.modsByDir, func(i, j int) bool {
+ count := func(x int) int {
+ return strings.Count(r.modsByDir[x].Dir, string(filepath.Separator))
+ }
+ return count(j) < count(i) // descending order
+ })
+
+ r.roots = []gopathwalk.Root{
+ {Path: filepath.Join(goenv["GOROOT"], "/src"), Type: gopathwalk.RootGOROOT},
+ }
+ r.mainByDir = make(map[string]*gocommand.ModuleJSON)
+ for _, main := range r.mains {
+ r.roots = append(r.roots, gopathwalk.Root{Path: main.Dir, Type: gopathwalk.RootCurrentModule})
+ r.mainByDir[main.Dir] = main
+ }
+ if vendorEnabled {
+ r.roots = append(r.roots, gopathwalk.Root{Path: r.dummyVendorMod.Dir, Type: gopathwalk.RootOther})
+ } else {
+ addDep := func(mod *gocommand.ModuleJSON) {
+ if mod.Replace == nil {
+ // This is redundant with the cache, but we'll skip it cheaply enough.
+ r.roots = append(r.roots, gopathwalk.Root{Path: mod.Dir, Type: gopathwalk.RootModuleCache})
+ } else {
+ r.roots = append(r.roots, gopathwalk.Root{Path: mod.Dir, Type: gopathwalk.RootOther})
+ }
+ }
+ // Walk dependent modules before scanning the full mod cache, direct deps first.
+ for _, mod := range r.modsByModPath {
+ if !mod.Indirect && !mod.Main {
+ addDep(mod)
+ }
+ }
+ for _, mod := range r.modsByModPath {
+ if mod.Indirect && !mod.Main {
+ addDep(mod)
+ }
+ }
+ r.roots = append(r.roots, gopathwalk.Root{Path: r.moduleCacheDir, Type: gopathwalk.RootModuleCache})
+ }
+
+ r.scannedRoots = map[gopathwalk.Root]bool{}
+ if r.moduleCacheCache == nil {
+ r.moduleCacheCache = &dirInfoCache{
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
+ }
+ }
+ if r.otherCache == nil {
+ r.otherCache = &dirInfoCache{
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
+ }
+ }
+ r.initialized = true
+ return nil
+}
+
+func (r *ModuleResolver) initAllMods() error {
+ stdout, err := r.env.invokeGo(context.TODO(), "list", "-m", "-e", "-json", "...")
+ if err != nil {
+ return err
+ }
+ for dec := json.NewDecoder(stdout); dec.More(); {
+ mod := &gocommand.ModuleJSON{}
+ if err := dec.Decode(mod); err != nil {
+ return err
+ }
+ if mod.Dir == "" {
+ if r.env.Logf != nil {
+ r.env.Logf("module %v has not been downloaded and will be ignored", mod.Path)
+ }
+ // Can't do anything with a module that's not downloaded.
+ continue
+ }
+ // golang/go#36193: the go command doesn't always clean paths.
+ mod.Dir = filepath.Clean(mod.Dir)
+ r.modsByModPath = append(r.modsByModPath, mod)
+ r.modsByDir = append(r.modsByDir, mod)
+ if mod.Main {
+ r.mains = append(r.mains, mod)
+ }
+ }
+ return nil
+}
+
+func (r *ModuleResolver) ClearForNewScan() {
+ <-r.scanSema
+ r.scannedRoots = map[gopathwalk.Root]bool{}
+ r.otherCache = &dirInfoCache{
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
+ }
+ r.scanSema <- struct{}{}
+}
+
+func (r *ModuleResolver) ClearForNewMod() {
+ <-r.scanSema
+ *r = ModuleResolver{
+ env: r.env,
+ moduleCacheCache: r.moduleCacheCache,
+ otherCache: r.otherCache,
+ scanSema: r.scanSema,
+ }
+ r.init()
+ r.scanSema <- struct{}{}
+}
+
+// findPackage returns the module and directory that contains the package at
+// the given import path, or returns nil, "" if no module is in scope.
+func (r *ModuleResolver) findPackage(importPath string) (*gocommand.ModuleJSON, string) {
+ // This can't find packages in the stdlib, but that's harmless for all
+ // the existing code paths.
+ for _, m := range r.modsByModPath {
+ if !strings.HasPrefix(importPath, m.Path) {
+ continue
+ }
+ pathInModule := importPath[len(m.Path):]
+ pkgDir := filepath.Join(m.Dir, pathInModule)
+ if r.dirIsNestedModule(pkgDir, m) {
+ continue
+ }
+
+ if info, ok := r.cacheLoad(pkgDir); ok {
+ if loaded, err := info.reachedStatus(nameLoaded); loaded {
+ if err != nil {
+ continue // No package in this dir.
+ }
+ return m, pkgDir
+ }
+ if scanned, err := info.reachedStatus(directoryScanned); scanned && err != nil {
+ continue // Dir is unreadable, etc.
+ }
+ // This is slightly wrong: a directory doesn't have to have an
+ // importable package to count as a package for package-to-module
+ // resolution. package main or _test files should count but
+ // don't.
+ // TODO(heschi): fix this.
+ if _, err := r.cachePackageName(info); err == nil {
+ return m, pkgDir
+ }
+ }
+
+ // Not cached. Read the filesystem.
+ pkgFiles, err := os.ReadDir(pkgDir)
+ if err != nil {
+ continue
+ }
+ // A module only contains a package if it has buildable go
+ // files in that directory. If not, it could be provided by an
+ // outer module. See #29736.
+ for _, fi := range pkgFiles {
+ if ok, _ := r.env.matchFile(pkgDir, fi.Name()); ok {
+ return m, pkgDir
+ }
+ }
+ }
+ return nil, ""
+}
+
+func (r *ModuleResolver) cacheLoad(dir string) (directoryPackageInfo, bool) {
+ if info, ok := r.moduleCacheCache.Load(dir); ok {
+ return info, ok
+ }
+ return r.otherCache.Load(dir)
+}
+
+func (r *ModuleResolver) cacheStore(info directoryPackageInfo) {
+ if info.rootType == gopathwalk.RootModuleCache {
+ r.moduleCacheCache.Store(info.dir, info)
+ } else {
+ r.otherCache.Store(info.dir, info)
+ }
+}
+
+func (r *ModuleResolver) cacheKeys() []string {
+ return append(r.moduleCacheCache.Keys(), r.otherCache.Keys()...)
+}
+
+// cachePackageName caches the package name for a dir already in the cache.
+func (r *ModuleResolver) cachePackageName(info directoryPackageInfo) (string, error) {
+ if info.rootType == gopathwalk.RootModuleCache {
+ return r.moduleCacheCache.CachePackageName(info)
+ }
+ return r.otherCache.CachePackageName(info)
+}
+
+func (r *ModuleResolver) cacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) {
+ if info.rootType == gopathwalk.RootModuleCache {
+ return r.moduleCacheCache.CacheExports(ctx, env, info)
+ }
+ return r.otherCache.CacheExports(ctx, env, info)
+}
+
+// findModuleByDir returns the module that contains dir, or nil if no such
+// module is in scope.
+func (r *ModuleResolver) findModuleByDir(dir string) *gocommand.ModuleJSON {
+ // This is quite tricky and may not be correct. dir could be:
+ // - a package in the main module.
+ // - a replace target underneath the main module's directory.
+ // - a nested module in the above.
+ // - a replace target somewhere totally random.
+ // - a nested module in the above.
+ // - in the mod cache.
+ // - in /vendor/ in -mod=vendor mode.
+ // - nested module? Dunno.
+ // Rumor has it that replace targets cannot contain other replace targets.
+ //
+ // Note that it is critical here that modsByDir is sorted to have deeper dirs
+ // first. This ensures that findModuleByDir finds the innermost module.
+ // See also golang/go#56291.
+ for _, m := range r.modsByDir {
+ if !strings.HasPrefix(dir, m.Dir) {
+ continue
+ }
+
+ if r.dirIsNestedModule(dir, m) {
+ continue
+ }
+
+ return m
+ }
+ return nil
+}
+
+// dirIsNestedModule reports if dir is contained in a nested module underneath
+// mod, not actually in mod.
+func (r *ModuleResolver) dirIsNestedModule(dir string, mod *gocommand.ModuleJSON) bool {
+ if !strings.HasPrefix(dir, mod.Dir) {
+ return false
+ }
+ if r.dirInModuleCache(dir) {
+ // Nested modules in the module cache are pruned,
+ // so it cannot be a nested module.
+ return false
+ }
+ if mod != nil && mod == r.dummyVendorMod {
+ // The /vendor pseudomodule is flattened and doesn't actually count.
+ return false
+ }
+ modDir, _ := r.modInfo(dir)
+ if modDir == "" {
+ return false
+ }
+ return modDir != mod.Dir
+}
+
+func (r *ModuleResolver) modInfo(dir string) (modDir string, modName string) {
+ readModName := func(modFile string) string {
+ modBytes, err := os.ReadFile(modFile)
+ if err != nil {
+ return ""
+ }
+ return modulePath(modBytes)
+ }
+
+ if r.dirInModuleCache(dir) {
+ if matches := modCacheRegexp.FindStringSubmatch(dir); len(matches) == 3 {
+ index := strings.Index(dir, matches[1]+"@"+matches[2])
+ modDir := filepath.Join(dir[:index], matches[1]+"@"+matches[2])
+ return modDir, readModName(filepath.Join(modDir, "go.mod"))
+ }
+ }
+ for {
+ if info, ok := r.cacheLoad(dir); ok {
+ return info.moduleDir, info.moduleName
+ }
+ f := filepath.Join(dir, "go.mod")
+ info, err := os.Stat(f)
+ if err == nil && !info.IsDir() {
+ return dir, readModName(f)
+ }
+
+ d := filepath.Dir(dir)
+ if len(d) >= len(dir) {
+ return "", "" // reached top of file system, no go.mod
+ }
+ dir = d
+ }
+}
+
+func (r *ModuleResolver) dirInModuleCache(dir string) bool {
+ if r.moduleCacheDir == "" {
+ return false
+ }
+ return strings.HasPrefix(dir, r.moduleCacheDir)
+}
+
+func (r *ModuleResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
+ if err := r.init(); err != nil {
+ return nil, err
+ }
+ names := map[string]string{}
+ for _, path := range importPaths {
+ _, packageDir := r.findPackage(path)
+ if packageDir == "" {
+ continue
+ }
+ name, err := packageDirToName(packageDir)
+ if err != nil {
+ continue
+ }
+ names[path] = name
+ }
+ return names, nil
+}
+
+func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error {
+ ctx, done := event.Start(ctx, "imports.ModuleResolver.scan")
+ defer done()
+
+ if err := r.init(); err != nil {
+ return err
+ }
+
+ processDir := func(info directoryPackageInfo) {
+ // Skip this directory if we were not able to get the package information successfully.
+ if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
+ return
+ }
+ pkg, err := r.canonicalize(info)
+ if err != nil {
+ return
+ }
+
+ if !callback.dirFound(pkg) {
+ return
+ }
+ pkg.packageName, err = r.cachePackageName(info)
+ if err != nil {
+ return
+ }
+
+ if !callback.packageNameLoaded(pkg) {
+ return
+ }
+ _, exports, err := r.loadExports(ctx, pkg, false)
+ if err != nil {
+ return
+ }
+ callback.exportsLoaded(pkg, exports)
+ }
+
+ // Start processing everything in the cache, and listen for the new stuff
+ // we discover in the walk below.
+ stop1 := r.moduleCacheCache.ScanAndListen(ctx, processDir)
+ defer stop1()
+ stop2 := r.otherCache.ScanAndListen(ctx, processDir)
+ defer stop2()
+
+ // We assume cached directories are fully cached, including all their
+ // children, and have not changed. We can skip them.
+ skip := func(root gopathwalk.Root, dir string) bool {
+ if r.env.SkipPathInScan != nil && root.Type == gopathwalk.RootCurrentModule {
+ if root.Path == dir {
+ return false
+ }
+
+ if r.env.SkipPathInScan(filepath.Clean(dir)) {
+ return true
+ }
+ }
+
+ info, ok := r.cacheLoad(dir)
+ if !ok {
+ return false
+ }
+ // This directory can be skipped as long as we have already scanned it.
+ // Packages with errors will continue to have errors, so there is no need
+ // to rescan them.
+ packageScanned, _ := info.reachedStatus(directoryScanned)
+ return packageScanned
+ }
+
+ // Add anything new to the cache, and process it if we're still listening.
+ add := func(root gopathwalk.Root, dir string) {
+ r.cacheStore(r.scanDirForPackage(root, dir))
+ }
+
+ // r.roots and the callback are not necessarily safe to use in the
+ // goroutine below. Process them eagerly.
+ roots := filterRoots(r.roots, callback.rootFound)
+ // We can't cancel walks, because we need them to finish to have a usable
+ // cache. Instead, run them in a separate goroutine and detach.
+ scanDone := make(chan struct{})
+ go func() {
+ select {
+ case <-ctx.Done():
+ return
+ case <-r.scanSema:
+ }
+ defer func() { r.scanSema <- struct{}{} }()
+ // We have the lock on r.scannedRoots, and no other scans can run.
+ for _, root := range roots {
+ if ctx.Err() != nil {
+ return
+ }
+
+ if r.scannedRoots[root] {
+ continue
+ }
+ gopathwalk.WalkSkip([]gopathwalk.Root{root}, add, skip, gopathwalk.Options{Logf: r.env.Logf, ModulesEnabled: true})
+ r.scannedRoots[root] = true
+ }
+ close(scanDone)
+ }()
+ select {
+ case <-ctx.Done():
+ case <-scanDone:
+ }
+ return nil
+}
+
+func (r *ModuleResolver) scoreImportPath(ctx context.Context, path string) float64 {
+ if _, ok := stdlib[path]; ok {
+ return MaxRelevance
+ }
+ mod, _ := r.findPackage(path)
+ return modRelevance(mod)
+}
+
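+// modRelevance scores mod for candidate ranking; higher is more
+// relevant. The main module scores highest among modules, then other
+// direct dependencies, then indirect dependencies, with out-of-scope
+// packages lowest. A higher major version adds a small bonus, so that
+// e.g. example.com/m/v3 ranks above example.com/m/v2.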
+func modRelevance(mod *gocommand.ModuleJSON) float64 {
+ var relevance float64
+ switch {
+ case mod == nil: // out of scope
+ return MaxRelevance - 4
+ case mod.Indirect:
+ relevance = MaxRelevance - 3
+ case !mod.Main:
+ relevance = MaxRelevance - 2
+ default:
+ relevance = MaxRelevance - 1 // main module ties with stdlib
+ }
+
+ _, versionString, ok := module.SplitPathVersion(mod.Path)
+ if ok {
+ index := strings.Index(versionString, "v")
+ if index == -1 {
+ return relevance
+ }
+ if versionNumber, err := strconv.ParseFloat(versionString[index+1:], 64); err == nil {
+ relevance += versionNumber / 1000
+ }
+ }
+
+ return relevance
+}
+
+// canonicalize returns the canonical package for info, using the module
+// information gathered when the resolver was initialized from 'go list -m'.
+func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) {
+ // Packages in GOROOT are already canonical, regardless of the std/cmd modules.
+ if info.rootType == gopathwalk.RootGOROOT {
+ return &pkg{
+ importPathShort: info.nonCanonicalImportPath,
+ dir: info.dir,
+ packageName: path.Base(info.nonCanonicalImportPath),
+ relevance: MaxRelevance,
+ }, nil
+ }
+
+ importPath := info.nonCanonicalImportPath
+ mod := r.findModuleByDir(info.dir)
+ // Check if the directory is underneath a module that's in scope.
+ if mod != nil {
+ // It is. If dir is the target of a replace directive,
+ // our guessed import path is wrong. Use the real one.
+ if mod.Dir == info.dir {
+ importPath = mod.Path
+ } else {
+ dirInMod := info.dir[len(mod.Dir)+len("/"):]
+ importPath = path.Join(mod.Path, filepath.ToSlash(dirInMod))
+ }
+ } else if !strings.HasPrefix(importPath, info.moduleName) {
+ // The module's name doesn't match the package's import path. It
+ // probably needs a replace directive we don't have.
+ return nil, fmt.Errorf("package in %q is not valid without a replace statement", info.dir)
+ }
+
+ res := &pkg{
+ importPathShort: importPath,
+ dir: info.dir,
+ relevance: modRelevance(mod),
+ }
+ // We may have discovered a package that has a different version
+ // in scope already. Canonicalize to that one if possible.
+ if _, canonicalDir := r.findPackage(importPath); canonicalDir != "" {
+ res.dir = canonicalDir
+ }
+ return res, nil
+}
+
+func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) {
+ if err := r.init(); err != nil {
+ return "", nil, err
+ }
+ if info, ok := r.cacheLoad(pkg.dir); ok && !includeTest {
+ return r.cacheExports(ctx, r.env, info)
+ }
+ return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest)
+}
+
+func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) directoryPackageInfo {
+ subdir := ""
+ if dir != root.Path {
+ subdir = dir[len(root.Path)+len("/"):]
+ }
+ importPath := filepath.ToSlash(subdir)
+ if strings.HasPrefix(importPath, "vendor/") {
+ // Only enter vendor directories if they're explicitly requested as a root.
+ return directoryPackageInfo{
+ status: directoryScanned,
+ err: fmt.Errorf("unwanted vendor directory"),
+ }
+ }
+ switch root.Type {
+ case gopathwalk.RootCurrentModule:
+ importPath = path.Join(r.mainByDir[root.Path].Path, filepath.ToSlash(subdir))
+ case gopathwalk.RootModuleCache:
+ matches := modCacheRegexp.FindStringSubmatch(subdir)
+ if len(matches) == 0 {
+ return directoryPackageInfo{
+ status: directoryScanned,
+ err: fmt.Errorf("invalid module cache path: %v", subdir),
+ }
+ }
+ modPath, err := module.UnescapePath(filepath.ToSlash(matches[1]))
+ if err != nil {
+ if r.env.Logf != nil {
+ r.env.Logf("decoding module cache path %q: %v", subdir, err)
+ }
+ return directoryPackageInfo{
+ status: directoryScanned,
+ err: fmt.Errorf("decoding module cache path %q: %v", subdir, err),
+ }
+ }
+ importPath = path.Join(modPath, filepath.ToSlash(matches[3]))
+ }
+
+ modDir, modName := r.modInfo(dir)
+ result := directoryPackageInfo{
+ status: directoryScanned,
+ dir: dir,
+ rootType: root.Type,
+ nonCanonicalImportPath: importPath,
+ moduleDir: modDir,
+ moduleName: modName,
+ }
+ // stdlib packages (root.Type == gopathwalk.RootGOROOT) are always in
+ // scope, despite the confusing go.mod.
+ return result
+}
+
+// modCacheRegexp splits a path in a module cache into module, module version, and package.
+var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`)
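+
+// For example (illustrative, not part of the upstream file):
+//
+// modCacheRegexp.FindStringSubmatch("github.com/foo/bar@v1.2.3/baz")
+// // -> [..., "github.com/foo/bar", "v1.2.3", "/baz"]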
+
+var (
+ slashSlash = []byte("//")
+ moduleStr = []byte("module")
+)
+
+// modulePath returns the module path from the gomod file text.
+// If it cannot find a module path, it returns an empty string.
+// It is tolerant of unrelated problems in the go.mod file.
+//
+// Copied from cmd/go/internal/modfile.
+func modulePath(mod []byte) string {
+ for len(mod) > 0 {
+ line := mod
+ mod = nil
+ if i := bytes.IndexByte(line, '\n'); i >= 0 {
+ line, mod = line[:i], line[i+1:]
+ }
+ if i := bytes.Index(line, slashSlash); i >= 0 {
+ line = line[:i]
+ }
+ line = bytes.TrimSpace(line)
+ if !bytes.HasPrefix(line, moduleStr) {
+ continue
+ }
+ line = line[len(moduleStr):]
+ n := len(line)
+ line = bytes.TrimSpace(line)
+ if len(line) == n || len(line) == 0 {
+ continue
+ }
+
+ if line[0] == '"' || line[0] == '`' {
+ p, err := strconv.Unquote(string(line))
+ if err != nil {
+ return "" // malformed quoted string or multiline module path
+ }
+ return p
+ }
+
+ return string(line)
+ }
+ return "" // missing module path
+}
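+
+// A minimal usage sketch for modulePath (illustrative, not part of the
+// upstream file; the path handling is an assumption):
+//
+// data, err := os.ReadFile(filepath.Join(dir, "go.mod"))
+// if err == nil {
+// name := modulePath(data) // e.g. "example.com/mymodule", or "" if absent
+// }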
diff --git a/vendor/golang.org/x/tools/internal/imports/mod_cache.go b/vendor/golang.org/x/tools/internal/imports/mod_cache.go
new file mode 100644
index 000000000..45690abbb
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/imports/mod_cache.go
@@ -0,0 +1,236 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package imports
+
+import (
+ "context"
+ "fmt"
+ "sync"
+
+ "golang.org/x/tools/internal/gopathwalk"
+)
+
+// To find packages to import, the resolver needs to know about all of
+// the packages that could be imported. This includes packages in
+// (1) the current module, (2) replace targets, and (3) the module cache.
+// Packages in (1) and (2) may change over time, as the client may edit
+// the current module and locally replaced modules. The module cache (3)
+// can only ever be added to.
+//
+// The resolver can thus save state about packages in the module cache
+// and guarantee that this will not change over time. To obtain information
+// about new modules added to the module cache, the module cache should be
+// rescanned.
+//
+// It is OK to serve information about modules that have been deleted (e.g.
+// dropped as dependencies), as they do still exist on disk in the module cache.
+// TODO(suzmue): can we share information with the caller about
+// what module needs to be downloaded to import this package?
+
+type directoryPackageStatus int
+
+const (
+ _ directoryPackageStatus = iota
+ directoryScanned
+ nameLoaded
+ exportsLoaded
+)
+
+type directoryPackageInfo struct {
+ // status indicates the extent to which this struct has been filled in.
+ status directoryPackageStatus
+ // err is non-nil when there was an error trying to reach status.
+ err error
+
+ // Set when status >= directoryScanned.
+
+ // dir is the absolute directory of this package.
+ dir string
+ rootType gopathwalk.RootType
+ // nonCanonicalImportPath is the package's expected import path. It may
+ // not actually be importable at that path.
+ nonCanonicalImportPath string
+
+ // Module-related information.
+ moduleDir string // The directory that is the module root of this dir.
+ moduleName string // The module name that contains this dir.
+
+ // Set when status >= nameLoaded.
+
+ packageName string // the package name, as declared in the source.
+
+ // Set when status >= exportsLoaded.
+
+ exports []string
+}
+
+// reachedStatus returns true when info has a status of at least target,
+// along with any error associated with the attempt to reach target.
+func (info *directoryPackageInfo) reachedStatus(target directoryPackageStatus) (bool, error) {
+ if info.err == nil {
+ return info.status >= target, nil
+ }
+ if info.status == target {
+ return true, info.err
+ }
+ return true, nil
+}
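+
+// For example (illustrative): an entry that was scanned successfully but whose
+// name has not yet been loaded reports
+// reachedStatus(directoryScanned) == (true, nil) and
+// reachedStatus(nameLoaded) == (false, nil).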
+
+// dirInfoCache is a concurrency safe map for storing information about
+// directories that may contain packages.
+//
+// The information in this cache is built incrementally. Entries are initialized in scan.
+// No new keys should be added in any other functions, as all directories containing
+// packages are identified in scan.
+//
+// Other functions, including loadExports and findPackage, may update entries in this cache
+// as they discover new things about the directory.
+//
+// The information in the cache is not expected to change for the cache's
+// lifetime, so there is no protection against competing writes. Users should
+// take care not to hold the cache across changes to the underlying files.
+//
+// TODO(suzmue): consider other concurrency strategies and data structures (RWLocks, sync.Map, etc)
+type dirInfoCache struct {
+ mu sync.Mutex
+ // dirs stores information about packages in directories, keyed by absolute path.
+ dirs map[string]*directoryPackageInfo
+ listeners map[*int]cacheListener
+}
+
+type cacheListener func(directoryPackageInfo)
+
+// ScanAndListen calls listener on all the items in the cache, and on anything
+// newly added. The returned stop function waits for all in-flight callbacks to
+// finish and blocks new ones.
+func (d *dirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener) func() {
+ ctx, cancel := context.WithCancel(ctx)
+
+ // Flushing out all the callbacks is tricky without knowing how many there
+ // are going to be. Setting an arbitrary limit makes it much easier.
+ const maxInFlight = 10
+ sema := make(chan struct{}, maxInFlight)
+ for i := 0; i < maxInFlight; i++ {
+ sema <- struct{}{}
+ }
+
+ cookie := new(int) // A unique ID we can use for the listener.
+
+ // We can't hold mu while calling the listener.
+ d.mu.Lock()
+ var keys []string
+ for key := range d.dirs {
+ keys = append(keys, key)
+ }
+ d.listeners[cookie] = func(info directoryPackageInfo) {
+ select {
+ case <-ctx.Done():
+ return
+ case <-sema:
+ }
+ listener(info)
+ sema <- struct{}{}
+ }
+ d.mu.Unlock()
+
+ stop := func() {
+ cancel()
+ d.mu.Lock()
+ delete(d.listeners, cookie)
+ d.mu.Unlock()
+ for i := 0; i < maxInFlight; i++ {
+ <-sema
+ }
+ }
+
+ // Process the pre-existing keys.
+ for _, k := range keys {
+ select {
+ case <-ctx.Done():
+ return stop
+ default:
+ }
+ if v, ok := d.Load(k); ok {
+ listener(v)
+ }
+ }
+
+ return stop
+}
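+
+// A minimal usage sketch (illustrative, not part of the upstream file):
+//
+// stop := cache.ScanAndListen(ctx, func(info directoryPackageInfo) {
+// fmt.Println("saw package dir:", info.dir)
+// })
+// defer stop() // waits for in-flight callbacks, then blocks new ones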
+
+// Store stores the package info for dir.
+func (d *dirInfoCache) Store(dir string, info directoryPackageInfo) {
+ d.mu.Lock()
+ _, old := d.dirs[dir]
+ d.dirs[dir] = &info
+ var listeners []cacheListener
+ for _, l := range d.listeners {
+ listeners = append(listeners, l)
+ }
+ d.mu.Unlock()
+
+ if !old {
+ for _, l := range listeners {
+ l(info)
+ }
+ }
+}
+
+// Load returns a copy of the directoryPackageInfo for absolute directory dir.
+func (d *dirInfoCache) Load(dir string) (directoryPackageInfo, bool) {
+ d.mu.Lock()
+ defer d.mu.Unlock()
+ info, ok := d.dirs[dir]
+ if !ok {
+ return directoryPackageInfo{}, false
+ }
+ return *info, true
+}
+
+// Keys returns the keys currently present in d.
+func (d *dirInfoCache) Keys() (keys []string) {
+ d.mu.Lock()
+ defer d.mu.Unlock()
+ for key := range d.dirs {
+ keys = append(keys, key)
+ }
+ return keys
+}
+
+func (d *dirInfoCache) CachePackageName(info directoryPackageInfo) (string, error) {
+ if loaded, err := info.reachedStatus(nameLoaded); loaded {
+ return info.packageName, err
+ }
+ if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
+ return "", fmt.Errorf("cannot read package name, scan error: %v", err)
+ }
+ info.packageName, info.err = packageDirToName(info.dir)
+ info.status = nameLoaded
+ d.Store(info.dir, info)
+ return info.packageName, info.err
+}
+
+func (d *dirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) {
+ if reached, _ := info.reachedStatus(exportsLoaded); reached {
+ return info.packageName, info.exports, info.err
+ }
+ if reached, err := info.reachedStatus(nameLoaded); reached && err != nil {
+ return "", nil, err
+ }
+ info.packageName, info.exports, info.err = loadExportsFromFiles(ctx, env, info.dir, false)
+ if info.err == context.Canceled || info.err == context.DeadlineExceeded {
+ return info.packageName, info.exports, info.err
+ }
+ // The cache structure wants things to proceed linearly. We can skip a
+ // step here, but only if we succeed.
+ if info.status == nameLoaded || info.err == nil {
+ info.status = exportsLoaded
+ } else {
+ info.status = nameLoaded
+ }
+ d.Store(info.dir, info)
+ return info.packageName, info.exports, info.err
+}
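+
+// Typical flow (illustrative, not part of the upstream file; cache, dir, ctx,
+// and env are assumed to be in scope):
+//
+// if info, ok := cache.Load(dir); ok {
+// name, _ := cache.CachePackageName(info) // advances status to nameLoaded
+// _, exports, _ := cache.CacheExports(ctx, env, info) // advances to exportsLoaded
+// }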
diff --git a/vendor/golang.org/x/tools/internal/imports/sortimports.go b/vendor/golang.org/x/tools/internal/imports/sortimports.go
new file mode 100644
index 000000000..1a0a7ebd9
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/imports/sortimports.go
@@ -0,0 +1,297 @@
+// Copyright 2013 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Hacked up copy of go/ast/import.go
+// Modified to use a single token.File in preference to a FileSet.
+
+package imports
+
+import (
+ "go/ast"
+ "go/token"
+ "log"
+ "sort"
+ "strconv"
+)
+
+// sortImports sorts runs of consecutive import lines in import blocks in f.
+// It also removes duplicate imports when it is possible to do so without data loss.
+//
+// It may mutate the token.File.
+func sortImports(localPrefix string, tokFile *token.File, f *ast.File) {
+ for i, d := range f.Decls {
+ d, ok := d.(*ast.GenDecl)
+ if !ok || d.Tok != token.IMPORT {
+ // Not an import declaration, so we're done.
+ // Imports are always first.
+ break
+ }
+
+ if len(d.Specs) == 0 {
+ // Empty import block, remove it.
+ f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
+ }
+
+ if !d.Lparen.IsValid() {
+ // Not a block: sorted by default.
+ continue
+ }
+
+ // Identify and sort runs of specs on successive lines.
+ i := 0 // start of the current run (shadows the declaration index above)
+ specs := d.Specs[:0]
+ for j, s := range d.Specs {
+ if j > i && tokFile.Line(s.Pos()) > 1+tokFile.Line(d.Specs[j-1].End()) {
+ // j begins a new run. End this one.
+ specs = append(specs, sortSpecs(localPrefix, tokFile, f, d.Specs[i:j])...)
+ i = j
+ }
+ }
+ specs = append(specs, sortSpecs(localPrefix, tokFile, f, d.Specs[i:])...)
+ d.Specs = specs
+
+ // Deduping can leave a blank line before the rparen; clean that up.
+ // Ignore line directives.
+ if len(d.Specs) > 0 {
+ lastSpec := d.Specs[len(d.Specs)-1]
+ lastLine := tokFile.PositionFor(lastSpec.Pos(), false).Line
+ if rParenLine := tokFile.PositionFor(d.Rparen, false).Line; rParenLine > lastLine+1 {
+ tokFile.MergeLine(rParenLine - 1) // has side effects!
+ }
+ }
+ }
+}
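+
+// A minimal usage sketch (illustrative, not part of the upstream file):
+// parse a file, then sort its import blocks in place.
+//
+// fset := token.NewFileSet()
+// f, _ := parser.ParseFile(fset, "x.go", src, parser.ParseComments)
+// sortImports(localPrefix, fset.File(f.Pos()), f)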
+
+// mergeImports merges all the import declarations into the first one.
+// Taken from golang.org/x/tools/go/ast/astutil.
+// This does not adjust line numbers properly.
+func mergeImports(f *ast.File) {
+ if len(f.Decls) <= 1 {
+ return
+ }
+
+ // Merge all the import declarations into the first one.
+ var first *ast.GenDecl
+ for i := 0; i < len(f.Decls); i++ {
+ decl := f.Decls[i]
+ gen, ok := decl.(*ast.GenDecl)
+ if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
+ continue
+ }
+ if first == nil {
+ first = gen
+ continue // Don't touch the first one.
+ }
+ // We now know there is more than one package in this import
+ // declaration. Ensure that it ends up parenthesized.
+ first.Lparen = first.Pos()
+ // Move the imports of the other import declaration to the first one.
+ for _, spec := range gen.Specs {
+ spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
+ first.Specs = append(first.Specs, spec)
+ }
+ f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
+ i--
+ }
+}
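+
+// Illustrative effect (not part of the upstream file): two declarations
+//
+// import "fmt"
+// import "os"
+//
+// are merged into a single parenthesized block:
+//
+// import (
+// "fmt"
+// "os"
+// )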
+
+// declImports reports whether gen contains an import of path.
+// Taken from golang.org/x/tools/go/ast/astutil.
+func declImports(gen *ast.GenDecl, path string) bool {
+ if gen.Tok != token.IMPORT {
+ return false
+ }
+ for _, spec := range gen.Specs {
+ impspec := spec.(*ast.ImportSpec)
+ if importPath(impspec) == path {
+ return true
+ }
+ }
+ return false
+}
+
+func importPath(s ast.Spec) string {
+ t, err := strconv.Unquote(s.(*ast.ImportSpec).Path.Value)
+ if err == nil {
+ return t
+ }
+ return ""
+}
+
+func importName(s ast.Spec) string {
+ n := s.(*ast.ImportSpec).Name
+ if n == nil {
+ return ""
+ }
+ return n.Name
+}
+
+func importComment(s ast.Spec) string {
+ c := s.(*ast.ImportSpec).Comment
+ if c == nil {
+ return ""
+ }
+ return c.Text()
+}
+
+// collapse indicates whether prev may be removed, leaving only next.
+func collapse(prev, next ast.Spec) bool {
+ if importPath(next) != importPath(prev) || importName(next) != importName(prev) {
+ return false
+ }
+ return prev.(*ast.ImportSpec).Comment == nil
+}
+
+type posSpan struct {
+ Start token.Pos
+ End token.Pos
+}
+
+// sortSpecs sorts the import specs within each import decl.
+// It may mutate the token.File.
+func sortSpecs(localPrefix string, tokFile *token.File, f *ast.File, specs []ast.Spec) []ast.Spec {
+ // Can't short-circuit here even if specs are already sorted,
+ // since they might yet need deduplication.
+ // A lone import, however, may be safely ignored.
+ if len(specs) <= 1 {
+ return specs
+ }
+
+ // Record positions for specs.
+ pos := make([]posSpan, len(specs))
+ for i, s := range specs {
+ pos[i] = posSpan{s.Pos(), s.End()}
+ }
+
+ // Identify comments in this range.
+ // Any comment from pos[0].Start to the final line counts.
+ lastLine := tokFile.Line(pos[len(pos)-1].End)
+ cstart := len(f.Comments)
+ cend := len(f.Comments)
+ for i, g := range f.Comments {
+ if g.Pos() < pos[0].Start {
+ continue
+ }
+ if i < cstart {
+ cstart = i
+ }
+ if tokFile.Line(g.End()) > lastLine {
+ cend = i
+ break
+ }
+ }
+ comments := f.Comments[cstart:cend]
+
+ // Assign each comment to the import spec preceding it.
+ importComment := map[*ast.ImportSpec][]*ast.CommentGroup{}
+ specIndex := 0
+ for _, g := range comments {
+ for specIndex+1 < len(specs) && pos[specIndex+1].Start <= g.Pos() {
+ specIndex++
+ }
+ s := specs[specIndex].(*ast.ImportSpec)
+ importComment[s] = append(importComment[s], g)
+ }
+
+ // Sort the import specs by import path.
+ // Remove duplicates, when possible without data loss.
+ // Reassign the import paths to have the same position sequence.
+ // Reassign each comment to abut the end of its spec.
+ // Sort the comments by new position.
+ sort.Sort(byImportSpec{localPrefix, specs})
+
+ // Dedup. Thanks to our sorting, we can just consider
+ // adjacent pairs of imports.
+ deduped := specs[:0]
+ for i, s := range specs {
+ if i == len(specs)-1 || !collapse(s, specs[i+1]) {
+ deduped = append(deduped, s)
+ } else {
+ p := s.Pos()
+ tokFile.MergeLine(tokFile.Line(p)) // has side effects!
+ }
+ }
+ specs = deduped
+
+ // Fix up comment positions
+ for i, s := range specs {
+ s := s.(*ast.ImportSpec)
+ if s.Name != nil {
+ s.Name.NamePos = pos[i].Start
+ }
+ s.Path.ValuePos = pos[i].Start
+ s.EndPos = pos[i].End
+ nextSpecPos := pos[i].End
+
+ for _, g := range importComment[s] {
+ for _, c := range g.List {
+ c.Slash = pos[i].End
+ nextSpecPos = c.End()
+ }
+ }
+ if i < len(specs)-1 {
+ pos[i+1].Start = nextSpecPos
+ pos[i+1].End = nextSpecPos
+ }
+ }
+
+ sort.Sort(byCommentPos(comments))
+
+ // Fixing up comment positions above can insert blank lines, because import specs are on different lines.
+ // We remove those blank lines here by merging each import spec's line down to the first import spec's line.
+ firstSpecLine := tokFile.Line(specs[0].Pos())
+ for _, s := range specs[1:] {
+ p := s.Pos()
+ line := tokFile.Line(p)
+ for previousLine := line - 1; previousLine >= firstSpecLine; {
+ // MergeLine can panic. Avoid the panic at the cost of not removing the blank line
+ // golang/go#50329
+ if previousLine > 0 && previousLine < tokFile.LineCount() {
+ tokFile.MergeLine(previousLine) // has side effects!
+ previousLine--
+ } else {
+ // try to gather some data to diagnose how this could happen
+ req := "Please report what the imports section of your go file looked like."
+ log.Printf("panic avoided: first:%d line:%d previous:%d max:%d. %s",
+ firstSpecLine, line, previousLine, tokFile.LineCount(), req)
+ break // previousLine is out of range and never decremented; bail out rather than loop forever
+ }
+ }
+ }
+ return specs
+}
+
+type byImportSpec struct {
+ localPrefix string
+ specs []ast.Spec // slice of *ast.ImportSpec
+}
+
+func (x byImportSpec) Len() int { return len(x.specs) }
+func (x byImportSpec) Swap(i, j int) { x.specs[i], x.specs[j] = x.specs[j], x.specs[i] }
+func (x byImportSpec) Less(i, j int) bool {
+ ipath := importPath(x.specs[i])
+ jpath := importPath(x.specs[j])
+
+ igroup := importGroup(x.localPrefix, ipath)
+ jgroup := importGroup(x.localPrefix, jpath)
+ if igroup != jgroup {
+ return igroup < jgroup
+ }
+
+ if ipath != jpath {
+ return ipath < jpath
+ }
+ iname := importName(x.specs[i])
+ jname := importName(x.specs[j])
+
+ if iname != jname {
+ return iname < jname
+ }
+ return importComment(x.specs[i]) < importComment(x.specs[j])
+}
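+
+// For example (illustrative): with localPrefix "example.com/app", importGroup
+// (defined elsewhere in this package) sorts stdlib paths first, then
+// third-party paths, then example.com/app paths; within a group, specs sort
+// by path, then name, then comment.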
+
+type byCommentPos []*ast.CommentGroup
+
+func (x byCommentPos) Len() int { return len(x) }
+func (x byCommentPos) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
+func (x byCommentPos) Less(i, j int) bool { return x[i].Pos() < x[j].Pos() }
diff --git a/vendor/golang.org/x/tools/internal/imports/zstdlib.go b/vendor/golang.org/x/tools/internal/imports/zstdlib.go
new file mode 100644
index 000000000..9f992c2be
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/imports/zstdlib.go
@@ -0,0 +1,11345 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Code generated by mkstdlib.go. DO NOT EDIT.
+
+package imports
+
+var stdlib = map[string][]string{
+ "archive/tar": {
+ "ErrFieldTooLong",
+ "ErrHeader",
+ "ErrInsecurePath",
+ "ErrWriteAfterClose",
+ "ErrWriteTooLong",
+ "FileInfoHeader",
+ "Format",
+ "FormatGNU",
+ "FormatPAX",
+ "FormatUSTAR",
+ "FormatUnknown",
+ "Header",
+ "NewReader",
+ "NewWriter",
+ "Reader",
+ "TypeBlock",
+ "TypeChar",
+ "TypeCont",
+ "TypeDir",
+ "TypeFifo",
+ "TypeGNULongLink",
+ "TypeGNULongName",
+ "TypeGNUSparse",
+ "TypeLink",
+ "TypeReg",
+ "TypeRegA",
+ "TypeSymlink",
+ "TypeXGlobalHeader",
+ "TypeXHeader",
+ "Writer",
+ },
+ "archive/zip": {
+ "Compressor",
+ "Decompressor",
+ "Deflate",
+ "ErrAlgorithm",
+ "ErrChecksum",
+ "ErrFormat",
+ "ErrInsecurePath",
+ "File",
+ "FileHeader",
+ "FileInfoHeader",
+ "NewReader",
+ "NewWriter",
+ "OpenReader",
+ "ReadCloser",
+ "Reader",
+ "RegisterCompressor",
+ "RegisterDecompressor",
+ "Store",
+ "Writer",
+ },
+ "bufio": {
+ "ErrAdvanceTooFar",
+ "ErrBadReadCount",
+ "ErrBufferFull",
+ "ErrFinalToken",
+ "ErrInvalidUnreadByte",
+ "ErrInvalidUnreadRune",
+ "ErrNegativeAdvance",
+ "ErrNegativeCount",
+ "ErrTooLong",
+ "MaxScanTokenSize",
+ "NewReadWriter",
+ "NewReader",
+ "NewReaderSize",
+ "NewScanner",
+ "NewWriter",
+ "NewWriterSize",
+ "ReadWriter",
+ "Reader",
+ "ScanBytes",
+ "ScanLines",
+ "ScanRunes",
+ "ScanWords",
+ "Scanner",
+ "SplitFunc",
+ "Writer",
+ },
+ "bytes": {
+ "Buffer",
+ "Clone",
+ "Compare",
+ "Contains",
+ "ContainsAny",
+ "ContainsFunc",
+ "ContainsRune",
+ "Count",
+ "Cut",
+ "CutPrefix",
+ "CutSuffix",
+ "Equal",
+ "EqualFold",
+ "ErrTooLarge",
+ "Fields",
+ "FieldsFunc",
+ "HasPrefix",
+ "HasSuffix",
+ "Index",
+ "IndexAny",
+ "IndexByte",
+ "IndexFunc",
+ "IndexRune",
+ "Join",
+ "LastIndex",
+ "LastIndexAny",
+ "LastIndexByte",
+ "LastIndexFunc",
+ "Map",
+ "MinRead",
+ "NewBuffer",
+ "NewBufferString",
+ "NewReader",
+ "Reader",
+ "Repeat",
+ "Replace",
+ "ReplaceAll",
+ "Runes",
+ "Split",
+ "SplitAfter",
+ "SplitAfterN",
+ "SplitN",
+ "Title",
+ "ToLower",
+ "ToLowerSpecial",
+ "ToTitle",
+ "ToTitleSpecial",
+ "ToUpper",
+ "ToUpperSpecial",
+ "ToValidUTF8",
+ "Trim",
+ "TrimFunc",
+ "TrimLeft",
+ "TrimLeftFunc",
+ "TrimPrefix",
+ "TrimRight",
+ "TrimRightFunc",
+ "TrimSpace",
+ "TrimSuffix",
+ },
+ "cmp": {
+ "Compare",
+ "Less",
+ "Ordered",
+ },
+ "compress/bzip2": {
+ "NewReader",
+ "StructuralError",
+ },
+ "compress/flate": {
+ "BestCompression",
+ "BestSpeed",
+ "CorruptInputError",
+ "DefaultCompression",
+ "HuffmanOnly",
+ "InternalError",
+ "NewReader",
+ "NewReaderDict",
+ "NewWriter",
+ "NewWriterDict",
+ "NoCompression",
+ "ReadError",
+ "Reader",
+ "Resetter",
+ "WriteError",
+ "Writer",
+ },
+ "compress/gzip": {
+ "BestCompression",
+ "BestSpeed",
+ "DefaultCompression",
+ "ErrChecksum",
+ "ErrHeader",
+ "Header",
+ "HuffmanOnly",
+ "NewReader",
+ "NewWriter",
+ "NewWriterLevel",
+ "NoCompression",
+ "Reader",
+ "Writer",
+ },
+ "compress/lzw": {
+ "LSB",
+ "MSB",
+ "NewReader",
+ "NewWriter",
+ "Order",
+ "Reader",
+ "Writer",
+ },
+ "compress/zlib": {
+ "BestCompression",
+ "BestSpeed",
+ "DefaultCompression",
+ "ErrChecksum",
+ "ErrDictionary",
+ "ErrHeader",
+ "HuffmanOnly",
+ "NewReader",
+ "NewReaderDict",
+ "NewWriter",
+ "NewWriterLevel",
+ "NewWriterLevelDict",
+ "NoCompression",
+ "Resetter",
+ "Writer",
+ },
+ "container/heap": {
+ "Fix",
+ "Init",
+ "Interface",
+ "Pop",
+ "Push",
+ "Remove",
+ },
+ "container/list": {
+ "Element",
+ "List",
+ "New",
+ },
+ "container/ring": {
+ "New",
+ "Ring",
+ },
+ "context": {
+ "AfterFunc",
+ "Background",
+ "CancelCauseFunc",
+ "CancelFunc",
+ "Canceled",
+ "Cause",
+ "Context",
+ "DeadlineExceeded",
+ "TODO",
+ "WithCancel",
+ "WithCancelCause",
+ "WithDeadline",
+ "WithDeadlineCause",
+ "WithTimeout",
+ "WithTimeoutCause",
+ "WithValue",
+ "WithoutCancel",
+ },
+ "crypto": {
+ "BLAKE2b_256",
+ "BLAKE2b_384",
+ "BLAKE2b_512",
+ "BLAKE2s_256",
+ "Decrypter",
+ "DecrypterOpts",
+ "Hash",
+ "MD4",
+ "MD5",
+ "MD5SHA1",
+ "PrivateKey",
+ "PublicKey",
+ "RIPEMD160",
+ "RegisterHash",
+ "SHA1",
+ "SHA224",
+ "SHA256",
+ "SHA384",
+ "SHA3_224",
+ "SHA3_256",
+ "SHA3_384",
+ "SHA3_512",
+ "SHA512",
+ "SHA512_224",
+ "SHA512_256",
+ "Signer",
+ "SignerOpts",
+ },
+ "crypto/aes": {
+ "BlockSize",
+ "KeySizeError",
+ "NewCipher",
+ },
+ "crypto/cipher": {
+ "AEAD",
+ "Block",
+ "BlockMode",
+ "NewCBCDecrypter",
+ "NewCBCEncrypter",
+ "NewCFBDecrypter",
+ "NewCFBEncrypter",
+ "NewCTR",
+ "NewGCM",
+ "NewGCMWithNonceSize",
+ "NewGCMWithTagSize",
+ "NewOFB",
+ "Stream",
+ "StreamReader",
+ "StreamWriter",
+ },
+ "crypto/des": {
+ "BlockSize",
+ "KeySizeError",
+ "NewCipher",
+ "NewTripleDESCipher",
+ },
+ "crypto/dsa": {
+ "ErrInvalidPublicKey",
+ "GenerateKey",
+ "GenerateParameters",
+ "L1024N160",
+ "L2048N224",
+ "L2048N256",
+ "L3072N256",
+ "ParameterSizes",
+ "Parameters",
+ "PrivateKey",
+ "PublicKey",
+ "Sign",
+ "Verify",
+ },
+ "crypto/ecdh": {
+ "Curve",
+ "P256",
+ "P384",
+ "P521",
+ "PrivateKey",
+ "PublicKey",
+ "X25519",
+ },
+ "crypto/ecdsa": {
+ "GenerateKey",
+ "PrivateKey",
+ "PublicKey",
+ "Sign",
+ "SignASN1",
+ "Verify",
+ "VerifyASN1",
+ },
+ "crypto/ed25519": {
+ "GenerateKey",
+ "NewKeyFromSeed",
+ "Options",
+ "PrivateKey",
+ "PrivateKeySize",
+ "PublicKey",
+ "PublicKeySize",
+ "SeedSize",
+ "Sign",
+ "SignatureSize",
+ "Verify",
+ "VerifyWithOptions",
+ },
+ "crypto/elliptic": {
+ "Curve",
+ "CurveParams",
+ "GenerateKey",
+ "Marshal",
+ "MarshalCompressed",
+ "P224",
+ "P256",
+ "P384",
+ "P521",
+ "Unmarshal",
+ "UnmarshalCompressed",
+ },
+ "crypto/hmac": {
+ "Equal",
+ "New",
+ },
+ "crypto/md5": {
+ "BlockSize",
+ "New",
+ "Size",
+ "Sum",
+ },
+ "crypto/rand": {
+ "Int",
+ "Prime",
+ "Read",
+ "Reader",
+ },
+ "crypto/rc4": {
+ "Cipher",
+ "KeySizeError",
+ "NewCipher",
+ },
+ "crypto/rsa": {
+ "CRTValue",
+ "DecryptOAEP",
+ "DecryptPKCS1v15",
+ "DecryptPKCS1v15SessionKey",
+ "EncryptOAEP",
+ "EncryptPKCS1v15",
+ "ErrDecryption",
+ "ErrMessageTooLong",
+ "ErrVerification",
+ "GenerateKey",
+ "GenerateMultiPrimeKey",
+ "OAEPOptions",
+ "PKCS1v15DecryptOptions",
+ "PSSOptions",
+ "PSSSaltLengthAuto",
+ "PSSSaltLengthEqualsHash",
+ "PrecomputedValues",
+ "PrivateKey",
+ "PublicKey",
+ "SignPKCS1v15",
+ "SignPSS",
+ "VerifyPKCS1v15",
+ "VerifyPSS",
+ },
+ "crypto/sha1": {
+ "BlockSize",
+ "New",
+ "Size",
+ "Sum",
+ },
+ "crypto/sha256": {
+ "BlockSize",
+ "New",
+ "New224",
+ "Size",
+ "Size224",
+ "Sum224",
+ "Sum256",
+ },
+ "crypto/sha512": {
+ "BlockSize",
+ "New",
+ "New384",
+ "New512_224",
+ "New512_256",
+ "Size",
+ "Size224",
+ "Size256",
+ "Size384",
+ "Sum384",
+ "Sum512",
+ "Sum512_224",
+ "Sum512_256",
+ },
+ "crypto/subtle": {
+ "ConstantTimeByteEq",
+ "ConstantTimeCompare",
+ "ConstantTimeCopy",
+ "ConstantTimeEq",
+ "ConstantTimeLessOrEq",
+ "ConstantTimeSelect",
+ "XORBytes",
+ },
+ "crypto/tls": {
+ "AlertError",
+ "Certificate",
+ "CertificateRequestInfo",
+ "CertificateVerificationError",
+ "CipherSuite",
+ "CipherSuiteName",
+ "CipherSuites",
+ "Client",
+ "ClientAuthType",
+ "ClientHelloInfo",
+ "ClientSessionCache",
+ "ClientSessionState",
+ "Config",
+ "Conn",
+ "ConnectionState",
+ "CurveID",
+ "CurveP256",
+ "CurveP384",
+ "CurveP521",
+ "Dial",
+ "DialWithDialer",
+ "Dialer",
+ "ECDSAWithP256AndSHA256",
+ "ECDSAWithP384AndSHA384",
+ "ECDSAWithP521AndSHA512",
+ "ECDSAWithSHA1",
+ "Ed25519",
+ "InsecureCipherSuites",
+ "Listen",
+ "LoadX509KeyPair",
+ "NewLRUClientSessionCache",
+ "NewListener",
+ "NewResumptionState",
+ "NoClientCert",
+ "PKCS1WithSHA1",
+ "PKCS1WithSHA256",
+ "PKCS1WithSHA384",
+ "PKCS1WithSHA512",
+ "PSSWithSHA256",
+ "PSSWithSHA384",
+ "PSSWithSHA512",
+ "ParseSessionState",
+ "QUICClient",
+ "QUICConfig",
+ "QUICConn",
+ "QUICEncryptionLevel",
+ "QUICEncryptionLevelApplication",
+ "QUICEncryptionLevelEarly",
+ "QUICEncryptionLevelHandshake",
+ "QUICEncryptionLevelInitial",
+ "QUICEvent",
+ "QUICEventKind",
+ "QUICHandshakeDone",
+ "QUICNoEvent",
+ "QUICRejectedEarlyData",
+ "QUICServer",
+ "QUICSessionTicketOptions",
+ "QUICSetReadSecret",
+ "QUICSetWriteSecret",
+ "QUICTransportParameters",
+ "QUICTransportParametersRequired",
+ "QUICWriteData",
+ "RecordHeaderError",
+ "RenegotiateFreelyAsClient",
+ "RenegotiateNever",
+ "RenegotiateOnceAsClient",
+ "RenegotiationSupport",
+ "RequestClientCert",
+ "RequireAndVerifyClientCert",
+ "RequireAnyClientCert",
+ "Server",
+ "SessionState",
+ "SignatureScheme",
+ "TLS_AES_128_GCM_SHA256",
+ "TLS_AES_256_GCM_SHA384",
+ "TLS_CHACHA20_POLY1305_SHA256",
+ "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA",
+ "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256",
+ "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
+ "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA",
+ "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
+ "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305",
+ "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256",
+ "TLS_ECDHE_ECDSA_WITH_RC4_128_SHA",
+ "TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA",
+ "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA",
+ "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256",
+ "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
+ "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA",
+ "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
+ "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305",
+ "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256",
+ "TLS_ECDHE_RSA_WITH_RC4_128_SHA",
+ "TLS_FALLBACK_SCSV",
+ "TLS_RSA_WITH_3DES_EDE_CBC_SHA",
+ "TLS_RSA_WITH_AES_128_CBC_SHA",
+ "TLS_RSA_WITH_AES_128_CBC_SHA256",
+ "TLS_RSA_WITH_AES_128_GCM_SHA256",
+ "TLS_RSA_WITH_AES_256_CBC_SHA",
+ "TLS_RSA_WITH_AES_256_GCM_SHA384",
+ "TLS_RSA_WITH_RC4_128_SHA",
+ "VerifyClientCertIfGiven",
+ "VersionName",
+ "VersionSSL30",
+ "VersionTLS10",
+ "VersionTLS11",
+ "VersionTLS12",
+ "VersionTLS13",
+ "X25519",
+ "X509KeyPair",
+ },
+ "crypto/x509": {
+ "CANotAuthorizedForExtKeyUsage",
+ "CANotAuthorizedForThisName",
+ "CertPool",
+ "Certificate",
+ "CertificateInvalidError",
+ "CertificateRequest",
+ "ConstraintViolationError",
+ "CreateCertificate",
+ "CreateCertificateRequest",
+ "CreateRevocationList",
+ "DSA",
+ "DSAWithSHA1",
+ "DSAWithSHA256",
+ "DecryptPEMBlock",
+ "ECDSA",
+ "ECDSAWithSHA1",
+ "ECDSAWithSHA256",
+ "ECDSAWithSHA384",
+ "ECDSAWithSHA512",
+ "Ed25519",
+ "EncryptPEMBlock",
+ "ErrUnsupportedAlgorithm",
+ "Expired",
+ "ExtKeyUsage",
+ "ExtKeyUsageAny",
+ "ExtKeyUsageClientAuth",
+ "ExtKeyUsageCodeSigning",
+ "ExtKeyUsageEmailProtection",
+ "ExtKeyUsageIPSECEndSystem",
+ "ExtKeyUsageIPSECTunnel",
+ "ExtKeyUsageIPSECUser",
+ "ExtKeyUsageMicrosoftCommercialCodeSigning",
+ "ExtKeyUsageMicrosoftKernelCodeSigning",
+ "ExtKeyUsageMicrosoftServerGatedCrypto",
+ "ExtKeyUsageNetscapeServerGatedCrypto",
+ "ExtKeyUsageOCSPSigning",
+ "ExtKeyUsageServerAuth",
+ "ExtKeyUsageTimeStamping",
+ "HostnameError",
+ "IncompatibleUsage",
+ "IncorrectPasswordError",
+ "InsecureAlgorithmError",
+ "InvalidReason",
+ "IsEncryptedPEMBlock",
+ "KeyUsage",
+ "KeyUsageCRLSign",
+ "KeyUsageCertSign",
+ "KeyUsageContentCommitment",
+ "KeyUsageDataEncipherment",
+ "KeyUsageDecipherOnly",
+ "KeyUsageDigitalSignature",
+ "KeyUsageEncipherOnly",
+ "KeyUsageKeyAgreement",
+ "KeyUsageKeyEncipherment",
+ "MD2WithRSA",
+ "MD5WithRSA",
+ "MarshalECPrivateKey",
+ "MarshalPKCS1PrivateKey",
+ "MarshalPKCS1PublicKey",
+ "MarshalPKCS8PrivateKey",
+ "MarshalPKIXPublicKey",
+ "NameConstraintsWithoutSANs",
+ "NameMismatch",
+ "NewCertPool",
+ "NotAuthorizedToSign",
+ "PEMCipher",
+ "PEMCipher3DES",
+ "PEMCipherAES128",
+ "PEMCipherAES192",
+ "PEMCipherAES256",
+ "PEMCipherDES",
+ "ParseCRL",
+ "ParseCertificate",
+ "ParseCertificateRequest",
+ "ParseCertificates",
+ "ParseDERCRL",
+ "ParseECPrivateKey",
+ "ParsePKCS1PrivateKey",
+ "ParsePKCS1PublicKey",
+ "ParsePKCS8PrivateKey",
+ "ParsePKIXPublicKey",
+ "ParseRevocationList",
+ "PublicKeyAlgorithm",
+ "PureEd25519",
+ "RSA",
+ "RevocationList",
+ "RevocationListEntry",
+ "SHA1WithRSA",
+ "SHA256WithRSA",
+ "SHA256WithRSAPSS",
+ "SHA384WithRSA",
+ "SHA384WithRSAPSS",
+ "SHA512WithRSA",
+ "SHA512WithRSAPSS",
+ "SetFallbackRoots",
+ "SignatureAlgorithm",
+ "SystemCertPool",
+ "SystemRootsError",
+ "TooManyConstraints",
+ "TooManyIntermediates",
+ "UnconstrainedName",
+ "UnhandledCriticalExtension",
+ "UnknownAuthorityError",
+ "UnknownPublicKeyAlgorithm",
+ "UnknownSignatureAlgorithm",
+ "VerifyOptions",
+ },
+ "crypto/x509/pkix": {
+ "AlgorithmIdentifier",
+ "AttributeTypeAndValue",
+ "AttributeTypeAndValueSET",
+ "CertificateList",
+ "Extension",
+ "Name",
+ "RDNSequence",
+ "RelativeDistinguishedNameSET",
+ "RevokedCertificate",
+ "TBSCertificateList",
+ },
+ "database/sql": {
+ "ColumnType",
+ "Conn",
+ "DB",
+ "DBStats",
+ "Drivers",
+ "ErrConnDone",
+ "ErrNoRows",
+ "ErrTxDone",
+ "IsolationLevel",
+ "LevelDefault",
+ "LevelLinearizable",
+ "LevelReadCommitted",
+ "LevelReadUncommitted",
+ "LevelRepeatableRead",
+ "LevelSerializable",
+ "LevelSnapshot",
+ "LevelWriteCommitted",
+ "Named",
+ "NamedArg",
+ "NullBool",
+ "NullByte",
+ "NullFloat64",
+ "NullInt16",
+ "NullInt32",
+ "NullInt64",
+ "NullString",
+ "NullTime",
+ "Open",
+ "OpenDB",
+ "Out",
+ "RawBytes",
+ "Register",
+ "Result",
+ "Row",
+ "Rows",
+ "Scanner",
+ "Stmt",
+ "Tx",
+ "TxOptions",
+ },
+ "database/sql/driver": {
+ "Bool",
+ "ColumnConverter",
+ "Conn",
+ "ConnBeginTx",
+ "ConnPrepareContext",
+ "Connector",
+ "DefaultParameterConverter",
+ "Driver",
+ "DriverContext",
+ "ErrBadConn",
+ "ErrRemoveArgument",
+ "ErrSkip",
+ "Execer",
+ "ExecerContext",
+ "Int32",
+ "IsScanValue",
+ "IsValue",
+ "IsolationLevel",
+ "NamedValue",
+ "NamedValueChecker",
+ "NotNull",
+ "Null",
+ "Pinger",
+ "Queryer",
+ "QueryerContext",
+ "Result",
+ "ResultNoRows",
+ "Rows",
+ "RowsAffected",
+ "RowsColumnTypeDatabaseTypeName",
+ "RowsColumnTypeLength",
+ "RowsColumnTypeNullable",
+ "RowsColumnTypePrecisionScale",
+ "RowsColumnTypeScanType",
+ "RowsNextResultSet",
+ "SessionResetter",
+ "Stmt",
+ "StmtExecContext",
+ "StmtQueryContext",
+ "String",
+ "Tx",
+ "TxOptions",
+ "Validator",
+ "Value",
+ "ValueConverter",
+ "Valuer",
+ },
+ "debug/buildinfo": {
+ "BuildInfo",
+ "Read",
+ "ReadFile",
+ },
+ "debug/dwarf": {
+ "AddrType",
+ "ArrayType",
+ "Attr",
+ "AttrAbstractOrigin",
+ "AttrAccessibility",
+ "AttrAddrBase",
+ "AttrAddrClass",
+ "AttrAlignment",
+ "AttrAllocated",
+ "AttrArtificial",
+ "AttrAssociated",
+ "AttrBaseTypes",
+ "AttrBinaryScale",
+ "AttrBitOffset",
+ "AttrBitSize",
+ "AttrByteSize",
+ "AttrCallAllCalls",
+ "AttrCallAllSourceCalls",
+ "AttrCallAllTailCalls",
+ "AttrCallColumn",
+ "AttrCallDataLocation",
+ "AttrCallDataValue",
+ "AttrCallFile",
+ "AttrCallLine",
+ "AttrCallOrigin",
+ "AttrCallPC",
+ "AttrCallParameter",
+ "AttrCallReturnPC",
+ "AttrCallTailCall",
+ "AttrCallTarget",
+ "AttrCallTargetClobbered",
+ "AttrCallValue",
+ "AttrCalling",
+ "AttrCommonRef",
+ "AttrCompDir",
+ "AttrConstExpr",
+ "AttrConstValue",
+ "AttrContainingType",
+ "AttrCount",
+ "AttrDataBitOffset",
+ "AttrDataLocation",
+ "AttrDataMemberLoc",
+ "AttrDecimalScale",
+ "AttrDecimalSign",
+ "AttrDeclColumn",
+ "AttrDeclFile",
+ "AttrDeclLine",
+ "AttrDeclaration",
+ "AttrDefaultValue",
+ "AttrDefaulted",
+ "AttrDeleted",
+ "AttrDescription",
+ "AttrDigitCount",
+ "AttrDiscr",
+ "AttrDiscrList",
+ "AttrDiscrValue",
+ "AttrDwoName",
+ "AttrElemental",
+ "AttrEncoding",
+ "AttrEndianity",
+ "AttrEntrypc",
+ "AttrEnumClass",
+ "AttrExplicit",
+ "AttrExportSymbols",
+ "AttrExtension",
+ "AttrExternal",
+ "AttrFrameBase",
+ "AttrFriend",
+ "AttrHighpc",
+ "AttrIdentifierCase",
+ "AttrImport",
+ "AttrInline",
+ "AttrIsOptional",
+ "AttrLanguage",
+ "AttrLinkageName",
+ "AttrLocation",
+ "AttrLoclistsBase",
+ "AttrLowerBound",
+ "AttrLowpc",
+ "AttrMacroInfo",
+ "AttrMacros",
+ "AttrMainSubprogram",
+ "AttrMutable",
+ "AttrName",
+ "AttrNamelistItem",
+ "AttrNoreturn",
+ "AttrObjectPointer",
+ "AttrOrdering",
+ "AttrPictureString",
+ "AttrPriority",
+ "AttrProducer",
+ "AttrPrototyped",
+ "AttrPure",
+ "AttrRanges",
+ "AttrRank",
+ "AttrRecursive",
+ "AttrReference",
+ "AttrReturnAddr",
+ "AttrRnglistsBase",
+ "AttrRvalueReference",
+ "AttrSegment",
+ "AttrSibling",
+ "AttrSignature",
+ "AttrSmall",
+ "AttrSpecification",
+ "AttrStartScope",
+ "AttrStaticLink",
+ "AttrStmtList",
+ "AttrStrOffsetsBase",
+ "AttrStride",
+ "AttrStrideSize",
+ "AttrStringLength",
+ "AttrStringLengthBitSize",
+ "AttrStringLengthByteSize",
+ "AttrThreadsScaled",
+ "AttrTrampoline",
+ "AttrType",
+ "AttrUpperBound",
+ "AttrUseLocation",
+ "AttrUseUTF8",
+ "AttrVarParam",
+ "AttrVirtuality",
+ "AttrVisibility",
+ "AttrVtableElemLoc",
+ "BasicType",
+ "BoolType",
+ "CharType",
+ "Class",
+ "ClassAddrPtr",
+ "ClassAddress",
+ "ClassBlock",
+ "ClassConstant",
+ "ClassExprLoc",
+ "ClassFlag",
+ "ClassLinePtr",
+ "ClassLocList",
+ "ClassLocListPtr",
+ "ClassMacPtr",
+ "ClassRangeListPtr",
+ "ClassReference",
+ "ClassReferenceAlt",
+ "ClassReferenceSig",
+ "ClassRngList",
+ "ClassRngListsPtr",
+ "ClassStrOffsetsPtr",
+ "ClassString",
+ "ClassStringAlt",
+ "ClassUnknown",
+ "CommonType",
+ "ComplexType",
+ "Data",
+ "DecodeError",
+ "DotDotDotType",
+ "Entry",
+ "EnumType",
+ "EnumValue",
+ "ErrUnknownPC",
+ "Field",
+ "FloatType",
+ "FuncType",
+ "IntType",
+ "LineEntry",
+ "LineFile",
+ "LineReader",
+ "LineReaderPos",
+ "New",
+ "Offset",
+ "PtrType",
+ "QualType",
+ "Reader",
+ "StructField",
+ "StructType",
+ "Tag",
+ "TagAccessDeclaration",
+ "TagArrayType",
+ "TagAtomicType",
+ "TagBaseType",
+ "TagCallSite",
+ "TagCallSiteParameter",
+ "TagCatchDwarfBlock",
+ "TagClassType",
+ "TagCoarrayType",
+ "TagCommonDwarfBlock",
+ "TagCommonInclusion",
+ "TagCompileUnit",
+ "TagCondition",
+ "TagConstType",
+ "TagConstant",
+ "TagDwarfProcedure",
+ "TagDynamicType",
+ "TagEntryPoint",
+ "TagEnumerationType",
+ "TagEnumerator",
+ "TagFileType",
+ "TagFormalParameter",
+ "TagFriend",
+ "TagGenericSubrange",
+ "TagImmutableType",
+ "TagImportedDeclaration",
+ "TagImportedModule",
+ "TagImportedUnit",
+ "TagInheritance",
+ "TagInlinedSubroutine",
+ "TagInterfaceType",
+ "TagLabel",
+ "TagLexDwarfBlock",
+ "TagMember",
+ "TagModule",
+ "TagMutableType",
+ "TagNamelist",
+ "TagNamelistItem",
+ "TagNamespace",
+ "TagPackedType",
+ "TagPartialUnit",
+ "TagPointerType",
+ "TagPtrToMemberType",
+ "TagReferenceType",
+ "TagRestrictType",
+ "TagRvalueReferenceType",
+ "TagSetType",
+ "TagSharedType",
+ "TagSkeletonUnit",
+ "TagStringType",
+ "TagStructType",
+ "TagSubprogram",
+ "TagSubrangeType",
+ "TagSubroutineType",
+ "TagTemplateAlias",
+ "TagTemplateTypeParameter",
+ "TagTemplateValueParameter",
+ "TagThrownType",
+ "TagTryDwarfBlock",
+ "TagTypeUnit",
+ "TagTypedef",
+ "TagUnionType",
+ "TagUnspecifiedParameters",
+ "TagUnspecifiedType",
+ "TagVariable",
+ "TagVariant",
+ "TagVariantPart",
+ "TagVolatileType",
+ "TagWithStmt",
+ "Type",
+ "TypedefType",
+ "UcharType",
+ "UintType",
+ "UnspecifiedType",
+ "UnsupportedType",
+ "VoidType",
+ },
+ "debug/elf": {
+ "ARM_MAGIC_TRAMP_NUMBER",
+ "COMPRESS_HIOS",
+ "COMPRESS_HIPROC",
+ "COMPRESS_LOOS",
+ "COMPRESS_LOPROC",
+ "COMPRESS_ZLIB",
+ "COMPRESS_ZSTD",
+ "Chdr32",
+ "Chdr64",
+ "Class",
+ "CompressionType",
+ "DF_1_CONFALT",
+ "DF_1_DIRECT",
+ "DF_1_DISPRELDNE",
+ "DF_1_DISPRELPND",
+ "DF_1_EDITED",
+ "DF_1_ENDFILTEE",
+ "DF_1_GLOBAL",
+ "DF_1_GLOBAUDIT",
+ "DF_1_GROUP",
+ "DF_1_IGNMULDEF",
+ "DF_1_INITFIRST",
+ "DF_1_INTERPOSE",
+ "DF_1_KMOD",
+ "DF_1_LOADFLTR",
+ "DF_1_NOCOMMON",
+ "DF_1_NODEFLIB",
+ "DF_1_NODELETE",
+ "DF_1_NODIRECT",
+ "DF_1_NODUMP",
+ "DF_1_NOHDR",
+ "DF_1_NOKSYMS",
+ "DF_1_NOOPEN",
+ "DF_1_NORELOC",
+ "DF_1_NOW",
+ "DF_1_ORIGIN",
+ "DF_1_PIE",
+ "DF_1_SINGLETON",
+ "DF_1_STUB",
+ "DF_1_SYMINTPOSE",
+ "DF_1_TRANS",
+ "DF_1_WEAKFILTER",
+ "DF_BIND_NOW",
+ "DF_ORIGIN",
+ "DF_STATIC_TLS",
+ "DF_SYMBOLIC",
+ "DF_TEXTREL",
+ "DT_ADDRRNGHI",
+ "DT_ADDRRNGLO",
+ "DT_AUDIT",
+ "DT_AUXILIARY",
+ "DT_BIND_NOW",
+ "DT_CHECKSUM",
+ "DT_CONFIG",
+ "DT_DEBUG",
+ "DT_DEPAUDIT",
+ "DT_ENCODING",
+ "DT_FEATURE",
+ "DT_FILTER",
+ "DT_FINI",
+ "DT_FINI_ARRAY",
+ "DT_FINI_ARRAYSZ",
+ "DT_FLAGS",
+ "DT_FLAGS_1",
+ "DT_GNU_CONFLICT",
+ "DT_GNU_CONFLICTSZ",
+ "DT_GNU_HASH",
+ "DT_GNU_LIBLIST",
+ "DT_GNU_LIBLISTSZ",
+ "DT_GNU_PRELINKED",
+ "DT_HASH",
+ "DT_HIOS",
+ "DT_HIPROC",
+ "DT_INIT",
+ "DT_INIT_ARRAY",
+ "DT_INIT_ARRAYSZ",
+ "DT_JMPREL",
+ "DT_LOOS",
+ "DT_LOPROC",
+ "DT_MIPS_AUX_DYNAMIC",
+ "DT_MIPS_BASE_ADDRESS",
+ "DT_MIPS_COMPACT_SIZE",
+ "DT_MIPS_CONFLICT",
+ "DT_MIPS_CONFLICTNO",
+ "DT_MIPS_CXX_FLAGS",
+ "DT_MIPS_DELTA_CLASS",
+ "DT_MIPS_DELTA_CLASSSYM",
+ "DT_MIPS_DELTA_CLASSSYM_NO",
+ "DT_MIPS_DELTA_CLASS_NO",
+ "DT_MIPS_DELTA_INSTANCE",
+ "DT_MIPS_DELTA_INSTANCE_NO",
+ "DT_MIPS_DELTA_RELOC",
+ "DT_MIPS_DELTA_RELOC_NO",
+ "DT_MIPS_DELTA_SYM",
+ "DT_MIPS_DELTA_SYM_NO",
+ "DT_MIPS_DYNSTR_ALIGN",
+ "DT_MIPS_FLAGS",
+ "DT_MIPS_GOTSYM",
+ "DT_MIPS_GP_VALUE",
+ "DT_MIPS_HIDDEN_GOTIDX",
+ "DT_MIPS_HIPAGENO",
+ "DT_MIPS_ICHECKSUM",
+ "DT_MIPS_INTERFACE",
+ "DT_MIPS_INTERFACE_SIZE",
+ "DT_MIPS_IVERSION",
+ "DT_MIPS_LIBLIST",
+ "DT_MIPS_LIBLISTNO",
+ "DT_MIPS_LOCALPAGE_GOTIDX",
+ "DT_MIPS_LOCAL_GOTIDX",
+ "DT_MIPS_LOCAL_GOTNO",
+ "DT_MIPS_MSYM",
+ "DT_MIPS_OPTIONS",
+ "DT_MIPS_PERF_SUFFIX",
+ "DT_MIPS_PIXIE_INIT",
+ "DT_MIPS_PLTGOT",
+ "DT_MIPS_PROTECTED_GOTIDX",
+ "DT_MIPS_RLD_MAP",
+ "DT_MIPS_RLD_MAP_REL",
+ "DT_MIPS_RLD_TEXT_RESOLVE_ADDR",
+ "DT_MIPS_RLD_VERSION",
+ "DT_MIPS_RWPLT",
+ "DT_MIPS_SYMBOL_LIB",
+ "DT_MIPS_SYMTABNO",
+ "DT_MIPS_TIME_STAMP",
+ "DT_MIPS_UNREFEXTNO",
+ "DT_MOVEENT",
+ "DT_MOVESZ",
+ "DT_MOVETAB",
+ "DT_NEEDED",
+ "DT_NULL",
+ "DT_PLTGOT",
+ "DT_PLTPAD",
+ "DT_PLTPADSZ",
+ "DT_PLTREL",
+ "DT_PLTRELSZ",
+ "DT_POSFLAG_1",
+ "DT_PPC64_GLINK",
+ "DT_PPC64_OPD",
+ "DT_PPC64_OPDSZ",
+ "DT_PPC64_OPT",
+ "DT_PPC_GOT",
+ "DT_PPC_OPT",
+ "DT_PREINIT_ARRAY",
+ "DT_PREINIT_ARRAYSZ",
+ "DT_REL",
+ "DT_RELA",
+ "DT_RELACOUNT",
+ "DT_RELAENT",
+ "DT_RELASZ",
+ "DT_RELCOUNT",
+ "DT_RELENT",
+ "DT_RELSZ",
+ "DT_RPATH",
+ "DT_RUNPATH",
+ "DT_SONAME",
+ "DT_SPARC_REGISTER",
+ "DT_STRSZ",
+ "DT_STRTAB",
+ "DT_SYMBOLIC",
+ "DT_SYMENT",
+ "DT_SYMINENT",
+ "DT_SYMINFO",
+ "DT_SYMINSZ",
+ "DT_SYMTAB",
+ "DT_SYMTAB_SHNDX",
+ "DT_TEXTREL",
+ "DT_TLSDESC_GOT",
+ "DT_TLSDESC_PLT",
+ "DT_USED",
+ "DT_VALRNGHI",
+ "DT_VALRNGLO",
+ "DT_VERDEF",
+ "DT_VERDEFNUM",
+ "DT_VERNEED",
+ "DT_VERNEEDNUM",
+ "DT_VERSYM",
+ "Data",
+ "Dyn32",
+ "Dyn64",
+ "DynFlag",
+ "DynFlag1",
+ "DynTag",
+ "EI_ABIVERSION",
+ "EI_CLASS",
+ "EI_DATA",
+ "EI_NIDENT",
+ "EI_OSABI",
+ "EI_PAD",
+ "EI_VERSION",
+ "ELFCLASS32",
+ "ELFCLASS64",
+ "ELFCLASSNONE",
+ "ELFDATA2LSB",
+ "ELFDATA2MSB",
+ "ELFDATANONE",
+ "ELFMAG",
+ "ELFOSABI_86OPEN",
+ "ELFOSABI_AIX",
+ "ELFOSABI_ARM",
+ "ELFOSABI_AROS",
+ "ELFOSABI_CLOUDABI",
+ "ELFOSABI_FENIXOS",
+ "ELFOSABI_FREEBSD",
+ "ELFOSABI_HPUX",
+ "ELFOSABI_HURD",
+ "ELFOSABI_IRIX",
+ "ELFOSABI_LINUX",
+ "ELFOSABI_MODESTO",
+ "ELFOSABI_NETBSD",
+ "ELFOSABI_NONE",
+ "ELFOSABI_NSK",
+ "ELFOSABI_OPENBSD",
+ "ELFOSABI_OPENVMS",
+ "ELFOSABI_SOLARIS",
+ "ELFOSABI_STANDALONE",
+ "ELFOSABI_TRU64",
+ "EM_386",
+ "EM_486",
+ "EM_56800EX",
+ "EM_68HC05",
+ "EM_68HC08",
+ "EM_68HC11",
+ "EM_68HC12",
+ "EM_68HC16",
+ "EM_68K",
+ "EM_78KOR",
+ "EM_8051",
+ "EM_860",
+ "EM_88K",
+ "EM_960",
+ "EM_AARCH64",
+ "EM_ALPHA",
+ "EM_ALPHA_STD",
+ "EM_ALTERA_NIOS2",
+ "EM_AMDGPU",
+ "EM_ARC",
+ "EM_ARCA",
+ "EM_ARC_COMPACT",
+ "EM_ARC_COMPACT2",
+ "EM_ARM",
+ "EM_AVR",
+ "EM_AVR32",
+ "EM_BA1",
+ "EM_BA2",
+ "EM_BLACKFIN",
+ "EM_BPF",
+ "EM_C166",
+ "EM_CDP",
+ "EM_CE",
+ "EM_CLOUDSHIELD",
+ "EM_COGE",
+ "EM_COLDFIRE",
+ "EM_COOL",
+ "EM_COREA_1ST",
+ "EM_COREA_2ND",
+ "EM_CR",
+ "EM_CR16",
+ "EM_CRAYNV2",
+ "EM_CRIS",
+ "EM_CRX",
+ "EM_CSR_KALIMBA",
+ "EM_CUDA",
+ "EM_CYPRESS_M8C",
+ "EM_D10V",
+ "EM_D30V",
+ "EM_DSP24",
+ "EM_DSPIC30F",
+ "EM_DXP",
+ "EM_ECOG1",
+ "EM_ECOG16",
+ "EM_ECOG1X",
+ "EM_ECOG2",
+ "EM_ETPU",
+ "EM_EXCESS",
+ "EM_F2MC16",
+ "EM_FIREPATH",
+ "EM_FR20",
+ "EM_FR30",
+ "EM_FT32",
+ "EM_FX66",
+ "EM_H8S",
+ "EM_H8_300",
+ "EM_H8_300H",
+ "EM_H8_500",
+ "EM_HUANY",
+ "EM_IA_64",
+ "EM_INTEL205",
+ "EM_INTEL206",
+ "EM_INTEL207",
+ "EM_INTEL208",
+ "EM_INTEL209",
+ "EM_IP2K",
+ "EM_JAVELIN",
+ "EM_K10M",
+ "EM_KM32",
+ "EM_KMX16",
+ "EM_KMX32",
+ "EM_KMX8",
+ "EM_KVARC",
+ "EM_L10M",
+ "EM_LANAI",
+ "EM_LATTICEMICO32",
+ "EM_LOONGARCH",
+ "EM_M16C",
+ "EM_M32",
+ "EM_M32C",
+ "EM_M32R",
+ "EM_MANIK",
+ "EM_MAX",
+ "EM_MAXQ30",
+ "EM_MCHP_PIC",
+ "EM_MCST_ELBRUS",
+ "EM_ME16",
+ "EM_METAG",
+ "EM_MICROBLAZE",
+ "EM_MIPS",
+ "EM_MIPS_RS3_LE",
+ "EM_MIPS_RS4_BE",
+ "EM_MIPS_X",
+ "EM_MMA",
+ "EM_MMDSP_PLUS",
+ "EM_MMIX",
+ "EM_MN10200",
+ "EM_MN10300",
+ "EM_MOXIE",
+ "EM_MSP430",
+ "EM_NCPU",
+ "EM_NDR1",
+ "EM_NDS32",
+ "EM_NONE",
+ "EM_NORC",
+ "EM_NS32K",
+ "EM_OPEN8",
+ "EM_OPENRISC",
+ "EM_PARISC",
+ "EM_PCP",
+ "EM_PDP10",
+ "EM_PDP11",
+ "EM_PDSP",
+ "EM_PJ",
+ "EM_PPC",
+ "EM_PPC64",
+ "EM_PRISM",
+ "EM_QDSP6",
+ "EM_R32C",
+ "EM_RCE",
+ "EM_RH32",
+ "EM_RISCV",
+ "EM_RL78",
+ "EM_RS08",
+ "EM_RX",
+ "EM_S370",
+ "EM_S390",
+ "EM_SCORE7",
+ "EM_SEP",
+ "EM_SE_C17",
+ "EM_SE_C33",
+ "EM_SH",
+ "EM_SHARC",
+ "EM_SLE9X",
+ "EM_SNP1K",
+ "EM_SPARC",
+ "EM_SPARC32PLUS",
+ "EM_SPARCV9",
+ "EM_ST100",
+ "EM_ST19",
+ "EM_ST200",
+ "EM_ST7",
+ "EM_ST9PLUS",
+ "EM_STARCORE",
+ "EM_STM8",
+ "EM_STXP7X",
+ "EM_SVX",
+ "EM_TILE64",
+ "EM_TILEGX",
+ "EM_TILEPRO",
+ "EM_TINYJ",
+ "EM_TI_ARP32",
+ "EM_TI_C2000",
+ "EM_TI_C5500",
+ "EM_TI_C6000",
+ "EM_TI_PRU",
+ "EM_TMM_GPP",
+ "EM_TPC",
+ "EM_TRICORE",
+ "EM_TRIMEDIA",
+ "EM_TSK3000",
+ "EM_UNICORE",
+ "EM_V800",
+ "EM_V850",
+ "EM_VAX",
+ "EM_VIDEOCORE",
+ "EM_VIDEOCORE3",
+ "EM_VIDEOCORE5",
+ "EM_VISIUM",
+ "EM_VPP500",
+ "EM_X86_64",
+ "EM_XCORE",
+ "EM_XGATE",
+ "EM_XIMO16",
+ "EM_XTENSA",
+ "EM_Z80",
+ "EM_ZSP",
+ "ET_CORE",
+ "ET_DYN",
+ "ET_EXEC",
+ "ET_HIOS",
+ "ET_HIPROC",
+ "ET_LOOS",
+ "ET_LOPROC",
+ "ET_NONE",
+ "ET_REL",
+ "EV_CURRENT",
+ "EV_NONE",
+ "ErrNoSymbols",
+ "File",
+ "FileHeader",
+ "FormatError",
+ "Header32",
+ "Header64",
+ "ImportedSymbol",
+ "Machine",
+ "NT_FPREGSET",
+ "NT_PRPSINFO",
+ "NT_PRSTATUS",
+ "NType",
+ "NewFile",
+ "OSABI",
+ "Open",
+ "PF_MASKOS",
+ "PF_MASKPROC",
+ "PF_R",
+ "PF_W",
+ "PF_X",
+ "PT_AARCH64_ARCHEXT",
+ "PT_AARCH64_UNWIND",
+ "PT_ARM_ARCHEXT",
+ "PT_ARM_EXIDX",
+ "PT_DYNAMIC",
+ "PT_GNU_EH_FRAME",
+ "PT_GNU_MBIND_HI",
+ "PT_GNU_MBIND_LO",
+ "PT_GNU_PROPERTY",
+ "PT_GNU_RELRO",
+ "PT_GNU_STACK",
+ "PT_HIOS",
+ "PT_HIPROC",
+ "PT_INTERP",
+ "PT_LOAD",
+ "PT_LOOS",
+ "PT_LOPROC",
+ "PT_MIPS_ABIFLAGS",
+ "PT_MIPS_OPTIONS",
+ "PT_MIPS_REGINFO",
+ "PT_MIPS_RTPROC",
+ "PT_NOTE",
+ "PT_NULL",
+ "PT_OPENBSD_BOOTDATA",
+ "PT_OPENBSD_RANDOMIZE",
+ "PT_OPENBSD_WXNEEDED",
+ "PT_PAX_FLAGS",
+ "PT_PHDR",
+ "PT_S390_PGSTE",
+ "PT_SHLIB",
+ "PT_SUNWSTACK",
+ "PT_SUNW_EH_FRAME",
+ "PT_TLS",
+ "Prog",
+ "Prog32",
+ "Prog64",
+ "ProgFlag",
+ "ProgHeader",
+ "ProgType",
+ "R_386",
+ "R_386_16",
+ "R_386_32",
+ "R_386_32PLT",
+ "R_386_8",
+ "R_386_COPY",
+ "R_386_GLOB_DAT",
+ "R_386_GOT32",
+ "R_386_GOT32X",
+ "R_386_GOTOFF",
+ "R_386_GOTPC",
+ "R_386_IRELATIVE",
+ "R_386_JMP_SLOT",
+ "R_386_NONE",
+ "R_386_PC16",
+ "R_386_PC32",
+ "R_386_PC8",
+ "R_386_PLT32",
+ "R_386_RELATIVE",
+ "R_386_SIZE32",
+ "R_386_TLS_DESC",
+ "R_386_TLS_DESC_CALL",
+ "R_386_TLS_DTPMOD32",
+ "R_386_TLS_DTPOFF32",
+ "R_386_TLS_GD",
+ "R_386_TLS_GD_32",
+ "R_386_TLS_GD_CALL",
+ "R_386_TLS_GD_POP",
+ "R_386_TLS_GD_PUSH",
+ "R_386_TLS_GOTDESC",
+ "R_386_TLS_GOTIE",
+ "R_386_TLS_IE",
+ "R_386_TLS_IE_32",
+ "R_386_TLS_LDM",
+ "R_386_TLS_LDM_32",
+ "R_386_TLS_LDM_CALL",
+ "R_386_TLS_LDM_POP",
+ "R_386_TLS_LDM_PUSH",
+ "R_386_TLS_LDO_32",
+ "R_386_TLS_LE",
+ "R_386_TLS_LE_32",
+ "R_386_TLS_TPOFF",
+ "R_386_TLS_TPOFF32",
+ "R_390",
+ "R_390_12",
+ "R_390_16",
+ "R_390_20",
+ "R_390_32",
+ "R_390_64",
+ "R_390_8",
+ "R_390_COPY",
+ "R_390_GLOB_DAT",
+ "R_390_GOT12",
+ "R_390_GOT16",
+ "R_390_GOT20",
+ "R_390_GOT32",
+ "R_390_GOT64",
+ "R_390_GOTENT",
+ "R_390_GOTOFF",
+ "R_390_GOTOFF16",
+ "R_390_GOTOFF64",
+ "R_390_GOTPC",
+ "R_390_GOTPCDBL",
+ "R_390_GOTPLT12",
+ "R_390_GOTPLT16",
+ "R_390_GOTPLT20",
+ "R_390_GOTPLT32",
+ "R_390_GOTPLT64",
+ "R_390_GOTPLTENT",
+ "R_390_GOTPLTOFF16",
+ "R_390_GOTPLTOFF32",
+ "R_390_GOTPLTOFF64",
+ "R_390_JMP_SLOT",
+ "R_390_NONE",
+ "R_390_PC16",
+ "R_390_PC16DBL",
+ "R_390_PC32",
+ "R_390_PC32DBL",
+ "R_390_PC64",
+ "R_390_PLT16DBL",
+ "R_390_PLT32",
+ "R_390_PLT32DBL",
+ "R_390_PLT64",
+ "R_390_RELATIVE",
+ "R_390_TLS_DTPMOD",
+ "R_390_TLS_DTPOFF",
+ "R_390_TLS_GD32",
+ "R_390_TLS_GD64",
+ "R_390_TLS_GDCALL",
+ "R_390_TLS_GOTIE12",
+ "R_390_TLS_GOTIE20",
+ "R_390_TLS_GOTIE32",
+ "R_390_TLS_GOTIE64",
+ "R_390_TLS_IE32",
+ "R_390_TLS_IE64",
+ "R_390_TLS_IEENT",
+ "R_390_TLS_LDCALL",
+ "R_390_TLS_LDM32",
+ "R_390_TLS_LDM64",
+ "R_390_TLS_LDO32",
+ "R_390_TLS_LDO64",
+ "R_390_TLS_LE32",
+ "R_390_TLS_LE64",
+ "R_390_TLS_LOAD",
+ "R_390_TLS_TPOFF",
+ "R_AARCH64",
+ "R_AARCH64_ABS16",
+ "R_AARCH64_ABS32",
+ "R_AARCH64_ABS64",
+ "R_AARCH64_ADD_ABS_LO12_NC",
+ "R_AARCH64_ADR_GOT_PAGE",
+ "R_AARCH64_ADR_PREL_LO21",
+ "R_AARCH64_ADR_PREL_PG_HI21",
+ "R_AARCH64_ADR_PREL_PG_HI21_NC",
+ "R_AARCH64_CALL26",
+ "R_AARCH64_CONDBR19",
+ "R_AARCH64_COPY",
+ "R_AARCH64_GLOB_DAT",
+ "R_AARCH64_GOT_LD_PREL19",
+ "R_AARCH64_IRELATIVE",
+ "R_AARCH64_JUMP26",
+ "R_AARCH64_JUMP_SLOT",
+ "R_AARCH64_LD64_GOTOFF_LO15",
+ "R_AARCH64_LD64_GOTPAGE_LO15",
+ "R_AARCH64_LD64_GOT_LO12_NC",
+ "R_AARCH64_LDST128_ABS_LO12_NC",
+ "R_AARCH64_LDST16_ABS_LO12_NC",
+ "R_AARCH64_LDST32_ABS_LO12_NC",
+ "R_AARCH64_LDST64_ABS_LO12_NC",
+ "R_AARCH64_LDST8_ABS_LO12_NC",
+ "R_AARCH64_LD_PREL_LO19",
+ "R_AARCH64_MOVW_SABS_G0",
+ "R_AARCH64_MOVW_SABS_G1",
+ "R_AARCH64_MOVW_SABS_G2",
+ "R_AARCH64_MOVW_UABS_G0",
+ "R_AARCH64_MOVW_UABS_G0_NC",
+ "R_AARCH64_MOVW_UABS_G1",
+ "R_AARCH64_MOVW_UABS_G1_NC",
+ "R_AARCH64_MOVW_UABS_G2",
+ "R_AARCH64_MOVW_UABS_G2_NC",
+ "R_AARCH64_MOVW_UABS_G3",
+ "R_AARCH64_NONE",
+ "R_AARCH64_NULL",
+ "R_AARCH64_P32_ABS16",
+ "R_AARCH64_P32_ABS32",
+ "R_AARCH64_P32_ADD_ABS_LO12_NC",
+ "R_AARCH64_P32_ADR_GOT_PAGE",
+ "R_AARCH64_P32_ADR_PREL_LO21",
+ "R_AARCH64_P32_ADR_PREL_PG_HI21",
+ "R_AARCH64_P32_CALL26",
+ "R_AARCH64_P32_CONDBR19",
+ "R_AARCH64_P32_COPY",
+ "R_AARCH64_P32_GLOB_DAT",
+ "R_AARCH64_P32_GOT_LD_PREL19",
+ "R_AARCH64_P32_IRELATIVE",
+ "R_AARCH64_P32_JUMP26",
+ "R_AARCH64_P32_JUMP_SLOT",
+ "R_AARCH64_P32_LD32_GOT_LO12_NC",
+ "R_AARCH64_P32_LDST128_ABS_LO12_NC",
+ "R_AARCH64_P32_LDST16_ABS_LO12_NC",
+ "R_AARCH64_P32_LDST32_ABS_LO12_NC",
+ "R_AARCH64_P32_LDST64_ABS_LO12_NC",
+ "R_AARCH64_P32_LDST8_ABS_LO12_NC",
+ "R_AARCH64_P32_LD_PREL_LO19",
+ "R_AARCH64_P32_MOVW_SABS_G0",
+ "R_AARCH64_P32_MOVW_UABS_G0",
+ "R_AARCH64_P32_MOVW_UABS_G0_NC",
+ "R_AARCH64_P32_MOVW_UABS_G1",
+ "R_AARCH64_P32_PREL16",
+ "R_AARCH64_P32_PREL32",
+ "R_AARCH64_P32_RELATIVE",
+ "R_AARCH64_P32_TLSDESC",
+ "R_AARCH64_P32_TLSDESC_ADD_LO12_NC",
+ "R_AARCH64_P32_TLSDESC_ADR_PAGE21",
+ "R_AARCH64_P32_TLSDESC_ADR_PREL21",
+ "R_AARCH64_P32_TLSDESC_CALL",
+ "R_AARCH64_P32_TLSDESC_LD32_LO12_NC",
+ "R_AARCH64_P32_TLSDESC_LD_PREL19",
+ "R_AARCH64_P32_TLSGD_ADD_LO12_NC",
+ "R_AARCH64_P32_TLSGD_ADR_PAGE21",
+ "R_AARCH64_P32_TLSIE_ADR_GOTTPREL_PAGE21",
+ "R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC",
+ "R_AARCH64_P32_TLSIE_LD_GOTTPREL_PREL19",
+ "R_AARCH64_P32_TLSLE_ADD_TPREL_HI12",
+ "R_AARCH64_P32_TLSLE_ADD_TPREL_LO12",
+ "R_AARCH64_P32_TLSLE_ADD_TPREL_LO12_NC",
+ "R_AARCH64_P32_TLSLE_MOVW_TPREL_G0",
+ "R_AARCH64_P32_TLSLE_MOVW_TPREL_G0_NC",
+ "R_AARCH64_P32_TLSLE_MOVW_TPREL_G1",
+ "R_AARCH64_P32_TLS_DTPMOD",
+ "R_AARCH64_P32_TLS_DTPREL",
+ "R_AARCH64_P32_TLS_TPREL",
+ "R_AARCH64_P32_TSTBR14",
+ "R_AARCH64_PREL16",
+ "R_AARCH64_PREL32",
+ "R_AARCH64_PREL64",
+ "R_AARCH64_RELATIVE",
+ "R_AARCH64_TLSDESC",
+ "R_AARCH64_TLSDESC_ADD",
+ "R_AARCH64_TLSDESC_ADD_LO12_NC",
+ "R_AARCH64_TLSDESC_ADR_PAGE21",
+ "R_AARCH64_TLSDESC_ADR_PREL21",
+ "R_AARCH64_TLSDESC_CALL",
+ "R_AARCH64_TLSDESC_LD64_LO12_NC",
+ "R_AARCH64_TLSDESC_LDR",
+ "R_AARCH64_TLSDESC_LD_PREL19",
+ "R_AARCH64_TLSDESC_OFF_G0_NC",
+ "R_AARCH64_TLSDESC_OFF_G1",
+ "R_AARCH64_TLSGD_ADD_LO12_NC",
+ "R_AARCH64_TLSGD_ADR_PAGE21",
+ "R_AARCH64_TLSGD_ADR_PREL21",
+ "R_AARCH64_TLSGD_MOVW_G0_NC",
+ "R_AARCH64_TLSGD_MOVW_G1",
+ "R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21",
+ "R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC",
+ "R_AARCH64_TLSIE_LD_GOTTPREL_PREL19",
+ "R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC",
+ "R_AARCH64_TLSIE_MOVW_GOTTPREL_G1",
+ "R_AARCH64_TLSLD_ADR_PAGE21",
+ "R_AARCH64_TLSLD_ADR_PREL21",
+ "R_AARCH64_TLSLD_LDST128_DTPREL_LO12",
+ "R_AARCH64_TLSLD_LDST128_DTPREL_LO12_NC",
+ "R_AARCH64_TLSLE_ADD_TPREL_HI12",
+ "R_AARCH64_TLSLE_ADD_TPREL_LO12",
+ "R_AARCH64_TLSLE_ADD_TPREL_LO12_NC",
+ "R_AARCH64_TLSLE_LDST128_TPREL_LO12",
+ "R_AARCH64_TLSLE_LDST128_TPREL_LO12_NC",
+ "R_AARCH64_TLSLE_MOVW_TPREL_G0",
+ "R_AARCH64_TLSLE_MOVW_TPREL_G0_NC",
+ "R_AARCH64_TLSLE_MOVW_TPREL_G1",
+ "R_AARCH64_TLSLE_MOVW_TPREL_G1_NC",
+ "R_AARCH64_TLSLE_MOVW_TPREL_G2",
+ "R_AARCH64_TLS_DTPMOD64",
+ "R_AARCH64_TLS_DTPREL64",
+ "R_AARCH64_TLS_TPREL64",
+ "R_AARCH64_TSTBR14",
+ "R_ALPHA",
+ "R_ALPHA_BRADDR",
+ "R_ALPHA_COPY",
+ "R_ALPHA_GLOB_DAT",
+ "R_ALPHA_GPDISP",
+ "R_ALPHA_GPREL32",
+ "R_ALPHA_GPRELHIGH",
+ "R_ALPHA_GPRELLOW",
+ "R_ALPHA_GPVALUE",
+ "R_ALPHA_HINT",
+ "R_ALPHA_IMMED_BR_HI32",
+ "R_ALPHA_IMMED_GP_16",
+ "R_ALPHA_IMMED_GP_HI32",
+ "R_ALPHA_IMMED_LO32",
+ "R_ALPHA_IMMED_SCN_HI32",
+ "R_ALPHA_JMP_SLOT",
+ "R_ALPHA_LITERAL",
+ "R_ALPHA_LITUSE",
+ "R_ALPHA_NONE",
+ "R_ALPHA_OP_PRSHIFT",
+ "R_ALPHA_OP_PSUB",
+ "R_ALPHA_OP_PUSH",
+ "R_ALPHA_OP_STORE",
+ "R_ALPHA_REFLONG",
+ "R_ALPHA_REFQUAD",
+ "R_ALPHA_RELATIVE",
+ "R_ALPHA_SREL16",
+ "R_ALPHA_SREL32",
+ "R_ALPHA_SREL64",
+ "R_ARM",
+ "R_ARM_ABS12",
+ "R_ARM_ABS16",
+ "R_ARM_ABS32",
+ "R_ARM_ABS32_NOI",
+ "R_ARM_ABS8",
+ "R_ARM_ALU_PCREL_15_8",
+ "R_ARM_ALU_PCREL_23_15",
+ "R_ARM_ALU_PCREL_7_0",
+ "R_ARM_ALU_PC_G0",
+ "R_ARM_ALU_PC_G0_NC",
+ "R_ARM_ALU_PC_G1",
+ "R_ARM_ALU_PC_G1_NC",
+ "R_ARM_ALU_PC_G2",
+ "R_ARM_ALU_SBREL_19_12_NC",
+ "R_ARM_ALU_SBREL_27_20_CK",
+ "R_ARM_ALU_SB_G0",
+ "R_ARM_ALU_SB_G0_NC",
+ "R_ARM_ALU_SB_G1",
+ "R_ARM_ALU_SB_G1_NC",
+ "R_ARM_ALU_SB_G2",
+ "R_ARM_AMP_VCALL9",
+ "R_ARM_BASE_ABS",
+ "R_ARM_CALL",
+ "R_ARM_COPY",
+ "R_ARM_GLOB_DAT",
+ "R_ARM_GNU_VTENTRY",
+ "R_ARM_GNU_VTINHERIT",
+ "R_ARM_GOT32",
+ "R_ARM_GOTOFF",
+ "R_ARM_GOTOFF12",
+ "R_ARM_GOTPC",
+ "R_ARM_GOTRELAX",
+ "R_ARM_GOT_ABS",
+ "R_ARM_GOT_BREL12",
+ "R_ARM_GOT_PREL",
+ "R_ARM_IRELATIVE",
+ "R_ARM_JUMP24",
+ "R_ARM_JUMP_SLOT",
+ "R_ARM_LDC_PC_G0",
+ "R_ARM_LDC_PC_G1",
+ "R_ARM_LDC_PC_G2",
+ "R_ARM_LDC_SB_G0",
+ "R_ARM_LDC_SB_G1",
+ "R_ARM_LDC_SB_G2",
+ "R_ARM_LDRS_PC_G0",
+ "R_ARM_LDRS_PC_G1",
+ "R_ARM_LDRS_PC_G2",
+ "R_ARM_LDRS_SB_G0",
+ "R_ARM_LDRS_SB_G1",
+ "R_ARM_LDRS_SB_G2",
+ "R_ARM_LDR_PC_G1",
+ "R_ARM_LDR_PC_G2",
+ "R_ARM_LDR_SBREL_11_10_NC",
+ "R_ARM_LDR_SB_G0",
+ "R_ARM_LDR_SB_G1",
+ "R_ARM_LDR_SB_G2",
+ "R_ARM_ME_TOO",
+ "R_ARM_MOVT_ABS",
+ "R_ARM_MOVT_BREL",
+ "R_ARM_MOVT_PREL",
+ "R_ARM_MOVW_ABS_NC",
+ "R_ARM_MOVW_BREL",
+ "R_ARM_MOVW_BREL_NC",
+ "R_ARM_MOVW_PREL_NC",
+ "R_ARM_NONE",
+ "R_ARM_PC13",
+ "R_ARM_PC24",
+ "R_ARM_PLT32",
+ "R_ARM_PLT32_ABS",
+ "R_ARM_PREL31",
+ "R_ARM_PRIVATE_0",
+ "R_ARM_PRIVATE_1",
+ "R_ARM_PRIVATE_10",
+ "R_ARM_PRIVATE_11",
+ "R_ARM_PRIVATE_12",
+ "R_ARM_PRIVATE_13",
+ "R_ARM_PRIVATE_14",
+ "R_ARM_PRIVATE_15",
+ "R_ARM_PRIVATE_2",
+ "R_ARM_PRIVATE_3",
+ "R_ARM_PRIVATE_4",
+ "R_ARM_PRIVATE_5",
+ "R_ARM_PRIVATE_6",
+ "R_ARM_PRIVATE_7",
+ "R_ARM_PRIVATE_8",
+ "R_ARM_PRIVATE_9",
+ "R_ARM_RABS32",
+ "R_ARM_RBASE",
+ "R_ARM_REL32",
+ "R_ARM_REL32_NOI",
+ "R_ARM_RELATIVE",
+ "R_ARM_RPC24",
+ "R_ARM_RREL32",
+ "R_ARM_RSBREL32",
+ "R_ARM_RXPC25",
+ "R_ARM_SBREL31",
+ "R_ARM_SBREL32",
+ "R_ARM_SWI24",
+ "R_ARM_TARGET1",
+ "R_ARM_TARGET2",
+ "R_ARM_THM_ABS5",
+ "R_ARM_THM_ALU_ABS_G0_NC",
+ "R_ARM_THM_ALU_ABS_G1_NC",
+ "R_ARM_THM_ALU_ABS_G2_NC",
+ "R_ARM_THM_ALU_ABS_G3",
+ "R_ARM_THM_ALU_PREL_11_0",
+ "R_ARM_THM_GOT_BREL12",
+ "R_ARM_THM_JUMP11",
+ "R_ARM_THM_JUMP19",
+ "R_ARM_THM_JUMP24",
+ "R_ARM_THM_JUMP6",
+ "R_ARM_THM_JUMP8",
+ "R_ARM_THM_MOVT_ABS",
+ "R_ARM_THM_MOVT_BREL",
+ "R_ARM_THM_MOVT_PREL",
+ "R_ARM_THM_MOVW_ABS_NC",
+ "R_ARM_THM_MOVW_BREL",
+ "R_ARM_THM_MOVW_BREL_NC",
+ "R_ARM_THM_MOVW_PREL_NC",
+ "R_ARM_THM_PC12",
+ "R_ARM_THM_PC22",
+ "R_ARM_THM_PC8",
+ "R_ARM_THM_RPC22",
+ "R_ARM_THM_SWI8",
+ "R_ARM_THM_TLS_CALL",
+ "R_ARM_THM_TLS_DESCSEQ16",
+ "R_ARM_THM_TLS_DESCSEQ32",
+ "R_ARM_THM_XPC22",
+ "R_ARM_TLS_CALL",
+ "R_ARM_TLS_DESCSEQ",
+ "R_ARM_TLS_DTPMOD32",
+ "R_ARM_TLS_DTPOFF32",
+ "R_ARM_TLS_GD32",
+ "R_ARM_TLS_GOTDESC",
+ "R_ARM_TLS_IE12GP",
+ "R_ARM_TLS_IE32",
+ "R_ARM_TLS_LDM32",
+ "R_ARM_TLS_LDO12",
+ "R_ARM_TLS_LDO32",
+ "R_ARM_TLS_LE12",
+ "R_ARM_TLS_LE32",
+ "R_ARM_TLS_TPOFF32",
+ "R_ARM_V4BX",
+ "R_ARM_XPC25",
+ "R_INFO",
+ "R_INFO32",
+ "R_LARCH",
+ "R_LARCH_32",
+ "R_LARCH_32_PCREL",
+ "R_LARCH_64",
+ "R_LARCH_ABS64_HI12",
+ "R_LARCH_ABS64_LO20",
+ "R_LARCH_ABS_HI20",
+ "R_LARCH_ABS_LO12",
+ "R_LARCH_ADD16",
+ "R_LARCH_ADD24",
+ "R_LARCH_ADD32",
+ "R_LARCH_ADD64",
+ "R_LARCH_ADD8",
+ "R_LARCH_B16",
+ "R_LARCH_B21",
+ "R_LARCH_B26",
+ "R_LARCH_COPY",
+ "R_LARCH_GNU_VTENTRY",
+ "R_LARCH_GNU_VTINHERIT",
+ "R_LARCH_GOT64_HI12",
+ "R_LARCH_GOT64_LO20",
+ "R_LARCH_GOT64_PC_HI12",
+ "R_LARCH_GOT64_PC_LO20",
+ "R_LARCH_GOT_HI20",
+ "R_LARCH_GOT_LO12",
+ "R_LARCH_GOT_PC_HI20",
+ "R_LARCH_GOT_PC_LO12",
+ "R_LARCH_IRELATIVE",
+ "R_LARCH_JUMP_SLOT",
+ "R_LARCH_MARK_LA",
+ "R_LARCH_MARK_PCREL",
+ "R_LARCH_NONE",
+ "R_LARCH_PCALA64_HI12",
+ "R_LARCH_PCALA64_LO20",
+ "R_LARCH_PCALA_HI20",
+ "R_LARCH_PCALA_LO12",
+ "R_LARCH_RELATIVE",
+ "R_LARCH_RELAX",
+ "R_LARCH_SOP_ADD",
+ "R_LARCH_SOP_AND",
+ "R_LARCH_SOP_ASSERT",
+ "R_LARCH_SOP_IF_ELSE",
+ "R_LARCH_SOP_NOT",
+ "R_LARCH_SOP_POP_32_S_0_10_10_16_S2",
+ "R_LARCH_SOP_POP_32_S_0_5_10_16_S2",
+ "R_LARCH_SOP_POP_32_S_10_12",
+ "R_LARCH_SOP_POP_32_S_10_16",
+ "R_LARCH_SOP_POP_32_S_10_16_S2",
+ "R_LARCH_SOP_POP_32_S_10_5",
+ "R_LARCH_SOP_POP_32_S_5_20",
+ "R_LARCH_SOP_POP_32_U",
+ "R_LARCH_SOP_POP_32_U_10_12",
+ "R_LARCH_SOP_PUSH_ABSOLUTE",
+ "R_LARCH_SOP_PUSH_DUP",
+ "R_LARCH_SOP_PUSH_GPREL",
+ "R_LARCH_SOP_PUSH_PCREL",
+ "R_LARCH_SOP_PUSH_PLT_PCREL",
+ "R_LARCH_SOP_PUSH_TLS_GD",
+ "R_LARCH_SOP_PUSH_TLS_GOT",
+ "R_LARCH_SOP_PUSH_TLS_TPREL",
+ "R_LARCH_SOP_SL",
+ "R_LARCH_SOP_SR",
+ "R_LARCH_SOP_SUB",
+ "R_LARCH_SUB16",
+ "R_LARCH_SUB24",
+ "R_LARCH_SUB32",
+ "R_LARCH_SUB64",
+ "R_LARCH_SUB8",
+ "R_LARCH_TLS_DTPMOD32",
+ "R_LARCH_TLS_DTPMOD64",
+ "R_LARCH_TLS_DTPREL32",
+ "R_LARCH_TLS_DTPREL64",
+ "R_LARCH_TLS_GD_HI20",
+ "R_LARCH_TLS_GD_PC_HI20",
+ "R_LARCH_TLS_IE64_HI12",
+ "R_LARCH_TLS_IE64_LO20",
+ "R_LARCH_TLS_IE64_PC_HI12",
+ "R_LARCH_TLS_IE64_PC_LO20",
+ "R_LARCH_TLS_IE_HI20",
+ "R_LARCH_TLS_IE_LO12",
+ "R_LARCH_TLS_IE_PC_HI20",
+ "R_LARCH_TLS_IE_PC_LO12",
+ "R_LARCH_TLS_LD_HI20",
+ "R_LARCH_TLS_LD_PC_HI20",
+ "R_LARCH_TLS_LE64_HI12",
+ "R_LARCH_TLS_LE64_LO20",
+ "R_LARCH_TLS_LE_HI20",
+ "R_LARCH_TLS_LE_LO12",
+ "R_LARCH_TLS_TPREL32",
+ "R_LARCH_TLS_TPREL64",
+ "R_MIPS",
+ "R_MIPS_16",
+ "R_MIPS_26",
+ "R_MIPS_32",
+ "R_MIPS_64",
+ "R_MIPS_ADD_IMMEDIATE",
+ "R_MIPS_CALL16",
+ "R_MIPS_CALL_HI16",
+ "R_MIPS_CALL_LO16",
+ "R_MIPS_DELETE",
+ "R_MIPS_GOT16",
+ "R_MIPS_GOT_DISP",
+ "R_MIPS_GOT_HI16",
+ "R_MIPS_GOT_LO16",
+ "R_MIPS_GOT_OFST",
+ "R_MIPS_GOT_PAGE",
+ "R_MIPS_GPREL16",
+ "R_MIPS_GPREL32",
+ "R_MIPS_HI16",
+ "R_MIPS_HIGHER",
+ "R_MIPS_HIGHEST",
+ "R_MIPS_INSERT_A",
+ "R_MIPS_INSERT_B",
+ "R_MIPS_JALR",
+ "R_MIPS_LITERAL",
+ "R_MIPS_LO16",
+ "R_MIPS_NONE",
+ "R_MIPS_PC16",
+ "R_MIPS_PJUMP",
+ "R_MIPS_REL16",
+ "R_MIPS_REL32",
+ "R_MIPS_RELGOT",
+ "R_MIPS_SCN_DISP",
+ "R_MIPS_SHIFT5",
+ "R_MIPS_SHIFT6",
+ "R_MIPS_SUB",
+ "R_MIPS_TLS_DTPMOD32",
+ "R_MIPS_TLS_DTPMOD64",
+ "R_MIPS_TLS_DTPREL32",
+ "R_MIPS_TLS_DTPREL64",
+ "R_MIPS_TLS_DTPREL_HI16",
+ "R_MIPS_TLS_DTPREL_LO16",
+ "R_MIPS_TLS_GD",
+ "R_MIPS_TLS_GOTTPREL",
+ "R_MIPS_TLS_LDM",
+ "R_MIPS_TLS_TPREL32",
+ "R_MIPS_TLS_TPREL64",
+ "R_MIPS_TLS_TPREL_HI16",
+ "R_MIPS_TLS_TPREL_LO16",
+ "R_PPC",
+ "R_PPC64",
+ "R_PPC64_ADDR14",
+ "R_PPC64_ADDR14_BRNTAKEN",
+ "R_PPC64_ADDR14_BRTAKEN",
+ "R_PPC64_ADDR16",
+ "R_PPC64_ADDR16_DS",
+ "R_PPC64_ADDR16_HA",
+ "R_PPC64_ADDR16_HI",
+ "R_PPC64_ADDR16_HIGH",
+ "R_PPC64_ADDR16_HIGHA",
+ "R_PPC64_ADDR16_HIGHER",
+ "R_PPC64_ADDR16_HIGHER34",
+ "R_PPC64_ADDR16_HIGHERA",
+ "R_PPC64_ADDR16_HIGHERA34",
+ "R_PPC64_ADDR16_HIGHEST",
+ "R_PPC64_ADDR16_HIGHEST34",
+ "R_PPC64_ADDR16_HIGHESTA",
+ "R_PPC64_ADDR16_HIGHESTA34",
+ "R_PPC64_ADDR16_LO",
+ "R_PPC64_ADDR16_LO_DS",
+ "R_PPC64_ADDR24",
+ "R_PPC64_ADDR32",
+ "R_PPC64_ADDR64",
+ "R_PPC64_ADDR64_LOCAL",
+ "R_PPC64_COPY",
+ "R_PPC64_D28",
+ "R_PPC64_D34",
+ "R_PPC64_D34_HA30",
+ "R_PPC64_D34_HI30",
+ "R_PPC64_D34_LO",
+ "R_PPC64_DTPMOD64",
+ "R_PPC64_DTPREL16",
+ "R_PPC64_DTPREL16_DS",
+ "R_PPC64_DTPREL16_HA",
+ "R_PPC64_DTPREL16_HI",
+ "R_PPC64_DTPREL16_HIGH",
+ "R_PPC64_DTPREL16_HIGHA",
+ "R_PPC64_DTPREL16_HIGHER",
+ "R_PPC64_DTPREL16_HIGHERA",
+ "R_PPC64_DTPREL16_HIGHEST",
+ "R_PPC64_DTPREL16_HIGHESTA",
+ "R_PPC64_DTPREL16_LO",
+ "R_PPC64_DTPREL16_LO_DS",
+ "R_PPC64_DTPREL34",
+ "R_PPC64_DTPREL64",
+ "R_PPC64_ENTRY",
+ "R_PPC64_GLOB_DAT",
+ "R_PPC64_GNU_VTENTRY",
+ "R_PPC64_GNU_VTINHERIT",
+ "R_PPC64_GOT16",
+ "R_PPC64_GOT16_DS",
+ "R_PPC64_GOT16_HA",
+ "R_PPC64_GOT16_HI",
+ "R_PPC64_GOT16_LO",
+ "R_PPC64_GOT16_LO_DS",
+ "R_PPC64_GOT_DTPREL16_DS",
+ "R_PPC64_GOT_DTPREL16_HA",
+ "R_PPC64_GOT_DTPREL16_HI",
+ "R_PPC64_GOT_DTPREL16_LO_DS",
+ "R_PPC64_GOT_DTPREL_PCREL34",
+ "R_PPC64_GOT_PCREL34",
+ "R_PPC64_GOT_TLSGD16",
+ "R_PPC64_GOT_TLSGD16_HA",
+ "R_PPC64_GOT_TLSGD16_HI",
+ "R_PPC64_GOT_TLSGD16_LO",
+ "R_PPC64_GOT_TLSGD_PCREL34",
+ "R_PPC64_GOT_TLSLD16",
+ "R_PPC64_GOT_TLSLD16_HA",
+ "R_PPC64_GOT_TLSLD16_HI",
+ "R_PPC64_GOT_TLSLD16_LO",
+ "R_PPC64_GOT_TLSLD_PCREL34",
+ "R_PPC64_GOT_TPREL16_DS",
+ "R_PPC64_GOT_TPREL16_HA",
+ "R_PPC64_GOT_TPREL16_HI",
+ "R_PPC64_GOT_TPREL16_LO_DS",
+ "R_PPC64_GOT_TPREL_PCREL34",
+ "R_PPC64_IRELATIVE",
+ "R_PPC64_JMP_IREL",
+ "R_PPC64_JMP_SLOT",
+ "R_PPC64_NONE",
+ "R_PPC64_PCREL28",
+ "R_PPC64_PCREL34",
+ "R_PPC64_PCREL_OPT",
+ "R_PPC64_PLT16_HA",
+ "R_PPC64_PLT16_HI",
+ "R_PPC64_PLT16_LO",
+ "R_PPC64_PLT16_LO_DS",
+ "R_PPC64_PLT32",
+ "R_PPC64_PLT64",
+ "R_PPC64_PLTCALL",
+ "R_PPC64_PLTCALL_NOTOC",
+ "R_PPC64_PLTGOT16",
+ "R_PPC64_PLTGOT16_DS",
+ "R_PPC64_PLTGOT16_HA",
+ "R_PPC64_PLTGOT16_HI",
+ "R_PPC64_PLTGOT16_LO",
+ "R_PPC64_PLTGOT_LO_DS",
+ "R_PPC64_PLTREL32",
+ "R_PPC64_PLTREL64",
+ "R_PPC64_PLTSEQ",
+ "R_PPC64_PLTSEQ_NOTOC",
+ "R_PPC64_PLT_PCREL34",
+ "R_PPC64_PLT_PCREL34_NOTOC",
+ "R_PPC64_REL14",
+ "R_PPC64_REL14_BRNTAKEN",
+ "R_PPC64_REL14_BRTAKEN",
+ "R_PPC64_REL16",
+ "R_PPC64_REL16DX_HA",
+ "R_PPC64_REL16_HA",
+ "R_PPC64_REL16_HI",
+ "R_PPC64_REL16_HIGH",
+ "R_PPC64_REL16_HIGHA",
+ "R_PPC64_REL16_HIGHER",
+ "R_PPC64_REL16_HIGHER34",
+ "R_PPC64_REL16_HIGHERA",
+ "R_PPC64_REL16_HIGHERA34",
+ "R_PPC64_REL16_HIGHEST",
+ "R_PPC64_REL16_HIGHEST34",
+ "R_PPC64_REL16_HIGHESTA",
+ "R_PPC64_REL16_HIGHESTA34",
+ "R_PPC64_REL16_LO",
+ "R_PPC64_REL24",
+ "R_PPC64_REL24_NOTOC",
+ "R_PPC64_REL24_P9NOTOC",
+ "R_PPC64_REL30",
+ "R_PPC64_REL32",
+ "R_PPC64_REL64",
+ "R_PPC64_RELATIVE",
+ "R_PPC64_SECTOFF",
+ "R_PPC64_SECTOFF_DS",
+ "R_PPC64_SECTOFF_HA",
+ "R_PPC64_SECTOFF_HI",
+ "R_PPC64_SECTOFF_LO",
+ "R_PPC64_SECTOFF_LO_DS",
+ "R_PPC64_TLS",
+ "R_PPC64_TLSGD",
+ "R_PPC64_TLSLD",
+ "R_PPC64_TOC",
+ "R_PPC64_TOC16",
+ "R_PPC64_TOC16_DS",
+ "R_PPC64_TOC16_HA",
+ "R_PPC64_TOC16_HI",
+ "R_PPC64_TOC16_LO",
+ "R_PPC64_TOC16_LO_DS",
+ "R_PPC64_TOCSAVE",
+ "R_PPC64_TPREL16",
+ "R_PPC64_TPREL16_DS",
+ "R_PPC64_TPREL16_HA",
+ "R_PPC64_TPREL16_HI",
+ "R_PPC64_TPREL16_HIGH",
+ "R_PPC64_TPREL16_HIGHA",
+ "R_PPC64_TPREL16_HIGHER",
+ "R_PPC64_TPREL16_HIGHERA",
+ "R_PPC64_TPREL16_HIGHEST",
+ "R_PPC64_TPREL16_HIGHESTA",
+ "R_PPC64_TPREL16_LO",
+ "R_PPC64_TPREL16_LO_DS",
+ "R_PPC64_TPREL34",
+ "R_PPC64_TPREL64",
+ "R_PPC64_UADDR16",
+ "R_PPC64_UADDR32",
+ "R_PPC64_UADDR64",
+ "R_PPC_ADDR14",
+ "R_PPC_ADDR14_BRNTAKEN",
+ "R_PPC_ADDR14_BRTAKEN",
+ "R_PPC_ADDR16",
+ "R_PPC_ADDR16_HA",
+ "R_PPC_ADDR16_HI",
+ "R_PPC_ADDR16_LO",
+ "R_PPC_ADDR24",
+ "R_PPC_ADDR32",
+ "R_PPC_COPY",
+ "R_PPC_DTPMOD32",
+ "R_PPC_DTPREL16",
+ "R_PPC_DTPREL16_HA",
+ "R_PPC_DTPREL16_HI",
+ "R_PPC_DTPREL16_LO",
+ "R_PPC_DTPREL32",
+ "R_PPC_EMB_BIT_FLD",
+ "R_PPC_EMB_MRKREF",
+ "R_PPC_EMB_NADDR16",
+ "R_PPC_EMB_NADDR16_HA",
+ "R_PPC_EMB_NADDR16_HI",
+ "R_PPC_EMB_NADDR16_LO",
+ "R_PPC_EMB_NADDR32",
+ "R_PPC_EMB_RELSDA",
+ "R_PPC_EMB_RELSEC16",
+ "R_PPC_EMB_RELST_HA",
+ "R_PPC_EMB_RELST_HI",
+ "R_PPC_EMB_RELST_LO",
+ "R_PPC_EMB_SDA21",
+ "R_PPC_EMB_SDA2I16",
+ "R_PPC_EMB_SDA2REL",
+ "R_PPC_EMB_SDAI16",
+ "R_PPC_GLOB_DAT",
+ "R_PPC_GOT16",
+ "R_PPC_GOT16_HA",
+ "R_PPC_GOT16_HI",
+ "R_PPC_GOT16_LO",
+ "R_PPC_GOT_TLSGD16",
+ "R_PPC_GOT_TLSGD16_HA",
+ "R_PPC_GOT_TLSGD16_HI",
+ "R_PPC_GOT_TLSGD16_LO",
+ "R_PPC_GOT_TLSLD16",
+ "R_PPC_GOT_TLSLD16_HA",
+ "R_PPC_GOT_TLSLD16_HI",
+ "R_PPC_GOT_TLSLD16_LO",
+ "R_PPC_GOT_TPREL16",
+ "R_PPC_GOT_TPREL16_HA",
+ "R_PPC_GOT_TPREL16_HI",
+ "R_PPC_GOT_TPREL16_LO",
+ "R_PPC_JMP_SLOT",
+ "R_PPC_LOCAL24PC",
+ "R_PPC_NONE",
+ "R_PPC_PLT16_HA",
+ "R_PPC_PLT16_HI",
+ "R_PPC_PLT16_LO",
+ "R_PPC_PLT32",
+ "R_PPC_PLTREL24",
+ "R_PPC_PLTREL32",
+ "R_PPC_REL14",
+ "R_PPC_REL14_BRNTAKEN",
+ "R_PPC_REL14_BRTAKEN",
+ "R_PPC_REL24",
+ "R_PPC_REL32",
+ "R_PPC_RELATIVE",
+ "R_PPC_SDAREL16",
+ "R_PPC_SECTOFF",
+ "R_PPC_SECTOFF_HA",
+ "R_PPC_SECTOFF_HI",
+ "R_PPC_SECTOFF_LO",
+ "R_PPC_TLS",
+ "R_PPC_TPREL16",
+ "R_PPC_TPREL16_HA",
+ "R_PPC_TPREL16_HI",
+ "R_PPC_TPREL16_LO",
+ "R_PPC_TPREL32",
+ "R_PPC_UADDR16",
+ "R_PPC_UADDR32",
+ "R_RISCV",
+ "R_RISCV_32",
+ "R_RISCV_32_PCREL",
+ "R_RISCV_64",
+ "R_RISCV_ADD16",
+ "R_RISCV_ADD32",
+ "R_RISCV_ADD64",
+ "R_RISCV_ADD8",
+ "R_RISCV_ALIGN",
+ "R_RISCV_BRANCH",
+ "R_RISCV_CALL",
+ "R_RISCV_CALL_PLT",
+ "R_RISCV_COPY",
+ "R_RISCV_GNU_VTENTRY",
+ "R_RISCV_GNU_VTINHERIT",
+ "R_RISCV_GOT_HI20",
+ "R_RISCV_GPREL_I",
+ "R_RISCV_GPREL_S",
+ "R_RISCV_HI20",
+ "R_RISCV_JAL",
+ "R_RISCV_JUMP_SLOT",
+ "R_RISCV_LO12_I",
+ "R_RISCV_LO12_S",
+ "R_RISCV_NONE",
+ "R_RISCV_PCREL_HI20",
+ "R_RISCV_PCREL_LO12_I",
+ "R_RISCV_PCREL_LO12_S",
+ "R_RISCV_RELATIVE",
+ "R_RISCV_RELAX",
+ "R_RISCV_RVC_BRANCH",
+ "R_RISCV_RVC_JUMP",
+ "R_RISCV_RVC_LUI",
+ "R_RISCV_SET16",
+ "R_RISCV_SET32",
+ "R_RISCV_SET6",
+ "R_RISCV_SET8",
+ "R_RISCV_SUB16",
+ "R_RISCV_SUB32",
+ "R_RISCV_SUB6",
+ "R_RISCV_SUB64",
+ "R_RISCV_SUB8",
+ "R_RISCV_TLS_DTPMOD32",
+ "R_RISCV_TLS_DTPMOD64",
+ "R_RISCV_TLS_DTPREL32",
+ "R_RISCV_TLS_DTPREL64",
+ "R_RISCV_TLS_GD_HI20",
+ "R_RISCV_TLS_GOT_HI20",
+ "R_RISCV_TLS_TPREL32",
+ "R_RISCV_TLS_TPREL64",
+ "R_RISCV_TPREL_ADD",
+ "R_RISCV_TPREL_HI20",
+ "R_RISCV_TPREL_I",
+ "R_RISCV_TPREL_LO12_I",
+ "R_RISCV_TPREL_LO12_S",
+ "R_RISCV_TPREL_S",
+ "R_SPARC",
+ "R_SPARC_10",
+ "R_SPARC_11",
+ "R_SPARC_13",
+ "R_SPARC_16",
+ "R_SPARC_22",
+ "R_SPARC_32",
+ "R_SPARC_5",
+ "R_SPARC_6",
+ "R_SPARC_64",
+ "R_SPARC_7",
+ "R_SPARC_8",
+ "R_SPARC_COPY",
+ "R_SPARC_DISP16",
+ "R_SPARC_DISP32",
+ "R_SPARC_DISP64",
+ "R_SPARC_DISP8",
+ "R_SPARC_GLOB_DAT",
+ "R_SPARC_GLOB_JMP",
+ "R_SPARC_GOT10",
+ "R_SPARC_GOT13",
+ "R_SPARC_GOT22",
+ "R_SPARC_H44",
+ "R_SPARC_HH22",
+ "R_SPARC_HI22",
+ "R_SPARC_HIPLT22",
+ "R_SPARC_HIX22",
+ "R_SPARC_HM10",
+ "R_SPARC_JMP_SLOT",
+ "R_SPARC_L44",
+ "R_SPARC_LM22",
+ "R_SPARC_LO10",
+ "R_SPARC_LOPLT10",
+ "R_SPARC_LOX10",
+ "R_SPARC_M44",
+ "R_SPARC_NONE",
+ "R_SPARC_OLO10",
+ "R_SPARC_PC10",
+ "R_SPARC_PC22",
+ "R_SPARC_PCPLT10",
+ "R_SPARC_PCPLT22",
+ "R_SPARC_PCPLT32",
+ "R_SPARC_PC_HH22",
+ "R_SPARC_PC_HM10",
+ "R_SPARC_PC_LM22",
+ "R_SPARC_PLT32",
+ "R_SPARC_PLT64",
+ "R_SPARC_REGISTER",
+ "R_SPARC_RELATIVE",
+ "R_SPARC_UA16",
+ "R_SPARC_UA32",
+ "R_SPARC_UA64",
+ "R_SPARC_WDISP16",
+ "R_SPARC_WDISP19",
+ "R_SPARC_WDISP22",
+ "R_SPARC_WDISP30",
+ "R_SPARC_WPLT30",
+ "R_SYM32",
+ "R_SYM64",
+ "R_TYPE32",
+ "R_TYPE64",
+ "R_X86_64",
+ "R_X86_64_16",
+ "R_X86_64_32",
+ "R_X86_64_32S",
+ "R_X86_64_64",
+ "R_X86_64_8",
+ "R_X86_64_COPY",
+ "R_X86_64_DTPMOD64",
+ "R_X86_64_DTPOFF32",
+ "R_X86_64_DTPOFF64",
+ "R_X86_64_GLOB_DAT",
+ "R_X86_64_GOT32",
+ "R_X86_64_GOT64",
+ "R_X86_64_GOTOFF64",
+ "R_X86_64_GOTPC32",
+ "R_X86_64_GOTPC32_TLSDESC",
+ "R_X86_64_GOTPC64",
+ "R_X86_64_GOTPCREL",
+ "R_X86_64_GOTPCREL64",
+ "R_X86_64_GOTPCRELX",
+ "R_X86_64_GOTPLT64",
+ "R_X86_64_GOTTPOFF",
+ "R_X86_64_IRELATIVE",
+ "R_X86_64_JMP_SLOT",
+ "R_X86_64_NONE",
+ "R_X86_64_PC16",
+ "R_X86_64_PC32",
+ "R_X86_64_PC32_BND",
+ "R_X86_64_PC64",
+ "R_X86_64_PC8",
+ "R_X86_64_PLT32",
+ "R_X86_64_PLT32_BND",
+ "R_X86_64_PLTOFF64",
+ "R_X86_64_RELATIVE",
+ "R_X86_64_RELATIVE64",
+ "R_X86_64_REX_GOTPCRELX",
+ "R_X86_64_SIZE32",
+ "R_X86_64_SIZE64",
+ "R_X86_64_TLSDESC",
+ "R_X86_64_TLSDESC_CALL",
+ "R_X86_64_TLSGD",
+ "R_X86_64_TLSLD",
+ "R_X86_64_TPOFF32",
+ "R_X86_64_TPOFF64",
+ "Rel32",
+ "Rel64",
+ "Rela32",
+ "Rela64",
+ "SHF_ALLOC",
+ "SHF_COMPRESSED",
+ "SHF_EXECINSTR",
+ "SHF_GROUP",
+ "SHF_INFO_LINK",
+ "SHF_LINK_ORDER",
+ "SHF_MASKOS",
+ "SHF_MASKPROC",
+ "SHF_MERGE",
+ "SHF_OS_NONCONFORMING",
+ "SHF_STRINGS",
+ "SHF_TLS",
+ "SHF_WRITE",
+ "SHN_ABS",
+ "SHN_COMMON",
+ "SHN_HIOS",
+ "SHN_HIPROC",
+ "SHN_HIRESERVE",
+ "SHN_LOOS",
+ "SHN_LOPROC",
+ "SHN_LORESERVE",
+ "SHN_UNDEF",
+ "SHN_XINDEX",
+ "SHT_DYNAMIC",
+ "SHT_DYNSYM",
+ "SHT_FINI_ARRAY",
+ "SHT_GNU_ATTRIBUTES",
+ "SHT_GNU_HASH",
+ "SHT_GNU_LIBLIST",
+ "SHT_GNU_VERDEF",
+ "SHT_GNU_VERNEED",
+ "SHT_GNU_VERSYM",
+ "SHT_GROUP",
+ "SHT_HASH",
+ "SHT_HIOS",
+ "SHT_HIPROC",
+ "SHT_HIUSER",
+ "SHT_INIT_ARRAY",
+ "SHT_LOOS",
+ "SHT_LOPROC",
+ "SHT_LOUSER",
+ "SHT_MIPS_ABIFLAGS",
+ "SHT_NOBITS",
+ "SHT_NOTE",
+ "SHT_NULL",
+ "SHT_PREINIT_ARRAY",
+ "SHT_PROGBITS",
+ "SHT_REL",
+ "SHT_RELA",
+ "SHT_SHLIB",
+ "SHT_STRTAB",
+ "SHT_SYMTAB",
+ "SHT_SYMTAB_SHNDX",
+ "STB_GLOBAL",
+ "STB_HIOS",
+ "STB_HIPROC",
+ "STB_LOCAL",
+ "STB_LOOS",
+ "STB_LOPROC",
+ "STB_WEAK",
+ "STT_COMMON",
+ "STT_FILE",
+ "STT_FUNC",
+ "STT_HIOS",
+ "STT_HIPROC",
+ "STT_LOOS",
+ "STT_LOPROC",
+ "STT_NOTYPE",
+ "STT_OBJECT",
+ "STT_SECTION",
+ "STT_TLS",
+ "STV_DEFAULT",
+ "STV_HIDDEN",
+ "STV_INTERNAL",
+ "STV_PROTECTED",
+ "ST_BIND",
+ "ST_INFO",
+ "ST_TYPE",
+ "ST_VISIBILITY",
+ "Section",
+ "Section32",
+ "Section64",
+ "SectionFlag",
+ "SectionHeader",
+ "SectionIndex",
+ "SectionType",
+ "Sym32",
+ "Sym32Size",
+ "Sym64",
+ "Sym64Size",
+ "SymBind",
+ "SymType",
+ "SymVis",
+ "Symbol",
+ "Type",
+ "Version",
+ },
+ "debug/gosym": {
+ "DecodingError",
+ "Func",
+ "LineTable",
+ "NewLineTable",
+ "NewTable",
+ "Obj",
+ "Sym",
+ "Table",
+ "UnknownFileError",
+ "UnknownLineError",
+ },
+ "debug/macho": {
+ "ARM64_RELOC_ADDEND",
+ "ARM64_RELOC_BRANCH26",
+ "ARM64_RELOC_GOT_LOAD_PAGE21",
+ "ARM64_RELOC_GOT_LOAD_PAGEOFF12",
+ "ARM64_RELOC_PAGE21",
+ "ARM64_RELOC_PAGEOFF12",
+ "ARM64_RELOC_POINTER_TO_GOT",
+ "ARM64_RELOC_SUBTRACTOR",
+ "ARM64_RELOC_TLVP_LOAD_PAGE21",
+ "ARM64_RELOC_TLVP_LOAD_PAGEOFF12",
+ "ARM64_RELOC_UNSIGNED",
+ "ARM_RELOC_BR24",
+ "ARM_RELOC_HALF",
+ "ARM_RELOC_HALF_SECTDIFF",
+ "ARM_RELOC_LOCAL_SECTDIFF",
+ "ARM_RELOC_PAIR",
+ "ARM_RELOC_PB_LA_PTR",
+ "ARM_RELOC_SECTDIFF",
+ "ARM_RELOC_VANILLA",
+ "ARM_THUMB_32BIT_BRANCH",
+ "ARM_THUMB_RELOC_BR22",
+ "Cpu",
+ "Cpu386",
+ "CpuAmd64",
+ "CpuArm",
+ "CpuArm64",
+ "CpuPpc",
+ "CpuPpc64",
+ "Dylib",
+ "DylibCmd",
+ "Dysymtab",
+ "DysymtabCmd",
+ "ErrNotFat",
+ "FatArch",
+ "FatArchHeader",
+ "FatFile",
+ "File",
+ "FileHeader",
+ "FlagAllModsBound",
+ "FlagAllowStackExecution",
+ "FlagAppExtensionSafe",
+ "FlagBindAtLoad",
+ "FlagBindsToWeak",
+ "FlagCanonical",
+ "FlagDeadStrippableDylib",
+ "FlagDyldLink",
+ "FlagForceFlat",
+ "FlagHasTLVDescriptors",
+ "FlagIncrLink",
+ "FlagLazyInit",
+ "FlagNoFixPrebinding",
+ "FlagNoHeapExecution",
+ "FlagNoMultiDefs",
+ "FlagNoReexportedDylibs",
+ "FlagNoUndefs",
+ "FlagPIE",
+ "FlagPrebindable",
+ "FlagPrebound",
+ "FlagRootSafe",
+ "FlagSetuidSafe",
+ "FlagSplitSegs",
+ "FlagSubsectionsViaSymbols",
+ "FlagTwoLevel",
+ "FlagWeakDefines",
+ "FormatError",
+ "GENERIC_RELOC_LOCAL_SECTDIFF",
+ "GENERIC_RELOC_PAIR",
+ "GENERIC_RELOC_PB_LA_PTR",
+ "GENERIC_RELOC_SECTDIFF",
+ "GENERIC_RELOC_TLV",
+ "GENERIC_RELOC_VANILLA",
+ "Load",
+ "LoadBytes",
+ "LoadCmd",
+ "LoadCmdDylib",
+ "LoadCmdDylinker",
+ "LoadCmdDysymtab",
+ "LoadCmdRpath",
+ "LoadCmdSegment",
+ "LoadCmdSegment64",
+ "LoadCmdSymtab",
+ "LoadCmdThread",
+ "LoadCmdUnixThread",
+ "Magic32",
+ "Magic64",
+ "MagicFat",
+ "NewFatFile",
+ "NewFile",
+ "Nlist32",
+ "Nlist64",
+ "Open",
+ "OpenFat",
+ "Regs386",
+ "RegsAMD64",
+ "Reloc",
+ "RelocTypeARM",
+ "RelocTypeARM64",
+ "RelocTypeGeneric",
+ "RelocTypeX86_64",
+ "Rpath",
+ "RpathCmd",
+ "Section",
+ "Section32",
+ "Section64",
+ "SectionHeader",
+ "Segment",
+ "Segment32",
+ "Segment64",
+ "SegmentHeader",
+ "Symbol",
+ "Symtab",
+ "SymtabCmd",
+ "Thread",
+ "Type",
+ "TypeBundle",
+ "TypeDylib",
+ "TypeExec",
+ "TypeObj",
+ "X86_64_RELOC_BRANCH",
+ "X86_64_RELOC_GOT",
+ "X86_64_RELOC_GOT_LOAD",
+ "X86_64_RELOC_SIGNED",
+ "X86_64_RELOC_SIGNED_1",
+ "X86_64_RELOC_SIGNED_2",
+ "X86_64_RELOC_SIGNED_4",
+ "X86_64_RELOC_SUBTRACTOR",
+ "X86_64_RELOC_TLV",
+ "X86_64_RELOC_UNSIGNED",
+ },
+ "debug/pe": {
+ "COFFSymbol",
+ "COFFSymbolAuxFormat5",
+ "COFFSymbolSize",
+ "DataDirectory",
+ "File",
+ "FileHeader",
+ "FormatError",
+ "IMAGE_COMDAT_SELECT_ANY",
+ "IMAGE_COMDAT_SELECT_ASSOCIATIVE",
+ "IMAGE_COMDAT_SELECT_EXACT_MATCH",
+ "IMAGE_COMDAT_SELECT_LARGEST",
+ "IMAGE_COMDAT_SELECT_NODUPLICATES",
+ "IMAGE_COMDAT_SELECT_SAME_SIZE",
+ "IMAGE_DIRECTORY_ENTRY_ARCHITECTURE",
+ "IMAGE_DIRECTORY_ENTRY_BASERELOC",
+ "IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT",
+ "IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR",
+ "IMAGE_DIRECTORY_ENTRY_DEBUG",
+ "IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT",
+ "IMAGE_DIRECTORY_ENTRY_EXCEPTION",
+ "IMAGE_DIRECTORY_ENTRY_EXPORT",
+ "IMAGE_DIRECTORY_ENTRY_GLOBALPTR",
+ "IMAGE_DIRECTORY_ENTRY_IAT",
+ "IMAGE_DIRECTORY_ENTRY_IMPORT",
+ "IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG",
+ "IMAGE_DIRECTORY_ENTRY_RESOURCE",
+ "IMAGE_DIRECTORY_ENTRY_SECURITY",
+ "IMAGE_DIRECTORY_ENTRY_TLS",
+ "IMAGE_DLLCHARACTERISTICS_APPCONTAINER",
+ "IMAGE_DLLCHARACTERISTICS_DYNAMIC_BASE",
+ "IMAGE_DLLCHARACTERISTICS_FORCE_INTEGRITY",
+ "IMAGE_DLLCHARACTERISTICS_GUARD_CF",
+ "IMAGE_DLLCHARACTERISTICS_HIGH_ENTROPY_VA",
+ "IMAGE_DLLCHARACTERISTICS_NO_BIND",
+ "IMAGE_DLLCHARACTERISTICS_NO_ISOLATION",
+ "IMAGE_DLLCHARACTERISTICS_NO_SEH",
+ "IMAGE_DLLCHARACTERISTICS_NX_COMPAT",
+ "IMAGE_DLLCHARACTERISTICS_TERMINAL_SERVER_AWARE",
+ "IMAGE_DLLCHARACTERISTICS_WDM_DRIVER",
+ "IMAGE_FILE_32BIT_MACHINE",
+ "IMAGE_FILE_AGGRESIVE_WS_TRIM",
+ "IMAGE_FILE_BYTES_REVERSED_HI",
+ "IMAGE_FILE_BYTES_REVERSED_LO",
+ "IMAGE_FILE_DEBUG_STRIPPED",
+ "IMAGE_FILE_DLL",
+ "IMAGE_FILE_EXECUTABLE_IMAGE",
+ "IMAGE_FILE_LARGE_ADDRESS_AWARE",
+ "IMAGE_FILE_LINE_NUMS_STRIPPED",
+ "IMAGE_FILE_LOCAL_SYMS_STRIPPED",
+ "IMAGE_FILE_MACHINE_AM33",
+ "IMAGE_FILE_MACHINE_AMD64",
+ "IMAGE_FILE_MACHINE_ARM",
+ "IMAGE_FILE_MACHINE_ARM64",
+ "IMAGE_FILE_MACHINE_ARMNT",
+ "IMAGE_FILE_MACHINE_EBC",
+ "IMAGE_FILE_MACHINE_I386",
+ "IMAGE_FILE_MACHINE_IA64",
+ "IMAGE_FILE_MACHINE_LOONGARCH32",
+ "IMAGE_FILE_MACHINE_LOONGARCH64",
+ "IMAGE_FILE_MACHINE_M32R",
+ "IMAGE_FILE_MACHINE_MIPS16",
+ "IMAGE_FILE_MACHINE_MIPSFPU",
+ "IMAGE_FILE_MACHINE_MIPSFPU16",
+ "IMAGE_FILE_MACHINE_POWERPC",
+ "IMAGE_FILE_MACHINE_POWERPCFP",
+ "IMAGE_FILE_MACHINE_R4000",
+ "IMAGE_FILE_MACHINE_RISCV128",
+ "IMAGE_FILE_MACHINE_RISCV32",
+ "IMAGE_FILE_MACHINE_RISCV64",
+ "IMAGE_FILE_MACHINE_SH3",
+ "IMAGE_FILE_MACHINE_SH3DSP",
+ "IMAGE_FILE_MACHINE_SH4",
+ "IMAGE_FILE_MACHINE_SH5",
+ "IMAGE_FILE_MACHINE_THUMB",
+ "IMAGE_FILE_MACHINE_UNKNOWN",
+ "IMAGE_FILE_MACHINE_WCEMIPSV2",
+ "IMAGE_FILE_NET_RUN_FROM_SWAP",
+ "IMAGE_FILE_RELOCS_STRIPPED",
+ "IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP",
+ "IMAGE_FILE_SYSTEM",
+ "IMAGE_FILE_UP_SYSTEM_ONLY",
+ "IMAGE_SCN_CNT_CODE",
+ "IMAGE_SCN_CNT_INITIALIZED_DATA",
+ "IMAGE_SCN_CNT_UNINITIALIZED_DATA",
+ "IMAGE_SCN_LNK_COMDAT",
+ "IMAGE_SCN_MEM_DISCARDABLE",
+ "IMAGE_SCN_MEM_EXECUTE",
+ "IMAGE_SCN_MEM_READ",
+ "IMAGE_SCN_MEM_WRITE",
+ "IMAGE_SUBSYSTEM_EFI_APPLICATION",
+ "IMAGE_SUBSYSTEM_EFI_BOOT_SERVICE_DRIVER",
+ "IMAGE_SUBSYSTEM_EFI_ROM",
+ "IMAGE_SUBSYSTEM_EFI_RUNTIME_DRIVER",
+ "IMAGE_SUBSYSTEM_NATIVE",
+ "IMAGE_SUBSYSTEM_NATIVE_WINDOWS",
+ "IMAGE_SUBSYSTEM_OS2_CUI",
+ "IMAGE_SUBSYSTEM_POSIX_CUI",
+ "IMAGE_SUBSYSTEM_UNKNOWN",
+ "IMAGE_SUBSYSTEM_WINDOWS_BOOT_APPLICATION",
+ "IMAGE_SUBSYSTEM_WINDOWS_CE_GUI",
+ "IMAGE_SUBSYSTEM_WINDOWS_CUI",
+ "IMAGE_SUBSYSTEM_WINDOWS_GUI",
+ "IMAGE_SUBSYSTEM_XBOX",
+ "ImportDirectory",
+ "NewFile",
+ "Open",
+ "OptionalHeader32",
+ "OptionalHeader64",
+ "Reloc",
+ "Section",
+ "SectionHeader",
+ "SectionHeader32",
+ "StringTable",
+ "Symbol",
+ },
+ "debug/plan9obj": {
+ "ErrNoSymbols",
+ "File",
+ "FileHeader",
+ "Magic386",
+ "Magic64",
+ "MagicAMD64",
+ "MagicARM",
+ "NewFile",
+ "Open",
+ "Section",
+ "SectionHeader",
+ "Sym",
+ },
+ "embed": {
+ "FS",
+ },
+ "encoding": {
+ "BinaryMarshaler",
+ "BinaryUnmarshaler",
+ "TextMarshaler",
+ "TextUnmarshaler",
+ },
+ "encoding/ascii85": {
+ "CorruptInputError",
+ "Decode",
+ "Encode",
+ "MaxEncodedLen",
+ "NewDecoder",
+ "NewEncoder",
+ },
+ "encoding/asn1": {
+ "BitString",
+ "ClassApplication",
+ "ClassContextSpecific",
+ "ClassPrivate",
+ "ClassUniversal",
+ "Enumerated",
+ "Flag",
+ "Marshal",
+ "MarshalWithParams",
+ "NullBytes",
+ "NullRawValue",
+ "ObjectIdentifier",
+ "RawContent",
+ "RawValue",
+ "StructuralError",
+ "SyntaxError",
+ "TagBMPString",
+ "TagBitString",
+ "TagBoolean",
+ "TagEnum",
+ "TagGeneralString",
+ "TagGeneralizedTime",
+ "TagIA5String",
+ "TagInteger",
+ "TagNull",
+ "TagNumericString",
+ "TagOID",
+ "TagOctetString",
+ "TagPrintableString",
+ "TagSequence",
+ "TagSet",
+ "TagT61String",
+ "TagUTCTime",
+ "TagUTF8String",
+ "Unmarshal",
+ "UnmarshalWithParams",
+ },
+ "encoding/base32": {
+ "CorruptInputError",
+ "Encoding",
+ "HexEncoding",
+ "NewDecoder",
+ "NewEncoder",
+ "NewEncoding",
+ "NoPadding",
+ "StdEncoding",
+ "StdPadding",
+ },
+ "encoding/base64": {
+ "CorruptInputError",
+ "Encoding",
+ "NewDecoder",
+ "NewEncoder",
+ "NewEncoding",
+ "NoPadding",
+ "RawStdEncoding",
+ "RawURLEncoding",
+ "StdEncoding",
+ "StdPadding",
+ "URLEncoding",
+ },
+ "encoding/binary": {
+ "AppendByteOrder",
+ "AppendUvarint",
+ "AppendVarint",
+ "BigEndian",
+ "ByteOrder",
+ "LittleEndian",
+ "MaxVarintLen16",
+ "MaxVarintLen32",
+ "MaxVarintLen64",
+ "NativeEndian",
+ "PutUvarint",
+ "PutVarint",
+ "Read",
+ "ReadUvarint",
+ "ReadVarint",
+ "Size",
+ "Uvarint",
+ "Varint",
+ "Write",
+ },
+ "encoding/csv": {
+ "ErrBareQuote",
+ "ErrFieldCount",
+ "ErrQuote",
+ "ErrTrailingComma",
+ "NewReader",
+ "NewWriter",
+ "ParseError",
+ "Reader",
+ "Writer",
+ },
+ "encoding/gob": {
+ "CommonType",
+ "Decoder",
+ "Encoder",
+ "GobDecoder",
+ "GobEncoder",
+ "NewDecoder",
+ "NewEncoder",
+ "Register",
+ "RegisterName",
+ },
+ "encoding/hex": {
+ "Decode",
+ "DecodeString",
+ "DecodedLen",
+ "Dump",
+ "Dumper",
+ "Encode",
+ "EncodeToString",
+ "EncodedLen",
+ "ErrLength",
+ "InvalidByteError",
+ "NewDecoder",
+ "NewEncoder",
+ },
+ "encoding/json": {
+ "Compact",
+ "Decoder",
+ "Delim",
+ "Encoder",
+ "HTMLEscape",
+ "Indent",
+ "InvalidUTF8Error",
+ "InvalidUnmarshalError",
+ "Marshal",
+ "MarshalIndent",
+ "Marshaler",
+ "MarshalerError",
+ "NewDecoder",
+ "NewEncoder",
+ "Number",
+ "RawMessage",
+ "SyntaxError",
+ "Token",
+ "Unmarshal",
+ "UnmarshalFieldError",
+ "UnmarshalTypeError",
+ "Unmarshaler",
+ "UnsupportedTypeError",
+ "UnsupportedValueError",
+ "Valid",
+ },
+ "encoding/pem": {
+ "Block",
+ "Decode",
+ "Encode",
+ "EncodeToMemory",
+ },
+ "encoding/xml": {
+ "Attr",
+ "CharData",
+ "Comment",
+ "CopyToken",
+ "Decoder",
+ "Directive",
+ "Encoder",
+ "EndElement",
+ "Escape",
+ "EscapeText",
+ "HTMLAutoClose",
+ "HTMLEntity",
+ "Header",
+ "Marshal",
+ "MarshalIndent",
+ "Marshaler",
+ "MarshalerAttr",
+ "Name",
+ "NewDecoder",
+ "NewEncoder",
+ "NewTokenDecoder",
+ "ProcInst",
+ "StartElement",
+ "SyntaxError",
+ "TagPathError",
+ "Token",
+ "TokenReader",
+ "Unmarshal",
+ "UnmarshalError",
+ "Unmarshaler",
+ "UnmarshalerAttr",
+ "UnsupportedTypeError",
+ },
+ "errors": {
+ "As",
+ "ErrUnsupported",
+ "Is",
+ "Join",
+ "New",
+ "Unwrap",
+ },
+ "expvar": {
+ "Do",
+ "Float",
+ "Func",
+ "Get",
+ "Handler",
+ "Int",
+ "KeyValue",
+ "Map",
+ "NewFloat",
+ "NewInt",
+ "NewMap",
+ "NewString",
+ "Publish",
+ "String",
+ "Var",
+ },
+ "flag": {
+ "Arg",
+ "Args",
+ "Bool",
+ "BoolFunc",
+ "BoolVar",
+ "CommandLine",
+ "ContinueOnError",
+ "Duration",
+ "DurationVar",
+ "ErrHelp",
+ "ErrorHandling",
+ "ExitOnError",
+ "Flag",
+ "FlagSet",
+ "Float64",
+ "Float64Var",
+ "Func",
+ "Getter",
+ "Int",
+ "Int64",
+ "Int64Var",
+ "IntVar",
+ "Lookup",
+ "NArg",
+ "NFlag",
+ "NewFlagSet",
+ "PanicOnError",
+ "Parse",
+ "Parsed",
+ "PrintDefaults",
+ "Set",
+ "String",
+ "StringVar",
+ "TextVar",
+ "Uint",
+ "Uint64",
+ "Uint64Var",
+ "UintVar",
+ "UnquoteUsage",
+ "Usage",
+ "Value",
+ "Var",
+ "Visit",
+ "VisitAll",
+ },
+ "fmt": {
+ "Append",
+ "Appendf",
+ "Appendln",
+ "Errorf",
+ "FormatString",
+ "Formatter",
+ "Fprint",
+ "Fprintf",
+ "Fprintln",
+ "Fscan",
+ "Fscanf",
+ "Fscanln",
+ "GoStringer",
+ "Print",
+ "Printf",
+ "Println",
+ "Scan",
+ "ScanState",
+ "Scanf",
+ "Scanln",
+ "Scanner",
+ "Sprint",
+ "Sprintf",
+ "Sprintln",
+ "Sscan",
+ "Sscanf",
+ "Sscanln",
+ "State",
+ "Stringer",
+ },
+ "go/ast": {
+ "ArrayType",
+ "AssignStmt",
+ "Bad",
+ "BadDecl",
+ "BadExpr",
+ "BadStmt",
+ "BasicLit",
+ "BinaryExpr",
+ "BlockStmt",
+ "BranchStmt",
+ "CallExpr",
+ "CaseClause",
+ "ChanDir",
+ "ChanType",
+ "CommClause",
+ "Comment",
+ "CommentGroup",
+ "CommentMap",
+ "CompositeLit",
+ "Con",
+ "Decl",
+ "DeclStmt",
+ "DeferStmt",
+ "Ellipsis",
+ "EmptyStmt",
+ "Expr",
+ "ExprStmt",
+ "Field",
+ "FieldFilter",
+ "FieldList",
+ "File",
+ "FileExports",
+ "Filter",
+ "FilterDecl",
+ "FilterFile",
+ "FilterFuncDuplicates",
+ "FilterImportDuplicates",
+ "FilterPackage",
+ "FilterUnassociatedComments",
+ "ForStmt",
+ "Fprint",
+ "Fun",
+ "FuncDecl",
+ "FuncLit",
+ "FuncType",
+ "GenDecl",
+ "GoStmt",
+ "Ident",
+ "IfStmt",
+ "ImportSpec",
+ "Importer",
+ "IncDecStmt",
+ "IndexExpr",
+ "IndexListExpr",
+ "Inspect",
+ "InterfaceType",
+ "IsExported",
+ "IsGenerated",
+ "KeyValueExpr",
+ "LabeledStmt",
+ "Lbl",
+ "MapType",
+ "MergeMode",
+ "MergePackageFiles",
+ "NewCommentMap",
+ "NewIdent",
+ "NewObj",
+ "NewPackage",
+ "NewScope",
+ "Node",
+ "NotNilFilter",
+ "ObjKind",
+ "Object",
+ "Package",
+ "PackageExports",
+ "ParenExpr",
+ "Pkg",
+ "Print",
+ "RECV",
+ "RangeStmt",
+ "ReturnStmt",
+ "SEND",
+ "Scope",
+ "SelectStmt",
+ "SelectorExpr",
+ "SendStmt",
+ "SliceExpr",
+ "SortImports",
+ "Spec",
+ "StarExpr",
+ "Stmt",
+ "StructType",
+ "SwitchStmt",
+ "Typ",
+ "TypeAssertExpr",
+ "TypeSpec",
+ "TypeSwitchStmt",
+ "UnaryExpr",
+ "ValueSpec",
+ "Var",
+ "Visitor",
+ "Walk",
+ },
+ "go/build": {
+ "AllowBinary",
+ "ArchChar",
+ "Context",
+ "Default",
+ "Directive",
+ "FindOnly",
+ "IgnoreVendor",
+ "Import",
+ "ImportComment",
+ "ImportDir",
+ "ImportMode",
+ "IsLocalImport",
+ "MultiplePackageError",
+ "NoGoError",
+ "Package",
+ "ToolDir",
+ },
+ "go/build/constraint": {
+ "AndExpr",
+ "Expr",
+ "GoVersion",
+ "IsGoBuild",
+ "IsPlusBuild",
+ "NotExpr",
+ "OrExpr",
+ "Parse",
+ "PlusBuildLines",
+ "SyntaxError",
+ "TagExpr",
+ },
+ "go/constant": {
+ "BinaryOp",
+ "BitLen",
+ "Bool",
+ "BoolVal",
+ "Bytes",
+ "Compare",
+ "Complex",
+ "Denom",
+ "Float",
+ "Float32Val",
+ "Float64Val",
+ "Imag",
+ "Int",
+ "Int64Val",
+ "Kind",
+ "Make",
+ "MakeBool",
+ "MakeFloat64",
+ "MakeFromBytes",
+ "MakeFromLiteral",
+ "MakeImag",
+ "MakeInt64",
+ "MakeString",
+ "MakeUint64",
+ "MakeUnknown",
+ "Num",
+ "Real",
+ "Shift",
+ "Sign",
+ "String",
+ "StringVal",
+ "ToComplex",
+ "ToFloat",
+ "ToInt",
+ "Uint64Val",
+ "UnaryOp",
+ "Unknown",
+ "Val",
+ "Value",
+ },
+ "go/doc": {
+ "AllDecls",
+ "AllMethods",
+ "Example",
+ "Examples",
+ "Filter",
+ "Func",
+ "IllegalPrefixes",
+ "IsPredeclared",
+ "Mode",
+ "New",
+ "NewFromFiles",
+ "Note",
+ "Package",
+ "PreserveAST",
+ "Synopsis",
+ "ToHTML",
+ "ToText",
+ "Type",
+ "Value",
+ },
+ "go/doc/comment": {
+ "Block",
+ "Code",
+ "DefaultLookupPackage",
+ "Doc",
+ "DocLink",
+ "Heading",
+ "Italic",
+ "Link",
+ "LinkDef",
+ "List",
+ "ListItem",
+ "Paragraph",
+ "Parser",
+ "Plain",
+ "Printer",
+ "Text",
+ },
+ "go/format": {
+ "Node",
+ "Source",
+ },
+ "go/importer": {
+ "Default",
+ "For",
+ "ForCompiler",
+ "Lookup",
+ },
+ "go/parser": {
+ "AllErrors",
+ "DeclarationErrors",
+ "ImportsOnly",
+ "Mode",
+ "PackageClauseOnly",
+ "ParseComments",
+ "ParseDir",
+ "ParseExpr",
+ "ParseExprFrom",
+ "ParseFile",
+ "SkipObjectResolution",
+ "SpuriousErrors",
+ "Trace",
+ },
+ "go/printer": {
+ "CommentedNode",
+ "Config",
+ "Fprint",
+ "Mode",
+ "RawFormat",
+ "SourcePos",
+ "TabIndent",
+ "UseSpaces",
+ },
+ "go/scanner": {
+ "Error",
+ "ErrorHandler",
+ "ErrorList",
+ "Mode",
+ "PrintError",
+ "ScanComments",
+ "Scanner",
+ },
+ "go/token": {
+ "ADD",
+ "ADD_ASSIGN",
+ "AND",
+ "AND_ASSIGN",
+ "AND_NOT",
+ "AND_NOT_ASSIGN",
+ "ARROW",
+ "ASSIGN",
+ "BREAK",
+ "CASE",
+ "CHAN",
+ "CHAR",
+ "COLON",
+ "COMMA",
+ "COMMENT",
+ "CONST",
+ "CONTINUE",
+ "DEC",
+ "DEFAULT",
+ "DEFER",
+ "DEFINE",
+ "ELLIPSIS",
+ "ELSE",
+ "EOF",
+ "EQL",
+ "FALLTHROUGH",
+ "FLOAT",
+ "FOR",
+ "FUNC",
+ "File",
+ "FileSet",
+ "GEQ",
+ "GO",
+ "GOTO",
+ "GTR",
+ "HighestPrec",
+ "IDENT",
+ "IF",
+ "ILLEGAL",
+ "IMAG",
+ "IMPORT",
+ "INC",
+ "INT",
+ "INTERFACE",
+ "IsExported",
+ "IsIdentifier",
+ "IsKeyword",
+ "LAND",
+ "LBRACE",
+ "LBRACK",
+ "LEQ",
+ "LOR",
+ "LPAREN",
+ "LSS",
+ "Lookup",
+ "LowestPrec",
+ "MAP",
+ "MUL",
+ "MUL_ASSIGN",
+ "NEQ",
+ "NOT",
+ "NewFileSet",
+ "NoPos",
+ "OR",
+ "OR_ASSIGN",
+ "PACKAGE",
+ "PERIOD",
+ "Pos",
+ "Position",
+ "QUO",
+ "QUO_ASSIGN",
+ "RANGE",
+ "RBRACE",
+ "RBRACK",
+ "REM",
+ "REM_ASSIGN",
+ "RETURN",
+ "RPAREN",
+ "SELECT",
+ "SEMICOLON",
+ "SHL",
+ "SHL_ASSIGN",
+ "SHR",
+ "SHR_ASSIGN",
+ "STRING",
+ "STRUCT",
+ "SUB",
+ "SUB_ASSIGN",
+ "SWITCH",
+ "TILDE",
+ "TYPE",
+ "Token",
+ "UnaryPrec",
+ "VAR",
+ "XOR",
+ "XOR_ASSIGN",
+ },
+ "go/types": {
+ "ArgumentError",
+ "Array",
+ "AssertableTo",
+ "AssignableTo",
+ "Basic",
+ "BasicInfo",
+ "BasicKind",
+ "Bool",
+ "Builtin",
+ "Byte",
+ "Chan",
+ "ChanDir",
+ "CheckExpr",
+ "Checker",
+ "Comparable",
+ "Complex128",
+ "Complex64",
+ "Config",
+ "Const",
+ "Context",
+ "ConvertibleTo",
+ "DefPredeclaredTestFuncs",
+ "Default",
+ "Error",
+ "Eval",
+ "ExprString",
+ "FieldVal",
+ "Float32",
+ "Float64",
+ "Func",
+ "Id",
+ "Identical",
+ "IdenticalIgnoreTags",
+ "Implements",
+ "ImportMode",
+ "Importer",
+ "ImporterFrom",
+ "Info",
+ "Initializer",
+ "Instance",
+ "Instantiate",
+ "Int",
+ "Int16",
+ "Int32",
+ "Int64",
+ "Int8",
+ "Interface",
+ "Invalid",
+ "IsBoolean",
+ "IsComplex",
+ "IsConstType",
+ "IsFloat",
+ "IsInteger",
+ "IsInterface",
+ "IsNumeric",
+ "IsOrdered",
+ "IsString",
+ "IsUnsigned",
+ "IsUntyped",
+ "Label",
+ "LookupFieldOrMethod",
+ "Map",
+ "MethodExpr",
+ "MethodSet",
+ "MethodVal",
+ "MissingMethod",
+ "Named",
+ "NewArray",
+ "NewChan",
+ "NewChecker",
+ "NewConst",
+ "NewContext",
+ "NewField",
+ "NewFunc",
+ "NewInterface",
+ "NewInterfaceType",
+ "NewLabel",
+ "NewMap",
+ "NewMethodSet",
+ "NewNamed",
+ "NewPackage",
+ "NewParam",
+ "NewPkgName",
+ "NewPointer",
+ "NewScope",
+ "NewSignature",
+ "NewSignatureType",
+ "NewSlice",
+ "NewStruct",
+ "NewTerm",
+ "NewTuple",
+ "NewTypeName",
+ "NewTypeParam",
+ "NewUnion",
+ "NewVar",
+ "Nil",
+ "Object",
+ "ObjectString",
+ "Package",
+ "PkgName",
+ "Pointer",
+ "Qualifier",
+ "RecvOnly",
+ "RelativeTo",
+ "Rune",
+ "Satisfies",
+ "Scope",
+ "Selection",
+ "SelectionKind",
+ "SelectionString",
+ "SendOnly",
+ "SendRecv",
+ "Signature",
+ "Sizes",
+ "SizesFor",
+ "Slice",
+ "StdSizes",
+ "String",
+ "Struct",
+ "Term",
+ "Tuple",
+ "Typ",
+ "Type",
+ "TypeAndValue",
+ "TypeList",
+ "TypeName",
+ "TypeParam",
+ "TypeParamList",
+ "TypeString",
+ "Uint",
+ "Uint16",
+ "Uint32",
+ "Uint64",
+ "Uint8",
+ "Uintptr",
+ "Union",
+ "Universe",
+ "Unsafe",
+ "UnsafePointer",
+ "UntypedBool",
+ "UntypedComplex",
+ "UntypedFloat",
+ "UntypedInt",
+ "UntypedNil",
+ "UntypedRune",
+ "UntypedString",
+ "Var",
+ "WriteExpr",
+ "WriteSignature",
+ "WriteType",
+ },
+ "hash": {
+ "Hash",
+ "Hash32",
+ "Hash64",
+ },
+ "hash/adler32": {
+ "Checksum",
+ "New",
+ "Size",
+ },
+ "hash/crc32": {
+ "Castagnoli",
+ "Checksum",
+ "ChecksumIEEE",
+ "IEEE",
+ "IEEETable",
+ "Koopman",
+ "MakeTable",
+ "New",
+ "NewIEEE",
+ "Size",
+ "Table",
+ "Update",
+ },
+ "hash/crc64": {
+ "Checksum",
+ "ECMA",
+ "ISO",
+ "MakeTable",
+ "New",
+ "Size",
+ "Table",
+ "Update",
+ },
+ "hash/fnv": {
+ "New128",
+ "New128a",
+ "New32",
+ "New32a",
+ "New64",
+ "New64a",
+ },
+ "hash/maphash": {
+ "Bytes",
+ "Hash",
+ "MakeSeed",
+ "Seed",
+ "String",
+ },
+ "html": {
+ "EscapeString",
+ "UnescapeString",
+ },
+ "html/template": {
+ "CSS",
+ "ErrAmbigContext",
+ "ErrBadHTML",
+ "ErrBranchEnd",
+ "ErrEndContext",
+ "ErrJSTemplate",
+ "ErrNoSuchTemplate",
+ "ErrOutputContext",
+ "ErrPartialCharset",
+ "ErrPartialEscape",
+ "ErrPredefinedEscaper",
+ "ErrRangeLoopReentry",
+ "ErrSlashAmbig",
+ "Error",
+ "ErrorCode",
+ "FuncMap",
+ "HTML",
+ "HTMLAttr",
+ "HTMLEscape",
+ "HTMLEscapeString",
+ "HTMLEscaper",
+ "IsTrue",
+ "JS",
+ "JSEscape",
+ "JSEscapeString",
+ "JSEscaper",
+ "JSStr",
+ "Must",
+ "New",
+ "OK",
+ "ParseFS",
+ "ParseFiles",
+ "ParseGlob",
+ "Srcset",
+ "Template",
+ "URL",
+ "URLQueryEscaper",
+ },
+ "image": {
+ "Alpha",
+ "Alpha16",
+ "Black",
+ "CMYK",
+ "Config",
+ "Decode",
+ "DecodeConfig",
+ "ErrFormat",
+ "Gray",
+ "Gray16",
+ "Image",
+ "NRGBA",
+ "NRGBA64",
+ "NYCbCrA",
+ "NewAlpha",
+ "NewAlpha16",
+ "NewCMYK",
+ "NewGray",
+ "NewGray16",
+ "NewNRGBA",
+ "NewNRGBA64",
+ "NewNYCbCrA",
+ "NewPaletted",
+ "NewRGBA",
+ "NewRGBA64",
+ "NewUniform",
+ "NewYCbCr",
+ "Opaque",
+ "Paletted",
+ "PalettedImage",
+ "Point",
+ "Pt",
+ "RGBA",
+ "RGBA64",
+ "RGBA64Image",
+ "Rect",
+ "Rectangle",
+ "RegisterFormat",
+ "Transparent",
+ "Uniform",
+ "White",
+ "YCbCr",
+ "YCbCrSubsampleRatio",
+ "YCbCrSubsampleRatio410",
+ "YCbCrSubsampleRatio411",
+ "YCbCrSubsampleRatio420",
+ "YCbCrSubsampleRatio422",
+ "YCbCrSubsampleRatio440",
+ "YCbCrSubsampleRatio444",
+ "ZP",
+ "ZR",
+ },
+ "image/color": {
+ "Alpha",
+ "Alpha16",
+ "Alpha16Model",
+ "AlphaModel",
+ "Black",
+ "CMYK",
+ "CMYKModel",
+ "CMYKToRGB",
+ "Color",
+ "Gray",
+ "Gray16",
+ "Gray16Model",
+ "GrayModel",
+ "Model",
+ "ModelFunc",
+ "NRGBA",
+ "NRGBA64",
+ "NRGBA64Model",
+ "NRGBAModel",
+ "NYCbCrA",
+ "NYCbCrAModel",
+ "Opaque",
+ "Palette",
+ "RGBA",
+ "RGBA64",
+ "RGBA64Model",
+ "RGBAModel",
+ "RGBToCMYK",
+ "RGBToYCbCr",
+ "Transparent",
+ "White",
+ "YCbCr",
+ "YCbCrModel",
+ "YCbCrToRGB",
+ },
+ "image/color/palette": {
+ "Plan9",
+ "WebSafe",
+ },
+ "image/draw": {
+ "Draw",
+ "DrawMask",
+ "Drawer",
+ "FloydSteinberg",
+ "Image",
+ "Op",
+ "Over",
+ "Quantizer",
+ "RGBA64Image",
+ "Src",
+ },
+ "image/gif": {
+ "Decode",
+ "DecodeAll",
+ "DecodeConfig",
+ "DisposalBackground",
+ "DisposalNone",
+ "DisposalPrevious",
+ "Encode",
+ "EncodeAll",
+ "GIF",
+ "Options",
+ },
+ "image/jpeg": {
+ "Decode",
+ "DecodeConfig",
+ "DefaultQuality",
+ "Encode",
+ "FormatError",
+ "Options",
+ "Reader",
+ "UnsupportedError",
+ },
+ "image/png": {
+ "BestCompression",
+ "BestSpeed",
+ "CompressionLevel",
+ "Decode",
+ "DecodeConfig",
+ "DefaultCompression",
+ "Encode",
+ "Encoder",
+ "EncoderBuffer",
+ "EncoderBufferPool",
+ "FormatError",
+ "NoCompression",
+ "UnsupportedError",
+ },
+ "index/suffixarray": {
+ "Index",
+ "New",
+ },
+ "io": {
+ "ByteReader",
+ "ByteScanner",
+ "ByteWriter",
+ "Closer",
+ "Copy",
+ "CopyBuffer",
+ "CopyN",
+ "Discard",
+ "EOF",
+ "ErrClosedPipe",
+ "ErrNoProgress",
+ "ErrShortBuffer",
+ "ErrShortWrite",
+ "ErrUnexpectedEOF",
+ "LimitReader",
+ "LimitedReader",
+ "MultiReader",
+ "MultiWriter",
+ "NewOffsetWriter",
+ "NewSectionReader",
+ "NopCloser",
+ "OffsetWriter",
+ "Pipe",
+ "PipeReader",
+ "PipeWriter",
+ "ReadAll",
+ "ReadAtLeast",
+ "ReadCloser",
+ "ReadFull",
+ "ReadSeekCloser",
+ "ReadSeeker",
+ "ReadWriteCloser",
+ "ReadWriteSeeker",
+ "ReadWriter",
+ "Reader",
+ "ReaderAt",
+ "ReaderFrom",
+ "RuneReader",
+ "RuneScanner",
+ "SectionReader",
+ "SeekCurrent",
+ "SeekEnd",
+ "SeekStart",
+ "Seeker",
+ "StringWriter",
+ "TeeReader",
+ "WriteCloser",
+ "WriteSeeker",
+ "WriteString",
+ "Writer",
+ "WriterAt",
+ "WriterTo",
+ },
+ "io/fs": {
+ "DirEntry",
+ "ErrClosed",
+ "ErrExist",
+ "ErrInvalid",
+ "ErrNotExist",
+ "ErrPermission",
+ "FS",
+ "File",
+ "FileInfo",
+ "FileInfoToDirEntry",
+ "FileMode",
+ "FormatDirEntry",
+ "FormatFileInfo",
+ "Glob",
+ "GlobFS",
+ "ModeAppend",
+ "ModeCharDevice",
+ "ModeDevice",
+ "ModeDir",
+ "ModeExclusive",
+ "ModeIrregular",
+ "ModeNamedPipe",
+ "ModePerm",
+ "ModeSetgid",
+ "ModeSetuid",
+ "ModeSocket",
+ "ModeSticky",
+ "ModeSymlink",
+ "ModeTemporary",
+ "ModeType",
+ "PathError",
+ "ReadDir",
+ "ReadDirFS",
+ "ReadDirFile",
+ "ReadFile",
+ "ReadFileFS",
+ "SkipAll",
+ "SkipDir",
+ "Stat",
+ "StatFS",
+ "Sub",
+ "SubFS",
+ "ValidPath",
+ "WalkDir",
+ "WalkDirFunc",
+ },
+ "io/ioutil": {
+ "Discard",
+ "NopCloser",
+ "ReadAll",
+ "ReadDir",
+ "ReadFile",
+ "TempDir",
+ "TempFile",
+ "WriteFile",
+ },
+ "log": {
+ "Default",
+ "Fatal",
+ "Fatalf",
+ "Fatalln",
+ "Flags",
+ "LUTC",
+ "Ldate",
+ "Llongfile",
+ "Lmicroseconds",
+ "Lmsgprefix",
+ "Logger",
+ "Lshortfile",
+ "LstdFlags",
+ "Ltime",
+ "New",
+ "Output",
+ "Panic",
+ "Panicf",
+ "Panicln",
+ "Prefix",
+ "Print",
+ "Printf",
+ "Println",
+ "SetFlags",
+ "SetOutput",
+ "SetPrefix",
+ "Writer",
+ },
+ "log/slog": {
+ "Any",
+ "AnyValue",
+ "Attr",
+ "Bool",
+ "BoolValue",
+ "Debug",
+ "DebugContext",
+ "Default",
+ "Duration",
+ "DurationValue",
+ "Error",
+ "ErrorContext",
+ "Float64",
+ "Float64Value",
+ "Group",
+ "GroupValue",
+ "Handler",
+ "HandlerOptions",
+ "Info",
+ "InfoContext",
+ "Int",
+ "Int64",
+ "Int64Value",
+ "IntValue",
+ "JSONHandler",
+ "Kind",
+ "KindAny",
+ "KindBool",
+ "KindDuration",
+ "KindFloat64",
+ "KindGroup",
+ "KindInt64",
+ "KindLogValuer",
+ "KindString",
+ "KindTime",
+ "KindUint64",
+ "Level",
+ "LevelDebug",
+ "LevelError",
+ "LevelInfo",
+ "LevelKey",
+ "LevelVar",
+ "LevelWarn",
+ "Leveler",
+ "Log",
+ "LogAttrs",
+ "LogValuer",
+ "Logger",
+ "MessageKey",
+ "New",
+ "NewJSONHandler",
+ "NewLogLogger",
+ "NewRecord",
+ "NewTextHandler",
+ "Record",
+ "SetDefault",
+ "Source",
+ "SourceKey",
+ "String",
+ "StringValue",
+ "TextHandler",
+ "Time",
+ "TimeKey",
+ "TimeValue",
+ "Uint64",
+ "Uint64Value",
+ "Value",
+ "Warn",
+ "WarnContext",
+ "With",
+ },
+ "log/syslog": {
+ "Dial",
+ "LOG_ALERT",
+ "LOG_AUTH",
+ "LOG_AUTHPRIV",
+ "LOG_CRIT",
+ "LOG_CRON",
+ "LOG_DAEMON",
+ "LOG_DEBUG",
+ "LOG_EMERG",
+ "LOG_ERR",
+ "LOG_FTP",
+ "LOG_INFO",
+ "LOG_KERN",
+ "LOG_LOCAL0",
+ "LOG_LOCAL1",
+ "LOG_LOCAL2",
+ "LOG_LOCAL3",
+ "LOG_LOCAL4",
+ "LOG_LOCAL5",
+ "LOG_LOCAL6",
+ "LOG_LOCAL7",
+ "LOG_LPR",
+ "LOG_MAIL",
+ "LOG_NEWS",
+ "LOG_NOTICE",
+ "LOG_SYSLOG",
+ "LOG_USER",
+ "LOG_UUCP",
+ "LOG_WARNING",
+ "New",
+ "NewLogger",
+ "Priority",
+ "Writer",
+ },
+ "maps": {
+ "Clone",
+ "Copy",
+ "DeleteFunc",
+ "Equal",
+ "EqualFunc",
+ },
+ "math": {
+ "Abs",
+ "Acos",
+ "Acosh",
+ "Asin",
+ "Asinh",
+ "Atan",
+ "Atan2",
+ "Atanh",
+ "Cbrt",
+ "Ceil",
+ "Copysign",
+ "Cos",
+ "Cosh",
+ "Dim",
+ "E",
+ "Erf",
+ "Erfc",
+ "Erfcinv",
+ "Erfinv",
+ "Exp",
+ "Exp2",
+ "Expm1",
+ "FMA",
+ "Float32bits",
+ "Float32frombits",
+ "Float64bits",
+ "Float64frombits",
+ "Floor",
+ "Frexp",
+ "Gamma",
+ "Hypot",
+ "Ilogb",
+ "Inf",
+ "IsInf",
+ "IsNaN",
+ "J0",
+ "J1",
+ "Jn",
+ "Ldexp",
+ "Lgamma",
+ "Ln10",
+ "Ln2",
+ "Log",
+ "Log10",
+ "Log10E",
+ "Log1p",
+ "Log2",
+ "Log2E",
+ "Logb",
+ "Max",
+ "MaxFloat32",
+ "MaxFloat64",
+ "MaxInt",
+ "MaxInt16",
+ "MaxInt32",
+ "MaxInt64",
+ "MaxInt8",
+ "MaxUint",
+ "MaxUint16",
+ "MaxUint32",
+ "MaxUint64",
+ "MaxUint8",
+ "Min",
+ "MinInt",
+ "MinInt16",
+ "MinInt32",
+ "MinInt64",
+ "MinInt8",
+ "Mod",
+ "Modf",
+ "NaN",
+ "Nextafter",
+ "Nextafter32",
+ "Phi",
+ "Pi",
+ "Pow",
+ "Pow10",
+ "Remainder",
+ "Round",
+ "RoundToEven",
+ "Signbit",
+ "Sin",
+ "Sincos",
+ "Sinh",
+ "SmallestNonzeroFloat32",
+ "SmallestNonzeroFloat64",
+ "Sqrt",
+ "Sqrt2",
+ "SqrtE",
+ "SqrtPhi",
+ "SqrtPi",
+ "Tan",
+ "Tanh",
+ "Trunc",
+ "Y0",
+ "Y1",
+ "Yn",
+ },
+ "math/big": {
+ "Above",
+ "Accuracy",
+ "AwayFromZero",
+ "Below",
+ "ErrNaN",
+ "Exact",
+ "Float",
+ "Int",
+ "Jacobi",
+ "MaxBase",
+ "MaxExp",
+ "MaxPrec",
+ "MinExp",
+ "NewFloat",
+ "NewInt",
+ "NewRat",
+ "ParseFloat",
+ "Rat",
+ "RoundingMode",
+ "ToNearestAway",
+ "ToNearestEven",
+ "ToNegativeInf",
+ "ToPositiveInf",
+ "ToZero",
+ "Word",
+ },
+ "math/bits": {
+ "Add",
+ "Add32",
+ "Add64",
+ "Div",
+ "Div32",
+ "Div64",
+ "LeadingZeros",
+ "LeadingZeros16",
+ "LeadingZeros32",
+ "LeadingZeros64",
+ "LeadingZeros8",
+ "Len",
+ "Len16",
+ "Len32",
+ "Len64",
+ "Len8",
+ "Mul",
+ "Mul32",
+ "Mul64",
+ "OnesCount",
+ "OnesCount16",
+ "OnesCount32",
+ "OnesCount64",
+ "OnesCount8",
+ "Rem",
+ "Rem32",
+ "Rem64",
+ "Reverse",
+ "Reverse16",
+ "Reverse32",
+ "Reverse64",
+ "Reverse8",
+ "ReverseBytes",
+ "ReverseBytes16",
+ "ReverseBytes32",
+ "ReverseBytes64",
+ "RotateLeft",
+ "RotateLeft16",
+ "RotateLeft32",
+ "RotateLeft64",
+ "RotateLeft8",
+ "Sub",
+ "Sub32",
+ "Sub64",
+ "TrailingZeros",
+ "TrailingZeros16",
+ "TrailingZeros32",
+ "TrailingZeros64",
+ "TrailingZeros8",
+ "UintSize",
+ },
+ "math/cmplx": {
+ "Abs",
+ "Acos",
+ "Acosh",
+ "Asin",
+ "Asinh",
+ "Atan",
+ "Atanh",
+ "Conj",
+ "Cos",
+ "Cosh",
+ "Cot",
+ "Exp",
+ "Inf",
+ "IsInf",
+ "IsNaN",
+ "Log",
+ "Log10",
+ "NaN",
+ "Phase",
+ "Polar",
+ "Pow",
+ "Rect",
+ "Sin",
+ "Sinh",
+ "Sqrt",
+ "Tan",
+ "Tanh",
+ },
+ "math/rand": {
+ "ExpFloat64",
+ "Float32",
+ "Float64",
+ "Int",
+ "Int31",
+ "Int31n",
+ "Int63",
+ "Int63n",
+ "Intn",
+ "New",
+ "NewSource",
+ "NewZipf",
+ "NormFloat64",
+ "Perm",
+ "Rand",
+ "Read",
+ "Seed",
+ "Shuffle",
+ "Source",
+ "Source64",
+ "Uint32",
+ "Uint64",
+ "Zipf",
+ },
+ "mime": {
+ "AddExtensionType",
+ "BEncoding",
+ "ErrInvalidMediaParameter",
+ "ExtensionsByType",
+ "FormatMediaType",
+ "ParseMediaType",
+ "QEncoding",
+ "TypeByExtension",
+ "WordDecoder",
+ "WordEncoder",
+ },
+ "mime/multipart": {
+ "ErrMessageTooLarge",
+ "File",
+ "FileHeader",
+ "Form",
+ "NewReader",
+ "NewWriter",
+ "Part",
+ "Reader",
+ "Writer",
+ },
+ "mime/quotedprintable": {
+ "NewReader",
+ "NewWriter",
+ "Reader",
+ "Writer",
+ },
+ "net": {
+ "Addr",
+ "AddrError",
+ "Buffers",
+ "CIDRMask",
+ "Conn",
+ "DNSConfigError",
+ "DNSError",
+ "DefaultResolver",
+ "Dial",
+ "DialIP",
+ "DialTCP",
+ "DialTimeout",
+ "DialUDP",
+ "DialUnix",
+ "Dialer",
+ "ErrClosed",
+ "ErrWriteToConnected",
+ "Error",
+ "FileConn",
+ "FileListener",
+ "FilePacketConn",
+ "FlagBroadcast",
+ "FlagLoopback",
+ "FlagMulticast",
+ "FlagPointToPoint",
+ "FlagRunning",
+ "FlagUp",
+ "Flags",
+ "HardwareAddr",
+ "IP",
+ "IPAddr",
+ "IPConn",
+ "IPMask",
+ "IPNet",
+ "IPv4",
+ "IPv4Mask",
+ "IPv4allrouter",
+ "IPv4allsys",
+ "IPv4bcast",
+ "IPv4len",
+ "IPv4zero",
+ "IPv6interfacelocalallnodes",
+ "IPv6len",
+ "IPv6linklocalallnodes",
+ "IPv6linklocalallrouters",
+ "IPv6loopback",
+ "IPv6unspecified",
+ "IPv6zero",
+ "Interface",
+ "InterfaceAddrs",
+ "InterfaceByIndex",
+ "InterfaceByName",
+ "Interfaces",
+ "InvalidAddrError",
+ "JoinHostPort",
+ "Listen",
+ "ListenConfig",
+ "ListenIP",
+ "ListenMulticastUDP",
+ "ListenPacket",
+ "ListenTCP",
+ "ListenUDP",
+ "ListenUnix",
+ "ListenUnixgram",
+ "Listener",
+ "LookupAddr",
+ "LookupCNAME",
+ "LookupHost",
+ "LookupIP",
+ "LookupMX",
+ "LookupNS",
+ "LookupPort",
+ "LookupSRV",
+ "LookupTXT",
+ "MX",
+ "NS",
+ "OpError",
+ "PacketConn",
+ "ParseCIDR",
+ "ParseError",
+ "ParseIP",
+ "ParseMAC",
+ "Pipe",
+ "ResolveIPAddr",
+ "ResolveTCPAddr",
+ "ResolveUDPAddr",
+ "ResolveUnixAddr",
+ "Resolver",
+ "SRV",
+ "SplitHostPort",
+ "TCPAddr",
+ "TCPAddrFromAddrPort",
+ "TCPConn",
+ "TCPListener",
+ "UDPAddr",
+ "UDPAddrFromAddrPort",
+ "UDPConn",
+ "UnixAddr",
+ "UnixConn",
+ "UnixListener",
+ "UnknownNetworkError",
+ },
+ "net/http": {
+ "AllowQuerySemicolons",
+ "CanonicalHeaderKey",
+ "Client",
+ "CloseNotifier",
+ "ConnState",
+ "Cookie",
+ "CookieJar",
+ "DefaultClient",
+ "DefaultMaxHeaderBytes",
+ "DefaultMaxIdleConnsPerHost",
+ "DefaultServeMux",
+ "DefaultTransport",
+ "DetectContentType",
+ "Dir",
+ "ErrAbortHandler",
+ "ErrBodyNotAllowed",
+ "ErrBodyReadAfterClose",
+ "ErrContentLength",
+ "ErrHandlerTimeout",
+ "ErrHeaderTooLong",
+ "ErrHijacked",
+ "ErrLineTooLong",
+ "ErrMissingBoundary",
+ "ErrMissingContentLength",
+ "ErrMissingFile",
+ "ErrNoCookie",
+ "ErrNoLocation",
+ "ErrNotMultipart",
+ "ErrNotSupported",
+ "ErrSchemeMismatch",
+ "ErrServerClosed",
+ "ErrShortBody",
+ "ErrSkipAltProtocol",
+ "ErrUnexpectedTrailer",
+ "ErrUseLastResponse",
+ "ErrWriteAfterFlush",
+ "Error",
+ "FS",
+ "File",
+ "FileServer",
+ "FileSystem",
+ "Flusher",
+ "Get",
+ "Handle",
+ "HandleFunc",
+ "Handler",
+ "HandlerFunc",
+ "Head",
+ "Header",
+ "Hijacker",
+ "ListenAndServe",
+ "ListenAndServeTLS",
+ "LocalAddrContextKey",
+ "MaxBytesError",
+ "MaxBytesHandler",
+ "MaxBytesReader",
+ "MethodConnect",
+ "MethodDelete",
+ "MethodGet",
+ "MethodHead",
+ "MethodOptions",
+ "MethodPatch",
+ "MethodPost",
+ "MethodPut",
+ "MethodTrace",
+ "NewFileTransport",
+ "NewRequest",
+ "NewRequestWithContext",
+ "NewResponseController",
+ "NewServeMux",
+ "NoBody",
+ "NotFound",
+ "NotFoundHandler",
+ "ParseHTTPVersion",
+ "ParseTime",
+ "Post",
+ "PostForm",
+ "ProtocolError",
+ "ProxyFromEnvironment",
+ "ProxyURL",
+ "PushOptions",
+ "Pusher",
+ "ReadRequest",
+ "ReadResponse",
+ "Redirect",
+ "RedirectHandler",
+ "Request",
+ "Response",
+ "ResponseController",
+ "ResponseWriter",
+ "RoundTripper",
+ "SameSite",
+ "SameSiteDefaultMode",
+ "SameSiteLaxMode",
+ "SameSiteNoneMode",
+ "SameSiteStrictMode",
+ "Serve",
+ "ServeContent",
+ "ServeFile",
+ "ServeMux",
+ "ServeTLS",
+ "Server",
+ "ServerContextKey",
+ "SetCookie",
+ "StateActive",
+ "StateClosed",
+ "StateHijacked",
+ "StateIdle",
+ "StateNew",
+ "StatusAccepted",
+ "StatusAlreadyReported",
+ "StatusBadGateway",
+ "StatusBadRequest",
+ "StatusConflict",
+ "StatusContinue",
+ "StatusCreated",
+ "StatusEarlyHints",
+ "StatusExpectationFailed",
+ "StatusFailedDependency",
+ "StatusForbidden",
+ "StatusFound",
+ "StatusGatewayTimeout",
+ "StatusGone",
+ "StatusHTTPVersionNotSupported",
+ "StatusIMUsed",
+ "StatusInsufficientStorage",
+ "StatusInternalServerError",
+ "StatusLengthRequired",
+ "StatusLocked",
+ "StatusLoopDetected",
+ "StatusMethodNotAllowed",
+ "StatusMisdirectedRequest",
+ "StatusMovedPermanently",
+ "StatusMultiStatus",
+ "StatusMultipleChoices",
+ "StatusNetworkAuthenticationRequired",
+ "StatusNoContent",
+ "StatusNonAuthoritativeInfo",
+ "StatusNotAcceptable",
+ "StatusNotExtended",
+ "StatusNotFound",
+ "StatusNotImplemented",
+ "StatusNotModified",
+ "StatusOK",
+ "StatusPartialContent",
+ "StatusPaymentRequired",
+ "StatusPermanentRedirect",
+ "StatusPreconditionFailed",
+ "StatusPreconditionRequired",
+ "StatusProcessing",
+ "StatusProxyAuthRequired",
+ "StatusRequestEntityTooLarge",
+ "StatusRequestHeaderFieldsTooLarge",
+ "StatusRequestTimeout",
+ "StatusRequestURITooLong",
+ "StatusRequestedRangeNotSatisfiable",
+ "StatusResetContent",
+ "StatusSeeOther",
+ "StatusServiceUnavailable",
+ "StatusSwitchingProtocols",
+ "StatusTeapot",
+ "StatusTemporaryRedirect",
+ "StatusText",
+ "StatusTooEarly",
+ "StatusTooManyRequests",
+ "StatusUnauthorized",
+ "StatusUnavailableForLegalReasons",
+ "StatusUnprocessableEntity",
+ "StatusUnsupportedMediaType",
+ "StatusUpgradeRequired",
+ "StatusUseProxy",
+ "StatusVariantAlsoNegotiates",
+ "StripPrefix",
+ "TimeFormat",
+ "TimeoutHandler",
+ "TrailerPrefix",
+ "Transport",
+ },
+ "net/http/cgi": {
+ "Handler",
+ "Request",
+ "RequestFromMap",
+ "Serve",
+ },
+ "net/http/cookiejar": {
+ "Jar",
+ "New",
+ "Options",
+ "PublicSuffixList",
+ },
+ "net/http/fcgi": {
+ "ErrConnClosed",
+ "ErrRequestAborted",
+ "ProcessEnv",
+ "Serve",
+ },
+ "net/http/httptest": {
+ "DefaultRemoteAddr",
+ "NewRecorder",
+ "NewRequest",
+ "NewServer",
+ "NewTLSServer",
+ "NewUnstartedServer",
+ "ResponseRecorder",
+ "Server",
+ },
+ "net/http/httptrace": {
+ "ClientTrace",
+ "ContextClientTrace",
+ "DNSDoneInfo",
+ "DNSStartInfo",
+ "GotConnInfo",
+ "WithClientTrace",
+ "WroteRequestInfo",
+ },
+ "net/http/httputil": {
+ "BufferPool",
+ "ClientConn",
+ "DumpRequest",
+ "DumpRequestOut",
+ "DumpResponse",
+ "ErrClosed",
+ "ErrLineTooLong",
+ "ErrPersistEOF",
+ "ErrPipeline",
+ "NewChunkedReader",
+ "NewChunkedWriter",
+ "NewClientConn",
+ "NewProxyClientConn",
+ "NewServerConn",
+ "NewSingleHostReverseProxy",
+ "ProxyRequest",
+ "ReverseProxy",
+ "ServerConn",
+ },
+ "net/http/pprof": {
+ "Cmdline",
+ "Handler",
+ "Index",
+ "Profile",
+ "Symbol",
+ "Trace",
+ },
+ "net/mail": {
+ "Address",
+ "AddressParser",
+ "ErrHeaderNotPresent",
+ "Header",
+ "Message",
+ "ParseAddress",
+ "ParseAddressList",
+ "ParseDate",
+ "ReadMessage",
+ },
+ "net/netip": {
+ "Addr",
+ "AddrFrom16",
+ "AddrFrom4",
+ "AddrFromSlice",
+ "AddrPort",
+ "AddrPortFrom",
+ "IPv4Unspecified",
+ "IPv6LinkLocalAllNodes",
+ "IPv6LinkLocalAllRouters",
+ "IPv6Loopback",
+ "IPv6Unspecified",
+ "MustParseAddr",
+ "MustParseAddrPort",
+ "MustParsePrefix",
+ "ParseAddr",
+ "ParseAddrPort",
+ "ParsePrefix",
+ "Prefix",
+ "PrefixFrom",
+ },
+ "net/rpc": {
+ "Accept",
+ "Call",
+ "Client",
+ "ClientCodec",
+ "DefaultDebugPath",
+ "DefaultRPCPath",
+ "DefaultServer",
+ "Dial",
+ "DialHTTP",
+ "DialHTTPPath",
+ "ErrShutdown",
+ "HandleHTTP",
+ "NewClient",
+ "NewClientWithCodec",
+ "NewServer",
+ "Register",
+ "RegisterName",
+ "Request",
+ "Response",
+ "ServeCodec",
+ "ServeConn",
+ "ServeRequest",
+ "Server",
+ "ServerCodec",
+ "ServerError",
+ },
+ "net/rpc/jsonrpc": {
+ "Dial",
+ "NewClient",
+ "NewClientCodec",
+ "NewServerCodec",
+ "ServeConn",
+ },
+ "net/smtp": {
+ "Auth",
+ "CRAMMD5Auth",
+ "Client",
+ "Dial",
+ "NewClient",
+ "PlainAuth",
+ "SendMail",
+ "ServerInfo",
+ },
+ "net/textproto": {
+ "CanonicalMIMEHeaderKey",
+ "Conn",
+ "Dial",
+ "Error",
+ "MIMEHeader",
+ "NewConn",
+ "NewReader",
+ "NewWriter",
+ "Pipeline",
+ "ProtocolError",
+ "Reader",
+ "TrimBytes",
+ "TrimString",
+ "Writer",
+ },
+ "net/url": {
+ "Error",
+ "EscapeError",
+ "InvalidHostError",
+ "JoinPath",
+ "Parse",
+ "ParseQuery",
+ "ParseRequestURI",
+ "PathEscape",
+ "PathUnescape",
+ "QueryEscape",
+ "QueryUnescape",
+ "URL",
+ "User",
+ "UserPassword",
+ "Userinfo",
+ "Values",
+ },
+ "os": {
+ "Args",
+ "Chdir",
+ "Chmod",
+ "Chown",
+ "Chtimes",
+ "Clearenv",
+ "Create",
+ "CreateTemp",
+ "DevNull",
+ "DirEntry",
+ "DirFS",
+ "Environ",
+ "ErrClosed",
+ "ErrDeadlineExceeded",
+ "ErrExist",
+ "ErrInvalid",
+ "ErrNoDeadline",
+ "ErrNotExist",
+ "ErrPermission",
+ "ErrProcessDone",
+ "Executable",
+ "Exit",
+ "Expand",
+ "ExpandEnv",
+ "File",
+ "FileInfo",
+ "FileMode",
+ "FindProcess",
+ "Getegid",
+ "Getenv",
+ "Geteuid",
+ "Getgid",
+ "Getgroups",
+ "Getpagesize",
+ "Getpid",
+ "Getppid",
+ "Getuid",
+ "Getwd",
+ "Hostname",
+ "Interrupt",
+ "IsExist",
+ "IsNotExist",
+ "IsPathSeparator",
+ "IsPermission",
+ "IsTimeout",
+ "Kill",
+ "Lchown",
+ "Link",
+ "LinkError",
+ "LookupEnv",
+ "Lstat",
+ "Mkdir",
+ "MkdirAll",
+ "MkdirTemp",
+ "ModeAppend",
+ "ModeCharDevice",
+ "ModeDevice",
+ "ModeDir",
+ "ModeExclusive",
+ "ModeIrregular",
+ "ModeNamedPipe",
+ "ModePerm",
+ "ModeSetgid",
+ "ModeSetuid",
+ "ModeSocket",
+ "ModeSticky",
+ "ModeSymlink",
+ "ModeTemporary",
+ "ModeType",
+ "NewFile",
+ "NewSyscallError",
+ "O_APPEND",
+ "O_CREATE",
+ "O_EXCL",
+ "O_RDONLY",
+ "O_RDWR",
+ "O_SYNC",
+ "O_TRUNC",
+ "O_WRONLY",
+ "Open",
+ "OpenFile",
+ "PathError",
+ "PathListSeparator",
+ "PathSeparator",
+ "Pipe",
+ "ProcAttr",
+ "Process",
+ "ProcessState",
+ "ReadDir",
+ "ReadFile",
+ "Readlink",
+ "Remove",
+ "RemoveAll",
+ "Rename",
+ "SEEK_CUR",
+ "SEEK_END",
+ "SEEK_SET",
+ "SameFile",
+ "Setenv",
+ "Signal",
+ "StartProcess",
+ "Stat",
+ "Stderr",
+ "Stdin",
+ "Stdout",
+ "Symlink",
+ "SyscallError",
+ "TempDir",
+ "Truncate",
+ "Unsetenv",
+ "UserCacheDir",
+ "UserConfigDir",
+ "UserHomeDir",
+ "WriteFile",
+ },
+ "os/exec": {
+ "Cmd",
+ "Command",
+ "CommandContext",
+ "ErrDot",
+ "ErrNotFound",
+ "ErrWaitDelay",
+ "Error",
+ "ExitError",
+ "LookPath",
+ },
+ "os/signal": {
+ "Ignore",
+ "Ignored",
+ "Notify",
+ "NotifyContext",
+ "Reset",
+ "Stop",
+ },
+ "os/user": {
+ "Current",
+ "Group",
+ "Lookup",
+ "LookupGroup",
+ "LookupGroupId",
+ "LookupId",
+ "UnknownGroupError",
+ "UnknownGroupIdError",
+ "UnknownUserError",
+ "UnknownUserIdError",
+ "User",
+ },
+ "path": {
+ "Base",
+ "Clean",
+ "Dir",
+ "ErrBadPattern",
+ "Ext",
+ "IsAbs",
+ "Join",
+ "Match",
+ "Split",
+ },
+ "path/filepath": {
+ "Abs",
+ "Base",
+ "Clean",
+ "Dir",
+ "ErrBadPattern",
+ "EvalSymlinks",
+ "Ext",
+ "FromSlash",
+ "Glob",
+ "HasPrefix",
+ "IsAbs",
+ "IsLocal",
+ "Join",
+ "ListSeparator",
+ "Match",
+ "Rel",
+ "Separator",
+ "SkipAll",
+ "SkipDir",
+ "Split",
+ "SplitList",
+ "ToSlash",
+ "VolumeName",
+ "Walk",
+ "WalkDir",
+ "WalkFunc",
+ },
+ "plugin": {
+ "Open",
+ "Plugin",
+ "Symbol",
+ },
+ "reflect": {
+ "Append",
+ "AppendSlice",
+ "Array",
+ "ArrayOf",
+ "Bool",
+ "BothDir",
+ "Chan",
+ "ChanDir",
+ "ChanOf",
+ "Complex128",
+ "Complex64",
+ "Copy",
+ "DeepEqual",
+ "Float32",
+ "Float64",
+ "Func",
+ "FuncOf",
+ "Indirect",
+ "Int",
+ "Int16",
+ "Int32",
+ "Int64",
+ "Int8",
+ "Interface",
+ "Invalid",
+ "Kind",
+ "MakeChan",
+ "MakeFunc",
+ "MakeMap",
+ "MakeMapWithSize",
+ "MakeSlice",
+ "Map",
+ "MapIter",
+ "MapOf",
+ "Method",
+ "New",
+ "NewAt",
+ "Pointer",
+ "PointerTo",
+ "Ptr",
+ "PtrTo",
+ "RecvDir",
+ "Select",
+ "SelectCase",
+ "SelectDefault",
+ "SelectDir",
+ "SelectRecv",
+ "SelectSend",
+ "SendDir",
+ "Slice",
+ "SliceHeader",
+ "SliceOf",
+ "String",
+ "StringHeader",
+ "Struct",
+ "StructField",
+ "StructOf",
+ "StructTag",
+ "Swapper",
+ "Type",
+ "TypeOf",
+ "Uint",
+ "Uint16",
+ "Uint32",
+ "Uint64",
+ "Uint8",
+ "Uintptr",
+ "UnsafePointer",
+ "Value",
+ "ValueError",
+ "ValueOf",
+ "VisibleFields",
+ "Zero",
+ },
+ "regexp": {
+ "Compile",
+ "CompilePOSIX",
+ "Match",
+ "MatchReader",
+ "MatchString",
+ "MustCompile",
+ "MustCompilePOSIX",
+ "QuoteMeta",
+ "Regexp",
+ },
+ "regexp/syntax": {
+ "ClassNL",
+ "Compile",
+ "DotNL",
+ "EmptyBeginLine",
+ "EmptyBeginText",
+ "EmptyEndLine",
+ "EmptyEndText",
+ "EmptyNoWordBoundary",
+ "EmptyOp",
+ "EmptyOpContext",
+ "EmptyWordBoundary",
+ "ErrInternalError",
+ "ErrInvalidCharClass",
+ "ErrInvalidCharRange",
+ "ErrInvalidEscape",
+ "ErrInvalidNamedCapture",
+ "ErrInvalidPerlOp",
+ "ErrInvalidRepeatOp",
+ "ErrInvalidRepeatSize",
+ "ErrInvalidUTF8",
+ "ErrLarge",
+ "ErrMissingBracket",
+ "ErrMissingParen",
+ "ErrMissingRepeatArgument",
+ "ErrNestingDepth",
+ "ErrTrailingBackslash",
+ "ErrUnexpectedParen",
+ "Error",
+ "ErrorCode",
+ "Flags",
+ "FoldCase",
+ "Inst",
+ "InstAlt",
+ "InstAltMatch",
+ "InstCapture",
+ "InstEmptyWidth",
+ "InstFail",
+ "InstMatch",
+ "InstNop",
+ "InstOp",
+ "InstRune",
+ "InstRune1",
+ "InstRuneAny",
+ "InstRuneAnyNotNL",
+ "IsWordChar",
+ "Literal",
+ "MatchNL",
+ "NonGreedy",
+ "OneLine",
+ "Op",
+ "OpAlternate",
+ "OpAnyChar",
+ "OpAnyCharNotNL",
+ "OpBeginLine",
+ "OpBeginText",
+ "OpCapture",
+ "OpCharClass",
+ "OpConcat",
+ "OpEmptyMatch",
+ "OpEndLine",
+ "OpEndText",
+ "OpLiteral",
+ "OpNoMatch",
+ "OpNoWordBoundary",
+ "OpPlus",
+ "OpQuest",
+ "OpRepeat",
+ "OpStar",
+ "OpWordBoundary",
+ "POSIX",
+ "Parse",
+ "Perl",
+ "PerlX",
+ "Prog",
+ "Regexp",
+ "Simple",
+ "UnicodeGroups",
+ "WasDollar",
+ },
+ "runtime": {
+ "BlockProfile",
+ "BlockProfileRecord",
+ "Breakpoint",
+ "CPUProfile",
+ "Caller",
+ "Callers",
+ "CallersFrames",
+ "Compiler",
+ "Error",
+ "Frame",
+ "Frames",
+ "Func",
+ "FuncForPC",
+ "GC",
+ "GOARCH",
+ "GOMAXPROCS",
+ "GOOS",
+ "GOROOT",
+ "Goexit",
+ "GoroutineProfile",
+ "Gosched",
+ "KeepAlive",
+ "LockOSThread",
+ "MemProfile",
+ "MemProfileRate",
+ "MemProfileRecord",
+ "MemStats",
+ "MutexProfile",
+ "NumCPU",
+ "NumCgoCall",
+ "NumGoroutine",
+ "PanicNilError",
+ "Pinner",
+ "ReadMemStats",
+ "ReadTrace",
+ "SetBlockProfileRate",
+ "SetCPUProfileRate",
+ "SetCgoTraceback",
+ "SetFinalizer",
+ "SetMutexProfileFraction",
+ "Stack",
+ "StackRecord",
+ "StartTrace",
+ "StopTrace",
+ "ThreadCreateProfile",
+ "TypeAssertionError",
+ "UnlockOSThread",
+ "Version",
+ },
+ "runtime/cgo": {
+ "Handle",
+ "Incomplete",
+ "NewHandle",
+ },
+ "runtime/coverage": {
+ "ClearCounters",
+ "WriteCounters",
+ "WriteCountersDir",
+ "WriteMeta",
+ "WriteMetaDir",
+ },
+ "runtime/debug": {
+ "BuildInfo",
+ "BuildSetting",
+ "FreeOSMemory",
+ "GCStats",
+ "Module",
+ "ParseBuildInfo",
+ "PrintStack",
+ "ReadBuildInfo",
+ "ReadGCStats",
+ "SetGCPercent",
+ "SetMaxStack",
+ "SetMaxThreads",
+ "SetMemoryLimit",
+ "SetPanicOnFault",
+ "SetTraceback",
+ "Stack",
+ "WriteHeapDump",
+ },
+ "runtime/metrics": {
+ "All",
+ "Description",
+ "Float64Histogram",
+ "KindBad",
+ "KindFloat64",
+ "KindFloat64Histogram",
+ "KindUint64",
+ "Read",
+ "Sample",
+ "Value",
+ "ValueKind",
+ },
+ "runtime/pprof": {
+ "Do",
+ "ForLabels",
+ "Label",
+ "LabelSet",
+ "Labels",
+ "Lookup",
+ "NewProfile",
+ "Profile",
+ "Profiles",
+ "SetGoroutineLabels",
+ "StartCPUProfile",
+ "StopCPUProfile",
+ "WithLabels",
+ "WriteHeapProfile",
+ },
+ "runtime/trace": {
+ "IsEnabled",
+ "Log",
+ "Logf",
+ "NewTask",
+ "Region",
+ "Start",
+ "StartRegion",
+ "Stop",
+ "Task",
+ "WithRegion",
+ },
+ "slices": {
+ "BinarySearch",
+ "BinarySearchFunc",
+ "Clip",
+ "Clone",
+ "Compact",
+ "CompactFunc",
+ "Compare",
+ "CompareFunc",
+ "Contains",
+ "ContainsFunc",
+ "Delete",
+ "DeleteFunc",
+ "Equal",
+ "EqualFunc",
+ "Grow",
+ "Index",
+ "IndexFunc",
+ "Insert",
+ "IsSorted",
+ "IsSortedFunc",
+ "Max",
+ "MaxFunc",
+ "Min",
+ "MinFunc",
+ "Replace",
+ "Reverse",
+ "Sort",
+ "SortFunc",
+ "SortStableFunc",
+ },
+ "sort": {
+ "Find",
+ "Float64Slice",
+ "Float64s",
+ "Float64sAreSorted",
+ "IntSlice",
+ "Interface",
+ "Ints",
+ "IntsAreSorted",
+ "IsSorted",
+ "Reverse",
+ "Search",
+ "SearchFloat64s",
+ "SearchInts",
+ "SearchStrings",
+ "Slice",
+ "SliceIsSorted",
+ "SliceStable",
+ "Sort",
+ "Stable",
+ "StringSlice",
+ "Strings",
+ "StringsAreSorted",
+ },
+ "strconv": {
+ "AppendBool",
+ "AppendFloat",
+ "AppendInt",
+ "AppendQuote",
+ "AppendQuoteRune",
+ "AppendQuoteRuneToASCII",
+ "AppendQuoteRuneToGraphic",
+ "AppendQuoteToASCII",
+ "AppendQuoteToGraphic",
+ "AppendUint",
+ "Atoi",
+ "CanBackquote",
+ "ErrRange",
+ "ErrSyntax",
+ "FormatBool",
+ "FormatComplex",
+ "FormatFloat",
+ "FormatInt",
+ "FormatUint",
+ "IntSize",
+ "IsGraphic",
+ "IsPrint",
+ "Itoa",
+ "NumError",
+ "ParseBool",
+ "ParseComplex",
+ "ParseFloat",
+ "ParseInt",
+ "ParseUint",
+ "Quote",
+ "QuoteRune",
+ "QuoteRuneToASCII",
+ "QuoteRuneToGraphic",
+ "QuoteToASCII",
+ "QuoteToGraphic",
+ "QuotedPrefix",
+ "Unquote",
+ "UnquoteChar",
+ },
+ "strings": {
+ "Builder",
+ "Clone",
+ "Compare",
+ "Contains",
+ "ContainsAny",
+ "ContainsFunc",
+ "ContainsRune",
+ "Count",
+ "Cut",
+ "CutPrefix",
+ "CutSuffix",
+ "EqualFold",
+ "Fields",
+ "FieldsFunc",
+ "HasPrefix",
+ "HasSuffix",
+ "Index",
+ "IndexAny",
+ "IndexByte",
+ "IndexFunc",
+ "IndexRune",
+ "Join",
+ "LastIndex",
+ "LastIndexAny",
+ "LastIndexByte",
+ "LastIndexFunc",
+ "Map",
+ "NewReader",
+ "NewReplacer",
+ "Reader",
+ "Repeat",
+ "Replace",
+ "ReplaceAll",
+ "Replacer",
+ "Split",
+ "SplitAfter",
+ "SplitAfterN",
+ "SplitN",
+ "Title",
+ "ToLower",
+ "ToLowerSpecial",
+ "ToTitle",
+ "ToTitleSpecial",
+ "ToUpper",
+ "ToUpperSpecial",
+ "ToValidUTF8",
+ "Trim",
+ "TrimFunc",
+ "TrimLeft",
+ "TrimLeftFunc",
+ "TrimPrefix",
+ "TrimRight",
+ "TrimRightFunc",
+ "TrimSpace",
+ "TrimSuffix",
+ },
+ "sync": {
+ "Cond",
+ "Locker",
+ "Map",
+ "Mutex",
+ "NewCond",
+ "Once",
+ "OnceFunc",
+ "OnceValue",
+ "OnceValues",
+ "Pool",
+ "RWMutex",
+ "WaitGroup",
+ },
+ "sync/atomic": {
+ "AddInt32",
+ "AddInt64",
+ "AddUint32",
+ "AddUint64",
+ "AddUintptr",
+ "Bool",
+ "CompareAndSwapInt32",
+ "CompareAndSwapInt64",
+ "CompareAndSwapPointer",
+ "CompareAndSwapUint32",
+ "CompareAndSwapUint64",
+ "CompareAndSwapUintptr",
+ "Int32",
+ "Int64",
+ "LoadInt32",
+ "LoadInt64",
+ "LoadPointer",
+ "LoadUint32",
+ "LoadUint64",
+ "LoadUintptr",
+ "Pointer",
+ "StoreInt32",
+ "StoreInt64",
+ "StorePointer",
+ "StoreUint32",
+ "StoreUint64",
+ "StoreUintptr",
+ "SwapInt32",
+ "SwapInt64",
+ "SwapPointer",
+ "SwapUint32",
+ "SwapUint64",
+ "SwapUintptr",
+ "Uint32",
+ "Uint64",
+ "Uintptr",
+ "Value",
+ },
+ "syscall": {
+ "AF_ALG",
+ "AF_APPLETALK",
+ "AF_ARP",
+ "AF_ASH",
+ "AF_ATM",
+ "AF_ATMPVC",
+ "AF_ATMSVC",
+ "AF_AX25",
+ "AF_BLUETOOTH",
+ "AF_BRIDGE",
+ "AF_CAIF",
+ "AF_CAN",
+ "AF_CCITT",
+ "AF_CHAOS",
+ "AF_CNT",
+ "AF_COIP",
+ "AF_DATAKIT",
+ "AF_DECnet",
+ "AF_DLI",
+ "AF_E164",
+ "AF_ECMA",
+ "AF_ECONET",
+ "AF_ENCAP",
+ "AF_FILE",
+ "AF_HYLINK",
+ "AF_IEEE80211",
+ "AF_IEEE802154",
+ "AF_IMPLINK",
+ "AF_INET",
+ "AF_INET6",
+ "AF_INET6_SDP",
+ "AF_INET_SDP",
+ "AF_IPX",
+ "AF_IRDA",
+ "AF_ISDN",
+ "AF_ISO",
+ "AF_IUCV",
+ "AF_KEY",
+ "AF_LAT",
+ "AF_LINK",
+ "AF_LLC",
+ "AF_LOCAL",
+ "AF_MAX",
+ "AF_MPLS",
+ "AF_NATM",
+ "AF_NDRV",
+ "AF_NETBEUI",
+ "AF_NETBIOS",
+ "AF_NETGRAPH",
+ "AF_NETLINK",
+ "AF_NETROM",
+ "AF_NS",
+ "AF_OROUTE",
+ "AF_OSI",
+ "AF_PACKET",
+ "AF_PHONET",
+ "AF_PPP",
+ "AF_PPPOX",
+ "AF_PUP",
+ "AF_RDS",
+ "AF_RESERVED_36",
+ "AF_ROSE",
+ "AF_ROUTE",
+ "AF_RXRPC",
+ "AF_SCLUSTER",
+ "AF_SECURITY",
+ "AF_SIP",
+ "AF_SLOW",
+ "AF_SNA",
+ "AF_SYSTEM",
+ "AF_TIPC",
+ "AF_UNIX",
+ "AF_UNSPEC",
+ "AF_UTUN",
+ "AF_VENDOR00",
+ "AF_VENDOR01",
+ "AF_VENDOR02",
+ "AF_VENDOR03",
+ "AF_VENDOR04",
+ "AF_VENDOR05",
+ "AF_VENDOR06",
+ "AF_VENDOR07",
+ "AF_VENDOR08",
+ "AF_VENDOR09",
+ "AF_VENDOR10",
+ "AF_VENDOR11",
+ "AF_VENDOR12",
+ "AF_VENDOR13",
+ "AF_VENDOR14",
+ "AF_VENDOR15",
+ "AF_VENDOR16",
+ "AF_VENDOR17",
+ "AF_VENDOR18",
+ "AF_VENDOR19",
+ "AF_VENDOR20",
+ "AF_VENDOR21",
+ "AF_VENDOR22",
+ "AF_VENDOR23",
+ "AF_VENDOR24",
+ "AF_VENDOR25",
+ "AF_VENDOR26",
+ "AF_VENDOR27",
+ "AF_VENDOR28",
+ "AF_VENDOR29",
+ "AF_VENDOR30",
+ "AF_VENDOR31",
+ "AF_VENDOR32",
+ "AF_VENDOR33",
+ "AF_VENDOR34",
+ "AF_VENDOR35",
+ "AF_VENDOR36",
+ "AF_VENDOR37",
+ "AF_VENDOR38",
+ "AF_VENDOR39",
+ "AF_VENDOR40",
+ "AF_VENDOR41",
+ "AF_VENDOR42",
+ "AF_VENDOR43",
+ "AF_VENDOR44",
+ "AF_VENDOR45",
+ "AF_VENDOR46",
+ "AF_VENDOR47",
+ "AF_WANPIPE",
+ "AF_X25",
+ "AI_CANONNAME",
+ "AI_NUMERICHOST",
+ "AI_PASSIVE",
+ "APPLICATION_ERROR",
+ "ARPHRD_ADAPT",
+ "ARPHRD_APPLETLK",
+ "ARPHRD_ARCNET",
+ "ARPHRD_ASH",
+ "ARPHRD_ATM",
+ "ARPHRD_AX25",
+ "ARPHRD_BIF",
+ "ARPHRD_CHAOS",
+ "ARPHRD_CISCO",
+ "ARPHRD_CSLIP",
+ "ARPHRD_CSLIP6",
+ "ARPHRD_DDCMP",
+ "ARPHRD_DLCI",
+ "ARPHRD_ECONET",
+ "ARPHRD_EETHER",
+ "ARPHRD_ETHER",
+ "ARPHRD_EUI64",
+ "ARPHRD_FCAL",
+ "ARPHRD_FCFABRIC",
+ "ARPHRD_FCPL",
+ "ARPHRD_FCPP",
+ "ARPHRD_FDDI",
+ "ARPHRD_FRAD",
+ "ARPHRD_FRELAY",
+ "ARPHRD_HDLC",
+ "ARPHRD_HIPPI",
+ "ARPHRD_HWX25",
+ "ARPHRD_IEEE1394",
+ "ARPHRD_IEEE802",
+ "ARPHRD_IEEE80211",
+ "ARPHRD_IEEE80211_PRISM",
+ "ARPHRD_IEEE80211_RADIOTAP",
+ "ARPHRD_IEEE802154",
+ "ARPHRD_IEEE802154_PHY",
+ "ARPHRD_IEEE802_TR",
+ "ARPHRD_INFINIBAND",
+ "ARPHRD_IPDDP",
+ "ARPHRD_IPGRE",
+ "ARPHRD_IRDA",
+ "ARPHRD_LAPB",
+ "ARPHRD_LOCALTLK",
+ "ARPHRD_LOOPBACK",
+ "ARPHRD_METRICOM",
+ "ARPHRD_NETROM",
+ "ARPHRD_NONE",
+ "ARPHRD_PIMREG",
+ "ARPHRD_PPP",
+ "ARPHRD_PRONET",
+ "ARPHRD_RAWHDLC",
+ "ARPHRD_ROSE",
+ "ARPHRD_RSRVD",
+ "ARPHRD_SIT",
+ "ARPHRD_SKIP",
+ "ARPHRD_SLIP",
+ "ARPHRD_SLIP6",
+ "ARPHRD_STRIP",
+ "ARPHRD_TUNNEL",
+ "ARPHRD_TUNNEL6",
+ "ARPHRD_VOID",
+ "ARPHRD_X25",
+ "AUTHTYPE_CLIENT",
+ "AUTHTYPE_SERVER",
+ "Accept",
+ "Accept4",
+ "AcceptEx",
+ "Access",
+ "Acct",
+ "AddrinfoW",
+ "Adjtime",
+ "Adjtimex",
+ "AllThreadsSyscall",
+ "AllThreadsSyscall6",
+ "AttachLsf",
+ "B0",
+ "B1000000",
+ "B110",
+ "B115200",
+ "B1152000",
+ "B1200",
+ "B134",
+ "B14400",
+ "B150",
+ "B1500000",
+ "B1800",
+ "B19200",
+ "B200",
+ "B2000000",
+ "B230400",
+ "B2400",
+ "B2500000",
+ "B28800",
+ "B300",
+ "B3000000",
+ "B3500000",
+ "B38400",
+ "B4000000",
+ "B460800",
+ "B4800",
+ "B50",
+ "B500000",
+ "B57600",
+ "B576000",
+ "B600",
+ "B7200",
+ "B75",
+ "B76800",
+ "B921600",
+ "B9600",
+ "BASE_PROTOCOL",
+ "BIOCFEEDBACK",
+ "BIOCFLUSH",
+ "BIOCGBLEN",
+ "BIOCGDIRECTION",
+ "BIOCGDIRFILT",
+ "BIOCGDLT",
+ "BIOCGDLTLIST",
+ "BIOCGETBUFMODE",
+ "BIOCGETIF",
+ "BIOCGETZMAX",
+ "BIOCGFEEDBACK",
+ "BIOCGFILDROP",
+ "BIOCGHDRCMPLT",
+ "BIOCGRSIG",
+ "BIOCGRTIMEOUT",
+ "BIOCGSEESENT",
+ "BIOCGSTATS",
+ "BIOCGSTATSOLD",
+ "BIOCGTSTAMP",
+ "BIOCIMMEDIATE",
+ "BIOCLOCK",
+ "BIOCPROMISC",
+ "BIOCROTZBUF",
+ "BIOCSBLEN",
+ "BIOCSDIRECTION",
+ "BIOCSDIRFILT",
+ "BIOCSDLT",
+ "BIOCSETBUFMODE",
+ "BIOCSETF",
+ "BIOCSETFNR",
+ "BIOCSETIF",
+ "BIOCSETWF",
+ "BIOCSETZBUF",
+ "BIOCSFEEDBACK",
+ "BIOCSFILDROP",
+ "BIOCSHDRCMPLT",
+ "BIOCSRSIG",
+ "BIOCSRTIMEOUT",
+ "BIOCSSEESENT",
+ "BIOCSTCPF",
+ "BIOCSTSTAMP",
+ "BIOCSUDPF",
+ "BIOCVERSION",
+ "BPF_A",
+ "BPF_ABS",
+ "BPF_ADD",
+ "BPF_ALIGNMENT",
+ "BPF_ALIGNMENT32",
+ "BPF_ALU",
+ "BPF_AND",
+ "BPF_B",
+ "BPF_BUFMODE_BUFFER",
+ "BPF_BUFMODE_ZBUF",
+ "BPF_DFLTBUFSIZE",
+ "BPF_DIRECTION_IN",
+ "BPF_DIRECTION_OUT",
+ "BPF_DIV",
+ "BPF_H",
+ "BPF_IMM",
+ "BPF_IND",
+ "BPF_JA",
+ "BPF_JEQ",
+ "BPF_JGE",
+ "BPF_JGT",
+ "BPF_JMP",
+ "BPF_JSET",
+ "BPF_K",
+ "BPF_LD",
+ "BPF_LDX",
+ "BPF_LEN",
+ "BPF_LSH",
+ "BPF_MAJOR_VERSION",
+ "BPF_MAXBUFSIZE",
+ "BPF_MAXINSNS",
+ "BPF_MEM",
+ "BPF_MEMWORDS",
+ "BPF_MINBUFSIZE",
+ "BPF_MINOR_VERSION",
+ "BPF_MISC",
+ "BPF_MSH",
+ "BPF_MUL",
+ "BPF_NEG",
+ "BPF_OR",
+ "BPF_RELEASE",
+ "BPF_RET",
+ "BPF_RSH",
+ "BPF_ST",
+ "BPF_STX",
+ "BPF_SUB",
+ "BPF_TAX",
+ "BPF_TXA",
+ "BPF_T_BINTIME",
+ "BPF_T_BINTIME_FAST",
+ "BPF_T_BINTIME_MONOTONIC",
+ "BPF_T_BINTIME_MONOTONIC_FAST",
+ "BPF_T_FAST",
+ "BPF_T_FLAG_MASK",
+ "BPF_T_FORMAT_MASK",
+ "BPF_T_MICROTIME",
+ "BPF_T_MICROTIME_FAST",
+ "BPF_T_MICROTIME_MONOTONIC",
+ "BPF_T_MICROTIME_MONOTONIC_FAST",
+ "BPF_T_MONOTONIC",
+ "BPF_T_MONOTONIC_FAST",
+ "BPF_T_NANOTIME",
+ "BPF_T_NANOTIME_FAST",
+ "BPF_T_NANOTIME_MONOTONIC",
+ "BPF_T_NANOTIME_MONOTONIC_FAST",
+ "BPF_T_NONE",
+ "BPF_T_NORMAL",
+ "BPF_W",
+ "BPF_X",
+ "BRKINT",
+ "Bind",
+ "BindToDevice",
+ "BpfBuflen",
+ "BpfDatalink",
+ "BpfHdr",
+ "BpfHeadercmpl",
+ "BpfInsn",
+ "BpfInterface",
+ "BpfJump",
+ "BpfProgram",
+ "BpfStat",
+ "BpfStats",
+ "BpfStmt",
+ "BpfTimeout",
+ "BpfTimeval",
+ "BpfVersion",
+ "BpfZbuf",
+ "BpfZbufHeader",
+ "ByHandleFileInformation",
+ "BytePtrFromString",
+ "ByteSliceFromString",
+ "CCR0_FLUSH",
+ "CERT_CHAIN_POLICY_AUTHENTICODE",
+ "CERT_CHAIN_POLICY_AUTHENTICODE_TS",
+ "CERT_CHAIN_POLICY_BASE",
+ "CERT_CHAIN_POLICY_BASIC_CONSTRAINTS",
+ "CERT_CHAIN_POLICY_EV",
+ "CERT_CHAIN_POLICY_MICROSOFT_ROOT",
+ "CERT_CHAIN_POLICY_NT_AUTH",
+ "CERT_CHAIN_POLICY_SSL",
+ "CERT_E_CN_NO_MATCH",
+ "CERT_E_EXPIRED",
+ "CERT_E_PURPOSE",
+ "CERT_E_ROLE",
+ "CERT_E_UNTRUSTEDROOT",
+ "CERT_STORE_ADD_ALWAYS",
+ "CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG",
+ "CERT_STORE_PROV_MEMORY",
+ "CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT",
+ "CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT",
+ "CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT",
+ "CERT_TRUST_HAS_NOT_SUPPORTED_CRITICAL_EXT",
+ "CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT",
+ "CERT_TRUST_INVALID_BASIC_CONSTRAINTS",
+ "CERT_TRUST_INVALID_EXTENSION",
+ "CERT_TRUST_INVALID_NAME_CONSTRAINTS",
+ "CERT_TRUST_INVALID_POLICY_CONSTRAINTS",
+ "CERT_TRUST_IS_CYCLIC",
+ "CERT_TRUST_IS_EXPLICIT_DISTRUST",
+ "CERT_TRUST_IS_NOT_SIGNATURE_VALID",
+ "CERT_TRUST_IS_NOT_TIME_VALID",
+ "CERT_TRUST_IS_NOT_VALID_FOR_USAGE",
+ "CERT_TRUST_IS_OFFLINE_REVOCATION",
+ "CERT_TRUST_IS_REVOKED",
+ "CERT_TRUST_IS_UNTRUSTED_ROOT",
+ "CERT_TRUST_NO_ERROR",
+ "CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY",
+ "CERT_TRUST_REVOCATION_STATUS_UNKNOWN",
+ "CFLUSH",
+ "CLOCAL",
+ "CLONE_CHILD_CLEARTID",
+ "CLONE_CHILD_SETTID",
+ "CLONE_CLEAR_SIGHAND",
+ "CLONE_CSIGNAL",
+ "CLONE_DETACHED",
+ "CLONE_FILES",
+ "CLONE_FS",
+ "CLONE_INTO_CGROUP",
+ "CLONE_IO",
+ "CLONE_NEWCGROUP",
+ "CLONE_NEWIPC",
+ "CLONE_NEWNET",
+ "CLONE_NEWNS",
+ "CLONE_NEWPID",
+ "CLONE_NEWTIME",
+ "CLONE_NEWUSER",
+ "CLONE_NEWUTS",
+ "CLONE_PARENT",
+ "CLONE_PARENT_SETTID",
+ "CLONE_PID",
+ "CLONE_PIDFD",
+ "CLONE_PTRACE",
+ "CLONE_SETTLS",
+ "CLONE_SIGHAND",
+ "CLONE_SYSVSEM",
+ "CLONE_THREAD",
+ "CLONE_UNTRACED",
+ "CLONE_VFORK",
+ "CLONE_VM",
+ "CPUID_CFLUSH",
+ "CREAD",
+ "CREATE_ALWAYS",
+ "CREATE_NEW",
+ "CREATE_NEW_PROCESS_GROUP",
+ "CREATE_UNICODE_ENVIRONMENT",
+ "CRYPT_DEFAULT_CONTAINER_OPTIONAL",
+ "CRYPT_DELETEKEYSET",
+ "CRYPT_MACHINE_KEYSET",
+ "CRYPT_NEWKEYSET",
+ "CRYPT_SILENT",
+ "CRYPT_VERIFYCONTEXT",
+ "CS5",
+ "CS6",
+ "CS7",
+ "CS8",
+ "CSIZE",
+ "CSTART",
+ "CSTATUS",
+ "CSTOP",
+ "CSTOPB",
+ "CSUSP",
+ "CTL_MAXNAME",
+ "CTL_NET",
+ "CTL_QUERY",
+ "CTRL_BREAK_EVENT",
+ "CTRL_CLOSE_EVENT",
+ "CTRL_C_EVENT",
+ "CTRL_LOGOFF_EVENT",
+ "CTRL_SHUTDOWN_EVENT",
+ "CancelIo",
+ "CancelIoEx",
+ "CertAddCertificateContextToStore",
+ "CertChainContext",
+ "CertChainElement",
+ "CertChainPara",
+ "CertChainPolicyPara",
+ "CertChainPolicyStatus",
+ "CertCloseStore",
+ "CertContext",
+ "CertCreateCertificateContext",
+ "CertEnhKeyUsage",
+ "CertEnumCertificatesInStore",
+ "CertFreeCertificateChain",
+ "CertFreeCertificateContext",
+ "CertGetCertificateChain",
+ "CertInfo",
+ "CertOpenStore",
+ "CertOpenSystemStore",
+ "CertRevocationCrlInfo",
+ "CertRevocationInfo",
+ "CertSimpleChain",
+ "CertTrustListInfo",
+ "CertTrustStatus",
+ "CertUsageMatch",
+ "CertVerifyCertificateChainPolicy",
+ "Chdir",
+ "CheckBpfVersion",
+ "Chflags",
+ "Chmod",
+ "Chown",
+ "Chroot",
+ "Clearenv",
+ "Close",
+ "CloseHandle",
+ "CloseOnExec",
+ "Closesocket",
+ "CmsgLen",
+ "CmsgSpace",
+ "Cmsghdr",
+ "CommandLineToArgv",
+ "ComputerName",
+ "Conn",
+ "Connect",
+ "ConnectEx",
+ "ConvertSidToStringSid",
+ "ConvertStringSidToSid",
+ "CopySid",
+ "Creat",
+ "CreateDirectory",
+ "CreateFile",
+ "CreateFileMapping",
+ "CreateHardLink",
+ "CreateIoCompletionPort",
+ "CreatePipe",
+ "CreateProcess",
+ "CreateProcessAsUser",
+ "CreateSymbolicLink",
+ "CreateToolhelp32Snapshot",
+ "Credential",
+ "CryptAcquireContext",
+ "CryptGenRandom",
+ "CryptReleaseContext",
+ "DIOCBSFLUSH",
+ "DIOCOSFPFLUSH",
+ "DLL",
+ "DLLError",
+ "DLT_A429",
+ "DLT_A653_ICM",
+ "DLT_AIRONET_HEADER",
+ "DLT_AOS",
+ "DLT_APPLE_IP_OVER_IEEE1394",
+ "DLT_ARCNET",
+ "DLT_ARCNET_LINUX",
+ "DLT_ATM_CLIP",
+ "DLT_ATM_RFC1483",
+ "DLT_AURORA",
+ "DLT_AX25",
+ "DLT_AX25_KISS",
+ "DLT_BACNET_MS_TP",
+ "DLT_BLUETOOTH_HCI_H4",
+ "DLT_BLUETOOTH_HCI_H4_WITH_PHDR",
+ "DLT_CAN20B",
+ "DLT_CAN_SOCKETCAN",
+ "DLT_CHAOS",
+ "DLT_CHDLC",
+ "DLT_CISCO_IOS",
+ "DLT_C_HDLC",
+ "DLT_C_HDLC_WITH_DIR",
+ "DLT_DBUS",
+ "DLT_DECT",
+ "DLT_DOCSIS",
+ "DLT_DVB_CI",
+ "DLT_ECONET",
+ "DLT_EN10MB",
+ "DLT_EN3MB",
+ "DLT_ENC",
+ "DLT_ERF",
+ "DLT_ERF_ETH",
+ "DLT_ERF_POS",
+ "DLT_FC_2",
+ "DLT_FC_2_WITH_FRAME_DELIMS",
+ "DLT_FDDI",
+ "DLT_FLEXRAY",
+ "DLT_FRELAY",
+ "DLT_FRELAY_WITH_DIR",
+ "DLT_GCOM_SERIAL",
+ "DLT_GCOM_T1E1",
+ "DLT_GPF_F",
+ "DLT_GPF_T",
+ "DLT_GPRS_LLC",
+ "DLT_GSMTAP_ABIS",
+ "DLT_GSMTAP_UM",
+ "DLT_HDLC",
+ "DLT_HHDLC",
+ "DLT_HIPPI",
+ "DLT_IBM_SN",
+ "DLT_IBM_SP",
+ "DLT_IEEE802",
+ "DLT_IEEE802_11",
+ "DLT_IEEE802_11_RADIO",
+ "DLT_IEEE802_11_RADIO_AVS",
+ "DLT_IEEE802_15_4",
+ "DLT_IEEE802_15_4_LINUX",
+ "DLT_IEEE802_15_4_NOFCS",
+ "DLT_IEEE802_15_4_NONASK_PHY",
+ "DLT_IEEE802_16_MAC_CPS",
+ "DLT_IEEE802_16_MAC_CPS_RADIO",
+ "DLT_IPFILTER",
+ "DLT_IPMB",
+ "DLT_IPMB_LINUX",
+ "DLT_IPNET",
+ "DLT_IPOIB",
+ "DLT_IPV4",
+ "DLT_IPV6",
+ "DLT_IP_OVER_FC",
+ "DLT_JUNIPER_ATM1",
+ "DLT_JUNIPER_ATM2",
+ "DLT_JUNIPER_ATM_CEMIC",
+ "DLT_JUNIPER_CHDLC",
+ "DLT_JUNIPER_ES",
+ "DLT_JUNIPER_ETHER",
+ "DLT_JUNIPER_FIBRECHANNEL",
+ "DLT_JUNIPER_FRELAY",
+ "DLT_JUNIPER_GGSN",
+ "DLT_JUNIPER_ISM",
+ "DLT_JUNIPER_MFR",
+ "DLT_JUNIPER_MLFR",
+ "DLT_JUNIPER_MLPPP",
+ "DLT_JUNIPER_MONITOR",
+ "DLT_JUNIPER_PIC_PEER",
+ "DLT_JUNIPER_PPP",
+ "DLT_JUNIPER_PPPOE",
+ "DLT_JUNIPER_PPPOE_ATM",
+ "DLT_JUNIPER_SERVICES",
+ "DLT_JUNIPER_SRX_E2E",
+ "DLT_JUNIPER_ST",
+ "DLT_JUNIPER_VP",
+ "DLT_JUNIPER_VS",
+ "DLT_LAPB_WITH_DIR",
+ "DLT_LAPD",
+ "DLT_LIN",
+ "DLT_LINUX_EVDEV",
+ "DLT_LINUX_IRDA",
+ "DLT_LINUX_LAPD",
+ "DLT_LINUX_PPP_WITHDIRECTION",
+ "DLT_LINUX_SLL",
+ "DLT_LOOP",
+ "DLT_LTALK",
+ "DLT_MATCHING_MAX",
+ "DLT_MATCHING_MIN",
+ "DLT_MFR",
+ "DLT_MOST",
+ "DLT_MPEG_2_TS",
+ "DLT_MPLS",
+ "DLT_MTP2",
+ "DLT_MTP2_WITH_PHDR",
+ "DLT_MTP3",
+ "DLT_MUX27010",
+ "DLT_NETANALYZER",
+ "DLT_NETANALYZER_TRANSPARENT",
+ "DLT_NFC_LLCP",
+ "DLT_NFLOG",
+ "DLT_NG40",
+ "DLT_NULL",
+ "DLT_PCI_EXP",
+ "DLT_PFLOG",
+ "DLT_PFSYNC",
+ "DLT_PPI",
+ "DLT_PPP",
+ "DLT_PPP_BSDOS",
+ "DLT_PPP_ETHER",
+ "DLT_PPP_PPPD",
+ "DLT_PPP_SERIAL",
+ "DLT_PPP_WITH_DIR",
+ "DLT_PPP_WITH_DIRECTION",
+ "DLT_PRISM_HEADER",
+ "DLT_PRONET",
+ "DLT_RAIF1",
+ "DLT_RAW",
+ "DLT_RAWAF_MASK",
+ "DLT_RIO",
+ "DLT_SCCP",
+ "DLT_SITA",
+ "DLT_SLIP",
+ "DLT_SLIP_BSDOS",
+ "DLT_STANAG_5066_D_PDU",
+ "DLT_SUNATM",
+ "DLT_SYMANTEC_FIREWALL",
+ "DLT_TZSP",
+ "DLT_USB",
+ "DLT_USB_LINUX",
+ "DLT_USB_LINUX_MMAPPED",
+ "DLT_USER0",
+ "DLT_USER1",
+ "DLT_USER10",
+ "DLT_USER11",
+ "DLT_USER12",
+ "DLT_USER13",
+ "DLT_USER14",
+ "DLT_USER15",
+ "DLT_USER2",
+ "DLT_USER3",
+ "DLT_USER4",
+ "DLT_USER5",
+ "DLT_USER6",
+ "DLT_USER7",
+ "DLT_USER8",
+ "DLT_USER9",
+ "DLT_WIHART",
+ "DLT_X2E_SERIAL",
+ "DLT_X2E_XORAYA",
+ "DNSMXData",
+ "DNSPTRData",
+ "DNSRecord",
+ "DNSSRVData",
+ "DNSTXTData",
+ "DNS_INFO_NO_RECORDS",
+ "DNS_TYPE_A",
+ "DNS_TYPE_A6",
+ "DNS_TYPE_AAAA",
+ "DNS_TYPE_ADDRS",
+ "DNS_TYPE_AFSDB",
+ "DNS_TYPE_ALL",
+ "DNS_TYPE_ANY",
+ "DNS_TYPE_ATMA",
+ "DNS_TYPE_AXFR",
+ "DNS_TYPE_CERT",
+ "DNS_TYPE_CNAME",
+ "DNS_TYPE_DHCID",
+ "DNS_TYPE_DNAME",
+ "DNS_TYPE_DNSKEY",
+ "DNS_TYPE_DS",
+ "DNS_TYPE_EID",
+ "DNS_TYPE_GID",
+ "DNS_TYPE_GPOS",
+ "DNS_TYPE_HINFO",
+ "DNS_TYPE_ISDN",
+ "DNS_TYPE_IXFR",
+ "DNS_TYPE_KEY",
+ "DNS_TYPE_KX",
+ "DNS_TYPE_LOC",
+ "DNS_TYPE_MAILA",
+ "DNS_TYPE_MAILB",
+ "DNS_TYPE_MB",
+ "DNS_TYPE_MD",
+ "DNS_TYPE_MF",
+ "DNS_TYPE_MG",
+ "DNS_TYPE_MINFO",
+ "DNS_TYPE_MR",
+ "DNS_TYPE_MX",
+ "DNS_TYPE_NAPTR",
+ "DNS_TYPE_NBSTAT",
+ "DNS_TYPE_NIMLOC",
+ "DNS_TYPE_NS",
+ "DNS_TYPE_NSAP",
+ "DNS_TYPE_NSAPPTR",
+ "DNS_TYPE_NSEC",
+ "DNS_TYPE_NULL",
+ "DNS_TYPE_NXT",
+ "DNS_TYPE_OPT",
+ "DNS_TYPE_PTR",
+ "DNS_TYPE_PX",
+ "DNS_TYPE_RP",
+ "DNS_TYPE_RRSIG",
+ "DNS_TYPE_RT",
+ "DNS_TYPE_SIG",
+ "DNS_TYPE_SINK",
+ "DNS_TYPE_SOA",
+ "DNS_TYPE_SRV",
+ "DNS_TYPE_TEXT",
+ "DNS_TYPE_TKEY",
+ "DNS_TYPE_TSIG",
+ "DNS_TYPE_UID",
+ "DNS_TYPE_UINFO",
+ "DNS_TYPE_UNSPEC",
+ "DNS_TYPE_WINS",
+ "DNS_TYPE_WINSR",
+ "DNS_TYPE_WKS",
+ "DNS_TYPE_X25",
+ "DT_BLK",
+ "DT_CHR",
+ "DT_DIR",
+ "DT_FIFO",
+ "DT_LNK",
+ "DT_REG",
+ "DT_SOCK",
+ "DT_UNKNOWN",
+ "DT_WHT",
+ "DUPLICATE_CLOSE_SOURCE",
+ "DUPLICATE_SAME_ACCESS",
+ "DeleteFile",
+ "DetachLsf",
+ "DeviceIoControl",
+ "Dirent",
+ "DnsNameCompare",
+ "DnsQuery",
+ "DnsRecordListFree",
+ "DnsSectionAdditional",
+ "DnsSectionAnswer",
+ "DnsSectionAuthority",
+ "DnsSectionQuestion",
+ "Dup",
+ "Dup2",
+ "Dup3",
+ "DuplicateHandle",
+ "E2BIG",
+ "EACCES",
+ "EADDRINUSE",
+ "EADDRNOTAVAIL",
+ "EADV",
+ "EAFNOSUPPORT",
+ "EAGAIN",
+ "EALREADY",
+ "EAUTH",
+ "EBADARCH",
+ "EBADE",
+ "EBADEXEC",
+ "EBADF",
+ "EBADFD",
+ "EBADMACHO",
+ "EBADMSG",
+ "EBADR",
+ "EBADRPC",
+ "EBADRQC",
+ "EBADSLT",
+ "EBFONT",
+ "EBUSY",
+ "ECANCELED",
+ "ECAPMODE",
+ "ECHILD",
+ "ECHO",
+ "ECHOCTL",
+ "ECHOE",
+ "ECHOK",
+ "ECHOKE",
+ "ECHONL",
+ "ECHOPRT",
+ "ECHRNG",
+ "ECOMM",
+ "ECONNABORTED",
+ "ECONNREFUSED",
+ "ECONNRESET",
+ "EDEADLK",
+ "EDEADLOCK",
+ "EDESTADDRREQ",
+ "EDEVERR",
+ "EDOM",
+ "EDOOFUS",
+ "EDOTDOT",
+ "EDQUOT",
+ "EEXIST",
+ "EFAULT",
+ "EFBIG",
+ "EFER_LMA",
+ "EFER_LME",
+ "EFER_NXE",
+ "EFER_SCE",
+ "EFTYPE",
+ "EHOSTDOWN",
+ "EHOSTUNREACH",
+ "EHWPOISON",
+ "EIDRM",
+ "EILSEQ",
+ "EINPROGRESS",
+ "EINTR",
+ "EINVAL",
+ "EIO",
+ "EIPSEC",
+ "EISCONN",
+ "EISDIR",
+ "EISNAM",
+ "EKEYEXPIRED",
+ "EKEYREJECTED",
+ "EKEYREVOKED",
+ "EL2HLT",
+ "EL2NSYNC",
+ "EL3HLT",
+ "EL3RST",
+ "ELAST",
+ "ELF_NGREG",
+ "ELF_PRARGSZ",
+ "ELIBACC",
+ "ELIBBAD",
+ "ELIBEXEC",
+ "ELIBMAX",
+ "ELIBSCN",
+ "ELNRNG",
+ "ELOOP",
+ "EMEDIUMTYPE",
+ "EMFILE",
+ "EMLINK",
+ "EMSGSIZE",
+ "EMT_TAGOVF",
+ "EMULTIHOP",
+ "EMUL_ENABLED",
+ "EMUL_LINUX",
+ "EMUL_LINUX32",
+ "EMUL_MAXID",
+ "EMUL_NATIVE",
+ "ENAMETOOLONG",
+ "ENAVAIL",
+ "ENDRUNDISC",
+ "ENEEDAUTH",
+ "ENETDOWN",
+ "ENETRESET",
+ "ENETUNREACH",
+ "ENFILE",
+ "ENOANO",
+ "ENOATTR",
+ "ENOBUFS",
+ "ENOCSI",
+ "ENODATA",
+ "ENODEV",
+ "ENOENT",
+ "ENOEXEC",
+ "ENOKEY",
+ "ENOLCK",
+ "ENOLINK",
+ "ENOMEDIUM",
+ "ENOMEM",
+ "ENOMSG",
+ "ENONET",
+ "ENOPKG",
+ "ENOPOLICY",
+ "ENOPROTOOPT",
+ "ENOSPC",
+ "ENOSR",
+ "ENOSTR",
+ "ENOSYS",
+ "ENOTBLK",
+ "ENOTCAPABLE",
+ "ENOTCONN",
+ "ENOTDIR",
+ "ENOTEMPTY",
+ "ENOTNAM",
+ "ENOTRECOVERABLE",
+ "ENOTSOCK",
+ "ENOTSUP",
+ "ENOTTY",
+ "ENOTUNIQ",
+ "ENXIO",
+ "EN_SW_CTL_INF",
+ "EN_SW_CTL_PREC",
+ "EN_SW_CTL_ROUND",
+ "EN_SW_DATACHAIN",
+ "EN_SW_DENORM",
+ "EN_SW_INVOP",
+ "EN_SW_OVERFLOW",
+ "EN_SW_PRECLOSS",
+ "EN_SW_UNDERFLOW",
+ "EN_SW_ZERODIV",
+ "EOPNOTSUPP",
+ "EOVERFLOW",
+ "EOWNERDEAD",
+ "EPERM",
+ "EPFNOSUPPORT",
+ "EPIPE",
+ "EPOLLERR",
+ "EPOLLET",
+ "EPOLLHUP",
+ "EPOLLIN",
+ "EPOLLMSG",
+ "EPOLLONESHOT",
+ "EPOLLOUT",
+ "EPOLLPRI",
+ "EPOLLRDBAND",
+ "EPOLLRDHUP",
+ "EPOLLRDNORM",
+ "EPOLLWRBAND",
+ "EPOLLWRNORM",
+ "EPOLL_CLOEXEC",
+ "EPOLL_CTL_ADD",
+ "EPOLL_CTL_DEL",
+ "EPOLL_CTL_MOD",
+ "EPOLL_NONBLOCK",
+ "EPROCLIM",
+ "EPROCUNAVAIL",
+ "EPROGMISMATCH",
+ "EPROGUNAVAIL",
+ "EPROTO",
+ "EPROTONOSUPPORT",
+ "EPROTOTYPE",
+ "EPWROFF",
+ "EQFULL",
+ "ERANGE",
+ "EREMCHG",
+ "EREMOTE",
+ "EREMOTEIO",
+ "ERESTART",
+ "ERFKILL",
+ "EROFS",
+ "ERPCMISMATCH",
+ "ERROR_ACCESS_DENIED",
+ "ERROR_ALREADY_EXISTS",
+ "ERROR_BROKEN_PIPE",
+ "ERROR_BUFFER_OVERFLOW",
+ "ERROR_DIR_NOT_EMPTY",
+ "ERROR_ENVVAR_NOT_FOUND",
+ "ERROR_FILE_EXISTS",
+ "ERROR_FILE_NOT_FOUND",
+ "ERROR_HANDLE_EOF",
+ "ERROR_INSUFFICIENT_BUFFER",
+ "ERROR_IO_PENDING",
+ "ERROR_MOD_NOT_FOUND",
+ "ERROR_MORE_DATA",
+ "ERROR_NETNAME_DELETED",
+ "ERROR_NOT_FOUND",
+ "ERROR_NO_MORE_FILES",
+ "ERROR_OPERATION_ABORTED",
+ "ERROR_PATH_NOT_FOUND",
+ "ERROR_PRIVILEGE_NOT_HELD",
+ "ERROR_PROC_NOT_FOUND",
+ "ESHLIBVERS",
+ "ESHUTDOWN",
+ "ESOCKTNOSUPPORT",
+ "ESPIPE",
+ "ESRCH",
+ "ESRMNT",
+ "ESTALE",
+ "ESTRPIPE",
+ "ETHERCAP_JUMBO_MTU",
+ "ETHERCAP_VLAN_HWTAGGING",
+ "ETHERCAP_VLAN_MTU",
+ "ETHERMIN",
+ "ETHERMTU",
+ "ETHERMTU_JUMBO",
+ "ETHERTYPE_8023",
+ "ETHERTYPE_AARP",
+ "ETHERTYPE_ACCTON",
+ "ETHERTYPE_AEONIC",
+ "ETHERTYPE_ALPHA",
+ "ETHERTYPE_AMBER",
+ "ETHERTYPE_AMOEBA",
+ "ETHERTYPE_AOE",
+ "ETHERTYPE_APOLLO",
+ "ETHERTYPE_APOLLODOMAIN",
+ "ETHERTYPE_APPLETALK",
+ "ETHERTYPE_APPLITEK",
+ "ETHERTYPE_ARGONAUT",
+ "ETHERTYPE_ARP",
+ "ETHERTYPE_AT",
+ "ETHERTYPE_ATALK",
+ "ETHERTYPE_ATOMIC",
+ "ETHERTYPE_ATT",
+ "ETHERTYPE_ATTSTANFORD",
+ "ETHERTYPE_AUTOPHON",
+ "ETHERTYPE_AXIS",
+ "ETHERTYPE_BCLOOP",
+ "ETHERTYPE_BOFL",
+ "ETHERTYPE_CABLETRON",
+ "ETHERTYPE_CHAOS",
+ "ETHERTYPE_COMDESIGN",
+ "ETHERTYPE_COMPUGRAPHIC",
+ "ETHERTYPE_COUNTERPOINT",
+ "ETHERTYPE_CRONUS",
+ "ETHERTYPE_CRONUSVLN",
+ "ETHERTYPE_DCA",
+ "ETHERTYPE_DDE",
+ "ETHERTYPE_DEBNI",
+ "ETHERTYPE_DECAM",
+ "ETHERTYPE_DECCUST",
+ "ETHERTYPE_DECDIAG",
+ "ETHERTYPE_DECDNS",
+ "ETHERTYPE_DECDTS",
+ "ETHERTYPE_DECEXPER",
+ "ETHERTYPE_DECLAST",
+ "ETHERTYPE_DECLTM",
+ "ETHERTYPE_DECMUMPS",
+ "ETHERTYPE_DECNETBIOS",
+ "ETHERTYPE_DELTACON",
+ "ETHERTYPE_DIDDLE",
+ "ETHERTYPE_DLOG1",
+ "ETHERTYPE_DLOG2",
+ "ETHERTYPE_DN",
+ "ETHERTYPE_DOGFIGHT",
+ "ETHERTYPE_DSMD",
+ "ETHERTYPE_ECMA",
+ "ETHERTYPE_ENCRYPT",
+ "ETHERTYPE_ES",
+ "ETHERTYPE_EXCELAN",
+ "ETHERTYPE_EXPERDATA",
+ "ETHERTYPE_FLIP",
+ "ETHERTYPE_FLOWCONTROL",
+ "ETHERTYPE_FRARP",
+ "ETHERTYPE_GENDYN",
+ "ETHERTYPE_HAYES",
+ "ETHERTYPE_HIPPI_FP",
+ "ETHERTYPE_HITACHI",
+ "ETHERTYPE_HP",
+ "ETHERTYPE_IEEEPUP",
+ "ETHERTYPE_IEEEPUPAT",
+ "ETHERTYPE_IMLBL",
+ "ETHERTYPE_IMLBLDIAG",
+ "ETHERTYPE_IP",
+ "ETHERTYPE_IPAS",
+ "ETHERTYPE_IPV6",
+ "ETHERTYPE_IPX",
+ "ETHERTYPE_IPXNEW",
+ "ETHERTYPE_KALPANA",
+ "ETHERTYPE_LANBRIDGE",
+ "ETHERTYPE_LANPROBE",
+ "ETHERTYPE_LAT",
+ "ETHERTYPE_LBACK",
+ "ETHERTYPE_LITTLE",
+ "ETHERTYPE_LLDP",
+ "ETHERTYPE_LOGICRAFT",
+ "ETHERTYPE_LOOPBACK",
+ "ETHERTYPE_MATRA",
+ "ETHERTYPE_MAX",
+ "ETHERTYPE_MERIT",
+ "ETHERTYPE_MICP",
+ "ETHERTYPE_MOPDL",
+ "ETHERTYPE_MOPRC",
+ "ETHERTYPE_MOTOROLA",
+ "ETHERTYPE_MPLS",
+ "ETHERTYPE_MPLS_MCAST",
+ "ETHERTYPE_MUMPS",
+ "ETHERTYPE_NBPCC",
+ "ETHERTYPE_NBPCLAIM",
+ "ETHERTYPE_NBPCLREQ",
+ "ETHERTYPE_NBPCLRSP",
+ "ETHERTYPE_NBPCREQ",
+ "ETHERTYPE_NBPCRSP",
+ "ETHERTYPE_NBPDG",
+ "ETHERTYPE_NBPDGB",
+ "ETHERTYPE_NBPDLTE",
+ "ETHERTYPE_NBPRAR",
+ "ETHERTYPE_NBPRAS",
+ "ETHERTYPE_NBPRST",
+ "ETHERTYPE_NBPSCD",
+ "ETHERTYPE_NBPVCD",
+ "ETHERTYPE_NBS",
+ "ETHERTYPE_NCD",
+ "ETHERTYPE_NESTAR",
+ "ETHERTYPE_NETBEUI",
+ "ETHERTYPE_NOVELL",
+ "ETHERTYPE_NS",
+ "ETHERTYPE_NSAT",
+ "ETHERTYPE_NSCOMPAT",
+ "ETHERTYPE_NTRAILER",
+ "ETHERTYPE_OS9",
+ "ETHERTYPE_OS9NET",
+ "ETHERTYPE_PACER",
+ "ETHERTYPE_PAE",
+ "ETHERTYPE_PCS",
+ "ETHERTYPE_PLANNING",
+ "ETHERTYPE_PPP",
+ "ETHERTYPE_PPPOE",
+ "ETHERTYPE_PPPOEDISC",
+ "ETHERTYPE_PRIMENTS",
+ "ETHERTYPE_PUP",
+ "ETHERTYPE_PUPAT",
+ "ETHERTYPE_QINQ",
+ "ETHERTYPE_RACAL",
+ "ETHERTYPE_RATIONAL",
+ "ETHERTYPE_RAWFR",
+ "ETHERTYPE_RCL",
+ "ETHERTYPE_RDP",
+ "ETHERTYPE_RETIX",
+ "ETHERTYPE_REVARP",
+ "ETHERTYPE_SCA",
+ "ETHERTYPE_SECTRA",
+ "ETHERTYPE_SECUREDATA",
+ "ETHERTYPE_SGITW",
+ "ETHERTYPE_SG_BOUNCE",
+ "ETHERTYPE_SG_DIAG",
+ "ETHERTYPE_SG_NETGAMES",
+ "ETHERTYPE_SG_RESV",
+ "ETHERTYPE_SIMNET",
+ "ETHERTYPE_SLOW",
+ "ETHERTYPE_SLOWPROTOCOLS",
+ "ETHERTYPE_SNA",
+ "ETHERTYPE_SNMP",
+ "ETHERTYPE_SONIX",
+ "ETHERTYPE_SPIDER",
+ "ETHERTYPE_SPRITE",
+ "ETHERTYPE_STP",
+ "ETHERTYPE_TALARIS",
+ "ETHERTYPE_TALARISMC",
+ "ETHERTYPE_TCPCOMP",
+ "ETHERTYPE_TCPSM",
+ "ETHERTYPE_TEC",
+ "ETHERTYPE_TIGAN",
+ "ETHERTYPE_TRAIL",
+ "ETHERTYPE_TRANSETHER",
+ "ETHERTYPE_TYMSHARE",
+ "ETHERTYPE_UBBST",
+ "ETHERTYPE_UBDEBUG",
+ "ETHERTYPE_UBDIAGLOOP",
+ "ETHERTYPE_UBDL",
+ "ETHERTYPE_UBNIU",
+ "ETHERTYPE_UBNMC",
+ "ETHERTYPE_VALID",
+ "ETHERTYPE_VARIAN",
+ "ETHERTYPE_VAXELN",
+ "ETHERTYPE_VEECO",
+ "ETHERTYPE_VEXP",
+ "ETHERTYPE_VGLAB",
+ "ETHERTYPE_VINES",
+ "ETHERTYPE_VINESECHO",
+ "ETHERTYPE_VINESLOOP",
+ "ETHERTYPE_VITAL",
+ "ETHERTYPE_VLAN",
+ "ETHERTYPE_VLTLMAN",
+ "ETHERTYPE_VPROD",
+ "ETHERTYPE_VURESERVED",
+ "ETHERTYPE_WATERLOO",
+ "ETHERTYPE_WELLFLEET",
+ "ETHERTYPE_X25",
+ "ETHERTYPE_X75",
+ "ETHERTYPE_XNSSM",
+ "ETHERTYPE_XTP",
+ "ETHER_ADDR_LEN",
+ "ETHER_ALIGN",
+ "ETHER_CRC_LEN",
+ "ETHER_CRC_POLY_BE",
+ "ETHER_CRC_POLY_LE",
+ "ETHER_HDR_LEN",
+ "ETHER_MAX_DIX_LEN",
+ "ETHER_MAX_LEN",
+ "ETHER_MAX_LEN_JUMBO",
+ "ETHER_MIN_LEN",
+ "ETHER_PPPOE_ENCAP_LEN",
+ "ETHER_TYPE_LEN",
+ "ETHER_VLAN_ENCAP_LEN",
+ "ETH_P_1588",
+ "ETH_P_8021Q",
+ "ETH_P_802_2",
+ "ETH_P_802_3",
+ "ETH_P_AARP",
+ "ETH_P_ALL",
+ "ETH_P_AOE",
+ "ETH_P_ARCNET",
+ "ETH_P_ARP",
+ "ETH_P_ATALK",
+ "ETH_P_ATMFATE",
+ "ETH_P_ATMMPOA",
+ "ETH_P_AX25",
+ "ETH_P_BPQ",
+ "ETH_P_CAIF",
+ "ETH_P_CAN",
+ "ETH_P_CONTROL",
+ "ETH_P_CUST",
+ "ETH_P_DDCMP",
+ "ETH_P_DEC",
+ "ETH_P_DIAG",
+ "ETH_P_DNA_DL",
+ "ETH_P_DNA_RC",
+ "ETH_P_DNA_RT",
+ "ETH_P_DSA",
+ "ETH_P_ECONET",
+ "ETH_P_EDSA",
+ "ETH_P_FCOE",
+ "ETH_P_FIP",
+ "ETH_P_HDLC",
+ "ETH_P_IEEE802154",
+ "ETH_P_IEEEPUP",
+ "ETH_P_IEEEPUPAT",
+ "ETH_P_IP",
+ "ETH_P_IPV6",
+ "ETH_P_IPX",
+ "ETH_P_IRDA",
+ "ETH_P_LAT",
+ "ETH_P_LINK_CTL",
+ "ETH_P_LOCALTALK",
+ "ETH_P_LOOP",
+ "ETH_P_MOBITEX",
+ "ETH_P_MPLS_MC",
+ "ETH_P_MPLS_UC",
+ "ETH_P_PAE",
+ "ETH_P_PAUSE",
+ "ETH_P_PHONET",
+ "ETH_P_PPPTALK",
+ "ETH_P_PPP_DISC",
+ "ETH_P_PPP_MP",
+ "ETH_P_PPP_SES",
+ "ETH_P_PUP",
+ "ETH_P_PUPAT",
+ "ETH_P_RARP",
+ "ETH_P_SCA",
+ "ETH_P_SLOW",
+ "ETH_P_SNAP",
+ "ETH_P_TEB",
+ "ETH_P_TIPC",
+ "ETH_P_TRAILER",
+ "ETH_P_TR_802_2",
+ "ETH_P_WAN_PPP",
+ "ETH_P_WCCP",
+ "ETH_P_X25",
+ "ETIME",
+ "ETIMEDOUT",
+ "ETOOMANYREFS",
+ "ETXTBSY",
+ "EUCLEAN",
+ "EUNATCH",
+ "EUSERS",
+ "EVFILT_AIO",
+ "EVFILT_FS",
+ "EVFILT_LIO",
+ "EVFILT_MACHPORT",
+ "EVFILT_PROC",
+ "EVFILT_READ",
+ "EVFILT_SIGNAL",
+ "EVFILT_SYSCOUNT",
+ "EVFILT_THREADMARKER",
+ "EVFILT_TIMER",
+ "EVFILT_USER",
+ "EVFILT_VM",
+ "EVFILT_VNODE",
+ "EVFILT_WRITE",
+ "EV_ADD",
+ "EV_CLEAR",
+ "EV_DELETE",
+ "EV_DISABLE",
+ "EV_DISPATCH",
+ "EV_DROP",
+ "EV_ENABLE",
+ "EV_EOF",
+ "EV_ERROR",
+ "EV_FLAG0",
+ "EV_FLAG1",
+ "EV_ONESHOT",
+ "EV_OOBAND",
+ "EV_POLL",
+ "EV_RECEIPT",
+ "EV_SYSFLAGS",
+ "EWINDOWS",
+ "EWOULDBLOCK",
+ "EXDEV",
+ "EXFULL",
+ "EXTA",
+ "EXTB",
+ "EXTPROC",
+ "Environ",
+ "EpollCreate",
+ "EpollCreate1",
+ "EpollCtl",
+ "EpollEvent",
+ "EpollWait",
+ "Errno",
+ "EscapeArg",
+ "Exchangedata",
+ "Exec",
+ "Exit",
+ "ExitProcess",
+ "FD_CLOEXEC",
+ "FD_SETSIZE",
+ "FILE_ACTION_ADDED",
+ "FILE_ACTION_MODIFIED",
+ "FILE_ACTION_REMOVED",
+ "FILE_ACTION_RENAMED_NEW_NAME",
+ "FILE_ACTION_RENAMED_OLD_NAME",
+ "FILE_APPEND_DATA",
+ "FILE_ATTRIBUTE_ARCHIVE",
+ "FILE_ATTRIBUTE_DIRECTORY",
+ "FILE_ATTRIBUTE_HIDDEN",
+ "FILE_ATTRIBUTE_NORMAL",
+ "FILE_ATTRIBUTE_READONLY",
+ "FILE_ATTRIBUTE_REPARSE_POINT",
+ "FILE_ATTRIBUTE_SYSTEM",
+ "FILE_BEGIN",
+ "FILE_CURRENT",
+ "FILE_END",
+ "FILE_FLAG_BACKUP_SEMANTICS",
+ "FILE_FLAG_OPEN_REPARSE_POINT",
+ "FILE_FLAG_OVERLAPPED",
+ "FILE_LIST_DIRECTORY",
+ "FILE_MAP_COPY",
+ "FILE_MAP_EXECUTE",
+ "FILE_MAP_READ",
+ "FILE_MAP_WRITE",
+ "FILE_NOTIFY_CHANGE_ATTRIBUTES",
+ "FILE_NOTIFY_CHANGE_CREATION",
+ "FILE_NOTIFY_CHANGE_DIR_NAME",
+ "FILE_NOTIFY_CHANGE_FILE_NAME",
+ "FILE_NOTIFY_CHANGE_LAST_ACCESS",
+ "FILE_NOTIFY_CHANGE_LAST_WRITE",
+ "FILE_NOTIFY_CHANGE_SIZE",
+ "FILE_SHARE_DELETE",
+ "FILE_SHARE_READ",
+ "FILE_SHARE_WRITE",
+ "FILE_SKIP_COMPLETION_PORT_ON_SUCCESS",
+ "FILE_SKIP_SET_EVENT_ON_HANDLE",
+ "FILE_TYPE_CHAR",
+ "FILE_TYPE_DISK",
+ "FILE_TYPE_PIPE",
+ "FILE_TYPE_REMOTE",
+ "FILE_TYPE_UNKNOWN",
+ "FILE_WRITE_ATTRIBUTES",
+ "FLUSHO",
+ "FORMAT_MESSAGE_ALLOCATE_BUFFER",
+ "FORMAT_MESSAGE_ARGUMENT_ARRAY",
+ "FORMAT_MESSAGE_FROM_HMODULE",
+ "FORMAT_MESSAGE_FROM_STRING",
+ "FORMAT_MESSAGE_FROM_SYSTEM",
+ "FORMAT_MESSAGE_IGNORE_INSERTS",
+ "FORMAT_MESSAGE_MAX_WIDTH_MASK",
+ "FSCTL_GET_REPARSE_POINT",
+ "F_ADDFILESIGS",
+ "F_ADDSIGS",
+ "F_ALLOCATEALL",
+ "F_ALLOCATECONTIG",
+ "F_CANCEL",
+ "F_CHKCLEAN",
+ "F_CLOSEM",
+ "F_DUP2FD",
+ "F_DUP2FD_CLOEXEC",
+ "F_DUPFD",
+ "F_DUPFD_CLOEXEC",
+ "F_EXLCK",
+ "F_FINDSIGS",
+ "F_FLUSH_DATA",
+ "F_FREEZE_FS",
+ "F_FSCTL",
+ "F_FSDIRMASK",
+ "F_FSIN",
+ "F_FSINOUT",
+ "F_FSOUT",
+ "F_FSPRIV",
+ "F_FSVOID",
+ "F_FULLFSYNC",
+ "F_GETCODEDIR",
+ "F_GETFD",
+ "F_GETFL",
+ "F_GETLEASE",
+ "F_GETLK",
+ "F_GETLK64",
+ "F_GETLKPID",
+ "F_GETNOSIGPIPE",
+ "F_GETOWN",
+ "F_GETOWN_EX",
+ "F_GETPATH",
+ "F_GETPATH_MTMINFO",
+ "F_GETPIPE_SZ",
+ "F_GETPROTECTIONCLASS",
+ "F_GETPROTECTIONLEVEL",
+ "F_GETSIG",
+ "F_GLOBAL_NOCACHE",
+ "F_LOCK",
+ "F_LOG2PHYS",
+ "F_LOG2PHYS_EXT",
+ "F_MARKDEPENDENCY",
+ "F_MAXFD",
+ "F_NOCACHE",
+ "F_NODIRECT",
+ "F_NOTIFY",
+ "F_OGETLK",
+ "F_OK",
+ "F_OSETLK",
+ "F_OSETLKW",
+ "F_PARAM_MASK",
+ "F_PARAM_MAX",
+ "F_PATHPKG_CHECK",
+ "F_PEOFPOSMODE",
+ "F_PREALLOCATE",
+ "F_RDADVISE",
+ "F_RDAHEAD",
+ "F_RDLCK",
+ "F_READAHEAD",
+ "F_READBOOTSTRAP",
+ "F_SETBACKINGSTORE",
+ "F_SETFD",
+ "F_SETFL",
+ "F_SETLEASE",
+ "F_SETLK",
+ "F_SETLK64",
+ "F_SETLKW",
+ "F_SETLKW64",
+ "F_SETLKWTIMEOUT",
+ "F_SETLK_REMOTE",
+ "F_SETNOSIGPIPE",
+ "F_SETOWN",
+ "F_SETOWN_EX",
+ "F_SETPIPE_SZ",
+ "F_SETPROTECTIONCLASS",
+ "F_SETSIG",
+ "F_SETSIZE",
+ "F_SHLCK",
+ "F_SINGLE_WRITER",
+ "F_TEST",
+ "F_THAW_FS",
+ "F_TLOCK",
+ "F_TRANSCODEKEY",
+ "F_ULOCK",
+ "F_UNLCK",
+ "F_UNLCKSYS",
+ "F_VOLPOSMODE",
+ "F_WRITEBOOTSTRAP",
+ "F_WRLCK",
+ "Faccessat",
+ "Fallocate",
+ "Fbootstraptransfer_t",
+ "Fchdir",
+ "Fchflags",
+ "Fchmod",
+ "Fchmodat",
+ "Fchown",
+ "Fchownat",
+ "FcntlFlock",
+ "FdSet",
+ "Fdatasync",
+ "FileNotifyInformation",
+ "Filetime",
+ "FindClose",
+ "FindFirstFile",
+ "FindNextFile",
+ "Flock",
+ "Flock_t",
+ "FlushBpf",
+ "FlushFileBuffers",
+ "FlushViewOfFile",
+ "ForkExec",
+ "ForkLock",
+ "FormatMessage",
+ "Fpathconf",
+ "FreeAddrInfoW",
+ "FreeEnvironmentStrings",
+ "FreeLibrary",
+ "Fsid",
+ "Fstat",
+ "Fstatat",
+ "Fstatfs",
+ "Fstore_t",
+ "Fsync",
+ "Ftruncate",
+ "FullPath",
+ "Futimes",
+ "Futimesat",
+ "GENERIC_ALL",
+ "GENERIC_EXECUTE",
+ "GENERIC_READ",
+ "GENERIC_WRITE",
+ "GUID",
+ "GetAcceptExSockaddrs",
+ "GetAdaptersInfo",
+ "GetAddrInfoW",
+ "GetCommandLine",
+ "GetComputerName",
+ "GetConsoleMode",
+ "GetCurrentDirectory",
+ "GetCurrentProcess",
+ "GetEnvironmentStrings",
+ "GetEnvironmentVariable",
+ "GetExitCodeProcess",
+ "GetFileAttributes",
+ "GetFileAttributesEx",
+ "GetFileExInfoStandard",
+ "GetFileExMaxInfoLevel",
+ "GetFileInformationByHandle",
+ "GetFileType",
+ "GetFullPathName",
+ "GetHostByName",
+ "GetIfEntry",
+ "GetLastError",
+ "GetLengthSid",
+ "GetLongPathName",
+ "GetProcAddress",
+ "GetProcessTimes",
+ "GetProtoByName",
+ "GetQueuedCompletionStatus",
+ "GetServByName",
+ "GetShortPathName",
+ "GetStartupInfo",
+ "GetStdHandle",
+ "GetSystemTimeAsFileTime",
+ "GetTempPath",
+ "GetTimeZoneInformation",
+ "GetTokenInformation",
+ "GetUserNameEx",
+ "GetUserProfileDirectory",
+ "GetVersion",
+ "Getcwd",
+ "Getdents",
+ "Getdirentries",
+ "Getdtablesize",
+ "Getegid",
+ "Getenv",
+ "Geteuid",
+ "Getfsstat",
+ "Getgid",
+ "Getgroups",
+ "Getpagesize",
+ "Getpeername",
+ "Getpgid",
+ "Getpgrp",
+ "Getpid",
+ "Getppid",
+ "Getpriority",
+ "Getrlimit",
+ "Getrusage",
+ "Getsid",
+ "Getsockname",
+ "Getsockopt",
+ "GetsockoptByte",
+ "GetsockoptICMPv6Filter",
+ "GetsockoptIPMreq",
+ "GetsockoptIPMreqn",
+ "GetsockoptIPv6MTUInfo",
+ "GetsockoptIPv6Mreq",
+ "GetsockoptInet4Addr",
+ "GetsockoptInt",
+ "GetsockoptUcred",
+ "Gettid",
+ "Gettimeofday",
+ "Getuid",
+ "Getwd",
+ "Getxattr",
+ "HANDLE_FLAG_INHERIT",
+ "HKEY_CLASSES_ROOT",
+ "HKEY_CURRENT_CONFIG",
+ "HKEY_CURRENT_USER",
+ "HKEY_DYN_DATA",
+ "HKEY_LOCAL_MACHINE",
+ "HKEY_PERFORMANCE_DATA",
+ "HKEY_USERS",
+ "HUPCL",
+ "Handle",
+ "Hostent",
+ "ICANON",
+ "ICMP6_FILTER",
+ "ICMPV6_FILTER",
+ "ICMPv6Filter",
+ "ICRNL",
+ "IEXTEN",
+ "IFAN_ARRIVAL",
+ "IFAN_DEPARTURE",
+ "IFA_ADDRESS",
+ "IFA_ANYCAST",
+ "IFA_BROADCAST",
+ "IFA_CACHEINFO",
+ "IFA_F_DADFAILED",
+ "IFA_F_DEPRECATED",
+ "IFA_F_HOMEADDRESS",
+ "IFA_F_NODAD",
+ "IFA_F_OPTIMISTIC",
+ "IFA_F_PERMANENT",
+ "IFA_F_SECONDARY",
+ "IFA_F_TEMPORARY",
+ "IFA_F_TENTATIVE",
+ "IFA_LABEL",
+ "IFA_LOCAL",
+ "IFA_MAX",
+ "IFA_MULTICAST",
+ "IFA_ROUTE",
+ "IFA_UNSPEC",
+ "IFF_ALLMULTI",
+ "IFF_ALTPHYS",
+ "IFF_AUTOMEDIA",
+ "IFF_BROADCAST",
+ "IFF_CANTCHANGE",
+ "IFF_CANTCONFIG",
+ "IFF_DEBUG",
+ "IFF_DRV_OACTIVE",
+ "IFF_DRV_RUNNING",
+ "IFF_DYING",
+ "IFF_DYNAMIC",
+ "IFF_LINK0",
+ "IFF_LINK1",
+ "IFF_LINK2",
+ "IFF_LOOPBACK",
+ "IFF_MASTER",
+ "IFF_MONITOR",
+ "IFF_MULTICAST",
+ "IFF_NOARP",
+ "IFF_NOTRAILERS",
+ "IFF_NO_PI",
+ "IFF_OACTIVE",
+ "IFF_ONE_QUEUE",
+ "IFF_POINTOPOINT",
+ "IFF_POINTTOPOINT",
+ "IFF_PORTSEL",
+ "IFF_PPROMISC",
+ "IFF_PROMISC",
+ "IFF_RENAMING",
+ "IFF_RUNNING",
+ "IFF_SIMPLEX",
+ "IFF_SLAVE",
+ "IFF_SMART",
+ "IFF_STATICARP",
+ "IFF_TAP",
+ "IFF_TUN",
+ "IFF_TUN_EXCL",
+ "IFF_UP",
+ "IFF_VNET_HDR",
+ "IFLA_ADDRESS",
+ "IFLA_BROADCAST",
+ "IFLA_COST",
+ "IFLA_IFALIAS",
+ "IFLA_IFNAME",
+ "IFLA_LINK",
+ "IFLA_LINKINFO",
+ "IFLA_LINKMODE",
+ "IFLA_MAP",
+ "IFLA_MASTER",
+ "IFLA_MAX",
+ "IFLA_MTU",
+ "IFLA_NET_NS_PID",
+ "IFLA_OPERSTATE",
+ "IFLA_PRIORITY",
+ "IFLA_PROTINFO",
+ "IFLA_QDISC",
+ "IFLA_STATS",
+ "IFLA_TXQLEN",
+ "IFLA_UNSPEC",
+ "IFLA_WEIGHT",
+ "IFLA_WIRELESS",
+ "IFNAMSIZ",
+ "IFT_1822",
+ "IFT_A12MPPSWITCH",
+ "IFT_AAL2",
+ "IFT_AAL5",
+ "IFT_ADSL",
+ "IFT_AFLANE8023",
+ "IFT_AFLANE8025",
+ "IFT_ARAP",
+ "IFT_ARCNET",
+ "IFT_ARCNETPLUS",
+ "IFT_ASYNC",
+ "IFT_ATM",
+ "IFT_ATMDXI",
+ "IFT_ATMFUNI",
+ "IFT_ATMIMA",
+ "IFT_ATMLOGICAL",
+ "IFT_ATMRADIO",
+ "IFT_ATMSUBINTERFACE",
+ "IFT_ATMVCIENDPT",
+ "IFT_ATMVIRTUAL",
+ "IFT_BGPPOLICYACCOUNTING",
+ "IFT_BLUETOOTH",
+ "IFT_BRIDGE",
+ "IFT_BSC",
+ "IFT_CARP",
+ "IFT_CCTEMUL",
+ "IFT_CELLULAR",
+ "IFT_CEPT",
+ "IFT_CES",
+ "IFT_CHANNEL",
+ "IFT_CNR",
+ "IFT_COFFEE",
+ "IFT_COMPOSITELINK",
+ "IFT_DCN",
+ "IFT_DIGITALPOWERLINE",
+ "IFT_DIGITALWRAPPEROVERHEADCHANNEL",
+ "IFT_DLSW",
+ "IFT_DOCSCABLEDOWNSTREAM",
+ "IFT_DOCSCABLEMACLAYER",
+ "IFT_DOCSCABLEUPSTREAM",
+ "IFT_DOCSCABLEUPSTREAMCHANNEL",
+ "IFT_DS0",
+ "IFT_DS0BUNDLE",
+ "IFT_DS1FDL",
+ "IFT_DS3",
+ "IFT_DTM",
+ "IFT_DUMMY",
+ "IFT_DVBASILN",
+ "IFT_DVBASIOUT",
+ "IFT_DVBRCCDOWNSTREAM",
+ "IFT_DVBRCCMACLAYER",
+ "IFT_DVBRCCUPSTREAM",
+ "IFT_ECONET",
+ "IFT_ENC",
+ "IFT_EON",
+ "IFT_EPLRS",
+ "IFT_ESCON",
+ "IFT_ETHER",
+ "IFT_FAITH",
+ "IFT_FAST",
+ "IFT_FASTETHER",
+ "IFT_FASTETHERFX",
+ "IFT_FDDI",
+ "IFT_FIBRECHANNEL",
+ "IFT_FRAMERELAYINTERCONNECT",
+ "IFT_FRAMERELAYMPI",
+ "IFT_FRDLCIENDPT",
+ "IFT_FRELAY",
+ "IFT_FRELAYDCE",
+ "IFT_FRF16MFRBUNDLE",
+ "IFT_FRFORWARD",
+ "IFT_G703AT2MB",
+ "IFT_G703AT64K",
+ "IFT_GIF",
+ "IFT_GIGABITETHERNET",
+ "IFT_GR303IDT",
+ "IFT_GR303RDT",
+ "IFT_H323GATEKEEPER",
+ "IFT_H323PROXY",
+ "IFT_HDH1822",
+ "IFT_HDLC",
+ "IFT_HDSL2",
+ "IFT_HIPERLAN2",
+ "IFT_HIPPI",
+ "IFT_HIPPIINTERFACE",
+ "IFT_HOSTPAD",
+ "IFT_HSSI",
+ "IFT_HY",
+ "IFT_IBM370PARCHAN",
+ "IFT_IDSL",
+ "IFT_IEEE1394",
+ "IFT_IEEE80211",
+ "IFT_IEEE80212",
+ "IFT_IEEE8023ADLAG",
+ "IFT_IFGSN",
+ "IFT_IMT",
+ "IFT_INFINIBAND",
+ "IFT_INTERLEAVE",
+ "IFT_IP",
+ "IFT_IPFORWARD",
+ "IFT_IPOVERATM",
+ "IFT_IPOVERCDLC",
+ "IFT_IPOVERCLAW",
+ "IFT_IPSWITCH",
+ "IFT_IPXIP",
+ "IFT_ISDN",
+ "IFT_ISDNBASIC",
+ "IFT_ISDNPRIMARY",
+ "IFT_ISDNS",
+ "IFT_ISDNU",
+ "IFT_ISO88022LLC",
+ "IFT_ISO88023",
+ "IFT_ISO88024",
+ "IFT_ISO88025",
+ "IFT_ISO88025CRFPINT",
+ "IFT_ISO88025DTR",
+ "IFT_ISO88025FIBER",
+ "IFT_ISO88026",
+ "IFT_ISUP",
+ "IFT_L2VLAN",
+ "IFT_L3IPVLAN",
+ "IFT_L3IPXVLAN",
+ "IFT_LAPB",
+ "IFT_LAPD",
+ "IFT_LAPF",
+ "IFT_LINEGROUP",
+ "IFT_LOCALTALK",
+ "IFT_LOOP",
+ "IFT_MEDIAMAILOVERIP",
+ "IFT_MFSIGLINK",
+ "IFT_MIOX25",
+ "IFT_MODEM",
+ "IFT_MPC",
+ "IFT_MPLS",
+ "IFT_MPLSTUNNEL",
+ "IFT_MSDSL",
+ "IFT_MVL",
+ "IFT_MYRINET",
+ "IFT_NFAS",
+ "IFT_NSIP",
+ "IFT_OPTICALCHANNEL",
+ "IFT_OPTICALTRANSPORT",
+ "IFT_OTHER",
+ "IFT_P10",
+ "IFT_P80",
+ "IFT_PARA",
+ "IFT_PDP",
+ "IFT_PFLOG",
+ "IFT_PFLOW",
+ "IFT_PFSYNC",
+ "IFT_PLC",
+ "IFT_PON155",
+ "IFT_PON622",
+ "IFT_POS",
+ "IFT_PPP",
+ "IFT_PPPMULTILINKBUNDLE",
+ "IFT_PROPATM",
+ "IFT_PROPBWAP2MP",
+ "IFT_PROPCNLS",
+ "IFT_PROPDOCSWIRELESSDOWNSTREAM",
+ "IFT_PROPDOCSWIRELESSMACLAYER",
+ "IFT_PROPDOCSWIRELESSUPSTREAM",
+ "IFT_PROPMUX",
+ "IFT_PROPVIRTUAL",
+ "IFT_PROPWIRELESSP2P",
+ "IFT_PTPSERIAL",
+ "IFT_PVC",
+ "IFT_Q2931",
+ "IFT_QLLC",
+ "IFT_RADIOMAC",
+ "IFT_RADSL",
+ "IFT_REACHDSL",
+ "IFT_RFC1483",
+ "IFT_RS232",
+ "IFT_RSRB",
+ "IFT_SDLC",
+ "IFT_SDSL",
+ "IFT_SHDSL",
+ "IFT_SIP",
+ "IFT_SIPSIG",
+ "IFT_SIPTG",
+ "IFT_SLIP",
+ "IFT_SMDSDXI",
+ "IFT_SMDSICIP",
+ "IFT_SONET",
+ "IFT_SONETOVERHEADCHANNEL",
+ "IFT_SONETPATH",
+ "IFT_SONETVT",
+ "IFT_SRP",
+ "IFT_SS7SIGLINK",
+ "IFT_STACKTOSTACK",
+ "IFT_STARLAN",
+ "IFT_STF",
+ "IFT_T1",
+ "IFT_TDLC",
+ "IFT_TELINK",
+ "IFT_TERMPAD",
+ "IFT_TR008",
+ "IFT_TRANSPHDLC",
+ "IFT_TUNNEL",
+ "IFT_ULTRA",
+ "IFT_USB",
+ "IFT_V11",
+ "IFT_V35",
+ "IFT_V36",
+ "IFT_V37",
+ "IFT_VDSL",
+ "IFT_VIRTUALIPADDRESS",
+ "IFT_VIRTUALTG",
+ "IFT_VOICEDID",
+ "IFT_VOICEEM",
+ "IFT_VOICEEMFGD",
+ "IFT_VOICEENCAP",
+ "IFT_VOICEFGDEANA",
+ "IFT_VOICEFXO",
+ "IFT_VOICEFXS",
+ "IFT_VOICEOVERATM",
+ "IFT_VOICEOVERCABLE",
+ "IFT_VOICEOVERFRAMERELAY",
+ "IFT_VOICEOVERIP",
+ "IFT_X213",
+ "IFT_X25",
+ "IFT_X25DDN",
+ "IFT_X25HUNTGROUP",
+ "IFT_X25MLP",
+ "IFT_X25PLE",
+ "IFT_XETHER",
+ "IGNBRK",
+ "IGNCR",
+ "IGNORE",
+ "IGNPAR",
+ "IMAXBEL",
+ "INFINITE",
+ "INLCR",
+ "INPCK",
+ "INVALID_FILE_ATTRIBUTES",
+ "IN_ACCESS",
+ "IN_ALL_EVENTS",
+ "IN_ATTRIB",
+ "IN_CLASSA_HOST",
+ "IN_CLASSA_MAX",
+ "IN_CLASSA_NET",
+ "IN_CLASSA_NSHIFT",
+ "IN_CLASSB_HOST",
+ "IN_CLASSB_MAX",
+ "IN_CLASSB_NET",
+ "IN_CLASSB_NSHIFT",
+ "IN_CLASSC_HOST",
+ "IN_CLASSC_NET",
+ "IN_CLASSC_NSHIFT",
+ "IN_CLASSD_HOST",
+ "IN_CLASSD_NET",
+ "IN_CLASSD_NSHIFT",
+ "IN_CLOEXEC",
+ "IN_CLOSE",
+ "IN_CLOSE_NOWRITE",
+ "IN_CLOSE_WRITE",
+ "IN_CREATE",
+ "IN_DELETE",
+ "IN_DELETE_SELF",
+ "IN_DONT_FOLLOW",
+ "IN_EXCL_UNLINK",
+ "IN_IGNORED",
+ "IN_ISDIR",
+ "IN_LINKLOCALNETNUM",
+ "IN_LOOPBACKNET",
+ "IN_MASK_ADD",
+ "IN_MODIFY",
+ "IN_MOVE",
+ "IN_MOVED_FROM",
+ "IN_MOVED_TO",
+ "IN_MOVE_SELF",
+ "IN_NONBLOCK",
+ "IN_ONESHOT",
+ "IN_ONLYDIR",
+ "IN_OPEN",
+ "IN_Q_OVERFLOW",
+ "IN_RFC3021_HOST",
+ "IN_RFC3021_MASK",
+ "IN_RFC3021_NET",
+ "IN_RFC3021_NSHIFT",
+ "IN_UNMOUNT",
+ "IOC_IN",
+ "IOC_INOUT",
+ "IOC_OUT",
+ "IOC_VENDOR",
+ "IOC_WS2",
+ "IO_REPARSE_TAG_SYMLINK",
+ "IPMreq",
+ "IPMreqn",
+ "IPPROTO_3PC",
+ "IPPROTO_ADFS",
+ "IPPROTO_AH",
+ "IPPROTO_AHIP",
+ "IPPROTO_APES",
+ "IPPROTO_ARGUS",
+ "IPPROTO_AX25",
+ "IPPROTO_BHA",
+ "IPPROTO_BLT",
+ "IPPROTO_BRSATMON",
+ "IPPROTO_CARP",
+ "IPPROTO_CFTP",
+ "IPPROTO_CHAOS",
+ "IPPROTO_CMTP",
+ "IPPROTO_COMP",
+ "IPPROTO_CPHB",
+ "IPPROTO_CPNX",
+ "IPPROTO_DCCP",
+ "IPPROTO_DDP",
+ "IPPROTO_DGP",
+ "IPPROTO_DIVERT",
+ "IPPROTO_DIVERT_INIT",
+ "IPPROTO_DIVERT_RESP",
+ "IPPROTO_DONE",
+ "IPPROTO_DSTOPTS",
+ "IPPROTO_EGP",
+ "IPPROTO_EMCON",
+ "IPPROTO_ENCAP",
+ "IPPROTO_EON",
+ "IPPROTO_ESP",
+ "IPPROTO_ETHERIP",
+ "IPPROTO_FRAGMENT",
+ "IPPROTO_GGP",
+ "IPPROTO_GMTP",
+ "IPPROTO_GRE",
+ "IPPROTO_HELLO",
+ "IPPROTO_HMP",
+ "IPPROTO_HOPOPTS",
+ "IPPROTO_ICMP",
+ "IPPROTO_ICMPV6",
+ "IPPROTO_IDP",
+ "IPPROTO_IDPR",
+ "IPPROTO_IDRP",
+ "IPPROTO_IGMP",
+ "IPPROTO_IGP",
+ "IPPROTO_IGRP",
+ "IPPROTO_IL",
+ "IPPROTO_INLSP",
+ "IPPROTO_INP",
+ "IPPROTO_IP",
+ "IPPROTO_IPCOMP",
+ "IPPROTO_IPCV",
+ "IPPROTO_IPEIP",
+ "IPPROTO_IPIP",
+ "IPPROTO_IPPC",
+ "IPPROTO_IPV4",
+ "IPPROTO_IPV6",
+ "IPPROTO_IPV6_ICMP",
+ "IPPROTO_IRTP",
+ "IPPROTO_KRYPTOLAN",
+ "IPPROTO_LARP",
+ "IPPROTO_LEAF1",
+ "IPPROTO_LEAF2",
+ "IPPROTO_MAX",
+ "IPPROTO_MAXID",
+ "IPPROTO_MEAS",
+ "IPPROTO_MH",
+ "IPPROTO_MHRP",
+ "IPPROTO_MICP",
+ "IPPROTO_MOBILE",
+ "IPPROTO_MPLS",
+ "IPPROTO_MTP",
+ "IPPROTO_MUX",
+ "IPPROTO_ND",
+ "IPPROTO_NHRP",
+ "IPPROTO_NONE",
+ "IPPROTO_NSP",
+ "IPPROTO_NVPII",
+ "IPPROTO_OLD_DIVERT",
+ "IPPROTO_OSPFIGP",
+ "IPPROTO_PFSYNC",
+ "IPPROTO_PGM",
+ "IPPROTO_PIGP",
+ "IPPROTO_PIM",
+ "IPPROTO_PRM",
+ "IPPROTO_PUP",
+ "IPPROTO_PVP",
+ "IPPROTO_RAW",
+ "IPPROTO_RCCMON",
+ "IPPROTO_RDP",
+ "IPPROTO_ROUTING",
+ "IPPROTO_RSVP",
+ "IPPROTO_RVD",
+ "IPPROTO_SATEXPAK",
+ "IPPROTO_SATMON",
+ "IPPROTO_SCCSP",
+ "IPPROTO_SCTP",
+ "IPPROTO_SDRP",
+ "IPPROTO_SEND",
+ "IPPROTO_SEP",
+ "IPPROTO_SKIP",
+ "IPPROTO_SPACER",
+ "IPPROTO_SRPC",
+ "IPPROTO_ST",
+ "IPPROTO_SVMTP",
+ "IPPROTO_SWIPE",
+ "IPPROTO_TCF",
+ "IPPROTO_TCP",
+ "IPPROTO_TLSP",
+ "IPPROTO_TP",
+ "IPPROTO_TPXX",
+ "IPPROTO_TRUNK1",
+ "IPPROTO_TRUNK2",
+ "IPPROTO_TTP",
+ "IPPROTO_UDP",
+ "IPPROTO_UDPLITE",
+ "IPPROTO_VINES",
+ "IPPROTO_VISA",
+ "IPPROTO_VMTP",
+ "IPPROTO_VRRP",
+ "IPPROTO_WBEXPAK",
+ "IPPROTO_WBMON",
+ "IPPROTO_WSN",
+ "IPPROTO_XNET",
+ "IPPROTO_XTP",
+ "IPV6_2292DSTOPTS",
+ "IPV6_2292HOPLIMIT",
+ "IPV6_2292HOPOPTS",
+ "IPV6_2292NEXTHOP",
+ "IPV6_2292PKTINFO",
+ "IPV6_2292PKTOPTIONS",
+ "IPV6_2292RTHDR",
+ "IPV6_ADDRFORM",
+ "IPV6_ADD_MEMBERSHIP",
+ "IPV6_AUTHHDR",
+ "IPV6_AUTH_LEVEL",
+ "IPV6_AUTOFLOWLABEL",
+ "IPV6_BINDANY",
+ "IPV6_BINDV6ONLY",
+ "IPV6_BOUND_IF",
+ "IPV6_CHECKSUM",
+ "IPV6_DEFAULT_MULTICAST_HOPS",
+ "IPV6_DEFAULT_MULTICAST_LOOP",
+ "IPV6_DEFHLIM",
+ "IPV6_DONTFRAG",
+ "IPV6_DROP_MEMBERSHIP",
+ "IPV6_DSTOPTS",
+ "IPV6_ESP_NETWORK_LEVEL",
+ "IPV6_ESP_TRANS_LEVEL",
+ "IPV6_FAITH",
+ "IPV6_FLOWINFO_MASK",
+ "IPV6_FLOWLABEL_MASK",
+ "IPV6_FRAGTTL",
+ "IPV6_FW_ADD",
+ "IPV6_FW_DEL",
+ "IPV6_FW_FLUSH",
+ "IPV6_FW_GET",
+ "IPV6_FW_ZERO",
+ "IPV6_HLIMDEC",
+ "IPV6_HOPLIMIT",
+ "IPV6_HOPOPTS",
+ "IPV6_IPCOMP_LEVEL",
+ "IPV6_IPSEC_POLICY",
+ "IPV6_JOIN_ANYCAST",
+ "IPV6_JOIN_GROUP",
+ "IPV6_LEAVE_ANYCAST",
+ "IPV6_LEAVE_GROUP",
+ "IPV6_MAXHLIM",
+ "IPV6_MAXOPTHDR",
+ "IPV6_MAXPACKET",
+ "IPV6_MAX_GROUP_SRC_FILTER",
+ "IPV6_MAX_MEMBERSHIPS",
+ "IPV6_MAX_SOCK_SRC_FILTER",
+ "IPV6_MIN_MEMBERSHIPS",
+ "IPV6_MMTU",
+ "IPV6_MSFILTER",
+ "IPV6_MTU",
+ "IPV6_MTU_DISCOVER",
+ "IPV6_MULTICAST_HOPS",
+ "IPV6_MULTICAST_IF",
+ "IPV6_MULTICAST_LOOP",
+ "IPV6_NEXTHOP",
+ "IPV6_OPTIONS",
+ "IPV6_PATHMTU",
+ "IPV6_PIPEX",
+ "IPV6_PKTINFO",
+ "IPV6_PMTUDISC_DO",
+ "IPV6_PMTUDISC_DONT",
+ "IPV6_PMTUDISC_PROBE",
+ "IPV6_PMTUDISC_WANT",
+ "IPV6_PORTRANGE",
+ "IPV6_PORTRANGE_DEFAULT",
+ "IPV6_PORTRANGE_HIGH",
+ "IPV6_PORTRANGE_LOW",
+ "IPV6_PREFER_TEMPADDR",
+ "IPV6_RECVDSTOPTS",
+ "IPV6_RECVDSTPORT",
+ "IPV6_RECVERR",
+ "IPV6_RECVHOPLIMIT",
+ "IPV6_RECVHOPOPTS",
+ "IPV6_RECVPATHMTU",
+ "IPV6_RECVPKTINFO",
+ "IPV6_RECVRTHDR",
+ "IPV6_RECVTCLASS",
+ "IPV6_ROUTER_ALERT",
+ "IPV6_RTABLE",
+ "IPV6_RTHDR",
+ "IPV6_RTHDRDSTOPTS",
+ "IPV6_RTHDR_LOOSE",
+ "IPV6_RTHDR_STRICT",
+ "IPV6_RTHDR_TYPE_0",
+ "IPV6_RXDSTOPTS",
+ "IPV6_RXHOPOPTS",
+ "IPV6_SOCKOPT_RESERVED1",
+ "IPV6_TCLASS",
+ "IPV6_UNICAST_HOPS",
+ "IPV6_USE_MIN_MTU",
+ "IPV6_V6ONLY",
+ "IPV6_VERSION",
+ "IPV6_VERSION_MASK",
+ "IPV6_XFRM_POLICY",
+ "IP_ADD_MEMBERSHIP",
+ "IP_ADD_SOURCE_MEMBERSHIP",
+ "IP_AUTH_LEVEL",
+ "IP_BINDANY",
+ "IP_BLOCK_SOURCE",
+ "IP_BOUND_IF",
+ "IP_DEFAULT_MULTICAST_LOOP",
+ "IP_DEFAULT_MULTICAST_TTL",
+ "IP_DF",
+ "IP_DIVERTFL",
+ "IP_DONTFRAG",
+ "IP_DROP_MEMBERSHIP",
+ "IP_DROP_SOURCE_MEMBERSHIP",
+ "IP_DUMMYNET3",
+ "IP_DUMMYNET_CONFIGURE",
+ "IP_DUMMYNET_DEL",
+ "IP_DUMMYNET_FLUSH",
+ "IP_DUMMYNET_GET",
+ "IP_EF",
+ "IP_ERRORMTU",
+ "IP_ESP_NETWORK_LEVEL",
+ "IP_ESP_TRANS_LEVEL",
+ "IP_FAITH",
+ "IP_FREEBIND",
+ "IP_FW3",
+ "IP_FW_ADD",
+ "IP_FW_DEL",
+ "IP_FW_FLUSH",
+ "IP_FW_GET",
+ "IP_FW_NAT_CFG",
+ "IP_FW_NAT_DEL",
+ "IP_FW_NAT_GET_CONFIG",
+ "IP_FW_NAT_GET_LOG",
+ "IP_FW_RESETLOG",
+ "IP_FW_TABLE_ADD",
+ "IP_FW_TABLE_DEL",
+ "IP_FW_TABLE_FLUSH",
+ "IP_FW_TABLE_GETSIZE",
+ "IP_FW_TABLE_LIST",
+ "IP_FW_ZERO",
+ "IP_HDRINCL",
+ "IP_IPCOMP_LEVEL",
+ "IP_IPSECFLOWINFO",
+ "IP_IPSEC_LOCAL_AUTH",
+ "IP_IPSEC_LOCAL_CRED",
+ "IP_IPSEC_LOCAL_ID",
+ "IP_IPSEC_POLICY",
+ "IP_IPSEC_REMOTE_AUTH",
+ "IP_IPSEC_REMOTE_CRED",
+ "IP_IPSEC_REMOTE_ID",
+ "IP_MAXPACKET",
+ "IP_MAX_GROUP_SRC_FILTER",
+ "IP_MAX_MEMBERSHIPS",
+ "IP_MAX_SOCK_MUTE_FILTER",
+ "IP_MAX_SOCK_SRC_FILTER",
+ "IP_MAX_SOURCE_FILTER",
+ "IP_MF",
+ "IP_MINFRAGSIZE",
+ "IP_MINTTL",
+ "IP_MIN_MEMBERSHIPS",
+ "IP_MSFILTER",
+ "IP_MSS",
+ "IP_MTU",
+ "IP_MTU_DISCOVER",
+ "IP_MULTICAST_IF",
+ "IP_MULTICAST_IFINDEX",
+ "IP_MULTICAST_LOOP",
+ "IP_MULTICAST_TTL",
+ "IP_MULTICAST_VIF",
+ "IP_NAT__XXX",
+ "IP_OFFMASK",
+ "IP_OLD_FW_ADD",
+ "IP_OLD_FW_DEL",
+ "IP_OLD_FW_FLUSH",
+ "IP_OLD_FW_GET",
+ "IP_OLD_FW_RESETLOG",
+ "IP_OLD_FW_ZERO",
+ "IP_ONESBCAST",
+ "IP_OPTIONS",
+ "IP_ORIGDSTADDR",
+ "IP_PASSSEC",
+ "IP_PIPEX",
+ "IP_PKTINFO",
+ "IP_PKTOPTIONS",
+ "IP_PMTUDISC",
+ "IP_PMTUDISC_DO",
+ "IP_PMTUDISC_DONT",
+ "IP_PMTUDISC_PROBE",
+ "IP_PMTUDISC_WANT",
+ "IP_PORTRANGE",
+ "IP_PORTRANGE_DEFAULT",
+ "IP_PORTRANGE_HIGH",
+ "IP_PORTRANGE_LOW",
+ "IP_RECVDSTADDR",
+ "IP_RECVDSTPORT",
+ "IP_RECVERR",
+ "IP_RECVIF",
+ "IP_RECVOPTS",
+ "IP_RECVORIGDSTADDR",
+ "IP_RECVPKTINFO",
+ "IP_RECVRETOPTS",
+ "IP_RECVRTABLE",
+ "IP_RECVTOS",
+ "IP_RECVTTL",
+ "IP_RETOPTS",
+ "IP_RF",
+ "IP_ROUTER_ALERT",
+ "IP_RSVP_OFF",
+ "IP_RSVP_ON",
+ "IP_RSVP_VIF_OFF",
+ "IP_RSVP_VIF_ON",
+ "IP_RTABLE",
+ "IP_SENDSRCADDR",
+ "IP_STRIPHDR",
+ "IP_TOS",
+ "IP_TRAFFIC_MGT_BACKGROUND",
+ "IP_TRANSPARENT",
+ "IP_TTL",
+ "IP_UNBLOCK_SOURCE",
+ "IP_XFRM_POLICY",
+ "IPv6MTUInfo",
+ "IPv6Mreq",
+ "ISIG",
+ "ISTRIP",
+ "IUCLC",
+ "IUTF8",
+ "IXANY",
+ "IXOFF",
+ "IXON",
+ "IfAddrmsg",
+ "IfAnnounceMsghdr",
+ "IfData",
+ "IfInfomsg",
+ "IfMsghdr",
+ "IfaMsghdr",
+ "IfmaMsghdr",
+ "IfmaMsghdr2",
+ "ImplementsGetwd",
+ "Inet4Pktinfo",
+ "Inet6Pktinfo",
+ "InotifyAddWatch",
+ "InotifyEvent",
+ "InotifyInit",
+ "InotifyInit1",
+ "InotifyRmWatch",
+ "InterfaceAddrMessage",
+ "InterfaceAnnounceMessage",
+ "InterfaceInfo",
+ "InterfaceMessage",
+ "InterfaceMulticastAddrMessage",
+ "InvalidHandle",
+ "Ioperm",
+ "Iopl",
+ "Iovec",
+ "IpAdapterInfo",
+ "IpAddrString",
+ "IpAddressString",
+ "IpMaskString",
+ "Issetugid",
+ "KEY_ALL_ACCESS",
+ "KEY_CREATE_LINK",
+ "KEY_CREATE_SUB_KEY",
+ "KEY_ENUMERATE_SUB_KEYS",
+ "KEY_EXECUTE",
+ "KEY_NOTIFY",
+ "KEY_QUERY_VALUE",
+ "KEY_READ",
+ "KEY_SET_VALUE",
+ "KEY_WOW64_32KEY",
+ "KEY_WOW64_64KEY",
+ "KEY_WRITE",
+ "Kevent",
+ "Kevent_t",
+ "Kill",
+ "Klogctl",
+ "Kqueue",
+ "LANG_ENGLISH",
+ "LAYERED_PROTOCOL",
+ "LCNT_OVERLOAD_FLUSH",
+ "LINUX_REBOOT_CMD_CAD_OFF",
+ "LINUX_REBOOT_CMD_CAD_ON",
+ "LINUX_REBOOT_CMD_HALT",
+ "LINUX_REBOOT_CMD_KEXEC",
+ "LINUX_REBOOT_CMD_POWER_OFF",
+ "LINUX_REBOOT_CMD_RESTART",
+ "LINUX_REBOOT_CMD_RESTART2",
+ "LINUX_REBOOT_CMD_SW_SUSPEND",
+ "LINUX_REBOOT_MAGIC1",
+ "LINUX_REBOOT_MAGIC2",
+ "LOCK_EX",
+ "LOCK_NB",
+ "LOCK_SH",
+ "LOCK_UN",
+ "LazyDLL",
+ "LazyProc",
+ "Lchown",
+ "Linger",
+ "Link",
+ "Listen",
+ "Listxattr",
+ "LoadCancelIoEx",
+ "LoadConnectEx",
+ "LoadCreateSymbolicLink",
+ "LoadDLL",
+ "LoadGetAddrInfo",
+ "LoadLibrary",
+ "LoadSetFileCompletionNotificationModes",
+ "LocalFree",
+ "Log2phys_t",
+ "LookupAccountName",
+ "LookupAccountSid",
+ "LookupSID",
+ "LsfJump",
+ "LsfSocket",
+ "LsfStmt",
+ "Lstat",
+ "MADV_AUTOSYNC",
+ "MADV_CAN_REUSE",
+ "MADV_CORE",
+ "MADV_DOFORK",
+ "MADV_DONTFORK",
+ "MADV_DONTNEED",
+ "MADV_FREE",
+ "MADV_FREE_REUSABLE",
+ "MADV_FREE_REUSE",
+ "MADV_HUGEPAGE",
+ "MADV_HWPOISON",
+ "MADV_MERGEABLE",
+ "MADV_NOCORE",
+ "MADV_NOHUGEPAGE",
+ "MADV_NORMAL",
+ "MADV_NOSYNC",
+ "MADV_PROTECT",
+ "MADV_RANDOM",
+ "MADV_REMOVE",
+ "MADV_SEQUENTIAL",
+ "MADV_SPACEAVAIL",
+ "MADV_UNMERGEABLE",
+ "MADV_WILLNEED",
+ "MADV_ZERO_WIRED_PAGES",
+ "MAP_32BIT",
+ "MAP_ALIGNED_SUPER",
+ "MAP_ALIGNMENT_16MB",
+ "MAP_ALIGNMENT_1TB",
+ "MAP_ALIGNMENT_256TB",
+ "MAP_ALIGNMENT_4GB",
+ "MAP_ALIGNMENT_64KB",
+ "MAP_ALIGNMENT_64PB",
+ "MAP_ALIGNMENT_MASK",
+ "MAP_ALIGNMENT_SHIFT",
+ "MAP_ANON",
+ "MAP_ANONYMOUS",
+ "MAP_COPY",
+ "MAP_DENYWRITE",
+ "MAP_EXECUTABLE",
+ "MAP_FILE",
+ "MAP_FIXED",
+ "MAP_FLAGMASK",
+ "MAP_GROWSDOWN",
+ "MAP_HASSEMAPHORE",
+ "MAP_HUGETLB",
+ "MAP_INHERIT",
+ "MAP_INHERIT_COPY",
+ "MAP_INHERIT_DEFAULT",
+ "MAP_INHERIT_DONATE_COPY",
+ "MAP_INHERIT_NONE",
+ "MAP_INHERIT_SHARE",
+ "MAP_JIT",
+ "MAP_LOCKED",
+ "MAP_NOCACHE",
+ "MAP_NOCORE",
+ "MAP_NOEXTEND",
+ "MAP_NONBLOCK",
+ "MAP_NORESERVE",
+ "MAP_NOSYNC",
+ "MAP_POPULATE",
+ "MAP_PREFAULT_READ",
+ "MAP_PRIVATE",
+ "MAP_RENAME",
+ "MAP_RESERVED0080",
+ "MAP_RESERVED0100",
+ "MAP_SHARED",
+ "MAP_STACK",
+ "MAP_TRYFIXED",
+ "MAP_TYPE",
+ "MAP_WIRED",
+ "MAXIMUM_REPARSE_DATA_BUFFER_SIZE",
+ "MAXLEN_IFDESCR",
+ "MAXLEN_PHYSADDR",
+ "MAX_ADAPTER_ADDRESS_LENGTH",
+ "MAX_ADAPTER_DESCRIPTION_LENGTH",
+ "MAX_ADAPTER_NAME_LENGTH",
+ "MAX_COMPUTERNAME_LENGTH",
+ "MAX_INTERFACE_NAME_LEN",
+ "MAX_LONG_PATH",
+ "MAX_PATH",
+ "MAX_PROTOCOL_CHAIN",
+ "MCL_CURRENT",
+ "MCL_FUTURE",
+ "MNT_DETACH",
+ "MNT_EXPIRE",
+ "MNT_FORCE",
+ "MSG_BCAST",
+ "MSG_CMSG_CLOEXEC",
+ "MSG_COMPAT",
+ "MSG_CONFIRM",
+ "MSG_CONTROLMBUF",
+ "MSG_CTRUNC",
+ "MSG_DONTROUTE",
+ "MSG_DONTWAIT",
+ "MSG_EOF",
+ "MSG_EOR",
+ "MSG_ERRQUEUE",
+ "MSG_FASTOPEN",
+ "MSG_FIN",
+ "MSG_FLUSH",
+ "MSG_HAVEMORE",
+ "MSG_HOLD",
+ "MSG_IOVUSRSPACE",
+ "MSG_LENUSRSPACE",
+ "MSG_MCAST",
+ "MSG_MORE",
+ "MSG_NAMEMBUF",
+ "MSG_NBIO",
+ "MSG_NEEDSA",
+ "MSG_NOSIGNAL",
+ "MSG_NOTIFICATION",
+ "MSG_OOB",
+ "MSG_PEEK",
+ "MSG_PROXY",
+ "MSG_RCVMORE",
+ "MSG_RST",
+ "MSG_SEND",
+ "MSG_SYN",
+ "MSG_TRUNC",
+ "MSG_TRYHARD",
+ "MSG_USERFLAGS",
+ "MSG_WAITALL",
+ "MSG_WAITFORONE",
+ "MSG_WAITSTREAM",
+ "MS_ACTIVE",
+ "MS_ASYNC",
+ "MS_BIND",
+ "MS_DEACTIVATE",
+ "MS_DIRSYNC",
+ "MS_INVALIDATE",
+ "MS_I_VERSION",
+ "MS_KERNMOUNT",
+ "MS_KILLPAGES",
+ "MS_MANDLOCK",
+ "MS_MGC_MSK",
+ "MS_MGC_VAL",
+ "MS_MOVE",
+ "MS_NOATIME",
+ "MS_NODEV",
+ "MS_NODIRATIME",
+ "MS_NOEXEC",
+ "MS_NOSUID",
+ "MS_NOUSER",
+ "MS_POSIXACL",
+ "MS_PRIVATE",
+ "MS_RDONLY",
+ "MS_REC",
+ "MS_RELATIME",
+ "MS_REMOUNT",
+ "MS_RMT_MASK",
+ "MS_SHARED",
+ "MS_SILENT",
+ "MS_SLAVE",
+ "MS_STRICTATIME",
+ "MS_SYNC",
+ "MS_SYNCHRONOUS",
+ "MS_UNBINDABLE",
+ "Madvise",
+ "MapViewOfFile",
+ "MaxTokenInfoClass",
+ "Mclpool",
+ "MibIfRow",
+ "Mkdir",
+ "Mkdirat",
+ "Mkfifo",
+ "Mknod",
+ "Mknodat",
+ "Mlock",
+ "Mlockall",
+ "Mmap",
+ "Mount",
+ "MoveFile",
+ "Mprotect",
+ "Msghdr",
+ "Munlock",
+ "Munlockall",
+ "Munmap",
+ "MustLoadDLL",
+ "NAME_MAX",
+ "NETLINK_ADD_MEMBERSHIP",
+ "NETLINK_AUDIT",
+ "NETLINK_BROADCAST_ERROR",
+ "NETLINK_CONNECTOR",
+ "NETLINK_DNRTMSG",
+ "NETLINK_DROP_MEMBERSHIP",
+ "NETLINK_ECRYPTFS",
+ "NETLINK_FIB_LOOKUP",
+ "NETLINK_FIREWALL",
+ "NETLINK_GENERIC",
+ "NETLINK_INET_DIAG",
+ "NETLINK_IP6_FW",
+ "NETLINK_ISCSI",
+ "NETLINK_KOBJECT_UEVENT",
+ "NETLINK_NETFILTER",
+ "NETLINK_NFLOG",
+ "NETLINK_NO_ENOBUFS",
+ "NETLINK_PKTINFO",
+ "NETLINK_RDMA",
+ "NETLINK_ROUTE",
+ "NETLINK_SCSITRANSPORT",
+ "NETLINK_SELINUX",
+ "NETLINK_UNUSED",
+ "NETLINK_USERSOCK",
+ "NETLINK_XFRM",
+ "NET_RT_DUMP",
+ "NET_RT_DUMP2",
+ "NET_RT_FLAGS",
+ "NET_RT_IFLIST",
+ "NET_RT_IFLIST2",
+ "NET_RT_IFLISTL",
+ "NET_RT_IFMALIST",
+ "NET_RT_MAXID",
+ "NET_RT_OIFLIST",
+ "NET_RT_OOIFLIST",
+ "NET_RT_STAT",
+ "NET_RT_STATS",
+ "NET_RT_TABLE",
+ "NET_RT_TRASH",
+ "NLA_ALIGNTO",
+ "NLA_F_NESTED",
+ "NLA_F_NET_BYTEORDER",
+ "NLA_HDRLEN",
+ "NLMSG_ALIGNTO",
+ "NLMSG_DONE",
+ "NLMSG_ERROR",
+ "NLMSG_HDRLEN",
+ "NLMSG_MIN_TYPE",
+ "NLMSG_NOOP",
+ "NLMSG_OVERRUN",
+ "NLM_F_ACK",
+ "NLM_F_APPEND",
+ "NLM_F_ATOMIC",
+ "NLM_F_CREATE",
+ "NLM_F_DUMP",
+ "NLM_F_ECHO",
+ "NLM_F_EXCL",
+ "NLM_F_MATCH",
+ "NLM_F_MULTI",
+ "NLM_F_REPLACE",
+ "NLM_F_REQUEST",
+ "NLM_F_ROOT",
+ "NOFLSH",
+ "NOTE_ABSOLUTE",
+ "NOTE_ATTRIB",
+ "NOTE_BACKGROUND",
+ "NOTE_CHILD",
+ "NOTE_CRITICAL",
+ "NOTE_DELETE",
+ "NOTE_EOF",
+ "NOTE_EXEC",
+ "NOTE_EXIT",
+ "NOTE_EXITSTATUS",
+ "NOTE_EXIT_CSERROR",
+ "NOTE_EXIT_DECRYPTFAIL",
+ "NOTE_EXIT_DETAIL",
+ "NOTE_EXIT_DETAIL_MASK",
+ "NOTE_EXIT_MEMORY",
+ "NOTE_EXIT_REPARENTED",
+ "NOTE_EXTEND",
+ "NOTE_FFAND",
+ "NOTE_FFCOPY",
+ "NOTE_FFCTRLMASK",
+ "NOTE_FFLAGSMASK",
+ "NOTE_FFNOP",
+ "NOTE_FFOR",
+ "NOTE_FORK",
+ "NOTE_LEEWAY",
+ "NOTE_LINK",
+ "NOTE_LOWAT",
+ "NOTE_NONE",
+ "NOTE_NSECONDS",
+ "NOTE_PCTRLMASK",
+ "NOTE_PDATAMASK",
+ "NOTE_REAP",
+ "NOTE_RENAME",
+ "NOTE_RESOURCEEND",
+ "NOTE_REVOKE",
+ "NOTE_SECONDS",
+ "NOTE_SIGNAL",
+ "NOTE_TRACK",
+ "NOTE_TRACKERR",
+ "NOTE_TRIGGER",
+ "NOTE_TRUNCATE",
+ "NOTE_USECONDS",
+ "NOTE_VM_ERROR",
+ "NOTE_VM_PRESSURE",
+ "NOTE_VM_PRESSURE_SUDDEN_TERMINATE",
+ "NOTE_VM_PRESSURE_TERMINATE",
+ "NOTE_WRITE",
+ "NameCanonical",
+ "NameCanonicalEx",
+ "NameDisplay",
+ "NameDnsDomain",
+ "NameFullyQualifiedDN",
+ "NameSamCompatible",
+ "NameServicePrincipal",
+ "NameUniqueId",
+ "NameUnknown",
+ "NameUserPrincipal",
+ "Nanosleep",
+ "NetApiBufferFree",
+ "NetGetJoinInformation",
+ "NetSetupDomainName",
+ "NetSetupUnjoined",
+ "NetSetupUnknownStatus",
+ "NetSetupWorkgroupName",
+ "NetUserGetInfo",
+ "NetlinkMessage",
+ "NetlinkRIB",
+ "NetlinkRouteAttr",
+ "NetlinkRouteRequest",
+ "NewCallback",
+ "NewCallbackCDecl",
+ "NewLazyDLL",
+ "NlAttr",
+ "NlMsgerr",
+ "NlMsghdr",
+ "NsecToFiletime",
+ "NsecToTimespec",
+ "NsecToTimeval",
+ "Ntohs",
+ "OCRNL",
+ "OFDEL",
+ "OFILL",
+ "OFIOGETBMAP",
+ "OID_PKIX_KP_SERVER_AUTH",
+ "OID_SERVER_GATED_CRYPTO",
+ "OID_SGC_NETSCAPE",
+ "OLCUC",
+ "ONLCR",
+ "ONLRET",
+ "ONOCR",
+ "ONOEOT",
+ "OPEN_ALWAYS",
+ "OPEN_EXISTING",
+ "OPOST",
+ "O_ACCMODE",
+ "O_ALERT",
+ "O_ALT_IO",
+ "O_APPEND",
+ "O_ASYNC",
+ "O_CLOEXEC",
+ "O_CREAT",
+ "O_DIRECT",
+ "O_DIRECTORY",
+ "O_DP_GETRAWENCRYPTED",
+ "O_DSYNC",
+ "O_EVTONLY",
+ "O_EXCL",
+ "O_EXEC",
+ "O_EXLOCK",
+ "O_FSYNC",
+ "O_LARGEFILE",
+ "O_NDELAY",
+ "O_NOATIME",
+ "O_NOCTTY",
+ "O_NOFOLLOW",
+ "O_NONBLOCK",
+ "O_NOSIGPIPE",
+ "O_POPUP",
+ "O_RDONLY",
+ "O_RDWR",
+ "O_RSYNC",
+ "O_SHLOCK",
+ "O_SYMLINK",
+ "O_SYNC",
+ "O_TRUNC",
+ "O_TTY_INIT",
+ "O_WRONLY",
+ "Open",
+ "OpenCurrentProcessToken",
+ "OpenProcess",
+ "OpenProcessToken",
+ "Openat",
+ "Overlapped",
+ "PACKET_ADD_MEMBERSHIP",
+ "PACKET_BROADCAST",
+ "PACKET_DROP_MEMBERSHIP",
+ "PACKET_FASTROUTE",
+ "PACKET_HOST",
+ "PACKET_LOOPBACK",
+ "PACKET_MR_ALLMULTI",
+ "PACKET_MR_MULTICAST",
+ "PACKET_MR_PROMISC",
+ "PACKET_MULTICAST",
+ "PACKET_OTHERHOST",
+ "PACKET_OUTGOING",
+ "PACKET_RECV_OUTPUT",
+ "PACKET_RX_RING",
+ "PACKET_STATISTICS",
+ "PAGE_EXECUTE_READ",
+ "PAGE_EXECUTE_READWRITE",
+ "PAGE_EXECUTE_WRITECOPY",
+ "PAGE_READONLY",
+ "PAGE_READWRITE",
+ "PAGE_WRITECOPY",
+ "PARENB",
+ "PARMRK",
+ "PARODD",
+ "PENDIN",
+ "PFL_HIDDEN",
+ "PFL_MATCHES_PROTOCOL_ZERO",
+ "PFL_MULTIPLE_PROTO_ENTRIES",
+ "PFL_NETWORKDIRECT_PROVIDER",
+ "PFL_RECOMMENDED_PROTO_ENTRY",
+ "PF_FLUSH",
+ "PKCS_7_ASN_ENCODING",
+ "PMC5_PIPELINE_FLUSH",
+ "PRIO_PGRP",
+ "PRIO_PROCESS",
+ "PRIO_USER",
+ "PRI_IOFLUSH",
+ "PROCESS_QUERY_INFORMATION",
+ "PROCESS_TERMINATE",
+ "PROT_EXEC",
+ "PROT_GROWSDOWN",
+ "PROT_GROWSUP",
+ "PROT_NONE",
+ "PROT_READ",
+ "PROT_WRITE",
+ "PROV_DH_SCHANNEL",
+ "PROV_DSS",
+ "PROV_DSS_DH",
+ "PROV_EC_ECDSA_FULL",
+ "PROV_EC_ECDSA_SIG",
+ "PROV_EC_ECNRA_FULL",
+ "PROV_EC_ECNRA_SIG",
+ "PROV_FORTEZZA",
+ "PROV_INTEL_SEC",
+ "PROV_MS_EXCHANGE",
+ "PROV_REPLACE_OWF",
+ "PROV_RNG",
+ "PROV_RSA_AES",
+ "PROV_RSA_FULL",
+ "PROV_RSA_SCHANNEL",
+ "PROV_RSA_SIG",
+ "PROV_SPYRUS_LYNKS",
+ "PROV_SSL",
+ "PR_CAPBSET_DROP",
+ "PR_CAPBSET_READ",
+ "PR_CLEAR_SECCOMP_FILTER",
+ "PR_ENDIAN_BIG",
+ "PR_ENDIAN_LITTLE",
+ "PR_ENDIAN_PPC_LITTLE",
+ "PR_FPEMU_NOPRINT",
+ "PR_FPEMU_SIGFPE",
+ "PR_FP_EXC_ASYNC",
+ "PR_FP_EXC_DISABLED",
+ "PR_FP_EXC_DIV",
+ "PR_FP_EXC_INV",
+ "PR_FP_EXC_NONRECOV",
+ "PR_FP_EXC_OVF",
+ "PR_FP_EXC_PRECISE",
+ "PR_FP_EXC_RES",
+ "PR_FP_EXC_SW_ENABLE",
+ "PR_FP_EXC_UND",
+ "PR_GET_DUMPABLE",
+ "PR_GET_ENDIAN",
+ "PR_GET_FPEMU",
+ "PR_GET_FPEXC",
+ "PR_GET_KEEPCAPS",
+ "PR_GET_NAME",
+ "PR_GET_PDEATHSIG",
+ "PR_GET_SECCOMP",
+ "PR_GET_SECCOMP_FILTER",
+ "PR_GET_SECUREBITS",
+ "PR_GET_TIMERSLACK",
+ "PR_GET_TIMING",
+ "PR_GET_TSC",
+ "PR_GET_UNALIGN",
+ "PR_MCE_KILL",
+ "PR_MCE_KILL_CLEAR",
+ "PR_MCE_KILL_DEFAULT",
+ "PR_MCE_KILL_EARLY",
+ "PR_MCE_KILL_GET",
+ "PR_MCE_KILL_LATE",
+ "PR_MCE_KILL_SET",
+ "PR_SECCOMP_FILTER_EVENT",
+ "PR_SECCOMP_FILTER_SYSCALL",
+ "PR_SET_DUMPABLE",
+ "PR_SET_ENDIAN",
+ "PR_SET_FPEMU",
+ "PR_SET_FPEXC",
+ "PR_SET_KEEPCAPS",
+ "PR_SET_NAME",
+ "PR_SET_PDEATHSIG",
+ "PR_SET_PTRACER",
+ "PR_SET_SECCOMP",
+ "PR_SET_SECCOMP_FILTER",
+ "PR_SET_SECUREBITS",
+ "PR_SET_TIMERSLACK",
+ "PR_SET_TIMING",
+ "PR_SET_TSC",
+ "PR_SET_UNALIGN",
+ "PR_TASK_PERF_EVENTS_DISABLE",
+ "PR_TASK_PERF_EVENTS_ENABLE",
+ "PR_TIMING_STATISTICAL",
+ "PR_TIMING_TIMESTAMP",
+ "PR_TSC_ENABLE",
+ "PR_TSC_SIGSEGV",
+ "PR_UNALIGN_NOPRINT",
+ "PR_UNALIGN_SIGBUS",
+ "PTRACE_ARCH_PRCTL",
+ "PTRACE_ATTACH",
+ "PTRACE_CONT",
+ "PTRACE_DETACH",
+ "PTRACE_EVENT_CLONE",
+ "PTRACE_EVENT_EXEC",
+ "PTRACE_EVENT_EXIT",
+ "PTRACE_EVENT_FORK",
+ "PTRACE_EVENT_VFORK",
+ "PTRACE_EVENT_VFORK_DONE",
+ "PTRACE_GETCRUNCHREGS",
+ "PTRACE_GETEVENTMSG",
+ "PTRACE_GETFPREGS",
+ "PTRACE_GETFPXREGS",
+ "PTRACE_GETHBPREGS",
+ "PTRACE_GETREGS",
+ "PTRACE_GETREGSET",
+ "PTRACE_GETSIGINFO",
+ "PTRACE_GETVFPREGS",
+ "PTRACE_GETWMMXREGS",
+ "PTRACE_GET_THREAD_AREA",
+ "PTRACE_KILL",
+ "PTRACE_OLDSETOPTIONS",
+ "PTRACE_O_MASK",
+ "PTRACE_O_TRACECLONE",
+ "PTRACE_O_TRACEEXEC",
+ "PTRACE_O_TRACEEXIT",
+ "PTRACE_O_TRACEFORK",
+ "PTRACE_O_TRACESYSGOOD",
+ "PTRACE_O_TRACEVFORK",
+ "PTRACE_O_TRACEVFORKDONE",
+ "PTRACE_PEEKDATA",
+ "PTRACE_PEEKTEXT",
+ "PTRACE_PEEKUSR",
+ "PTRACE_POKEDATA",
+ "PTRACE_POKETEXT",
+ "PTRACE_POKEUSR",
+ "PTRACE_SETCRUNCHREGS",
+ "PTRACE_SETFPREGS",
+ "PTRACE_SETFPXREGS",
+ "PTRACE_SETHBPREGS",
+ "PTRACE_SETOPTIONS",
+ "PTRACE_SETREGS",
+ "PTRACE_SETREGSET",
+ "PTRACE_SETSIGINFO",
+ "PTRACE_SETVFPREGS",
+ "PTRACE_SETWMMXREGS",
+ "PTRACE_SET_SYSCALL",
+ "PTRACE_SET_THREAD_AREA",
+ "PTRACE_SINGLEBLOCK",
+ "PTRACE_SINGLESTEP",
+ "PTRACE_SYSCALL",
+ "PTRACE_SYSEMU",
+ "PTRACE_SYSEMU_SINGLESTEP",
+ "PTRACE_TRACEME",
+ "PT_ATTACH",
+ "PT_ATTACHEXC",
+ "PT_CONTINUE",
+ "PT_DATA_ADDR",
+ "PT_DENY_ATTACH",
+ "PT_DETACH",
+ "PT_FIRSTMACH",
+ "PT_FORCEQUOTA",
+ "PT_KILL",
+ "PT_MASK",
+ "PT_READ_D",
+ "PT_READ_I",
+ "PT_READ_U",
+ "PT_SIGEXC",
+ "PT_STEP",
+ "PT_TEXT_ADDR",
+ "PT_TEXT_END_ADDR",
+ "PT_THUPDATE",
+ "PT_TRACE_ME",
+ "PT_WRITE_D",
+ "PT_WRITE_I",
+ "PT_WRITE_U",
+ "ParseDirent",
+ "ParseNetlinkMessage",
+ "ParseNetlinkRouteAttr",
+ "ParseRoutingMessage",
+ "ParseRoutingSockaddr",
+ "ParseSocketControlMessage",
+ "ParseUnixCredentials",
+ "ParseUnixRights",
+ "PathMax",
+ "Pathconf",
+ "Pause",
+ "Pipe",
+ "Pipe2",
+ "PivotRoot",
+ "Pointer",
+ "PostQueuedCompletionStatus",
+ "Pread",
+ "Proc",
+ "ProcAttr",
+ "Process32First",
+ "Process32Next",
+ "ProcessEntry32",
+ "ProcessInformation",
+ "Protoent",
+ "PtraceAttach",
+ "PtraceCont",
+ "PtraceDetach",
+ "PtraceGetEventMsg",
+ "PtraceGetRegs",
+ "PtracePeekData",
+ "PtracePeekText",
+ "PtracePokeData",
+ "PtracePokeText",
+ "PtraceRegs",
+ "PtraceSetOptions",
+ "PtraceSetRegs",
+ "PtraceSingleStep",
+ "PtraceSyscall",
+ "Pwrite",
+ "REG_BINARY",
+ "REG_DWORD",
+ "REG_DWORD_BIG_ENDIAN",
+ "REG_DWORD_LITTLE_ENDIAN",
+ "REG_EXPAND_SZ",
+ "REG_FULL_RESOURCE_DESCRIPTOR",
+ "REG_LINK",
+ "REG_MULTI_SZ",
+ "REG_NONE",
+ "REG_QWORD",
+ "REG_QWORD_LITTLE_ENDIAN",
+ "REG_RESOURCE_LIST",
+ "REG_RESOURCE_REQUIREMENTS_LIST",
+ "REG_SZ",
+ "RLIMIT_AS",
+ "RLIMIT_CORE",
+ "RLIMIT_CPU",
+ "RLIMIT_CPU_USAGE_MONITOR",
+ "RLIMIT_DATA",
+ "RLIMIT_FSIZE",
+ "RLIMIT_NOFILE",
+ "RLIMIT_STACK",
+ "RLIM_INFINITY",
+ "RTAX_ADVMSS",
+ "RTAX_AUTHOR",
+ "RTAX_BRD",
+ "RTAX_CWND",
+ "RTAX_DST",
+ "RTAX_FEATURES",
+ "RTAX_FEATURE_ALLFRAG",
+ "RTAX_FEATURE_ECN",
+ "RTAX_FEATURE_SACK",
+ "RTAX_FEATURE_TIMESTAMP",
+ "RTAX_GATEWAY",
+ "RTAX_GENMASK",
+ "RTAX_HOPLIMIT",
+ "RTAX_IFA",
+ "RTAX_IFP",
+ "RTAX_INITCWND",
+ "RTAX_INITRWND",
+ "RTAX_LABEL",
+ "RTAX_LOCK",
+ "RTAX_MAX",
+ "RTAX_MTU",
+ "RTAX_NETMASK",
+ "RTAX_REORDERING",
+ "RTAX_RTO_MIN",
+ "RTAX_RTT",
+ "RTAX_RTTVAR",
+ "RTAX_SRC",
+ "RTAX_SRCMASK",
+ "RTAX_SSTHRESH",
+ "RTAX_TAG",
+ "RTAX_UNSPEC",
+ "RTAX_WINDOW",
+ "RTA_ALIGNTO",
+ "RTA_AUTHOR",
+ "RTA_BRD",
+ "RTA_CACHEINFO",
+ "RTA_DST",
+ "RTA_FLOW",
+ "RTA_GATEWAY",
+ "RTA_GENMASK",
+ "RTA_IFA",
+ "RTA_IFP",
+ "RTA_IIF",
+ "RTA_LABEL",
+ "RTA_MAX",
+ "RTA_METRICS",
+ "RTA_MULTIPATH",
+ "RTA_NETMASK",
+ "RTA_OIF",
+ "RTA_PREFSRC",
+ "RTA_PRIORITY",
+ "RTA_SRC",
+ "RTA_SRCMASK",
+ "RTA_TABLE",
+ "RTA_TAG",
+ "RTA_UNSPEC",
+ "RTCF_DIRECTSRC",
+ "RTCF_DOREDIRECT",
+ "RTCF_LOG",
+ "RTCF_MASQ",
+ "RTCF_NAT",
+ "RTCF_VALVE",
+ "RTF_ADDRCLASSMASK",
+ "RTF_ADDRCONF",
+ "RTF_ALLONLINK",
+ "RTF_ANNOUNCE",
+ "RTF_BLACKHOLE",
+ "RTF_BROADCAST",
+ "RTF_CACHE",
+ "RTF_CLONED",
+ "RTF_CLONING",
+ "RTF_CONDEMNED",
+ "RTF_DEFAULT",
+ "RTF_DELCLONE",
+ "RTF_DONE",
+ "RTF_DYNAMIC",
+ "RTF_FLOW",
+ "RTF_FMASK",
+ "RTF_GATEWAY",
+ "RTF_GWFLAG_COMPAT",
+ "RTF_HOST",
+ "RTF_IFREF",
+ "RTF_IFSCOPE",
+ "RTF_INTERFACE",
+ "RTF_IRTT",
+ "RTF_LINKRT",
+ "RTF_LLDATA",
+ "RTF_LLINFO",
+ "RTF_LOCAL",
+ "RTF_MASK",
+ "RTF_MODIFIED",
+ "RTF_MPATH",
+ "RTF_MPLS",
+ "RTF_MSS",
+ "RTF_MTU",
+ "RTF_MULTICAST",
+ "RTF_NAT",
+ "RTF_NOFORWARD",
+ "RTF_NONEXTHOP",
+ "RTF_NOPMTUDISC",
+ "RTF_PERMANENT_ARP",
+ "RTF_PINNED",
+ "RTF_POLICY",
+ "RTF_PRCLONING",
+ "RTF_PROTO1",
+ "RTF_PROTO2",
+ "RTF_PROTO3",
+ "RTF_PROXY",
+ "RTF_REINSTATE",
+ "RTF_REJECT",
+ "RTF_RNH_LOCKED",
+ "RTF_ROUTER",
+ "RTF_SOURCE",
+ "RTF_SRC",
+ "RTF_STATIC",
+ "RTF_STICKY",
+ "RTF_THROW",
+ "RTF_TUNNEL",
+ "RTF_UP",
+ "RTF_USETRAILERS",
+ "RTF_WASCLONED",
+ "RTF_WINDOW",
+ "RTF_XRESOLVE",
+ "RTM_ADD",
+ "RTM_BASE",
+ "RTM_CHANGE",
+ "RTM_CHGADDR",
+ "RTM_DELACTION",
+ "RTM_DELADDR",
+ "RTM_DELADDRLABEL",
+ "RTM_DELETE",
+ "RTM_DELLINK",
+ "RTM_DELMADDR",
+ "RTM_DELNEIGH",
+ "RTM_DELQDISC",
+ "RTM_DELROUTE",
+ "RTM_DELRULE",
+ "RTM_DELTCLASS",
+ "RTM_DELTFILTER",
+ "RTM_DESYNC",
+ "RTM_F_CLONED",
+ "RTM_F_EQUALIZE",
+ "RTM_F_NOTIFY",
+ "RTM_F_PREFIX",
+ "RTM_GET",
+ "RTM_GET2",
+ "RTM_GETACTION",
+ "RTM_GETADDR",
+ "RTM_GETADDRLABEL",
+ "RTM_GETANYCAST",
+ "RTM_GETDCB",
+ "RTM_GETLINK",
+ "RTM_GETMULTICAST",
+ "RTM_GETNEIGH",
+ "RTM_GETNEIGHTBL",
+ "RTM_GETQDISC",
+ "RTM_GETROUTE",
+ "RTM_GETRULE",
+ "RTM_GETTCLASS",
+ "RTM_GETTFILTER",
+ "RTM_IEEE80211",
+ "RTM_IFANNOUNCE",
+ "RTM_IFINFO",
+ "RTM_IFINFO2",
+ "RTM_LLINFO_UPD",
+ "RTM_LOCK",
+ "RTM_LOSING",
+ "RTM_MAX",
+ "RTM_MAXSIZE",
+ "RTM_MISS",
+ "RTM_NEWACTION",
+ "RTM_NEWADDR",
+ "RTM_NEWADDRLABEL",
+ "RTM_NEWLINK",
+ "RTM_NEWMADDR",
+ "RTM_NEWMADDR2",
+ "RTM_NEWNDUSEROPT",
+ "RTM_NEWNEIGH",
+ "RTM_NEWNEIGHTBL",
+ "RTM_NEWPREFIX",
+ "RTM_NEWQDISC",
+ "RTM_NEWROUTE",
+ "RTM_NEWRULE",
+ "RTM_NEWTCLASS",
+ "RTM_NEWTFILTER",
+ "RTM_NR_FAMILIES",
+ "RTM_NR_MSGTYPES",
+ "RTM_OIFINFO",
+ "RTM_OLDADD",
+ "RTM_OLDDEL",
+ "RTM_OOIFINFO",
+ "RTM_REDIRECT",
+ "RTM_RESOLVE",
+ "RTM_RTTUNIT",
+ "RTM_SETDCB",
+ "RTM_SETGATE",
+ "RTM_SETLINK",
+ "RTM_SETNEIGHTBL",
+ "RTM_VERSION",
+ "RTNH_ALIGNTO",
+ "RTNH_F_DEAD",
+ "RTNH_F_ONLINK",
+ "RTNH_F_PERVASIVE",
+ "RTNLGRP_IPV4_IFADDR",
+ "RTNLGRP_IPV4_MROUTE",
+ "RTNLGRP_IPV4_ROUTE",
+ "RTNLGRP_IPV4_RULE",
+ "RTNLGRP_IPV6_IFADDR",
+ "RTNLGRP_IPV6_IFINFO",
+ "RTNLGRP_IPV6_MROUTE",
+ "RTNLGRP_IPV6_PREFIX",
+ "RTNLGRP_IPV6_ROUTE",
+ "RTNLGRP_IPV6_RULE",
+ "RTNLGRP_LINK",
+ "RTNLGRP_ND_USEROPT",
+ "RTNLGRP_NEIGH",
+ "RTNLGRP_NONE",
+ "RTNLGRP_NOTIFY",
+ "RTNLGRP_TC",
+ "RTN_ANYCAST",
+ "RTN_BLACKHOLE",
+ "RTN_BROADCAST",
+ "RTN_LOCAL",
+ "RTN_MAX",
+ "RTN_MULTICAST",
+ "RTN_NAT",
+ "RTN_PROHIBIT",
+ "RTN_THROW",
+ "RTN_UNICAST",
+ "RTN_UNREACHABLE",
+ "RTN_UNSPEC",
+ "RTN_XRESOLVE",
+ "RTPROT_BIRD",
+ "RTPROT_BOOT",
+ "RTPROT_DHCP",
+ "RTPROT_DNROUTED",
+ "RTPROT_GATED",
+ "RTPROT_KERNEL",
+ "RTPROT_MRT",
+ "RTPROT_NTK",
+ "RTPROT_RA",
+ "RTPROT_REDIRECT",
+ "RTPROT_STATIC",
+ "RTPROT_UNSPEC",
+ "RTPROT_XORP",
+ "RTPROT_ZEBRA",
+ "RTV_EXPIRE",
+ "RTV_HOPCOUNT",
+ "RTV_MTU",
+ "RTV_RPIPE",
+ "RTV_RTT",
+ "RTV_RTTVAR",
+ "RTV_SPIPE",
+ "RTV_SSTHRESH",
+ "RTV_WEIGHT",
+ "RT_CACHING_CONTEXT",
+ "RT_CLASS_DEFAULT",
+ "RT_CLASS_LOCAL",
+ "RT_CLASS_MAIN",
+ "RT_CLASS_MAX",
+ "RT_CLASS_UNSPEC",
+ "RT_DEFAULT_FIB",
+ "RT_NORTREF",
+ "RT_SCOPE_HOST",
+ "RT_SCOPE_LINK",
+ "RT_SCOPE_NOWHERE",
+ "RT_SCOPE_SITE",
+ "RT_SCOPE_UNIVERSE",
+ "RT_TABLEID_MAX",
+ "RT_TABLE_COMPAT",
+ "RT_TABLE_DEFAULT",
+ "RT_TABLE_LOCAL",
+ "RT_TABLE_MAIN",
+ "RT_TABLE_MAX",
+ "RT_TABLE_UNSPEC",
+ "RUSAGE_CHILDREN",
+ "RUSAGE_SELF",
+ "RUSAGE_THREAD",
+ "Radvisory_t",
+ "RawConn",
+ "RawSockaddr",
+ "RawSockaddrAny",
+ "RawSockaddrDatalink",
+ "RawSockaddrInet4",
+ "RawSockaddrInet6",
+ "RawSockaddrLinklayer",
+ "RawSockaddrNetlink",
+ "RawSockaddrUnix",
+ "RawSyscall",
+ "RawSyscall6",
+ "Read",
+ "ReadConsole",
+ "ReadDirectoryChanges",
+ "ReadDirent",
+ "ReadFile",
+ "Readlink",
+ "Reboot",
+ "Recvfrom",
+ "Recvmsg",
+ "RegCloseKey",
+ "RegEnumKeyEx",
+ "RegOpenKeyEx",
+ "RegQueryInfoKey",
+ "RegQueryValueEx",
+ "RemoveDirectory",
+ "Removexattr",
+ "Rename",
+ "Renameat",
+ "Revoke",
+ "Rlimit",
+ "Rmdir",
+ "RouteMessage",
+ "RouteRIB",
+ "RoutingMessage",
+ "RtAttr",
+ "RtGenmsg",
+ "RtMetrics",
+ "RtMsg",
+ "RtMsghdr",
+ "RtNexthop",
+ "Rusage",
+ "SCM_BINTIME",
+ "SCM_CREDENTIALS",
+ "SCM_CREDS",
+ "SCM_RIGHTS",
+ "SCM_TIMESTAMP",
+ "SCM_TIMESTAMPING",
+ "SCM_TIMESTAMPNS",
+ "SCM_TIMESTAMP_MONOTONIC",
+ "SHUT_RD",
+ "SHUT_RDWR",
+ "SHUT_WR",
+ "SID",
+ "SIDAndAttributes",
+ "SIGABRT",
+ "SIGALRM",
+ "SIGBUS",
+ "SIGCHLD",
+ "SIGCLD",
+ "SIGCONT",
+ "SIGEMT",
+ "SIGFPE",
+ "SIGHUP",
+ "SIGILL",
+ "SIGINFO",
+ "SIGINT",
+ "SIGIO",
+ "SIGIOT",
+ "SIGKILL",
+ "SIGLIBRT",
+ "SIGLWP",
+ "SIGPIPE",
+ "SIGPOLL",
+ "SIGPROF",
+ "SIGPWR",
+ "SIGQUIT",
+ "SIGSEGV",
+ "SIGSTKFLT",
+ "SIGSTOP",
+ "SIGSYS",
+ "SIGTERM",
+ "SIGTHR",
+ "SIGTRAP",
+ "SIGTSTP",
+ "SIGTTIN",
+ "SIGTTOU",
+ "SIGUNUSED",
+ "SIGURG",
+ "SIGUSR1",
+ "SIGUSR2",
+ "SIGVTALRM",
+ "SIGWINCH",
+ "SIGXCPU",
+ "SIGXFSZ",
+ "SIOCADDDLCI",
+ "SIOCADDMULTI",
+ "SIOCADDRT",
+ "SIOCAIFADDR",
+ "SIOCAIFGROUP",
+ "SIOCALIFADDR",
+ "SIOCARPIPLL",
+ "SIOCATMARK",
+ "SIOCAUTOADDR",
+ "SIOCAUTONETMASK",
+ "SIOCBRDGADD",
+ "SIOCBRDGADDS",
+ "SIOCBRDGARL",
+ "SIOCBRDGDADDR",
+ "SIOCBRDGDEL",
+ "SIOCBRDGDELS",
+ "SIOCBRDGFLUSH",
+ "SIOCBRDGFRL",
+ "SIOCBRDGGCACHE",
+ "SIOCBRDGGFD",
+ "SIOCBRDGGHT",
+ "SIOCBRDGGIFFLGS",
+ "SIOCBRDGGMA",
+ "SIOCBRDGGPARAM",
+ "SIOCBRDGGPRI",
+ "SIOCBRDGGRL",
+ "SIOCBRDGGSIFS",
+ "SIOCBRDGGTO",
+ "SIOCBRDGIFS",
+ "SIOCBRDGRTS",
+ "SIOCBRDGSADDR",
+ "SIOCBRDGSCACHE",
+ "SIOCBRDGSFD",
+ "SIOCBRDGSHT",
+ "SIOCBRDGSIFCOST",
+ "SIOCBRDGSIFFLGS",
+ "SIOCBRDGSIFPRIO",
+ "SIOCBRDGSMA",
+ "SIOCBRDGSPRI",
+ "SIOCBRDGSPROTO",
+ "SIOCBRDGSTO",
+ "SIOCBRDGSTXHC",
+ "SIOCDARP",
+ "SIOCDELDLCI",
+ "SIOCDELMULTI",
+ "SIOCDELRT",
+ "SIOCDEVPRIVATE",
+ "SIOCDIFADDR",
+ "SIOCDIFGROUP",
+ "SIOCDIFPHYADDR",
+ "SIOCDLIFADDR",
+ "SIOCDRARP",
+ "SIOCGARP",
+ "SIOCGDRVSPEC",
+ "SIOCGETKALIVE",
+ "SIOCGETLABEL",
+ "SIOCGETPFLOW",
+ "SIOCGETPFSYNC",
+ "SIOCGETSGCNT",
+ "SIOCGETVIFCNT",
+ "SIOCGETVLAN",
+ "SIOCGHIWAT",
+ "SIOCGIFADDR",
+ "SIOCGIFADDRPREF",
+ "SIOCGIFALIAS",
+ "SIOCGIFALTMTU",
+ "SIOCGIFASYNCMAP",
+ "SIOCGIFBOND",
+ "SIOCGIFBR",
+ "SIOCGIFBRDADDR",
+ "SIOCGIFCAP",
+ "SIOCGIFCONF",
+ "SIOCGIFCOUNT",
+ "SIOCGIFDATA",
+ "SIOCGIFDESCR",
+ "SIOCGIFDEVMTU",
+ "SIOCGIFDLT",
+ "SIOCGIFDSTADDR",
+ "SIOCGIFENCAP",
+ "SIOCGIFFIB",
+ "SIOCGIFFLAGS",
+ "SIOCGIFGATTR",
+ "SIOCGIFGENERIC",
+ "SIOCGIFGMEMB",
+ "SIOCGIFGROUP",
+ "SIOCGIFHARDMTU",
+ "SIOCGIFHWADDR",
+ "SIOCGIFINDEX",
+ "SIOCGIFKPI",
+ "SIOCGIFMAC",
+ "SIOCGIFMAP",
+ "SIOCGIFMEDIA",
+ "SIOCGIFMEM",
+ "SIOCGIFMETRIC",
+ "SIOCGIFMTU",
+ "SIOCGIFNAME",
+ "SIOCGIFNETMASK",
+ "SIOCGIFPDSTADDR",
+ "SIOCGIFPFLAGS",
+ "SIOCGIFPHYS",
+ "SIOCGIFPRIORITY",
+ "SIOCGIFPSRCADDR",
+ "SIOCGIFRDOMAIN",
+ "SIOCGIFRTLABEL",
+ "SIOCGIFSLAVE",
+ "SIOCGIFSTATUS",
+ "SIOCGIFTIMESLOT",
+ "SIOCGIFTXQLEN",
+ "SIOCGIFVLAN",
+ "SIOCGIFWAKEFLAGS",
+ "SIOCGIFXFLAGS",
+ "SIOCGLIFADDR",
+ "SIOCGLIFPHYADDR",
+ "SIOCGLIFPHYRTABLE",
+ "SIOCGLIFPHYTTL",
+ "SIOCGLINKSTR",
+ "SIOCGLOWAT",
+ "SIOCGPGRP",
+ "SIOCGPRIVATE_0",
+ "SIOCGPRIVATE_1",
+ "SIOCGRARP",
+ "SIOCGSPPPPARAMS",
+ "SIOCGSTAMP",
+ "SIOCGSTAMPNS",
+ "SIOCGVH",
+ "SIOCGVNETID",
+ "SIOCIFCREATE",
+ "SIOCIFCREATE2",
+ "SIOCIFDESTROY",
+ "SIOCIFGCLONERS",
+ "SIOCINITIFADDR",
+ "SIOCPROTOPRIVATE",
+ "SIOCRSLVMULTI",
+ "SIOCRTMSG",
+ "SIOCSARP",
+ "SIOCSDRVSPEC",
+ "SIOCSETKALIVE",
+ "SIOCSETLABEL",
+ "SIOCSETPFLOW",
+ "SIOCSETPFSYNC",
+ "SIOCSETVLAN",
+ "SIOCSHIWAT",
+ "SIOCSIFADDR",
+ "SIOCSIFADDRPREF",
+ "SIOCSIFALTMTU",
+ "SIOCSIFASYNCMAP",
+ "SIOCSIFBOND",
+ "SIOCSIFBR",
+ "SIOCSIFBRDADDR",
+ "SIOCSIFCAP",
+ "SIOCSIFDESCR",
+ "SIOCSIFDSTADDR",
+ "SIOCSIFENCAP",
+ "SIOCSIFFIB",
+ "SIOCSIFFLAGS",
+ "SIOCSIFGATTR",
+ "SIOCSIFGENERIC",
+ "SIOCSIFHWADDR",
+ "SIOCSIFHWBROADCAST",
+ "SIOCSIFKPI",
+ "SIOCSIFLINK",
+ "SIOCSIFLLADDR",
+ "SIOCSIFMAC",
+ "SIOCSIFMAP",
+ "SIOCSIFMEDIA",
+ "SIOCSIFMEM",
+ "SIOCSIFMETRIC",
+ "SIOCSIFMTU",
+ "SIOCSIFNAME",
+ "SIOCSIFNETMASK",
+ "SIOCSIFPFLAGS",
+ "SIOCSIFPHYADDR",
+ "SIOCSIFPHYS",
+ "SIOCSIFPRIORITY",
+ "SIOCSIFRDOMAIN",
+ "SIOCSIFRTLABEL",
+ "SIOCSIFRVNET",
+ "SIOCSIFSLAVE",
+ "SIOCSIFTIMESLOT",
+ "SIOCSIFTXQLEN",
+ "SIOCSIFVLAN",
+ "SIOCSIFVNET",
+ "SIOCSIFXFLAGS",
+ "SIOCSLIFPHYADDR",
+ "SIOCSLIFPHYRTABLE",
+ "SIOCSLIFPHYTTL",
+ "SIOCSLINKSTR",
+ "SIOCSLOWAT",
+ "SIOCSPGRP",
+ "SIOCSRARP",
+ "SIOCSSPPPPARAMS",
+ "SIOCSVH",
+ "SIOCSVNETID",
+ "SIOCZIFDATA",
+ "SIO_GET_EXTENSION_FUNCTION_POINTER",
+ "SIO_GET_INTERFACE_LIST",
+ "SIO_KEEPALIVE_VALS",
+ "SIO_UDP_CONNRESET",
+ "SOCK_CLOEXEC",
+ "SOCK_DCCP",
+ "SOCK_DGRAM",
+ "SOCK_FLAGS_MASK",
+ "SOCK_MAXADDRLEN",
+ "SOCK_NONBLOCK",
+ "SOCK_NOSIGPIPE",
+ "SOCK_PACKET",
+ "SOCK_RAW",
+ "SOCK_RDM",
+ "SOCK_SEQPACKET",
+ "SOCK_STREAM",
+ "SOL_AAL",
+ "SOL_ATM",
+ "SOL_DECNET",
+ "SOL_ICMPV6",
+ "SOL_IP",
+ "SOL_IPV6",
+ "SOL_IRDA",
+ "SOL_PACKET",
+ "SOL_RAW",
+ "SOL_SOCKET",
+ "SOL_TCP",
+ "SOL_X25",
+ "SOMAXCONN",
+ "SO_ACCEPTCONN",
+ "SO_ACCEPTFILTER",
+ "SO_ATTACH_FILTER",
+ "SO_BINDANY",
+ "SO_BINDTODEVICE",
+ "SO_BINTIME",
+ "SO_BROADCAST",
+ "SO_BSDCOMPAT",
+ "SO_DEBUG",
+ "SO_DETACH_FILTER",
+ "SO_DOMAIN",
+ "SO_DONTROUTE",
+ "SO_DONTTRUNC",
+ "SO_ERROR",
+ "SO_KEEPALIVE",
+ "SO_LABEL",
+ "SO_LINGER",
+ "SO_LINGER_SEC",
+ "SO_LISTENINCQLEN",
+ "SO_LISTENQLEN",
+ "SO_LISTENQLIMIT",
+ "SO_MARK",
+ "SO_NETPROC",
+ "SO_NKE",
+ "SO_NOADDRERR",
+ "SO_NOHEADER",
+ "SO_NOSIGPIPE",
+ "SO_NOTIFYCONFLICT",
+ "SO_NO_CHECK",
+ "SO_NO_DDP",
+ "SO_NO_OFFLOAD",
+ "SO_NP_EXTENSIONS",
+ "SO_NREAD",
+ "SO_NUMRCVPKT",
+ "SO_NWRITE",
+ "SO_OOBINLINE",
+ "SO_OVERFLOWED",
+ "SO_PASSCRED",
+ "SO_PASSSEC",
+ "SO_PEERCRED",
+ "SO_PEERLABEL",
+ "SO_PEERNAME",
+ "SO_PEERSEC",
+ "SO_PRIORITY",
+ "SO_PROTOCOL",
+ "SO_PROTOTYPE",
+ "SO_RANDOMPORT",
+ "SO_RCVBUF",
+ "SO_RCVBUFFORCE",
+ "SO_RCVLOWAT",
+ "SO_RCVTIMEO",
+ "SO_RESTRICTIONS",
+ "SO_RESTRICT_DENYIN",
+ "SO_RESTRICT_DENYOUT",
+ "SO_RESTRICT_DENYSET",
+ "SO_REUSEADDR",
+ "SO_REUSEPORT",
+ "SO_REUSESHAREUID",
+ "SO_RTABLE",
+ "SO_RXQ_OVFL",
+ "SO_SECURITY_AUTHENTICATION",
+ "SO_SECURITY_ENCRYPTION_NETWORK",
+ "SO_SECURITY_ENCRYPTION_TRANSPORT",
+ "SO_SETFIB",
+ "SO_SNDBUF",
+ "SO_SNDBUFFORCE",
+ "SO_SNDLOWAT",
+ "SO_SNDTIMEO",
+ "SO_SPLICE",
+ "SO_TIMESTAMP",
+ "SO_TIMESTAMPING",
+ "SO_TIMESTAMPNS",
+ "SO_TIMESTAMP_MONOTONIC",
+ "SO_TYPE",
+ "SO_UPCALLCLOSEWAIT",
+ "SO_UPDATE_ACCEPT_CONTEXT",
+ "SO_UPDATE_CONNECT_CONTEXT",
+ "SO_USELOOPBACK",
+ "SO_USER_COOKIE",
+ "SO_VENDOR",
+ "SO_WANTMORE",
+ "SO_WANTOOBFLAG",
+ "SSLExtraCertChainPolicyPara",
+ "STANDARD_RIGHTS_ALL",
+ "STANDARD_RIGHTS_EXECUTE",
+ "STANDARD_RIGHTS_READ",
+ "STANDARD_RIGHTS_REQUIRED",
+ "STANDARD_RIGHTS_WRITE",
+ "STARTF_USESHOWWINDOW",
+ "STARTF_USESTDHANDLES",
+ "STD_ERROR_HANDLE",
+ "STD_INPUT_HANDLE",
+ "STD_OUTPUT_HANDLE",
+ "SUBLANG_ENGLISH_US",
+ "SW_FORCEMINIMIZE",
+ "SW_HIDE",
+ "SW_MAXIMIZE",
+ "SW_MINIMIZE",
+ "SW_NORMAL",
+ "SW_RESTORE",
+ "SW_SHOW",
+ "SW_SHOWDEFAULT",
+ "SW_SHOWMAXIMIZED",
+ "SW_SHOWMINIMIZED",
+ "SW_SHOWMINNOACTIVE",
+ "SW_SHOWNA",
+ "SW_SHOWNOACTIVATE",
+ "SW_SHOWNORMAL",
+ "SYMBOLIC_LINK_FLAG_DIRECTORY",
+ "SYNCHRONIZE",
+ "SYSCTL_VERSION",
+ "SYSCTL_VERS_0",
+ "SYSCTL_VERS_1",
+ "SYSCTL_VERS_MASK",
+ "SYS_ABORT2",
+ "SYS_ACCEPT",
+ "SYS_ACCEPT4",
+ "SYS_ACCEPT_NOCANCEL",
+ "SYS_ACCESS",
+ "SYS_ACCESS_EXTENDED",
+ "SYS_ACCT",
+ "SYS_ADD_KEY",
+ "SYS_ADD_PROFIL",
+ "SYS_ADJFREQ",
+ "SYS_ADJTIME",
+ "SYS_ADJTIMEX",
+ "SYS_AFS_SYSCALL",
+ "SYS_AIO_CANCEL",
+ "SYS_AIO_ERROR",
+ "SYS_AIO_FSYNC",
+ "SYS_AIO_MLOCK",
+ "SYS_AIO_READ",
+ "SYS_AIO_RETURN",
+ "SYS_AIO_SUSPEND",
+ "SYS_AIO_SUSPEND_NOCANCEL",
+ "SYS_AIO_WAITCOMPLETE",
+ "SYS_AIO_WRITE",
+ "SYS_ALARM",
+ "SYS_ARCH_PRCTL",
+ "SYS_ARM_FADVISE64_64",
+ "SYS_ARM_SYNC_FILE_RANGE",
+ "SYS_ATGETMSG",
+ "SYS_ATPGETREQ",
+ "SYS_ATPGETRSP",
+ "SYS_ATPSNDREQ",
+ "SYS_ATPSNDRSP",
+ "SYS_ATPUTMSG",
+ "SYS_ATSOCKET",
+ "SYS_AUDIT",
+ "SYS_AUDITCTL",
+ "SYS_AUDITON",
+ "SYS_AUDIT_SESSION_JOIN",
+ "SYS_AUDIT_SESSION_PORT",
+ "SYS_AUDIT_SESSION_SELF",
+ "SYS_BDFLUSH",
+ "SYS_BIND",
+ "SYS_BINDAT",
+ "SYS_BREAK",
+ "SYS_BRK",
+ "SYS_BSDTHREAD_CREATE",
+ "SYS_BSDTHREAD_REGISTER",
+ "SYS_BSDTHREAD_TERMINATE",
+ "SYS_CAPGET",
+ "SYS_CAPSET",
+ "SYS_CAP_ENTER",
+ "SYS_CAP_FCNTLS_GET",
+ "SYS_CAP_FCNTLS_LIMIT",
+ "SYS_CAP_GETMODE",
+ "SYS_CAP_GETRIGHTS",
+ "SYS_CAP_IOCTLS_GET",
+ "SYS_CAP_IOCTLS_LIMIT",
+ "SYS_CAP_NEW",
+ "SYS_CAP_RIGHTS_GET",
+ "SYS_CAP_RIGHTS_LIMIT",
+ "SYS_CHDIR",
+ "SYS_CHFLAGS",
+ "SYS_CHFLAGSAT",
+ "SYS_CHMOD",
+ "SYS_CHMOD_EXTENDED",
+ "SYS_CHOWN",
+ "SYS_CHOWN32",
+ "SYS_CHROOT",
+ "SYS_CHUD",
+ "SYS_CLOCK_ADJTIME",
+ "SYS_CLOCK_GETCPUCLOCKID2",
+ "SYS_CLOCK_GETRES",
+ "SYS_CLOCK_GETTIME",
+ "SYS_CLOCK_NANOSLEEP",
+ "SYS_CLOCK_SETTIME",
+ "SYS_CLONE",
+ "SYS_CLOSE",
+ "SYS_CLOSEFROM",
+ "SYS_CLOSE_NOCANCEL",
+ "SYS_CONNECT",
+ "SYS_CONNECTAT",
+ "SYS_CONNECT_NOCANCEL",
+ "SYS_COPYFILE",
+ "SYS_CPUSET",
+ "SYS_CPUSET_GETAFFINITY",
+ "SYS_CPUSET_GETID",
+ "SYS_CPUSET_SETAFFINITY",
+ "SYS_CPUSET_SETID",
+ "SYS_CREAT",
+ "SYS_CREATE_MODULE",
+ "SYS_CSOPS",
+ "SYS_CSOPS_AUDITTOKEN",
+ "SYS_DELETE",
+ "SYS_DELETE_MODULE",
+ "SYS_DUP",
+ "SYS_DUP2",
+ "SYS_DUP3",
+ "SYS_EACCESS",
+ "SYS_EPOLL_CREATE",
+ "SYS_EPOLL_CREATE1",
+ "SYS_EPOLL_CTL",
+ "SYS_EPOLL_CTL_OLD",
+ "SYS_EPOLL_PWAIT",
+ "SYS_EPOLL_WAIT",
+ "SYS_EPOLL_WAIT_OLD",
+ "SYS_EVENTFD",
+ "SYS_EVENTFD2",
+ "SYS_EXCHANGEDATA",
+ "SYS_EXECVE",
+ "SYS_EXIT",
+ "SYS_EXIT_GROUP",
+ "SYS_EXTATTRCTL",
+ "SYS_EXTATTR_DELETE_FD",
+ "SYS_EXTATTR_DELETE_FILE",
+ "SYS_EXTATTR_DELETE_LINK",
+ "SYS_EXTATTR_GET_FD",
+ "SYS_EXTATTR_GET_FILE",
+ "SYS_EXTATTR_GET_LINK",
+ "SYS_EXTATTR_LIST_FD",
+ "SYS_EXTATTR_LIST_FILE",
+ "SYS_EXTATTR_LIST_LINK",
+ "SYS_EXTATTR_SET_FD",
+ "SYS_EXTATTR_SET_FILE",
+ "SYS_EXTATTR_SET_LINK",
+ "SYS_FACCESSAT",
+ "SYS_FADVISE64",
+ "SYS_FADVISE64_64",
+ "SYS_FALLOCATE",
+ "SYS_FANOTIFY_INIT",
+ "SYS_FANOTIFY_MARK",
+ "SYS_FCHDIR",
+ "SYS_FCHFLAGS",
+ "SYS_FCHMOD",
+ "SYS_FCHMODAT",
+ "SYS_FCHMOD_EXTENDED",
+ "SYS_FCHOWN",
+ "SYS_FCHOWN32",
+ "SYS_FCHOWNAT",
+ "SYS_FCHROOT",
+ "SYS_FCNTL",
+ "SYS_FCNTL64",
+ "SYS_FCNTL_NOCANCEL",
+ "SYS_FDATASYNC",
+ "SYS_FEXECVE",
+ "SYS_FFCLOCK_GETCOUNTER",
+ "SYS_FFCLOCK_GETESTIMATE",
+ "SYS_FFCLOCK_SETESTIMATE",
+ "SYS_FFSCTL",
+ "SYS_FGETATTRLIST",
+ "SYS_FGETXATTR",
+ "SYS_FHOPEN",
+ "SYS_FHSTAT",
+ "SYS_FHSTATFS",
+ "SYS_FILEPORT_MAKEFD",
+ "SYS_FILEPORT_MAKEPORT",
+ "SYS_FKTRACE",
+ "SYS_FLISTXATTR",
+ "SYS_FLOCK",
+ "SYS_FORK",
+ "SYS_FPATHCONF",
+ "SYS_FREEBSD6_FTRUNCATE",
+ "SYS_FREEBSD6_LSEEK",
+ "SYS_FREEBSD6_MMAP",
+ "SYS_FREEBSD6_PREAD",
+ "SYS_FREEBSD6_PWRITE",
+ "SYS_FREEBSD6_TRUNCATE",
+ "SYS_FREMOVEXATTR",
+ "SYS_FSCTL",
+ "SYS_FSETATTRLIST",
+ "SYS_FSETXATTR",
+ "SYS_FSGETPATH",
+ "SYS_FSTAT",
+ "SYS_FSTAT64",
+ "SYS_FSTAT64_EXTENDED",
+ "SYS_FSTATAT",
+ "SYS_FSTATAT64",
+ "SYS_FSTATFS",
+ "SYS_FSTATFS64",
+ "SYS_FSTATV",
+ "SYS_FSTATVFS1",
+ "SYS_FSTAT_EXTENDED",
+ "SYS_FSYNC",
+ "SYS_FSYNC_NOCANCEL",
+ "SYS_FSYNC_RANGE",
+ "SYS_FTIME",
+ "SYS_FTRUNCATE",
+ "SYS_FTRUNCATE64",
+ "SYS_FUTEX",
+ "SYS_FUTIMENS",
+ "SYS_FUTIMES",
+ "SYS_FUTIMESAT",
+ "SYS_GETATTRLIST",
+ "SYS_GETAUDIT",
+ "SYS_GETAUDIT_ADDR",
+ "SYS_GETAUID",
+ "SYS_GETCONTEXT",
+ "SYS_GETCPU",
+ "SYS_GETCWD",
+ "SYS_GETDENTS",
+ "SYS_GETDENTS64",
+ "SYS_GETDIRENTRIES",
+ "SYS_GETDIRENTRIES64",
+ "SYS_GETDIRENTRIESATTR",
+ "SYS_GETDTABLECOUNT",
+ "SYS_GETDTABLESIZE",
+ "SYS_GETEGID",
+ "SYS_GETEGID32",
+ "SYS_GETEUID",
+ "SYS_GETEUID32",
+ "SYS_GETFH",
+ "SYS_GETFSSTAT",
+ "SYS_GETFSSTAT64",
+ "SYS_GETGID",
+ "SYS_GETGID32",
+ "SYS_GETGROUPS",
+ "SYS_GETGROUPS32",
+ "SYS_GETHOSTUUID",
+ "SYS_GETITIMER",
+ "SYS_GETLCID",
+ "SYS_GETLOGIN",
+ "SYS_GETLOGINCLASS",
+ "SYS_GETPEERNAME",
+ "SYS_GETPGID",
+ "SYS_GETPGRP",
+ "SYS_GETPID",
+ "SYS_GETPMSG",
+ "SYS_GETPPID",
+ "SYS_GETPRIORITY",
+ "SYS_GETRESGID",
+ "SYS_GETRESGID32",
+ "SYS_GETRESUID",
+ "SYS_GETRESUID32",
+ "SYS_GETRLIMIT",
+ "SYS_GETRTABLE",
+ "SYS_GETRUSAGE",
+ "SYS_GETSGROUPS",
+ "SYS_GETSID",
+ "SYS_GETSOCKNAME",
+ "SYS_GETSOCKOPT",
+ "SYS_GETTHRID",
+ "SYS_GETTID",
+ "SYS_GETTIMEOFDAY",
+ "SYS_GETUID",
+ "SYS_GETUID32",
+ "SYS_GETVFSSTAT",
+ "SYS_GETWGROUPS",
+ "SYS_GETXATTR",
+ "SYS_GET_KERNEL_SYMS",
+ "SYS_GET_MEMPOLICY",
+ "SYS_GET_ROBUST_LIST",
+ "SYS_GET_THREAD_AREA",
+ "SYS_GSSD_SYSCALL",
+ "SYS_GTTY",
+ "SYS_IDENTITYSVC",
+ "SYS_IDLE",
+ "SYS_INITGROUPS",
+ "SYS_INIT_MODULE",
+ "SYS_INOTIFY_ADD_WATCH",
+ "SYS_INOTIFY_INIT",
+ "SYS_INOTIFY_INIT1",
+ "SYS_INOTIFY_RM_WATCH",
+ "SYS_IOCTL",
+ "SYS_IOPERM",
+ "SYS_IOPL",
+ "SYS_IOPOLICYSYS",
+ "SYS_IOPRIO_GET",
+ "SYS_IOPRIO_SET",
+ "SYS_IO_CANCEL",
+ "SYS_IO_DESTROY",
+ "SYS_IO_GETEVENTS",
+ "SYS_IO_SETUP",
+ "SYS_IO_SUBMIT",
+ "SYS_IPC",
+ "SYS_ISSETUGID",
+ "SYS_JAIL",
+ "SYS_JAIL_ATTACH",
+ "SYS_JAIL_GET",
+ "SYS_JAIL_REMOVE",
+ "SYS_JAIL_SET",
+ "SYS_KAS_INFO",
+ "SYS_KDEBUG_TRACE",
+ "SYS_KENV",
+ "SYS_KEVENT",
+ "SYS_KEVENT64",
+ "SYS_KEXEC_LOAD",
+ "SYS_KEYCTL",
+ "SYS_KILL",
+ "SYS_KLDFIND",
+ "SYS_KLDFIRSTMOD",
+ "SYS_KLDLOAD",
+ "SYS_KLDNEXT",
+ "SYS_KLDSTAT",
+ "SYS_KLDSYM",
+ "SYS_KLDUNLOAD",
+ "SYS_KLDUNLOADF",
+ "SYS_KMQ_NOTIFY",
+ "SYS_KMQ_OPEN",
+ "SYS_KMQ_SETATTR",
+ "SYS_KMQ_TIMEDRECEIVE",
+ "SYS_KMQ_TIMEDSEND",
+ "SYS_KMQ_UNLINK",
+ "SYS_KQUEUE",
+ "SYS_KQUEUE1",
+ "SYS_KSEM_CLOSE",
+ "SYS_KSEM_DESTROY",
+ "SYS_KSEM_GETVALUE",
+ "SYS_KSEM_INIT",
+ "SYS_KSEM_OPEN",
+ "SYS_KSEM_POST",
+ "SYS_KSEM_TIMEDWAIT",
+ "SYS_KSEM_TRYWAIT",
+ "SYS_KSEM_UNLINK",
+ "SYS_KSEM_WAIT",
+ "SYS_KTIMER_CREATE",
+ "SYS_KTIMER_DELETE",
+ "SYS_KTIMER_GETOVERRUN",
+ "SYS_KTIMER_GETTIME",
+ "SYS_KTIMER_SETTIME",
+ "SYS_KTRACE",
+ "SYS_LCHFLAGS",
+ "SYS_LCHMOD",
+ "SYS_LCHOWN",
+ "SYS_LCHOWN32",
+ "SYS_LEDGER",
+ "SYS_LGETFH",
+ "SYS_LGETXATTR",
+ "SYS_LINK",
+ "SYS_LINKAT",
+ "SYS_LIO_LISTIO",
+ "SYS_LISTEN",
+ "SYS_LISTXATTR",
+ "SYS_LLISTXATTR",
+ "SYS_LOCK",
+ "SYS_LOOKUP_DCOOKIE",
+ "SYS_LPATHCONF",
+ "SYS_LREMOVEXATTR",
+ "SYS_LSEEK",
+ "SYS_LSETXATTR",
+ "SYS_LSTAT",
+ "SYS_LSTAT64",
+ "SYS_LSTAT64_EXTENDED",
+ "SYS_LSTATV",
+ "SYS_LSTAT_EXTENDED",
+ "SYS_LUTIMES",
+ "SYS_MAC_SYSCALL",
+ "SYS_MADVISE",
+ "SYS_MADVISE1",
+ "SYS_MAXSYSCALL",
+ "SYS_MBIND",
+ "SYS_MIGRATE_PAGES",
+ "SYS_MINCORE",
+ "SYS_MINHERIT",
+ "SYS_MKCOMPLEX",
+ "SYS_MKDIR",
+ "SYS_MKDIRAT",
+ "SYS_MKDIR_EXTENDED",
+ "SYS_MKFIFO",
+ "SYS_MKFIFOAT",
+ "SYS_MKFIFO_EXTENDED",
+ "SYS_MKNOD",
+ "SYS_MKNODAT",
+ "SYS_MLOCK",
+ "SYS_MLOCKALL",
+ "SYS_MMAP",
+ "SYS_MMAP2",
+ "SYS_MODCTL",
+ "SYS_MODFIND",
+ "SYS_MODFNEXT",
+ "SYS_MODIFY_LDT",
+ "SYS_MODNEXT",
+ "SYS_MODSTAT",
+ "SYS_MODWATCH",
+ "SYS_MOUNT",
+ "SYS_MOVE_PAGES",
+ "SYS_MPROTECT",
+ "SYS_MPX",
+ "SYS_MQUERY",
+ "SYS_MQ_GETSETATTR",
+ "SYS_MQ_NOTIFY",
+ "SYS_MQ_OPEN",
+ "SYS_MQ_TIMEDRECEIVE",
+ "SYS_MQ_TIMEDSEND",
+ "SYS_MQ_UNLINK",
+ "SYS_MREMAP",
+ "SYS_MSGCTL",
+ "SYS_MSGGET",
+ "SYS_MSGRCV",
+ "SYS_MSGRCV_NOCANCEL",
+ "SYS_MSGSND",
+ "SYS_MSGSND_NOCANCEL",
+ "SYS_MSGSYS",
+ "SYS_MSYNC",
+ "SYS_MSYNC_NOCANCEL",
+ "SYS_MUNLOCK",
+ "SYS_MUNLOCKALL",
+ "SYS_MUNMAP",
+ "SYS_NAME_TO_HANDLE_AT",
+ "SYS_NANOSLEEP",
+ "SYS_NEWFSTATAT",
+ "SYS_NFSCLNT",
+ "SYS_NFSSERVCTL",
+ "SYS_NFSSVC",
+ "SYS_NFSTAT",
+ "SYS_NICE",
+ "SYS_NLM_SYSCALL",
+ "SYS_NLSTAT",
+ "SYS_NMOUNT",
+ "SYS_NSTAT",
+ "SYS_NTP_ADJTIME",
+ "SYS_NTP_GETTIME",
+ "SYS_NUMA_GETAFFINITY",
+ "SYS_NUMA_SETAFFINITY",
+ "SYS_OABI_SYSCALL_BASE",
+ "SYS_OBREAK",
+ "SYS_OLDFSTAT",
+ "SYS_OLDLSTAT",
+ "SYS_OLDOLDUNAME",
+ "SYS_OLDSTAT",
+ "SYS_OLDUNAME",
+ "SYS_OPEN",
+ "SYS_OPENAT",
+ "SYS_OPENBSD_POLL",
+ "SYS_OPEN_BY_HANDLE_AT",
+ "SYS_OPEN_DPROTECTED_NP",
+ "SYS_OPEN_EXTENDED",
+ "SYS_OPEN_NOCANCEL",
+ "SYS_OVADVISE",
+ "SYS_PACCEPT",
+ "SYS_PATHCONF",
+ "SYS_PAUSE",
+ "SYS_PCICONFIG_IOBASE",
+ "SYS_PCICONFIG_READ",
+ "SYS_PCICONFIG_WRITE",
+ "SYS_PDFORK",
+ "SYS_PDGETPID",
+ "SYS_PDKILL",
+ "SYS_PERF_EVENT_OPEN",
+ "SYS_PERSONALITY",
+ "SYS_PID_HIBERNATE",
+ "SYS_PID_RESUME",
+ "SYS_PID_SHUTDOWN_SOCKETS",
+ "SYS_PID_SUSPEND",
+ "SYS_PIPE",
+ "SYS_PIPE2",
+ "SYS_PIVOT_ROOT",
+ "SYS_PMC_CONTROL",
+ "SYS_PMC_GET_INFO",
+ "SYS_POLL",
+ "SYS_POLLTS",
+ "SYS_POLL_NOCANCEL",
+ "SYS_POSIX_FADVISE",
+ "SYS_POSIX_FALLOCATE",
+ "SYS_POSIX_OPENPT",
+ "SYS_POSIX_SPAWN",
+ "SYS_PPOLL",
+ "SYS_PRCTL",
+ "SYS_PREAD",
+ "SYS_PREAD64",
+ "SYS_PREADV",
+ "SYS_PREAD_NOCANCEL",
+ "SYS_PRLIMIT64",
+ "SYS_PROCCTL",
+ "SYS_PROCESS_POLICY",
+ "SYS_PROCESS_VM_READV",
+ "SYS_PROCESS_VM_WRITEV",
+ "SYS_PROC_INFO",
+ "SYS_PROF",
+ "SYS_PROFIL",
+ "SYS_PSELECT",
+ "SYS_PSELECT6",
+ "SYS_PSET_ASSIGN",
+ "SYS_PSET_CREATE",
+ "SYS_PSET_DESTROY",
+ "SYS_PSYNCH_CVBROAD",
+ "SYS_PSYNCH_CVCLRPREPOST",
+ "SYS_PSYNCH_CVSIGNAL",
+ "SYS_PSYNCH_CVWAIT",
+ "SYS_PSYNCH_MUTEXDROP",
+ "SYS_PSYNCH_MUTEXWAIT",
+ "SYS_PSYNCH_RW_DOWNGRADE",
+ "SYS_PSYNCH_RW_LONGRDLOCK",
+ "SYS_PSYNCH_RW_RDLOCK",
+ "SYS_PSYNCH_RW_UNLOCK",
+ "SYS_PSYNCH_RW_UNLOCK2",
+ "SYS_PSYNCH_RW_UPGRADE",
+ "SYS_PSYNCH_RW_WRLOCK",
+ "SYS_PSYNCH_RW_YIELDWRLOCK",
+ "SYS_PTRACE",
+ "SYS_PUTPMSG",
+ "SYS_PWRITE",
+ "SYS_PWRITE64",
+ "SYS_PWRITEV",
+ "SYS_PWRITE_NOCANCEL",
+ "SYS_QUERY_MODULE",
+ "SYS_QUOTACTL",
+ "SYS_RASCTL",
+ "SYS_RCTL_ADD_RULE",
+ "SYS_RCTL_GET_LIMITS",
+ "SYS_RCTL_GET_RACCT",
+ "SYS_RCTL_GET_RULES",
+ "SYS_RCTL_REMOVE_RULE",
+ "SYS_READ",
+ "SYS_READAHEAD",
+ "SYS_READDIR",
+ "SYS_READLINK",
+ "SYS_READLINKAT",
+ "SYS_READV",
+ "SYS_READV_NOCANCEL",
+ "SYS_READ_NOCANCEL",
+ "SYS_REBOOT",
+ "SYS_RECV",
+ "SYS_RECVFROM",
+ "SYS_RECVFROM_NOCANCEL",
+ "SYS_RECVMMSG",
+ "SYS_RECVMSG",
+ "SYS_RECVMSG_NOCANCEL",
+ "SYS_REMAP_FILE_PAGES",
+ "SYS_REMOVEXATTR",
+ "SYS_RENAME",
+ "SYS_RENAMEAT",
+ "SYS_REQUEST_KEY",
+ "SYS_RESTART_SYSCALL",
+ "SYS_REVOKE",
+ "SYS_RFORK",
+ "SYS_RMDIR",
+ "SYS_RTPRIO",
+ "SYS_RTPRIO_THREAD",
+ "SYS_RT_SIGACTION",
+ "SYS_RT_SIGPENDING",
+ "SYS_RT_SIGPROCMASK",
+ "SYS_RT_SIGQUEUEINFO",
+ "SYS_RT_SIGRETURN",
+ "SYS_RT_SIGSUSPEND",
+ "SYS_RT_SIGTIMEDWAIT",
+ "SYS_RT_TGSIGQUEUEINFO",
+ "SYS_SBRK",
+ "SYS_SCHED_GETAFFINITY",
+ "SYS_SCHED_GETPARAM",
+ "SYS_SCHED_GETSCHEDULER",
+ "SYS_SCHED_GET_PRIORITY_MAX",
+ "SYS_SCHED_GET_PRIORITY_MIN",
+ "SYS_SCHED_RR_GET_INTERVAL",
+ "SYS_SCHED_SETAFFINITY",
+ "SYS_SCHED_SETPARAM",
+ "SYS_SCHED_SETSCHEDULER",
+ "SYS_SCHED_YIELD",
+ "SYS_SCTP_GENERIC_RECVMSG",
+ "SYS_SCTP_GENERIC_SENDMSG",
+ "SYS_SCTP_GENERIC_SENDMSG_IOV",
+ "SYS_SCTP_PEELOFF",
+ "SYS_SEARCHFS",
+ "SYS_SECURITY",
+ "SYS_SELECT",
+ "SYS_SELECT_NOCANCEL",
+ "SYS_SEMCONFIG",
+ "SYS_SEMCTL",
+ "SYS_SEMGET",
+ "SYS_SEMOP",
+ "SYS_SEMSYS",
+ "SYS_SEMTIMEDOP",
+ "SYS_SEM_CLOSE",
+ "SYS_SEM_DESTROY",
+ "SYS_SEM_GETVALUE",
+ "SYS_SEM_INIT",
+ "SYS_SEM_OPEN",
+ "SYS_SEM_POST",
+ "SYS_SEM_TRYWAIT",
+ "SYS_SEM_UNLINK",
+ "SYS_SEM_WAIT",
+ "SYS_SEM_WAIT_NOCANCEL",
+ "SYS_SEND",
+ "SYS_SENDFILE",
+ "SYS_SENDFILE64",
+ "SYS_SENDMMSG",
+ "SYS_SENDMSG",
+ "SYS_SENDMSG_NOCANCEL",
+ "SYS_SENDTO",
+ "SYS_SENDTO_NOCANCEL",
+ "SYS_SETATTRLIST",
+ "SYS_SETAUDIT",
+ "SYS_SETAUDIT_ADDR",
+ "SYS_SETAUID",
+ "SYS_SETCONTEXT",
+ "SYS_SETDOMAINNAME",
+ "SYS_SETEGID",
+ "SYS_SETEUID",
+ "SYS_SETFIB",
+ "SYS_SETFSGID",
+ "SYS_SETFSGID32",
+ "SYS_SETFSUID",
+ "SYS_SETFSUID32",
+ "SYS_SETGID",
+ "SYS_SETGID32",
+ "SYS_SETGROUPS",
+ "SYS_SETGROUPS32",
+ "SYS_SETHOSTNAME",
+ "SYS_SETITIMER",
+ "SYS_SETLCID",
+ "SYS_SETLOGIN",
+ "SYS_SETLOGINCLASS",
+ "SYS_SETNS",
+ "SYS_SETPGID",
+ "SYS_SETPRIORITY",
+ "SYS_SETPRIVEXEC",
+ "SYS_SETREGID",
+ "SYS_SETREGID32",
+ "SYS_SETRESGID",
+ "SYS_SETRESGID32",
+ "SYS_SETRESUID",
+ "SYS_SETRESUID32",
+ "SYS_SETREUID",
+ "SYS_SETREUID32",
+ "SYS_SETRLIMIT",
+ "SYS_SETRTABLE",
+ "SYS_SETSGROUPS",
+ "SYS_SETSID",
+ "SYS_SETSOCKOPT",
+ "SYS_SETTID",
+ "SYS_SETTID_WITH_PID",
+ "SYS_SETTIMEOFDAY",
+ "SYS_SETUID",
+ "SYS_SETUID32",
+ "SYS_SETWGROUPS",
+ "SYS_SETXATTR",
+ "SYS_SET_MEMPOLICY",
+ "SYS_SET_ROBUST_LIST",
+ "SYS_SET_THREAD_AREA",
+ "SYS_SET_TID_ADDRESS",
+ "SYS_SGETMASK",
+ "SYS_SHARED_REGION_CHECK_NP",
+ "SYS_SHARED_REGION_MAP_AND_SLIDE_NP",
+ "SYS_SHMAT",
+ "SYS_SHMCTL",
+ "SYS_SHMDT",
+ "SYS_SHMGET",
+ "SYS_SHMSYS",
+ "SYS_SHM_OPEN",
+ "SYS_SHM_UNLINK",
+ "SYS_SHUTDOWN",
+ "SYS_SIGACTION",
+ "SYS_SIGALTSTACK",
+ "SYS_SIGNAL",
+ "SYS_SIGNALFD",
+ "SYS_SIGNALFD4",
+ "SYS_SIGPENDING",
+ "SYS_SIGPROCMASK",
+ "SYS_SIGQUEUE",
+ "SYS_SIGQUEUEINFO",
+ "SYS_SIGRETURN",
+ "SYS_SIGSUSPEND",
+ "SYS_SIGSUSPEND_NOCANCEL",
+ "SYS_SIGTIMEDWAIT",
+ "SYS_SIGWAIT",
+ "SYS_SIGWAITINFO",
+ "SYS_SOCKET",
+ "SYS_SOCKETCALL",
+ "SYS_SOCKETPAIR",
+ "SYS_SPLICE",
+ "SYS_SSETMASK",
+ "SYS_SSTK",
+ "SYS_STACK_SNAPSHOT",
+ "SYS_STAT",
+ "SYS_STAT64",
+ "SYS_STAT64_EXTENDED",
+ "SYS_STATFS",
+ "SYS_STATFS64",
+ "SYS_STATV",
+ "SYS_STATVFS1",
+ "SYS_STAT_EXTENDED",
+ "SYS_STIME",
+ "SYS_STTY",
+ "SYS_SWAPCONTEXT",
+ "SYS_SWAPCTL",
+ "SYS_SWAPOFF",
+ "SYS_SWAPON",
+ "SYS_SYMLINK",
+ "SYS_SYMLINKAT",
+ "SYS_SYNC",
+ "SYS_SYNCFS",
+ "SYS_SYNC_FILE_RANGE",
+ "SYS_SYSARCH",
+ "SYS_SYSCALL",
+ "SYS_SYSCALL_BASE",
+ "SYS_SYSFS",
+ "SYS_SYSINFO",
+ "SYS_SYSLOG",
+ "SYS_TEE",
+ "SYS_TGKILL",
+ "SYS_THREAD_SELFID",
+ "SYS_THR_CREATE",
+ "SYS_THR_EXIT",
+ "SYS_THR_KILL",
+ "SYS_THR_KILL2",
+ "SYS_THR_NEW",
+ "SYS_THR_SELF",
+ "SYS_THR_SET_NAME",
+ "SYS_THR_SUSPEND",
+ "SYS_THR_WAKE",
+ "SYS_TIME",
+ "SYS_TIMERFD_CREATE",
+ "SYS_TIMERFD_GETTIME",
+ "SYS_TIMERFD_SETTIME",
+ "SYS_TIMER_CREATE",
+ "SYS_TIMER_DELETE",
+ "SYS_TIMER_GETOVERRUN",
+ "SYS_TIMER_GETTIME",
+ "SYS_TIMER_SETTIME",
+ "SYS_TIMES",
+ "SYS_TKILL",
+ "SYS_TRUNCATE",
+ "SYS_TRUNCATE64",
+ "SYS_TUXCALL",
+ "SYS_UGETRLIMIT",
+ "SYS_ULIMIT",
+ "SYS_UMASK",
+ "SYS_UMASK_EXTENDED",
+ "SYS_UMOUNT",
+ "SYS_UMOUNT2",
+ "SYS_UNAME",
+ "SYS_UNDELETE",
+ "SYS_UNLINK",
+ "SYS_UNLINKAT",
+ "SYS_UNMOUNT",
+ "SYS_UNSHARE",
+ "SYS_USELIB",
+ "SYS_USTAT",
+ "SYS_UTIME",
+ "SYS_UTIMENSAT",
+ "SYS_UTIMES",
+ "SYS_UTRACE",
+ "SYS_UUIDGEN",
+ "SYS_VADVISE",
+ "SYS_VFORK",
+ "SYS_VHANGUP",
+ "SYS_VM86",
+ "SYS_VM86OLD",
+ "SYS_VMSPLICE",
+ "SYS_VM_PRESSURE_MONITOR",
+ "SYS_VSERVER",
+ "SYS_WAIT4",
+ "SYS_WAIT4_NOCANCEL",
+ "SYS_WAIT6",
+ "SYS_WAITEVENT",
+ "SYS_WAITID",
+ "SYS_WAITID_NOCANCEL",
+ "SYS_WAITPID",
+ "SYS_WATCHEVENT",
+ "SYS_WORKQ_KERNRETURN",
+ "SYS_WORKQ_OPEN",
+ "SYS_WRITE",
+ "SYS_WRITEV",
+ "SYS_WRITEV_NOCANCEL",
+ "SYS_WRITE_NOCANCEL",
+ "SYS_YIELD",
+ "SYS__LLSEEK",
+ "SYS__LWP_CONTINUE",
+ "SYS__LWP_CREATE",
+ "SYS__LWP_CTL",
+ "SYS__LWP_DETACH",
+ "SYS__LWP_EXIT",
+ "SYS__LWP_GETNAME",
+ "SYS__LWP_GETPRIVATE",
+ "SYS__LWP_KILL",
+ "SYS__LWP_PARK",
+ "SYS__LWP_SELF",
+ "SYS__LWP_SETNAME",
+ "SYS__LWP_SETPRIVATE",
+ "SYS__LWP_SUSPEND",
+ "SYS__LWP_UNPARK",
+ "SYS__LWP_UNPARK_ALL",
+ "SYS__LWP_WAIT",
+ "SYS__LWP_WAKEUP",
+ "SYS__NEWSELECT",
+ "SYS__PSET_BIND",
+ "SYS__SCHED_GETAFFINITY",
+ "SYS__SCHED_GETPARAM",
+ "SYS__SCHED_SETAFFINITY",
+ "SYS__SCHED_SETPARAM",
+ "SYS__SYSCTL",
+ "SYS__UMTX_LOCK",
+ "SYS__UMTX_OP",
+ "SYS__UMTX_UNLOCK",
+ "SYS___ACL_ACLCHECK_FD",
+ "SYS___ACL_ACLCHECK_FILE",
+ "SYS___ACL_ACLCHECK_LINK",
+ "SYS___ACL_DELETE_FD",
+ "SYS___ACL_DELETE_FILE",
+ "SYS___ACL_DELETE_LINK",
+ "SYS___ACL_GET_FD",
+ "SYS___ACL_GET_FILE",
+ "SYS___ACL_GET_LINK",
+ "SYS___ACL_SET_FD",
+ "SYS___ACL_SET_FILE",
+ "SYS___ACL_SET_LINK",
+ "SYS___CAP_RIGHTS_GET",
+ "SYS___CLONE",
+ "SYS___DISABLE_THREADSIGNAL",
+ "SYS___GETCWD",
+ "SYS___GETLOGIN",
+ "SYS___GET_TCB",
+ "SYS___MAC_EXECVE",
+ "SYS___MAC_GETFSSTAT",
+ "SYS___MAC_GET_FD",
+ "SYS___MAC_GET_FILE",
+ "SYS___MAC_GET_LCID",
+ "SYS___MAC_GET_LCTX",
+ "SYS___MAC_GET_LINK",
+ "SYS___MAC_GET_MOUNT",
+ "SYS___MAC_GET_PID",
+ "SYS___MAC_GET_PROC",
+ "SYS___MAC_MOUNT",
+ "SYS___MAC_SET_FD",
+ "SYS___MAC_SET_FILE",
+ "SYS___MAC_SET_LCTX",
+ "SYS___MAC_SET_LINK",
+ "SYS___MAC_SET_PROC",
+ "SYS___MAC_SYSCALL",
+ "SYS___OLD_SEMWAIT_SIGNAL",
+ "SYS___OLD_SEMWAIT_SIGNAL_NOCANCEL",
+ "SYS___POSIX_CHOWN",
+ "SYS___POSIX_FCHOWN",
+ "SYS___POSIX_LCHOWN",
+ "SYS___POSIX_RENAME",
+ "SYS___PTHREAD_CANCELED",
+ "SYS___PTHREAD_CHDIR",
+ "SYS___PTHREAD_FCHDIR",
+ "SYS___PTHREAD_KILL",
+ "SYS___PTHREAD_MARKCANCEL",
+ "SYS___PTHREAD_SIGMASK",
+ "SYS___QUOTACTL",
+ "SYS___SEMCTL",
+ "SYS___SEMWAIT_SIGNAL",
+ "SYS___SEMWAIT_SIGNAL_NOCANCEL",
+ "SYS___SETLOGIN",
+ "SYS___SETUGID",
+ "SYS___SET_TCB",
+ "SYS___SIGACTION_SIGTRAMP",
+ "SYS___SIGTIMEDWAIT",
+ "SYS___SIGWAIT",
+ "SYS___SIGWAIT_NOCANCEL",
+ "SYS___SYSCTL",
+ "SYS___TFORK",
+ "SYS___THREXIT",
+ "SYS___THRSIGDIVERT",
+ "SYS___THRSLEEP",
+ "SYS___THRWAKEUP",
+ "S_ARCH1",
+ "S_ARCH2",
+ "S_BLKSIZE",
+ "S_IEXEC",
+ "S_IFBLK",
+ "S_IFCHR",
+ "S_IFDIR",
+ "S_IFIFO",
+ "S_IFLNK",
+ "S_IFMT",
+ "S_IFREG",
+ "S_IFSOCK",
+ "S_IFWHT",
+ "S_IREAD",
+ "S_IRGRP",
+ "S_IROTH",
+ "S_IRUSR",
+ "S_IRWXG",
+ "S_IRWXO",
+ "S_IRWXU",
+ "S_ISGID",
+ "S_ISTXT",
+ "S_ISUID",
+ "S_ISVTX",
+ "S_IWGRP",
+ "S_IWOTH",
+ "S_IWRITE",
+ "S_IWUSR",
+ "S_IXGRP",
+ "S_IXOTH",
+ "S_IXUSR",
+ "S_LOGIN_SET",
+ "SecurityAttributes",
+ "Seek",
+ "Select",
+ "Sendfile",
+ "Sendmsg",
+ "SendmsgN",
+ "Sendto",
+ "Servent",
+ "SetBpf",
+ "SetBpfBuflen",
+ "SetBpfDatalink",
+ "SetBpfHeadercmpl",
+ "SetBpfImmediate",
+ "SetBpfInterface",
+ "SetBpfPromisc",
+ "SetBpfTimeout",
+ "SetCurrentDirectory",
+ "SetEndOfFile",
+ "SetEnvironmentVariable",
+ "SetFileAttributes",
+ "SetFileCompletionNotificationModes",
+ "SetFilePointer",
+ "SetFileTime",
+ "SetHandleInformation",
+ "SetKevent",
+ "SetLsfPromisc",
+ "SetNonblock",
+ "Setdomainname",
+ "Setegid",
+ "Setenv",
+ "Seteuid",
+ "Setfsgid",
+ "Setfsuid",
+ "Setgid",
+ "Setgroups",
+ "Sethostname",
+ "Setlogin",
+ "Setpgid",
+ "Setpriority",
+ "Setprivexec",
+ "Setregid",
+ "Setresgid",
+ "Setresuid",
+ "Setreuid",
+ "Setrlimit",
+ "Setsid",
+ "Setsockopt",
+ "SetsockoptByte",
+ "SetsockoptICMPv6Filter",
+ "SetsockoptIPMreq",
+ "SetsockoptIPMreqn",
+ "SetsockoptIPv6Mreq",
+ "SetsockoptInet4Addr",
+ "SetsockoptInt",
+ "SetsockoptLinger",
+ "SetsockoptString",
+ "SetsockoptTimeval",
+ "Settimeofday",
+ "Setuid",
+ "Setxattr",
+ "Shutdown",
+ "SidTypeAlias",
+ "SidTypeComputer",
+ "SidTypeDeletedAccount",
+ "SidTypeDomain",
+ "SidTypeGroup",
+ "SidTypeInvalid",
+ "SidTypeLabel",
+ "SidTypeUnknown",
+ "SidTypeUser",
+ "SidTypeWellKnownGroup",
+ "Signal",
+ "SizeofBpfHdr",
+ "SizeofBpfInsn",
+ "SizeofBpfProgram",
+ "SizeofBpfStat",
+ "SizeofBpfVersion",
+ "SizeofBpfZbuf",
+ "SizeofBpfZbufHeader",
+ "SizeofCmsghdr",
+ "SizeofICMPv6Filter",
+ "SizeofIPMreq",
+ "SizeofIPMreqn",
+ "SizeofIPv6MTUInfo",
+ "SizeofIPv6Mreq",
+ "SizeofIfAddrmsg",
+ "SizeofIfAnnounceMsghdr",
+ "SizeofIfData",
+ "SizeofIfInfomsg",
+ "SizeofIfMsghdr",
+ "SizeofIfaMsghdr",
+ "SizeofIfmaMsghdr",
+ "SizeofIfmaMsghdr2",
+ "SizeofInet4Pktinfo",
+ "SizeofInet6Pktinfo",
+ "SizeofInotifyEvent",
+ "SizeofLinger",
+ "SizeofMsghdr",
+ "SizeofNlAttr",
+ "SizeofNlMsgerr",
+ "SizeofNlMsghdr",
+ "SizeofRtAttr",
+ "SizeofRtGenmsg",
+ "SizeofRtMetrics",
+ "SizeofRtMsg",
+ "SizeofRtMsghdr",
+ "SizeofRtNexthop",
+ "SizeofSockFilter",
+ "SizeofSockFprog",
+ "SizeofSockaddrAny",
+ "SizeofSockaddrDatalink",
+ "SizeofSockaddrInet4",
+ "SizeofSockaddrInet6",
+ "SizeofSockaddrLinklayer",
+ "SizeofSockaddrNetlink",
+ "SizeofSockaddrUnix",
+ "SizeofTCPInfo",
+ "SizeofUcred",
+ "SlicePtrFromStrings",
+ "SockFilter",
+ "SockFprog",
+ "Sockaddr",
+ "SockaddrDatalink",
+ "SockaddrGen",
+ "SockaddrInet4",
+ "SockaddrInet6",
+ "SockaddrLinklayer",
+ "SockaddrNetlink",
+ "SockaddrUnix",
+ "Socket",
+ "SocketControlMessage",
+ "SocketDisableIPv6",
+ "Socketpair",
+ "Splice",
+ "StartProcess",
+ "StartupInfo",
+ "Stat",
+ "Stat_t",
+ "Statfs",
+ "Statfs_t",
+ "Stderr",
+ "Stdin",
+ "Stdout",
+ "StringBytePtr",
+ "StringByteSlice",
+ "StringSlicePtr",
+ "StringToSid",
+ "StringToUTF16",
+ "StringToUTF16Ptr",
+ "Symlink",
+ "Sync",
+ "SyncFileRange",
+ "SysProcAttr",
+ "SysProcIDMap",
+ "Syscall",
+ "Syscall12",
+ "Syscall15",
+ "Syscall18",
+ "Syscall6",
+ "Syscall9",
+ "SyscallN",
+ "Sysctl",
+ "SysctlUint32",
+ "Sysctlnode",
+ "Sysinfo",
+ "Sysinfo_t",
+ "Systemtime",
+ "TCGETS",
+ "TCIFLUSH",
+ "TCIOFLUSH",
+ "TCOFLUSH",
+ "TCPInfo",
+ "TCPKeepalive",
+ "TCP_CA_NAME_MAX",
+ "TCP_CONGCTL",
+ "TCP_CONGESTION",
+ "TCP_CONNECTIONTIMEOUT",
+ "TCP_CORK",
+ "TCP_DEFER_ACCEPT",
+ "TCP_ENABLE_ECN",
+ "TCP_INFO",
+ "TCP_KEEPALIVE",
+ "TCP_KEEPCNT",
+ "TCP_KEEPIDLE",
+ "TCP_KEEPINIT",
+ "TCP_KEEPINTVL",
+ "TCP_LINGER2",
+ "TCP_MAXBURST",
+ "TCP_MAXHLEN",
+ "TCP_MAXOLEN",
+ "TCP_MAXSEG",
+ "TCP_MAXWIN",
+ "TCP_MAX_SACK",
+ "TCP_MAX_WINSHIFT",
+ "TCP_MD5SIG",
+ "TCP_MD5SIG_MAXKEYLEN",
+ "TCP_MINMSS",
+ "TCP_MINMSSOVERLOAD",
+ "TCP_MSS",
+ "TCP_NODELAY",
+ "TCP_NOOPT",
+ "TCP_NOPUSH",
+ "TCP_NOTSENT_LOWAT",
+ "TCP_NSTATES",
+ "TCP_QUICKACK",
+ "TCP_RXT_CONNDROPTIME",
+ "TCP_RXT_FINDROP",
+ "TCP_SACK_ENABLE",
+ "TCP_SENDMOREACKS",
+ "TCP_SYNCNT",
+ "TCP_VENDOR",
+ "TCP_WINDOW_CLAMP",
+ "TCSAFLUSH",
+ "TCSETS",
+ "TF_DISCONNECT",
+ "TF_REUSE_SOCKET",
+ "TF_USE_DEFAULT_WORKER",
+ "TF_USE_KERNEL_APC",
+ "TF_USE_SYSTEM_THREAD",
+ "TF_WRITE_BEHIND",
+ "TH32CS_INHERIT",
+ "TH32CS_SNAPALL",
+ "TH32CS_SNAPHEAPLIST",
+ "TH32CS_SNAPMODULE",
+ "TH32CS_SNAPMODULE32",
+ "TH32CS_SNAPPROCESS",
+ "TH32CS_SNAPTHREAD",
+ "TIME_ZONE_ID_DAYLIGHT",
+ "TIME_ZONE_ID_STANDARD",
+ "TIME_ZONE_ID_UNKNOWN",
+ "TIOCCBRK",
+ "TIOCCDTR",
+ "TIOCCONS",
+ "TIOCDCDTIMESTAMP",
+ "TIOCDRAIN",
+ "TIOCDSIMICROCODE",
+ "TIOCEXCL",
+ "TIOCEXT",
+ "TIOCFLAG_CDTRCTS",
+ "TIOCFLAG_CLOCAL",
+ "TIOCFLAG_CRTSCTS",
+ "TIOCFLAG_MDMBUF",
+ "TIOCFLAG_PPS",
+ "TIOCFLAG_SOFTCAR",
+ "TIOCFLUSH",
+ "TIOCGDEV",
+ "TIOCGDRAINWAIT",
+ "TIOCGETA",
+ "TIOCGETD",
+ "TIOCGFLAGS",
+ "TIOCGICOUNT",
+ "TIOCGLCKTRMIOS",
+ "TIOCGLINED",
+ "TIOCGPGRP",
+ "TIOCGPTN",
+ "TIOCGQSIZE",
+ "TIOCGRANTPT",
+ "TIOCGRS485",
+ "TIOCGSERIAL",
+ "TIOCGSID",
+ "TIOCGSIZE",
+ "TIOCGSOFTCAR",
+ "TIOCGTSTAMP",
+ "TIOCGWINSZ",
+ "TIOCINQ",
+ "TIOCIXOFF",
+ "TIOCIXON",
+ "TIOCLINUX",
+ "TIOCMBIC",
+ "TIOCMBIS",
+ "TIOCMGDTRWAIT",
+ "TIOCMGET",
+ "TIOCMIWAIT",
+ "TIOCMODG",
+ "TIOCMODS",
+ "TIOCMSDTRWAIT",
+ "TIOCMSET",
+ "TIOCM_CAR",
+ "TIOCM_CD",
+ "TIOCM_CTS",
+ "TIOCM_DCD",
+ "TIOCM_DSR",
+ "TIOCM_DTR",
+ "TIOCM_LE",
+ "TIOCM_RI",
+ "TIOCM_RNG",
+ "TIOCM_RTS",
+ "TIOCM_SR",
+ "TIOCM_ST",
+ "TIOCNOTTY",
+ "TIOCNXCL",
+ "TIOCOUTQ",
+ "TIOCPKT",
+ "TIOCPKT_DATA",
+ "TIOCPKT_DOSTOP",
+ "TIOCPKT_FLUSHREAD",
+ "TIOCPKT_FLUSHWRITE",
+ "TIOCPKT_IOCTL",
+ "TIOCPKT_NOSTOP",
+ "TIOCPKT_START",
+ "TIOCPKT_STOP",
+ "TIOCPTMASTER",
+ "TIOCPTMGET",
+ "TIOCPTSNAME",
+ "TIOCPTYGNAME",
+ "TIOCPTYGRANT",
+ "TIOCPTYUNLK",
+ "TIOCRCVFRAME",
+ "TIOCREMOTE",
+ "TIOCSBRK",
+ "TIOCSCONS",
+ "TIOCSCTTY",
+ "TIOCSDRAINWAIT",
+ "TIOCSDTR",
+ "TIOCSERCONFIG",
+ "TIOCSERGETLSR",
+ "TIOCSERGETMULTI",
+ "TIOCSERGSTRUCT",
+ "TIOCSERGWILD",
+ "TIOCSERSETMULTI",
+ "TIOCSERSWILD",
+ "TIOCSER_TEMT",
+ "TIOCSETA",
+ "TIOCSETAF",
+ "TIOCSETAW",
+ "TIOCSETD",
+ "TIOCSFLAGS",
+ "TIOCSIG",
+ "TIOCSLCKTRMIOS",
+ "TIOCSLINED",
+ "TIOCSPGRP",
+ "TIOCSPTLCK",
+ "TIOCSQSIZE",
+ "TIOCSRS485",
+ "TIOCSSERIAL",
+ "TIOCSSIZE",
+ "TIOCSSOFTCAR",
+ "TIOCSTART",
+ "TIOCSTAT",
+ "TIOCSTI",
+ "TIOCSTOP",
+ "TIOCSTSTAMP",
+ "TIOCSWINSZ",
+ "TIOCTIMESTAMP",
+ "TIOCUCNTL",
+ "TIOCVHANGUP",
+ "TIOCXMTFRAME",
+ "TOKEN_ADJUST_DEFAULT",
+ "TOKEN_ADJUST_GROUPS",
+ "TOKEN_ADJUST_PRIVILEGES",
+ "TOKEN_ADJUST_SESSIONID",
+ "TOKEN_ALL_ACCESS",
+ "TOKEN_ASSIGN_PRIMARY",
+ "TOKEN_DUPLICATE",
+ "TOKEN_EXECUTE",
+ "TOKEN_IMPERSONATE",
+ "TOKEN_QUERY",
+ "TOKEN_QUERY_SOURCE",
+ "TOKEN_READ",
+ "TOKEN_WRITE",
+ "TOSTOP",
+ "TRUNCATE_EXISTING",
+ "TUNATTACHFILTER",
+ "TUNDETACHFILTER",
+ "TUNGETFEATURES",
+ "TUNGETIFF",
+ "TUNGETSNDBUF",
+ "TUNGETVNETHDRSZ",
+ "TUNSETDEBUG",
+ "TUNSETGROUP",
+ "TUNSETIFF",
+ "TUNSETLINK",
+ "TUNSETNOCSUM",
+ "TUNSETOFFLOAD",
+ "TUNSETOWNER",
+ "TUNSETPERSIST",
+ "TUNSETSNDBUF",
+ "TUNSETTXFILTER",
+ "TUNSETVNETHDRSZ",
+ "Tee",
+ "TerminateProcess",
+ "Termios",
+ "Tgkill",
+ "Time",
+ "Time_t",
+ "Times",
+ "Timespec",
+ "TimespecToNsec",
+ "Timeval",
+ "Timeval32",
+ "TimevalToNsec",
+ "Timex",
+ "Timezoneinformation",
+ "Tms",
+ "Token",
+ "TokenAccessInformation",
+ "TokenAuditPolicy",
+ "TokenDefaultDacl",
+ "TokenElevation",
+ "TokenElevationType",
+ "TokenGroups",
+ "TokenGroupsAndPrivileges",
+ "TokenHasRestrictions",
+ "TokenImpersonationLevel",
+ "TokenIntegrityLevel",
+ "TokenLinkedToken",
+ "TokenLogonSid",
+ "TokenMandatoryPolicy",
+ "TokenOrigin",
+ "TokenOwner",
+ "TokenPrimaryGroup",
+ "TokenPrivileges",
+ "TokenRestrictedSids",
+ "TokenSandBoxInert",
+ "TokenSessionId",
+ "TokenSessionReference",
+ "TokenSource",
+ "TokenStatistics",
+ "TokenType",
+ "TokenUIAccess",
+ "TokenUser",
+ "TokenVirtualizationAllowed",
+ "TokenVirtualizationEnabled",
+ "Tokenprimarygroup",
+ "Tokenuser",
+ "TranslateAccountName",
+ "TranslateName",
+ "TransmitFile",
+ "TransmitFileBuffers",
+ "Truncate",
+ "UNIX_PATH_MAX",
+ "USAGE_MATCH_TYPE_AND",
+ "USAGE_MATCH_TYPE_OR",
+ "UTF16FromString",
+ "UTF16PtrFromString",
+ "UTF16ToString",
+ "Ucred",
+ "Umask",
+ "Uname",
+ "Undelete",
+ "UnixCredentials",
+ "UnixRights",
+ "Unlink",
+ "Unlinkat",
+ "UnmapViewOfFile",
+ "Unmount",
+ "Unsetenv",
+ "Unshare",
+ "UserInfo10",
+ "Ustat",
+ "Ustat_t",
+ "Utimbuf",
+ "Utime",
+ "Utimes",
+ "UtimesNano",
+ "Utsname",
+ "VDISCARD",
+ "VDSUSP",
+ "VEOF",
+ "VEOL",
+ "VEOL2",
+ "VERASE",
+ "VERASE2",
+ "VINTR",
+ "VKILL",
+ "VLNEXT",
+ "VMIN",
+ "VQUIT",
+ "VREPRINT",
+ "VSTART",
+ "VSTATUS",
+ "VSTOP",
+ "VSUSP",
+ "VSWTC",
+ "VT0",
+ "VT1",
+ "VTDLY",
+ "VTIME",
+ "VWERASE",
+ "VirtualLock",
+ "VirtualUnlock",
+ "WAIT_ABANDONED",
+ "WAIT_FAILED",
+ "WAIT_OBJECT_0",
+ "WAIT_TIMEOUT",
+ "WALL",
+ "WALLSIG",
+ "WALTSIG",
+ "WCLONE",
+ "WCONTINUED",
+ "WCOREFLAG",
+ "WEXITED",
+ "WLINUXCLONE",
+ "WNOHANG",
+ "WNOTHREAD",
+ "WNOWAIT",
+ "WNOZOMBIE",
+ "WOPTSCHECKED",
+ "WORDSIZE",
+ "WSABuf",
+ "WSACleanup",
+ "WSADESCRIPTION_LEN",
+ "WSAData",
+ "WSAEACCES",
+ "WSAECONNABORTED",
+ "WSAECONNRESET",
+ "WSAEnumProtocols",
+ "WSAID_CONNECTEX",
+ "WSAIoctl",
+ "WSAPROTOCOL_LEN",
+ "WSAProtocolChain",
+ "WSAProtocolInfo",
+ "WSARecv",
+ "WSARecvFrom",
+ "WSASYS_STATUS_LEN",
+ "WSASend",
+ "WSASendTo",
+ "WSASendto",
+ "WSAStartup",
+ "WSTOPPED",
+ "WTRAPPED",
+ "WUNTRACED",
+ "Wait4",
+ "WaitForSingleObject",
+ "WaitStatus",
+ "Win32FileAttributeData",
+ "Win32finddata",
+ "Write",
+ "WriteConsole",
+ "WriteFile",
+ "X509_ASN_ENCODING",
+ "XCASE",
+ "XP1_CONNECTIONLESS",
+ "XP1_CONNECT_DATA",
+ "XP1_DISCONNECT_DATA",
+ "XP1_EXPEDITED_DATA",
+ "XP1_GRACEFUL_CLOSE",
+ "XP1_GUARANTEED_DELIVERY",
+ "XP1_GUARANTEED_ORDER",
+ "XP1_IFS_HANDLES",
+ "XP1_MESSAGE_ORIENTED",
+ "XP1_MULTIPOINT_CONTROL_PLANE",
+ "XP1_MULTIPOINT_DATA_PLANE",
+ "XP1_PARTIAL_MESSAGE",
+ "XP1_PSEUDO_STREAM",
+ "XP1_QOS_SUPPORTED",
+ "XP1_SAN_SUPPORT_SDP",
+ "XP1_SUPPORT_BROADCAST",
+ "XP1_SUPPORT_MULTIPOINT",
+ "XP1_UNI_RECV",
+ "XP1_UNI_SEND",
+ },
+ "syscall/js": {
+ "CopyBytesToGo",
+ "CopyBytesToJS",
+ "Error",
+ "Func",
+ "FuncOf",
+ "Global",
+ "Null",
+ "Type",
+ "TypeBoolean",
+ "TypeFunction",
+ "TypeNull",
+ "TypeNumber",
+ "TypeObject",
+ "TypeString",
+ "TypeSymbol",
+ "TypeUndefined",
+ "Undefined",
+ "Value",
+ "ValueError",
+ "ValueOf",
+ },
+ "testing": {
+ "AllocsPerRun",
+ "B",
+ "Benchmark",
+ "BenchmarkResult",
+ "Cover",
+ "CoverBlock",
+ "CoverMode",
+ "Coverage",
+ "F",
+ "Init",
+ "InternalBenchmark",
+ "InternalExample",
+ "InternalFuzzTarget",
+ "InternalTest",
+ "M",
+ "Main",
+ "MainStart",
+ "PB",
+ "RegisterCover",
+ "RunBenchmarks",
+ "RunExamples",
+ "RunTests",
+ "Short",
+ "T",
+ "TB",
+ "Testing",
+ "Verbose",
+ },
+ "testing/fstest": {
+ "MapFS",
+ "MapFile",
+ "TestFS",
+ },
+ "testing/iotest": {
+ "DataErrReader",
+ "ErrReader",
+ "ErrTimeout",
+ "HalfReader",
+ "NewReadLogger",
+ "NewWriteLogger",
+ "OneByteReader",
+ "TestReader",
+ "TimeoutReader",
+ "TruncateWriter",
+ },
+ "testing/quick": {
+ "Check",
+ "CheckEqual",
+ "CheckEqualError",
+ "CheckError",
+ "Config",
+ "Generator",
+ "SetupError",
+ "Value",
+ },
+ "testing/slogtest": {
+ "TestHandler",
+ },
+ "text/scanner": {
+ "Char",
+ "Comment",
+ "EOF",
+ "Float",
+ "GoTokens",
+ "GoWhitespace",
+ "Ident",
+ "Int",
+ "Position",
+ "RawString",
+ "ScanChars",
+ "ScanComments",
+ "ScanFloats",
+ "ScanIdents",
+ "ScanInts",
+ "ScanRawStrings",
+ "ScanStrings",
+ "Scanner",
+ "SkipComments",
+ "String",
+ "TokenString",
+ },
+ "text/tabwriter": {
+ "AlignRight",
+ "Debug",
+ "DiscardEmptyColumns",
+ "Escape",
+ "FilterHTML",
+ "NewWriter",
+ "StripEscape",
+ "TabIndent",
+ "Writer",
+ },
+ "text/template": {
+ "ExecError",
+ "FuncMap",
+ "HTMLEscape",
+ "HTMLEscapeString",
+ "HTMLEscaper",
+ "IsTrue",
+ "JSEscape",
+ "JSEscapeString",
+ "JSEscaper",
+ "Must",
+ "New",
+ "ParseFS",
+ "ParseFiles",
+ "ParseGlob",
+ "Template",
+ "URLQueryEscaper",
+ },
+ "text/template/parse": {
+ "ActionNode",
+ "BoolNode",
+ "BranchNode",
+ "BreakNode",
+ "ChainNode",
+ "CommandNode",
+ "CommentNode",
+ "ContinueNode",
+ "DotNode",
+ "FieldNode",
+ "IdentifierNode",
+ "IfNode",
+ "IsEmptyTree",
+ "ListNode",
+ "Mode",
+ "New",
+ "NewIdentifier",
+ "NilNode",
+ "Node",
+ "NodeAction",
+ "NodeBool",
+ "NodeBreak",
+ "NodeChain",
+ "NodeCommand",
+ "NodeComment",
+ "NodeContinue",
+ "NodeDot",
+ "NodeField",
+ "NodeIdentifier",
+ "NodeIf",
+ "NodeList",
+ "NodeNil",
+ "NodeNumber",
+ "NodePipe",
+ "NodeRange",
+ "NodeString",
+ "NodeTemplate",
+ "NodeText",
+ "NodeType",
+ "NodeVariable",
+ "NodeWith",
+ "NumberNode",
+ "Parse",
+ "ParseComments",
+ "PipeNode",
+ "Pos",
+ "RangeNode",
+ "SkipFuncCheck",
+ "StringNode",
+ "TemplateNode",
+ "TextNode",
+ "Tree",
+ "VariableNode",
+ "WithNode",
+ },
+ "time": {
+ "ANSIC",
+ "After",
+ "AfterFunc",
+ "April",
+ "August",
+ "Date",
+ "DateOnly",
+ "DateTime",
+ "December",
+ "Duration",
+ "February",
+ "FixedZone",
+ "Friday",
+ "Hour",
+ "January",
+ "July",
+ "June",
+ "Kitchen",
+ "Layout",
+ "LoadLocation",
+ "LoadLocationFromTZData",
+ "Local",
+ "Location",
+ "March",
+ "May",
+ "Microsecond",
+ "Millisecond",
+ "Minute",
+ "Monday",
+ "Month",
+ "Nanosecond",
+ "NewTicker",
+ "NewTimer",
+ "November",
+ "Now",
+ "October",
+ "Parse",
+ "ParseDuration",
+ "ParseError",
+ "ParseInLocation",
+ "RFC1123",
+ "RFC1123Z",
+ "RFC3339",
+ "RFC3339Nano",
+ "RFC822",
+ "RFC822Z",
+ "RFC850",
+ "RubyDate",
+ "Saturday",
+ "Second",
+ "September",
+ "Since",
+ "Sleep",
+ "Stamp",
+ "StampMicro",
+ "StampMilli",
+ "StampNano",
+ "Sunday",
+ "Thursday",
+ "Tick",
+ "Ticker",
+ "Time",
+ "TimeOnly",
+ "Timer",
+ "Tuesday",
+ "UTC",
+ "Unix",
+ "UnixDate",
+ "UnixMicro",
+ "UnixMilli",
+ "Until",
+ "Wednesday",
+ "Weekday",
+ },
+ "unicode": {
+ "ASCII_Hex_Digit",
+ "Adlam",
+ "Ahom",
+ "Anatolian_Hieroglyphs",
+ "Arabic",
+ "Armenian",
+ "Avestan",
+ "AzeriCase",
+ "Balinese",
+ "Bamum",
+ "Bassa_Vah",
+ "Batak",
+ "Bengali",
+ "Bhaiksuki",
+ "Bidi_Control",
+ "Bopomofo",
+ "Brahmi",
+ "Braille",
+ "Buginese",
+ "Buhid",
+ "C",
+ "Canadian_Aboriginal",
+ "Carian",
+ "CaseRange",
+ "CaseRanges",
+ "Categories",
+ "Caucasian_Albanian",
+ "Cc",
+ "Cf",
+ "Chakma",
+ "Cham",
+ "Cherokee",
+ "Chorasmian",
+ "Co",
+ "Common",
+ "Coptic",
+ "Cs",
+ "Cuneiform",
+ "Cypriot",
+ "Cypro_Minoan",
+ "Cyrillic",
+ "Dash",
+ "Deprecated",
+ "Deseret",
+ "Devanagari",
+ "Diacritic",
+ "Digit",
+ "Dives_Akuru",
+ "Dogra",
+ "Duployan",
+ "Egyptian_Hieroglyphs",
+ "Elbasan",
+ "Elymaic",
+ "Ethiopic",
+ "Extender",
+ "FoldCategory",
+ "FoldScript",
+ "Georgian",
+ "Glagolitic",
+ "Gothic",
+ "Grantha",
+ "GraphicRanges",
+ "Greek",
+ "Gujarati",
+ "Gunjala_Gondi",
+ "Gurmukhi",
+ "Han",
+ "Hangul",
+ "Hanifi_Rohingya",
+ "Hanunoo",
+ "Hatran",
+ "Hebrew",
+ "Hex_Digit",
+ "Hiragana",
+ "Hyphen",
+ "IDS_Binary_Operator",
+ "IDS_Trinary_Operator",
+ "Ideographic",
+ "Imperial_Aramaic",
+ "In",
+ "Inherited",
+ "Inscriptional_Pahlavi",
+ "Inscriptional_Parthian",
+ "Is",
+ "IsControl",
+ "IsDigit",
+ "IsGraphic",
+ "IsLetter",
+ "IsLower",
+ "IsMark",
+ "IsNumber",
+ "IsOneOf",
+ "IsPrint",
+ "IsPunct",
+ "IsSpace",
+ "IsSymbol",
+ "IsTitle",
+ "IsUpper",
+ "Javanese",
+ "Join_Control",
+ "Kaithi",
+ "Kannada",
+ "Katakana",
+ "Kawi",
+ "Kayah_Li",
+ "Kharoshthi",
+ "Khitan_Small_Script",
+ "Khmer",
+ "Khojki",
+ "Khudawadi",
+ "L",
+ "Lao",
+ "Latin",
+ "Lepcha",
+ "Letter",
+ "Limbu",
+ "Linear_A",
+ "Linear_B",
+ "Lisu",
+ "Ll",
+ "Lm",
+ "Lo",
+ "Logical_Order_Exception",
+ "Lower",
+ "LowerCase",
+ "Lt",
+ "Lu",
+ "Lycian",
+ "Lydian",
+ "M",
+ "Mahajani",
+ "Makasar",
+ "Malayalam",
+ "Mandaic",
+ "Manichaean",
+ "Marchen",
+ "Mark",
+ "Masaram_Gondi",
+ "MaxASCII",
+ "MaxCase",
+ "MaxLatin1",
+ "MaxRune",
+ "Mc",
+ "Me",
+ "Medefaidrin",
+ "Meetei_Mayek",
+ "Mende_Kikakui",
+ "Meroitic_Cursive",
+ "Meroitic_Hieroglyphs",
+ "Miao",
+ "Mn",
+ "Modi",
+ "Mongolian",
+ "Mro",
+ "Multani",
+ "Myanmar",
+ "N",
+ "Nabataean",
+ "Nag_Mundari",
+ "Nandinagari",
+ "Nd",
+ "New_Tai_Lue",
+ "Newa",
+ "Nko",
+ "Nl",
+ "No",
+ "Noncharacter_Code_Point",
+ "Number",
+ "Nushu",
+ "Nyiakeng_Puachue_Hmong",
+ "Ogham",
+ "Ol_Chiki",
+ "Old_Hungarian",
+ "Old_Italic",
+ "Old_North_Arabian",
+ "Old_Permic",
+ "Old_Persian",
+ "Old_Sogdian",
+ "Old_South_Arabian",
+ "Old_Turkic",
+ "Old_Uyghur",
+ "Oriya",
+ "Osage",
+ "Osmanya",
+ "Other",
+ "Other_Alphabetic",
+ "Other_Default_Ignorable_Code_Point",
+ "Other_Grapheme_Extend",
+ "Other_ID_Continue",
+ "Other_ID_Start",
+ "Other_Lowercase",
+ "Other_Math",
+ "Other_Uppercase",
+ "P",
+ "Pahawh_Hmong",
+ "Palmyrene",
+ "Pattern_Syntax",
+ "Pattern_White_Space",
+ "Pau_Cin_Hau",
+ "Pc",
+ "Pd",
+ "Pe",
+ "Pf",
+ "Phags_Pa",
+ "Phoenician",
+ "Pi",
+ "Po",
+ "Prepended_Concatenation_Mark",
+ "PrintRanges",
+ "Properties",
+ "Ps",
+ "Psalter_Pahlavi",
+ "Punct",
+ "Quotation_Mark",
+ "Radical",
+ "Range16",
+ "Range32",
+ "RangeTable",
+ "Regional_Indicator",
+ "Rejang",
+ "ReplacementChar",
+ "Runic",
+ "S",
+ "STerm",
+ "Samaritan",
+ "Saurashtra",
+ "Sc",
+ "Scripts",
+ "Sentence_Terminal",
+ "Sharada",
+ "Shavian",
+ "Siddham",
+ "SignWriting",
+ "SimpleFold",
+ "Sinhala",
+ "Sk",
+ "Sm",
+ "So",
+ "Soft_Dotted",
+ "Sogdian",
+ "Sora_Sompeng",
+ "Soyombo",
+ "Space",
+ "SpecialCase",
+ "Sundanese",
+ "Syloti_Nagri",
+ "Symbol",
+ "Syriac",
+ "Tagalog",
+ "Tagbanwa",
+ "Tai_Le",
+ "Tai_Tham",
+ "Tai_Viet",
+ "Takri",
+ "Tamil",
+ "Tangsa",
+ "Tangut",
+ "Telugu",
+ "Terminal_Punctuation",
+ "Thaana",
+ "Thai",
+ "Tibetan",
+ "Tifinagh",
+ "Tirhuta",
+ "Title",
+ "TitleCase",
+ "To",
+ "ToLower",
+ "ToTitle",
+ "ToUpper",
+ "Toto",
+ "TurkishCase",
+ "Ugaritic",
+ "Unified_Ideograph",
+ "Upper",
+ "UpperCase",
+ "UpperLower",
+ "Vai",
+ "Variation_Selector",
+ "Version",
+ "Vithkuqi",
+ "Wancho",
+ "Warang_Citi",
+ "White_Space",
+ "Yezidi",
+ "Yi",
+ "Z",
+ "Zanabazar_Square",
+ "Zl",
+ "Zp",
+ "Zs",
+ },
+ "unicode/utf16": {
+ "AppendRune",
+ "Decode",
+ "DecodeRune",
+ "Encode",
+ "EncodeRune",
+ "IsSurrogate",
+ },
+ "unicode/utf8": {
+ "AppendRune",
+ "DecodeLastRune",
+ "DecodeLastRuneInString",
+ "DecodeRune",
+ "DecodeRuneInString",
+ "EncodeRune",
+ "FullRune",
+ "FullRuneInString",
+ "MaxRune",
+ "RuneCount",
+ "RuneCountInString",
+ "RuneError",
+ "RuneLen",
+ "RuneSelf",
+ "RuneStart",
+ "UTFMax",
+ "Valid",
+ "ValidRune",
+ "ValidString",
+ },
+ "unsafe": {
+ "Add",
+ "Alignof",
+ "Offsetof",
+ "Pointer",
+ "Sizeof",
+ "Slice",
+ "SliceData",
+ "String",
+ "StringData",
+ },
+}
diff --git a/vendor/modules.txt b/vendor/modules.txt
index 4ddf4e9fe..62008ddbb 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -69,11 +69,23 @@ github.com/DmitriyVTitov/size
## explicit; go 1.19
github.com/KimMachineGun/automemlimit
github.com/KimMachineGun/automemlimit/memlimit
+# github.com/Masterminds/goutils v1.1.1
+## explicit
+github.com/Masterminds/goutils
+# github.com/Masterminds/semver/v3 v3.2.0
+## explicit; go 1.18
+github.com/Masterminds/semver/v3
+# github.com/Masterminds/sprig/v3 v3.2.3
+## explicit; go 1.13
+github.com/Masterminds/sprig/v3
# github.com/abema/go-mp4 v1.2.0
## explicit; go 1.14
github.com/abema/go-mp4
github.com/abema/go-mp4/internal/bitio
github.com/abema/go-mp4/internal/util
+# github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2
+## explicit; go 1.13
+github.com/asaskevich/govalidator
# github.com/aymerick/douceur v0.2.0
## explicit
github.com/aymerick/douceur/css
@@ -152,7 +164,7 @@ github.com/davecgh/go-spew/spew
# github.com/disintegration/imaging v1.6.2
## explicit
github.com/disintegration/imaging
-# github.com/docker/go-units v0.4.0
+# github.com/docker/go-units v0.5.0
## explicit
github.com/docker/go-units
# github.com/dsoprea/go-exif/v3 v3.0.0-20210625224831-a6301f85c82b
@@ -176,6 +188,9 @@ github.com/dsoprea/go-utility/v2/image
# github.com/dustin/go-humanize v1.0.1
## explicit; go 1.16
github.com/dustin/go-humanize
+# github.com/felixge/httpsnoop v1.0.3
+## explicit; go 1.13
+github.com/felixge/httpsnoop
# github.com/fsnotify/fsnotify v1.7.0
## explicit; go 1.17
github.com/fsnotify/fsnotify
@@ -223,6 +238,53 @@ github.com/go-logr/logr/funcr
# github.com/go-logr/stdr v1.2.2
## explicit; go 1.16
github.com/go-logr/stdr
+# github.com/go-openapi/analysis v0.21.4
+## explicit; go 1.13
+github.com/go-openapi/analysis
+github.com/go-openapi/analysis/internal/debug
+github.com/go-openapi/analysis/internal/flatten/normalize
+github.com/go-openapi/analysis/internal/flatten/operations
+github.com/go-openapi/analysis/internal/flatten/replace
+github.com/go-openapi/analysis/internal/flatten/schutils
+github.com/go-openapi/analysis/internal/flatten/sortref
+# github.com/go-openapi/errors v0.20.4
+## explicit; go 1.14
+github.com/go-openapi/errors
+# github.com/go-openapi/inflect v0.19.0
+## explicit
+github.com/go-openapi/inflect
+# github.com/go-openapi/jsonpointer v0.19.6
+## explicit; go 1.13
+github.com/go-openapi/jsonpointer
+# github.com/go-openapi/jsonreference v0.20.2
+## explicit; go 1.13
+github.com/go-openapi/jsonreference
+github.com/go-openapi/jsonreference/internal
+# github.com/go-openapi/loads v0.21.2
+## explicit; go 1.13
+github.com/go-openapi/loads
+github.com/go-openapi/loads/fmts
+# github.com/go-openapi/runtime v0.26.0
+## explicit; go 1.18
+github.com/go-openapi/runtime
+github.com/go-openapi/runtime/logger
+github.com/go-openapi/runtime/middleware
+github.com/go-openapi/runtime/middleware/denco
+github.com/go-openapi/runtime/middleware/header
+github.com/go-openapi/runtime/middleware/untyped
+github.com/go-openapi/runtime/security
+# github.com/go-openapi/spec v0.20.9
+## explicit; go 1.13
+github.com/go-openapi/spec
+# github.com/go-openapi/strfmt v0.21.7
+## explicit; go 1.19
+github.com/go-openapi/strfmt
+# github.com/go-openapi/swag v0.22.4
+## explicit; go 1.18
+github.com/go-openapi/swag
+# github.com/go-openapi/validate v0.22.1
+## explicit; go 1.14
+github.com/go-openapi/validate
# github.com/go-playground/form/v4 v4.2.1
## explicit; go 1.13
github.com/go-playground/form/v4
@@ -236,6 +298,16 @@ github.com/go-playground/universal-translator
# github.com/go-playground/validator/v10 v10.15.5
## explicit; go 1.18
github.com/go-playground/validator/v10
+# github.com/go-swagger/go-swagger v0.30.5
+## explicit; go 1.19
+github.com/go-swagger/go-swagger/cmd/swagger
+github.com/go-swagger/go-swagger/cmd/swagger/commands
+github.com/go-swagger/go-swagger/cmd/swagger/commands/diff
+github.com/go-swagger/go-swagger/cmd/swagger/commands/generate
+github.com/go-swagger/go-swagger/cmd/swagger/commands/initcmd
+github.com/go-swagger/go-swagger/codescan
+github.com/go-swagger/go-swagger/generator
+github.com/go-swagger/go-swagger/scan
# github.com/go-xmlfmt/xmlfmt v0.0.0-20211206191508-7fd73a941850
## explicit
github.com/go-xmlfmt/xmlfmt
@@ -283,6 +355,9 @@ github.com/gorilla/css/scanner
# github.com/gorilla/feeds v1.1.2
## explicit; go 1.20
github.com/gorilla/feeds
+# github.com/gorilla/handlers v1.5.1
+## explicit; go 1.14
+github.com/gorilla/handlers
# github.com/gorilla/securecookie v1.1.1
## explicit
github.com/gorilla/securecookie
@@ -320,6 +395,12 @@ github.com/hashicorp/hcl/hcl/token
github.com/hashicorp/hcl/json/parser
github.com/hashicorp/hcl/json/scanner
github.com/hashicorp/hcl/json/token
+# github.com/huandu/xstrings v1.3.3
+## explicit; go 1.12
+github.com/huandu/xstrings
+# github.com/imdario/mergo v0.3.12
+## explicit; go 1.13
+github.com/imdario/mergo
# github.com/inconshreveable/mousetrap v1.1.0
## explicit; go 1.18
github.com/inconshreveable/mousetrap
@@ -348,9 +429,15 @@ github.com/jackc/pgx/v5/stdlib
## explicit; go 1.19
github.com/jackc/puddle/v2
github.com/jackc/puddle/v2/internal/genstack
+# github.com/jessevdk/go-flags v1.5.0
+## explicit; go 1.15
+github.com/jessevdk/go-flags
# github.com/jinzhu/inflection v1.0.0
## explicit
github.com/jinzhu/inflection
+# github.com/josharian/intern v1.0.0
+## explicit; go 1.5
+github.com/josharian/intern
# github.com/json-iterator/go v1.1.12
## explicit; go 1.12
github.com/json-iterator/go
@@ -364,12 +451,23 @@ github.com/klauspost/compress/zlib
# github.com/klauspost/cpuid/v2 v2.2.6
## explicit; go 1.15
github.com/klauspost/cpuid/v2
+# github.com/kr/pretty v0.3.1
+## explicit; go 1.12
+github.com/kr/pretty
+# github.com/kr/text v0.2.0
+## explicit
+github.com/kr/text
# github.com/leodido/go-urn v1.2.4
## explicit; go 1.16
github.com/leodido/go-urn
# github.com/magiconair/properties v1.8.7
## explicit; go 1.19
github.com/magiconair/properties
+# github.com/mailru/easyjson v0.7.7
+## explicit; go 1.12
+github.com/mailru/easyjson/buffer
+github.com/mailru/easyjson/jlexer
+github.com/mailru/easyjson/jwriter
# github.com/mattn/go-isatty v0.0.20
## explicit; go 1.15
github.com/mattn/go-isatty
@@ -402,9 +500,15 @@ github.com/minio/minio-go/v7/pkg/tags
# github.com/minio/sha256-simd v1.0.1
## explicit; go 1.17
github.com/minio/sha256-simd
+# github.com/mitchellh/copystructure v1.2.0
+## explicit; go 1.15
+github.com/mitchellh/copystructure
# github.com/mitchellh/mapstructure v1.5.0
## explicit; go 1.14
github.com/mitchellh/mapstructure
+# github.com/mitchellh/reflectwalk v1.0.2
+## explicit
+github.com/mitchellh/reflectwalk
# github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd
## explicit
github.com/modern-go/concurrent
@@ -460,6 +564,9 @@ github.com/quasoft/memstore
# github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec
## explicit; go 1.12
github.com/remyoudompheng/bigfft
+# github.com/rogpeppe/go-internal v1.10.0
+## explicit; go 1.19
+github.com/rogpeppe/go-internal/fmtsort
# github.com/rs/xid v1.5.0
## explicit; go 1.12
github.com/rs/xid
@@ -469,6 +576,9 @@ github.com/sagikazarmark/locafero
# github.com/sagikazarmark/slog-shim v0.1.0
## explicit; go 1.20
github.com/sagikazarmark/slog-shim
+# github.com/shopspring/decimal v1.2.0
+## explicit; go 1.13
+github.com/shopspring/decimal
# github.com/sirupsen/logrus v1.9.3
## explicit; go 1.13
github.com/sirupsen/logrus
@@ -714,6 +824,9 @@ github.com/tmthrgd/go-hex
# github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80
## explicit
github.com/tomnomnom/linkheader
+# github.com/toqueteos/webbrowser v1.2.0
+## explicit; go 1.12
+github.com/toqueteos/webbrowser
# github.com/twitchyliquid64/golang-asm v0.15.1
## explicit; go 1.13
github.com/twitchyliquid64/golang-asm/asm/arch
@@ -791,6 +904,15 @@ github.com/yuin/goldmark/util
# github.com/zeebo/xxh3 v1.0.2
## explicit; go 1.17
github.com/zeebo/xxh3
+# go.mongodb.org/mongo-driver v1.11.3
+## explicit; go 1.13
+go.mongodb.org/mongo-driver/bson
+go.mongodb.org/mongo-driver/bson/bsoncodec
+go.mongodb.org/mongo-driver/bson/bsonoptions
+go.mongodb.org/mongo-driver/bson/bsonrw
+go.mongodb.org/mongo-driver/bson/bsontype
+go.mongodb.org/mongo-driver/bson/primitive
+go.mongodb.org/mongo-driver/x/bsonx/bsoncore
# go.opentelemetry.io/otel v1.20.0
## explicit; go 1.20
go.opentelemetry.io/otel
@@ -889,6 +1011,7 @@ golang.org/x/crypto/internal/alias
golang.org/x/crypto/internal/poly1305
golang.org/x/crypto/pbkdf2
golang.org/x/crypto/ripemd160
+golang.org/x/crypto/scrypt
golang.org/x/crypto/sha3
golang.org/x/crypto/ssh
golang.org/x/crypto/ssh/internal/bcrypt_pbkdf
@@ -911,6 +1034,8 @@ golang.org/x/image/vp8l
golang.org/x/image/webp
# golang.org/x/mod v0.14.0
## explicit; go 1.18
+golang.org/x/mod/internal/lazyregexp
+golang.org/x/mod/module
golang.org/x/mod/semver
# golang.org/x/net v0.21.0
## explicit; go 1.18
@@ -969,10 +1094,15 @@ golang.org/x/text/unicode/norm
golang.org/x/text/width
# golang.org/x/tools v0.17.0
## explicit; go 1.18
+golang.org/x/tools/go/ast/astutil
+golang.org/x/tools/go/buildutil
golang.org/x/tools/go/gcexportdata
+golang.org/x/tools/go/internal/cgo
golang.org/x/tools/go/internal/packagesdriver
+golang.org/x/tools/go/loader
golang.org/x/tools/go/packages
golang.org/x/tools/go/types/objectpath
+golang.org/x/tools/imports
golang.org/x/tools/internal/event
golang.org/x/tools/internal/event/core
golang.org/x/tools/internal/event/keys
@@ -980,6 +1110,8 @@ golang.org/x/tools/internal/event/label
golang.org/x/tools/internal/event/tag
golang.org/x/tools/internal/gcimporter
golang.org/x/tools/internal/gocommand
+golang.org/x/tools/internal/gopathwalk
+golang.org/x/tools/internal/imports
golang.org/x/tools/internal/packagesinternal
golang.org/x/tools/internal/pkgbits
golang.org/x/tools/internal/tokeninternal