Diffstat (limited to 'vendor/github.com/go-openapi/analysis')
-rw-r--r--  vendor/github.com/go-openapi/analysis/.codecov.yml                                |    5
-rw-r--r--  vendor/github.com/go-openapi/analysis/.gitattributes                              |    2
-rw-r--r--  vendor/github.com/go-openapi/analysis/.gitignore                                  |    5
-rw-r--r--  vendor/github.com/go-openapi/analysis/.golangci.yml                               |   56
-rw-r--r--  vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md                          |   74
-rw-r--r--  vendor/github.com/go-openapi/analysis/LICENSE                                     |  202
-rw-r--r--  vendor/github.com/go-openapi/analysis/README.md                                   |   31
-rw-r--r--  vendor/github.com/go-openapi/analysis/analyzer.go                                 | 1064
-rw-r--r--  vendor/github.com/go-openapi/analysis/appveyor.yml                                |   32
-rw-r--r--  vendor/github.com/go-openapi/analysis/debug.go                                    |   23
-rw-r--r--  vendor/github.com/go-openapi/analysis/doc.go                                      |   43
-rw-r--r--  vendor/github.com/go-openapi/analysis/fixer.go                                    |   79
-rw-r--r--  vendor/github.com/go-openapi/analysis/flatten.go                                  |  802
-rw-r--r--  vendor/github.com/go-openapi/analysis/flatten_name.go                             |  293
-rw-r--r--  vendor/github.com/go-openapi/analysis/flatten_options.go                          |   78
-rw-r--r--  vendor/github.com/go-openapi/analysis/internal/debug/debug.go                     |   41
-rw-r--r--  vendor/github.com/go-openapi/analysis/internal/flatten/normalize/normalize.go     |   87
-rw-r--r--  vendor/github.com/go-openapi/analysis/internal/flatten/operations/operations.go   |   90
-rw-r--r--  vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go         |  434
-rw-r--r--  vendor/github.com/go-openapi/analysis/internal/flatten/schutils/flatten_schema.go |   29
-rw-r--r--  vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go            |  201
-rw-r--r--  vendor/github.com/go-openapi/analysis/internal/flatten/sortref/sort_ref.go        |  141
-rw-r--r--  vendor/github.com/go-openapi/analysis/mixin.go                                    |  515
-rw-r--r--  vendor/github.com/go-openapi/analysis/schema.go                                   |  256
24 files changed, 4583 insertions, 0 deletions
diff --git a/vendor/github.com/go-openapi/analysis/.codecov.yml b/vendor/github.com/go-openapi/analysis/.codecov.yml
new file mode 100644
index 000000000..841c4281e
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/.codecov.yml
@@ -0,0 +1,5 @@
+coverage:
+ status:
+ patch:
+ default:
+ target: 80%
diff --git a/vendor/github.com/go-openapi/analysis/.gitattributes b/vendor/github.com/go-openapi/analysis/.gitattributes
new file mode 100644
index 000000000..d020be8ea
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/.gitattributes
@@ -0,0 +1,2 @@
+*.go text eol=lf
+
diff --git a/vendor/github.com/go-openapi/analysis/.gitignore b/vendor/github.com/go-openapi/analysis/.gitignore
new file mode 100644
index 000000000..87c3bd3e6
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/.gitignore
@@ -0,0 +1,5 @@
+secrets.yml
+coverage.out
+coverage.txt
+*.cov
+.idea
diff --git a/vendor/github.com/go-openapi/analysis/.golangci.yml b/vendor/github.com/go-openapi/analysis/.golangci.yml
new file mode 100644
index 000000000..e24a6c14e
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/.golangci.yml
@@ -0,0 +1,56 @@
+linters-settings:
+ govet:
+ check-shadowing: true
+ golint:
+ min-confidence: 0
+ gocyclo:
+ min-complexity: 40
+ gocognit:
+ min-complexity: 40
+ maligned:
+ suggest-new: true
+ dupl:
+ threshold: 150
+ goconst:
+ min-len: 2
+ min-occurrences: 4
+
+linters:
+ enable-all: true
+ disable:
+ - maligned
+ - lll
+ - gochecknoglobals
+ - gochecknoinits
+ # scopelint is useful, but also reports false positives
+ # that unfortunately can't be disabled. So we disable the
+ # linter rather than changing code that works.
+ # see: https://github.com/kyoh86/scopelint/issues/4
+ - scopelint
+ - godox
+ - gocognit
+ #- whitespace
+ - wsl
+ - funlen
+ - testpackage
+ - wrapcheck
+ #- nlreturn
+ - gomnd
+ - goerr113
+ - exhaustivestruct
+ #- errorlint
+ #- nestif
+ - gofumpt
+ - godot
+ - gci
+ - dogsled
+ - paralleltest
+ - tparallel
+ - thelper
+ - ifshort
+ - forbidigo
+ - cyclop
+ - varnamelen
+ - exhaustruct
+ - nonamedreturns
+ - nosnakecase
diff --git a/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..9322b065e
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md
@@ -0,0 +1,74 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of experience,
+nationality, personal appearance, race, religion, or sexual identity and
+orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or
+advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at ivan+abuse@flanders.co.nz. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/analysis/LICENSE b/vendor/github.com/go-openapi/analysis/LICENSE
new file mode 100644
index 000000000..d64569567
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/go-openapi/analysis/README.md b/vendor/github.com/go-openapi/analysis/README.md
new file mode 100644
index 000000000..aad6da10f
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/README.md
@@ -0,0 +1,31 @@
+# OpenAPI initiative analysis
+
+[![Build Status](https://travis-ci.org/go-openapi/analysis.svg?branch=master)](https://travis-ci.org/go-openapi/analysis)
+[![Build status](https://ci.appveyor.com/api/projects/status/x377t5o9ennm847o/branch/master?svg=true)](https://ci.appveyor.com/project/casualjim/go-openapi/analysis/branch/master)
+[![codecov](https://codecov.io/gh/go-openapi/analysis/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/analysis)
+[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
+[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/analysis/master/LICENSE)
+[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/analysis.svg)](https://pkg.go.dev/github.com/go-openapi/analysis)
+[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/analysis)](https://goreportcard.com/report/github.com/go-openapi/analysis)
+
+
+A foundational library to analyze an OAI specification document for easier reasoning about the content.
+
+## What's inside?
+
+* An analyzer providing methods to walk the functional content of a specification
+* A spec flattener producing a self-contained document bundle, while preserving `$ref`s
+* A spec merger ("mixin") to merge several spec documents into a primary spec
+* A spec "fixer" ensuring that response descriptions are non-empty
+
+[Documentation](https://godoc.org/github.com/go-openapi/analysis)
+
+## FAQ
+
+* Does this library support OpenAPI 3?
+
+> No.
+> This package currently only supports OpenAPI 2.0 (aka Swagger 2.0).
+> There is no plan to make it evolve toward supporting OpenAPI 3.x.
+> This [discussion thread](https://github.com/go-openapi/spec/issues/21) relates the full story.
+>
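Before the vendored sources, a minimal sketch of how this package is typically consumed. The inline spec document and variable names are hypothetical; `analysis.New`, `OperationIDs` and `RequiredConsumes` are defined in analyzer.go below.

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/spec"
)

func main() {
	// A hypothetical, minimal Swagger 2.0 document.
	raw := []byte(`{
		"swagger": "2.0",
		"info": {"title": "minimal", "version": "1.0.0"},
		"consumes": ["application/json"],
		"paths": {
			"/ping": {
				"get": {
					"operationId": "ping",
					"responses": {"200": {"description": "ok"}}
				}
			}
		}
	}`)

	var sw spec.Swagger
	if err := json.Unmarshal(raw, &sw); err != nil {
		log.Fatal(err)
	}

	// New indexes the document (operations, $refs, patterns, enums, ...).
	an := analysis.New(&sw)
	fmt.Println(an.OperationIDs())     // [ping]
	fmt.Println(an.RequiredConsumes()) // [application/json]
}
```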
diff --git a/vendor/github.com/go-openapi/analysis/analyzer.go b/vendor/github.com/go-openapi/analysis/analyzer.go
new file mode 100644
index 000000000..c17aee1b6
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/analyzer.go
@@ -0,0 +1,1064 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analysis
+
+import (
+ "fmt"
+ slashpath "path"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+type referenceAnalysis struct {
+ schemas map[string]spec.Ref
+ responses map[string]spec.Ref
+ parameters map[string]spec.Ref
+ items map[string]spec.Ref
+ headerItems map[string]spec.Ref
+ parameterItems map[string]spec.Ref
+ allRefs map[string]spec.Ref
+ pathItems map[string]spec.Ref
+}
+
+func (r *referenceAnalysis) addRef(key string, ref spec.Ref) {
+ r.allRefs["#"+key] = ref
+}
+
+func (r *referenceAnalysis) addItemsRef(key string, items *spec.Items, location string) {
+ r.items["#"+key] = items.Ref
+ r.addRef(key, items.Ref)
+ if location == "header" {
+ // NOTE: in swagger 2.0, headers and parameters (but not body param schemas) are simple schemas
+ // and $ref are not supported here. However it is possible to analyze this.
+ r.headerItems["#"+key] = items.Ref
+ } else {
+ r.parameterItems["#"+key] = items.Ref
+ }
+}
+
+func (r *referenceAnalysis) addSchemaRef(key string, ref SchemaRef) {
+ r.schemas["#"+key] = ref.Schema.Ref
+ r.addRef(key, ref.Schema.Ref)
+}
+
+func (r *referenceAnalysis) addResponseRef(key string, resp *spec.Response) {
+ r.responses["#"+key] = resp.Ref
+ r.addRef(key, resp.Ref)
+}
+
+func (r *referenceAnalysis) addParamRef(key string, param *spec.Parameter) {
+ r.parameters["#"+key] = param.Ref
+ r.addRef(key, param.Ref)
+}
+
+func (r *referenceAnalysis) addPathItemRef(key string, pathItem *spec.PathItem) {
+ r.pathItems["#"+key] = pathItem.Ref
+ r.addRef(key, pathItem.Ref)
+}
+
+type patternAnalysis struct {
+ parameters map[string]string
+ headers map[string]string
+ items map[string]string
+ schemas map[string]string
+ allPatterns map[string]string
+}
+
+func (p *patternAnalysis) addPattern(key, pattern string) {
+ p.allPatterns["#"+key] = pattern
+}
+
+func (p *patternAnalysis) addParameterPattern(key, pattern string) {
+ p.parameters["#"+key] = pattern
+ p.addPattern(key, pattern)
+}
+
+func (p *patternAnalysis) addHeaderPattern(key, pattern string) {
+ p.headers["#"+key] = pattern
+ p.addPattern(key, pattern)
+}
+
+func (p *patternAnalysis) addItemsPattern(key, pattern string) {
+ p.items["#"+key] = pattern
+ p.addPattern(key, pattern)
+}
+
+func (p *patternAnalysis) addSchemaPattern(key, pattern string) {
+ p.schemas["#"+key] = pattern
+ p.addPattern(key, pattern)
+}
+
+type enumAnalysis struct {
+ parameters map[string][]interface{}
+ headers map[string][]interface{}
+ items map[string][]interface{}
+ schemas map[string][]interface{}
+ allEnums map[string][]interface{}
+}
+
+func (p *enumAnalysis) addEnum(key string, enum []interface{}) {
+ p.allEnums["#"+key] = enum
+}
+
+func (p *enumAnalysis) addParameterEnum(key string, enum []interface{}) {
+ p.parameters["#"+key] = enum
+ p.addEnum(key, enum)
+}
+
+func (p *enumAnalysis) addHeaderEnum(key string, enum []interface{}) {
+ p.headers["#"+key] = enum
+ p.addEnum(key, enum)
+}
+
+func (p *enumAnalysis) addItemsEnum(key string, enum []interface{}) {
+ p.items["#"+key] = enum
+ p.addEnum(key, enum)
+}
+
+func (p *enumAnalysis) addSchemaEnum(key string, enum []interface{}) {
+ p.schemas["#"+key] = enum
+ p.addEnum(key, enum)
+}
+
+// New takes a swagger spec object and returns an analyzed spec document.
+// The analyzed document contains a number of indices that make it easier to
+// reason about semantics of a swagger specification for use in code generation
+// or validation etc.
+func New(doc *spec.Swagger) *Spec {
+ a := &Spec{
+ spec: doc,
+ references: referenceAnalysis{},
+ patterns: patternAnalysis{},
+ enums: enumAnalysis{},
+ }
+ a.reset()
+ a.initialize()
+
+ return a
+}
+
+// Spec is an analyzed specification object. It takes a swagger spec object and turns it into a registry
+// with a bunch of utility methods to act on the information in the spec.
+type Spec struct {
+ spec *spec.Swagger
+ consumes map[string]struct{}
+ produces map[string]struct{}
+ authSchemes map[string]struct{}
+ operations map[string]map[string]*spec.Operation
+ references referenceAnalysis
+ patterns patternAnalysis
+ enums enumAnalysis
+ allSchemas map[string]SchemaRef
+ allOfs map[string]SchemaRef
+}
+
+func (s *Spec) reset() {
+ s.consumes = make(map[string]struct{}, 150)
+ s.produces = make(map[string]struct{}, 150)
+ s.authSchemes = make(map[string]struct{}, 150)
+ s.operations = make(map[string]map[string]*spec.Operation, 150)
+ s.allSchemas = make(map[string]SchemaRef, 150)
+ s.allOfs = make(map[string]SchemaRef, 150)
+ s.references.schemas = make(map[string]spec.Ref, 150)
+ s.references.pathItems = make(map[string]spec.Ref, 150)
+ s.references.responses = make(map[string]spec.Ref, 150)
+ s.references.parameters = make(map[string]spec.Ref, 150)
+ s.references.items = make(map[string]spec.Ref, 150)
+ s.references.headerItems = make(map[string]spec.Ref, 150)
+ s.references.parameterItems = make(map[string]spec.Ref, 150)
+ s.references.allRefs = make(map[string]spec.Ref, 150)
+ s.patterns.parameters = make(map[string]string, 150)
+ s.patterns.headers = make(map[string]string, 150)
+ s.patterns.items = make(map[string]string, 150)
+ s.patterns.schemas = make(map[string]string, 150)
+ s.patterns.allPatterns = make(map[string]string, 150)
+ s.enums.parameters = make(map[string][]interface{}, 150)
+ s.enums.headers = make(map[string][]interface{}, 150)
+ s.enums.items = make(map[string][]interface{}, 150)
+ s.enums.schemas = make(map[string][]interface{}, 150)
+ s.enums.allEnums = make(map[string][]interface{}, 150)
+}
+
+func (s *Spec) reload() {
+ s.reset()
+ s.initialize()
+}
+
+func (s *Spec) initialize() {
+ for _, c := range s.spec.Consumes {
+ s.consumes[c] = struct{}{}
+ }
+ for _, c := range s.spec.Produces {
+ s.produces[c] = struct{}{}
+ }
+ for _, ss := range s.spec.Security {
+ for k := range ss {
+ s.authSchemes[k] = struct{}{}
+ }
+ }
+ for path, pathItem := range s.AllPaths() {
+ s.analyzeOperations(path, &pathItem) //#nosec
+ }
+
+ for name, parameter := range s.spec.Parameters {
+ refPref := slashpath.Join("/parameters", jsonpointer.Escape(name))
+ if parameter.Items != nil {
+ s.analyzeItems("items", parameter.Items, refPref, "parameter")
+ }
+ if parameter.In == "body" && parameter.Schema != nil {
+ s.analyzeSchema("schema", parameter.Schema, refPref)
+ }
+ if parameter.Pattern != "" {
+ s.patterns.addParameterPattern(refPref, parameter.Pattern)
+ }
+ if len(parameter.Enum) > 0 {
+ s.enums.addParameterEnum(refPref, parameter.Enum)
+ }
+ }
+
+ for name, response := range s.spec.Responses {
+ refPref := slashpath.Join("/responses", jsonpointer.Escape(name))
+ for k, v := range response.Headers {
+ hRefPref := slashpath.Join(refPref, "headers", k)
+ if v.Items != nil {
+ s.analyzeItems("items", v.Items, hRefPref, "header")
+ }
+ if v.Pattern != "" {
+ s.patterns.addHeaderPattern(hRefPref, v.Pattern)
+ }
+ if len(v.Enum) > 0 {
+ s.enums.addHeaderEnum(hRefPref, v.Enum)
+ }
+ }
+ if response.Schema != nil {
+ s.analyzeSchema("schema", response.Schema, refPref)
+ }
+ }
+
+ for name := range s.spec.Definitions {
+ schema := s.spec.Definitions[name]
+ s.analyzeSchema(name, &schema, "/definitions")
+ }
+ // TODO: after analyzing all things and flattening schemas etc
+ // resolve all the collected references to their final representations
+ // best put in a separate method because this could get expensive
+}
+
+func (s *Spec) analyzeOperations(path string, pi *spec.PathItem) {
+ // TODO: resolve refs here?
+ // Currently, operations declared via pathItem $ref are known only after expansion
+ op := pi
+ if pi.Ref.String() != "" {
+ key := slashpath.Join("/paths", jsonpointer.Escape(path))
+ s.references.addPathItemRef(key, pi)
+ }
+ s.analyzeOperation("GET", path, op.Get)
+ s.analyzeOperation("PUT", path, op.Put)
+ s.analyzeOperation("POST", path, op.Post)
+ s.analyzeOperation("PATCH", path, op.Patch)
+ s.analyzeOperation("DELETE", path, op.Delete)
+ s.analyzeOperation("HEAD", path, op.Head)
+ s.analyzeOperation("OPTIONS", path, op.Options)
+ for i, param := range op.Parameters {
+ refPref := slashpath.Join("/paths", jsonpointer.Escape(path), "parameters", strconv.Itoa(i))
+ if param.Ref.String() != "" {
+ s.references.addParamRef(refPref, &param) //#nosec
+ }
+ if param.Pattern != "" {
+ s.patterns.addParameterPattern(refPref, param.Pattern)
+ }
+ if len(param.Enum) > 0 {
+ s.enums.addParameterEnum(refPref, param.Enum)
+ }
+ if param.Items != nil {
+ s.analyzeItems("items", param.Items, refPref, "parameter")
+ }
+ if param.Schema != nil {
+ s.analyzeSchema("schema", param.Schema, refPref)
+ }
+ }
+}
+
+func (s *Spec) analyzeItems(name string, items *spec.Items, prefix, location string) {
+ if items == nil {
+ return
+ }
+ refPref := slashpath.Join(prefix, name)
+ s.analyzeItems(name, items.Items, refPref, location)
+ if items.Ref.String() != "" {
+ s.references.addItemsRef(refPref, items, location)
+ }
+ if items.Pattern != "" {
+ s.patterns.addItemsPattern(refPref, items.Pattern)
+ }
+ if len(items.Enum) > 0 {
+ s.enums.addItemsEnum(refPref, items.Enum)
+ }
+}
+
+func (s *Spec) analyzeParameter(prefix string, i int, param spec.Parameter) {
+ refPref := slashpath.Join(prefix, "parameters", strconv.Itoa(i))
+ if param.Ref.String() != "" {
+ s.references.addParamRef(refPref, &param) //#nosec
+ }
+
+ if param.Pattern != "" {
+ s.patterns.addParameterPattern(refPref, param.Pattern)
+ }
+
+ if len(param.Enum) > 0 {
+ s.enums.addParameterEnum(refPref, param.Enum)
+ }
+
+ s.analyzeItems("items", param.Items, refPref, "parameter")
+ if param.In == "body" && param.Schema != nil {
+ s.analyzeSchema("schema", param.Schema, refPref)
+ }
+}
+
+func (s *Spec) analyzeOperation(method, path string, op *spec.Operation) {
+ if op == nil {
+ return
+ }
+
+ for _, c := range op.Consumes {
+ s.consumes[c] = struct{}{}
+ }
+
+ for _, c := range op.Produces {
+ s.produces[c] = struct{}{}
+ }
+
+ for _, ss := range op.Security {
+ for k := range ss {
+ s.authSchemes[k] = struct{}{}
+ }
+ }
+
+ if _, ok := s.operations[method]; !ok {
+ s.operations[method] = make(map[string]*spec.Operation)
+ }
+
+ s.operations[method][path] = op
+ prefix := slashpath.Join("/paths", jsonpointer.Escape(path), strings.ToLower(method))
+ for i, param := range op.Parameters {
+ s.analyzeParameter(prefix, i, param)
+ }
+
+ if op.Responses == nil {
+ return
+ }
+
+ if op.Responses.Default != nil {
+ s.analyzeDefaultResponse(prefix, op.Responses.Default)
+ }
+
+ for k, res := range op.Responses.StatusCodeResponses {
+ s.analyzeResponse(prefix, k, res)
+ }
+}
+
+func (s *Spec) analyzeDefaultResponse(prefix string, res *spec.Response) {
+ refPref := slashpath.Join(prefix, "responses", "default")
+ if res.Ref.String() != "" {
+ s.references.addResponseRef(refPref, res)
+ }
+
+ for k, v := range res.Headers {
+ hRefPref := slashpath.Join(refPref, "headers", k)
+ s.analyzeItems("items", v.Items, hRefPref, "header")
+ if v.Pattern != "" {
+ s.patterns.addHeaderPattern(hRefPref, v.Pattern)
+ }
+ }
+
+ if res.Schema != nil {
+ s.analyzeSchema("schema", res.Schema, refPref)
+ }
+}
+
+func (s *Spec) analyzeResponse(prefix string, k int, res spec.Response) {
+ refPref := slashpath.Join(prefix, "responses", strconv.Itoa(k))
+ if res.Ref.String() != "" {
+ s.references.addResponseRef(refPref, &res) //#nosec
+ }
+
+ for k, v := range res.Headers {
+ hRefPref := slashpath.Join(refPref, "headers", k)
+ s.analyzeItems("items", v.Items, hRefPref, "header")
+ if v.Pattern != "" {
+ s.patterns.addHeaderPattern(hRefPref, v.Pattern)
+ }
+
+ if len(v.Enum) > 0 {
+ s.enums.addHeaderEnum(hRefPref, v.Enum)
+ }
+ }
+
+ if res.Schema != nil {
+ s.analyzeSchema("schema", res.Schema, refPref)
+ }
+}
+
+func (s *Spec) analyzeSchema(name string, schema *spec.Schema, prefix string) {
+ refURI := slashpath.Join(prefix, jsonpointer.Escape(name))
+ schRef := SchemaRef{
+ Name: name,
+ Schema: schema,
+ Ref: spec.MustCreateRef("#" + refURI),
+ TopLevel: prefix == "/definitions",
+ }
+
+ s.allSchemas["#"+refURI] = schRef
+
+ if schema.Ref.String() != "" {
+ s.references.addSchemaRef(refURI, schRef)
+ }
+
+ if schema.Pattern != "" {
+ s.patterns.addSchemaPattern(refURI, schema.Pattern)
+ }
+
+ if len(schema.Enum) > 0 {
+ s.enums.addSchemaEnum(refURI, schema.Enum)
+ }
+
+ for k, v := range schema.Definitions {
+ v := v
+ s.analyzeSchema(k, &v, slashpath.Join(refURI, "definitions"))
+ }
+
+ for k, v := range schema.Properties {
+ v := v
+ s.analyzeSchema(k, &v, slashpath.Join(refURI, "properties"))
+ }
+
+ for k, v := range schema.PatternProperties {
+ v := v
+ // NOTE: swagger 2.0 does not support PatternProperties.
+ // However it is possible to analyze this in a schema
+ s.analyzeSchema(k, &v, slashpath.Join(refURI, "patternProperties"))
+ }
+
+ for i := range schema.AllOf {
+ v := &schema.AllOf[i]
+ s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "allOf"))
+ }
+
+ if len(schema.AllOf) > 0 {
+ s.allOfs["#"+refURI] = schRef
+ }
+
+ for i := range schema.AnyOf {
+ v := &schema.AnyOf[i]
+ // NOTE: swagger 2.0 does not support anyOf constructs.
+ // However it is possible to analyze this in a schema
+ s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "anyOf"))
+ }
+
+ for i := range schema.OneOf {
+ v := &schema.OneOf[i]
+ // NOTE: swagger 2.0 does not support oneOf constructs.
+ // However it is possible to analyze this in a schema
+ s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "oneOf"))
+ }
+
+ if schema.Not != nil {
+ // NOTE: swagger 2.0 does not support "not" constructs.
+ // However it is possible to analyze this in a schema
+ s.analyzeSchema("not", schema.Not, refURI)
+ }
+
+ if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
+ s.analyzeSchema("additionalProperties", schema.AdditionalProperties.Schema, refURI)
+ }
+
+ if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil {
+ // NOTE: swagger 2.0 does not support AdditionalItems.
+ // However it is possible to analyze this in a schema
+ s.analyzeSchema("additionalItems", schema.AdditionalItems.Schema, refURI)
+ }
+
+ if schema.Items != nil {
+ if schema.Items.Schema != nil {
+ s.analyzeSchema("items", schema.Items.Schema, refURI)
+ }
+
+ for i := range schema.Items.Schemas {
+ sch := &schema.Items.Schemas[i]
+ s.analyzeSchema(strconv.Itoa(i), sch, slashpath.Join(refURI, "items"))
+ }
+ }
+}
+
+// SecurityRequirement is a representation of a security requirement for an operation
+type SecurityRequirement struct {
+ Name string
+ Scopes []string
+}
+
+// SecurityRequirementsFor gets the security requirements for the operation
+func (s *Spec) SecurityRequirementsFor(operation *spec.Operation) [][]SecurityRequirement {
+ if s.spec.Security == nil && operation.Security == nil {
+ return nil
+ }
+
+ schemes := s.spec.Security
+ if operation.Security != nil {
+ schemes = operation.Security
+ }
+
+ result := [][]SecurityRequirement{}
+ for _, scheme := range schemes {
+ if len(scheme) == 0 {
+ // append a zero object for anonymous
+ result = append(result, []SecurityRequirement{{}})
+
+ continue
+ }
+
+ var reqs []SecurityRequirement
+ for k, v := range scheme {
+ if v == nil {
+ v = []string{}
+ }
+ reqs = append(reqs, SecurityRequirement{Name: k, Scopes: v})
+ }
+
+ result = append(result, reqs)
+ }
+
+ return result
+}
+
+// SecurityDefinitionsForRequirements gets the matching security definitions for a set of requirements
+func (s *Spec) SecurityDefinitionsForRequirements(requirements []SecurityRequirement) map[string]spec.SecurityScheme {
+ result := make(map[string]spec.SecurityScheme)
+
+ for _, v := range requirements {
+ if definition, ok := s.spec.SecurityDefinitions[v.Name]; ok {
+ if definition != nil {
+ result[v.Name] = *definition
+ }
+ }
+ }
+
+ return result
+}
+
+// SecurityDefinitionsFor gets the matching security definitions for a set of requirements
+func (s *Spec) SecurityDefinitionsFor(operation *spec.Operation) map[string]spec.SecurityScheme {
+ requirements := s.SecurityRequirementsFor(operation)
+ if len(requirements) == 0 {
+ return nil
+ }
+
+ result := make(map[string]spec.SecurityScheme)
+ for _, reqs := range requirements {
+ for _, v := range reqs {
+ if v.Name == "" {
+ // optional requirement
+ continue
+ }
+
+ if _, ok := result[v.Name]; ok {
+ // duplicate requirement
+ continue
+ }
+
+ if definition, ok := s.spec.SecurityDefinitions[v.Name]; ok {
+ if definition != nil {
+ result[v.Name] = *definition
+ }
+ }
+ }
+ }
+
+ return result
+}
+
+// ConsumesFor gets the media types for the operation
+func (s *Spec) ConsumesFor(operation *spec.Operation) []string {
+ if len(operation.Consumes) == 0 {
+ cons := make(map[string]struct{}, len(s.spec.Consumes))
+ for _, k := range s.spec.Consumes {
+ cons[k] = struct{}{}
+ }
+
+ return s.structMapKeys(cons)
+ }
+
+ cons := make(map[string]struct{}, len(operation.Consumes))
+ for _, c := range operation.Consumes {
+ cons[c] = struct{}{}
+ }
+
+ return s.structMapKeys(cons)
+}
+
+// ProducesFor gets the media types for the operation
+func (s *Spec) ProducesFor(operation *spec.Operation) []string {
+ if len(operation.Produces) == 0 {
+ prod := make(map[string]struct{}, len(s.spec.Produces))
+ for _, k := range s.spec.Produces {
+ prod[k] = struct{}{}
+ }
+
+ return s.structMapKeys(prod)
+ }
+
+ prod := make(map[string]struct{}, len(operation.Produces))
+ for _, c := range operation.Produces {
+ prod[c] = struct{}{}
+ }
+
+ return s.structMapKeys(prod)
+}
+
+func mapKeyFromParam(param *spec.Parameter) string {
+ return fmt.Sprintf("%s#%s", param.In, fieldNameFromParam(param))
+}
+
+func fieldNameFromParam(param *spec.Parameter) string {
+ // TODO: this should be x-go-name
+ if nm, ok := param.Extensions.GetString("go-name"); ok {
+ return nm
+ }
+
+ return swag.ToGoName(param.Name)
+}
+
+// ErrorOnParamFunc is a callback function to be invoked
+// whenever an error is encountered while resolving references
+// on parameters.
+//
+// This function takes as input the spec.Parameter which triggered the
+// error and the error itself.
+//
+// If the callback function returns false, the calling function should bail.
+//
+// If it returns true, the calling function should continue evaluating parameters.
+// A nil ErrorOnParamFunc must be evaluated as equivalent to panic().
+type ErrorOnParamFunc func(spec.Parameter, error) bool
+
+func (s *Spec) paramsAsMap(parameters []spec.Parameter, res map[string]spec.Parameter, callmeOnError ErrorOnParamFunc) {
+ for _, param := range parameters {
+ pr := param
+ if pr.Ref.String() == "" {
+ res[mapKeyFromParam(&pr)] = pr
+
+ continue
+ }
+
+ // resolve $ref
+ if callmeOnError == nil {
+ callmeOnError = func(_ spec.Parameter, err error) bool {
+ panic(err)
+ }
+ }
+
+ obj, _, err := pr.Ref.GetPointer().Get(s.spec)
+ if err != nil {
+ if callmeOnError(param, fmt.Errorf("invalid reference: %q", pr.Ref.String())) {
+ continue
+ }
+
+ break
+ }
+
+ objAsParam, ok := obj.(spec.Parameter)
+ if !ok {
+ if callmeOnError(param, fmt.Errorf("resolved reference is not a parameter: %q", pr.Ref.String())) {
+ continue
+ }
+
+ break
+ }
+
+ pr = objAsParam
+ res[mapKeyFromParam(&pr)] = pr
+ }
+}
+
+// ParametersFor the specified operation id.
+//
+// Assumes parameters properly resolve references if any and that
+// such references actually resolve to a parameter object.
+// Otherwise, panics.
+func (s *Spec) ParametersFor(operationID string) []spec.Parameter {
+ return s.SafeParametersFor(operationID, nil)
+}
+
+// SafeParametersFor the specified operation id.
+//
+// Does not assume parameters properly resolve references or that
+// such references actually resolve to a parameter object.
+//
+// Upon error, invoke an ErrorOnParamFunc callback with the erroneous
+// parameters. If the callback is set to nil, panics upon errors.
+func (s *Spec) SafeParametersFor(operationID string, callmeOnError ErrorOnParamFunc) []spec.Parameter {
+ gatherParams := func(pi *spec.PathItem, op *spec.Operation) []spec.Parameter {
+ bag := make(map[string]spec.Parameter)
+ s.paramsAsMap(pi.Parameters, bag, callmeOnError)
+ s.paramsAsMap(op.Parameters, bag, callmeOnError)
+
+ var res []spec.Parameter
+ for _, v := range bag {
+ res = append(res, v)
+ }
+
+ return res
+ }
+
+ for _, pi := range s.spec.Paths.Paths {
+ if pi.Get != nil && pi.Get.ID == operationID {
+ return gatherParams(&pi, pi.Get) //#nosec
+ }
+ if pi.Head != nil && pi.Head.ID == operationID {
+ return gatherParams(&pi, pi.Head) //#nosec
+ }
+ if pi.Options != nil && pi.Options.ID == operationID {
+ return gatherParams(&pi, pi.Options) //#nosec
+ }
+ if pi.Post != nil && pi.Post.ID == operationID {
+ return gatherParams(&pi, pi.Post) //#nosec
+ }
+ if pi.Patch != nil && pi.Patch.ID == operationID {
+ return gatherParams(&pi, pi.Patch) //#nosec
+ }
+ if pi.Put != nil && pi.Put.ID == operationID {
+ return gatherParams(&pi, pi.Put) //#nosec
+ }
+ if pi.Delete != nil && pi.Delete.ID == operationID {
+ return gatherParams(&pi, pi.Delete) //#nosec
+ }
+ }
+
+ return nil
+}
+
+// ParamsFor the specified method and path. Aggregates them with the defaults etc, so it's all the params that
+// apply for the method and path.
+//
+// Assumes parameters properly resolve references if any and that
+// such references actually resolve to a parameter object.
+// Otherwise, panics.
+func (s *Spec) ParamsFor(method, path string) map[string]spec.Parameter {
+ return s.SafeParamsFor(method, path, nil)
+}
+
+// SafeParamsFor the specified method and path. Aggregates them with the defaults etc, so it's all the params that
+// apply for the method and path.
+//
+// Does not assume parameters properly resolve references or that
+// such references actually resolve to a parameter object.
+//
+// Upon error, invoke an ErrorOnParamFunc callback with the erroneous
+// parameters. If the callback is set to nil, panics upon errors.
+func (s *Spec) SafeParamsFor(method, path string, callmeOnError ErrorOnParamFunc) map[string]spec.Parameter {
+ res := make(map[string]spec.Parameter)
+ if pi, ok := s.spec.Paths.Paths[path]; ok {
+ s.paramsAsMap(pi.Parameters, res, callmeOnError)
+ s.paramsAsMap(s.operations[strings.ToUpper(method)][path].Parameters, res, callmeOnError)
+ }
+
+ return res
+}
+
+// OperationForName gets the operation for the given id
+func (s *Spec) OperationForName(operationID string) (string, string, *spec.Operation, bool) {
+ for method, pathItem := range s.operations {
+ for path, op := range pathItem {
+ if operationID == op.ID {
+ return method, path, op, true
+ }
+ }
+ }
+
+ return "", "", nil, false
+}
+
+// OperationFor the given method and path
+func (s *Spec) OperationFor(method, path string) (*spec.Operation, bool) {
+ if mp, ok := s.operations[strings.ToUpper(method)]; ok {
+ op, fn := mp[path]
+
+ return op, fn
+ }
+
+ return nil, false
+}
+
+// Operations gathers all the operations specified in the spec document
+func (s *Spec) Operations() map[string]map[string]*spec.Operation {
+ return s.operations
+}
+
+func (s *Spec) structMapKeys(mp map[string]struct{}) []string {
+ if len(mp) == 0 {
+ return nil
+ }
+
+ result := make([]string, 0, len(mp))
+ for k := range mp {
+ result = append(result, k)
+ }
+
+ return result
+}
+
+// AllPaths returns all the paths in the swagger spec
+func (s *Spec) AllPaths() map[string]spec.PathItem {
+ if s.spec == nil || s.spec.Paths == nil {
+ return nil
+ }
+
+ return s.spec.Paths.Paths
+}
+
+// OperationIDs gets all the operation ids based on method and path
+func (s *Spec) OperationIDs() []string {
+ if len(s.operations) == 0 {
+ return nil
+ }
+
+ result := make([]string, 0, len(s.operations))
+ for method, v := range s.operations {
+ for p, o := range v {
+ if o.ID != "" {
+ result = append(result, o.ID)
+ } else {
+ result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
+ }
+ }
+ }
+
+ return result
+}
+
+// OperationMethodPaths gets all the method/path combinations for the operations in the spec
+func (s *Spec) OperationMethodPaths() []string {
+ if len(s.operations) == 0 {
+ return nil
+ }
+
+ result := make([]string, 0, len(s.operations))
+ for method, v := range s.operations {
+ for p := range v {
+ result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
+ }
+ }
+
+ return result
+}
+
+// RequiredConsumes gets all the distinct consumes that are specified in the specification document
+func (s *Spec) RequiredConsumes() []string {
+ return s.structMapKeys(s.consumes)
+}
+
+// RequiredProduces gets all the distinct produces that are specified in the specification document
+func (s *Spec) RequiredProduces() []string {
+ return s.structMapKeys(s.produces)
+}
+
+// RequiredSecuritySchemes gets all the distinct security schemes that are specified in the swagger spec
+func (s *Spec) RequiredSecuritySchemes() []string {
+ return s.structMapKeys(s.authSchemes)
+}
+
+// SchemaRef is a reference to a schema
+type SchemaRef struct {
+ Name string
+ Ref spec.Ref
+ Schema *spec.Schema
+ TopLevel bool
+}
+
+// SchemasWithAllOf returns schema references to all schemas that are defined
+// with an allOf key
+func (s *Spec) SchemasWithAllOf() (result []SchemaRef) {
+ for _, v := range s.allOfs {
+ result = append(result, v)
+ }
+
+ return
+}
+
+// AllDefinitions returns schema references for all the definitions that were discovered
+func (s *Spec) AllDefinitions() (result []SchemaRef) {
+ for _, v := range s.allSchemas {
+ result = append(result, v)
+ }
+
+ return
+}
+
+// AllDefinitionReferences returns json refs for all the discovered schemas
+func (s *Spec) AllDefinitionReferences() (result []string) {
+ for _, v := range s.references.schemas {
+ result = append(result, v.String())
+ }
+
+ return
+}
+
+// AllParameterReferences returns json refs for all the discovered parameters
+func (s *Spec) AllParameterReferences() (result []string) {
+ for _, v := range s.references.parameters {
+ result = append(result, v.String())
+ }
+
+ return
+}
+
+// AllResponseReferences returns json refs for all the discovered responses
+func (s *Spec) AllResponseReferences() (result []string) {
+ for _, v := range s.references.responses {
+ result = append(result, v.String())
+ }
+
+ return
+}
+
+// AllPathItemReferences returns the references for all the items
+func (s *Spec) AllPathItemReferences() (result []string) {
+ for _, v := range s.references.pathItems {
+ result = append(result, v.String())
+ }
+
+ return
+}
+
+// AllItemsReferences returns the references for all the items in simple schemas (parameters or headers).
+//
+// NOTE: since Swagger 2.0 forbids $ref in simple params, this should always yield an empty slice for a valid
+// Swagger 2.0 spec.
+func (s *Spec) AllItemsReferences() (result []string) {
+ for _, v := range s.references.items {
+ result = append(result, v.String())
+ }
+
+ return
+}
+
+// AllReferences returns all the references found in the document, with possible duplicates
+func (s *Spec) AllReferences() (result []string) {
+ for _, v := range s.references.allRefs {
+ result = append(result, v.String())
+ }
+
+ return
+}
+
+// AllRefs returns all the unique references found in the document
+func (s *Spec) AllRefs() (result []spec.Ref) {
+ set := make(map[string]struct{})
+ for _, v := range s.references.allRefs {
+ a := v.String()
+ if a == "" {
+ continue
+ }
+
+ if _, ok := set[a]; !ok {
+ set[a] = struct{}{}
+ result = append(result, v)
+ }
+ }
+
+ return
+}
+
+func cloneStringMap(source map[string]string) map[string]string {
+ res := make(map[string]string, len(source))
+ for k, v := range source {
+ res[k] = v
+ }
+
+ return res
+}
+
+func cloneEnumMap(source map[string][]interface{}) map[string][]interface{} {
+ res := make(map[string][]interface{}, len(source))
+ for k, v := range source {
+ res[k] = v
+ }
+
+ return res
+}
+
+// ParameterPatterns returns all the patterns found in parameters
+// the map is cloned to avoid accidental changes
+func (s *Spec) ParameterPatterns() map[string]string {
+ return cloneStringMap(s.patterns.parameters)
+}
+
+// HeaderPatterns returns all the patterns found in response headers
+// the map is cloned to avoid accidental changes
+func (s *Spec) HeaderPatterns() map[string]string {
+ return cloneStringMap(s.patterns.headers)
+}
+
+// ItemsPatterns returns all the patterns found in simple array items
+// the map is cloned to avoid accidental changes
+func (s *Spec) ItemsPatterns() map[string]string {
+ return cloneStringMap(s.patterns.items)
+}
+
+// SchemaPatterns returns all the patterns found in schemas
+// the map is cloned to avoid accidental changes
+func (s *Spec) SchemaPatterns() map[string]string {
+ return cloneStringMap(s.patterns.schemas)
+}
+
+// AllPatterns returns all the patterns found in the spec
+// the map is cloned to avoid accidental changes
+func (s *Spec) AllPatterns() map[string]string {
+ return cloneStringMap(s.patterns.allPatterns)
+}
+
+// ParameterEnums returns all the enums found in parameters
+// the map is cloned to avoid accidental changes
+func (s *Spec) ParameterEnums() map[string][]interface{} {
+ return cloneEnumMap(s.enums.parameters)
+}
+
+// HeaderEnums returns all the enums found in response headers
+// the map is cloned to avoid accidental changes
+func (s *Spec) HeaderEnums() map[string][]interface{} {
+ return cloneEnumMap(s.enums.headers)
+}
+
+// ItemsEnums returns all the enums found in simple array items
+// the map is cloned to avoid accidental changes
+func (s *Spec) ItemsEnums() map[string][]interface{} {
+ return cloneEnumMap(s.enums.items)
+}
+
+// SchemaEnums returns all the enums found in schemas
+// the map is cloned to avoid accidental changes
+func (s *Spec) SchemaEnums() map[string][]interface{} {
+ return cloneEnumMap(s.enums.schemas)
+}
+
+// AllEnums returns all the enums found in the spec
+// the map is cloned to avoid accidental changes
+func (s *Spec) AllEnums() map[string][]interface{} {
+ return cloneEnumMap(s.enums.allEnums)
+}
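The Safe* accessors in analyzer.go take the ErrorOnParamFunc documented above, letting callers skip unresolvable $refs instead of panicking. A sketch continuing from the README example (`an` is the *analysis.Spec built there):

```go
// Gather the parameters for operation "ping", tolerating broken $refs.
params := an.SafeParametersFor("ping", func(p spec.Parameter, err error) bool {
	log.Printf("skipping parameter %q: %v", p.Name, err)
	return true // true: keep evaluating the remaining parameters
})
for _, p := range params {
	fmt.Println(p.Name, p.In)
}
```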
diff --git a/vendor/github.com/go-openapi/analysis/appveyor.yml b/vendor/github.com/go-openapi/analysis/appveyor.yml
new file mode 100644
index 000000000..c2f6fd733
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/appveyor.yml
@@ -0,0 +1,32 @@
+version: "0.1.{build}"
+
+clone_folder: C:\go-openapi\analysis
+shallow_clone: true # for startup speed
+pull_requests:
+ do_not_increment_build_number: true
+
+#skip_tags: true
+#skip_branch_with_pr: true
+
+# appveyor.yml
+build: off
+
+environment:
+ GOPATH: c:\gopath
+
+stack: go 1.16
+
+test_script:
+ - go test -v -timeout 20m ./...
+
+deploy: off
+
+notifications:
+ - provider: Slack
+ incoming_webhook: https://hooks.slack.com/services/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
+ auth_token:
+ secure: Sf7kZf7ZGbnwWUMpffHwMu5A0cHkLK2MYY32LNTPj4+/3qC3Ghl7+9v4TSLOqOlCwdRNjOGblAq7s+GDJed6/xgRQl1JtCi1klzZNrYX4q01pgTPvvGcwbBkIYgeMaPeIRcK9OZnud7sRXdttozgTOpytps2U6Js32ip7uj5mHSg2ub0FwoSJwlS6dbezZ8+eDhoha0F/guY99BEwx8Bd+zROrT2TFGsSGOFGN6wFc7moCqTHO/YkWib13a2QNXqOxCCVBy/lt76Wp+JkeFppjHlzs/2lP3EAk13RIUAaesdEUHvIHrzCyNJEd3/+KO2DzsWOYfpktd+KBCvgaYOsoo7ubdT3IROeAegZdCgo/6xgCEsmFc9ZcqCfN5yNx2A+BZ2Vwmpws+bQ1E1+B5HDzzaiLcYfG4X2O210QVGVDLWsv1jqD+uPYeHY2WRfh5ZsIUFvaqgUEnwHwrK44/8REAhQavt1QAj5uJpsRd7CkRVPWRNK+yIky+wgbVUFEchRNmS55E7QWf+W4+4QZkQi7vUTMc9nbTUu2Es9NfvfudOpM2wZbn98fjpb/qq/nRv6Bk+ca+7XD5/IgNLMbWp2ouDdzbiHLCOfDUiHiDJhLfFZx9Bwo7ZwfzeOlbrQX66bx7xRKYmOe4DLrXhNcpbsMa8qbfxlZRCmYbubB/Y8h4=
+ channel: bots
+ on_build_success: false
+ on_build_failure: true
+ on_build_status_changed: true
diff --git a/vendor/github.com/go-openapi/analysis/debug.go b/vendor/github.com/go-openapi/analysis/debug.go
new file mode 100644
index 000000000..33c15704e
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/debug.go
@@ -0,0 +1,23 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analysis
+
+import (
+ "os"
+
+ "github.com/go-openapi/analysis/internal/debug"
+)
+
+var debugLog = debug.GetLogger("analysis", os.Getenv("SWAGGER_DEBUG") != "")
diff --git a/vendor/github.com/go-openapi/analysis/doc.go b/vendor/github.com/go-openapi/analysis/doc.go
new file mode 100644
index 000000000..d5294c095
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/doc.go
@@ -0,0 +1,43 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package analysis provides methods to work with a Swagger specification document from
+package go-openapi/spec.
+
+Analyzing a specification
+
+An analysed specification object (type Spec) provides methods to work with a swagger definition.
+
+Flattening or expanding a specification
+
+Flattening a specification bundles all remote $ref in the main spec document.
+Depending on flattening options, additional preprocessing may take place:
+ - full flattening: replacing all inline complex constructs by a named entry in #/definitions
+ - expand: replace all $ref's in the document by their expanded content
+
+Merging several specifications
+
+Mixing in several specifications merges all Swagger constructs and warns about any conflicts found.
+
+Fixing a specification
+
+Unmarshalling a specification with Go's JSON unmarshalling may lead to
+unwanted results for fields that are present but empty.
+
+Analyzing a Swagger schema
+
+Swagger schemas are analyzed to determine their complexity and qualify their content.
+*/
+package analysis
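The flattening modes described in doc.go are driven through analysis.Flatten and its FlattenOpts (defined in flatten.go and flatten_options.go; the latter's body is not shown in this section). A hedged sketch; the option fields named here follow the upstream go-openapi/analysis API and should be treated as an assumption:

```go
// Minimally flatten the analyzed document in place: remote $refs are
// bundled into the root document, inline schemas are left alone.
if err := analysis.Flatten(analysis.FlattenOpts{
	Spec:     an,             // the *analysis.Spec built by analysis.New
	BasePath: "swagger.json", // where the root document was loaded from
	Minimal:  true,           // assumed field; set Expand instead to inline every $ref
}); err != nil {
	log.Fatal(err)
}
```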
diff --git a/vendor/github.com/go-openapi/analysis/fixer.go b/vendor/github.com/go-openapi/analysis/fixer.go
new file mode 100644
index 000000000..7c2ca0841
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/fixer.go
@@ -0,0 +1,79 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analysis
+
+import "github.com/go-openapi/spec"
+
+// FixEmptyResponseDescriptions replaces empty ("") response
+// descriptions in the input with "(empty)" to ensure that the
+// resulting Swagger stays valid. The problem appears to arise
+// from reading in valid specs that have an explicit response
+// description of "" (valid, response.description is required), but
+// due to zero values being omitted upon re-serializing (omitempty) we
+// lose them unless we stick some chars in there.
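+//
+// A hedged usage sketch (sw is assumed to be an unmarshalled *spec.Swagger):
+//
+//	FixEmptyResponseDescriptions(sw)
+//	// every response now has a non-empty description and survives re-serialization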
+func FixEmptyResponseDescriptions(s *spec.Swagger) {
+ for k, v := range s.Responses {
+ FixEmptyDesc(&v) //#nosec
+ s.Responses[k] = v
+ }
+
+ if s.Paths == nil {
+ return
+ }
+
+ for _, v := range s.Paths.Paths {
+ if v.Get != nil {
+ FixEmptyDescs(v.Get.Responses)
+ }
+ if v.Put != nil {
+ FixEmptyDescs(v.Put.Responses)
+ }
+ if v.Post != nil {
+ FixEmptyDescs(v.Post.Responses)
+ }
+ if v.Delete != nil {
+ FixEmptyDescs(v.Delete.Responses)
+ }
+ if v.Options != nil {
+ FixEmptyDescs(v.Options.Responses)
+ }
+ if v.Head != nil {
+ FixEmptyDescs(v.Head.Responses)
+ }
+ if v.Patch != nil {
+ FixEmptyDescs(v.Patch.Responses)
+ }
+ }
+}
+
+// FixEmptyDescs adds "(empty)" as the description for any Response in
+// the given Responses object that doesn't already have one.
+func FixEmptyDescs(rs *spec.Responses) {
+ FixEmptyDesc(rs.Default)
+ for k, v := range rs.StatusCodeResponses {
+ FixEmptyDesc(&v) //#nosec
+ rs.StatusCodeResponses[k] = v
+ }
+}
+
+// FixEmptyDesc adds "(empty)" as the description to the given
+// Response object if it doesn't already have one and isn't a
+// ref. No-op on nil input.
+func FixEmptyDesc(rs *spec.Response) {
+ if rs == nil || rs.Description != "" || rs.Ref.Ref.GetURL() != nil {
+ return
+ }
+ rs.Description = "(empty)"
+}
diff --git a/vendor/github.com/go-openapi/analysis/flatten.go b/vendor/github.com/go-openapi/analysis/flatten.go
new file mode 100644
index 000000000..0576220fb
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/flatten.go
@@ -0,0 +1,802 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analysis
+
+import (
+ "fmt"
+ "log"
+ "path"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/analysis/internal/flatten/normalize"
+ "github.com/go-openapi/analysis/internal/flatten/operations"
+ "github.com/go-openapi/analysis/internal/flatten/replace"
+ "github.com/go-openapi/analysis/internal/flatten/schutils"
+ "github.com/go-openapi/analysis/internal/flatten/sortref"
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/spec"
+)
+
+const definitionsPath = "#/definitions"
+
+// newRef stores information about refs created during the flattening process
+type newRef struct {
+ key string
+ newName string
+ path string
+ isOAIGen bool
+ resolved bool
+ schema *spec.Schema
+ parents []string
+}
+
+// context stores intermediary results from flatten
+type context struct {
+ newRefs map[string]*newRef
+ warnings []string
+ resolved map[string]string
+}
+
+func newContext() *context {
+ return &context{
+ newRefs: make(map[string]*newRef, 150),
+ warnings: make([]string, 0),
+ resolved: make(map[string]string, 50),
+ }
+}
+
+// Flatten an analyzed spec and produce a self-contained spec bundle.
+//
+// There is a minimal and a full flattening mode.
+//
+//
+// Minimally flattening a spec means:
+// - Expanding parameters, responses, path items, parameter items and header items (references to schemas are left
+// unscathed)
+// - Importing external (http, file) references so they become internal to the document
+// - Moving every JSON pointer to a $ref to a named definition (i.e. the reworked spec does not contain pointers
+// like "$ref": "#/definitions/myObject/allOfs/1")
+//
+// A minimally flattened spec thus guarantees the following properties:
+// - all $refs point to a local definition (i.e. '#/definitions/...')
+// - definitions are unique
+//
+// NOTE: arbitrary JSON pointers (other than $refs to top level definitions) are rewritten as definitions if they
+// represent a complex schema or express commonality in the spec.
+// Otherwise, they are simply expanded.
+// Self-referencing JSON pointers cannot resolve to a type and trigger an error.
+//
+//
+// Minimal flattening is necessary and sufficient for codegen rendering using go-swagger.
+//
+// Fully flattening a spec means:
+// - Moving every complex inline schema to be a definition with an auto-generated name in a depth-first fashion.
+//
+// By complex, we mean every JSON object with some properties.
+// Arrays that do not define a tuple,
+// and empty objects (with or without additionalProperties), are not considered complex and remain inline.
+//
+// NOTE: rewritten schemas get a vendor extension x-go-gen-location so we know from which part of the spec definitions
+// have been created.
+//
+// Available flattening options:
+// - Minimal: stops flattening after minimal $ref processing, leaving schema constructs untouched
+// - Expand: expand all $ref's in the document (inoperative if Minimal is set to true)
+// - Verbose: croaks about name conflicts detected
+// - RemoveUnused: removes unused parameters, responses and definitions after expansion/flattening
+//
+// NOTE: expansion removes all $ref save circular $ref, which remain in place
+//
+// TODO: additional options
+// - PropagateNameExtensions: ensure that created entries properly follow naming rules when their parent has set an
+// x-go-name extension
+// - LiftAllOfs:
+// - limit the flattening of allOf members when simple objects
+// - merge allOf with validation only
+// - merge allOf with extensions only
+// - ...
+//
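+// A minimal, hedged usage sketch (an is assumed to be the *Spec obtained from
+// New() on a loaded document; BasePath locates the root document so that
+// relative $ref can be resolved):
+//
+//	err := Flatten(FlattenOpts{
+//		Spec:         an,
+//		BasePath:     "swagger.yaml",
+//		Minimal:      true, // stop after minimal $ref processing
+//		RemoveUnused: true, // drop unused parameters, responses and definitions
+//	})
+//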
+func Flatten(opts FlattenOpts) error {
+ debugLog("FlattenOpts: %#v", opts)
+
+ opts.flattenContext = newContext()
+
+ // 1. Recursively expand responses, parameters, path items and items in simple schemas.
+ //
+ // This simplifies the spec and leaves only the $ref's in schema objects.
+ if err := expand(&opts); err != nil {
+ return err
+ }
+
+ // 2. Strip the current document from absolute $ref's that are actually in the root,
+ // so we can recognize them as proper definitions
+ //
+ // In particular, this works around issue go-openapi/spec#76: leading absolute file in $ref is stripped
+ if err := normalizeRef(&opts); err != nil {
+ return err
+ }
+
+ // 3. Optionally remove shared parameters and responses already expanded (now unused).
+ //
+ // Operation parameters (i.e. under paths) remain.
+ if opts.RemoveUnused {
+ removeUnusedShared(&opts)
+ }
+
+ // 4. Import all remote references.
+ if err := importReferences(&opts); err != nil {
+ return err
+ }
+
+ // 5. Full flattening: rewrite inline schemas (schemas that aren't simple types, arrays or maps)
+ if !opts.Minimal && !opts.Expand {
+ if err := nameInlinedSchemas(&opts); err != nil {
+ return err
+ }
+ }
+
+ // 6. Rewrite JSON pointers other than $ref to named definitions
+ // and attempt to resolve conflicting names whenever possible.
+ if err := stripPointersAndOAIGen(&opts); err != nil {
+ return err
+ }
+
+ // 7. Strip the spec from unused definitions
+ if opts.RemoveUnused {
+ removeUnused(&opts)
+ }
+
+ // 8. Issue warning notifications, if any
+ opts.croak()
+
+ // TODO: simplify known schema patterns to flat objects with properties
+ // examples:
+ // - lift simple allOf object,
+ // - empty allOf with validation only or extensions only
+ // - rework allOf arrays
+ // - rework allOf additionalProperties
+
+ return nil
+}
+
+func expand(opts *FlattenOpts) error {
+ if err := spec.ExpandSpec(opts.Swagger(), opts.ExpandOpts(!opts.Expand)); err != nil {
+ return err
+ }
+
+ opts.Spec.reload() // re-analyze
+
+ return nil
+}
+
+// normalizeRef strips the current file from any absolute file $ref. This works around issue go-openapi/spec#76:
+// leading absolute file in $ref is stripped
+func normalizeRef(opts *FlattenOpts) error {
+ debugLog("normalizeRef")
+
+ altered := false
+ for k, w := range opts.Spec.references.allRefs {
+ if !strings.HasPrefix(w.String(), opts.BasePath+definitionsPath) { // may be a mix of / and \, depending on OS
+ continue
+ }
+
+ altered = true
+ debugLog("stripping absolute path for: %s", w.String())
+
+ // strip the base path from definition
+ if err := replace.UpdateRef(opts.Swagger(), k,
+ spec.MustCreateRef(path.Join(definitionsPath, path.Base(w.String())))); err != nil {
+ return err
+ }
+ }
+
+ if altered {
+ opts.Spec.reload() // re-analyze
+ }
+
+ return nil
+}
+
+func removeUnusedShared(opts *FlattenOpts) {
+ opts.Swagger().Parameters = nil
+ opts.Swagger().Responses = nil
+
+ opts.Spec.reload() // re-analyze
+}
+
+func importReferences(opts *FlattenOpts) error {
+ var (
+ imported bool
+ err error
+ )
+
+ for !imported && err == nil {
+ // iteratively import remote references until none left.
+ // This inlining deals with name conflicts by introducing auto-generated names ("OAIGen")
+ imported, err = importExternalReferences(opts)
+
+ opts.Spec.reload() // re-analyze
+ }
+
+ return err
+}
+
+// nameInlinedSchemas replaces every complex inline construct by a named definition.
+func nameInlinedSchemas(opts *FlattenOpts) error {
+ debugLog("nameInlinedSchemas")
+
+ namer := &InlineSchemaNamer{
+ Spec: opts.Swagger(),
+ Operations: operations.AllOpRefsByRef(opts.Spec, nil),
+ flattenContext: opts.flattenContext,
+ opts: opts,
+ }
+
+ depthFirst := sortref.DepthFirst(opts.Spec.allSchemas)
+ for _, key := range depthFirst {
+ sch := opts.Spec.allSchemas[key]
+ if sch.Schema == nil || sch.Schema.Ref.String() != "" || sch.TopLevel {
+ continue
+ }
+
+ asch, err := Schema(SchemaOpts{Schema: sch.Schema, Root: opts.Swagger(), BasePath: opts.BasePath})
+ if err != nil {
+ return fmt.Errorf("schema analysis [%s]: %w", key, err)
+ }
+
+ if asch.isAnalyzedAsComplex() { // move complex schemas to definitions
+ if err := namer.Name(key, sch.Schema, asch); err != nil {
+ return err
+ }
+ }
+ }
+
+ opts.Spec.reload() // re-analyze
+
+ return nil
+}
+
+func removeUnused(opts *FlattenOpts) {
+ expected := make(map[string]struct{})
+ for k := range opts.Swagger().Definitions {
+ expected[path.Join(definitionsPath, jsonpointer.Escape(k))] = struct{}{}
+ }
+
+ for _, k := range opts.Spec.AllDefinitionReferences() {
+ delete(expected, k)
+ }
+
+ for k := range expected {
+ debugLog("removing unused definition %s", path.Base(k))
+ if opts.Verbose {
+ log.Printf("info: removing unused definition: %s", path.Base(k))
+ }
+ delete(opts.Swagger().Definitions, path.Base(k))
+ }
+
+ opts.Spec.reload() // re-analyze
+}
+
+func importKnownRef(entry sortref.RefRevIdx, refStr, newName string, opts *FlattenOpts) error {
+ // rewrite a ref pointing to an already resolved external ref (useful for cyclic refs):
+ // such external refs are rewritten to local ones
+ debugLog("resolving known ref [%s] to %s", refStr, newName)
+
+ for _, key := range entry.Keys {
+ if err := replace.UpdateRef(opts.Swagger(), key, spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func importNewRef(entry sortref.RefRevIdx, refStr string, opts *FlattenOpts) error {
+ var (
+ isOAIGen bool
+ newName string
+ )
+
+ debugLog("resolving schema from remote $ref [%s]", refStr)
+
+ sch, err := spec.ResolveRefWithBase(opts.Swagger(), &entry.Ref, opts.ExpandOpts(false))
+ if err != nil {
+ return fmt.Errorf("could not resolve schema: %w", err)
+ }
+
+ // at this stage only $ref analysis matters
+ partialAnalyzer := &Spec{
+ references: referenceAnalysis{},
+ patterns: patternAnalysis{},
+ enums: enumAnalysis{},
+ }
+ partialAnalyzer.reset()
+ partialAnalyzer.analyzeSchema("", sch, "/")
+
+ // now rewrite those refs with rebase
+ for key, ref := range partialAnalyzer.references.allRefs {
+ if err := replace.UpdateRef(sch, key, spec.MustCreateRef(normalize.RebaseRef(entry.Ref.String(), ref.String()))); err != nil {
+ return fmt.Errorf("failed to rewrite ref for key %q at %s: %w", key, entry.Ref.String(), err)
+ }
+ }
+
+ // generate a unique name - isOAIGen means that a naming conflict was resolved by changing the name
+ newName, isOAIGen = uniqifyName(opts.Swagger().Definitions, nameFromRef(entry.Ref))
+ debugLog("new name for [%s]: %s - with name conflict:%t", strings.Join(entry.Keys, ", "), newName, isOAIGen)
+
+ opts.flattenContext.resolved[refStr] = newName
+
+ // rewrite the external refs to local ones
+ for _, key := range entry.Keys {
+ if err := replace.UpdateRef(opts.Swagger(), key,
+ spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
+ return err
+ }
+
+ // keep track of created refs
+ resolved := false
+ if _, ok := opts.flattenContext.newRefs[key]; ok {
+ resolved = opts.flattenContext.newRefs[key].resolved
+ }
+
+ debugLog("keeping track of ref: %s (%s), resolved: %t", key, newName, resolved)
+ opts.flattenContext.newRefs[key] = &newRef{
+ key: key,
+ newName: newName,
+ path: path.Join(definitionsPath, newName),
+ isOAIGen: isOAIGen,
+ resolved: resolved,
+ schema: sch,
+ }
+ }
+
+ // add the resolved schema to the definitions
+ schutils.Save(opts.Swagger(), newName, sch)
+
+ return nil
+}
+
+// importExternalReferences iteratively digs remote references and imports them into the main schema.
+//
+// At every iteration, new remotes may be found when digging deeper: they are rebased to the current schema before being imported.
+//
+// This returns true when no more remote references can be found.
+func importExternalReferences(opts *FlattenOpts) (bool, error) {
+ debugLog("importExternalReferences")
+
+ groupedRefs := sortref.ReverseIndex(opts.Spec.references.schemas, opts.BasePath)
+ sortedRefStr := make([]string, 0, len(groupedRefs))
+ if opts.flattenContext == nil {
+ opts.flattenContext = newContext()
+ }
+
+ // sort $ref resolution to ensure deterministic name conflict resolution
+ for refStr := range groupedRefs {
+ sortedRefStr = append(sortedRefStr, refStr)
+ }
+ sort.Strings(sortedRefStr)
+
+ complete := true
+
+ for _, refStr := range sortedRefStr {
+ entry := groupedRefs[refStr]
+ if entry.Ref.HasFragmentOnly {
+ continue
+ }
+
+ complete = false
+
+ newName := opts.flattenContext.resolved[refStr]
+ if newName != "" {
+ if err := importKnownRef(entry, refStr, newName, opts); err != nil {
+ return false, err
+ }
+
+ continue
+ }
+
+ // resolve schemas
+ if err := importNewRef(entry, refStr, opts); err != nil {
+ return false, err
+ }
+ }
+
+ // maintains ref index entries
+ for k := range opts.flattenContext.newRefs {
+ r := opts.flattenContext.newRefs[k]
+
+ // update tracking with resolved schemas
+ if r.schema.Ref.String() != "" {
+ ref := spec.MustCreateRef(r.path)
+ sch, err := spec.ResolveRefWithBase(opts.Swagger(), &ref, opts.ExpandOpts(false))
+ if err != nil {
+ return false, fmt.Errorf("could not resolve schema: %w", err)
+ }
+
+ r.schema = sch
+ }
+
+ if r.path == k {
+ continue
+ }
+
+ // update tracking with renamed keys: got a cascade of refs
+ renamed := *r
+ renamed.key = r.path
+ opts.flattenContext.newRefs[renamed.path] = &renamed
+
+ // indirect ref
+ r.newName = path.Base(k)
+ r.schema = spec.RefSchema(r.path)
+ r.path = k
+ r.isOAIGen = strings.Contains(k, "OAIGen")
+ }
+
+ return complete, nil
+}
+
+// stripPointersAndOAIGen removes anonymous JSON pointers from spec and chain with name conflicts handler.
+// This loops until the spec has no such pointer and all name conflicts have been reduced as much as possible.
+func stripPointersAndOAIGen(opts *FlattenOpts) error {
+ // name all JSON pointers to anonymous documents
+ if err := namePointers(opts); err != nil {
+ return err
+ }
+
+ // remove unnecessary OAIGen ref (created when flattening external refs creates name conflicts)
+ hasIntroducedPointerOrInline, ers := stripOAIGen(opts)
+ if ers != nil {
+ return ers
+ }
+
+ // iterate as pointer or OAIGen resolution may introduce inline schemas or pointers
+ for hasIntroducedPointerOrInline {
+ if !opts.Minimal {
+ opts.Spec.reload() // re-analyze
+ if err := nameInlinedSchemas(opts); err != nil {
+ return err
+ }
+ }
+
+ if err := namePointers(opts); err != nil {
+ return err
+ }
+
+ // restrip and re-analyze
+ var err error
+ if hasIntroducedPointerOrInline, err = stripOAIGen(opts); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// stripOAIGen strips the spec from unnecessary OAIGen constructs, initially created to dedupe flattened definitions.
+//
+// A dedupe is deemed unnecessary whenever:
+// - the only conflict is with its (single) parent: OAIGen is merged into its parent (reinlining)
+// - there is a conflict with multiple parents: merge OAIGen into the first parent, then rewrite the other parents
+// to point to the first parent.
+//
+// This function returns true whenever it re-inlined a complex schema, so the caller may choose to iterate
+// pointer and name resolution again.
+func stripOAIGen(opts *FlattenOpts) (bool, error) {
+ debugLog("stripOAIGen")
+ replacedWithComplex := false
+
+ // figure out referrers of OAIGen definitions (doing this before the refs start mutating)
+ for _, r := range opts.flattenContext.newRefs {
+ updateRefParents(opts.Spec.references.allRefs, r)
+ }
+
+ for k := range opts.flattenContext.newRefs {
+ r := opts.flattenContext.newRefs[k]
+ debugLog("newRefs[%s]: isOAIGen: %t, resolved: %t, name: %s, path:%s, #parents: %d, parents: %v, ref: %s",
+ k, r.isOAIGen, r.resolved, r.newName, r.path, len(r.parents), r.parents, r.schema.Ref.String())
+
+ if !r.isOAIGen || len(r.parents) == 0 {
+ continue
+ }
+
+ hasReplacedWithComplex, err := stripOAIGenForRef(opts, k, r)
+ if err != nil {
+ return replacedWithComplex, err
+ }
+
+ replacedWithComplex = replacedWithComplex || hasReplacedWithComplex
+ }
+
+ debugLog("replacedWithComplex: %t", replacedWithComplex)
+ opts.Spec.reload() // re-analyze
+
+ return replacedWithComplex, nil
+}
+
+// updateRefParents updates all parents of an updated $ref
+func updateRefParents(allRefs map[string]spec.Ref, r *newRef) {
+ if !r.isOAIGen || r.resolved { // bail on already resolved entries (avoid looping)
+ return
+ }
+ for k, v := range allRefs {
+ if r.path != v.String() {
+ continue
+ }
+
+ found := false
+ for _, p := range r.parents {
+ if p == k {
+ found = true
+
+ break
+ }
+ }
+ if !found {
+ r.parents = append(r.parents, k)
+ }
+ }
+}
+
+func stripOAIGenForRef(opts *FlattenOpts, k string, r *newRef) (bool, error) {
+ replacedWithComplex := false
+
+ pr := sortref.TopmostFirst(r.parents)
+
+ // rewrite first parent schema in hierarchical then lexicographical order
+ debugLog("rewrite first parent %s with schema", pr[0])
+ if err := replace.UpdateRefWithSchema(opts.Swagger(), pr[0], r.schema); err != nil {
+ return false, err
+ }
+
+ if pa, ok := opts.flattenContext.newRefs[pr[0]]; ok && pa.isOAIGen {
+ // update parent in ref index entry
+ debugLog("update parent entry: %s", pr[0])
+ pa.schema = r.schema
+ pa.resolved = false
+ replacedWithComplex = true
+ }
+
+ // rewrite other parents to point to first parent
+ if len(pr) > 1 {
+ for _, p := range pr[1:] {
+ replacingRef := spec.MustCreateRef(pr[0])
+
+ // set complex when replacing ref is an anonymous jsonpointer: further processing may be required
+ replacedWithComplex = replacedWithComplex || path.Dir(replacingRef.String()) != definitionsPath
+ debugLog("rewrite parent with ref: %s", replacingRef.String())
+
+ // NOTE: it is possible at this stage to introduce json pointers (to non-definitions places).
+ // Those are stripped later on.
+ if err := replace.UpdateRef(opts.Swagger(), p, replacingRef); err != nil {
+ return false, err
+ }
+
+ if pa, ok := opts.flattenContext.newRefs[p]; ok && pa.isOAIGen {
+ // update parent in ref index
+ debugLog("update parent entry: %s", p)
+ pa.schema = r.schema
+ pa.resolved = false
+ replacedWithComplex = true
+ }
+ }
+ }
+
+ // remove OAIGen definition
+ debugLog("removing definition %s", path.Base(r.path))
+ delete(opts.Swagger().Definitions, path.Base(r.path))
+
+ // propagate changes in ref index for keys which have this one as a parent
+ for kk, value := range opts.flattenContext.newRefs {
+ if kk == k || !value.isOAIGen || value.resolved {
+ continue
+ }
+
+ found := false
+ newParents := make([]string, 0, len(value.parents))
+ for _, parent := range value.parents {
+ switch {
+ case parent == r.path:
+ found = true
+ parent = pr[0]
+ case strings.HasPrefix(parent, r.path+"/"):
+ found = true
+ parent = path.Join(pr[0], strings.TrimPrefix(parent, r.path))
+ }
+
+ newParents = append(newParents, parent)
+ }
+
+ if found {
+ value.parents = newParents
+ }
+ }
+
+ // mark naming conflict as resolved
+ debugLog("marking naming conflict resolved for key: %s", r.key)
+ opts.flattenContext.newRefs[r.key].isOAIGen = false
+ opts.flattenContext.newRefs[r.key].resolved = true
+
+ // determine if the previous substitution did inline a complex schema
+ if r.schema != nil && r.schema.Ref.String() == "" { // inline schema
+ asch, err := Schema(SchemaOpts{Schema: r.schema, Root: opts.Swagger(), BasePath: opts.BasePath})
+ if err != nil {
+ return false, err
+ }
+
+ debugLog("re-inlined schema: parent: %s, %t", pr[0], asch.isAnalyzedAsComplex())
+ replacedWithComplex = replacedWithComplex || !(path.Dir(pr[0]) == definitionsPath) && asch.isAnalyzedAsComplex()
+ }
+
+ return replacedWithComplex, nil
+}
+
+// namePointers replaces every JSON pointer to an anonymous document by a $ref to a new named definition.
+//
+// This is carried on depth-first. Pointers to $refs which are top level definitions are replaced by the $ref itself.
+// Pointers to simple types are expanded, unless they express commonality (i.e. several such $ref are used).
+func namePointers(opts *FlattenOpts) error {
+ debugLog("name pointers")
+
+ refsToReplace := make(map[string]SchemaRef, len(opts.Spec.references.schemas))
+ for k, ref := range opts.Spec.references.allRefs {
+ if path.Dir(ref.String()) == definitionsPath {
+ // this is a ref to a top-level definition: ok
+ continue
+ }
+
+ result, err := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), ref)
+ if err != nil {
+ return fmt.Errorf("at %s, %w", k, err)
+ }
+
+ replacingRef := result.Ref
+ sch := result.Schema
+ if opts.flattenContext != nil {
+ opts.flattenContext.warnings = append(opts.flattenContext.warnings, result.Warnings...)
+ }
+
+ debugLog("planning pointer to replace at %s: %s, resolved to: %s", k, ref.String(), replacingRef.String())
+ refsToReplace[k] = SchemaRef{
+ Name: k, // caller
+ Ref: replacingRef, // called
+ Schema: sch,
+ TopLevel: path.Dir(replacingRef.String()) == definitionsPath,
+ }
+ }
+
+ depthFirst := sortref.DepthFirst(refsToReplace)
+ namer := &InlineSchemaNamer{
+ Spec: opts.Swagger(),
+ Operations: operations.AllOpRefsByRef(opts.Spec, nil),
+ flattenContext: opts.flattenContext,
+ opts: opts,
+ }
+
+ for _, key := range depthFirst {
+ v := refsToReplace[key]
+ // update current replacement, which may have been updated by previous changes of deeper elements
+ result, erd := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), v.Ref)
+ if erd != nil {
+ return fmt.Errorf("at %s, %w", key, erd)
+ }
+
+ if opts.flattenContext != nil {
+ opts.flattenContext.warnings = append(opts.flattenContext.warnings, result.Warnings...)
+ }
+
+ v.Ref = result.Ref
+ v.Schema = result.Schema
+ v.TopLevel = path.Dir(result.Ref.String()) == definitionsPath
+ debugLog("replacing pointer at %s: resolved to: %s", key, v.Ref.String())
+
+ if v.TopLevel {
+ debugLog("replace pointer %s by canonical definition: %s", key, v.Ref.String())
+
+ // if the schema is a $ref to a top level definition, just rewrite the pointer to this $ref
+ if err := replace.UpdateRef(opts.Swagger(), key, v.Ref); err != nil {
+ return err
+ }
+
+ continue
+ }
+
+ if err := flattenAnonPointer(key, v, refsToReplace, namer, opts); err != nil {
+ return err
+ }
+ }
+
+ opts.Spec.reload() // re-analyze
+
+ return nil
+}
+
+func flattenAnonPointer(key string, v SchemaRef, refsToReplace map[string]SchemaRef, namer *InlineSchemaNamer, opts *FlattenOpts) error {
+ // this is a JSON pointer to an anonymous document (internal or external):
+ // create a definition for this schema when:
+ // - it is a complex schema
+ // - or it is pointed by more than one $ref (i.e. expresses commonality)
+ // otherwise, expand the pointer (single reference to a simple type)
+ //
+ // The named definition for this follows the target's key, not the caller's
+ debugLog("namePointers at %s for %s", key, v.Ref.String())
+
+ // qualify the expanded schema
+ asch, ers := Schema(SchemaOpts{Schema: v.Schema, Root: opts.Swagger(), BasePath: opts.BasePath})
+ if ers != nil {
+ return fmt.Errorf("schema analysis [%s]: %w", key, ers)
+ }
+ callers := make([]string, 0, 64)
+
+ debugLog("looking for callers")
+
+ an := New(opts.Swagger())
+ for k, w := range an.references.allRefs {
+ r, err := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), w)
+ if err != nil {
+ return fmt.Errorf("at %s, %w", key, err)
+ }
+
+ if opts.flattenContext != nil {
+ opts.flattenContext.warnings = append(opts.flattenContext.warnings, r.Warnings...)
+ }
+
+ if r.Ref.String() == v.Ref.String() {
+ callers = append(callers, k)
+ }
+ }
+
+ debugLog("callers for %s: %d", v.Ref.String(), len(callers))
+ if len(callers) == 0 {
+ // has already been updated and resolved
+ return nil
+ }
+
+ parts := sortref.KeyParts(v.Ref.String())
+ debugLog("number of callers for %s: %d", v.Ref.String(), len(callers))
+
+ // identify the edge case when the namer did nothing because we point to a non-schema object:
+ // no definition is created and we expand the $ref for all callers
+ if (!asch.IsSimpleSchema || len(callers) > 1) && !parts.IsSharedParam() && !parts.IsSharedResponse() {
+ debugLog("replace JSON pointer at [%s] by definition: %s", key, v.Ref.String())
+ if err := namer.Name(v.Ref.String(), v.Schema, asch); err != nil {
+ return err
+ }
+
+ // regular case: we named the $ref as a definition, and we move all callers to this new $ref
+ for _, caller := range callers {
+ if caller == key {
+ continue
+ }
+
+ // move $ref for next to resolve
+ debugLog("identified caller of %s at [%s]", v.Ref.String(), caller)
+ c := refsToReplace[caller]
+ c.Ref = v.Ref
+ refsToReplace[caller] = c
+ }
+
+ return nil
+ }
+
+ debugLog("expand JSON pointer for key=%s", key)
+
+ if err := replace.UpdateRefWithSchema(opts.Swagger(), key, v.Schema); err != nil {
+ return err
+ }
+ // NOTE: there is no other caller to update
+
+ return nil
+}
diff --git a/vendor/github.com/go-openapi/analysis/flatten_name.go b/vendor/github.com/go-openapi/analysis/flatten_name.go
new file mode 100644
index 000000000..3ad2ccfbf
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/flatten_name.go
@@ -0,0 +1,293 @@
+package analysis
+
+import (
+ "fmt"
+ "path"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/analysis/internal/flatten/operations"
+ "github.com/go-openapi/analysis/internal/flatten/replace"
+ "github.com/go-openapi/analysis/internal/flatten/schutils"
+ "github.com/go-openapi/analysis/internal/flatten/sortref"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+// InlineSchemaNamer finds a new name for an inlined type
+type InlineSchemaNamer struct {
+ Spec *spec.Swagger
+ Operations map[string]operations.OpRef
+ flattenContext *context
+ opts *FlattenOpts
+}
+
+// Name yields a new name for the inline schema
+func (isn *InlineSchemaNamer) Name(key string, schema *spec.Schema, aschema *AnalyzedSchema) error {
+ debugLog("naming inlined schema at %s", key)
+
+ parts := sortref.KeyParts(key)
+ for _, name := range namesFromKey(parts, aschema, isn.Operations) {
+ if name == "" {
+ continue
+ }
+
+ // create unique name
+ newName, isOAIGen := uniqifyName(isn.Spec.Definitions, swag.ToJSONName(name))
+
+ // clone schema
+ sch := schutils.Clone(schema)
+
+ // replace values on schema
+ if err := replace.RewriteSchemaToRef(isn.Spec, key,
+ spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
+ return fmt.Errorf("error while creating definition %q from inline schema: %w", newName, err)
+ }
+
+ // rewrite any dependent $ref pointing to this place,
+ // when not already pointing to a top-level definition.
+ //
+ // NOTE: this is important if such referers use arbitrary JSON pointers.
+ an := New(isn.Spec)
+ for k, v := range an.references.allRefs {
+ r, erd := replace.DeepestRef(isn.opts.Swagger(), isn.opts.ExpandOpts(false), v)
+ if erd != nil {
+ return fmt.Errorf("at %s, %w", k, erd)
+ }
+
+ if isn.opts.flattenContext != nil {
+ isn.opts.flattenContext.warnings = append(isn.opts.flattenContext.warnings, r.Warnings...)
+ }
+
+ if r.Ref.String() != key && (r.Ref.String() != path.Join(definitionsPath, newName) || path.Dir(v.String()) == definitionsPath) {
+ continue
+ }
+
+ debugLog("found a $ref to a rewritten schema: %s points to %s", k, v.String())
+
+ // rewrite $ref to the new target
+ if err := replace.UpdateRef(isn.Spec, k,
+ spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
+ return err
+ }
+ }
+
+ // NOTE: this extension is currently not used by go-swagger (provided for information only)
+ sch.AddExtension("x-go-gen-location", GenLocation(parts))
+
+ // save cloned schema to definitions
+ schutils.Save(isn.Spec, newName, sch)
+
+ // keep track of created refs
+ if isn.flattenContext == nil {
+ continue
+ }
+
+ debugLog("track created ref: key=%s, newName=%s, isOAIGen=%t", key, newName, isOAIGen)
+ resolved := false
+
+ if _, ok := isn.flattenContext.newRefs[key]; ok {
+ resolved = isn.flattenContext.newRefs[key].resolved
+ }
+
+ isn.flattenContext.newRefs[key] = &newRef{
+ key: key,
+ newName: newName,
+ path: path.Join(definitionsPath, newName),
+ isOAIGen: isOAIGen,
+ resolved: resolved,
+ schema: sch,
+ }
+ }
+
+ return nil
+}
+
+// uniqifyName yields a unique name for a definition
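+//
+// For illustration (hedged, per the logic below): if "myType" already exists in
+// definitions, uniqifyName(definitions, "myType") returns ("myTypeOAIGen", true);
+// with no conflict, the input name is returned unchanged with false.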
+func uniqifyName(definitions spec.Definitions, name string) (string, bool) {
+ isOAIGen := false
+ if name == "" {
+ name = "oaiGen"
+ isOAIGen = true
+ }
+
+ if len(definitions) == 0 {
+ return name, isOAIGen
+ }
+
+ unq := true
+ for k := range definitions {
+ if strings.EqualFold(k, name) {
+ unq = false
+
+ break
+ }
+ }
+
+ if unq {
+ return name, isOAIGen
+ }
+
+ name += "OAIGen"
+ isOAIGen = true
+ var idx int
+ unique := name
+ _, known := definitions[unique]
+
+ for known {
+ idx++
+ unique = fmt.Sprintf("%s%d", name, idx)
+ _, known = definitions[unique]
+ }
+
+ return unique, isOAIGen
+}
+
+func namesFromKey(parts sortref.SplitKey, aschema *AnalyzedSchema, operations map[string]operations.OpRef) []string {
+ var (
+ baseNames [][]string
+ startIndex int
+ )
+
+ if parts.IsOperation() {
+ baseNames, startIndex = namesForOperation(parts, operations)
+ }
+
+ // definitions
+ if parts.IsDefinition() {
+ baseNames, startIndex = namesForDefinition(parts)
+ }
+
+ result := make([]string, 0, len(baseNames))
+ for _, segments := range baseNames {
+ nm := parts.BuildName(segments, startIndex, partAdder(aschema))
+ if nm == "" {
+ continue
+ }
+
+ result = append(result, nm)
+ }
+ sort.Strings(result)
+
+ return result
+}
+
+func namesForParam(parts sortref.SplitKey, operations map[string]operations.OpRef) ([][]string, int) {
+ var (
+ baseNames [][]string
+ startIndex int
+ )
+
+ piref := parts.PathItemRef()
+ if piref.String() != "" && parts.IsOperationParam() {
+ if op, ok := operations[piref.String()]; ok {
+ startIndex = 5
+ baseNames = append(baseNames, []string{op.ID, "params", "body"})
+ }
+ } else if parts.IsSharedOperationParam() {
+ pref := parts.PathRef()
+ for k, v := range operations {
+ if strings.HasPrefix(k, pref.String()) {
+ startIndex = 4
+ baseNames = append(baseNames, []string{v.ID, "params", "body"})
+ }
+ }
+ }
+
+ return baseNames, startIndex
+}
+
+func namesForOperation(parts sortref.SplitKey, operations map[string]operations.OpRef) ([][]string, int) {
+ var (
+ baseNames [][]string
+ startIndex int
+ )
+
+ // params
+ if parts.IsOperationParam() || parts.IsSharedOperationParam() {
+ baseNames, startIndex = namesForParam(parts, operations)
+ }
+
+ // responses
+ if parts.IsOperationResponse() {
+ piref := parts.PathItemRef()
+ if piref.String() != "" {
+ if op, ok := operations[piref.String()]; ok {
+ startIndex = 6
+ baseNames = append(baseNames, []string{op.ID, parts.ResponseName(), "body"})
+ }
+ }
+ }
+
+ return baseNames, startIndex
+}
+
+func namesForDefinition(parts sortref.SplitKey) ([][]string, int) {
+ nm := parts.DefinitionName()
+ if nm != "" {
+ return [][]string{{parts.DefinitionName()}}, 2
+ }
+
+ return [][]string{}, 0
+}
+
+// partAdder knows how to interpret a schema when it comes to building a name from parts
+func partAdder(aschema *AnalyzedSchema) sortref.PartAdder {
+ return func(part string) []string {
+ segments := make([]string, 0, 2)
+
+ if part == "items" || part == "additionalItems" {
+ if aschema.IsTuple || aschema.IsTupleWithExtra {
+ segments = append(segments, "tuple")
+ } else {
+ segments = append(segments, "items")
+ }
+
+ if part == "additionalItems" {
+ segments = append(segments, part)
+ }
+
+ return segments
+ }
+
+ segments = append(segments, part)
+
+ return segments
+ }
+}
+
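+// nameFromRef derives a JSON-friendly name from a $ref.
+// For example (hedged, per the logic below): a file ref like "./folder/user.json"
+// yields "user", while a fragment ref like "#/definitions/UserRecord" yields "userRecord".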
+func nameFromRef(ref spec.Ref) string {
+ u := ref.GetURL()
+ if u.Fragment != "" {
+ return swag.ToJSONName(path.Base(u.Fragment))
+ }
+
+ if u.Path != "" {
+ bn := path.Base(u.Path)
+ if bn != "" && bn != "/" {
+ ext := path.Ext(bn)
+ if ext != "" {
+ return swag.ToJSONName(bn[:len(bn)-len(ext)])
+ }
+
+ return swag.ToJSONName(bn)
+ }
+ }
+
+ return swag.ToJSONName(strings.ReplaceAll(u.Host, ".", " "))
+}
+
+// GenLocation indicates from which section of the specification (models or operations) a definition has been created.
+//
+// This is reflected in the output spec with a "x-go-gen-location" extension. At the moment, this is provided
+// for information only.
+func GenLocation(parts sortref.SplitKey) string {
+ switch {
+ case parts.IsOperation():
+ return "operations"
+ case parts.IsDefinition():
+ return "models"
+ default:
+ return ""
+ }
+}
diff --git a/vendor/github.com/go-openapi/analysis/flatten_options.go b/vendor/github.com/go-openapi/analysis/flatten_options.go
new file mode 100644
index 000000000..c5bb97b0a
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/flatten_options.go
@@ -0,0 +1,78 @@
+package analysis
+
+import (
+ "log"
+
+ "github.com/go-openapi/spec"
+)
+
+// FlattenOpts configuration for flattening a swagger specification.
+//
+// The BasePath parameter is used to locate remote relative $ref found in the specification.
+// This path is a file: it points to the location of the root document and may be either a local
+// file path or a URL.
+//
+// If none is specified, relative references (e.g. "$ref": "folder/schema.yaml#/definitions/...")
+// found in the spec are searched from the current working directory.
+type FlattenOpts struct {
+ Spec *Spec // The analyzed spec to work with
+ flattenContext *context // Internal context to track flattening activity
+
+ BasePath string // The location of the root document for this spec to resolve relative $ref
+
+ // Flattening options
+ Expand bool // When true, skip flattening the spec and expand it instead (if Minimal is false)
+ Minimal bool // When true, do not decompose complex structures such as allOf
+ Verbose bool // enable some reporting on possible name conflicts detected
+ RemoveUnused bool // When true, remove unused parameters, responses and definitions after expansion/flattening
+ ContinueOnError bool // Continue when spec expansion issues are found
+
+ /* Extra keys */
+ _ struct{} // require keys
+}
+
+// ExpandOpts creates a spec.ExpandOptions to configure expanding a specification document.
+func (f *FlattenOpts) ExpandOpts(skipSchemas bool) *spec.ExpandOptions {
+ return &spec.ExpandOptions{
+ RelativeBase: f.BasePath,
+ SkipSchemas: skipSchemas,
+ ContinueOnError: f.ContinueOnError,
+ }
+}
+
+// Swagger gets the swagger specification for this flatten operation
+func (f *FlattenOpts) Swagger() *spec.Swagger {
+ return f.Spec.spec
+}
+
+// croak logs notifications and warnings about valid, but possibly unwanted constructs resulting
+// from flattening a spec
+func (f *FlattenOpts) croak() {
+ if !f.Verbose {
+ return
+ }
+
+ reported := make(map[string]bool, len(f.flattenContext.newRefs))
+ for _, v := range f.Spec.references.allRefs {
+ // warns about duplicate handling
+ for _, r := range f.flattenContext.newRefs {
+ if r.isOAIGen && r.path == v.String() {
+ reported[r.newName] = true
+ }
+ }
+ }
+
+ for k := range reported {
+ log.Printf("warning: duplicate flattened definition name resolved as %s", k)
+ }
+
+ // warns about possible type mismatches
+ uniqueMsg := make(map[string]bool)
+ for _, msg := range f.flattenContext.warnings {
+ if _, ok := uniqueMsg[msg]; ok {
+ continue
+ }
+ log.Printf("warning: %s", msg)
+ uniqueMsg[msg] = true
+ }
+}
diff --git a/vendor/github.com/go-openapi/analysis/internal/debug/debug.go b/vendor/github.com/go-openapi/analysis/internal/debug/debug.go
new file mode 100644
index 000000000..ec0fec022
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/internal/debug/debug.go
@@ -0,0 +1,41 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package debug
+
+import (
+ "fmt"
+ "log"
+ "os"
+ "path/filepath"
+ "runtime"
+)
+
+var (
+ output = os.Stdout
+)
+
+// GetLogger provides a prefix debug logger
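+//
+// Typical (hedged) usage from a client package, gating debug output on an
+// environment variable:
+//
+//	var debugLog = debug.GetLogger("analysis", os.Getenv("SWAGGER_DEBUG") != "")
+//	debugLog("loaded %d refs", n) // no-op when the debug flag is false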
+func GetLogger(prefix string, debug bool) func(string, ...interface{}) {
+ if debug {
+ logger := log.New(output, fmt.Sprintf("%s:", prefix), log.LstdFlags)
+
+ return func(msg string, args ...interface{}) {
+ _, file1, pos1, _ := runtime.Caller(1)
+ logger.Printf("%s:%d: %s", filepath.Base(file1), pos1, fmt.Sprintf(msg, args...))
+ }
+ }
+
+ return func(msg string, args ...interface{}) {}
+}
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/normalize/normalize.go b/vendor/github.com/go-openapi/analysis/internal/flatten/normalize/normalize.go
new file mode 100644
index 000000000..8c9df0580
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/internal/flatten/normalize/normalize.go
@@ -0,0 +1,87 @@
+package normalize
+
+import (
+ "net/url"
+ "path"
+ "path/filepath"
+ "strings"
+
+ "github.com/go-openapi/spec"
+)
+
+// RebaseRef rebases a remote ref relative to a base ref.
+//
+// NOTE: does not support JSONschema ID for $ref (we assume we are working with swagger specs here).
+//
+// NOTE(windows):
+// * refs are assumed to have been normalized with drive letter lower cased (from go-openapi/spec)
+// * "/" in paths may appear as escape sequences
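+//
+// For example (hedged, per the logic below): rebasing ref "other.yaml#/definitions/B"
+// against base "folder/spec.yaml" yields "folder/other.yaml#/definitions/B";
+// fragment-only refs such as "#/definitions/A" are attached to the base document.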
+func RebaseRef(baseRef string, ref string) string {
+ baseRef, _ = url.PathUnescape(baseRef)
+ ref, _ = url.PathUnescape(ref)
+
+ if baseRef == "" || baseRef == "." || strings.HasPrefix(baseRef, "#") {
+ return ref
+ }
+
+ parts := strings.Split(ref, "#")
+
+ baseParts := strings.Split(baseRef, "#")
+ baseURL, _ := url.Parse(baseParts[0])
+ if strings.HasPrefix(ref, "#") {
+ // fragment-only $ref: attach the fragment to the base document
+ // (the result is the same whether or not the base has a host part)
+ return strings.Join([]string{baseParts[0], parts[1]}, "#")
+ }
+
+ refURL, _ := url.Parse(parts[0])
+ if refURL.Host != "" || filepath.IsAbs(parts[0]) {
+ // not rebasing an absolute path
+ return ref
+ }
+
+ // there is a relative path
+ var basePath string
+ if baseURL.Host != "" {
+ // when there is a host, standard URI rules apply (with "/")
+ baseURL.Path = path.Dir(baseURL.Path)
+ baseURL.Path = path.Join(baseURL.Path, "/"+parts[0])
+
+ return baseURL.String()
+ }
+
+ // this is a local relative path
+ // basePart[0] and parts[0] are local filesystem directories/files
+ basePath = filepath.Dir(baseParts[0])
+ relPath := filepath.Join(basePath, string(filepath.Separator)+parts[0])
+ if len(parts) > 1 {
+ return strings.Join([]string{relPath, parts[1]}, "#")
+ }
+
+ return relPath
+}
+
+// Path renders absolute path on remote file refs
+//
+// NOTE(windows):
+// * refs are assumed to have been normalized with drive letter lower cased (from go-openapi/spec)
+// * "/" in paths may appear as escape sequences
+func Path(ref spec.Ref, basePath string) string {
+ uri, _ := url.PathUnescape(ref.String())
+ if ref.HasFragmentOnly || filepath.IsAbs(uri) {
+ return uri
+ }
+
+ refURL, _ := url.Parse(uri)
+ if refURL.Host != "" {
+ return uri
+ }
+
+ parts := strings.Split(uri, "#")
+ // BasePath, parts[0] are local filesystem directories, guaranteed to be absolute at this stage
+ parts[0] = filepath.Join(filepath.Dir(basePath), parts[0])
+
+ return strings.Join(parts, "#")
+}
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/operations/operations.go b/vendor/github.com/go-openapi/analysis/internal/flatten/operations/operations.go
new file mode 100644
index 000000000..7f3a2b871
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/internal/flatten/operations/operations.go
@@ -0,0 +1,90 @@
+package operations
+
+import (
+ "path"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+// AllOpRefsByRef returns an index of sortable operations
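+//
+// A hedged sketch: an analyzed Spec satisfies Provider, and the returned map is
+// keyed by operation $ref strings of the form "#" + /paths/{escaped path}/{method},
+// e.g. "#/paths/~1pets/GET" ("/" is escaped as "~1" per JSON pointer rules).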
+func AllOpRefsByRef(specDoc Provider, operationIDs []string) map[string]OpRef {
+ return OpRefsByRef(GatherOperations(specDoc, operationIDs))
+}
+
+// OpRefsByRef indexes a map of sortable operations
+func OpRefsByRef(oprefs map[string]OpRef) map[string]OpRef {
+ result := make(map[string]OpRef, len(oprefs))
+ for _, v := range oprefs {
+ result[v.Ref.String()] = v
+ }
+
+ return result
+}
+
+// OpRef is an indexable, sortable operation
+type OpRef struct {
+ Method string
+ Path string
+ Key string
+ ID string
+ Op *spec.Operation
+ Ref spec.Ref
+}
+
+// OpRefs is a sortable collection of operations
+type OpRefs []OpRef
+
+func (o OpRefs) Len() int { return len(o) }
+func (o OpRefs) Swap(i, j int) { o[i], o[j] = o[j], o[i] }
+func (o OpRefs) Less(i, j int) bool { return o[i].Key < o[j].Key }
+
+// Provider knows how to collect operations from a spec
+type Provider interface {
+ Operations() map[string]map[string]*spec.Operation
+}
+
+// GatherOperations builds a map of sorted operations from a spec
+func GatherOperations(specDoc Provider, operationIDs []string) map[string]OpRef {
+ var oprefs OpRefs
+
+ for method, pathItem := range specDoc.Operations() {
+ for pth, operation := range pathItem {
+ vv := *operation
+ oprefs = append(oprefs, OpRef{
+ Key: swag.ToGoName(strings.ToLower(method) + " " + pth),
+ Method: method,
+ Path: pth,
+ ID: vv.ID,
+ Op: &vv,
+ Ref: spec.MustCreateRef("#" + path.Join("/paths", jsonpointer.Escape(pth), method)),
+ })
+ }
+ }
+
+ sort.Sort(oprefs)
+
+ operations := make(map[string]OpRef)
+ for _, opr := range oprefs {
+ nm := opr.ID
+ if nm == "" {
+ nm = opr.Key
+ }
+
+ oo, found := operations[nm]
+ if found && oo.Method != opr.Method && oo.Path != opr.Path {
+ nm = opr.Key
+ }
+
+ if len(operationIDs) == 0 || swag.ContainsStrings(operationIDs, opr.ID) || swag.ContainsStrings(operationIDs, nm) {
+ opr.ID = nm
+ opr.Op.ID = nm
+ operations[nm] = opr
+ }
+ }
+
+ return operations
+}
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go b/vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go
new file mode 100644
index 000000000..26c2a05a3
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/internal/flatten/replace/replace.go
@@ -0,0 +1,434 @@
+package replace
+
+import (
+ "fmt"
+ "net/url"
+ "os"
+ "path"
+ "strconv"
+
+ "github.com/go-openapi/analysis/internal/debug"
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/spec"
+)
+
+const definitionsPath = "#/definitions"
+
+var debugLog = debug.GetLogger("analysis/flatten/replace", os.Getenv("SWAGGER_DEBUG") != "")
+
+// RewriteSchemaToRef replaces a schema with a Ref
+func RewriteSchemaToRef(sp *spec.Swagger, key string, ref spec.Ref) error {
+ debugLog("rewriting schema to ref for %s with %s", key, ref.String())
+ _, value, err := getPointerFromKey(sp, key)
+ if err != nil {
+ return err
+ }
+
+ switch refable := value.(type) {
+ case *spec.Schema:
+ return rewriteParentRef(sp, key, ref)
+
+ case spec.Schema:
+ return rewriteParentRef(sp, key, ref)
+
+ case *spec.SchemaOrArray:
+ if refable.Schema != nil {
+ refable.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+ }
+
+ case *spec.SchemaOrBool:
+ if refable.Schema != nil {
+ refable.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+ }
+ default:
+ return fmt.Errorf("no schema with ref found at %s for %T", key, value)
+ }
+
+ return nil
+}
+
+func rewriteParentRef(sp *spec.Swagger, key string, ref spec.Ref) error {
+ parent, entry, pvalue, err := getParentFromKey(sp, key)
+ if err != nil {
+ return err
+ }
+
+ debugLog("rewriting holder for %T", pvalue)
+ switch container := pvalue.(type) {
+ case spec.Response:
+ if err := rewriteParentRef(sp, "#"+parent, ref); err != nil {
+ return err
+ }
+
+ case *spec.Response:
+ container.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case *spec.Responses:
+ statusCode, err := strconv.Atoi(entry)
+ if err != nil {
+ return fmt.Errorf("%s not a number: %w", key[1:], err)
+ }
+ resp := container.StatusCodeResponses[statusCode]
+ resp.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+ container.StatusCodeResponses[statusCode] = resp
+
+ case map[string]spec.Response:
+ resp := container[entry]
+ resp.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+ container[entry] = resp
+
+ case spec.Parameter:
+ if err := rewriteParentRef(sp, "#"+parent, ref); err != nil {
+ return err
+ }
+
+ case map[string]spec.Parameter:
+ param := container[entry]
+ param.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+ container[entry] = param
+
+ case []spec.Parameter:
+ idx, err := strconv.Atoi(entry)
+ if err != nil {
+ return fmt.Errorf("%s not a number: %w", key[1:], err)
+ }
+ param := container[idx]
+ param.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+ container[idx] = param
+
+ case spec.Definitions:
+ container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case map[string]spec.Schema:
+ container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case []spec.Schema:
+ idx, err := strconv.Atoi(entry)
+ if err != nil {
+ return fmt.Errorf("%s not a number: %w", key[1:], err)
+ }
+ container[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case *spec.SchemaOrArray:
+ // NOTE: this is necessarily an array - otherwise, the parent would be *Schema
+ idx, err := strconv.Atoi(entry)
+ if err != nil {
+ return fmt.Errorf("%s not a number: %w", key[1:], err)
+ }
+ container.Schemas[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case spec.SchemaProperties:
+ container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ // NOTE: can't have a case for *spec.SchemaOrBool: the parent in this case is a *Schema
+
+ default:
+ return fmt.Errorf("unhandled parent schema rewrite %s (%T)", key, pvalue)
+ }
+
+ return nil
+}
+
+// getPointerFromKey retrieves the content of the JSON pointer "key"
+func getPointerFromKey(sp interface{}, key string) (string, interface{}, error) {
+ switch sp.(type) {
+ case *spec.Schema:
+ case *spec.Swagger:
+ default:
+ panic("unexpected type used in getPointerFromKey")
+ }
+ if key == "#/" {
+ return "", sp, nil
+ }
+ // unescape chars in key, e.g. "{}" from path params
+ pth, _ := url.PathUnescape(key[1:])
+ ptr, err := jsonpointer.New(pth)
+ if err != nil {
+ return "", nil, err
+ }
+
+ value, _, err := ptr.Get(sp)
+ if err != nil {
+ debugLog("error when getting key: %s with path: %s", key, pth)
+
+ return "", nil, err
+ }
+
+ return pth, value, nil
+}
+
+// getParentFromKey retrieves the container of the JSON pointer "key"
+func getParentFromKey(sp interface{}, key string) (string, string, interface{}, error) {
+ switch sp.(type) {
+ case *spec.Schema:
+ case *spec.Swagger:
+ default:
+ panic("unexpected type used in getParentFromKey")
+ }
+ // unescape chars in key, e.g. "{}" from path params
+ pth, _ := url.PathUnescape(key[1:])
+
+ parent, entry := path.Dir(pth), path.Base(pth)
+ debugLog("getting schema holder at: %s, with entry: %s", parent, entry)
+
+ pptr, err := jsonpointer.New(parent)
+ if err != nil {
+ return "", "", nil, err
+ }
+ pvalue, _, err := pptr.Get(sp)
+ if err != nil {
+ return "", "", nil, fmt.Errorf("can't get parent for %s: %w", parent, err)
+ }
+
+ return parent, entry, pvalue, nil
+}
+
+// UpdateRef replaces a ref by another one
+func UpdateRef(sp interface{}, key string, ref spec.Ref) error {
+ switch sp.(type) {
+ case *spec.Schema:
+ case *spec.Swagger:
+ default:
+ panic("unexpected type used in UpdateRef")
+ }
+ debugLog("updating ref for %s with %s", key, ref.String())
+ pth, value, err := getPointerFromKey(sp, key)
+ if err != nil {
+ return err
+ }
+
+ switch refable := value.(type) {
+ case *spec.Schema:
+ refable.Ref = ref
+ case *spec.SchemaOrArray:
+ if refable.Schema != nil {
+ refable.Schema.Ref = ref
+ }
+ case *spec.SchemaOrBool:
+ if refable.Schema != nil {
+ refable.Schema.Ref = ref
+ }
+ case spec.Schema:
+ debugLog("rewriting holder for %T", refable)
+ _, entry, pvalue, erp := getParentFromKey(sp, key)
+ if erp != nil {
+ return err
+ }
+ switch container := pvalue.(type) {
+ case spec.Definitions:
+ container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case map[string]spec.Schema:
+ container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case []spec.Schema:
+ idx, err := strconv.Atoi(entry)
+ if err != nil {
+ return fmt.Errorf("%s not a number: %w", pth, err)
+ }
+ container[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case *spec.SchemaOrArray:
+ // NOTE: this is necessarily an array - otherwise, the parent would be *Schema
+ idx, err := strconv.Atoi(entry)
+ if err != nil {
+ return fmt.Errorf("%s not a number: %w", pth, err)
+ }
+ container.Schemas[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ case spec.SchemaProperties:
+ container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
+
+ // NOTE: can't have a case for *spec.SchemaOrBool: the parent in this case is a *Schema
+
+ default:
+ return fmt.Errorf("unhandled container type at %s: %T", key, value)
+ }
+
+ default:
+ return fmt.Errorf("no schema with ref found at %s for %T", key, value)
+ }
+
+ return nil
+}
+
+// UpdateRefWithSchema replaces a ref with a schema (i.e. re-inline schema)
+func UpdateRefWithSchema(sp *spec.Swagger, key string, sch *spec.Schema) error {
+ debugLog("updating ref for %s with schema", key)
+ pth, value, err := getPointerFromKey(sp, key)
+ if err != nil {
+ return err
+ }
+
+ switch refable := value.(type) {
+ case *spec.Schema:
+ *refable = *sch
+ case spec.Schema:
+ _, entry, pvalue, erp := getParentFromKey(sp, key)
+ if erp != nil {
+ return erp
+ }
+ switch container := pvalue.(type) {
+ case spec.Definitions:
+ container[entry] = *sch
+
+ case map[string]spec.Schema:
+ container[entry] = *sch
+
+ case []spec.Schema:
+ idx, err := strconv.Atoi(entry)
+ if err != nil {
+ return fmt.Errorf("%s not a number: %w", pth, err)
+ }
+ container[idx] = *sch
+
+ case *spec.SchemaOrArray:
+ // NOTE: this is necessarily an array - otherwise, the parent would be *Schema
+ idx, err := strconv.Atoi(entry)
+ if err != nil {
+ return fmt.Errorf("%s not a number: %w", pth, err)
+ }
+ container.Schemas[idx] = *sch
+
+ case spec.SchemaProperties:
+ container[entry] = *sch
+
+ // NOTE: can't have a case for *spec.SchemaOrBool: the parent in this case is a *Schema
+
+ default:
+ return fmt.Errorf("unhandled type for parent of [%s]: %T", key, value)
+ }
+ case *spec.SchemaOrArray:
+ *refable.Schema = *sch
+ case *spec.SchemaOrBool:
+ *refable.Schema = *sch
+ default:
+ return fmt.Errorf("no schema with ref found at %s for %T", key, value)
+ }
+
+ return nil
+}
+
+// DeepestRefResult holds the results from DeepestRef analysis
+type DeepestRefResult struct {
+ Ref spec.Ref
+ Schema *spec.Schema
+ Warnings []string
+}
+
+// DeepestRef finds the first definition ref, from a cascade of nested refs which are not definitions.
+// - if no definition is found, returns the deepest ref.
+// - pointers to external files are expanded
+//
+// NOTE: all external $ref's are assumed to be already expanded at this stage.
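+//
+// A hedged example: if a pointer such as "#/paths/~1pets/get/responses/200/schema"
+// resolves to a schema whose Ref is "#/definitions/pet", DeepestRef stops there and
+// returns that definition ref; only when no definition is found does it return the
+// deepest ref together with its resolved schema.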
+func DeepestRef(sp *spec.Swagger, opts *spec.ExpandOptions, ref spec.Ref) (*DeepestRefResult, error) {
+ if !ref.HasFragmentOnly {
+ // we found an external $ref, which is odd at this stage:
+ // do nothing on external $refs
+ return &DeepestRefResult{Ref: ref}, nil
+ }
+
+ currentRef := ref
+ visited := make(map[string]bool, 64)
+ warnings := make([]string, 0, 2)
+
+DOWNREF:
+ for currentRef.String() != "" {
+ if path.Dir(currentRef.String()) == definitionsPath {
+ // this is a top-level definition: stop here and return this ref
+ return &DeepestRefResult{Ref: currentRef}, nil
+ }
+
+ if _, beenThere := visited[currentRef.String()]; beenThere {
+ return nil,
+ fmt.Errorf("cannot resolve cyclic chain of pointers under %s", currentRef.String())
+ }
+
+ visited[currentRef.String()] = true
+ value, _, err := currentRef.GetPointer().Get(sp)
+ if err != nil {
+ return nil, err
+ }
+
+ switch refable := value.(type) {
+ case *spec.Schema:
+ if refable.Ref.String() == "" {
+ break DOWNREF
+ }
+ currentRef = refable.Ref
+
+ case spec.Schema:
+ if refable.Ref.String() == "" {
+ break DOWNREF
+ }
+ currentRef = refable.Ref
+
+ case *spec.SchemaOrArray:
+ if refable.Schema == nil || refable.Schema.Ref.String() == "" {
+ break DOWNREF
+ }
+ currentRef = refable.Schema.Ref
+
+ case *spec.SchemaOrBool:
+ if refable.Schema == nil || refable.Schema.Ref.String() == "" {
+ break DOWNREF
+ }
+ currentRef = refable.Schema.Ref
+
+ case spec.Response:
+ // a pointer points to a schema initially marshalled in responses section...
+ // Attempt to convert this to a schema. If this fails, the spec is invalid
+ asJSON, _ := refable.MarshalJSON()
+ var asSchema spec.Schema
+
+ err := asSchema.UnmarshalJSON(asJSON)
+ if err != nil {
+ return nil,
+ fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema, got: %T",
+ currentRef.String(), value)
+ }
+ warnings = append(warnings, fmt.Sprintf("found $ref %q (response) interpreted as schema", currentRef.String()))
+
+ if asSchema.Ref.String() == "" {
+ break DOWNREF
+ }
+ currentRef = asSchema.Ref
+
+ case spec.Parameter:
+ // a pointer points to a schema initially marshalled in parameters section...
+ // Attempt to convert this to a schema. If this fails, the spec is invalid
+ asJSON, _ := refable.MarshalJSON()
+ var asSchema spec.Schema
+ if err := asSchema.UnmarshalJSON(asJSON); err != nil {
+ return nil,
+ fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema, got: %T",
+ currentRef.String(), value)
+ }
+
+ warnings = append(warnings, fmt.Sprintf("found $ref %q (parameter) interpreted as schema", currentRef.String()))
+
+ if asSchema.Ref.String() == "" {
+ break DOWNREF
+ }
+ currentRef = asSchema.Ref
+
+ default:
+ return nil,
+ fmt.Errorf("unhandled type to resolve JSON pointer %s. Expected a Schema, got: %T",
+ currentRef.String(), value)
+ }
+ }
+
+ // assess what schema we're ending with
+ sch, erv := spec.ResolveRefWithBase(sp, &currentRef, opts)
+ if erv != nil {
+ return nil, erv
+ }
+
+ if sch == nil {
+ return nil, fmt.Errorf("no schema found at %s", currentRef.String())
+ }
+
+ return &DeepestRefResult{Ref: currentRef, Schema: sch, Warnings: warnings}, nil
+}
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/schutils/flatten_schema.go b/vendor/github.com/go-openapi/analysis/internal/flatten/schutils/flatten_schema.go
new file mode 100644
index 000000000..4590236e6
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/internal/flatten/schutils/flatten_schema.go
@@ -0,0 +1,29 @@
+// Package schutils provides tools to save or clone a schema
+// when flattening a spec.
+package schutils
+
+import (
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/swag"
+)
+
+// Save registers a schema as an entry in spec #/definitions
+func Save(sp *spec.Swagger, name string, schema *spec.Schema) {
+ if schema == nil {
+ return
+ }
+
+ if sp.Definitions == nil {
+ sp.Definitions = make(map[string]spec.Schema, 150)
+ }
+
+ sp.Definitions[name] = *schema
+}
+
+// Clone deep-clones a schema
+func Clone(schema *spec.Schema) *spec.Schema {
+ var sch spec.Schema
+ _ = swag.FromDynamicJSON(schema, &sch)
+
+ return &sch
+}
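+
+// Illustrative usage sketch (not part of the upstream source; names are
+// hypothetical): Save and Clone combine to register an independent copy of a
+// schema under #/definitions.
+//
+//	sp := &spec.Swagger{}
+//	original := spec.StringProperty()
+//	Save(sp, "MyString", Clone(original))
+//	// sp.Definitions["MyString"] now holds a deep copy, detached from original.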
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go b/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go
new file mode 100644
index 000000000..18e552ead
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/keys.go
@@ -0,0 +1,201 @@
+package sortref
+
+import (
+ "net/http"
+ "path"
+ "strconv"
+ "strings"
+
+ "github.com/go-openapi/jsonpointer"
+ "github.com/go-openapi/spec"
+)
+
+const (
+ paths = "paths"
+ responses = "responses"
+ parameters = "parameters"
+ definitions = "definitions"
+)
+
+var (
+ ignoredKeys map[string]struct{}
+ validMethods map[string]struct{}
+)
+
+func init() {
+ ignoredKeys = map[string]struct{}{
+ "schema": {},
+ "properties": {},
+ "not": {},
+ "anyOf": {},
+ "oneOf": {},
+ }
+
+ validMethods = map[string]struct{}{
+ "GET": {},
+ "HEAD": {},
+ "OPTIONS": {},
+ "PATCH": {},
+ "POST": {},
+ "PUT": {},
+ "DELETE": {},
+ }
+}
+
+// Key represents a key item constructed from /-separated segments
+type Key struct {
+ Segments int
+ Key string
+}
+
+// Keys is a sortable collection of Key items
+type Keys []Key
+
+func (k Keys) Len() int { return len(k) }
+func (k Keys) Swap(i, j int) { k[i], k[j] = k[j], k[i] }
+func (k Keys) Less(i, j int) bool {
+ return k[i].Segments > k[j].Segments || (k[i].Segments == k[j].Segments && k[i].Key < k[j].Key)
+}
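+
+// Illustrative sketch (not part of the upstream source): sorting Keys puts
+// entries with more segments first, then sorts lexically within equal depth.
+//
+//	ks := Keys{
+//		{Segments: 2, Key: "/definitions/B"},
+//		{Segments: 4, Key: "/definitions/A/properties/x"},
+//		{Segments: 2, Key: "/definitions/A"},
+//	}
+//	sort.Sort(ks)
+//	// order: /definitions/A/properties/x, /definitions/A, /definitions/B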
+
+// KeyParts constructs a SplitKey, decomposing all the /-separated segments of a key.
+func KeyParts(key string) SplitKey {
+ var res []string
+ for _, part := range strings.Split(key[1:], "/") {
+ if part != "" {
+ res = append(res, jsonpointer.Unescape(part))
+ }
+ }
+
+ return res
+}
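+
+// Illustrative sketch (not part of the upstream source): KeyParts drops the
+// leading "#", skips empty segments and unescapes JSON-pointer tokens.
+//
+//	KeyParts("#/paths/~1pets/get")
+//	// -> SplitKey{"paths", "/pets", "get"}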
+
+// SplitKey holds the parts of a /-separated key, so that their location may be determined.
+type SplitKey []string
+
+// IsDefinition is true when the split key is in the #/definitions section of a spec
+func (s SplitKey) IsDefinition() bool {
+ return len(s) > 1 && s[0] == definitions
+}
+
+// DefinitionName yields the name of the definition
+func (s SplitKey) DefinitionName() string {
+ if !s.IsDefinition() {
+ return ""
+ }
+
+ return s[1]
+}
+
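+// isKeyName reports whether segment i is a user-supplied key (e.g. a property
+// name) rather than a structural keyword: this is the case when the segment is
+// preceded by an odd number of consecutive "properties" segments.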
+func (s SplitKey) isKeyName(i int) bool {
+ if i <= 0 {
+ return false
+ }
+
+ count := 0
+ for idx := i - 1; idx > 0; idx-- {
+ if s[idx] != "properties" {
+ break
+ }
+ count++
+ }
+
+ return count%2 != 0
+}
+
+// PartAdder knows how to construct the components of a new name
+type PartAdder func(string) []string
+
+// BuildName builds a name from segments
+func (s SplitKey) BuildName(segments []string, startIndex int, adder PartAdder) string {
+ for i, part := range s[startIndex:] {
+ if _, ignored := ignoredKeys[part]; !ignored || s.isKeyName(startIndex+i) {
+ segments = append(segments, adder(part)...)
+ }
+ }
+
+ return strings.Join(segments, " ")
+}
+
+// IsOperation is true when the split key is in the operations section
+func (s SplitKey) IsOperation() bool {
+ return len(s) > 1 && s[0] == paths
+}
+
+// IsSharedOperationParam is true when the split key is in the parameters section of a path
+func (s SplitKey) IsSharedOperationParam() bool {
+ return len(s) > 2 && s[0] == paths && s[2] == parameters
+}
+
+// IsSharedParam is true when the split key is in the #/parameters section of a spec
+func (s SplitKey) IsSharedParam() bool {
+ return len(s) > 1 && s[0] == parameters
+}
+
+// IsOperationParam is true when the split key is in the parameters section of an operation
+func (s SplitKey) IsOperationParam() bool {
+ return len(s) > 3 && s[0] == paths && s[3] == parameters
+}
+
+// IsOperationResponse is true when the split key is in the responses section of an operation
+func (s SplitKey) IsOperationResponse() bool {
+ return len(s) > 3 && s[0] == paths && s[3] == responses
+}
+
+// IsSharedResponse is true when the split key is in the #/responses section of a spec
+func (s SplitKey) IsSharedResponse() bool {
+ return len(s) > 1 && s[0] == responses
+}
+
+// IsDefaultResponse is true when the split key is the default response for an operation
+func (s SplitKey) IsDefaultResponse() bool {
+ return len(s) > 4 && s[0] == paths && s[3] == responses && s[4] == "default"
+}
+
+// IsStatusCodeResponse is true when the split key is an operation response with a status code
+func (s SplitKey) IsStatusCodeResponse() bool {
+ isInt := func() bool {
+ _, err := strconv.Atoi(s[4])
+
+ return err == nil
+ }
+
+ return len(s) > 4 && s[0] == paths && s[3] == responses && isInt()
+}
+
+// ResponseName yields either the status code or "Default" for a response
+func (s SplitKey) ResponseName() string {
+ if s.IsStatusCodeResponse() {
+ code, _ := strconv.Atoi(s[4])
+
+ return http.StatusText(code)
+ }
+
+ if s.IsDefaultResponse() {
+ return "Default"
+ }
+
+ return ""
+}
+
+// PathItemRef constructs a $ref object from a split key of the form /{path}/{method}
+func (s SplitKey) PathItemRef() spec.Ref {
+ if len(s) < 3 {
+ return spec.Ref{}
+ }
+
+ pth, method := s[1], s[2]
+ if _, isValidMethod := validMethods[strings.ToUpper(method)]; !isValidMethod && !strings.HasPrefix(method, "x-") {
+ return spec.Ref{}
+ }
+
+ return spec.MustCreateRef("#" + path.Join("/", paths, jsonpointer.Escape(pth), strings.ToUpper(method)))
+}
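+
+// Illustrative sketch (not part of the upstream source): building an operation
+// $ref from a split key; the path is re-escaped and the method upper-cased.
+//
+//	KeyParts("#/paths/~1pets/get").PathItemRef()
+//	// -> ref "#/paths/~1pets/GET"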
+
+// PathRef constructs a $ref object from a split key of the form /paths/{reference}
+func (s SplitKey) PathRef() spec.Ref {
+ if !s.IsOperation() {
+ return spec.Ref{}
+ }
+
+ return spec.MustCreateRef("#" + path.Join("/", paths, jsonpointer.Escape(s[1])))
+}
diff --git a/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/sort_ref.go b/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/sort_ref.go
new file mode 100644
index 000000000..73243df87
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/internal/flatten/sortref/sort_ref.go
@@ -0,0 +1,141 @@
+package sortref
+
+import (
+ "reflect"
+ "sort"
+ "strings"
+
+ "github.com/go-openapi/analysis/internal/flatten/normalize"
+ "github.com/go-openapi/spec"
+)
+
+var depthGroupOrder = []string{
+ "sharedParam", "sharedResponse", "sharedOpParam", "opParam", "codeResponse", "defaultResponse", "definition",
+}
+
+type mapIterator struct {
+ len int
+ mapIter *reflect.MapIter
+}
+
+func (i *mapIterator) Next() bool {
+ return i.mapIter.Next()
+}
+
+func (i *mapIterator) Len() int {
+ return i.len
+}
+
+func (i *mapIterator) Key() string {
+ return i.mapIter.Key().String()
+}
+
+func mustMapIterator(anyMap interface{}) *mapIterator {
+ val := reflect.ValueOf(anyMap)
+
+ return &mapIterator{mapIter: val.MapRange(), len: val.Len()}
+}
+
+// DepthFirst sorts the keys of a map of anything. It groups keys by category
+// (shared params, op params, status code responses, default responses, definitions),
+// sorts each group internally by number of key segments then by lexical order,
+// and flattens the groups into a single list of keys.
+func DepthFirst(in interface{}) []string {
+ iterator := mustMapIterator(in)
+ sorted := make([]string, 0, iterator.Len())
+ grouped := make(map[string]Keys, iterator.Len())
+
+ for iterator.Next() {
+ k := iterator.Key()
+ split := KeyParts(k)
+ var pk string
+
+ if split.IsSharedOperationParam() {
+ pk = "sharedOpParam"
+ }
+ if split.IsOperationParam() {
+ pk = "opParam"
+ }
+ if split.IsStatusCodeResponse() {
+ pk = "codeResponse"
+ }
+ if split.IsDefaultResponse() {
+ pk = "defaultResponse"
+ }
+ if split.IsDefinition() {
+ pk = "definition"
+ }
+ if split.IsSharedParam() {
+ pk = "sharedParam"
+ }
+ if split.IsSharedResponse() {
+ pk = "sharedResponse"
+ }
+ grouped[pk] = append(grouped[pk], Key{Segments: len(split), Key: k})
+ }
+
+ for _, pk := range depthGroupOrder {
+ res := grouped[pk]
+ sort.Sort(res)
+
+ for _, v := range res {
+ sorted = append(sorted, v.Key)
+ }
+ }
+
+ return sorted
+}
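+
+// Illustrative sketch (not part of the upstream source; keys are
+// hypothetical): DepthFirst only reads the map keys, so any value type works.
+//
+//	in := map[string]struct{}{
+//		"#/definitions/A":                         {},
+//		"#/parameters/limit":                      {},
+//		"#/paths/~1pets/get/responses/200/schema": {},
+//	}
+//	DepthFirst(in)
+//	// -> shared params first, then status code responses, then definitions:
+//	// [#/parameters/limit #/paths/~1pets/get/responses/200/schema #/definitions/A]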
+
+// topmostRefs sorts refs by hierarchical depth, then lexicographically,
+// yielding refs ordered breadth-first.
+type topmostRefs []string
+
+func (k topmostRefs) Len() int { return len(k) }
+func (k topmostRefs) Swap(i, j int) { k[i], k[j] = k[j], k[i] }
+func (k topmostRefs) Less(i, j int) bool {
+ li, lj := len(strings.Split(k[i], "/")), len(strings.Split(k[j], "/"))
+ if li == lj {
+ return k[i] < k[j]
+ }
+
+ return li < lj
+}
+
+// TopmostFirst sorts references by depth
+func TopmostFirst(refs []string) []string {
+ res := topmostRefs(refs)
+ sort.Sort(res)
+
+ return res
+}
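+
+// Illustrative sketch (not part of the upstream source): shallower refs sort
+// first.
+//
+//	TopmostFirst([]string{"#/definitions/a/properties/b", "#/definitions/a"})
+//	// -> [#/definitions/a #/definitions/a/properties/b]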
+
+// RefRevIdx is a reverse index for references
+type RefRevIdx struct {
+ Ref spec.Ref
+ Keys []string
+}
+
+// ReverseIndex builds a reverse index for references in schemas
+func ReverseIndex(schemas map[string]spec.Ref, basePath string) map[string]RefRevIdx {
+ collected := make(map[string]RefRevIdx)
+ for key, schRef := range schemas {
+ // normalize paths before sorting,
+ // so we get together keys that are from the same external file
+ normalizedPath := normalize.Path(schRef, basePath)
+
+ entry, ok := collected[normalizedPath]
+ if ok {
+ entry.Keys = append(entry.Keys, key)
+ collected[normalizedPath] = entry
+
+ continue
+ }
+
+ collected[normalizedPath] = RefRevIdx{
+ Ref: schRef,
+ Keys: []string{key},
+ }
+ }
+
+ return collected
+}
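+
+// Illustrative sketch (not part of the upstream source; refs are
+// hypothetical): keys whose refs normalize to the same location share a
+// single index entry.
+//
+//	refs := map[string]spec.Ref{
+//		"#/definitions/a": spec.MustCreateRef("models.json#/definitions/model"),
+//		"#/definitions/b": spec.MustCreateRef("models.json#/definitions/model"),
+//	}
+//	idx := ReverseIndex(refs, "/tmp/spec.json")
+//	// idx holds one entry whose Keys contains both "#/definitions/a" and "#/definitions/b"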
diff --git a/vendor/github.com/go-openapi/analysis/mixin.go b/vendor/github.com/go-openapi/analysis/mixin.go
new file mode 100644
index 000000000..b25305264
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/mixin.go
@@ -0,0 +1,515 @@
+// Copyright 2015 go-swagger maintainers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package analysis
+
+import (
+ "fmt"
+ "reflect"
+
+ "github.com/go-openapi/spec"
+)
+
+// Mixin modifies the primary swagger spec by adding the paths and
+// definitions from the mixin specs. Top level parameters and
+// responses from the mixins are also carried over. Operation id
+// collisions are avoided by appending "Mixin<N>" but only if
+// needed.
+//
+// The following parts of primary are subject to merge, filling empty details
+// - Info
+// - BasePath
+// - Host
+// - ExternalDocs
+//
+// Consider calling FixEmptyResponseDescriptions() on the modified primary
+// if the specs were read from storage and were valid to start with.
+//
+// Entries in "paths", "definitions", "parameters" and "responses" are
+// added to the primary in the order of the given mixins. If the entry
+// already exists in primary it is skipped with a warning message.
+//
+// The count of skipped entries (from collisions) is returned so any
+// deviation from the number expected can flag a warning in your build
+// scripts. Carefully review the collisions before accepting them;
+// consider renaming things if possible.
+//
+// No key normalization takes place (paths, type defs,
+// etc.). Ensure keys are canonical if your downstream tools
+// perform key normalization of any form.
+//
+// Merging of schemes (http, https) and of the consumes/produces lists does
+// not account for collisions.
+func Mixin(primary *spec.Swagger, mixins ...*spec.Swagger) []string {
+ skipped := make([]string, 0, len(mixins))
+ opIds := getOpIds(primary)
+ initPrimary(primary)
+
+ for i, m := range mixins {
+ skipped = append(skipped, mergeSwaggerProps(primary, m)...)
+
+ skipped = append(skipped, mergeConsumes(primary, m)...)
+
+ skipped = append(skipped, mergeProduces(primary, m)...)
+
+ skipped = append(skipped, mergeTags(primary, m)...)
+
+ skipped = append(skipped, mergeSchemes(primary, m)...)
+
+ skipped = append(skipped, mergeSecurityDefinitions(primary, m)...)
+
+ skipped = append(skipped, mergeSecurityRequirements(primary, m)...)
+
+ skipped = append(skipped, mergeDefinitions(primary, m)...)
+
+ // merging paths requires a map of operationIDs to work with
+ skipped = append(skipped, mergePaths(primary, m, opIds, i)...)
+
+ skipped = append(skipped, mergeParameters(primary, m)...)
+
+ skipped = append(skipped, mergeResponses(primary, m)...)
+ }
+
+ return skipped
+}
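+
+// Illustrative call-site sketch (not part of the upstream source; the specs
+// are hypothetical):
+//
+//	skipped := Mixin(primary, mixin1, mixin2)
+//	if len(skipped) > 0 {
+//		log.Printf("mixin collisions: %v", skipped)
+//	}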
+
+// getOpIds extracts all the paths.<path>.operationIds from the given
+// spec and returns them as the keys in a map with 'true' values.
+func getOpIds(s *spec.Swagger) map[string]bool {
+ rv := make(map[string]bool)
+ if s.Paths == nil {
+ return rv
+ }
+
+ for _, v := range s.Paths.Paths {
+ piops := pathItemOps(v)
+
+ for _, op := range piops {
+ rv[op.ID] = true
+ }
+ }
+
+ return rv
+}
+
+func pathItemOps(p spec.PathItem) []*spec.Operation {
+ var rv []*spec.Operation
+ rv = appendOp(rv, p.Get)
+ rv = appendOp(rv, p.Put)
+ rv = appendOp(rv, p.Post)
+ rv = appendOp(rv, p.Delete)
+ rv = appendOp(rv, p.Head)
+ rv = appendOp(rv, p.Patch)
+
+ return rv
+}
+
+func appendOp(ops []*spec.Operation, op *spec.Operation) []*spec.Operation {
+ if op == nil {
+ return ops
+ }
+
+ return append(ops, op)
+}
+
+func mergeSecurityDefinitions(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+ for k, v := range m.SecurityDefinitions {
+ if _, exists := primary.SecurityDefinitions[k]; exists {
+ warn := fmt.Sprintf(
+ "SecurityDefinitions entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
+ skipped = append(skipped, warn)
+
+ continue
+ }
+
+ primary.SecurityDefinitions[k] = v
+ }
+
+ return
+}
+
+func mergeSecurityRequirements(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+ for _, v := range m.Security {
+ found := false
+ for _, vv := range primary.Security {
+ if reflect.DeepEqual(v, vv) {
+ found = true
+
+ break
+ }
+ }
+
+ if found {
+ warn := fmt.Sprintf(
+ "Security requirement: '%v' already exists in primary or higher priority mixin, skipping\n", v)
+ skipped = append(skipped, warn)
+
+ continue
+ }
+ primary.Security = append(primary.Security, v)
+ }
+
+ return
+}
+
+func mergeDefinitions(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+ for k, v := range m.Definitions {
+ // Assume that name collisions represent an IDENTICAL type; review collisions carefully.
+ if _, exists := primary.Definitions[k]; exists {
+ warn := fmt.Sprintf(
+ "definitions entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
+ skipped = append(skipped, warn)
+
+ continue
+ }
+ primary.Definitions[k] = v
+ }
+
+ return
+}
+
+func mergePaths(primary *spec.Swagger, m *spec.Swagger, opIds map[string]bool, mixIndex int) (skipped []string) {
+ if m.Paths != nil {
+ for k, v := range m.Paths.Paths {
+ if _, exists := primary.Paths.Paths[k]; exists {
+ warn := fmt.Sprintf(
+ "paths entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
+ skipped = append(skipped, warn)
+
+ continue
+ }
+
+ // Swagger requires that operationIds be
+ // unique within a spec. If we find a
+ // collision we append "Mixin<N>" to the
+ // operationId we are adding, where N is
+ // the mixin index. We assume that the
+ // operationIds within each provided spec are already unique.
+ piops := pathItemOps(v)
+ for _, piop := range piops {
+ if opIds[piop.ID] {
+ piop.ID = fmt.Sprintf("%v%v%v", piop.ID, "Mixin", mixIndex)
+ }
+ opIds[piop.ID] = true
+ }
+ primary.Paths.Paths[k] = v
+ }
+ }
+
+ return
+}
+
+func mergeParameters(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+ for k, v := range m.Parameters {
+ // could try to rename on conflict but would
+ // have to fix $refs in the mixin. Complain
+ // for now
+ if _, exists := primary.Parameters[k]; exists {
+ warn := fmt.Sprintf(
+ "top level parameters entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
+ skipped = append(skipped, warn)
+
+ continue
+ }
+ primary.Parameters[k] = v
+ }
+
+ return
+}
+
+func mergeResponses(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+ for k, v := range m.Responses {
+ // could try to rename on conflict but would
+ // have to fix $refs in the mixin. Complain
+ // for now
+ if _, exists := primary.Responses[k]; exists {
+ warn := fmt.Sprintf(
+ "top level responses entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
+ skipped = append(skipped, warn)
+
+ continue
+ }
+ primary.Responses[k] = v
+ }
+
+ return skipped
+}
+
+func mergeConsumes(primary *spec.Swagger, m *spec.Swagger) []string {
+ for _, v := range m.Consumes {
+ found := false
+ for _, vv := range primary.Consumes {
+ if v == vv {
+ found = true
+
+ break
+ }
+ }
+
+ if found {
+ // no warning here: we just skip it
+ continue
+ }
+ primary.Consumes = append(primary.Consumes, v)
+ }
+
+ return []string{}
+}
+
+func mergeProduces(primary *spec.Swagger, m *spec.Swagger) []string {
+ for _, v := range m.Produces {
+ found := false
+ for _, vv := range primary.Produces {
+ if v == vv {
+ found = true
+
+ break
+ }
+ }
+
+ if found {
+ // no warning here: we just skip it
+ continue
+ }
+ primary.Produces = append(primary.Produces, v)
+ }
+
+ return []string{}
+}
+
+func mergeTags(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
+ for _, v := range m.Tags {
+ found := false
+ for _, vv := range primary.Tags {
+ if v.Name == vv.Name {
+ found = true
+
+ break
+ }
+ }
+
+ if found {
+ warn := fmt.Sprintf(
+ "top level tags entry with name '%v' already exists in primary or higher priority mixin, skipping\n",
+ v.Name,
+ )
+ skipped = append(skipped, warn)
+
+ continue
+ }
+
+ primary.Tags = append(primary.Tags, v)
+ }
+
+ return
+}
+
+func mergeSchemes(primary *spec.Swagger, m *spec.Swagger) []string {
+ for _, v := range m.Schemes {
+ found := false
+ for _, vv := range primary.Schemes {
+ if v == vv {
+ found = true
+
+ break
+ }
+ }
+
+ if found {
+ // no warning here: we just skip it
+ continue
+ }
+ primary.Schemes = append(primary.Schemes, v)
+ }
+
+ return []string{}
+}
+
+func mergeSwaggerProps(primary *spec.Swagger, m *spec.Swagger) []string {
+ var skipped, skippedInfo, skippedDocs []string
+
+ primary.Extensions, skipped = mergeExtensions(primary.Extensions, m.Extensions)
+
+ // merging details in swagger top properties
+ if primary.Host == "" {
+ primary.Host = m.Host
+ }
+
+ if primary.BasePath == "" {
+ primary.BasePath = m.BasePath
+ }
+
+ if primary.Info == nil {
+ primary.Info = m.Info
+ } else if m.Info != nil {
+ skippedInfo = mergeInfo(primary.Info, m.Info)
+ skipped = append(skipped, skippedInfo...)
+ }
+
+ if primary.ExternalDocs == nil {
+ primary.ExternalDocs = m.ExternalDocs
+ } else if m.ExternalDocs != nil {
+ skippedDocs = mergeExternalDocs(primary.ExternalDocs, m.ExternalDocs)
+ skipped = append(skipped, skippedDocs...)
+ }
+
+ return skipped
+}
+
+// nolint: unparam
+func mergeExternalDocs(primary *spec.ExternalDocumentation, m *spec.ExternalDocumentation) []string {
+ if primary.Description == "" {
+ primary.Description = m.Description
+ }
+
+ if primary.URL == "" {
+ primary.URL = m.URL
+ }
+
+ return nil
+}
+
+func mergeInfo(primary *spec.Info, m *spec.Info) []string {
+ var sk, skipped []string
+
+ primary.Extensions, sk = mergeExtensions(primary.Extensions, m.Extensions)
+ skipped = append(skipped, sk...)
+
+ if primary.Description == "" {
+ primary.Description = m.Description
+ }
+
+ if primary.Title == "" {
+ primary.Description = m.Description
+ }
+
+ if primary.TermsOfService == "" {
+ primary.TermsOfService = m.TermsOfService
+ }
+
+ if primary.Version == "" {
+ primary.Version = m.Version
+ }
+
+ if primary.Contact == nil {
+ primary.Contact = m.Contact
+ } else if m.Contact != nil {
+ var csk []string
+ primary.Contact.Extensions, csk = mergeExtensions(primary.Contact.Extensions, m.Contact.Extensions)
+ skipped = append(skipped, csk...)
+
+ if primary.Contact.Name == "" {
+ primary.Contact.Name = m.Contact.Name
+ }
+
+ if primary.Contact.URL == "" {
+ primary.Contact.URL = m.Contact.URL
+ }
+
+ if primary.Contact.Email == "" {
+ primary.Contact.Email = m.Contact.Email
+ }
+ }
+
+ if primary.License == nil {
+ primary.License = m.License
+ } else if m.License != nil {
+ var lsk []string
+ primary.License.Extensions, lsk = mergeExtensions(primary.License.Extensions, m.License.Extensions)
+ skipped = append(skipped, lsk...)
+
+ if primary.License.Name == "" {
+ primary.License.Name = m.License.Name
+ }
+
+ if primary.License.URL == "" {
+ primary.License.URL = m.License.URL
+ }
+ }
+
+ return skipped
+}
+
+func mergeExtensions(primary spec.Extensions, m spec.Extensions) (result spec.Extensions, skipped []string) {
+ if primary == nil {
+ result = m
+
+ return
+ }
+
+ if m == nil {
+ result = primary
+
+ return
+ }
+
+ result = primary
+ for k, v := range m {
+ if _, found := primary[k]; found {
+ skipped = append(skipped, k)
+
+ continue
+ }
+
+ primary[k] = v
+ }
+
+ return
+}
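+
+// Illustrative sketch (not part of the upstream source): primary extensions
+// win; colliding keys from the mixin are reported back as skipped.
+//
+//	res, skipped := mergeExtensions(
+//		spec.Extensions{"x-a": 1},
+//		spec.Extensions{"x-a": 2, "x-b": 3},
+//	)
+//	// res == spec.Extensions{"x-a": 1, "x-b": 3}; skipped == ["x-a"]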
+
+func initPrimary(primary *spec.Swagger) {
+ if primary.SecurityDefinitions == nil {
+ primary.SecurityDefinitions = make(map[string]*spec.SecurityScheme)
+ }
+
+ if primary.Security == nil {
+ primary.Security = make([]map[string][]string, 0, 10)
+ }
+
+ if primary.Produces == nil {
+ primary.Produces = make([]string, 0, 10)
+ }
+
+ if primary.Consumes == nil {
+ primary.Consumes = make([]string, 0, 10)
+ }
+
+ if primary.Tags == nil {
+ primary.Tags = make([]spec.Tag, 0, 10)
+ }
+
+ if primary.Schemes == nil {
+ primary.Schemes = make([]string, 0, 10)
+ }
+
+ if primary.Paths == nil {
+ primary.Paths = &spec.Paths{Paths: make(map[string]spec.PathItem)}
+ }
+
+ if primary.Paths.Paths == nil {
+ primary.Paths.Paths = make(map[string]spec.PathItem)
+ }
+
+ if primary.Definitions == nil {
+ primary.Definitions = make(spec.Definitions)
+ }
+
+ if primary.Parameters == nil {
+ primary.Parameters = make(map[string]spec.Parameter)
+ }
+
+ if primary.Responses == nil {
+ primary.Responses = make(map[string]spec.Response)
+ }
+}
diff --git a/vendor/github.com/go-openapi/analysis/schema.go b/vendor/github.com/go-openapi/analysis/schema.go
new file mode 100644
index 000000000..fc055095c
--- /dev/null
+++ b/vendor/github.com/go-openapi/analysis/schema.go
@@ -0,0 +1,256 @@
+package analysis
+
+import (
+ "fmt"
+
+ "github.com/go-openapi/spec"
+ "github.com/go-openapi/strfmt"
+)
+
+// SchemaOpts configures the schema analyzer
+type SchemaOpts struct {
+ Schema *spec.Schema
+ Root interface{}
+ BasePath string
+ _ struct{}
+}
+
+// Schema analyzes a schema and classifies it according to known
+// patterns.
+func Schema(opts SchemaOpts) (*AnalyzedSchema, error) {
+ if opts.Schema == nil {
+ return nil, fmt.Errorf("no schema to analyze")
+ }
+
+ a := &AnalyzedSchema{
+ schema: opts.Schema,
+ root: opts.Root,
+ basePath: opts.BasePath,
+ }
+
+ a.initializeFlags()
+ a.inferKnownType()
+ a.inferEnum()
+ a.inferBaseType()
+
+ if err := a.inferMap(); err != nil {
+ return nil, err
+ }
+ if err := a.inferArray(); err != nil {
+ return nil, err
+ }
+
+ a.inferTuple()
+
+ if err := a.inferFromRef(); err != nil {
+ return nil, err
+ }
+
+ a.inferSimpleSchema()
+
+ return a, nil
+}
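+
+// Illustrative sketch (not part of the upstream source): analyzing a
+// hypothetical map-of-strings schema.
+//
+//	sch := spec.MapProperty(spec.StringProperty())
+//	an, err := Schema(SchemaOpts{Schema: sch})
+//	// on success, an.IsMap and an.IsSimpleMap are both true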
+
+// AnalyzedSchema indicates what the schema represents
+type AnalyzedSchema struct {
+ schema *spec.Schema
+ root interface{}
+ basePath string
+
+ hasProps bool
+ hasAllOf bool
+ hasItems bool
+ hasAdditionalProps bool
+ hasAdditionalItems bool
+ hasRef bool
+
+ IsKnownType bool
+ IsSimpleSchema bool
+ IsArray bool
+ IsSimpleArray bool
+ IsMap bool
+ IsSimpleMap bool
+ IsExtendedObject bool
+ IsTuple bool
+ IsTupleWithExtra bool
+ IsBaseType bool
+ IsEnum bool
+}
+
+// inherits copies value fields from another analyzed schema onto this one
+func (a *AnalyzedSchema) inherits(other *AnalyzedSchema) {
+ if other == nil {
+ return
+ }
+ a.hasProps = other.hasProps
+ a.hasAllOf = other.hasAllOf
+ a.hasItems = other.hasItems
+ a.hasAdditionalItems = other.hasAdditionalItems
+ a.hasAdditionalProps = other.hasAdditionalProps
+ a.hasRef = other.hasRef
+
+ a.IsKnownType = other.IsKnownType
+ a.IsSimpleSchema = other.IsSimpleSchema
+ a.IsArray = other.IsArray
+ a.IsSimpleArray = other.IsSimpleArray
+ a.IsMap = other.IsMap
+ a.IsSimpleMap = other.IsSimpleMap
+ a.IsExtendedObject = other.IsExtendedObject
+ a.IsTuple = other.IsTuple
+ a.IsTupleWithExtra = other.IsTupleWithExtra
+ a.IsBaseType = other.IsBaseType
+ a.IsEnum = other.IsEnum
+}
+
+func (a *AnalyzedSchema) inferFromRef() error {
+ if a.hasRef {
+ sch := new(spec.Schema)
+ sch.Ref = a.schema.Ref
+ err := spec.ExpandSchema(sch, a.root, nil)
+ if err != nil {
+ return err
+ }
+ rsch, err := Schema(SchemaOpts{
+ Schema: sch,
+ Root: a.root,
+ BasePath: a.basePath,
+ })
+ if err != nil {
+ // NOTE(fredbi): currently the only cause for errors is
+ // unresolved ref. Since spec.ExpandSchema() expands the
+ // schema recursively, there is no chance to get there,
+ // until we add more causes for error in this schema analysis.
+ return err
+ }
+ a.inherits(rsch)
+ }
+
+ return nil
+}
+
+func (a *AnalyzedSchema) inferSimpleSchema() {
+ a.IsSimpleSchema = a.IsKnownType || a.IsSimpleArray || a.IsSimpleMap
+}
+
+func (a *AnalyzedSchema) inferKnownType() {
+ tpe := a.schema.Type
+ format := a.schema.Format
+ a.IsKnownType = tpe.Contains("boolean") ||
+ tpe.Contains("integer") ||
+ tpe.Contains("number") ||
+ tpe.Contains("string") ||
+ (format != "" && strfmt.Default.ContainsName(format)) ||
+ (a.isObjectType() && !a.hasProps && !a.hasAllOf && !a.hasAdditionalProps && !a.hasAdditionalItems)
+}
+
+func (a *AnalyzedSchema) inferMap() error {
+ if !a.isObjectType() {
+ return nil
+ }
+
+ hasExtra := a.hasProps || a.hasAllOf
+ a.IsMap = a.hasAdditionalProps && !hasExtra
+ a.IsExtendedObject = a.hasAdditionalProps && hasExtra
+
+ if !a.IsMap {
+ return nil
+ }
+
+ // maps
+ if a.schema.AdditionalProperties.Schema != nil {
+ msch, err := Schema(SchemaOpts{
+ Schema: a.schema.AdditionalProperties.Schema,
+ Root: a.root,
+ BasePath: a.basePath,
+ })
+ if err != nil {
+ return err
+ }
+ a.IsSimpleMap = msch.IsSimpleSchema
+ } else if a.schema.AdditionalProperties.Allows {
+ a.IsSimpleMap = true
+ }
+
+ return nil
+}
+
+func (a *AnalyzedSchema) inferArray() error {
+ // an array has Items defined as an object schema, otherwise we qualify this JSON array as a tuple
+ // (yes, even if the Items array contains only one element).
+ // Arrays in JSON schema may be unrestricted (i.e. no Items specified).
+ // Note that arrays in Swagger MUST have Items. Nonetheless, we analyze unrestricted arrays.
+ //
+ // NOTE: the spec package misses the distinction between:
+ // items: [] and items: {}, so we consider both arrays here.
+ a.IsArray = a.isArrayType() && (a.schema.Items == nil || a.schema.Items.Schemas == nil)
+ if a.IsArray && a.hasItems {
+ if a.schema.Items.Schema != nil {
+ itsch, err := Schema(SchemaOpts{
+ Schema: a.schema.Items.Schema,
+ Root: a.root,
+ BasePath: a.basePath,
+ })
+ if err != nil {
+ return err
+ }
+
+ a.IsSimpleArray = itsch.IsSimpleSchema
+ }
+ }
+
+ if a.IsArray && !a.hasItems {
+ a.IsSimpleArray = true
+ }
+
+ return nil
+}
+
+func (a *AnalyzedSchema) inferTuple() {
+ tuple := a.hasItems && a.schema.Items.Schemas != nil
+ a.IsTuple = tuple && !a.hasAdditionalItems
+ a.IsTupleWithExtra = tuple && a.hasAdditionalItems
+}
+
+func (a *AnalyzedSchema) inferBaseType() {
+ if a.isObjectType() {
+ a.IsBaseType = a.schema.Discriminator != ""
+ }
+}
+
+func (a *AnalyzedSchema) inferEnum() {
+ a.IsEnum = len(a.schema.Enum) > 0
+}
+
+func (a *AnalyzedSchema) initializeFlags() {
+ a.hasProps = len(a.schema.Properties) > 0
+ a.hasAllOf = len(a.schema.AllOf) > 0
+ a.hasRef = a.schema.Ref.String() != ""
+
+ a.hasItems = a.schema.Items != nil &&
+ (a.schema.Items.Schema != nil || len(a.schema.Items.Schemas) > 0)
+
+ a.hasAdditionalProps = a.schema.AdditionalProperties != nil &&
+ (a.schema.AdditionalProperties.Schema != nil || a.schema.AdditionalProperties.Allows)
+
+ a.hasAdditionalItems = a.schema.AdditionalItems != nil &&
+ (a.schema.AdditionalItems.Schema != nil || a.schema.AdditionalItems.Allows)
+}
+
+func (a *AnalyzedSchema) isObjectType() bool {
+ return !a.hasRef && (a.schema.Type == nil || a.schema.Type.Contains("") || a.schema.Type.Contains("object"))
+}
+
+func (a *AnalyzedSchema) isArrayType() bool {
+ return !a.hasRef && (a.schema.Type != nil && a.schema.Type.Contains("array"))
+}
+
+// isAnalyzedAsComplex determines if an analyzed schema is eligible for flattening (i.e. it is "complex").
+//
+// Complex means the schema is none of:
+// - a simple type (primitive)
+// - an array of something (items are possibly complex ; if this is the case, items will generate a definition)
+// - a map of something (additionalProperties are possibly complex ; if this is the case, additionalProperties will
+// generate a definition)
+func (a *AnalyzedSchema) isAnalyzedAsComplex() bool {
+ return !a.IsSimpleSchema && !a.IsArray && !a.IsMap
+}